// NOTE(review): removed dataset-extraction residue (markdown table header) that was not part of this Java source.
// $Id$
/*
* CommandHelper
* Copyright (C) 2010 sk89q <http://www.sk89q.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.laytonsmith.commandhelper;
import com.laytonsmith.PureUtilities.ClassLoading.ClassDiscovery;
import com.laytonsmith.PureUtilities.ClassLoading.ClassDiscoveryCache;
import com.laytonsmith.PureUtilities.Common.FileUtil;
import com.laytonsmith.PureUtilities.Common.OSUtils;
import com.laytonsmith.PureUtilities.Common.ReflectionUtils;
import com.laytonsmith.PureUtilities.Common.StreamUtils;
import com.laytonsmith.PureUtilities.Common.StringUtils;
import com.laytonsmith.PureUtilities.ExecutionQueue;
import com.laytonsmith.PureUtilities.SimpleVersion;
import com.laytonsmith.PureUtilities.TermColors;
import com.laytonsmith.abstraction.Implementation;
import com.laytonsmith.abstraction.MCCommand;
import com.laytonsmith.abstraction.MCCommandSender;
import com.laytonsmith.abstraction.MCPlayer;
import com.laytonsmith.abstraction.MCServer;
import com.laytonsmith.abstraction.StaticLayer;
import com.laytonsmith.abstraction.bukkit.BukkitConvertor;
import com.laytonsmith.abstraction.bukkit.BukkitMCBlockCommandSender;
import com.laytonsmith.abstraction.bukkit.BukkitMCCommand;
import com.laytonsmith.abstraction.bukkit.entities.BukkitMCPlayer;
import com.laytonsmith.abstraction.enums.MCChatColor;
import com.laytonsmith.abstraction.enums.bukkit.BukkitMCBiomeType;
import com.laytonsmith.abstraction.enums.bukkit.BukkitMCEntityType;
import com.laytonsmith.annotations.EventIdentifier;
import com.laytonsmith.core.AliasCore;
import com.laytonsmith.core.CHLog;
import com.laytonsmith.core.Installer;
import com.laytonsmith.core.Main;
import com.laytonsmith.core.MethodScriptExecutionQueue;
import com.laytonsmith.core.MethodScriptFileLocations;
import com.laytonsmith.core.Prefs;
import com.laytonsmith.core.Profiles;
import com.laytonsmith.core.Static;
import com.laytonsmith.core.UpgradeLog;
import com.laytonsmith.core.constructs.Target;
import com.laytonsmith.core.extensions.ExtensionManager;
import com.laytonsmith.core.profiler.Profiler;
import com.laytonsmith.persistence.DataSourceException;
import com.laytonsmith.persistence.PersistenceNetwork;
import com.laytonsmith.persistence.ReadOnlyException;
import org.bukkit.Server;
import org.bukkit.command.BlockCommandSender;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.EventException;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerCommandPreprocessEvent;
import org.bukkit.event.server.ServerCommandEvent;
import org.bukkit.plugin.EventExecutor;
import org.bukkit.plugin.RegisteredListener;
import org.bukkit.plugin.TimedRegisteredListener;
import org.bukkit.plugin.java.JavaPlugin;
//import org.mcstats.Metrics;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Entry point for the plugin.
*
* @author sk89q
*/
public class CommandHelperPlugin extends JavaPlugin {
//Do not rename this field, it is changed reflectively in unit tests.
private static AliasCore ac;
public static MCServer myServer;
public static SimpleVersion version;
public static CommandHelperPlugin self;
public static ExecutorService hostnameLookupThreadPool;
public static ConcurrentHashMap<String, String> hostnameLookupCache;
private static int hostnameThreadPoolID = 0;
public Profiler profiler;
public final ExecutionQueue executionQueue = new MethodScriptExecutionQueue("CommandHelperExecutionQueue", "default");
public PersistenceNetwork persistenceNetwork;
public Profiles profiles;
//public boolean firstLoad = true;
public long interpreterUnlockedUntil = 0;
private Thread loadingThread;
/**
* Listener for the plugin system.
*/
final CommandHelperListener playerListener =
new CommandHelperListener(this);
/**
* Interpreter listener
*/
public final CommandHelperInterpreterListener interpreterListener =
new CommandHelperInterpreterListener(this);
/**
* Server Command Listener, for console commands
*/
final CommandHelperServerListener serverListener =
new CommandHelperServerListener();
@Override
public void onLoad() {
Implementation.setServerType(Implementation.Type.BUKKIT);
CommandHelperFileLocations.setDefault(new CommandHelperFileLocations());
CommandHelperFileLocations.getDefault().getCacheDirectory().mkdirs();
CommandHelperFileLocations.getDefault().getPreferencesDirectory().mkdirs();
UpgradeLog upgradeLog = new UpgradeLog(CommandHelperFileLocations.getDefault().getUpgradeLogFile());
upgradeLog.addUpgradeTask(new UpgradeLog.UpgradeTask() {
String version = null;
@Override
public boolean doRun() {
try {
version = "versionUpgrade-" + Main.loadSelfVersion();
return !hasBreadcrumb(version);
} catch (Exception ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
return false;
}
}
@Override
public void run() {
leaveBreadcrumb(version);
}
});
upgradeLog.addUpgradeTask(new UpgradeLog.UpgradeTask() {
File oldPreferences = new File(CommandHelperFileLocations.getDefault().getConfigDirectory(),
"preferences.txt");
@Override
public boolean doRun() {
return oldPreferences.exists()
&& !CommandHelperFileLocations.getDefault().getPreferencesFile().exists();
}
@Override
public void run() {
try {
Prefs.init(oldPreferences);
Prefs.SetColors();
Logger.getLogger("Minecraft").log(Level.INFO,
TermColors.YELLOW + "[" + Implementation.GetServerType().getBranding() + "] Old preferences.txt file detected. Moving preferences.txt to preferences.ini." + TermColors.reset());
FileUtil.copy(oldPreferences, CommandHelperFileLocations.getDefault().getPreferencesFile(), true);
oldPreferences.deleteOnExit();
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
upgradeLog.addUpgradeTask(new UpgradeLog.UpgradeTask() {
File cd = CommandHelperFileLocations.getDefault().getConfigDirectory();
private final String breadcrumb = "move-preference-files-v1.0";
@Override
public boolean doRun() {
return !hasBreadcrumb(breadcrumb)
&& new File(cd, "preferences.ini").exists();
}
@Override
public void run() {
//We need to move the following files:
//1. persistance.config to prefs/persistence.ini (note the correct spelling)
//2. preferences.ini to prefs/preferences.ini
//3. profiler.config to prefs/profiler.ini
//4. sql-profiles.xml to prefs/sql-profiles.xml
//5. We are not moving loggerPreferences.txt, instead just deleting it,
// because the defaults have changed. Most people aren't using this feature
// anyways. (The new one will write itself out upon installation.)
//Other than the config/prefs directory, we are hardcoding all the values, so
//we know they are correct (for old values). Any errors will be reported, but will not
//stop the entire process.
CommandHelperFileLocations p = CommandHelperFileLocations.getDefault();
try {
FileUtil.move(new File(cd, "persistance.config"), p.getPersistenceConfig());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
try {
FileUtil.move(new File(cd, "preferences.ini"), p.getPreferencesFile());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
try {
FileUtil.move(new File(cd, "profiler.config"), p.getProfilerConfigFile());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
try {
FileUtil.move(new File(cd, "sql-profiles.xml"), p.getProfilesFile());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
new File(cd, "logs/debug/loggerPreferences.txt").delete();
leaveBreadcrumb(breadcrumb);
StreamUtils.GetSystemOut().println("CommandHelper: Your preferences files have all been relocated to " + p.getPreferencesDirectory());
StreamUtils.GetSystemOut().println("CommandHelper: The loggerPreferences.txt file has been deleted and re-created, as the defaults have changed.");
}
});
// Renames the sql-profiles.xml file to the new name.
upgradeLog.addUpgradeTask(new UpgradeLog.UpgradeTask() {
// This should never change
private final File oldProfilesFile = new File(MethodScriptFileLocations.getDefault().getPreferencesDirectory(), "sql-profiles.xml");
@Override
public boolean doRun() {
return oldProfilesFile.exists();
}
@Override
public void run() {
try {
FileUtil.move(oldProfilesFile, MethodScriptFileLocations.getDefault().getProfilesFile());
StreamUtils.GetSystemOut().println("CommandHelper: sql-profiles.xml has been renamed to " + MethodScriptFileLocations.getDefault().getProfilesFile().getName());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
try {
upgradeLog.runTasks();
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
try{
Prefs.init(CommandHelperFileLocations.getDefault().getPreferencesFile());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
Prefs.SetColors();
CHLog.initialize(CommandHelperFileLocations.getDefault().getConfigDirectory());
Installer.Install(CommandHelperFileLocations.getDefault().getConfigDirectory());
if(new SimpleVersion(System.getProperty("java.version")).lt(new SimpleVersion("1.7"))){
CHLog.GetLogger().w(CHLog.Tags.GENERAL, "You appear to be running a version of Java older than Java 7. You should have plans"
+ " to upgrade at some point, as " + Implementation.GetServerType().getBranding() + " may require it at some point.", Target.UNKNOWN);
}
self = this;
ClassDiscoveryCache cdc = new ClassDiscoveryCache(CommandHelperFileLocations.getDefault().getCacheDirectory());
cdc.setLogger(Logger.getLogger(CommandHelperPlugin.class.getName()));
ClassDiscovery.getDefaultInstance().setClassDiscoveryCache(cdc);
ClassDiscovery.getDefaultInstance().addDiscoveryLocation(ClassDiscovery.GetClassContainer(CommandHelperPlugin.class));
ClassDiscovery.getDefaultInstance().addDiscoveryLocation(ClassDiscovery.GetClassContainer(Server.class));
StreamUtils.GetSystemOut().println("[CommandHelper] Running initial class discovery,"
+ " this will probably take a few seconds...");
myServer = StaticLayer.GetServer();
StreamUtils.GetSystemOut().println("[CommandHelper] Loading extensions in the background...");
loadingThread = new Thread("extensionloader") {
@Override
public void run() {
ExtensionManager.AddDiscoveryLocation(CommandHelperFileLocations.getDefault().getExtensionsDirectory());
if (OSUtils.GetOS() == OSUtils.OS.WINDOWS) {
// Using StreamUtils.GetSystemOut() here instead of the logger as the logger doesn't
// immediately print to the console.
StreamUtils.GetSystemOut().println("[CommandHelper] Caching extensions...");
ExtensionManager.Cache(CommandHelperFileLocations.getDefault().getExtensionCacheDirectory());
StreamUtils.GetSystemOut().println("[CommandHelper] Extension caching complete.");
}
ExtensionManager.Initialize(ClassDiscovery.getDefaultInstance());
StreamUtils.GetSystemOut().println("[CommandHelper] Extension loading complete.");
}
};
loadingThread.start();
}
/**
* Called on plugin enable.
*/
@Override
public void onEnable() {
if(loadingThread.isAlive()){
StreamUtils.GetSystemOut().println("[CommandHelper] Waiting for extension loading to complete...");
try {
loadingThread.join();
} catch (InterruptedException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
}
BukkitMCEntityType.build();
BukkitMCBiomeType.build();
//Metrics
// MCStats no longer appears to be supported. If it comes back, this code can be re-added
// try {
// org.mcstats.Metrics m = new Metrics(this);
// m.addCustomData(new Metrics.Plotter("Player count") {
//
// @Override
// public int getValue() {
// return Static.getServer().getOnlinePlayers().size();
// }
// });
// m.start();
// } catch (IOException e) {
// // Failed to submit the stats :-(
// }
try {
//This may seem redundant, but on a /reload, we want to refresh these
//properties.
Prefs.init(CommandHelperFileLocations.getDefault().getPreferencesFile());
} catch (IOException ex) {
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.SEVERE, null, ex);
}
if(Prefs.UseSudoFallback()){
Logger.getLogger(CommandHelperPlugin.class.getName()).log(Level.WARNING, "In your preferences, use-sudo-fallback is turned on. Consider turning this off if you can.");
}
CHLog.initialize(CommandHelperFileLocations.getDefault().getConfigDirectory());
version = new SimpleVersion(getDescription().getVersion());
String script_name = Prefs.ScriptName();
String main_file = Prefs.MainFile();
boolean showSplashScreen = Prefs.ShowSplashScreen();
if (showSplashScreen) {
StreamUtils.GetSystemOut().println(TermColors.reset());
//StreamUtils.GetSystemOut().flush();
StreamUtils.GetSystemOut().println("\n\n\n" + Static.Logo());
}
ac = new AliasCore(new File(CommandHelperFileLocations.getDefault().getConfigDirectory(), script_name),
CommandHelperFileLocations.getDefault().getLocalPackagesDirectory(),
CommandHelperFileLocations.getDefault().getPreferencesFile(),
new File(CommandHelperFileLocations.getDefault().getConfigDirectory(), main_file), this);
ac.reload(null, null);
//Clear out our hostname cache
hostnameLookupCache = new ConcurrentHashMap<>();
//Create a new thread pool, with a custom ThreadFactory,
//so we can more clearly name our threads.
hostnameLookupThreadPool = Executors.newFixedThreadPool(3, new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "CommandHelperHostnameLookup-" + (++hostnameThreadPoolID));
}
});
for (MCPlayer p : Static.getServer().getOnlinePlayers()) {
//Repopulate our cache for currently online players.
//New players that join later will get a lookup done
//on them at that time.
Static.HostnameCache(p);
}
BukkitDirtyRegisteredListener.PlayDirty();
registerEvents(playerListener);
//interpreter events
registerEvents(interpreterListener);
registerEvents(serverListener);
//Script events
StaticLayer.Startup(this);
playerListener.loadGlobalAliases();
interpreterListener.reload();
Static.getLogger().log(Level.INFO, "[CommandHelper] CommandHelper {0} enabled", getDescription().getVersion());
}
public static AliasCore getCore() {
return ac;
}
/**
* Disables the plugin.
*/
@Override
public void onDisable() {
//free up some memory
StaticLayer.GetConvertor().runShutdownHooks();
stopExecutionQueue();
ExtensionManager.Cleanup();
ac = null;
}
public void stopExecutionQueue() {
for (String queue : executionQueue.activeQueues()) {
executionQueue.clear(queue);
}
}
/**
* Register all events in a Listener class.
*
* @param listener
*/
public void registerEvents(Listener listener) {
getServer().getPluginManager().registerEvents(listener, this);
}
/*
* This method is based on Bukkit's JavaPluginLoader:createRegisteredListeners
* Part of this code would be run normally using the other register method
*/
public void registerEventsDynamic(Listener listener) {
for (final java.lang.reflect.Method method : listener.getClass().getMethods()) {
EventIdentifier identifier = method.getAnnotation(EventIdentifier.class);
EventHandler defaultHandler = method.getAnnotation(EventHandler.class);
EventPriority priority = EventPriority.LOWEST;
if (identifier == null || !identifier.event().existsInCurrent()) {
continue;
}
if (defaultHandler != null) {
priority = defaultHandler.priority();
}
Class<? extends Event> eventClass = null;
try {
eventClass = (Class<? extends Event>) Class.forName(identifier.className());
} catch (ClassNotFoundException | ClassCastException e) {
CHLog.GetLogger().e(CHLog.Tags.RUNTIME, "Could not listen for " + identifier.event().name()
+ " because the class " + identifier.className() + " could not be found."
+ " This problem is not expected to occur, so please report it on the bug tracker if it does.",
Target.UNKNOWN);
continue;
}
HandlerList handler;
try {
handler = (HandlerList) ReflectionUtils.invokeMethod(eventClass, null, "getHandlerList");
} catch (ReflectionUtils.ReflectionException ref) {
Class eventSuperClass = eventClass.getSuperclass();
if (eventSuperClass != null) {
try {
handler = (HandlerList) ReflectionUtils.invokeMethod(eventSuperClass, null, "getHandlerList");
} catch (ReflectionUtils.ReflectionException refInner) {
CHLog.GetLogger().e(CHLog.Tags.RUNTIME, "Could not listen for " + identifier.event().name()
+ " because the handler for class " + identifier.className()
+ " could not be found. An attempt has already been made to find the"
+ " correct handler, but" + eventSuperClass.getName()
+ " did not have it either. Please report this on the bug tracker.",
Target.UNKNOWN);
continue;
}
} else {
CHLog.GetLogger().e(CHLog.Tags.RUNTIME, "Could not listen for " + identifier.event().name()
+ " because the handler for class " + identifier.className()
+ " could not be found. An attempt has already been made to find the"
+ " correct handler, but no superclass could be found."
+ " Please report this on the bug tracker.",
Target.UNKNOWN);
continue;
}
}
final Class<? extends Event> finalEventClass = eventClass;
EventExecutor executor = new EventExecutor() {
@Override
public void execute(Listener listener, Event event) throws EventException {
try {
if (!finalEventClass.isAssignableFrom(event.getClass())) {
return;
}
method.invoke(listener, event);
} catch (InvocationTargetException ex) {
throw new EventException(ex.getCause());
} catch (Throwable t) {
throw new EventException(t);
}
}
};
if (this.getServer().getPluginManager().useTimings()) {
handler.register(new TimedRegisteredListener(listener, executor, priority, this, false));
} else {
handler.register(new RegisteredListener(listener, executor, priority, this, false));
}
}
}
@Override
public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
MCCommandSender mcsender = BukkitConvertor.BukkitGetCorrectSender(sender);
MCCommand cmd = new BukkitMCCommand(command);
return cmd.handleTabComplete(mcsender, alias, args);
}
/**
* Called when a command registered by this plugin is received.
* @param sender
* @param cmd
* @param commandLabel
* @param args
* @return
*/
@Override
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) {
String cmdName = cmd.getName().toLowerCase();
if ((sender.isOp() || (sender instanceof Player && (sender.hasPermission("commandhelper.reloadaliases")
|| sender.hasPermission("ch.reloadaliases"))))
&& (cmdName.equals("reloadaliases") || cmdName.equals("reloadalias") || cmdName.equals("recompile"))) {
MCPlayer player = null;
if (sender instanceof Player) {
player = new BukkitMCPlayer((Player) sender);
}
ac.reload(player, args);
return true;
} else if (cmdName.equalsIgnoreCase("commandhelper")) {
return args.length >= 1 && args[0].equalsIgnoreCase("null");
} else if (cmdName.equals("runalias")) {
//Hardcoded alias rebroadcast
if(args.length == 0){
return false;
}
String command = StringUtils.Join(args, " ");
if (sender instanceof Player) {
PlayerCommandPreprocessEvent pcpe = new PlayerCommandPreprocessEvent((Player) sender, command);
playerListener.onPlayerCommandPreprocess(pcpe);
} else if (sender instanceof ConsoleCommandSender) {
if (command.startsWith("/")) {
command = command.substring(1);
}
ServerCommandEvent sce = new ServerCommandEvent((ConsoleCommandSender) sender, command);
serverListener.onServerCommand(sce);
} else if(sender instanceof BlockCommandSender){
MCCommandSender s = new BukkitMCBlockCommandSender((BlockCommandSender)sender);
Static.getAliasCore().alias(command, s);
}
return true;
} else if(cmdName.equalsIgnoreCase("interpreter-on")){
if(sender instanceof ConsoleCommandSender){
int interpreterTimeout = Prefs.InterpreterTimeout();
if(interpreterTimeout != 0){
interpreterUnlockedUntil = (interpreterTimeout * 60 * 1000) + System.currentTimeMillis();
sender.sendMessage("Inpterpreter mode unlocked for " + interpreterTimeout + " minute"
+ (interpreterTimeout==1?"":"s"));
}
} else {
sender.sendMessage("This command can only be run from console.");
}
return true;
} else if (sender instanceof Player && cmdName.equalsIgnoreCase("interpreter")) {
if (!sender.hasPermission("commandhelper.interpreter")) {
sender.sendMessage(MCChatColor.RED + "You do not have permission to run that command");
} else if (!Prefs.EnableInterpreter()) {
sender.sendMessage(MCChatColor.RED + "The interpreter is currently disabled."
+ " Check your preferences file.");
} else if (Prefs.InterpreterTimeout() != 0 && interpreterUnlockedUntil < System.currentTimeMillis()) {
sender.sendMessage(MCChatColor.RED + "Interpreter mode is currently locked. Run \"interpreter-on\""
+ " console to unlock it. If you want to turn this off entirely, set the interpreter-timeout"
+ " option to 0 in " + CommandHelperFileLocations.getDefault().getPreferencesFile().getName());
} else {
interpreterListener.startInterpret(sender.getName());
sender.sendMessage(MCChatColor.YELLOW + "You are now in interpreter mode. Type a dash (-) on a"
+ " line by itself to exit, and >>> to enter multiline mode.");
}
return true;
} else {
MCCommandSender mcsender = BukkitConvertor.BukkitGetCorrectSender(sender);
MCCommand mccmd = new BukkitMCCommand(cmd);
return mccmd.handleCustomCommand(mcsender, commandLabel, args);
}
}
/**
* Joins a string from an array of strings.
*
* @param str
* @param delimiter
* @return
*/
public static String joinString(String[] str, String delimiter) {
if (str.length == 0) {
return "";
}
StringBuilder buffer = new StringBuilder(str[0]);
for (int i = 1; i < str.length; i++) {
buffer.append(delimiter).append(str[i]);
}
return buffer.toString();
}
/**
* Execute a command.
*
* @param player
*
* @param cmd
*/
public static void execCommand(MCPlayer player, String cmd) {
player.chat(cmd);
}
}
// NOTE(review): removed dataset-extraction residue; the following content belongs to a separate source file (MetaModel.java).
package com.github.geequery.codegen.pdm.model;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import jef.tools.StringUtils;
/**
*
*/
public class MetaModel {

    /** Shared empty array, avoids allocating for tables with no references. */
    static final MetaReference[] EMPTY = new MetaReference[0];

    protected String code;
    protected String comment;
    protected List<MetaTable> vTables = new ArrayList<MetaTable>();
    protected String name;
    // Tables indexed by upper-cased code, for case-insensitive lookup.
    private HashMap<String, MetaTable> tableIndex = new HashMap<String, MetaTable>();
    protected HashMap<String, MetaReference> reference = new HashMap<String, MetaReference>();

    /**
     * Returns the reference registered under the given code.
     *
     * @param ref the reference code
     * @return the matching MetaReference, or null if none is registered
     */
    public MetaReference getReference(String ref) {
        return reference.get(ref);
    }

    /**
     * Creates a new MetaModel.
     *
     * @param aCode code
     * @param aName name
     * @param aComment comment for this MetaModel
     */
    public MetaModel(String aCode, String aName, String aComment) {
        super();
        code = aCode;
        name = aName;
        comment = aComment;
    }

    /**
     * Returns reference of MetaTable with specified code. Lookup is
     * case-insensitive (codes are indexed upper-cased).
     *
     * @param aCode code of MetaTable
     * @return reference of MetaTable with specified code, or null if unknown
     * @throws NullPointerException
     */
    public MetaTable getTable(String aCode) throws NullPointerException {
        return tableIndex.get(StringUtils.upperCase(aCode));
    }

    /**
     * Adds a table to this model, indexes it by upper-cased code, and
     * registers all of its imported references.
     *
     * @param aTable MetaTable to add
     * @throws NullPointerException
     */
    public void addTable(MetaTable aTable) {
        vTables.add(aTable);
        tableIndex.put(StringUtils.upperCase(aTable.getCode()), aTable);
        for (MetaReference ref : aTable.importRefs) {
            reference.put(ref.getCode(), ref);
        }
    }

    /**
     * Returns collection of tables.
     *
     * @return collection of tables (live list, not a copy)
     */
    public List<MetaTable> getTables() {
        return vTables;
    }

    /**
     * Returns the tables sorted by name, with null tables/names ordered first.
     * The backing vTables list is not modified.
     */
    private List<MetaTable> getSortedTables() {
        MetaTable[] ts = this.vTables.toArray(new MetaTable[vTables.size()]);
        Arrays.sort(ts, new Comparator<MetaTable>() {
            public int compare(MetaTable o1, MetaTable o2) {
                // Null tables sort before non-null; then null names before non-null.
                if (o1 == null && o2 == null)
                    return 0;
                if (o1 == null)
                    return -1;
                if (o2 == null)
                    return 1;
                if (o1.name == null && o2.name == null)
                    return 0;
                if (o1.name == null)
                    return -1;
                if (o2.name == null)
                    return 1;
                return o1.name.compareTo(o2.name);
            }
        });
        return Arrays.asList(ts);
    }

    /**
     * Returns code of this MetaModel.
     *
     * @return code of this MetaModel
     */
    public String getCode() {
        return code;
    }

    /**
     * Returns total columns of all tables within this MetaModel.
     *
     * @return total columns of all tables within this MetaModel
     */
    public int getAllColumnCount() {
        int retVal = 0;
        for (MetaTable table : vTables) {
            retVal += table.getTotalColumns();
        }
        return retVal;
    }

    /**
     * Returns description of this MetaModel.
     *
     * @return description of this MetaModel
     */
    public String getComment() {
        return comment;
    }

    /**
     * Returns name of this MetaModel.
     *
     * @return name of this MetaModel
     */
    public String getName() {
        return name;
    }

    /**
     * Returns total count of all MetaTable(s) within this MetaModel.
     *
     * @return total count of all MetaTable(s) within this MetaModel
     */
    public int getTableCount() {
        return vTables.size();
    }

    /**
     * Sets the code of this MetaModel.
     *
     * @param aCode code to set
     */
    public void setCode(String aCode) {
        code = aCode;
    }

    /**
     * Sets description of this MetaModel.
     *
     * @param aDescription the description to set
     */
    public void setComment(String aDescription) {
        comment = aDescription;
    }

    /**
     * Sets the name of this MetaModel.
     *
     * @param aName The name to set.
     */
    public void setName(String aName) {
        name = aName;
    }

    /**
     * Returns the string representation of this MetaModel.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return name;
    }

    /**
     * Returns verbose description of this MetaModel as an HTML fragment,
     * listing its name, code, comment, and all tables sorted by name.
     *
     * @return verbose description of this MetaModel
     */
    public String toStringVerbose() {
        // Use a StringBuilder; the old String-concatenation loop was O(n^2)
        // in the number of tables. Output is byte-identical.
        StringBuilder retVal = new StringBuilder();
        retVal.append("<HTML><TABLE border=\"1\">").append("<CAPTION><B>").append(code).append("</B></CAPTION>");
        retVal.append("<TBODY>");
        retVal.append("<TR><TD align=\"right\">").append("<B>name</B>").append("</TD>")
                .append("<TD align=\"left\">").append(name).append("</TD></TR>");
        retVal.append("<TR><TD align=\"right\">").append("<B>code</B>").append("</TD>")
                .append("<TD align=\"left\">").append(code).append("</TD></TR>");
        retVal.append("<TR><TD align=\"right\">").append("<B>comment</B>").append("</TD>")
                .append("<TD align=\"left\">").append(MetaUtils.insertHTMLBreaks(comment, 50)).append("</TD></TR>");
        retVal.append("</TBODY></TABLE>");
        retVal.append("<TABLE border=\"1\">").append("<CAPTION><B> TABLES").append("</B></CAPTION><TBODY>");
        retVal.append("<TH><I>No.</I></TH><TH><I>Name</I></TH><TH><I>Code</I></TH>");
        int count = 0;
        for (MetaTable crntTable : this.getSortedTables()) {
            retVal.append("<TR><TD align=\"right\">");
            retVal.append(++count);
            retVal.append("</TD><TD>");
            retVal.append(crntTable.getName());
            retVal.append("</TD><TD>");
            retVal.append(crntTable.getCode());
            retVal.append("</TD></TR>");
        }
        retVal.append("</TBODY></TABLE></HTML>");
        return retVal.toString();
    }
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence;
import java.util.concurrent.Callable;
import org.camunda.bpm.engine.authorization.Permission;
import org.camunda.bpm.engine.authorization.Resource;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.impl.cfg.auth.ResourceAuthorizationProvider;
import org.camunda.bpm.engine.impl.cmmn.entity.repository.CaseDefinitionManager;
import org.camunda.bpm.engine.impl.cmmn.entity.runtime.CaseExecutionManager;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.entitymanager.DbEntityManager;
import org.camunda.bpm.engine.impl.db.sql.DbSqlSession;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionDefinitionManager;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionRequirementsDefinitionManager;
import org.camunda.bpm.engine.impl.history.event.HistoricDecisionInstanceManager;
import org.camunda.bpm.engine.impl.identity.Authentication;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.Session;
import org.camunda.bpm.engine.impl.persistence.entity.AttachmentManager;
import org.camunda.bpm.engine.impl.persistence.entity.AuthorizationEntity;
import org.camunda.bpm.engine.impl.persistence.entity.AuthorizationManager;
import org.camunda.bpm.engine.impl.persistence.entity.BatchManager;
import org.camunda.bpm.engine.impl.persistence.entity.ByteArrayManager;
import org.camunda.bpm.engine.impl.persistence.entity.DeploymentManager;
import org.camunda.bpm.engine.impl.persistence.entity.EventSubscriptionManager;
import org.camunda.bpm.engine.impl.persistence.entity.ExecutionManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricActivityInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricBatchManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricCaseActivityInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricCaseInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricDetailManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricExternalTaskLogManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricIdentityLinkLogManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricIncidentManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricJobLogManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricProcessInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.ReportManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricTaskInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricVariableInstanceManager;
import org.camunda.bpm.engine.impl.persistence.entity.IdentityInfoManager;
import org.camunda.bpm.engine.impl.persistence.entity.IdentityLinkManager;
import org.camunda.bpm.engine.impl.persistence.entity.JobDefinitionManager;
import org.camunda.bpm.engine.impl.persistence.entity.JobManager;
import org.camunda.bpm.engine.impl.persistence.entity.ProcessDefinitionManager;
import org.camunda.bpm.engine.impl.persistence.entity.ResourceManager;
import org.camunda.bpm.engine.impl.persistence.entity.TaskManager;
import org.camunda.bpm.engine.impl.persistence.entity.TaskReportManager;
import org.camunda.bpm.engine.impl.persistence.entity.TenantManager;
import org.camunda.bpm.engine.impl.persistence.entity.UserOperationLogManager;
import org.camunda.bpm.engine.impl.persistence.entity.VariableInstanceManager;
/**
* @author Tom Baeyens
*/
public abstract class AbstractManager implements Session {
public void insert(DbEntity dbEntity) {
getDbEntityManager().insert(dbEntity);
}
public void delete(DbEntity dbEntity) {
getDbEntityManager().delete(dbEntity);
}
protected DbEntityManager getDbEntityManager() {
return getSession(DbEntityManager.class);
}
protected DbSqlSession getDbSqlSession() {
return getSession(DbSqlSession.class);
}
protected <T> T getSession(Class<T> sessionClass) {
return Context.getCommandContext().getSession(sessionClass);
}
protected DeploymentManager getDeploymentManager() {
return getSession(DeploymentManager.class);
}
protected ResourceManager getResourceManager() {
return getSession(ResourceManager.class);
}
protected ByteArrayManager getByteArrayManager() {
return getSession(ByteArrayManager.class);
}
protected ProcessDefinitionManager getProcessDefinitionManager() {
return getSession(ProcessDefinitionManager.class);
}
protected CaseDefinitionManager getCaseDefinitionManager() {
return getSession(CaseDefinitionManager.class);
}
protected DecisionDefinitionManager getDecisionDefinitionManager() {
return getSession(DecisionDefinitionManager.class);
}
protected DecisionRequirementsDefinitionManager getDecisionRequirementsDefinitionManager() {
return getSession(DecisionRequirementsDefinitionManager.class);
}
protected HistoricDecisionInstanceManager getHistoricDecisionInstanceManager() {
return getSession(HistoricDecisionInstanceManager.class);
}
protected CaseExecutionManager getCaseInstanceManager() {
return getSession(CaseExecutionManager.class);
}
protected CaseExecutionManager getCaseExecutionManager() {
return getSession(CaseExecutionManager.class);
}
protected ExecutionManager getProcessInstanceManager() {
return getSession(ExecutionManager.class);
}
protected TaskManager getTaskManager() {
return getSession(TaskManager.class);
}
protected TaskReportManager getTaskReportManager() {
return getSession(TaskReportManager.class);
}
protected IdentityLinkManager getIdentityLinkManager() {
return getSession(IdentityLinkManager.class);
}
protected VariableInstanceManager getVariableInstanceManager() {
return getSession(VariableInstanceManager.class);
}
protected HistoricProcessInstanceManager getHistoricProcessInstanceManager() {
return getSession(HistoricProcessInstanceManager.class);
}
protected HistoricCaseInstanceManager getHistoricCaseInstanceManager() {
return getSession(HistoricCaseInstanceManager.class);
}
protected HistoricDetailManager getHistoricDetailManager() {
return getSession(HistoricDetailManager.class);
}
protected HistoricVariableInstanceManager getHistoricVariableInstanceManager() {
return getSession(HistoricVariableInstanceManager.class);
}
protected HistoricActivityInstanceManager getHistoricActivityInstanceManager() {
return getSession(HistoricActivityInstanceManager.class);
}
protected HistoricCaseActivityInstanceManager getHistoricCaseActivityInstanceManager() {
return getSession(HistoricCaseActivityInstanceManager.class);
}
protected HistoricTaskInstanceManager getHistoricTaskInstanceManager() {
return getSession(HistoricTaskInstanceManager.class);
}
protected HistoricIncidentManager getHistoricIncidentManager() {
return getSession(HistoricIncidentManager.class);
}
protected HistoricIdentityLinkLogManager getHistoricIdentityLinkManager() {
return getSession(HistoricIdentityLinkLogManager.class);
}
protected HistoricJobLogManager getHistoricJobLogManager() {
return getSession(HistoricJobLogManager.class);
}
protected HistoricExternalTaskLogManager getHistoricExternalTaskLogManager() {
return getSession(HistoricExternalTaskLogManager.class);
}
protected JobManager getJobManager() {
return getSession(JobManager.class);
}
protected JobDefinitionManager getJobDefinitionManager() {
return getSession(JobDefinitionManager.class);
}
protected UserOperationLogManager getUserOperationLogManager() {
return getSession(UserOperationLogManager.class);
}
protected EventSubscriptionManager getEventSubscriptionManager() {
return getSession(EventSubscriptionManager.class);
}
protected IdentityInfoManager getIdentityInfoManager() {
return getSession(IdentityInfoManager.class);
}
protected AttachmentManager getAttachmentManager() {
return getSession(AttachmentManager.class);
}
protected ReportManager getHistoricReportManager() {
return getSession(ReportManager.class);
}
protected BatchManager getBatchManager() {
return getSession(BatchManager.class);
}
protected HistoricBatchManager getHistoricBatchManager() {
return getSession(HistoricBatchManager.class);
}
protected TenantManager getTenantManager() {
return getSession(TenantManager.class);
}
public void close() {
}
public void flush() {
}
// authorizations ///////////////////////////////////////
protected CommandContext getCommandContext() {
return Context.getCommandContext();
}
protected AuthorizationManager getAuthorizationManager() {
return getSession(AuthorizationManager.class);
}
protected void configureQuery(AbstractQuery<?,?> query, Resource resource) {
getAuthorizationManager().configureQuery(query, resource);
}
protected void checkAuthorization(Permission permission, Resource resource, String resourceId) {
getAuthorizationManager().checkAuthorization(permission, resource, resourceId);
}
public boolean isAuthorizationEnabled() {
return Context.getProcessEngineConfiguration().isAuthorizationEnabled();
}
protected Authentication getCurrentAuthentication() {
return Context.getCommandContext().getAuthentication();
}
protected ResourceAuthorizationProvider getResourceAuthorizationProvider() {
return Context.getProcessEngineConfiguration()
.getResourceAuthorizationProvider();
}
protected void deleteAuthorizations(Resource resource, String resourceId) {
getAuthorizationManager().deleteAuthorizationsByResourceId(resource, resourceId);
}
protected void deleteAuthorizationsForUser(Resource resource, String resourceId, String userId) {
getAuthorizationManager().deleteAuthorizationsByResourceIdAndUserId(resource, resourceId, userId);
}
protected void deleteAuthorizationsForGroup(Resource resource, String resourceId, String groupId) {
getAuthorizationManager().deleteAuthorizationsByResourceIdAndGroupId(resource, resourceId, groupId);
}
public void saveDefaultAuthorizations(final AuthorizationEntity[] authorizations) {
if(authorizations != null && authorizations.length > 0) {
Context.getCommandContext().runWithoutAuthorization(new Callable<Void>() {
public Void call() {
AuthorizationManager authorizationManager = getAuthorizationManager();
for (AuthorizationEntity authorization : authorizations) {
if(authorization.getId() == null) {
authorizationManager.insert(authorization);
} else {
authorizationManager.update(authorization);
}
}
return null;
}
});
}
}
public void deleteDefaultAuthorizations(final AuthorizationEntity[] authorizations) {
if(authorizations != null && authorizations.length > 0) {
Context.getCommandContext().runWithoutAuthorization(new Callable<Void>() {
public Void call() {
AuthorizationManager authorizationManager = getAuthorizationManager();
for (AuthorizationEntity authorization : authorizations) {
authorizationManager.delete(authorization);
}
return null;
}
});
}
}
}
| |
// Copyright 2000-2017 JetBrains s.r.o.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.intellij.ui;
import com.intellij.ui.components.JBList;
import com.intellij.ui.speedSearch.SpeedSearchSupply;
import com.intellij.util.Function;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.BidirectionalMap;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import javax.swing.plaf.basic.BasicRadioButtonUI;
import java.awt.*;
import java.awt.event.*;
import java.util.List;
import java.util.Map;
/**
 * A list whose elements are rendered as checkboxes and can be toggled with the
 * mouse or the space key. Each checkbox is associated with an item of type
 * {@code T} through an internal bidirectional map.
 *
 * @author oleg
 */
public class CheckBoxList<T> extends JBList<JCheckBox> {
  private static final int RESET_ROLLOVER = -1;
  private final CellRenderer myCellRenderer;
  private CheckBoxListListener checkBoxListListener;
  private final BidirectionalMap<T, JCheckBox> myItemMap = new BidirectionalMap<>();
  private int rollOverIndex = RESET_ROLLOVER;

  public CheckBoxList(final CheckBoxListListener checkBoxListListener) {
    this(new DefaultListModel<>(), checkBoxListListener);
  }

  public CheckBoxList(DefaultListModel<JCheckBox> dataModel, CheckBoxListListener checkBoxListListener) {
    this(dataModel);
    setCheckBoxListListener(checkBoxListListener);
  }

  public CheckBoxList() {
    this(new DefaultListModel());
  }

  public CheckBoxList(final DefaultListModel dataModel) {
    super();
    //noinspection unchecked
    setModel(dataModel);
    myCellRenderer = new CellRenderer();
    setCellRenderer(myCellRenderer);
    setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    // Space toggles all selected rows; the first row's state decides the new value for all.
    addKeyListener(new KeyAdapter() {
      @Override
      public void keyTyped(KeyEvent e) {
        SpeedSearchSupply supply = SpeedSearchSupply.getSupply(CheckBoxList.this);
        if (supply != null && supply.isPopupActive()) {
          return;
        }
        if (e.getKeyChar() == ' ') {
          Boolean value = null;
          for (int index : getSelectedIndices()) {
            if (index >= 0) {
              JCheckBox checkbox = getCheckBoxAt(index);
              value = value != null ? value : !checkbox.isSelected();
              setSelected(checkbox, index, value);
            }
          }
        }
      }
    });
    // A click toggles the checkbox only when it lands inside the checkbox icon area.
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        if (isEnabled()) {
          int index = locationToIndex(e.getPoint());
          if (index != -1) {
            JCheckBox checkBox = getCheckBoxAt(index);
            Rectangle bounds = getCellBounds(index, index);
            if (bounds == null) {
              return false;
            }
            Point p = findPointRelativeToCheckBox(e.getX() - bounds.x, e.getY() - bounds.y, checkBox, index);
            if (p != null) {
              Dimension dim = getCheckBoxDimension(checkBox);
              if (p.x >= 0 && p.x < dim.width && p.y >= 0 && p.y < dim.height) {
                setSelected(checkBox, index, !checkBox.isSelected());
                return true;
              }
            }
          }
        }
        return false;
      }
    }.installOn(this);
    if (UIUtil.isUnderWin10LookAndFeel()) {
      // Win10 L&F paints rollover/pressed states on the checkbox models.
      addMouseMotionListener(new MouseMotionAdapter() {
        @Override public void mouseMoved(MouseEvent e) {
          Point point = e.getPoint();
          int index = locationToIndex(point);
          fireRollOverUpdated(index);
        }
      });
      addMouseListener(new MouseAdapter() {
        @Override public void mouseExited(MouseEvent e) {
          fireRollOverUpdated(RESET_ROLLOVER);
        }

        @Override public void mousePressed(MouseEvent e) {
          setPressed(e, true);
        }

        @Override public void mouseReleased(MouseEvent e) {
          setPressed(e, false);
        }

        private void setPressed(MouseEvent e, boolean pressed) {
          Point point = e.getPoint();
          int index = locationToIndex(point);
          if (index >= 0 && index < getModel().getSize()) {
            JCheckBox cb = getModel().getElementAt(index);
            cb.getModel().setPressed(pressed);
            UIUtil.repaintViewport(CheckBoxList.this);
          }
        }
      });
    }
  }

  /**
   * Reset old rollover row and set new rollover row.
   * @param newIndex new rollover row. If newIndex is -1 then reset old rollover row only.
   */
  private void fireRollOverUpdated(int newIndex) {
    if (rollOverIndex >= 0) {
      JCheckBox oldRollover = getModel().getElementAt(rollOverIndex);
      oldRollover.getModel().setRollover(false);
    }
    rollOverIndex = newIndex;
    if (rollOverIndex >= 0) {
      JCheckBox newRollover = getModel().getElementAt(rollOverIndex);
      newRollover.getModel().setRollover(true);
    }
    UIUtil.repaintViewport(this);
  }

  @NotNull
  private static Dimension getCheckBoxDimension(@NotNull JCheckBox checkBox) {
    Icon icon = null;
    BasicRadioButtonUI ui = ObjectUtils.tryCast(checkBox.getUI(), BasicRadioButtonUI.class);
    if (ui != null) {
      icon = ui.getDefaultIcon();
    }
    if (icon == null) {
      // com.intellij.ide.ui.laf.darcula.ui.DarculaCheckBoxUI.getDefaultIcon()
      icon = JBUI.scale(EmptyIcon.create(20));
    }
    Insets margin = checkBox.getMargin();
    return new Dimension(margin.left + icon.getIconWidth(), margin.top + icon.getIconHeight());
  }

  /**
   * Find point relative to the checkbox. Performs lightweight calculations suitable for default rendering.
   * @param x x-coordinate relative to the rendered component
   * @param y y-coordinate relative to the rendered component
   * @param checkBox JCheckBox instance
   * @param index The list cell index
   * @return A point relative to the checkbox or null, if it's outside of the checkbox.
   */
  @Nullable
  protected Point findPointRelativeToCheckBox(int x, int y, @NotNull JCheckBox checkBox, int index) {
    int cx = x - myCellRenderer.getBorderInsets().left;
    int cy = y - myCellRenderer.getBorderInsets().top;
    return cx >= 0 && cy >= 0 ? new Point(cx, cy) : null;
  }

  /**
   * Find point relative to the checkbox. Performs heavy calculations suitable for adjusted rendering
   * where the checkbox location can be arbitrary inside the rendered component.
   *
   * @param x x-coordinate relative to the rendered component
   * @param y y-coordinate relative to the rendered component
   * @param checkBox JCheckBox instance
   * @param index The list cell index
   * @return A point relative to the checkbox or null, if it's outside of the checkbox.
   */
  @Nullable
  protected Point findPointRelativeToCheckBoxWithAdjustedRendering(int x, int y, @NotNull JCheckBox checkBox, int index) {
    boolean selected = isSelectedIndex(index);
    boolean hasFocus = hasFocus();
    Component component = myCellRenderer.getListCellRendererComponent(this, checkBox, index, selected, hasFocus);
    Rectangle bounds = getCellBounds(index, index);
    bounds.x = 0;
    bounds.y = 0;
    // Lay the renderer out at the cell's size so findComponentAt() sees real child bounds.
    component.setBounds(bounds);
    if (component instanceof Container) {
      Container c = (Container)component;
      Component found = c.findComponentAt(x, y);
      if (found == checkBox) {
        Point checkBoxLocation = getChildLocationRelativeToAncestor(component, checkBox);
        if (checkBoxLocation != null) {
          return new Point(x - checkBoxLocation.x, y - checkBoxLocation.y);
        }
      }
    }
    return null;
  }

  /**
   * Sums the locations of {@code child} and its parents up to (but excluding)
   * {@code ancestor}, yielding the child's location in ancestor coordinates.
   *
   * @return the accumulated offset, or null if {@code ancestor} is not actually an ancestor of {@code child}
   */
  @Nullable
  private static Point getChildLocationRelativeToAncestor(@NotNull Component ancestor, @NotNull Component child) {
    int dx = 0, dy = 0;
    Component c = child;
    while (c != null && c != ancestor) {
      Point p = c.getLocation();
      dx += p.x;
      dy += p.y;
      // BUGFIX: was "c = child.getParent()", which never advanced past the
      // child's direct parent and looped forever for deeper hierarchies.
      c = c.getParent();
    }
    return c == ancestor ? new Point(dx, dy) : null;
  }

  @NotNull
  private JCheckBox getCheckBoxAt(int index) {
    return getModel().getElementAt(index);
  }

  /** Replaces the contents with the given label-to-checked-state mapping (items are the labels themselves). */
  public void setStringItems(final Map<String, Boolean> items) {
    clear();
    for (Map.Entry<String, Boolean> entry : items.entrySet()) {
      //noinspection unchecked
      addItem((T)entry.getKey(), entry.getKey(), entry.getValue());
    }
  }

  /** Replaces the contents with the given items, all unchecked; {@code converter} supplies the labels. */
  public void setItems(final List<T> items, @Nullable Function<T, String> converter) {
    clear();
    for (T item : items) {
      String text = converter != null ? converter.fun(item) : item.toString();
      addItem(item, text, false);
    }
  }

  public void addItem(T item, String text, boolean selected) {
    JCheckBox checkBox = new JCheckBox(text, selected);
    checkBox.setOpaque(true); // to paint selection background
    myItemMap.put(item, checkBox);
    //noinspection unchecked
    ((DefaultListModel)getModel()).addElement(checkBox);
  }

  /** Rebinds the checkbox of {@code oldItem} to {@code newItem} and updates its label in place. */
  public void updateItem(@NotNull T oldItem, @NotNull T newItem, @NotNull String newText) {
    JCheckBox checkBox = myItemMap.remove(oldItem);
    myItemMap.put(newItem, checkBox);
    checkBox.setText(newText);
    DefaultListModel<JCheckBox> model = (DefaultListModel<JCheckBox>)getModel();
    int ind = model.indexOf(checkBox);
    if (ind >= 0) {
      model.set(ind, checkBox); // to fire contentsChanged event
    }
  }

  @Nullable
  public T getItemAt(int index) {
    JCheckBox checkBox = getModel().getElementAt(index);
    List<T> value = myItemMap.getKeysByValue(checkBox);
    return value == null || value.isEmpty() ? null : value.get(0);
  }

  public void clear() {
    ((DefaultListModel)getModel()).clear();
    myItemMap.clear();
  }

  public boolean isItemSelected(int index) {
    return getModel().getElementAt(index).isSelected();
  }

  public boolean isItemSelected(T item) {
    JCheckBox checkBox = myItemMap.get(item);
    return checkBox != null && checkBox.isSelected();
  }

  public void setItemSelected(T item, boolean selected) {
    JCheckBox checkBox = myItemMap.get(item);
    if (checkBox != null) {
      checkBox.setSelected(selected);
    }
  }

  private void setSelected(JCheckBox checkbox, int index, boolean value) {
    checkbox.setSelected(value);
    repaint();

    // fire change notification in case if we've already initialized model
    final ListModel model = getModel();
    if (model instanceof DefaultListModel) {
      //noinspection unchecked
      ((DefaultListModel)model).setElementAt(getModel().getElementAt(index), index);
    }

    if (checkBoxListListener != null) {
      checkBoxListListener.checkBoxSelectionChanged(index, value);
    }
  }

  public void setCheckBoxListListener(CheckBoxListListener checkBoxListListener) {
    this.checkBoxListListener = checkBoxListListener;
  }

  /** Extension point: subclasses may wrap or replace the rendered component. Default: identity. */
  protected JComponent adjustRendering(JComponent rootComponent,
                                       final JCheckBox checkBox,
                                       int index,
                                       final boolean selected,
                                       final boolean hasFocus) {
    return rootComponent;
  }

  private class CellRenderer implements ListCellRenderer<JCheckBox> {
    private final Border mySelectedBorder;
    private final Border myBorder;
    private final Insets myBorderInsets;

    private CellRenderer() {
      mySelectedBorder = UIManager.getBorder("List.focusCellHighlightBorder");
      myBorderInsets = mySelectedBorder.getBorderInsets(new JCheckBox());
      myBorder = new EmptyBorder(myBorderInsets);
    }

    @Override
    public Component getListCellRendererComponent(JList list, JCheckBox checkbox, int index, boolean isSelected, boolean cellHasFocus) {
      Color textColor = getForeground(isSelected);
      Color backgroundColor = getBackground(isSelected);
      Font font = getFont();

      checkbox.setBackground(backgroundColor);
      checkbox.setForeground(textColor);
      checkbox.setEnabled(isEnabled());
      checkbox.setFont(font);
      checkbox.setFocusPainted(false);
      checkbox.setBorderPainted(false);
      checkbox.setOpaque(true);

      // With secondary text the checkbox is wrapped into a panel with a right-aligned label.
      String auxText = getSecondaryText(index);
      JComponent rootComponent;
      if (auxText != null) {
        JPanel panel = new JPanel(new BorderLayout());
        panel.add(checkbox, BorderLayout.LINE_START);
        JLabel infoLabel = new JLabel(auxText, SwingConstants.RIGHT);
        infoLabel.setBorder(new EmptyBorder(0, 0, 0, checkbox.getInsets().left));
        infoLabel.setFont(UIUtil.getFont(UIUtil.FontSize.SMALL, font));
        panel.add(infoLabel, BorderLayout.CENTER);
        panel.setBackground(backgroundColor);
        infoLabel.setForeground(isSelected ? textColor : JBColor.GRAY);
        infoLabel.setBackground(backgroundColor);
        rootComponent = panel;
      }
      else {
        rootComponent = checkbox;
      }

      rootComponent.setBorder(isSelected ? mySelectedBorder : myBorder);

      // Preserve the rollover state across adjustRendering(), which may reset it.
      boolean isRollOver = checkbox.getModel().isRollover();
      rootComponent = adjustRendering(rootComponent, checkbox, index, isSelected, cellHasFocus);
      checkbox.getModel().setRollover(isRollOver);

      return rootComponent;
    }

    @NotNull
    private Insets getBorderInsets() {
      return myBorderInsets;
    }
  }

  /** Optional right-aligned secondary text for the row; null (default) means none. */
  @Nullable
  protected String getSecondaryText(int index) {
    return null;
  }

  protected Color getBackground(final boolean isSelected) {
    return isSelected ? getSelectionBackground() : getBackground();
  }

  protected Color getForeground(final boolean isSelected) {
    return isSelected ? getSelectionForeground() : getForeground();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.job.util;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.kylin.metadata.model.ColumnDesc;
import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
import org.apache.kylin.metadata.model.TableDesc;
import org.apache.kylin.metadata.model.TblColRef;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * Helpers for quoting table/column identifiers inside SQL expressions
 * (filter conditions and computed-column expressions) built for flat tables.
 */
public class FlatTableSqlQuoteUtils {

    public static final String QUOTE = "`";

    /**
     * Quote identifier with the default quote character {@code `}.
     *
     * @param identifier the identifier to quote
     * @return the quoted identifier, e.g. {@code `col`}
     */
    public static String quoteIdentifier(String identifier){
        return QUOTE + identifier + QUOTE;
    }

    /**
     * Quote a table identity, e.g. {@code `DEFAULT`.`KYLIN_SALES`}.
     * Note: the whole result (quotes included) is upper-cased.
     *
     * @param database database name
     * @param table table name
     * @param quote quote character/string to use
     * @return upper-cased quoted identity
     */
    public static String quoteTableIdentity(String database, String table, String quote) {
        String dbName = quote + database + quote;
        String tableName = quote + table + quote;
        return String.format(Locale.ROOT, "%s.%s", dbName, tableName).toUpperCase(Locale.ROOT);
    }

    /**
     * Quote a table identity using the default quote {@code `}.
     *
     * @param database database name
     * @param table table name
     * @return upper-cased quoted identity
     */
    public static String quoteTableIdentity(String database, String table) {
        return quoteTableIdentity(database, table, QUOTE);
    }

    /**
     * Quote table/alias/column identifiers occurring in a SQL expression
     * (filter condition or computed-column expression) of a flat table.
     * Column identifiers are quoted only for tables that matched first.
     *
     * @param flatDesc flat table descriptor supplying tables, aliases and columns
     * @param sqlExpr the SQL expression to rewrite
     * @param quotation quote character/string to use
     * @return the expression with matched identifiers quoted
     */
    public static String quoteIdentifierInSqlExpr(IJoinedFlatTableDesc flatDesc, String sqlExpr, String quotation) {
        Map<String, String> tabToAliasMap = buildTableToTableAliasMap(flatDesc);
        Map<String, Map<String, String>> tabToColsMap = buildTableToColumnsMap(flatDesc);

        boolean tableMatched = false;
        for (String table : tabToAliasMap.keySet()) {
            List<String> tabPatterns = getTableNameOrAliasPatterns(table);
            if (isIdentifierNeedToQuote(sqlExpr, table, tabPatterns)) {
                sqlExpr = quoteIdentifier(sqlExpr, quotation, table, tabPatterns);
                tableMatched = true;
            }

            String tabAlias = tabToAliasMap.get(table);
            List<String> tabAliasPatterns = getTableNameOrAliasPatterns(tabAlias);
            if (isIdentifierNeedToQuote(sqlExpr, tabAlias, tabAliasPatterns)) {
                sqlExpr = quoteIdentifier(sqlExpr, quotation, tabAlias, tabAliasPatterns);
                tableMatched = true;
            }

            if (tableMatched) {
                Set<String> columns = listColumnsInTable(table, tabToColsMap);
                for (String column : columns) {
                    List<String> colPatterns = getColumnNameOrAliasPatterns(column);
                    if (isIdentifierNeedToQuote(sqlExpr, column, colPatterns)) {
                        sqlExpr = quoteIdentifier(sqlExpr, quotation, column, colPatterns);
                    }
                    if (columnHasAlias(table, column, tabToColsMap)) {
                        String colAlias = getColumnAlias(table, column, tabToColsMap);
                        List<String> colAliasPattern = getColumnNameOrAliasPatterns(colAlias);
                        if (isIdentifierNeedToQuote(sqlExpr, colAlias, colAliasPattern)) {
                            // BUGFIX: previously passed colPatterns (the column's patterns)
                            // here, so the alias occurrence was never actually quoted.
                            sqlExpr = quoteIdentifier(sqlExpr, quotation, colAlias, colAliasPattern);
                        }
                    }
                }
            }

            tableMatched = false; //reset
        }

        return sqlExpr;
    }

    /**
     * Used to quote identifiers for JDBC ext job when quoting cc expr.
     *
     * @param tableDesc descriptor of the single table whose identifiers are quoted
     * @param sqlExpr the SQL expression to rewrite
     * @param quot quote character/string to use
     * @return the expression with matched identifiers quoted
     */
    public static String quoteIdentifierInSqlExpr(TableDesc tableDesc, String sqlExpr, String quot) {
        String table = tableDesc.getName();
        boolean tableMatched = false;
        List<String> tabPatterns = getTableNameOrAliasPatterns(table);
        if (isIdentifierNeedToQuote(sqlExpr, table, tabPatterns)) {
            sqlExpr = quoteIdentifier(sqlExpr, quot, table, tabPatterns);
            tableMatched = true;
        }

        // Columns are only quoted when the table itself occurred in the expression.
        if (tableMatched) {
            for (ColumnDesc columnDesc : tableDesc.getColumns()) {
                String column = columnDesc.getName();
                List<String> colPatterns = getColumnNameOrAliasPatterns(column);
                if (isIdentifierNeedToQuote(sqlExpr, column, colPatterns)) {
                    sqlExpr = quoteIdentifier(sqlExpr, quot, column, colPatterns);
                }
            }
        }

        return sqlExpr;
    }

    public static List<String> getTableNameOrAliasPatterns(String tableName) {
        // Pattern must contain three regex groups, and place identifier in sec group ($2)
        List<String> patterns = Lists.newArrayList();
        patterns.add("([+\\-*/%&|^=><\\s,(])(" + tableName.trim() + ")(\\.)");
        patterns.add("([\\.\\s])(" + tableName.trim() + ")([,\\s)])");
        patterns.add("(^)(" + tableName.trim() + ")([\\.])");
        return patterns;
    }

    public static List<String> getColumnNameOrAliasPatterns(String colName) {
        // Pattern must contain three regex groups, and place identifier in sec group ($2)
        List<String> patterns = Lists.newArrayList();
        patterns.add("([\\.\\s(])(" + colName.trim() + ")([+\\-*/%&|^=><\\s,)])");
        patterns.add("(^)(" + colName.trim() + ")([+\\-*/%&|^=><\\s,)])");
        return patterns;
    }

    // visible for test
    static String quoteIdentifier(String sqlExpr, String quotation, String identifier,
                                  List<String> identifierPatterns) {
        String quotedIdentifier = quotation + identifier.trim() + quotation;
        for (String pattern : identifierPatterns) {
            Matcher matcher = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(sqlExpr);
            if (matcher.find()) {
                // $1/$3 keep the surrounding delimiters, $2 is replaced by the quoted form.
                sqlExpr = matcher.replaceAll("$1" + quotedIdentifier + "$3");
            }
        }
        return sqlExpr;
    }

    /** Returns true when the identifier occurs in the expression per any of the given patterns. */
    public static boolean isIdentifierNeedToQuote(String sqlExpr, String identifier, List<String> identifierPatterns) {
        if (StringUtils.isBlank(sqlExpr) || StringUtils.isBlank(identifier)) {
            return false;
        }

        for (String pattern : identifierPatterns) {
            if (Pattern.compile(pattern, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(sqlExpr).find()) {
                return true;
            }
        }
        return false;
    }

    /** Maps table name -> table alias over all columns of the flat table. */
    private static Map<String, String> buildTableToTableAliasMap(IJoinedFlatTableDesc flatDesc) {
        Map<String, String> map = Maps.newHashMap();
        List<TblColRef> colRefs = flatDesc.getAllColumns();
        for (TblColRef colRef : colRefs) {
            String tableName = colRef.getTableRef().getTableName();
            String alias = colRef.getTableAlias();
            map.put(tableName, alias);
        }
        return map;
    }

    /** Maps table name -> (column name -> "tableAlias_columnName" alias). */
    private static Map<String, Map<String, String>> buildTableToColumnsMap(IJoinedFlatTableDesc flatDesc) {
        Map<String, Map<String, String>> map = Maps.newHashMap();
        List<TblColRef> colRefs = flatDesc.getAllColumns();
        for (TblColRef colRef : colRefs) {
            String colName = colRef.getName();
            String tableName = colRef.getTableRef().getTableName();
            String colAlias = colRef.getTableAlias() + "_" + colRef.getName();
            if (map.containsKey(tableName)) {
                map.get(tableName).put(colName, colAlias);
            } else {
                Map<String, String> colToAliasMap = Maps.newHashMap();
                colToAliasMap.put(colName, colAlias);
                map.put(tableName, colToAliasMap);
            }
        }
        return map;
    }

    private static Map<String, String> getColToColAliasMapInTable(String tableName,
                                                                  Map<String, Map<String, String>> tableToColumnsMap) {
        if (tableToColumnsMap.containsKey(tableName)) {
            return tableToColumnsMap.get(tableName);
        }
        return Maps.newHashMap();
    }

    private static Set<String> listColumnsInTable(String tableName,
                                                  Map<String, Map<String, String>> tableToColumnsMap) {
        Map<String, String> colToAliasMap = getColToColAliasMapInTable(tableName, tableToColumnsMap);
        return colToAliasMap.keySet();
    }

    private static boolean columnHasAlias(String tableName, String columnName,
                                          Map<String, Map<String, String>> tableToColumnsMap) {
        Map<String, String> colToAliasMap = getColToColAliasMapInTable(tableName, tableToColumnsMap);
        if (colToAliasMap.containsKey(columnName)) {
            return true;
        }
        return false;
    }

    private static String getColumnAlias(String tableName, String columnName,
                                         Map<String, Map<String, String>> tableToColumnsMap) {
        Map<String, String> colToAliasMap = getColToColAliasMapInTable(tableName, tableToColumnsMap);
        if (colToAliasMap.containsKey(columnName)) {
            return colToAliasMap.get(columnName);
        }
        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.transport.amqp.client;
import javax.jms.InvalidDestinationException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.activemq.transport.amqp.client.util.AsyncResult;
import org.apache.activemq.transport.amqp.client.util.ClientFuture;
import org.apache.activemq.transport.amqp.client.util.UnmodifiableSender;
import org.apache.qpid.proton.amqp.Binary;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.messaging.Accepted;
import org.apache.qpid.proton.amqp.messaging.Modified;
import org.apache.qpid.proton.amqp.messaging.Outcome;
import org.apache.qpid.proton.amqp.messaging.Rejected;
import org.apache.qpid.proton.amqp.messaging.Released;
import org.apache.qpid.proton.amqp.messaging.Source;
import org.apache.qpid.proton.amqp.messaging.Target;
import org.apache.qpid.proton.amqp.transaction.TransactionalState;
import org.apache.qpid.proton.amqp.transport.DeliveryState;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.amqp.transport.ReceiverSettleMode;
import org.apache.qpid.proton.amqp.transport.SenderSettleMode;
import org.apache.qpid.proton.engine.Delivery;
import org.apache.qpid.proton.engine.Sender;
import org.apache.qpid.proton.message.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Sender class that manages a Proton sender endpoint.
*/
public class AmqpSender extends AmqpAbstractResource<Sender> {
private static final Logger LOG = LoggerFactory.getLogger(AmqpSender.class);
private static final byte[] EMPTY_BYTE_ARRAY = new byte[]{};
public static final long DEFAULT_SEND_TIMEOUT = 15000;
private final AmqpTransferTagGenerator tagGenerator = new AmqpTransferTagGenerator(true);
private final AtomicBoolean closed = new AtomicBoolean();
private final AmqpSession session;
private final String address;
private final String senderId;
private final Target userSpecifiedTarget;
private boolean presettle;
private long sendTimeout = DEFAULT_SEND_TIMEOUT;
private final Set<Delivery> pending = new LinkedHashSet<>();
private byte[] encodeBuffer = new byte[1024 * 8];
/**
* Create a new sender instance.
*
* @param session The parent session that created this sender.
* @param address The address that this sender produces to.
* @param senderId The unique ID assigned to this sender.
*/
public AmqpSender(AmqpSession session, String address, String senderId) {
    // A null address is accepted; only an empty string is rejected.
    if (address != null && address.isEmpty()) {
        throw new IllegalArgumentException("Address cannot be empty.");
    }

    this.userSpecifiedTarget = null;
    this.senderId = senderId;
    this.address = address;
    this.session = session;
}
/**
* Create a new sender instance using the given Target when creating the link.
*
* @param session The parent session that created this sender.
* @param target The target that this sender produces to.
* @param senderId The unique ID assigned to this sender.
*/
public AmqpSender(AmqpSession session, Target target, String senderId) {
if (target == null) {
throw new IllegalArgumentException("User specified Target cannot be null");
}
this.session = session;
this.userSpecifiedTarget = target;
this.address = target.getAddress();
this.senderId = senderId;
}
/**
* Sends the given message to this senders assigned address.
*
* @param message the message to send.
* @throws IOException if an error occurs during the send.
*/
public void send(final AmqpMessage message) throws IOException {
checkClosed();
send(message, null);
}
/**
* Sends the given message to this senders assigned address using the supplied transaction ID.
*
* @param message the message to send.
* @param txId the transaction ID to assign the outgoing send.
* @throws IOException if an error occurs during the send.
*/
public void send(final AmqpMessage message, final AmqpTransactionId txId) throws IOException {
checkClosed();
final ClientFuture sendRequest = new ClientFuture();
session.getScheduler().execute(new Runnable() {
@Override
public void run() {
try {
doSend(message, sendRequest, txId);
session.pumpToProtonTransport(sendRequest);
}
catch (Exception e) {
sendRequest.onFailure(e);
session.getConnection().fireClientException(e);
}
}
});
if (sendTimeout <= 0) {
sendRequest.sync();
}
else {
sendRequest.sync(sendTimeout, TimeUnit.MILLISECONDS);
}
}
/**
* Close the sender, a closed sender will throw exceptions if any further send
* calls are made.
*
* @throws IOException if an error occurs while closing the sender.
*/
public void close() throws IOException {
if (closed.compareAndSet(false, true)) {
final ClientFuture request = new ClientFuture();
session.getScheduler().execute(new Runnable() {
@Override
public void run() {
checkClosed();
close(request);
session.pumpToProtonTransport(request);
}
});
request.sync();
}
}
/**
* @return this session's parent AmqpSession.
*/
public AmqpSession getSession() {
return session;
}
/**
* @return an unmodifiable view of the underlying Sender instance.
*/
public Sender getSender() {
return new UnmodifiableSender(getEndpoint());
}
/**
* @return the assigned address of this sender.
*/
public String getAddress() {
return address;
}
//----- Sender configuration ---------------------------------------------//
/**
* @return will messages be settle on send.
*/
public boolean isPresettle() {
return presettle;
}
/**
* Configure is sent messages are marked as settled on send, defaults to false.
*
* @param presettle configure if this sender will presettle all sent messages.
*/
public void setPresettle(boolean presettle) {
this.presettle = presettle;
}
/**
* @return the currently configured send timeout.
*/
public long getSendTimeout() {
return sendTimeout;
}
/**
* Sets the amount of time the sender will block on a send before failing.
*
* @param sendTimeout time in milliseconds to wait.
*/
public void setSendTimeout(long sendTimeout) {
this.sendTimeout = sendTimeout;
}
//----- Private Sender implementation ------------------------------------//
private void checkClosed() {
if (isClosed()) {
throw new IllegalStateException("Sender is already closed");
}
}
@Override
protected void doOpen() {
Symbol[] outcomes = new Symbol[]{Accepted.DESCRIPTOR_SYMBOL, Rejected.DESCRIPTOR_SYMBOL};
Source source = new Source();
source.setAddress(senderId);
source.setOutcomes(outcomes);
Target target = userSpecifiedTarget;
if (target == null) {
target = new Target();
target.setAddress(address);
}
String senderName = senderId + ":" + address;
Sender sender = session.getEndpoint().sender(senderName);
sender.setSource(source);
sender.setTarget(target);
if (presettle) {
sender.setSenderSettleMode(SenderSettleMode.SETTLED);
}
else {
sender.setSenderSettleMode(SenderSettleMode.UNSETTLED);
}
sender.setReceiverSettleMode(ReceiverSettleMode.FIRST);
setEndpoint(sender);
super.doOpen();
}
@Override
protected void doOpenCompletion() {
// Verify the attach response contained a non-null target
org.apache.qpid.proton.amqp.transport.Target t = getEndpoint().getRemoteTarget();
if (t != null) {
super.doOpenCompletion();
}
else {
// No link terminus was created, the peer will now detach/close us.
}
}
@Override
protected void doOpenInspection() {
try {
getStateInspector().inspectOpenedResource(getSender());
}
catch (Throwable error) {
getStateInspector().markAsInvalid(error.getMessage());
}
}
@Override
protected void doClosedInspection() {
try {
getStateInspector().inspectClosedResource(getSender());
}
catch (Throwable error) {
getStateInspector().markAsInvalid(error.getMessage());
}
}
@Override
protected void doDetachedInspection() {
try {
getStateInspector().inspectDetachedResource(getSender());
}
catch (Throwable error) {
getStateInspector().markAsInvalid(error.getMessage());
}
}
@Override
protected Exception getOpenAbortException() {
// Verify the attach response contained a non-null target
org.apache.qpid.proton.amqp.transport.Target t = getEndpoint().getRemoteTarget();
if (t != null) {
return super.getOpenAbortException();
}
else {
// No link terminus was created, the peer has detach/closed us, create IDE.
return new InvalidDestinationException("Link creation was refused");
}
}
private void doSend(AmqpMessage message, AsyncResult request, AmqpTransactionId txId) throws Exception {
LOG.trace("Producer sending message: {}", message);
Delivery delivery = null;
if (presettle) {
delivery = getEndpoint().delivery(EMPTY_BYTE_ARRAY, 0, 0);
}
else {
byte[] tag = tagGenerator.getNextTag();
delivery = getEndpoint().delivery(tag, 0, tag.length);
}
delivery.setContext(request);
Binary amqpTxId = null;
if (txId != null) {
amqpTxId = txId.getRemoteTxId();
}
else if (session.isInTransaction()) {
amqpTxId = session.getTransactionId().getRemoteTxId();
}
if (amqpTxId != null) {
TransactionalState state = new TransactionalState();
state.setTxnId(amqpTxId);
delivery.disposition(state);
}
encodeAndSend(message.getWrappedMessage(), delivery);
if (presettle) {
delivery.settle();
request.onSuccess();
}
else {
pending.add(delivery);
getEndpoint().advance();
}
}
private void encodeAndSend(Message message, Delivery delivery) throws IOException {
int encodedSize;
while (true) {
try {
encodedSize = message.encode(encodeBuffer, 0, encodeBuffer.length);
break;
}
catch (java.nio.BufferOverflowException e) {
encodeBuffer = new byte[encodeBuffer.length * 2];
}
}
int sentSoFar = 0;
while (true) {
int sent = getEndpoint().send(encodeBuffer, sentSoFar, encodedSize - sentSoFar);
if (sent > 0) {
sentSoFar += sent;
if ((encodedSize - sentSoFar) == 0) {
break;
}
}
else {
LOG.warn("{} failed to send any data from current Message.", this);
}
}
}
@Override
public void processDeliveryUpdates(AmqpConnection connection) throws IOException {
List<Delivery> toRemove = new ArrayList<>();
for (Delivery delivery : pending) {
DeliveryState state = delivery.getRemoteState();
if (state == null) {
continue;
}
Outcome outcome = null;
if (state instanceof TransactionalState) {
LOG.trace("State of delivery is Transactional, retrieving outcome: {}", state);
outcome = ((TransactionalState) state).getOutcome();
}
else if (state instanceof Outcome) {
outcome = (Outcome) state;
}
else {
LOG.warn("Message send updated with unsupported state: {}", state);
outcome = null;
}
AsyncResult request = (AsyncResult) delivery.getContext();
Exception deliveryError = null;
if (outcome instanceof Accepted) {
LOG.trace("Outcome of delivery was accepted: {}", delivery);
if (request != null && !request.isComplete()) {
request.onSuccess();
}
}
else if (outcome instanceof Rejected) {
LOG.trace("Outcome of delivery was rejected: {}", delivery);
ErrorCondition remoteError = ((Rejected) outcome).getError();
if (remoteError == null) {
remoteError = getEndpoint().getRemoteCondition();
}
deliveryError = AmqpSupport.convertToException(remoteError);
}
else if (outcome instanceof Released) {
LOG.trace("Outcome of delivery was released: {}", delivery);
deliveryError = new IOException("Delivery failed: released by receiver");
}
else if (outcome instanceof Modified) {
LOG.trace("Outcome of delivery was modified: {}", delivery);
deliveryError = new IOException("Delivery failed: failure at remote");
}
if (deliveryError != null) {
if (request != null && !request.isComplete()) {
request.onFailure(deliveryError);
}
else {
connection.fireClientException(deliveryError);
}
}
tagGenerator.returnTag(delivery.getTag());
delivery.settle();
toRemove.add(delivery);
}
pending.removeAll(toRemove);
}
@Override
public String toString() {
return getClass().getSimpleName() + "{ address = " + address + "}";
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.5 async-config property carrying the flow-removed mask for the
 * slave role.  Wire layout: type (0x4, 2 bytes), length (8, 2 bytes),
 * mask (unsigned 32-bit, 4 bytes).
 */
class OFAsyncConfigPropFlowRemovedSlaveVer15 implements OFAsyncConfigPropFlowRemovedSlave {
    private static final Logger logger = LoggerFactory.getLogger(OFAsyncConfigPropFlowRemovedSlaveVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int LENGTH = 8;

    private final static long DEFAULT_MASK = 0x0L;

    // OF message fields
    private final long mask;
    //
    // Immutable default instance
    final static OFAsyncConfigPropFlowRemovedSlaveVer15 DEFAULT = new OFAsyncConfigPropFlowRemovedSlaveVer15(
        DEFAULT_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFAsyncConfigPropFlowRemovedSlaveVer15(long mask) {
        // Normalize to the unsigned 32-bit range expected on the wire.
        this.mask = U32.normalize(mask);
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x4;
    }

    @Override
    public long getMask() {
        return mask;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    /** @return a builder pre-populated with this instance's field values. */
    public OFAsyncConfigPropFlowRemovedSlave.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that falls back to a parent message's values for unset fields. */
    static class BuilderWithParent implements OFAsyncConfigPropFlowRemovedSlave.Builder {
        final OFAsyncConfigPropFlowRemovedSlaveVer15 parentMessage;

        // OF message fields
        private boolean maskSet;
        private long mask;

        BuilderWithParent(OFAsyncConfigPropFlowRemovedSlaveVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x4;
        }

        @Override
        public long getMask() {
            return mask;
        }

        @Override
        public OFAsyncConfigPropFlowRemovedSlave.Builder setMask(long mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFAsyncConfigPropFlowRemovedSlave build() {
            // Unset fields inherit the parent message's value.
            long mask = this.maskSet ? this.mask : parentMessage.mask;
            //
            return new OFAsyncConfigPropFlowRemovedSlaveVer15(
                mask
            );
        }
    }

    /** Builder that falls back to the defined defaults for unset fields. */
    static class Builder implements OFAsyncConfigPropFlowRemovedSlave.Builder {
        // OF message fields
        private boolean maskSet;
        private long mask;

        @Override
        public int getType() {
            return 0x4;
        }

        @Override
        public long getMask() {
            return mask;
        }

        @Override
        public OFAsyncConfigPropFlowRemovedSlave.Builder setMask(long mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        //
        @Override
        public OFAsyncConfigPropFlowRemovedSlave build() {
            long mask = this.maskSet ? this.mask : DEFAULT_MASK;
            return new OFAsyncConfigPropFlowRemovedSlaveVer15(
                mask
            );
        }
    }

    final static Reader READER = new Reader();

    /** Decodes the property from the wire; returns null when the buffer is short. */
    static class Reader implements OFMessageReader<OFAsyncConfigPropFlowRemovedSlave> {
        @Override
        public OFAsyncConfigPropFlowRemovedSlave readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x4
            short type = bb.readShort();
            if(type != (short) 0x4)
                throw new OFParseError("Wrong type: Expected=0x4(0x4), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long mask = U32.f(bb.readInt());
            OFAsyncConfigPropFlowRemovedSlaveVer15 asyncConfigPropFlowRemovedSlaveVer15 = new OFAsyncConfigPropFlowRemovedSlaveVer15(
                mask
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", asyncConfigPropFlowRemovedSlaveVer15);
            return asyncConfigPropFlowRemovedSlaveVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFAsyncConfigPropFlowRemovedSlaveVer15Funnel FUNNEL = new OFAsyncConfigPropFlowRemovedSlaveVer15Funnel();

    static class OFAsyncConfigPropFlowRemovedSlaveVer15Funnel implements Funnel<OFAsyncConfigPropFlowRemovedSlaveVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFAsyncConfigPropFlowRemovedSlaveVer15 message, PrimitiveSink sink) {
            // fixed value property type = 0x4
            sink.putShort((short) 0x4);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            sink.putLong(message.mask);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFAsyncConfigPropFlowRemovedSlaveVer15> {
        @Override
        public void write(ByteBuf bb, OFAsyncConfigPropFlowRemovedSlaveVer15 message) {
            // fixed value property type = 0x4
            bb.writeShort((short) 0x4);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            bb.writeInt(U32.t(message.mask));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFAsyncConfigPropFlowRemovedSlaveVer15(");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFAsyncConfigPropFlowRemovedSlaveVer15 other = (OFAsyncConfigPropFlowRemovedSlaveVer15) obj;
        if( mask != other.mask)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // FIX: accumulate into 'result' (prime * result + field) — the previous
        // code dropped the 'result +' term, deviating from the standard
        // hashCode accumulation pattern used by the generator template.
        result = prime * result + (int) (mask ^ (mask >>> 32));
        return result;
    }
}
| |
package geometry.diagram.edit.parts;
import geometry.diagram.edit.policies.GeometryTextSelectionEditPolicy;
import geometry.diagram.part.GeometryVisualIDRegistry;
import geometry.diagram.providers.GeometryElementTypes;
import geometry.diagram.providers.GeometryParserProvider;
import java.util.Collections;
import java.util.List;
import org.eclipse.draw2d.ConnectionLocator;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.transaction.RunnableWithResult;
import org.eclipse.gef.AccessibleEditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.requests.DirectEditRequest;
import org.eclipse.gef.tools.DirectEditManager;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParser;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserOptions;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.ITextAwareEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.LabelEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.LabelDirectEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.l10n.DiagramColorRegistry;
import org.eclipse.gmf.runtime.diagram.ui.label.ILabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.label.WrappingLabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.requests.RequestConstants;
import org.eclipse.gmf.runtime.diagram.ui.tools.TextDirectEditManager;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.emf.ui.services.parser.ISemanticParser;
import org.eclipse.gmf.runtime.notation.FontStyle;
import org.eclipse.gmf.runtime.notation.NotationPackage;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.directedit.TextDirectEditManager2;
import org.eclipse.gmf.tooling.runtime.draw2d.labels.SimpleLabelDelegate;
import org.eclipse.gmf.tooling.runtime.edit.policies.DefaultLinkLabelDragPolicy;
import org.eclipse.gmf.tooling.runtime.edit.policies.labels.IRefreshableFeedbackEditPolicy;
import org.eclipse.jface.text.contentassist.IContentAssistProcessor;
import org.eclipse.jface.viewers.ICellEditorValidator;
import org.eclipse.swt.SWT;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Image;
/**
 * GMF-generated edit part for the "Appearance" label attached to a Line
 * connection (visual ID 6001).  Implements {@link ITextAwareEditPart} so the
 * label supports parser-driven display text and direct (in-place) editing.
 * Generated code: structure and method bodies are left untouched so GMF
 * regeneration merges cleanly.
 *
 * @generated
 */
public class LineLabelAppearanceLabelTokEditPart extends LabelEditPart
        implements ITextAwareEditPart {

    /** The visual ID this edit part is registered under. @generated */
    public static final int VISUAL_ID = 6001;

    /** Lazily created manager that runs the direct-edit cell editor. @generated */
    private DirectEditManager manager;

    /** Lazily resolved parser that maps the semantic element to label text. @generated */
    private IParser parser;

    /** Semantic elements being listened to while a semantic parser is active. @generated */
    private List<?> parserElements;

    /** Text captured from the figure in setLabel(); shown when the parser yields nothing. @generated */
    private String defaultText;

    /** Lazily created delegate used when the figure is neither WrappingLabel nor Label. @generated */
    private ILabelDelegate labelDelegate;

    /**
     * Registers the snap-back position used when the label is reset to its
     * default location.
     *
     * @generated
     */
    static {
        registerSnapBackPosition(
                GeometryVisualIDRegistry
                        .getType(geometry.diagram.edit.parts.LineLabelAppearanceLabelTokEditPart.VISUAL_ID),
                new Point(7, 7));
    }

    /**
     * Instantiates a new line label appearance label tok edit part.
     *
     * @param view the notation view backing this edit part
     * @generated
     */
    public LineLabelAppearanceLabelTokEditPart(View view) {
        super(view);
    }

    /**
     * Installs direct-edit, selection-feedback, and drag policies on top of the
     * inherited defaults.
     *
     * @generated
     */
    protected void createDefaultEditPolicies() {
        super.createDefaultEditPolicies();
        installEditPolicy(EditPolicy.DIRECT_EDIT_ROLE,
                new LabelDirectEditPolicy());
        installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE,
                new GeometryTextSelectionEditPolicy());
        installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE,
                new DefaultLinkLabelDragPolicy());
    }

    /**
     * Returns the connection anchor point for this label (the middle of the
     * connection).
     *
     * @return the key point
     * @generated
     */
    public int getKeyPoint() {
        return ConnectionLocator.MIDDLE;
    }

    /**
     * Reads the text from the given figure, dispatching on its concrete type.
     *
     * @param figure the figure
     * @return the figure's current text
     * @generated
     */
    protected String getLabelTextHelper(IFigure figure) {
        if (figure instanceof WrappingLabel) {
            return ((WrappingLabel) figure).getText();
        } else if (figure instanceof Label) {
            return ((Label) figure).getText();
        } else {
            return getLabelDelegate().getText();
        }
    }

    /**
     * Writes the given text to the figure, dispatching on its concrete type.
     *
     * @param figure the figure
     * @param text the text
     * @generated
     */
    protected void setLabelTextHelper(IFigure figure, String text) {
        if (figure instanceof WrappingLabel) {
            ((WrappingLabel) figure).setText(text);
        } else if (figure instanceof Label) {
            ((Label) figure).setText(text);
        } else {
            getLabelDelegate().setText(text);
        }
    }

    /**
     * Reads the icon from the given figure, dispatching on its concrete type.
     *
     * @param figure the figure
     * @return the figure's current icon
     * @generated
     */
    protected Image getLabelIconHelper(IFigure figure) {
        if (figure instanceof WrappingLabel) {
            return ((WrappingLabel) figure).getIcon();
        } else if (figure instanceof Label) {
            return ((Label) figure).getIcon();
        } else {
            return getLabelDelegate().getIcon(0);
        }
    }

    /**
     * Writes the given icon to the figure, dispatching on its concrete type.
     *
     * @param figure the figure
     * @param icon the icon
     * @generated
     */
    protected void setLabelIconHelper(IFigure figure, Image icon) {
        if (figure instanceof WrappingLabel) {
            ((WrappingLabel) figure).setIcon(icon);
            return;
        } else if (figure instanceof Label) {
            ((Label) figure).setIcon(icon);
            return;
        } else {
            getLabelDelegate().setIcon(icon, 0);
        }
    }

    /**
     * Replaces this edit part's figure, capturing its initial text as the
     * fallback default text.
     *
     * @param figure the new label figure
     * @generated
     */
    public void setLabel(WrappingLabel figure) {
        unregisterVisuals();
        setFigure(figure);
        defaultText = getLabelTextHelper(figure);
        registerVisuals();
        refreshVisuals();
    }

    /**
     * Labels have no model children.
     *
     * @return an empty list
     * @generated
     */
    @SuppressWarnings("rawtypes")
    protected List getModelChildren() {
        return Collections.EMPTY_LIST;
    }

    /**
     * Labels have no child edit parts to look up.
     *
     * @param semanticHint the semantic hint
     * @return always null
     * @generated
     */
    public IGraphicalEditPart getChildBySemanticHint(String semanticHint) {
        return null;
    }

    /**
     * Returns the semantic element the parser operates on.
     *
     * @return the resolved semantic element
     * @generated
     */
    protected EObject getParserElement() {
        return resolveSemanticElement();
    }

    /**
     * This label has no icon.
     *
     * @return always null
     * @generated
     */
    protected Image getLabelIcon() {
        return null;
    }

    /**
     * Computes the display text via the parser, falling back to the default
     * text captured in setLabel() when the parser yields nothing.
     *
     * @return the label text
     * @generated
     */
    protected String getLabelText() {
        String text = null;
        EObject parserElement = getParserElement();
        if (parserElement != null && getParser() != null) {
            text = getParser().getPrintString(
                    new EObjectAdapter(parserElement),
                    getParserOptions().intValue());
        }
        if (text == null || text.length() == 0) {
            text = defaultText;
        }
        return text;
    }

    /**
     * Sets the figure's text and refreshes selection feedback.
     *
     * @param text the new label text
     * @generated
     */
    public void setLabelText(String text) {
        setLabelTextHelper(getFigure(), text);
        refreshSelectionFeedback();
    }

    /**
     * Returns the text presented in the direct-edit cell editor (the parser's
     * edit string, which may differ from the display string).
     *
     * @return the editable text, or an empty string when no parser/element exists
     * @generated
     */
    public String getEditText() {
        if (getParserElement() == null || getParser() == null) {
            return ""; //$NON-NLS-1$
        }
        return getParser().getEditString(
                new EObjectAdapter(getParserElement()),
                getParserOptions().intValue());
    }

    /**
     * This label is read-only; direct editing is disabled.
     *
     * @return always false
     * @generated
     */
    protected boolean isEditable() {
        return false;
    }

    /**
     * Returns a validator that checks a candidate edit string against the
     * parser, inside a read-only transaction on the editing domain.
     *
     * @return the cell editor validator
     * @generated
     */
    public ICellEditorValidator getEditTextValidator() {
        return new ICellEditorValidator() {
            public String isValid(final Object value) {
                if (value instanceof String) {
                    final EObject element = getParserElement();
                    final IParser parser = getParser();
                    try {
                        IParserEditStatus valid = (IParserEditStatus) getEditingDomain()
                                .runExclusive(
                                        new RunnableWithResult.Impl<IParserEditStatus>() {
                                            public void run() {
                                                setResult(parser
                                                        .isValidEditString(
                                                                new EObjectAdapter(
                                                                        element),
                                                                (String) value));
                                            }
                                        });
                        return valid.getCode() == ParserEditStatus.EDITABLE ? null
                                : valid.getMessage();
                    } catch (InterruptedException ie) {
                        ie.printStackTrace();
                    }
                }
                // shouldn't get here
                return null;
            }
        };
    }

    /**
     * Returns the parser's content-assist processor for the direct-edit cell
     * editor, if a parser and element are available.
     *
     * @return the completion processor, or null
     * @generated
     */
    public IContentAssistProcessor getCompletionProcessor() {
        if (getParserElement() == null || getParser() == null) {
            return null;
        }
        return getParser().getCompletionProcessor(
                new EObjectAdapter(getParserElement()));
    }

    /**
     * No special parser options are used.
     *
     * @return ParserOptions.NONE
     * @generated
     */
    public ParserOptions getParserOptions() {
        return ParserOptions.NONE;
    }

    /**
     * Lazily resolves the parser for this label from the provider registry.
     *
     * @return the parser
     * @generated
     */
    public IParser getParser() {
        if (parser == null) {
            parser = GeometryParserProvider
                    .getParser(
                            GeometryElementTypes.Line_4001,
                            getParserElement(),
                            GeometryVisualIDRegistry
                                    .getType(geometry.diagram.edit.parts.LineLabelAppearanceLabelTokEditPart.VISUAL_ID));
        }
        return parser;
    }

    /**
     * Lazily creates the direct-edit manager.
     *
     * @return the manager
     * @generated
     */
    protected DirectEditManager getManager() {
        if (manager == null) {
            setManager(new TextDirectEditManager2(this, null,
                    GeometryEditPartFactory.getTextCellEditorLocator(this)));
        }
        return manager;
    }

    /**
     * Sets the direct-edit manager.
     *
     * @param manager the new manager
     * @generated
     */
    protected void setManager(DirectEditManager manager) {
        this.manager = manager;
    }

    /**
     * Opens the direct-edit cell editor at the default location.
     *
     * @generated
     */
    protected void performDirectEdit() {
        getManager().show();
    }

    /**
     * Opens the direct-edit cell editor at the given mouse location, when the
     * manager supports it.
     *
     * @param eventLocation the event location
     * @generated
     */
    protected void performDirectEdit(Point eventLocation) {
        if (getManager().getClass() == TextDirectEditManager2.class) {
            ((TextDirectEditManager2) getManager()).show(eventLocation
                    .getSWTPoint());
        }
    }

    /**
     * Opens the direct-edit cell editor seeded with the typed character, when
     * the manager supports it.
     *
     * @param initialCharacter the initial character
     * @generated
     */
    private void performDirectEdit(char initialCharacter) {
        if (getManager() instanceof TextDirectEditManager) {
            ((TextDirectEditManager) getManager()).show(initialCharacter);
        } else //
        if (getManager() instanceof TextDirectEditManager2) {
            ((TextDirectEditManager2) getManager()).show(initialCharacter);
        } else //
        {
            performDirectEdit();
        }
    }

    /**
     * Handles a direct-edit request inside a read-only transaction, routing to
     * the character-, location-, or default-flavored performDirectEdit.
     *
     * @param request the request
     * @generated
     */
    protected void performDirectEditRequest(Request request) {
        final Request theRequest = request;
        try {
            getEditingDomain().runExclusive(new Runnable() {
                public void run() {
                    if (isActive() && isEditable()) {
                        if (theRequest
                                .getExtendedData()
                                .get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR) instanceof Character) {
                            Character initialChar = (Character) theRequest
                                    .getExtendedData()
                                    .get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR);
                            performDirectEdit(initialChar.charValue());
                        } else if ((theRequest instanceof DirectEditRequest)
                                && (getEditText().equals(getLabelText()))) {
                            DirectEditRequest editRequest = (DirectEditRequest) theRequest;
                            performDirectEdit(editRequest.getLocation());
                        } else {
                            performDirectEdit();
                        }
                    }
                }
            });
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Refreshes label text/icon and all font-related visuals.
     *
     * @generated
     */
    protected void refreshVisuals() {
        super.refreshVisuals();
        refreshLabel();
        refreshFont();
        refreshFontColor();
        refreshUnderline();
        refreshStrikeThrough();
    }

    /**
     * Pushes the parsed text and icon into the figure and refreshes selection
     * feedback.
     *
     * @generated
     */
    protected void refreshLabel() {
        setLabelTextHelper(getFigure(), getLabelText());
        setLabelIconHelper(getFigure(), getLabelIcon());
        refreshSelectionFeedback();
    }

    /**
     * Applies the notation FontStyle's underline setting to the figure.
     *
     * @generated
     */
    protected void refreshUnderline() {
        FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
                NotationPackage.eINSTANCE.getFontStyle());
        if (style != null && getFigure() instanceof WrappingLabel) {
            ((WrappingLabel) getFigure()).setTextUnderline(style.isUnderline());
        }
    }

    /**
     * Applies the notation FontStyle's strike-through setting to the figure.
     *
     * @generated
     */
    protected void refreshStrikeThrough() {
        FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
                NotationPackage.eINSTANCE.getFontStyle());
        if (style != null && getFigure() instanceof WrappingLabel) {
            ((WrappingLabel) getFigure()).setTextStrikeThrough(style
                    .isStrikeThrough());
        }
    }

    /**
     * Applies the notation FontStyle's name/height/bold/italic settings to the
     * figure.
     *
     * @generated
     */
    protected void refreshFont() {
        FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
                NotationPackage.eINSTANCE.getFontStyle());
        if (style != null) {
            FontData fontData = new FontData(style.getFontName(),
                    style.getFontHeight(), (style.isBold() ? SWT.BOLD
                            : SWT.NORMAL)
                            | (style.isItalic() ? SWT.ITALIC : SWT.NORMAL));
            setFont(fontData);
        }
    }

    /**
     * Asks the drag and selection-feedback policies to refresh their feedback.
     *
     * @generated
     */
    private void refreshSelectionFeedback() {
        requestEditPolicyFeedbackRefresh(EditPolicy.PRIMARY_DRAG_ROLE);
        requestEditPolicyFeedbackRefresh(EditPolicy.SELECTION_FEEDBACK_ROLE);
    }

    /**
     * Refreshes feedback on the policy under the given key, when it supports
     * refreshable feedback.
     *
     * @param editPolicyKey the edit policy key
     * @generated
     */
    private void requestEditPolicyFeedbackRefresh(String editPolicyKey) {
        Object editPolicy = getEditPolicy(editPolicyKey);
        if (editPolicy instanceof IRefreshableFeedbackEditPolicy) {
            ((IRefreshableFeedbackEditPolicy) editPolicy).refreshFeedback();
        }
    }

    /**
     * Sets the figure's foreground (font) color.
     *
     * @param color the new font color
     * @generated
     */
    protected void setFontColor(Color color) {
        getFigure().setForegroundColor(color);
    }

    /**
     * Registers notification listeners on every semantic element the parser
     * reads, so label text stays in sync; falls back to the default single
     * listener otherwise.
     *
     * @generated
     */
    protected void addSemanticListeners() {
        if (getParser() instanceof ISemanticParser) {
            EObject element = resolveSemanticElement();
            parserElements = ((ISemanticParser) getParser())
                    .getSemanticElementsBeingParsed(element);
            for (int i = 0; i < parserElements.size(); i++) {
                addListenerFilter(
                        "SemanticModel" + i, this, (EObject) parserElements.get(i)); //$NON-NLS-1$
            }
        } else {
            super.addSemanticListeners();
        }
    }

    /**
     * Removes the listeners registered by addSemanticListeners().
     *
     * @generated
     */
    protected void removeSemanticListeners() {
        if (parserElements != null) {
            for (int i = 0; i < parserElements.size(); i++) {
                removeListenerFilter("SemanticModel" + i); //$NON-NLS-1$
            }
        } else {
            super.removeSemanticListeners();
        }
    }

    /**
     * Lazily creates an accessible edit part that reports the label text as
     * its name.
     *
     * @return the accessible edit part
     * @generated
     */
    protected AccessibleEditPart getAccessibleEditPart() {
        if (accessibleEP == null) {
            accessibleEP = new AccessibleGraphicalEditPart() {
                public void getName(AccessibleEvent e) {
                    e.result = getLabelTextHelper(getFigure());
                }
            };
        }
        return accessibleEP;
    }

    /**
     * Returns the view whose FontStyle drives this label's font rendering.
     *
     * @return the font style owner view
     * @generated
     */
    private View getFontStyleOwnerView() {
        return getPrimaryView();
    }

    /**
     * Lazily creates a label delegate matching the figure's concrete type.
     *
     * @return the label delegate
     * @generated
     */
    private ILabelDelegate getLabelDelegate() {
        if (labelDelegate == null) {
            IFigure label = getFigure();
            if (label instanceof WrappingLabel) {
                labelDelegate = new WrappingLabelDelegate((WrappingLabel) label);
            } else {
                labelDelegate = new SimpleLabelDelegate((Label) label);
            }
        }
        return labelDelegate;
    }

    /**
     * Exposes the label delegate through the adapter mechanism.
     *
     * @param key the adapter key
     * @return the adapter
     * @generated
     */
    @Override
    public Object getAdapter(Class key) {
        if (ILabelDelegate.class.equals(key)) {
            return getLabelDelegate();
        }
        return super.getAdapter(key);
    }

    /**
     * Reacts to notation/semantic model changes: font-related features trigger
     * the matching refresh; parser-affecting events refresh the label and, for
     * semantic parsers, re-register semantic listeners.
     *
     * @param event the event
     * @generated
     */
    protected void handleNotificationEvent(Notification event) {
        Object feature = event.getFeature();
        if (NotationPackage.eINSTANCE.getFontStyle_FontColor().equals(feature)) {
            Integer c = (Integer) event.getNewValue();
            setFontColor(DiagramColorRegistry.getInstance().getColor(c));
        } else if (NotationPackage.eINSTANCE.getFontStyle_Underline().equals(
                feature)) {
            refreshUnderline();
        } else if (NotationPackage.eINSTANCE.getFontStyle_StrikeThrough()
                .equals(feature)) {
            refreshStrikeThrough();
        } else if (NotationPackage.eINSTANCE.getFontStyle_FontHeight().equals(
                feature)
                || NotationPackage.eINSTANCE.getFontStyle_FontName().equals(
                        feature)
                || NotationPackage.eINSTANCE.getFontStyle_Bold()
                        .equals(feature)
                || NotationPackage.eINSTANCE.getFontStyle_Italic().equals(
                        feature)) {
            refreshFont();
        } else {
            if (getParser() != null
                    && getParser().isAffectingEvent(event,
                            getParserOptions().intValue())) {
                refreshLabel();
            }
            if (getParser() instanceof ISemanticParser) {
                ISemanticParser modelParser = (ISemanticParser) getParser();
                if (modelParser.areSemanticElementsAffected(null, event)) {
                    removeSemanticListeners();
                    if (resolveSemanticElement() != null) {
                        addSemanticListeners();
                    }
                    refreshLabel();
                }
            }
        }
        super.handleNotificationEvent(event);
    }

    /**
     * No figure is created here; the parent assigns one via setLabel().
     *
     * @return always null
     * @generated
     */
    protected IFigure createFigure() {
        // Parent should assign one using setLabel() method
        return null;
    }
}
| |
/*
* Copyright 2011-2016 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ix;
import java.util.*;
import org.junit.*;
/**
 * Tests for {@code Ix.window(size)} and {@code Ix.window(size, skip)} covering
 * the exact, gapped (skip &gt; size) and overlapping (skip &lt; size) cases,
 * null elements, and the single-consumption contract of inner windows.
 */
public class WindowTest {
    @Test
    public void normal() {
        // 1..5 in windows of 2: two full windows plus a one-element remainder.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(3, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2);
        IxTestHelper.assertValues(collected.get(1), 3, 4);
        IxTestHelper.assertValues(collected.get(2), 5);
    }
    @Test
    public void normalSizeSkipSame() {
        // window(2, 2) must behave exactly like window(2).
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2, 2);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(3, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2);
        IxTestHelper.assertValues(collected.get(1), 3, 4);
        IxTestHelper.assertValues(collected.get(2), 5);
    }
    @Test
    public void normalOne() {
        // Size-1 windows: one window per element.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(1);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(5, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1);
        IxTestHelper.assertValues(collected.get(1), 2);
        IxTestHelper.assertValues(collected.get(2), 3);
        IxTestHelper.assertValues(collected.get(3), 4);
        IxTestHelper.assertValues(collected.get(4), 5);
    }
    @Test
    public void normalAll() {
        // Window size equal to the source length yields a single window.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(5);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3, 4, 5);
    }
    @Test
    public void normalMore() {
        // Window size larger than the source still yields a single window.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(10);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3, 4, 5);
    }
    @Test
    public void innerMovesParent() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(3);
        Iterator<Ix<Integer>> outer = windows.iterator();
        Ix<Integer> window = outer.next();
        Iterator<Integer> values = window.iterator();
        // A window may be iterated only once.
        try {
            window.iterator();
            Assert.fail("Should have thrown IllegalStateException");
        } catch (IllegalStateException ex) {
            Assert.assertEquals("This Window Ix iterable can be consumed only once.", ex.getMessage());
        }
        Assert.assertEquals(1, values.next().intValue());
        Assert.assertEquals(2, values.next().intValue());
        Assert.assertEquals(3, values.next().intValue());
        Assert.assertFalse(values.hasNext());
    }
    @Test
    public void normalSkip() {
        // skip > size: element 3 falls into the gap and is dropped.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2, 3);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(2, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2);
        IxTestHelper.assertValues(collected.get(1), 4, 5);
    }
    @Test
    public void normalSkip2() {
        // Every other element survives.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(1, 2);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(3, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1);
        IxTestHelper.assertValues(collected.get(1), 3);
        IxTestHelper.assertValues(collected.get(2), 5);
    }
    @Test
    public void normalSkip3() {
        // Skip past the end of the source: only the first window is produced.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(1, 6);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1);
    }
    @Test
    public void normalAllSkip() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(5, 10);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3, 4, 5);
    }
    @Test
    public void normalMoreSkip() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(10, 15);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3, 4, 5);
    }
    @Test
    public void justSkip() {
        // Single-element source: one short window.
        Ix<Ix<Integer>> windows = Ix.just(1).window(2, 3);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(1, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1);
    }
    @Test
    public void emptySkip() {
        // Empty source: no windows at all.
        Ix<Ix<Integer>> windows = Ix.<Integer>empty().window(2, 3);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(0, collected.size());
    }
    @Test
    public void skipInnerMovesParent() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2, 3);
        Iterator<Ix<Integer>> outer = windows.iterator();
        Ix<Integer> window = outer.next();
        Iterator<Integer> values = window.iterator();
        // A window may be iterated only once.
        try {
            window.iterator();
            Assert.fail("Should have thrown IllegalStateException");
        } catch (IllegalStateException ex) {
            Assert.assertEquals("This Window Ix iterable can be consumed only once.", ex.getMessage());
        }
        Assert.assertEquals(1, values.next().intValue());
        Assert.assertEquals(2, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        // The next window starts after the skip gap.
        window = outer.next();
        values = window.iterator();
        Assert.assertEquals(4, values.next().intValue());
        Assert.assertEquals(5, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        Assert.assertFalse(outer.hasNext());
    }
    @Test
    public void normalOverlap() {
        // skip < size: consecutive windows share elements.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2, 1);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(5, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2);
        IxTestHelper.assertValues(collected.get(1), 2, 3);
        IxTestHelper.assertValues(collected.get(2), 3, 4);
        IxTestHelper.assertValues(collected.get(3), 4, 5);
        IxTestHelper.assertValues(collected.get(4), 5);
    }
    @Test
    public void normalOverlap2() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(3, 1);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(5, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3);
        IxTestHelper.assertValues(collected.get(1), 2, 3, 4);
        IxTestHelper.assertValues(collected.get(2), 3, 4, 5);
        IxTestHelper.assertValues(collected.get(3), 4, 5);
        IxTestHelper.assertValues(collected.get(4), 5);
    }
    @Test
    public void normalOverlap3() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(3, 2);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(3, collected.size());
        IxTestHelper.assertValues(collected.get(0), 1, 2, 3);
        IxTestHelper.assertValues(collected.get(1), 3, 4, 5);
        IxTestHelper.assertValues(collected.get(2), 5);
    }
    @Test
    public void nullExact() {
        // Null elements are passed through unchanged.
        Ix<Ix<Integer>> windows = Ix.<Integer>fromArray(null, null, null, null, null).window(2);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(3, collected.size());
        IxTestHelper.assertValues(collected.get(0), null, null);
        IxTestHelper.assertValues(collected.get(1), null, null);
        IxTestHelper.assertValues(collected.get(2), (Integer)null);
    }
    @Test
    public void nullExact2() {
        Ix<Ix<Integer>> windows = Ix.<Integer>fromArray(null, null, null, null, null).window(6);
        Iterator<Integer> values = windows.iterator().next().iterator();
        Assert.assertNull(values.next());
        Assert.assertNull(values.next());
        Assert.assertNull(values.next());
        Assert.assertNull(values.next());
        Assert.assertNull(values.next());
        Assert.assertFalse(values.hasNext());
    }
    @Test
    public void nullSkip() {
        Ix<Ix<Integer>> windows = Ix.<Integer>fromArray(null, null, null, null, null).window(2, 3);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(2, collected.size());
        IxTestHelper.assertValues(collected.get(0), null, null);
        IxTestHelper.assertValues(collected.get(1), null, null);
    }
    @Test
    public void nullOverlap() {
        Ix<Ix<Integer>> windows = Ix.<Integer>fromArray(null, null, null, null, null).window(2, 1);
        List<Ix<Integer>> collected = windows.collectToList().first();
        Assert.assertEquals(5, collected.size());
        IxTestHelper.assertValues(collected.get(0), null, null);
        IxTestHelper.assertValues(collected.get(1), null, null);
        IxTestHelper.assertValues(collected.get(2), null, null);
        IxTestHelper.assertValues(collected.get(3), null, null);
        IxTestHelper.assertValues(collected.get(4), (Integer)null);
    }
    @Test
    public void overlapInnerMovesParent() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(2, 1);
        Iterator<Ix<Integer>> outer = windows.iterator();
        Ix<Integer> window = outer.next();
        Iterator<Integer> values = window.iterator();
        // A window may be iterated only once.
        try {
            window.iterator();
            Assert.fail("Should have thrown IllegalStateException");
        } catch (IllegalStateException ex) {
            Assert.assertEquals("This Window Ix iterable can be consumed only once.", ex.getMessage());
        }
        Assert.assertEquals(1, values.next().intValue());
        Assert.assertEquals(2, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        // The next overlapping window starts one element later.
        window = outer.next();
        values = window.iterator();
        Assert.assertEquals(2, values.next().intValue());
        Assert.assertEquals(3, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        Assert.assertTrue(outer.hasNext());
    }
    @Test
    public void overlapInnerMovesParent2() {
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(3, 2);
        Iterator<Ix<Integer>> outer = windows.iterator();
        Ix<Integer> window = outer.next();
        Iterator<Integer> values = window.iterator();
        // A window may be iterated only once.
        try {
            window.iterator();
            Assert.fail("Should have thrown IllegalStateException");
        } catch (IllegalStateException ex) {
            Assert.assertEquals("This Window Ix iterable can be consumed only once.", ex.getMessage());
        }
        Assert.assertEquals(1, values.next().intValue());
        Assert.assertEquals(2, values.next().intValue());
        Assert.assertEquals(3, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        window = outer.next();
        values = window.iterator();
        Assert.assertEquals(3, values.next().intValue());
        Assert.assertEquals(4, values.next().intValue());
        Assert.assertEquals(5, values.next().intValue());
        Assert.assertFalse(values.hasNext());
        Assert.assertTrue(outer.hasNext());
    }
    @Test
    public void overlapParentMoved() {
        // Advancing the parent before consuming a window must still yield
        // the correct contents for each window.
        Ix<Ix<Integer>> windows = Ix.range(1, 5).window(4, 3);
        Iterator<Ix<Integer>> outer = windows.iterator();
        Ix<Integer> first = outer.next();
        Ix<Integer> second = outer.next();
        Assert.assertFalse(outer.hasNext());
        IxTestHelper.assertValues(first, 1, 2, 3, 4);
        IxTestHelper.assertValues(second, 4, 5);
    }
}
| |
/*
* Copyright 2011 Sourcesense
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sourcesense.stone.jcr.base.util;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.jcr.AccessDeniedException;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.security.AccessControlEntry;
import javax.jcr.security.AccessControlException;
import javax.jcr.security.AccessControlList;
import javax.jcr.security.AccessControlManager;
import javax.jcr.security.AccessControlPolicy;
import javax.jcr.security.AccessControlPolicyIterator;
import javax.jcr.security.Privilege;
/**
* A simple utility class providing utilities with respect to
* access control over repositories.
*/
public class AccessControlUtil {
// the name of the accessor method for the AccessControlManager
private static final String METHOD_GET_ACCESS_CONTROL_MANAGER = "getAccessControlManager";
// the name of the accessor method for the UserManager
private static final String METHOD_GET_USER_MANAGER = "getUserManager";
// the name of the accessor method for the PrincipalManager
private static final String METHOD_GET_PRINCIPAL_MANAGER = "getPrincipalManager";
// the name of the JackrabbitAccessControlList method getPath
private static final String METHOD_JACKRABBIT_ACL_GET_PATH = "getPath";
// the name of the JackrabbitAccessControlList method
private static final String METHOD_JACKRABBIT_ACL_IS_EMPTY = "isEmpty";
// the name of the JackrabbitAccessControlList method
private static final String METHOD_JACKRABBIT_ACL_SIZE = "size";
// the name of the JackrabbitAccessControlList method
private static final String METHOD_JACKRABBIT_ACL_ADD_ENTRY = "addEntry";
// the name of the JackrabbitAccessControlEntry method
private static final String METHOD_JACKRABBIT_ACE_IS_ALLOW = "isAllow";
private static final Logger log = LoggerFactory.getLogger(AccessControlUtil.class);
// ---------- SessionImpl methods -----------------------------------------------------
/**
 * Returns the <code>AccessControlManager</code> of the given
 * <code>session</code>, obtained reflectively through the session's
 * <code>getAccessControlManager</code> method.
 *
 * @param session The JCR session whose <code>AccessControlManager</code> is
 *            to be returned. If the session is a pooled session, the
 *            session underlying the pooled session is actually used.
 * @return The <code>AccessControlManager</code> of the session
 * @throws UnsupportedRepositoryOperationException If the session has no
 *            <code>getAccessControlManager</code> method or the exception
 *            thrown by the method.
 * @throws RepositoryException Forwarded from the
 *            <code>getAccessControlManager</code> method call.
 */
public static AccessControlManager getAccessControlManager(Session session)
    throws UnsupportedRepositoryOperationException, RepositoryException {
    // Reflection keeps this utility free of a compile-time dependency on a
    // particular repository implementation.
    return safeInvokeRepoMethod(session, METHOD_GET_ACCESS_CONTROL_MANAGER, AccessControlManager.class);
}
// ---------- JackrabbitSession methods -----------------------------------------------
/**
 * Returns the <code>UserManager</code> of the given <code>session</code>.
 * A <code>JackrabbitSession</code> is asked directly; any other session is
 * accessed reflectively through its <code>getUserManager</code> method.
 *
 * @param session The JCR session whose <code>UserManager</code> is to be
 *            returned. If the session is not a <code>JackrabbitSession</code>
 *            reflection is used to retrieve the manager from the repository.
 * @return The <code>UserManager</code> of the session.
 * @throws AccessDeniedException If this session is not allowed
 *            to access user data.
 * @throws UnsupportedRepositoryOperationException If the session has no
 *            <code>getUserManager</code> method or the exception
 *            thrown by the method.
 * @throws RepositoryException Forwarded from the
 *            <code>getUserManager</code> method call.
 */
public static UserManager getUserManager(Session session)
    throws AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
    JackrabbitSession jackrabbitSession = getJackrabbitSession(session);
    if (jackrabbitSession == null) {
        // Not a Jackrabbit session: fall back to reflection.
        return safeInvokeRepoMethod(session, METHOD_GET_USER_MANAGER, UserManager.class);
    }
    return jackrabbitSession.getUserManager();
}
/**
 * Returns the <code>PrincipalManager</code> of the given <code>session</code>.
 * A <code>JackrabbitSession</code> is asked directly; any other session is
 * accessed reflectively through its <code>getPrincipalManager</code> method.
 *
 * @param session The JCR session whose <code>PrincipalManager</code> is to be
 *            returned. If the session is not a <code>JackrabbitSession</code>
 *            reflection is used to retrieve the manager from the repository.
 * @return The <code>PrincipalManager</code> of the session.
 * @throws AccessDeniedException If this session is not allowed
 *            to access principal data.
 * @throws UnsupportedRepositoryOperationException If the session has no
 *            <code>getPrincipalManager</code> method or the exception
 *            thrown by the method.
 * @throws RepositoryException Forwarded from the
 *            <code>getPrincipalManager</code> method call.
 */
public static PrincipalManager getPrincipalManager(Session session)
    throws AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
    JackrabbitSession jackrabbitSession = getJackrabbitSession(session);
    if (jackrabbitSession == null) {
        // Not a Jackrabbit session: fall back to reflection.
        return safeInvokeRepoMethod(session, METHOD_GET_PRINCIPAL_MANAGER, PrincipalManager.class);
    }
    return jackrabbitSession.getPrincipalManager();
}
// ---------- AccessControlList methods -----------------------------------------------
/**
 * Returns the path of the node the given <code>AccessControlList</code>
 * was created for, read reflectively via the Jackrabbit ACL's
 * <code>getPath</code> method.
 *
 * @param acl the access control list to query
 * @return the node path the ACL applies to
 * @throws RepositoryException forwarded from the reflective call
 */
public static String getPath(AccessControlList acl) throws RepositoryException {
    return safeInvokeRepoMethod(acl, METHOD_JACKRABBIT_ACL_GET_PATH, String.class);
}
/**
 * Tells whether the given <code>AccessControlList</code> defines no
 * entries yet, read reflectively via the Jackrabbit ACL's
 * <code>isEmpty</code> method.
 *
 * @param acl the access control list to query
 * @return <code>true</code> when the ACL holds no entries
 * @throws RepositoryException forwarded from the reflective call
 */
public static boolean isEmpty(AccessControlList acl) throws RepositoryException {
    return safeInvokeRepoMethod(acl, METHOD_JACKRABBIT_ACL_IS_EMPTY, Boolean.class);
}
/**
 * Returns the number of entries in the given <code>AccessControlList</code>
 * (0 when empty), read reflectively via the Jackrabbit ACL's
 * <code>size</code> method.
 *
 * @param acl the access control list to query
 * @return the entry count
 * @throws RepositoryException forwarded from the reflective call
 */
public static int size(AccessControlList acl) throws RepositoryException {
    return safeInvokeRepoMethod(acl, METHOD_JACKRABBIT_ACL_SIZE, Integer.class);
}
/**
 * Same as {@link #addEntry(AccessControlList, Principal, Privilege[], boolean, Map)} using
 * some implementation specific restrictions.
 *
 * @param acl the access control list to modify
 * @param principal the principal the entry is for
 * @param privileges the privileges to grant or deny
 * @param isAllow <code>true</code> to grant, <code>false</code> to deny
 * @return <code>true</code> if the policy was modified
 * @throws AccessControlException forwarded from the reflective call
 * @throws RepositoryException forwarded from the reflective call
 */
public static boolean addEntry(AccessControlList acl, Principal principal, Privilege privileges[], boolean isAllow)
    throws AccessControlException, RepositoryException {
    Object[] args = new Object[] {principal, privileges, isAllow};
    // A wildcard element type avoids the raw-type warning the previous
    // blanket @SuppressWarnings("unchecked") was papering over.
    Class<?>[] types = new Class<?>[] {Principal.class, Privilege[].class, boolean.class};
    return safeInvokeRepoMethod(acl, METHOD_JACKRABBIT_ACL_ADD_ENTRY, Boolean.class, args, types);
}
/**
 * Adds an access control entry to the acl consisting of the specified
 * <code>principal</code>, the specified <code>privileges</code>, the
 * <code>isAllow</code> flag and an optional map containing additional
 * restrictions.
 * <p/>
 * This method returns <code>true</code> if this policy was modified,
 * <code>false</code> otherwise.
 *
 * @param acl the access control list to modify
 * @param principal the principal the entry is for
 * @param privileges the privileges to grant or deny
 * @param isAllow <code>true</code> to grant, <code>false</code> to deny
 * @param restrictions implementation specific restrictions, may be empty
 * @return <code>true</code> if the policy was modified
 * @throws UnsupportedRepositoryOperationException if the ACL has no
 *            <code>addEntry</code> method
 * @throws RepositoryException forwarded from the reflective call
 */
public static boolean addEntry(AccessControlList acl, Principal principal, Privilege privileges[], boolean isAllow, Map restrictions)
    throws UnsupportedRepositoryOperationException, RepositoryException {
    Object[] args = new Object[] {principal, privileges, isAllow, restrictions};
    // A wildcard element type avoids the raw-type warning the previous
    // blanket @SuppressWarnings("unchecked") was papering over.
    Class<?>[] types = new Class<?>[] {Principal.class, Privilege[].class, boolean.class, Map.class};
    return safeInvokeRepoMethod(acl, METHOD_JACKRABBIT_ACL_ADD_ENTRY, Boolean.class, args, types);
}
/**
 * Replaces existing access control entries in the ACL for the specified
 * <code>principal</code> and <code>resourcePath</code>. Any existing granted
 * or denied privileges which do not conflict with the specified privileges
 * are maintained. Where conflicts exist, existing privileges are dropped.
 * The end result will be at most two ACEs for the principal: one for grants
 * and one for denies. Aggregate privileges are disaggregated before checking
 * for conflicts.
 * @param session
 * @param resourcePath
 * @param principal
 * @param grantedPrivilegeNames
 * @param deniedPrivilegeNames
 * @param removedPrivilegeNames privileges which, if they exist, should be
 * removed for this principal and resource
 * @throws RepositoryException
 * @deprecated use {@link #replaceAccessControlEntry(Session, String, Principal, String[], String[], String[], String)} instead.
 */
@Deprecated // the Javadoc already said so; the annotation makes compilers warn too
public static void replaceAccessControlEntry(Session session, String resourcePath, Principal principal,
        String[] grantedPrivilegeNames, String[] deniedPrivilegeNames, String[] removedPrivilegeNames)
    throws RepositoryException {
    // Delegate to the ordering-aware variant with no explicit ordering.
    replaceAccessControlEntry(session, resourcePath, principal,
            grantedPrivilegeNames, deniedPrivilegeNames, removedPrivilegeNames, null);
}
/**
 * Replaces existing access control entries in the ACL for the specified
 * <code>principal</code> and <code>resourcePath</code>. Any existing granted
 * or denied privileges which do not conflict with the specified privileges
 * are maintained. Where conflicts exist, existing privileges are dropped.
 * The end result will be at most two ACEs for the principal: one for grants
 * and one for denies. Aggregate privileges are disaggregated before checking
 * for conflicts.
 * @param session
 * @param resourcePath
 * @param principal
 * @param grantedPrivilegeNames
 * @param deniedPrivilegeNames
 * @param removedPrivilegeNames privileges which, if they exist, should be
 * removed for this principal and resource
 * @param order where the access control entry should go in the list.
 *         Value should be one of these:
 *         <table>
 *         <tr><td>null</td><td>If the ACE for the principal doesn't exist add at the end, otherwise leave the ACE at its current position.</td></tr>
 *         <tr><td>first</td><td>Place the target ACE as the first amongst its siblings</td></tr>
 *         <tr><td>last</td><td>Place the target ACE as the last amongst its siblings</td></tr>
 *         <tr><td>before xyz</td><td>Place the target ACE immediately before the sibling whose name is xyz</td></tr>
 *         <tr><td>after xyz</td><td>Place the target ACE immediately after the sibling whose name is xyz</td></tr>
 *         <tr><td>numeric</td><td>Place the target ACE at the specified numeric index</td></tr>
 *         </table>
 * @throws RepositoryException
 */
public static void replaceAccessControlEntry(Session session, String resourcePath, Principal principal,
        String[] grantedPrivilegeNames, String[] deniedPrivilegeNames, String[] removedPrivilegeNames,
        String order)
    throws RepositoryException {
    AccessControlManager accessControlManager = getAccessControlManager(session);
    // Disaggregate every specified privilege so conflicts can be detected at
    // the finest granularity.
    Set<String> specifiedPrivilegeNames = new HashSet<String>();
    Set<String> newGrantedPrivilegeNames = disaggregateToPrivilegeNames(accessControlManager, grantedPrivilegeNames, specifiedPrivilegeNames);
    Set<String> newDeniedPrivilegeNames = disaggregateToPrivilegeNames(accessControlManager, deniedPrivilegeNames, specifiedPrivilegeNames);
    disaggregateToPrivilegeNames(accessControlManager, removedPrivilegeNames, specifiedPrivilegeNames);
    // Get or create the ACL for the node.
    AccessControlList acl = null;
    AccessControlPolicy[] policies = accessControlManager.getPolicies(resourcePath);
    for (AccessControlPolicy policy : policies) {
        if (policy instanceof AccessControlList) {
            acl = (AccessControlList) policy;
            break;
        }
    }
    if (acl == null) {
        AccessControlPolicyIterator applicablePolicies = accessControlManager.getApplicablePolicies(resourcePath);
        while (applicablePolicies.hasNext()) {
            AccessControlPolicy policy = applicablePolicies.nextAccessControlPolicy();
            if (policy instanceof AccessControlList) {
                acl = (AccessControlList) policy;
                break;
            }
        }
    }
    if (acl == null) {
        throw new RepositoryException("Could not obtain ACL for resource " + resourcePath);
    }
    // Snapshot the debug flag ONCE. The log level can change at runtime, and
    // re-querying log.isDebugEnabled() at the end (as the previous code did)
    // could dereference the still-null logging sets if debug logging was
    // switched on mid-call.
    final boolean debug = log.isDebugEnabled();
    // Used only for logging.
    Set<Privilege> oldGrants = null;
    Set<Privilege> oldDenies = null;
    if (debug) {
        oldGrants = new HashSet<Privilege>();
        oldDenies = new HashSet<Privilege>();
    }
    // Combine all existing ACEs for the target principal.
    AccessControlEntry[] accessControlEntries = acl.getAccessControlEntries();
    for (int i=0; i < accessControlEntries.length; i++) {
        AccessControlEntry ace = accessControlEntries[i];
        if (principal.equals(ace.getPrincipal())) {
            if (debug) {
                log.debug("Found Existing ACE for principal {} on resource {}", new Object[] {principal.getName(), resourcePath});
            }
            if (order == null || order.length() == 0) {
                //order not specified, so keep track of the original ACE position.
                order = String.valueOf(i);
            }
            boolean isAllow = isAllow(ace);
            Privilege[] privileges = ace.getPrivileges();
            if (debug) {
                if (isAllow) {
                    oldGrants.addAll(Arrays.asList(privileges));
                } else {
                    oldDenies.addAll(Arrays.asList(privileges));
                }
            }
            for (Privilege privilege : privileges) {
                Set<String> maintainedPrivileges = disaggregateToPrivilegeNames(privilege);
                // If there is any overlap with the newly specified privileges, then
                // break the existing privilege down; otherwise, maintain as is.
                if (!maintainedPrivileges.removeAll(specifiedPrivilegeNames)) {
                    // No conflicts, so preserve the original.
                    maintainedPrivileges.clear();
                    maintainedPrivileges.add(privilege.getName());
                }
                if (!maintainedPrivileges.isEmpty()) {
                    if (isAllow) {
                        newGrantedPrivilegeNames.addAll(maintainedPrivileges);
                    } else {
                        newDeniedPrivilegeNames.addAll(maintainedPrivileges);
                    }
                }
            }
            // Remove the old ACE; a fresh grant/deny pair is added below.
            acl.removeAccessControlEntry(ace);
        }
    }
    //add a fresh ACE with the granted privileges
    List<Privilege> grantedPrivilegeList = new ArrayList<Privilege>();
    for (String name : newGrantedPrivilegeNames) {
        grantedPrivilegeList.add(accessControlManager.privilegeFromName(name));
    }
    if (grantedPrivilegeList.size() > 0) {
        acl.addAccessControlEntry(principal, grantedPrivilegeList.toArray(new Privilege[grantedPrivilegeList.size()]));
    }
    //add a fresh ACE with the denied privileges
    List<Privilege> deniedPrivilegeList = new ArrayList<Privilege>();
    for (String name : newDeniedPrivilegeNames) {
        deniedPrivilegeList.add(accessControlManager.privilegeFromName(name));
    }
    if (deniedPrivilegeList.size() > 0) {
        addEntry(acl, principal, deniedPrivilegeList.toArray(new Privilege[deniedPrivilegeList.size()]), false);
    }
    //order the ACL and persist it
    reorderAccessControlEntries(acl, principal, order);
    accessControlManager.setPolicy(resourcePath, acl);
    if (debug) {
        List<String> oldGrantedNames = new ArrayList<String>(oldGrants.size());
        for (Privilege privilege : oldGrants) {
            oldGrantedNames.add(privilege.getName());
        }
        List<String> oldDeniedNames = new ArrayList<String>(oldDenies.size());
        for (Privilege privilege : oldDenies) {
            oldDeniedNames.add(privilege.getName());
        }
        log.debug("Updated ACE for principalName {} for resource {} from grants {}, denies {} to grants {}, denies {}", new Object [] {
            principal.getName(), resourcePath, oldGrantedNames, oldDeniedNames, newGrantedPrivilegeNames, newDeniedPrivilegeNames
        });
    }
}
// ---------- AccessControlEntry methods -----------------------------------------------
/**
 * Returns <code>true</code> if the access control entry represents granted
 * ('allow') rights, or <code>false</code> if it represents denied rights,
 * read reflectively via the Jackrabbit entry's <code>isAllow</code> method.
 *
 * @param ace the access control entry to query
 * @return whether the entry grants rather than denies its privileges
 * @throws RepositoryException forwarded from the reflective call
 */
public static boolean isAllow(AccessControlEntry ace) throws RepositoryException {
    return safeInvokeRepoMethod(ace, METHOD_JACKRABBIT_ACE_IS_ALLOW, Boolean.class);
}
// ---------- internal -----------------------------------------------------
/**
 * Invokes the named method on {@code target} via reflection, unwrapping
 * reflective failures into the repository exception types callers of this
 * utility expect.
 *
 * @param target the object to invoke the method on
 * @param methodName the name of the public method to look up and invoke
 * @param returnType the expected return type (used only for the generic cast)
 * @param args the invocation arguments
 * @param argsTypes the parameter types used for the method lookup
 * @return the invoked method's return value, cast to {@code returnType}
 * @throws UnsupportedRepositoryOperationException if {@code target} has no
 *            such method (the contract documented on the public accessors of
 *            this class), or forwarded from the invoked method
 * @throws RepositoryException forwarded from the invoked method, or wrapping
 *            any other checked failure
 */
@SuppressWarnings("unchecked")
private static <T> T safeInvokeRepoMethod(Object target, String methodName, Class<T> returnType, Object[] args, Class[] argsTypes)
    throws UnsupportedRepositoryOperationException, RepositoryException {
    try {
        Method m = target.getClass().getMethod(methodName, argsTypes);
        if (!m.isAccessible()) {
            m.setAccessible(true);
        }
        return (T) m.invoke(target, args);
    } catch (NoSuchMethodException nsme) {
        // The public accessors document that a missing method means the
        // operation is unsupported — not a generic repository failure, which
        // is what the previous catch-all produced.
        throw new UnsupportedRepositoryOperationException(methodName, nsme);
    } catch (InvocationTargetException ite) {
        // Unwrap and rethrow the exception raised by the invoked method.
        Throwable t = ite.getCause();
        if (t instanceof UnsupportedRepositoryOperationException) {
            throw (UnsupportedRepositoryOperationException) t;
        } else if (t instanceof AccessDeniedException) {
            throw (AccessDeniedException) t;
        } else if (t instanceof AccessControlException) {
            throw (AccessControlException) t;
        } else if (t instanceof RepositoryException) {
            throw (RepositoryException) t;
        } else if (t instanceof RuntimeException) {
            throw (RuntimeException) t;
        } else if (t instanceof Error) {
            throw (Error) t;
        } else {
            throw new RepositoryException(methodName, t);
        }
    } catch (Error e) {
        // Never wrap serious VM errors (the invocation branch above already
        // rethrows them; do the same for lookup/access failures).
        throw e;
    } catch (Throwable t) {
        // any other problem is just encapsulated
        throw new RepositoryException(methodName, t);
    }
}
/**
 * Convenience overload for invoking a no-argument repository method.
 * NOTE(review): the lookup always uses an empty parameter-type array, so any
 * varargs values supplied here would not match the looked-up method — all
 * current call sites pass no arguments; confirm before generalizing.
 */
private static <T> T safeInvokeRepoMethod(Object target, String methodName, Class<T> returnType, Object... args)
    throws UnsupportedRepositoryOperationException, RepositoryException {
    return safeInvokeRepoMethod(target, methodName, returnType, args, new Class[0]);
}
/**
 * Returns the given session as a <code>JackrabbitSession</code>, or
 * <code>null</code> when it is not one.
 */
private static JackrabbitSession getJackrabbitSession(Session session) {
    return (session instanceof JackrabbitSession)
            ? (JackrabbitSession) session
            : null;
}
/**
 * Null-safely copies an array of privilege names into a fresh set while also
 * accumulating the disaggregated form of each named privilege into
 * {@code disaggregatedPrivilegeNames}.
 *
 * @param accessControlManager used to resolve names into privileges
 * @param privilegeNames the input names, may be {@code null}
 * @param disaggregatedPrivilegeNames output set receiving the disaggregated names
 * @return a new set containing exactly the input names (empty for {@code null} input)
 * @throws RepositoryException forwarded from privilege resolution
 */
private static Set<String> disaggregateToPrivilegeNames(AccessControlManager accessControlManager,
        String[] privilegeNames, Set<String> disaggregatedPrivilegeNames)
    throws RepositoryException {
    if (privilegeNames == null) {
        return new HashSet<String>();
    }
    Set<String> originalPrivilegeNames = new HashSet<String>(Arrays.asList(privilegeNames));
    for (String privilegeName : privilegeNames) {
        Privilege resolved = accessControlManager.privilegeFromName(privilegeName);
        disaggregatedPrivilegeNames.addAll(disaggregateToPrivilegeNames(resolved));
    }
    return originalPrivilegeNames;
}
/**
 * Transforms an aggregated privilege into the set of names of its
 * constituent privileges. A non-aggregate privilege yields a singleton set
 * containing its own name.
 */
private static Set<String> disaggregateToPrivilegeNames(Privilege privilege) {
    Set<String> names = new HashSet<String>();
    if (!privilege.isAggregate()) {
        names.add(privilege.getName());
        return names;
    }
    for (Privilege part : privilege.getAggregatePrivileges()) {
        names.add(part.getName());
    }
    return names;
}
/**
* Move the ACE(s) for the specified principal to the position specified by the 'order'
* parameter.
*
* @param acl the acl of the node containing the ACE to position
* @param principal the user or group of the ACE to position
* @param order where the access control entry should go in the list.
* Value should be one of these:
* <table>
* <tr><td>first</td><td>Place the target ACE as the first amongst its siblings</td></tr>
* <tr><td>last</td><td>Place the target ACE as the last amongst its siblings</td></tr>
* <tr><td>before xyz</td><td>Place the target ACE immediately before the sibling whose name is xyz</td></tr>
* <tr><td>after xyz</td><td>Place the target ACE immediately after the sibling whose name is xyz</td></tr>
* <tr><td>numeric</td><td>Place the target ACE at the specified index</td></tr>
* </table>
* @throws RepositoryException
* @throws UnsupportedRepositoryOperationException
* @throws AccessControlException
*/
    private static void reorderAccessControlEntries(AccessControlList acl,
            Principal principal,
            String order)
                    throws RepositoryException {
        if (order == null || order.length() == 0) {
            return; //nothing to do
        }
        // Reordering is a Jackrabbit extension; the plain JCR AccessControlList
        // has no notion of entry order, so anything else is rejected below.
        if (acl instanceof JackrabbitAccessControlList) {
            JackrabbitAccessControlList jacl = (JackrabbitAccessControlList)acl;
            AccessControlEntry[] accessControlEntries = jacl.getAccessControlEntries();
            if (accessControlEntries.length <= 1) {
                return; //only one ACE, so nothing to reorder.
            }
            // Throughout this method a null 'beforeEntry' is the sentinel for
            // "move to the end of the list" (see JackrabbitAccessControlList#orderBefore).
            AccessControlEntry beforeEntry = null;
            if ("first".equals(order)) {
                beforeEntry = accessControlEntries[0];
            } else if ("last".equals(order)) {
                beforeEntry = null;
            } else if (order.startsWith("before ")) {
                // "before ".length() == 7; the remainder is the sibling principal name
                String beforePrincipalName = order.substring(7);
                //find the index of the ACE of the 'before' principal
                for (int i=0; i < accessControlEntries.length; i++) {
                    if (beforePrincipalName.equals(accessControlEntries[i].getPrincipal().getName())) {
                        //found it!
                        beforeEntry = accessControlEntries[i];
                        break;
                    }
                }
                if (beforeEntry == null) {
                    //didn't find an ACE that matched the 'before' principal
                    throw new IllegalArgumentException("No ACE was found for the specified principal: " + beforePrincipalName);
                }
            } else if (order.startsWith("after ")) {
                // "after ".length() == 6; the remainder is the sibling principal name
                String afterPrincipalName = order.substring(6);
                //find the index of the ACE of the 'after' principal
                // NOTE: scans backwards so a principal with multiple ACEs is anchored
                // after its LAST entry.
                for (int i = accessControlEntries.length - 1; i >= 0; i--) {
                    if (afterPrincipalName.equals(accessControlEntries[i].getPrincipal().getName())) {
                        //found it!
                        // the 'before' ACE is the next one after the 'after' ACE
                        if (i >= accessControlEntries.length - 1) {
                            //the after is the last one in the list
                            beforeEntry = null;
                        } else {
                            beforeEntry = accessControlEntries[i + 1];
                        }
                        break;
                    }
                }
                // NOTE(review): if the 'after' principal is the last entry in the list,
                // beforeEntry legitimately stays null ("move to end") but this check
                // cannot distinguish that from "principal not found" — verify intent.
                if (beforeEntry == null) {
                    //didn't find an ACE that matched the 'after' principal
                    throw new IllegalArgumentException("No ACE was found for the specified principal: " + afterPrincipalName);
                }
            } else {
                try {
                    // Fall back to interpreting the order value as a numeric index.
                    int index = Integer.parseInt(order);
                    if (index > accessControlEntries.length) {
                        //invalid index
                        throw new IndexOutOfBoundsException("Index value is too large: " + index);
                    }
                    if (index == 0) {
                        beforeEntry = accessControlEntries[0];
                    } else {
                        //the index value is the index of the principal. A principal may have more
                        // than one ACEs (deny + grant), so we need to compensate.
                        Set<Principal> processedPrincipals = new HashSet<Principal>();
                        for (int i = 0; i < accessControlEntries.length; i++) {
                            Principal principal2 = accessControlEntries[i].getPrincipal();
                            if (processedPrincipals.size() == index &&
                                    !processedPrincipals.contains(principal2)) {
                                //we are now at the correct position in the list
                                beforeEntry = accessControlEntries[i];
                                break;
                            }
                            processedPrincipals.add(principal2);
                        }
                    }
                } catch (NumberFormatException nfe) {
                    //not a number.
                    throw new IllegalArgumentException("Illegal value for the order parameter: " + order);
                }
            }
            //now loop through the entries to move the affected ACEs to the specified
            // position.
            // Iterating in reverse preserves the relative order of the principal's own
            // entries as each one is moved before the same anchor.
            for (int i = accessControlEntries.length - 1; i >= 0; i--) {
                AccessControlEntry ace = accessControlEntries[i];
                if (principal.equals(ace.getPrincipal())) {
                    //this ACE is for the specified principal.
                    jacl.orderBefore(ace, beforeEntry);
                }
            }
        } else {
            throw new IllegalArgumentException("The acl must be an instance of JackrabbitAccessControlList");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cordova.camera;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import javax.microedition.io.Connector;
import javax.microedition.io.file.FileConnection;
import org.apache.cordova.api.Plugin;
import org.apache.cordova.api.PluginResult;
import org.apache.cordova.json4j.JSONArray;
import org.apache.cordova.json4j.JSONException;
import org.apache.cordova.util.Logger;
import net.rim.blackberry.api.invoke.CameraArguments;
import net.rim.blackberry.api.invoke.Invoke;
import net.rim.device.api.io.Base64OutputStream;
import net.rim.device.api.io.IOUtilities;
import net.rim.device.api.system.ApplicationDescriptor;
import net.rim.device.api.system.Bitmap;
import net.rim.device.api.system.Characters;
import net.rim.device.api.system.ControlledAccessException;
import net.rim.device.api.system.EncodedImage;
import net.rim.device.api.system.EventInjector;
import net.rim.device.api.system.JPEGEncodedImage;
import net.rim.device.api.system.PNGEncodedImage;
import net.rim.device.api.ui.UiApplication;
/**
* The Camera plugin interface.
*
* The Camera class can invoke the following actions:
*
* - takePicture: takes photo and returns base64 encoded image or image file URI
*
* future?
* - captureVideo...
*
*/
/**
 * The Camera plugin interface.
 *
 * The Camera class can invoke the following actions:
 *
 *   - takePicture: takes photo and returns base64 encoded image or image file URI
 *
 *   future?
 *   - captureVideo...
 *
 */
public class Camera extends Plugin
{
    /**
     * Possible actions.
     */
    public static final String ACTION_TAKE_PICTURE = "takePicture";

    /**
     * Maximum image encoding size (in bytes) to allow. (Obtained unofficially
     * through trial and error). Anything larger will cause stability issues
     * when sending back to the browser.
     */
    private static final long MAX_ENCODING_SIZE = 1500000L;

    /**
     * Executes the requested action and returns a PluginResult.
     *
     * @param action     The action to execute.
     * @param callbackId The callback ID to be invoked upon action completion
     * @param args       JSONArry of arguments for the action.
     * @return           A PluginResult object with a status and message.
     */
    public PluginResult execute(String action, JSONArray args, String callbackId)
    {
        PluginResult result = null;

        // take a picture
        if (action != null && action.equals(ACTION_TAKE_PICTURE))
        {
            // Parse the options specified for the take picture action.
            CameraOptions options;
            try {
                options = CameraOptions.fromJSONArray(args);
            } catch (NumberFormatException e) {
                return new PluginResult(PluginResult.Status.JSON_EXCEPTION, "One of the camera options is not a valid number.");
            } catch (JSONException e) {
                return new PluginResult(PluginResult.Status.JSON_EXCEPTION, "One of the camera options is not valid JSON.");
            }

            // launch native camera application
            launchCamera(new PhotoListener(options, callbackId));

            // The native camera application runs in a separate process, so we
            // must now wait for the listener to retrieve the photo taken.
            // Return NO_RESULT status so plugin manager does not invoke a callback,
            // but keep the callback so the listener can invoke it later.
            result = new PluginResult(PluginResult.Status.NO_RESULT);
            result.setKeepCallback(true);
            return result;
        }
        else
        {
            // unrecognized (or null) action name
            result = new PluginResult(PluginResult.Status.INVALID_ACTION, "Camera: Invalid action:" + action);
        }

        return result;
    }

    /**
     * Launches the native camera application.
     *
     * @param listener file-system journal listener that will detect the photo
     *                 file the native application writes
     */
    private static void launchCamera(PhotoListener listener)
    {
        // MMAPI interface doesn't use the native Camera application or interface
        // (we would have to replicate it).  So, we invoke the native Camera application,
        // which doesn't allow us to set any options.
        // Register the listener and invoke atomically while holding the UI event
        // lock so the journal listener is in place before the camera starts.
        synchronized(UiApplication.getEventLock()) {
            UiApplication.getUiApplication().addFileSystemJournalListener(listener);
            Invoke.invokeApplication(Invoke.APP_TYPE_CAMERA, new CameraArguments());
        }
    }

    /**
     * Closes the native camera application.
     */
    public static void closeCamera()
    {
        // simulate two escape characters to exit native camera application
        // no, there is no other way to do this
        UiApplication.getUiApplication().invokeLater(new Runnable() {
            public void run() {
                try
                {
                    EventInjector.KeyEvent inject = new EventInjector.KeyEvent(
                            EventInjector.KeyEvent.KEY_DOWN, Characters.ESCAPE, 0);
                    inject.post();
                    inject.post();
                }
                catch (ControlledAccessException e)
                {
                    // the application doesn't have key injection permissions
                    Logger.log(Camera.class.getName() + ": Unable to close camera. " +
                            ApplicationDescriptor.currentApplicationDescriptor().getName() +
                            " does not have key injection permissions.");
                }
            }
        });
    }

    /**
     * Returns the image file URI or the Base64-encoded image.
     * @param filePath The full path of the image file
     * @param options Specifies the format of the image and the result
     * @param callbackId The id of the callback to receive the result
     */
    public static void processImage(String filePath, CameraOptions options,
            String callbackId) {
        PluginResult result = null;
        try
        {
            // wait for the file to be fully written to the file system
            // to avoid premature access to it (yes, this has happened)
            waitForImageFile(filePath);

            // Reformat the image if the specified options require it,
            // otherwise, get encoded string if base 64 string is output format.
            String imageURIorData = filePath;
            if (options.reformat) {
                imageURIorData = reformatImage(filePath, options);
            } else if (options.destinationType == CameraOptions.DESTINATION_DATA_URL) {
                imageURIorData = encodeImage(filePath);
            }

            // we have to check the size to avoid memory errors in the browser
            if (imageURIorData.length() > MAX_ENCODING_SIZE)
            {
                // it's a big one.  this is for your own good.
                String msg = "Encoded image is too large. Try reducing camera image size.";
                Logger.log(Camera.class.getName() + ": " + msg);
                result = new PluginResult(PluginResult.Status.ERROR, msg);
            }
            else
            {
                result = new PluginResult(PluginResult.Status.OK, imageURIorData);
            }
        }
        catch (Exception e)
        {
            // any failure (I/O, decoding, ...) is reported back as an I/O error
            result = new PluginResult(PluginResult.Status.IO_EXCEPTION, e.toString());
        }

        // send result back to JavaScript
        sendResult(result, callbackId);
    }

    /**
     * Waits for the image file to be fully written to the file system.
     *
     * Polls the file size every 100 ms until two consecutive reads agree,
     * which is taken to mean the camera application has finished writing.
     *
     * @param filePath Full path of the image file
     * @throws IOException
     */
    private static void waitForImageFile(String filePath) throws IOException
    {
        long start = (new Date()).getTime();
        FileConnection fconn = null;
        try
        {
            fconn = (FileConnection)Connector.open(filePath, Connector.READ);
            if (fconn.exists())
            {
                long fileSize = fconn.fileSize();
                long size = 0;
                while (true)
                {
                    // interruption is irrelevant here; just keep polling
                    try { Thread.sleep(100); } catch (InterruptedException e) {}
                    size = fconn.fileSize();
                    if (size == fileSize) {
                        break;
                    }
                    fileSize = size;
                }
                Logger.log(Camera.class.getName() + ": " + filePath +
                        " size=" + Long.toString(fileSize) + " bytes");
            }
        }
        finally
        {
            if (fconn != null) fconn.close();
        }
        long end = (new Date()).getTime();
        Logger.log(Camera.class.getName() + ": wait time=" + Long.toString(end-start) + " ms");
    }

    /**
     * Opens the specified image file and converts its contents to a Base64-encoded string.
     * @param filePath Full path of the image file
     * @return file contents as a Base64-encoded String, or null if the file
     *         does not exist
     * @throws IOException if the file cannot be read
     */
    private static String encodeImage(String filePath) throws IOException
    {
        String imageData = null;

        // open the image file
        FileConnection fconn = null;
        InputStream in = null;
        ByteArrayOutputStream byteArrayOS = null;
        try
        {
            fconn = (FileConnection)Connector.open(filePath);
            if (fconn.exists())
            {
                // encode file contents using BASE64 encoding
                // (96 KiB chunk size for streamToBytes)
                in = fconn.openInputStream();
                byteArrayOS = new ByteArrayOutputStream();
                Base64OutputStream base64OS = new Base64OutputStream(byteArrayOS);
                base64OS.write(IOUtilities.streamToBytes(in, 96*1024));
                base64OS.flush();
                base64OS.close();
                imageData = byteArrayOS.toString();

                Logger.log(Camera.class.getName() + ": Base64 encoding size=" +
                        Integer.toString(imageData.length()));
            }
        }
        finally
        {
            if (in != null) in.close();
            if (fconn != null) fconn.close();
            if (byteArrayOS != null) byteArrayOS.close();
        }

        return imageData;
    }

    /**
     * Reformats the image taken with the camera based on the options specified.
     *
     * Unfortunately, reformatting the image will cause EXIF data in the photo
     * to be lost.  Most importantly the orientation data is lost so the
     * picture is not auto rotated by software that recognizes EXIF data.
     *
     * @param filePath
     *            The full path of the image file
     * @param options
     *            Specifies the format of the image and the result
     * @return the reformatted image file URI or Base64-encoded image
     * @throws IOException
     */
    private static String reformatImage(String filePath, CameraOptions options)
            throws IOException {
        long start = (new Date()).getTime();

        // Open the original image created by the camera application and read
        // it into an EncodedImage object.
        FileConnection fconn = null;
        InputStream in = null;
        Bitmap originalImage = null;
        try {
            fconn = (FileConnection) Connector.open(filePath);
            in = fconn.openInputStream();
            originalImage = Bitmap.createBitmapFromBytes(IOUtilities.streamToBytes(in, 96*1024), 0, -1, 1);
        } finally {
            if (in != null)
                in.close();
            if (fconn != null)
                fconn.close();
        }

        int newWidth = options.targetWidth;
        int newHeight = options.targetHeight;
        int origWidth = originalImage.getWidth();
        int origHeight = originalImage.getHeight();

        // If only width or only height was specified, the missing dimension is
        // set based on the current aspect ratio of the image.
        if (newWidth > 0 && newHeight <= 0) {
            newHeight = (newWidth * origHeight) / origWidth;
        } else if (newWidth <= 0 && newHeight > 0) {
            newWidth = (newHeight * origWidth) / origHeight;
        } else if (newWidth <= 0 && newHeight <= 0) {
            // neither dimension specified: keep original size
            newWidth = origWidth;
            newHeight = origHeight;
        } else {
            // If the user specified both a positive width and height
            // (potentially different aspect ratio) then the width or height is
            // scaled so that the image fits while maintaining aspect ratio.
            // Alternatively, the specified width and height could have been
            // kept and Bitmap.SCALE_TO_FIT specified when scaling, but this
            // would result in whitespace in the new image.
            double newRatio = newWidth / (double)newHeight;
            double origRatio = origWidth / (double)origHeight;
            if (origRatio > newRatio) {
                newHeight = (newWidth * origHeight) / origWidth;
            } else if (origRatio < newRatio) {
                newWidth = (newHeight * origWidth) / origHeight;
            }
        }

        Bitmap newImage = new Bitmap(newWidth, newHeight);
        originalImage.scaleInto(newImage, options.imageFilter, Bitmap.SCALE_TO_FILL);

        // Convert the image to the appropriate encoding.  PNG does not allow
        // quality to be specified so the only effect that the quality option
        // has for a PNG is on the selection of the image filter.
        EncodedImage encodedImage;
        if (options.encoding == CameraOptions.ENCODING_PNG) {
            encodedImage = PNGEncodedImage.encode(newImage);
        } else {
            encodedImage = JPEGEncodedImage.encode(newImage, options.quality);
        }

        // Rewrite the modified image back out to the same file.  This is done
        // to ensure that for every picture taken, only one shows up in the
        // gallery.  If the encoding changed the file extension will differ
        // from the original.
        OutputStream out = null;
        int dirIndex = filePath.lastIndexOf('/');
        String filename = filePath.substring(dirIndex + 1, filePath.lastIndexOf('.'))
                + options.fileExtension;
        try {
            fconn = (FileConnection) Connector.open(filePath);
            // truncate before writing so stale bytes from a larger original
            // cannot survive past the new image data
            fconn.truncate(0);
            out = fconn.openOutputStream();
            out.write(encodedImage.getData());
            fconn.rename(filename);
        } finally {
            if (out != null)
                out.close();
            if (fconn != null)
                fconn.close();
        }

        // Return either the Base64-encoded string or the image URI for the
        // new image.
        String imageURIorData;
        if (options.destinationType == CameraOptions.DESTINATION_DATA_URL) {
            ByteArrayOutputStream byteArrayOS = null;
            try {
                byteArrayOS = new ByteArrayOutputStream();
                Base64OutputStream base64OS = new Base64OutputStream(
                        byteArrayOS);
                base64OS.write(encodedImage.getData());
                base64OS.flush();
                base64OS.close();
                imageURIorData = byteArrayOS.toString();

                Logger.log(Camera.class.getName() + ": Base64 encoding size="
                        + Integer.toString(imageURIorData.length()));
            } finally {
                if (byteArrayOS != null) {
                    byteArrayOS.close();
                }
            }
        } else {
            // rebuild the URI from the (possibly renamed) file name
            imageURIorData = filePath.substring(0, dirIndex + 1) + filename;
        }

        long end = (new Date()).getTime();
        Logger.log(Camera.class.getName() + ": reformat time=" + Long.toString(end-start) + " ms");

        return imageURIorData;
    }

    /**
     * Sends result back to JavaScript.
     * @param result PluginResult
     * @param callbackId the callback to invoke with the result
     */
    private static void sendResult(PluginResult result, String callbackId)
    {
        // invoke the appropriate callback
        if (result.getStatus() == PluginResult.Status.OK.ordinal())
        {
            success(result, callbackId);
        }
        else
        {
            error(result, callbackId);
        }
    }
}
| |
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.prism.query;
import java.io.Serializable;
import javax.xml.namespace.QName;
import com.evolveum.midpoint.util.DebugDumpable;
import com.evolveum.midpoint.util.DebugUtil;
/**
 * Paging specification for object queries: offset, maximum result size,
 * optional ordering (property name + direction) and an optional paging cookie.
 */
public class ObjectPaging implements DebugDumpable, Serializable {

    private Integer offset;
    private Integer maxSize;
    private QName orderBy;
    private OrderDirection direction;
    private String cookie;

    /** Creates an empty paging specification (no offset, size or ordering). */
    ObjectPaging() {
    }

    ObjectPaging(Integer offset, Integer maxSize) {
        this.offset = offset;
        this.maxSize = maxSize;
    }

    ObjectPaging(Integer offset, Integer maxSize, QName orderBy, OrderDirection direction) {
        this.offset = offset;
        this.maxSize = maxSize;
        this.orderBy = orderBy;
        this.direction = direction;
    }

    public static ObjectPaging createPaging(Integer offset, Integer maxSize) {
        return new ObjectPaging(offset, maxSize);
    }

    public static ObjectPaging createPaging(Integer offset, Integer maxSize, QName orderBy, OrderDirection direction) {
        return new ObjectPaging(offset, maxSize, orderBy, direction);
    }

    public static ObjectPaging createPaging(Integer offset, Integer maxSize, String orderBy, String namespace, OrderDirection direction) {
        return new ObjectPaging(offset, maxSize, new QName(namespace, orderBy), direction);
    }

    public static ObjectPaging createEmptyPaging() {
        return new ObjectPaging();
    }

    public OrderDirection getDirection() {
        return direction;
    }

    public void setDirection(OrderDirection direction) {
        this.direction = direction;
    }

    public Integer getOffset() {
        return offset;
    }

    public void setOffset(Integer offset) {
        this.offset = offset;
    }

    public QName getOrderBy() {
        return orderBy;
    }

    public void setOrderBy(QName orderBy) {
        this.orderBy = orderBy;
    }

    public Integer getMaxSize() {
        return maxSize;
    }

    public void setMaxSize(Integer maxSize) {
        this.maxSize = maxSize;
    }

    /**
     * Returns the paging cookie. The paging cookie is used for optimization of paged searches.
     * The presence of the cookie may allow the data store to correlate queries and associate
     * them with the same server-side context. This may allow the data store to reuse the same
     * pre-computed data. We want this as the sorted and paged searches may be quite expensive.
     * It is expected that the cookie returned from the search will be passed back in the options
     * when the next page of the same search is requested.
     *
     * It is OK to initialize a search without any cookie. If the datastore utilizes a re-usable
     * context it will return a cookie in a search response.
     */
    public String getCookie() {
        return cookie;
    }

    /**
     * Sets paging cookie. The paging cookie is used for optimization of paged searches.
     * The presence of the cookie may allow the data store to correlate queries and associate
     * them with the same server-side context. This may allow the data store to reuse the same
     * pre-computed data. We want this as the sorted and paged searches may be quite expensive.
     * It is expected that the cookie returned from the search will be passed back in the options
     * when the next page of the same search is requested.
     *
     * It is OK to initialize a search without any cookie. If the datastore utilizes a re-usable
     * context it will return a cookie in a search response.
     */
    public void setCookie(String cookie) {
        this.cookie = cookie;
    }

    /**
     * Creates a shallow copy of this paging specification, including the cookie.
     * (Covariant override of {@link Object#clone()}; does not rely on Cloneable.)
     */
    @Override
    public ObjectPaging clone() {
        ObjectPaging clone = new ObjectPaging(offset, maxSize, orderBy, direction);
        clone.cookie = this.cookie;
        return clone;
    }

    @Override
    public String debugDump() {
        return debugDump(0);
    }

    @Override
    public String debugDump(int indent) {
        StringBuilder sb = new StringBuilder();
        sb.append("PAGING:");
        if (getOffset() != null) {
            sb.append("\n");
            DebugUtil.indentDebugDump(sb, indent + 1);
            sb.append("Offset: " + getOffset());
        }
        if (getMaxSize() != null) {
            sb.append("\n");
            DebugUtil.indentDebugDump(sb, indent + 1);
            sb.append("Max size: " + getMaxSize());
        }
        if (getOrderBy() != null) {
            sb.append("\n");
            DebugUtil.indentDebugDump(sb, indent + 1);
            sb.append("Order by: " + getOrderBy().toString());
        }
        if (getDirection() != null) {
            sb.append("\n");
            DebugUtil.indentDebugDump(sb, indent + 1);
            sb.append("Order direction: " + getDirection());
        }
        if (getCookie() != null) {
            sb.append("\n");
            DebugUtil.indentDebugDump(sb, indent + 1);
            sb.append("Cookie: " + getCookie());
        }
        return sb.toString();
    }

    @Override
    public String toString() {
        // NOTE: the former "if (this == null)" guard was dead code ('this' can
        // never be null inside an instance method) and has been removed.
        StringBuilder sb = new StringBuilder();
        sb.append("PAGING: ");
        if (getOffset() != null) {
            sb.append("O: ");
            sb.append(getOffset());
            sb.append(",");
        }
        if (getMaxSize() != null) {
            sb.append("M: ");
            sb.append(getMaxSize());
            sb.append(",");
        }
        if (getOrderBy() != null) {
            sb.append("BY: ");
            sb.append(getOrderBy().getLocalPart());
            sb.append(", ");
        }
        if (getDirection() != null) {
            sb.append("D:");
            sb.append(getDirection());
            sb.append(", ");
        }
        if (getCookie() != null) {
            sb.append("C:");
            sb.append(getCookie());
        }
        return sb.toString();
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package online_quiz;
import com.jfoenix.controls.JFXButton;
import com.jfoenix.controls.JFXComboBox;
import com.jfoenix.controls.JFXTextField;
import java.awt.Color;
import static java.awt.Color.RED;
import java.net.URL;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.layout.VBox;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Rectangle;
import javafx.scene.text.Font;
import javafx.scene.text.FontPosture;
import javafx.scene.text.Text;
import javafx.stage.Stage;
import static javax.management.Query.value;
import javax.swing.JOptionPane;
import static javax.management.Query.value;
/**
* FXML Controller class
*
* @author root
*/
public class Practise implements Initializable {
/**
* Initializes the controller class.
*/
@FXML
private JFXTextField ques;
@FXML
private Label qno;
@FXML
private Button doubtans;
@FXML
private Button finalans;
@FXML
private JFXTextField opt1;
@FXML
private JFXTextField opt2;
@FXML
private JFXTextField opt3;
@FXML
private JFXTextField opt4;
@FXML
private Label y_ans;
@FXML
private VBox vbox1;
@FXML
private VBox vbox2;
@FXML
private MenuBar menuuser;
@FXML
private JFXButton lock;
@FXML
private JFXComboBox<?> cmbans;
Button btn[]=new Button[100];
String qu[]=new String[100];
String o1[]=new String[100];
String o2[]=new String[100];
String o3[]=new String[100];
String o4[]=new String[100];
public static String an[]=new String[100];
public static String yourans[]=new String[100];
int select_ans[]=new int[100]; //0 for initial 1 for doubtful 2 for final
int row=-1;
public static int attempt=0,wrong=0,not_attempt=0,score=0,no_of_ques=0;
@FXML
void btnlock(ActionEvent event) {
int count=0;
for(int j=1;j<=no_of_ques;j++){
//System.out.println(ans[j] +"" +solve[j]);
if(yourans[j].equals(""))
{
not_attempt++;
}
else if(yourans[j].equals(an[j]))
{
score++;
}
else if(!yourans[j].equals(an[j]))
{
wrong++;
}
attempt=wrong+score;
}
System.out.println(attempt +" "+wrong+" "+not_attempt+" "+score);
try{
Stage stage = (Stage) doubtans.getScene().getWindow();
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("Result.fxml"));
Parent root1 = (Parent) fxmlLoader.load();
Stage sta1 = new Stage();
sta1.setScene(new Scene(root1));
sta1.setTitle("RESULT");
sta1.show();
stage.close();
}catch(Exception eee){
System.out.println(eee);
}
}
@FXML
void btnfinalans(ActionEvent event){
ResultSet rs = null;
PreparedStatement pstmt = null;
if(select_ans[row]==2){
JOptionPane.showMessageDialog(null, "Sorry, You cann't change final Answer","Error ",JOptionPane.ERROR_MESSAGE);
}
else{
if(row<1){
JOptionPane.showMessageDialog(null, "Choose Question","Error ",JOptionPane.ERROR_MESSAGE);
}else{
if(cmbans.getValue()==null){
JOptionPane.showMessageDialog(null, "Choose Answer","Error ",JOptionPane.ERROR_MESSAGE);
}else{
try{
yourans[row]=cmbans.getValue().toString();
y_ans.setText(yourans[row]);
cmbans.setValue(null);
btn[row].setStyle("-fx-background-color: #05fb26;");
select_ans[row]=2;
JOptionPane.showMessageDialog(null, "Updated successfully","Success ",JOptionPane.INFORMATION_MESSAGE);
System.out.println(Student_after_login.backup_paper_name+" "+row);
}catch(Exception e){
JOptionPane.showMessageDialog(null, "Update Error","Error ",JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
@FXML
void btndoubt(ActionEvent event) {
ResultSet rs = null;
PreparedStatement pstmt = null;
if(select_ans[row]==2){
JOptionPane.showMessageDialog(null, "Sorry, You cann't change final Answer","Error ",JOptionPane.ERROR_MESSAGE);
}else{
if(row<1){
JOptionPane.showMessageDialog(null, "Choose Question","Error ",JOptionPane.ERROR_MESSAGE);
}
else{
if(cmbans.getValue()==null){
JOptionPane.showMessageDialog(null, "Choose Answer","Error ",JOptionPane.ERROR_MESSAGE);
}else{
try{
yourans[row]=cmbans.getValue().toString();
y_ans.setText(yourans[row]);
cmbans.setValue(null);
select_ans[row]=1;
btn[row].setStyle("-fx-background-color: #f90404;");
JOptionPane.showMessageDialog(null, "Updated successfully","Success ",JOptionPane.INFORMATION_MESSAGE);
System.out.println(Student_after_login.backup_paper_name+" "+row);
}catch(Exception e){
JOptionPane.showMessageDialog(null, "Update Error","Error ",JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
private void jb1MousePressed(ActionEvent evt,int p) {
// # blue 1a0eff green #05fb26 red #f90404 yellow #e4f704
// btn[row].setStyle("-fx-text-fill: black");
if(p==row){
int r=17;
btn[p].setShape(new Rectangle(r,r));
btn[p].setMinSize(2*r, 2*r);
btn[p].setMaxSize(2*r, 2*r);
}
else{
if(select_ans[p]==0){
btn[p].setStyle("-fx-background-color: #1a0eff;");
if(row>0 && select_ans[row]==0 )
btn[row].setStyle("-fx-background-color: #ffffff;");
}
int r=17;
btn[p].setShape(new Rectangle(r,r));
btn[p].setMinSize(2*r, 2*r);
btn[p].setMaxSize(2*r, 2*r);
if(row>0){
btn[row].setShape(new Circle(r));
btn[row].setMinSize(2*r, 2*r);
btn[row].setMaxSize(2*r, 2*r);
}
}
ques.setText(qu[p]);
opt1.setText(o1[p]);
opt2.setText(o2[p]);
opt3.setText(o3[p]);
System.out.println(qu[p]);
opt4.setText(o4[p]);
y_ans.setText(yourans[p]);
cmbans.setValue(null);
row=p;
}
List<String> nn = new ArrayList<String>();
@Override
public void initialize(URL url, ResourceBundle rb) {
Menu me=new Menu( first_page.logged_user);
MenuItem mt1=new MenuItem(" Back ");
MenuItem mt2=new MenuItem(" Log Out ");
MenuItem mt3=new MenuItem(" Exit ");
mt1.setOnAction(actionEvent -> funback());
mt2.setOnAction(actionEvent -> funlogout());
mt3.setOnAction(actionEvent -> System.exit(0));
me.getItems().addAll(mt1,mt2,mt3);
menuuser.getMenus().addAll(me);
nn.add("1");
nn.add("2");
nn.add("3");
nn.add("4");
ObservableList ob = FXCollections.observableList(nn);
cmbans.setItems(ob);
PreparedStatement pstmt = null;
ResultSet rst=null;
int i=1,k=0;
double r=17;
try{
Connection conn = connection_db.connection();
String quer = "select * from " +Student_after_login.backup_paper_name;
Statement st=conn.createStatement();
rst=st.executeQuery(quer);
while(rst.next()){
no_of_ques++;
qu[i]=new String();
o1[i]=new String();
o2[i]=new String();
o3[i]=new String();
o4[i]=new String();
an[i]=new String();
qu[i]=rst.getString(2);
o1[i]=rst.getString(3);
o2[i]=rst.getString(4);
o3[i]=rst.getString(5);
o4[i]=rst.getString(6);
an[i]=rst.getString(7);
yourans[i]="";
select_ans[i]=0;
btn[i]=new Button();
btn[i].setShape(new Circle(r));
btn[i].setMinSize(2*r, 2*r);
btn[i].setMaxSize(2*r, 2*r);
btn[i].setFont(Font.font(12));
//btn[i].setStyle("-fx-background-color: #e4f704;");
//btn[i].setStyle("-fx-background-color: #1a0eff;" + "-fx-text-fill: white ;");
//btn[i].setFocusTraversable(true);
btn[i].setText(""+i);
btn[i].setVisible(true);
vbox1.setSpacing(5);
vbox2.setSpacing(5);
int p=i;
btn[i].setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent e) {
System.out.println("scds");
jb1MousePressed(e,p);
// fun();
}
});
if(k==0){
vbox1.getChildren().add(btn[i]);
k=1;
}
else{
vbox2.getChildren().add(btn[i]);
k=0;
}
i++;
// first_page.add_ques_row=i;
}
}
catch(Exception e){}
}
public void funback(){
try{
Stage stage = (Stage) doubtans.getScene().getWindow();
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("Student_after_login.fxml"));
Parent root1 = (Parent) fxmlLoader.load();
Stage sta1 = new Stage();
sta1.setScene(new Scene(root1));
sta1.show();
sta1.setTitle("RESULT");
stage.close();
}catch(Exception eee){
System.out.println("error");
}
}
public void funlogout(){
try{
Stage stage = (Stage) doubtans.getScene().getWindow();
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("Login.fxml"));
Parent root1 = (Parent) fxmlLoader.load();
Stage sta1 = new Stage();
sta1.setScene(new Scene(root1));
sta1.setTitle("LOGIN");
sta1.show();
stage.close();
}catch(Exception eee){
System.out.println("error");
}
}
}
| |
/*
* 3D City Database - The Open Source CityGML Database
* https://www.3dcitydb.org/
*
* Copyright 2013 - 2021
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.lrg.tum.de/gis/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* Virtual City Systems, Berlin <https://vc.systems/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.gui.operation.importer.preferences;
import org.citydb.config.Config;
import org.citydb.config.i18n.Language;
import org.citydb.config.project.global.UpdatingPersonMode;
import org.citydb.config.project.importer.Continuation;
import org.citydb.config.project.importer.CreationDateMode;
import org.citydb.config.project.importer.TerminationDateMode;
import org.citydb.gui.components.TitledPanel;
import org.citydb.gui.components.popup.PopupMenuDecorator;
import org.citydb.gui.operation.common.DefaultPreferencesComponent;
import org.citydb.gui.util.GuiUtil;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionListener;
@SuppressWarnings("serial")
public class ContinuationPanel extends DefaultPreferencesComponent {
private TitledPanel metadataPanel;
private TitledPanel personPanel;
private TitledPanel creationDatePanel;
private TitledPanel terminationDatePanel;
private JTextField lineageText;
private JLabel lineageLabel;
private JTextField reasonForUpdateText;
private JLabel reasonForUpdateLabel;
private JRadioButton updatingPersonDBAccount;
private JRadioButton updatingPersonUser;
private JTextField updatingPersonText;
private JRadioButton creDateRadioInherit;
private JRadioButton creDateRadioOnlyMissing;
private JRadioButton creDateRadioAll;
private JRadioButton termDateRadioInherit;
private JRadioButton termDateRadioOnlyMissing;
private JRadioButton termDateRadioAll;
    /**
     * Creates the continuation-preferences panel and builds its UI components.
     *
     * @param config application configuration backing this preferences panel
     */
    public ContinuationPanel(Config config) {
        super(config);
        initGui();
    }
@Override
public boolean isModified() {
Continuation continuation = config.getImportConfig().getContinuation();
if (!lineageText.getText().equals(continuation.getLineage())) return true;
if (!reasonForUpdateText.getText().equals(continuation.getReasonForUpdate())) return true;
if (!updatingPersonText.getText().equals(continuation.getUpdatingPerson())) return true;
if (updatingPersonDBAccount.isSelected() != continuation.isUpdatingPersonModeDatabase()) return true;
if (updatingPersonUser.isSelected() != continuation.isUpdatingPersonModeUser()) return true;
if (creDateRadioInherit.isSelected() != continuation.isCreationDateModeInherit()) return true;
if (creDateRadioOnlyMissing.isSelected() != continuation.isCreationDateModeComplement()) return true;
if (creDateRadioAll.isSelected() != continuation.isCreationDateModeReplace()) return true;
if (termDateRadioInherit.isSelected() != continuation.isTerminationDateModeInherit()) return true;
if (termDateRadioOnlyMissing.isSelected() != continuation.isTerminationDateModeComplement()) return true;
if (termDateRadioAll.isSelected() != continuation.isTerminationDateModeReplace()) return true;
return false;
}
private void initGui() {
lineageText = new JTextField();
lineageLabel = new JLabel();
reasonForUpdateText = new JTextField();
reasonForUpdateLabel = new JLabel();
updatingPersonDBAccount = new JRadioButton();
updatingPersonUser = new JRadioButton();
ButtonGroup updatingPerson = new ButtonGroup();
updatingPerson.add(updatingPersonDBAccount);
updatingPerson.add(updatingPersonUser);
updatingPersonText = new JTextField();
creDateRadioInherit = new JRadioButton();
creDateRadioOnlyMissing = new JRadioButton();
creDateRadioAll = new JRadioButton();
ButtonGroup creDateRadio = new ButtonGroup();
creDateRadio.add(creDateRadioInherit);
creDateRadio.add(creDateRadioOnlyMissing);
creDateRadio.add(creDateRadioAll);
termDateRadioInherit = new JRadioButton();
termDateRadioOnlyMissing = new JRadioButton();
termDateRadioAll = new JRadioButton();
ButtonGroup trmDateRadio = new ButtonGroup();
trmDateRadio.add(termDateRadioInherit);
trmDateRadio.add(termDateRadioOnlyMissing);
trmDateRadio.add(termDateRadioAll);
PopupMenuDecorator.getInstance().decorate(lineageText, reasonForUpdateText, updatingPersonText);
setLayout(new GridBagLayout());
{
JPanel content = new JPanel();
content.setLayout(new GridBagLayout());
{
content.add(lineageLabel, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.BOTH, 0, 0, 5, 5));
content.add(lineageText, GuiUtil.setConstraints(1, 0, 1, 1, GridBagConstraints.BOTH, 0, 5, 5, 0));
content.add(reasonForUpdateLabel, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.BOTH, 0, 0, 0, 5));
content.add(reasonForUpdateText, GuiUtil.setConstraints(1, 1, 1, 1, GridBagConstraints.BOTH, 0, 5, 0, 0));
}
metadataPanel = new TitledPanel().build(content);
}
{
JPanel content = new JPanel();
content.setLayout(new GridBagLayout());
{
content.add(updatingPersonDBAccount, GuiUtil.setConstraints(0, 0, 2, 1, 0, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
content.add(updatingPersonUser, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.BOTH, 5, 0, 0, 5));
content.add(updatingPersonText, GuiUtil.setConstraints(1, 1, 1, 1, GridBagConstraints.BOTH, 5, 5, 0, 0));
}
personPanel = new TitledPanel().build(content);
}
{
JPanel content = new JPanel();
content.setLayout(new GridBagLayout());
{
content.add(creDateRadioInherit, GuiUtil.setConstraints(0, 0, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
content.add(creDateRadioOnlyMissing, GuiUtil.setConstraints(0, 1, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
content.add(creDateRadioAll, GuiUtil.setConstraints(0, 2, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
}
creationDatePanel = new TitledPanel().build(content);
}
{
JPanel content = new JPanel();
content.setLayout(new GridBagLayout());
{
content.add(termDateRadioInherit, GuiUtil.setConstraints(0, 0, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
content.add(termDateRadioOnlyMissing, GuiUtil.setConstraints(0, 1, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
content.add(termDateRadioAll, GuiUtil.setConstraints(0, 2, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
}
terminationDatePanel = new TitledPanel().build(content);
}
add(metadataPanel, GuiUtil.setConstraints(0, 0, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
add(personPanel, GuiUtil.setConstraints(0, 1, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
add(creationDatePanel, GuiUtil.setConstraints(0, 2, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
add(terminationDatePanel, GuiUtil.setConstraints(0, 3, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
ActionListener updatingPersonListener = e -> setEnabledUpdatingPerson();
updatingPersonDBAccount.addActionListener(updatingPersonListener);
updatingPersonUser.addActionListener(updatingPersonListener);
}
private void setEnabledUpdatingPerson() {
updatingPersonText.setEnabled(updatingPersonUser.isSelected());
}
@Override
public void doTranslation() {
metadataPanel.setTitle(Language.I18N.getString("pref.import.continuation.border.lineage"));
personPanel.setTitle(Language.I18N.getString("pref.import.continuation.border.updatingPerson"));
creationDatePanel.setTitle(Language.I18N.getString("pref.import.continuation.border.creationDate"));
terminationDatePanel.setTitle(Language.I18N.getString("pref.import.continuation.border.terminationDate"));
lineageLabel.setText(Language.I18N.getString("pref.import.continuation.label.lineage"));
reasonForUpdateLabel.setText(Language.I18N.getString("pref.import.continuation.label.reasonForUpdate"));
updatingPersonDBAccount.setText(Language.I18N.getString("pref.import.continuation.label.updatingPerson.database"));
updatingPersonUser.setText(Language.I18N.getString("pref.import.continuation.label.updatingPerson.user"));
creDateRadioInherit.setText(Language.I18N.getString("pref.import.continuation.label.creationDate.inherit"));
creDateRadioOnlyMissing.setText(Language.I18N.getString("pref.import.continuation.label.creationDate.onlyMissing"));
creDateRadioAll.setText(Language.I18N.getString("pref.import.continuation.label.creationDate.all"));
termDateRadioInherit.setText(Language.I18N.getString("pref.import.continuation.label.terminationDate.inherit"));
termDateRadioOnlyMissing.setText(Language.I18N.getString("pref.import.continuation.label.terminationDate.onlyMissing"));
termDateRadioAll.setText(Language.I18N.getString("pref.import.continuation.label.terminationDate.all"));
}
@Override
public void loadSettings() {
Continuation continuation = config.getImportConfig().getContinuation();
lineageText.setText(continuation.getLineage());
reasonForUpdateText.setText(continuation.getReasonForUpdate());
updatingPersonText.setText(continuation.getUpdatingPerson());
if (continuation.isUpdatingPersonModeDatabase())
updatingPersonDBAccount.setSelected(true);
else
updatingPersonUser.setSelected(true);
setEnabledUpdatingPerson();
if (continuation.isCreationDateModeInherit())
creDateRadioInherit.setSelected(true);
else if (continuation.isCreationDateModeComplement())
creDateRadioOnlyMissing.setSelected(true);
else
creDateRadioAll.setSelected(true);
if (continuation.isTerminationDateModeInherit())
termDateRadioInherit.setSelected(true);
else if (continuation.isTerminationDateModeComplement())
termDateRadioOnlyMissing.setSelected(true);
else
termDateRadioAll.setSelected(true);
}
@Override
public void setSettings() {
Continuation continuation = config.getImportConfig().getContinuation();
String lineage = lineageText.getText().trim();
continuation.setLineage(lineage);
lineageText.setText(lineage);
String reasonForUpdate = reasonForUpdateText.getText().trim();
continuation.setReasonForUpdate(reasonForUpdate);
reasonForUpdateText.setText(reasonForUpdate);
String updatingPerson = updatingPersonText.getText().trim();
continuation.setUpdatingPerson(updatingPerson);
updatingPersonText.setText(updatingPerson);
if (updatingPersonDBAccount.isSelected())
continuation.setUpdatingPersonMode(UpdatingPersonMode.DATABASE);
else
continuation.setUpdatingPersonMode(UpdatingPersonMode.USER);
if (creDateRadioInherit.isSelected())
continuation.setCreationDateMode(CreationDateMode.INHERIT);
else if (creDateRadioOnlyMissing.isSelected())
continuation.setCreationDateMode(CreationDateMode.COMPLEMENT);
else
continuation.setCreationDateMode(CreationDateMode.REPLACE);
if (termDateRadioInherit.isSelected())
continuation.setTerminationDateMode(TerminationDateMode.INHERIT);
else if (termDateRadioOnlyMissing.isSelected())
continuation.setTerminationDateMode(TerminationDateMode.COMPLEMENT);
else
continuation.setTerminationDateMode(TerminationDateMode.REPLACE);
}
@Override
public String getTitle() {
return Language.I18N.getString("pref.tree.import.continuation");
}
}
| |
package io.advantageous.qbit.admin;
import io.advantageous.qbit.client.ClientProxy;
import io.advantageous.qbit.reactive.Callback;
import io.advantageous.qbit.reakt.Reakt;
import io.advantageous.qbit.service.ServiceProxyUtils;
import io.advantageous.qbit.service.health.HealthFailReason;
import io.advantageous.qbit.service.health.HealthServiceClient;
import io.advantageous.qbit.service.health.ServiceHealthManager;
import io.advantageous.qbit.service.stats.StatsCollector;
import io.advantageous.qbit.util.Timer;
import io.advantageous.reakt.Expected;
import io.advantageous.reakt.promise.Promise;
import io.advantageous.reakt.reactor.Reactor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.function.Consumer;
/**
 * Common things that you need for QBit/Reakt services.
 * Gets rid of most of the boilerplate code.
 * Provides a facade over the QBit monitoring, KPI, stats, and health system.
 *
 * <p>Fix: {@code statNameMap} was declared as a cache for prefixed stat keys
 * but was never populated, so every stats call re-built the prefixed key.
 * {@link #getActualStatKey(String)} now memoizes via {@code computeIfAbsent}.
 */
public class ServiceManagementBundle implements ServiceHealthManager, StatsCollector {

    private final Reactor reactor;
    private final StatsCollector stats;
    private final ServiceHealthManager healthManager;
    private final Expected<HealthServiceClient> healthServiceClient;
    private final String serviceName;
    private final Timer timer;
    private final String statKeyPrefix;
    // Cache of short stat key -> fully prefixed stat key; see getActualStatKey.
    private final HashMap<String, String> statNameMap;
    private final List<Object> servicesToFlush;
    private final Expected<Runnable> processHandler;
    private final Logger logger = LoggerFactory.getLogger(ServiceManagementBundle.class);

    // Last timestamp captured by process(); exposed through getTime().
    protected long time;

    /**
     * @param reactor              reactor driving callbacks and repeating tasks
     * @param stats                underlying stats collector
     * @param serviceHealthManager health manager this bundle delegates to
     * @param serviceName          name used for health reporting
     * @param timer                timer used to capture the current time in process()
     * @param statKeyPrefix        prefix prepended to every stat key
     * @param processHandler       optional extra work run on each process() pass (may be null)
     * @param healthServiceClient  optional remote health service client (may be null)
     */
    public ServiceManagementBundle(final Reactor reactor,
                                   final StatsCollector stats,
                                   final ServiceHealthManager serviceHealthManager,
                                   final String serviceName,
                                   final Timer timer,
                                   final String statKeyPrefix,
                                   final Runnable processHandler,
                                   final HealthServiceClient healthServiceClient) {
        this.reactor = reactor;
        this.stats = stats;
        this.healthManager = serviceHealthManager;
        this.serviceName = serviceName;
        this.timer = timer;
        this.statKeyPrefix = statKeyPrefix;
        this.healthServiceClient = Expected.ofNullable(healthServiceClient);
        this.statNameMap = new HashMap<>();
        this.processHandler = Expected.ofNullable(processHandler);
        this.servicesToFlush = new ArrayList<>();
    }

    /** Registers a single service proxy to be flushed on each process() pass. */
    public void addServiceToFlush(Object service) {
        servicesToFlush.add(service);
    }

    /** Registers several service proxies to be flushed on each process() pass. */
    public void addServicesToFlush(Object... services) {
        for (Object service : services) {
            servicesToFlush.add(service);
        }
    }

    /**
     * Periodic housekeeping: captures the current time, runs the reactor,
     * invokes the optional process handler, and flushes the stats collector,
     * health client, and all registered service proxies.
     */
    public void process() {
        time = timer.time();
        reactor.process();
        processHandler.ifPresent(Runnable::run);
        stats.clientProxyFlush();
        healthServiceClient.ifPresent(ClientProxy::clientProxyFlush);
        servicesToFlush.forEach((service) -> {
            try {
                ServiceProxyUtils.flushServiceProxy(service);
            } catch (Exception ex) {
                // Best effort: a broken proxy must not stop the other flushes.
                logger.error("Unable to flush service on behalf of service " + serviceName, ex);
            }
        });
    }

    /**
     * Creates a QBit callback based on promise created.
     *
     * @param promiseConsumer promise consumer
     * @param <T>             T
     * @return QBit callback
     */
    public <T> Callback<T> callback(final Consumer<Promise<T>> promiseConsumer) {
        Promise<T> promise = reactor.promise();
        promiseConsumer.accept(promise);
        return Reakt.convertPromise(promise);
    }

    /**
     * Prefixes the stats key with the stat key prefix, and then calls statsCollector.recordLevel.
     *
     * @param statKey statKey
     * @param level   level
     */
    @Override
    public void recordLevel(final String statKey, final long level) {
        stats.recordLevel(getActualStatKey(statKey), level);
    }

    /**
     * Prefixes the stats key with the stat key prefix, and then calls statsCollector.recordCount.
     *
     * @param statKey statKey
     * @param count   count
     */
    @Override
    public void recordCount(final String statKey, final long count) {
        stats.recordCount(getActualStatKey(statKey), count);
    }

    /**
     * Prefixes the stats key with the stat key prefix, and then calls statsCollector.increment.
     *
     * @param statKey statKey
     */
    @Override
    public void increment(final String statKey) {
        stats.increment(getActualStatKey(statKey));
    }

    /**
     * Prefixes the stats key with the stat key prefix, and then calls statsCollector.recordTiming.
     *
     * @param statKey  statKey
     * @param timeSpan timeSpan
     */
    @Override
    public void recordTiming(String statKey, long timeSpan) {
        stats.recordTiming(getActualStatKey(statKey), timeSpan);
    }

    /**
     * Resolves the fully prefixed stat key, memoizing the result so repeated
     * lookups for the same key avoid re-concatenating strings.
     */
    private String getActualStatKey(String statKey) {
        return statNameMap.computeIfAbsent(statKey, key -> statKeyPrefix + key);
    }

    public Reactor reactor() {
        return reactor;
    }

    public StatsCollector stats() {
        return stats;
    }

    public ServiceHealthManager health() {
        return healthManager;
    }

    public String getServiceName() {
        return serviceName;
    }

    public Timer getTimer() {
        return timer;
    }

    /** @return the time captured on the most recent process() pass */
    public long getTime() {
        return time;
    }

    @Override
    public boolean isFailing() {
        return healthManager.isFailing();
    }

    @Override
    public boolean isOk() {
        return healthManager.isOk();
    }

    /** Marks the service failing locally and reports ERROR to the remote health service if present. */
    @Override
    public void setFailing() {
        increment("fail");
        healthManager.setFailing();
        healthServiceClient.ifPresent(healthServiceClient1 -> healthServiceClient1.failWithReason(serviceName, HealthFailReason.ERROR));
    }

    /** Marks the service failing with a specific reason, recording a per-reason stat. */
    @Override
    public void setFailingWithReason(HealthFailReason reason) {
        increment("fail." + reason.name().toLowerCase());
        healthManager.setFailing();
        healthServiceClient.ifPresent(healthServiceClient1 -> healthServiceClient1.failWithReason(serviceName, reason));
    }

    /** Marks the service failing with a cause, recording a per-exception-type stat. */
    @Override
    public void setFailingWithError(Throwable cause) {
        increment("fail." + cause.getClass().getSimpleName().toLowerCase());
        healthManager.setFailing();
        healthServiceClient.ifPresent(healthServiceClient1 -> healthServiceClient1.failWithError(serviceName, cause));
    }

    public Expected<HealthServiceClient> healthServiceClient() {
        return healthServiceClient;
    }

    /** Marks the service recovered and records a "recovered" stat. */
    @Override
    public void recover() {
        increment("recovered");
        healthManager.recover();
    }

    @Override
    public void clientProxyFlush() {
        this.stats.clientProxyFlush();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.taskmanager;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.NettyShuffleEnvironmentOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.api.writer.RecordWriterBuilder;
import org.apache.flink.runtime.io.network.api.writer.ResultPartitionWriter;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.io.network.partition.consumer.InputGate;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.runtime.minicluster.MiniCluster;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testutils.MiniClusterResource;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.testutils.junit.category.AlsoRunWithSchedulerNG;
import org.apache.flink.types.LongValue;
import org.apache.flink.util.TestLogger;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import static org.apache.flink.runtime.io.network.buffer.LocalBufferPoolDestroyTest.isInBlockingBufferRequest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration test verifying that a task whose record production/consumption
 * happens on a spawned (non-task) thread can still be cancelled while that
 * thread is stuck in a blocking network buffer request. The spawned threads
 * and their exceptions are published through static volatile fields so the
 * main test thread can observe them.
 */
@Category(AlsoRunWithSchedulerNG.class)
public class TaskCancelAsyncProducerConsumerITCase extends TestLogger {
// The Exceptions thrown by the producer/consumer Threads
private static volatile Exception ASYNC_PRODUCER_EXCEPTION;
private static volatile Exception ASYNC_CONSUMER_EXCEPTION;
// The Threads producing/consuming the intermediate stream
private static volatile Thread ASYNC_PRODUCER_THREAD;
private static volatile Thread ASYNC_CONSUMER_THREAD;
@ClassRule
public static final MiniClusterResource MINI_CLUSTER_RESOURCE = new MiniClusterResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(getFlinkConfiguration())
.build());
// Small memory segments and very few network buffers so the producer
// exhausts its buffer pool quickly and blocks in a buffer request.
private static Configuration getFlinkConfiguration() {
Configuration config = new Configuration();
config.setString(TaskManagerOptions.MEMORY_SEGMENT_SIZE, "4096");
config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_NUM_BUFFERS, 9);
return config;
}
/**
 * Tests that a task waiting on an async producer/consumer that is stuck
 * in a blocking buffer request can be properly cancelled.
 *
 * <p>This is currently required for the Flink Kafka sources, which spawn
 * a separate Thread consuming from Kafka and producing the intermediate
 * streams in the spawned Thread instead of the main task Thread.
 */
@Test
public void testCancelAsyncProducerAndConsumer() throws Exception {
Deadline deadline = Deadline.now().plus(Duration.ofMinutes(2));
// Job with async producer and consumer
JobVertex producer = new JobVertex("AsyncProducer");
producer.setParallelism(1);
producer.setInvokableClass(AsyncProducer.class);
JobVertex consumer = new JobVertex("AsyncConsumer");
consumer.setParallelism(1);
consumer.setInvokableClass(AsyncConsumer.class);
consumer.connectNewDataSetAsInput(producer, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
// Co-locate both vertices in one slot sharing group.
SlotSharingGroup slot = new SlotSharingGroup(producer.getID(), consumer.getID());
producer.setSlotSharingGroup(slot);
consumer.setSlotSharingGroup(slot);
JobGraph jobGraph = new JobGraph(producer, consumer);
final MiniCluster flink = MINI_CLUSTER_RESOURCE.getMiniCluster();
// Submit job and wait until running
flink.runDetached(jobGraph);
FutureUtils.retrySuccessfulWithDelay(
() -> flink.getJobStatus(jobGraph.getJobID()),
Time.milliseconds(10),
deadline,
status -> status == JobStatus.RUNNING,
TestingUtils.defaultScheduledExecutor()
).get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
// Poll until the producer thread's stack trace shows it blocked in a
// buffer request (up to 50 * 500 ms).
boolean producerBlocked = false;
for (int i = 0; i < 50; i++) {
Thread thread = ASYNC_PRODUCER_THREAD;
if (thread != null && thread.isAlive()) {
StackTraceElement[] stackTrace = thread.getStackTrace();
producerBlocked = isInBlockingBufferRequest(stackTrace);
}
if (producerBlocked) {
break;
} else {
// Retry
Thread.sleep(500L);
}
}
// Verify that async producer is in blocking request
assertTrue("Producer thread is not blocked: " + Arrays.toString(ASYNC_PRODUCER_THREAD.getStackTrace()), producerBlocked);
// Poll until the consumer thread is parked waiting for input.
boolean consumerWaiting = false;
for (int i = 0; i < 50; i++) {
Thread thread = ASYNC_CONSUMER_THREAD;
if (thread != null && thread.isAlive()) {
consumerWaiting = thread.getState() == Thread.State.WAITING;
}
if (consumerWaiting) {
break;
} else {
// Retry
Thread.sleep(500L);
}
}
// Verify that async consumer is in blocking request
assertTrue("Consumer thread is not blocked.", consumerWaiting);
flink.cancelJob(jobGraph.getJobID())
.get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
// wait until the job is canceled
FutureUtils.retrySuccessfulWithDelay(
() -> flink.getJobStatus(jobGraph.getJobID()),
Time.milliseconds(10),
deadline,
status -> status == JobStatus.CANCELED,
TestingUtils.defaultScheduledExecutor()
).get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
// Verify the expected Exceptions
// NOTE(review): cancellation is expected to surface as IllegalStateException
// in both spawned threads — presumably thrown by the released buffer
// pool/input gate; confirm against the runtime's cancellation path.
assertNotNull(ASYNC_PRODUCER_EXCEPTION);
assertEquals(IllegalStateException.class, ASYNC_PRODUCER_EXCEPTION.getClass());
assertNotNull(ASYNC_CONSUMER_EXCEPTION);
assertEquals(IllegalStateException.class, ASYNC_CONSUMER_EXCEPTION.getClass());
}
/**
 * Invokable emitting records in a separate Thread (not the main Task
 * thread).
 */
public static class AsyncProducer extends AbstractInvokable {
public AsyncProducer(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
Thread producer = new ProducerThread(getEnvironment().getWriter(0));
// Publish the async producer for the main test Thread
ASYNC_PRODUCER_THREAD = producer;
producer.start();
// Wait for the producer Thread to finish. This is executed in the
// main Task thread and will be interrupted on cancellation.
while (producer.isAlive()) {
try {
producer.join();
} catch (InterruptedException ignored) {
}
}
}
/**
 * The Thread emitting the records.
 */
private static class ProducerThread extends Thread {
private final RecordWriter<LongValue> recordWriter;
public ProducerThread(ResultPartitionWriter partitionWriter) {
this.recordWriter = new RecordWriterBuilder<LongValue>().build(partitionWriter);
}
// Emits an endless stream of increasing longs until the buffer pool is
// released by cancellation; the resulting exception is published for
// the main test thread to assert on.
@Override
public void run() {
LongValue current = new LongValue(0);
try {
while (true) {
current.setValue(current.getValue() + 1);
recordWriter.emit(current);
recordWriter.flushAll();
}
} catch (Exception e) {
ASYNC_PRODUCER_EXCEPTION = e;
}
}
}
}
/**
 * Invokable consuming buffers in a separate Thread (not the main Task
 * thread).
 */
public static class AsyncConsumer extends AbstractInvokable {
public AsyncConsumer(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
Thread consumer = new ConsumerThread(getEnvironment().getInputGate(0));
// Publish the async consumer for the main test Thread
ASYNC_CONSUMER_THREAD = consumer;
consumer.start();
// Wait for the consumer Thread to finish. This is executed in the
// main Task thread and will be interrupted on cancellation.
while (consumer.isAlive()) {
try {
consumer.join();
} catch (InterruptedException ignored) {
}
}
}
/**
 * The Thread consuming buffers.
 */
private static class ConsumerThread extends Thread {
private final InputGate inputGate;
public ConsumerThread(InputGate inputGate) {
this.inputGate = inputGate;
}
// Drains the input gate until cancellation tears it down; the resulting
// exception is published for the main test thread to assert on.
@Override
public void run() {
try {
while (true) {
inputGate.getNext();
}
} catch (Exception e) {
ASYNC_CONSUMER_EXCEPTION = e;
}
}
}
}
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.util.*;
import java.util.concurrent.atomic.*;
import org.junit.Test;
import org.reactivestreams.Subscriber;
import io.reactivex.*;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.Functions;
import io.reactivex.internal.fuseable.*;
import io.reactivex.subscribers.*;
public class FlowableRangeTest {
// range(2, 3) emits exactly 2, 3, 4 and then completes without error.
@Test
public void testRangeStartAt2Count3() {
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
Flowable.range(2, 3).subscribe(subscriber);
verify(subscriber, times(1)).onNext(2);
verify(subscriber, times(1)).onNext(3);
verify(subscriber, times(1)).onNext(4);
verify(subscriber, never()).onNext(5);
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, times(1)).onComplete();
}
// take(3) cancels upstream after three items: exactly 3 emissions reach
// doOnNext even though the range could produce 1000.
@Test
public void testRangeUnsubscribe() {
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
final AtomicInteger count = new AtomicInteger();
Flowable.range(1, 1000).doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
count.incrementAndGet();
}
})
.take(3).subscribe(subscriber);
verify(subscriber, times(1)).onNext(1);
verify(subscriber, times(1)).onNext(2);
verify(subscriber, times(1)).onNext(3);
verify(subscriber, never()).onNext(4);
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, times(1)).onComplete();
assertEquals(3, count.get());
}
// Assembly-time boundary checks: a zero count is always valid (even at the
// int extremes), start + count - 1 may reach but not exceed Integer.MAX_VALUE.
@Test
public void testRangeWithZero() {
Flowable.range(1, 0);
}
@Test
public void testRangeWithOverflow2() {
Flowable.range(Integer.MAX_VALUE, 0);
}
@Test
public void testRangeWithOverflow3() {
Flowable.range(1, Integer.MAX_VALUE);
}
// 2 + (MAX_VALUE - 1) would exceed Integer.MAX_VALUE, so assembly must fail.
@Test(expected = IllegalArgumentException.class)
public void testRangeWithOverflow4() {
Flowable.range(2, Integer.MAX_VALUE);
}
@Test
public void testRangeWithOverflow5() {
assertFalse(Flowable.range(Integer.MIN_VALUE, 0).blockingIterable().iterator().hasNext());
}
// Items are delivered strictly on demand: each request(n) releases exactly n
// more values, and the final request drains the remainder to termination.
@Test
public void testBackpressureViaRequest() {
Flowable<Integer> f = Flowable.range(1, Flowable.bufferSize());
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
ts.assertNoValues();
ts.request(1);
f.subscribe(ts);
ts.assertValue(1);
ts.request(2);
ts.assertValues(1, 2, 3);
ts.request(3);
ts.assertValues(1, 2, 3, 4, 5, 6);
ts.request(Flowable.bufferSize());
ts.assertTerminated();
}
// With an unbounded request, more than 2 * bufferSize items flow through in
// one go (exercises the fast path, no windowed emission).
@Test
public void testNoBackpressure() {
ArrayList<Integer> list = new ArrayList<Integer>(Flowable.bufferSize() * 2);
for (int i = 1; i <= Flowable.bufferSize() * 2 + 1; i++) {
list.add(i);
}
Flowable<Integer> f = Flowable.range(1, list.size());
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
ts.assertNoValues();
ts.request(Long.MAX_VALUE); // infinite
f.subscribe(ts);
ts.assertValueSequence(list);
ts.assertTerminated();
}
// Helper: requests the 100 items of range(start, 100) one at a time and
// checks the full emitted sequence. Package-private; driven by the @Test
// wrappers below across 100 different start offsets.
void testWithBackpressureOneByOne(int start) {
Flowable<Integer> source = Flowable.range(start, 100);
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
ts.request(1);
source.subscribe(ts);
List<Integer> list = new ArrayList<Integer>(100);
for (int i = 0; i < 100; i++) {
list.add(i + start);
ts.request(1);
}
ts.assertValueSequence(list);
ts.assertTerminated();
}
// Helper: requests all 100 items of range(start, 100) up front.
void testWithBackpressureAllAtOnce(int start) {
Flowable<Integer> source = Flowable.range(start, 100);
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
ts.request(100);
source.subscribe(ts);
List<Integer> list = new ArrayList<Integer>(100);
for (int i = 0; i < 100; i++) {
list.add(i + start);
}
ts.assertValueSequence(list);
ts.assertTerminated();
}
@Test
public void testWithBackpressure1() {
for (int i = 0; i < 100; i++) {
testWithBackpressureOneByOne(i);
}
}
@Test
public void testWithBackpressureAllAtOnce() {
for (int i = 0; i < 100; i++) {
testWithBackpressureAllAtOnce(i);
}
}
// Requesting more than the range contains (150, then 50 more after
// completion) must not break delivery or termination.
@Test
public void testWithBackpressureRequestWayMore() {
Flowable<Integer> source = Flowable.range(50, 100);
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
ts.request(150);
source.subscribe(ts);
List<Integer> list = new ArrayList<Integer>(100);
for (int i = 0; i < 100; i++) {
list.add(i + 50);
}
ts.request(50); // and then some
ts.assertValueSequence(list);
ts.assertTerminated();
}
// Repeated near-Long.MAX_VALUE requests must saturate (cap) the outstanding
// request count instead of overflowing; all n items still arrive.
@Test
public void testRequestOverflow() {
final AtomicInteger count = new AtomicInteger();
int n = 10;
Flowable.range(1, n).subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
request(2);
}
@Override
public void onComplete() {
//do nothing
}
@Override
public void onError(Throwable e) {
throw new RuntimeException(e);
}
@Override
public void onNext(Integer t) {
count.incrementAndGet();
request(Long.MAX_VALUE - 1);
}});
assertEquals(n, count.get());
}
// An empty range completes eagerly on subscribe, even before any request.
@Test
public void testEmptyRangeSendsOnCompleteEagerlyWithRequestZero() {
final AtomicBoolean completed = new AtomicBoolean(false);
Flowable.range(1, 0).subscribe(new DefaultSubscriber<Integer>() {
@Override
public void onStart() {
// request(0);
}
@Override
public void onComplete() {
completed.set(true);
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(Integer t) {
}});
assertTrue(completed.get());
}
// A range ending exactly at Integer.MAX_VALUE must terminate rather than
// wrap around; timeout guards against an infinite loop on overflow.
@Test(timeout = 1000)
public void testNearMaxValueWithoutBackpressure() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
Flowable.range(Integer.MAX_VALUE - 1, 2).subscribe(ts);
ts.assertComplete();
ts.assertNoErrors();
ts.assertValues(Integer.MAX_VALUE - 1, Integer.MAX_VALUE);
}
// Same boundary case on the bounded-request (slow) path.
@Test(timeout = 1000)
public void testNearMaxValueWithBackpressure() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(3L);
Flowable.range(Integer.MAX_VALUE - 1, 2).subscribe(ts);
ts.assertComplete();
ts.assertNoErrors();
ts.assertValues(Integer.MAX_VALUE - 1, Integer.MAX_VALUE);
}
// Negative counts are rejected at assembly time with a specific message.
@Test
public void negativeCount() {
try {
Flowable.range(1, -1);
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException ex) {
assertEquals("count >= 0 required but it was -1", ex.getMessage());
}
}
// Operator fusion: range is a SYNC-fuseable source, so an ASYNC-only
// request must be rejected (NONE) while values still flow normally.
@Test
public void requestWrongFusion() {
TestSubscriber<Integer> ts = SubscriberFusion.newTest(QueueFuseable.ASYNC);
Flowable.range(1, 5)
.subscribe(ts);
SubscriberFusion.assertFusion(ts, QueueFuseable.NONE)
.assertResult(1, 2, 3, 4, 5);
}
// A single-element range behaves like just(value).
@Test
public void countOne() {
Flowable.range(5495454, 1)
.test()
.assertResult(5495454);
}
// Requesting ANY fusion mode yields SYNC for this source.
@Test
public void fused() {
TestSubscriber<Integer> ts = SubscriberFusion.newTest(QueueFuseable.ANY);
Flowable.range(1, 2).subscribe(ts);
SubscriberFusion.assertFusion(ts, QueueFuseable.SYNC)
.assertResult(1, 2);
}
@Test
public void fusedReject() {
TestSubscriber<Integer> ts = SubscriberFusion.newTest(QueueFuseable.ASYNC);
Flowable.range(1, 2).subscribe(ts);
SubscriberFusion.assertFusion(ts, QueueFuseable.NONE)
.assertResult(1, 2);
}
// Standard lifecycle checks from TestHelper: disposal and the fused
// queue's clear()/isEmpty() contract.
@Test
public void disposed() {
TestHelper.checkDisposed(Flowable.range(1, 2));
}
@Test
public void fusedClearIsEmpty() {
TestHelper.checkFusedIsEmptyClear(Flowable.range(1, 2));
}
// These boundary combinations must assemble without an overflow error.
@Test
public void noOverflow() {
Flowable.range(Integer.MAX_VALUE - 1, 2);
Flowable.range(Integer.MIN_VALUE, 2);
Flowable.range(Integer.MIN_VALUE, Integer.MAX_VALUE);
}
@Test
public void conditionalNormal() {
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.range(1, 5));
TestHelper.assertBadRequestReported(Flowable.range(1, 5).filter(Functions.alwaysTrue()));
}
@Test
public void conditionalNormalSlowpath() {
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.test(5)
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void conditionalSlowPathTakeExact() {
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.take(5)
.test()
.assertResult(1, 2, 3, 4, 5);
}
// Non-conditional counterpart of conditionalSlowPathTakeExact: take(5)
// cancels after exactly the range length on the plain slow path.
// NOTE: this chain must NOT contain filter() — with filter() the
// downstream becomes a ConditionalSubscriber and this test would be an
// exact duplicate of conditionalSlowPathTakeExact, leaving the plain
// slow-path/take interaction untested.
@Test
public void slowPathTakeExact() {
Flowable.range(1, 5)
.take(5)
.test()
.assertResult(1, 2, 3, 4, 5);
}
// rebatchRequests(1) forces one-at-a-time requesting, driving the
// conditional slow path through repeated small requests.
@Test
public void conditionalSlowPathRebatch() {
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.rebatchRequests(1)
.test()
.assertResult(1, 2, 3, 4, 5);
}
// Same one-at-a-time requesting on the plain (non-conditional) slow path.
@Test
public void slowPathRebatch() {
Flowable.range(1, 5)
.rebatchRequests(1)
.test()
.assertResult(1, 2, 3, 4, 5);
}
// Cancel from within onNext on the bounded (slow) path: no further items
// may be emitted after the in-onNext cancel(); onComplete() is called by
// the test itself so assertResult(1) can verify a single item + completion.
@Test
public void slowPathCancel() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(2L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
}
};
Flowable.range(1, 5)
.subscribe(ts);
ts.assertResult(1);
}
// Same in-onNext cancel, but with an unbounded request (fast path).
@Test
public void fastPathCancel() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
}
};
Flowable.range(1, 5)
.subscribe(ts);
ts.assertResult(1);
}
// In-onNext cancel on the conditional slow path (bounded request of 1):
// the emission loop must stop immediately after the first item.
@Test
public void conditionalSlowPathCancel() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(1L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
}
};
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.subscribe(ts);
ts.assertResult(1);
}
// In-onNext cancel on the conditional fast path (unbounded request).
@Test
public void conditionalFastPathCancel() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
onComplete();
}
};
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.subscribe(ts);
ts.assertResult(1);
}
// One-by-one requesting where the predicate drops items: a dropped item
// must NOT consume downstream credit, so only even values arrive.
@Test
public void conditionalRequestOneByOne() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(1L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
request(1);
}
};
Flowable.range(1, 5)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 == 0;
}
})
.subscribe(ts);
ts.assertResult(2, 4);
}
// One-by-one requesting with an always-true predicate: every item passes
// and the reentrant request(1) calls must drain the full range.
@Test
public void conditionalRequestOneByOne2() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(1L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
request(1);
}
};
Flowable.range(1, 5)
.filter(Functions.alwaysTrue())
.subscribe(ts);
ts.assertResult(1, 2, 3, 4, 5);
}
// Cancel exactly on the last item of the fast path: the source must not
// attempt a second terminal event after the subscriber's own onComplete().
@Test
public void fastPathCancelExact() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 5L) {
cancel();
onComplete();
}
}
};
Flowable.range(1, 5)
.subscribe(ts);
ts.assertResult(1, 2, 3, 4, 5);
}
// Same exact-last-item cancel on the conditional fast path; the predicate
// drops odd values so the last delivered item is 4 (t == 5L never fires
// downstream — cancel happens via range completing normally first).
// NOTE(review): the t == 5L branch is unreachable here since 5 is odd and
// filtered out; the assertion only checks the filtered result.
@Test
public void conditionalFastPathCancelExact() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 5L) {
cancel();
onComplete();
}
}
};
Flowable.range(1, 5)
.filter(new Predicate<Integer>() {
@Override
public boolean test(Integer v) throws Exception {
return v % 2 == 0;
}
})
.subscribe(ts);
ts.assertResult(2, 4);
}
// Cancel on the FIRST item of a two-item conditional range with credit
// for both: the second item must be suppressed.
@Test
public void conditionalCancel1() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(2L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 1) {
cancel();
onComplete();
}
}
};
Flowable.range(1, 2)
.filter(Functions.alwaysTrue())
.subscribe(ts);
ts.assertResult(1);
}
// Cancel on the LAST item of a two-item conditional range: both items
// arrive and no duplicate terminal event follows.
@Test
public void conditionalCancel2() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(2L) {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 2) {
cancel();
onComplete();
}
}
};
Flowable.range(1, 2)
.filter(Functions.alwaysTrue())
.subscribe(ts);
ts.assertResult(1, 2);
}
}
| |
package info.bitrich.xchangestream.bitfinex;
import static org.junit.Assert.assertEquals;
import info.bitrich.xchangestream.bitfinex.dto.BitfinexWebSocketAuthOrder;
import info.bitrich.xchangestream.bitfinex.dto.BitfinexWebSocketAuthTrade;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.Date;
import org.junit.Test;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderStatus;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
/**
 * Unit tests for {@code BitfinexStreamingAdapters}: adapting Bitfinex
 * websocket auth-channel order and trade payloads into XChange
 * {@link Order} / {@link UserTrade} objects. The positional constructor
 * arguments mirror the Bitfinex array-based wire format; each is labelled
 * with a trailing comment — keep those labels aligned with the
 * BitfinexWebSocketAuthOrder/Trade constructors when editing.
 */
public class BitfinexStreamingAdaptersTest {
// A fully-executed MARKET order adapts to a FILLED BID with zero remaining.
@Test
public void testMarketOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205259L, // mtsUpdate
new BigDecimal("0.000"), // amount
new BigDecimal("0.004"), // amountOrig
"MARKET", // type
null, // typePrev
"EXECUTED @ 3495.1(0.004)", // orderStatus
new BigDecimal("3495.2"), // price
new BigDecimal("3495.2"), // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
// TODO awaiting https://github.com/knowm/XChange/pull/2907 then I can add market order
// support to XChange itself. In the meantime these are returned as limit orders.
Order adaptedOrder = BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.BID, adaptedOrder.getType());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getAveragePrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getCumulativeAmount());
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
// TODO see above. should be:
// assertEquals(Collections.singleton(BitfinexOrderFlags.MARGIN), adaptedOrder.getOrderFlags());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.000"), adaptedOrder.getRemainingAmount());
assertEquals(OrderStatus.FILLED, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// A fully-executed STOP order adapts the same way as MARKET (see TODO above).
@Test
public void testStopOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205259L, // mtsUpdate
new BigDecimal("0.000"), // amount
new BigDecimal("0.004"), // amountOrig
"STOP", // type
null, // typePrev
"EXECUTED @ 3495.1(0.004)", // orderStatus
new BigDecimal("3495.2"), // price
new BigDecimal("3495.2"), // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
// TODO awaiting https://github.com/knowm/XChange/pull/2907 then I can add market order
// support to XChange itself. In the meantime these are returned as limit orders.
Order adaptedOrder = BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.BID, adaptedOrder.getType());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getAveragePrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getCumulativeAmount());
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
// TODO see above. should be:
// assertEquals(Collections.singleton(BitfinexOrderFlags.MARGIN), adaptedOrder.getOrderFlags());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.000"), adaptedOrder.getRemainingAmount());
assertEquals(OrderStatus.FILLED, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// An ACTIVE exchange limit order adapts to a NEW LimitOrder with the full
// original amount still remaining and a zero average price.
@Test
public void testNewLimitOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205267L, // mtsUpdate
new BigDecimal("0.004"), // amount
new BigDecimal("0.004"), // amountOrig
"EXCHANGE LIMIT", // type
null, // typePrev
"ACTIVE", // orderStatus
new BigDecimal("3495.2"), // price
BigDecimal.ZERO, // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
LimitOrder adaptedOrder =
(LimitOrder) BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.BID, adaptedOrder.getType());
assertEquals(BigDecimal.ZERO, adaptedOrder.getAveragePrice());
assertEquals(0, BigDecimal.ZERO.compareTo(adaptedOrder.getCumulativeAmount()));
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getLimitPrice());
assertEquals(Collections.emptySet(), adaptedOrder.getOrderFlags());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getRemainingAmount());
assertEquals(OrderStatus.NEW, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// A CANCELED sell limit order: negative Bitfinex amounts mean ASK, and the
// adapter reports amounts as absolute values.
@Test
public void testCancelledLimitOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205267L, // mtsUpdate
new BigDecimal("-0.004"), // amount
new BigDecimal("-0.004"), // amountOrig
"LIMIT", // type
null, // typePrev
"CANCELED", // orderStatus
new BigDecimal("3495.2"), // price
BigDecimal.ZERO, // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
LimitOrder adaptedOrder =
(LimitOrder) BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.ASK, adaptedOrder.getType());
assertEquals(BigDecimal.ZERO, adaptedOrder.getAveragePrice());
assertEquals(0, BigDecimal.ZERO.compareTo(adaptedOrder.getCumulativeAmount()));
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getLimitPrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getRemainingAmount());
// TODO see above. should be:
// assertEquals(Collections.singleton(BitfinexOrderFlags.MARGIN), adaptedOrder.getOrderFlags());
assertEquals(OrderStatus.CANCELED, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// A partially filled sell limit order: cumulative = |amountOrig| - |amount|.
@Test
public void testPartiallyFilledLimitOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205267L, // mtsUpdate
new BigDecimal("-0.001"), // amount
new BigDecimal("-0.004"), // amountOrig
"LIMIT", // type
null, // typePrev
"PARTIALLY FILLED @ 3495.1(0.003)", // orderStatus
new BigDecimal("3495.2"), // price
new BigDecimal("3495.1"), // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
LimitOrder adaptedOrder =
(LimitOrder) BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.ASK, adaptedOrder.getType());
assertEquals(new BigDecimal("3495.1"), adaptedOrder.getAveragePrice());
assertEquals(new BigDecimal("0.003"), adaptedOrder.getCumulativeAmount());
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getLimitPrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.001"), adaptedOrder.getRemainingAmount());
// TODO see above. should be:
// assertEquals(Collections.singleton(BitfinexOrderFlags.MARGIN), adaptedOrder.getOrderFlags());
assertEquals(OrderStatus.PARTIALLY_FILLED, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// A fully executed buy limit order (previously partially filled) adapts to
// FILLED with zero remaining and empty order flags.
@Test
public void testExecutedLimitOrder() {
BitfinexWebSocketAuthOrder bitfinexWebSocketAuthOrder =
new BitfinexWebSocketAuthOrder(
123123123L, // id,
0L, // groupId,
456456456L, // cid,
"tBTCUSD", // symbol,
1548674205259L, // mtsCreate
1548674205267L, // mtsUpdate
BigDecimal.ZERO, // amount
new BigDecimal("0.004"), // amountOrig
"EXCHANGE LIMIT", // type
null, // typePrev
"EXECUTED @ 3495.1(0.004): was PARTIALLY FILLED @ 3495.1(0.003)", // orderStatus
new BigDecimal("3495.2"), // price
new BigDecimal("3495.1"), // priceAvg
BigDecimal.ZERO, // priceTrailing
BigDecimal.ZERO, // priceAuxLimit
0, // placedId
0 // flags
);
LimitOrder adaptedOrder =
(LimitOrder) BitfinexStreamingAdapters.adaptOrder(bitfinexWebSocketAuthOrder);
assertEquals("123123123", adaptedOrder.getId());
assertEquals(Order.OrderType.BID, adaptedOrder.getType());
assertEquals(new BigDecimal("3495.1"), adaptedOrder.getAveragePrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getCumulativeAmount());
assertEquals(CurrencyPair.BTC_USD, adaptedOrder.getCurrencyPair());
assertEquals(new BigDecimal("3495.2"), adaptedOrder.getLimitPrice());
assertEquals(new BigDecimal("0.004"), adaptedOrder.getOriginalAmount());
assertEquals(new BigDecimal("0.000"), adaptedOrder.getRemainingAmount());
assertEquals(Collections.emptySet(), adaptedOrder.getOrderFlags());
assertEquals(OrderStatus.FILLED, adaptedOrder.getStatus());
assertEquals(new Date(1548674205259L).getTime(), adaptedOrder.getTimestamp().getTime());
}
// A buy-side execution: positive execAmount means BID, and the negative
// Bitfinex fee is reported as an absolute amount in the base currency.
@Test
public void testTradeBuy() {
BitfinexWebSocketAuthTrade bitfinexWebSocketAuthTrade =
new BitfinexWebSocketAuthTrade(
335015622L, // id
"tBTCUSD", // pair
1548674247684L, // mtsCreate
21895093123L, // orderId
new BigDecimal("0.00341448"), // execAmount
new BigDecimal("3495.4"), // execPrice
"SHOULDNT MATTER", // orderType
new BigDecimal("3495.9"), // orderPrice
1548674247683L, // maker — NOTE(review): value looks like a timestamp; confirm against the Bitfinex auth-trade payload field order
new BigDecimal("-0.00000682896"), // fee
"BTC" // feeCurrency
);
UserTrade adapted = BitfinexStreamingAdapters.adaptUserTrade(bitfinexWebSocketAuthTrade);
assertEquals(CurrencyPair.BTC_USD, adapted.getCurrencyPair());
assertEquals(new BigDecimal("0.00000682896"), adapted.getFeeAmount());
assertEquals(CurrencyPair.BTC_USD.base, adapted.getFeeCurrency());
assertEquals("335015622", adapted.getId());
assertEquals("21895093123", adapted.getOrderId());
assertEquals(new BigDecimal("0.00341448"), adapted.getOriginalAmount());
assertEquals(new BigDecimal("3495.4"), adapted.getPrice());
assertEquals(new Date(1548674247684L).getTime(), adapted.getTimestamp().getTime());
assertEquals(OrderType.BID, adapted.getType());
}
// A sell-side execution: negative execAmount means ASK; amount and fee are
// reported as absolute values.
@Test
public void testTradeSell() {
BitfinexWebSocketAuthTrade bitfinexWebSocketAuthTrade =
new BitfinexWebSocketAuthTrade(
335015622L, // id
"tBTCUSD", // pair
1548674247684L, // mtsCreate
21895093123L, // orderId
new BigDecimal("-0.00341448"), // execAmount
new BigDecimal("3495.4"), // execPrice
"SHOULDNT MATTER", // orderType
new BigDecimal("3495.9"), // orderPrice
1548674247683L, // maker — NOTE(review): value looks like a timestamp; confirm against the Bitfinex auth-trade payload field order
new BigDecimal("0.00000682896"), // fee
"BTC" // feeCurrency
);
UserTrade adapted = BitfinexStreamingAdapters.adaptUserTrade(bitfinexWebSocketAuthTrade);
assertEquals(CurrencyPair.BTC_USD, adapted.getCurrencyPair());
assertEquals(new BigDecimal("0.00000682896"), adapted.getFeeAmount());
assertEquals(CurrencyPair.BTC_USD.base, adapted.getFeeCurrency());
assertEquals("335015622", adapted.getId());
assertEquals("21895093123", adapted.getOrderId());
assertEquals(new BigDecimal("0.00341448"), adapted.getOriginalAmount());
assertEquals(new BigDecimal("3495.4"), adapted.getPrice());
assertEquals(new Date(1548674247684L).getTime(), adapted.getTimestamp().getTime());
assertEquals(OrderType.ASK, adapted.getType());
}
}
| |
// Copyright (c) 1999-2004 Brian Wellington (bwelling@xbill.org)
package org.xbill.DNS;
import org.xbill.DNS.utils.base64;
import java.io.IOException;
import java.util.Date;
/**
* Transaction Key - used to compute and/or securely transport a shared
* secret to be used with TSIG.
* @see TSIG
*
* @author Brian Wellington
*/
public class TKEYRecord extends Record {
private static final long serialVersionUID = 8828458121926391756L;
// The shared key's algorithm name.
private Name alg;
// Start of the validity period of the keying material.
private Date timeInception;
// End of the validity period of the keying material.
private Date timeExpire;
// Key agreement mode and extended error code (both 16-bit on the wire).
private int mode, error;
// Shared secret / keying material; null when absent on the wire.
private byte [] key;
// "Other data" field; currently unused, null when absent on the wire.
private byte [] other;
/** The key is assigned by the server (unimplemented) */
public static final int SERVERASSIGNED = 1;
/** The key is computed using a Diffie-Hellman key exchange */
public static final int DIFFIEHELLMAN = 2;
/** The key is computed using GSS_API (unimplemented) */
public static final int GSSAPI = 3;
/** The key is assigned by the resolver (unimplemented) */
public static final int RESOLVERASSIGNED = 4;
/** The key should be deleted */
public static final int DELETE = 5;
TKEYRecord() {}
// Factory used by the Record framework to create empty instances for parsing.
Record
getObject() {
return new TKEYRecord();
}
/**
 * Creates a TKEY Record from the given data.
 * @param name The owner name of the record
 * @param dclass The record's class
 * @param ttl The record's time to live
 * @param alg The shared key's algorithm
 * @param timeInception The beginning of the validity period of the shared
 * secret or keying material
 * @param timeExpire The end of the validity period of the shared
 * secret or keying material
 * @param mode The mode of key agreement
 * @param error The extended error field. Should be 0 in queries
 * @param key The shared secret
 * @param other The other data field. Currently unused in responses.
 */
public
TKEYRecord(Name name, int dclass, long ttl, Name alg,
Date timeInception, Date timeExpire, int mode, int error,
byte [] key, byte other[])
{
super(name, Type.TKEY, dclass, ttl);
this.alg = checkName("alg", alg);
this.timeInception = timeInception;
this.timeExpire = timeExpire;
this.mode = checkU16("mode", mode);
this.error = checkU16("error", error);
this.key = key;
this.other = other;
}
// Parses the rdata from wire format: algorithm name, two 32-bit epoch
// seconds, mode, error, then length-prefixed key and other-data fields.
void
rrFromWire(DNSInput in) throws IOException {
alg = new Name(in);
timeInception = new Date(1000 * in.readU32());
timeExpire = new Date(1000 * in.readU32());
mode = in.readU16();
error = in.readU16();
int keylen = in.readU16();
if (keylen > 0)
key = in.readByteArray(keylen);
else
key = null;
int otherlen = in.readU16();
if (otherlen > 0)
other = in.readByteArray(otherlen);
else
other = null;
}
// TKEY has no defined presentation (zone-file) format, so parsing from
// text always fails.
void
rdataFromString(Tokenizer st, Name origin) throws IOException {
throw st.exception("no text format defined for TKEY");
}
// Returns the mnemonic for a known mode, or the numeric value otherwise.
protected String
modeString() {
switch (mode) {
case SERVERASSIGNED: return "SERVERASSIGNED";
case DIFFIEHELLMAN: return "DIFFIEHELLMAN";
case GSSAPI: return "GSSAPI";
case RESOLVERASSIGNED: return "RESOLVERASSIGNED";
case DELETE: return "DELETE";
default: return Integer.toString(mode);
}
}
/** Converts rdata to a String */
String
rrToString() {
StringBuffer sb = new StringBuffer();
sb.append(alg);
sb.append(" ");
if (Options.check("multiline"))
sb.append("(\n\t");
sb.append(FormattedTime.format(timeInception));
sb.append(" ");
sb.append(FormattedTime.format(timeExpire));
sb.append(" ");
sb.append(modeString());
sb.append(" ");
sb.append(Rcode.TSIGstring(error));
if (Options.check("multiline")) {
sb.append("\n");
if (key != null) {
sb.append(base64.formatString(key, 64, "\t", false));
sb.append("\n");
}
if (other != null)
sb.append(base64.formatString(other, 64, "\t", false));
sb.append(" )");
} else {
sb.append(" ");
if (key != null) {
sb.append(base64.toString(key));
sb.append(" ");
}
if (other != null)
sb.append(base64.toString(other));
}
return sb.toString();
}
/** Returns the shared key's algorithm */
public Name
getAlgorithm() {
return alg;
}
/**
 * Returns the beginning of the validity period of the shared secret or
 * keying material
 */
public Date
getTimeInception() {
return timeInception;
}
/**
 * Returns the end of the validity period of the shared secret or
 * keying material
 */
public Date
getTimeExpire() {
return timeExpire;
}
/** Returns the key agreement mode */
public int
getMode() {
return mode;
}
/** Returns the extended error */
public int
getError() {
return error;
}
/** Returns the shared secret or keying material */
public byte []
getKey() {
return key;
}
/** Returns the other data */
public byte []
getOther() {
return other;
}
// Serializes the rdata to wire format; absent key/other fields are written
// as zero-length (a 0 length prefix and no payload).
void
rrToWire(DNSOutput out, Compression c, boolean canonical) {
alg.toWire(out, null, canonical);
out.writeU32(timeInception.getTime() / 1000);
out.writeU32(timeExpire.getTime() / 1000);
out.writeU16(mode);
out.writeU16(error);
if (key != null) {
out.writeU16(key.length);
out.writeByteArray(key);
}
else
out.writeU16(0);
if (other != null) {
out.writeU16(other.length);
out.writeByteArray(other);
}
else
out.writeU16(0);
}
}
| |
package nl.esciencecenter.aether.io;
import java.io.IOException;
import java.io.ObjectStreamClass;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
 * Sun/Oracle-JDK-specific reflection tricks: obtains the JDK-internal
 * Unsafe instance (via ObjectStreamClass$FieldReflector) to write final
 * fields during deserialization, and ObjectStreamClass.newInstance to
 * create objects without running a constructor. All lookups happen in the
 * static initializer; {@link #available} records whether they succeeded.
 */
class SunJavaStuff extends JavaDependantStuff {
/** newInstance method of ObjectStreamClass, if it exists. */
private static Method newInstance = null;
// Only works as of Java 1.4, earlier versions of Java don't have Unsafe.
// Use introspection, so that it at least compiles on systems that don't
// have unsafe.
private static Object unsafe = null;
private static Method unsafeObjectFieldOffsetMethod;
private static Method unsafePutDoubleMethod;
private static Method unsafePutLongMethod;
private static Method unsafePutFloatMethod;
private static Method unsafePutIntMethod;
private static Method unsafePutShortMethod;
private static Method unsafePutCharMethod;
private static Method unsafePutBooleanMethod;
private static Method unsafePutByteMethod;
private static Method unsafePutObjectMethod;
/** Whether all reflective lookups in the static initializer succeeded. */
static boolean available = false;
static {
try {
newInstance = ObjectStreamClass.class.getDeclaredMethod(
"newInstance", new Class[] {});
newInstance.setAccessible(true);
// unsafe = Unsafe.getUnsafe();
// does not work when a classloader is present, so we get it
// from ObjectStreamClass.
Class<?> cl = Class
.forName("java.io.ObjectStreamClass$FieldReflector");
Field uf = cl.getDeclaredField("unsafe");
uf.setAccessible(true);
unsafe = uf.get(null);
cl = unsafe.getClass();
unsafeObjectFieldOffsetMethod = cl.getMethod("objectFieldOffset",
new Class[] { Field.class });
unsafePutDoubleMethod = cl.getMethod("putDouble", new Class[] {
Object.class, Long.TYPE, Double.TYPE });
unsafePutLongMethod = cl.getMethod("putLong", new Class[] {
Object.class, Long.TYPE, Long.TYPE });
unsafePutFloatMethod = cl.getMethod("putFloat", new Class[] {
Object.class, Long.TYPE, Float.TYPE });
unsafePutIntMethod = cl.getMethod("putInt", new Class[] {
Object.class, Long.TYPE, Integer.TYPE });
unsafePutShortMethod = cl.getMethod("putShort", new Class[] {
Object.class, Long.TYPE, Short.TYPE });
unsafePutCharMethod = cl.getMethod("putChar", new Class[] {
Object.class, Long.TYPE, Character.TYPE });
unsafePutByteMethod = cl.getMethod("putByte", new Class[] {
Object.class, Long.TYPE, Byte.TYPE });
unsafePutBooleanMethod = cl.getMethod("putBoolean", new Class[] {
Object.class, Long.TYPE, Boolean.TYPE });
unsafePutObjectMethod = cl.getMethod("putObject", new Class[] {
Object.class, Long.TYPE, Object.class });
available = true;
} catch (Throwable e) {
logger.info("Sun Java Stuff not available", e);
}
}
SunJavaStuff(Class<?> clazz) {
super(clazz);
if (! available) {
throw new Error("SunJavaStuff not available");
}
}
/**
 * Shared implementation for all setFieldXxx methods: looks up the named
 * declared field of {@code clazz}, computes its Unsafe offset, and invokes
 * the given Unsafe put method. Primitive values arrive pre-boxed, exactly
 * as reflective invoke would box them in the previous per-method copies.
 *
 * @param putMethod the Unsafe putXxx method matching the field's type
 * @param ref object whose field is written
 * @param fieldname name of the (possibly final) field
 * @param value value to assign (boxed for primitives)
 * @exception IOException wraps any reflection failure
 */
private void putField(Method putMethod, Object ref, String fieldname,
Object value) throws IOException {
try {
Field f = clazz.getDeclaredField(fieldname);
Object key = unsafeObjectFieldOffsetMethod.invoke(unsafe, f);
putMethod.invoke(unsafe, ref, key, value);
} catch (Throwable ex) {
throw new IOException("got exception", ex);
}
}
/**
 * This method assigns the specified value to a final field.
 *
 * @param ref
 * object with a final field
 * @param fieldname
 * name of the field
 * @param d
 * value to be assigned
 * @exception IOException
 * is thrown when an IO error occurs.
 */
public void setFieldDouble(Object ref, String fieldname, double d)
throws IOException {
putField(unsafePutDoubleMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldLong(Object ref, String fieldname, long d)
throws IOException {
putField(unsafePutLongMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldFloat(Object ref, String fieldname, float d)
throws IOException {
putField(unsafePutFloatMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldInt(Object ref, String fieldname, int d)
throws IOException {
putField(unsafePutIntMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldShort(Object ref, String fieldname, short d)
throws IOException {
putField(unsafePutShortMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldChar(Object ref, String fieldname, char d)
throws IOException {
putField(unsafePutCharMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldByte(Object ref, String fieldname, byte d)
throws IOException {
putField(unsafePutByteMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldBoolean(Object ref, String fieldname, boolean d)
throws IOException {
putField(unsafePutBooleanMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 */
public void setFieldString(Object ref, String fieldname, String d)
throws IOException {
putField(unsafePutObjectMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 *
 */
public void setFieldClass(Object ref, String fieldname, Class<?> d)
throws IOException {
putField(unsafePutObjectMethod, ref, fieldname, d);
}
/**
 * See {@link #setFieldDouble(Object, String, double)} for a description.
 * Unlike the other setters this validates that the value is assignable to
 * the field's declared type before writing, so the field lookup is kept
 * inline here rather than delegated to putField.
 *
 * @param fieldsig
 * signature of the field
 */
public void setFieldObject(Object ref, String fieldname, Object d,
String fieldsig) throws IOException {
try {
Field f = clazz.getDeclaredField(fieldname);
if (d != null && !f.getType().isInstance(d)) {
throw new IOException("wrong field type");
}
Object key = unsafeObjectFieldOffsetMethod.invoke(unsafe, f);
unsafePutObjectMethod.invoke(unsafe, ref, key, d);
} catch (Throwable ex) {
throw new IOException("got exception", ex);
}
}
/**
 * Try to create an object through the newInstance method of
 * ObjectStreamClass. Return null if it fails for some reason.
 */
Object newInstance() {
try {
return newInstance.invoke(objectStreamClass,
(java.lang.Object[]) null);
} catch (Throwable e) {
// System.out.println("newInstance fails: got exception " + e);
return null;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyByteFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyShortFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicInteger;
import static org.hamcrest.Matchers.containsString;
public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
// Registers the internal-settings test plugin so the index created in
// these tests accepts internal-only settings.
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void testGetForFieldDefaults() {
final IndexService indexService = createIndex("test");
final IndexFieldDataService ifdService = indexService.fieldData();
final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
final MappedFieldType stringMapper = new KeywordFieldMapper.Builder("string").build(ctx).fieldType();
ifdService.clear();
IndexFieldData<?> fd = ifdService.getForField(stringMapper);
assertTrue(fd instanceof SortedSetDVOrdinalsIndexFieldData);
for (MappedFieldType mapper : Arrays.asList(
new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.BYTE).build(ctx).fieldType(),
new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.SHORT).build(ctx).fieldType(),
new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.INTEGER).build(ctx).fieldType(),
new NumberFieldMapper.Builder("long", NumberFieldMapper.NumberType.LONG).build(ctx).fieldType()
)) {
ifdService.clear();
fd = ifdService.getForField(mapper);
assertTrue(fd instanceof SortedNumericDVIndexFieldData);
}
final MappedFieldType floatMapper = new NumberFieldMapper.Builder("float", NumberFieldMapper.NumberType.FLOAT)
.build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(floatMapper);
assertTrue(fd instanceof SortedNumericDVIndexFieldData);
final MappedFieldType doubleMapper = new NumberFieldMapper.Builder("double", NumberFieldMapper.NumberType.DOUBLE)
.build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(doubleMapper);
assertTrue(fd instanceof SortedNumericDVIndexFieldData);
}
public void testFieldDataCacheListener() throws Exception {
final IndexService indexService = createIndex("test");
final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
// copy the ifdService since we can set the listener only once.
final IndexFieldDataService ifdService = new IndexFieldDataService(indexService.getIndexSettings(),
indicesService.getIndicesFieldDataCache(), indicesService.getCircuitBreakerService(), indexService.mapperService());
final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
final MappedFieldType mapper1 = new TextFieldMapper.Builder("s").fielddata(true).build(ctx).fieldType();
final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new StringField("s", "thisisastring", Store.NO));
writer.addDocument(doc);
DirectoryReader open = DirectoryReader.open(writer);
final boolean wrap = randomBoolean();
final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open;
final AtomicInteger onCacheCalled = new AtomicInteger();
final AtomicInteger onRemovalCalled = new AtomicInteger();
ifdService.setListener(new IndexFieldDataCache.Listener() {
@Override
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
if (wrap) {
assertEquals(new ShardId("test", "_na_", 1), shardId);
} else {
assertNull(shardId);
}
onCacheCalled.incrementAndGet();
}
@Override
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
if (wrap) {
assertEquals(new ShardId("test", "_na_", 1), shardId);
} else {
assertNull(shardId);
}
onRemovalCalled.incrementAndGet();
}
});
IndexFieldData<?> ifd = ifdService.getForField(mapper1);
LeafReaderContext leafReaderContext = reader.getContext().leaves().get(0);
AtomicFieldData load = ifd.load(leafReaderContext);
assertEquals(1, onCacheCalled.get());
assertEquals(0, onRemovalCalled.get());
reader.close();
load.close();
writer.close();
assertEquals(1, onCacheCalled.get());
assertEquals(1, onRemovalCalled.get());
ifdService.clear();
}
public void testSetCacheListenerTwice() {
final IndexService indexService = createIndex("test");
IndexFieldDataService shardPrivateService = indexService.fieldData();
try {
shardPrivateService.setListener(new IndexFieldDataCache.Listener() {
@Override
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
}
@Override
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
}
});
fail("listener already set");
} catch (IllegalStateException ex) {
// all well
}
}
private void doTestRequireDocValues(MappedFieldType ft) {
ThreadPool threadPool = new ThreadPool("random_threadpool_name");
try {
IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), cache, null, null);
ft.setName("some_long");
ft.setHasDocValues(true);
ifds.getForField(ft); // no exception
ft.setHasDocValues(false);
try {
ifds.getForField(ft);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("doc values"));
}
} finally {
threadPool.shutdown();
}
}
public void testRequireDocValuesOnLongs() {
doTestRequireDocValues(new LegacyLongFieldMapper.LongFieldType());
}
public void testRequireDocValuesOnDoubles() {
doTestRequireDocValues(new LegacyDoubleFieldMapper.DoubleFieldType());
}
public void testRequireDocValuesOnBools() {
doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.run;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.oak.commons.PropertiesUtil.populate;
import static org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentNodeStoreBuilder.newMongoDocumentNodeStoreBuilder;
import static org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentNodeStoreBuilder.newRDBDocumentNodeStoreBuilder;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.jcr.RepositoryException;
import javax.sql.DataSource;
import joptsimple.OptionSpecBuilder;
import org.apache.commons.io.FileUtils;
import org.apache.felix.cm.file.ConfigurationHandler;
import org.apache.jackrabbit.core.data.DataStore;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage.AzureDataStore;
import org.apache.jackrabbit.oak.blob.cloud.s3.S3DataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBuilder;
import org.apache.jackrabbit.oak.plugins.document.LeaseCheckMode;
import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDataSourceFactory;
import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection;
import org.apache.jackrabbit.oak.run.cli.DummyDataStore;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.jetbrains.annotations.Nullable;
import com.google.common.collect.Maps;
import com.google.common.io.Closer;
import com.google.common.io.Files;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoURI;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
/**
 * Shared helpers for oak-run commands: parsing common node-store command
 * line options and bootstrapping NodeStore / DataStore instances from them.
 * All resources created here are registered with the supplied {@link Closer}.
 */
class Utils {

    /** Bytes per megabyte; used to scale the {@code --cacheSize} option. */
    private static final long MB = 1024 * 1024;

    /**
     * Common command line options for tools that open a node store
     * (MongoDB, RDB via JDBC, or a local SegmentTar directory).
     */
    public static class NodeStoreOptions {

        public final OptionParser parser;
        public final OptionSpec<String> rdbjdbcuser;
        public final OptionSpec<String> rdbjdbcpasswd;
        public final OptionSpec<Integer> clusterId;
        public final OptionSpec<Void> disableBranchesSpec;
        public final OptionSpec<Integer> cacheSizeSpec;
        public final OptionSpec<?> help;
        public final OptionSpec<String> nonOption;

        // Assigned exactly once by parse(); enforced by the assert there.
        protected OptionSet options;

        /**
         * @param usage usage text shown for the non-option (positional) arguments
         */
        public NodeStoreOptions(String usage) {
            parser = new OptionParser();
            rdbjdbcuser = parser
                    .accepts("rdbjdbcuser", "RDB JDBC user")
                    .withOptionalArg().defaultsTo("");
            rdbjdbcpasswd = parser
                    .accepts("rdbjdbcpasswd", "RDB JDBC password")
                    .withOptionalArg().defaultsTo("");
            clusterId = parser
                    .accepts("clusterId", "MongoMK clusterId")
                    .withRequiredArg().ofType(Integer.class).defaultsTo(0);
            disableBranchesSpec = parser.
                    accepts("disableBranches", "disable branches");
            cacheSizeSpec = parser.
                    accepts("cacheSize", "cache size")
                    .withRequiredArg().ofType(Integer.class).defaultsTo(0);
            help = parser.acceptsAll(asList("h", "?", "help"), "show help").forHelp();
            nonOption = parser.nonOptions(usage);
        }

        /**
         * Parses the given arguments. Must be called exactly once.
         *
         * @return this, for chaining
         */
        public NodeStoreOptions parse(String[] args) {
            assert (options == null);
            options = parser.parse(args);
            return this;
        }

        /**
         * Prints the option help to the given stream and exits the JVM
         * with status 2.
         */
        public void printHelpOn(OutputStream sink) throws IOException {
            parser.printHelpOn(sink);
            System.exit(2);
        }

        /**
         * @return the first positional argument (the store URI/path),
         *         or the empty string if none was given
         */
        public String getStoreArg() {
            List<String> nonOptions = nonOption.values(options);
            return nonOptions.size() > 0 ? nonOptions.get(0) : "";
        }

        /**
         * @return all positional arguments after the store argument
         *         (may be empty), as a mutable list
         */
        public List<String> getOtherArgs() {
            List<String> args = new ArrayList<String>(nonOption.values(options));
            if (args.size() > 0) {
                args.remove(0); // drop the store argument itself
            }
            return args;
        }

        public int getClusterId() {
            return clusterId.value(options);
        }

        public boolean disableBranchesSpec() {
            return options.has(disableBranchesSpec);
        }

        /** @return configured cache size in MB; 0 means "use the default" */
        public int getCacheSize() {
            return cacheSizeSpec.value(options);
        }

        public String getRDBJDBCUser() {
            return rdbjdbcuser.value(options);
        }

        public String getRDBJDBCPassword() {
            return rdbjdbcpasswd.value(options);
        }
    }

    /**
     * Parses {@code args} with a fresh {@link NodeStoreOptions} and opens the
     * node store they describe.
     *
     * @param args raw command line arguments
     * @param closer closer the opened store is registered with
     * @param h usage text for the positional arguments
     */
    public static NodeStore bootstrapNodeStore(String[] args, Closer closer, String h) throws IOException, InvalidFileStoreVersionException {
        return bootstrapNodeStore(new NodeStoreOptions(h).parse(args), closer);
    }

    /**
     * Opens the node store described by the options' store argument:
     * a DocumentNodeStore for "mongodb:" / "jdbc" URIs, otherwise a
     * SegmentTar FileStore at the given directory path.
     */
    public static NodeStore bootstrapNodeStore(NodeStoreOptions options, Closer closer) throws IOException, InvalidFileStoreVersionException {
        String src = options.getStoreArg();
        if (src == null || src.length() == 0) {
            options.printHelpOn(System.err);
            System.exit(1);
        }
        if (src.startsWith(MongoURI.MONGODB_PREFIX) || src.startsWith("jdbc")) {
            DocumentNodeStoreBuilder<?> builder = createDocumentMKBuilder(options, closer);
            if (builder != null) {
                DocumentNodeStore store = builder.build();
                closer.register(asCloseable(store));
                return store;
            }
        }
        FileStore fileStore = fileStoreBuilder(new File(src))
                .withStrictVersionCheck(true)
                .build();
        closer.register(fileStore);
        return SegmentNodeStoreBuilders.builder(fileStore).build();
    }

    /**
     * Creates a DocumentNodeStore builder for a "mongodb:" or "jdbc" store
     * argument, applying clusterId, branch and cache-size options. Lease
     * checking is disabled because oak-run tools operate offline.
     *
     * @return the configured builder, or {@code null} if the store argument
     *         is neither a MongoDB URI nor a JDBC URL
     */
    @Nullable
    static DocumentNodeStoreBuilder<?> createDocumentMKBuilder(NodeStoreOptions options,
                                                               Closer closer)
            throws IOException {
        String src = options.getStoreArg();
        if (src == null || src.length() == 0) {
            options.printHelpOn(System.err);
            System.exit(1);
        }
        DocumentNodeStoreBuilder<?> builder;
        if (src.startsWith(MongoURI.MONGODB_PREFIX)) {
            MongoClientURI uri = new MongoClientURI(src);
            if (uri.getDatabase() == null) {
                System.err.println("Database missing in MongoDB URI: "
                        + uri.getURI());
                System.exit(1);
            }
            MongoConnection mongo = new MongoConnection(uri.getURI());
            closer.register(asCloseable(mongo));
            builder = newMongoDocumentNodeStoreBuilder().setMongoDB(
                    mongo.getMongoClient(), mongo.getDBName());
        } else if (src.startsWith("jdbc")) {
            DataSource ds = RDBDataSourceFactory.forJdbcUrl(src,
                    options.getRDBJDBCUser(), options.getRDBJDBCPassword());
            builder = newRDBDocumentNodeStoreBuilder().setRDBConnection(ds);
        } else {
            return null;
        }
        builder.
                setLeaseCheckMode(LeaseCheckMode.DISABLED).
                setClusterId(options.getClusterId());
        if (options.disableBranchesSpec()) {
            builder.disableBranches();
        }
        int cacheSize = options.getCacheSize();
        if (cacheSize != 0) {
            builder.memoryCacheSize(cacheSize * MB);
        }
        return builder;
    }

    /**
     * Creates the blob store selected by the {@code --s3ds}, {@code --fds},
     * {@code --azureblobds} or {@code --nods} option.
     *
     * @return the wrapping DataStoreBlobStore, or {@code null} when none of
     *         the data store options was given
     */
    @Nullable
    public static GarbageCollectableBlobStore bootstrapDataStore(String[] args, Closer closer)
            throws IOException, RepositoryException {
        OptionParser parser = new OptionParser();
        // Other options on the command line belong to the caller's parser.
        parser.allowsUnrecognizedOptions();
        ArgumentAcceptingOptionSpec<String> s3dsConfig =
                parser.accepts("s3ds", "S3DataStore config").withRequiredArg().ofType(String.class);
        ArgumentAcceptingOptionSpec<String> fdsConfig =
                parser.accepts("fds", "FileDataStore config").withRequiredArg().ofType(String.class);
        ArgumentAcceptingOptionSpec<String> azureBlobDSConfig =
                parser.accepts("azureblobds", "AzureBlobStorageDataStore config").withRequiredArg().ofType(String.class);
        OptionSpecBuilder nods = parser.accepts("nods", "No DataStore ");
        OptionSet options = parser.parse(args);
        if (!options.has(s3dsConfig) && !options.has(fdsConfig) && !options.has(azureBlobDSConfig) && !options.has(nods)) {
            return null;
        }
        DataStore delegate;
        if (options.has(s3dsConfig)) {
            S3DataStore s3ds = new S3DataStore();
            String cfgPath = s3dsConfig.value(options);
            Properties props = loadAndTransformProps(cfgPath);
            s3ds.setProperties(props);
            // Temporary home directory, removed again when the closer runs.
            File homeDir = Files.createTempDir();
            closer.register(asCloseable(homeDir));
            s3ds.init(homeDir.getAbsolutePath());
            delegate = s3ds;
        } else if (options.has(azureBlobDSConfig)) {
            AzureDataStore azureds = new AzureDataStore();
            String cfgPath = azureBlobDSConfig.value(options);
            Properties props = loadAndTransformProps(cfgPath);
            azureds.setProperties(props);
            File homeDir = Files.createTempDir();
            azureds.init(homeDir.getAbsolutePath());
            closer.register(asCloseable(homeDir));
            delegate = azureds;
        } else if (options.has(nods)) {
            delegate = new DummyDataStore();
            File homeDir = Files.createTempDir();
            delegate.init(homeDir.getAbsolutePath());
            closer.register(asCloseable(homeDir));
        } else {
            delegate = new OakFileDataStore();
            String cfgPath = fdsConfig.value(options);
            Properties props = loadAndTransformProps(cfgPath);
            populate(delegate, asMap(props), true);
            delegate.init(null);
        }
        DataStoreBlobStore blobStore = new DataStoreBlobStore(delegate);
        closer.register(Utils.asCloseable(blobStore));
        return blobStore;
    }

    /** Wraps a DocumentNodeStore so that closing it disposes the store. */
    static Closeable asCloseable(final DocumentNodeStore dns) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                dns.dispose();
            }
        };
    }

    /** Wraps a MongoConnection so that closing it closes the connection. */
    private static Closeable asCloseable(final MongoConnection con) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                con.close();
            }
        };
    }

    /**
     * Wraps a DataStoreBlobStore; a DataStoreException on close is
     * re-thrown as IOException to satisfy the Closeable contract.
     */
    static Closeable asCloseable(final DataStoreBlobStore blobStore) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                try {
                    blobStore.close();
                } catch (DataStoreException e) {
                    throw new IOException(e);
                }
            }
        };
    }

    /** Wraps a directory so that closing it deletes the directory tree. */
    static Closeable asCloseable(final File dir) {
        return new Closeable() {
            @Override
            public void close() throws IOException {
                FileUtils.deleteDirectory(dir);
            }
        };
    }

    /**
     * Reads an OSGi-style config file via Felix's ConfigurationHandler and
     * copies its entries into a Properties object.
     *
     * <p>The input stream is opened in try-with-resources; previously it was
     * never closed, leaking a file descriptor per invocation.
     *
     * @param cfgPath path to the config file
     * @throws IOException if the file cannot be read or parsed
     */
    private static Properties loadAndTransformProps(String cfgPath) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(cfgPath)) {
            Dictionary dict = ConfigurationHandler.read(in);
            Enumeration keys = dict.keys();
            while (keys.hasMoreElements()) {
                String key = (String) keys.nextElement();
                props.put(key, dict.get(key));
            }
        }
        return props;
    }

    /** Converts a Properties object into a String-keyed map view (copy). */
    private static Map<String, ?> asMap(Properties props) {
        Map<String, Object> map = Maps.newHashMap();
        for (Object key : props.keySet()) {
            map.put((String) key, props.get(key));
        }
        return map;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2.twostep;
import java.lang.reflect.Field;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.events.CacheQueryReadEvent;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.query.CacheQueryType;
import org.apache.ignite.internal.processors.cache.query.GridCacheSqlQuery;
import org.apache.ignite.internal.processors.cache.tree.CacheDataTree;
import org.apache.ignite.internal.processors.query.h2.H2PooledConnection;
import org.apache.ignite.internal.processors.query.h2.H2QueryFetchSizeInterceptor;
import org.apache.ignite.internal.processors.query.h2.H2Utils;
import org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing;
import org.apache.ignite.internal.processors.query.h2.MapH2QueryInfo;
import org.apache.ignite.internal.processors.query.h2.opt.GridH2Table;
import org.apache.ignite.internal.processors.query.h2.opt.GridH2ValueCacheObject;
import org.apache.ignite.internal.processors.query.h2.opt.QueryContext;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.h2.engine.Session;
import org.h2.jdbc.JdbcResultSet;
import org.h2.result.LazyResult;
import org.h2.result.ResultInterface;
import org.h2.value.Value;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_OBJECT_READ;
/**
* Mapper result for a single part of the query.
*/
/**
 * Mapper result for a single part of the query.
 *
 * <p>Holds the H2 result set for one map query, pages rows back to the
 * reducer via {@link #fetchNextPage}, and manages the detached pooled
 * connection plus table locks needed for lazy query execution.
 */
class MapQueryResult {
    /** Reflective accessor for H2's private {@code JdbcResultSet.result} field. */
    private static final Field RESULT_FIELD;

    /*
     * Initialize the reflective field accessor once per class load.
     */
    static {
        try {
            RESULT_FIELD = JdbcResultSet.class.getDeclaredField("result");
            RESULT_FIELD.setAccessible(true);
        }
        catch (NoSuchFieldException e) {
            // Field name is H2-version specific; a mismatched H2 jar breaks this.
            throw new IllegalStateException("Check H2 version in classpath.", e);
        }
    }

    /** Indexing. */
    private final IgniteH2Indexing h2;

    /** Cache context; {@code null} for results not bound to a cache. */
    private final GridCacheContext<?, ?> cctx;

    /** The map query this result belongs to. */
    private final GridCacheSqlQuery qry;

    /** Node that originated the query. */
    private final UUID qrySrcNodeId;

    /** Currently open result; replaced via openResult(). */
    private volatile Result res;

    /** Logger. */
    private final IgniteLogger log;

    /** Query parameters (only used for read-event recording). */
    private final Object[] params;

    /** Number of pages fetched so far. */
    private int page;

    /** Whether rows must be deep-copied before returning (copy-on-read). */
    private boolean cpNeeded;

    /** Set once by close(); checked under {@link #lock}. */
    private volatile boolean closed;

    /** H2 session. */
    private final Session ses;

    /** Detached connection. Used for lazy execution to prevent connection sharing. */
    private H2PooledConnection conn;

    /** Guards fetch/close; callers must hold it (asserted). */
    private final ReentrantLock lock = new ReentrantLock();

    /**
     * @param h2 H2 indexing.
     * @param cctx Cache context.
     * @param qrySrcNodeId Query source node.
     * @param qry Query.
     * @param params Query params.
     * @param conn H2 connection wrapper.
     * @param log Logger.
     */
    MapQueryResult(IgniteH2Indexing h2, @Nullable GridCacheContext cctx,
        UUID qrySrcNodeId, GridCacheSqlQuery qry, Object[] params, H2PooledConnection conn, IgniteLogger log) {
        this.h2 = h2;
        this.cctx = cctx;
        this.qry = qry;
        this.params = params;
        this.qrySrcNodeId = qrySrcNodeId;
        // Copy rows only when the query originated on this very node
        // (local reads would otherwise alias cache-internal objects).
        this.cpNeeded = F.eq(h2.kernalContext().localNodeId(), qrySrcNodeId);
        this.log = log;
        this.conn = conn;
        ses = H2Utils.session(conn.connection());
    }

    /** Installs the executed result set for subsequent page fetches. */
    void openResult(@NotNull ResultSet rs, MapH2QueryInfo qryInfo) {
        res = new Result(rs, qryInfo);
    }

    /**
     * @return Page number.
     */
    int page() {
        return page;
    }

    /**
     * @return Row count (-1 when the result is lazy and the count is unknown).
     */
    int rowCount() {
        assert res != null;
        return res.rowCnt;
    }

    /**
     * @return Column count.
     */
    int columnCount() {
        assert res != null;
        return res.cols;
    }

    /**
     * @return Closed flag.
     */
    boolean closed() {
        return closed;
    }

    /**
     * Fetches up to {@code pageSize} rows into {@code rows}.
     * Caller must hold {@link #lock} (asserted).
     *
     * @param rows Collection to fetch into.
     * @param pageSize Page size.
     * @param dataPageScanEnabled If data page scan is enabled.
     * @return {@code true} If there are no more rows available.
     */
    boolean fetchNextPage(List<Value[]> rows, int pageSize, Boolean dataPageScanEnabled) {
        assert lock.isHeldByCurrentThread();

        if (closed)
            return true;

        assert res != null;

        // Record read events only when the cache is named and the event type is enabled.
        boolean readEvt = cctx != null && cctx.name() != null && cctx.events().isRecordable(EVT_CACHE_QUERY_OBJECT_READ);

        // Bind the query context to this thread for the duration of the fetch.
        QueryContext.threadLocal(H2Utils.context(ses));

        page++;

        h2.enableDataPageScan(dataPageScanEnabled);

        try {
            for (int i = 0; i < pageSize; i++) {
                if (!res.res.next())
                    return true;

                Value[] row = res.res.currentRow();

                if (cpNeeded) {
                    boolean copied = false;

                    // Replace cache-object values with detached copies so the
                    // local caller cannot observe cache-internal instances.
                    for (int j = 0; j < row.length; j++) {
                        Value val = row[j];

                        if (val instanceof GridH2ValueCacheObject) {
                            GridH2ValueCacheObject valCacheObj = (GridH2ValueCacheObject)val;

                            row[j] = new GridH2ValueCacheObject(valCacheObj.getCacheObject(), h2.objectContext()) {
                                @Override public Object getObject() {
                                    return getObject(true);
                                }
                            };

                            copied = true;
                        }
                    }

                    if (i == 0 && !copied)
                        cpNeeded = false; // No copy on read caches, skip next checks.
                }

                assert row != null;

                if (readEvt) {
                    GridKernalContext ctx = h2.kernalContext();

                    ctx.event().record(new CacheQueryReadEvent<>(
                        ctx.discovery().localNode(),
                        "SQL fields query result set row read.",
                        EVT_CACHE_QUERY_OBJECT_READ,
                        CacheQueryType.SQL.name(),
                        cctx.name(),
                        null,
                        qry.query(),
                        null,
                        null,
                        params,
                        qrySrcNodeId,
                        null,
                        null,
                        null,
                        null,
                        row(row)));
                }

                // NOTE(review): assumes currentRow() returns the same array
                // instance mutated above, otherwise the copies are lost — verify
                // against the H2 ResultInterface implementation in use.
                rows.add(res.res.currentRow());

                res.fetchSizeInterceptor.checkOnFetchNext();
            }

            return !res.res.hasNext();
        }
        finally {
            // Always reset the thread-local data page scan flag.
            CacheDataTree.setDataPageScanEnabled(false);
        }
    }

    /**
     * @param row Values array row.
     * @return Objects list row.
     */
    private List<?> row(Value[] row) {
        List<Object> res = new ArrayList<>(row.length);

        for (Value v : row)
            res.add(v.getObject());

        return res;
    }

    /**
     * Close the result. Idempotent; caller must hold {@link #lock} (asserted).
     * Releases the underlying result set, resets the session and returns the
     * detached connection to the pool.
     */
    void close() {
        assert lock.isHeldByCurrentThread();

        if (closed)
            return;

        closed = true;

        if (res != null)
            res.close();

        H2Utils.resetSession(conn);

        conn.close();
    }

    /** Acquires {@link #lock} unless this thread already holds it (non-reentrant use). */
    public void lock() {
        if (!lock.isHeldByCurrentThread())
            lock.lock();
    }

    /** Read-locks the tables used by the session, but only for lazy execution. */
    public void lockTables() {
        if (!closed && ses.isLazyQueryExecution())
            GridH2Table.readLockTables(ses);
    }

    /** Releases {@link #lock} if held by this thread. */
    public void unlock() {
        if (lock.isHeldByCurrentThread())
            lock.unlock();
    }

    /** Releases the table read locks taken by {@link #lockTables()}. */
    public void unlockTables() {
        if (!closed && ses.isLazyQueryExecution())
            GridH2Table.unlockTables(ses);
    }

    /**
     * Checks table versions for lazy execution (detects concurrent schema changes).
     */
    public void checkTablesVersions() {
        if (ses.isLazyQueryExecution())
            GridH2Table.checkTablesVersions(ses);
    }

    /** Wrapper around one executed H2 result set. */
    private class Result {
        /** H2's internal result, extracted reflectively from the JDBC result set. */
        private final ResultInterface res;

        /** The JDBC result set being wrapped (closed via U.close). */
        private final ResultSet rs;

        /** Visible column count. */
        private final int cols;

        /** Row count, or -1 when the result is lazy. */
        private final int rowCnt;

        /** Tracks fetch sizes for diagnostics/warnings. */
        private final H2QueryFetchSizeInterceptor fetchSizeInterceptor;

        /**
         * Constructor.
         *
         * @param rs H2 result set.
         */
        Result(@NotNull ResultSet rs, MapH2QueryInfo qryInfo) {
            this.rs = rs;

            try {
                res = (ResultInterface)RESULT_FIELD.get(rs);
            }
            catch (IllegalAccessException e) {
                throw new IllegalStateException(e); // Must not happen.
            }

            // Lazy results cannot report a row count up front.
            rowCnt = (res instanceof LazyResult) ? -1 : res.getRowCount();
            cols = res.getVisibleColumnCount();

            fetchSizeInterceptor = new H2QueryFetchSizeInterceptor(h2, qryInfo, log);
        }

        /** Closes the result set, notifying the fetch-size interceptor first. */
        void close() {
            fetchSizeInterceptor.checkOnClose();

            U.close(rs, log);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eigenbase.sql;
import java.util.*;
import org.eigenbase.sql.parser.*;
import org.eigenbase.sql.util.*;
import org.eigenbase.sql.validate.*;
import org.eigenbase.util.Util;
import com.google.common.collect.ImmutableList;
/**
* A <code>SqlIdentifier</code> is an identifier, possibly compound.
*/
/**
 * A <code>SqlIdentifier</code> is an identifier, possibly compound,
 * such as <code>EMP</code> or <code>EMP.DEPTNO</code>.
 */
public class SqlIdentifier extends SqlNode {
  //~ Instance fields --------------------------------------------------------

  /**
   * Array of the components of this compound identifier.
   *
   * <p>It's convenient to have this member public, and it's convenient to
   * have this member not-final, but it's a shame it's public and not-final.
   * If you assign to this member, please use
   * {@link #setNames(java.util.List, java.util.List)}.
   * And yes, we'd like to make identifiers immutable one day.
   */
  public ImmutableList<String> names;

  /**
   * This identifier's collation (if any).
   */
  final SqlCollation collation;

  /**
   * Positions of the individual components, or null when that detail was
   * not recorded by the parser.
   */
  private ImmutableList<SqlParserPos> componentPositions;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a compound identifier, for example <code>foo.bar</code>.
   *
   * @param names Parts of the identifier, length &ge; 1
   */
  public SqlIdentifier(
      List<String> names,
      SqlCollation collation,
      SqlParserPos pos,
      List<SqlParserPos> componentPositions) {
    super(pos);
    this.names = ImmutableList.copyOf(names);
    this.collation = collation;
    this.componentPositions = componentPositions == null ? null
        : ImmutableList.copyOf(componentPositions);
    for (String component : names) {
      assert component != null;
    }
  }

  /** Creates a compound identifier with no collation or component positions. */
  public SqlIdentifier(List<String> names, SqlParserPos pos) {
    this(names, null, pos, null);
  }

  /**
   * Creates a simple identifier, for example <code>foo</code>, with a
   * collation.
   */
  public SqlIdentifier(
      String name,
      SqlCollation collation,
      SqlParserPos pos) {
    this(ImmutableList.of(name), collation, pos, null);
  }

  /**
   * Creates a simple identifier, for example <code>foo</code>.
   */
  public SqlIdentifier(
      String name,
      SqlParserPos pos) {
    this(ImmutableList.of(name), null, pos, null);
  }

  //~ Methods ----------------------------------------------------------------

  public SqlKind getKind() {
    return SqlKind.IDENTIFIER;
  }

  public SqlNode clone(SqlParserPos pos) {
    return new SqlIdentifier(names, collation, pos, componentPositions);
  }

  public String toString() {
    // Dotted form, e.g. "foo.bar".
    return Util.sepList(names, ".");
  }

  /**
   * Modifies the components of this identifier and their positions.
   *
   * @param names Names of components
   * @param poses Positions of components
   */
  public void setNames(List<String> names, List<SqlParserPos> poses) {
    this.names = ImmutableList.copyOf(names);
    this.componentPositions = poses == null ? null
        : ImmutableList.copyOf(poses);
  }

  /**
   * Returns the position of the <code>i</code>th component of a compound
   * identifier, or the position of the whole identifier if that information
   * is not present.
   *
   * @param i Ordinal of component.
   * @return Position of i'th component
   */
  public SqlParserPos getComponentParserPosition(int i) {
    assert i >= 0 && i < names.size();
    if (componentPositions == null) {
      return getParserPosition();
    }
    return componentPositions.get(i);
  }

  /**
   * Copies names and components from another identifier. Does not modify the
   * cross-component parser position.
   *
   * @param other identifier from which to copy
   */
  public void assignNamesFrom(SqlIdentifier other) {
    setNames(other.names, other.componentPositions);
  }

  /**
   * Creates an identifier which contains only the <code>ordinal</code>th
   * component of this compound identifier. It will have the correct {@link
   * SqlParserPos}, provided that detailed position information is available.
   */
  public SqlIdentifier getComponent(int ordinal) {
    return new SqlIdentifier(names.get(ordinal),
        getComponentParserPosition(ordinal));
  }

  public void unparse(
      SqlWriter writer,
      int leftPrec,
      int rightPrec) {
    final SqlWriter.Frame idFrame =
        writer.startList(SqlWriter.FrameTypeEnum.IDENTIFIER);
    for (String component : names) {
      writer.sep(".");
      // A star component is printed verbatim; others may need quoting.
      if ("*".equals(component)) {
        writer.print(component);
      } else {
        writer.identifier(component);
      }
    }
    if (collation != null) {
      collation.unparse(writer, leftPrec, rightPrec);
    }
    writer.endList(idFrame);
  }

  public void validate(SqlValidator validator, SqlValidatorScope scope) {
    validator.validateIdentifier(this, scope);
  }

  public void validateExpr(SqlValidator validator, SqlValidatorScope scope) {
    // First check for builtin functions which don't have parentheses,
    // like "LOCALTIME".
    final SqlCall fnCall =
        SqlUtil.makeCall(
            validator.getOperatorTable(),
            this);
    if (fnCall != null) {
      validator.validateCall(fnCall, scope);
      return;
    }
    validator.validateIdentifier(this, scope);
  }

  public boolean equalsDeep(SqlNode node, boolean fail) {
    if (!(node instanceof SqlIdentifier)) {
      assert !fail : this + "!=" + node;
      return false;
    }
    final SqlIdentifier other = (SqlIdentifier) node;
    final int size = names.size();
    if (size != other.names.size()) {
      assert !fail : this + "!=" + node;
      return false;
    }
    // Component-wise comparison; positions and collation are ignored.
    for (int i = 0; i < size; i++) {
      if (!names.get(i).equals(other.names.get(i))) {
        assert !fail : this + "!=" + node;
        return false;
      }
    }
    return true;
  }

  public <R> R accept(SqlVisitor<R> visitor) {
    return visitor.visit(this);
  }

  public SqlCollation getCollation() {
    return collation;
  }

  /** Returns the single component of a simple identifier (asserts size 1). */
  public String getSimple() {
    assert names.size() == 1;
    return names.get(0);
  }

  /**
   * Returns whether this identifier is a star, such as "*" or "foo.bar.*".
   */
  public boolean isStar() {
    return "*".equals(Util.last(names));
  }

  /**
   * Returns whether this is a simple identifier. "FOO" is simple; "*",
   * "FOO.*" and "FOO.BAR" are not.
   */
  public boolean isSimple() {
    return names.size() == 1 && !"*".equals(names.get(0));
  }

  public SqlMonotonicity getMonotonicity(SqlValidatorScope scope) {
    // First check for builtin functions which don't have parentheses,
    // like "LOCALTIME".
    final SqlValidator validator = scope.getValidator();
    final SqlCall fnCall =
        SqlUtil.makeCall(
            validator.getOperatorTable(),
            this);
    if (fnCall != null) {
      return fnCall.getMonotonicity(scope);
    }
    final SqlIdentifier qualified = scope.fullyQualify(this);
    final SqlValidatorNamespace ns =
        SqlValidatorUtil.lookup(scope, Util.skipLast(qualified.names));
    return ns.resolve().getMonotonicity(Util.last(qualified.names));
  }
}
// End SqlIdentifier.java
| |
package com.github.kostyasha.yad;
import com.cloudbees.plugins.credentials.CredentialsScope;
import com.cloudbees.plugins.credentials.SystemCredentialsProvider;
import com.github.kostyasha.yad.commons.DockerContainerRestartPolicy;
import com.github.kostyasha.yad.commons.DockerCreateContainer;
import com.github.kostyasha.yad.commons.DockerImagePullStrategy;
import com.github.kostyasha.yad.commons.DockerPullImage;
import com.github.kostyasha.yad.commons.DockerRemoveContainer;
import com.github.kostyasha.yad.commons.DockerStopContainer;
import com.github.kostyasha.yad.launcher.DockerComputerJNLPLauncher;
import com.github.kostyasha.yad.strategy.DockerOnceRetentionStrategy;
import hudson.model.Node;
import hudson.slaves.EnvironmentVariablesNodeProperty;
import org.hamcrest.Matcher;
import org.jenkinsci.plugins.docker.commons.credentials.DockerServerCredentials;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import org.junit.rules.RuleChain;
import org.jvnet.hudson.test.JenkinsRule;
import java.util.ArrayList;
import static com.github.kostyasha.yad.commons.DockerContainerRestartPolicyName.NO;
import static java.util.Collections.singletonList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.Is.is;
/**
* Ensure that configuration is the same after configRoundTrip.
* In one large test because in Cloud it one big page.
*
* @author Kanstantsin Shautsou
*/
public class DockerCloudTest {
  // Shared Jenkins instance. Must be started before 'clouds' populates it,
  // which is guaranteed by the RuleChain declared below.
  public static JenkinsRule j = new JenkinsRule();
  public static PreparedCloud clouds = new PreparedCloud();

  /**
   * Builds a fully populated {@link DockerCloud} ("before"), saves it into
   * Jenkins, performs a web-UI configuration round trip, and captures the
   * reloaded cloud ("after") so the individual tests can compare the two.
   */
  public static class PreparedCloud extends ExternalResource {
    public DockerCloud before;
    public DockerCloud after;

    // Convenience accessors: each cloud is built with exactly one template.
    public DockerSlaveTemplate getTemplateAfter() {
      return after.getTemplates().get(0);
    }
    public DockerContainerLifecycle getLifecycleAfter() {
      return getTemplateAfter().getDockerContainerLifecycle();
    }
    public DockerSlaveTemplate getTemplateBefore() {
      return before.getTemplates().get(0);
    }
    public DockerContainerLifecycle getLifecycleBefore() {
      return getTemplateBefore().getDockerContainerLifecycle();
    }

    @Override
    public void before() throws Exception {
      // Register docker server credentials so the connector's credentialsId
      // resolves during the round trip.
      final DockerServerCredentials dockerServerCredentials = new DockerServerCredentials(
          CredentialsScope.GLOBAL, // scope
          null, // id
          "description", //desc
          "keypem",
          "certpem",
          "capem"
      );
      SystemCredentialsProvider.getInstance().getCredentials().add(dockerServerCredentials);

      // Every field below is set to a non-default value so that a lossy
      // round trip would be detected by the equality assertions.
      final EnvironmentVariablesNodeProperty.Entry entry = new EnvironmentVariablesNodeProperty.Entry("kee", "vasdfs");
      final EnvironmentVariablesNodeProperty variablesNodeProperty = new EnvironmentVariablesNodeProperty(singletonList(entry));
      final DockerConnector connector = new DockerConnector("http://sdfs.com:234");
      connector.setCredentialsId(dockerServerCredentials.getId());
      connector.setConnectTimeout(1001);
      connector.setReadTimeout(1002);
      final DockerPullImage pullImage = new DockerPullImage();
      pullImage.setCredentialsId("");
      pullImage.setPullStrategy(DockerImagePullStrategy.PULL_ALWAYS);
      final DockerComputerJNLPLauncher launcher = new DockerComputerJNLPLauncher();
      launcher.setLaunchTimeout(100);
      launcher.setUser("jenkins");
      launcher.setJenkinsUrl("http://jenkins");
      launcher.setJvmOpts("-blah");
      launcher.setSlaveOpts("-more");
      launcher.setNoCertificateCheck(true);
      launcher.setNoReconnect(false);
      final DockerCreateContainer createContainer = new DockerCreateContainer();
      createContainer.setBindAllPorts(true);
      createContainer.setBindPorts("234");
      createContainer.setCommand("sdfff");
      createContainer.setCpuShares(3);
      createContainer.setDnsHosts(singletonList("dsf"));
      createContainer.setEnvironment(singletonList("sdf"));
      createContainer.setExtraHosts(singletonList("hoststs"));
      createContainer.setHostname("hostname.local");
      createContainer.setMacAddress("33:44:33:66:66:33");
      createContainer.setMemoryLimit(33333333L);
      createContainer.setPrivileged(false);
      createContainer.setTty(false);
      createContainer.setVolumes(singletonList("ssdf:/sdfsdf/sdf"));
      createContainer.setVolumesFrom(singletonList("sdfsd:/sdfsdf"));
      createContainer.setDevices(singletonList("/dev/sdc:/dev/sdc:rw"));
      createContainer.setCpusetCpus("1");
      createContainer.setCpusetMems("2");
      createContainer.setLinksString("some");
      createContainer.setShmSize(102L);
      createContainer.setRestartPolicy(new DockerContainerRestartPolicy(NO, 0));
      createContainer.setWorkdir("workdir");
      createContainer.setUser("user");
      createContainer.setDockerLabels(singletonList("testlabel=testvalue"));
      final DockerStopContainer stopContainer = new DockerStopContainer();
      stopContainer.setTimeout(100);
      final DockerRemoveContainer removeContainer = new DockerRemoveContainer();
      removeContainer.setForce(true);
      removeContainer.setRemoveVolumes(true);
      final DockerContainerLifecycle containerLifecycle = new DockerContainerLifecycle();
      containerLifecycle.setImage("sdf/sdf:df");
      containerLifecycle.setPullImage(pullImage);
      containerLifecycle.setCreateContainer(createContainer);
      containerLifecycle.setStopContainer(stopContainer);
      containerLifecycle.setRemoveContainer(removeContainer);
      final DockerSlaveTemplate dockerSlaveTemplate = new DockerSlaveTemplate();
      dockerSlaveTemplate.setDockerContainerLifecycle(containerLifecycle);
      dockerSlaveTemplate.setLabelString("some-label");
      dockerSlaveTemplate.setLauncher(launcher);
      dockerSlaveTemplate.setMaxCapacity(233);
      dockerSlaveTemplate.setMode(Node.Mode.EXCLUSIVE);
      dockerSlaveTemplate.setNodeProperties(singletonList(variablesNodeProperty));
      dockerSlaveTemplate.setRemoteFs("/remotefs");
      dockerSlaveTemplate.setNumExecutors(1); // need to be verified with other retention strategy
      dockerSlaveTemplate.setRetentionStrategy(new DockerOnceRetentionStrategy(30));
      final ArrayList<DockerSlaveTemplate> dockerSlaveTemplates = new ArrayList<>();
      dockerSlaveTemplates.add(dockerSlaveTemplate);
      before = new DockerCloud("docker-cloud", dockerSlaveTemplates, 17, connector);

      // Persist, round-trip through the configuration page, and reload.
      j.getInstance().clouds.add(before);
      j.getInstance().save();
      j.configRoundtrip();
      after = (DockerCloud) j.getInstance().getCloud("docker-cloud");
    }
  }

  // Order matters: Jenkins must be up before the cloud is prepared.
  @ClassRule
  public static RuleChain chain = RuleChain.outerRule(j).around(clouds);

  @Test
  public void dockerCloud() {
    assertThatConfig(clouds.after,
        equalTo(clouds.before));
    assertThatConfig(clouds.after.getTemplates().size(),
        is(clouds.before.getTemplates().size()));
  }

  @Test
  public void dockerConnector() {
    assertThatConfig(clouds.after.getConnector(),
        equalTo(clouds.before.getConnector()));
  }

  @Test
  public void template() {
    assertThatConfig(clouds.getTemplateAfter(),
        equalTo(clouds.getTemplateBefore()));
  }

  @Test
  public void containerLifecycle() {
    final DockerContainerLifecycle lifecycleBefore = clouds.getTemplateBefore().getDockerContainerLifecycle();
    final DockerContainerLifecycle lifecycleAfter = clouds.getTemplateAfter().getDockerContainerLifecycle();
    assertThatConfig(lifecycleAfter,
        equalTo(lifecycleBefore));
  }

  @Test
  public void pullImage() {
    assertThatConfig(clouds.getLifecycleAfter().getPullImage(),
        equalTo(clouds.getLifecycleBefore().getPullImage()));
  }

  @Test
  public void createContainer() {
    assertThatConfig(clouds.getLifecycleAfter().getCreateContainer(),
        equalTo(clouds.getLifecycleBefore().getCreateContainer()));
  }

  @Test
  public void stopContainer() {
    assertThatConfig(clouds.getLifecycleAfter().getStopContainer(),
        equalTo(clouds.getLifecycleBefore().getStopContainer()));
  }

  @Test
  public void removeContainer() {
    assertThatConfig(clouds.getLifecycleAfter().getRemoveContainer(),
        equalTo(clouds.getLifecycleBefore().getRemoveContainer()));
  }

  /** Asserts with a message that explains why before/after must be equal. */
  public static <T> void assertThatConfig(T actual, Matcher<? super T> matcher) {
    assertThat("Page configuration must match before and after page save", actual, matcher);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.vertx.websocket;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringJoiner;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import io.vertx.core.http.ServerWebSocket;
import io.vertx.core.http.WebSocket;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Integration tests for the vertx-websocket component: producing to and
 * consuming from WebSocket endpoints, targeting specific connections via
 * connection keys, broadcasting, and dynamic endpoints.
 */
public class VertxWebsocketTest extends VertxWebSocketTestSupport {

  @Test
  public void testTextMessage() throws Exception {
    MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
    mockEndpoint.expectedBodiesReceived("Hello world");
    template.sendBody("direct:start", "world");
    mockEndpoint.assertIsSatisfied();
  }

  @Test
  public void testBinaryMessage() throws Exception {
    MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
    mockEndpoint.expectedBodiesReceived("Hello world");
    template.sendBody("direct:start", "world".getBytes(StandardCharsets.UTF_8));
    mockEndpoint.assertIsSatisfied();
  }

  @Test
  public void testStreamMessage() throws Exception {
    MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
    mockEndpoint.expectedBodiesReceived("Hello world");
    InputStream stream = new ByteArrayInputStream("world".getBytes(StandardCharsets.UTF_8));
    template.sendBody("direct:start", stream);
    mockEndpoint.assertIsSatisfied();
  }

  @Test
  public void testNullMessage() throws Exception {
    MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
    mockEndpoint.expectedBodiesReceived("Hello world");
    mockEndpoint.setResultWaitTime(500);
    template.sendBody("direct:start", null);
    // Since the message body is null, the WebSocket producer will not send payload to the WS endpoint
    mockEndpoint.assertIsNotSatisfied();
  }

  @Test
  public void testSendWithConnectionKey() throws Exception {
    // Two peers connect, but the producer targets only one connection key,
    // so exactly one result is expected.
    int expectedResultCount = 1;
    CountDownLatch latch = new CountDownLatch(expectedResultCount);
    List<String> results = new ArrayList<>();
    for (int i = 0; i < 2; i++) {
      openWebSocketConnection("localhost", port, "/test", message -> {
        // Callbacks may fire concurrently; guard the shared result list.
        synchronized (latch) {
          results.add(message);
          latch.countDown();
        }
      });
    }
    VertxWebsocketEndpoint endpoint
        = context.getEndpoint("vertx-websocket:localhost:" + port + "/test", VertxWebsocketEndpoint.class);
    Map<String, ServerWebSocket> connectedPeers = endpoint.findPeersForHostPort();
    assertEquals(2, connectedPeers.size());
    String connectionKey = connectedPeers.keySet().iterator().next();
    template.sendBodyAndHeader("vertx-websocket:localhost:" + port + "/test", "Hello World",
        VertxWebsocketConstants.CONNECTION_KEY, connectionKey);
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals(expectedResultCount, results.size());
    assertTrue(results.contains("Hello World"));
  }

  @Test
  public void testSendWithInvalidConnectionKey() throws Exception {
    MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
    mockEndpoint.expectedBodiesReceived("Hello world");
    mockEndpoint.setResultWaitTime(500);
    template.sendBodyAndHeader("direct:start", "Hello World", VertxWebsocketConstants.CONNECTION_KEY, "invalid-key");
    // The connection key does not match any connected peer, so the producer delivers nothing
    mockEndpoint.assertIsNotSatisfied();
  }

  @Test
  public void testSendWithMultipleConnectionKeys() throws Exception {
    // Five peers connect, but only three connection keys are targeted.
    int expectedResultCount = 3;
    CountDownLatch latch = new CountDownLatch(expectedResultCount);
    List<String> results = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
      openWebSocketConnection("localhost", port, "/test", message -> {
        synchronized (latch) {
          results.add(message);
          latch.countDown();
        }
      });
    }
    VertxWebsocketEndpoint endpoint
        = context.getEndpoint("vertx-websocket:localhost:" + port + "/test", VertxWebsocketEndpoint.class);
    Map<String, ServerWebSocket> connectedPeers = endpoint.findPeersForHostPort();
    assertEquals(5, connectedPeers.size());
    // Comma-joined keys select multiple target connections in one header.
    StringJoiner joiner = new StringJoiner(",");
    Iterator<String> iterator = connectedPeers.keySet().iterator();
    for (int i = 0; i < 3; i++) {
      joiner.add(iterator.next());
    }
    template.sendBodyAndHeader("vertx-websocket:localhost:" + port + "/test", "Hello World",
        VertxWebsocketConstants.CONNECTION_KEY, joiner.toString());
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals(expectedResultCount, results.size());
    results.forEach(result -> assertEquals("Hello World", result));
  }

  @Test
  public void testSendToAll() throws Exception {
    int expectedResultCount = 5;
    CountDownLatch latch = new CountDownLatch(expectedResultCount);
    List<String> results = new ArrayList<>();
    for (int i = 0; i < expectedResultCount; i++) {
      openWebSocketConnection("localhost", port, "/test", message -> {
        synchronized (latch) {
          // Suffix with the latch count so each delivery is distinguishable.
          results.add(message + " " + latch.getCount());
          latch.countDown();
        }
      });
    }
    template.sendBody("vertx-websocket:localhost:" + port + "/test?sendToAll=true", "Hello World");
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals(expectedResultCount, results.size());
    for (int i = 1; i <= expectedResultCount; i++) {
      assertTrue(results.contains("Hello World " + i));
    }
  }

  @Test
  public void testSendToAllWithHeader() throws Exception {
    // Same as testSendToAll but broadcast is requested via message header
    // instead of the endpoint URI option.
    int expectedResultCount = 5;
    CountDownLatch latch = new CountDownLatch(expectedResultCount);
    List<String> results = new ArrayList<>();
    for (int i = 0; i < expectedResultCount; i++) {
      openWebSocketConnection("localhost", port, "/test", message -> {
        synchronized (latch) {
          results.add(message + " " + latch.getCount());
          latch.countDown();
        }
      });
    }
    template.sendBodyAndHeader("vertx-websocket:localhost:" + port + "/test", "Hello World",
        VertxWebsocketConstants.SEND_TO_ALL, true);
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals(expectedResultCount, results.size());
    for (int i = 1; i <= expectedResultCount; i++) {
      assertTrue(results.contains("Hello World " + i));
    }
  }

  @Test
  public void testEchoRoute() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    List<String> results = new ArrayList<>();
    // Find the host that the "/greeting" route bound to (it uses a random
    // port, distinct from the main test port).
    VertxWebsocketComponent component = context.getComponent("vertx-websocket", VertxWebsocketComponent.class);
    Map<VertxWebsocketHostKey, VertxWebsocketHost> registry = component.getVertxHostRegistry();
    VertxWebsocketHost host = registry.values()
        .stream()
        .filter(wsHost -> wsHost.getPort() != port)
        .findFirst()
        .get();
    WebSocket webSocket = openWebSocketConnection("localhost", host.getPort(), "/greeting", message -> {
      synchronized (latch) {
        results.add(message);
        latch.countDown();
      }
    });
    webSocket.writeTextMessage("Camel");
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals(1, results.size());
    assertEquals("Hello Camel", results.get(0));
  }

  /**
   * Routes under test: a producer route, a consumer route that prefixes
   * "Hello ", and an echo route on a dynamically resolved port.
   */
  @Override
  protected RoutesBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
      @Override
      public void configure() throws Exception {
        from("direct:start")
            .toF("vertx-websocket:localhost:%d/test", port);
        fromF("vertx-websocket:localhost:%d/test", port)
            .setBody(simple("Hello ${body}"))
            .to("mock:result");
        from("vertx-websocket://greeting")
            .setBody(simple("Hello ${body}"))
            .process(new Processor() {
              @Override
              public void process(Exchange exchange) throws Exception {
                int serverPort = getVertxServerRandomPort();
                exchange.getMessage().setHeader("port", serverPort);
              }
            })
            .toD("vertx-websocket:localhost:${header.port}/greeting");
      }
    };
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.java.decompiler.modules.decompiler.stats;
import org.jetbrains.java.decompiler.util.TextBuffer;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.StatEdge;
import org.jetbrains.java.decompiler.modules.decompiler.exps.Exprent;
import java.util.ArrayList;
import java.util.List;
/**
 * Represents a loop statement in decompiled code. A single DoStatement can
 * take one of four shapes (do, do-while, while, for), tracked by
 * {@link #getLooptype()}; the init/condition/increment expressions are kept
 * in single-slot lists that start out holding null.
 */
public class DoStatement extends Statement {
  public static final int LOOP_DO = 0;
  public static final int LOOP_DOWHILE = 1;
  public static final int LOOP_WHILE = 2;
  public static final int LOOP_FOR = 3;

  private int looptype;

  // Single-slot expression holders; slot 0 is null until the corresponding
  // expression is recognised during decompilation.
  private final List<Exprent> initExprent = new ArrayList<>();
  private final List<Exprent> conditionExprent = new ArrayList<>();
  private final List<Exprent> incExprent = new ArrayList<>();

  // *****************************************************************************
  // constructors
  // *****************************************************************************

  private DoStatement() {
    type = Statement.TYPE_DO;
    looptype = LOOP_DO;
    initExprent.add(null);
    conditionExprent.add(null);
    incExprent.add(null);
  }

  private DoStatement(Statement head) {
    this();
    first = head;
    stats.addWithKey(first, first.id);
    // post is always null!
  }

  // *****************************************************************************
  // public methods
  // *****************************************************************************

  /**
   * Tests whether the given statement is the head of a loop; returns a new
   * DoStatement wrapping it if so, otherwise null.
   */
  public static Statement isHead(Statement head) {
    if (head.getLastBasicType() == LASTBASICTYPE_GENERAL && !head.isMonitorEnter()) {
      // at most one outgoing edge
      StatEdge edge = null;
      List<StatEdge> lstSuccs = head.getSuccessorEdges(STATEDGE_DIRECT_ALL);
      if (!lstSuccs.isEmpty()) {
        edge = lstSuccs.get(0);
      }
      // regular loop: the statement's only regular successor is itself
      if (edge != null && edge.getType() == StatEdge.TYPE_REGULAR && edge.getDestination() == head) {
        return new DoStatement(head);
      }
      // continues: no regular successor, but a continue edge targets the head
      if (head.type != TYPE_DO && (edge == null || edge.getType() != StatEdge.TYPE_REGULAR) &&
          head.getContinueSet().contains(head.getBasichead())) {
        return new DoStatement(head);
      }
    }
    return null;
  }

  /**
   * Renders this loop as Java source. Note: every emitted line is mirrored
   * by a tracer.incrementCurrentSourceLine() call to keep the bytecode
   * mapping in sync, so output lines and tracer increments must stay paired.
   */
  public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) {
    TextBuffer buf = new TextBuffer();
    buf.append(ExprProcessor.listToJava(varDefinitions, indent, tracer));
    if (isLabeled()) {
      buf.appendIndent(indent).append("label").append(this.id.toString()).append(":").appendLineSeparator();
      tracer.incrementCurrentSourceLine();
    }
    switch (looptype) {
      case LOOP_DO:
        // No recognised condition: render as an infinite while(true) loop.
        buf.appendIndent(indent).append("while(true) {").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        buf.append(ExprProcessor.jmpWrapper(first, indent + 1, false, tracer));
        buf.appendIndent(indent).append("}").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        break;
      case LOOP_DOWHILE:
        buf.appendIndent(indent).append("do {").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        buf.append(ExprProcessor.jmpWrapper(first, indent + 1, false, tracer));
        buf.appendIndent(indent).append("} while(").append(conditionExprent.get(0).toJava(indent, tracer)).append(");").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        break;
      case LOOP_WHILE:
        buf.appendIndent(indent).append("while(").append(conditionExprent.get(0).toJava(indent, tracer)).append(") {").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        buf.append(ExprProcessor.jmpWrapper(first, indent + 1, false, tracer));
        buf.appendIndent(indent).append("}").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        break;
      case LOOP_FOR:
        buf.appendIndent(indent).append("for(");
        // The init expression may be absent even in a for loop.
        if (initExprent.get(0) != null) {
          buf.append(initExprent.get(0).toJava(indent, tracer));
        }
        buf.append("; ")
          .append(conditionExprent.get(0).toJava(indent, tracer)).append("; ").append(incExprent.get(0).toJava(indent, tracer)).append(") {")
          .appendLineSeparator();
        tracer.incrementCurrentSourceLine();
        buf.append(ExprProcessor.jmpWrapper(first, indent + 1, false, tracer));
        buf.appendIndent(indent).append("}").appendLineSeparator();
        tracer.incrementCurrentSourceLine();
    }
    return buf;
  }

  /**
   * Returns child objects in evaluation order. The switch fall-throughs are
   * intentional: LOOP_FOR contributes its init expression and then falls
   * through to add the condition, which LOOP_WHILE also needs.
   */
  public List<Object> getSequentialObjects() {
    List<Object> lst = new ArrayList<>();
    switch (looptype) {
      case LOOP_FOR:
        if (getInitExprent() != null) {
          lst.add(getInitExprent());
        }
        // intentional fall-through: for-loops also evaluate the condition first
      case LOOP_WHILE:
        lst.add(getConditionExprent());
    }
    lst.add(first);
    switch (looptype) {
      case LOOP_DOWHILE:
        // do-while evaluates its condition after the body
        lst.add(getConditionExprent());
        break;
      case LOOP_FOR:
        lst.add(getIncExprent());
    }
    return lst;
  }

  /** Replaces oldexpr wherever it appears among init/condition/increment. */
  public void replaceExprent(Exprent oldexpr, Exprent newexpr) {
    if (initExprent.get(0) == oldexpr) {
      initExprent.set(0, newexpr);
    }
    if (conditionExprent.get(0) == oldexpr) {
      conditionExprent.set(0, newexpr);
    }
    if (incExprent.get(0) == oldexpr) {
      incExprent.set(0, newexpr);
    }
  }

  /** Returns an empty copy (no head statement, expressions reset to null). */
  public Statement getSimpleCopy() {
    return new DoStatement();
  }

  // *****************************************************************************
  // getter and setter methods
  // *****************************************************************************

  public List<Exprent> getInitExprentList() {
    return initExprent;
  }

  public List<Exprent> getConditionExprentList() {
    return conditionExprent;
  }

  public List<Exprent> getIncExprentList() {
    return incExprent;
  }

  public Exprent getConditionExprent() {
    return conditionExprent.get(0);
  }

  public void setConditionExprent(Exprent conditionExprent) {
    this.conditionExprent.set(0, conditionExprent);
  }

  public Exprent getIncExprent() {
    return incExprent.get(0);
  }

  public void setIncExprent(Exprent incExprent) {
    this.incExprent.set(0, incExprent);
  }

  public Exprent getInitExprent() {
    return initExprent.get(0);
  }

  public void setInitExprent(Exprent initExprent) {
    this.initExprent.set(0, initExprent);
  }

  public int getLooptype() {
    return looptype;
  }

  public void setLooptype(int looptype) {
    this.looptype = looptype;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.security.access.AccessControlConstants;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
HeapSize {
// Fixed per-instance heap overhead of a Mutation, used by heapSize().
public static final long MUTATION_OVERHEAD = ClassSize.align(
    // This
    ClassSize.OBJECT +
    // row + OperationWithAttributes.attributes
    2 * ClassSize.REFERENCE +
    // Timestamp
    1 * Bytes.SIZEOF_LONG +
    // durability
    ClassSize.REFERENCE +
    // familyMap reference
    ClassSize.REFERENCE +
    // familyMap TreeMap itself
    ClassSize.TREEMAP);

/**
 * The attribute for storing the list of clusters that have consumed the change.
 */
private static final String CONSUMED_CLUSTER_IDS = "_cs.id";

// Row key of this mutation; set by subclasses (Put, Delete, ...).
protected byte [] row = null;
// Timestamp applied to cells that don't carry their own.
protected long ts = HConstants.LATEST_TIMESTAMP;
// WAL durability setting; USE_DEFAULT defers to table/CF configuration.
protected Durability durability = Durability.USE_DEFAULT;

// A Map sorted by column family.
protected NavigableMap<byte [], List<Cell>> familyMap =
  new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
/** Returns a scanner over all cells in this mutation, family by family. */
@Override
public CellScanner cellScanner() {
  return CellUtil.createCellScanner(getFamilyCellMap());
}
/**
 * Returns the list of Cell objects associated with the given column family,
 * or a fresh empty list when the family has no entry yet. Note the empty
 * list is NOT inserted into the family map; the caller stores it if needed.
 *
 * @param family column family
 * @return a list of Cell objects, returns an empty list if one doesn't exist.
 */
List<Cell> getCellList(byte[] family) {
  List<Cell> cells = this.familyMap.get(family);
  return cells != null ? cells : new ArrayList<Cell>();
}
/*
 * Create a KeyValue with this objects row key and the Put identifier.
 *
 * @return a KeyValue with this objects row key and the Put identifier.
 */
KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value) {
  // Type.Put marks the cell as an insert/update (as opposed to a delete marker).
  return new KeyValue(this.row, family, qualifier, ts, KeyValue.Type.Put, value);
}
/**
 * Create a KeyValue with this objects row key and the Put identifier,
 * carrying the supplied cell tags.
 *
 * @param family column family
 * @param qualifier column qualifier
 * @param ts cell timestamp
 * @param value cell value
 * @param tags - Specify the Tags as an Array {@link KeyValue.Tag}
 * @return a KeyValue with this objects row key and the Put identifier.
 */
KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
  return new KeyValue(this.row, family, qualifier, ts, value, tags);
}
/*
 * Create a KeyValue with this objects row key and the Put identifier.
 *
 * @return a KeyValue with this objects row key and the Put identifier.
 */
KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value,
    Tag[] tags) {
  // Guards against null row/family by passing zero lengths; tags are wrapped
  // in a list only when present.
  return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length,
      family, 0, family == null ? 0 : family.length,
      qualifier, ts, KeyValue.Type.Put, value, tags != null ? Arrays.asList(tags) : null);
}
/**
 * Compile the column family (i.e. schema) information
 * into a Map. Useful for parsing and aggregation by debugging,
 * logging, and administration tools.
 * @return Map with a single "families" entry listing the family names
 */
@Override
public Map<String, Object> getFingerprint() {
  // Collect the printable family names first. Table information would be
  // useful too, but it is not stored in each Operation instance.
  List<String> familyNames = new ArrayList<String>();
  for (byte[] familyKey : this.familyMap.keySet()) {
    familyNames.add(Bytes.toStringBinary(familyKey));
  }
  Map<String, Object> fingerprint = new HashMap<String, Object>();
  fingerprint.put("families", familyNames);
  return fingerprint;
}
/**
 * Compile the details beyond the scope of getFingerprint (row, columns,
 * timestamps, etc.) into a Map along with the fingerprinted information.
 * Useful for debugging, logging, and administration tools.
 * @param maxCols a limit on the number of columns output prior to truncation
 * @return Map
 */
@Override
public Map<String, Object> toMap(int maxCols) {
  // we start with the fingerprint map and build on top of it.
  Map<String, Object> map = getFingerprint();
  // replace the fingerprint's simple list of families with a
  // map from column families to lists of qualifiers and kv details
  Map<String, List<Map<String, Object>>> columns =
    new HashMap<String, List<Map<String, Object>>>();
  map.put("families", columns);
  map.put("row", Bytes.toStringBinary(this.row));
  int colCount = 0;
  // iterate through all column families affected
  for (Map.Entry<byte [], List<Cell>> entry : this.familyMap.entrySet()) {
    // map from this family to details for each cell affected within the family
    List<Map<String, Object>> qualifierDetails = new ArrayList<Map<String, Object>>();
    columns.put(Bytes.toStringBinary(entry.getKey()), qualifierDetails);
    colCount += entry.getValue().size();
    // totalColumns still counts every cell even when details are truncated
    if (maxCols <= 0) {
      continue;
    }
    // add details for each cell, up to the maxCols budget shared across
    // all families (maxCols is decremented once per emitted cell)
    for (Cell cell: entry.getValue()) {
      if (--maxCols <= 0 ) {
        continue;
      }
      // KeyValue v1 expectation. Cast for now until we go all Cell all the time.
      KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
      Map<String, Object> kvMap = kv.toStringMap();
      // row and family information are already available in the bigger map
      kvMap.remove("row");
      kvMap.remove("family");
      qualifierDetails.add(kvMap);
    }
  }
  map.put("totalColumns", colCount);
  // add the id if set
  if (getId() != null) {
    map.put("id", getId());
  }
  return map;
}
/**
 * @deprecated Use {@link #getDurability()} instead.
 * @return true if edits should be applied to WAL, false if not
 */
@Deprecated
public boolean getWriteToWAL() {
  // The WAL is bypassed only for SKIP_WAL; every other durability level
  // (including USE_DEFAULT) writes to the WAL. The previous implementation
  // used '==' and therefore returned the inverse of the documented value,
  // and contradicted setWriteToWAL(true) -> USE_DEFAULT.
  return this.durability != Durability.SKIP_WAL;
}
/**
 * Set whether this Delete should be written to the WAL or not.
 * Not writing the WAL means you may lose edits on server crash.
 * This method will reset any changes made via {@link #setDurability(Durability)}
 * @param write true if edits should be written to WAL, false if not
 * @deprecated Use {@link #setDurability(Durability)} instead.
 */
@Deprecated
public void setWriteToWAL(boolean write) {
  if (write) {
    // USE_DEFAULT defers the actual durability choice to configuration.
    setDurability(Durability.USE_DEFAULT);
  } else {
    setDurability(Durability.SKIP_WAL);
  }
}
/**
 * Set the durability for this mutation
 * @param d durability level to use when writing this mutation
 */
public void setDurability(Durability d) {
  this.durability = d;
}
/** Get the current durability */
public Durability getDurability() {
  return this.durability;
}
/**
 * Method for retrieving the put's familyMap
 * @return familyMap (the live internal map, not a copy — mutations to it
 *         affect this Mutation)
 */
public NavigableMap<byte [], List<Cell>> getFamilyCellMap() {
  return this.familyMap;
}
/**
 * Method for setting the put's familyMap
 * The map is adopted directly (no defensive copy).
 */
public void setFamilyCellMap(NavigableMap<byte [], List<Cell>> map) {
  // TODO: Shut this down or move it up to be a Constructor. Get new object rather than change
  // this internal data member.
  this.familyMap = map;
}
/**
 * Method for retrieving the put's familyMap that is deprecated and inefficient:
 * the whole Cell map is converted into a fresh KeyValue map on every call.
 * @return the map
 * @deprecated use {@link #getFamilyCellMap()} instead.
 */
@Deprecated
public Map<byte [], List<KeyValue>> getFamilyMap() {
  TreeMap<byte[], List<KeyValue>> converted =
    new TreeMap<byte[], List<KeyValue>>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], List<Cell>> entry : familyMap.entrySet()) {
    List<Cell> cells = entry.getValue();
    List<KeyValue> keyValues = new ArrayList<KeyValue>(cells.size());
    for (Cell cell : cells) {
      keyValues.add(KeyValueUtil.ensureKeyValue(cell));
    }
    converted.put(entry.getKey(), keyValues);
  }
  return converted;
}
/**
 * Method for setting the put's familyMap that is deprecated and inefficient:
 * the supplied KeyValue map is copied into a new Cell map.
 * @deprecated use {@link #setFamilyCellMap(NavigableMap)} instead.
 */
@Deprecated
public void setFamilyMap(NavigableMap<byte [], List<KeyValue>> map) {
  TreeMap<byte[], List<Cell>> converted =
    new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], List<KeyValue>> entry : map.entrySet()) {
    // KeyValue implements Cell, so a plain copy-constructor list suffices.
    converted.put(entry.getKey(), new ArrayList<Cell>(entry.getValue()));
  }
  this.familyMap = converted;
}
/**
 * Method to check if the familyMap is empty
 * @return true if empty, false otherwise
 */
public boolean isEmpty() {
  return familyMap.isEmpty();
}
/**
 * Method for retrieving the mutation's row key
 * @return row
 */
@Override
public byte [] getRow() {
  return this.row;
}
/** Orders mutations by their row keys (lexicographic byte comparison). */
@Override
public int compareTo(final Row d) {
  return Bytes.compareTo(this.getRow(), d.getRow());
}
/**
 * Method for retrieving the timestamp
 * @return timestamp (LATEST_TIMESTAMP unless explicitly set)
 */
public long getTimeStamp() {
  return this.ts;
}
/**
 * Marks that the clusters with the given clusterIds have consumed the mutation
 * @param clusterIds of the clusters that have consumed the mutation
 */
public void setClusterIds(List<UUID> clusterIds) {
  // Serialized as: count, then (msb, lsb) long pair per UUID.
  // Mirrors the read side in getClusterIds().
  ByteArrayDataOutput out = ByteStreams.newDataOutput();
  out.writeInt(clusterIds.size());
  for (UUID clusterId : clusterIds) {
    out.writeLong(clusterId.getMostSignificantBits());
    out.writeLong(clusterId.getLeastSignificantBits());
  }
  setAttribute(CONSUMED_CLUSTER_IDS, out.toByteArray());
}
/**
 * @return the set of clusterIds that have consumed the mutation; empty if
 *         the attribute has never been set
 */
public List<UUID> getClusterIds() {
  List<UUID> ids = new ArrayList<UUID>();
  byte[] serialized = getAttribute(CONSUMED_CLUSTER_IDS);
  if (serialized != null) {
    // Wire format written by setClusterIds(): count, then (msb, lsb) pairs.
    ByteArrayDataInput input = ByteStreams.newDataInput(serialized);
    int count = input.readInt();
    for (int i = 0; i < count; i++) {
      long msb = input.readLong();
      long lsb = input.readLong();
      ids.add(new UUID(msb, lsb));
    }
  }
  return ids;
}
/**
 * Sets the visibility expression associated with cells in this Mutation.
 * It is illegal to set <code>CellVisibility</code> on <code>Delete</code> mutation.
 * @param expression the visibility expression to attach
 */
public void setCellVisibility(CellVisibility expression) {
  byte[] serialized = ProtobufUtil.toCellVisibility(expression).toByteArray();
  this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, serialized);
}
/**
 * @return CellVisibility associated with cells in this Mutation, or null when none was set
 * @throws DeserializationException if the stored attribute cannot be parsed
 */
public CellVisibility getCellVisibility() throws DeserializationException {
  byte[] serialized = this.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY);
  if (serialized != null) {
    return ProtobufUtil.toCellVisibility(serialized);
  }
  return null;
}
/**
 * Number of KeyValues carried by this Mutation.
 * @return the total number of KeyValues across all families
 */
public int size() {
  int total = 0;
  for (Map.Entry<byte[], List<Cell>> family : this.familyMap.entrySet()) {
    total += family.getValue().size();
  }
  return total;
}
/**
 * @return the number of different families touched by this mutation
 */
public int numFamilies() {
  return this.familyMap.size();
}
/**
 * @return Calculate what Mutation adds to class heap size.
 */
@Override
public long heapSize() {
  long total = MUTATION_OVERHEAD;
  // the row key array itself
  total += ClassSize.align(ClassSize.ARRAY + this.row.length);
  // one map entry per family
  total += ClassSize.align(this.familyMap.size() * ClassSize.MAP_ENTRY);
  for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
    // key side: the family name array
    total += ClassSize.align(ClassSize.ARRAY + entry.getKey().length);
    // Value side: the ArrayList shell plus its backing reference array.
    // The JVM may share references between identical values, but this
    // accounting matches what SizeOf reports at the moment.
    List<Cell> cells = entry.getValue();
    total += ClassSize.align(ClassSize.ARRAYLIST);
    total += ClassSize.align(ClassSize.ARRAY + cells.size() * ClassSize.REFERENCE);
    for (Cell cell : cells) {
      total += KeyValueUtil.ensureKeyValue(cell).heapSize();
    }
  }
  total += getAttributeSize();
  total += extraHeapSize();
  return ClassSize.align(total);
}
/**
 * @return The serialized ACL for this operation, or null if none
 */
public byte[] getACL() {
  byte[] acl = getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL);
  return acl;
}
/**
 * Grants the given permissions to a single user on this operation.
 * @param user User short name
 * @param perms Permissions for the user
 */
public void setACL(String user, Permission perms) {
  byte[] serialized = ProtobufUtil.toUsersAndPermissions(user, perms).toByteArray();
  setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL, serialized);
}
/**
 * Grants permissions to one or more users on this operation.
 * @param perms A map of permissions for a user or users
 */
public void setACL(Map<String, Permission> perms) {
  ListMultimap<String, Permission> multimap = ArrayListMultimap.create();
  for (Map.Entry<String, Permission> grant : perms.entrySet()) {
    multimap.put(grant.getKey(), grant.getValue());
  }
  byte[] serialized = ProtobufUtil.toUsersAndPermissions(multimap).toByteArray();
  setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL, serialized);
}
/**
 * @return true if ACLs should be evaluated on the cell level first
 */
public boolean getACLStrategy() {
  byte[] strategy = getAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL_STRATEGY);
  return strategy != null
      && Bytes.equals(strategy, AccessControlConstants.OP_ATTRIBUTE_ACL_STRATEGY_CELL_FIRST);
}
/**
 * @param cellFirstStrategy true if ACLs should be evaluated on the cell
 * level first, false if ACL should first be checked at the CF and table
 * levels
 */
public void setACLStrategy(boolean cellFirstStrategy) {
  if (!cellFirstStrategy) {
    // only the cell-first strategy is recorded as an attribute
    return;
  }
  setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL_STRATEGY,
      AccessControlConstants.OP_ATTRIBUTE_ACL_STRATEGY_CELL_FIRST);
}
/**
 * Subclasses should override this method to add the heap size of their own fields.
 * @return the heap size to add (will be aligned).
 */
protected long extraHeapSize() {
  // base class contributes nothing beyond MUTATION_OVERHEAD
  return 0;
}
/**
 * @param row Row to check
 * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
 * > {@link HConstants#MAX_ROW_LENGTH}
 * @return <code>row</code>
 */
static byte [] checkRow(final byte [] row) {
  // treat a null row as zero length; the 3-arg overload reports it as null
  final int length = (row == null) ? 0 : row.length;
  return checkRow(row, 0, length);
}
/**
 * @param row Row to check
 * @param offset offset into the buffer where the row starts
 * @param length number of row bytes to validate
 * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
 * > {@link HConstants#MAX_ROW_LENGTH}
 * @return <code>row</code>
 */
static byte [] checkRow(final byte [] row, final int offset, final int length) {
  if (row == null) {
    throw new IllegalArgumentException("Row buffer is null");
  }
  if (length == 0) {
    throw new IllegalArgumentException("Row length is 0");
  }
  if (length <= HConstants.MAX_ROW_LENGTH) {
    return row;
  }
  throw new IllegalArgumentException("Row length " + length + " is > " +
      HConstants.MAX_ROW_LENGTH);
}
/**
 * Validates a row held in a ByteBuffer: non-null, non-empty, and at most
 * {@link HConstants#MAX_ROW_LENGTH} remaining bytes.
 * @param row buffer positioned at the row bytes
 * @throws IllegalArgumentException when the buffer is null, empty, or too long
 */
static void checkRow(ByteBuffer row) {
  if (row == null) {
    throw new IllegalArgumentException("Row buffer is null");
  }
  final int remaining = row.remaining();
  if (remaining == 0) {
    throw new IllegalArgumentException("Row length is 0");
  }
  if (remaining > HConstants.MAX_ROW_LENGTH) {
    throw new IllegalArgumentException("Row length " + remaining + " is > " +
        HConstants.MAX_ROW_LENGTH);
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.jdbc.catalog;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.types.logical.DecimalType;
import com.opentable.db.postgres.junit.EmbeddedPostgresRules;
import com.opentable.db.postgres.junit.SingleInstancePostgresRule;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * Test base for {@link PostgresCatalog}.
 *
 * <p>Starts an embedded Postgres instance once for the whole test class and
 * provisions the databases, schemas, and tables (simple, primitive, array, and
 * serial column types) that catalog tests rely on. Table names and credentials
 * are exposed as shared constants.
 */
public class PostgresCatalogTestBase {

	@Rule
	public ExpectedException exception = ExpectedException.none();

	@ClassRule
	public static SingleInstancePostgresRule pg = EmbeddedPostgresRules.singleInstance();

	protected static final String TEST_CATALOG_NAME = "mypg";
	protected static final String TEST_USERNAME = "postgres";
	protected static final String TEST_PWD = "postgres";
	protected static final String TEST_DB = "test";
	protected static final String TEST_SCHEMA = "test_schema";
	protected static final String TABLE1 = "t1";
	protected static final String TABLE2 = "t2";
	protected static final String TABLE3 = "t3";
	protected static final String TABLE4 = "t4";
	protected static final String TABLE5 = "t5";
	protected static final String TABLE_PRIMITIVE_TYPE = "primitive_table";
	protected static final String TABLE_PRIMITIVE_TYPE2 = "primitive_table2";
	protected static final String TABLE_ARRAY_TYPE = "array_table";
	protected static final String TABLE_SERIAL_TYPE = "serial_table";

	// base JDBC url of the embedded instance, ending with '/', without a database name
	protected static String baseUrl;
	protected static PostgresCatalog catalog;

	/**
	 * Creates the catalog under test plus all fixture databases, schemas, and
	 * tables, then seeds one row into each table the tests read data from.
	 */
	@BeforeClass
	public static void init() throws SQLException {
		// jdbc:postgresql://localhost:50807/postgres?user=postgres
		String embeddedJdbcUrl = pg.getEmbeddedPostgres().getJdbcUrl(TEST_USERNAME, TEST_PWD);
		// jdbc:postgresql://localhost:50807/
		baseUrl = embeddedJdbcUrl.substring(0, embeddedJdbcUrl.lastIndexOf("/") + 1);

		catalog = new PostgresCatalog(TEST_CATALOG_NAME, PostgresCatalog.DEFAULT_DATABASE, TEST_USERNAME, TEST_PWD, baseUrl);

		// create test database and schema
		createDatabase(TEST_DB);
		createSchema(TEST_DB, TEST_SCHEMA);

		// create test tables
		// table: postgres.public.t1
		// table: postgres.public.t4
		// table: postgres.public.t5
		createTable(PostgresTablePath.fromFlinkTableName(TABLE1), getSimpleTable().pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE4), getSimpleTable().pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE5), getSimpleTable().pgSchemaSql);

		// table: test.public.t2
		// table: test.test_schema.t3
		// NOTE(review): the dt/dt2 entries mentioned in the original comment are
		// not created anywhere below — the comment looked stale.
		createTable(TEST_DB, PostgresTablePath.fromFlinkTableName(TABLE2), getSimpleTable().pgSchemaSql);
		createTable(TEST_DB, new PostgresTablePath(TEST_SCHEMA, TABLE3), getSimpleTable().pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE_PRIMITIVE_TYPE), getPrimitiveTable().pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE_PRIMITIVE_TYPE2), getPrimitiveTable("test_pk2").pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE_ARRAY_TYPE), getArrayTable().pgSchemaSql);
		createTable(PostgresTablePath.fromFlinkTableName(TABLE_SERIAL_TYPE), getSerialTable().pgSchemaSql);

		executeSQL(PostgresCatalog.DEFAULT_DATABASE, String.format("insert into public.%s values (%s);", TABLE1, getSimpleTable().values));
		executeSQL(PostgresCatalog.DEFAULT_DATABASE, String.format("insert into %s values (%s);", TABLE_PRIMITIVE_TYPE, getPrimitiveTable().values));
		executeSQL(PostgresCatalog.DEFAULT_DATABASE, String.format("insert into %s values (%s);", TABLE_ARRAY_TYPE, getArrayTable().values));
		executeSQL(PostgresCatalog.DEFAULT_DATABASE, String.format("insert into %s values (%s);", TABLE_SERIAL_TYPE, getSerialTable().values));
	}

	/** Creates a table in the default database. */
	public static void createTable(PostgresTablePath tablePath, String tableSchemaSql) throws SQLException {
		executeSQL(PostgresCatalog.DEFAULT_DATABASE, String.format("CREATE TABLE %s(%s);", tablePath.getFullPath(), tableSchemaSql));
	}

	/** Creates a table in the given database. */
	public static void createTable(String db, PostgresTablePath tablePath, String tableSchemaSql) throws SQLException {
		executeSQL(db, String.format("CREATE TABLE %s(%s);", tablePath.getFullPath(), tableSchemaSql));
	}

	/** Creates a schema in the given database. */
	public static void createSchema(String db, String schema) throws SQLException {
		executeSQL(db, String.format("CREATE SCHEMA %s", schema));
	}

	/** Creates a database on the embedded instance. */
	public static void createDatabase(String database) throws SQLException {
		executeSQL(String.format("CREATE DATABASE %s;", database));
	}

	/** Executes a statement against the instance's default database. */
	public static void executeSQL(String sql) throws SQLException {
		executeSQL("", sql);
	}

	/**
	 * Executes a single SQL statement against the given database.
	 *
	 * <p>Fix: the previous version caught {@code SQLException} only to rethrow
	 * it unchanged; try-with-resources already closes the statement and
	 * connection on failure, so the redundant catch block was removed.
	 *
	 * @param db database name appended to {@link #baseUrl}; an empty string targets the default
	 * @param sql the statement to execute
	 * @throws SQLException if connecting or executing fails
	 */
	public static void executeSQL(String db, String sql) throws SQLException {
		try (Connection conn = DriverManager.getConnection(baseUrl + db, TEST_USERNAME, TEST_PWD);
				Statement statement = conn.createStatement()) {
			statement.executeUpdate(sql);
		}
	}

	/**
	 * Object holding a Flink schema, the corresponding Postgres DDL column list,
	 * and a matching SQL values row.
	 */
	public static class TestTable {
		TableSchema schema;
		String pgSchemaSql;
		String values;

		public TestTable(TableSchema schema, String pgSchemaSql, String values) {
			this.schema = schema;
			this.pgSchemaSql = pgSchemaSql;
			this.values = values;
		}
	}

	/** Single-column table used by most listing tests. */
	public static TestTable getSimpleTable() {
		return new TestTable(
			TableSchema.builder()
				.field("id", DataTypes.INT())
				.build(),
			"id integer",
			"1"
		);
	}

	// Postgres doesn't support reusing the same primary key name across different
	// tables, so the table is parameterized on the constraint name.
	public static TestTable getPrimitiveTable() {
		return getPrimitiveTable("test_pk");
	}

	// TODO: add back timestamptz and time types.
	// Flink currently doesn't support converting time's precision, with the following error
	// TableException: Unsupported conversion from data type 'TIME(6)' (conversion class: java.sql.Time)
	// to type information. Only data types that originated from type information fully support a reverse conversion.
	public static TestTable getPrimitiveTable(String primaryKeyName) {
		return new TestTable(
			TableSchema.builder()
				.field("int", DataTypes.INT().notNull())
				.field("bytea", DataTypes.BYTES())
				.field("short", DataTypes.SMALLINT().notNull())
				.field("long", DataTypes.BIGINT())
				.field("real", DataTypes.FLOAT())
				.field("double_precision", DataTypes.DOUBLE())
				.field("numeric", DataTypes.DECIMAL(10, 5))
				.field("decimal", DataTypes.DECIMAL(10, 1))
				.field("boolean", DataTypes.BOOLEAN())
				.field("text", DataTypes.STRING())
				.field("char", DataTypes.CHAR(1))
				.field("character", DataTypes.CHAR(3))
				.field("character_varying", DataTypes.VARCHAR(20))
				.field("timestamp", DataTypes.TIMESTAMP(5))
//				.field("timestamptz", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(4))
				.field("date", DataTypes.DATE())
				.field("time", DataTypes.TIME(0))
				.field("default_numeric", DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18))
				.primaryKey(primaryKeyName, new String[]{"short", "int"})
				.build(),
			"int integer, " +
				"bytea bytea, " +
				"short smallint, " +
				"long bigint, " +
				"real real, " +
				"double_precision double precision, " +
				"numeric numeric(10, 5), " +
				"decimal decimal(10, 1), " +
				"boolean boolean, " +
				"text text, " +
				"char char, " +
				"character character(3), " +
				"character_varying character varying(20), " +
				"timestamp timestamp(5), " +
//				"timestamptz timestamptz(4), " +
				"date date," +
				"time time(0), " +
				"default_numeric numeric, " +
				"CONSTRAINT " + primaryKeyName + " PRIMARY KEY (short, int)",
			"1," +
				"'2'," +
				"3," +
				"4," +
				"5.5," +
				"6.6," +
				"7.7," +
				"8.8," +
				"true," +
				"'a'," +
				"'b'," +
				"'c'," +
				"'d'," +
				"'2016-06-22 19:10:25'," +
//				"'2006-06-22 19:10:25'," +
				"'2015-01-01'," +
				"'00:51:02.746572', " +
				"500"
		);
	}

	// TODO: add back timestamptz once blink planner supports timestamp with timezone
	public static TestTable getArrayTable() {
		return new TestTable(
			TableSchema.builder()
				.field("int_arr", DataTypes.ARRAY(DataTypes.INT()))
				.field("bytea_arr", DataTypes.ARRAY(DataTypes.BYTES()))
				.field("short_arr", DataTypes.ARRAY(DataTypes.SMALLINT()))
				.field("long_arr", DataTypes.ARRAY(DataTypes.BIGINT()))
				.field("real_arr", DataTypes.ARRAY(DataTypes.FLOAT()))
				.field("double_precision_arr", DataTypes.ARRAY(DataTypes.DOUBLE()))
				.field("numeric_arr", DataTypes.ARRAY(DataTypes.DECIMAL(10, 5)))
				.field("numeric_arr_default", DataTypes.ARRAY(DataTypes.DECIMAL(DecimalType.MAX_PRECISION, 18)))
				.field("decimal_arr", DataTypes.ARRAY(DataTypes.DECIMAL(10, 2)))
				.field("boolean_arr", DataTypes.ARRAY(DataTypes.BOOLEAN()))
				.field("text_arr", DataTypes.ARRAY(DataTypes.STRING()))
				.field("char_arr", DataTypes.ARRAY(DataTypes.CHAR(1)))
				.field("character_arr", DataTypes.ARRAY(DataTypes.CHAR(3)))
				.field("character_varying_arr", DataTypes.ARRAY(DataTypes.VARCHAR(20)))
				.field("timestamp_arr", DataTypes.ARRAY(DataTypes.TIMESTAMP(5)))
//				.field("timestamptz_arr", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(4)))
				.field("date_arr", DataTypes.ARRAY(DataTypes.DATE()))
				.field("time_arr", DataTypes.ARRAY(DataTypes.TIME(0)))
				.build(),
			"int_arr integer[], " +
				"bytea_arr bytea[], " +
				"short_arr smallint[], " +
				"long_arr bigint[], " +
				"real_arr real[], " +
				"double_precision_arr double precision[], " +
				"numeric_arr numeric(10, 5)[], " +
				"numeric_arr_default numeric[], " +
				"decimal_arr decimal(10,2)[], " +
				"boolean_arr boolean[], " +
				"text_arr text[], " +
				"char_arr char[], " +
				"character_arr character(3)[], " +
				"character_varying_arr character varying(20)[], " +
				"timestamp_arr timestamp(5)[], " +
//				"timestamptz_arr timestamptz(4)[], " +
				"date_arr date[], " +
				"time_arr time(0)[]",
			String.format("'{1,2,3}'," +
				"'{2,3,4}'," +
				"'{3,4,5}'," +
				"'{4,5,6}'," +
				"'{5.5,6.6,7.7}'," +
				"'{6.6,7.7,8.8}'," +
				"'{7.7,8.8,9.9}'," +
				"'{8.8,9.9,10.10}'," +
				"'{9.9,10.10,11.11}'," +
				"'{true,false,true}'," +
				"'{a,b,c}'," +
				"'{b,c,d}'," +
				"'{b,c,d}'," +
				"'{b,c,d}'," +
				"'{\"2016-06-22 19:10:25\", \"2019-06-22 19:10:25\"}'," +
//				"'{\"2006-06-22 19:10:25\", \"2009-06-22 19:10:25\"}'," +
				"'{\"2015-01-01\", \"2020-01-01\"}'," +
				"'{\"00:51:02.746572\", \"00:59:02.746572\"}'"
			));
	}

	/** Table exercising the smallserial/serial/bigserial aliases. */
	public static TestTable getSerialTable() {
		return new TestTable(
			TableSchema.builder()
				// serial fields are returned as not null by ResultSetMetaData.columnNoNulls
				.field("f0", DataTypes.SMALLINT().notNull())
				.field("f1", DataTypes.INT().notNull())
				.field("f2", DataTypes.SMALLINT().notNull())
				.field("f3", DataTypes.INT().notNull())
				.field("f4", DataTypes.BIGINT().notNull())
				.field("f5", DataTypes.BIGINT().notNull())
				.build(),
			"f0 smallserial, " +
				"f1 serial, " +
				"f2 serial2, " +
				"f3 serial4, " +
				"f4 serial8, " +
				"f5 bigserial",
			"32767," +
				"2147483647," +
				"32767," +
				"2147483647," +
				"9223372036854775807," +
				"9223372036854775807"
		);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.test.spec.javaspace.conformance.snapshot;
import java.util.logging.Level;
// net.jini
import net.jini.core.entry.Entry;
import net.jini.core.transaction.Transaction;
// com.sun.jini
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.QAConfig;
// com.sun.jini.qa
import com.sun.jini.test.spec.javaspace.conformance.SimpleEntry;
/**
 * SnapshotTransactionReadIfExistsTakeTest asserts that when read by the
 * readIfExists method, an entry may be read in any other transaction to
 * which the entry is visible, but cannot be taken in another transaction.
 *
 * <p>It tests this statement for snapshots: every space operation below is
 * exercised both with the original entry and with its snapshot.
 */
public class SnapshotTransactionReadIfExistsTakeTest
        extends SnapshotAbstractTestBase {

    /**
     * Sets up the testing environment.
     *
     * @param config QAConfig from the runner for setup.
     */
    public void setup(QAConfig config) throws Exception {

        // mandatory call to parent
        super.setup(config);

        // get an instance of Transaction Manager
        mgr = getTxnManager();
    }

    /**
     * This method asserts that when read by readIfExists
     * method, an entry may be read in any other transaction to which the entry
     * is visible, but cannot be taken in another transaction.
     *
     * It tests this statement for snapshots.
     *
     * <P>Notes:<BR>For more information see the JavaSpaces specification
     * sections 2.6, 3.1.</P>
     *
     * @throws TestException when any observed outcome contradicts the spec
     */
    public void run() throws Exception {
        SimpleEntry sampleEntry = new SimpleEntry("TestEntry #1", 1);
        Entry snapshot;
        SimpleEntry result;
        Transaction txn1;
        Transaction txn2;

        // first check that space is empty
        if (!checkSpace(space)) {
            throw new TestException(
                    "Space is not empty in the beginning.");
        }

        // create snapshot
        snapshot = space.snapshot(sampleEntry);

        // write sample entry to the space
        space.write(sampleEntry, null, leaseForeverTime);

        // create two non null transactions
        txn1 = getTransaction();
        txn2 = getTransaction();

        /*
         * readIfExists written entry from the space using its snapshot
         * within 1-st transaction; this locks the entry under txn1
         */
        space.readIfExists(snapshot, txn1, checkTime);

        // check that we can read the entry within 2-nd transaction
        result = (SimpleEntry) space.read(sampleEntry, txn2, checkTime);

        if (result == null) {
            throw new TestException(
                    "performed read with template " + sampleEntry
                    + " within 2-nd transaction while reading it within"
                    + " 1-st one, expected non null but read null result.");
        }
        logDebugText("Entry can be read in 2-nd transaction.");

        /*
         * check that we can read the entry within 2-nd transaction
         * using its snapshot
         */
        result = (SimpleEntry) space.read(snapshot, txn2, checkTime);

        if (result == null) {
            throw new TestException(
                    "performed read with template " + sampleEntry
                    + " within 2-nd transaction using it's snapshot"
                    + " while reading it within 1-st one, expected"
                    + " non null but read null result.");
        }
        logDebugText("Entry can be read using it's snapshot within 2-nd"
                + " transaction.");

        // check that we can read the entry outside both transactions
        result = (SimpleEntry) space.read(sampleEntry, null, checkTime);

        if (result == null) {
            throw new TestException(
                    "performed read with template " + sampleEntry
                    + " outside both transactions while reading it within"
                    + " 1-st one, expected non null but read null result.");
        }
        logDebugText("Entry can be read outside both transactions.");

        /*
         * check that we can read the entry using its snapshot
         * outside both transactions
         */
        result = (SimpleEntry) space.read(snapshot, null, checkTime);

        if (result == null) {
            throw new TestException(
                    "performed read with template " + sampleEntry
                    + " outside both transactions using it's snapshot"
                    + " while reading it within 1-st one,"
                    + " expected non null but read null result.");
        }
        logDebugText("Entry can be read outside both transactions"
                + " using it's snapshot.");

        // check that we can not take the entry within another transaction
        result = (SimpleEntry) space.take(sampleEntry, txn2, checkTime);

        if (result != null) {
            throw new TestException(
                    "performed take with template " + sampleEntry
                    + " within 2-nd transaction while reading it within"
                    + " 1-st one, expected null but took " + result);
        }
        logDebugText("Entry can't be taken in 2-nd transaction.");

        /*
         * check that we can not take the entry within another transaction
         * using its snapshot
         */
        result = (SimpleEntry) space.take(snapshot, txn2, checkTime);

        if (result != null) {
            throw new TestException(
                    "performed take with template " + sampleEntry
                    + " within 2-nd transaction using it's snapshot"
                    + " while reading it within"
                    + " 1-st one, expected null but took " + result);
        }
        logDebugText("Entry can't be taken in 2-nd transaction using"
                + " it's snapshot.");

        // check that we can not take the entry outside both transactions
        result = (SimpleEntry) space.take(sampleEntry, null, checkTime);

        if (result != null) {
            throw new TestException(
                    "performed take with template " + sampleEntry
                    + " outside both transactions while reading it within "
                    + " 1-st one, expected null but took " + result);
        }
        logDebugText("Entry can't be taken outside both transactions.");

        /*
         * check that we can not take the entry outside both transactions
         * using its snapshot
         */
        result = (SimpleEntry) space.take(snapshot, null, checkTime);

        if (result != null) {
            throw new TestException(
                    "performed take with template " + sampleEntry
                    + " outside both transactions using it's snapshot"
                    + " while reading it within "
                    + " 1-st one, expected null but took " + result);
        }
        logDebugText("Entry can't be taken outside both transactions"
                + " using it's snapshot.");

        // commit both transactions to release the locks
        txnCommit(txn1);
        txnCommit(txn2);
    }
}
| |
/* Generated By:JJTree&JavaCC: Do not edit this line. QueryParserTokenManager.java */
package wdb.parser;
import wdb.metadata.*;
import java.io.*;
public class QueryParserTokenManager implements QueryParserConstants
{
// Sink for the token manager's debug output (JavaCC-generated; do not hand-edit).
public static java.io.PrintStream debugStream = System.out;
// Redirects the token manager's debug output to ds.
public static void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
/*
 * Records a literal-token match of the given kind ending at pos, and returns
 * the next scan position.
 */
static private final int jjStopAtPos(int pos, int kind)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   return pos + 1;
}
/*
 * First step of the string-literal DFA: dispatches on the first character of
 * the token. Single-character tokens record their kind in jjmatchedKind
 * immediately; multi-character literals forward a bitmask (one bit per viable
 * token kind) to jjMoveStringLiteralDfa1_0. Upper- and lower-case letters
 * share the same masks, so keywords are matched case-insensitively.
 * Generated by JavaCC; do not hand-edit.
 */
static private final int jjMoveStringLiteralDfa0_0()
{
   switch(curChar)
   {
      case 9:
         jjmatchedKind = 2;
         return jjMoveNfa_0(3, 0);
      case 10:
         jjmatchedKind = 3;
         return jjMoveNfa_0(3, 0);
      case 13:
         jjmatchedKind = 4;
         return jjMoveNfa_0(3, 0);
      case 32:
         jjmatchedKind = 1;
         return jjMoveNfa_0(3, 0);
      case 40:
         jjmatchedKind = 6;
         return jjMoveNfa_0(3, 0);
      case 41:
         jjmatchedKind = 7;
         return jjMoveNfa_0(3, 0);
      case 42:
         jjmatchedKind = 45;
         return jjMoveNfa_0(3, 0);
      case 44:
         jjmatchedKind = 12;
         return jjMoveNfa_0(3, 0);
      case 58:
         jjmatchedKind = 10;
         return jjMoveStringLiteralDfa1_0(0x400000L);
      case 59:
         jjmatchedKind = 11;
         return jjMoveNfa_0(3, 0);
      case 60:
         jjmatchedKind = 33;
         return jjMoveStringLiteralDfa1_0(0x3000000000L);
      case 61:
         jjmatchedKind = 34;
         return jjMoveNfa_0(3, 0);
      case 62:
         jjmatchedKind = 32;
         return jjMoveStringLiteralDfa1_0(0x800000000L);
      case 65:
         return jjMoveStringLiteralDfa1_0(0x80000020000000L);
      case 66:
         return jjMoveStringLiteralDfa1_0(0x80000000000L);
      case 67:
         return jjMoveStringLiteralDfa1_0(0x40000002000L);
      case 68:
         return jjMoveStringLiteralDfa1_0(0x4000000000000L);
      case 69:
         return jjMoveStringLiteralDfa1_0(0x100000L);
      case 70:
         return jjMoveStringLiteralDfa1_0(0x8010000000L);
      case 73:
         return jjMoveStringLiteralDfa1_0(0x1008100000b0000L);
      case 76:
         return jjMoveStringLiteralDfa1_0(0x40000000000000L);
      case 77:
         return jjMoveStringLiteralDfa1_0(0x28000000000000L);
      case 78:
         return jjMoveStringLiteralDfa1_0(0x80040000L);
      case 79:
         return jjMoveStringLiteralDfa1_0(0x1000040008000L);
      case 81:
         return jjMoveStringLiteralDfa1_0(0x10000000000000L);
      case 82:
         return jjMoveStringLiteralDfa1_0(0x20006000000L);
      case 83:
         return jjMoveStringLiteralDfa1_0(0x500000004000L);
      case 84:
         return jjMoveStringLiteralDfa1_0(0x4000000000L);
      case 85:
         return jjMoveStringLiteralDfa1_0(0x2000000000000L);
      case 87:
         return jjMoveStringLiteralDfa1_0(0x8200000L);
      case 91:
         jjmatchedKind = 8;
         return jjMoveNfa_0(3, 0);
      case 93:
         jjmatchedKind = 9;
         return jjMoveNfa_0(3, 0);
      // lower-case letters: identical masks to their upper-case counterparts
      case 97:
         return jjMoveStringLiteralDfa1_0(0x80000020000000L);
      case 98:
         return jjMoveStringLiteralDfa1_0(0x80000000000L);
      case 99:
         return jjMoveStringLiteralDfa1_0(0x40000002000L);
      case 100:
         return jjMoveStringLiteralDfa1_0(0x4000000000000L);
      case 101:
         return jjMoveStringLiteralDfa1_0(0x100000L);
      case 102:
         return jjMoveStringLiteralDfa1_0(0x8010000000L);
      case 105:
         return jjMoveStringLiteralDfa1_0(0x1008100000b0000L);
      case 108:
         return jjMoveStringLiteralDfa1_0(0x40000000000000L);
      case 109:
         return jjMoveStringLiteralDfa1_0(0x28000000000000L);
      case 110:
         return jjMoveStringLiteralDfa1_0(0x80040000L);
      case 111:
         return jjMoveStringLiteralDfa1_0(0x1000040008000L);
      case 113:
         return jjMoveStringLiteralDfa1_0(0x10000000000000L);
      case 114:
         return jjMoveStringLiteralDfa1_0(0x20006000000L);
      case 115:
         return jjMoveStringLiteralDfa1_0(0x500000004000L);
      case 116:
         return jjMoveStringLiteralDfa1_0(0x4000000000L);
      case 117:
         return jjMoveStringLiteralDfa1_0(0x2000000000000L);
      case 119:
         return jjMoveStringLiteralDfa1_0(0x8200000L);
      default :
         return jjMoveNfa_0(3, 0);
   }
}
/*
 * Second step of the string-literal DFA. active0 is the bitmask of token
 * kinds still viable after the first character; a match is recorded when the
 * current character completes a literal whose bit is set. Falls through to
 * the NFA (jjMoveNfa_0) on EOF or when no literal continues.
 * Generated by JavaCC; do not hand-edit.
 */
static private final int jjMoveStringLiteralDfa1_0(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      return jjMoveNfa_0(3, 0);
   }
   switch(curChar)
   {
      case 61:
         if ((active0 & 0x400000L) != 0L)
         {
            jjmatchedKind = 22;
            jjmatchedPos = 1;
         }
         else if ((active0 & 0x800000000L) != 0L)
         {
            jjmatchedKind = 35;
            jjmatchedPos = 1;
         }
         else if ((active0 & 0x1000000000L) != 0L)
         {
            jjmatchedKind = 36;
            jjmatchedPos = 1;
         }
         break;
      case 62:
         if ((active0 & 0x2000000000L) != 0L)
         {
            jjmatchedKind = 37;
            jjmatchedPos = 1;
         }
         break;
      case 65:
         return jjMoveStringLiteralDfa2_0(active0, 0x8008000000000L);
      case 69:
         return jjMoveStringLiteralDfa2_0(active0, 0x20006040000L);
      case 70:
         if ((active0 & 0x8000L) != 0L)
         {
            jjmatchedKind = 15;
            jjmatchedPos = 1;
         }
         break;
      case 72:
         return jjMoveStringLiteralDfa2_0(active0, 0x40008000000L);
      case 73:
         return jjMoveStringLiteralDfa2_0(active0, 0x44000000200000L);
      case 76:
         return jjMoveStringLiteralDfa2_0(active0, 0x80000000002000L);
      case 78:
         if ((active0 & 0x1000000000000L) != 0L)
         {
            jjmatchedKind = 48;
            jjmatchedPos = 1;
         }
         return jjMoveStringLiteralDfa2_0(active0, 0x1028100200b0000L);
      case 79:
         return jjMoveStringLiteralDfa2_0(active0, 0x20480080000000L);
      case 82:
         if ((active0 & 0x40000000L) != 0L)
         {
            jjmatchedKind = 30;
            jjmatchedPos = 1;
         }
         return jjMoveStringLiteralDfa2_0(active0, 0x4010000000L);
      case 84:
         return jjMoveStringLiteralDfa2_0(active0, 0x100000000000L);
      case 85:
         return jjMoveStringLiteralDfa2_0(active0, 0x10000000004000L);
      case 88:
         return jjMoveStringLiteralDfa2_0(active0, 0x100000L);
      // lower-case letters: identical masks to their upper-case counterparts
      case 97:
         return jjMoveStringLiteralDfa2_0(active0, 0x8008000000000L);
      case 101:
         return jjMoveStringLiteralDfa2_0(active0, 0x20006040000L);
      case 102:
         if ((active0 & 0x8000L) != 0L)
         {
            jjmatchedKind = 15;
            jjmatchedPos = 1;
         }
         break;
      case 104:
         return jjMoveStringLiteralDfa2_0(active0, 0x40008000000L);
      case 105:
         return jjMoveStringLiteralDfa2_0(active0, 0x44000000200000L);
      case 108:
         return jjMoveStringLiteralDfa2_0(active0, 0x80000000002000L);
      case 110:
         if ((active0 & 0x1000000000000L) != 0L)
         {
            jjmatchedKind = 48;
            jjmatchedPos = 1;
         }
         return jjMoveStringLiteralDfa2_0(active0, 0x1028100200b0000L);
      case 111:
         return jjMoveStringLiteralDfa2_0(active0, 0x20480080000000L);
      case 114:
         if ((active0 & 0x40000000L) != 0L)
         {
            jjmatchedKind = 30;
            jjmatchedPos = 1;
         }
         return jjMoveStringLiteralDfa2_0(active0, 0x4010000000L);
      case 116:
         return jjMoveStringLiteralDfa2_0(active0, 0x100000000000L);
      case 117:
         return jjMoveStringLiteralDfa2_0(active0, 0x10000000004000L);
      case 120:
         return jjMoveStringLiteralDfa2_0(active0, 0x100000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 1);
}
/*
 * Third step of the string-literal DFA. The viable-literal mask is first
 * narrowed to the kinds still possible after the previous character
 * (active0 &= old0); if nothing survives, control passes to the NFA.
 * Generated by JavaCC; do not hand-edit.
 */
static private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 1);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      return jjMoveNfa_0(3, 1);
   }
   switch(curChar)
   {
      case 65:
         return jjMoveStringLiteralDfa3_0(active0, 0x60000002000L);
      case 66:
         return jjMoveStringLiteralDfa3_0(active0, 0x4000L);
      case 67:
         return jjMoveStringLiteralDfa3_0(active0, 0x180000L);
      case 68:
         if ((active0 & 0x20000000L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x20800000000000L);
      case 69:
         return jjMoveStringLiteralDfa3_0(active0, 0x8000000L);
      case 73:
         return jjMoveStringLiteralDfa3_0(active0, 0x112000000000000L);
      case 76:
         if ((active0 & 0x80000000000000L) != 0L)
         {
            jjmatchedKind = 55;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L);
      case 77:
         return jjMoveStringLiteralDfa3_0(active0, 0x40000000000000L);
      case 79:
         return jjMoveStringLiteralDfa3_0(active0, 0x80010000000L);
      case 81:
         return jjMoveStringLiteralDfa3_0(active0, 0x2000000L);
      case 82:
         return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L);
      case 83:
         return jjMoveStringLiteralDfa3_0(active0, 0x4000000020000L);
      case 84:
         if ((active0 & 0x80000000L) != 0L)
         {
            jjmatchedKind = 31;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x10004200000L);
      case 85:
         return jjMoveStringLiteralDfa3_0(active0, 0x404000000000L);
      case 86:
         return jjMoveStringLiteralDfa3_0(active0, 0x10000L);
      case 87:
         if ((active0 & 0x40000L) != 0L)
         {
            jjmatchedKind = 18;
            jjmatchedPos = 2;
         }
         break;
      case 88:
         if ((active0 & 0x8000000000000L) != 0L)
         {
            jjmatchedKind = 51;
            jjmatchedPos = 2;
         }
         break;
      // lower-case letters: identical masks to their upper-case counterparts
      case 97:
         return jjMoveStringLiteralDfa3_0(active0, 0x60000002000L);
      case 98:
         return jjMoveStringLiteralDfa3_0(active0, 0x4000L);
      case 99:
         return jjMoveStringLiteralDfa3_0(active0, 0x180000L);
      case 100:
         if ((active0 & 0x20000000L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x20800000000000L);
      case 101:
         return jjMoveStringLiteralDfa3_0(active0, 0x8000000L);
      case 105:
         return jjMoveStringLiteralDfa3_0(active0, 0x112000000000000L);
      case 108:
         if ((active0 & 0x80000000000000L) != 0L)
         {
            jjmatchedKind = 55;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L);
      case 109:
         return jjMoveStringLiteralDfa3_0(active0, 0x40000000000000L);
      case 111:
         return jjMoveStringLiteralDfa3_0(active0, 0x80010000000L);
      case 113:
         return jjMoveStringLiteralDfa3_0(active0, 0x2000000L);
      case 114:
         return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L);
      case 115:
         return jjMoveStringLiteralDfa3_0(active0, 0x4000000020000L);
      case 116:
         if ((active0 & 0x80000000L) != 0L)
         {
            jjmatchedKind = 31;
            jjmatchedPos = 2;
         }
         return jjMoveStringLiteralDfa3_0(active0, 0x10004200000L);
      case 117:
         return jjMoveStringLiteralDfa3_0(active0, 0x404000000000L);
      case 118:
         return jjMoveStringLiteralDfa3_0(active0, 0x10000L);
      case 119:
         if ((active0 & 0x40000L) != 0L)
         {
            jjmatchedKind = 18;
            jjmatchedPos = 2;
         }
         break;
      case 120:
         if ((active0 & 0x8000000000000L) != 0L)
         {
            jjmatchedKind = 51;
            jjmatchedPos = 2;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 2);
}
/**
 * String-literal DFA, character position 3 (the fourth character of a keyword
 * candidate). Bits in {@code active0} mark token kinds still viable; it is
 * first intersected with {@code old0}, the survivors of position 2. Upper- and
 * lower-case branches are duplicated because the keywords are matched
 * case-insensitively. Falls back to the NFA on EOF or when no literal remains.
 */
static private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 2);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: let the NFA decide on what was read.
      return jjMoveNfa_0(3, 2);
   }
   switch(curChar)
   {
      case 67:
         return jjMoveStringLiteralDfa4_0(active0, 0x4000L);
      case 69:
         if ((active0 & 0x4000000000L) != 0L)
         {
            jjmatchedKind = 38;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x810000030000L);
      case 72:
         if ((active0 & 0x200000L) != 0L)
         {
            jjmatchedKind = 21;
            jjmatchedPos = 3;
         }
         break;
      case 73:
         return jjMoveStringLiteralDfa4_0(active0, 0x60100000000000L);
      case 76:
         if ((active0 & 0x20000000000L) != 0L)
         {
            jjmatchedKind = 41;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x80000180000L);
      case 77:
         if ((active0 & 0x10000000L) != 0L)
         {
            jjmatchedKind = 28;
            jjmatchedPos = 3;
         }
         break;
      case 81:
         return jjMoveStringLiteralDfa4_0(active0, 0x2000000000000L);
      case 82:
         if ((active0 & 0x40000000000L) != 0L)
         {
            jjmatchedKind = 42;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x40000c000000L);
      case 83:
         return jjMoveStringLiteralDfa4_0(active0, 0x8000002000L);
      case 84:
         if ((active0 & 0x10000000000000L) != 0L)
         {
            jjmatchedKind = 52;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x104000000000000L);
      case 85:
         return jjMoveStringLiteralDfa4_0(active0, 0x2000000L);
      case 99:
         return jjMoveStringLiteralDfa4_0(active0, 0x4000L);
      case 101:
         if ((active0 & 0x4000000000L) != 0L)
         {
            jjmatchedKind = 38;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x810000030000L);
      case 104:
         if ((active0 & 0x200000L) != 0L)
         {
            jjmatchedKind = 21;
            jjmatchedPos = 3;
         }
         break;
      case 105:
         return jjMoveStringLiteralDfa4_0(active0, 0x60100000000000L);
      case 108:
         if ((active0 & 0x20000000000L) != 0L)
         {
            jjmatchedKind = 41;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x80000180000L);
      case 109:
         if ((active0 & 0x10000000L) != 0L)
         {
            jjmatchedKind = 28;
            jjmatchedPos = 3;
         }
         break;
      case 113:
         return jjMoveStringLiteralDfa4_0(active0, 0x2000000000000L);
      case 114:
         if ((active0 & 0x40000000000L) != 0L)
         {
            jjmatchedKind = 42;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x40000c000000L);
      case 115:
         return jjMoveStringLiteralDfa4_0(active0, 0x8000002000L);
      case 116:
         if ((active0 & 0x10000000000000L) != 0L)
         {
            jjmatchedKind = 52;
            jjmatchedPos = 3;
         }
         return jjMoveStringLiteralDfa4_0(active0, 0x104000000000000L);
      case 117:
         return jjMoveStringLiteralDfa4_0(active0, 0x2000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 3);
}
/**
 * String-literal DFA, character position 4. Same protocol as the previous
 * step: intersect the viable-kind mask with the survivors, read one character,
 * record any complete match in jjmatchedKind/jjmatchedPos, and continue.
 */
static private final int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 3);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 3);
   }
   switch(curChar)
   {
      case 67:
         return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L);
      case 69:
         if ((active0 & 0x8000000L) != 0L)
         {
            jjmatchedKind = 27;
            jjmatchedPos = 4;
         }
         else if ((active0 & 0x8000000000L) != 0L)
         {
            jjmatchedKind = 39;
            jjmatchedPos = 4;
         }
         return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L);
      case 70:
         return jjMoveStringLiteralDfa5_0(active0, 0x20000000000000L);
      case 71:
         return jjMoveStringLiteralDfa5_0(active0, 0x10000000000L);
      case 73:
         return jjMoveStringLiteralDfa5_0(active0, 0x104000006000000L);
      case 76:
         return jjMoveStringLiteralDfa5_0(active0, 0x4000L);
      case 78:
         return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L);
      case 82:
         return jjMoveStringLiteralDfa5_0(active0, 0x30000L);
      case 83:
         if ((active0 & 0x2000L) != 0L)
         {
            jjmatchedKind = 13;
            jjmatchedPos = 4;
         }
         break;
      case 84:
         if ((active0 & 0x40000000000000L) != 0L)
         {
            jjmatchedKind = 54;
            jjmatchedPos = 4;
         }
         break;
      case 85:
         return jjMoveStringLiteralDfa5_0(active0, 0x2000000180000L);
      case 88:
         if ((active0 & 0x800000000000L) != 0L)
         {
            jjmatchedKind = 47;
            jjmatchedPos = 4;
         }
         break;
      case 99:
         return jjMoveStringLiteralDfa5_0(active0, 0x400000000000L);
      case 101:
         if ((active0 & 0x8000000L) != 0L)
         {
            jjmatchedKind = 27;
            jjmatchedPos = 4;
         }
         else if ((active0 & 0x8000000000L) != 0L)
         {
            jjmatchedKind = 39;
            jjmatchedPos = 4;
         }
         return jjMoveStringLiteralDfa5_0(active0, 0x80000000000L);
      case 102:
         return jjMoveStringLiteralDfa5_0(active0, 0x20000000000000L);
      case 103:
         return jjMoveStringLiteralDfa5_0(active0, 0x10000000000L);
      case 105:
         return jjMoveStringLiteralDfa5_0(active0, 0x104000006000000L);
      case 108:
         return jjMoveStringLiteralDfa5_0(active0, 0x4000L);
      case 110:
         return jjMoveStringLiteralDfa5_0(active0, 0x100000000000L);
      case 114:
         return jjMoveStringLiteralDfa5_0(active0, 0x30000L);
      case 115:
         if ((active0 & 0x2000L) != 0L)
         {
            jjmatchedKind = 13;
            jjmatchedPos = 4;
         }
         break;
      case 116:
         if ((active0 & 0x40000000000000L) != 0L)
         {
            jjmatchedKind = 54;
            jjmatchedPos = 4;
         }
         break;
      case 117:
         return jjMoveStringLiteralDfa5_0(active0, 0x2000000180000L);
      case 120:
         if ((active0 & 0x800000000000L) != 0L)
         {
            jjmatchedKind = 47;
            jjmatchedPos = 4;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 4);
}
/**
 * String-literal DFA, character position 5. Records complete keyword matches
 * and forwards the still-viable kind mask to the next step; case-insensitive
 * via duplicated upper-/lower-case branches.
 */
static private final int jjMoveStringLiteralDfa5_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 4);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 4);
   }
   switch(curChar)
   {
      case 65:
         return jjMoveStringLiteralDfa6_0(active0, 0x100080000004000L);
      case 68:
         return jjMoveStringLiteralDfa6_0(active0, 0x180000L);
      case 69:
         if ((active0 & 0x400000000000L) != 0L)
         {
            jjmatchedKind = 46;
            jjmatchedPos = 5;
         }
         else if ((active0 & 0x2000000000000L) != 0L)
         {
            jjmatchedKind = 49;
            jjmatchedPos = 5;
         }
         return jjMoveStringLiteralDfa6_0(active0, 0x10004000000L);
      case 71:
         if ((active0 & 0x100000000000L) != 0L)
         {
            jjmatchedKind = 44;
            jjmatchedPos = 5;
         }
         break;
      case 78:
         return jjMoveStringLiteralDfa6_0(active0, 0x4000000000000L);
      case 82:
         return jjMoveStringLiteralDfa6_0(active0, 0x2000000L);
      case 83:
         return jjMoveStringLiteralDfa6_0(active0, 0x10000L);
      case 84:
         if ((active0 & 0x20000L) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 5;
         }
         break;
      case 89:
         if ((active0 & 0x20000000000000L) != 0L)
         {
            jjmatchedKind = 53;
            jjmatchedPos = 5;
         }
         break;
      case 97:
         return jjMoveStringLiteralDfa6_0(active0, 0x100080000004000L);
      case 100:
         return jjMoveStringLiteralDfa6_0(active0, 0x180000L);
      case 101:
         if ((active0 & 0x400000000000L) != 0L)
         {
            jjmatchedKind = 46;
            jjmatchedPos = 5;
         }
         else if ((active0 & 0x2000000000000L) != 0L)
         {
            jjmatchedKind = 49;
            jjmatchedPos = 5;
         }
         return jjMoveStringLiteralDfa6_0(active0, 0x10004000000L);
      case 103:
         if ((active0 & 0x100000000000L) != 0L)
         {
            jjmatchedKind = 44;
            jjmatchedPos = 5;
         }
         break;
      case 110:
         return jjMoveStringLiteralDfa6_0(active0, 0x4000000000000L);
      case 114:
         return jjMoveStringLiteralDfa6_0(active0, 0x2000000L);
      case 115:
         return jjMoveStringLiteralDfa6_0(active0, 0x10000L);
      case 116:
         if ((active0 & 0x20000L) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 5;
         }
         break;
      case 121:
         if ((active0 & 0x20000000000000L) != 0L)
         {
            jjmatchedKind = 53;
            jjmatchedPos = 5;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 5);
}
/**
 * String-literal DFA, character position 6. Same protocol as the previous
 * steps; case-insensitive via duplicated upper-/lower-case branches.
 */
static private final int jjMoveStringLiteralDfa6_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 5);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 5);
   }
   switch(curChar)
   {
      case 67:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000000000000L);
      case 69:
         if ((active0 & 0x80000L) != 0L)
         {
            jjmatchedKind = 19;
            jjmatchedPos = 6;
         }
         else if ((active0 & 0x100000L) != 0L)
         {
            jjmatchedKind = 20;
            jjmatchedPos = 6;
         }
         return jjMoveStringLiteralDfa7_0(active0, 0x2010000L);
      case 76:
         return jjMoveStringLiteralDfa7_0(active0, 0x100000000000000L);
      case 78:
         if ((active0 & 0x80000000000L) != 0L)
         {
            jjmatchedKind = 43;
            jjmatchedPos = 6;
         }
         break;
      case 82:
         if ((active0 & 0x10000000000L) != 0L)
         {
            jjmatchedKind = 40;
            jjmatchedPos = 6;
         }
         break;
      case 83:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000L);
      case 86:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000000L);
      case 99:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000000000000L);
      case 101:
         if ((active0 & 0x80000L) != 0L)
         {
            jjmatchedKind = 19;
            jjmatchedPos = 6;
         }
         else if ((active0 & 0x100000L) != 0L)
         {
            jjmatchedKind = 20;
            jjmatchedPos = 6;
         }
         return jjMoveStringLiteralDfa7_0(active0, 0x2010000L);
      case 108:
         return jjMoveStringLiteralDfa7_0(active0, 0x100000000000000L);
      case 110:
         if ((active0 & 0x80000000000L) != 0L)
         {
            jjmatchedKind = 43;
            jjmatchedPos = 6;
         }
         break;
      case 114:
         if ((active0 & 0x10000000000L) != 0L)
         {
            jjmatchedKind = 40;
            jjmatchedPos = 6;
         }
         break;
      case 115:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000L);
      case 118:
         return jjMoveStringLiteralDfa7_0(active0, 0x4000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 6);
}
/**
 * String-literal DFA, character position 7. Note the {@code case 32} branch:
 * one multi-word literal contains an embedded space at this position.
 */
static private final int jjMoveStringLiteralDfa7_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 6);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 6);
   }
   switch(curChar)
   {
      case 32:
         return jjMoveStringLiteralDfa8_0(active0, 0x10000L);
      case 68:
         if ((active0 & 0x2000000L) != 0L)
         {
            jjmatchedKind = 25;
            jjmatchedPos = 7;
         }
         break;
      case 69:
         if ((active0 & 0x4000000L) != 0L)
         {
            jjmatchedKind = 26;
            jjmatchedPos = 7;
         }
         break;
      case 83:
         if ((active0 & 0x4000L) != 0L)
         {
            jjmatchedKind = 14;
            jjmatchedPos = 7;
         }
         break;
      case 84:
         if ((active0 & 0x4000000000000L) != 0L)
         {
            jjmatchedKind = 50;
            jjmatchedPos = 7;
         }
         break;
      case 86:
         return jjMoveStringLiteralDfa8_0(active0, 0x100000000000000L);
      case 100:
         if ((active0 & 0x2000000L) != 0L)
         {
            jjmatchedKind = 25;
            jjmatchedPos = 7;
         }
         break;
      case 101:
         if ((active0 & 0x4000000L) != 0L)
         {
            jjmatchedKind = 26;
            jjmatchedPos = 7;
         }
         break;
      case 115:
         if ((active0 & 0x4000L) != 0L)
         {
            jjmatchedKind = 14;
            jjmatchedPos = 7;
         }
         break;
      case 116:
         if ((active0 & 0x4000000000000L) != 0L)
         {
            jjmatchedKind = 50;
            jjmatchedPos = 7;
         }
         break;
      case 118:
         return jjMoveStringLiteralDfa8_0(active0, 0x100000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 7);
}
/** String-literal DFA, character position 8 (only two candidates remain). */
static private final int jjMoveStringLiteralDfa8_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 7);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 7);
   }
   switch(curChar)
   {
      case 65:
         return jjMoveStringLiteralDfa9_0(active0, 0x100000000000000L);
      case 73:
         return jjMoveStringLiteralDfa9_0(active0, 0x10000L);
      case 97:
         return jjMoveStringLiteralDfa9_0(active0, 0x100000000000000L);
      case 105:
         return jjMoveStringLiteralDfa9_0(active0, 0x10000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 8);
}
/** String-literal DFA, character position 9; kind 16 completes here. */
static private final int jjMoveStringLiteralDfa9_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 8);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 8);
   }
   switch(curChar)
   {
      case 76:
         return jjMoveStringLiteralDfa10_0(active0, 0x100000000000000L);
      case 83:
         if ((active0 & 0x10000L) != 0L)
         {
            jjmatchedKind = 16;
            jjmatchedPos = 9;
         }
         break;
      case 108:
         return jjMoveStringLiteralDfa10_0(active0, 0x100000000000000L);
      case 115:
         if ((active0 & 0x10000L) != 0L)
         {
            jjmatchedKind = 16;
            jjmatchedPos = 9;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 9);
}
/** String-literal DFA, character position 10 (single candidate left). */
static private final int jjMoveStringLiteralDfa10_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 9);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 9);
   }
   switch(curChar)
   {
      case 85:
         return jjMoveStringLiteralDfa11_0(active0, 0x100000000000000L);
      case 117:
         return jjMoveStringLiteralDfa11_0(active0, 0x100000000000000L);
      default :
         break;
   }
   return jjMoveNfa_0(3, 10);
}
/** Final string-literal DFA step: kind 56 completes at position 11. */
static private final int jjMoveStringLiteralDfa11_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjMoveNfa_0(3, 10);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF while matching a literal: fall back to the NFA.
      return jjMoveNfa_0(3, 10);
   }
   switch(curChar)
   {
      case 69:
         if ((active0 & 0x100000000000000L) != 0L)
         {
            jjmatchedKind = 56;
            jjmatchedPos = 11;
         }
         break;
      case 101:
         if ((active0 & 0x100000000000000L) != 0L)
         {
            jjmatchedKind = 56;
            jjmatchedPos = 11;
         }
         break;
      default :
         break;
   }
   return jjMoveNfa_0(3, 11);
}
/**
 * Adds {@code state} to the active NFA state set unless it was already added
 * during the current round (the per-state round stamp acts as a "seen" flag).
 */
static private final void jjCheckNAdd(int state)
{
   if (jjrounds[state] == jjround)
      return;
   jjrounds[state] = jjround;
   jjstateSet[jjnewStateCnt++] = state;
}
/**
 * Appends the states {@code jjnextStates[start..end]} (inclusive range,
 * start <= end at every call site) to the active NFA state set, without
 * duplicate checking.
 */
static private final void jjAddStates(int start, int end)
{
   for (int idx = start; idx <= end; idx++)
      jjstateSet[jjnewStateCnt++] = jjnextStates[idx];
}
/** Duplicate-checked insertion of two states, in order, via jjCheckNAdd. */
static private final void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
/**
 * Duplicate-checked insertion of the states {@code jjnextStates[start..end]}
 * (inclusive range, start <= end at every call site).
 */
static private final void jjCheckNAddStates(int start, int end)
{
   for (int idx = start; idx <= end; idx++)
      jjCheckNAdd(jjnextStates[idx]);
}
/** Duplicate-checked insertion of the pair jjnextStates[start], jjnextStates[start+1]. */
static private final void jjCheckNAddStates(int start)
{
   jjCheckNAdd(jjnextStates[start]);
   jjCheckNAdd(jjnextStates[start + 1]);
}
// Character-class bit vector indexed by (curChar & 0xff) >> 6: entries 2 and 3
// (characters 128-255) are fully set, so every non-ASCII character matches.
static final long[] jjbitVec0 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
/**
 * Runs the token NFA starting in {@code startState}. Handles numbers,
 * identifiers, single- and double-quoted strings, "//" line comments, and the
 * letter-by-letter states of two case-insensitive multi-letter keywords
 * (kinds 23 and 24). On exit the NFA result is reconciled with any
 * string-literal match recorded before entry (strKind/strPos): the longer
 * match wins; on a tie the lower-numbered kind wins.
 */
static private final int jjMoveNfa_0(int startState, int curPos)
{
   int strKind = jjmatchedKind;
   int strPos = jjmatchedPos;
   int seenUpto;
   // Rewind so the NFA re-reads every character the literal DFA consumed.
   input_stream.backup(seenUpto = curPos + 1);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) { throw new Error("Internal Error"); }
   curPos = 0;
   int[] nextStates;
   int startsAt = 0;
   jjnewStateCnt = 41;
   int i = 1;
   jjstateSet[0] = startState;
   int j, kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         // ASCII 0-63: digits, quotes, '-', '/', punctuation; membership via bitmask.
         long l = 1L << curChar;
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 3:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     // Digit: start/continue a number token (kind 57).
                     if (kind > 57)
                        kind = 57;
                     jjCheckNAdd(5);
                  }
                  else if (curChar == 34)
                     jjCheckNAddTwoStates(14, 15);
                  else if (curChar == 39)
                     jjstateSet[jjnewStateCnt++] = 10;
                  else if (curChar == 45)
                     jjCheckNAdd(5);
                  else if (curChar == 47)
                     jjstateSet[jjnewStateCnt++] = 0;
                  break;
               case 0:
                  // Second '/' of a "//" line comment.
                  if (curChar == 47)
                     jjCheckNAddTwoStates(1, 2);
                  break;
               case 1:
                  if ((0xffffffffffffdbffL & l) != 0L)
                     jjCheckNAddTwoStates(1, 2);
                  break;
               case 2:
                  // Comment terminated by CR or LF (skip kind 5).
                  if ((0x2400L & l) != 0L)
                     kind = 5;
                  break;
               case 4:
                  if (curChar == 45)
                     jjCheckNAdd(5);
                  break;
               case 5:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 57)
                     kind = 57;
                  jjCheckNAdd(5);
                  break;
               case 7:
                  if ((0x3ff600000000000L & l) != 0L)
                     jjAddStates(0, 1);
                  break;
               case 8:
                  if ((0x3ff000000000000L & l) != 0L && kind > 58)
                     kind = 58;
                  break;
               case 9:
                  if (curChar == 39)
                     jjstateSet[jjnewStateCnt++] = 10;
                  break;
               case 10:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(11, 12);
                  break;
               case 11:
                  if ((0xffffff7b00000000L & l) != 0L)
                     jjCheckNAddTwoStates(11, 12);
                  break;
               case 12:
                  // Closing single quote completes kind 59.
                  if (curChar == 39 && kind > 59)
                     kind = 59;
                  break;
               case 13:
                  if (curChar == 34)
                     jjCheckNAddTwoStates(14, 15);
                  break;
               case 14:
                  if ((0xfffffffb00000000L & l) != 0L)
                     jjCheckNAddTwoStates(14, 15);
                  break;
               case 15:
                  // Closing double quote completes kind 60.
                  if (curChar == 34 && kind > 60)
                     kind = 60;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         // ASCII 64-127: letters and '_'; masks are folded for both cases.
         long l = 1L << (curChar & 077);
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 3:
                  if ((0x7fffffe07fffffeL & l) != 0L)
                     jjCheckNAddTwoStates(7, 8);
                  if ((0x8000000080000L & l) != 0L)
                     jjAddStates(2, 3);
                  else if ((0x200000002000L & l) != 0L)
                     jjAddStates(4, 5);
                  break;
               case 1:
                  jjAddStates(6, 7);
                  break;
               case 6:
                  if ((0x7fffffe07fffffeL & l) != 0L)
                     jjCheckNAddTwoStates(7, 8);
                  break;
               case 7:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                     jjCheckNAddTwoStates(7, 8);
                  break;
               case 8:
                  if ((0x7fffffe07fffffeL & l) != 0L && kind > 58)
                     kind = 58;
                  break;
               case 10:
                  if ((0x7fffffe07fffffeL & l) != 0L)
                     jjCheckNAddTwoStates(11, 12);
                  break;
               case 11:
                  if ((0x7ffffffeffffffffL & l) != 0L)
                     jjCheckNAddTwoStates(11, 12);
                  break;
               case 14:
                  if ((0x7fffffffffffffffL & l) != 0L)
                     jjAddStates(8, 9);
                  break;
               // States 16-27: letter-by-letter match of the kind-24 keyword.
               case 16:
                  if ((0x200000002000L & l) != 0L)
                     jjAddStates(4, 5);
                  break;
               case 17:
                  if ((0x40000000400000L & l) != 0L && kind > 24)
                     kind = 24;
                  break;
               case 18:
                  if ((0x1000000010L & l) != 0L && kind > 24)
                     kind = 24;
                  break;
               case 19:
                  if ((0x2000000020L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 18;
                  break;
               case 20:
                  if ((0x20000000200000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 19;
                  break;
               case 21:
                  if ((0x100000001000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 20;
                  break;
               case 22:
                  if ((0x200000002L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 21;
                  break;
               case 23:
                  if ((0x40000000400000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 22;
                  break;
               case 24:
                  if ((0x20000000200L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 23;
                  break;
               case 25:
                  if ((0x10000000100000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 24;
                  break;
               case 26:
                  if ((0x100000001000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 25;
                  break;
               case 27:
                  if ((0x20000000200000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 26;
                  break;
               // States 28-40: letter-by-letter match of the kind-23 keyword.
               case 28:
                  if ((0x8000000080000L & l) != 0L)
                     jjAddStates(2, 3);
                  break;
               case 29:
                  if ((0x40000000400000L & l) != 0L && kind > 23)
                     kind = 23;
                  break;
               case 30:
                  if ((0x1000000010L & l) != 0L && kind > 23)
                     kind = 23;
                  break;
               case 31:
                  if ((0x2000000020L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 30;
                  break;
               case 32:
                  if ((0x20000000200000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 31;
                  break;
               case 33:
                  if ((0x100000001000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 32;
                  break;
               case 34:
                  if ((0x200000002L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 33;
                  break;
               case 35:
                  if ((0x40000000400000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 34;
                  break;
               case 36:
                  if ((0x2000000020L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 35;
                  break;
               case 37:
                  if ((0x100000001000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 36;
                  break;
               case 38:
                  if ((0x8000000080L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 37;
                  break;
               case 39:
                  if ((0x400000004000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 38;
                  break;
               case 40:
                  if ((0x20000000200L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 39;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         // Non-ASCII: only allowed inside a "//" comment (state 1).
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 1:
                  if ((jjbitVec0[i2] & l2) != 0L)
                     jjAddStates(6, 7);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         // Record the best (lowest-kind) match found at this position.
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the current and next halves of jjstateSet; stop when no states remain.
      if ((i = jjnewStateCnt) == (startsAt = 41 - (jjnewStateCnt = startsAt)))
         break;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { break; }
   }
   // Reconcile NFA result with the earlier string-literal match (strKind/strPos).
   if (jjmatchedPos > strPos)
      return curPos;
   int toRet = Math.max(curPos, seenUpto);
   if (curPos < toRet)
      for (i = toRet - Math.min(curPos, seenUpto); i-- > 0; )
         try { curChar = input_stream.readChar(); }
         catch(java.io.IOException e) { throw new Error("Internal Error : Please send a bug report."); }
   if (jjmatchedPos < strPos)
   {
      jjmatchedKind = strKind;
      jjmatchedPos = strPos;
   }
   else if (jjmatchedPos == strPos && jjmatchedKind > strKind)
      jjmatchedKind = strKind;
   return toRet;
}
// Flattened target-state lists for jjAddStates/jjCheckNAddStates; callers pass
// (start, end) index pairs selecting an inclusive sub-range of this array.
static final int[] jjnextStates = {
   7, 8, 29, 40, 17, 27, 1, 2, 14, 15,
};
// Fixed token images indexed by kind; null for tokens whose image varies
// (identifiers, numbers, quoted strings, skipped input).
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, "\50", "\51", "\133", "\135", "\72", "\73",
"\54", null, null, null, null, null, null, null, null, null, "\72\75", null, null,
null, null, null, null, null, null, null, "\76", "\74", "\75", "\76\75", "\74\75",
"\74\76", null, null, null, null, null, null, null, "\52", null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null, null, };
// Names of the lexical states; only DEFAULT exists for this grammar.
public static final String[] lexStateNames = {
   "DEFAULT",
};
// Bitmask of kinds that produce a Token (vs. kinds that are skipped).
static final long[] jjtoToken = {
   0x3fffffffffffffc1L,
};
// Bitmask of kinds that are skipped (whitespace, comments).
static final long[] jjtoSkip = {
   0x3eL,
};
// Shared lexer state: the character source, NFA bookkeeping arrays
// (sized for the 41 NFA states), and the most recently read character.
static protected SimpleCharStream input_stream;
static private final int[] jjrounds = new int[41];
static private final int[] jjstateSet = new int[82];
static protected char curChar;
/**
 * Constructs the (static) token manager over the given stream. Because all
 * lexer state is static, only one instance may ever be constructed; later
 * callers must use ReInit() instead.
 */
public QueryParserTokenManager(SimpleCharStream stream){
   if (input_stream != null)
      throw new TokenMgrError("ERROR: Second call to constructor of static lexer. You must use ReInit() to initialize the static variables.", TokenMgrError.STATIC_LEXER_ERROR);
   input_stream = stream;
}
/** Constructs the token manager and switches to the given lexical state. */
public QueryParserTokenManager(SimpleCharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/** Resets the lexer over a new stream, clearing match state and NFA rounds. */
static public void ReInit(SimpleCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
/**
 * Restarts the round counter and invalidates every per-state round stamp, so
 * that no state appears to have been visited in the current round.
 */
static private final void ReInitRounds()
{
   jjround = 0x80000001;
   for (int state = 0; state < 41; state++)
   {
      jjrounds[state] = 0x80000000;
   }
}
/** Resets the lexer over a new stream and switches to the given lexical state. */
static public void ReInit(SimpleCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/**
 * Switches to the given lexical state. Only the DEFAULT state (0) exists for
 * this grammar; any other value is rejected with a TokenMgrError.
 */
static public void SwitchTo(int lexState)
{
   if (lexState == 0)
      curLexState = lexState;
   else
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
}
/**
 * Builds a Token for the current match: kind from jjmatchedKind, image from
 * the fixed literal table when available (else the raw matched text), and
 * begin/end positions from the stream.
 */
static protected Token jjFillToken()
{
   Token t = Token.newToken(jjmatchedKind);
   t.kind = jjmatchedKind;
   String im = jjstrLiteralImages[jjmatchedKind];
   t.image = (im == null) ? input_stream.GetImage() : im;
   t.beginLine = input_stream.getBeginLine();
   t.beginColumn = input_stream.getBeginColumn();
   t.endLine = input_stream.getEndLine();
   t.endColumn = input_stream.getEndColumn();
   return t;
}
// Mutable lexer state shared by the matching routines above:
static int curLexState = 0;      // current lexical state (always DEFAULT here)
static int defaultLexState = 0;  // state restored by ReInit
static int jjnewStateCnt;        // fill pointer into jjstateSet for the next round
static int jjround;              // current NFA round number (duplicate detection)
static int jjmatchedPos;         // end position of the best match so far
static int jjmatchedKind;        // kind of the best match so far
/**
 * Returns the next token from the stream. Loops over skipped matches
 * (whitespace/comments), returns an EOF token (kind 0) at end of input, and
 * throws TokenMgrError when no rule matches the input at all.
 */
public static Token getNextToken()
{
  int kind;
  Token specialToken = null;
  Token matchedToken;
  int curPos = 0;

  EOFLoop :
  for (;;)
  {
   try
   {
      curChar = input_stream.BeginToken();
   }
   catch(java.io.IOException e)
   {
      // End of input: synthesize the EOF token.
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
   }

   jjmatchedKind = 0x7fffffff;
   jjmatchedPos = 0;
   curPos = jjMoveStringLiteralDfa0_0();
   if (jjmatchedPos == 0 && jjmatchedKind > 61)
   {
      // Single unmatched character: classify as the catch-all kind 61.
      jjmatchedKind = 61;
   }
   if (jjmatchedKind != 0x7fffffff)
   {
      // Push back anything read past the end of the match.
      if (jjmatchedPos + 1 < curPos)
         input_stream.backup(curPos - jjmatchedPos - 1);
      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
      {
         matchedToken = jjFillToken();
         return matchedToken;
      }
      else
      {
         // Skipped kind (whitespace/comment): try again for a real token.
         continue EOFLoop;
      }
   }
   // No rule matched: gather position/context for the error report.
   int error_line = input_stream.getEndLine();
   int error_column = input_stream.getEndColumn();
   String error_after = null;
   boolean EOFSeen = false;
   try { input_stream.readChar(); input_stream.backup(1); }
   catch (java.io.IOException e1) {
      EOFSeen = true;
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
      if (curChar == '\n' || curChar == '\r') {
         error_line++;
         error_column = 0;
      }
      else
         error_column++;
   }
   if (!EOFSeen) {
      input_stream.backup(1);
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
   }
   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
  }
}
}
| |
// This file is part of JavaSMT,
// an API wrapper for a collection of SMT solvers:
// https://github.com/sosy-lab/java-smt
//
// SPDX-FileCopyrightText: 2020 Dirk Beyer <https://www.sosy-lab.org>
//
// SPDX-License-Identifier: Apache-2.0
package org.sosy_lab.java_smt.solvers.smtinterpol;
import static com.google.common.base.Preconditions.checkNotNull;
import de.uni_freiburg.informatik.ultimate.logic.Annotation;
import de.uni_freiburg.informatik.ultimate.logic.ApplicationTerm;
import de.uni_freiburg.informatik.ultimate.logic.Assignments;
import de.uni_freiburg.informatik.ultimate.logic.DataType;
import de.uni_freiburg.informatik.ultimate.logic.DataType.Constructor;
import de.uni_freiburg.informatik.ultimate.logic.FunctionSymbol;
import de.uni_freiburg.informatik.ultimate.logic.Logics;
import de.uni_freiburg.informatik.ultimate.logic.Model;
import de.uni_freiburg.informatik.ultimate.logic.QuotedObject;
import de.uni_freiburg.informatik.ultimate.logic.SMTLIBException;
import de.uni_freiburg.informatik.ultimate.logic.Script;
import de.uni_freiburg.informatik.ultimate.logic.Sort;
import de.uni_freiburg.informatik.ultimate.logic.Term;
import de.uni_freiburg.informatik.ultimate.logic.TermVariable;
import de.uni_freiburg.informatik.ultimate.logic.Theory;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* This {@link Script} implementation allows to use the SMTLIB2 parser of SMTInterpol for parsing
* single formulas. It is meant to be given to a {@link
* de.uni_freiburg.informatik.ultimate.smtinterpol.smtlib2.ParseEnvironment} and allows declaring
* and define terms (by forwarding such calls to a real {@link Script} implementation), but does not
* allow any other actions. All formulas that are asserted (regardless of pop and push commands) are
* collected and can be retrieved afterwards.
*
* <p>The environment represented by the given {@link Script} that this class delegates to is
* changed only by declaring and defining terms, sorts etc., so these terms can be used in that
* environment afterwards.
*/
class FormulaCollectionScript implements Script {

  private final Theory theory;
  private final Script script;

  /** All terms passed to {@link #assertTerm}, in assertion order. */
  private final List<Term> assertedTerms = new ArrayList<>(1);

  FormulaCollectionScript(Script pScript, Theory pTheory) {
    script = checkNotNull(pScript);
    theory = checkNotNull(pTheory);
  }

  /** Returns an unmodifiable view of all terms asserted so far. */
  public List<Term> getAssertedTerms() {
    return Collections.unmodifiableList(assertedTerms);
  }

  /**
   * Collects the term instead of asserting it, so the delegate environment is
   * never modified by assertions (push/pop are not supported either).
   */
  @Override
  public LBool assertTerm(Term pTerm) throws SMTLIBException {
    assertedTerms.add(pTerm);
    // Do not call script.assertTerm(pTerm)
    // because we do not want to actually modify the environment
    return LBool.UNKNOWN;
  }

  /**
   * Declares a function in the delegate environment, unless a symbol with the
   * same name and parameter sorts already exists; in that case the return
   * sorts must agree.
   *
   * @throws SMTLIBException if an existing symbol has a different return sort
   */
  @Override
  public void declareFun(String fun, Sort[] paramSorts, Sort resultSort) throws SMTLIBException {
    FunctionSymbol fsym = theory.getFunction(fun, paramSorts);
    if (fsym == null) {
      script.declareFun(fun, paramSorts, resultSort);
    } else {
      if (!fsym.getReturnSort().equals(resultSort)) {
        throw new SMTLIBException(
            "Function " + fun + " is already declared with different definition");
      }
    }
  }

  /**
   * Defines a function in the delegate environment. If a symbol with the same
   * name and parameter sorts already exists, its definition and return sort
   * must match the new ones exactly.
   *
   * @throws SMTLIBException if an existing symbol has no definition or a
   *     different definition or return sort
   */
  @Override
  public void defineFun(String fun, TermVariable[] params, Sort resultSort, Term definition)
      throws SMTLIBException {
    Sort[] paramSorts = new Sort[params.length];
    for (int i = 0; i < paramSorts.length; i++) {
      paramSorts[i] = params[i].getSort();
    }
    FunctionSymbol fsym = theory.getFunction(fun, paramSorts);
    if (fsym == null) {
      script.defineFun(fun, params, resultSort, definition);
    } else {
      // getDefinition() is null for symbols that were only declared, never
      // defined; guard against an NPE and report a proper SMTLIB error instead.
      Term existingDefinition = fsym.getDefinition();
      if (existingDefinition == null
          || !existingDefinition.equals(definition)
          || !fsym.getReturnSort().equals(resultSort)) {
        throw new SMTLIBException(
            "Function " + fun + " is already defined with different definition");
      }
    }
  }

  // ----- Declarations and term building: forwarded to the real script -----

  @Override
  public void setInfo(String info, Object value) {
    script.setInfo(info, value);
  }

  @Override
  public void declareSort(String sort, int arity) throws SMTLIBException {
    script.declareSort(sort, arity);
  }

  @Override
  public void defineSort(String sort, Sort[] sortParams, Sort definition) throws SMTLIBException {
    script.defineSort(sort, sortParams, definition);
  }

  @Override
  public Sort sort(String sortname, Sort... params) throws SMTLIBException {
    return script.sort(sortname, params);
  }

  @Override
  public Sort sort(String sortname, String[] indices, Sort... params) throws SMTLIBException {
    return script.sort(sortname, indices, params);
  }

  @Override
  public Term term(String funcname, Term... params) throws SMTLIBException {
    Term result = script.term(funcname, params);
    return replaceWithDefinition(result);
  }

  @Override
  public Term term(String funcname, String[] indices, Sort returnSort, Term... params)
      throws SMTLIBException {
    Term result = script.term(funcname, indices, returnSort, params);
    return replaceWithDefinition(result);
  }

  /**
   * Inlines nullary defined functions so downstream code never sees defined
   * terms; defined functions with parameters are rejected.
   */
  private Term replaceWithDefinition(Term result) {
    // Replace a term with its definition so that we do not have to handle defined terms later on.
    if (result instanceof ApplicationTerm) {
      FunctionSymbol func = ((ApplicationTerm) result).getFunction();
      if (!func.isIntern() && func.getDefinition() != null) {
        if (func.getParameterSorts().length == 0) {
          result = func.getDefinition();
        } else {
          // If we would accept this here,
          // we would need to handle the definition of a term
          // when accessing its parameters with SmtInterpolUtil.getArg()
          throw new SMTLIBException("Terms with definitions are not supported currently.");
        }
      }
    }
    return result;
  }

  @Override
  public TermVariable variable(String varname, Sort sort) throws SMTLIBException {
    return script.variable(varname, sort);
  }

  @Override
  public Term quantifier(int quantor, TermVariable[] vars, Term body, Term[]... patterns)
      throws SMTLIBException {
    return script.quantifier(quantor, vars, body, patterns);
  }

  @Override
  public Term let(TermVariable[] vars, Term[] values, Term body) throws SMTLIBException {
    return script.let(vars, values, body);
  }

  @Override
  public Term annotate(Term t, Annotation... annotations) throws SMTLIBException {
    return script.annotate(t, annotations);
  }

  @Override
  public Term numeral(String num) throws SMTLIBException {
    return script.numeral(num);
  }

  @Override
  public Term numeral(BigInteger num) throws SMTLIBException {
    return script.numeral(num);
  }

  @Override
  public Term decimal(String decimal) throws SMTLIBException {
    return script.decimal(decimal);
  }

  @Override
  public Term decimal(BigDecimal decimal) throws SMTLIBException {
    return script.decimal(decimal);
  }

  @Override
  public Term string(QuotedObject pStr) throws SMTLIBException {
    return script.string(pStr);
  }

  @Override
  public Term hexadecimal(String hex) throws SMTLIBException {
    return script.hexadecimal(hex);
  }

  @Override
  public Term binary(String bin) throws SMTLIBException {
    return script.binary(bin);
  }

  @Override
  public Sort[] sortVariables(String... names) throws SMTLIBException {
    return script.sortVariables(names);
  }

  // ----- Solver interaction: not allowed in this collection-only script -----

  @Override
  public Term[] getAssertions() throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term getProof() throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] getUnsatCore() throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Map<Term, Term> getValue(Term[] terms)
      throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Assignments getAssignment() throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object getOption(String opt) throws UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object getInfo(String info) throws UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term simplify(Term term) throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void push(int levels) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void pop(int levels) throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Model getModel() throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void setLogic(String logic) throws UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void setLogic(Logics logic) throws UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void setOption(String opt, Object value)
      throws UnsupportedOperationException, SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void reset() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] getInterpolants(Term[] partition)
      throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] getInterpolants(Term[] partition, int[] startOfSubtree)
      throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] getInterpolants(Term[] pPartition, int[] pStartOfSubtree, Term pProofTree)
      throws SMTLIBException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }

  @Override
  public LBool checkSat() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Iterable<Term[]> checkAllsat(Term[] pPredicates) {
    throw new UnsupportedOperationException();
  }

  @Override
  public LBool checkSatAssuming(Term... pAssumptions) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] getUnsatAssumptions() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void resetAssertions() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term[] findImpliedEquality(Term[] pX, Term[] pY) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void exit() {
    throw new UnsupportedOperationException();
  }

  @Override
  public QuotedObject echo(QuotedObject pMsg) {
    throw new UnsupportedOperationException();
  }

  @Override
  public FunctionSymbol getFunctionSymbol(String pConstructor) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Constructor constructor(String pName, String[] pSelectors, Sort[] pArgumentSorts)
      throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public DataType datatype(String pTypename, int pNumParams) throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void declareDatatype(DataType pDatatype, Constructor[] pConstrs) throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public void declareDatatypes(
      DataType[] pDatatypes, Constructor[][] pConstrs, Sort[][] pSortParams)
      throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Term match(
      Term pDataArg, TermVariable[][] pVars, Term[] pCases, Constructor[] pConstructors)
      throws SMTLIBException {
    throw new UnsupportedOperationException();
  }

  @Override
  public Theory getTheory() {
    return theory;
  }
}
| |
/*
*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.identity.oauth2.dao;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2ScopeClientException;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2ScopeException;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2ScopeServerException;
import org.wso2.carbon.identity.oauth2.Oauth2ScopeConstants;
import org.wso2.carbon.identity.oauth2.bean.Scope;
import org.wso2.carbon.identity.oauth2.bean.ScopeBinding;
import org.wso2.carbon.identity.oauth2.util.NamedPreparedStatement;
import org.wso2.carbon.identity.oauth2.util.Oauth2ScopeUtils;
import org.wso2.carbon.utils.DBUtils;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.wso2.carbon.identity.oauth2.Oauth2ScopeConstants.DEFAULT_SCOPE_BINDING;
import static org.wso2.carbon.identity.oauth2.Oauth2ScopeConstants.INTERNAL_SCOPE_PREFIX;
import static org.wso2.carbon.identity.oauth2.Oauth2ScopeConstants.SQLPlaceholders.SCOPE_LIST_PLACEHOLDER;
/**
* OAuth scope management data access object implementation.
*/
public class OAuthScopeDAOImpl implements OAuthScopeDAO {
private static final Log log = LogFactory.getLog(OAuthScopeDAOImpl.class);
/**
 * Add a scope
 *
 * @param scope    Scope to persist; must be non-null and carry a non-null name
 * @param tenantID tenant ID the scope belongs to
 * @throws IdentityOAuth2ScopeException client exception for invalid input or a
 *                                      system-managed internal scope; server exception on SQL failure
 */
@Override
public void addScope(Scope scope, int tenantID) throws IdentityOAuth2ScopeException {
// Reject requests without a scope name up front (client error, not server error).
if (scope == null || scope.getName() == null) {
if (log.isDebugEnabled()) {
log.debug("Scope is not defined");
}
throw Oauth2ScopeUtils.generateClientException(Oauth2ScopeConstants.ErrorMessages.
ERROR_CODE_BAD_REQUEST_SCOPE_NAME_NOT_SPECIFIED, null);
}
// Internal-prefixed scopes may not be created per tenant when they are managed system-wide.
if (scope.getName().startsWith(INTERNAL_SCOPE_PREFIX) &&
Oauth2ScopeUtils.isSystemLevelInternalSystemScopeManagementEnabled()) {
if (log.isDebugEnabled()) {
log.debug("Internal Scopes can't be added per tenant as they are managed at system level.");
}
throw Oauth2ScopeUtils.generateClientException(Oauth2ScopeConstants.ErrorMessages.
ERROR_CODE_INTERNAL_SCOPE_MANAGED_AT_SYSTEM_LEVEL, null);
}
if (log.isDebugEnabled()) {
log.debug("Adding scope :" + scope.getName());
}
// Scope row and its bindings are inserted in one transaction; roll back on any SQL error.
try (Connection conn = IdentityDatabaseUtil.getDBConnection()) {
try {
addScope(scope, conn, tenantID);
IdentityDatabaseUtil.commitTransaction(conn);
} catch (SQLException e1) {
IdentityDatabaseUtil.rollbackTransaction(conn);
String msg = "SQL error occurred while creating scope :" + scope.getName();
throw new IdentityOAuth2ScopeServerException(msg, e1);
}
} catch (SQLException e) {
// Failure to obtain/close the connection itself.
String msg = "Error occurred while creating scope :" + scope.getName();
throw new IdentityOAuth2ScopeServerException(msg, e);
}
}
/**
 * Get all available OAuth2 scopes (OIDC scopes excluded) for a tenant.
 *
 * <p>The query returns one row per scope binding; rows that share a scope ID
 * are merged into a single {@link Scope} instance.</p>
 *
 * @param tenantID tenant ID
 * @return available scope list
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
@Override
public Set<Scope> getAllScopes(int tenantID) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get all scopes for tenantId :" + tenantID);
    }
    Map<Integer, Scope> scopeMap = new HashMap<>();
    String sql;
    try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
        // Oracle requires its own query variant.
        if (conn.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
            sql = SQLQueries.RETRIEVE_ALL_OAUTH2_SCOPES_ORACLE;
        } else {
            sql = SQLQueries.RETRIEVE_ALL_OAUTH2_SCOPES;
        }
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, tenantID);
            ps.setString(2, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    int scopeID = rs.getInt(1);
                    String name = rs.getString(2);
                    String displayName = rs.getString(3);
                    String description = rs.getString(4);
                    String binding = rs.getString(5);
                    String bindingType = rs.getString(6);
                    // Single map lookup instead of repeated containsKey/get calls.
                    Scope scope = scopeMap.get(scopeID);
                    if (scope == null) {
                        scope = new Scope(name, displayName, new ArrayList<>(), description);
                        scopeMap.put(scopeID, scope);
                    } else {
                        // Descriptive columns repeat on every row; refresh them for
                        // parity with the previous behaviour.
                        scope.setName(name);
                        scope.setDescription(description);
                        scope.setDisplayName(displayName);
                    }
                    if (binding != null) {
                        scope.addScopeBinding(bindingType, binding);
                    }
                }
            }
        }
        return new HashSet<>(scopeMap.values());
    } catch (SQLException e) {
        String msg = "Error occurred while getting all OAUTH2 scopes in tenant :" + tenantID;
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Fetch every scope visible to a tenant, optionally widening the result so
 * that OIDC scopes are returned alongside the plain OAuth2 ones.
 *
 * @param tenantID          Tenant ID.
 * @param includeOIDCScopes Include OIDC scopes in the scope list.
 * @return List of scopes.
 * @throws IdentityOAuth2ScopeServerException
 */
@Override
public Set<Scope> getAllScopes(int tenantID, Boolean includeOIDCScopes) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get all scopes for tenantId :" + tenantID + " including OIDC scope: " + includeOIDCScopes);
    }
    // Delegate to the retrieval path matching the requested scope coverage.
    return includeOIDCScopes ? getAllScopesIncludingOIDCScopes(tenantID) : getAllScopes(tenantID);
}
/**
 * Get all scopes including OAuth2 scopes and OIDC scopes as well.
 *
 * <p>The query returns one row per scope binding; rows sharing a scope ID are
 * merged into one {@link Scope}.</p>
 *
 * @param tenantID Tenant ID.
 * @return List of scopes.
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
private Set<Scope> getAllScopesIncludingOIDCScopes(int tenantID)
        throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get all scopes including OAUTH2 and OIDC scopes for tenantId :" + tenantID);
    }
    Map<Integer, Scope> scopeMap = new HashMap<>();
    String sql;
    try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
        // Oracle requires its own query variant.
        if (conn.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
            sql = SQLQueries.RETRIEVE_ALL_SCOPES_ORACLE;
        } else {
            sql = SQLQueries.RETRIEVE_ALL_SCOPES;
        }
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, tenantID);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    int scopeID = rs.getInt(1);
                    String name = rs.getString(2);
                    String displayName = rs.getString(3);
                    String description = rs.getString(4);
                    String binding = rs.getString(5);
                    String bindingType = rs.getString(6);
                    // Single map lookup instead of repeated containsKey/get calls.
                    Scope scope = scopeMap.get(scopeID);
                    if (scope == null) {
                        scope = new Scope(name, displayName, new ArrayList<>(), description);
                        scopeMap.put(scopeID, scope);
                    } else {
                        scope.setName(name);
                        scope.setDescription(description);
                        scope.setDisplayName(displayName);
                    }
                    if (binding != null) {
                        scope.addScopeBinding(bindingType, binding);
                    }
                }
            }
        }
        return new HashSet<>(scopeMap.values());
    } catch (SQLException e) {
        String msg = "Error occurred while getting all scopes in tenant :" + tenantID;
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Resolve only the scopes named in a space-separated request string.
 *
 * @param tenantID          Tenant ID.
 * @param includeOIDCScopes Whether to match OIDC scopes as well as OAuth2 scopes.
 * @param requestedScopes   Space-separated scope names; blank yields an empty set.
 * @return Matching scopes with their bindings merged per scope ID.
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
@Override
public Set<Scope> getRequestedScopesOnly(int tenantID, Boolean includeOIDCScopes, String requestedScopes)
        throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug(String.format("Get requested scopes for scopes: %s for tenantId: %s with includeOIDCScopes: %s",
                requestedScopes, tenantID, includeOIDCScopes));
    }
    // Nothing requested -> nothing to resolve.
    if (StringUtils.isBlank(requestedScopes)) {
        return new HashSet<>();
    }
    // Pick the query template directly; the previous no-op String.format wrappers are removed.
    String sql = includeOIDCScopes
            ? SQLQueries.RETRIEVE_REQUESTED_ALL_SCOPES_WITHOUT_SCOPE_TYPE
            : SQLQueries.RETRIEVE_REQUESTED_OAUTH2_SCOPES;
    // Trim before splitting so leading/trailing whitespace cannot inject an
    // empty scope name into the IN clause.
    List<String> requestedScopeList = Arrays.asList(requestedScopes.trim().split("\\s+"));
    String placeholder = String.join(", ", Collections.nCopies(requestedScopeList.size(), "?"));
    sql = sql.replace(SCOPE_LIST_PLACEHOLDER, placeholder);
    Map<Integer, Scope> scopeMap = new HashMap<>();
    try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, tenantID);
            int scopeIndex = 2;
            if (!includeOIDCScopes) {
                // The OAuth2-only query carries an extra scope-type filter parameter.
                ps.setString(scopeIndex, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
                scopeIndex++;
            }
            for (String requested : requestedScopeList) {
                ps.setString(scopeIndex, requested);
                scopeIndex++;
            }
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    int scopeID = rs.getInt(1);
                    String name = rs.getString(2);
                    String displayName = rs.getString(3);
                    String description = rs.getString(4);
                    String binding = rs.getString(5);
                    String bindingType = rs.getString(6);
                    // Merge rows that share a scope ID (one row per binding).
                    Scope scope = scopeMap.get(scopeID);
                    if (scope == null) {
                        scope = new Scope(name, displayName, new ArrayList<>(), description);
                        scopeMap.put(scopeID, scope);
                    } else {
                        scope.setName(name);
                        scope.setDescription(description);
                        scope.setDisplayName(displayName);
                    }
                    if (binding != null) {
                        scope.addScopeBinding(bindingType, binding);
                    }
                }
            }
        }
        return new HashSet<>(scopeMap.values());
    } catch (SQLException e) {
        String msg = "Error occurred while getting requested scopes in tenant :" + tenantID;
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Get all OAUTH2 scopes of a tenant that carry at least one binding of the
 * given binding type.
 *
 * @param tenantID    tenant ID
 * @param bindingType binding type used to filter the scopes
 * @return matching scopes with their bindings of that type attached
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
@Override
public Set<Scope> getScopes(int tenantID, String bindingType) throws IdentityOAuth2ScopeServerException {
if (log.isDebugEnabled()) {
log.debug("Get scopes for tenantId :" + tenantID + " and bindingType: " + bindingType);
}
Set<Scope> scopes = new HashSet<>();
Map<Integer, Scope> scopeMap = new HashMap<>();
try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
try (PreparedStatement ps = conn.prepareStatement(SQLQueries.RETRIEVE_SCOPES_BY_BINDING_TYPE)) {
ps.setInt(1, tenantID);
ps.setString(2, bindingType);
ps.setString(3, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
try (ResultSet rs = ps.executeQuery()) {
// One result row per binding; rows sharing a scope ID are merged below.
while (rs.next()) {
int scopeID = rs.getInt(1);
String name = rs.getString(2);
String displayName = rs.getString(3);
String description = rs.getString(4);
final String binding = rs.getString(5);
if (scopeMap.containsKey(scopeID) && scopeMap.get(scopeID) != null) {
scopeMap.get(scopeID).setName(name);
scopeMap.get(scopeID).setDescription(description);
scopeMap.get(scopeID).setDisplayName(displayName);
if (binding != null) {
scopeMap.get(scopeID).addScopeBinding(bindingType, binding);
}
} else {
scopeMap.put(scopeID, new Scope(name, displayName, new ArrayList<>(), description));
if (binding != null) {
scopeMap.get(scopeID).addScopeBinding(bindingType, binding);
}
}
}
}
}
// Flatten the merged map into the result set.
for (Map.Entry<Integer, Scope> entry : scopeMap.entrySet()) {
scopes.add(entry.getValue());
}
return scopes;
} catch (SQLException e) {
String msg = "Error occurred while getting all scopes ";
throw new IdentityOAuth2ScopeServerException(msg, e);
}
}
/**
 * Get only OAUTH2 scopes with pagination.
 *
 * @param offset   start index of the result set
 * @param limit    number of elements of the result set
 * @param tenantID tenant ID
 * @return available scope list
 * @throws IdentityOAuth2ScopeServerException IdentityOAuth2ScopeServerException
 */
@Override
public Set<Scope> getScopesWithPagination(Integer offset, Integer limit, int tenantID)
        throws IdentityOAuth2ScopeServerException {

    // OIDC scopes are not served through the OAUTH2 endpoint by default, so
    // delegate with includeOIDCScopes explicitly disabled.
    return getScopesWithPagination(offset, limit, tenantID, false);
}
/**
 * Build the paginated OAuth2-scope query appropriate for the connected database
 * and bind its named parameters.
 *
 * @param offset   Offset.
 * @param limit    Limit.
 * @param tenantID Tenant ID.
 * @param conn     Database connection.
 * @return a {@link NamedPreparedStatement} ready to execute
 * @throws SQLException if driver metadata or parameter binding fails
 */
private NamedPreparedStatement getPreparedStatementForGetScopesWithPagination(Integer offset, Integer limit,
                                                                              int tenantID, Connection conn)
        throws SQLException {

    String query;
    String driverName = conn.getMetaData().getDriverName();
    if (driverName.contains("MySQL")
            || driverName.contains("MariaDB")
            || driverName.contains("H2")) {
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_MYSQL;
    } else if (conn.getMetaData().getDatabaseProductName().contains("DB2")) {
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_DB2SQL;
    } else if (driverName.contains("MS SQL")
            || driverName.contains("Microsoft")
            || driverName.contains("microsoft")) {
        // Legacy and current Microsoft JDBC drivers were previously handled by
        // two duplicate branches assigning the same query; merged into one.
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_MSSQL;
    } else if (driverName.contains("PostgreSQL")) {
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_POSTGRESQL;
    } else if (driverName.contains("Informix")) {
        // Driver name = "IBM Informix JDBC Driver for IBM Informix Dynamic Server"
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_INFORMIX;
    } else {
        // Default to the Oracle variant for unrecognized drivers.
        query = SQLQueries.RETRIEVE_SCOPES_WITH_PAGINATION_ORACLE;
    }
    NamedPreparedStatement namedPreparedStatement = new NamedPreparedStatement(conn, query);
    namedPreparedStatement
            .setString(Oauth2ScopeConstants.SQLPlaceholders.SCOPE_TYPE, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.TENANT_ID, tenantID);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.OFFSET, offset);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.LIMIT, limit);
    return namedPreparedStatement;
}
/**
 * Build the paginated all-scopes query (OAuth2 plus OIDC) appropriate for the
 * connected database and bind its named parameters.
 *
 * @param offset   Offset.
 * @param limit    Limit.
 * @param tenantID Tenant ID.
 * @param conn     Database connection.
 * @return a {@link NamedPreparedStatement} ready to execute
 * @throws SQLException if driver metadata or parameter binding fails
 */
private NamedPreparedStatement getPreparedStatementForGetAllScopesWithPagination(Integer offset, Integer limit,
                                                                                 int tenantID, Connection conn)
        throws SQLException {

    String query;
    String driverName = conn.getMetaData().getDriverName();
    if (driverName.contains("MySQL")
            || driverName.contains("MariaDB")
            || driverName.contains("H2")) {
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_MYSQL;
    } else if (conn.getMetaData().getDatabaseProductName().contains("DB2")) {
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_DB2SQL;
    } else if (driverName.contains("MS SQL")
            || driverName.contains("Microsoft")
            || driverName.contains("microsoft")) {
        // Legacy and current Microsoft JDBC drivers were previously handled by
        // two duplicate branches assigning the same query; merged into one.
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_MSSQL;
    } else if (driverName.contains("PostgreSQL")) {
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_POSTGRESQL;
    } else if (driverName.contains("Informix")) {
        // Driver name = "IBM Informix JDBC Driver for IBM Informix Dynamic Server"
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_INFORMIX;
    } else {
        // Default to the Oracle variant for unrecognized drivers.
        query = SQLQueries.RETRIEVE_ALL_SCOPES_WITH_PAGINATION_ORACLE;
    }
    NamedPreparedStatement namedPreparedStatement = new NamedPreparedStatement(conn, query);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.TENANT_ID, tenantID);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.OFFSET, offset);
    namedPreparedStatement.setInt(Oauth2ScopeConstants.SQLPlaceholders.LIMIT, limit);
    return namedPreparedStatement;
}
/**
 * Get scopes with pagination, optionally including OIDC scopes.
 *
 * @param tenantID          Tenant ID.
 * @param includeOIDCScopes Whether OIDC scopes are included alongside OAuth2 scopes.
 * @param offset            Start index of the result set.
 * @param limit             Number of elements of the result set.
 * @return available scope list
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
@Override
public Set<Scope> getScopesWithPagination(Integer offset, Integer limit, int tenantID, Boolean includeOIDCScopes)
        throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get all scopes with pagination for tenantId :" + tenantID + " including OIDC scope: " +
                includeOIDCScopes);
    }
    Map<Integer, Scope> scopeMap = new HashMap<>();
    try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
        NamedPreparedStatement namedPreparedStatement;
        if (includeOIDCScopes) {
            namedPreparedStatement = getPreparedStatementForGetAllScopesWithPagination(offset, limit, tenantID,
                    conn);
        } else {
            namedPreparedStatement =
                    getPreparedStatementForGetScopesWithPagination(offset, limit, tenantID, conn);
        }
        try (PreparedStatement preparedStatement = namedPreparedStatement.getPreparedStatement()) {
            try (ResultSet rs = preparedStatement.executeQuery()) {
                while (rs.next()) {
                    int scopeID = rs.getInt(1);
                    String name = rs.getString(2);
                    String displayName = rs.getString(3);
                    String description = rs.getString(4);
                    String binding = rs.getString(5);
                    // Single map lookup instead of repeated containsKey/get calls.
                    Scope scope = scopeMap.get(scopeID);
                    if (scope == null) {
                        // NOTE: this path uses the (name, displayName, description, bindings)
                        // constructor, unlike the binding-type-aware retrieval methods.
                        scope = new Scope(name, displayName, description, new ArrayList<String>());
                        scopeMap.put(scopeID, scope);
                    } else {
                        scope.setName(name);
                        scope.setDescription(description);
                        scope.setDisplayName(displayName);
                    }
                    if (binding != null) {
                        if (scope.getBindings() != null) {
                            scope.addBinding(binding);
                        } else {
                            // Plain list creation instead of the previous double-brace
                            // initialization (anonymous class holding an outer reference).
                            List<String> bindings = new ArrayList<>();
                            bindings.add(binding);
                            scope.setBindings(bindings);
                        }
                    }
                }
            }
        }
        return new HashSet<>(scopeMap.values());
    } catch (SQLException e) {
        String msg = "Error occurred while getting all scopes with pagination ";
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Get a scope by name.
 *
 * @param name     name of the scope
 * @param tenantID tenant ID
 * @return the merged {@link Scope} with all of its bindings, or {@code null} if no row matched
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
@Override
public Scope getScopeByName(String name, int tenantID) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get scope by name called for scope name:" + name);
    }
    Scope scope = null;
    String sql;
    try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
        // Oracle requires its own query variant.
        if (conn.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
            sql = SQLQueries.RETRIEVE_SCOPE_BY_NAME_ORACLE;
        } else {
            sql = SQLQueries.RETRIEVE_SCOPE_BY_NAME;
        }
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, name);
            ps.setInt(2, tenantID);
            ps.setString(3, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
            try (ResultSet rs = ps.executeQuery()) {
                String description = null;
                String displayName = null;
                // One row per binding; keep the first non-blank descriptive columns.
                while (rs.next()) {
                    if (StringUtils.isBlank(description)) {
                        description = rs.getString(3);
                    }
                    if (StringUtils.isBlank(displayName)) {
                        displayName = rs.getString(2);
                    }
                    String bindingType = rs.getString(5);
                    if (bindingType == null) {
                        // Rows without an explicit binding type use the default binding.
                        bindingType = DEFAULT_SCOPE_BINDING;
                    }
                    if (scope == null) {
                        scope = new Scope(name, displayName, new ArrayList<>(), description);
                    }
                    scope.addScopeBinding(bindingType, rs.getString(4));
                }
            }
        }
        return scope;
    } catch (SQLException e) {
        // Fixed: the previous message said "by ID" although this lookup is by name.
        String msg = "Error occurred while getting scope by name ";
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Check whether an OAuth2 scope with the given name exists for the tenant.
 *
 * @param scopeName name of the scope
 * @param tenantID  tenant ID
 * @return true if scope is exists
 * @throws IdentityOAuth2ScopeServerException IdentityOAuth2ScopeServerException
 */
@Override
public boolean isScopeExists(String scopeName, int tenantID) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Is scope exists called for scope:" + scopeName);
    }
    // A non-sentinel scope ID means the scope row is present.
    return getScopeIDByName(scopeName, tenantID) != Oauth2ScopeConstants.INVALID_SCOPE_ID;
}
/**
 * Check whether a scope with the given name exists for the tenant, optionally
 * ignoring the scope type so that OIDC scopes are also matched.
 *
 * @param scopeName         Name of the scope.
 * @param tenantID          Tenant ID.
 * @param includeOIDCScopes Whether to include OIDC scopes in the search.
 * @return True if scope is exists.
 * @throws IdentityOAuth2ScopeServerException
 */
@Override
public boolean isScopeExists(String scopeName, int tenantID, Boolean includeOIDCScopes)
        throws IdentityOAuth2ScopeServerException {

    if (!includeOIDCScopes) {
        // OAuth2-only existence check is already implemented by the two-argument variant.
        return isScopeExists(scopeName, tenantID);
    }
    if (log.isDebugEnabled()) {
        log.debug("Check scope exists regardless of scope type for scope:" + scopeName);
    }
    return getScopeIDByNameWithoutScopeType(scopeName, tenantID) != Oauth2ScopeConstants.INVALID_SCOPE_ID;
}
/**
 * Look up the internal scope ID for an OAuth2 scope name.
 *
 * @param scopeName name of the scope
 * @param tenantID  tenant ID
 * @return scope ID for the provided scope name
 * @throws IdentityOAuth2ScopeServerException IdentityOAuth2ScopeServerException
 */
@Override
public int getScopeIDByName(String scopeName, int tenantID) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Get scope ID by name called for scope name:" + scopeName);
    }
    try (Connection connection = IdentityDatabaseUtil.getDBConnection(false)) {
        return getScopeId(scopeName, tenantID, connection);
    } catch (SQLException e) {
        throw new IdentityOAuth2ScopeServerException("Error occurred while getting scope ID by name ", e);
    }
}
/**
 * Resolve the scope ID of an OAuth2 scope on an existing connection, returning
 * the invalid-ID sentinel when no row matches.
 */
private int getScopeId(String scopeName, int tenantID, Connection conn) throws SQLException {

    try (PreparedStatement statement = conn.prepareStatement(SQLQueries.RETRIEVE_SCOPE_ID_BY_NAME)) {
        statement.setString(1, scopeName);
        statement.setInt(2, tenantID);
        statement.setString(3, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
        try (ResultSet resultSet = statement.executeQuery()) {
            return resultSet.next() ? resultSet.getInt(1) : Oauth2ScopeConstants.INVALID_SCOPE_ID;
        }
    }
}
/**
 * Get scope ID of the provided scope regardless of scope type (matches both
 * OAuth2 and OIDC scopes).
 *
 * @param scopeName Scope name.
 * @param tenantID  Tenant ID.
 * @return the scope ID, or {@code Oauth2ScopeConstants.INVALID_SCOPE_ID} when no row matches
 * @throws IdentityOAuth2ScopeServerException on any SQL failure
 */
private int getScopeIDByNameWithoutScopeType(String scopeName, int tenantID)
throws IdentityOAuth2ScopeServerException {
if (log.isDebugEnabled()) {
log.debug("Get scope ID regardless of scope type, for scope name: " + scopeName);
}
// Sentinel is returned unchanged when the scope does not exist.
int scopeID = Oauth2ScopeConstants.INVALID_SCOPE_ID;
try (Connection conn = IdentityDatabaseUtil.getDBConnection(false)) {
try (PreparedStatement ps = conn
.prepareStatement(SQLQueries.RETRIEVE_SCOPE_ID_BY_NAME_WITHOUT_SCOPE_TYPE)) {
ps.setString(1, scopeName);
ps.setInt(2, tenantID);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
scopeID = rs.getInt(1);
}
}
}
return scopeID;
} catch (SQLException e) {
String msg = "Error occurred while getting scope ID by name.";
throw new IdentityOAuth2ScopeServerException(msg, e);
}
}
/**
 * Delete the scope with the given name (including its bindings, per the
 * DELETE_SCOPE_BY_NAME query) inside a transaction.
 *
 * @param name     name of the scope
 * @param tenantID tenant ID
 * @throws IdentityOAuth2ScopeServerException IdentityOAuth2ScopeServerException
 */
@Override
public void deleteScopeByName(String name, int tenantID) throws IdentityOAuth2ScopeServerException {
if (log.isDebugEnabled()) {
log.debug("Delete scope by name for scope name:" + name);
}
try (Connection conn = IdentityDatabaseUtil.getDBConnection()) {
try {
deleteScope(name, tenantID, conn);
IdentityDatabaseUtil.commitTransaction(conn);
} catch (SQLException e1) {
// Undo the partial delete before reporting the failure.
IdentityDatabaseUtil.rollbackTransaction(conn);
String msg = "Error occurred while deleting scopes ";
throw new IdentityOAuth2ScopeServerException(msg, e1);
}
} catch (SQLException e) {
// Failure to obtain/close the connection itself.
String msg = "Error occurred while deleting scopes ";
throw new IdentityOAuth2ScopeServerException(msg, e);
}
}
/**
 * Update a scope identified by its name: scope details are rewritten and all
 * bindings are replaced, atomically within one transaction.
 *
 * @param updatedScope details of the updated scope
 * @param tenantID     tenant ID
 * @throws IdentityOAuth2ScopeServerException IdentityOAuth2ScopeServerException
 */
@Override
public void updateScopeByName(Scope updatedScope, int tenantID) throws IdentityOAuth2ScopeServerException {

    if (log.isDebugEnabled()) {
        log.debug("Update scope by name for scope name:" + updatedScope.getName());
    }
    try (Connection conn = IdentityDatabaseUtil.getDBConnection()) {
        try {
            int scopeId = getScopeId(updatedScope.getName(), tenantID, conn);
            if (scopeId != Oauth2ScopeConstants.INVALID_SCOPE_ID) {
                // Rewrite the details, then re-create the bindings from scratch.
                updateScopeDetails(updatedScope, conn, scopeId);
                deleteBindings(scopeId, conn);
                addScopeBinding(updatedScope, conn, scopeId);
                IdentityDatabaseUtil.commitTransaction(conn);
            }
            // Unknown scope: nothing is changed and no transaction is committed.
        } catch (SQLException e1) {
            IdentityDatabaseUtil.rollbackTransaction(conn);
            // Fixed: the previous message said "by ID" although this update is by name.
            String msg = "Error occurred while updating scope by name ";
            throw new IdentityOAuth2ScopeServerException(msg, e1);
        }
    } catch (SQLException e) {
        String msg = "Error occurred while updating scope by name ";
        throw new IdentityOAuth2ScopeServerException(msg, e);
    }
}
/**
 * Insert a scope row and its bindings on an existing connection (no
 * commit/rollback here; the caller owns the transaction).
 *
 * @param scope    Scope.
 * @param conn     Databse connection.
 * @param tenantID Tenant ID.
 * @throws SQLException
 * @throws IdentityOAuth2ScopeClientException
 */
private void addScope(Scope scope, Connection conn, int tenantID)
throws SQLException {
//Adding the scope
if (scope != null) {
int scopeID = 0;
String dbProductName = conn.getMetaData().getDatabaseProductName();
// Ask the driver to return the auto-generated SCOPE_ID column.
try (PreparedStatement ps = conn.prepareStatement(SQLQueries.ADD_SCOPE, new String[]{
DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, Oauth2ScopeConstants.SCOPE_ID)})) {
ps.setString(1, scope.getName());
ps.setString(2, scope.getDisplayName());
ps.setString(3, scope.getDescription());
ps.setInt(4, tenantID);
ps.setString(5, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
ps.execute();
try (ResultSet rs = ps.getGeneratedKeys()) {
if (rs.next()) {
scopeID = rs.getInt(1);
}
}
}
// some JDBC Drivers returns this in the result, some don't
if (scopeID == 0) {
if (log.isDebugEnabled()) {
log.debug("JDBC Driver did not return the scope id, executing Select operation");
}
// Fall back to reading the ID back by name.
try (PreparedStatement ps = conn.prepareStatement(SQLQueries.RETRIEVE_SCOPE_ID_BY_NAME)) {
ps.setString(1, scope.getName());
ps.setInt(2, tenantID);
ps.setString(3, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
scopeID = rs.getInt(1);
}
}
}
}
// Persist the scope's bindings against the resolved ID.
addScopeBinding(scope, conn, scopeID);
}
}
/**
 * Persist every (binding type, binding value) pair of the given scope against
 * its scope ID, using a single JDBC batch.
 *
 * @param scope   Scope.
 * @param conn    Connection.
 * @param scopeID Scope ID.
 * @throws SQLException
 * @throws IdentityOAuth2ScopeClientException
 */
private void addScopeBinding(Scope scope, Connection conn, int scopeID)
        throws SQLException {

    try (PreparedStatement statement = conn.prepareStatement(SQLQueries.ADD_SCOPE_BINDING)) {
        for (ScopeBinding scopeBinding : scope.getScopeBindings()) {
            for (String binding : scopeBinding.getBindings()) {
                statement.setInt(1, scopeID);
                statement.setString(2, binding);
                statement.setString(3, scopeBinding.getBindingType());
                statement.addBatch();
            }
        }
        statement.executeBatch();
    }
}
/**
 * Remove the entire OAuth2 scope entry for the given name on an existing
 * connection (transaction is owned by the caller).
 *
 * @param scopeName Scope name.
 * @param tenantID  Tenant ID.
 * @param conn      Data-base connection object.
 * @throws SQLException
 */
private void deleteScope(String scopeName, int tenantID, Connection conn) throws SQLException {

    try (PreparedStatement statement = conn.prepareStatement(SQLQueries.DELETE_SCOPE_BY_NAME)) {
        statement.setString(1, scopeName);
        statement.setInt(2, tenantID);
        statement.setString(3, Oauth2ScopeConstants.SCOPE_TYPE_OAUTH2);
        statement.execute();
    }
}
/**
 * Remove only the binding rows of a scope, leaving the scope entry itself in
 * place (used before re-creating bindings on update).
 *
 * @param scopeId ID of the scope whose bindings are removed.
 * @param conn    Data-base connection.
 * @throws SQLException
 */
private void deleteBindings(int scopeId, Connection conn) throws SQLException {

    if (log.isDebugEnabled()) {
        log.debug("OIDC claim mapping exists for the scope ID: " + scopeId + ", hence delete only the " +
                "bindings of the scope");
    }
    try (PreparedStatement statement = conn.prepareStatement(SQLQueries.DELETE_BINDINGS_OF_SCOPE)) {
        statement.setInt(1, scopeId);
        statement.execute();
    }
}
/**
 * This method is to get resource scope key of the resource uri
 *
 * @param resourceUri Resource Path
 * @return Scope key of the resource, or {@code null} when no mapping exists
 * @throws IdentityOAuth2Exception if failed to find the resource scope
 * @deprecated retained for backward compatibility only.
 */
@Deprecated
public String findScopeOfResource(String resourceUri) throws IdentityOAuth2Exception {
if (log.isDebugEnabled()) {
log.debug("Retrieving scope for resource: " + resourceUri);
}
String sql;
try (Connection connection = IdentityDatabaseUtil.getDBConnection(false)) {
// Oracle requires its own query variant.
if (connection.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
sql = SQLQueries.RETRIEVE_SCOPE_NAME_FOR_RESOURCE_ORACLE;
} else {
sql = SQLQueries.RETRIEVE_SCOPE_NAME_FOR_RESOURCE;
}
try (PreparedStatement ps = connection.prepareStatement(sql)) {
ps.setString(1, resourceUri);
try (ResultSet rs = ps.executeQuery()) {
// Only the first matching scope name is returned.
if (rs.next()) {
return rs.getString("NAME");
}
}
return null;
}
} catch (SQLException e) {
String errorMsg = "Error getting scopes for resource - " + resourceUri + " : " + e.getMessage();
throw new IdentityOAuth2Exception(errorMsg, e);
}
}
/**
 * {@inheritDoc}
 *
 * <p>NOTE(review): this implementation is a stub — it always returns
 * {@code false} and never inspects the supplied connection, access token, or
 * resource URI. Confirm callers do not rely on real validation here.</p>
 */
@Override
public boolean validateScope(Connection connection, String accessToken, String resourceUri) {
return false;
}
/**
 * Get the list of roles associated for a given scope.
 *
 * @param scopeName name of the scope.
 * @param tenantId  Tenant Id
 * @return The Set of roles associated with the given scope.
 * @throws IdentityOAuth2Exception If an SQL error occurs while retrieving the roles.
 */
@Override
public Set<String> getBindingsOfScopeByScopeName(String scopeName, int tenantId) throws IdentityOAuth2Exception {

    if (log.isDebugEnabled()) {
        log.debug("Retrieving bindings of scope: " + scopeName + " tenant id: " + tenantId);
    }
    Set<String> bindings = new HashSet<>();
    // try-with-resources replaces the previous manual finally/closeAllConnections
    // handling, guaranteeing connection, statement and result set are all closed.
    try (Connection connection = IdentityDatabaseUtil.getDBConnection(false)) {
        String sql;
        if (connection.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
            sql = SQLQueries.RETRIEVE_BINDINGS_OF_SCOPE_FOR_TENANT_ORACLE;
        } else {
            sql = SQLQueries.RETRIEVE_BINDINGS_OF_SCOPE_FOR_TENANT;
        }
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setString(1, scopeName);
            ps.setInt(2, tenantId);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    String binding = rs.getString("SCOPE_BINDING");
                    if (StringUtils.isNotEmpty(binding)) {
                        bindings.add(binding);
                    }
                }
            }
        }
        if (log.isDebugEnabled()) {
            StringBuilder bindingStringBuilder = new StringBuilder();
            for (String binding : bindings) {
                bindingStringBuilder.append(binding).append(" ");
            }
            log.debug("Binding for scope: " + scopeName + " found: " + bindingStringBuilder.toString() + " tenant" +
                    " id: " + tenantId);
        }
        return bindings;
    } catch (SQLException e) {
        String errorMsg = "Error getting bindings of scope - " + scopeName;
        throw new IdentityOAuth2Exception(errorMsg, e);
    }
}
/**
* Get the list of roles associated for a given scope.
*
* @param scopeName Name of the scope.
* @return The Set of roles associated with the given scope.
* @throws IdentityOAuth2Exception If an SQL error occurs while retrieving the roles.
*/
@Deprecated
public Set<String> getBindingsOfScopeByScopeName(String scopeName) throws IdentityOAuth2Exception {
if (log.isDebugEnabled()) {
log.debug("Retrieving bindings of scope: " + scopeName);
}
Connection connection = IdentityDatabaseUtil.getDBConnection(false);
PreparedStatement ps = null;
ResultSet rs = null;
Set<String> bindings = new HashSet<>();
String sql;
try {
if (connection.getMetaData().getDriverName().contains(Oauth2ScopeConstants.DataBaseType.ORACLE)) {
sql = SQLQueries.RETRIEVE_BINDINGS_OF_SCOPE_ORACLE;
} else {
sql = SQLQueries.RETRIEVE_BINDINGS_OF_SCOPE;
}
ps = connection.prepareStatement(sql);
ps.setString(1, scopeName);
rs = ps.executeQuery();
while (rs.next()) {
String binding = rs.getString("SCOPE_BINDING");
if (StringUtils.isNotBlank(binding)) {
bindings.add(binding);
}
}
if (log.isDebugEnabled()) {
StringBuilder bindingsStringBuilder = new StringBuilder();
for (String binding : bindings) {
bindingsStringBuilder.append(binding).append(" ");
}
log.debug("Binding for scope: " + scopeName + " found: " + bindingsStringBuilder.toString());
}
return bindings;
} catch (SQLException e) {
String errorMsg = "Error getting roles of scope - " + scopeName;
throw new IdentityOAuth2Exception(errorMsg, e);
} finally {
IdentityDatabaseUtil.closeAllConnections(connection, rs, ps);
}
}
/**
* Update scope details on IDN_OAUTH2_SCOPE scope table.
*
* @param updatedScope Updated scope.
* @param conn Data-base connection.
* @param scopeId Scope ID.
* @throws SQLException
*/
public void updateScopeDetails(Scope updatedScope, Connection conn, int scopeId) throws SQLException {
// Update scope details on IDN_OAUTH2_SCOPE table.
try (PreparedStatement ps = conn.prepareStatement(SQLQueries.UPDATE_SCOPE)) {
ps.setString(1, updatedScope.getDisplayName());
ps.setString(2, updatedScope.getDescription());
ps.setInt(3, scopeId);
ps.execute();
}
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options.colors;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.options.colors.AttributesDescriptor;
import com.intellij.openapi.options.colors.ColorSettingsPage;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.CollectionComboBoxModel;
import com.intellij.ui.ColorPanel;
import com.intellij.ui.HyperlinkAdapter;
import com.intellij.ui.ListCellRendererWrapper;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.util.FontUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collections;
import java.util.Map;
/**
* @author cdr
*/
public class ColorAndFontDescriptionPanel extends JPanel {
private JPanel myPanel;
private ColorPanel myBackgroundChooser;
private ColorPanel myForegroundChooser;
private ColorPanel myEffectsColorChooser;
private ColorPanel myErrorStripeColorChooser;
private JBCheckBox myCbBackground;
private JBCheckBox myCbForeground;
private JBCheckBox myCbEffects;
private JBCheckBox myCbErrorStripe;
private Map<String, EffectType> myEffectsMap;
{
Map<String, EffectType> map = ContainerUtil.newLinkedHashMap();
map.put(ApplicationBundle.message("combobox.effect.underscored"), EffectType.LINE_UNDERSCORE);
map.put(ApplicationBundle.message("combobox.effect.boldunderscored"), EffectType.BOLD_LINE_UNDERSCORE);
map.put(ApplicationBundle.message("combobox.effect.underwaved"), EffectType.WAVE_UNDERSCORE);
map.put(ApplicationBundle.message("combobox.effect.bordered"), EffectType.BOXED);
map.put(ApplicationBundle.message("combobox.effect.strikeout"), EffectType.STRIKEOUT);
map.put(ApplicationBundle.message("combobox.effect.bold.dottedline"), EffectType.BOLD_DOTTED_LINE);
myEffectsMap = Collections.unmodifiableMap(map);
}
private JComboBox myEffectsCombo;
private JBCheckBox myCbBold;
private JBCheckBox myCbItalic;
private JLabel myLabelFont;
private JTextPane myInheritanceLabel;
private JBCheckBox myInheritAttributesBox;
public ColorAndFontDescriptionPanel() {
super(new BorderLayout());
add(myPanel, BorderLayout.CENTER);
setBorder(BorderFactory.createEmptyBorder(4, 0, 4, 4));
myEffectsCombo.setModel(new CollectionComboBoxModel(ContainerUtil.newArrayList(myEffectsMap.keySet())));
myEffectsCombo.setRenderer(new ListCellRendererWrapper<String>() {
@Override
public void customize(JList list, String value, int index, boolean selected, boolean hasFocus) {
setText(value != null ? value : "<invalid>");
}
});
ActionListener actionListener = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
onSettingsChanged(e);
}
};
for (JBCheckBox c : new JBCheckBox[]{myCbBackground, myCbForeground, myCbEffects, myCbErrorStripe, myCbItalic, myCbBold, myInheritAttributesBox}) {
c.addActionListener(actionListener);
}
for (ColorPanel c : new ColorPanel[]{myBackgroundChooser, myForegroundChooser, myEffectsColorChooser, myErrorStripeColorChooser}) {
c.addActionListener(actionListener);
}
myEffectsCombo.addActionListener(actionListener);
Messages.configureMessagePaneUi(myInheritanceLabel, "<html>", false);
myInheritanceLabel.addHyperlinkListener(new HyperlinkAdapter() {
@Override
protected void hyperlinkActivated(HyperlinkEvent e) {
onHyperLinkClicked(e);
}
});
myInheritanceLabel.setBorder(BorderFactory.createEmptyBorder());
myLabelFont.setVisible(false); // hide for now as it doesn't look that good
}
protected void onHyperLinkClicked(HyperlinkEvent e) {
}
protected void onSettingsChanged(ActionEvent e) {
myErrorStripeColorChooser.setEnabled(myCbErrorStripe.isSelected());
myForegroundChooser.setEnabled(myCbForeground.isSelected());
myBackgroundChooser.setEnabled(myCbBackground.isSelected());
myEffectsColorChooser.setEnabled(myCbEffects.isSelected());
myEffectsCombo.setEnabled(myCbEffects.isSelected());
}
public void resetDefault() {
myLabelFont.setEnabled(false);
myCbBold.setSelected(false);
myCbBold.setEnabled(false);
myCbItalic.setSelected(false);
myCbItalic.setEnabled(false);
updateColorChooser(myCbForeground, myForegroundChooser, false, false, null);
updateColorChooser(myCbBackground, myBackgroundChooser, false, false, null);
updateColorChooser(myCbErrorStripe, myErrorStripeColorChooser, false, false, null);
updateColorChooser(myCbEffects, myEffectsColorChooser, false, false, null);
myEffectsCombo.setEnabled(false);
}
private static void updateColorChooser(JCheckBox checkBox,
ColorPanel colorPanel,
boolean isEnabled,
boolean isChecked,
@Nullable Color color) {
checkBox.setEnabled(isEnabled);
checkBox.setSelected(isChecked);
if (color != null) {
colorPanel.setSelectedColor(color);
}
else {
colorPanel.setSelectedColor(Color.white);
}
colorPanel.setEnabled(isChecked);
}
public void reset(ColorAndFontDescription description) {
if (description.isFontEnabled()) {
myLabelFont.setEnabled(true);
myCbBold.setEnabled(true);
myCbItalic.setEnabled(true);
int fontType = description.getFontType();
myCbBold.setSelected((fontType & Font.BOLD) != 0);
myCbItalic.setSelected((fontType & Font.ITALIC) != 0);
}
else {
myLabelFont.setEnabled(false);
myCbBold.setSelected(false);
myCbBold.setEnabled(false);
myCbItalic.setSelected(false);
myCbItalic.setEnabled(false);
}
updateColorChooser(myCbForeground, myForegroundChooser, description.isForegroundEnabled(),
description.isForegroundChecked(), description.getForegroundColor());
updateColorChooser(myCbBackground, myBackgroundChooser, description.isBackgroundEnabled(),
description.isBackgroundChecked(), description.getBackgroundColor());
updateColorChooser(myCbErrorStripe, myErrorStripeColorChooser, description.isErrorStripeEnabled(),
description.isErrorStripeChecked(), description.getErrorStripeColor());
EffectType effectType = description.getEffectType();
updateColorChooser(myCbEffects, myEffectsColorChooser, description.isEffectsColorEnabled(),
description.isEffectsColorChecked(), description.getEffectColor());
if (description.isEffectsColorEnabled() && description.isEffectsColorChecked()) {
myEffectsCombo.setEnabled(true);
myEffectsCombo.getModel().setSelectedItem(ContainerUtil.reverseMap(myEffectsMap).get(effectType));
}
else {
myEffectsCombo.setEnabled(false);
}
setInheritanceInfo(description);
myLabelFont.setEnabled(myCbBold.isEnabled() || myCbItalic.isEnabled());
}
private void setInheritanceInfo(ColorAndFontDescription description) {
Pair<ColorSettingsPage, AttributesDescriptor> baseDescriptor = description.getBaseAttributeDescriptor();
if (baseDescriptor != null && baseDescriptor.second.getDisplayName() != null) {
String attrName = baseDescriptor.second.getDisplayName();
String attrLabel = attrName.replaceAll(ColorOptionsTree.NAME_SEPARATOR, FontUtil.rightArrow(UIUtil.getLabelFont()));
ColorSettingsPage settingsPage = baseDescriptor.first;
String style = "<div style=\"text-align:right\" vertical-align=\"top\">";
String tooltipText;
String labelText;
if (settingsPage != null) {
String pageName = settingsPage.getDisplayName();
tooltipText = "'" + attrLabel + "' from<br>'" + pageName + "' section";
labelText = style + "'" + attrLabel + "'<br>of <a href=\"" + attrName + "\">" + pageName;
}
else {
tooltipText = attrLabel;
labelText = style + attrLabel + "<br> ";
}
myInheritanceLabel.setText(labelText);
myInheritanceLabel.setToolTipText(tooltipText);
myInheritanceLabel.setEnabled(description.isInherited());
myInheritAttributesBox.setEnabled(true);
myInheritAttributesBox.setSelected(description.isInherited());
setEditEnabled(!description.isInherited(), description);
}
else {
myInheritanceLabel.setToolTipText(null);
myInheritanceLabel.setText("<html><br> ");
myInheritanceLabel.setEnabled(true);
myInheritAttributesBox.setEnabled(false);
myInheritAttributesBox.setSelected(false);
setEditEnabled(true, description);
}
}
private void setEditEnabled(boolean isEditEnabled, ColorAndFontDescription description) {
myCbBackground.setEnabled(isEditEnabled && description.isBackgroundEnabled());
myCbForeground.setEnabled(isEditEnabled && description.isForegroundEnabled());
myCbBold.setEnabled(isEditEnabled && description.isFontEnabled());
myCbItalic.setEnabled(isEditEnabled && description.isFontEnabled());
myCbEffects.setEnabled(isEditEnabled && description.isEffectsColorEnabled());
myCbErrorStripe.setEnabled(isEditEnabled && description.isErrorStripeEnabled());
myErrorStripeColorChooser.setEditable(isEditEnabled);
myEffectsColorChooser.setEditable(isEditEnabled);
myForegroundChooser.setEditable(isEditEnabled);
myBackgroundChooser.setEditable(isEditEnabled);
}
public void apply(ColorAndFontDescription description, EditorColorsScheme scheme) {
if (description != null) {
description.setInherited(myInheritAttributesBox.isSelected());
if (description.isInherited()) {
TextAttributes baseAttributes = description.getBaseAttributes();
if (baseAttributes != null) {
description.setFontType(baseAttributes.getFontType());
description.setForegroundChecked(baseAttributes.getForegroundColor() != null);
description.setForegroundColor(baseAttributes.getForegroundColor());
description.setBackgroundChecked(baseAttributes.getBackgroundColor() != null);
description.setBackgroundColor(baseAttributes.getBackgroundColor());
description.setErrorStripeChecked(baseAttributes.getErrorStripeColor() != null);
description.setErrorStripeColor(baseAttributes.getErrorStripeColor());
description.setEffectColor(baseAttributes.getEffectColor());
description.setEffectType(baseAttributes.getEffectType());
description.setEffectsColorChecked(baseAttributes.getEffectColor() != null);
}
else {
description.setInherited(false);
}
reset(description);
}
else {
setInheritanceInfo(description);
int fontType = Font.PLAIN;
if (myCbBold.isSelected()) fontType |= Font.BOLD;
if (myCbItalic.isSelected()) fontType |= Font.ITALIC;
description.setFontType(fontType);
description.setForegroundChecked(myCbForeground.isSelected());
description.setForegroundColor(myForegroundChooser.getSelectedColor());
description.setBackgroundChecked(myCbBackground.isSelected());
description.setBackgroundColor(myBackgroundChooser.getSelectedColor());
description.setErrorStripeChecked(myCbErrorStripe.isSelected());
description.setErrorStripeColor(myErrorStripeColorChooser.getSelectedColor());
description.setEffectsColorChecked(myCbEffects.isSelected());
description.setEffectColor(myEffectsColorChooser.getSelectedColor());
if (myEffectsCombo.isEnabled()) {
String effectType = (String)myEffectsCombo.getModel().getSelectedItem();
description.setEffectType(myEffectsMap.get(effectType));
}
}
description.apply(scheme);
}
}
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.common.math;
import java.math.BigDecimal;
import java.util.Comparator;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.modeshape.common.annotation.Immutable;
/**
* The {@link MathOperations math operations} for {@link Duration}s.
*/
@Immutable
public class DurationOperations implements MathOperations<Duration>, Comparator<Duration> {
@Override
public Class<Duration> getOperandClass() {
return Duration.class;
}
@Override
public Duration add( Duration value1,
Duration value2 ) {
if (value1 == null) return value2 != null ? value2 : createZeroValue();
if (value2 == null) return value1;
return value1.add(value2);
}
@Override
public Duration subtract( Duration value1,
Duration value2 ) {
if (value1 == null) return negate(value2);
if (value2 == null) return value1;
return value1.subtract(value2);
}
@Override
public Duration multiply( Duration value1,
Duration value2 ) {
if (value1 == null || value2 == null) return createZeroValue();
return value1.multiply(value2.longValue());
}
@Override
public double divide( Duration value1,
Duration value2 ) {
if (value1 == null || value2 == null) throw new IllegalArgumentException();
return value1.divide(value2);
}
@Override
public Duration negate( Duration value ) {
if (value == null) return createZeroValue();
return value.multiply(value.longValue() * -1);
}
@Override
public Duration increment( Duration value ) {
if (value == null) return createZeroValue();
return value.add(1l, TimeUnit.NANOSECONDS);
}
@Override
public Duration maximum( Duration value1,
Duration value2 ) {
if (value1 == null) return value2;
if (value2 == null) return value1;
return new Duration(Math.max(value1.longValue(), value2.longValue()));
}
@Override
public Duration minimum( Duration value1,
Duration value2 ) {
if (value1 == null) return value2;
if (value2 == null) return value1;
return new Duration(Math.min(value1.longValue(), value2.longValue()));
}
@Override
public int compare( Duration value1,
Duration value2 ) {
if (value1 == null) return value2 != null ? -1 : 0;
if (value2 == null) return 1;
return value1.compareTo(value2);
}
@Override
public BigDecimal asBigDecimal( Duration value ) {
return value != null ? value.toBigDecimal() : null;
}
@Override
public Duration fromBigDecimal( BigDecimal value ) {
return value != null ? new Duration(value.longValue()) : null;
}
@Override
public Duration createZeroValue() {
return new Duration(0l);
}
@Override
public Duration create( int value ) {
return new Duration(value);
}
@Override
public Duration create( long value ) {
return new Duration(value);
}
@Override
public Duration create( double value ) {
return new Duration((long)value);
}
@Override
public double sqrt( Duration value ) {
return Math.sqrt(value.longValue());
}
@Override
public Comparator<Duration> getComparator() {
return this;
}
@Override
public Duration random( Duration minimum,
Duration maximum,
Random rng ) {
Duration difference = subtract(maximum, minimum);
return new Duration(minimum.getDuratinInNanoseconds() + rng.nextInt(difference.intValue()));
}
@Override
public double doubleValue( Duration value ) {
return value.doubleValue();
}
@Override
public float floatValue( Duration value ) {
return value.floatValue();
}
@Override
public int intValue( Duration value ) {
return value.intValue();
}
@Override
public long longValue( Duration value ) {
return value.longValue();
}
@Override
public short shortValue( Duration value ) {
return value.shortValue();
}
@Override
public int getExponentInScientificNotation( Duration value ) {
long v = Math.abs(value.getDuratinInNanoseconds());
int exp = 0;
if (v > 1l) {
while (v >= 10l) {
v /= 10l;
++exp;
}
}
return exp;
}
@Override
public Duration roundUp( Duration durationValue,
int decimalShift ) {
long value = durationValue.longValue();
if (value == 0) return new Duration(0l);
if (decimalShift >= 0) return durationValue;
long shiftedValueP5 = Math.abs(value);
for (int i = 0; i != (-decimalShift - 1); ++i)
shiftedValueP5 /= 10l;
shiftedValueP5 += 5l;
long shiftedValue = shiftedValueP5 / 10l;
if (shiftedValue * 10l - shiftedValueP5 >= 5) ++shiftedValue;
shiftedValue *= Long.signum(value);
for (int i = 0; i != -decimalShift; ++i)
shiftedValue *= 10l;
return new Duration(shiftedValue);
}
@Override
public Duration roundDown( Duration durationValue,
int decimalShift ) {
long value = durationValue.longValue();
if (value == 0) return new Duration(0l);
if (decimalShift >= 0) return durationValue;
long shiftedValue = Math.abs(value);
for (int i = 0; i != -decimalShift; ++i)
shiftedValue /= 10l;
shiftedValue *= Long.signum(value);
for (int i = 0; i != -decimalShift; ++i)
shiftedValue *= 10l;
return new Duration(shiftedValue);
}
@Override
public Duration keepSignificantFigures( Duration value,
int numSigFigs ) {
if (numSigFigs < 0) return value;
if (numSigFigs == 0) return new Duration(0l);
int currentExp = getExponentInScientificNotation(value);
int decimalShift = -currentExp + numSigFigs - 1;
return roundUp(value, decimalShift);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app.job.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapTaskAttemptImpl;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletion;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.Locality;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.ClusterInfo;
import org.apache.hadoop.mapreduce.v2.app.MRApp;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptFinishingMonitor;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptTooManyFetchFailureEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
@SuppressWarnings({"unchecked", "rawtypes"})
public class TestTaskAttempt{
static public class StubbedFS extends RawLocalFileSystem {
@Override
public FileStatus getFileStatus(Path f) throws IOException {
return new FileStatus(1, false, 1, 1, 1, f);
}
}
@Test
public void testMRAppHistoryForMap() throws Exception {
MRApp app = new FailingAttemptsMRApp(1, 0);
testMRAppHistory(app);
}
@Test
public void testMRAppHistoryForReduce() throws Exception {
MRApp app = new FailingAttemptsMRApp(0, 1);
testMRAppHistory(app);
}
@Test
public void testMRAppHistoryForTAFailedInAssigned() throws Exception {
// test TA_CONTAINER_LAUNCH_FAILED for map
FailingAttemptsDuringAssignedMRApp app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED);
testTaskAttemptAssignedFailHistory(app);
// test TA_CONTAINER_LAUNCH_FAILED for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED);
testTaskAttemptAssignedFailHistory(app);
// test TA_CONTAINER_COMPLETED for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_CONTAINER_COMPLETED);
testTaskAttemptAssignedFailHistory(app);
// test TA_CONTAINER_COMPLETED for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_CONTAINER_COMPLETED);
testTaskAttemptAssignedFailHistory(app);
// test TA_FAILMSG for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_FAILMSG);
testTaskAttemptAssignedFailHistory(app);
// test TA_FAILMSG for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_FAILMSG);
testTaskAttemptAssignedFailHistory(app);
// test TA_FAILMSG_BY_CLIENT for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT);
testTaskAttemptAssignedFailHistory(app);
// test TA_FAILMSG_BY_CLIENT for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT);
testTaskAttemptAssignedFailHistory(app);
// test TA_KILL for map
app =
new FailingAttemptsDuringAssignedMRApp(1, 0,
TaskAttemptEventType.TA_KILL);
testTaskAttemptAssignedKilledHistory(app);
// test TA_KILL for reduce
app =
new FailingAttemptsDuringAssignedMRApp(0, 1,
TaskAttemptEventType.TA_KILL);
testTaskAttemptAssignedKilledHistory(app);
}
@Test
public void testSingleRackRequest() throws Exception {
TaskAttemptImpl.RequestContainerTransition rct =
new TaskAttemptImpl.RequestContainerTransition(false);
EventHandler eventHandler = mock(EventHandler.class);
String[] hosts = new String[3];
hosts[0] = "host1";
hosts[1] = "host2";
hosts[2] = "host3";
TaskSplitMetaInfo splitInfo =
new TaskSplitMetaInfo(hosts, 0, 128 * 1024 * 1024l);
TaskAttemptImpl mockTaskAttempt =
createMapTaskAttemptImplForTest(eventHandler, splitInfo);
TaskAttemptEvent mockTAEvent = mock(TaskAttemptEvent.class);
rct.transition(mockTaskAttempt, mockTAEvent);
ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(arg.capture());
if (!(arg.getAllValues().get(1) instanceof ContainerRequestEvent)) {
Assert.fail("Second Event not of type ContainerRequestEvent");
}
ContainerRequestEvent cre =
(ContainerRequestEvent) arg.getAllValues().get(1);
String[] requestedRacks = cre.getRacks();
//Only a single occurrence of /DefaultRack
assertEquals(1, requestedRacks.length);
}
@Test
public void testHostResolveAttempt() throws Exception {
TaskAttemptImpl.RequestContainerTransition rct =
new TaskAttemptImpl.RequestContainerTransition(false);
EventHandler eventHandler = mock(EventHandler.class);
String[] hosts = new String[3];
hosts[0] = "192.168.1.1";
hosts[1] = "host2";
hosts[2] = "host3";
TaskSplitMetaInfo splitInfo =
new TaskSplitMetaInfo(hosts, 0, 128 * 1024 * 1024l);
TaskAttemptImpl mockTaskAttempt =
createMapTaskAttemptImplForTest(eventHandler, splitInfo);
TaskAttemptImpl spyTa = spy(mockTaskAttempt);
when(spyTa.resolveHost(hosts[0])).thenReturn("host1");
spyTa.dataLocalHosts = spyTa.resolveHosts(splitInfo.getLocations());
TaskAttemptEvent mockTAEvent = mock(TaskAttemptEvent.class);
rct.transition(spyTa, mockTAEvent);
verify(spyTa).resolveHost(hosts[0]);
ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(arg.capture());
if (!(arg.getAllValues().get(1) instanceof ContainerRequestEvent)) {
Assert.fail("Second Event not of type ContainerRequestEvent");
}
Map<String, Boolean> expected = new HashMap<String, Boolean>();
expected.put("host1", true);
expected.put("host2", true);
expected.put("host3", true);
ContainerRequestEvent cre =
(ContainerRequestEvent) arg.getAllValues().get(1);
String[] requestedHosts = cre.getHosts();
for (String h : requestedHosts) {
expected.remove(h);
}
assertEquals(0, expected.size());
}
@Test
public void testMillisCountersUpdate() throws Exception {
verifyMillisCounters(2048, 2048, 1024);
verifyMillisCounters(2048, 1024, 1024);
verifyMillisCounters(10240, 1024, 2048);
}
public void verifyMillisCounters(int mapMemMb, int reduceMemMb,
int minContainerSize) throws Exception {
Clock actualClock = new SystemClock();
ControlledClock clock = new ControlledClock(actualClock);
clock.setTime(10);
MRApp app =
new MRApp(1, 1, false, "testSlotMillisCounterUpdate", true, clock);
Configuration conf = new Configuration();
conf.setInt(MRJobConfig.MAP_MEMORY_MB, mapMemMb);
conf.setInt(MRJobConfig.REDUCE_MEMORY_MB, reduceMemMb);
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
minContainerSize);
app.setClusterInfo(new ClusterInfo(Resource.newInstance(10240, 1)));
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Map<TaskId, Task> tasks = job.getTasks();
Assert.assertEquals("Num tasks is not correct", 2, tasks.size());
Iterator<Task> taskIter = tasks.values().iterator();
Task mTask = taskIter.next();
app.waitForState(mTask, TaskState.RUNNING);
Task rTask = taskIter.next();
app.waitForState(rTask, TaskState.RUNNING);
Map<TaskAttemptId, TaskAttempt> mAttempts = mTask.getAttempts();
Assert.assertEquals("Num attempts is not correct", 1, mAttempts.size());
Map<TaskAttemptId, TaskAttempt> rAttempts = rTask.getAttempts();
Assert.assertEquals("Num attempts is not correct", 1, rAttempts.size());
TaskAttempt mta = mAttempts.values().iterator().next();
TaskAttempt rta = rAttempts.values().iterator().next();
app.waitForState(mta, TaskAttemptState.RUNNING);
app.waitForState(rta, TaskAttemptState.RUNNING);
clock.setTime(11);
app.getContext()
.getEventHandler()
.handle(new TaskAttemptEvent(mta.getID(), TaskAttemptEventType.TA_DONE));
app.getContext()
.getEventHandler()
.handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
app.waitForState(job, JobState.SUCCEEDED);
Assert.assertEquals(mta.getFinishTime(), 11);
Assert.assertEquals(mta.getLaunchTime(), 10);
Assert.assertEquals(rta.getFinishTime(), 11);
Assert.assertEquals(rta.getLaunchTime(), 10);
Counters counters = job.getAllCounters();
Assert.assertEquals((int) Math.ceil((float) mapMemMb / minContainerSize),
counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue());
Assert.assertEquals((int) Math.ceil((float) reduceMemMb / minContainerSize),
counters.findCounter(JobCounter.SLOTS_MILLIS_REDUCES).getValue());
Assert.assertEquals(1,
counters.findCounter(JobCounter.MILLIS_MAPS).getValue());
Assert.assertEquals(1,
counters.findCounter(JobCounter.MILLIS_REDUCES).getValue());
Assert.assertEquals(mapMemMb,
counters.findCounter(JobCounter.MB_MILLIS_MAPS).getValue());
Assert.assertEquals(reduceMemMb,
counters.findCounter(JobCounter.MB_MILLIS_REDUCES).getValue());
Assert.assertEquals(1,
counters.findCounter(JobCounter.VCORES_MILLIS_MAPS).getValue());
Assert.assertEquals(1,
counters.findCounter(JobCounter.VCORES_MILLIS_REDUCES).getValue());
}
/**
 * Builds a map {@link TaskAttemptImpl} for tests, defaulting the clock to
 * a fresh {@code SystemClock}.
 */
private TaskAttemptImpl createMapTaskAttemptImplForTest(
    EventHandler eventHandler, TaskSplitMetaInfo taskSplitMetaInfo) {
  return createMapTaskAttemptImplForTest(eventHandler, taskSplitMetaInfo,
      new SystemClock());
}
/**
 * Builds a minimal map {@link TaskAttemptImpl} (fixed app/job/task ids,
 * mocked listener and job file, default JobConf) driven by the given clock.
 */
private TaskAttemptImpl createMapTaskAttemptImplForTest(
    EventHandler eventHandler, TaskSplitMetaInfo taskSplitMetaInfo, Clock clock) {
  ApplicationId applicationId = ApplicationId.newInstance(1, 1);
  JobId jobId = MRBuilderUtils.newJobId(applicationId, 1);
  TaskId mapTaskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptListener listener = mock(TaskAttemptListener.class);
  Path mockJobFile = mock(Path.class);
  JobConf conf = new JobConf();
  // Token, credentials, committer and app context are not needed here.
  return new MapTaskAttemptImpl(mapTaskId, 1, eventHandler, mockJobFile, 1,
      taskSplitMetaInfo, conf, listener, null, null, clock, null);
}
/**
 * Runs the given app to the FAILED state and verifies the task state, the
 * number of attempts, and the diagnostics recorded for the failed attempts.
 */
private void testMRAppHistory(MRApp app) throws Exception {
  Configuration conf = new Configuration();
  Job job = app.submit(conf);
  app.waitForState(job, JobState.FAILED);
  Map<TaskId, Task> tasks = job.getTasks();
  Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
  Task task = tasks.values().iterator().next();
  Assert.assertEquals("Task state not correct", TaskState.FAILED,
      task.getReport().getTaskState());
  // Reuse the task local instead of re-iterating the tasks map.
  Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
  Assert.assertEquals("Num attempts is not correct", 4, attempts.size());
  Iterator<TaskAttempt> it = attempts.values().iterator();
  TaskAttemptReport report = it.next().getReport();
  Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
      report.getTaskAttemptState());
  Assert.assertEquals("Diagnostic Information is not Correct",
      "Test Diagnostic Event", report.getDiagnosticInfo());
  report = it.next().getReport();
  Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
      report.getTaskAttemptState());
}
/**
 * Runs the given failing app to the FAILED state and verifies that both a
 * TA-started and a TA-failed job-history event were observed.
 */
private void testTaskAttemptAssignedFailHistory
    (FailingAttemptsDuringAssignedMRApp app) throws Exception {
  Configuration conf = new Configuration();
  Job job = app.submit(conf);
  app.waitForState(job, JobState.FAILED);
  // Removed an unused "tasks" local (job.getTasks() result was never read).
  Assert.assertTrue("No Ta Started JH Event", app.getTaStartJHEvent());
  Assert.assertTrue("No Ta Failed JH Event", app.getTaFailedJHEvent());
}
/**
 * Submits a job on the given app, waits for its single attempt to be
 * killed while assigned, and verifies the TA started/killed job-history
 * events were both observed.
 */
private void testTaskAttemptAssignedKilledHistory
    (FailingAttemptsDuringAssignedMRApp app) throws Exception {
  Job job = app.submit(new Configuration());
  app.waitForState(job, JobState.RUNNING);
  Task task = job.getTasks().values().iterator().next();
  app.waitForState(task, TaskState.SCHEDULED);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  app.waitForState(attempt, TaskAttemptState.KILLED);
  Assert.assertTrue("No Ta Started JH Event", app.getTaStartJHEvent());
  Assert.assertTrue("No Ta Killed JH Event", app.getTaKilledJHEvent());
}
// MRApp variant whose attempts always fail: every launched attempt gets a
// known diagnostics string followed immediately by TA_FAILMSG, and the
// job-history handler asserts the diagnostics were recorded.
static class FailingAttemptsMRApp extends MRApp {
FailingAttemptsMRApp(int maps, int reduces) {
super(maps, reduces, true, "FailingAttemptsMRApp", true);
}
@Override
protected void attemptLaunched(TaskAttemptId attemptID) {
// Fail the attempt as soon as it launches, attaching the diagnostic
// string that the history handler below checks for.
getContext().getEventHandler().handle(
new TaskAttemptDiagnosticsUpdateEvent(attemptID,
"Test Diagnostic Event"));
getContext().getEventHandler().handle(
new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_FAILMSG));
}
// NOTE(review): presumably overrides MRApp.createJobHistoryHandler — an
// @Override annotation here would let the compiler confirm that.
protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
AppContext context) {
return new EventHandler<JobHistoryEvent>() {
@Override
public void handle(JobHistoryEvent event) {
// Only MAP_ATTEMPT_FAILED events are inspected. Field index 8 of the
// Avro TaskAttemptUnsuccessfulCompletion record is assumed to be the
// diagnostics field — TODO confirm against the Avro schema.
if (event.getType() == org.apache.hadoop.mapreduce.jobhistory.EventType.MAP_ATTEMPT_FAILED) {
TaskAttemptUnsuccessfulCompletion datum = (TaskAttemptUnsuccessfulCompletion) event
.getHistoryEvent().getDatum();
Assert.assertEquals("Diagnostic Information is not Correct",
"Test Diagnostic Event", datum.get(8).toString());
}
}
};
}
}
// MRApp variant that never sends TA_CONTAINER_LAUNCHED (so attempts stay in
// the ASSIGNED state) and instead fires a configurable fail/kill event as
// soon as each attempt launches. Records which job-history events it saw.
static class FailingAttemptsDuringAssignedMRApp extends MRApp {

  // Event fired at every attempt as soon as it launches.
  TaskAttemptEventType sendFailEvent;
  private boolean receiveTaStartJHEvent = false;
  private boolean receiveTaFailedJHEvent = false;
  private boolean receiveTaKilledJHEvent = false;

  FailingAttemptsDuringAssignedMRApp(int maps, int reduces,
      TaskAttemptEventType event) {
    super(maps, reduces, true, "FailingAttemptsMRApp", true);
    sendFailEvent = event;
  }

  @Override
  protected void containerLaunched(TaskAttemptId attemptID,
      int shufflePort) {
    // Intentionally empty: never send TA_CONTAINER_LAUNCHED, so the
    // attempt remains in the ASSIGNED state.
  }

  @Override
  protected void attemptLaunched(TaskAttemptId attemptID) {
    getContext().getEventHandler().handle(
        new TaskAttemptEvent(attemptID, sendFailEvent));
  }

  public boolean getTaStartJHEvent(){
    return receiveTaStartJHEvent;
  }

  public boolean getTaFailedJHEvent(){
    return receiveTaFailedJHEvent;
  }

  public boolean getTaKilledJHEvent(){
    return receiveTaKilledJHEvent;
  }

  protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
      AppContext context) {
    return new EventHandler<JobHistoryEvent>() {
      @Override
      public void handle(JobHistoryEvent event) {
        // Record started/failed/killed history events for both map and
        // reduce attempts; everything else is ignored.
        switch (event.getType()) {
        case MAP_ATTEMPT_STARTED:
        case REDUCE_ATTEMPT_STARTED:
          receiveTaStartJHEvent = true;
          break;
        case MAP_ATTEMPT_FAILED:
        case REDUCE_ATTEMPT_FAILED:
          receiveTaFailedJHEvent = true;
          break;
        case MAP_ATTEMPT_KILLED:
        case REDUCE_ATTEMPT_KILLED:
          receiveTaKilledJHEvent = true;
          break;
        default:
          break;
        }
      }
    };
  }
}
@Test
public void testLaunchFailedWhileKilling() throws Exception {
  // Scenario: TA_KILL arrives right after container assignment; the
  // subsequent cleanup and launch-failure events must be handled without
  // an internal error, and locality must remain NODE_LOCAL.
  ApplicationId applicationId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(applicationId, 0);
  JobId jobId = MRBuilderUtils.newJobId(applicationId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path mockJobFile = mock(Path.class);
  MockEventHandler handler = new MockEventHandler();
  TaskAttemptListener listener = mock(TaskAttemptListener.class);
  when(listener.getAddress())
      .thenReturn(new InetSocketAddress("localhost", 0));
  JobConf conf = new JobConf();
  conf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  conf.setBoolean("fs.file.impl.disable.cache", true);
  conf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  conf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splitInfo = mock(TaskSplitMetaInfo.class);
  when(splitInfo.getLocations()).thenReturn(new String[] {"127.0.0.1"});
  TaskAttemptImpl attempt =
      new MapTaskAttemptImpl(taskId, 1, handler, mockJobFile, 1,
          splitInfo, conf, listener,
          new Token(), new Credentials(),
          new SystemClock(), null);
  // Container node matches the split host, so the attempt is node-local.
  NodeId nodeId = NodeId.newInstance("127.0.0.1", 0);
  ContainerId containerId =
      ContainerId.newContainerId(applicationAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(containerId);
  when(container.getNodeId()).thenReturn(nodeId);
  attempt.handle(new TaskAttemptEvent(taskAttemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  attempt.handle(new TaskAttemptContainerAssignedEvent(taskAttemptId,
      container, mock(Map.class)));
  attempt.handle(new TaskAttemptEvent(taskAttemptId,
      TaskAttemptEventType.TA_KILL));
  attempt.handle(new TaskAttemptEvent(taskAttemptId,
      TaskAttemptEventType.TA_CONTAINER_CLEANED));
  attempt.handle(new TaskAttemptEvent(taskAttemptId,
      TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED));
  assertFalse(handler.internalError);
  assertEquals("Task attempt is not assigned on the local node",
      Locality.NODE_LOCAL, attempt.getLocality());
}
@Test
public void testContainerCleanedWhileRunning() throws Exception {
  // TA_CONTAINER_CLEANED while the attempt is RUNNING must not raise an
  // internal error; the container runs on a different host than the split
  // (same rack), so locality must be RACK_LOCAL.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] {"127.0.0.1"});
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          splits, jobConf, taListener,
          new Token(), new Credentials(),
          new SystemClock(), appCtx);
  // Split is on 127.0.0.1 but the container is on 127.0.0.2 -> rack-local.
  NodeId nid = NodeId.newInstance("127.0.0.2", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
      container, mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  // assertEquals takes (message, expected, actual); the original swapped
  // expected and actual, which garbles failure messages.
  assertEquals("Task attempt is not in running state",
      TaskAttemptState.RUNNING, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_CONTAINER_CLEANED));
  assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
      eventHandler.internalError);
  assertEquals("Task attempt is not assigned on the local rack",
      Locality.RACK_LOCAL, taImpl.getLocality());
}
@Test
public void testContainerCleanedWhileCommitting() throws Exception {
  // TA_CONTAINER_CLEANED while the attempt is COMMIT_PENDING must not
  // raise an internal error; with no split locations the attempt's
  // locality must be OFF_SWITCH.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  // No data-local hosts: locality should resolve to OFF_SWITCH.
  when(splits.getLocations()).thenReturn(new String[] {});
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          splits, jobConf, taListener,
          new Token(), new Credentials(),
          new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
      container, mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_COMMIT_PENDING));
  // assertEquals takes (message, expected, actual); original was swapped.
  assertEquals("Task attempt is not in commit pending state",
      TaskAttemptState.COMMIT_PENDING, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_CONTAINER_CLEANED));
  assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
      eventHandler.internalError);
  assertEquals("Task attempt locality is not OFF_SWITCH",
      Locality.OFF_SWITCH, taImpl.getLocality());
}
@Test
public void testDoubleTooManyFetchFailure() throws Exception {
  // A SUCCEEDED map attempt failed by too-many-fetch-failures moves to
  // FAILED; a second TA_TOO_MANY_FETCH_FAILURE must be ignored without an
  // internal error.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  TaskId reduceTaskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.REDUCE);
  TaskAttemptId reduceTAId =
      MRBuilderUtils.newTaskAttemptId(reduceTaskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] {"127.0.0.1"});
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          splits, jobConf, taListener,
          new Token(), new Credentials(),
          new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
      container, mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_DONE));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_CONTAINER_COMPLETED));
  // assertEquals takes (message, expected, actual); originals were swapped.
  assertEquals("Task attempt is not in succeeded state",
      TaskAttemptState.SUCCEEDED, taImpl.getState());
  taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
      reduceTAId, "Host"));
  assertEquals("Task attempt is not in FAILED state",
      TaskAttemptState.FAILED, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
  assertEquals("Task attempt is not in FAILED state, still",
      TaskAttemptState.FAILED, taImpl.getState());
  // Message fixed: the event handled above is TA_TOO_MANY_FETCH_FAILURE,
  // not TA_CONTAINER_CLEANED.
  assertFalse(
      "InternalError occurred trying to handle TA_TOO_MANY_FETCH_FAILURE",
      eventHandler.internalError);
}
@Test
public void testAppDiognosticEventOnUnassignedTask() throws Exception {
  // A diagnostics update and a kill on a scheduled-but-unassigned attempt
  // must both be handled without throwing or raising an internal error.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener,
      new Token(), new Credentials(), new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(attemptId,
      "Task got killed"));
  assertFalse(
      "InternalError occurred trying to handle TA_DIAGNOSTICS_UPDATE on unassigned task",
      eventHandler.internalError);
  // Replaced a tautological assertTrue(msg, true) / assertFalse(msg, true)
  // pair with a plain fail() in the catch block.
  try {
    taImpl.handle(new TaskAttemptEvent(attemptId,
        TaskAttemptEventType.TA_KILL));
  } catch (Exception e) {
    Assert.fail("Exception not expected for UNASSIGNED STATE KILL event: "
        + e);
  }
}
@Test
public void testTooManyFetchFailureAfterKill() throws Exception {
  // A TA_TOO_MANY_FETCH_FAILURE arriving after the attempt was killed must
  // leave it KILLED and must not cause an internal error.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] {"127.0.0.1"});
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          splits, jobConf, taListener,
          mock(Token.class), new Credentials(),
          new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
      container, mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_DONE));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_CONTAINER_COMPLETED));
  // assertEquals takes (message, expected, actual); originals were swapped.
  assertEquals("Task attempt is not in succeeded state",
      TaskAttemptState.SUCCEEDED, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_KILL));
  assertEquals("Task attempt is not in KILLED state",
      TaskAttemptState.KILLED, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
  assertEquals("Task attempt is not in KILLED state, still",
      TaskAttemptState.KILLED, taImpl.getState());
  // Message fixed: the event handled above is TA_TOO_MANY_FETCH_FAILURE,
  // not TA_CONTAINER_CLEANED.
  assertFalse(
      "InternalError occurred trying to handle TA_TOO_MANY_FETCH_FAILURE",
      eventHandler.internalError);
}
@Test
public void testAppDiognosticEventOnNewTask() throws Exception {
  // A diagnostics update delivered while the attempt is still NEW (never
  // scheduled) must be handled without an internal error.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener,
      new Token(), new Credentials(), new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(attemptId,
      "Task got killed"));
  // Message fixed: this attempt is in the NEW state, not assigned.
  assertFalse(
      "InternalError occurred trying to handle TA_DIAGNOSTICS_UPDATE on new task",
      eventHandler.internalError);
}
@Test
public void testFetchFailureAttemptFinishTime() throws Exception{
  // The finish time stamped when a map attempt succeeds must not change
  // when the attempt is later failed by too-many-fetch-failures.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  TaskId reducetaskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.REDUCE);
  TaskAttemptId reduceTAId =
      MRBuilderUtils.newTaskAttemptId(reducetaskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] {"127.0.0.1"});
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
  TaskAttemptImpl taImpl =
      new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
          splits, jobConf, taListener, mock(Token.class), new Credentials(),
          new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.1", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
      container, mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_DONE));
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_CONTAINER_COMPLETED));
  // assertEquals takes (message, expected, actual); originals were swapped.
  assertEquals("Task attempt is not in succeeded state",
      TaskAttemptState.SUCCEEDED, taImpl.getState());
  assertTrue("Task Attempt finish time is not greater than 0",
      taImpl.getFinishTime() > 0);
  // Primitive long is sufficient; no need for a boxed Long comparison.
  long finishTime = taImpl.getFinishTime();
  // Sleep so that a wrongly re-stamped finish time would differ.
  Thread.sleep(5);
  taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
      reduceTAId, "Host"));
  assertEquals("Task attempt is not in Too Many Fetch Failure state",
      TaskAttemptState.FAILED, taImpl.getState());
  assertEquals("After TA_TOO_MANY_FETCH_FAILURE,"
      + " Task attempt finish time is not the same ",
      finishTime, taImpl.getFinishTime());
}
@Test
public void testContainerKillAfterAssigned() throws Exception {
  // Killing an attempt that has a container assigned (but not yet
  // launched) should move it to KILL_CONTAINER_CLEANUP.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId,
      0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener, new Token(),
      new Credentials(), new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.2", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
      mock(Map.class)));
  // assertEquals takes (message, expected, actual); also fixed the
  // "assinged" typo in the message.
  assertEquals("Task attempt is not in assigned state",
      TaskAttemptStateInternal.ASSIGNED, taImpl.getInternalState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_KILL));
  assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
      TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
      taImpl.getInternalState());
}
@Test
public void testContainerKillWhileRunning() throws Exception {
  // Killing a RUNNING attempt must not raise an internal error and should
  // move the attempt to KILL_CONTAINER_CLEANUP.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId,
      0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener, new Token(),
      new Credentials(), new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.2", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
      mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  // assertEquals takes (message, expected, actual); original was swapped.
  assertEquals("Task attempt is not in running state",
      TaskAttemptState.RUNNING, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_KILL));
  assertFalse("InternalError occurred trying to handle TA_KILL",
      eventHandler.internalError);
  assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
      TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
      taImpl.getInternalState());
}
@Test
public void testContainerKillWhileCommitPending() throws Exception {
  // Killing an attempt in COMMIT_PENDING must not raise an internal error
  // and should move the attempt to KILL_CONTAINER_CLEANUP.
  ApplicationId appId = ApplicationId.newInstance(1, 2);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId,
      0);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
  Path jobFile = mock(Path.class);
  MockEventHandler eventHandler = new MockEventHandler();
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(
      new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
  TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
  when(splits.getLocations()).thenReturn(new String[] { "127.0.0.1" });
  AppContext appCtx = mock(AppContext.class);
  ClusterInfo clusterInfo = mock(ClusterInfo.class);
  when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
  // Dropped an unused Resource mock that was stubbed but never wired in.
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, splits, jobConf, taListener, new Token(),
      new Credentials(), new SystemClock(), appCtx);
  NodeId nid = NodeId.newInstance("127.0.0.2", 0);
  ContainerId contId = ContainerId.newContainerId(appAttemptId, 3);
  Container container = mock(Container.class);
  when(container.getId()).thenReturn(contId);
  when(container.getNodeId()).thenReturn(nid);
  when(container.getNodeHttpAddress()).thenReturn("localhost:0");
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_SCHEDULE));
  taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
      mock(Map.class)));
  taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
  // assertEquals takes (message, expected, actual); original was swapped.
  assertEquals("Task attempt is not in running state",
      TaskAttemptState.RUNNING, taImpl.getState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_COMMIT_PENDING));
  assertEquals("Task should be in COMMIT_PENDING state",
      TaskAttemptStateInternal.COMMIT_PENDING, taImpl.getInternalState());
  taImpl.handle(new TaskAttemptEvent(attemptId,
      TaskAttemptEventType.TA_KILL));
  assertFalse("InternalError occurred trying to handle TA_KILL",
      eventHandler.internalError);
  assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
      TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
      taImpl.getInternalState());
}
@Test
public void testKillMapTaskWhileSuccessFinishing() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in SUCCEEDED state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the map task is killed when it is in SUCCESS_FINISHING_CONTAINER
// state, the state will move to KILL_CONTAINER_CLEANUP
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertEquals("Task attempt's internal state is not KILL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not KILL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.KILL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@Test
public void testKillMapTaskWhileFailFinishing() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_FAILMSG));
assertEquals("Task attempt is not in FAILED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the map task is killed when it is in FAIL_FINISHING_CONTAINER state,
// the state will stay in FAIL_FINISHING_CONTAINER.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@Test
public void testFailMapTaskByClient() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_CONTAINER_CLEANUP", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@Test
public void testTaskAttemptDiagnosticEventOnFinishing() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// TA_DIAGNOSTICS_UPDATE doesn't change state
taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(taImpl.getID(),
"Task got updated"));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@Test
public void testTimeoutWhileSuccessFinishing() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the task stays in SUCCESS_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_CONTAINER_CLEANUP", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@Test
public void testTimeoutWhileFailFinishing() throws Exception {
MockEventHandler eventHandler = new MockEventHandler();
TaskAttemptImpl taImpl = createTaskAttemptImpl(eventHandler);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_FAILMSG));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the task stays in FAIL_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
private void setupTaskAttemptFinishingMonitor(
EventHandler eventHandler, JobConf jobConf, AppContext appCtx) {
TaskAttemptFinishingMonitor taskAttemptFinishingMonitor =
new TaskAttemptFinishingMonitor(eventHandler);
taskAttemptFinishingMonitor.init(jobConf);
when(appCtx.getTaskAttemptFinishingMonitor()).
thenReturn(taskAttemptFinishingMonitor);
}
private TaskAttemptImpl createTaskAttemptImpl(
MockEventHandler eventHandler) {
ApplicationId appId = ApplicationId.newInstance(1, 2);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 0);
JobId jobId = MRBuilderUtils.newJobId(appId, 1);
TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
Path jobFile = mock(Path.class);
TaskAttemptListener taListener = mock(TaskAttemptListener.class);
when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));
JobConf jobConf = new JobConf();
jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
jobConf.setBoolean("fs.file.impl.disable.cache", true);
jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, "10");
TaskSplitMetaInfo splits = mock(TaskSplitMetaInfo.class);
when(splits.getLocations()).thenReturn(new String[] {"127.0.0.1"});
AppContext appCtx = mock(AppContext.class);
ClusterInfo clusterInfo = mock(ClusterInfo.class);
when(appCtx.getClusterInfo()).thenReturn(clusterInfo);
setupTaskAttemptFinishingMonitor(eventHandler, jobConf, appCtx);
TaskAttemptImpl taImpl =
new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
splits, jobConf, taListener,
mock(Token.class), new Credentials(),
new SystemClock(), appCtx);
NodeId nid = NodeId.newInstance("127.0.0.1", 0);
ContainerId contId = ContainerId.newInstance(appAttemptId, 3);
Container container = mock(Container.class);
when(container.getId()).thenReturn(contId);
when(container.getNodeId()).thenReturn(nid);
when(container.getNodeHttpAddress()).thenReturn("localhost:0");
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_SCHEDULE));
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
container, mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
return taImpl;
}
public static class MockEventHandler implements EventHandler {
public boolean internalError;
@Override
public void handle(Event event) {
if (event instanceof JobEvent) {
JobEvent je = ((JobEvent) event);
if (JobEventType.INTERNAL_ERROR == je.getType()) {
internalError = true;
}
}
}
};
}
| |
/*
* Copyright 2010 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.VariableVisibilityAnalysis.VariableVisibility;
import com.google.javascript.rhino.Node;
/**
* Tests of {@link VariableVisibilityAnalysis}.
*
* @author dcc@google.com (Devin Coughlin)
*/
public final class VariableVisibilityAnalysisTest extends CompilerTestCase {
  // State captured by getProcessor() during the last analyze() call;
  // consumed by the assertion and search helpers below.
  private Compiler lastCompiler;
  private VariableVisibilityAnalysis lastAnalysis;
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    lastAnalysis = new VariableVisibilityAnalysis(compiler);
    lastCompiler = compiler;
    return lastAnalysis;
  }
  // Labels (e.g. "captured:") mark the var statements that the assertion
  // helpers look up via searchLabel().
  public void testCapturedVariables() {
    String source =
        "global:var global;\n" +
        "function Outer() {\n" +
        "  captured:var captured;\n" +
        "  notcaptured:var notCaptured;\n" +
        "  function Inner() {\n" +
        "    alert(captured);" +
        "  }\n" +
        "}\n";
    analyze(source);
    assertIsCapturedLocal("captured");
    assertIsUncapturedLocal("notcaptured");
  }
  public void testGlobals() {
    String source =
        "global:var global;";
    analyze(source);
    assertIsGlobal("global");
  }
  public void testParameters() {
    String source =
        "function A(a,b,c) {\n" +
        "}\n";
    analyze(source);
    assertIsParameter("a");
    assertIsParameter("b");
    assertIsParameter("c");
  }
  public void testFunctions() {
    String source =
        "function global() {\n" +
        "  function inner() {\n" +
        "  }\n" +
        "  function innerCaptured() {\n" +
        "    (function(){innerCaptured()})()\n" +
        "  }\n" +
        "}\n";
    analyze(source);
    assertFunctionHasVisibility("global",
        VariableVisibility.GLOBAL);
    assertFunctionHasVisibility("inner",
        VariableVisibility.LOCAL);
    assertFunctionHasVisibility("innerCaptured",
        VariableVisibility.CAPTURED_LOCAL);
  }
  /**
   * Asserts that the function with the given name has the given visibility.
   * The visibility is recorded on the function's NAME node (its first child).
   */
  private void assertFunctionHasVisibility(String functionName,
      VariableVisibility visibility) {
    Node functionNode = searchForFunction(functionName);
    assertNotNull(functionNode);
    Node nameNode = functionNode.getFirstChild();
    assertEquals(visibility, lastAnalysis.getVariableVisibility(nameNode));
  }
  /**
   * Asserts that the variable declared under the given label has the given
   * visibility.
   */
  private void assertLabeledVariableHasVisibility(String label,
      VariableVisibility visibility) {
    Node labeledVariable = searchLabel(label);
    Preconditions.checkState(labeledVariable.isVar());
    // VAR
    //   NAME
    Node nameNode = labeledVariable.getFirstChild();
    assertEquals(visibility, lastAnalysis.getVariableVisibility(nameNode));
  }
  private void assertIsCapturedLocal(String label) {
    assertLabeledVariableHasVisibility(label,
        VariableVisibility.CAPTURED_LOCAL);
  }
  private void assertIsUncapturedLocal(String label) {
    assertLabeledVariableHasVisibility(label,
        VariableVisibility.LOCAL);
  }
  private void assertIsGlobal(String label) {
    assertLabeledVariableHasVisibility(label,
        VariableVisibility.GLOBAL);
  }
  private void assertIsParameter(String parameterName) {
    Node parameterNode = searchForParameter(parameterName);
    assertNotNull(parameterNode);
    assertEquals(VariableVisibility.PARAMETER,
        lastAnalysis.getVariableVisibility(parameterNode));
  }
  // Compiles the source (via testSame) and stores the resulting analysis in
  // lastAnalysis for the helpers above.
  private VariableVisibilityAnalysis analyze(String src) {
    testSame(src);
    return lastAnalysis;
  }
  /**
   * Finds a parameter NAME node with the given name in the source AST.
   *
   * <p>Behavior is undefined if there are multiple parameters with
   * parameterName.
   */
  private Node searchForParameter(final String parameterName) {
    Preconditions.checkArgument(parameterName != null);
    // Single-element array used as a mutable result holder for the
    // anonymous callback.
    final Node[] foundNode = new Node[1];
    AbstractPostOrderCallback findParameter = new AbstractPostOrderCallback() {
      @Override
      public void visit(NodeTraversal t, Node n, Node parent) {
        if (n.getParent().isParamList()
            && parameterName.equals(n.getString())) {
          foundNode[0] = n;
        }
      }
    };
    NodeTraversal.traverseEs6(lastCompiler, lastCompiler.jsRoot, findParameter);
    return foundNode[0];
  }
  /**
   * Finds a function node with the given name in the source AST.
   *
   * <p>Behavior is undefined if there are multiple functions with
   * functionName.
   */
  private Node searchForFunction(final String functionName) {
    Preconditions.checkArgument(functionName != null);
    final Node[] foundNode = new Node[1];
    AbstractPostOrderCallback findFunction =
        new AbstractPostOrderCallback() {
      @Override
      public void visit(NodeTraversal t, Node n, Node parent) {
        if (n.isFunction() && functionName.equals(NodeUtil.getName(n))) {
          foundNode[0] = n;
        }
      }
    };
    NodeTraversal.traverseEs6(lastCompiler, lastCompiler.jsRoot, findFunction);
    return foundNode[0];
  }
  // Shamelessly stolen from NameReferenceGraphConstructionTest
  private Node searchLabel(String label) {
    LabeledVariableSearcher s = new LabeledVariableSearcher(label);
    NodeTraversal.traverseEs6(lastCompiler, lastCompiler.jsRoot, s);
    assertNotNull("Label " + label + " should be in the source code", s.found);
    return s.found;
  }
  /**
   * Quick traversal to find a given labeled variable in the AST.
   *
   * Finds the variable for foo in:
   * foo: var a = ...
   */
  private static class LabeledVariableSearcher extends AbstractPostOrderCallback {
    // The VAR node under the matching label, or null if not found.
    Node found = null;
    final String target;
    LabeledVariableSearcher(String target) {
      this.target = target;
    }
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isLabel() &&
          target.equals(n.getFirstChild().getString())) {
        // LABEL
        //   VAR
        //     NAME
        found = n.getLastChild();
      }
    }
  }
}
| |
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components;
import com.haulmont.cuba.gui.WindowManager.OpenType;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Map;
import java.util.function.Function;
public interface TokenList<V> extends Field<Collection<V>>,
        Component.BelongToFrame, Component.HasCaption, Component.Editable, Component.Focusable {
    // Component name used for XML layout registration.
    String NAME = "tokenList";
    /**
     * @return a property that is used for caption generation
     */
    String getCaptionProperty();
    /**
     * Sets a property that will be used for item caption generation when {@link CaptionMode#PROPERTY} is used.
     *
     * @param captionProperty property
     */
    void setCaptionProperty(String captionProperty);
    /**
     * @return item caption mode generation
     */
    CaptionMode getCaptionMode();
    /**
     * Sets how item caption should be generated.
     *
     * @param captionMode mode
     */
    void setCaptionMode(CaptionMode captionMode);
    /**
     * @return bound {@link CollectionDatasource} instance
     */
    @Override
    CollectionDatasource getDatasource();
    /**
     * Binds the given {@code datasource} with field.
     *
     * @param datasource {@link CollectionDatasource} instance
     */
    void setDatasource(CollectionDatasource datasource);
    /**
     * @return options filter mode
     */
    LookupField.FilterMode getFilterMode();
    /**
     * Sets the given {@code mode} to manage how options should be filtered.
     *
     * @param mode options filter mode
     */
    void setFilterMode(LookupField.FilterMode mode);
    /**
     * @return a property that is used for option captions generation
     */
    String getOptionsCaptionProperty();
    /**
     * Sets a property that will be used for option captions generation when {@link CaptionMode#PROPERTY} is used.
     *
     * @param captionProperty property
     */
    void setOptionsCaptionProperty(String captionProperty);
    /**
     * @return option captions mode generation
     */
    CaptionMode getOptionsCaptionMode();
    /**
     * Sets how option captions should be generated.
     *
     * @param captionMode mode
     */
    void setOptionsCaptionMode(CaptionMode captionMode);
    /**
     * @return {@link CollectionDatasource} instance that stores field options
     */
    CollectionDatasource getOptionsDatasource();
    /**
     * Sets the given {@code datasource} as options datasource.
     *
     * @param datasource options datasource
     */
    void setOptionsDatasource(CollectionDatasource datasource);
    /**
     * Sets whether options should be refreshed after lookup window closing.
     */
    void setRefreshOptionsOnLookupClose(boolean refresh);
    /**
     * @return whether options should be refreshed after lookup window closing
     */
    boolean isRefreshOptionsOnLookupClose();
    /**
     * @return list of explicitly specified field options
     */
    java.util.List getOptionsList();
    /**
     * Sets an explicit list of field options.
     *
     * @param optionsList options list
     */
    void setOptionsList(java.util.List optionsList);
    /**
     * @return map of explicitly specified field options, keyed by caption
     */
    Map<String, ?> getOptionsMap();
    /**
     * Sets an explicit map of field options, keyed by caption.
     *
     * @param map options map
     */
    void setOptionsMap(Map<String, ?> map);
    /**
     * @return whether inner LookupPickerField component has lookup action
     */
    boolean isLookup();
    /**
     * Sets whether inner LookupPickerField component should have lookup action
     *
     * @param lookup enable lookup action
     */
    void setLookup(boolean lookup);
    /**
     * @return lookup screen alias
     */
    String getLookupScreen();
    /**
     * Sets lookup screen alias.
     *
     * @param lookupScreen screen alias
     */
    void setLookupScreen(String lookupScreen);
    /**
     * Sets params that will be passed to lookup screen.
     *
     * @param params params
     */
    void setLookupScreenParams(Map<String, Object> params);
    /**
     * @return params that will be passed to lookup screen
     */
    @Nullable
    Map<String, Object> getLookupScreenParams();
    /**
     * @return clear button is enabled
     */
    boolean isClearEnabled();
    /**
     * Sets whether clear button is enabled or not
     *
     * @param clearEnabled clear button enabled
     */
    void setClearEnabled(boolean clearEnabled);
    /**
     * @return whether multiselect mode is enabled
     */
    boolean isMultiSelect();
    /**
     * Enables multiselect mode. It leads to the passing {@link com.haulmont.cuba.gui.WindowParams#MULTI_SELECT} param
     * to the lookup screen.
     *
     * @param multiselect multiselect
     */
    void setMultiSelect(boolean multiselect);
    /**
     * @return whether simple mode is used ("Add button" instead of LookupPickerField)
     */
    boolean isSimple();
    /**
     * Sets whether simple mode should be used ("Add button" instead of LookupPickerField)
     *
     * @param simple simple
     */
    void setSimple(boolean simple);
    /**
     * @return component editor (LookupPickerField / "Add" button) position
     */
    Position getPosition();
    /**
     * Sets component editor (LookupPickerField / "Add" button) position.
     * <p>
     * {@link Position#TOP} is the default.
     *
     * @param position editor position
     */
    void setPosition(Position position);
    /**
     * @return lookup screen open mode
     */
    OpenType getLookupOpenMode();
    /**
     * Sets lookup screen open mode.
     * <p>
     * {@link OpenType#THIS_TAB} is the default.
     *
     * @param lookupOpenMode open mode
     */
    void setLookupOpenMode(OpenType lookupOpenMode);
    /**
     * @return whether inline tokens mode should be used
     */
    boolean isInline();
    /**
     * Sets whether inline tokens mode should be used.
     *
     * @param inline inline mode
     */
    void setInline(boolean inline);
    /**
     * @return "Add" button caption
     */
    String getAddButtonCaption();
    /**
     * Sets "Add" button caption.
     *
     * @param caption caption
     */
    void setAddButtonCaption(String caption);
    /**
     * @return "Add" button icon
     */
    String getAddButtonIcon();
    /**
     * Sets "Add" button icon.
     *
     * @param icon icon
     */
    void setAddButtonIcon(String icon);
    /**
     * @return "Clear" button caption
     */
    String getClearButtonCaption();
    /**
     * Sets "Clear" button caption.
     *
     * @param caption caption
     */
    void setClearButtonCaption(String caption);
    /**
     * @return "Clear" button icon
     */
    String getClearButtonIcon();
    /**
     * Sets "Clear" button icon.
     *
     * @param icon icon
     */
    void setClearButtonIcon(String icon);
    /**
     * @return selected items change handler
     */
    ItemChangeHandler getItemChangeHandler();
    /**
     * Sets selected items change handler.
     *
     * @param handler items change handler
     */
    void setItemChangeHandler(ItemChangeHandler handler);
    /**
     * @return selected tokens click listener
     */
    ItemClickListener getItemClickListener();
    /**
     * Sets selected tokens click listener.
     *
     * @param itemClickListener items click listener
     */
    void setItemClickListener(ItemClickListener itemClickListener);
    /**
     * @return handler that is invoked after lookup screen closing
     */
    AfterLookupCloseHandler getAfterLookupCloseHandler();
    /**
     * Sets handler that is invoked after lookup screen closing.
     *
     * @param handler handler
     */
    void setAfterLookupCloseHandler(AfterLookupCloseHandler handler);
    /**
     * @return handler that is invoked when an item is selected in lookup screen
     */
    AfterLookupSelectionHandler getAfterLookupSelectionHandler();
    /**
     * Sets handler that is invoked when an item is selected in lookup screen.
     *
     * @param handler handler
     */
    void setAfterLookupSelectionHandler(AfterLookupSelectionHandler handler);
    /**
     * Sets a function that generates style names for tokens.
     *
     * @param tokenStyleGenerator style generator
     * @deprecated use a plain {@code Function<Object, String>} instead of {@link TokenStyleGenerator}
     */
    @Deprecated
    void setTokenStyleGenerator(Function<Object, String> tokenStyleGenerator);
    /**
     * @return function that generates style names for tokens
     * @deprecated use a plain {@code Function<Object, String>} instead of {@link TokenStyleGenerator}
     */
    @Deprecated
    Function<Object, String> getTokenStyleGenerator();
    /**
     * @return input prompt of LookupPickerField
     */
    String getLookupInputPrompt();
    /**
     * Sets the input prompt - a textual prompt that is displayed when the LookupPickerField
     * would otherwise be empty, to prompt the user for input.
     *
     * @param inputPrompt input prompt
     */
    void setLookupInputPrompt(String inputPrompt);
    /**
     * Enables to generate stylenames for tokens.
     *
     * @deprecated use a plain {@code Function<Object, String>} instead
     */
    @Deprecated
    interface TokenStyleGenerator extends Function<Object, String> {
        /**
         * Adapts the standard {@link Function} contract to {@link #getStyle(Object)}.
         */
        @Override
        default String apply(Object itemId) {
            return getStyle(itemId);
        }
        /**
         * @param itemId id of the item a token represents
         * @return style name for the token
         */
        String getStyle(Object itemId);
    }
    /**
     * Enables to handle selected items change.
     */
    interface ItemChangeHandler {
        /**
         * Invoked when item is added.
         *
         * @param item item
         */
        void addItem(Object item);
        /**
         * Invoked when item is removed.
         *
         * @param item item
         */
        void removeItem(Object item);
    }
    /**
     * Selected items click handler.
     */
    @FunctionalInterface
    interface ItemClickListener {
        /**
         * Invoked when selected item is clicked.
         *
         * @param item item
         */
        void onClick(Object item);
    }
    /**
     * Enables to handle lookup screen closing.
     */
    @FunctionalInterface
    interface AfterLookupCloseHandler {
        /**
         * Invoked when lookup screen is closed.
         *
         * @param window window
         * @param actionId action id
         */
        void onClose(Window window, String actionId);
    }
    /**
     * Enables to handle item selection in lookup screen.
     */
    @FunctionalInterface
    interface AfterLookupSelectionHandler {
        /**
         * Invoked when items are selected.
         *
         * @param items items
         */
        void onSelect(Collection items);
    }
    /**
     * Defines component editor position.
     */
    enum Position {
        /**
         * Editor is above tokens container.
         */
        TOP,
        /**
         * Editor is under tokens container.
         */
        BOTTOM
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes.patch;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.impl.patch.*;
import com.intellij.openapi.diff.impl.patch.formove.PatchApplier;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.FilePathImpl;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.actions.ShowDiffAction;
import com.intellij.openapi.vcs.changes.ui.ChangeListChooserPanel;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.CollectionListModel;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.Alarm;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.PropertyKey;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
* @author yole
*/
public class ApplyPatchDialog extends DialogWrapper {
  private final Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.changes.patch.ApplyPatchDialog");
  // UI components; presumably bound from the dialog's GUI-designer form,
  // since no explicit initialization is visible here -- TODO confirm.
  private JPanel myRootPanel;
  private TextFieldWithBrowseButton myFileNameField;
  private JLabel myStatusLabel;
  private TextFieldWithBrowseButton myBaseDirectoryField;
  private JSpinner myStripLeadingDirectoriesSpinner;
  private JList myPatchContentsList;
  private ChangeListChooserPanel myChangeListChooser;
  private JButton myShowDiffButton;
  // Patches parsed from the selected patch file; reset to null whenever the
  // file-name field changes, pending a reload.
  private List<FilePatch> myPatches;
  private Collection<FilePatch> myPatchesFailedToLoad;
  // Debounce alarms: patch loading is scheduled on the Swing thread, path
  // verification on a shared background thread.
  private final Alarm myLoadPatchAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);
  private final Alarm myVerifyPatchAlarm = new Alarm(Alarm.ThreadToUse.SHARED_THREAD);
  private String myLoadPatchError = null;
  private String myDetectedBaseDirectory = null;
  private int myDetectedStripLeadingDirs = -1;
  private final Project myProject;
  // Set while the dialog updates its own fields programmatically; document
  // listeners skip re-verification when this flag is set.
  private boolean myInnerChange;
  private LocalChangeList mySelectedChangeList;
  // Keyed by (before-path, after-path) pairs; presumably describes detected
  // moves/renames -- TODO confirm against the code that populates it.
  private final Map<Pair<String, String>, String> myMoveRenameInfo;
  /**
   * Creates the "Apply Patch" dialog and wires up all component listeners.
   *
   * @param project project the patch will be applied to
   */
  public ApplyPatchDialog(Project project) {
    super(project, true);
    myProject = project;
    setTitle(VcsBundle.message("patch.apply.dialog.title"));
    // Only .patch files and plain-text files are selectable in the chooser.
    final FileChooserDescriptor descriptor = new FileChooserDescriptor(true, false, false, false, false, false) {
      @Override
      public boolean isFileSelectable(VirtualFile file) {
        return file.getFileType() == StdFileTypes.PATCH || file.getFileType() == FileTypes.PLAIN_TEXT;
      }
    };
    myMoveRenameInfo = new HashMap<Pair<String, String>, String>();
    myFileNameField.addBrowseFolderListener(VcsBundle.message("patch.apply.select.title"), "", project, descriptor);
    myFileNameField.getTextField().getDocument().addDocumentListener(new DocumentAdapter() {
      protected void textChanged(DocumentEvent e) {
        // Reset cached state and debounce the patch reload by 400 ms so that
        // typing in the field does not trigger a parse per keystroke.
        updateOKAction();
        myStatusLabel.setForeground(UIUtil.getLabelForeground());
        myStatusLabel.setText(VcsBundle.message("patch.load.progress"));
        myPatches = null;
        myMoveRenameInfo.clear();
        myLoadPatchAlarm.cancelAllRequests();
        myLoadPatchAlarm.addRequest(new Runnable() {
          public void run() {
            checkLoadPatches(true);
          }
        }, 400);
      }
    });
    // NOTE(review): project.getBaseDir() can return null for the default
    // project -- confirm this dialog is only shown for regular projects.
    myBaseDirectoryField.setText(project.getBaseDir().getPresentableUrl());
    myBaseDirectoryField.addBrowseFolderListener(VcsBundle.message("patch.apply.select.base.directory.title"), "", project,
                                                 new FileChooserDescriptor(false, true, false, false, false, false));
    myBaseDirectoryField.getTextField().getDocument().addDocumentListener(new DocumentAdapter() {
      protected void textChanged(final DocumentEvent e) {
        // Skip re-verification when the dialog itself is updating the field.
        if (!myInnerChange) {
          queueVerifyPatchPaths();
        }
      }
    });
    myStripLeadingDirectoriesSpinner.setModel(new SpinnerNumberModel(0, 0, 256, 1));
    myStripLeadingDirectoriesSpinner.addChangeListener(new ChangeListener() {
      public void stateChanged(final ChangeEvent e) {
        if (!myInnerChange) {
          queueVerifyPatchPaths();
        }
      }
    });
    myPatchContentsList.setCellRenderer(new PatchCellRendererPanel());
    ChangeListManager changeListManager = ChangeListManager.getInstance(project);
    myChangeListChooser.setChangeLists(changeListManager.getChangeListsCopy());
    myChangeListChooser.setDefaultSelection(changeListManager.getDefaultChangeList());
    myChangeListChooser.init(project);
    init();
    updateOKAction();
    // Diff can be shown via the button, a double-click on a patch entry, or
    // the standard "show diff" shortcut registered below.
    myShowDiffButton.addActionListener(new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        showDiff();
      }
    });
    myPatchContentsList.addMouseListener(new MouseAdapter() {
      public void mouseClicked(final MouseEvent e) {
        if (e.getButton() == 1 && e.getClickCount() == 2) {
          showDiff();
        }
      }
    });
    new AnAction() {
      @Override
      public void actionPerformed(AnActionEvent e) {
        showDiff();
      }
    }.registerCustomShortcutSet(CommonShortcuts.getDiff(), myRootPanel, myDisposable);
  }
/**
 * Opens a diff preview for the selected patches (or for all loaded patches when the
 * selection is empty), converting each patch into a synthetic Change:
 * - new file: no "before" revision, content taken from the patch text;
 * - rename/move (before != after, not deleted): patched content shown under the new path;
 * - otherwise: current file content vs. content with the patch applied (deleted files
 *   get only a "before" revision).
 * Any per-patch failure aborts the whole preview with an error dialog.
 */
private void showDiff() {
List<Change> changes = new ArrayList<Change>();
ApplyPatchContext context = getApplyPatchContext().getPrepareContext();
Object[] selection = myPatchContentsList.getSelectedValues();
if (selection.length == 0) {
// nothing selected - preview every loaded patch
if (myPatches == null) return;
selection = ArrayUtil.toObjectArray(myPatches);
}
for(Object o: selection) {
final TextFilePatch patch = (TextFilePatch) o;
try {
if (patch.isNewFile()) {
// new file: no base revision, content comes straight from the patch
final FilePath newFilePath = FilePathImpl.createNonLocal(patch.getAfterName(), false);
final String content = patch.getNewFileText();
ContentRevision revision = new SimpleContentRevision(content, newFilePath, patch.getAfterVersionId());
changes.add(new Change(null, revision));
} else if ((! patch.isDeletedFile()) && (patch.getBeforeName() != null) && (patch.getAfterName() != null) &&
(! patch.getBeforeName().equals(patch.getAfterName()))) {
// rename/move: resolve the old file, then rebuild the new path from the longest
// existing prefix plus the unresolved tail components
final VirtualFile baseDirectory = getBaseDirectory();
final VirtualFile beforeFile = PatchApplier.getFile(baseDirectory, patch.getBeforeName());
if (beforeFile != null) {
final List<String> tail = new ArrayList<String>();
final VirtualFile partFile = PatchApplier.getFile(baseDirectory, patch.getAfterName(), tail);
final StringBuilder sb = new StringBuilder(partFile.getPath());
for (String s : tail) {
if (sb.charAt(sb.length() - 1) != '/') {
sb.append('/');
}
sb.append(s);
}
final Change change =
changeForPath(beforeFile, patch, FilePathImpl.createNonLocal(FileUtil.toSystemIndependentName(sb.toString()), false));
if (change != null) {
changes.add(change);
}
} else {
Messages.showErrorDialog(myProject, "Cannot show difference: cannot find file " + patch.getBeforeName(),
VcsBundle.message("patch.apply.dialog.title"));
}
}
else {
// plain modification or deletion of an existing file
final VirtualFile fileToPatch = patch.findFileToPatch(context);
if (fileToPatch != null) {
final FilePathImpl filePath = new FilePathImpl(fileToPatch);
final CurrentContentRevision currentRevision = new CurrentContentRevision(filePath);
if (patch.isDeletedFile()) {
changes.add(new Change(currentRevision, null));
}
else {
final Change change = changeForPath(fileToPatch, patch, null);
if (change != null) {
changes.add(change);
}
}
}
}
}
catch (Exception e) {
// abort the whole preview on the first patch that cannot be materialized
Messages.showErrorDialog(myProject, "Error loading changes for " + patch.getAfterFileName() + ": " + e.getMessage(),
VcsBundle.message("patch.apply.dialog.title"));
return;
}
}
ShowDiffAction.showDiffForChange(changes.toArray(new Change[changes.size()]), 0, myProject,
ShowDiffAction.DiffExtendUIFactory.NONE, false);
}
/**
 * Builds a Change comparing the current content of {@code fileToPatch} with the
 * content produced by applying {@code patch}.  The "after" revision is placed
 * under {@code newFilePath} when given (rename case), otherwise under the file's
 * own path.  When the patch does not apply cleanly, the merge UI is shown
 * instead and null is returned.
 */
@Nullable
private Change changeForPath(final VirtualFile fileToPatch, final TextFilePatch patch, final FilePath newFilePath) {
try {
final FilePathImpl currentPath = new FilePathImpl(fileToPatch);
final CurrentContentRevision beforeRevision = new CurrentContentRevision(currentPath);
final Document document = FileDocumentManager.getInstance().getDocument(fileToPatch);
final String baseContent = document.getText();
final StringBuilder patchedText = new StringBuilder();
patch.applyModifications(baseContent, patchedText);
FilePath targetPath = newFilePath;
if (targetPath == null) {
targetPath = currentPath;
}
final ContentRevision afterRevision = new SimpleContentRevision(patchedText.toString(), targetPath, patch.getAfterVersionId());
return new Change(beforeRevision, afterRevision);
} catch (ApplyPatchException e) {
final ApplyPatchContext context = new ApplyPatchContext(getBaseDirectory(), 0, false, false);
// patch did not apply cleanly - fall back to the read-only merge UI and report no change
ApplyPatchAction.mergeAgainstBaseVersion(myProject, fileToPatch, context, patch, ApplyPatchAction.ApplyPatchMergeRequestFactory.INSTANCE_READ_ONLY);
return null;
}
}
@Override
@NonNls
protected String getDimensionServiceKey() {
// key under which the dialog's size and position are persisted between sessions
return "vcs.ApplyPatchDialog";
}
/**
 * Shows "verifying" feedback immediately and schedules a re-verification of all
 * patch paths after a short debounce delay; any previously queued request is
 * cancelled first.
 */
private void queueVerifyPatchPaths() {
myStatusLabel.setForeground(UIUtil.getLabelForeground());
myStatusLabel.setText(VcsBundle.message("apply.patch.progress.verifying"));
myVerifyPatchAlarm.cancelAllRequests();
final Runnable verifyRequest = new Runnable() {
public void run() {
// nothing to verify until patches have been loaded
if (myPatches == null) return;
try {
verifyPatchPaths();
}
catch(Exception ex) {
LOG.error(ex);
}
}
};
myVerifyPatchAlarm.addRequest(verifyRequest, 400);
}
/** Sets the patch-file path programmatically and loads the patches synchronously. */
public void setFileName(String fileName) {
myFileNameField.setText(fileName);
checkLoadPatches(false);
}
/**
 * Resolves the path typed into the file-name field and (re)loads patches from it.
 * Resolution runs inside a write action so the VFS entry can be refreshed, and
 * directories are rejected.  Parsing happens on a pooled thread when {@code async}
 * is true, otherwise synchronously on the calling thread.
 */
private void checkLoadPatches(final boolean async) {
final String fileName = myFileNameField.getText().replace(File.separatorChar, '/');
final VirtualFile patchFile = ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
public VirtualFile compute() {
final VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(fileName);
if (file != null) {
file.refresh(false, false);
if (file.isDirectory()) {
// we are looking for file not directory
return null;
}
}
return file;
}
});
if (patchFile == null) {
queueUpdateStatus("Cannot find patch file");
return;
}
// suggest a change list name derived from the patch file's name
myChangeListChooser.setDefaultName(patchFile.getNameWithoutExtension().replace('_', ' ').trim());
if (async) {
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
loadPatchesFromFile(patchFile);
}
});
}
else {
loadPatchesFromFile(patchFile);
}
}
/**
 * Parses all patches from {@code patchFile} into myPatches under a read action.
 * Move/rename descriptions are recorded in myMoveRenameInfo for the list renderer,
 * the base directory is auto-detected afterwards, and progress/errors are reported
 * via queueUpdateStatus (a null argument signals success).
 */
private void loadPatchesFromFile(final VirtualFile patchFile) {
myPatches = new ArrayList<FilePatch>();
myPatchesFailedToLoad = new HashSet<FilePatch>();
ApplicationManager.getApplication().runReadAction(new Runnable() {
public void run() {
if (!patchFile.isValid()) {
queueUpdateStatus("Cannot find patch file");
return;
}
PatchReader reader;
try {
reader = new PatchReader(patchFile);
}
catch (IOException e) {
queueUpdateStatus(VcsBundle.message("patch.apply.open.error", e.getMessage()));
return;
}
// read patches one at a time until the reader is exhausted or a syntax error occurs
while(true) {
FilePatch patch;
try {
patch = reader.readNextPatch();
}
catch (PatchSyntaxException e) {
if (e.getLine() >= 0) {
queueUpdateStatus(VcsBundle.message("patch.apply.load.error.line", e.getMessage(), e.getLine()));
}
else {
queueUpdateStatus(VcsBundle.message("patch.apply.load.error", e.getMessage()));
}
return;
}
if (patch == null) {
break;
}
final String beforeName = patch.getBeforeName();
final String afterName = patch.getAfterName();
// remember a human-readable "moved/renamed" note keyed by the (before, after) pair
final String movedMessage = RelativePathCalculator.getMovedString(beforeName, afterName);
if (movedMessage != null) {
myMoveRenameInfo.put(new Pair<String, String>(beforeName, afterName), movedMessage);
}
myPatches.add(patch);
}
if (myPatches.isEmpty()) {
queueUpdateStatus(VcsBundle.message("patch.apply.no.patches.found"));
return;
}
autoDetectBaseDirectory();
queueUpdateStatus(null);
}
});
}
/**
 * Tries to find a base directory / strip count under which every loaded patch
 * resolves to an existing file.  A previously detected candidate is reused for
 * subsequent patches; if two patches disagree on the detected settings,
 * auto-detection is abandoned for the rest of the loop.  Patches that still do
 * not resolve are collected into myPatchesFailedToLoad.
 */
private void autoDetectBaseDirectory() {
boolean autodetectFailed = false;
for(FilePatch patch: myPatches) {
VirtualFile baseDir = myDetectedBaseDirectory == null
? getBaseDirectory()
: LocalFileSystem.getInstance().findFileByPath(myDetectedBaseDirectory.replace(File.separatorChar, '/'));
int skipTopDirs = myDetectedStripLeadingDirs >= 0 ? myDetectedStripLeadingDirs : 0;
VirtualFile fileToPatch;
try {
fileToPatch = patch.findFileToPatch(new ApplyPatchContext(baseDir, skipTopDirs, false, false));
}
catch (IOException e) {
myPatchesFailedToLoad.add(patch);
continue;
}
if (fileToPatch == null) {
boolean success = false;
if (!autodetectFailed) {
String oldDetectedBaseDirectory = myDetectedBaseDirectory;
int oldDetectedStripLeadingDirs = myDetectedStripLeadingDirs;
success = detectDirectory(patch);
if (success) {
// a second, different detection result means the patches are mutually inconsistent
if ((oldDetectedBaseDirectory != null && !Comparing.equal(oldDetectedBaseDirectory, myDetectedBaseDirectory)) ||
(oldDetectedStripLeadingDirs >= 0 && oldDetectedStripLeadingDirs != myDetectedStripLeadingDirs)) {
myDetectedBaseDirectory = null;
myDetectedStripLeadingDirs = -1;
autodetectFailed = true;
}
}
}
if (!success) {
myPatchesFailedToLoad.add(patch);
}
}
}
}
/**
 * Attempts base-directory detection for a single patch.  A brand-new file whose
 * before/after names match carries no directory hint, so it is skipped; otherwise
 * the "before" path is tried first with the "after" path as a fallback.
 */
private boolean detectDirectory(final FilePatch patch) {
if (patch.getBeforeName().equals(patch.getAfterName()) && patch.isNewFile()) {
return false;
}
// short-circuits exactly like the original two-step lookup
return detectDirectoryByName(patch.getBeforeName()) || detectDirectoryByName(patch.getAfterName());
}
/**
 * Re-checks every loaded patch against the current base directory, collecting the
 * ones whose target file cannot be resolved into myPatchesFailedToLoad, then
 * refreshes the list UI on the EDT.  Returns the directories the patch set would
 * need but which do not exist yet.
 */
private Collection<String> verifyPatchPaths() {
final ApplyPatchContext context = getApplyPatchContext();
myPatchesFailedToLoad.clear();
for (FilePatch filePatch : myPatches) {
boolean resolved;
try {
// && short-circuits: findFileToPatch is only consulted when a base dir exists
resolved = context.getBaseDir() != null && filePatch.findFileToPatch(context) != null;
}
catch (IOException e) {
resolved = false;
}
if (!resolved) {
myPatchesFailedToLoad.add(filePatch);
}
}
SwingUtilities.invokeLater(new Runnable() {
public void run() {
myPatchContentsList.repaint();
myStatusLabel.setText("");
}
});
return context.getMissingDirectories();
}
/**
 * Delegates base-directory detection for one patch path to the project-level
 * detector (if any); on success stores the detected directory and strip count.
 */
private boolean detectDirectoryByName(final String patchFileName) {
final PatchBaseDirectoryDetector detector = PatchBaseDirectoryDetector.getInstance(myProject);
final PatchBaseDirectoryDetector.Result result = (detector == null) ? null : detector.detectBaseDirectory(patchFileName);
if (result == null) {
return false;
}
myDetectedBaseDirectory = result.baseDir;
myDetectedStripLeadingDirs = result.stripDirs;
return true;
}
/**
 * Thread-safe entry point for status updates: runs updateStatus directly when
 * already on the EDT, otherwise re-dispatches itself there.
 */
private void queueUpdateStatus(final String s) {
if (SwingUtilities.isEventDispatchThread()) {
updateStatus(s);
}
else {
// status updates touch Swing components, so hop over to the EDT first
SwingUtilities.invokeLater(new Runnable() {
public void run() {
queueUpdateStatus(s);
}
});
}
}
/**
 * Applies any auto-detected base-dir / strip values to the UI (guarded by
 * myInnerChange so the field listeners do not re-trigger verification), then shows
 * either the patch summary ({@code s == null}) or the error text {@code s} in red,
 * and refreshes the list model and OK button.  Callers reach this via
 * queueUpdateStatus, which dispatches to the EDT.
 */
private void updateStatus(String s) {
myInnerChange = true;
try {
if (myDetectedBaseDirectory != null) {
myBaseDirectoryField.setText(myDetectedBaseDirectory);
myDetectedBaseDirectory = null;
}
if (myDetectedStripLeadingDirs != -1) {
myStripLeadingDirectoriesSpinner.setValue(myDetectedStripLeadingDirs);
myDetectedStripLeadingDirs = -1;
}
}
finally {
myInnerChange = false;
}
myLoadPatchError = s;
if (s == null) {
myStatusLabel.setForeground(UIUtil.getLabelForeground());
myStatusLabel.setText(buildPatchSummary());
}
else {
myStatusLabel.setText(s);
myStatusLabel.setForeground(Color.red);
}
updatePatchTableModel();
updateOKAction();
}
/**
 * Rebuilds the patch list model; an empty DefaultListModel stands in while no
 * patches are loaded, and the diff button is enabled only for a non-empty set.
 */
private void updatePatchTableModel() {
if (myPatches == null) {
myPatchContentsList.setModel(new DefaultListModel());
myShowDiffButton.setEnabled(false);
}
else {
myPatchContentsList.setModel(new CollectionListModel(myPatches));
myShowDiffButton.setEnabled(!myPatches.isEmpty());
}
}
/**
 * Builds the HTML status line summarizing how many files the loaded patch set
 * changes, adds and deletes (categories with a zero count are omitted).
 */
private String buildPatchSummary() {
int added = 0;
int removed = 0;
int modified = 0;
for (FilePatch patch : myPatches) {
if (patch.isNewFile()) {
added++;
}
else if (patch.isDeletedFile()) {
removed++;
}
else {
modified++;
}
}
final StringBuilder summary = new StringBuilder();
summary.append("<html><body><b>").append(VcsBundle.message("apply.patch.summary.title")).append("</b> ");
// the prevCount arguments let appendSummary insert ", " separators only when needed
appendSummary(modified, 0, summary, "patch.summary.changed.files");
appendSummary(added, modified, summary, "patch.summary.new.files");
appendSummary(removed, modified + added, summary, "patch.summary.deleted.files");
return summary.append("</body></html>").toString();
}
/**
 * Appends one localized "<n> files ..." fragment to the summary, preceded by a
 * comma when an earlier fragment was already emitted; zero counts emit nothing.
 */
private static void appendSummary(final int count, final int prevCount, final StringBuilder summaryBuilder,
@PropertyKey(resourceBundle = "messages.VcsBundle") final String key) {
if (count <= 0) {
return;
}
if (prevCount > 0) {
summaryBuilder.append(", ");
}
summaryBuilder.append(VcsBundle.message(key, count));
}
@Override
protected void dispose() {
// stop any pending delayed load/verify requests before the dialog goes away
myLoadPatchAlarm.dispose();
myVerifyPatchAlarm.dispose();
super.dispose();
}
/** OK is available only when a patch path was entered and the last load succeeded. */
private void updateOKAction() {
final boolean hasFileName = myFileNameField.getText().length() > 0;
final boolean loadedCleanly = myLoadPatchError == null;
setOKActionEnabled(hasFileName && loadedCleanly);
}
@Override
protected void doOKAction() {
// a load may still be pending (user typed a path and pressed OK before the alarm fired)
if (myPatches == null) {
myLoadPatchAlarm.cancelAllRequests();
checkLoadPatches(false);
}
if (myLoadPatchError == null) {
mySelectedChangeList = myChangeListChooser.getSelectedList(myProject);
if (mySelectedChangeList == null) return;
// offer to create directories for files the patch adds in not-yet-existing places
final Collection<String> missingDirs = verifyPatchPaths();
if (missingDirs.size() > 0 && !checkCreateMissingDirs(missingDirs)) return;
if (getBaseDirectory() == null) {
Messages.showErrorDialog(getContentPane(), "Could not find patch base directory " + myBaseDirectoryField.getText());
return;
}
super.doOKAction();
}
}
/**
 * Asks the user whether the listed missing directories should be created.
 * Returns true when applying may proceed: rc == 0 creates the directories inside
 * an undoable command and proceeds, rc == 1 proceeds without creating them, and
 * any other answer aborts.
 * NOTE(review): the 0/1 codes presumably map to the Yes/No buttons of
 * Messages.showYesNoCancelDialog - confirm against the Messages API.
 */
private boolean checkCreateMissingDirs(final Collection<String> missingDirs) {
StringBuilder messageBuilder = new StringBuilder(VcsBundle.message("apply.patch.create.dirs.prompt.header"));
for(String missingDir: missingDirs) {
messageBuilder.append(missingDir).append("\r\n");
}
messageBuilder.append(VcsBundle.message("apply.patch.create.dirs.prompt.footer"));
int rc = Messages.showYesNoCancelDialog(myProject, messageBuilder.toString(), VcsBundle.message("patch.apply.dialog.title"),
Messages.getQuestionIcon());
if (rc == 0) {
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
public void run() {
for(String dir: missingDirs) {
try {
VfsUtil.createDirectories(dir);
}
catch (IOException e) {
// report but keep trying the remaining directories
Messages.showErrorDialog(myProject, "Error creating directories: " + e.getMessage(),
VcsBundle.message("patch.apply.dialog.title"));
}
}
}
}, "Creating directories for new files in patch", null);
}
else if (rc != 1) {
return false;
}
return true;
}
/** The designer-bound root panel is the dialog's entire center content. */
@Nullable
protected JComponent createCenterPanel() {
return myRootPanel;
}
/** Returns the patches parsed from the current patch file (null before a load). */
// NOTE(review): exposes the internal mutable list directly - callers can mutate dialog state.
public List<FilePatch> getPatches() {
return myPatches;
}
/**
 * Resolves the base-directory field to a VirtualFile.  On the EDT the VFS entry is
 * refreshed during lookup; background callers get a plain (non-refreshing) lookup.
 */
private VirtualFile getBaseDirectory() {
final String path = FileUtil.toSystemIndependentName(myBaseDirectoryField.getText());
final LocalFileSystem fileSystem = LocalFileSystem.getInstance();
if (ApplicationManager.getApplication().isDispatchThread()) {
return fileSystem.refreshAndFindFileByPath(path);
}
return fileSystem.findFileByPath(path);
}
/** Current strip count from the spinner (its SpinnerNumberModel holds Integers). */
private int getStripLeadingDirectories() {
final Integer value = (Integer) myStripLeadingDirectoriesSpinner.getValue();
return value.intValue();
}
/** Builds a fresh patch-application context from the base-directory and strip-count UI fields. */
public ApplyPatchContext getApplyPatchContext() {
return new ApplyPatchContext(getBaseDirectory(), getStripLeadingDirectories(), false, false);
}
/** Returns the change list chosen at OK time (assigned in doOKAction; null before that). */
public LocalChangeList getSelectedChangeList() {
return mySelectedChangeList;
}
/** Maps the patch kind (new/deleted/modified) to its localized display name. */
private static String getChangeType(final FilePatch filePatch) {
final String key;
if (filePatch.isNewFile()) {
key = "change.type.new";
}
else if (filePatch.isDeletedFile()) {
key = "change.type.deleted";
}
else {
key = "change.type.modified";
}
return VcsBundle.message(key);
}
/** Opens the IDE help page for the "Apply Patch" dialog. */
protected void doHelpAction() {
HelpManager.getInstance().invokeHelp("reference.dialogs.vcs.patch.apply");
}
/** Adds a Help button alongside the standard OK/Cancel pair. */
protected Action[] createActions() {
return new Action[]{ getOKAction(), getCancelAction(), getHelpAction() };
}
/**
 * Creates the change-list chooser whose validity callback toggles the OK button.
 * NOTE(review): presumably invoked by the UI-form runtime before bound fields are
 * assigned (the createUIComponents convention) - confirm against the form file.
 */
private void createUIComponents() {
myChangeListChooser = new ChangeListChooserPanel(null, new Consumer<Boolean>() {
public void consume(final Boolean aBoolean) {
setOKActionEnabled(aBoolean);
}
});
}
/**
 * List cell renderer panel: the colored file-name renderer in the center plus a
 * right-aligned "(new)/(deleted)/(modified)" change-type label.
 */
private class PatchCellRendererPanel extends JPanel implements ListCellRenderer {
private final PatchCellRenderer myRenderer;
private final JLabel myFileTypeLabel;
public PatchCellRendererPanel() {
super(new BorderLayout());
setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 2));
myRenderer = new PatchCellRenderer();
add(myRenderer, BorderLayout.CENTER);
myFileTypeLabel = new JLabel();
myFileTypeLabel.setHorizontalAlignment(JLabel.RIGHT);
add(myFileTypeLabel, BorderLayout.EAST);
}
public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
FilePatch patch = (FilePatch) value;
// delegate name painting; focus highlight is suppressed for the inner renderer
myRenderer.getListCellRendererComponent(list, value, index, isSelected, false);
myFileTypeLabel.setText("(" + getChangeType(patch) + ")");
// selection colors are applied to this panel, not the inner renderer
if (isSelected) {
setBackground(UIUtil.getListSelectionBackground());
setForeground(UIUtil.getListSelectionForeground());
myFileTypeLabel.setForeground(UIUtil.getListSelectionForeground());
}
else {
setBackground(UIUtil.getListBackground());
setForeground(UIUtil.getListForeground());
myFileTypeLabel.setForeground(Color.gray);
}
return this;
}
}
/**
 * Colored file-name renderer: names are tinted by change type (added/deleted/
 * modified), shown as errors when the target file could not be resolved, and an
 * optional move/rename note is appended.
 */
private class PatchCellRenderer extends ColoredListCellRenderer {
private final SimpleTextAttributes myNewAttributes = new SimpleTextAttributes(0, FileStatus.ADDED.getColor());
private final SimpleTextAttributes myDeletedAttributes = new SimpleTextAttributes(0, FileStatus.DELETED.getColor());
private final SimpleTextAttributes myModifiedAttributes = new SimpleTextAttributes(0, FileStatus.MODIFIED.getColor());
private boolean assumeProblemWillBeFixed(final FilePatch filePatch) {
// if some of the files are valid, assume that "red" new files will be fixed by creating directories
if (myPatches == null || myPatchesFailedToLoad == null) return false;
return (filePatch.isNewFile() && myPatchesFailedToLoad.size() != myPatches.size());
}
protected void customizeCellRenderer(JList list, Object value, int index, boolean selected, boolean hasFocus) {
FilePatch filePatch = (FilePatch) value;
String name = filePatch.getAfterNameRelative(getStripLeadingDirectories());
final FileType fileType = FileTypeManager.getInstance().getFileTypeByFileName(name);
setIcon(fileType.getIcon());
// unresolvable patches render as errors unless they are probably just missing directories
if (myPatchesFailedToLoad.contains(filePatch) && !assumeProblemWillBeFixed(filePatch)) {
append(name, SimpleTextAttributes.ERROR_ATTRIBUTES);
}
else if (filePatch.isNewFile()) {
append(name, myNewAttributes);
}
else if (filePatch.isDeletedFile()) {
append(name, myDeletedAttributes);
}
else {
append(name, myModifiedAttributes);
}
final String afterPath = filePatch.getAfterName();
final String beforePath = filePatch.getBeforeName();
// append the "moved/renamed" note collected while the patch file was parsed
if ((beforePath != null) && (afterPath != null) && (! beforePath.equals(afterPath))) {
final String message = myMoveRenameInfo.get(new Pair<String, String>(beforePath, afterPath));
if (message != null) {
append(message, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
}
}
}
}
| |
package com.trilead.ssh2.crypto.digest;
/**
* MD5. Based on the example code in RFC 1321. Optimized (...a little).
*
* @author Christian Plattner, plattner@trilead.com
* @version $Id: MD5.java,v 1.1 2007/10/15 12:49:57 cplattne Exp $
*/
/*
* The following disclaimer has been copied from RFC 1321:
*
* Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All rights
* reserved.
*
* License to copy and use this software is granted provided that it is
* identified as the "RSA Data Security, Inc. MD5 Message-Digest Algorithm" in
* all material mentioning or referencing this software or this function.
*
* License is also granted to make and use derivative works provided that such
* works are identified as "derived from the RSA Data Security, Inc. MD5
* Message-Digest Algorithm" in all material mentioning or referencing the
* derived work.
*
* RSA Data Security, Inc. makes no representations concerning either the
* merchantability of this software or the suitability of this software for any
* particular purpose. It is provided "as is" without express or implied
* warranty of any kind.
*
* These notices must be retained in any copies of any part of this
* documentation and/or software.
*
*/
public final class MD5 implements Digest
{
// Running hash state (A, B, C, D of RFC 1321).
private int state0, state1, state2, state3;
// Total number of message bytes processed so far.
private long count;
// Buffer collecting input until a full 64-byte block is available.
private final byte[] block = new byte[64];
// Scratch array holding the current block decoded into 16 little-endian words.
private final int[] x = new int[16];
// RFC 1321 padding: a single 0x80 byte followed by zeros.
private static final byte[] padding = new byte[] { (byte) 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
public MD5()
{
reset();
}
/* The four round functions of RFC 1321; each returns the rotated-and-added result. */
private static final int FF(int a, int b, int c, int d, int x, int s, int ac)
{
a += ((b & c) | ((~b) & d)) + x + ac;
return ((a << s) | (a >>> (32 - s))) + b;
}
private static final int GG(int a, int b, int c, int d, int x, int s, int ac)
{
a += ((b & d) | (c & (~d))) + x + ac;
return ((a << s) | (a >>> (32 - s))) + b;
}
private static final int HH(int a, int b, int c, int d, int x, int s, int ac)
{
a += (b ^ c ^ d) + x + ac;
return ((a << s) | (a >>> (32 - s))) + b;
}
private static final int II(int a, int b, int c, int d, int x, int s, int ac)
{
a += (c ^ (b | (~d))) + x + ac;
return ((a << s) | (a >>> (32 - s))) + b;
}
/* Writes one int into dst at dstoff in little-endian byte order. */
private static final void encode(byte[] dst, int dstoff, int word)
{
dst[dstoff] = (byte) (word);
dst[dstoff + 1] = (byte) (word >> 8);
dst[dstoff + 2] = (byte) (word >> 16);
dst[dstoff + 3] = (byte) (word >> 24);
}
/* Core compression function: folds one 64-byte block at src[pos..pos+63] into the state. */
private final void transform(byte[] src, int pos)
{
int a = state0;
int b = state1;
int c = state2;
int d = state3;
/* Decode the block into 16 little-endian words. */
for (int i = 0; i < 16; i++, pos += 4)
{
x[i] = (src[pos] & 0xff) | ((src[pos + 1] & 0xff) << 8) | ((src[pos + 2] & 0xff) << 16)
| ((src[pos + 3] & 0xff) << 24);
}
/* Round 1 */
a = FF(a, b, c, d, x[0], 7, 0xd76aa478); /* 1 */
d = FF(d, a, b, c, x[1], 12, 0xe8c7b756); /* 2 */
c = FF(c, d, a, b, x[2], 17, 0x242070db); /* 3 */
b = FF(b, c, d, a, x[3], 22, 0xc1bdceee); /* 4 */
a = FF(a, b, c, d, x[4], 7, 0xf57c0faf); /* 5 */
d = FF(d, a, b, c, x[5], 12, 0x4787c62a); /* 6 */
c = FF(c, d, a, b, x[6], 17, 0xa8304613); /* 7 */
b = FF(b, c, d, a, x[7], 22, 0xfd469501); /* 8 */
a = FF(a, b, c, d, x[8], 7, 0x698098d8); /* 9 */
d = FF(d, a, b, c, x[9], 12, 0x8b44f7af); /* 10 */
c = FF(c, d, a, b, x[10], 17, 0xffff5bb1); /* 11 */
b = FF(b, c, d, a, x[11], 22, 0x895cd7be); /* 12 */
a = FF(a, b, c, d, x[12], 7, 0x6b901122); /* 13 */
d = FF(d, a, b, c, x[13], 12, 0xfd987193); /* 14 */
c = FF(c, d, a, b, x[14], 17, 0xa679438e); /* 15 */
b = FF(b, c, d, a, x[15], 22, 0x49b40821); /* 16 */
/* Round 2 */
a = GG(a, b, c, d, x[1], 5, 0xf61e2562); /* 17 */
d = GG(d, a, b, c, x[6], 9, 0xc040b340); /* 18 */
c = GG(c, d, a, b, x[11], 14, 0x265e5a51); /* 19 */
b = GG(b, c, d, a, x[0], 20, 0xe9b6c7aa); /* 20 */
a = GG(a, b, c, d, x[5], 5, 0xd62f105d); /* 21 */
d = GG(d, a, b, c, x[10], 9, 0x2441453); /* 22 */
c = GG(c, d, a, b, x[15], 14, 0xd8a1e681); /* 23 */
b = GG(b, c, d, a, x[4], 20, 0xe7d3fbc8); /* 24 */
a = GG(a, b, c, d, x[9], 5, 0x21e1cde6); /* 25 */
d = GG(d, a, b, c, x[14], 9, 0xc33707d6); /* 26 */
c = GG(c, d, a, b, x[3], 14, 0xf4d50d87); /* 27 */
b = GG(b, c, d, a, x[8], 20, 0x455a14ed); /* 28 */
a = GG(a, b, c, d, x[13], 5, 0xa9e3e905); /* 29 */
d = GG(d, a, b, c, x[2], 9, 0xfcefa3f8); /* 30 */
c = GG(c, d, a, b, x[7], 14, 0x676f02d9); /* 31 */
b = GG(b, c, d, a, x[12], 20, 0x8d2a4c8a); /* 32 */
/* Round 3 */
a = HH(a, b, c, d, x[5], 4, 0xfffa3942); /* 33 */
d = HH(d, a, b, c, x[8], 11, 0x8771f681); /* 34 */
c = HH(c, d, a, b, x[11], 16, 0x6d9d6122); /* 35 */
b = HH(b, c, d, a, x[14], 23, 0xfde5380c); /* 36 */
a = HH(a, b, c, d, x[1], 4, 0xa4beea44); /* 37 */
d = HH(d, a, b, c, x[4], 11, 0x4bdecfa9); /* 38 */
c = HH(c, d, a, b, x[7], 16, 0xf6bb4b60); /* 39 */
b = HH(b, c, d, a, x[10], 23, 0xbebfbc70); /* 40 */
a = HH(a, b, c, d, x[13], 4, 0x289b7ec6); /* 41 */
d = HH(d, a, b, c, x[0], 11, 0xeaa127fa); /* 42 */
c = HH(c, d, a, b, x[3], 16, 0xd4ef3085); /* 43 */
b = HH(b, c, d, a, x[6], 23, 0x4881d05); /* 44 */
a = HH(a, b, c, d, x[9], 4, 0xd9d4d039); /* 45 */
d = HH(d, a, b, c, x[12], 11, 0xe6db99e5); /* 46 */
c = HH(c, d, a, b, x[15], 16, 0x1fa27cf8); /* 47 */
b = HH(b, c, d, a, x[2], 23, 0xc4ac5665); /* 48 */
/* Round 4 */
a = II(a, b, c, d, x[0], 6, 0xf4292244); /* 49 */
d = II(d, a, b, c, x[7], 10, 0x432aff97); /* 50 */
c = II(c, d, a, b, x[14], 15, 0xab9423a7); /* 51 */
b = II(b, c, d, a, x[5], 21, 0xfc93a039); /* 52 */
a = II(a, b, c, d, x[12], 6, 0x655b59c3); /* 53 */
d = II(d, a, b, c, x[3], 10, 0x8f0ccc92); /* 54 */
c = II(c, d, a, b, x[10], 15, 0xffeff47d); /* 55 */
b = II(b, c, d, a, x[1], 21, 0x85845dd1); /* 56 */
a = II(a, b, c, d, x[8], 6, 0x6fa87e4f); /* 57 */
d = II(d, a, b, c, x[15], 10, 0xfe2ce6e0); /* 58 */
c = II(c, d, a, b, x[6], 15, 0xa3014314); /* 59 */
b = II(b, c, d, a, x[13], 21, 0x4e0811a1); /* 60 */
a = II(a, b, c, d, x[4], 6, 0xf7537e82); /* 61 */
d = II(d, a, b, c, x[11], 10, 0xbd3af235); /* 62 */
c = II(c, d, a, b, x[2], 15, 0x2ad7d2bb); /* 63 */
b = II(b, c, d, a, x[9], 21, 0xeb86d391); /* 64 */
state0 += a;
state1 += b;
state2 += c;
state3 += d;
}
/**
 * Reinitializes the hash state for a new message.
 */
public final void reset()
{
count = 0;
state0 = 0x67452301;
state1 = 0xefcdab89;
state2 = 0x98badcfe;
state3 = 0x10325476;
/* Clear traces in memory: wipe both the decoded words and the raw input
 * buffer (the latter previously kept the last message block around even
 * though it is fully rewritten before the next transform reads it). */
for (int i = 0; i < 16; i++)
x[i] = 0;
for (int i = 0; i < block.length; i++)
block[i] = 0;
}
/** Feeds a single byte into the hash. */
public final void update(byte b)
{
/* bytes left in the current block; block index of b is 64 - space */
final int space = 64 - ((int) (count & 0x3f));
count++;
block[64 - space] = b;
if (space == 1)
transform(block, 0);
}
/** Feeds len bytes of buff, starting at pos, into the hash. */
public final void update(byte[] buff, int pos, int len)
{
int space = 64 - ((int) (count & 0x3f));
count += len;
while (len > 0)
{
if (len < space)
{
/* not enough for a full block - just buffer the remainder */
System.arraycopy(buff, pos, block, 64 - space, len);
break;
}
if (space == 64)
{
/* buffer empty and a full block available - hash directly from buff */
transform(buff, pos);
}
else
{
System.arraycopy(buff, pos, block, 64 - space, space);
transform(block, 0);
}
pos += space;
len -= space;
space = 64;
}
}
/** Feeds the whole array into the hash. */
public final void update(byte[] b)
{
update(b, 0, b.length);
}
/**
 * Finalizes the hash, writes the 16-byte digest into dst at pos and resets
 * this instance for the next message.
 */
public final void digest(byte[] dst, int pos)
{
byte[] bits = new byte[8];
/* capture the message length in bits BEFORE padding alters count */
encode(bits, 0, (int) (count << 3));
encode(bits, 4, (int) (count >> 29));
int idx = (int) count & 0x3f;
/* pad so that 8 bytes remain to the next 64-byte boundary */
int padLen = (idx < 56) ? (56 - idx) : (120 - idx);
update(padding, 0, padLen);
update(bits, 0, 8);
encode(dst, pos, state0);
encode(dst, pos + 4, state1);
encode(dst, pos + 8, state2);
encode(dst, pos + 12, state3);
reset();
}
/** Finalizes the hash into dst[0..15]; see {@link #digest(byte[], int)}. */
public final void digest(byte[] dst)
{
digest(dst, 0);
}
/** MD5 produces a 128-bit (16-byte) digest. */
public final int getDigestLength()
{
return 16;
}
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package com.google.android.exoplayer2.demo;
package com.shortsands.videoplayer;
import android.os.SystemClock;
import android.util.Log;
import android.view.Surface;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
//import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.metadata.emsg.EventMessage;
import com.google.android.exoplayer2.metadata.id3.ApicFrame;
import com.google.android.exoplayer2.metadata.id3.CommentFrame;
import com.google.android.exoplayer2.metadata.id3.GeobFrame;
import com.google.android.exoplayer2.metadata.id3.Id3Frame;
import com.google.android.exoplayer2.metadata.id3.PrivFrame;
import com.google.android.exoplayer2.metadata.id3.TextInformationFrame;
import com.google.android.exoplayer2.metadata.id3.UrlLinkFrame;
import com.google.android.exoplayer2.source.AdaptiveMediaSourceEventListener;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
import java.io.IOException;
import java.text.NumberFormat;
import java.util.Locale;
/**
* Logs player events using {@link Log}.
*/
/* package */ final class EventLogger implements ExoPlayer.EventListener,
AudioRendererEventListener, VideoRendererEventListener, AdaptiveMediaSourceEventListener,
ExtractorMediaSource.EventListener, DefaultDrmSessionManager.EventListener,
MetadataRenderer.Output {
// Log tag used for every message emitted by this logger.
private static final String TAG = "EventLogger";
// Cap on how many timeline periods/windows are dumped per timeline change.
private static final int MAX_TIMELINE_ITEM_LINES = 3;
// Formats second values with exactly two fraction digits and no grouping.
private static final NumberFormat TIME_FORMAT;
static {
TIME_FORMAT = NumberFormat.getInstance(Locale.US);
TIME_FORMAT.setMinimumFractionDigits(2);
TIME_FORMAT.setMaximumFractionDigits(2);
TIME_FORMAT.setGroupingUsed(false);
}
// private final MappingTrackSelector trackSelector;
// Reusable holders filled in onTimelineChanged to avoid per-event allocation.
private final Timeline.Window window;
private final Timeline.Period period;
// elapsedRealtime() at construction; base for session-relative timestamps.
private final long startTimeMs;
// public EventLogger(MappingTrackSelector trackSelector) {
/** Creates the logger and records the session start time (track-selector variant disabled). */
public EventLogger() {
//this.trackSelector = trackSelector;
window = new Timeline.Window();
period = new Timeline.Period();
startTimeMs = SystemClock.elapsedRealtime();
}
// ExoPlayer.EventListener
/** Logs changes of the player's loading (buffering network data) state. */
@Override
public void onLoadingChanged(boolean isLoading) {
Log.d(TAG, "onLoadingChanged [" + isLoading + "]");
}
/** Logs playback-state transitions with the session-relative timestamp. */
@Override
public void onPlayerStateChanged(boolean playWhenReady, int state) {
Log.d(TAG, "onPlayerStateChanged [" + getSessionTimeString() + ", " + playWhenReady + ", "
+ getStateString(state) + "]");
}
/** Logs seam points where the playback position jumps (seeks, period transitions). */
@Override
public void onPositionDiscontinuity() {
Log.d(TAG, "onPositionDiscontinuity");
}
//@Override
//public void onPlaybackParametersChanged(PlaybackParameters playbackParameters) {
// Log.d(TAG, "playbackParameters " + String.format(
// "[speed=%.2f, pitch=%.2f]", playbackParameters.speed, playbackParameters.pitch));
//}
/**
 * Logs the new timeline's shape: up to MAX_TIMELINE_ITEM_LINES periods and
 * windows with their durations; longer lists are elided with "...".
 */
@Override
public void onTimelineChanged(Timeline timeline, Object manifest) {
int periodCount = timeline.getPeriodCount();
int windowCount = timeline.getWindowCount();
Log.d(TAG, "onTimelineChanged [periodCount=" + periodCount + ", windowCount=" + windowCount);
for (int i = 0; i < Math.min(periodCount, MAX_TIMELINE_ITEM_LINES); i++) {
timeline.getPeriod(i, period);
Log.d(TAG, " " + "period [" + getTimeString(period.getDurationMs()) + "]");
}
if (periodCount > MAX_TIMELINE_ITEM_LINES) {
Log.d(TAG, " ...");
}
for (int i = 0; i < Math.min(windowCount, MAX_TIMELINE_ITEM_LINES); i++) {
timeline.getWindow(i, window);
Log.d(TAG, " " + "window [" + getTimeString(window.getDurationMs()) + ", "
+ window.isSeekable + ", " + window.isDynamic + "]");
}
if (windowCount > MAX_TIMELINE_ITEM_LINES) {
Log.d(TAG, " ...");
}
Log.d(TAG, "]");
}
/** Logs fatal player errors (with stack trace) at error level. */
@Override
public void onPlayerError(ExoPlaybackException e) {
Log.e(TAG, "onPlayerError [" + getSessionTimeString() + "]", e);
}
/**
 * Logs track-selection changes.  The detailed per-renderer dump below is disabled
 * (commented out) because the trackSelector field it relies on is itself disabled
 * in this build; only a marker line is emitted.
 */
@Override
public void onTracksChanged(TrackGroupArray ignored, TrackSelectionArray trackSelections) {
Log.d(TAG, "onTracksChanged CALLED");
/*
MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
if (mappedTrackInfo == null) {
Log.d(TAG, "Tracks []");
return;
}
Log.d(TAG, "Tracks [");
// Log tracks associated to renderers.
for (int rendererIndex = 0; rendererIndex < mappedTrackInfo.length; rendererIndex++) {
TrackGroupArray rendererTrackGroups = mappedTrackInfo.getTrackGroups(rendererIndex);
TrackSelection trackSelection = trackSelections.get(rendererIndex);
if (rendererTrackGroups.length > 0) {
Log.d(TAG, "  Renderer:" + rendererIndex + " [");
for (int groupIndex = 0; groupIndex < rendererTrackGroups.length; groupIndex++) {
TrackGroup trackGroup = rendererTrackGroups.get(groupIndex);
String adaptiveSupport = getAdaptiveSupportString(trackGroup.length,
mappedTrackInfo.getAdaptiveSupport(rendererIndex, groupIndex, false));
Log.d(TAG, "    Group:" + groupIndex + ", adaptive_supported=" + adaptiveSupport + " [");
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
String status = getTrackStatusString(trackSelection, trackGroup, trackIndex);
String formatSupport = getFormatSupportString(
mappedTrackInfo.getTrackFormatSupport(rendererIndex, groupIndex, trackIndex));
Log.d(TAG, "      " + status + " Track:" + trackIndex + ", "
+ Format.toLogString(trackGroup.getFormat(trackIndex))
+ ", supported=" + formatSupport);
}
Log.d(TAG, "    ]");
}
// Log metadata for at most one of the tracks selected for the renderer.
if (trackSelection != null) {
for (int selectionIndex = 0; selectionIndex < trackSelection.length(); selectionIndex++) {
Metadata metadata = trackSelection.getFormat(selectionIndex).metadata;
if (metadata != null) {
Log.d(TAG, "    Metadata [");
printMetadata(metadata, "      ");
Log.d(TAG, "    ]");
break;
}
}
}
Log.d(TAG, "  ]");
}
}
// Log tracks not associated with a renderer.
TrackGroupArray unassociatedTrackGroups = mappedTrackInfo.getUnassociatedTrackGroups();
if (unassociatedTrackGroups.length > 0) {
Log.d(TAG, "  Renderer:None [");
for (int groupIndex = 0; groupIndex < unassociatedTrackGroups.length; groupIndex++) {
Log.d(TAG, "    Group:" + groupIndex + " [");
TrackGroup trackGroup = unassociatedTrackGroups.get(groupIndex);
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
String status = getTrackStatusString(false);
String formatSupport = getFormatSupportString(
RendererCapabilities.FORMAT_UNSUPPORTED_TYPE);
Log.d(TAG, "      " + status + " Track:" + trackIndex + ", "
+ Format.toLogString(trackGroup.getFormat(trackIndex))
+ ", supported=" + formatSupport);
}
Log.d(TAG, "    ]");
}
Log.d(TAG, "  ]");
}
Log.d(TAG, "]");
*/
}
// MetadataRenderer.Output
@Override
public void onMetadata(Metadata metadata) {
    // Dumps each timed-metadata entry between bracket markers for readability in logcat.
    Log.d(TAG, "onMetadata [");
    printMetadata(metadata, " ");
    Log.d(TAG, "]");
}
// AudioRendererEventListener
@Override
public void onAudioEnabled(DecoderCounters counters) {
    // Records when in the session the audio renderer was enabled.
    final String message = "onAudioEnabled [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onAudioSessionId(int audioSessionId) {
    // Logs the platform audio session id assigned to this playback.
    final String message = "onAudioSessionId [" + audioSessionId + "]";
    Log.d(TAG, message);
}
@Override
public void onAudioDecoderInitialized(String decoderName, long elapsedRealtimeMs,
        long initializationDurationMs) {
    // Records which audio decoder was initialized and when; the duration args are not logged.
    final String message =
            "onAudioDecoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]";
    Log.d(TAG, message);
}
@Override
public void onAudioInputFormatChanged(Format format) {
    // Logs the new audio input format, rendered via Format.toLogString.
    final String message = "onAudioInputFormatChanged [" + getSessionTimeString() + ", "
            + Format.toLogString(format) + "]";
    Log.d(TAG, message);
}
@Override
public void onAudioDisabled(DecoderCounters counters) {
    // Records when in the session the audio renderer was disabled.
    final String message = "onAudioDisabled [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
    // An audio buffer underrun is treated as an internal error; there is no exception to attach.
    final String details = "onAudioTrackUnderrun [" + bufferSize + ", " + bufferSizeMs + ", "
            + elapsedSinceLastFeedMs + "]";
    printInternalError(details, null);
}
// VideoRendererEventListener
@Override
public void onVideoEnabled(DecoderCounters counters) {
    // Records when in the session the video renderer was enabled.
    final String message = "onVideoEnabled [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onVideoDecoderInitialized(String decoderName, long elapsedRealtimeMs,
        long initializationDurationMs) {
    // Records which video decoder was initialized and when; the duration args are not logged.
    final String message =
            "onVideoDecoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]";
    Log.d(TAG, message);
}
@Override
public void onVideoInputFormatChanged(Format format) {
    // Logs the new video input format, rendered via Format.toLogString.
    final String message = "onVideoInputFormatChanged [" + getSessionTimeString() + ", "
            + Format.toLogString(format) + "]";
    Log.d(TAG, message);
}
@Override
public void onVideoDisabled(DecoderCounters counters) {
    // Records when in the session the video renderer was disabled.
    final String message = "onVideoDisabled [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onDroppedFrames(int count, long elapsed) {
    // Logs how many frames were dropped; the elapsed interval is not included in the message.
    final String message = "onDroppedFrames [" + getSessionTimeString() + ", " + count + "]";
    Log.d(TAG, message);
}
@Override
public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
        float pixelWidthHeightRatio) {
    // Only traces the callback; the new dimensions are intentionally not logged.
    Log.d(TAG, "onVideoSizeChanged");
    // Do nothing.
}
@Override
public void onRenderedFirstFrame(Surface surface) {
    // Logs the Surface (its default toString) on which the first frame was rendered.
    final String message = "onRenderedFirstFrame [" + surface + "]";
    Log.d(TAG, message);
}
// DefaultDrmSessionManager.EventListener
@Override
public void onDrmSessionManagerError(Exception e) {
    // DRM session failures are funneled through the shared internal-error printer.
    printInternalError("onDrmSessionManagerError", e);
}
@Override
public void onDrmKeysRestored() {
    // Records when in the session offline DRM keys were restored.
    final String message = "onDrmKeysRestored [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onDrmKeysRemoved() {
    // Records when in the session DRM keys were removed.
    final String message = "onDrmKeysRemoved [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
@Override
public void onDrmKeysLoaded() {
    // Records when in the session DRM keys finished loading.
    final String message = "onDrmKeysLoaded [" + getSessionTimeString() + "]";
    Log.d(TAG, message);
}
// ExtractorMediaSource.EventListener
@Override
public void onLoadError(IOException error) {
    // ExtractorMediaSource load failures go through the shared internal-error printer.
    printInternalError("onLoadError", error);
}
// AdaptiveMediaSourceEventListener
@Override
public void onLoadStarted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
        int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
        long mediaEndTimeMs, long elapsedRealtimeMs) {
    // Only traces the callback; none of the load parameters are logged.
    Log.d(TAG, "onLoadStarted");
    // Do nothing.
}
@Override
public void onLoadError(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
        int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
        long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded,
        IOException error, boolean wasCanceled) {
    // Adaptive-source load failures: only the IOException is reported; the other
    // parameters (dataSpec, timings, byte counts, wasCanceled) are discarded.
    printInternalError("onLoadError", error);
}
@Override
public void onLoadCanceled(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
        int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
        long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded) {
    // Only traces the callback; the cancellation details are not logged.
    // Do nothing.
    Log.d(TAG, "onLoadCanceled");
}
@Override
public void onLoadCompleted(DataSpec dataSpec, int dataType, int trackType, Format trackFormat,
        int trackSelectionReason, Object trackSelectionData, long mediaStartTimeMs,
        long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs, long bytesLoaded) {
    // Only traces the callback; the completion details are not logged.
    // Do nothing.
    Log.d(TAG, "onLoadCompleted");
}
@Override
public void onUpstreamDiscarded(int trackType, long mediaStartTimeMs, long mediaEndTimeMs) {
    // Only traces the callback; the discarded media range is not logged.
    // Do nothing.
    Log.d(TAG, "onUpstreamDiscarded");
}
@Override
public void onDownstreamFormatChanged(int trackType, Format trackFormat, int trackSelectionReason,
        Object trackSelectionData, long mediaTimeMs) {
    // Only traces the callback; the new downstream format is not logged.
    // Do nothing.
    Log.d(TAG, "onDownstreamFormatChanged");
}
// Internal methods
private void printInternalError(String type, Exception e) {
    // Shared sink for non-fatal internal errors; tags the message with the session
    // time and the callback name, and attaches the exception (which may be null).
    final String message = "internalError [" + getSessionTimeString() + ", " + type + "]";
    Log.e(TAG, message, e);
}
// Logs a one-line summary for each metadata entry, keyed by its concrete frame type.
// NOTE: the instanceof chain is order-sensitive -- the generic Id3Frame branch is
// checked AFTER the specific ID3 frame subclasses (TextInformationFrame, UrlLinkFrame,
// PrivFrame, GeobFrame, ApicFrame, CommentFrame) so it only catches frame types that
// have no dedicated branch. Entries of unknown types are silently skipped.
private void printMetadata(Metadata metadata, String prefix) {
    for (int i = 0; i < metadata.length(); i++) {
        Metadata.Entry entry = metadata.get(i);
        if (entry instanceof TextInformationFrame) {
            TextInformationFrame textInformationFrame = (TextInformationFrame) entry;
            Log.d(TAG, prefix + String.format("%s: value=%s", textInformationFrame.id,
                    textInformationFrame.value));
        } else if (entry instanceof UrlLinkFrame) {
            UrlLinkFrame urlLinkFrame = (UrlLinkFrame) entry;
            Log.d(TAG, prefix + String.format("%s: url=%s", urlLinkFrame.id, urlLinkFrame.url));
        } else if (entry instanceof PrivFrame) {
            PrivFrame privFrame = (PrivFrame) entry;
            Log.d(TAG, prefix + String.format("%s: owner=%s", privFrame.id, privFrame.owner));
        } else if (entry instanceof GeobFrame) {
            GeobFrame geobFrame = (GeobFrame) entry;
            Log.d(TAG, prefix + String.format("%s: mimeType=%s, filename=%s, description=%s",
                    geobFrame.id, geobFrame.mimeType, geobFrame.filename, geobFrame.description));
        } else if (entry instanceof ApicFrame) {
            ApicFrame apicFrame = (ApicFrame) entry;
            Log.d(TAG, prefix + String.format("%s: mimeType=%s, description=%s",
                    apicFrame.id, apicFrame.mimeType, apicFrame.description));
        } else if (entry instanceof CommentFrame) {
            CommentFrame commentFrame = (CommentFrame) entry;
            Log.d(TAG, prefix + String.format("%s: language=%s, description=%s", commentFrame.id,
                    commentFrame.language, commentFrame.description));
        } else if (entry instanceof Id3Frame) {
            // Fallback for ID3 frame types without a dedicated branch: log the frame id only.
            Id3Frame id3Frame = (Id3Frame) entry;
            Log.d(TAG, prefix + String.format("%s", id3Frame.id));
        } else if (entry instanceof EventMessage) {
            EventMessage eventMessage = (EventMessage) entry;
            Log.d(TAG, prefix + String.format("EMSG: scheme=%s, id=%d, value=%s",
                    eventMessage.schemeIdUri, eventMessage.id, eventMessage.value));
        }
    }
}
private String getSessionTimeString() {
    // Elapsed wall-clock time since this logger's start, rendered as seconds.
    final long sessionTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
    return getTimeString(sessionTimeMs);
}
private static String getTimeString(long timeMs) {
    // C.TIME_UNSET marks an unknown time; render it as "?" instead of a number.
    if (timeMs == C.TIME_UNSET) {
        return "?";
    }
    return TIME_FORMAT.format(timeMs / 1000f);
}
// Human-readable name for an ExoPlayer playback-state constant; "State ?" for unknown values.
private static String getStateString(int state) {
    if (state == ExoPlayer.STATE_BUFFERING) {
        return "Buffering";
    } else if (state == ExoPlayer.STATE_ENDED) {
        return "Ended";
    } else if (state == ExoPlayer.STATE_IDLE) {
        return "Idle";
    } else if (state == ExoPlayer.STATE_READY) {
        return "Ready";
    }
    return "State ?";
}
// Human-readable label for a RendererCapabilities format-support constant; "?" for unknown.
private static String getFormatSupportString(int formatSupport) {
    if (formatSupport == RendererCapabilities.FORMAT_HANDLED) {
        return "YES";
    } else if (formatSupport == RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES) {
        return "NO_EXCEEDS_CAPABILITIES";
    } else if (formatSupport == RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE) {
        return "NO_UNSUPPORTED_TYPE";
    } else if (formatSupport == RendererCapabilities.FORMAT_UNSUPPORTED_TYPE) {
        return "NO";
    }
    return "?";
}
// Human-readable label for a RendererCapabilities adaptive-support constant.
private static String getAdaptiveSupportString(int trackCount, int adaptiveSupport) {
    // Adaptive selection is meaningless for groups with fewer than two tracks.
    if (trackCount < 2) {
        return "N/A";
    }
    if (adaptiveSupport == RendererCapabilities.ADAPTIVE_SEAMLESS) {
        return "YES";
    } else if (adaptiveSupport == RendererCapabilities.ADAPTIVE_NOT_SEAMLESS) {
        return "YES_NOT_SEAMLESS";
    } else if (adaptiveSupport == RendererCapabilities.ADAPTIVE_NOT_SUPPORTED) {
        return "NO";
    }
    return "?";
}
private static String getTrackStatusString(TrackSelection selection, TrackGroup group,
        int trackIndex) {
    // A track counts as selected only when the selection targets this exact group
    // (reference equality) and the selection actually contains the track index.
    final boolean selected = selection != null
            && selection.getTrackGroup() == group
            && selection.indexOf(trackIndex) != C.INDEX_UNSET;
    return getTrackStatusString(selected);
}
private static String getTrackStatusString(boolean enabled) {
    // "[X]" marks an enabled (selected) track; "[ ]" marks a disabled one.
    if (enabled) {
        return "[X]";
    }
    return "[ ]";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Endpoint;
import org.apache.camel.ErrorHandlerFactory;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.spi.AsEndpointUri;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.Resource;
import org.apache.camel.support.OrderedComparator;
import org.apache.camel.support.PatternHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A series of Camel routes
*/
@Metadata(label = "configuration")
@XmlRootElement(name = "routes")
@XmlAccessorType(XmlAccessType.FIELD)
public class RoutesDefinition extends OptionalIdentifiedDefinition<RoutesDefinition>
        implements RouteContainer, CamelContextAware {

    private static final Logger LOG = LoggerFactory.getLogger(RoutesDefinition.class);

    @XmlElementRef
    private List<RouteDefinition> routes = new ArrayList<>();
    // The fields below are runtime-only state and are excluded from the XML model.
    // They hold cross-cutting definitions that get merged into each route in prepareRoute().
    @XmlTransient
    private List<InterceptDefinition> intercepts = new ArrayList<>();
    @XmlTransient
    private List<InterceptFromDefinition> interceptFroms = new ArrayList<>();
    @XmlTransient
    private List<InterceptSendToEndpointDefinition> interceptSendTos = new ArrayList<>();
    @XmlTransient
    private List<OnExceptionDefinition> onExceptions = new ArrayList<>();
    @XmlTransient
    private List<OnCompletionDefinition> onCompletions = new ArrayList<>();
    @XmlTransient
    private CamelContext camelContext;
    @XmlTransient
    private ErrorHandlerFactory errorHandlerFactory;
    @XmlTransient
    private Resource resource;

    public RoutesDefinition() {
    }

    @Override
    public String toString() {
        return "Routes: " + routes;
    }

    @Override
    public String getShortName() {
        return "routes";
    }

    @Override
    public String getLabel() {
        return "Route " + getId();
    }

    // Properties
    // -----------------------------------------------------------------------

    @Override
    public List<RouteDefinition> getRoutes() {
        return routes;
    }

    @Override
    public void setRoutes(List<RouteDefinition> routes) {
        this.routes = routes;
    }

    public List<InterceptFromDefinition> getInterceptFroms() {
        return interceptFroms;
    }

    public void setInterceptFroms(List<InterceptFromDefinition> interceptFroms) {
        this.interceptFroms = interceptFroms;
    }

    public List<InterceptSendToEndpointDefinition> getInterceptSendTos() {
        return interceptSendTos;
    }

    public void setInterceptSendTos(List<InterceptSendToEndpointDefinition> interceptSendTos) {
        this.interceptSendTos = interceptSendTos;
    }

    public List<InterceptDefinition> getIntercepts() {
        return intercepts;
    }

    public void setIntercepts(List<InterceptDefinition> intercepts) {
        this.intercepts = intercepts;
    }

    public List<OnExceptionDefinition> getOnExceptions() {
        return onExceptions;
    }

    public void setOnExceptions(List<OnExceptionDefinition> onExceptions) {
        this.onExceptions = onExceptions;
    }

    public List<OnCompletionDefinition> getOnCompletions() {
        return onCompletions;
    }

    public void setOnCompletions(List<OnCompletionDefinition> onCompletions) {
        this.onCompletions = onCompletions;
    }

    // @Override added: these implement CamelContextAware.
    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    public ErrorHandlerFactory getErrorHandlerFactory() {
        return errorHandlerFactory;
    }

    public void setErrorHandlerFactory(ErrorHandlerFactory errorHandlerFactory) {
        this.errorHandlerFactory = errorHandlerFactory;
    }

    public Resource getResource() {
        return resource;
    }

    public void setResource(Resource resource) {
        this.resource = resource;
    }

    // Fluent API
    // -------------------------------------------------------------------------

    /**
     * Creates a new route
     *
     * Prefer to use the from methods when creating a new route.
     *
     * @return the builder
     */
    public RouteDefinition route() {
        RouteDefinition route = createRoute();
        return route(route);
    }

    /**
     * Creates a new route from the given URI input
     *
     * @param  uri the from uri
     * @return     the builder
     */
    public RouteDefinition from(@AsEndpointUri String uri) {
        RouteDefinition route = createRoute();
        route.from(uri);
        return route(route);
    }

    /**
     * Creates a new route from the given endpoint
     *
     * @param  endpoint the from endpoint
     * @return          the builder
     */
    public RouteDefinition from(Endpoint endpoint) {
        RouteDefinition route = createRoute();
        route.from(endpoint);
        return route(route);
    }

    /**
     * Creates a new route consuming from the endpoint produced by the given builder.
     *
     * @param  endpoint the from endpoint (as a consumer builder)
     * @return          the builder
     */
    public RouteDefinition from(EndpointConsumerBuilder endpoint) {
        RouteDefinition route = createRoute();
        route.from(endpoint);
        return route(route);
    }

    /**
     * Creates a new route using the given route.
     * <p/>
     * <b>Important:</b> This API is NOT intended for Camel end users, but used internally by Camel itself.
     *
     * @param  route the route
     * @return       the builder
     */
    public RouteDefinition route(RouteDefinition route) {
        // must set the error handler if not already set on the route
        ErrorHandlerFactory handler = getErrorHandlerFactory();
        if (handler != null) {
            route.setErrorHandlerFactoryIfNull(handler);
        }
        getRoutes().add(route);
        return route;
    }

    /**
     * Prepares the route for use: merges global and matching route-configuration scoped
     * cross-cutting definitions (onException, intercepts, onCompletion) into the route.
     * Idempotent -- a route that is already prepared is left untouched.
     *
     * @param route the route to prepare
     */
    public void prepareRoute(RouteDefinition route) {
        if (route.isPrepared()) {
            return;
        }

        // reset before preparing route
        route.resetPrepare();

        // remember the source resource
        route.setResource(resource);

        // merge global and route scoped together
        List<OnExceptionDefinition> oe = new ArrayList<>(onExceptions);
        List<InterceptDefinition> icp = new ArrayList<>(intercepts);
        List<InterceptFromDefinition> ifrom = new ArrayList<>(interceptFroms);
        List<InterceptSendToEndpointDefinition> ito = new ArrayList<>(interceptSendTos);
        List<OnCompletionDefinition> oc = new ArrayList<>(onCompletions);
        if (getCamelContext() != null) {
            List<RouteConfigurationDefinition> globalConfigurations
                    = getCamelContext().adapt(ModelCamelContext.class).getRouteConfigurationDefinitions();
            if (globalConfigurations != null) {
                // if there are multiple ids configured then we should apply in that same order
                String[] ids = route.getRouteConfigurationId() != null
                        ? route.getRouteConfigurationId().split(",") : new String[] { "*" };
                for (String id : ids) {
                    // sort according to ordered
                    globalConfigurations.stream().sorted(OrderedComparator.get())
                            .filter(g -> {
                                if (route.getRouteConfigurationId() != null) {
                                    // if the route has a route configuration assigned then use pattern matching
                                    return PatternHelper.matchPattern(g.getId(), id);
                                } else {
                                    // global configurations have no id assigned or is a wildcard
                                    return g.getId() == null || g.getId().equals(id);
                                }
                            })
                            .forEach(g -> {
                                String aid = g.getId() == null ? "<default>" : g.getId();
                                // remember the id that was used on the route
                                route.addAppliedRouteConfigurationId(aid);
                                oe.addAll(g.getOnExceptions());
                                icp.addAll(g.getIntercepts());
                                ifrom.addAll(g.getInterceptFroms());
                                ito.addAll(g.getInterceptSendTos());
                                oc.addAll(g.getOnCompletions());
                            });
                }
            }
        }

        // must prepare the route before we can add it to the routes list
        RouteDefinitionHelper.prepareRoute(getCamelContext(), route, oe, icp, ifrom, ito, oc);

        if (LOG.isDebugEnabled() && route.getAppliedRouteConfigurationIds() != null) {
            LOG.debug("Route: {} is using route configurations ids: {}", route.getId(),
                    route.getAppliedRouteConfigurationIds());
        }

        // mark this route as prepared
        route.markPrepared();
    }

    /**
     * Creates and adds an interceptor that is triggered on every step in the route processing.
     *
     * @return the interceptor builder to configure
     */
    public InterceptDefinition intercept() {
        InterceptDefinition answer = new InterceptDefinition();
        // inserted first so it is applied before previously registered intercepts
        getIntercepts().add(0, answer);
        return answer;
    }

    /**
     * Creates and adds an interceptor that is triggered when an exchange is received as input to any routes (eg from
     * all the <tt>from</tt>)
     *
     * @return the interceptor builder to configure
     */
    public InterceptFromDefinition interceptFrom() {
        InterceptFromDefinition answer = new InterceptFromDefinition();
        getInterceptFroms().add(answer);
        return answer;
    }

    /**
     * Creates and adds an interceptor that is triggered when an exchange is received as input to the route defined with
     * the given endpoint (eg from the <tt>from</tt>)
     *
     * @param  uri uri of the endpoint
     * @return     the interceptor builder to configure
     */
    public InterceptFromDefinition interceptFrom(@AsEndpointUri final String uri) {
        InterceptFromDefinition answer = new InterceptFromDefinition(uri);
        getInterceptFroms().add(answer);
        return answer;
    }

    /**
     * Creates and adds an interceptor that is triggered when an exchange is send to the given endpoint
     *
     * @param  uri uri of the endpoint
     * @return     the builder
     */
    public InterceptSendToEndpointDefinition interceptSendToEndpoint(@AsEndpointUri final String uri) {
        InterceptSendToEndpointDefinition answer = new InterceptSendToEndpointDefinition(uri);
        getInterceptSendTos().add(answer);
        return answer;
    }

    /**
     * Adds an on exception
     *
     * @param  exception the exception
     * @return           the builder
     */
    public OnExceptionDefinition onException(Class<? extends Throwable> exception) {
        OnExceptionDefinition answer = new OnExceptionDefinition(exception);
        // global scope: applies to all routes, not a single one
        answer.setRouteScoped(false);
        getOnExceptions().add(answer);
        return answer;
    }

    /**
     * Adds an on completion
     *
     * @return the builder
     */
    public OnCompletionDefinition onCompletion() {
        OnCompletionDefinition answer = new OnCompletionDefinition();
        // global scope: applies to all routes, not a single one
        answer.setRouteScoped(false);
        getOnCompletions().add(answer);
        return answer;
    }

    // Implementation methods
    // -------------------------------------------------------------------------

    /**
     * Creates a fresh route, pre-wired with this container's error handler factory (if any).
     *
     * @return the new route definition
     */
    protected RouteDefinition createRoute() {
        RouteDefinition route = new RouteDefinition();
        ErrorHandlerFactory handler = getErrorHandlerFactory();
        if (handler != null) {
            route.setErrorHandlerFactoryIfNull(handler);
        }
        return route;
    }
}
| |
package edu.cmu.pocketsphinx.demo;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TreeSet;
public class DatabaseHandlerSudhir extends SQLiteOpenHelper {
public static ArrayList<defaultdetails> al1=new ArrayList<>();
public defaultdetails dd;
public emailStructure emailtemp;
public dmonth m;
public static final int database_version=1;
public static final String DataBase_name="Sudhir.db";
public static final String Table_name="MainTable";
public static final String roll_no="rollno";
public static final String Firstname="FirstName";
public static final String Last_name="LastName";
public static final String Email_id="EmailId";
public static final String col_phone="PhoneNo";
public static final String Course="Course";
String[] Table_info_attend={"IF1G_JUNE","IF1G_JULY","IF1G_AUGUST","IF1G_SEPT","IF1G_OCT","IF2G_DEC","IF2G_JAN","IF2G_FEB","IF2G_MARCH",
"IF3G_JUNE","IF3G_JULY","IF3G_AUGUST","IF3G_SEPT","IF3G_OCT","IF4G_DEC","IF4G_JAN","IF4G_FEB","IF4G_MARCH",
"IF5G_JUNE","IF5G_JULY","IF5G_AUGUST","IF5G_SEPT","IF5G_OCT","IF6G_DEC","IF6G_JAN","IF6G_FEB","IF6G_MARCH"};
String[] Table_info_attend_Computer={"CO1G_JUNE","CO1G_JULY","CO1G_AUGUST","CO1G_SEPT","CO1G_OCT","CO2G_DEC","CO2G_JAN","CO2G_FEB","CO2G_MARCH",
"CO3G_JUNE","CO3G_JULY","CO3G_AUGUST","CO3G_SEPT","CO3G_OCT","CO4G_DEC","CO4G_JAN","CO4G_FEB","CO4G_MARCH",
"CO5G_JUNE","CO5G_JULY","CO5G_AUGUST","CO5G_SEPT","CO5G_OCT","CO6G_DEC","CO6G_JAN","CO6G_FEB","CO6G_MARCH"};
String[] Table_info_attend_Electronics={"EJ1G_JUNE","EJ1G_JULY","EJ1G_AUGUST","EJ1G_SEPT","EJ1G_OCT","EJ2G_DEC","EJ2G_JAN","EJ2G_FEB","EJ2G_MARCH",
"EJ3G_JUNE","EJ3G_JULY","EJ3G_AUGUST","EJ3G_SEPT","EJ3G_OCT","EJ4G_DEC","EJ4G_JAN","EJ4G_FEB","EJ4G_MARCH",
"EJ5G_JUNE","EJ5G_JULY","EJ5G_AUGUST","EJ5G_SEPT","EJ5G_OCT","EJ6G_DEC","EJ6G_JAN","EJ6G_FEB","EJ6G_MARCH"};
public static final String Trigger_after="after_insert";
public static final String Table_Information="Information";
public static final String Table_Computer="Computer";
public static final String Table_Electronics="Electronics";
public static final String table_query = " CREATE TABLE " + Table_name + "(" +
roll_no + " INTEGER NOT NULL, " +
Firstname + " TEXT NOT NULL, " +
Course + " TEXT NOT NULL , "+
Last_name+ " TEXT NOT NULL , " +
Email_id + " TEXT NOT NULL , " +
col_phone + " TEXT NOT NULL "+
");";
public static final String Information_Query=" CREATE TABLE " + Table_Information + " (\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`FirstName`\tTEXT NOT NULL,\n" +
"\t`LastName`\tTEXT NOT NULL,\n" +
"\t`EmailId`\tTEXT NOT NULL,\n" +
"\t`PhoneNo`\tTEXT NOT NULL\n" +
")";
public static final String Computer_Query="CREATE TABLE " + Table_Computer + " (\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`FirstName`\tTEXT NOT NULL,\n" +
"\t`LastName`\tTEXT NOT NULL,\n" +
"\t`EmailId`\tTEXT NOT NULL,\n" +
"\t`PhoneNo`\tTEXT NOT NULL \n" +
")";
public static final String Electronics_Query="CREATE TABLE " + Table_Electronics+"(\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`FirstName`\tTEXT NOT NULL,\n" +
"\t`LastName`\tTEXT NOT NULL,\n" +
"\t`EmailId`\tTEXT NOT NULL,\n" +
"\t`PhoneNo`\tTEXT NOT NULL \n" +
")";
public static final String trigger_after_insert_Information="CREATE TRIGGER after_insert_Information\n" +
"\n" +
"after insert on MainTable for each row when new.Course=\"Information\"\n" +
"\n" +
"begin\n" +
"\n" +
"insert into Information (rollno,FirstName,LastName,EmailId,PhoneNo) values(new.rollno,new.FirstName,new.LastName,new.EmailId,new.PhoneNo);\n" +
"\t\n" +
"\tend";
public static final String trigger_after_insert_Computer="CREATE TRIGGER after_insert_Computer\n" +
"\n" +
"after insert on MainTable for each row when new.Course=\"Computer\"\n" +
"\n" +
"begin\n" +
"\n" +
"insert into Computer (rollno,FirstName,LastName,EmailId,PhoneNo) values(new.rollno,new.FirstName,new.LastName,new.EmailId,new.PhoneNo);\n" +
"\t\n" +
"\tend";
public static final String trigger_after_insert_Electronics="CREATE TRIGGER after_insert_Electronics\n" +
"\n" +
"after insert on MainTable for each row when new.Course=\"Electronics\"\n" +
"\n" +
"begin\n" +
"\n" +
"insert into Electronics (rollno,FirstName,LastName,EmailId,PhoneNo) values(new.rollno,new.FirstName,new.LastName,new.EmailId,new.PhoneNo);\n" +
"\t\n" +
"\tend";
// Opens (or creates) the application database.
// NOTE(review): the 'name' and 'version' parameters are silently IGNORED -- the helper
// always opens DataBase_name ("Sudhir.db") at database_version (1), regardless of what
// callers pass. Confirm this is intentional, or remove the misleading parameters.
public DatabaseHandlerSudhir(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) {
    super(context, DataBase_name, factory, database_version);
}
@Override
public void onCreate(SQLiteDatabase db) {
//using an array to create table of sex semester for information
for(String tb_name:Table_info_attend) {
String table_attend_query = "CREATE TABLE " + tb_name + " (\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`1`\tTEXT,\n" +
"\t`2`\tTEXT,\n" +
"\t`3`\tTEXT,\n" +
"\t`4`\tTEXT,\n" +
"\t`5`\tTEXT,\n" +
"\t`6`\tTEXT,\n" +
"\t`7`\tTEXT,\n" +
"\t`8`\tTEXT,\n" +
"\t`9`\tTEXT,\n" +
"\t`10`\tTEXT,\n" +
"\t`11`\tTEXT,\n" +
"\t`12`\tTEXT,\n" +
"\t`13`\tTEXT,\n" +
"\t`14`\tTEXT,\n" +
"\t`15`\tTEXT,\n" +
"\t`16`\tTEXT,\n" +
"\t`17`\tTEXT,\n" +
"\t`18`\tTEXT,\n" +
"\t`19`\tTEXT,\n" +
"\t`20`\tTEXT,\n" +
"\t`21`\tTEXT,\n" +
"\t`22`\tTEXT,\n" +
"\t`23`\tTEXT,\n" +
"\t`24`\tTEXT,\n" +
"\t`25`\tTEXT,\n" +
"\t`26`\tTEXT,\n" +
"\t`27`\tTEXT,\n" +
"\t`28`\tTEXT,\n" +
"\t`29`\tTEXT,\n" +
"\t`30`\tTEXT,\n" +
"\t`31`\tTEXT \n" +
")";
db.execSQL(table_attend_query);
}
//using an array to create table of sex semester for Computer
for(String tb_name:Table_info_attend_Computer) {
String table_attend_query = "CREATE TABLE " + tb_name + " (\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`1`\tTEXT,\n" +
"\t`2`\tTEXT,\n" +
"\t`3`\tTEXT,\n" +
"\t`4`\tTEXT,\n" +
"\t`5`\tTEXT,\n" +
"\t`6`\tTEXT,\n" +
"\t`7`\tTEXT,\n" +
"\t`8`\tTEXT,\n" +
"\t`9`\tTEXT,\n" +
"\t`10`\tTEXT,\n" +
"\t`11`\tTEXT,\n" +
"\t`12`\tTEXT,\n" +
"\t`13`\tTEXT,\n" +
"\t`14`\tTEXT,\n" +
"\t`15`\tTEXT,\n" +
"\t`16`\tTEXT,\n" +
"\t`17`\tTEXT,\n" +
"\t`18`\tTEXT,\n" +
"\t`19`\tTEXT,\n" +
"\t`20`\tTEXT,\n" +
"\t`21`\tTEXT,\n" +
"\t`22`\tTEXT,\n" +
"\t`23`\tTEXT,\n" +
"\t`24`\tTEXT,\n" +
"\t`25`\tTEXT,\n" +
"\t`26`\tTEXT,\n" +
"\t`27`\tTEXT,\n" +
"\t`28`\tTEXT,\n" +
"\t`29`\tTEXT,\n" +
"\t`30`\tTEXT,\n" +
"\t`31`\tTEXT \n" +
")";
db.execSQL(table_attend_query);
}
for(String tb_name:Table_info_attend_Electronics) {
String table_attend_query = "CREATE TABLE " + tb_name + " (\n" +
"\t`rollno`\tINTEGER,\n" +
"\t`1`\tTEXT,\n" +
"\t`2`\tTEXT,\n" +
"\t`3`\tTEXT,\n" +
"\t`4`\tTEXT,\n" +
"\t`5`\tTEXT,\n" +
"\t`6`\tTEXT,\n" +
"\t`7`\tTEXT,\n" +
"\t`8`\tTEXT,\n" +
"\t`9`\tTEXT,\n" +
"\t`10`\tTEXT,\n" +
"\t`11`\tTEXT,\n" +
"\t`12`\tTEXT,\n" +
"\t`13`\tTEXT,\n" +
"\t`14`\tTEXT,\n" +
"\t`15`\tTEXT,\n" +
"\t`16`\tTEXT,\n" +
"\t`17`\tTEXT,\n" +
"\t`18`\tTEXT,\n" +
"\t`19`\tTEXT,\n" +
"\t`20`\tTEXT,\n" +
"\t`21`\tTEXT,\n" +
"\t`22`\tTEXT,\n" +
"\t`23`\tTEXT,\n" +
"\t`24`\tTEXT,\n" +
"\t`25`\tTEXT,\n" +
"\t`26`\tTEXT,\n" +
"\t`27`\tTEXT,\n" +
"\t`28`\tTEXT,\n" +
"\t`29`\tTEXT,\n" +
"\t`30`\tTEXT,\n" +
"\t`31`\tTEXT \n" +
")";
db.execSQL(table_attend_query);
}
//Creation of Trigger for information table
String trigger_info_table="CREATE TRIGGER after_insert_on_info_table \n" +
"after insert on " + Table_Information + " for each row begin \n" +
"insert into " + Table_info_attend[0] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[1] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[2] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[3] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[4] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[5] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[6] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[7] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[8] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[9] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[10] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[11] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[12] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[13] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[14] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[15] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[16] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[17] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[18] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[19] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[20] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[21] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[22] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[23] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[24] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[25] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend[26] + " (rollno) values(new.rollno) ;\n" +
"end";
//creation of trigger for Computer table
String trigger_co_table="CREATE TRIGGER after_insert_on_Computer \n" +
"after insert on " + Table_Computer + " for each row begin\n" +
"insert into " + Table_info_attend_Computer[0] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[1] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[2] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[3] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[4] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[5] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[6] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[7] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[8] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[9] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[10] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[11] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[12] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[13] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[14] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[15] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[16] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[17] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[18] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[19] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[20] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[21] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[22] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[23] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[24] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[25] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Computer[26] + " (rollno) values(new.rollno) ;\n" +
"end";
//creation of table for Electronics
String trigger_Ej_insert="CREATE TRIGGER after_insert_EJ\n" +
"after insert on " + Table_Electronics + " for each row begin\n" +
"insert into " + Table_info_attend_Electronics[0] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[1] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[2] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[3] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[4] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[5] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[6] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[7] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[8] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[9] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[10] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[11] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[12] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[13] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[14] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[15] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[16] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[17] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[18] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[19] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[20] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[21] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[22] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[23] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[24] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[25] + " (rollno) values(new.rollno) ;\n" +
"insert into " + Table_info_attend_Electronics[26] + " (rollno) values(new.rollno) ;\n" +
"end";
db.execSQL(table_query);
db.execSQL(Information_Query);
db.execSQL(Electronics_Query);
db.execSQL(Computer_Query);
db.execSQL(trigger_after_insert_Information);
db.execSQL(trigger_after_insert_Computer);
db.execSQL(trigger_after_insert_Electronics);
db.execSQL(trigger_info_table);
db.execSQL(trigger_co_table);
db.execSQL(trigger_Ej_insert);
}
/**
 * Inserts a new student record into the master student table.
 *
 * @param s the student whose roll number, first/last name, phone,
 *          email id and course are persisted
 */
public void register(Student s)
{
    SQLiteDatabase database = getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(roll_no, s.getRollno());
    row.put(Firstname, s.getFirst());
    row.put(Last_name, s.getLast());
    row.put(col_phone, s.getPhone());
    row.put(Email_id, s.getEmailid());
    row.put(Course, s.getCourse());
    database.insert(Table_name, null, row);
}
@Override
public void onUpgrade(SQLiteDatabase db, int i, int i1) {
    // Drop every per-month attendance table of all three departments.
    // BUG FIX: the Computer and Electronics loops used "DROP TABLE IF EXIST"
    // (missing the trailing S), which is a SQLite syntax error — those tables
    // were never dropped and execSQL threw on upgrade.
    for (String table : Table_info_attend) {
        db.execSQL("DROP TABLE IF EXISTS " + table);
    }
    for (String table : Table_info_attend_Computer) {
        db.execSQL("DROP TABLE IF EXISTS " + table);
    }
    for (String table : Table_info_attend_Electronics) {
        db.execSQL("DROP TABLE IF EXISTS " + table);
    }
    // Drop the master student table and the per-department tables.
    db.execSQL("DROP TABLE IF EXISTS " + Table_name);
    db.execSQL("DROP TABLE IF EXISTS " + Table_Computer);
    db.execSQL("DROP TABLE IF EXISTS " + Table_Electronics);
    db.execSQL("DROP TABLE IF EXISTS " + Table_Information);
    // FIX: a bare DROP TRIGGER fails when the trigger does not exist yet.
    db.execSQL("DROP TRIGGER IF EXISTS " + Trigger_after);
    // NOTE(review): these three fields are executed both here and in
    // onCreate; they are presumably DROP TRIGGER statements declared on the
    // class — confirm, otherwise the triggers get created twice.
    db.execSQL(trigger_after_insert_Electronics);
    db.execSQL(trigger_after_insert_Computer);
    db.execSQL(trigger_after_insert_Information);
    // Rebuild the whole schema from scratch.
    onCreate(db);
}
/**
 * Marks the given students present or absent for today's date in the
 * month table selected by course/semester.
 *
 * NOTE(review): the table name, day and roll numbers are concatenated
 * straight into the SQL text — callers must only pass trusted values
 * (SQL injection risk); flagged, not changed, to keep behavior identical.
 *
 * @param course          course key understood by {@link #Tablenamereturns}
 * @param semester        semester key ("first" .. "sixth")
 * @param Student_numbers roll numbers to update
 * @param status          "present" (case-insensitive) marks present,
 *                        anything else marks absent
 */
public void update(String course, String semester, TreeSet<String> Student_numbers, String status) {
    Calendar cal = Calendar.getInstance();
    // Calendar.MONTH is 0-based, hence the +1.
    String month = String.valueOf(1 + cal.get(Calendar.MONTH));
    String day = String.valueOf(cal.get(Calendar.DATE));
    String tablename = Tablenamereturns(course, semester, month);
    SQLiteDatabase db = getWritableDatabase();
    // The two original branches were byte-identical except for the value
    // written into the day column — collapse them into one loop.
    String mark = status.equalsIgnoreCase("present") ? "present" : "absent";
    for (String number : new TreeSet<>(Student_numbers)) {
        db.execSQL("update " + tablename + " \n" +
                "set \"" + day + "\"=\"" + mark + "\"\n" +
                "where rollno = " + number + " \n");
    }
}
/**
 * Resolves the attendance table name for a course/semester/month triple.
 *
 * Every table in the original 270-line switch ladder followed the same
 * pattern: {@code <PREFIX><SEM>G_<MONTH>}, e.g. "IF1G_JUNE", "CO6G_MARCH".
 * Odd semesters run June..October, even semesters December..March, so the
 * ladder collapses to three small lookups. (The debug println for second
 * semesters was removed.)
 *
 * @param course  "information", "electronics" or "computer"
 * @param semster "first" .. "sixth"
 * @param month   1-based month as a decimal string ("1" .. "12")
 * @return the table name, or "no record found" for any unknown combination
 */
public String Tablenamereturns(String course, String semster, String month)
{
    String prefix;
    switch (course) {
        case "information": prefix = "IF"; break;
        case "electronics": prefix = "EJ"; break;
        case "computer":    prefix = "CO"; break;
        default:            return ("no record found");
    }
    int sem;
    switch (semster) {
        case "first":  sem = 1; break;
        case "second": sem = 2; break;
        case "third":  sem = 3; break;
        case "fourth": sem = 4; break;
        case "fifth":  sem = 5; break;
        case "sixth":  sem = 6; break;
        default:       return ("no record found");
    }
    String suffix;
    if (sem % 2 == 1) {
        // Odd semesters: June through October.
        switch (month) {
            case "6":  suffix = "JUNE";   break;
            case "7":  suffix = "JULY";   break;
            case "8":  suffix = "AUGUST"; break;
            case "9":  suffix = "SEPT";   break;
            case "10": suffix = "OCT";    break;
            default:   return ("no record found");
        }
    } else {
        // Even semesters: December through March (spanning the year end).
        switch (month) {
            case "12": suffix = "DEC";   break;
            case "1":  suffix = "JAN";   break;
            case "2":  suffix = "FEB";   break;
            case "3":  suffix = "MARCH"; break;
            default:   return ("no record found");
        }
    }
    return prefix + sem + "G_" + suffix;
}
/**
 * Scans one month's attendance table and collects every student whose
 * attendance percentage is below 75 into the shared defaulter list.
 *
 * NOTE(review): this relies on shared mutable fields — each qualifying row
 * assigns the {@code dd} field, and {@code dd.getTemp()} apparently returns
 * the accumulated list (confirm against defaultdetails); a NullPointerException
 * (e.g. no rows at all, so {@code dd} stays null) is used as the "no data"
 * signal. Cursors are never closed here.
 *
 * @param tablename month table with columns rollno plus day columns 1..31
 * @return the accumulated defaulter list, or the previous contents of
 *         {@code al1} when nothing was found
 */
public ArrayList<defaultdetails> defaulterFor1month (String tablename)
{
SQLiteDatabase db = getWritableDatabase();
// NOTE(review): the whole projection is a single array element; the comma
// list with backticks is spliced verbatim into the SELECT by db.query.
String [] columns ={"`rollno`,`1`,`2`,`3`,`4`,`5`,`6`,`7`,`8`,`9`,`10`,`11`,`12`,`13`,`14`,`15`,`16`,`17`,`18`,`19`,`20`,`21`,`22`,`23`,`24`,`25`,`26`,`27`,`28`,`29`,`30`,`31`"};
Cursor cursor = db.query(tablename, columns, null, null, null, null, null);
Cursor cur2;
try {
while (cursor.moveToNext()) {
// c = days marked "present"; holiday = NULL day cells (no class held).
double c = 0.00, holiday = 0.00;
for (int i = 1; i <= 31; i++) {
if(!cursor.isNull(i))
{
String n=cursor.getString(i);
Log.v("values",n);
if(n.equals("present"))
{
c++;
}
}
else {
holiday++;
}
}
Log.v("valueofc","row"+cursor.getPosition()+c);
Log.v("valueofholiday",""+holiday);
// Attendance percentage over the days on which class was actually held.
double defaulterpercent = (c / (31.0-holiday )) * 100;
Log.v("lifes","row"+cursor.getPosition()+defaulterpercent);
if (defaulterpercent < 75) {
// Look up the defaulter's personal details in the master table.
String[] col = {"FirstName", "rollno", "Course"};
cur2 = db.query(Table_name, col, "rollno =" + cursor.getString(cursor.getColumnIndex(roll_no)), null, null, null, null);
cur2.moveToFirst();
dd = new defaultdetails(cur2.getString(cur2.getColumnIndex(Firstname)) + " ", cur2.getString(cur2.getColumnIndex(roll_no)) + " ", cur2.getString(cur2.getColumnIndex(Course)) + " ", defaulterpercent);
}
}
al1 = dd.getTemp();
} catch (NullPointerException e) {
// Deliberate control flow: dd is null when no defaulter was found.
Log.i("no", "Data Found");
}
return al1;
}
/**
 * Fetches name/roll/course/email/phone for each of the given roll numbers
 * from the master student table (used for sending defaulter mails).
 *
 * NOTE(review): roll numbers are concatenated into the WHERE clause —
 * callers must pass trusted values only.
 *
 * @param rollno roll numbers to look up
 * @return the list accumulated by the emailStructure helper, or an empty
 *         list when no lookup ever succeeded
 */
public ArrayList<emailStructure> getEmail(String[] rollno)
{
    SQLiteDatabase db = getReadableDatabase();
    String[] col = {"FirstName", "rollno", "Course", "EmailId", "PhoneNo"};
    ArrayList<emailStructure> e = new ArrayList<>();
    for (String roll : rollno) {
        Cursor cursor = db.query(Table_name, col, "rollno=" + roll, null, null, null, null);
        try {
            // FIX: the old code ignored moveToFirst()'s result and crashed
            // on an unknown roll number.
            if (cursor.moveToFirst()) {
                emailtemp = new emailStructure(cursor.getString(cursor.getColumnIndex(Firstname)),
                        cursor.getString(cursor.getColumnIndex(roll_no)),
                        cursor.getString(cursor.getColumnIndex(Course)),
                        cursor.getString(cursor.getColumnIndex(Email_id)),
                        cursor.getString(cursor.getColumnIndex(col_phone)));
            }
        } finally {
            // FIX: one cursor per roll number was leaked (never closed).
            cursor.close();
        }
    }
    // FIX: guard the NPE the old code threw when called with an empty array
    // before any emailStructure had ever been built.
    if (emailtemp != null) {
        e = emailtemp.getE();
    }
    return e;
}
//month retrieval
/**
 * Reads attendance rows (rollno plus day columns 1..31) from the given
 * month table, for one student or for all of them.
 *
 * @param tablename month table to read
 * @param roll      a single roll number, or null to fetch every row
 * @return the list accumulated by the dmonth helper
 */
public ArrayList<dmonth> getUsers(String tablename , String roll){
    SQLiteDatabase db = getWritableDatabase();
    String[] columns = {"rollno", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31"};
    Cursor cursor;
    if (roll == null)
        cursor = db.query(tablename, columns, null, null, null, null, null);
    else
        cursor = db.query(tablename, columns, "rollno=" + roll, null, null, null, null);
    try {
        while (cursor.moveToNext()) {
            // FIX: allocate a fresh array per row. The old code handed the
            // same array to every dmonth, so if dmonth keeps the reference,
            // each later row silently overwrote the data of earlier ones.
            String[] temp = new String[32];
            for (int i = 0; i < 32; i++) {
                temp[i] = cursor.getString(i);
            }
            m = new dmonth(temp);
        }
    } finally {
        // FIX: the cursor was never closed (resource leak).
        cursor.close();
    }
    return m.getTemp();
}
/**
 * Fetches every student's attendance for one month.
 *
 * The original body duplicated {@link #getUsers} line for line with a null
 * roll filter; delegate instead so the query logic lives in one place.
 *
 * @param tablename month table to read
 * @return the list accumulated by the dmonth helper
 */
public ArrayList<dmonth> getUsersbymonth(String tablename){
    return getUsers(tablename, null);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import static junit.framework.Assert.assertSame;
import static junit.framework.Assert.assertNotSame;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.junit.Test;
import java.security.PrivilegedExceptionAction;
import java.util.concurrent.Semaphore;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Tests for the {@link FileSystem} instance cache: when get() returns the
 * same cached object vs. a new one (per URI, per UGI, per config flag), and
 * for FilterFileSystem's delegation of delete / deleteOnExit to the wrapped
 * file system (verified with Mockito mocks).
 */
public class TestFileSystemCaching {
/** Two get() calls with the same URI and conf must return the cached instance. */
@Test
public void testCacheEnabled() throws Exception {
Configuration conf = new Configuration();
// Map the ad-hoc "cachedfile" scheme onto the local file system impl.
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
FileSystem fs1 = FileSystem.get(new URI("cachedfile://a"), conf);
FileSystem fs2 = FileSystem.get(new URI("cachedfile://a"), conf);
assertSame(fs1, fs2);
}
/** Minimal file system that only records the URI it was initialized with. */
static class DefaultFs extends LocalFileSystem {
URI uri;
@Override
public void initialize(URI uri, Configuration conf) {
this.uri = uri;
}
@Override
public URI getUri() {
return uri;
}
}
/** Checks which URI spellings resolve to the cached default file system. */
@Test
public void testDefaultFsUris() throws Exception {
final Configuration conf = new Configuration();
conf.set("fs.defaultfs.impl", DefaultFs.class.getName());
final URI defaultUri = URI.create("defaultfs://host");
FileSystem.setDefaultUri(conf, defaultUri);
FileSystem fs = null;
// sanity check default fs
final FileSystem defaultFs = FileSystem.get(conf);
assertEquals(defaultUri, defaultFs.getUri());
// has scheme, no auth
fs = FileSystem.get(URI.create("defaultfs:/"), conf);
assertSame(defaultFs, fs);
fs = FileSystem.get(URI.create("defaultfs:///"), conf);
assertSame(defaultFs, fs);
// has scheme, same auth
fs = FileSystem.get(URI.create("defaultfs://host"), conf);
assertSame(defaultFs, fs);
// has scheme, different auth
fs = FileSystem.get(URI.create("defaultfs://host2"), conf);
assertNotSame(defaultFs, fs);
// no scheme, no auth
fs = FileSystem.get(URI.create("/"), conf);
assertSame(defaultFs, fs);
// no scheme, same auth
try {
fs = FileSystem.get(URI.create("//host"), conf);
fail("got fs with auth but no scheme");
} catch (Exception e) {
assertEquals("No FileSystem for scheme: null", e.getMessage());
}
// no scheme, different auth
try {
fs = FileSystem.get(URI.create("//host2"), conf);
fail("got fs with auth but no scheme");
} catch (Exception e) {
assertEquals("No FileSystem for scheme: null", e.getMessage());
}
}
/**
 * File system whose initialize() blocks forever (until interrupted); used to
 * verify that one slow initialization does not block unrelated cache lookups.
 */
public static class InitializeForeverFileSystem extends LocalFileSystem {
final static Semaphore sem = new Semaphore(0);
public void initialize(URI uri, Configuration conf) throws IOException {
// notify that InitializeForeverFileSystem started initialization
sem.release();
try {
while (true) {
Thread.sleep(1000);
}
} catch (InterruptedException e) {
return;
}
}
}
/**
 * Starts a thread stuck initializing InitializeForeverFileSystem, then makes
 * sure a different scheme can still be fetched from the cache concurrently.
 * The test only completes if the second get() is not blocked by the first.
 */
@Test
public void testCacheEnabledWithInitializeForeverFS() throws Exception {
final Configuration conf = new Configuration();
Thread t = new Thread() {
public void run() {
conf.set("fs.localfs1.impl", "org.apache.hadoop.fs." +
"TestFileSystemCaching$InitializeForeverFileSystem");
try {
FileSystem.get(new URI("localfs1://a"), conf);
} catch (IOException e) {
e.printStackTrace();
} catch (URISyntaxException e) {
e.printStackTrace();
}
}
};
t.start();
// wait for InitializeForeverFileSystem to start initialization
InitializeForeverFileSystem.sem.acquire();
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
FileSystem.get(new URI("cachedfile://a"), conf);
t.interrupt();
t.join();
}
/** With the per-scheme disable flag set, each get() returns a new instance. */
@Test
public void testCacheDisabled() throws Exception {
Configuration conf = new Configuration();
conf.set("fs.uncachedfile.impl", conf.get("fs.file.impl"));
conf.setBoolean("fs.uncachedfile.impl.disable.cache", true);
FileSystem fs1 = FileSystem.get(new URI("uncachedfile://a"), conf);
FileSystem fs2 = FileSystem.get(new URI("uncachedfile://a"), conf);
assertNotSame(fs1, fs2);
}
/**
 * The cache key includes the UGI: the same UGI hits the cache, a different
 * UGI (or a UGI with a different subject) misses it.
 */
@SuppressWarnings("unchecked")
@Test
public <T extends TokenIdentifier> void testCacheForUgi() throws Exception {
final Configuration conf = new Configuration();
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo");
UserGroupInformation ugiB = UserGroupInformation.createRemoteUser("bar");
FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
//Since the UGIs are the same, we should have the same filesystem for both
assertSame(fsA, fsA1);
FileSystem fsB = ugiB.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
//Since the UGIs are different, we should end up with different filesystems
//corresponding to the two UGIs
assertNotSame(fsA, fsB);
Token<T> t1 = mock(Token.class);
UserGroupInformation ugiA2 = UserGroupInformation.createRemoteUser("foo");
fsA = ugiA2.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
// Although the users in the UGI are same, they have different subjects
// and so are different.
assertNotSame(fsA, fsA1);
ugiA.addToken(t1);
fsA = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
// Make sure that different UGI's with the same subject lead to the same
// file system.
assertSame(fsA, fsA1);
}
/** Different user names passed to get(uri, conf, user) get distinct instances. */
@Test
public void testUserFS() throws Exception {
final Configuration conf = new Configuration();
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
FileSystem fsU1 = FileSystem.get(new URI("cachedfile://a"), conf, "bar");
FileSystem fsU2 = FileSystem.get(new URI("cachedfile://a"), conf, "foo");
assertNotSame(fsU1, fsU2);
}
/** get() caches; newInstance() bypasses the cache and returns fresh objects. */
@Test
public void testFsUniqueness() throws Exception {
final Configuration conf = new Configuration();
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
// multiple invocations of FileSystem.get return the same object.
FileSystem fs1 = FileSystem.get(conf);
FileSystem fs2 = FileSystem.get(conf);
assertTrue(fs1 == fs2);
// multiple invocations of FileSystem.newInstance return different objects
fs1 = FileSystem.newInstance(new URI("cachedfile://a"), conf, "bar");
fs2 = FileSystem.newInstance(new URI("cachedfile://a"), conf, "bar");
assertTrue(fs1 != fs2 && !fs1.equals(fs2));
fs1.close();
fs2.close();
}
/** closeAllForUGI evicts the UGI's cached entries; the next get() is fresh. */
@Test
public void testCloseAllForUGI() throws Exception {
final Configuration conf = new Configuration();
conf.set("fs.cachedfile.impl", conf.get("fs.file.impl"));
UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo");
FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
//Now we should get the cached filesystem
FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
assertSame(fsA, fsA1);
FileSystem.closeAllForUGI(ugiA);
//Now we should get a different (newly created) filesystem
fsA1 = ugiA.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws Exception {
return FileSystem.get(new URI("cachedfile://a"), conf);
}
});
assertNotSame(fsA, fsA1);
}
/** FilterFileSystem.delete must pass both path and recursive flag through. */
@Test
public void testDelete() throws IOException {
FileSystem mockFs = mock(FileSystem.class);
FileSystem fs = new FilterFileSystem(mockFs);
Path path = new Path("/a");
fs.delete(path, false);
verify(mockFs).delete(eq(path), eq(false));
reset(mockFs);
fs.delete(path, true);
verify(mockFs).delete(eq(path), eq(true));
}
/** A path registered via deleteOnExit is recursively deleted on close(). */
@Test
public void testDeleteOnExit() throws IOException {
FileSystem mockFs = mock(FileSystem.class);
FileSystem fs = new FilterFileSystem(mockFs);
Path path = new Path("/a");
// delete on close if path does exist
when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
assertTrue(fs.deleteOnExit(path));
verify(mockFs).getFileStatus(eq(path));
reset(mockFs);
when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
fs.close();
verify(mockFs).getFileStatus(eq(path));
verify(mockFs).delete(eq(path), eq(true));
}
/** deleteOnExit returns false (and registers nothing) for a missing path. */
@Test
public void testDeleteOnExitFNF() throws IOException {
FileSystem mockFs = mock(FileSystem.class);
FileSystem fs = new FilterFileSystem(mockFs);
Path path = new Path("/a");
// don't delete on close if path doesn't exist
assertFalse(fs.deleteOnExit(path));
verify(mockFs).getFileStatus(eq(path));
reset(mockFs);
fs.close();
verify(mockFs, never()).getFileStatus(eq(path));
verify(mockFs, never()).delete(any(Path.class), anyBoolean());
}
/** A registered path that disappears before close() is not deleted. */
@Test
public void testDeleteOnExitRemoved() throws IOException {
FileSystem mockFs = mock(FileSystem.class);
FileSystem fs = new FilterFileSystem(mockFs);
Path path = new Path("/a");
// don't delete on close if path existed, but later removed
when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
assertTrue(fs.deleteOnExit(path));
verify(mockFs).getFileStatus(eq(path));
reset(mockFs);
fs.close();
verify(mockFs).getFileStatus(eq(path));
verify(mockFs, never()).delete(any(Path.class), anyBoolean());
}
/** cancelDeleteOnExit unregisters a path; a second cancel returns false. */
@Test
public void testCancelDeleteOnExit() throws IOException {
FileSystem mockFs = mock(FileSystem.class);
FileSystem fs = new FilterFileSystem(mockFs);
Path path = new Path("/a");
// don't delete on close if path existed, but later cancelled
when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
assertTrue(fs.deleteOnExit(path));
verify(mockFs).getFileStatus(eq(path));
assertTrue(fs.cancelDeleteOnExit(path));
assertFalse(fs.cancelDeleteOnExit(path)); // false because not registered
reset(mockFs);
fs.close();
verify(mockFs, never()).getFileStatus(any(Path.class));
verify(mockFs, never()).delete(any(Path.class), anyBoolean());
}
}
| |
package ir.pi0.irproject.repository;
import gnu.trove.iterator.TIntIntIterator;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.iterator.TIntObjectIterator;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.hash.TIntDoubleHashMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import ir.pi0.irproject.Consts;
import ir.pi0.irproject.proecessors.QueryParser;
import ir.pi0.irproject.structures.LRUCache;
import ir.pi0.irproject.utils.Util;
import javafx.util.Pair;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
public class WordDict {
public final static String csvHeader_word_repeat = "word_id,word,repeats,in_articles";
protected final TObjectIntHashMap<String> word_id_map
= new TObjectIntHashMap<>(Consts.PREDICTED_INITIAL_WORDS_COUNT);
protected final TIntIntMap word_repeats
= new TIntIntHashMap(Consts.PREDICTED_INITIAL_WORDS_COUNT);
protected final TIntIntMap word_repeats_in_article
= new TIntIntHashMap(Consts.PREDICTED_INITIAL_WORDS_COUNT);
protected final TIntObjectHashMap<TIntIntHashMap> article_words
= new TIntObjectHashMap<>(Consts.PREDICTED_INITIAL_WORDS_COUNT);
protected final TIntObjectHashMap<TIntDoubleHashMap> article_word_weights
= new TIntObjectHashMap<>(Consts.PREDICTED_INITIAL_WORDS_COUNT);
protected final LRUCache<Integer, WordPosting> word_postings;
public final File word_repeat_file;
public final File article_words_dir;
public final File word_postings_dir;
public final File clusters_dir;
AtomicInteger last_word_id = new AtomicInteger(0);
boolean sync_postings;
QueryParser queryParser = new QueryParser(this);
List<Cluster> clusters = new ArrayList<>();
/**
 * Convenience constructor: same as the four-argument form with
 * {@code load_data} enabled, so an existing dictionary file is read eagerly.
 */
public WordDict(File db, boolean purge_old_data, boolean sync_postings) {
this(db, purge_old_data, sync_postings, true);
}
/**
 * Builds the dictionary backed by the given CSV file and its sibling
 * "&lt;name&gt;.data" directory tree (article_words / clusters / word_postings).
 *
 * @param db              CSV file holding "word_id,word,repeats,in_articles" rows
 * @param purge_old_data  delete the CSV and the whole data directory first
 * @param sync_postings   create and maintain the on-disk posting directories
 * @param load_data       read the CSV and cluster files into memory
 *                        (ignored when purging)
 */
public WordDict(File db, boolean purge_old_data, boolean sync_postings, boolean load_data) {
this.sync_postings = sync_postings;
this.word_repeat_file = db;
if (purge_old_data && db.exists())
db.delete();
File data_root_dir = new File(db.getParent(), db.getName() + ".data");
if (!data_root_dir.exists())
data_root_dir.mkdirs();
else if (purge_old_data)
Util.deleteRecursive(data_root_dir);
this.article_words_dir = new File(data_root_dir, "article_words");
if (!article_words_dir.exists())
article_words_dir.mkdirs();
this.clusters_dir = new File(data_root_dir, "clusters");
if (!clusters_dir.exists())
clusters_dir.mkdirs();
this.word_postings_dir = new File(data_root_dir, "word_postings");
if (sync_postings) {
if (!word_postings_dir.exists())
word_postings_dir.mkdirs();
}
//Create posting root dirs (one numbered level-1 bucket directory each)
if (purge_old_data && sync_postings)
for (int i = 0; i < Consts.POSTINGS_L1_Index_SIZE; i++)
new File(word_postings_dir, String.valueOf(i)).mkdir();
// Evicted postings are flushed to disk before being dropped from the LRU.
word_postings = new LRUCache<>(Consts.WORD_POSTINGS_LRU_SIZE, new Consumer<WordPosting>() {
@Override
public void accept(WordPosting wordPosting) {
wordPosting.flush();
}
});
//(Debug) snapshot free heap before loading, to report memory usage below
System.gc();
long heapFreeSize = Runtime.getRuntime().freeMemory();
//-------------------------
if (!purge_old_data && load_data) {
System.out.println("Reading Words database ...");
// NOTE(review): this reader is never closed — leaks a file handle; the
// early return below also skips the heap-usage report at the bottom.
BufferedReader r;
try {
r = new BufferedReader(new InputStreamReader(new FileInputStream(db)));
} catch (FileNotFoundException e) {
e.printStackTrace();
return;
}
String l;
try {
l = r.readLine();
if (!csvHeader_word_repeat.equals(l))
System.err.println("Warning: Invalid csv header");
} catch (IOException e) {
// NOTE(review): after a failed header read the row loop still runs.
e.printStackTrace();
}
try {
// Each row: word_id,word,repeats[,in_articles] — the 4th column is
// not re-read here; addItem presumably rebuilds it (confirm).
while ((l = r.readLine()) != null) {
String[] split = l.split(",");
int id = Integer.parseInt(split[0]);
int repeats = Integer.parseInt(split[2]);
addItem(split[1], id, repeats);
}
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("Reading clusters");
this.clusters.clear();
// Cluster files are named by their numeric cluster id.
for (File f : clusters_dir.listFiles()) {
int id = Integer.parseInt(f.getName());
Cluster c = new Cluster(this, id);
c.loadFromFile(false);//Don't load subclusters by default for memory efficiency
clusters.add(c);
}
}
//-------------------------
//(Debug) report approximate heap consumed by the loaded data
System.gc();
long heapFreeSize2 = Runtime.getRuntime().freeMemory();
System.out.format("Heap usage: %s \n",
Util.humanReadableByteCount((int) Math.abs(heapFreeSize - heapFreeSize2), false));
}
/**
 * Persists the whole dictionary: the word CSV, any articles still held in
 * memory, and every cached word posting.
 */
public void save() {
    // Constructor bailed out before initializing anything — nothing to save.
    if (article_words_dir == null)
        return;
    try {
        //-------------------------
        System.out.println("Saving Words");
        final BufferedWriter w =
                new BufferedWriter(new FileWriter(word_repeat_file, false/*don't append*/));
        try {
            w.write(csvHeader_word_repeat);
            w.write("\r\n");
            TObjectIntIterator i = word_id_map.iterator();
            for (int j = word_id_map.size(); j-- > 0; ) {
                i.advance();
                // FIX: i.key() is the word, i.value() is its id. The old code
                // called word_id_map.get(i.value()), i.e. looked the int id up
                // as if it were a word key, which always returned the no-entry
                // value and wrote a wrong id (and wrong counts) on every row.
                int word_id = i.value();
                int repeats = word_repeats.get(word_id);
                int repeats_arc = word_repeats_in_article.get(word_id);
                try {
                    w.write(word_id + "," + i.key() + "," + repeats + "," + repeats_arc + "\n");
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            //-------------------------
            System.out.println("Flushing remaining Articles");
            synchronized (article_words) {
                TIntObjectIterator<TIntIntHashMap> i2 = article_words.iterator();
                while (i2.hasNext()) {
                    // FIX: the old loop never called advance(), so key() was
                    // read before the iterator was positioned and the loop
                    // could not progress.
                    i2.advance();
                    // NOTE(review): assumes flush_article does not remove the
                    // entry from article_words during iteration — confirm.
                    flush_article(i2.key(), true, true);
                }
            }
            System.out.println("Flushing all article postings");
            word_postings.forEach(new BiConsumer<Integer, WordPosting>() {
                @Override
                public void accept(Integer integer, WordPosting wordPosting) {
                    wordPosting.flush();
                }
            });
            //-------------------------
            System.out.println("Save done");
            w.flush();
        } finally {
            // FIX: close the writer even when flushing articles/postings throws.
            w.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Computes a TF-IDF-style weight for every word of one article:
 * w = tf * (1 / maxTf) * (log2 N - log2 df), where tf is the word's count
 * in this article, maxTf the largest count in the article, N the vocabulary
 * size (word_repeats.size()) and df the word's document frequency taken
 * from word_repeats_in_article.
 *
 * NOTE(review): an empty article yields maxTf = 0 and InvNMax = Infinity,
 * but the loop then runs zero times, so an empty map is returned safely.
 *
 * @param article word_id -> repeat count for one article
 * @return word_id -> weight map
 */
public TIntDoubleHashMap calculate_weight(TIntIntHashMap article) {
final TIntDoubleHashMap ws = new TIntDoubleHashMap();
final double logN = Util.log2(word_repeats.size());
//Find maximum repeats in this article
int max_repeats = 0;
for (int r : article.values())
if (r > max_repeats)
max_repeats = r;
//pre-calculate the reciprocal once instead of dividing per word
final double InvNMax = 1.0 / max_repeats;
//for each word, calculate Wij
TIntIntIterator i = article.iterator();
for (int j = article.size(); j-- > 0; ) {
i.advance();
int key = i.key();
double logRepeatInArticles = Util.log2(word_repeats_in_article.get(key));
double w = i.value() * InvNMax * (logN - logRepeatInArticles);
ws.put(key, w);
}
return ws;
}
/**
 * Recomputes the weight of every word in every article and rewrites each
 * article file on disk as "word_id,count,weight" lines.
 *
 * NOTE(review): each article file is truncated (opened non-append) before
 * its weights are written, and a failure to open a writer aborts the whole
 * run via the early return below — the remaining articles are then left
 * unprocessed. Progress and heap statistics are printed as it runs.
 */
public void calculate_weights() {
System.out.println("Calculate weights");
final int[] articles = list_articles();
//(debug) wall-clock start and minimum observed free heap, for the report
long startTime = System.currentTimeMillis();
long heapFreeSize_min = Runtime.getRuntime().totalMemory();
double p, last_p = -1;
for (int i = 0; i < articles.length; i++) {
final int article_id = articles[i];
//(Debug) redraw the progress line at most every 0.1%
p = (i * 1.0) / articles.length;
if (p - last_p > .001) {
Util.clearLine();
Util.printProgress(p, System.currentTimeMillis() - startTime, false, true, "Progress");
last_p = p;
}
long heapFreeSize = Runtime.getRuntime().freeMemory();
if (heapFreeSize < heapFreeSize_min)
heapFreeSize_min = heapFreeSize;
//Load article (the pair's key is the word_id -> count map)
TIntIntHashMap article = load_article(article_id, false).getKey();
//Open article file for rewriting
//Now the article file is empty !!!
final Writer writer;
try {
writer = new BufferedWriter(new FileWriter(
new File(article_words_dir, String.valueOf(article_id)), false));
} catch (IOException e) {
// NOTE(review): aborts ALL remaining articles, not just this one.
e.printStackTrace();
return;
}
//Calculate doc weights and write one "word_id,count,weight" line per word
final TIntDoubleHashMap weights = calculate_weight(article);
TIntIntIterator it = article.iterator();
for (int j = article.size(); j-- > 0; ) {
it.advance();
int word_id = it.key();
double w = weights.get(word_id);
try {
writer.append(String.format("%d,%d,%f\n", word_id, it.value(), w));
} catch (IOException e) {
e.printStackTrace();
}
}
try {
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
//(debug) final timing / heap report
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
Util.clearLine();
System.out.println();
System.out.printf("Max heap usage during process : %s\n",
Util.humanReadableByteCount(Runtime.getRuntime().totalMemory() - heapFreeSize_min));
System.out.printf("Took : %s\n", Util.getDurationBreakdown(elapsedTime, true));
}
/**
 * Clusters all articles with a single-pass "leader" algorithm: sqrt(n) evenly
 * spaced articles are picked as cluster seeds, then every remaining article is
 * assigned to the most similar still-active cluster. A cluster that exceeds its
 * capacity is persisted and retired mid-scan.
 * Side effects: wipes and recreates {@code clusters_dir} at the end.
 */
public void cluster_articles() {
    int[] articles = this.list_articles();
    //Pick sqrt(n) random articles as leaders
    int clusters_count = (int) (Math.sqrt((double) articles.length));
    Cluster[] clusters = new Cluster[clusters_count];
    int inc = articles.length / clusters_count;
    //Initialize clusters: the leader of cluster i is the article at index inc*i
    for (int i = 0; i < clusters_count; i++) {
        clusters[i] = new Cluster(this, i + 1);
        clusters[i].add(inc * i, null);
    }
    //Now distribute all articles among leaders
    //(debug) progress + heap-usage bookkeeping
    double p, last_p = -1;
    long startTime = System.currentTimeMillis();
    long heapTotal = Runtime.getRuntime().totalMemory();
    long heapFreeSize_min = heapTotal;
    for (int i = 0; i < articles.length; i++) {
        if (i % inc == 0)
            continue;//Skip leaders !!
        final int article_id = articles[i];
        // Weight vector only (getValue) -- raw counts are not needed for similarity
        TIntDoubleHashMap article = load_article(article_id, false).getValue();
        //(Debug) track the low-water mark of free heap
        long heapFreeSize = Runtime.getRuntime().freeMemory();
        if (heapFreeSize < heapFreeSize_min)
            heapFreeSize_min = heapFreeSize;
        p = (i * 1.0) / articles.length;
        if (p - last_p > .001) {  // refresh the display roughly every 0.1%
            Util.clearLine();
            Util.printProgress(p, System.currentTimeMillis() - startTime, false, true, "Clustering");
            Util.printProgress(1 - (heapFreeSize * 1.0 / heapTotal), 0, false, false, "Heap usage");
            last_p = p;
        }
        //Compare it to all leaders
        double best_match = -1;
        // NOTE(review): falls back to clusters[0] even when that cluster is
        // already full/inactive, so late articles may be added to a cluster
        // that was saved and discarded -- confirm this is intended.
        Cluster best_match_c = clusters[0];
        for (Cluster cluster : clusters) {
            if (!cluster.active)
                continue;
            // A cluster past its capacity is persisted and retired from matching
            if (cluster.getSize() > inc) {
                cluster.saveToFile();
                cluster.discardSubClusters();
                cluster.active = false;
                System.out.println("\nCluster " + cluster.id + " full & saved");
                System.gc();
                continue;
            }
            double score = cluster.compareToArticle(article);
            if (score > best_match) {
                best_match = score;
                best_match_c = cluster;
            }
        }
        best_match_c.add(article_id, article);
    }
    //Now save all clusters
    System.out.println("\nRemoving all old cluster files");
    Util.deleteRecursive(clusters_dir);
    clusters_dir.mkdirs();
    System.out.println("Saving all clusters");
    for (Cluster c : clusters)
        if (c.active)
            c.saveToFile();
    System.out.println("Save done");
}
/**
 * Dot product of two sparse weight vectors, iterating over the smaller one.
 * Only keys present in both documents contribute to the score.
 *
 * @return the similarity score (sum of products of matching weights)
 */
public static double articleCompare(TIntDoubleHashMap doc1, TIntDoubleHashMap doc2) {
    // Always walk the smaller map and probe the larger one.
    if (doc2.size() < doc1.size()) {
        TIntDoubleHashMap smaller = doc2;
        doc2 = doc1;
        doc1 = smaller;
    }
    double dot = 0;
    // Note: doc1's no-entry sentinel is (intentionally) used to test doc2 lookups.
    final double missing = doc1.getNoEntryValue();
    for (int key : doc1.keys()) {
        final double w2 = doc2.get(key);
        if (w2 != missing) {
            dot += doc1.get(key) * w2;
        }
    }
    return dot;
}
/**
 * Answers a free-text query with up to {@code limit} article ids, best first.
 * Three-level narrowing: L1 scores every cluster leader, L2 expands the best
 * clusters into sub-clusters, L3 scores the individual articles of the best
 * sub-clusters.
 * Fixes: removed the unused {@code p}/{@code last_p} locals, and the free-heap
 * low-water mark is now actually sampled (it was initialized but never updated,
 * so the "Max heap usage" debug line always printed ~0).
 *
 * @param query raw query text, parsed into a weight vector by {@code queryParser}
 * @param limit maximum number of article ids to return
 * @return article ids ordered from best to worst match
 */
public List<Integer> query(String query, int limit) {
    System.out.println("Do Query");
    //(debug) timing + heap tracking
    long startTime = System.currentTimeMillis();
    long heapFreeSize_min = Runtime.getRuntime().totalMemory();
    final TIntDoubleHashMap query_doc = queryParser.get_weights(query);
    //-------------------------
    //L1 -- score every cluster leader (no limit)
    TreeSet<QueryResult> results_l1 = new TreeSet<>();/*TreeSet is sorted*/
    for (Cluster c : clusters) {
        results_l1.add(new QueryResult(c.compareToArticle(query_doc), c));
    }
    //L2 -- expand the best clusters until roughly 'limit' articles are covered
    TreeSet<QueryResult> results_l2 = new TreeSet<>();
    int sz = 0;
    while (results_l1.size() > 0 && sz < limit) {
        QueryResult r = results_l1.pollLast();
        sz += r.cluster.size;
        System.out.println("Debug: Scanning cluster " + r.cluster.id);
        for (SubCluster sc : r.cluster.getSubClusters()) {
            results_l2.add(new QueryResult(sc.compareToArticle(query_doc), sc));
        }
        r.cluster.discardSubClusters();//Don't waste memory
    }
    //L3 -- score the individual articles of the best sub-clusters
    final TreeSet<QueryResult> results_l3 = new TreeSet<>();
    while (results_l2.size() > 0 && results_l3.size() < limit) {
        QueryResult r2 = results_l2.pollLast();
        TIntIterator i = r2.subCluster.articles.iterator();
        for (int j = r2.subCluster.articles.size(); j-- > 0; ) {
            int article_id = i.next();
            TIntDoubleHashMap article = load_article(article_id, false).getValue();
            results_l3.add(new QueryResult(articleCompare(query_doc, article), article_id));
            //(debug) track the free-heap low-water mark while articles are loaded
            long heapFreeSize = Runtime.getRuntime().freeMemory();
            if (heapFreeSize < heapFreeSize_min)
                heapFreeSize_min = heapFreeSize;
        }
    }
    //Final -- keep only the top 'limit' ids
    List<Integer> results = new ArrayList<>(limit);
    while (results_l3.size() > 0 && results.size() < limit) {
        QueryResult result = results_l3.pollLast();
        results.add(result.article_id);
    }
    //-------------------------
    //(debug)
    long stopTime = System.currentTimeMillis();
    long elapsedTime = stopTime - startTime;
    System.out.println();
    System.out.printf("Max heap usage during process : %s\n",
            Util.humanReadableByteCount(Runtime.getRuntime().totalMemory() - heapFreeSize_min));
    System.out.printf("Took : %s\n", Util.getDurationBreakdown(elapsedTime, true));
    return results;
}
/**
 * Registers a word with an externally assigned id and its total repeat count,
 * keeping the id counter ahead of any id seen so far.
 * NOTE(review): bumping to {@code id + 1} (rather than {@code id}) means the
 * next incrementAndGet() skips an id -- confirm this is intentional.
 */
public void addItem(String word, Integer id, Integer repeats) {
    if (last_word_id.get() < id) {
        last_word_id.set(id + 1);
    }
    word_id_map.put(word, id);
    word_repeats.put(id, repeats);
}
/**
 * Returns the (lazily created, cached) posting list for a word id. The backing
 * file path is derived by splitting the id into an L1 directory and a file name.
 * Fix: the original check-then-act sequence let two threads construct distinct
 * WordPosting instances for the same id and return different objects; the cache
 * is now re-checked under the lock so exactly one instance wins.
 */
public WordPosting getWordPosting(int word_id) {
    WordPosting p;
    synchronized (word_postings) {
        p = word_postings.get(word_id);
    }
    if (p == null) {
        String filename = (word_id % Consts.POSTINGS_L1_Index_SIZE) + "/"
                + (word_id / Consts.POSTINGS_L1_Index_SIZE);
        WordPosting created = new WordPosting(new File(word_postings_dir, filename));
        synchronized (word_postings) {
            // Another thread may have raced us here; keep the first instance.
            p = word_postings.get(word_id);
            if (p == null) {
                word_postings.put(word_id, created);
                p = created;
            }
        }
    }
    return p;
}
/**
 * Looks up the id assigned to a word.
 *
 * @return the word's id, or null when the word is unknown
 */
public Integer getWordID(String word) {
    final int id = word_id_map.get(word);
    return id == word_id_map.getNoEntryValue() ? null : id;
}
/**
 * Returns the total repeat count recorded for a word id.
 * NOTE(review): unlike {@link #getWordRepeats(String)}, this overload never
 * returns null -- a missing id yields the map's no-entry value instead.
 * Confirm whether callers depend on that before unifying the two.
 */
public Integer getWordRepeats(int id) {
    return word_repeats.get(id);
}
/**
 * Returns the total repeat count for a word, or null when the word is unknown.
 */
public Integer getWordRepeats(String word) {
    final int wordId = word_id_map.get(word);
    if (wordId != word_id_map.getNoEntryValue()) {
        return word_repeats.get(wordId);
    }
    return null;
}
/**
 * Adds {@code by} occurrences of a word to an article, creating the word id on
 * first sight. Updates, in order: the word-id map, the global repeat counts,
 * and the article's own word-count map.
 */
public void increment(String word, int article_id, int by) {
    if (word.isEmpty()) {
        return;
    }
    // Resolve (or mint) the word id under the map's lock.
    Integer wordId;
    synchronized (word_id_map) {
        wordId = word_id_map.get(word);
        if (wordId == word_id_map.getNoEntryValue()) {
            wordId = last_word_id.incrementAndGet();
            word_id_map.put(word, wordId);
        }
    }
    // Bump the corpus-wide repeat count.
    synchronized (word_repeats) {
        word_repeats.adjustOrPutValue(wordId, by, by);
    }
    // Bump the per-article count (loads the article from disk if not cached).
    TIntIntHashMap article = getArticleAndCache(article_id);
    article.adjustOrPutValue(wordId, by, by);
}
/**
 * Writes an article's cached word counts (and weights, when available) to its
 * file as "word_id,repeats[,weight]" lines, optionally updating postings, then
 * drops the article from the in-memory cache.
 * Fixes: {@code article_weights} can legitimately be null when no weights were
 * cached -- the original dereferenced it per line and relied on the catch-all
 * to swallow the NPEs; the writer is now closed via try-with-resources so the
 * handle cannot leak on exception paths.
 */
public void flush_article(final int article_id, final boolean update_postings, boolean save) {
    TIntIntHashMap article = getArticleAndCache(article_id);
    final TIntDoubleHashMap article_weights = article_word_weights.get(article_id);
    if (article == null)
        return;
    File article_words_file = new File(article_words_dir, String.valueOf(article_id));
    try {
        //Save article words
        if (save) {
            try (BufferedWriter w = new BufferedWriter(new FileWriter(article_words_file))) {
                synchronized (article) {
                    TIntIntIterator it = article.iterator();
                    for (int j = article.size(); j-- > 0; ) {
                        it.advance();
                        int word_id = it.key();
                        int word_repeats = it.value();
                        try {
                            // Weights are only present when the article was cached via load_article.
                            if (article_weights != null && article_weights.containsKey(word_id))
                                w.write(word_id + "," + word_repeats + "," + article_weights.get(word_id) + "\n");
                            else
                                w.write(word_id + "," + word_repeats + "\n");
                            if (update_postings && sync_postings) {
                                //Also update postings
                                getWordPosting(word_id).append(article_id);
                            }
                            word_repeats_in_article.adjustOrPutValue(word_id, 1, 1);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
        this.article_words.remove(article_id);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Loads an article's word counts and (optional) weights from its CSV file,
 * one "word_id,repeats[,weight]" record per line. A missing file yields a
 * pair of empty maps.
 * Fix: the BufferedReader was never closed, leaking a file handle on every
 * call; it is now managed with try-with-resources.
 *
 * @param cache when true, the loaded maps are also placed in the in-memory caches
 * @return pair of (word counts, word weights)
 */
public Pair<TIntIntHashMap, TIntDoubleHashMap> load_article(int article_id, boolean cache) {
    TIntIntHashMap article = new TIntIntHashMap();
    TIntDoubleHashMap article_weight = new TIntDoubleHashMap();
    File file = new File(article_words_dir, String.valueOf(article_id));
    if (file.exists()) {
        try (BufferedReader r = new BufferedReader(new FileReader(file))) {
            String l;
            while ((l = r.readLine()) != null) {
                String[] ls = l.split(",");
                int word_id = Integer.parseInt(ls[0]);
                article.put(word_id, Integer.parseInt(ls[1]));
                if (ls.length > 2)
                    article_weight.put(word_id, Double.parseDouble(ls[2]));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    if (cache)
        synchronized (article_words) {
            article_words.put(article_id, article);
            article_word_weights.put(article_id, article_weight);
        }
    return new Pair<>(article, article_weight);
}
/**
 * Returns the cached word-count map of an article, loading (and caching) it
 * from disk on a miss.
 */
public TIntIntHashMap getArticleAndCache(int article_id) {
    TIntIntHashMap cached;
    synchronized (article_words) {
        cached = article_words.get(article_id);
    }
    if (cached != null) {
        return cached;
    }
    return load_article(article_id, true).getKey();//Magically load it from disk
}
/**
 * Returns the cached word-weight map of an article, loading (and caching) it
 * from disk on a miss.
 */
public TIntDoubleHashMap getArticleWeightAndCache(int article_id) {
    TIntDoubleHashMap cached;
    synchronized (article_words) {
        cached = article_word_weights.get(article_id);
    }
    if (cached != null) {
        return cached;
    }
    return load_article(article_id, true).getValue();//Magically load it from disk
}
/**
 * Lists all article ids by parsing the file names in {@code article_words_dir}.
 *
 * @return article ids (empty array when the directory cannot be listed)
 */
public int[] list_articles() {
    final File[] entries = article_words_dir.listFiles();
    if (entries == null) {
        return new int[0];
    }
    final int[] ids = new int[entries.length];
    int idx = 0;
    for (File entry : entries) {
        ids[idx++] = Integer.parseInt(entry.getName());
    }
    return ids;
}
}
| |
/*
* <!--
* ~ Copyright 2015-2017 OpenCB
* ~
* ~ Licensed under the Apache License, Version 2.0 (the "License");
* ~ you may not use this file except in compliance with the License.
* ~ You may obtain a copy of the License at
* ~
* ~ http://www.apache.org/licenses/LICENSE-2.0
* ~
* ~ Unless required by applicable law or agreed to in writing, software
* ~ distributed under the License is distributed on an "AS IS" BASIS,
* ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* ~ See the License for the specific language governing permissions and
* ~ limitations under the License.
* -->
*
*/
package org.opencb.biodata.models.variant.annotation;
import java.util.Arrays;
import java.util.Objects;
/**
*
* @author Cristina Yenyxe Gonzalez Garcia <cyenyxe@ebi.ac.uk>
*
* TODO IND - individual name
* TODO ZYG - zygosity of individual genotype at this locus
*/
@Deprecated
public class VariantEffect {
/**
* Chromosome where the variant occurred
*/
private String chromosome;
/**
* Genomic position
*/
private int position;
/**
* Reference allele
*/
private String referenceAllele;
/**
* Alternate allele
*/
private String alternateAllele;
/**
* Ensembl stable ID of affected gene
*/
private String geneId;
/**
* The gene symbol
*/
private String geneName;
/**
* The gene symbol source
*/
private String geneNameSource;
/**
* Ensembl stable ID of feature
*/
private String featureId;
/**
* Type of feature, currently one of Transcript, RegulatoryFeature, MotifFeature
*/
private String featureType;
/**
* Biotype of transcript or gene
*/
private String featureBiotype;
/**
* The DNA strand (1 or -1) on which the transcript/feature lies
*/
private String featureStrand;
/**
* Relative position of base pair in cDNA sequence
*/
private int cDnaPosition;
/**
* The CCDS identifier for this transcript, where applicable
*/
private String ccdsId;
/**
* Relative position of base pair in coding sequence
*/
private int cdsPosition;
/**
* Ensembl protein identifier of the affected transcript
*/
private String proteinId;
/**
* Relative position of amino acid in protein
*/
private int proteinPosition;
/**
* Source and identifier of any overlapping protein domains
*/
private String[] proteinDomains;
/**
* Only given if the variation affects the protein-coding sequence
*/
private String aminoacidChange;
/**
* The alternative codons with the variant base in upper case
*/
private String codonChange;
/**
* Known identifier of existing variation
*/
private String variationId;
/**
* IDs of overlapping structural variants
*/
private String[] structuralVariantsId;
/**
* Consequence type of this variation (SO code)
*/
private int[] consequenceTypes;
/**
* Flag indicating if the transcript is denoted as the canonical transcript for this gene
*/
private boolean canonical;
/**
* HGVS coding sequence name
*/
private String hgvsc;
/**
* HGVS protein sequence name
*/
private String hgvsp;
/**
* Intron number, out of total number
*/
private String intronNumber;
/**
* Exon number, out of total number
*/
private String exonNumber;
/**
* Shortest distance from variant to transcript
*/
private int variantToTranscriptDistance;
/**
* Clinical significance of variant from dbSNP
*/
private String clinicalSignificance;
/**
* Pubmed ID(s) of publications that cite existing variant
*/
private String[] pubmed;
VariantEffect() {
this(null, -1, null, null);
}
public VariantEffect(String chromosome, int position, String referenceAllele, String alternateAllele) {
this.chromosome = chromosome;
this.position = position;
this.referenceAllele = referenceAllele;
this.alternateAllele = alternateAllele;
this.cDnaPosition = -1;
this.cdsPosition = -1;
this.proteinPosition = -1;
this.variantToTranscriptDistance = -1;
this.proteinDomains = new String[0];
this.structuralVariantsId = new String[0];
this.consequenceTypes = new int[0];
this.pubmed = new String[0];
}
public String getChromosome() {
return chromosome;
}
public void setChromosome(String chromosome) {
this.chromosome = chromosome;
}
public int getPosition() {
return position;
}
public void setPosition(int position) {
this.position = position;
}
public String getReferenceAllele() {
return referenceAllele;
}
public void setReferenceAllele(String referenceAllele) {
this.referenceAllele = referenceAllele;
}
public String getAlternateAllele() {
return alternateAllele;
}
public void setAlternateAllele(String alternateAllele) {
this.alternateAllele = alternateAllele;
}
public String getGeneId() {
return geneId;
}
public void setGeneId(String geneId) {
this.geneId = geneId;
}
public String getGeneName() {
return geneName;
}
public void setGeneName(String geneName) {
this.geneName = geneName;
}
public String getGeneNameSource() {
return geneNameSource;
}
public void setGeneNameSource(String geneNameSource) {
this.geneNameSource = geneNameSource;
}
public String getFeatureId() {
return featureId;
}
public void setFeatureId(String featureId) {
this.featureId = featureId;
}
public String getFeatureType() {
return featureType;
}
public void setFeatureType(String featureType) {
this.featureType = featureType;
}
public String getFeatureBiotype() {
return featureBiotype;
}
public void setFeatureBiotype(String featureBiotype) {
this.featureBiotype = featureBiotype;
}
public String getFeatureStrand() {
return featureStrand;
}
public void setFeatureStrand(String featureStrand) {
this.featureStrand = featureStrand;
}
public int getcDnaPosition() {
return cDnaPosition;
}
public void setcDnaPosition(int cDnaPosition) {
this.cDnaPosition = cDnaPosition;
}
public String getCcdsId() {
return ccdsId;
}
public void setCcdsId(String ccdsId) {
this.ccdsId = ccdsId;
}
public int getCdsPosition() {
return cdsPosition;
}
public void setCdsPosition(int cdsPosition) {
this.cdsPosition = cdsPosition;
}
public String getProteinId() {
return proteinId;
}
public void setProteinId(String proteinId) {
this.proteinId = proteinId;
}
public int getProteinPosition() {
return proteinPosition;
}
public void setProteinPosition(int proteinPosition) {
this.proteinPosition = proteinPosition;
}
public String[] getProteinDomains() {
return proteinDomains;
}
public void setProteinDomains(String[] proteinDomains) {
this.proteinDomains = proteinDomains;
}
public String getAminoacidChange() {
return aminoacidChange;
}
public void setAminoacidChange(String aminoacidChange) {
this.aminoacidChange = aminoacidChange;
}
public String getCodonChange() {
return codonChange;
}
public void setCodonChange(String codonChange) {
this.codonChange = codonChange;
}
public String getVariationId() {
return variationId;
}
public void setVariationId(String variationId) {
this.variationId = variationId;
}
public String[] getStructuralVariantsId() {
return structuralVariantsId;
}
public void setStructuralVariantsId(String[] structuralVariantsId) {
this.structuralVariantsId = structuralVariantsId;
}
public int[] getConsequenceTypes() {
return consequenceTypes;
}
public void setConsequenceTypes(int[] consequenceTypes) {
this.consequenceTypes = consequenceTypes;
}
public boolean isCanonical() {
return canonical;
}
public void setCanonical(boolean canonical) {
this.canonical = canonical;
}
public String getHgvsc() {
return hgvsc;
}
public void setHgvsc(String hgvsc) {
this.hgvsc = hgvsc;
}
public String getHgvsp() {
return hgvsp;
}
public void setHgvsp(String hgvsp) {
this.hgvsp = hgvsp;
}
public String getIntronNumber() {
return intronNumber;
}
public void setIntronNumber(String intronNumber) {
this.intronNumber = intronNumber;
}
public String getExonNumber() {
return exonNumber;
}
public void setExonNumber(String exonNumber) {
this.exonNumber = exonNumber;
}
public int getVariantToTranscriptDistance() {
return variantToTranscriptDistance;
}
public void setVariantToTranscriptDistance(int variantToTranscriptDistance) {
this.variantToTranscriptDistance = variantToTranscriptDistance;
}
public String getClinicalSignificance() {
return clinicalSignificance;
}
public void setClinicalSignificance(String clinicalSignificance) {
this.clinicalSignificance = clinicalSignificance;
}
public String[] getPubmed() {
return pubmed;
}
public void setPubmed(String[] pubmed) {
this.pubmed = pubmed;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final VariantEffect other = (VariantEffect) obj;
if (!Objects.equals(this.alternateAllele, other.alternateAllele)) {
return false;
}
if (!Objects.equals(this.geneId, other.geneId)) {
return false;
}
if (!Objects.equals(this.geneName, other.geneName)) {
return false;
}
if (!Objects.equals(this.geneNameSource, other.geneNameSource)) {
return false;
}
if (!Objects.equals(this.featureId, other.featureId)) {
return false;
}
if (!Objects.equals(this.featureType, other.featureType)) {
return false;
}
if (!Objects.equals(this.featureBiotype, other.featureBiotype)) {
return false;
}
if (!Objects.equals(this.featureStrand, other.featureStrand)) {
return false;
}
if (this.cDnaPosition != other.cDnaPosition) {
return false;
}
if (!Objects.equals(this.ccdsId, other.ccdsId)) {
return false;
}
if (this.cdsPosition != other.cdsPosition) {
return false;
}
if (!Objects.equals(this.proteinId, other.proteinId)) {
return false;
}
if (this.proteinPosition != other.proteinPosition) {
return false;
}
if (!Objects.equals(this.aminoacidChange, other.aminoacidChange)) {
return false;
}
if (!Objects.equals(this.codonChange, other.codonChange)) {
return false;
}
if (!Objects.equals(this.variationId, other.variationId)) {
return false;
}
if (!Arrays.equals(this.consequenceTypes, other.consequenceTypes)) {
return false;
}
if (!Objects.equals(this.intronNumber, other.intronNumber)) {
return false;
}
if (!Objects.equals(this.exonNumber, other.exonNumber)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int hash = 7;
hash = 97 * hash + Objects.hashCode(this.alternateAllele);
hash = 97 * hash + Objects.hashCode(this.geneId);
hash = 97 * hash + Objects.hashCode(this.geneName);
hash = 97 * hash + Objects.hashCode(this.geneNameSource);
hash = 97 * hash + Objects.hashCode(this.featureId);
hash = 97 * hash + Objects.hashCode(this.featureType);
hash = 97 * hash + Objects.hashCode(this.featureBiotype);
hash = 97 * hash + Objects.hashCode(this.featureStrand);
hash = 97 * hash + this.cDnaPosition;
hash = 97 * hash + Objects.hashCode(this.ccdsId);
hash = 97 * hash + this.cdsPosition;
hash = 97 * hash + Objects.hashCode(this.proteinId);
hash = 97 * hash + this.proteinPosition;
hash = 97 * hash + Objects.hashCode(this.aminoacidChange);
hash = 97 * hash + Objects.hashCode(this.codonChange);
hash = 97 * hash + Objects.hashCode(this.variationId);
hash = 97 * hash + Arrays.hashCode(this.consequenceTypes);
hash = 97 * hash + Objects.hashCode(this.intronNumber);
hash = 97 * hash + Objects.hashCode(this.exonNumber);
return hash;
}
}
| |
package org.bigtester.ate.reporter;
import org.testng.IReporter;
import org.testng.ISuite;
import org.testng.ISuiteResult;
import org.testng.ITestContext;
import org.testng.ITestNGMethod;
import org.testng.Reporter;
import org.testng.internal.Utils;
import org.testng.reporters.XMLReporterConfig;
import org.testng.reporters.XMLStringBuffer;
import org.testng.xml.XmlSuite;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
/**
* The main entry for the XML generation operation
*
* @author Cosmin Marginean, Mar 16, 2007
* @author Peidong Hu
*/
@SuppressWarnings("PMD")
public class ATEXMLReporter implements IReporter {
public static final String FILE_NAME = "testng-results2.xml";
private final XMLReporterConfig config = new XMLReporterConfig();
private XMLStringBuffer rootBuffer;
@Override
public void generateReport(List<XmlSuite> xmlSuites, List<ISuite> suites,
String outputDirectory) {
if (Utils.isStringEmpty(config.getOutputDirectory())) {
config.setOutputDirectory(outputDirectory);
}
// Calculate passed/failed/skipped
int passed = 0;
int failed = 0;
int skipped = 0;
for (ISuite s : suites) {
for (ISuiteResult sr : s.getResults().values()) {
ITestContext testContext = sr.getTestContext();
passed += testContext.getPassedTests().size();
failed += testContext.getFailedTests().size();
skipped += testContext.getSkippedTests().size();
}
}
rootBuffer = new XMLStringBuffer();
Properties p = new Properties();
p.put("passed", passed);
p.put("failed", failed);
p.put("skipped", skipped);
p.put("total", passed + failed + skipped);
rootBuffer.push(XMLReporterConfig.TAG_TESTNG_RESULTS, p);
writeReporterOutput(rootBuffer);
for (int i = 0; i < suites.size(); i++) {
writeSuite(suites.get(i).getXmlSuite(), suites.get(i));
}
rootBuffer.pop();
Utils.writeUtf8File(config.getOutputDirectory(), FILE_NAME, rootBuffer,
null /* no prefix */);
}
private void writeReporterOutput(XMLStringBuffer xmlBuffer) {
// TODO: Cosmin - maybe a <line> element isn't indicated for each line
xmlBuffer.push(XMLReporterConfig.TAG_REPORTER_OUTPUT);
List<String> output = Reporter.getOutput();
for (String line : output) {
if (line != null) {
xmlBuffer.push(XMLReporterConfig.TAG_LINE);
xmlBuffer.addCDATA(line);
xmlBuffer.pop();
}
}
xmlBuffer.pop();
}
private void writeSuite(XmlSuite xmlSuite, ISuite suite) {
switch (config.getFileFragmentationLevel()) {
case XMLReporterConfig.FF_LEVEL_NONE:
writeSuiteToBuffer(rootBuffer, suite);
break;
case XMLReporterConfig.FF_LEVEL_SUITE:
case XMLReporterConfig.FF_LEVEL_SUITE_RESULT:
File suiteFile = referenceSuite(rootBuffer, suite);
writeSuiteToFile(suiteFile, suite);
}
}
private void writeSuiteToFile(File suiteFile, ISuite suite) {
XMLStringBuffer xmlBuffer = new XMLStringBuffer();
writeSuiteToBuffer(xmlBuffer, suite);
File parentDir = suiteFile.getParentFile();
if (parentDir.exists() || suiteFile.getParentFile().mkdirs()) {
Utils.writeFile(parentDir.getAbsolutePath(), FILE_NAME,
xmlBuffer.toXML());
}
}
private File referenceSuite(XMLStringBuffer xmlBuffer, ISuite suite) {
String relativePath = suite.getName() + File.separatorChar + FILE_NAME;
File suiteFile = new File(config.getOutputDirectory(), relativePath);
Properties attrs = new Properties();
attrs.setProperty(XMLReporterConfig.ATTR_URL, relativePath);
xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_SUITE, attrs);
return suiteFile;
}
private void writeSuiteToBuffer(XMLStringBuffer xmlBuffer, ISuite suite) {
xmlBuffer.push(XMLReporterConfig.TAG_SUITE, getSuiteAttributes(suite));
writeSuiteGroups(xmlBuffer, suite);
Map<String, ISuiteResult> results = suite.getResults();
ATEXMLSuiteResultWriter suiteResultWriter = new ATEXMLSuiteResultWriter(
config);
for (Map.Entry<String, ISuiteResult> result : results.entrySet()) {
suiteResultWriter.writeSuiteResult(xmlBuffer, result.getValue());
}
xmlBuffer.pop();
}
private void writeSuiteGroups(XMLStringBuffer xmlBuffer, ISuite suite) {
xmlBuffer.push(XMLReporterConfig.TAG_GROUPS);
Map<String, Collection<ITestNGMethod>> methodsByGroups = suite
.getMethodsByGroups();
for (Map.Entry<String, Collection<ITestNGMethod>> entry : methodsByGroups
.entrySet()) {
Properties groupAttrs = new Properties();
groupAttrs.setProperty(XMLReporterConfig.ATTR_NAME, entry.getKey());
xmlBuffer.push(XMLReporterConfig.TAG_GROUP, groupAttrs);
Set<ITestNGMethod> groupMethods = getUniqueMethodSet(entry
.getValue());
for (ITestNGMethod groupMethod : groupMethods) {
Properties methodAttrs = new Properties();
methodAttrs.setProperty(XMLReporterConfig.ATTR_NAME,
groupMethod.getMethodName());
methodAttrs.setProperty(XMLReporterConfig.ATTR_METHOD_SIG,
groupMethod.toString());
methodAttrs.setProperty(XMLReporterConfig.ATTR_CLASS,
groupMethod.getRealClass().getName());
xmlBuffer.addEmptyElement(XMLReporterConfig.TAG_METHOD,
methodAttrs);
}
xmlBuffer.pop();
}
xmlBuffer.pop();
}
private Properties getSuiteAttributes(ISuite suite) {
Properties props = new Properties();
props.setProperty(XMLReporterConfig.ATTR_NAME, suite.getName());
// Calculate the duration
Map<String, ISuiteResult> results = suite.getResults();
Date minStartDate = new Date();
Date maxEndDate = null;
// TODO: We could probably optimize this in order not to traverse this
// twice
for (Map.Entry<String, ISuiteResult> result : results.entrySet()) {
ITestContext testContext = result.getValue().getTestContext();
Date startDate = testContext.getStartDate();
Date endDate = testContext.getEndDate();
if (minStartDate.after(startDate)) {
minStartDate = startDate;
}
if (maxEndDate == null || maxEndDate.before(endDate)) {
maxEndDate = endDate != null ? endDate : startDate;
}
}
// The suite could be completely empty
if (maxEndDate == null) {
maxEndDate = minStartDate;
}
addDurationAttributes(config, props, minStartDate, maxEndDate);
return props;
}
/**
* Add started-at, finished-at and duration-ms attributes to the <suite> tag
*/
public static void addDurationAttributes(XMLReporterConfig config,
Properties attributes, Date minStartDate, Date maxEndDate) {
SimpleDateFormat format = new SimpleDateFormat(
XMLReporterConfig.getTimestampFormat());
TimeZone utc = TimeZone.getTimeZone("UTC");
format.setTimeZone(utc);
String startTime = format.format(minStartDate);
String endTime = format.format(maxEndDate);
long duration = maxEndDate.getTime() - minStartDate.getTime();
attributes.setProperty(XMLReporterConfig.ATTR_STARTED_AT, startTime);
attributes.setProperty(XMLReporterConfig.ATTR_FINISHED_AT, endTime);
attributes.setProperty(XMLReporterConfig.ATTR_DURATION_MS,
Long.toString(duration));
}
private Set<ITestNGMethod> getUniqueMethodSet(
Collection<ITestNGMethod> methods) {
Set<ITestNGMethod> result = new LinkedHashSet<ITestNGMethod>();
for (ITestNGMethod method : methods) {
result.add(method);
}
return result;
}
// TODO: This is not the smartest way to implement the config
public int getFileFragmentationLevel() {
return config.getFileFragmentationLevel();
}
public void setFileFragmentationLevel(int fileFragmentationLevel) {
config.setFileFragmentationLevel(fileFragmentationLevel);
}
public int getStackTraceOutputMethod() {
return config.getStackTraceOutputMethod();
}
public void setStackTraceOutputMethod(int stackTraceOutputMethod) {
config.setStackTraceOutputMethod(stackTraceOutputMethod);
}
public String getOutputDirectory() {
return config.getOutputDirectory();
}
public void setOutputDirectory(String outputDirectory) {
config.setOutputDirectory(outputDirectory);
}
public boolean isGenerateGroupsAttribute() {
return config.isGenerateGroupsAttribute();
}
public void setGenerateGroupsAttribute(boolean generateGroupsAttribute) {
config.setGenerateGroupsAttribute(generateGroupsAttribute);
}
public boolean isSplitClassAndPackageNames() {
return config.isSplitClassAndPackageNames();
}
public void setSplitClassAndPackageNames(boolean splitClassAndPackageNames) {
config.setSplitClassAndPackageNames(splitClassAndPackageNames);
}
public String getTimestampFormat() {
return XMLReporterConfig.getTimestampFormat();
}
public void setTimestampFormat(String timestampFormat) {
config.setTimestampFormat(timestampFormat);
}
public boolean isGenerateDependsOnMethods() {
return config.isGenerateDependsOnMethods();
}
public void setGenerateDependsOnMethods(boolean generateDependsOnMethods) {
config.setGenerateDependsOnMethods(generateDependsOnMethods);
}
public void setGenerateDependsOnGroups(boolean generateDependsOnGroups) {
config.setGenerateDependsOnGroups(generateDependsOnGroups);
}
public boolean isGenerateDependsOnGroups() {
return config.isGenerateDependsOnGroups();
}
public void setGenerateTestResultAttributes(
boolean generateTestResultAttributes) {
config.setGenerateTestResultAttributes(generateTestResultAttributes);
}
public boolean isGenerateTestResultAttributes() {
return config.isGenerateTestResultAttributes();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.normalizer;
import static java.util.Collections.singletonList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.comparesEqualTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.when;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNameTestRule;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.StringDescription;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
 * A test over {@link RegionNormalizerWorker}. Being a background thread, the only points of
 * interaction we have to this class are its input source ({@link RegionNormalizerWorkQueue} and
 * its callbacks invoked against {@link RegionNormalizer} and {@link MasterServices}. The work
 * queue is simple enough to use directly; for {@link MasterServices}, use a mock because, as of
 * now, the worker only invokes 4 methods.
 */
@Category({ MasterTests.class, SmallTests.class})
public class TestRegionNormalizerWorker {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRegionNormalizerWorker.class);

  @Rule
  public TestName testName = new TestName();
  @Rule
  public TableNameTestRule tableName = new TableNameTestRule();

  // This rule instantiates the @Mock fields before each test, so the deprecated
  // MockitoAnnotations.initMocks(this) call is unnecessary (it would re-create the mocks
  // a second time).
  @Rule
  public MockitoRule mockitoRule = MockitoJUnit.rule();

  @Mock(answer = Answers.RETURNS_DEEP_STUBS)
  private MasterServices masterServices;
  @Mock
  private RegionNormalizer regionNormalizer;

  private HBaseCommonTestingUtility testingUtility;
  private RegionNormalizerWorkQueue<TableName> queue;
  private ExecutorService workerPool;

  // Captures any uncaught exception thrown on the worker thread so after() can
  // re-assert it on the test thread.
  private final AtomicReference<Throwable> workerThreadThrowable = new AtomicReference<>();

  @Before
  public void before() throws Exception {
    when(masterServices.skipRegionManagementAction(any())).thenReturn(false);
    testingUtility = new HBaseCommonTestingUtility();
    queue = new RegionNormalizerWorkQueue<>();
    workerThreadThrowable.set(null);

    // Name worker threads after the running test so log output is attributable.
    final String threadNameFmt =
      TestRegionNormalizerWorker.class.getSimpleName() + "-" + testName.getMethodName() + "-%d";
    final ThreadFactory threadFactory = new ThreadFactoryBuilder()
      .setNameFormat(threadNameFmt)
      .setDaemon(true)
      .setUncaughtExceptionHandler((t, e) -> workerThreadThrowable.set(e))
      .build();
    workerPool = Executors.newSingleThreadExecutor(threadFactory);
  }

  @After
  public void after() throws Exception {
    workerPool.shutdownNow(); // shutdownNow to interrupt the worker thread sitting on `take()`
    assertTrue("timeout waiting for worker thread to terminate",
      workerPool.awaitTermination(30, TimeUnit.SECONDS));
    final Throwable workerThrowable = workerThreadThrowable.get();
    assertThat("worker thread threw unexpected exception", workerThrowable, nullValue());
  }

  /**
   * Feed the worker a single merge plan and verify that executing it bumps the merge plan
   * counter.
   */
  @Test
  public void testMergeCounter() throws Exception {
    final TableName tn = tableName.getTableName();
    final TableDescriptor tnDescriptor = TableDescriptorBuilder.newBuilder(tn)
      .setNormalizationEnabled(true)
      .build();
    when(masterServices.getTableDescriptors().get(tn)).thenReturn(tnDescriptor);
    when(masterServices.mergeRegions(any(), anyBoolean(), anyLong(), anyLong()))
      .thenReturn(1L);
    when(regionNormalizer.computePlansForTable(tnDescriptor))
      .thenReturn(singletonList(new MergeNormalizationPlan.Builder()
        .addTarget(RegionInfoBuilder.newBuilder(tn).build(), 10)
        .addTarget(RegionInfoBuilder.newBuilder(tn).build(), 20)
        .build()));

    final RegionNormalizerWorker worker = new RegionNormalizerWorker(
      testingUtility.getConfiguration(), masterServices, regionNormalizer, queue);
    final long beforeMergePlanCount = worker.getMergePlanCount();
    workerPool.submit(worker);
    queue.put(tn);

    assertThatEventually("executing work should see plan count increase",
      worker::getMergePlanCount, greaterThan(beforeMergePlanCount));
  }

  /**
   * Feed the worker a single split plan and verify that executing it bumps the split plan
   * counter.
   */
  @Test
  public void testSplitCounter() throws Exception {
    final TableName tn = tableName.getTableName();
    final TableDescriptor tnDescriptor = TableDescriptorBuilder.newBuilder(tn)
      .setNormalizationEnabled(true)
      .build();
    when(masterServices.getTableDescriptors().get(tn)).thenReturn(tnDescriptor);
    when(masterServices.splitRegion(any(), any(), anyLong(), anyLong()))
      .thenReturn(1L);
    when(regionNormalizer.computePlansForTable(tnDescriptor))
      .thenReturn(singletonList(
        new SplitNormalizationPlan(RegionInfoBuilder.newBuilder(tn).build(), 10)));

    final RegionNormalizerWorker worker = new RegionNormalizerWorker(
      testingUtility.getConfiguration(), masterServices, regionNormalizer, queue);
    final long beforeSplitPlanCount = worker.getSplitPlanCount();
    workerPool.submit(worker);
    queue.put(tn);

    assertThatEventually("executing work should see plan count increase",
      worker::getSplitPlanCount, greaterThan(beforeSplitPlanCount));
  }

  /**
   * Assert that a rate limit is honored, at least in a rough way. Maintainers should manually
   * inspect the log messages emitted by the worker thread to confirm that expected behavior.
   */
  @Test
  public void testRateLimit() throws Exception {
    final TableName tn = tableName.getTableName();
    final TableDescriptor tnDescriptor = TableDescriptorBuilder.newBuilder(tn)
      .setNormalizationEnabled(true)
      .build();
    final RegionInfo splitRegionInfo = RegionInfoBuilder.newBuilder(tn).build();
    final RegionInfo mergeRegionInfo1 = RegionInfoBuilder.newBuilder(tn).build();
    final RegionInfo mergeRegionInfo2 = RegionInfoBuilder.newBuilder(tn).build();
    when(masterServices.getTableDescriptors().get(tn)).thenReturn(tnDescriptor);
    when(masterServices.splitRegion(any(), any(), anyLong(), anyLong()))
      .thenReturn(1L);
    when(masterServices.mergeRegions(any(), anyBoolean(), anyLong(), anyLong()))
      .thenReturn(1L);
    when(regionNormalizer.computePlansForTable(tnDescriptor))
      .thenReturn(Arrays.asList(
        new SplitNormalizationPlan(splitRegionInfo, 2),
        new MergeNormalizationPlan.Builder()
          .addTarget(mergeRegionInfo1, 1)
          .addTarget(mergeRegionInfo2, 2)
          .build(),
        new SplitNormalizationPlan(splitRegionInfo, 1)));

    // 5 "MB" of work at 1 MB/sec should take at least 5 seconds.
    final Configuration conf = testingUtility.getConfiguration();
    conf.set("hbase.normalizer.throughput.max_bytes_per_sec", "1m");
    final RegionNormalizerWorker worker = new RegionNormalizerWorker(
      conf, masterServices, regionNormalizer, queue);
    workerPool.submit(worker);
    final long startTime = System.nanoTime();
    queue.put(tn);

    assertThatEventually("executing work should see split plan count increase",
      worker::getSplitPlanCount, comparesEqualTo(2L));
    assertThatEventually("executing work should see merge plan count increase",
      worker::getMergePlanCount, comparesEqualTo(1L));

    final long endTime = System.nanoTime();
    assertThat("rate limited normalizer should have taken at least 5 seconds",
      Duration.ofNanos(endTime - startTime), greaterThanOrEqualTo(Duration.ofSeconds(5)));
  }

  /**
   * Repeatedly evaluates {@code matcher} against the result of calling {@code actualSupplier}
   * until the matcher succeeds or the timeout period of 30 seconds is exhausted.
   */
  private <T> void assertThatEventually(
    final String reason,
    final Supplier<? extends T> actualSupplier,
    final Matcher<? super T> matcher
  ) throws Exception {
    testingUtility.waitFor(TimeUnit.SECONDS.toMillis(30),
      new Waiter.ExplainingPredicate<Exception>() {
        // Remembers the last observed value so the failure explanation can describe
        // the actual mismatch.
        private T lastValue = null;

        @Override
        public String explainFailure() {
          final Description description = new StringDescription()
            .appendText(reason)
            .appendText("\nExpected: ")
            .appendDescriptionOf(matcher)
            .appendText("\n but: ");
          matcher.describeMismatch(lastValue, description);
          return description.toString();
        }

        @Override public boolean evaluate() {
          lastValue = actualSupplier.get();
          return matcher.matches(lastValue);
        }
      });
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.ReplicationResponse;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.flush.TransportFlushAction;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.local.LocalTransport;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndOneReplica;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithNoShard;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
 * Tests for {@link TransportBroadcastReplicationAction}: verifies how per-shard successes and
 * failures are combined into a single {@link BroadcastResponse}. The test action captures the
 * shard-level listeners instead of executing anything, so each test completes them by hand with
 * the desired successes or failures.
 */
public class BroadcastReplicationTests extends ESTestCase {

    private static ThreadPool threadPool;
    private TestClusterService clusterService;
    private TransportService transportService;
    private LocalTransport transport;
    private TestBroadcastReplicationAction broadcastReplicationAction;

    @BeforeClass
    public static void beforeClass() {
        threadPool = new ThreadPool("BroadcastReplicationTests");
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        transport = new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry());
        clusterService = new TestClusterService(threadPool);
        transportService = new TransportService(transport, threadPool);
        transportService.start();
        // Last argument (the replicated shard-level action) is null: shardExecute is overridden
        // below to capture requests, so the shard action is never invoked.
        broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, new ActionFilters(new HashSet<ActionFilter>()), new IndexNameExpressionResolver(Settings.EMPTY), null);
    }

    @AfterClass
    public static void afterClass() {
        ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
        threadPool = null;
    }

    // Shard-level "unavailable" failures should not count as failed shards in the broadcast
    // response; they are reported as not-successful only.
    public void testNotStartedPrimary() throws InterruptedException, ExecutionException, IOException {
        final String index = "test";
        clusterService.setState(state(index, randomBoolean(),
                randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED));
        logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
        Future<BroadcastResponse> response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index)));
        for (Tuple<ShardId, ActionListener<ReplicationResponse>> shardRequests : broadcastReplicationAction.capturedShardRequests) {
            if (randomBoolean()) {
                shardRequests.v2().onFailure(new NoShardAvailableActionException(shardRequests.v1()));
            } else {
                shardRequests.v2().onFailure(new UnavailableShardsException(shardRequests.v1(), "test exception"));
            }
        }
        response.get();
        logger.info("total shards: {}, ", response.get().getTotalShards());
        // we expect no failures here because UnavailableShardsException does not count as failed
        assertBroadcastResponse(2, 0, 0, response.get(), null);
    }

    // A started primary that reports one successful copy yields 1 successful of 1 total.
    public void testStartedPrimary() throws InterruptedException, ExecutionException, IOException {
        final String index = "test";
        clusterService.setState(state(index, randomBoolean(),
                ShardRoutingState.STARTED));
        logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
        Future<BroadcastResponse> response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index)));
        for (Tuple<ShardId, ActionListener<ReplicationResponse>> shardRequests : broadcastReplicationAction.capturedShardRequests) {
            ReplicationResponse replicationResponse = new ReplicationResponse();
            replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, new ReplicationResponse.ShardInfo.Failure[0]));
            shardRequests.v2().onResponse(replicationResponse);
        }
        logger.info("total shards: {}, ", response.get().getTotalShards());
        assertBroadcastResponse(1, 1, 0, response.get(), null);
    }

    // Mixes per-shard successes, shard-level failures, and whole-request failures, tracking the
    // expected totals by hand; a whole-request failure counts as 2 failed copies (primary +
    // replica).
    public void testResultCombine() throws InterruptedException, ExecutionException, IOException {
        final String index = "test";
        int numShards = randomInt(3);
        clusterService.setState(stateWithAssignedPrimariesAndOneReplica(index, numShards));
        logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
        Future<BroadcastResponse> response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index)));
        int succeeded = 0;
        int failed = 0;
        for (Tuple<ShardId, ActionListener<ReplicationResponse>> shardRequests : broadcastReplicationAction.capturedShardRequests) {
            if (randomBoolean()) {
                ReplicationResponse.ShardInfo.Failure[] failures = new ReplicationResponse.ShardInfo.Failure[0];
                int shardsSucceeded = randomInt(1) + 1;
                succeeded += shardsSucceeded;
                ReplicationResponse replicationResponse = new ReplicationResponse();
                if (shardsSucceeded == 1 && randomBoolean()) {
                    //sometimes add failure (no failure means shard unavailable)
                    failures = new ReplicationResponse.ShardInfo.Failure[1];
                    failures[0] = new ReplicationResponse.ShardInfo.Failure(index, shardRequests.v1().id(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false);
                    failed++;
                }
                replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(2, shardsSucceeded, failures));
                shardRequests.v2().onResponse(replicationResponse);
            } else {
                // sometimes fail
                failed += 2;
                // just add a general exception and see if failed shards will be incremented by 2
                shardRequests.v2().onFailure(new Exception("pretend shard failed"));
            }
        }
        assertBroadcastResponse(2 * numShards, succeeded, failed, response.get(), Exception.class);
    }

    // With no shards in the cluster state, the action must respond immediately with all-zero
    // counts instead of waiting for shard-level results.
    public void testNoShards() throws InterruptedException, ExecutionException, IOException {
        clusterService.setState(stateWithNoShard());
        logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
        BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new DummyBroadcastRequest());
        assertBroadcastResponse(0, 0, 0, response, null);
    }

    // shards() should resolve the request's index expression to exactly the one shard id,
    // regardless of shard state.
    public void testShardsList() throws InterruptedException, ExecutionException {
        final String index = "test";
        final ShardId shardId = new ShardId(index, 0);
        ClusterState clusterState = state(index, randomBoolean(),
                randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED);
        logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
        List<ShardId> shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.index().name()), clusterState);
        assertThat(shards.size(), equalTo(1));
        assertThat(shards.get(0), equalTo(shardId));
    }

    /**
     * Test double for the broadcast action: instead of dispatching shard-level requests, it
     * records each (shard id, listener) pair so the tests can complete them manually.
     */
    private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction<DummyBroadcastRequest, BroadcastResponse, BasicReplicationRequest, ReplicationResponse> {
        protected final Set<Tuple<ShardId, ActionListener<ReplicationResponse>>> capturedShardRequests = ConcurrentCollections.newConcurrentSet();

        public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                TransportReplicationAction replicatedBroadcastShardAction) {
            super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService,
                    actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction);
        }

        @Override
        protected ReplicationResponse newShardResponse() {
            return new ReplicationResponse();
        }

        @Override
        protected BasicReplicationRequest newShardRequest(DummyBroadcastRequest request, ShardId shardId) {
            return new BasicReplicationRequest().setShardId(shardId);
        }

        @Override
        protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies,
                List<ShardOperationFailedException> shardFailures) {
            return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures);
        }

        @Override
        protected void shardExecute(DummyBroadcastRequest request, ShardId shardId, ActionListener<ReplicationResponse> shardActionListener) {
            // Capture instead of executing; tests drive the listener directly.
            capturedShardRequests.add(new Tuple<>(shardId, shardActionListener));
        }
    }

    public FlushResponse assertImmediateResponse(String index, TransportFlushAction flushAction) throws InterruptedException, ExecutionException {
        Date beginDate = new Date();
        FlushResponse flushResponse = flushAction.execute(new FlushRequest(index)).get();
        Date endDate = new Date();
        long maxTime = 500;
        assertThat("this should not take longer than " + maxTime + " ms. The request hangs somewhere", endDate.getTime() - beginDate.getTime(), lessThanOrEqualTo(maxTime));
        return flushResponse;
    }

    public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) throws InterruptedException, ExecutionException {
        return (BroadcastResponse) broadcastAction.execute(request).actionGet("5s");
    }

    private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) {
        assertThat(response.getSuccessfulShards(), equalTo(successful));
        assertThat(response.getTotalShards(), equalTo(total));
        assertThat(response.getFailedShards(), equalTo(failed));
        for (int i = 0; i < failed; i++) {
            // NOTE(review): this indexes [0] on every iteration of a loop over i — it only ever
            // inspects the first failure. Indexing by [i] looks intended, but some failures'
            // causes may not be double-nested; confirm before changing.
            assertThat(response.getShardFailures()[0].getCause().getCause(), instanceOf(exceptionClass));
        }
    }

    // Minimal concrete broadcast request; no extra state beyond the base class.
    public static class DummyBroadcastRequest extends BroadcastRequest<DummyBroadcastRequest> {
    }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.dex.code;
import com.android.dx.rop.code.RegisterSpec;
import com.android.dx.rop.code.RegisterSpecList;
import com.android.dx.rop.cst.Constant;
import com.android.dx.rop.cst.CstInteger;
import com.android.dx.rop.cst.CstKnownNull;
import com.android.dx.rop.cst.CstLiteral64;
import com.android.dx.rop.cst.CstLiteralBits;
import com.android.dx.rop.cst.CstString;
import com.android.dx.util.AnnotatedOutput;
import com.android.dx.util.Hex;
import java.util.BitSet;
/**
* Base class for all instruction format handlers. Instruction format
* handlers know how to translate {@link DalvInsn} instances into
* streams of code units, as well as human-oriented listing strings
* representing such translations.
*/
public abstract class InsnFormat {
/**
* Returns the string form, suitable for inclusion in a listing
* dump, of the given instruction. The instruction must be of this
* instance's format for proper operation.
*
* @param insn {@code non-null;} the instruction
* @param noteIndices whether to include an explicit notation of
* constant pool indices
* @return {@code non-null;} the string form
*/
public final String listingString(DalvInsn insn, boolean noteIndices) {
String op = insn.getOpcode().getName();
String arg = insnArgString(insn);
String comment = insnCommentString(insn, noteIndices);
StringBuilder sb = new StringBuilder(100);
sb.append(op);
if (arg.length() != 0) {
sb.append(' ');
sb.append(arg);
}
if (comment.length() != 0) {
sb.append(" // ");
sb.append(comment);
}
return sb.toString();
}
    /**
     * Returns the string form of the arguments to the given instruction.
     * The instruction must be of this instance's format. If the instruction
     * has no arguments, then the result should be {@code ""} (the empty
     * string), not {@code null}.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction
     * @return {@code non-null;} the string form
     */
    public abstract String insnArgString(DalvInsn insn);

    /**
     * Returns the associated comment for the given instruction, if any.
     * The instruction must be of this instance's format. If the instruction
     * has no comment, then the result should be {@code ""} (the empty
     * string), not {@code null}.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction
     * @param noteIndices whether to include an explicit notation of
     * constant pool indices
     * @return {@code non-null;} the string form
     */
    public abstract String insnCommentString(DalvInsn insn,
            boolean noteIndices);

    /**
     * Gets the code size of instructions that use this format. The
     * size is a number of 16-bit code units, not bytes. This should
     * throw an exception if this format is of variable size.
     *
     * @return {@code >= 0;} the instruction length in 16-bit code units
     */
    public abstract int codeSize();

    /**
     * Returns whether or not the given instruction's arguments will
     * fit in this instance's format. This includes such things as
     * counting register arguments, checking register ranges, and
     * making sure that additional arguments are of appropriate types
     * and are in-range. If this format has a branch target but the
     * instruction's branch offset is unknown, this method will simply
     * not check the offset.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction to check
     * @return {@code true} iff the instruction's arguments are
     * appropriate for this instance, or {@code false} if not
     */
    public abstract boolean isCompatible(DalvInsn insn);
/**
* Returns which of a given instruction's registers will fit in
* this instance's format.
*
* <p>The default implementation of this method always returns
* an empty BitSet. Subclasses must override this method if they
* have registers.</p>
*
* @param insn {@code non-null;} the instruction to check
* @return {@code non-null;} a BitSet flagging registers in the
* register list that are compatible to this format
*/
public BitSet compatibleRegs(DalvInsn insn) {
return new BitSet();
}
    /**
     * Returns whether or not the given instruction's branch offset will
     * fit in this instance's format. This always returns {@code false}
     * for formats that don't include a branch offset.
     *
     * <p>The default implementation of this method always returns
     * {@code false}. Subclasses must override this method if they
     * include branch offsets.</p>
     *
     * @param insn {@code non-null;} the instruction to check
     * @return {@code true} iff the instruction's branch offset is
     * appropriate for this instance, or {@code false} if not
     */
    public boolean branchFits(TargetInsn insn) {
        return false;
    }

    /**
     * Writes the code units for the given instruction to the given
     * output destination. The instruction must be of this instance's format.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param out {@code non-null;} the output destination to write to
     * @param insn {@code non-null;} the instruction to write
     */
    public abstract void writeTo(AnnotatedOutput out, DalvInsn insn);
/**
* Helper method to return a register list string.
*
* @param list {@code non-null;} the list of registers
* @return {@code non-null;} the string form
*/
protected static String regListString(RegisterSpecList list) {
int sz = list.size();
StringBuffer sb = new StringBuffer(sz * 5 + 2);
sb.append('{');
for (int i = 0; i < sz; i++) {
if (i != 0) {
sb.append(", ");
}
sb.append(list.get(i).regString());
}
sb.append('}');
return sb.toString();
}
/**
* Helper method to return a register range string.
*
* @param list {@code non-null;} the list of registers (which must be
* sequential)
* @return {@code non-null;} the string form
*/
protected static String regRangeString(RegisterSpecList list) {
int size = list.size();
StringBuilder sb = new StringBuilder(30);
sb.append("{");
switch (size) {
case 0: {
// Nothing to do.
break;
}
case 1: {
sb.append(list.get(0).regString());
break;
}
default: {
RegisterSpec lastReg = list.get(size - 1);
if (lastReg.getCategory() == 2) {
/*
* Add one to properly represent a list-final
* category-2 register.
*/
lastReg = lastReg.withOffset(1);
}
sb.append(list.get(0).regString());
sb.append("..");
sb.append(lastReg.regString());
}
}
sb.append("}");
return sb.toString();
}
/**
* Helper method to return a literal bits argument string.
*
* @param value the value
* @return {@code non-null;} the string form
*/
protected static String literalBitsString(CstLiteralBits value) {
StringBuffer sb = new StringBuffer(100);
sb.append('#');
if (value instanceof CstKnownNull) {
sb.append("null");
} else {
sb.append(value.typeName());
sb.append(' ');
sb.append(value.toHuman());
}
return sb.toString();
}
/**
* Helper method to return a literal bits comment string.
*
* @param value the value
* @param width the width of the constant, in bits (used for displaying
* the uninterpreted bits; one of: {@code 4 8 16 32 64}
* @return {@code non-null;} the comment
*/
protected static String literalBitsComment(CstLiteralBits value,
int width) {
StringBuffer sb = new StringBuffer(20);
sb.append("#");
long bits;
if (value instanceof CstLiteral64) {
bits = ((CstLiteral64) value).getLongBits();
} else {
bits = value.getIntBits();
}
switch (width) {
case 4: sb.append(Hex.uNibble((int) bits)); break;
case 8: sb.append(Hex.u1((int) bits)); break;
case 16: sb.append(Hex.u2((int) bits)); break;
case 32: sb.append(Hex.u4((int) bits)); break;
case 64: sb.append(Hex.u8(bits)); break;
default: {
throw new RuntimeException("shouldn't happen");
}
}
return sb.toString();
}
/**
* Helper method to return a branch address string.
*
* @param insn {@code non-null;} the instruction in question
* @return {@code non-null;} the string form of the instruction's
* branch target
*/
protected static String branchString(DalvInsn insn) {
TargetInsn ti = (TargetInsn) insn;
int address = ti.getTargetAddress();
return (address == (char) address) ? Hex.u2(address) : Hex.u4(address);
}
/**
* Helper method to return the comment for a branch.
*
* @param insn {@code non-null;} the instruction in question
* @return {@code non-null;} the comment
*/
protected static String branchComment(DalvInsn insn) {
TargetInsn ti = (TargetInsn) insn;
int offset = ti.getTargetOffset();
return (offset == (short) offset) ? Hex.s2(offset) : Hex.s4(offset);
}
/**
* Helper method to determine if a signed int value fits in a nibble.
*
* @param value the value in question
* @return {@code true} iff it's in the range -8..+7
*/
protected static boolean signedFitsInNibble(int value) {
return (value >= -8) && (value <= 7);
}
/**
* Helper method to determine if an unsigned int value fits in a nibble.
*
* @param value the value in question
* @return {@code true} iff it's in the range 0..0xf
*/
protected static boolean unsignedFitsInNibble(int value) {
return value == (value & 0xf);
}
/**
* Helper method to determine if a signed int value fits in a byte.
*
* @param value the value in question
* @return {@code true} iff it's in the range -0x80..+0x7f
*/
protected static boolean signedFitsInByte(int value) {
return (byte) value == value;
}
/**
* Helper method to determine if an unsigned int value fits in a byte.
*
* @param value the value in question
* @return {@code true} iff it's in the range 0..0xff
*/
protected static boolean unsignedFitsInByte(int value) {
return value == (value & 0xff);
}
/**
* Helper method to determine if a signed int value fits in a short.
*
* @param value the value in question
* @return {@code true} iff it's in the range -0x8000..+0x7fff
*/
protected static boolean signedFitsInShort(int value) {
return (short) value == value;
}
/**
* Helper method to determine if an unsigned int value fits in a short.
*
* @param value the value in question
* @return {@code true} iff it's in the range 0..0xffff
*/
protected static boolean unsignedFitsInShort(int value) {
return value == (value & 0xffff);
}
/**
* Helper method to determine if a list of registers are sequential,
* including degenerate cases for empty or single-element lists.
*
* @param list {@code non-null;} the list of registers
* @return {@code true} iff the list is sequentially ordered
*/
protected static boolean isRegListSequential(RegisterSpecList list) {
int sz = list.size();
if (sz < 2) {
return true;
}
int first = list.get(0).getReg();
int next = first;
for (int i = 0; i < sz; i++) {
RegisterSpec one = list.get(i);
if (one.getReg() != next) {
return false;
}
next += one.getCategory();
}
return true;
}
/**
* Helper method to combine an opcode and a second byte of data into
* the appropriate form for emitting into a code buffer.
*
* @param insn {@code non-null;} the instruction containing the opcode
* @param arg {@code 0..255;} arbitrary other byte value
* @return combined value
*/
protected static short opcodeUnit(DalvInsn insn, int arg) {
if ((arg & 0xff) != arg) {
throw new IllegalArgumentException("arg out of range 0..255");
}
int opcode = insn.getOpcode().getOpcode();
if ((opcode & 0xff) != opcode) {
throw new IllegalArgumentException("opcode out of range 0..255");
}
return (short) (opcode | (arg << 8));
}
/**
* Helper method to combine two bytes into a code unit.
*
* @param low {@code 0..255;} low byte
* @param high {@code 0..255;} high byte
* @return combined value
*/
protected static short codeUnit(int low, int high) {
if ((low & 0xff) != low) {
throw new IllegalArgumentException("low out of range 0..255");
}
if ((high & 0xff) != high) {
throw new IllegalArgumentException("high out of range 0..255");
}
return (short) (low | (high << 8));
}
/**
* Helper method to combine four nibbles into a code unit.
*
* @param n0 {@code 0..15;} low nibble
* @param n1 {@code 0..15;} medium-low nibble
* @param n2 {@code 0..15;} medium-high nibble
* @param n3 {@code 0..15;} high nibble
* @return combined value
*/
protected static short codeUnit(int n0, int n1, int n2, int n3) {
if ((n0 & 0xf) != n0) {
throw new IllegalArgumentException("n0 out of range 0..15");
}
if ((n1 & 0xf) != n1) {
throw new IllegalArgumentException("n1 out of range 0..15");
}
if ((n2 & 0xf) != n2) {
throw new IllegalArgumentException("n2 out of range 0..15");
}
if ((n3 & 0xf) != n3) {
throw new IllegalArgumentException("n3 out of range 0..15");
}
return (short) (n0 | (n1 << 4) | (n2 << 8) | (n3 << 12));
}
/**
 * Helper method to combine two nibbles into a byte.
 *
 * @param low {@code 0..15;} low nibble
 * @param high {@code 0..15;} high nibble
 * @return {@code 0..255;} combined value
 */
protected static int makeByte(int low, int high) {
    // Each argument must fit in a single nibble.
    if (low != (low & 0xf)) {
        throw new IllegalArgumentException("low out of range 0..15");
    }
    if (high != (high & 0xf)) {
        throw new IllegalArgumentException("high out of range 0..15");
    }
    return (high << 4) | low;
}
/**
 * Writes one code unit to the given output destination.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 */
protected static void write(AnnotatedOutput out, short c0) {
// A code unit is a 16-bit quantity; emit it directly.
out.writeShort(c0);
}
/**
 * Writes two code units to the given output destination.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1 code unit to write
 */
protected static void write(AnnotatedOutput out, short c0, short c1) {
    // Emit the units in argument order.
    for (short unit : new short[] {c0, c1}) {
        out.writeShort(unit);
    }
}
/**
 * Writes three code units to the given output destination.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1 code unit to write
 * @param c2 code unit to write
 */
protected static void write(AnnotatedOutput out, short c0, short c1,
        short c2) {
    // Emit the units in argument order.
    for (short unit : new short[] {c0, c1, c2}) {
        out.writeShort(unit);
    }
}
/**
 * Writes four code units to the given output destination.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1 code unit to write
 * @param c2 code unit to write
 * @param c3 code unit to write
 */
protected static void write(AnnotatedOutput out, short c0, short c1,
        short c2, short c3) {
    // Emit the units in argument order.
    for (short unit : new short[] {c0, c1, c2, c3}) {
        out.writeShort(unit);
    }
}
/**
 * Writes five code units to the given output destination.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1 code unit to write
 * @param c2 code unit to write
 * @param c3 code unit to write
 * @param c4 code unit to write
 */
protected static void write(AnnotatedOutput out, short c0, short c1,
        short c2, short c3, short c4) {
    // Emit the units in argument order.
    for (short unit : new short[] {c0, c1, c2, c3, c4}) {
        out.writeShort(unit);
    }
}
/**
 * Writes three code units to the given output destination, where the
 * second and third are represented as single <code>int</code> and emitted
 * in little-endian order.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1c2 code unit pair to write
 */
protected static void write(AnnotatedOutput out, short c0, int c1c2) {
    out.writeShort(c0);
    out.writeShort((short) c1c2);         // low half first (little-endian)
    out.writeShort((short) (c1c2 >> 16)); // then the high half
}
/**
 * Writes five code units to the given output destination, where the
 * second through fifth are represented as single <code>long</code>
 * and emitted in little-endian order.
 *
 * @param out {@code non-null;} where to write to
 * @param c0 code unit to write
 * @param c1c2c3c4 code unit quad to write
 */
protected static void write(AnnotatedOutput out, short c0, long c1c2c3c4) {
    out.writeShort(c0);
    // Emit the long sixteen bits at a time, least significant first.
    out.writeShort((short) c1c2c3c4);
    out.writeShort((short) (c1c2c3c4 >> 16));
    out.writeShort((short) (c1c2c3c4 >> 32));
    out.writeShort((short) (c1c2c3c4 >> 48));
}
}
| |
/*
* Copyright (C) 2004-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.roster;
import org.jivesoftware.openfire.RoutingTable;
import org.jivesoftware.openfire.SharedGroupException;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.container.BasicModule;
import org.jivesoftware.openfire.event.GroupEventDispatcher;
import org.jivesoftware.openfire.event.GroupEventListener;
import org.jivesoftware.openfire.event.UserEventDispatcher;
import org.jivesoftware.openfire.event.UserEventListener;
import org.jivesoftware.openfire.group.Group;
import org.jivesoftware.openfire.group.GroupManager;
import org.jivesoftware.openfire.group.GroupNotFoundException;
import org.jivesoftware.openfire.user.User;
import org.jivesoftware.openfire.user.UserManager;
import org.jivesoftware.openfire.user.UserNotFoundException;
import org.jivesoftware.util.ClassUtils;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.PropertyEventDispatcher;
import org.jivesoftware.util.PropertyEventListener;
import org.jivesoftware.util.cache.Cache;
import org.jivesoftware.util.cache.CacheFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.packet.JID;
import org.xmpp.packet.Presence;
import java.util.*;
import java.util.concurrent.*;
/**
* A simple service that allows components to retrieve a roster based solely on the ID
* of the owner. Users have convenience methods for obtaining a roster associated with
* the owner. However there are many components that need to retrieve the roster
* based solely on the generic ID owner key. This interface defines a service that can
* do that. This allows classes that generically manage resource for resource owners
* (such as presence updates) to generically offer their services without knowing or
* caring if the roster owner is a user, chatbot, etc.
*
* @author Iain Shigeoka
*/
public class RosterManager extends BasicModule implements GroupEventListener, UserEventListener {
// Module-wide logger.
private static final Logger Log = LoggerFactory.getLogger(RosterManager.class);
// Appended to the username to build the interned lock key used in getRoster().
private static final String MUTEX_SUFFIX = " ro";
// Cache of loaded rosters, keyed by the local username.
private Cache<String, Roster> rosterCache = null;
// Server reference; assigned in initialize().
private XMPPServer server;
// Routes presence subscribe/unsubscribe packets; assigned in initialize().
private RoutingTable routingTable;
// Persistent roster-item storage; presumably (re)created by initProvider() — TODO confirm (not visible here).
private RosterItemProvider provider;
// Runs roster rewrites triggered by group property changes; initialization not visible in this chunk.
private ExecutorService executor;
/**
 * Returns true if the roster service is enabled. When disabled it is not possible to
 * retrieve users rosters or broadcast presence packets to roster contacts.
 *
 * @return true if the roster service is enabled.
 */
public static boolean isRosterServiceEnabled() {
    // Enabled unless the property has been explicitly set to false.
    final boolean enabled = JiveGlobals.getBooleanProperty("xmpp.client.roster.active", true);
    return enabled;
}
/**
 * Returns true if the roster versioning is enabled.
 *
 * @return true if the roster versioning is enabled.
 */
public static boolean isRosterVersioningEnabled() {
    // Enabled unless the property has been explicitly set to false.
    final boolean enabled = JiveGlobals.getBooleanProperty("xmpp.client.roster.versioning.active", true);
    return enabled;
}
/**
 * Creates the roster manager: sets up the roster cache, creates the persistence
 * provider, and registers a listener that re-creates the provider whenever the
 * "provider.roster.className" property changes at runtime.
 */
public RosterManager() {
super("Roster Manager");
// Cluster-aware cache of rosters, keyed by username.
rosterCache = CacheFactory.createCache("Roster");
initProvider();
// Rebuild the provider when its configured class name changes.
PropertyEventDispatcher.addListener(new PropertyEventListener() {
@Override
public void propertySet(String property, Map params) {
if (property.equals("provider.roster.className")) {
initProvider();
}
}
@Override
public void propertyDeleted(String property, Map params) {}
@Override
public void xmlPropertySet(String property, Map params) {}
@Override
public void xmlPropertyDeleted(String property, Map params) {}
});
}
/**
 * Returns the roster for the given username, loading and caching it on first use.
 *
 * @param username the username to search for.
 * @return the roster associated with the ID.
 * @throws org.jivesoftware.openfire.user.UserNotFoundException if the ID does not correspond
 *         to a known entity on the server.
 */
public Roster getRoster(String username) throws UserNotFoundException {
Roster roster = rosterCache.get(username);
if (roster == null) {
// Synchronize using a unique key so that other threads loading the User
// and not the Roster cannot produce a deadlock
synchronized ((username + MUTEX_SUFFIX).intern()) {
// Re-check inside the lock: another thread may have populated the cache
// between the unsynchronized lookup above and acquiring this monitor.
roster = rosterCache.get(username);
if (roster == null) {
// Not in cache so load a new one:
roster = new Roster(username);
rosterCache.put(username, roster);
}
}
}
return roster;
}
/**
 * Removes the entire roster of a given user. This is necessary when a user
 * account is being deleted from the server. Also removes the deleted user
 * from the rosters of everybody that referenced the user.
 *
 * @param user the user.
 */
public void deleteRoster(JID user) {
if (!server.isLocal(user)) {
// Ignore request if user is not a local user
return;
}
try {
String username = user.getNode();
// Get the roster of the deleted user
Roster roster = getRoster(username);
// Remove each roster item from the user's roster
for (RosterItem item : roster.getRosterItems()) {
try {
roster.deleteRosterItem(item.getJid(), false);
}
catch (SharedGroupException e) {
// Do nothing. We shouldn't have this exception since we disabled the checkings
Log.warn( "Unexpected exception while deleting roster of user '{}' .", user, e );
}
}
// Remove the cached roster from memory
rosterCache.remove(username);
// Get the rosters that have a reference to the deleted user
Iterator<String> usernames = provider.getUsernames(user.toBareJID());
while (usernames.hasNext()) {
username = usernames.next();
try {
// Get the roster that has a reference to the deleted user
roster = getRoster(username);
// Remove the deleted user reference from this roster
roster.deleteRosterItem(user, false);
}
catch (SharedGroupException e) {
// Do nothing. We shouldn't have this exception since we disabled the checkings
Log.warn( "Unexpected exception while deleting roster of user '{}' .", user, e );
}
catch (UserNotFoundException e) {
// Deleted user had user that no longer exists on their roster. Ignore and move on.
}
}
}
catch (UnsupportedOperationException | UserNotFoundException e) {
// Do nothing. Either the roster could not be loaded or the backing provider
// does not support the operation; in both cases there is nothing to clean up.
}
}
/**
 * Returns a collection with all the groups that the user may include in his roster. The
 * following criteria will be used to select the groups: 1) Groups that are configured so that
 * everybody can include in his roster, 2) Groups that are configured so that its users may
 * include the group in their rosters and the user is a group user of the group and 3) User
 * belongs to a Group that may see a Group whose members may include the Group in their
 * rosters.
 *
 * @param username the username of the user to return his shared groups.
 * @return a collection with all the groups that the user may include in his roster.
 */
public Collection<Group> getSharedGroups(String username) {
    Collection<Group> visible = new HashSet<>();
    for (Group group : GroupManager.getInstance().getSharedGroups(username)) {
        String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
        if ("everybody".equals(showInRoster)) {
            // Shared with every user in the system.
            visible.add(group);
            continue;
        }
        if (!"onlyGroup".equals(showInRoster)) {
            // Not shared at all.
            continue;
        }
        if (group.isUser(username)) {
            // The user belongs to the group itself.
            visible.add(group);
            continue;
        }
        // The user may still see the group through one of the allowed groups.
        for (Group allowed : parseGroups(group.getProperties().get("sharedRoster.groupList"))) {
            if (allowed.isUser(username)) {
                visible.add(group);
            }
        }
    }
    return visible;
}
/**
 * Returns the list of shared groups whose visibility is public.
 *
 * @return the list of shared groups whose visibility is public.
 */
public Collection<Group> getPublicSharedGroups() {
// Pure delegation to the group manager.
return GroupManager.getInstance().getPublicSharedGroups();
}
/**
 * Returns a collection of Groups obtained by parsing a comma delimited String with the name
 * of groups. Names that do not resolve to an existing group are silently skipped.
 *
 * @param groupNames a comma delimited string with group names.
 * @return the groups whose names appear in the given string and exist.
 */
private Collection<Group> parseGroups(String groupNames) {
    Collection<Group> resolved = new HashSet<>();
    for (String name : parseGroupNames(groupNames)) {
        try {
            resolved.add(GroupManager.getInstance().getGroup(name));
        }
        catch (GroupNotFoundException ignored) {
            // Silently skip references to groups that no longer exist.
        }
    }
    return resolved;
}
/**
 * Returns a collection of group names obtained by parsing a comma delimited String.
 * Empty segments (from consecutive, leading or trailing commas) are ignored, and
 * duplicate names collapse into one entry.
 *
 * @param groupNames a comma delimited string with group names, or null.
 * @return the distinct, non-empty names found in the string.
 */
private static Collection<String> parseGroupNames(String groupNames) {
    Collection<String> names = new HashSet<>();
    if (groupNames != null) {
        for (String token : groupNames.split(",")) {
            // split() can yield empty segments where StringTokenizer would not;
            // filter them so the result matches token-based parsing.
            if (!token.isEmpty()) {
                names.add(token);
            }
        }
    }
    return names;
}
/**
 * Notification that a new group was created. No roster updates are performed.
 */
@Override
public void groupCreated(Group group, Map params) {
//Do nothing
}
/**
 * Notification that a group is being deleted. Treats the deletion as if every
 * member and admin had been individually removed from the group, updating the
 * rosters of all affected users.
 *
 * @param group the group being deleted.
 * @param params event parameters.
 */
@Override
public void groupDeleting(Group group, Map params) {
    // Everyone who was in the group: members plus admins.
    Collection<JID> groupUsers = new HashSet<>(group.getMembers());
    groupUsers.addAll(group.getAdmins());
    // Rosters that reference this group and must be updated.
    Collection<JID> affected = getAffectedUsers(group);
    for (JID removed : groupUsers) {
        groupUserDeleted(group, affected, removed);
    }
}
/**
 * Handles changes to a shared group's properties by updating affected rosters.
 * Only three keys require work: "sharedRoster.showInRoster" (visibility),
 * "sharedRoster.groupList" (groups allowed to see this group) and
 * "sharedRoster.displayName". For the first two the rewrite runs asynchronously
 * on the internal executor; display-name changes are applied synchronously to
 * rosters already in memory.
 *
 * @param group the group whose property was modified.
 * @param params event parameters: "type", "propertyKey" and "originalValue".
 */
@Override
public void groupModified(final Group group, Map params) {
// Property deletions are handled elsewhere; nothing to do here.
if ("propertyDeleted".equals(params.get("type"))) {
return;
}
String keyChanged = (String) params.get("propertyKey");
String originalValue = (String) params.get("originalValue");
if ("sharedRoster.showInRoster".equals(keyChanged)) {
String currentValue = group.getProperties().get("sharedRoster.showInRoster");
// Nothing has changed so do nothing.
if (currentValue.equals(originalValue)) {
return;
}
// Get the users of the group
final Collection<JID> users = new HashSet<>(group.getMembers());
users.addAll(group.getAdmins());
// Get the users whose roster will be affected, computed with the OLD visibility value
final Collection<JID> affectedUsers = getAffectedUsers(group, originalValue,
group.getProperties().get("sharedRoster.groupList"));
// Asynchronously remove the entries derived from the old visibility, then
// re-add them under the new visibility.
executor.submit(new Callable<Boolean>()
{
public Boolean call() throws Exception
{
// Remove the group members from the affected rosters
for (JID deletedUser : users) {
groupUserDeleted(group, affectedUsers, deletedUser);
}
// Simulate that the group users has been added to the group. This will cause to push
// roster items to the "affected" users for the group users
for (JID user : users) {
groupUserAdded(group, user);
}
return true;
}
});
}
else if ("sharedRoster.groupList".equals(keyChanged)) {
String currentValue = group.getProperties().get("sharedRoster.groupList");
// Nothing has changed so do nothing.
if (currentValue.equals(originalValue)) {
return;
}
// Get the users of the group
final Collection<JID> users = new HashSet<>(group.getMembers());
users.addAll(group.getAdmins());
// Get the users whose roster will be affected, computed with the OLD group list
final Collection<JID> affectedUsers = getAffectedUsers(group,
group.getProperties().get("sharedRoster.showInRoster"), originalValue);
executor.submit(new Callable<Boolean>()
{
public Boolean call() throws Exception
{
// Remove the group members from the affected rosters
for (JID deletedUser : users) {
groupUserDeleted(group, affectedUsers, deletedUser);
}
// Simulate that the group users has been added to the group. This will cause to push
// roster items to the "affected" users for the group users
for (JID user : users) {
groupUserAdded(group, user);
}
return true;
}
});
}
else if ("sharedRoster.displayName".equals(keyChanged)) {
String currentValue = group.getProperties().get("sharedRoster.displayName");
// Nothing has changed so do nothing.
if (currentValue.equals(originalValue)) {
return;
}
// Do nothing if the group is not being shown in users' rosters
if (!isSharedGroup(group)) {
return;
}
// Get all the affected users
Collection<JID> users = getAffectedUsers(group);
// Iterate on all the affected users and update their rosters
for (JID updatedUser : users) {
// Get the roster to update; only rosters already in memory are touched.
Roster roster = null;
if (server.isLocal(updatedUser)) {
roster = rosterCache.get(updatedUser.getNode());
}
if (roster != null) {
// Update the roster with the new group display name
roster.shareGroupRenamed(users);
}
}
}
}
/**
 * Initializes the module: captures the server and routing table references and
 * registers a roster event listener that re-inserts changed rosters into the
 * cache so other cluster nodes see the latest version.
 *
 * @param server the XMPP server hosting this module.
 */
@Override
public void initialize(XMPPServer server) {
super.initialize(server);
this.server = server;
this.routingTable = server.getRoutingTable();
RosterEventDispatcher.addListener(new RosterEventListener() {
@Override
public void rosterLoaded(Roster roster) {
// Do nothing
}
@Override
public boolean addingContact(Roster roster, RosterItem item, boolean persistent) {
// Do nothing; returning true allows the contact to be added.
return true;
}
@Override
public void contactAdded(Roster roster, RosterItem item) {
// Set object again in cache. This is done so that other cluster nodes
// get refreshed with latest version of the object
rosterCache.put(roster.getUsername(), roster);
}
@Override
public void contactUpdated(Roster roster, RosterItem item) {
// Set object again in cache. This is done so that other cluster nodes
// get refreshed with latest version of the object
rosterCache.put(roster.getUsername(), roster);
}
@Override
public void contactDeleted(Roster roster, RosterItem item) {
// Set object again in cache. This is done so that other cluster nodes
// get refreshed with latest version of the object
rosterCache.put(roster.getUsername(), roster);
}
});
}
/**
 * Returns true if the specified Group may be included in a user roster. The decision is made
 * based on the group properties that are configurable through the Admin Console.
 *
 * @param group the group to check if it may be considered a shared group.
 * @return true if the specified Group may be included in a user roster.
 */
public static boolean isSharedGroup(Group group) {
    String visibility = group.getProperties().get("sharedRoster.showInRoster");
    // Shared either with the group's own users or with everybody.
    return "onlyGroup".equals(visibility) || "everybody".equals(visibility);
}
/**
 * Returns true if the specified Group may be seen by all users in the system. The decision
 * is made based on the group properties that are configurable through the Admin Console.
 *
 * @param group the group to check if it may be seen by all users in the system.
 * @return true if the specified Group may be seen by all users in the system.
 */
public static boolean isPublicSharedGroup(Group group) {
    // Only the "everybody" visibility makes a group publicly shared.
    return "everybody".equals(group.getProperties().get("sharedRoster.showInRoster"));
}
/**
 * Notification that a member was added to a group. Updates the rosters of all
 * users affected by the group (or by the groups that may see it).
 *
 * @param group the group the member was added to.
 * @param params event parameters; "member" holds the added user's JID string.
 */
@Override
public void memberAdded(Group group, Map params) {
    JID addedUser = new JID((String) params.get("member"));
    if (group.getAdmins().contains(addedUser)) {
        // Admin demoted to member: the user was already part of the group.
        return;
    }
    if (isSharedGroup(group)) {
        groupUserAdded(group, addedUser);
        return;
    }
    // The group itself is not shared, but groups that may see it are affected.
    for (Group visibleGroup : getVisibleGroups(group)) {
        Collection<JID> affected = new HashSet<>(visibleGroup.getMembers());
        affected.addAll(visibleGroup.getAdmins());
        groupUserAdded(visibleGroup, affected, addedUser);
    }
}
/**
 * Notification that a member was removed from a group. Updates the rosters of
 * all users affected by the group (or by the groups that may see it).
 *
 * @param group the group the member was removed from.
 * @param params event parameters; "member" holds the removed user's JID string.
 */
@Override
public void memberRemoved(Group group, Map params) {
    String member = (String) params.get("member");
    if (member == null) {
        return;
    }
    JID removedUser = new JID(member);
    if (group.getAdmins().contains(removedUser)) {
        // Still an admin, so the user keeps the shared roster entries.
        return;
    }
    if (isSharedGroup(group)) {
        groupUserDeleted(group, removedUser);
        return;
    }
    // The group itself is not shared, but groups that may see it are affected.
    for (Group visibleGroup : getVisibleGroups(group)) {
        Collection<JID> affected = new HashSet<>(visibleGroup.getMembers());
        affected.addAll(visibleGroup.getAdmins());
        groupUserDeleted(visibleGroup, affected, removedUser);
    }
}
/**
 * Notification that an admin was added to a group. Updates the rosters of all
 * users affected by the group (or by the groups that may see it).
 *
 * @param group the group the admin was added to.
 * @param params event parameters; "admin" holds the added user's JID string.
 */
@Override
public void adminAdded(Group group, Map params) {
    JID promotedUser = new JID((String) params.get("admin"));
    if (group.getMembers().contains(promotedUser)) {
        // Member promoted to admin: the user was already part of the group.
        return;
    }
    if (isSharedGroup(group)) {
        groupUserAdded(group, promotedUser);
        return;
    }
    // The group itself is not shared, but groups that may see it are affected.
    for (Group visibleGroup : getVisibleGroups(group)) {
        Collection<JID> affected = new HashSet<>(visibleGroup.getMembers());
        affected.addAll(visibleGroup.getAdmins());
        groupUserAdded(visibleGroup, affected, promotedUser);
    }
}
/**
 * Notification that an admin was removed from a group. Updates the rosters of
 * all users affected by the group (or by the groups that may see it).
 *
 * @param group the group the admin was removed from.
 * @param params event parameters; "admin" holds the removed user's JID string.
 */
@Override
public void adminRemoved(Group group, Map params) {
    JID demotedUser = new JID((String) params.get("admin"));
    if (group.getMembers().contains(demotedUser)) {
        // Still a plain member, so the user keeps the shared roster entries.
        return;
    }
    if (isSharedGroup(group)) {
        groupUserDeleted(group, demotedUser);
        return;
    }
    // The group itself is not shared, but groups that may see it are affected.
    for (Group visibleGroup : getVisibleGroups(group)) {
        Collection<JID> affected = new HashSet<>(visibleGroup.getMembers());
        affected.addAll(visibleGroup.getAdmins());
        groupUserDeleted(visibleGroup, affected, demotedUser);
    }
}
/**
 * A new user has been created so members of public shared groups need to have
 * their rosters updated. Members of public shared groups need to have a roster
 * item with subscription FROM for the new user since the new user can see them.
 *
 * @param newUser the newly created user.
 * @param params event parameters.
 */
@Override
public void userCreated(User newUser, Map<String,Object> params) {
JID newUserJID = server.createJID(newUser.getUsername(), null);
// Shared public groups that are public should have a presence subscription
// of type FROM for the new user
for (Group group : getPublicSharedGroups()) {
// Get group members of public group
Collection<JID> users = new HashSet<>(group.getMembers());
users.addAll(group.getAdmins());
// Update the roster of each group member to include a subscription of type FROM
for (JID userToUpdate : users) {
// Get the roster to update
Roster roster = null;
if (server.isLocal(userToUpdate)) {
// Check that the user exists, if not then continue with the next user
try {
UserManager.getInstance().getUser(userToUpdate.getNode());
}
catch (UserNotFoundException e) {
continue;
}
roster = rosterCache.get(userToUpdate.getNode());
}
// Only update rosters in memory
if (roster != null) {
roster.addSharedUser(group, newUserJID);
}
if (!server.isLocal(userToUpdate)) {
// Subscribe to the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(newUserJID, userToUpdate, true);
}
}
}
}
/**
 * A user is being deleted: removes the user from the rosters of public shared
 * group members (which held a FROM subscription for the user) and finally
 * deletes the user's own roster.
 *
 * @param user the user being deleted.
 * @param params event parameters.
 */
@Override
public void userDeleting(User user, Map<String,Object> params) {
// Shared public groups that have a presence subscription of type FROM
// for the deleted user should no longer have a reference to the deleted user
JID userJID = server.createJID(user.getUsername(), null);
// Shared public groups that are public should have a presence subscription
// of type FROM for the new user
for (Group group : getPublicSharedGroups()) {
// Get group members of public group
Collection<JID> users = new HashSet<>(group.getMembers());
users.addAll(group.getAdmins());
// Update the roster of each group member to include a subscription of type FROM
for (JID userToUpdate : users) {
// Get the roster to update
Roster roster = null;
if (server.isLocal(userToUpdate)) {
// Check that the user exists, if not then continue with the next user
try {
UserManager.getInstance().getUser(userToUpdate.getNode());
}
catch (UserNotFoundException e) {
continue;
}
roster = rosterCache.get(userToUpdate.getNode());
}
// Only update rosters in memory
if (roster != null) {
roster.deleteSharedUser(group, userJID);
}
if (!server.isLocal(userToUpdate)) {
// Unsubscribe from the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(userJID, userToUpdate, false);
}
}
}
deleteRoster(userJID);
}
/**
 * Notification that a user was modified. When the user's name changed, evicts
 * the cached rosters of everybody sharing a group with the user (presumably so
 * the rosters are reloaded with the new name).
 *
 * @param user the modified user.
 * @param params event parameters; only the "nameModified" type is handled.
 */
@Override
public void userModified(User user, Map<String,Object> params) {
    if (!"nameModified".equals(params.get("type"))) {
        return;
    }
    for (Group group : getSharedGroups(user.getUsername())) {
        // Admins first, then members, matching the order rosters were cached by.
        List<JID> groupUsers = new ArrayList<>(group.getAdmins());
        groupUsers.addAll(group.getMembers());
        for (JID groupUser : groupUsers) {
            rosterCache.remove(groupUser.getNode());
        }
    }
}
/**
 * Notification that a Group user has been added. Update the group users' roster accordingly.
 *
 * @param group the group where the user was added.
 * @param addedUser the JID of the user that has been added to the group.
 */
private void groupUserAdded(Group group, JID addedUser) {
// Update every roster affected by this group.
groupUserAdded(group, getAffectedUsers(group), addedUser);
}
/**
 * Notification that a Group user has been added. Update the group users' roster accordingly.
 *
 * @param group the group where the user was added.
 * @param users the users to update their rosters
 * @param addedUser the JID of the user that has been added to the group.
 */
private void groupUserAdded(Group group, Collection<JID> users, JID addedUser) {
// Get the roster of the added user.
Roster addedUserRoster = null;
if (server.isLocal(addedUser)) {
addedUserRoster = rosterCache.get(addedUser.getNode());
}
// Iterate on all the affected users and update their rosters
for (JID userToUpdate : users) {
if (!addedUser.equals(userToUpdate)) {
// Get the roster to update
Roster roster = null;
if (server.isLocal(userToUpdate)) {
// Check that the user exists, if not then continue with the next user
try {
UserManager.getInstance().getUser(userToUpdate.getNode());
}
catch (UserNotFoundException e) {
continue;
}
roster = rosterCache.get(userToUpdate.getNode());
}
// Only update rosters in memory
if (roster != null) {
roster.addSharedUser(group, addedUser);
}
// The added user's roster may have been loaded into the cache since we last looked.
if (addedUserRoster == null && server.isLocal(addedUser)) {
addedUserRoster =
rosterCache.get(addedUser.getNode());
}
// Update the roster of the newly added group user.
if (addedUserRoster != null) {
Collection<Group> groups = GroupManager.getInstance().getGroups(userToUpdate);
addedUserRoster.addSharedUser(userToUpdate, groups, group);
}
if (!server.isLocal(addedUser)) {
// Subscribe to the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(userToUpdate, addedUser, true);
}
if (!server.isLocal(userToUpdate)) {
// Subscribe to the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(addedUser, userToUpdate, true);
}
}
}
}
/**
 * Notification that a Group user has been deleted. Update the group users' roster accordingly.
 *
 * @param group the group from where the user was deleted.
 * @param deletedUser the JID of the user that has been deleted from the group.
 */
private void groupUserDeleted(Group group, JID deletedUser) {
// Update every roster affected by this group.
groupUserDeleted(group, getAffectedUsers(group), deletedUser);
}
/**
 * Notification that a Group user has been deleted. Update the group users' roster accordingly.
 *
 * @param group the group from where the user was deleted.
 * @param users the users to update their rosters
 * @param deletedUser the JID of the user that has been deleted from the group.
 */
private void groupUserDeleted(Group group, Collection<JID> users, JID deletedUser) {
// Get the roster of the deleted user.
Roster deletedUserRoster = null;
if (server.isLocal(deletedUser)) {
deletedUserRoster = rosterCache.get(deletedUser.getNode());
}
// Iterate on all the affected users and update their rosters
for (JID userToUpdate : users) {
// Get the roster to update
Roster roster = null;
if (server.isLocal(userToUpdate)) {
// Check that the user exists, if not then continue with the next user
try {
UserManager.getInstance().getUser(userToUpdate.getNode());
}
catch (UserNotFoundException e) {
continue;
}
roster = rosterCache.get(userToUpdate.getNode());
}
// Only update rosters in memory
if (roster != null) {
roster.deleteSharedUser(group, deletedUser);
}
// The deleted user's roster may have been loaded into the cache since we last looked.
if (deletedUserRoster == null && server.isLocal(deletedUser)) {
deletedUserRoster =
rosterCache.get(deletedUser.getNode());
}
// Update the roster of the newly deleted group user.
if (deletedUserRoster != null) {
deletedUserRoster.deleteSharedUser(userToUpdate, group);
}
if (!server.isLocal(deletedUser)) {
// Unsubscribe from the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(userToUpdate, deletedUser, false);
}
if (!server.isLocal(userToUpdate)) {
// Unsubscribe from the presence of the remote user. This is only necessary for
// remote users and may only work with remote users that **automatically**
// accept presence subscription requests
sendSubscribeRequest(deletedUser, userToUpdate, false);
}
}
}
/**
 * Routes a presence subscribe or unsubscribe stanza from one user to another.
 *
 * @param sender the JID the presence packet is sent from.
 * @param recipient the JID the presence packet is addressed to.
 * @param isSubscribe true to request a subscription, false to cancel one.
 */
private void sendSubscribeRequest(JID sender, JID recipient, boolean isSubscribe) {
    Presence request = new Presence();
    request.setFrom(sender);
    request.setTo(recipient);
    request.setType(isSubscribe ? Presence.Type.subscribe : Presence.Type.unsubscribe);
    routingTable.routePacket(recipient, request, false);
}
/**
 * Delegates to {@link GroupManager#getVisibleGroups} to obtain the groups
 * related by visibility to the given group; see that method for the exact
 * visibility semantics.
 *
 * @param groupToCheck the group whose visible groups are requested.
 * @return the visible groups reported by the group manager.
 */
private Collection<Group> getVisibleGroups(Group groupToCheck) {
return GroupManager.getInstance().getVisibleGroups(groupToCheck);
}
/**
 * Returns true if a given group is visible to a given user. That means, if the user can
 * see the group in his roster.
 *
 * @param group the group to check if the user can see.
 * @param user the JID of the user to check if he may see the group.
 * @return true if a given group is visible to a given user.
 */
public boolean isGroupVisible(Group group, JID user) {
    String visibility = group.getProperties().get("sharedRoster.showInRoster");
    if ("everybody".equals(visibility)) {
        // Public group: visible to all.
        return true;
    }
    if (!"onlyGroup".equals(visibility)) {
        // Not shared at all.
        return false;
    }
    if (group.isUser(user)) {
        // The user belongs to the group itself.
        return true;
    }
    // The user may belong to one of the groups allowed to see this group.
    for (Group allowed : parseGroups(group.getProperties().get("sharedRoster.groupList"))) {
        if (allowed.isUser(user)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns all the users that are related to a shared group. This is the logic that we are
 * using: 1) If the group visibility is configured as "Everybody" then all users in the system or
 * all logged users in the system will be returned (configurable through the "filterOffline"
 * flag), 2) if the group visibility is configured as "onlyGroup" then all the group users will
 * be included in the answer and 3) if the group visibility is configured as "onlyGroup" and
 * the group allows other groups to include the group in the groups users' roster then all
 * the users of the allowed groups will be included in the answer.
 *
 * @param group the shared group whose related users are requested.
 * @return the users whose rosters are affected by the given group.
 */
private Collection<JID> getAffectedUsers(Group group) {
// Delegate using the group's current visibility and allowed-groups properties.
return getAffectedUsers(group, group.getProperties().get("sharedRoster.showInRoster"),
group.getProperties().get("sharedRoster.groupList"));
}
/**
 * This method is similar to {@link #getAffectedUsers(Group)} except that it receives
 * the group properties to use. Passing the properties explicitly lets a caller compute
 * the affected users for a previous state of the group, e.g. while a property edit is
 * being processed.
 *
 * @param group the group whose related users are requested.
 * @param showInRoster the visibility value to assume ("everybody", "onlyGroup" or other).
 * @param groupNames comma delimited names of the groups allowed to see this group.
 * @return the users whose rosters are affected under the given properties.
 */
private Collection<JID> getAffectedUsers(Group group, String showInRoster, String groupNames) {
    if (!"everybody".equals(showInRoster) && !"onlyGroup".equals(showInRoster)) {
        // The group is not shown in anyone's roster, so nobody is affected.
        return new ArrayList<>();
    }
    // Start with the group's own members and admins.
    Collection<JID> affected = new HashSet<>(group.getMembers());
    affected.addAll(group.getAdmins());
    if ("everybody".equals(showInRoster)) {
        // Visible to everyone: every registered user is affected.
        for (String username : UserManager.getInstance().getUsernames()) {
            affected.add(server.createJID(username, null, true));
        }
    }
    else {
        // Visible only to selected groups: their members and admins are affected too.
        for (Group allowed : parseGroups(groupNames)) {
            affected.addAll(allowed.getMembers());
            affected.addAll(allowed.getAdmins());
        }
    }
    return affected;
}
Collection<JID> getSharedUsersForRoster(Group group, Roster roster) {
String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
String groupNames = group.getProperties().get("sharedRoster.groupList");
// Answer an empty collection if the group is not being shown in users' rosters
if (!"onlyGroup".equals(showInRoster) && !"everybody".equals(showInRoster)) {
return new ArrayList<>();
}
// Add the users of the group
Collection<JID> users = new HashSet<>(group.getMembers());
users.addAll(group.getAdmins());
// If the user of the roster belongs to the shared group then we should return
// users that need to be in the roster with subscription "from"
if (group.isUser(roster.getUsername())) {
// Check if anyone can see this shared group
if ("everybody".equals(showInRoster)) {
// Add all users in the system
for (String username : UserManager.getInstance().getUsernames()) {
users.add(server.createJID(username, null, true));
}
}
else {
// Add the users that may see the group
Collection<Group> groupList = parseGroups(groupNames);
for (Group groupInList : groupList) {
users.addAll(groupInList.getMembers());
users.addAll(groupInList.getAdmins());
}
}
}
return users;
}
/**
* Returns true if a group in the first collection may mutually see a group of the
* second collection. More precisely, return true if both collections contain a public
* group (i.e. anybody can see the group) or if both collection have a group that may see
* each other and the users are members of those groups or if one group is public and the
* other group allowed the public group to see it.
*
* @param user the name of the user associated to the first collection of groups. This is always a local user.
* @param groups a collection of groups to check against the other collection of groups.
* @param otherUser the JID of the user associated to the second collection of groups.
* @param otherGroups the other collection of groups to check against the first collection.
* @return true if a group in the first collection may mutually see a group of the
* second collection.
*/
boolean hasMutualVisibility(String user, Collection<Group> groups, JID otherUser,
Collection<Group> otherGroups) {
for (Group group : groups) {
for (Group otherGroup : otherGroups) {
// Skip this groups if the users are not group users of the groups
if (!group.isUser(user) || !otherGroup.isUser(otherUser)) {
continue;
}
if (group.equals(otherGroup)) {
return true;
}
String showInRoster = group.getProperties().get("sharedRoster.showInRoster");
String otherShowInRoster = otherGroup.getProperties().get("sharedRoster.showInRoster");
// Return true if both groups are public groups (i.e. anybody can see them)
if ("everybody".equals(showInRoster) && "everybody".equals(otherShowInRoster)) {
return true;
}
else if ("onlyGroup".equals(showInRoster) && "onlyGroup".equals(otherShowInRoster)) {
String groupNames = group.getProperties().get("sharedRoster.groupList");
String otherGroupNames = otherGroup.getProperties().get("sharedRoster.groupList");
// Return true if each group may see the other group
if (groupNames != null && otherGroupNames != null) {
if (groupNames.contains(otherGroup.getName()) &&
otherGroupNames.contains(group.getName())) {
return true;
}
// Check if each shared group can be seen by a group where each user belongs
Collection<Group> groupList = parseGroups(groupNames);
Collection<Group> otherGroupList = parseGroups(otherGroupNames);
for (Group groupName : groupList) {
if (groupName.isUser(otherUser)) {
for (Group otherGroupName : otherGroupList) {
if (otherGroupName.isUser(user)) {
return true;
}
}
}
}
}
}
else if ("everybody".equals(showInRoster) && "onlyGroup".equals(otherShowInRoster)) {
// Return true if one group is public and the other group allowed the public
// group to see him
String otherGroupNames = otherGroup.getProperties().get("sharedRoster.groupList");
if (otherGroupNames != null && otherGroupNames.contains(group.getName())) {
return true;
}
}
else if ("onlyGroup".equals(showInRoster) && "everybody".equals(otherShowInRoster)) {
// Return true if one group is public and the other group allowed the public
// group to see him
String groupNames = group.getProperties().get("sharedRoster.groupList");
// Return true if each group may see the other group
if (groupNames != null && groupNames.contains(otherGroup.getName())) {
return true;
}
}
}
}
return false;
}
@Override
public void start() throws IllegalStateException {
super.start();
// Make the GroupManager listeners be registered first
GroupManager.getInstance();
// Add this module as a user event listener so we can update
// rosters when users are created or deleted
UserEventDispatcher.addListener(this);
// Add the new instance as a listener of group events
GroupEventDispatcher.addListener(this);
executor = Executors.newCachedThreadPool();
}
@Override
public void stop() {
super.stop();
// Remove this module as a user event listener
UserEventDispatcher.removeListener(this);
// Remove this module as a listener of group events
GroupEventDispatcher.removeListener(this);
executor.shutdown();
}
public static RosterItemProvider getRosterItemProvider() {
return XMPPServer.getInstance().getRosterManager().provider;
}
private void initProvider() {
JiveGlobals.migrateProperty("provider.roster.className");
String className = JiveGlobals.getProperty("provider.roster.className",
"org.jivesoftware.openfire.roster.DefaultRosterItemProvider");
if (provider == null || !className.equals(provider.getClass().getName())) {
try {
Class c = ClassUtils.forName(className);
provider = (RosterItemProvider) c.newInstance();
}
catch (Exception e) {
Log.error("Error loading roster provider: " + className, e);
provider = new DefaultRosterItemProvider();
}
}
}
}
| |
package de.eric_wiltfang.dictionary;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.sql.*;
import java.text.MessageFormat;
import java.util.EnumSet;
import java.util.Vector;
import de.eric_wiltfang.dictionary.DictionaryEvent.DictionaryEventType;
import de.eric_wiltfang.dictionary.Exporter.ExporterSettings;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.util.Zip4jConstants;
public class Dictionary {
private DictionarySettings settings;
private Path workingDirectory;
private Connection connection;
private Vector<DictionaryListener> listeners;
private Dictionary() throws IOException {
listeners = new Vector<>();
settings = new DictionarySettings();
try {
workingDirectory = Files.createTempDirectory("dict");
} catch (IOException e) {
throw new IOException(DictionaryMainWindow.getInstance().getLocalisedString("temporaryFolderError") + " " + e);
}
}
/**
* Since temporary files are used for the Dictionary, cleanup() has to be called
* once the Dictionary will not be used anymore, so that no files remain to clutter
* the user's system
*/
public void cleanup() throws IOException {
try {
connection.close();
File[] files = workingDirectory.toFile().listFiles();
for (File f : files) {
f.delete();
}
workingDirectory.toFile().delete();
} catch(Exception e) {
throw new IOException(DictionaryMainWindow.getInstance().getLocalisedString("cleanupExceptionMessage") + " " + e);
}
}
private void connectDatabase() throws IOException {
try {
connection = DriverManager.getConnection("jdbc:h2:" + workingDirectory + "/db", "sa", "");
} catch (SQLException e) {
throw new IOException(DictionaryMainWindow.getInstance().getLocalisedString("databaseConnectionExceptionMessage") + " " + e);
}
}
private void init() throws IOException, SQLException {
connectDatabase();
Statement s = connection.createStatement();
s.execute("create table entry (" +
" entry_id bigint not null auto_increment," +
" word varchar(255)," +
" definition varchar(10000)," +
" notes varchar(10000)," +
" category varchar(127)," +
" tags array," +
" primary key (entry_id)" +
");");
}
/**
* Loads dictionary files
*/
private void load(File from) throws IOException {
try {
ZipFile zip = new ZipFile(from);
zip.extractAll(workingDirectory.toString());
InputStreamReader reader = new InputStreamReader(new FileInputStream(workingDirectory + "/settings.properties"), Charset.forName("UTF-8"));
settings.load(reader);
connectDatabase();
} catch(Exception e) {
throw new IOException(DictionaryMainWindow.getInstance().getLocalisedString("fileReadExceptionMessage") + " " + e);
}
}
/**
* Saves dictionary files to disk
*/
public void save(File target) throws IOException, Exception {
try {
connection.close();
} catch(Exception e) {
throw new Exception(DictionaryMainWindow.getInstance().getLocalisedString("databaseReleaseExceptionMessage") + " " + e);
}
if (Files.exists(target.toPath())) {
try {
Files.delete(target.toPath());
} catch (IOException e) {
throw new IOException(MessageFormat.format(DictionaryMainWindow.getInstance().getLocalisedString("fileExistsExceptionMessage"), target) + e);
}
}
try {
OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(workingDirectory + "/VERSION"), Charset.forName("UTF-8"));
writer.write("1.0");
writer.close();
writer = new OutputStreamWriter(new FileOutputStream(workingDirectory + "/settings.properties"), Charset.forName("UTF-8"));
settings.save(writer);
writer.close();
ZipFile zip = new ZipFile(target);
ZipParameters parameters = new ZipParameters();
parameters.setCompressionMethod(Zip4jConstants.COMP_DEFLATE);
parameters.setCompressionLevel(Zip4jConstants.DEFLATE_LEVEL_NORMAL);
File[] files = workingDirectory.toFile().listFiles();
for (File f : files) {
zip.addFile(f, parameters);
}
} catch (Exception e) {
throw new IOException(DictionaryMainWindow.getInstance().getLocalisedString("fileSaveExceptionMessage") + " " + e);
}
connectDatabase();
}
/**
* Creates a new dictionary
*/
public static Dictionary createNew(DictionarySettings settings) throws IOException, SQLException {
Dictionary dic = new Dictionary();
dic.settings = settings;
dic.init();
return dic;
}
/**
* Creates a dictionary with contents from a file
*/
public static Dictionary createFromFile(File file) throws IOException, SQLException {
Dictionary dic = new Dictionary();
dic.load(file);
return dic;
}
public Entry getEntry(long id) throws IllegalArgumentException, SQLException {
return new Entry(id, connection);
}
public void insertEntry(Entry entry) throws SQLException {
boolean fresh = entry.insertSelf(connection);
DictionaryEventType evtype = fresh ? DictionaryEventType.NEW : DictionaryEventType.UPDATE;
DictionaryEvent e = new DictionaryEvent(this, evtype, entry.getId());
broadcast(e);
}
public void deleteEntry(Entry entry) throws SQLException {
deleteEntry(entry.getId());
}
public void deleteEntry(long entryID) throws SQLException {
PreparedStatement s = connection.prepareStatement("DELETE FROM entry WHERE entry_id = ?");
s.setLong(1, entryID);
s.execute();
broadcast(new DictionaryEvent(this, DictionaryEventType.DELETE, entryID));
}
/**
* Exports the dictionary via an exporter
*/
public void export(Exporter ex) throws SQLException, IOException {
ex.start(settings.getName());
String query = "SELECT entry_id FROM entry";
EnumSet<ExporterSettings> expSetting = ex.getSettings();
if (expSetting == null) {
expSetting = EnumSet.noneOf(ExporterSettings.class);
}
if (expSetting.contains(ExporterSettings.ALPHABETICAL)) {
query = query + " ORDER BY LOWER(word)";
}
query = query + ";";
ResultSet res = connection.createStatement().executeQuery(query);
while (res.next()) {
ex.addEntry(getEntry(res.getLong("entry_id")));
}
ex.finish();
}
/**
* Imports entries from an Importer.
* @return
*/
public int importEntries(Importer im) throws IOException, SQLException {
int num = 0;
im.initialize();
Entry entry;
while ((entry = im.nextEntry()) != null) {
entry.insertSelf(connection);
num++;
}
broadcast(new DictionaryEvent(this, DictionaryEventType.OTHER, -1));
return num;
}
public String getName() {
return settings.getName();
}
public void setSettings(DictionarySettings settings) {
this.settings = settings;
}
public DictionarySettings getSettings() {
return settings;
}
/**
* Searches for entries that contain the specified key.
* @param key The key to search for.
* @return A vector of ids for matching entries.
*/
public Vector<Long> searchID(String key) throws SQLException {
PreparedStatement s = connection.prepareStatement(
"SELECT entry_id" +
" FROM entry WHERE word like '%'||?||'%' OR definition like '%'||?||'%'" +
" ORDER BY word");
s.setString(1, key);
s.setString(2, key);
ResultSet result = s.executeQuery();
Vector<Long> ids = new Vector<Long>();
while (result.next()) {
ids.add(result.getLong("entry_id"));
}
return ids;
}
public Vector<Long> getAllIDs() throws SQLException {
ResultSet result = connection.createStatement().executeQuery("SELECT entry_id FROM entry ORDER BY word");
Vector<Long> ids = new Vector<Long>();
while (result.next()) {
ids.add(result.getLong("entry_id"));
}
return ids;
}
public Vector<Entry> searchEntry(String key) throws SQLException {
Vector<Long> ids = searchID(key);
Vector<Entry> entries = new Vector<Entry>(ids.size());
for (int i = 0; i < ids.size(); i++) {
entries.add(getEntry(ids.get(i)));
}
return entries;
}
public void addDictionaryListener(DictionaryListener l) {
listeners.add(l);
}
public void removeDictionaryListener(DictionaryListener l) {
listeners.remove(l);
}
private void broadcast(DictionaryEvent e) {
for (DictionaryListener l : listeners) {
l.recieveEvent(e);
}
}
}
| |
/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.nui.layers.hud;
import com.google.common.collect.Maps;
import org.joml.Rectanglei;
import org.joml.Vector2i;
import org.terasology.assets.ResourceUrn;
import org.terasology.assets.management.AssetManager;
import org.terasology.math.JomlUtil;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Rect2f;
import org.terasology.nui.Canvas;
import org.terasology.nui.ControlWidget;
import org.terasology.nui.UIWidget;
import org.terasology.nui.asset.UIElement;
import org.terasology.registry.In;
import org.terasology.registry.InjectionHelper;
import org.terasology.rendering.nui.CoreScreenLayer;
import org.terasology.rendering.nui.NUIManager;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
/**
*/
public class HUDScreenLayer extends CoreScreenLayer {
private Map<ResourceUrn, HUDElement> elementsLookup = Maps.newLinkedHashMap();
@In
private AssetManager assetManager;
private NUIManager manager;
public ControlWidget addHUDElement(String uri) {
return addHUDElement(uri, ControlWidget.class, Rect2f.createFromMinAndSize(0, 0, 1, 1));
}
public <T extends ControlWidget> T addHUDElement(String urn, Class<T> type, Rect2f region) {
Optional<? extends UIElement> data = assetManager.getAsset(urn, UIElement.class);
if (data.isPresent() && type.isInstance(data.get().getRootWidget())) {
return addHUDElement(data.get().getUrn(), type.cast(data.get().getRootWidget()), region);
}
return null;
}
public <T extends ControlWidget> T addHUDElement(ResourceUrn urn, Class<T> type, Rect2f region) {
Optional<? extends UIElement> data = assetManager.getAsset(urn, UIElement.class);
if (data.isPresent() && type.isInstance(data.get().getRootWidget())) {
return addHUDElement(urn, type.cast(data.get().getRootWidget()), region);
}
return null;
}
public <T extends ControlWidget> T addHUDElement(ResourceUrn urn, T widget, Rect2f region) {
InjectionHelper.inject(widget);
widget.onOpened();
elementsLookup.put(urn, new HUDElement(widget, region));
return widget;
}
public ControlWidget getHUDElement(String urn) {
return getHUDElement(new ResourceUrn(urn));
}
public ControlWidget getHUDElement(ResourceUrn urn) {
HUDElement element = elementsLookup.get(urn);
if (element != null) {
return element.widget;
}
return null;
}
public <T extends ControlWidget> T getHUDElement(String uri, Class<T> type) {
return getHUDElement(new ResourceUrn(uri), type);
}
public <T extends ControlWidget> T getHUDElement(ResourceUrn urn, Class<T> type) {
ControlWidget widget = getHUDElement(urn);
if (widget != null && type.isInstance(widget)) {
return type.cast(widget);
}
return null;
}
public boolean removeHUDElement(ResourceUrn uri) {
HUDElement removed = elementsLookup.remove(uri);
if (removed != null) {
removed.widget.onClosed();
return true;
}
return false;
}
public boolean removeHUDElement(ControlWidget element) {
Iterator<Map.Entry<ResourceUrn, HUDElement>> iterator = elementsLookup.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<ResourceUrn, HUDElement> item = iterator.next();
if (item.getValue().widget.equals(element)) {
iterator.remove();
item.getValue().widget.onClosed();
return true;
}
}
return false;
}
public void clear() {
for (HUDElement value : elementsLookup.values()) {
value.widget.onClosed();
}
elementsLookup.clear();
}
@Override
public boolean isLowerLayerVisible() {
return false;
}
@Override
public boolean isReleasingMouse() {
return false;
}
@Override
protected boolean isEscapeToCloseAllowed() {
return false;
}
@Override
public NUIManager getManager() {
return manager;
}
@Override
public void setManager(NUIManager manager) {
this.manager = manager;
}
@Override
public void onDraw(Canvas canvas) {
for (HUDElement element : elementsLookup.values()) {
int minX = TeraMath.floorToInt(element.region.minX() * canvas.size().x);
int minY = TeraMath.floorToInt(element.region.minY() * canvas.size().y);
int sizeX = TeraMath.floorToInt(element.region.width() * canvas.size().x);
int sizeY = TeraMath.floorToInt(element.region.height() * canvas.size().y);
Rectanglei region = JomlUtil.rectangleiFromMinAndSize(minX, minY, sizeX, sizeY);
canvas.drawWidget(element.widget, region);
}
}
@Override
public Vector2i getPreferredContentSize(Canvas canvas, Vector2i sizeHint) {
return sizeHint;
}
@Override
public Iterator<UIWidget> iterator() {
return new Iterator<UIWidget>() {
private Iterator<HUDElement> elementIterator = elementsLookup.values().iterator();
@Override
public boolean hasNext() {
return elementIterator.hasNext();
}
@Override
public UIWidget next() {
return elementIterator.next().widget;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public void onOpened() {
}
@Override
public void initialise() {
}
@Override
public boolean isModal() {
return false;
}
private static final class HUDElement {
ControlWidget widget;
Rect2f region;
private HUDElement(ControlWidget widget, Rect2f region) {
this.widget = widget;
this.region = region;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.protobuf.Message;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.ipc.RpcServer.Call;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@Category(SmallTests.class)
public class TestSimpleRpcScheduler {
public static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class);
private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() {
@Override
public InetSocketAddress getListenerAddress() {
return InetSocketAddress.createUnresolved("127.0.0.1", 1000);
}
};
private Configuration conf;
@Before
public void setUp() {
conf = HBaseConfiguration.create();
}
@Test
public void testBasic() throws IOException, InterruptedException {
PriorityFunction qosFunction = mock(PriorityFunction.class);
RpcScheduler scheduler = new SimpleRpcScheduler(
conf, 10, 0, 0, qosFunction, 0);
scheduler.init(CONTEXT);
scheduler.start();
CallRunner task = createMockTask();
scheduler.dispatch(task);
verify(task, timeout(1000)).run();
scheduler.stop();
}
@Test
public void testHandlerIsolation() throws IOException, InterruptedException {
CallRunner generalTask = createMockTask();
CallRunner priorityTask = createMockTask();
CallRunner replicationTask = createMockTask();
List<CallRunner> tasks = ImmutableList.of(
generalTask,
priorityTask,
replicationTask);
Map<CallRunner, Integer> qos = ImmutableMap.of(
generalTask, 0,
priorityTask, HConstants.HIGH_QOS + 1,
replicationTask, HConstants.REPLICATION_QOS);
PriorityFunction qosFunction = mock(PriorityFunction.class);
final Map<CallRunner, Thread> handlerThreads = Maps.newHashMap();
final CountDownLatch countDownLatch = new CountDownLatch(tasks.size());
Answer<Void> answerToRun = new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
synchronized (handlerThreads) {
handlerThreads.put(
(CallRunner) invocationOnMock.getMock(),
Thread.currentThread());
}
countDownLatch.countDown();
return null;
}
};
for (CallRunner task : tasks) {
doAnswer(answerToRun).when(task).run();
}
RpcScheduler scheduler = new SimpleRpcScheduler(
conf, 1, 1 ,1, qosFunction, HConstants.HIGH_QOS);
scheduler.init(CONTEXT);
scheduler.start();
for (CallRunner task : tasks) {
when(qosFunction.getPriority((RPCProtos.RequestHeader) anyObject(), (Message) anyObject()))
.thenReturn(qos.get(task));
scheduler.dispatch(task);
}
for (CallRunner task : tasks) {
verify(task, timeout(1000)).run();
}
scheduler.stop();
// Tests that these requests are handled by three distinct threads.
countDownLatch.await();
assertEquals(3, ImmutableSet.copyOf(handlerThreads.values()).size());
}
private CallRunner createMockTask() {
Call call = mock(Call.class);
CallRunner task = mock(CallRunner.class);
when(task.getCall()).thenReturn(call);
return task;
}
@Test
public void testRpcScheduler() throws Exception {
testRpcScheduler(SimpleRpcScheduler.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
testRpcScheduler(SimpleRpcScheduler.CALL_QUEUE_TYPE_FIFO_CONF_VALUE);
}
private void testRpcScheduler(final String queueType) throws Exception {
Configuration schedConf = HBaseConfiguration.create();
schedConf.set(SimpleRpcScheduler.CALL_QUEUE_TYPE_CONF_KEY, queueType);
PriorityFunction priority = mock(PriorityFunction.class);
when(priority.getPriority(any(RequestHeader.class), any(Message.class)))
.thenReturn(HConstants.NORMAL_QOS);
RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 1, 1, 1, priority,
HConstants.QOS_THRESHOLD);
try {
scheduler.start();
CallRunner smallCallTask = mock(CallRunner.class);
RpcServer.Call smallCall = mock(RpcServer.Call.class);
RequestHeader smallHead = RequestHeader.newBuilder().setCallId(1).build();
when(smallCallTask.getCall()).thenReturn(smallCall);
when(smallCall.getHeader()).thenReturn(smallHead);
CallRunner largeCallTask = mock(CallRunner.class);
RpcServer.Call largeCall = mock(RpcServer.Call.class);
RequestHeader largeHead = RequestHeader.newBuilder().setCallId(50).build();
when(largeCallTask.getCall()).thenReturn(largeCall);
when(largeCall.getHeader()).thenReturn(largeHead);
CallRunner hugeCallTask = mock(CallRunner.class);
RpcServer.Call hugeCall = mock(RpcServer.Call.class);
RequestHeader hugeHead = RequestHeader.newBuilder().setCallId(100).build();
when(hugeCallTask.getCall()).thenReturn(hugeCall);
when(hugeCall.getHeader()).thenReturn(hugeHead);
when(priority.getDeadline(eq(smallHead), any(Message.class))).thenReturn(0L);
when(priority.getDeadline(eq(largeHead), any(Message.class))).thenReturn(50L);
when(priority.getDeadline(eq(hugeHead), any(Message.class))).thenReturn(100L);
final ArrayList<Integer> work = new ArrayList<Integer>();
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) {
synchronized (work) {
work.add(10);
}
Threads.sleepWithoutInterrupt(100);
return null;
}
}).when(smallCallTask).run();
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) {
synchronized (work) {
work.add(50);
}
Threads.sleepWithoutInterrupt(100);
return null;
}
}).when(largeCallTask).run();
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) {
synchronized (work) {
work.add(100);
}
Threads.sleepWithoutInterrupt(100);
return null;
}
}).when(hugeCallTask).run();
scheduler.dispatch(smallCallTask);
scheduler.dispatch(smallCallTask);
scheduler.dispatch(smallCallTask);
scheduler.dispatch(hugeCallTask);
scheduler.dispatch(smallCallTask);
scheduler.dispatch(largeCallTask);
scheduler.dispatch(smallCallTask);
scheduler.dispatch(smallCallTask);
while (work.size() < 8) {
Threads.sleepWithoutInterrupt(100);
}
int seqSum = 0;
int totalTime = 0;
for (int i = 0; i < work.size(); ++i) {
LOG.debug("Request i=" + i + " value=" + work.get(i));
seqSum += work.get(i);
totalTime += seqSum;
}
LOG.debug("Total Time: " + totalTime);
// -> [small small small huge small large small small]
// -> NO REORDER [10 10 10 100 10 50 10 10] -> 930 (FIFO Queue)
// -> WITH REORDER [10 10 10 10 10 10 50 100] -> 530 (Deadline Queue)
if (queueType.equals(SimpleRpcScheduler.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE)) {
assertEquals(530, totalTime);
} else /* if (queueType.equals(SimpleRpcScheduler.CALL_QUEUE_TYPE_FIFO_CONF_VALUE)) */ {
assertEquals(930, totalTime);
}
} finally {
scheduler.stop();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.qjournal.server;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;
import javax.management.ObjectName;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.qjournal.client.QuorumJournalManager;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.StorageErrorReporter;
import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.htrace.core.Tracer;
import org.mortbay.util.ajax.JSON;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
/**
 * The JournalNode is a daemon which allows namenodes using
 * the QuorumJournalManager to log and retrieve edits stored
 * remotely. It is a thin wrapper around a local edit log
 * directory with the addition of facilities to participate
 * in the quorum protocol.
 */
@InterfaceAudience.Private
public class JournalNode implements Tool, Configurable, JournalNodeMXBean {
  public static final Log LOG = LogFactory.getLog(JournalNode.class);
  private Configuration conf;
  private JournalNodeRpcServer rpcServer;
  private JournalNodeHttpServer httpServer;
  // Journals keyed by journal identifier (jid); access is synchronized on this.
  private final Map<String, Journal> journalsById = Maps.newHashMap();
  private ObjectName journalNodeInfoBeanName;
  private String httpServerURI;
  // Root directory under which each journal gets its own subdirectory.
  private File localDir;
  Tracer tracer;

  static {
    HdfsConfiguration.init();
  }

  /**
   * When stopped, the daemon will exit with this code.
   */
  private int resultCode = 0;

  /**
   * Return the journal for the given identifier, creating and caching it
   * on first access.
   *
   * @param jid the journal identifier
   * @param startOpt startup option passed to a newly created journal
   * @return the cached or newly created journal
   * @throws IOException if the journal cannot be initialized
   */
  synchronized Journal getOrCreateJournal(String jid, StartupOption startOpt)
      throws IOException {
    QuorumJournalManager.checkJournalId(jid);
    Journal journal = journalsById.get(jid);
    if (journal == null) {
      File logDir = getLogDir(jid);
      LOG.info("Initializing journal in directory " + logDir);
      journal = new Journal(conf, logDir, jid, startOpt, new ErrorReporter());
      journalsById.put(jid, journal);
    }
    return journal;
  }

  @VisibleForTesting
  public Journal getOrCreateJournal(String jid) throws IOException {
    return getOrCreateJournal(jid, StartupOption.REGULAR);
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
    this.localDir = new File(
        conf.get(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY,
            DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_DEFAULT).trim());
    if (this.tracer == null) {
      this.tracer = new Tracer.Builder("JournalNode").
          conf(TraceUtils.wrapHadoopConf("journalnode.htrace", conf)).
          build();
    }
  }

  /**
   * Validate that the configured edits directory is an absolute path and
   * create it (with a permission/health check) if needed.
   */
  private static void validateAndCreateJournalDir(File dir) throws IOException {
    if (!dir.isAbsolute()) {
      throw new IllegalArgumentException(
          "Journal dir '" + dir + "' should be an absolute path");
    }
    DiskChecker.checkDir(dir);
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public int run(String[] args) throws Exception {
    start();
    return join();
  }

  /**
   * Start listening for edits via RPC.
   */
  public void start() throws IOException {
    Preconditions.checkState(!isStarted(), "JN already running");
    validateAndCreateJournalDir(localDir);
    DefaultMetricsSystem.initialize("JournalNode");
    JvmMetrics.create("JournalNode",
        conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
        DefaultMetricsSystem.instance());
    InetSocketAddress socAddr = JournalNodeRpcServer.getAddress(conf);
    SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
        DFSConfigKeys.DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName());
    // Register the MXBean only after the journal dir has been validated; the
    // MXBean's getJournalsStatus() relies on localDir existing.
    registerJNMXBean();
    httpServer = new JournalNodeHttpServer(conf, this);
    httpServer.start();
    httpServerURI = httpServer.getServerURI().toString();
    rpcServer = new JournalNodeRpcServer(conf, this);
    rpcServer.start();
  }

  public boolean isStarted() {
    return rpcServer != null;
  }

  /**
   * @return the address the IPC server is bound to
   */
  public InetSocketAddress getBoundIpcAddress() {
    return rpcServer.getAddress();
  }

  @Deprecated
  public InetSocketAddress getBoundHttpAddress() {
    return httpServer.getAddress();
  }

  public String getHttpServerURI() {
    return httpServerURI;
  }

  /**
   * Stop the daemon with the given status code
   * @param rc the status code with which to exit (non-zero
   * should indicate an error)
   */
  public void stop(int rc) {
    this.resultCode = rc;
    if (rpcServer != null) {
      rpcServer.stop();
    }
    if (httpServer != null) {
      try {
        httpServer.stop();
      } catch (IOException ioe) {
        LOG.warn("Unable to stop HTTP server for " + this, ioe);
      }
    }
    for (Journal j : journalsById.values()) {
      IOUtils.cleanup(LOG, j);
    }
    DefaultMetricsSystem.shutdown();
    if (journalNodeInfoBeanName != null) {
      MBeans.unregister(journalNodeInfoBeanName);
      journalNodeInfoBeanName = null;
    }
    if (tracer != null) {
      tracer.close();
      tracer = null;
    }
  }

  /**
   * Wait for the daemon to exit.
   * @return the result code (non-zero if error)
   */
  int join() throws InterruptedException {
    if (rpcServer != null) {
      rpcServer.join();
    }
    return resultCode;
  }

  public void stopAndJoin(int rc) throws InterruptedException {
    stop(rc);
    join();
  }

  /**
   * Return the directory inside our configured storage
   * dir which corresponds to a given journal.
   * @param jid the journal identifier
   * @return the file, which may or may not exist yet
   */
  private File getLogDir(String jid) {
    String dir = conf.get(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY,
        DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_DEFAULT);
    Preconditions.checkArgument(jid != null &&
        !jid.isEmpty(),
        "bad journal identifier: %s", jid);
    return new File(new File(dir), jid);
  }

  @Override // JournalNodeMXBean
  public String getJournalsStatus() {
    // jid:{Formatted:True/False}
    Map<String, Map<String, String>> status =
        new HashMap<String, Map<String, String>>();
    synchronized (this) {
      for (Map.Entry<String, Journal> entry : journalsById.entrySet()) {
        Map<String, String> jMap = new HashMap<String, String>();
        jMap.put("Formatted", Boolean.toString(entry.getValue().isFormatted()));
        status.put(entry.getKey(), jMap);
      }
    }
    // It is possible that some journals have been formatted before, while the
    // corresponding journals are not in journalsById yet (because of restarting
    // JN, e.g.). For simplicity, let's just assume a journal is formatted if
    // there is a directory for it. We can also call analyzeStorage method for
    // these directories if necessary.
    // Also note that we do not need to check localDir here since
    // validateAndCreateJournalDir has been called before we register the
    // MXBean.
    File[] journalDirs = localDir.listFiles(new FileFilter() {
      @Override
      public boolean accept(File file) {
        return file.isDirectory();
      }
    });
    // File.listFiles() returns null if the directory does not exist or an
    // I/O error occurs (e.g. it was removed out from under us); guard
    // against an NPE in that case and report only the in-memory journals.
    if (journalDirs != null) {
      for (File journalDir : journalDirs) {
        String jid = journalDir.getName();
        if (!status.containsKey(jid)) {
          Map<String, String> jMap = new HashMap<String, String>();
          jMap.put("Formatted", "true");
          status.put(jid, jMap);
        }
      }
    }
    return JSON.toString(status);
  }

  /**
   * Register JournalNodeMXBean
   */
  private void registerJNMXBean() {
    journalNodeInfoBeanName = MBeans.register("JournalNode", "JournalNodeInfo", this);
  }

  /**
   * Reports storage errors by logging fatally and shutting the daemon down
   * with a non-zero exit code.
   */
  private class ErrorReporter implements StorageErrorReporter {
    @Override
    public void reportErrorOnFile(File f) {
      LOG.fatal("Error reported on file " + f + "... exiting",
          new Exception());
      stop(1);
    }
  }

  public static void main(String[] args) throws Exception {
    StringUtils.startupShutdownMessage(JournalNode.class, args, LOG);
    System.exit(ToolRunner.run(new JournalNode(), args));
  }

  public void discardSegments(String journalId, long startTxId)
      throws IOException {
    getOrCreateJournal(journalId).discardSegments(startTxId);
  }

  public void doPreUpgrade(String journalId) throws IOException {
    getOrCreateJournal(journalId).doPreUpgrade();
  }

  public void doUpgrade(String journalId, StorageInfo sInfo) throws IOException {
    getOrCreateJournal(journalId).doUpgrade(sInfo);
  }

  public void doFinalize(String journalId) throws IOException {
    getOrCreateJournal(journalId).doFinalize();
  }

  public Boolean canRollBack(String journalId, StorageInfo storage,
      StorageInfo prevStorage, int targetLayoutVersion) throws IOException {
    return getOrCreateJournal(journalId, StartupOption.ROLLBACK).canRollBack(
        storage, prevStorage, targetLayoutVersion);
  }

  public void doRollback(String journalId) throws IOException {
    getOrCreateJournal(journalId, StartupOption.ROLLBACK).doRollback();
  }

  public Long getJournalCTime(String journalId) throws IOException {
    return getOrCreateJournal(journalId).getJournalCTime();
  }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/**
 * Interface of <a
 * href="http://schema.org/PhotographAction">http://schema.org/PhotographAction</a>.
 */
public interface PhotographAction extends CreateAction {
  /**
   * Builder interface of <a
   * href="http://schema.org/PhotographAction">http://schema.org/PhotographAction</a>.
   */
  public interface Builder extends CreateAction.Builder {
    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property actionStatus. */
    Builder addActionStatus(ActionStatusType value);

    /** Add a value to property actionStatus. */
    Builder addActionStatus(String value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);

    /** Add a value to property agent. */
    Builder addAgent(Organization value);

    /** Add a value to property agent. */
    Builder addAgent(Organization.Builder value);

    /** Add a value to property agent. */
    Builder addAgent(Person value);

    /** Add a value to property agent. */
    Builder addAgent(Person.Builder value);

    /** Add a value to property agent. */
    Builder addAgent(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    /** Add a value to property description. */
    Builder addDescription(String value);

    /** Add a value to property endTime. */
    Builder addEndTime(DateTime value);

    /** Add a value to property endTime. */
    Builder addEndTime(String value);

    /** Add a value to property error. */
    Builder addError(Thing value);

    /** Add a value to property error. */
    Builder addError(Thing.Builder value);

    /** Add a value to property error. */
    Builder addError(String value);

    /** Add a value to property image. */
    Builder addImage(ImageObject value);

    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);

    /** Add a value to property image. */
    Builder addImage(URL value);

    /** Add a value to property image. */
    Builder addImage(String value);

    /** Add a value to property instrument. */
    Builder addInstrument(Thing value);

    /** Add a value to property instrument. */
    Builder addInstrument(Thing.Builder value);

    /** Add a value to property instrument. */
    Builder addInstrument(String value);

    /** Add a value to property location. */
    Builder addLocation(Place value);

    /** Add a value to property location. */
    Builder addLocation(Place.Builder value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress value);

    /** Add a value to property location. */
    Builder addLocation(PostalAddress.Builder value);

    /** Add a value to property location. */
    Builder addLocation(Text value);

    /** Add a value to property location. */
    Builder addLocation(String value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    /** Add a value to property name. */
    Builder addName(String value);

    /** Add a value to property object. */
    Builder addObject(Thing value);

    /** Add a value to property object. */
    Builder addObject(Thing.Builder value);

    /** Add a value to property object. */
    Builder addObject(String value);

    /** Add a value to property participant. */
    Builder addParticipant(Organization value);

    /** Add a value to property participant. */
    Builder addParticipant(Organization.Builder value);

    /** Add a value to property participant. */
    Builder addParticipant(Person value);

    /** Add a value to property participant. */
    Builder addParticipant(Person.Builder value);

    /** Add a value to property participant. */
    Builder addParticipant(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);

    /** Add a value to property result. */
    Builder addResult(Thing value);

    /** Add a value to property result. */
    Builder addResult(Thing.Builder value);

    /** Add a value to property result. */
    Builder addResult(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    /** Add a value to property sameAs. */
    Builder addSameAs(String value);

    /** Add a value to property startTime. */
    Builder addStartTime(DateTime value);

    /** Add a value to property startTime. */
    Builder addStartTime(String value);

    /** Add a value to property target. */
    Builder addTarget(EntryPoint value);

    /** Add a value to property target. */
    Builder addTarget(EntryPoint.Builder value);

    /** Add a value to property target. */
    Builder addTarget(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    /** Add a value to property url. */
    Builder addUrl(String value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link PhotographAction} object. */
    PhotographAction build();
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.recovery;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateNonMasterUpdateTask;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.util.CancellableThreads.Interruptable;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.shard.IllegalIndexShardStateException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardClosedException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.RemoteTransportException;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;
/**
* ShardRecoveryHandler handles the three phases of shard recovery, which is
* everything relating to copying the segment files as well as sending translog
* operations across the wire once the segments have been copied.
*/
public final class ShardRecoveryHandler implements Engine.RecoveryHandler {
    private final ESLogger logger;
    // Shard that is going to be recovered (the "source")
    private final IndexShard shard;
    private final String indexName;
    private final int shardId;
    // Request containing source and target node information
    private final StartRecoveryRequest request;
    private final RecoverySettings recoverySettings;
    private final TransportService transportService;
    // Timeout for short per-request operations (file chunks, prepare-translog).
    private final TimeValue internalActionTimeout;
    // Longer timeout used for the FINALIZE step.
    private final TimeValue internalActionLongTimeout;
    private final ClusterService clusterService;
    private final IndexService indexService;
    private final MappingUpdatedAction mappingUpdatedAction;
    // Accumulates per-phase timings and file statistics; returned via getResponse().
    private final RecoveryResponse response;
    // Groups all threads doing recovery work so they can be cancelled together;
    // onCancel translates a cancellation into either a shard-closed exception
    // (when the shard was closed underneath us) or an execution-cancelled one.
    private final CancellableThreads cancellableThreads = new CancellableThreads() {
        @Override
        protected void onCancel(String reason, @Nullable Throwable suppressedException) {
            RuntimeException e;
            if (shard.state() == IndexShardState.CLOSED) { // check if the shard got closed on us
                e = new IndexShardClosedException(shard.shardId(), "shard is closed and recovery was canceled reason [" + reason + "]");
            } else {
                e = new ExecutionCancelledException("recovery was canceled reason [" + reason + "]");
            }
            if (suppressedException != null) {
                e.addSuppressed(suppressedException);
            }
            throw e;
        }
    };
    /**
     * Creates a handler for recovering the given source shard to the target
     * node named in {@code request}.
     *
     * @param shard the source shard being recovered from
     * @param request source/target node information for this recovery
     * @param recoverySettings tunables (chunk size, compression, rate limiting, pools)
     * @param transportService used to send recovery requests to the target node
     * @param internalActionTimeout timeout for short per-request operations
     * @param internalActionLongTimeout timeout for the finalize step
     * @param clusterService used for cluster-state access
     * @param indicesService used to resolve the {@link IndexService} for the shard's index
     * @param mappingUpdatedAction used to sync mappings with the master
     * @param logger logger to emit recovery progress through
     */
    public ShardRecoveryHandler(final IndexShard shard, final StartRecoveryRequest request, final RecoverySettings recoverySettings,
                                final TransportService transportService, final TimeValue internalActionTimeout,
                                final TimeValue internalActionLongTimeout, final ClusterService clusterService,
                                final IndicesService indicesService, final MappingUpdatedAction mappingUpdatedAction, final ESLogger logger) {
        this.shard = shard;
        this.request = request;
        this.recoverySettings = recoverySettings;
        this.logger = logger;
        this.transportService = transportService;
        this.internalActionTimeout = internalActionTimeout;
        this.internalActionLongTimeout = internalActionLongTimeout;
        this.clusterService = clusterService;
        this.indexName = this.request.shardId().index().name();
        this.shardId = this.request.shardId().id();
        this.indexService = indicesService.indexServiceSafe(indexName);
        this.mappingUpdatedAction = mappingUpdatedAction;
        this.response = new RecoveryResponse();
    }
/**
* @return the {@link RecoveryResponse} after the recovery has completed all three phases
*/
public RecoveryResponse getResponse() {
return this.response;
}
/**
* Perform phase1 of the recovery operations. Once this {@link SnapshotIndexCommit}
* snapshot has been performed no commit operations (files being fsync'd)
* are effectively allowed on this index until all recovery phases are done
*
* Phase1 examines the segment files on the target node and copies over the
* segments that are missing. Only segments that have the same size and
* checksum can be reused
*
* {@code InternalEngine#recover} is responsible for snapshotting the index
* and releasing the snapshot once all 3 phases of recovery are complete
*/
@Override
public void phase1(final SnapshotIndexCommit snapshot) throws ElasticsearchException {
cancellableThreads.checkForCancel();
// Total size of segment files that are recovered
long totalSize = 0;
// Total size of segment files that were able to be re-used
long existingTotalSize = 0;
final Store store = shard.store();
store.incRef();
try {
StopWatch stopWatch = new StopWatch().start();
final Store.MetadataSnapshot recoverySourceMetadata = store.getMetadata(snapshot);
for (String name : snapshot.getFiles()) {
final StoreFileMetaData md = recoverySourceMetadata.get(name);
if (md == null) {
logger.info("Snapshot differs from actual index for file: {} meta: {}", name, recoverySourceMetadata.asMap());
throw new CorruptIndexException("Snapshot differs from actual index - maybe index was removed metadata has " +
recoverySourceMetadata.asMap().size() + " files", name);
}
}
// Generate a "diff" of all the identical, different, and missing
// segment files on the target node, using the existing files on
// the source node
final Store.RecoveryDiff diff = recoverySourceMetadata.recoveryDiff(new Store.MetadataSnapshot(request.existingFiles()));
for (StoreFileMetaData md : diff.identical) {
response.phase1ExistingFileNames.add(md.name());
response.phase1ExistingFileSizes.add(md.length());
existingTotalSize += md.length();
if (logger.isTraceEnabled()) {
logger.trace("[{}][{}] recovery [phase1] to {}: not recovering [{}], exists in local store and has checksum [{}], size [{}]",
indexName, shardId, request.targetNode(), md.name(), md.checksum(), md.length());
}
totalSize += md.length();
}
for (StoreFileMetaData md : Iterables.concat(diff.different, diff.missing)) {
if (request.existingFiles().containsKey(md.name())) {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], exists in local store, but is different: remote [{}], local [{}]",
indexName, shardId, request.targetNode(), md.name(), request.existingFiles().get(md.name()), md);
} else {
logger.trace("[{}][{}] recovery [phase1] to {}: recovering [{}], does not exists in remote",
indexName, shardId, request.targetNode(), md.name());
}
response.phase1FileNames.add(md.name());
response.phase1FileSizes.add(md.length());
totalSize += md.length();
}
response.phase1TotalSize = totalSize;
response.phase1ExistingTotalSize = existingTotalSize;
logger.trace("[{}][{}] recovery [phase1] to {}: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]",
indexName, shardId, request.targetNode(), response.phase1FileNames.size(),
new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
RecoveryFilesInfoRequest recoveryInfoFilesRequest = new RecoveryFilesInfoRequest(request.recoveryId(), request.shardId(),
response.phase1FileNames, response.phase1FileSizes, response.phase1ExistingFileNames, response.phase1ExistingFileSizes,
response.phase1TotalSize, response.phase1ExistingTotalSize);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILES_INFO, recoveryInfoFilesRequest,
TransportRequestOptions.options().withTimeout(internalActionTimeout),
EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
// This latch will be used to wait until all files have been transferred to the target node
final CountDownLatch latch = new CountDownLatch(response.phase1FileNames.size());
final CopyOnWriteArrayList<Throwable> exceptions = new CopyOnWriteArrayList<>();
final AtomicReference<Throwable> corruptedEngine = new AtomicReference<>();
int fileIndex = 0;
ThreadPoolExecutor pool;
for (final String name : response.phase1FileNames) {
long fileSize = response.phase1FileSizes.get(fileIndex);
// Files are split into two categories, files that are "small"
// (under 5mb) and other files. Small files are transferred
// using a separate thread pool dedicated to small files.
//
// The idea behind this is that while we are transferring an
// older, large index, a user may create a new index, but that
// index will not be able to recover until the large index
// finishes, by using two different thread pools we can allow
// tiny files (like segments for a brand new index) to be
// recovered while ongoing large segment recoveries are
// happening. It also allows these pools to be configured
// separately.
if (fileSize > recoverySettings.SMALL_FILE_CUTOFF_BYTES) {
pool = recoverySettings.concurrentStreamPool();
} else {
pool = recoverySettings.concurrentSmallFileStreamPool();
}
pool.execute(new AbstractRunnable() {
@Override
public void onFailure(Throwable t) {
// we either got rejected or the store can't be incremented / we are canceled
logger.debug("Failed to transfer file [" + name + "] on recovery");
}
public void onAfter() {
// Signify this file has completed by decrementing the latch
latch.countDown();
}
@Override
protected void doRun() {
cancellableThreads.checkForCancel();
store.incRef();
final StoreFileMetaData md = recoverySourceMetadata.get(name);
try (final IndexInput indexInput = store.directory().openInput(name, IOContext.READONCE)) {
final int BUFFER_SIZE = (int) recoverySettings.fileChunkSize().bytes();
final byte[] buf = new byte[BUFFER_SIZE];
boolean shouldCompressRequest = recoverySettings.compress();
if (CompressorFactory.isCompressed(indexInput)) {
shouldCompressRequest = false;
}
final long len = indexInput.length();
long readCount = 0;
final TransportRequestOptions requestOptions = TransportRequestOptions.options()
.withCompress(shouldCompressRequest)
.withType(TransportRequestOptions.Type.RECOVERY)
.withTimeout(internalActionTimeout);
while (readCount < len) {
if (shard.state() == IndexShardState.CLOSED) { // check if the shard got closed on us
throw new IndexShardClosedException(shard.shardId());
}
int toRead = readCount + BUFFER_SIZE > len ? (int) (len - readCount) : BUFFER_SIZE;
final long position = indexInput.getFilePointer();
// Pause using the rate limiter, if desired, to throttle the recovery
if (recoverySettings.rateLimiter() != null) {
recoverySettings.rateLimiter().pause(toRead);
}
indexInput.readBytes(buf, 0, toRead, false);
final BytesArray content = new BytesArray(buf, 0, toRead);
readCount += toRead;
final boolean lastChunk = readCount == len;
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
// Actually send the file chunk to the target node, waiting for it to complete
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FILE_CHUNK,
new RecoveryFileChunkRequest(request.recoveryId(), request.shardId(), md, position, content, lastChunk),
requestOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
}
} catch (Throwable e) {
final Throwable corruptIndexException;
if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(e)) != null) {
if (store.checkIntegrity(md) == false) { // we are corrupted on the primary -- fail!
logger.warn("{} Corrupted file detected {} checksum mismatch", shard.shardId(), md);
if (corruptedEngine.compareAndSet(null, corruptIndexException) == false) {
// if we are not the first exception, add ourselves as suppressed to the main one:
corruptedEngine.get().addSuppressed(e);
}
} else { // corruption has happened on the way to replica
RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null);
exception.addSuppressed(e);
exceptions.add(0, exception); // last exception first
logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK",
corruptIndexException, shard.shardId(), request.targetNode(), md);
}
} else {
exceptions.add(0, e); // last exceptions first
}
} finally {
store.decRef();
}
}
});
fileIndex++;
}
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
// Wait for all files that need to be transferred to finish transferring
latch.await();
}
});
if (corruptedEngine.get() != null) {
throw corruptedEngine.get();
} else {
ExceptionsHelper.rethrowAndSuppress(exceptions);
}
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
// Send the CLEAN_FILES request, which takes all of the files that
// were transferred and renames them from their temporary file
// names to the actual file names. It also writes checksums for
// the files after they have been renamed.
//
// Once the files have been renamed, any other files that are not
// related to this recovery (out of date segments, for example)
// are deleted
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.CLEAN_FILES,
new RecoveryCleanFilesRequest(request.recoveryId(), shard.shardId(), recoverySourceMetadata),
TransportRequestOptions.options().withTimeout(internalActionTimeout),
EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
stopWatch.stop();
logger.trace("[{}][{}] recovery [phase1] to {}: took [{}]", indexName, shardId, request.targetNode(), stopWatch.totalTime());
response.phase1Time = stopWatch.totalTime().millis();
} catch (Throwable e) {
throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e);
} finally {
store.decRef();
}
}
/**
* Perform phase2 of the recovery process
*
* Phase2 takes a snapshot of the current translog *without* acquiring the
* write lock (however, the translog snapshot is a point-in-time view of
* the translog). It then sends each translog operation to the target node
* so it can be replayed into the new shard.
*
* {@code InternalEngine#recover} is responsible for taking the snapshot
* of the translog and releasing it once all 3 phases of recovery are complete
*/
@Override
public void phase2(Translog.Snapshot snapshot) throws ElasticsearchException {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
cancellableThreads.checkForCancel();
logger.trace("{} recovery [phase2] to {}: start", request.shardId(), request.targetNode());
StopWatch stopWatch = new StopWatch().start();
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
// Send a request preparing the new shard's translog to receive
// operations. This ensures the shard engine is started and disables
// garbage collection (not the JVM's GC!) of tombstone deletes
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.PREPARE_TRANSLOG,
new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId()),
TransportRequestOptions.options().withTimeout(internalActionTimeout), EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
stopWatch.stop();
response.startTime = stopWatch.totalTime().millis();
logger.trace("{} recovery [phase2] to {}: start took [{}]",
request.shardId(), request.targetNode(), request.targetNode(), stopWatch.totalTime());
logger.trace("{} recovery [phase2] to {}: updating current mapping to master", request.shardId(), request.targetNode());
// Ensure that the mappings are synced with the master node
updateMappingOnMaster();
logger.trace("{} recovery [phase2] to {}: sending transaction log operations", request.shardId(), request.targetNode());
stopWatch = new StopWatch().start();
// Send all the snapshot's translog operations to the target
int totalOperations = sendSnapshot(snapshot);
stopWatch.stop();
logger.trace("{} recovery [phase2] to {}: took [{}]", request.shardId(), request.targetNode(), stopWatch.totalTime());
response.phase2Time = stopWatch.totalTime().millis();
response.phase2Operations = totalOperations;
}
    /**
     * Perform phase 3 of the recovery process
     *
     * Phase3 again takes a snapshot of the translog, however this time the
     * snapshot is acquired under a write lock. The translog operations are
     * sent to the target node where they are replayed.
     *
     * {@code InternalEngine#recover} is responsible for taking the snapshot
     * of the translog, and after phase 3 completes the snapshots from all
     * three phases are released.
     */
    @Override
    public void phase3(Translog.Snapshot snapshot) throws ElasticsearchException {
        if (shard.state() == IndexShardState.CLOSED) {
            throw new IndexShardClosedException(request.shardId());
        }
        cancellableThreads.checkForCancel();
        logger.trace("[{}][{}] recovery [phase3] to {}: sending transaction log operations", indexName, shardId, request.targetNode());
        StopWatch stopWatch = new StopWatch().start();
        // Send the translog operations to the target node
        int totalOperations = sendSnapshot(snapshot);
        cancellableThreads.execute(new Interruptable() {
            @Override
            public void run() throws InterruptedException {
                // Send the FINALIZE request to the target node. The finalize request
                // clears unreferenced translog files, refreshes the engine now that
                // new segments are available, and enables garbage collection of
                // tombstone files. The shard is also moved to the POST_RECOVERY phase
                // during this time
                transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.FINALIZE,
                        new RecoveryFinalizeRecoveryRequest(request.recoveryId(), request.shardId()),
                        TransportRequestOptions.options().withTimeout(internalActionLongTimeout),
                        EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
            }
        });
        if (request.markAsRelocated()) {
            // TODO what happens if the recovery process fails afterwards, we need to mark this back to started
            try {
                shard.relocated("to " + request.targetNode());
            } catch (IllegalIndexShardStateException e) {
                // we can ignore this exception since, on the other node, when it moved to phase3
                // it will also send shard started, which might cause the index shard we work against
                // to move be closed by the time we get to the relocated method
            }
        }
        stopWatch.stop();
        logger.trace("[{}][{}] recovery [phase3] to {}: took [{}]",
                indexName, shardId, request.targetNode(), stopWatch.totalTime());
        response.phase3Time = stopWatch.totalTime().millis();
        response.phase3Operations = totalOperations;
    }
/**
 * Ensures that the mapping in the cluster state is the same as the mapping
 * in our mapper service. If the mapping is not in sync, sends a request
 * to update it in the cluster state and blocks until it has finished
 * being updated (or until {@code internalActionTimeout} elapses).
 */
private void updateMappingOnMaster() {
// we test that the cluster state is in sync with our in memory mapping stored by the mapperService
// we have to do it under the "cluster state update" thread to make sure that one doesn't modify it
// while we're checking
final BlockingQueue<DocumentMapper> documentMappersToUpdate = ConcurrentCollections.newBlockingQueue();
final CountDownLatch latch = new CountDownLatch(1);
clusterService.submitStateUpdateTask("recovery_mapping_check", new ProcessedClusterStateNonMasterUpdateTask() {
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
// Task finished on the cluster-state thread; release the waiter below.
latch.countDown();
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
IndexMetaData indexMetaData = clusterService.state().metaData().getIndices().get(indexService.index().getName());
ImmutableOpenMap<String, MappingMetaData> metaDataMappings = null;
if (indexMetaData != null) {
metaDataMappings = indexMetaData.getMappings();
}
// default mapping should not be sent back, it can only be updated by put mapping API, and its
// a full in place replace, we don't want to override a potential update coming it
for (DocumentMapper documentMapper : indexService.mapperService().docMappers(false)) {
MappingMetaData mappingMetaData = metaDataMappings == null ? null : metaDataMappings.get(documentMapper.type());
if (mappingMetaData == null || !documentMapper.refreshSource().equals(mappingMetaData.source())) {
// not on master yet in the right form
documentMappersToUpdate.add(documentMapper);
}
}
// Read-only check: the cluster state itself is never modified here.
return currentState;
}
@Override
public void onFailure(String source, @Nullable Throwable t) {
logger.error("unexpected error while checking for pending mapping changes", t);
// Release the waiter even on failure so recovery does not hang.
latch.countDown();
}
});
// Block (interruptibly, via cancellableThreads) until the check above ran.
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
latch.await();
}
});
if (documentMappersToUpdate.isEmpty()) {
return;
}
// One latch count per pending mapping; counted down on ack or failure.
final CountDownLatch updatedOnMaster = new CountDownLatch(documentMappersToUpdate.size());
MappingUpdatedAction.MappingUpdateListener listener = new MappingUpdatedAction.MappingUpdateListener() {
@Override
public void onMappingUpdate() {
updatedOnMaster.countDown();
}
@Override
public void onFailure(Throwable t) {
logger.debug("{} recovery to {}: failed to update mapping on master", request.shardId(), request.targetNode(), t);
// Count down on failure too: a missed update only delays mapping sync,
// it must not deadlock the recovery.
updatedOnMaster.countDown();
}
};
for (DocumentMapper documentMapper : documentMappersToUpdate) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().getName(), documentMapper, indexService.indexUUID(), listener);
}
// Bounded wait for the master acks; timing out is logged but non-fatal.
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
try {
if (!updatedOnMaster.await(internalActionTimeout.millis(), TimeUnit.MILLISECONDS)) {
logger.debug("[{}][{}] recovery [phase2] to {}: waiting on pending mapping update timed out. waited [{}]",
indexName, shardId, request.targetNode(), internalActionTimeout);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
logger.debug("interrupted while waiting for mapping to update on master");
}
}
});
}
/**
 * Send the given snapshot's operations to this handler's target node.
 *
 * Operations are bulked into a single request depending on an operation
 * count limit or size-in-bytes limit
 *
 * @param snapshot the translog snapshot whose operations are streamed
 * @return the total number of translog operations that were sent
 * @throws IndexShardClosedException if the shard is closed while sending
 */
private int sendSnapshot(Translog.Snapshot snapshot) throws ElasticsearchException {
// ops/size track the current (unsent) batch; totalOperations is cumulative.
int ops = 0;
long size = 0;
int totalOperations = 0;
// NOTE: this list is captured by the anonymous Interruptables below and is
// cleared after each flush, so each run(...) sees only the current batch.
final List<Translog.Operation> operations = Lists.newArrayList();
Translog.Operation operation = snapshot.next();
final TransportRequestOptions recoveryOptions = TransportRequestOptions.options()
.withCompress(recoverySettings.compress())
.withType(TransportRequestOptions.Type.RECOVERY)
.withTimeout(internalActionLongTimeout);
while (operation != null) {
if (shard.state() == IndexShardState.CLOSED) {
throw new IndexShardClosedException(request.shardId());
}
cancellableThreads.checkForCancel();
operations.add(operation);
ops += 1;
size += operation.estimateSize();
totalOperations++;
// Check if this request is past the size or bytes threshold, and
// if so, send it off
if (ops >= recoverySettings.translogOps() || size >= recoverySettings.translogSize().bytes()) {
// don't throttle translog, since we lock for phase3 indexing,
// so we need to move it as fast as possible. Note, since we
// index docs to replicas while the index files are recovered
// the lock can potentially be removed, in which case, it might
// make sense to re-enable throttling in this phase
// if (recoverySettings.rateLimiter() != null) {
// recoverySettings.rateLimiter().pause(size);
// }
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest,
recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
// Reset the batch accumulators; safe because execute(...) above blocked
// until the request completed, so `operations` is no longer referenced.
ops = 0;
size = 0;
operations.clear();
}
operation = snapshot.next();
}
// send the leftover
if (!operations.isEmpty()) {
cancellableThreads.execute(new Interruptable() {
@Override
public void run() throws InterruptedException {
RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest(request.recoveryId(), request.shardId(), operations);
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest,
recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
});
}
return totalOperations;
}
/**
 * Cancels the recovery and interrupts all eligible threads.
 *
 * @param reason human-readable reason recorded for the cancellation
 */
public void cancel(String reason) {
cancellableThreads.cancel(reason);
}
@Override
public String toString() {
    // Identify the handler by the shard being recovered and the two nodes involved.
    return String.format("ShardRecoveryHandler{shardId=%s, sourceNode=%s, targetNode=%s}",
            request.shardId(), request.sourceNode(), request.targetNode());
}
}
| |
/*
* Copyright 2017, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.androidthings.bluetooth.audio;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.KeyEvent;
import com.google.android.things.bluetooth.BluetoothProfileManager;
import com.google.android.things.contrib.driver.button.Button;
import com.google.android.things.contrib.driver.button.ButtonInputDriver;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
/**
* Sample usage of the A2DP sink bluetooth profile. At startup, this activity sets the Bluetooth
* adapter in pairing mode for {@link #DISCOVERABLE_TIMEOUT_MS} ms.
*
* To re-enable pairing mode, press "p" on an attached keyboard, use "adb shell input keyevent 44"
* or press a button attached to the GPIO pin returned by {@link BoardDefaults#getGPIOForPairing()}
*
* To forcefully disconnect any connected A2DP device, press "d" on an attached keyboard, use
* "adb shell input keyevent 32" or press a button attached to the GPIO pin
* returned by {@link BoardDefaults#getGPIOForDisconnectAllBTDevices()}
*
* NOTE: While in pairing mode, pairing requests are auto-accepted - at this moment there's no
* way to block specific pairing attempts while in pairing mode. This is known limitation that is
* being worked on.
*
*/
public class A2dpSinkActivity extends Activity {
private static final String TAG = "A2dpSinkActivity";
private static final String ADAPTER_FRIENDLY_NAME = "My Android Things device";
private static final int DISCOVERABLE_TIMEOUT_MS = 300;
private static final int REQUEST_CODE_ENABLE_DISCOVERABLE = 100;
private static final String UTTERANCE_ID =
"com.example.androidthings.bluetooth.audio.UTTERANCE_ID";
private BluetoothAdapter mBluetoothAdapter;
private BluetoothProfile mA2DPSinkProxy;
private ButtonInputDriver mPairingButtonDriver;
private ButtonInputDriver mDisconnectAllButtonDriver;
private TextToSpeech mTtsEngine;
/**
* Handle an intent that is broadcast by the Bluetooth adapter whenever it changes its
* state (after calling enable(), for example).
* Action is {@link BluetoothAdapter#ACTION_STATE_CHANGED} and extras describe the old
* and the new states. You can use this intent to indicate that the device is ready to go.
*/
private final BroadcastReceiver mAdapterStateChangeReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
int oldState = A2dpSinkHelper.getPreviousAdapterState(intent);
int newState = A2dpSinkHelper.getCurrentAdapterState(intent);
Log.d(TAG, "Bluetooth Adapter changing state from " + oldState + " to " + newState);
if (newState == BluetoothAdapter.STATE_ON) {
Log.i(TAG, "Bluetooth Adapter is ready");
initA2DPSink();
}
}
};
/**
* Handle an intent that is broadcast by the Bluetooth A2DP sink profile whenever a device
* connects or disconnects to it.
* Action is {@link A2dpSinkHelper#ACTION_CONNECTION_STATE_CHANGED} and
* extras describe the old and the new connection states. You can use it to indicate that
* there's a device connected.
*/
private final BroadcastReceiver mSinkProfileStateChangeReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(A2dpSinkHelper.ACTION_CONNECTION_STATE_CHANGED)) {
int oldState = A2dpSinkHelper.getPreviousProfileState(intent);
int newState = A2dpSinkHelper.getCurrentProfileState(intent);
BluetoothDevice device = A2dpSinkHelper.getDevice(intent);
Log.d(TAG, "Bluetooth A2DP sink changing connection state from " + oldState +
" to " + newState + " device " + device);
if (device != null) {
String deviceName = Objects.toString(device.getName(), "a device");
if (newState == BluetoothProfile.STATE_CONNECTED) {
speak("Connected to " + deviceName);
} else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
speak("Disconnected from " + deviceName);
}
}
}
}
};
/**
* Handle an intent that is broadcast by the Bluetooth A2DP sink profile whenever a device
* starts or stops playing through the A2DP sink.
* Action is {@link A2dpSinkHelper#ACTION_PLAYING_STATE_CHANGED} and
* extras describe the old and the new playback states. You can use it to indicate that
* there's something playing. You don't need to handle the stream playback by yourself.
*/
private final BroadcastReceiver mSinkProfilePlaybackChangeReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(A2dpSinkHelper.ACTION_PLAYING_STATE_CHANGED)) {
int oldState = A2dpSinkHelper.getPreviousProfileState(intent);
int newState = A2dpSinkHelper.getCurrentProfileState(intent);
BluetoothDevice device = A2dpSinkHelper.getDevice(intent);
Log.d(TAG, "Bluetooth A2DP sink changing playback state from " + oldState +
" to " + newState + " device " + device);
if (device != null) {
if (newState == A2dpSinkHelper.STATE_PLAYING) {
Log.i(TAG, "Playing audio from device " + device.getAddress());
} else if (newState == A2dpSinkHelper.STATE_NOT_PLAYING) {
Log.i(TAG, "Stopped playing audio from " + device.getAddress());
}
}
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (mBluetoothAdapter == null) {
Log.w(TAG, "No default Bluetooth adapter. Device likely does not support bluetooth.");
return;
}
// We use Text-to-Speech to indicate status change to the user
initTts();
registerReceiver(mAdapterStateChangeReceiver, new IntentFilter(
BluetoothAdapter.ACTION_STATE_CHANGED));
registerReceiver(mSinkProfileStateChangeReceiver, new IntentFilter(
A2dpSinkHelper.ACTION_CONNECTION_STATE_CHANGED));
registerReceiver(mSinkProfilePlaybackChangeReceiver, new IntentFilter(
A2dpSinkHelper.ACTION_PLAYING_STATE_CHANGED));
if (mBluetoothAdapter.isEnabled()) {
Log.d(TAG, "Bluetooth Adapter is already enabled.");
initA2DPSink();
} else {
Log.d(TAG, "Bluetooth adapter not enabled. Enabling.");
mBluetoothAdapter.enable();
}
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_P:
// Enable Pairing mode (discoverable)
enableDiscoverable();
return true;
case KeyEvent.KEYCODE_D:
// Disconnect any currently connected devices
disconnectConnectedDevices();
return true;
}
return super.onKeyUp(keyCode, event);
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.d(TAG, "onDestroy");
try {
if (mPairingButtonDriver != null) mPairingButtonDriver.close();
} catch (IOException e) { /* close quietly */}
try {
if (mDisconnectAllButtonDriver != null) mDisconnectAllButtonDriver.close();
} catch (IOException e) { /* close quietly */}
unregisterReceiver(mAdapterStateChangeReceiver);
unregisterReceiver(mSinkProfileStateChangeReceiver);
unregisterReceiver(mSinkProfilePlaybackChangeReceiver);
if (mA2DPSinkProxy != null) {
mBluetoothAdapter.closeProfileProxy(A2dpSinkHelper.A2DP_SINK_PROFILE,
mA2DPSinkProxy);
}
if (mTtsEngine != null) {
mTtsEngine.stop();
mTtsEngine.shutdown();
}
// we intentionally leave the Bluetooth adapter enabled, so that other samples can use it
// without having to initialize it.
}
private void setupBTProfiles() {
BluetoothProfileManager bluetoothProfileManager = BluetoothProfileManager.getInstance();
List<Integer> enabledProfiles = bluetoothProfileManager.getEnabledProfiles();
if (!enabledProfiles.contains(A2dpSinkHelper.A2DP_SINK_PROFILE)) {
Log.d(TAG, "Enabling A2dp sink mode.");
List<Integer> toDisable = Arrays.asList(BluetoothProfile.A2DP);
List<Integer> toEnable = Arrays.asList(
A2dpSinkHelper.A2DP_SINK_PROFILE,
A2dpSinkHelper.AVRCP_CONTROLLER_PROFILE);
bluetoothProfileManager.enableAndDisableProfiles(toEnable, toDisable);
} else {
Log.d(TAG, "A2dp sink profile is enabled.");
}
}
/**
* Initiate the A2DP sink.
*/
private void initA2DPSink() {
if (mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled()) {
Log.e(TAG, "Bluetooth adapter not available or not enabled.");
return;
}
setupBTProfiles();
Log.d(TAG, "Set up Bluetooth Adapter name and profile");
mBluetoothAdapter.setName(ADAPTER_FRIENDLY_NAME);
mBluetoothAdapter.getProfileProxy(this, new BluetoothProfile.ServiceListener() {
@Override
public void onServiceConnected(int profile, BluetoothProfile proxy) {
mA2DPSinkProxy = proxy;
enableDiscoverable();
}
@Override
public void onServiceDisconnected(int profile) {
}
}, A2dpSinkHelper.A2DP_SINK_PROFILE);
configureButton();
}
/**
* Enable the current {@link BluetoothAdapter} to be discovered (available for pairing) for
* the next {@link #DISCOVERABLE_TIMEOUT_MS} ms.
*/
private void enableDiscoverable() {
Log.d(TAG, "Registering for discovery.");
Intent discoverableIntent =
new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE);
discoverableIntent.putExtra(BluetoothAdapter.EXTRA_DISCOVERABLE_DURATION,
DISCOVERABLE_TIMEOUT_MS);
startActivityForResult(discoverableIntent, REQUEST_CODE_ENABLE_DISCOVERABLE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_ENABLE_DISCOVERABLE) {
Log.d(TAG, "Enable discoverable returned with result " + resultCode);
// ResultCode, as described in BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE, is either
// RESULT_CANCELED or the number of milliseconds that the device will stay in
// discoverable mode. In a regular Android device, the user will see a popup requesting
// authorization, and if they cancel, RESULT_CANCELED is returned. In Android Things,
// on the other hand, the authorization for pairing is always given without user
// interference, so RESULT_CANCELED should never be returned.
if (resultCode == RESULT_CANCELED) {
Log.e(TAG, "Enable discoverable has been cancelled by the user. " +
"This should never happen in an Android Things device.");
return;
}
Log.i(TAG, "Bluetooth adapter successfully set to discoverable mode. " +
"Any A2DP source can find it with the name " + ADAPTER_FRIENDLY_NAME +
" and pair for the next " + DISCOVERABLE_TIMEOUT_MS + " ms. " +
"Try looking for it on your phone, for example.");
// There is nothing else required here, since Android framework automatically handles
// A2DP Sink. Most relevant Bluetooth events, like connection/disconnection, will
// generate corresponding broadcast intents or profile proxy events that you can
// listen to and react appropriately.
speak("Bluetooth audio sink is discoverable for " + DISCOVERABLE_TIMEOUT_MS +
" milliseconds. Look for a device named " + ADAPTER_FRIENDLY_NAME);
}
}
private void disconnectConnectedDevices() {
if (mA2DPSinkProxy == null || mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled()) {
return;
}
speak("Disconnecting devices");
for (BluetoothDevice device: mA2DPSinkProxy.getConnectedDevices()) {
Log.i(TAG, "Disconnecting device " + device);
A2dpSinkHelper.disconnect(mA2DPSinkProxy, device);
}
}
private void configureButton() {
try {
mPairingButtonDriver = new ButtonInputDriver(BoardDefaults.getGPIOForPairing(),
Button.LogicState.PRESSED_WHEN_LOW, KeyEvent.KEYCODE_P);
mPairingButtonDriver.register();
mDisconnectAllButtonDriver = new ButtonInputDriver(
BoardDefaults.getGPIOForDisconnectAllBTDevices(),
Button.LogicState.PRESSED_WHEN_LOW, KeyEvent.KEYCODE_D);
mDisconnectAllButtonDriver.register();
} catch (IOException e) {
Log.w(TAG, "Could not register GPIO button drivers. Use keyboard events to trigger " +
"the functions instead", e);
}
}
private void initTts() {
mTtsEngine = new TextToSpeech(A2dpSinkActivity.this,
new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
mTtsEngine.setLanguage(Locale.US);
} else {
Log.w(TAG, "Could not open TTS Engine (onInit status=" + status
+ "). Ignoring text to speech");
mTtsEngine = null;
}
}
});
}
private void speak(String utterance) {
Log.i(TAG, utterance);
if (mTtsEngine != null) {
mTtsEngine.speak(utterance, TextToSpeech.QUEUE_ADD, null, UTTERANCE_ID);
}
}
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.cluster.impl;
import com.hazelcast.cluster.ClusterState;
import com.hazelcast.internal.cluster.Joiner;
import com.hazelcast.config.Config;
import com.hazelcast.cluster.Member;
import com.hazelcast.core.MemberLeftException;
import com.hazelcast.instance.impl.Node;
import com.hazelcast.instance.impl.NodeExtension;
import com.hazelcast.internal.cluster.ClusterService;
import com.hazelcast.internal.cluster.impl.SplitBrainJoinMessage.SplitBrainMergeCheckResult;
import com.hazelcast.internal.cluster.impl.operations.MergeClustersOp;
import com.hazelcast.internal.cluster.impl.operations.SplitBrainMergeValidationOp;
import com.hazelcast.logging.ILogger;
import com.hazelcast.cluster.Address;
import com.hazelcast.internal.nio.Connection;
import com.hazelcast.spi.impl.NodeEngine;
import com.hazelcast.spi.impl.operationservice.Operation;
import com.hazelcast.spi.impl.operationservice.OperationService;
import com.hazelcast.spi.properties.ClusterProperty;
import com.hazelcast.internal.util.Clock;
import com.hazelcast.internal.util.FutureUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.cluster.ClusterState.FROZEN;
import static com.hazelcast.cluster.ClusterState.IN_TRANSITION;
import static com.hazelcast.instance.EndpointQualifier.MEMBER;
import static com.hazelcast.spi.impl.operationservice.OperationResponseHandlerFactory.createEmptyResponseHandler;
import static com.hazelcast.internal.util.FutureUtil.waitWithDeadline;
import static java.lang.Thread.currentThread;
public abstract class AbstractJoiner
implements Joiner {
// Number of postJoin() attempts after which this node promotes itself to master.
private static final int JOIN_TRY_COUNT = 5;
private static final int SPLIT_BRAIN_MERGE_TIMEOUT_SECONDS = 30;
private static final int SPLIT_BRAIN_JOIN_CHECK_TIMEOUT_SECONDS = 10;
private static final long MIN_WAIT_BEFORE_JOIN_SECONDS = 10;
// Polling interval and overall budget while waiting for a split-brain connection.
private static final long SPLIT_BRAIN_SLEEP_TIME_MILLIS = 10;
private static final long SPLIT_BRAIN_CONN_TIMEOUT_MILLIS = 5000;
protected final Config config;
protected final Node node;
protected final ClusterServiceImpl clusterService;
protected final ILogger logger;
// Blacklisted endpoints. The Boolean value marks whether the blacklist entry is
// permanent (TRUE) or temporary (FALSE); only temporary entries can be unblacklisted.
protected final ConcurrentMap<Address, Boolean> blacklistedAddresses = new ConcurrentHashMap<>();
protected final ClusterJoinManager clusterJoinManager;
// Timestamp of the current join attempt; reset() restarts the clock.
private final AtomicLong joinStartTime = new AtomicLong(Clock.currentTimeMillis());
private final AtomicInteger tryCount = new AtomicInteger(0);
private final long mergeNextRunDelayMs;
private volatile Address targetAddress;
// Ignores MemberLeftException (expected during merges); logs everything else.
private final FutureUtil.ExceptionHandler splitBrainMergeExceptionHandler = new FutureUtil.ExceptionHandler() {
@Override
public void handleException(Throwable throwable) {
if (throwable instanceof MemberLeftException) {
return;
}
logger.warning("Problem while waiting for merge operation result", throwable);
}
};
/**
 * Creates a joiner bound to the given node, caching its config, cluster service,
 * join manager and the configured merge retry delay.
 *
 * @param node the local Hazelcast node
 */
public AbstractJoiner(Node node) {
this.node = node;
this.logger = node.loggingService.getLogger(getClass());
this.config = node.config;
this.clusterService = node.getClusterService();
this.clusterJoinManager = clusterService.getClusterJoinManager();
this.mergeNextRunDelayMs = node.getProperties().getMillis(ClusterProperty.MERGE_NEXT_RUN_DELAY_SECONDS);
}
/** Returns the start timestamp (epoch millis) of the current join attempt. */
@Override
public final long getStartTime() {
return joinStartTime.get();
}
/** Sets an explicit member address this joiner should target when joining. */
@Override
public void setTargetAddress(Address targetAddress) {
this.targetAddress = targetAddress;
}
/**
 * Adds the address to the blacklist, marking it permanent or temporary.
 * NOTE(review): the info log is emitted even when the address was already
 * blacklisted (putIfAbsent is a no-op then) — confirm this is intended.
 */
@Override
public void blacklist(Address address, boolean permanent) {
logger.info(address + " is added to the blacklist.");
blacklistedAddresses.putIfAbsent(address, permanent);
}
/**
 * Lifts a temporary blacklist entry for the given address.
 * Permanent entries (value {@code Boolean.TRUE}) are never removed here.
 *
 * @return {@code true} if a temporary entry was removed
 */
@Override
public boolean unblacklist(Address address) {
    // remove(key, value) only succeeds when the entry is the temporary marker.
    boolean removed = blacklistedAddresses.remove(address, Boolean.FALSE);
    if (removed) {
        logger.info(address + " is removed from the blacklist.");
    }
    return removed;
}
/** Returns whether the address is blacklisted, temporarily or permanently. */
@Override
public boolean isBlacklisted(Address address) {
return blacklistedAddresses.containsKey(address);
}
/** Performs the discovery-specific join; implemented by concrete joiner subclasses. */
public abstract void doJoin();
/**
 * Runs the join process: clears stale blacklist state, delegates to
 * {@link #doJoin()}, retries once after a forced hot-restart reset if the member
 * was excluded, then finishes with {@code postJoin()} housekeeping.
 */
@Override
public final void join() {
blacklistedAddresses.clear();
doJoin();
if (!clusterService.isJoined() && isMemberExcludedFromHotRestart()) {
logger.warning("Could not join to the cluster because hot restart data must be reset.");
node.getNodeExtension().getInternalHotRestartService().forceStartBeforeJoin();
// Reset join bookkeeping before the second (and final) attempt.
reset();
doJoin();
}
postJoin();
}
/** Whether a join attempt should be retried: node running, not yet joined, not excluded. */
protected final boolean shouldRetry() {
return node.isRunning() && !clusterService.isJoined() && !isMemberExcludedFromHotRestart();
}
/**
 * Returns true when start has not completed yet and the hot-restart service has
 * excluded this member (its persisted data must be reset before it can join).
 */
private boolean isMemberExcludedFromHotRestart() {
final NodeExtension nodeExtension = node.getNodeExtension();
return !nodeExtension.isStartCompleted()
&& nodeExtension.getInternalHotRestartService().isMemberExcluded(node.getThisAddress(), node.getThisUuid());
}
/**
 * Post-join housekeeping: clears the blacklist, promotes this member to master
 * once the try count hits {@code JOIN_TRY_COUNT}, and (for non-masters) waits
 * for connections to all members.
 */
private void postJoin() {
blacklistedAddresses.clear();
if (logger.isFineEnabled()) {
logger.fine("PostJoin master: " + clusterService.getMasterAddress() + ", isMaster: " + clusterService.isMaster());
}
if (!node.isRunning()) {
return;
}
// Fires exactly once, on the JOIN_TRY_COUNT-th attempt (== rather than >=).
if (tryCount.incrementAndGet() == JOIN_TRY_COUNT) {
logger.warning("Join try count exceed limit, setting this node as master!");
clusterJoinManager.setThisMemberAsMaster();
}
if (clusterService.isJoined()) {
if (!clusterService.isMaster()) {
ensureConnectionToAllMembers();
}
if (clusterService.getSize() == 1) {
clusterService.printMemberList();
}
}
}
/**
 * Polls once per second, for up to {@code CONNECT_ALL_WAIT_SECONDS}, until a
 * connection exists to every non-local member. Gives up silently on timeout.
 */
private void ensureConnectionToAllMembers() {
if (clusterService.isJoined()) {
logger.fine("Waiting for all connections");
int connectAllWaitSeconds = node.getProperties().getSeconds(ClusterProperty.CONNECT_ALL_WAIT_SECONDS);
int checkCount = 0;
while (checkCount++ < connectAllWaitSeconds) {
boolean allConnected = true;
Collection<Member> members = clusterService.getMembers();
for (Member member : members) {
// getOrConnect(...) returns null while the connection is not established yet.
if (!member.localMember() && node.getServer().getConnectionManager(MEMBER)
.getOrConnect(member.getAddress()) == null) {
allConnected = false;
if (logger.isFineEnabled()) {
logger.fine("Not-connected to " + member.getAddress());
}
}
}
if (allConnected) {
break;
}
try {
//noinspection BusyWait
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException ignored) {
// Restore the interrupt flag; the loop exits naturally on the next check.
currentThread().interrupt();
}
}
}
}
/** Maximum total time (millis) allowed for the join process, from configuration. */
protected final long getMaxJoinMillis() {
return node.getProperties().getMillis(ClusterProperty.MAX_JOIN_SECONDS);
}
/**
 * Maximum time (millis) to find the master node: the configured
 * MAX_WAIT_SECONDS_BEFORE_JOIN plus a fixed {@code MIN_WAIT_BEFORE_JOIN_SECONDS}
 * margin so it is always strictly greater than the wait-before-join property.
 */
protected final long getMaxJoinTimeToMasterNode() {
// max join time to found master node,
// this should be significantly greater than MAX_WAIT_SECONDS_BEFORE_JOIN property
// hence we add 10 seconds more
return TimeUnit.SECONDS.toMillis(MIN_WAIT_BEFORE_JOIN_SECONDS)
+ node.getProperties().getMillis(ClusterProperty.MAX_WAIT_SECONDS_BEFORE_JOIN);
}
/**
 * Sends a split brain join request to the target address and checks the response to see if this node should merge
 * to the target address.
 *
 * @param target  address of the other split-brain cluster's member
 * @param request the join message describing this side of the split
 * @return the merge decision derived from the (possibly null) response
 */
protected final SplitBrainMergeCheckResult sendSplitBrainJoinMessageAndCheckResponse(Address target,
SplitBrainJoinMessage request) {
SplitBrainJoinMessage response = sendSplitBrainJoinMessage(target, request);
return clusterService.getClusterJoinManager().shouldMerge(response);
}
/**
 * Sends a split brain join request to the target address and returns the response.
 * Returns {@code null} when no connection could be established within
 * {@code SPLIT_BRAIN_CONN_TIMEOUT_MILLIS}, on interruption, or on invocation failure.
 */
private SplitBrainJoinMessage sendSplitBrainJoinMessage(Address target, SplitBrainJoinMessage request) {
if (logger.isFineEnabled()) {
logger.fine("Sending SplitBrainJoinMessage to " + target);
}
Connection conn = node.getServer().getConnectionManager(MEMBER).getOrConnect(target, true);
long timeout = SPLIT_BRAIN_CONN_TIMEOUT_MILLIS;
// Busy-wait (with short sleeps) for the connection to come up, within the budget.
while (conn == null) {
timeout -= SPLIT_BRAIN_SLEEP_TIME_MILLIS;
if (timeout < 0) {
logger.fine("Returning null timeout<0, " + timeout);
return null;
}
try {
//noinspection BusyWait
Thread.sleep(SPLIT_BRAIN_SLEEP_TIME_MILLIS);
} catch (InterruptedException e) {
currentThread().interrupt();
return null;
}
conn = node.getServer().getConnectionManager(MEMBER).get(target);
}
NodeEngine nodeEngine = node.nodeEngine;
// NOTE(review): raw Future — presumably Future<Object>; a typed declaration
// would need the invocation API's generic signature confirmed.
Future future = nodeEngine.getOperationService().createInvocationBuilder(ClusterServiceImpl.SERVICE_NAME,
new SplitBrainMergeValidationOp(request), target)
.setTryCount(1).invoke();
try {
return (SplitBrainJoinMessage) future.get(SPLIT_BRAIN_JOIN_CHECK_TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (TimeoutException e) {
logger.fine("Timeout during join check!", e);
} catch (Exception e) {
logger.warning("Error during join check!", e);
}
return null;
}
/** Resets join bookkeeping: restarts the join clock and zeroes the try counter. */
@Override
public void reset() {
joinStartTime.set(Clock.currentTimeMillis());
tryCount.set(0);
}
/**
 * Starts merging this cluster into the cluster at {@code targetAddress}:
 * freezes the local cluster state, broadcasts {@link MergeClustersOp} to all
 * remote members, waits (bounded) for their acks, then runs the merge locally.
 */
protected void startClusterMerge(Address targetAddress, int expectedMemberListVersion) {
ClusterServiceImpl clusterService = node.clusterService;
if (!prepareClusterState(clusterService, expectedMemberListVersion)) {
return;
}
OperationService operationService = node.nodeEngine.getOperationService();
Collection<Member> memberList = clusterService.getMembers();
Collection<Future> futures = new ArrayList<>(memberList.size());
for (Member member : memberList) {
if (!member.localMember()) {
Operation op = new MergeClustersOp(targetAddress);
Future<Object> future =
operationService.invokeOnTarget(ClusterServiceImpl.SERVICE_NAME, op, member.getAddress());
futures.add(future);
}
}
waitWithDeadline(futures, SPLIT_BRAIN_MERGE_TIMEOUT_SECONDS, TimeUnit.SECONDS, splitBrainMergeExceptionHandler);
// The local member merges last, after the remote members were told to merge.
Operation op = new MergeClustersOp(targetAddress);
op.setNodeEngine(node.nodeEngine).setService(clusterService).setOperationResponseHandler(createEmptyResponseHandler());
operationService.run(op);
}
/**
 * Prepares the cluster state for cluster merge by changing it to {@link ClusterState#FROZEN}. It expects the current
 * cluster state to be {@link ClusterState#ACTIVE} or {@link ClusterState#NO_MIGRATION}.
 * The method will keep trying to change the cluster state until {@link ClusterProperty#MERGE_NEXT_RUN_DELAY_SECONDS} elapses
 * or until the sleep period between two attempts has been interrupted.
 *
 * @param clusterService the cluster service used for state change
 * @param expectedMemberListVersion member list version this merge attempt was based on
 * @return true if the cluster state was successfully prepared
 */
private boolean prepareClusterState(ClusterServiceImpl clusterService, int expectedMemberListVersion) {
if (!preCheckClusterState(clusterService)) {
return false;
}
long until = Clock.currentTimeMillis() + mergeNextRunDelayMs;
while (Clock.currentTimeMillis() < until) {
ClusterState clusterState = clusterService.getClusterState();
// Already in a frozen-like state (no migration, no join, not transitioning):
// succeed only if the member list has not changed in the meantime.
if (!clusterState.isMigrationAllowed() && !clusterState.isJoinAllowed() && clusterState != IN_TRANSITION) {
return (clusterService.getMemberListVersion() == expectedMemberListVersion);
}
if (clusterService.getMemberListVersion() != expectedMemberListVersion) {
logger.warning("Could not change cluster state to FROZEN because local member list version: "
+ clusterService.getMemberListVersion() + " is different than expected member list version: "
+ expectedMemberListVersion);
return false;
}
// If state is IN_TRANSITION, then skip trying to change state.
// Otherwise transaction will print noisy warning logs.
if (clusterState != IN_TRANSITION) {
try {
clusterService.changeClusterState(FROZEN);
return verifyMemberListVersionAfterStateChange(clusterService, clusterState, expectedMemberListVersion);
} catch (Exception e) {
String error = e.getClass().getName() + ": " + e.getMessage();
logger.warning("While changing cluster state to FROZEN! " + error);
}
}
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
logger.warning("Interrupted while preparing cluster for merge!");
// restore interrupt flag
Thread.currentThread().interrupt();
return false;
}
}
logger.warning("Could not change cluster state to FROZEN in time. Postponing merge process until next attempt.");
return false;
}
/**
 * Verifies, after a successful FROZEN transition, that the member list version
 * is still the expected one; if not, the previous cluster state is restored.
 *
 * @return true when the version still matches, false otherwise
 */
private boolean verifyMemberListVersionAfterStateChange(ClusterServiceImpl clusterService, ClusterState clusterState,
                                                        int expectedMemberListVersion) {
    if (clusterService.getMemberListVersion() == expectedMemberListVersion) {
        return true;
    }
    // Membership changed mid-flight: undo the state change and report failure.
    try {
        logger.warning("Reverting cluster state back to " + clusterState + " because member list version: "
                + clusterService.getMemberListVersion() + " is different than expected member list version: "
                + expectedMemberListVersion);
        clusterService.changeClusterState(clusterState);
    } catch (Exception e) {
        String failure = e.getClass().getName() + ": " + e.getMessage();
        logger.warning("While reverting cluster state to " + clusterState + "! " + failure);
    }
    return false;
}
/**
 * Checks whether the current cluster state permits members to join, i.e. it is
 * {@link ClusterState#ACTIVE} or {@link ClusterState#NO_MIGRATION}.
 *
 * @param clusterService the cluster service to query
 * @return true when join is allowed; false otherwise (a warning is logged)
 */
private boolean preCheckClusterState(ClusterService clusterService) {
    ClusterState current = clusterService.getClusterState();
    if (current.isJoinAllowed()) {
        return true;
    }
    logger.warning("Could not prepare cluster state since it has been changed to " + current);
    return false;
}
/** Hands out the stored target address exactly once, clearing it afterwards. */
protected Address getTargetAddress() {
    final Address result = this.targetAddress;
    this.targetAddress = null;
    return result;
}
}
| |
package org.threeveed.core;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.mail.Address;
import javax.mail.BodyPart;
import javax.mail.Message.RecipientType;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.Part;
import javax.mail.Session;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses an RFC 822 / EML file via JavaMail and exposes its headers, plain-text
 * body and attachment names through simple accessors. Parsing happens eagerly
 * in the constructor; failures other than missing file / malformed message are
 * logged and tolerated (best-effort).
 */
public class EmlParser {

    private static final Logger log = LoggerFactory.getLogger(EmlParser.class);

    private File emailFile;
    // NOTE(review): only written by setTo(); getTo() reads _to instead — kept for API compatibility.
    private ArrayList<String> to;
    private Address[] _bcc;
    private Address[] _cc;
    private Address[] _to;
    private Address[] _from;
    private String _subject;
    private Object _content;
    private MimeMessage email;
    private List<String> _attachments;
    private Date _date;
    private Date _sentDate;
    private Map<String, String> attachmentsContent;
    // Used to synthesise a name for attachments that do not carry one.
    private int attachmentSeq = 0;

    /**
     * Creates the parser and immediately parses the given file.
     *
     * @param emailFile the .eml file to parse
     * @throws Exception if the file is missing or the message is malformed
     */
    public EmlParser(File emailFile) throws Exception {
        this.emailFile = emailFile;
        _attachments = new ArrayList<>();
        // Be lenient with malformed addresses and encoded MIME parameters.
        System.setProperty("mail.mime.address.strict", "false");
        System.setProperty("mail.mime.decodeparameters", "true");
        attachmentsContent = new HashMap<>();
        parseEmail();
    }

    /** Reads the file into a {@link MimeMessage} and caches the commonly used headers. */
    private void parseEmail() throws Exception {
        java.util.Properties properties = System.getProperties();
        Session session = Session.getDefaultInstance(properties);
        // try-with-resources guarantees the stream is closed on every path
        // (the original closed it manually in a finally block).
        try (FileInputStream fis = new FileInputStream(emailFile)) {
            email = new MimeMessage(session, fis);
            _bcc = email.getRecipients(RecipientType.BCC);
            _cc = email.getRecipients(RecipientType.CC);
            _to = email.getRecipients(RecipientType.TO);
            _from = email.getFrom();
            _subject = email.getSubject();
            _content = email.getContent();
            _date = email.getReceivedDate();
            _sentDate = email.getSentDate();
        } catch (MessagingException e) {
            throw new IllegalStateException("illegal state issue", e);
        } catch (FileNotFoundException e) {
            // Fixed duplicated word in the original message ("issue issue").
            throw new IllegalStateException("file not found issue: "
                    + emailFile.getAbsolutePath(), e);
        } catch (Exception e) {
            // Best-effort: keep the parser usable even if content extraction fails.
            log.error("Problem parsing eml file", e);
        }
    }

    /** Converts an address array to its string forms; null-safe, never returns null. */
    private List<String> getAddressAsList(Address[] address) {
        List<String> result = new ArrayList<>();
        if (address != null) {
            for (Address a : address) {
                result.add(a.toString());
            }
        }
        return result;
    }

    /** @return the sender addresses as strings (possibly empty) */
    public List<String> getFrom() {
        return getAddressAsList(_from);
    }

    /** @return the "To" recipients; method name kept (typo and all) for API compatibility */
    public List<String> getRecepient() {
        return getAddressAsList(_to);
    }

    /** @return the "Cc" recipients as strings (possibly empty) */
    public List<String> getCC() {
        return getAddressAsList(_cc);
    }

    /** @return the "Bcc" recipients as strings (possibly empty) */
    public List<String> getBCC() {
        return getAddressAsList(_bcc);
    }

    /** @return the message subject, or null if parsing failed */
    public String getSubject() {
        return _subject;
    }

    /** @return the received date, or null if the header is absent */
    public Date getDate() {
        return _date;
    }

    /**
     * Returns the plain-text content of the message: the body itself for a
     * simple message, or the concatenated text/plain parts of a multipart one.
     * As a side effect, attachment names encountered are recorded.
     */
    public String getContent() throws MessagingException, IOException {
        if (_content instanceof String) {
            return _content.toString();
        }
        if (_content instanceof MimeMultipart) {
            MimeMultipart multipart = (MimeMultipart) _content;
            StringBuilder text = new StringBuilder();
            for (int i = 0; i < multipart.getCount(); i++) {
                text.append(dumpPart(multipart.getBodyPart(i)));
            }
            return text.toString();
        }
        return "";
    }

    /**
     * Recursively extracts text/plain content from a part; non-text leaf parts
     * are recorded as attachments (by file name, or a synthesised "attach-N").
     */
    private String dumpPart(Part p) throws MessagingException, IOException {
        StringBuilder buf = new StringBuilder();
        if (p.isMimeType("text/plain")) {
            buf.append(p.getContent());
        } else if (p.isMimeType("multipart/*")) {
            Multipart mp = (Multipart) p.getContent();
            int count = mp.getCount();
            for (int i = 0; i < count; i++) {
                BodyPart bp = mp.getBodyPart(i);
                if (bp.isMimeType("text/plain")) {
                    buf.append(dumpPart(bp));
                }
            }
        } else if (p.isMimeType("message/rfc822")) {
            // Nested message: recurse into its content.
            buf.append(dumpPart((Part) p.getContent()));
        } else {
            String disp = null;
            try {
                disp = p.getDisposition();
            } catch (Exception ignored) {
                // Malformed disposition header: treat as absent.
            }
            String filename = "attach-" + (attachmentSeq++);
            try {
                // Only replace the synthesised name when a real one exists
                // (the original let a null file name clobber it).
                String realName = p.getFileName();
                if (realName != null) {
                    filename = realName;
                }
            } catch (Exception e) {
                log.error("Problem getting the real attachment name", e);
            }
            if (disp == null || disp.equalsIgnoreCase(Part.ATTACHMENT)) {
                log.debug("Adding attachment: {}", filename);
                _attachments.add(filename);
            }
        }
        return buf.toString();
    }

    /**
     * @return the emailFile
     */
    public File getEmailFile() {
        return emailFile;
    }

    /**
     * @param emailFile the emailFile to set (does not re-parse)
     */
    public void setEmailFile(File emailFile) {
        this.emailFile = emailFile;
    }

    /**
     * @param to the to to set (stored but not used by {@link #getTo()})
     */
    public void setTo(ArrayList<String> to) {
        this.to = to;
    }

    /**
     * @return the "To" recipients parsed from the message
     */
    public List<String> getTo() {
        return getAddressAsList(_to);
    }

    /** @return attachment names collected while dumping parts */
    public List<String> getAttachmentNames() {
        return _attachments;
    }

    /** @return the sent date, or null if the header is absent */
    public Date getSentDate() {
        return this._sentDate;
    }

    /**
     * Saves every attachment of a multipart message to the current working
     * directory, using the attachment's own file name. Plain-text messages
     * carry no attachments, so nothing is done for them.
     */
    public void saveAttachments() throws MessagingException, IOException {
        if (email.isMimeType("text/*")) {
            // no attachments there - this is just the email itself
            return;
        }
        // "multipart/alternative" is matched by "multipart/*", so one branch
        // replaces the duplicated code flagged by the original TODO.
        if (email.isMimeType("multipart/*")) {
            Multipart mp = (Multipart) email.getContent();
            for (int i = 0; i < mp.getCount(); i++) {
                MimeBodyPart bodyPart = (MimeBodyPart) mp.getBodyPart(i);
                String attachmentFileName = bodyPart.getFileName();
                if (attachmentFileName != null) {
                    bodyPart.saveFile(attachmentFileName);
                }
            }
        }
    }

    /** @return map of attachment name to extracted content (never populated here) */
    public Map<String, String> getAttachmentsContent() {
        return attachmentsContent;
    }
}
| |
package com.intuso.housemate.client.proxy.bridge.v1_0;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.intuso.housemate.client.api.bridge.v1_0.object.HardwareMapper;
import com.intuso.housemate.client.api.internal.*;
import com.intuso.housemate.client.api.internal.Runnable;
import com.intuso.housemate.client.api.internal.object.Hardware;
import com.intuso.housemate.client.api.internal.object.view.HardwareView;
import com.intuso.housemate.client.proxy.internal.ChildUtil;
import com.intuso.housemate.client.v1_0.messaging.api.Sender;
import com.intuso.utilities.collection.ManagedCollectionFactory;
import org.slf4j.Logger;
/**
* Created by tomc on 28/11/16.
*/
/**
 * Bridge exposing a v1.0 hardware object through the internal API. It creates
 * one child bridge per child object and, on init, maps each child's v1.0 name
 * to its internal equivalent. The string constants used in initChildren and
 * getChild must stay exactly in sync with both API versions.
 */
public class ProxyHardwareBridge
        extends ProxyObjectBridge<com.intuso.housemate.client.v1_0.api.object.Hardware.Data, Hardware.Data, Hardware.Listener<? super ProxyHardwareBridge>, HardwareView>
        implements Hardware<ProxyCommandBridge,
        ProxyCommandBridge,
        ProxyCommandBridge,
        ProxyValueBridge,
        ProxyValueBridge,
        ProxyPropertyBridge,
        ProxyValueBridge,
        ProxyListBridge<ProxyCommandBridge>,
        ProxyListBridge<ProxyValueBridge>,
        ProxyListBridge<ProxyPropertyBridge>,
        ProxyListBridge<ProxyDeviceConnectedBridge>,
        ProxyHardwareBridge> {

    // One child bridge per child object of the hardware.
    private final ProxyCommandBridge renameCommand;
    private final ProxyCommandBridge removeCommand;
    private final ProxyValueBridge runningValue;
    private final ProxyCommandBridge startCommand;
    private final ProxyCommandBridge stopCommand;
    private final ProxyValueBridge errorValue;
    private final ProxyPropertyBridge driverProperty;
    private final ProxyValueBridge driverLoadedValue;
    private final ProxyListBridge<ProxyCommandBridge> commands;
    private final ProxyListBridge<ProxyValueBridge> values;
    private final ProxyListBridge<ProxyPropertyBridge> properties;
    private final ProxyListBridge<ProxyDeviceConnectedBridge> devices;

    /**
     * Creates the bridge and all child bridges. Each child gets a logger scoped
     * to its ID; the children are not wired to messaging until initChildren runs.
     */
    @Inject
    protected ProxyHardwareBridge(@Assisted Logger logger,
                                  HardwareMapper hardwareMapper,
                                  ManagedCollectionFactory managedCollectionFactory,
                                  com.intuso.housemate.client.messaging.api.internal.Receiver.Factory internalReceiverFactory,
                                  Sender.Factory v1_0SenderFactory,
                                  Factory<ProxyCommandBridge> commandFactory,
                                  Factory<ProxyListBridge<ProxyCommandBridge>> commandsFactory,
                                  Factory<ProxyValueBridge> valueFactory,
                                  Factory<ProxyListBridge<ProxyValueBridge>> valuesFactory,
                                  Factory<ProxyPropertyBridge> propertyFactory,
                                  Factory<ProxyListBridge<ProxyPropertyBridge>> propertiesFactory,
                                  Factory<ProxyListBridge<ProxyDeviceConnectedBridge>> devicesFactory) {
        super(logger, Hardware.Data.class, hardwareMapper, managedCollectionFactory, internalReceiverFactory, v1_0SenderFactory);
        renameCommand = commandFactory.create(ChildUtil.logger(logger, Renameable.RENAME_ID));
        removeCommand = commandFactory.create(ChildUtil.logger(logger, Removeable.REMOVE_ID));
        runningValue = valueFactory.create(ChildUtil.logger(logger, Runnable.RUNNING_ID));
        startCommand = commandFactory.create(ChildUtil.logger(logger, Runnable.START_ID));
        stopCommand = commandFactory.create(ChildUtil.logger(logger, Runnable.STOP_ID));
        errorValue = valueFactory.create(ChildUtil.logger(logger, Failable.ERROR_ID));
        driverProperty = propertyFactory.create(ChildUtil.logger(logger, UsesDriver.DRIVER_ID));
        driverLoadedValue = valueFactory.create(ChildUtil.logger(logger, UsesDriver.DRIVER_LOADED_ID));
        commands = commandsFactory.create(ChildUtil.logger(logger, Hardware.COMMANDS_ID));
        values = valuesFactory.create(ChildUtil.logger(logger, Hardware.VALUES_ID));
        properties = propertiesFactory.create(ChildUtil.logger(logger, Hardware.PROPERTIES_ID));
        devices = devicesFactory.create(ChildUtil.logger(logger, Hardware.DEVICES_ID));
    }

    /**
     * Wires every child bridge to messaging, pairing each v1.0 child name
     * (built from versionName) with the matching internal child name.
     */
    @Override
    protected void initChildren(String versionName, String internalName) {
        super.initChildren(versionName, internalName);
        renameCommand.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Renameable.RENAME_ID),
                ChildUtil.name(internalName, Renameable.RENAME_ID)
        );
        removeCommand.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Removeable.REMOVE_ID),
                ChildUtil.name(internalName, Removeable.REMOVE_ID)
        );
        runningValue.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Runnable.RUNNING_ID),
                ChildUtil.name(internalName, Runnable.RUNNING_ID)
        );
        stopCommand.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Runnable.STOP_ID),
                ChildUtil.name(internalName, Runnable.STOP_ID)
        );
        startCommand.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Runnable.START_ID),
                ChildUtil.name(internalName, Runnable.START_ID)
        );
        errorValue.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.Failable.ERROR_ID),
                ChildUtil.name(internalName, Failable.ERROR_ID)
        );
        driverProperty.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.UsesDriver.DRIVER_ID),
                ChildUtil.name(internalName, UsesDriver.DRIVER_ID)
        );
        driverLoadedValue.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.UsesDriver.DRIVER_LOADED_ID),
                ChildUtil.name(internalName, UsesDriver.DRIVER_LOADED_ID)
        );
        commands.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.object.Hardware.COMMANDS_ID),
                ChildUtil.name(internalName, Hardware.COMMANDS_ID)
        );
        values.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.object.Hardware.VALUES_ID),
                ChildUtil.name(internalName, Hardware.VALUES_ID)
        );
        properties.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.object.Hardware.PROPERTIES_ID),
                ChildUtil.name(internalName, Hardware.PROPERTIES_ID)
        );
        devices.init(
                com.intuso.housemate.client.proxy.internal.ChildUtil.name(versionName, com.intuso.housemate.client.v1_0.api.object.Hardware.DEVICES_ID),
                ChildUtil.name(internalName, Hardware.DEVICES_ID)
        );
    }

    /** Detaches every child bridge from messaging; mirror image of initChildren. */
    @Override
    protected void uninitChildren() {
        super.uninitChildren();
        renameCommand.uninit();
        removeCommand.uninit();
        runningValue.uninit();
        startCommand.uninit();
        stopCommand.uninit();
        errorValue.uninit();
        driverProperty.uninit();
        driverLoadedValue.uninit();
        commands.uninit();
        values.uninit();
        properties.uninit();
        devices.uninit();
    }

    @Override
    public ProxyCommandBridge getRenameCommand() {
        return renameCommand;
    }

    @Override
    public ProxyCommandBridge getRemoveCommand() {
        return removeCommand;
    }

    @Override
    public ProxyValueBridge getRunningValue() {
        return runningValue;
    }

    @Override
    public ProxyCommandBridge getStartCommand() {
        return startCommand;
    }

    @Override
    public ProxyCommandBridge getStopCommand() {
        return stopCommand;
    }

    @Override
    public ProxyValueBridge getErrorValue() {
        return errorValue;
    }

    @Override
    public ProxyPropertyBridge getDriverProperty() {
        return driverProperty;
    }

    @Override
    public ProxyValueBridge getDriverLoadedValue() {
        return driverLoadedValue;
    }

    @Override
    public ProxyListBridge<ProxyCommandBridge> getCommands() {
        return commands;
    }

    @Override
    public ProxyListBridge<ProxyValueBridge> getValues() {
        return values;
    }

    @Override
    public ProxyListBridge<ProxyPropertyBridge> getProperties() {
        return properties;
    }

    // NOTE(review): no @Override here, unlike the other getters — presumably not
    // part of the Hardware interface; confirm before adding one.
    public ProxyListBridge<ProxyDeviceConnectedBridge> getDeviceConnecteds() {
        return devices;
    }

    /**
     * Looks up a child bridge by its ID constant; returns null for unknown IDs.
     */
    @Override
    public ProxyObjectBridge<?, ?, ?, ?> getChild(String id) {
        if(RENAME_ID.equals(id))
            return renameCommand;
        else if(REMOVE_ID.equals(id))
            return removeCommand;
        else if(RUNNING_ID.equals(id))
            return runningValue;
        else if(START_ID.equals(id))
            return startCommand;
        else if(STOP_ID.equals(id))
            return stopCommand;
        else if(ERROR_ID.equals(id))
            return errorValue;
        else if(DRIVER_ID.equals(id))
            return driverProperty;
        else if(DRIVER_LOADED_ID.equals(id))
            return driverLoadedValue;
        else if(COMMANDS_ID.equals(id))
            return commands;
        else if(PROPERTIES_ID.equals(id))
            return properties;
        else if(VALUES_ID.equals(id))
            return values;
        else if(DEVICES_ID.equals(id))
            return devices;
        return null;
    }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.spec;
import static org.xnio.Bits.allAreClear;
import static org.xnio.Bits.anyAreClear;
import static org.xnio.Bits.anyAreSet;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import javax.servlet.DispatcherType;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.WriteListener;
import io.undertow.UndertowLogger;
import org.xnio.Buffers;
import org.xnio.ChannelListener;
import org.xnio.IoUtils;
import org.xnio.channels.Channels;
import org.xnio.channels.StreamSinkChannel;
import io.undertow.connector.ByteBufferPool;
import io.undertow.connector.PooledByteBuffer;
import io.undertow.io.BufferWritableOutputStream;
import io.undertow.server.protocol.http.HttpAttachments;
import io.undertow.servlet.UndertowServletMessages;
import io.undertow.servlet.handlers.ServletRequestContext;
import io.undertow.util.Headers;
/**
* This stream essentially has two modes. When it is being used in standard blocking mode then
* it will buffer in the pooled buffer. If the stream is closed before the buffer is full it will
* set a content-length header if one has not been explicitly set.
* <p>
* If a content-length header was present when the stream was created then it will automatically
* close and flush itself once the appropriate amount of data has been written.
* <p>
* Once the listener has been set it goes into async mode, and writes become non blocking. Most methods
* have two different code paths, based on if the listener has been set or not
* <p>
* Once the write listener has been set operations must only be invoked on this stream from the write
* listener callback. Attempting to invoke from a different thread will result in an IllegalStateException.
* <p>
* Async listener tasks are queued in the {@link AsyncContextImpl}. At most one listener can be active at
* one time, which simplifies the thread safety requirements.
*
* @author Stuart Douglas
*/
public class ServletOutputStreamImpl extends ServletOutputStream implements BufferWritableOutputStream {
private final ServletRequestContext servletRequestContext;
// Pooled backing storage for 'buffer'; the two are allocated and released together.
private PooledByteBuffer pooledBuffer;
private ByteBuffer buffer;
// Explicit buffer size; presumably a default applies when null — TODO confirm.
private Integer bufferSize;
private StreamSinkChannel channel;
// Total bytes written so far; compared against the response content-length.
private long written;
// Bit field of the FLAG_* constants below; mutated via stateUpdater.
private volatile int state;
private volatile boolean asyncIoStarted;
private AsyncContextImpl asyncContext;
// Non-null once the stream has been switched into async (non-blocking) mode.
private WriteListener listener;
private WriteChannelListener internalListener;

/**
 * buffers that are queued up to be written via async writes. This will include
 * {@link #buffer} as the first element, and maybe a user supplied buffer that
 * did not fit
 */
private ByteBuffer[] buffersToWrite;
// File queued by an async transferFrom() that could not complete immediately.
private FileChannel pendingFile;

private static final int FLAG_CLOSED = 1;
private static final int FLAG_WRITE_STARTED = 1 << 1;
private static final int FLAG_READY = 1 << 2;
private static final int FLAG_DELEGATE_SHUTDOWN = 1 << 3;
private static final int FLAG_IN_CALLBACK = 1 << 4;

//TODO: should this be configurable?
private static final int MAX_BUFFERS_TO_ALLOCATE = 6;

private static final AtomicIntegerFieldUpdater<ServletOutputStreamImpl> stateUpdater = AtomicIntegerFieldUpdater.newUpdater(ServletOutputStreamImpl.class, "state");
/**
 * Construct a new instance. No write timeout is configured.
 *
 * @param servletRequestContext the request context this stream writes for
 */
public ServletOutputStreamImpl(final ServletRequestContext servletRequestContext) {
    this.servletRequestContext = servletRequestContext;
}
/**
 * Construct a new instance with an explicit buffer size. No write timeout is configured.
 *
 * @param servletRequestContext the request context this stream writes for
 * @param bufferSize            size of the internal write buffer, in bytes
 */
public ServletOutputStreamImpl(final ServletRequestContext servletRequestContext, int bufferSize) {
    this.bufferSize = bufferSize;
    this.servletRequestContext = servletRequestContext;
}
/**
 * Writes one byte by delegating to the range variant.
 * {@inheritDoc}
 */
public void write(final int b) throws IOException {
    final byte[] single = {(byte) b};
    write(single, 0, single.length);
}
/**
 * Writes the whole array by delegating to the range variant.
 * {@inheritDoc}
 */
public void write(final byte[] b) throws IOException {
    final int length = b.length;
    write(b, 0, length);
}
/**
 * Writes a byte range: buffered blocking writes in standard mode, or a
 * non-blocking path once a write listener has been set.
 * {@inheritDoc}
 */
public void write(final byte[] b, final int off, final int len) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    if (len < 1) {
        return;
    }
    if (listener != null) {
        // Async mode: the non-blocking path does its own accounting.
        writeAsync(b, off, len);
        return;
    }
    // Blocking mode: stage in the internal buffer, spilling when it won't fit.
    ByteBuffer target = buffer();
    if (target.remaining() < len) {
        writeTooLargeForBuffer(b, off, len, target);
    } else {
        target.put(b, off, len);
        if (!target.hasRemaining()) {
            // Buffer exactly full: flush it now.
            writeBufferBlocking(false);
        }
    }
    updateWritten(len);
}
/**
 * Blocking write for payloads larger than the internal buffer: allocates up to
 * {@link #MAX_BUFFERS_TO_ALLOCATE} pooled buffers, fills them from {@code b},
 * and performs gathering writes, re-using the same buffers until all
 * {@code len} bytes have been written. Pooled buffers are always released.
 */
private void writeTooLargeForBuffer(byte[] b, int off, int len, ByteBuffer buffer) throws IOException {
    //so what we have will not fit.
    //We allocate multiple buffers up to MAX_BUFFERS_TO_ALLOCATE
    //and put it in them
    //if it still does not fit we loop, re-using these buffers
    StreamSinkChannel channel = this.channel;
    if (channel == null) {
        this.channel = channel = servletRequestContext.getExchange().getResponseChannel();
    }
    final ByteBufferPool bufferPool = servletRequestContext.getExchange().getConnection().getByteBufferPool();
    ByteBuffer[] buffers = new ByteBuffer[MAX_BUFFERS_TO_ALLOCATE + 1];
    PooledByteBuffer[] pooledBuffers = new PooledByteBuffer[MAX_BUFFERS_TO_ALLOCATE];
    try {
        // Slot 0 is the (partially filled) internal buffer; top it up first.
        buffers[0] = buffer;
        int bytesWritten = 0;
        int rem = buffer.remaining();
        buffer.put(b, bytesWritten + off, rem);
        buffer.flip();
        bytesWritten += rem;
        int bufferCount = 1;
        // Allocate pooled buffers until the data fits or the cap is reached.
        for (int i = 0; i < MAX_BUFFERS_TO_ALLOCATE; ++i) {
            PooledByteBuffer pooled = bufferPool.allocate();
            pooledBuffers[bufferCount - 1] = pooled;
            buffers[bufferCount++] = pooled.getBuffer();
            ByteBuffer cb = pooled.getBuffer();
            int toWrite = len - bytesWritten;
            if (toWrite > cb.remaining()) {
                // This buffer fills completely; keep allocating.
                rem = cb.remaining();
                cb.put(b, bytesWritten + off, rem);
                cb.flip();
                bytesWritten += rem;
            } else {
                // The rest fits here; stop allocating.
                cb.put(b, bytesWritten + off, toWrite);
                bytesWritten = len;
                cb.flip();
                break;
            }
        }
        Channels.writeBlocking(channel, buffers, 0, bufferCount);
        while (bytesWritten < len) {
            //ok, it did not fit, loop and loop and loop until it is done
            bufferCount = 0;
            for (int i = 0; i < MAX_BUFFERS_TO_ALLOCATE + 1; ++i) {
                ByteBuffer cb = buffers[i];
                cb.clear();
                bufferCount++;
                int toWrite = len - bytesWritten;
                if (toWrite > cb.remaining()) {
                    rem = cb.remaining();
                    cb.put(b, bytesWritten + off, rem);
                    cb.flip();
                    bytesWritten += rem;
                } else {
                    cb.put(b, bytesWritten + off, toWrite);
                    bytesWritten = len;
                    cb.flip();
                    break;
                }
            }
            Channels.writeBlocking(channel, buffers, 0, bufferCount);
        }
        // Leave the internal buffer empty and writable for subsequent writes.
        buffer.clear();
    } finally {
        // Always return pooled buffers; a null entry marks the end of the allocated run.
        for (int i = 0; i < pooledBuffers.length; ++i) {
            PooledByteBuffer p = pooledBuffers[i];
            if (p == null) {
                break;
            }
            p.close();
        }
    }
}
/**
 * Async-mode write for a byte range: stages into the internal buffer when it
 * fits, otherwise writes buffer + user data in one gathering write. If the
 * channel cannot take more data, the remainder is copied and queued in
 * {@link #buffersToWrite} and the ready flag is cleared.
 */
private void writeAsync(byte[] b, int off, int len) throws IOException {
    if (anyAreClear(state, FLAG_READY)) {
        throw UndertowServletMessages.MESSAGES.streamNotReady();
    }
    //even though we are in async mode we are still buffering
    try {
        ByteBuffer buffer = buffer();
        if (buffer.remaining() > len) {
            buffer.put(b, off, len);
        } else {
            // Does not fit: write the staged buffer plus the caller's data together.
            buffer.flip();
            final ByteBuffer userBuffer = ByteBuffer.wrap(b, off, len);
            final ByteBuffer[] bufs = new ByteBuffer[]{buffer, userBuffer};
            long toWrite = Buffers.remaining(bufs);
            long res;
            long written = 0;
            createChannel();
            setFlags(FLAG_WRITE_STARTED);
            do {
                res = channel.write(bufs);
                written += res;
                if (res == 0) {
                    //write it out with a listener
                    //but we need to copy any extra data
                    final ByteBuffer copy = ByteBuffer.allocate(userBuffer.remaining());
                    copy.put(userBuffer);
                    copy.flip();
                    this.buffersToWrite = new ByteBuffer[]{buffer, copy};
                    clearFlags(FLAG_READY);
                    return;
                }
            } while (written < toWrite);
            buffer.clear();
        }
    } finally {
        // Always account for the bytes; may trigger async close at content-length.
        updateWrittenAsync(len);
    }
}
/**
 * Writes the given buffers. In blocking mode, when the data exactly matches
 * the declared content length it is written in a single gathering write
 * (common when serving from a buffer cache); otherwise it is staged or
 * combined with the internal buffer. In async mode any data the channel
 * cannot take is copied and queued behind the write listener.
 */
@Override
public void write(ByteBuffer[] buffers) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    int len = 0;
    for (ByteBuffer buf : buffers) {
        len += buf.remaining();
    }
    if (len < 1) {
        return;
    }
    if (listener == null) {
        //if we have received the exact amount of content write it out in one go
        //this is a common case when writing directly from a buffer cache.
        if (this.written == 0 && len == servletRequestContext.getOriginalResponse().getContentLength()) {
            if (channel == null) {
                channel = servletRequestContext.getExchange().getResponseChannel();
            }
            Channels.writeBlocking(channel, buffers, 0, buffers.length);
            setFlags(FLAG_WRITE_STARTED);
        } else {
            ByteBuffer buffer = buffer();
            if (len < buffer.remaining()) {
                // Small enough: just stage it in the internal buffer.
                Buffers.copy(buffer, buffers, 0, buffers.length);
            } else {
                if (channel == null) {
                    channel = servletRequestContext.getExchange().getResponseChannel();
                }
                if (buffer.position() == 0) {
                    Channels.writeBlocking(channel, buffers, 0, buffers.length);
                } else {
                    // Prepend previously buffered data to the caller's buffers.
                    final ByteBuffer[] newBuffers = new ByteBuffer[buffers.length + 1];
                    buffer.flip();
                    newBuffers[0] = buffer;
                    System.arraycopy(buffers, 0, newBuffers, 1, buffers.length);
                    Channels.writeBlocking(channel, newBuffers, 0, newBuffers.length);
                    buffer.clear();
                }
                setFlags(FLAG_WRITE_STARTED);
            }
        }
        updateWritten(len);
    } else {
        if (anyAreClear(state, FLAG_READY)) {
            throw UndertowServletMessages.MESSAGES.streamNotReady();
        }
        //even though we are in async mode we are still buffering
        try {
            ByteBuffer buffer = buffer();
            if (buffer.remaining() > len) {
                Buffers.copy(buffer, buffers, 0, buffers.length);
            } else {
                // Gathering write of the internal buffer plus the user's buffers.
                final ByteBuffer[] bufs = new ByteBuffer[buffers.length + 1];
                buffer.flip();
                bufs[0] = buffer;
                System.arraycopy(buffers, 0, bufs, 1, buffers.length);
                long toWrite = Buffers.remaining(bufs);
                long res;
                long written = 0;
                createChannel();
                setFlags(FLAG_WRITE_STARTED);
                do {
                    res = channel.write(bufs);
                    written += res;
                    if (res == 0) {
                        //write it out with a listener
                        //but we need to copy any extra data
                        //TODO: should really allocate from the pool here
                        final ByteBuffer copy = ByteBuffer.allocate((int) Buffers.remaining(buffers));
                        Buffers.copy(copy, buffers, 0, buffers.length);
                        copy.flip();
                        this.buffersToWrite = new ByteBuffer[]{buffer, copy};
                        clearFlags(FLAG_READY);
                        channel.resumeWrites();
                        return;
                    }
                } while (written < toWrite);
                buffer.clear();
            }
        } finally {
            updateWrittenAsync(len);
        }
    }
}
/** Writes a single buffer by delegating to the array variant. */
@Override
public void write(ByteBuffer byteBuffer) throws IOException {
    ByteBuffer[] wrapped = {byteBuffer};
    write(wrapped);
}
/**
 * Blocking-mode accounting: tracks bytes written and closes the stream once
 * the declared content length has been reached.
 */
void updateWritten(final long len) throws IOException {
    written += len;
    final long contentLength = servletRequestContext.getOriginalResponse().getContentLength();
    final boolean lengthKnown = contentLength != -1;
    if (lengthKnown && written >= contentLength) {
        // All declared content has been written; close to flush and commit.
        close();
    }
}
/**
 * Async-mode accounting: once the declared content length has been reached the
 * stream marks itself closed and — unless a flush is already queued — writes
 * out remaining buffered data, shuts the channel down and releases the buffer.
 */
void updateWrittenAsync(final long len) throws IOException {
    this.written += len;
    long contentLength = servletRequestContext.getOriginalResponse().getContentLength();
    if (contentLength != -1 && this.written >= contentLength) {
        setFlags(FLAG_CLOSED);
        //if buffersToWrite is set we are already flushing
        //so we don't have to do anything
        if (buffersToWrite == null && pendingFile == null) {
            if (flushBufferAsync(true)) {
                channel.shutdownWrites();
                setFlags(FLAG_DELEGATE_SHUTDOWN);
                channel.flush();
                // Release the pooled buffer; this stream will not write again.
                if (pooledBuffer != null) {
                    pooledBuffer.close();
                    buffer = null;
                    pooledBuffer = null;
                }
            }
        }
    }
}
/**
 * Attempts to flush queued/buffered data without blocking.
 *
 * @param writeFinal whether to use writeFinal (last write of the response)
 * @return true if everything was written; false if the channel could not take
 *         the data, in which case it is queued and writes are resumed so the
 *         listener completes the flush later
 */
private boolean flushBufferAsync(final boolean writeFinal) throws IOException {
    ByteBuffer[] bufs = buffersToWrite;
    if (bufs == null) {
        // Nothing queued: flush the internal buffer, if it holds anything.
        ByteBuffer buffer = this.buffer;
        if (buffer == null || buffer.position() == 0) {
            return true;
        }
        buffer.flip();
        bufs = new ByteBuffer[]{buffer};
    }
    long toWrite = Buffers.remaining(bufs);
    if (toWrite == 0) {
        //we clear the buffer, so it can be written to again
        buffer.clear();
        return true;
    }
    setFlags(FLAG_WRITE_STARTED);
    createChannel();
    long res;
    long written = 0;
    do {
        if (writeFinal) {
            res = channel.writeFinal(bufs);
        } else {
            res = channel.write(bufs);
        }
        written += res;
        if (res == 0) {
            //write it out with a listener
            clearFlags(FLAG_READY);
            buffersToWrite = bufs;
            channel.resumeWrites();
            return false;
        }
    } while (written < toWrite);
    // Everything written: recycle the internal buffer for subsequent writes.
    buffer.clear();
    return true;
}
/**
 * Exposes the internal buffer so callers (the print writer) can fill it
 * directly, which can be more efficient; this is basically a hack. Callers
 * must report the bytes they add via {@link #updateWritten(long)}. The buffer
 * is created lazily on first access.
 *
 * @return the underlying buffer, or {@code null} if the stream is closed
 */
ByteBuffer underlyingBuffer() {
    return anyAreSet(state, FLAG_CLOSED) ? null : buffer();
}
/**
 * Flushes the stream, except when the servlet spec says to ignore the flush
 * (inside an include) or when flush suppression applies.
 * {@inheritDoc}
 */
public void flush() throws IOException {
    //according to the servlet spec we ignore a flush from within an include
    boolean withinInclude =
            servletRequestContext.getOriginalRequest().getDispatcherType() == DispatcherType.INCLUDE;
    if (withinInclude || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    //we mark the stream as flushed, but don't actually flush
    //because in most cases flush just kills performance
    //we only do this if the request is fully read, so that http tunneling scenarios still work
    boolean suppressFlush = servletRequestContext.getDeployment().getDeploymentInfo().isIgnoreFlush()
            && servletRequestContext.getExchange().isRequestComplete()
            && servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null;
    if (suppressFlush) {
        servletRequestContext.getOriginalResponse().setIgnoredFlushPerformed(true);
        return;
    }
    flushInternal();
}
/**
 * Flushes buffered data to the underlying channel. In blocking mode this is a
 * no-op once closed; in async mode a write that cannot complete compacts the
 * buffer rather than flipping the ready state.
 */
public void flushInternal() throws IOException {
    if (listener == null) {
        // Blocking mode.
        if (anyAreSet(state, FLAG_CLOSED)) {
            //just return
            return;
        }
        if (buffer != null && buffer.position() != 0) {
            writeBufferBlocking(false);
        }
        if (channel == null) {
            channel = servletRequestContext.getExchange().getResponseChannel();
        }
        Channels.flushBlocking(channel);
    } else {
        // Async mode: only attempt while the stream is ready for writes.
        if (anyAreClear(state, FLAG_READY)) {
            return;
        }
        createChannel();
        if (buffer == null || buffer.position() == 0) {
            //nothing to flush, we just flush the underlying stream
            //it does not matter if this succeeds or not
            channel.flush();
            return;
        }
        //we have some data in the buffer, we can just write it out
        //if the write fails we just compact, rather than changing the ready state
        setFlags(FLAG_WRITE_STARTED);
        buffer.flip();
        long res;
        do {
            res = channel.write(buffer);
        } while (buffer.hasRemaining() && res != 0);
        if (!buffer.hasRemaining()) {
            channel.flush();
        }
        buffer.compact();
    }
}
/**
 * Transfers the remaining content of {@code source} to the response. In
 * blocking mode buffered data is flushed first and the transfer blocks until
 * complete; in async mode the transfer proceeds until the channel stops
 * accepting data, at which point the file is stashed in {@link #pendingFile}
 * and the write listener finishes the job.
 */
@Override
public void transferFrom(FileChannel source) throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.streamIsClosed();
    }
    if (listener == null) {
        // Blocking mode: flush anything already buffered, then transfer directly.
        if (buffer != null && buffer.position() != 0) {
            writeBufferBlocking(false);
        }
        if (channel == null) {
            channel = servletRequestContext.getExchange().getResponseChannel();
        }
        long position = source.position();
        long count = source.size() - position;
        Channels.transferBlocking(channel, source, position, count);
        updateWritten(count);
    } else {
        setFlags(FLAG_WRITE_STARTED);
        createChannel();
        long pos = 0;
        try {
            long size = source.size();
            pos = source.position();
            while (size - pos > 0) {
                // BUGFIX: transfer from 'source', not 'pendingFile'. The original
                // passed 'pendingFile', which is null here (or stale from an
                // earlier call) — it is only assigned in the branch below when
                // the write cannot complete immediately.
                long ret = channel.transferFrom(source, pos, size - pos);
                if (ret <= 0) {
                    // Channel not ready: stash the file and resume via the listener.
                    clearFlags(FLAG_READY);
                    pendingFile = source;
                    source.position(pos);
                    channel.resumeWrites();
                    return;
                }
                pos += ret;
            }
        } finally {
            // Account for the bytes actually transferred in this call.
            updateWrittenAsync(pos - source.position());
        }
    }
}
/**
 * Writes the entire contents of the buffer to the response channel, blocking
 * (via awaitWritable) until all data has been accepted. The buffer is cleared
 * afterwards and FLAG_WRITE_STARTED is set.
 *
 * @param writeFinal if true the data is written with writeFinal(), signalling
 *                   that no further writes will follow
 * @throws IOException if a write or the wait for writability fails
 */
private void writeBufferBlocking(final boolean writeFinal) throws IOException {
    if (channel == null) {
        channel = servletRequestContext.getExchange().getResponseChannel();
    }
    buffer.flip();
    while (buffer.hasRemaining()) {
        if (writeFinal) {
            channel.writeFinal(buffer);
        } else {
            channel.write(buffer);
        }
        // partial write: block until the channel can accept more data
        if (buffer.hasRemaining()) {
            channel.awaitWritable();
        }
    }
    buffer.clear();
    setFlags(FLAG_WRITE_STARTED);
}
/**
 * {@inheritDoc}
 * <p>
 * A close from within an include, or on a response treated as committed, is a
 * no-op per the servlet spec. In blocking mode this writes out any buffered
 * data, sets an exact Content-Length when nothing has been written yet and no
 * conflicting headers/trailers exist, and shuts down the channel. In async mode
 * it delegates to {@link #closeAsync()}.
 */
public void close() throws IOException {
    if (servletRequestContext.getOriginalRequest().getDispatcherType() == DispatcherType.INCLUDE ||
            servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    if (listener == null) {
        if (anyAreSet(state, FLAG_CLOSED)) return;
        setFlags(FLAG_CLOSED);
        clearFlags(FLAG_READY);
        // nothing written yet and no explicit Content-Length: we can set the
        // exact length from the buffered data (unless chunking/trailers are in play)
        if (allAreClear(state, FLAG_WRITE_STARTED) && channel == null && servletRequestContext.getOriginalResponse().getHeader(Headers.CONTENT_LENGTH_STRING) == null) {
            if (servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null
                    && servletRequestContext.getExchange().getAttachment(HttpAttachments.RESPONSE_TRAILER_SUPPLIER) == null
                    && servletRequestContext.getExchange().getAttachment(HttpAttachments.RESPONSE_TRAILERS) == null) {
                if (buffer == null) {
                    servletRequestContext.getExchange().getResponseHeaders().put(Headers.CONTENT_LENGTH, "0");
                } else {
                    servletRequestContext.getExchange().getResponseHeaders().put(Headers.CONTENT_LENGTH, Integer.toString(buffer.position()));
                }
            }
        }
        try {
            if (buffer != null) {
                // writeFinal: no more writes will follow
                writeBufferBlocking(true);
            }
            if (channel == null) {
                channel = servletRequestContext.getExchange().getResponseChannel();
            }
            setFlags(FLAG_DELEGATE_SHUTDOWN);
            StreamSinkChannel channel = this.channel;
            if (channel != null) { //mock requests
                channel.shutdownWrites();
                Channels.flushBlocking(channel);
            }
        } catch (IOException | RuntimeException | Error e) {
            IoUtils.safeClose(this.channel);
            throw e;
        } finally {
            // always release the pooled buffer to avoid a leak
            if (pooledBuffer != null) {
                pooledBuffer.close();
                buffer = null;
            } else {
                buffer = null;
            }
        }
    } else {
        closeAsync();
    }
}
/**
 * Closes the channel, and flushes any data out using async IO
 * <p>
 * This is used in two situations, if an output stream is not closed when a
 * request is done, and when performing a close on a stream that is in async
 * mode
 *
 * @throws IOException if shutting down or flushing the channel fails
 */
public void closeAsync() throws IOException {
    if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        return;
    }
    // async IO must run on the exchange's IO thread; re-dispatch if we are not on it
    if (!servletRequestContext.getExchange().isInIoThread()) {
        servletRequestContext.getExchange().getIoThread().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    closeAsync();
                } catch (IOException e) {
                    UndertowLogger.REQUEST_IO_LOGGER.closeAsyncFailed(e);
                }
            }
        });
        return;
    }
    try {
        setFlags(FLAG_CLOSED);
        clearFlags(FLAG_READY);
        // nothing written yet: we can still set an exact Content-Length header
        if (allAreClear(state, FLAG_WRITE_STARTED) && channel == null) {
            if (servletRequestContext.getOriginalResponse().getHeader(Headers.TRANSFER_ENCODING_STRING) == null) {
                if (buffer == null) {
                    servletRequestContext.getOriginalResponse().setHeader(Headers.CONTENT_LENGTH, "0");
                } else {
                    servletRequestContext.getOriginalResponse().setHeader(Headers.CONTENT_LENGTH, Integer.toString(buffer.position()));
                }
            }
        }
        createChannel();
        if (buffer != null) {
            if (!flushBufferAsync(true)) {
                // write could not complete yet; the write listener finishes the close
                return;
            }
            if (pooledBuffer != null) {
                pooledBuffer.close();
                buffer = null;
            } else {
                buffer = null;
            }
        }
        channel.shutdownWrites();
        setFlags(FLAG_DELEGATE_SHUTDOWN);
        if (!channel.flush()) {
            channel.resumeWrites();
        }
    } catch (IOException | RuntimeException | Error e) {
        // release the pooled buffer on failure to avoid a leak
        if (pooledBuffer != null) {
            pooledBuffer.close();
            pooledBuffer = null;
            buffer = null;
        }
        throw e;
    }
}
/**
 * Lazily obtains the response channel, attaching the internal write listener
 * (when one exists) so async notifications are delivered. No-op if the channel
 * has already been created.
 */
private void createChannel() {
    if (channel != null) {
        return;
    }
    channel = servletRequestContext.getExchange().getResponseChannel();
    if (internalListener != null) {
        channel.getWriteSetter().set(internalListener);
    }
}
/**
 * Returns the write buffer, creating it on first use. If an explicit buffer
 * size was configured a direct buffer of that size is allocated; otherwise a
 * buffer is taken from the connection's pool (and retained in
 * {@code pooledBuffer} so it can be released later).
 */
private ByteBuffer buffer() {
    ByteBuffer existing = this.buffer;
    if (existing == null) {
        if (bufferSize != null) {
            existing = ByteBuffer.allocateDirect(bufferSize);
        } else {
            this.pooledBuffer = servletRequestContext.getExchange().getConnection().getByteBufferPool().allocate();
            existing = pooledBuffer.getBuffer();
        }
        this.buffer = existing;
    }
    return existing;
}
/**
 * Discards all buffered output and resets the written-byte count. Only legal
 * before any data has actually been written to the channel; afterwards the
 * response is considered committed and this throws.
 */
public void resetBuffer() {
    if (!allAreClear(state, FLAG_WRITE_STARTED)) {
        throw UndertowServletMessages.MESSAGES.responseAlreadyCommited();
    }
    if (pooledBuffer != null) {
        pooledBuffer.close();
        pooledBuffer = null;
    }
    buffer = null;
    this.written = 0;
}
/**
 * Sets the size of the write buffer. May only be called before the buffer has
 * been created and before the response is treated as committed.
 *
 * @param size the requested buffer size in bytes
 */
public void setBufferSize(final int size) {
    if (buffer != null) {
        throw UndertowServletMessages.MESSAGES.contentHasBeenWritten();
    }
    if (servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
        throw UndertowServletMessages.MESSAGES.contentHasBeenWritten();
    }
    this.bufferSize = size;
}
/**
 * @return {@code true} once this stream has been closed
 */
public boolean isClosed() {
    final boolean closed = anyAreSet(state, FLAG_CLOSED);
    return closed;
}
/**
 * {@inheritDoc}
 * <p>
 * Only valid in async mode (a write listener must be set). Returns
 * {@code false} until async IO has actually started; when not ready, writes
 * are resumed so the listener will be notified once the channel is writable.
 */
@Override
public boolean isReady() {
    if (listener == null) {
        //TODO: is this the correct behaviour?
        throw UndertowServletMessages.MESSAGES.streamNotInAsyncMode();
    }
    if (!asyncIoStarted) {
        //if we don't add this guard here calls to isReady could start async IO too soon
        //resulting in a 'resuming + dispatched' message
        return false;
    }
    if (!anyAreSet(state, FLAG_READY)) {
        // not ready: arrange for the write listener to fire when writable
        if (channel != null) {
            channel.resumeWrites();
        }
        return false;
    }
    return true;
}
/**
 * {@inheritDoc}
 * <p>
 * Switches the stream into async mode. The listener may only be set once, and
 * only after async processing has been started on the request. The internal
 * channel listener is registered (on the channel if it already exists), and an
 * async task is queued so the first write-possible notification is delivered
 * after the current dispatch completes.
 */
@Override
public void setWriteListener(final WriteListener writeListener) {
    if (writeListener == null) {
        throw UndertowServletMessages.MESSAGES.listenerCannotBeNull();
    }
    if (listener != null) {
        throw UndertowServletMessages.MESSAGES.listenerAlreadySet();
    }
    final ServletRequest servletRequest = servletRequestContext.getOriginalRequest();
    if (!servletRequest.isAsyncStarted()) {
        throw UndertowServletMessages.MESSAGES.asyncNotStarted();
    }
    asyncContext = (AsyncContextImpl) servletRequest.getAsyncContext();
    listener = writeListener;
    //we register the write listener on the underlying connection
    //so we don't have to force the creation of the response channel
    //under normal circumstances this will break write listener delegation
    this.internalListener = new WriteChannelListener();
    if (this.channel != null) {
        this.channel.getWriteSetter().set(internalListener);
    }
    //we resume from an async task, after the request has been dispatched
    asyncContext.addAsyncTask(new Runnable() {
        @Override
        public void run() {
            asyncIoStarted = true;
            if (channel == null) {
                // no channel yet: fire the first notification manually on the IO thread
                servletRequestContext.getExchange().getIoThread().execute(new Runnable() {
                    @Override
                    public void run() {
                        internalListener.handleEvent(null);
                    }
                });
            } else {
                channel.resumeWrites();
            }
        }
    });
}
/**
 * @return the servlet request context this stream belongs to
 */
ServletRequestContext getServletRequestContext() {
    return servletRequestContext;
}
/**
 * Write listener attached to the underlying response channel when the stream
 * is in async mode. Drives flushing after shutdown, draining of any queued
 * buffers, resumption of pending file transfers, close handling, and finally
 * the invocation of the user's {@link WriteListener}.
 */
private class WriteChannelListener implements ChannelListener<StreamSinkChannel> {
    @Override
    public void handleEvent(final StreamSinkChannel aChannel) {
        //flush the channel if it is closed
        if (anyAreSet(state, FLAG_DELEGATE_SHUTDOWN)) {
            try {
                //either it will work, and the channel is closed
                //or it won't, and we continue with writes resumed
                channel.flush();
                return;
            } catch (Throwable t) {
                handleError(t);
                return;
            }
        }
        //if there is data still to write
        if (buffersToWrite != null) {
            long toWrite = Buffers.remaining(buffersToWrite);
            long written = 0;
            long res;
            if (toWrite > 0) { //should always be true, but just to be defensive
                do {
                    try {
                        res = channel.write(buffersToWrite);
                        written += res;
                        if (res == 0) {
                            // channel is full again; wait for the next write event
                            return;
                        }
                    } catch (Throwable t) {
                        handleError(t);
                        return;
                    }
                } while (written < toWrite);
            }
            buffersToWrite = null;
            buffer.clear();
        }
        // resume a file transfer that could not complete earlier
        if (pendingFile != null) {
            try {
                long size = pendingFile.size();
                long pos = pendingFile.position();
                while (size - pos > 0) {
                    long ret = channel.transferFrom(pendingFile, pos, size - pos);
                    if (ret <= 0) {
                        // no progress: remember position, wait for next event
                        pendingFile.position(pos);
                        return;
                    }
                    pos += ret;
                }
                pendingFile = null;
            } catch (Throwable t) {
                handleError(t);
                return;
            }
        }
        if (anyAreSet(state, FLAG_CLOSED)) {
            // stream was closed while writes were pending: release the buffer
            // and shut the channel down
            try {
                if (pooledBuffer != null) {
                    pooledBuffer.close();
                    buffer = null;
                } else {
                    buffer = null;
                }
                channel.shutdownWrites();
                setFlags(FLAG_DELEGATE_SHUTDOWN);
                channel.flush();
            } catch (Throwable t) {
                handleError(t);
                return;
            }
        } else {
            if (asyncContext.isDispatched()) {
                //this is no longer an async request
                //we just return for now
                //TODO: what do we do here? Revert back to blocking mode?
                channel.suspendWrites();
                return;
            }
            setFlags(FLAG_READY);
            try {
                setFlags(FLAG_IN_CALLBACK);
                //if the stream is still ready then we do not resume writes
                //this is per spec, we only call the listener once for each time
                //isReady returns true
                if (channel != null) {
                    channel.suspendWrites();
                }
                servletRequestContext.getCurrentServletContext().invokeOnWritePossible(servletRequestContext.getExchange(), listener);
            } catch (Throwable e) {
                IoUtils.safeClose(channel);
            } finally {
                clearFlags(FLAG_IN_CALLBACK);
            }
        }
    }
    /**
     * Reports the error to the user's listener via the servlet context, then
     * closes the channel and connection and releases any pooled buffer.
     */
    private void handleError(final Throwable t) {
        try {
            servletRequestContext.getCurrentServletContext().invokeRunnable(servletRequestContext.getExchange(), new Runnable() {
                @Override
                public void run() {
                    listener.onError(t);
                }
            });
        } finally {
            IoUtils.safeClose(channel, servletRequestContext.getExchange().getConnection());
            if (pooledBuffer != null) {
                pooledBuffer.close();
                pooledBuffer = null;
                buffer = null;
            }
        }
    }
}
/**
 * Atomically ORs the given flag bits into {@code state} using a CAS retry loop.
 */
private void setFlags(int flags) {
    for (;;) {
        final int snapshot = state;
        if (stateUpdater.compareAndSet(this, snapshot, snapshot | flags)) {
            return;
        }
    }
}
/**
 * Atomically clears the given flag bits from {@code state} using a CAS retry loop.
 */
private void clearFlags(int flags) {
    for (;;) {
        final int snapshot = state;
        if (stateUpdater.compareAndSet(this, snapshot, snapshot & ~flags)) {
            return;
        }
    }
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package TWoT;
import TWoT.EquippableItem.EItem;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author Lagoni
*/
/**
 * A playable character: a name, attack and defense values, health, gold, a
 * running highscore and an {@link Inventory}. Serializable so player state can
 * be persisted between sessions. (Field names are part of the serialized form
 * and must not be renamed.)
 *
 * @author Lagoni
 */
public class Player implements Serializable {

    private String playerName;
    private double attValue;
    private double defValue;
    private int health;
    private int gold = 0;
    private int highscore = 0;
    private Inventory playersInventory;

    /**
     * Creates an empty player; all fields keep their default values.
     */
    public Player() {
    }

    /**
     * Creates a player with stats and a starting inventory.
     *
     * @param playerName       the player's name
     * @param attValue         starting attack value
     * @param defValue         starting defense value
     * @param health           starting health
     * @param playersInventory the starting inventory
     */
    public Player(String playerName, double attValue, double defValue, int health, Inventory playersInventory) {
        this(playerName, attValue, defValue, health);
        this.playersInventory = playersInventory;
    }

    /**
     * Creates a player with stats but no inventory.
     *
     * @param playerName the player's name
     * @param attValue   starting attack value
     * @param defValue   starting defense value
     * @param health     starting health
     */
    public Player(String playerName, double attValue, double defValue, int health) {
        this.playerName = playerName;
        this.attValue = attValue;
        this.defValue = defValue;
        this.health = health;
    }

    /**
     * Puts an item into the player's inventory.
     *
     * @param i the item to add
     */
    public void addItemToInventory(Item i) {
        playersInventory.addInventoryItem(i);
    }

    /**
     * Equips an item on the given player.
     *
     * @param i the equippable item
     * @param p the player the item is equipped on
     */
    public void addItemToEquippableInventory(EquippableItem i, Player p) {
        playersInventory.addEquipItem(i, p);
    }

    /**
     * Unequips an item and removes its stat buffs from this player.
     *
     * @param i the equippable item to unequip
     */
    public void removeItemToEquippableInventory(EquippableItem i) {
        playersInventory.removeEquipItem(i);
        removeAtt(i.getAttackBuff());
        removeDef(i.getDefenseBuff());
    }

    /**
     * Returns every item currently in the player's inventory.
     *
     * @return list of inventory items
     */
    public List<Item> getInventoryItems() {
        return playersInventory.getInventoryItems();
    }

    /**
     * Returns the currently equipped items, keyed by equipment slot.
     *
     * @return map of equipped items
     */
    public HashMap<EquippableItem.EItem, EquippableItem> getEquippableItems() {
        return playersInventory.getEquippableItem();
    }

    /**
     * @return the player's name
     */
    public String getPlayerName() {
        return playerName;
    }

    /**
     * @return the current attack value
     */
    public double getAttValue() {
        return attValue;
    }

    /**
     * @param attValue the attack value to set
     */
    public void setAttValue(double attValue) {
        this.attValue = attValue;
    }

    /**
     * @return the current defense value
     */
    public double getDefValue() {
        return defValue;
    }

    /**
     * @param defValue the defense value to set
     */
    public void setDefValue(double defValue) {
        this.defValue = defValue;
    }

    /**
     * @return the current health
     */
    public int getHealth() {
        return health;
    }

    /**
     * @param health the health to set
     */
    public void setHealth(int health) {
        this.health = health;
    }

    /**
     * Restores health.
     *
     * @param healthToRegen amount added to current health
     */
    public void regenHealth(int healthToRegen) {
        this.health = this.health + healthToRegen;
    }

    /**
     * Applies damage by subtracting it from current health.
     *
     * @param damage the damage to take
     */
    public void setDamage(int damage) {
        this.health = this.health - damage;
    }

    /**
     * @return the current gold
     */
    public int getGold() {
        return gold;
    }

    /**
     * Increases the attack value.
     *
     * @param att amount to add
     */
    public void addAtt(double att) {
        attValue += att;
    }

    /**
     * Decreases the attack value.
     *
     * @param att amount to remove
     */
    public void removeAtt(double att) {
        attValue -= att;
    }

    /**
     * Increases the defense value.
     *
     * @param def amount to add
     */
    public void addDef(double def) {
        defValue += def;
    }

    /**
     * Decreases the defense value.
     *
     * @param def amount to remove
     */
    public void removeDef(double def) {
        defValue -= def;
    }

    /**
     * Adds gold to the player's purse.
     *
     * @param gold amount of gold to add
     */
    public void addGold(int gold) {
        this.gold += gold;
    }

    /**
     * Sets the player's name.
     *
     * @param playerName the new name
     */
    public void setPlayerName(String playerName) {
        this.playerName = playerName;
    }

    /**
     * Computes the effective highscore: stored highscore plus gold plus the
     * total value of all inventory and equipped items.
     *
     * @return the computed highscore
     */
    public int getHighscore() {
        int itemWorth = 0;
        for (Item item : playersInventory.getInventoryItems()) {
            itemWorth += item.getItemValue();
        }
        for (EquippableItem equipped : playersInventory.getEquippableItem().values()) {
            itemWorth += equipped.getItemValue();
        }
        return gold + itemWorth + highscore;
    }

    /**
     * @param highscore the stored highscore to set
     */
    public void setHighscore(int highscore) {
        this.highscore = highscore;
    }

    /**
     * Adds to the stored highscore.
     *
     * @param h amount to add
     */
    public void addHighscore(int h) {
        this.highscore += h;
    }

    /**
     * Subtracts from the stored highscore.
     *
     * @param h amount to remove
     */
    public void removeHighscore(int h) {
        this.highscore -= h;
    }

    /**
     * Removes an item from the player's inventory.
     *
     * @param i the item to remove
     */
    public void removeInventoryItem(Item i) {
        playersInventory.removeInventoryItem(i);
    }
}
| |
/*
* *
* * Licensed to the Apache Software Foundation (ASF) under one
* * or more contributor license agreements. See the NOTICE file
* * distributed with this work for additional information
* * regarding copyright ownership. The ASF licenses this file
* * to you under the Apache License, Version 2.0 (the
* * "License"); you may not use this file except in compliance
* * with the License. You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.apache.tez.runtime.library.conf;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Map;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.tez.common.TezUtils;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.common.ConfigUtils;
import org.apache.tez.runtime.library.input.OrderedGroupedKVInput;
import org.apache.tez.runtime.library.input.OrderedGroupedInputLegacy;
@InterfaceAudience.Public
@InterfaceStability.Evolving
/**
 * Configure {@link org.apache.tez.runtime.library.input.OrderedGroupedKVInput}.
 * <p>
 * Values will be picked up from tez-site if not specified, otherwise defaults from
 * {@link org.apache.tez.runtime.library.api.TezRuntimeConfiguration} will be used.
 */
public class OrderedGroupedKVInputConfig {
/**
 * Configuration parameters that are specific to this Input.
 */
@InterfaceAudience.Private
public interface SpecificConfigBuilder<T> extends BaseConfigBuilder<T> {

    /**
     * Use the legacy version of this input.
     *
     * @return instance of the current builder
     */
    T useLegacyInput();

    /**
     * Set the buffer fraction, as a fraction of container size, used while
     * fetching remote data.
     *
     * @param shuffleBufferFraction fraction of container size
     * @return instance of the current builder
     */
    T setShuffleBufferFraction(float shuffleBufferFraction);

    /**
     * Set the buffer fraction, as a fraction of container size, used after the
     * fetch and merge are complete. This buffer caches merged data and avoids
     * writing it out to disk.
     *
     * @param postMergeBufferFraction fraction of container size
     * @return instance of the current builder
     */
    T setPostMergeBufferFraction(float postMergeBufferFraction);

    /**
     * Set a size limit on the maximum segment size to be shuffled to disk,
     * expressed as a fraction of the shuffle buffer.
     *
     * @param maxSingleSegmentFraction fraction of memory determined by ShuffleBufferFraction
     * @return instance of the current builder
     */
    T setMaxSingleMemorySegmentFraction(float maxSingleSegmentFraction);

    /**
     * Enable or disable the memory-to-memory merger.
     *
     * @param enable whether to enable the memory to memory merger
     * @return instance of the current builder
     */
    T setMemToMemMerger(boolean enable); // Not super useful until additional params are used.

    /**
     * Configure the point at which in-memory segments are merged, expressed as
     * a fraction of the shuffle buffer.
     *
     * @param mergeFraction fraction of memory determined by ShuffleBufferFraction
     *                      which, when filled, triggers a merge
     * @return instance of the current builder
     */
    T setMergeFraction(float mergeFraction);

    /**
     * Configure the combiner class.
     *
     * @param combinerClassName the combiner class name
     * @return instance of the current builder
     */
    T setCombiner(String combinerClassName);

    /**
     * Configure the combiner class and its associated configuration (specified
     * as key-value pairs). Only use this when the combiner requires specific
     * configuration; {@link #setCombiner(String)} is the preferred method.
     *
     * @param combinerClassName the combiner class name
     * @param combinerConf      the combiner configuration; may be null, otherwise a
     *                          {@link java.util.Map} of key-value pairs limited to
     *                          the keys the combiner requires
     * @return instance of the current builder
     */
    T setCombiner(String combinerClassName, @Nullable Map<String, String> combinerConf);
}
/**
 * Edge-scoped builder that forwards Input-specific settings to an underlying
 * {@link OrderedGroupedKVInputConfig.Builder}, returning to the edge builder
 * via {@link #done()}.
 */
@SuppressWarnings("rawtypes")
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static class SpecificBuilder<E extends HadoopKeyValuesBasedBaseEdgeConfig.Builder> implements
    SpecificConfigBuilder<SpecificBuilder> {

    private final E edgeBuilder;
    private final OrderedGroupedKVInputConfig.Builder builder;

    @InterfaceAudience.Private
    SpecificBuilder(E edgeBuilder, OrderedGroupedKVInputConfig.Builder builder) {
        this.edgeBuilder = edgeBuilder;
        this.builder = builder;
    }

    // FIX: @Override was missing; this implements SpecificConfigBuilder.useLegacyInput()
    @Override
    public SpecificBuilder<E> useLegacyInput() {
        builder.useLegacyInput();
        return this;
    }

    @Override
    public SpecificBuilder<E> setShuffleBufferFraction(float shuffleBufferFraction) {
        builder.setShuffleBufferFraction(shuffleBufferFraction);
        return this;
    }

    @Override
    public SpecificBuilder<E> setPostMergeBufferFraction(float postMergeBufferFraction) {
        builder.setPostMergeBufferFraction(postMergeBufferFraction);
        return this;
    }

    @Override
    public SpecificBuilder<E> setMaxSingleMemorySegmentFraction(float maxSingleSegmentFraction) {
        builder.setMaxSingleMemorySegmentFraction(maxSingleSegmentFraction);
        return this;
    }

    @Override
    public SpecificBuilder<E> setMemToMemMerger(boolean enable) {
        builder.setMemToMemMerger(enable);
        return this;
    }

    @Override
    public SpecificBuilder<E> setMergeFraction(float mergeFraction) {
        builder.setMergeFraction(mergeFraction);
        return this;
    }

    @Override
    public SpecificBuilder<E> setCombiner(String combinerClassName) {
        return setCombiner(combinerClassName, null);
    }

    @Override
    public SpecificBuilder<E> setCombiner(String combinerClassName, Map<String, String> combinerConf) {
        builder.setCombiner(combinerClassName, combinerConf);
        return this;
    }

    @Override
    public SpecificBuilder<E> setAdditionalConfiguration(String key, String value) {
        builder.setAdditionalConfiguration(key, value);
        return this;
    }

    @Override
    public SpecificBuilder<E> setAdditionalConfiguration(Map<String, String> confMap) {
        builder.setAdditionalConfiguration(confMap);
        return this;
    }

    @Override
    public SpecificBuilder<E> setFromConfiguration(Configuration conf) {
        builder.setFromConfiguration(conf);
        return this;
    }

    /**
     * @return the enclosing edge builder, for continued chaining
     */
    public E done() {
        return edgeBuilder;
    }
}
// Raw configuration backing this input config; package-visible for tests only.
@InterfaceAudience.Private
@VisibleForTesting
Configuration conf;
// Fully qualified class name of the input implementation to instantiate.
private String inputClassName;
// No-arg constructor for testing / payload deserialization; populate via fromUserPayload.
@InterfaceAudience.Private
@VisibleForTesting
OrderedGroupedKVInputConfig() {
}
/**
 * Creates a config holding the given configuration, selecting the legacy or
 * current input implementation class name accordingly.
 */
private OrderedGroupedKVInputConfig(Configuration conf, boolean useLegacyInput) {
    this.conf = conf;
    this.inputClassName = useLegacyInput
        ? OrderedGroupedInputLegacy.class.getName()
        : OrderedGroupedKVInput.class.getName();
}
/**
 * Get a UserPayload representation of the Configuration.
 *
 * @return a {@link org.apache.tez.dag.api.UserPayload} instance
 * @throws RuntimeException wrapping any IOException from payload creation
 */
public UserPayload toUserPayload() {
    try {
        return TezUtils.createUserPayloadFromConf(conf);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
/**
 * Populates this config from a previously serialized UserPayload.
 *
 * @throws RuntimeException wrapping any IOException from payload parsing
 */
@InterfaceAudience.Private
public void fromUserPayload(UserPayload payload) {
    try {
        this.conf = TezUtils.createConfFromUserPayload(payload);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
/**
 * @return the fully qualified class name of the input implementation to use
 */
public String getInputClassName() {
    return inputClassName;
}
/**
 * Creates a builder for this config.
 *
 * @param keyClass   the key class name
 * @param valueClass the value class name
 * @return a new {@link Builder}
 */
public static Builder newBuilder(String keyClass, String valueClass) {
    return new Builder(keyClass, valueClass);
}
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static class Builder implements SpecificConfigBuilder<Builder> {
private final Configuration conf = new Configuration(false);
private boolean useLegacyInput = false;
/**
* Create a configuration builder for {@link org.apache.tez.runtime.library.input.OrderedGroupedKVInput}
*
* @param keyClassName the key class name
* @param valueClassName the value class name
*/
@InterfaceAudience.Private
Builder(String keyClassName, String valueClassName) {
this();
Preconditions.checkNotNull(keyClassName, "Key class name cannot be null");
Preconditions.checkNotNull(valueClassName, "Value class name cannot be null");
setKeyClassName(keyClassName);
setValueClassName(valueClassName);
}
@InterfaceAudience.Private
Builder() {
Map<String, String> tezDefaults = ConfigUtils
.extractConfigurationMap(TezRuntimeConfiguration.getTezRuntimeConfigDefaults(),
OrderedGroupedKVInput.getConfigurationKeySet());
ConfigUtils.addConfigMapToConfiguration(this.conf, tezDefaults);
ConfigUtils.addConfigMapToConfiguration(this.conf, TezRuntimeConfiguration.getOtherConfigDefaults());
}
@InterfaceAudience.Private
Builder setKeyClassName(String keyClassName) {
Preconditions.checkNotNull(keyClassName, "Key class name cannot be null");
this.conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, keyClassName);
return this;
}
@InterfaceAudience.Private
Builder setValueClassName(String valueClassName) {
Preconditions.checkNotNull(valueClassName, "Value class name cannot be null");
this.conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS, valueClassName);
return this;
}
public Builder useLegacyInput() {
this.useLegacyInput = true;
return this;
}
@Override
public Builder setShuffleBufferFraction(float shuffleBufferFraction) {
this.conf
.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT, shuffleBufferFraction);
return this;
}
@Override
public Builder setPostMergeBufferFraction(float postMergeBufferFraction) {
this.conf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT, postMergeBufferFraction);
return this;
}
@Override
public Builder setMaxSingleMemorySegmentFraction(float maxSingleSegmentFraction) {
this.conf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT,
maxSingleSegmentFraction);
return this;
}
@Override
public Builder setMemToMemMerger(boolean enable) {
this.conf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM, enable);
return this;
}
@Override
public Builder setMergeFraction(float mergeFraction) {
this.conf.setFloat(TezRuntimeConfiguration.TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT, mergeFraction);
return this;
}
public Builder setCombiner(String combinerClassName) {
return setCombiner(combinerClassName, null);
}
@Override
public Builder setCombiner(String combinerClassName, Map<String, String> combinerConf) {
this.conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_COMBINER_CLASS, combinerClassName);
if (combinerConf != null) {
// Merging the confs for now. Change to be specific in the future.
ConfigUtils.mergeConfsWithExclusions(this.conf, combinerConf,
TezRuntimeConfiguration.getRuntimeConfigKeySet());
}
return this;
}
/**
* Set the key comparator class
*
* @param comparatorClassName the key comparator class name
* @return instance of the current builder
*/
public Builder setKeyComparatorClass(String comparatorClassName) {
return this.setKeyComparatorClass(comparatorClassName, null);
}
/**
* Set the key comparator class and it's associated configuration. This method should only be
* used if the comparator requires some specific configuration, which is typically not the
* case. {@link #setKeyComparatorClass(String)} is the preferred method for setting a
* comparator.
*
* @param comparatorClassName the key comparator class name
* @param comparatorConf the comparator configuration. This can be null, and is a {@link
* java.util.Map} of key-value pairs. The keys should be limited to
* the ones required by the comparator.
* @return instance of the current builder
*/
public Builder setKeyComparatorClass(String comparatorClassName,
@Nullable Map<String, String> comparatorConf) {
Preconditions.checkNotNull(comparatorClassName, "Comparator class name cannot be null");
this.conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_COMPARATOR_CLASS,
comparatorClassName);
if (comparatorConf != null) {
// Merging the confs for now. Change to be specific in the future.
ConfigUtils.mergeConfsWithExclusions(this.conf, comparatorConf,
TezRuntimeConfiguration.getRuntimeConfigKeySet());
}
return this;
}
@Override
public Builder setAdditionalConfiguration(String key, String value) {
Preconditions.checkNotNull(key, "Key cannot be null");
if (ConfigUtils.doesKeyQualify(key,
Lists.newArrayList(OrderedGroupedKVInput.getConfigurationKeySet(),
TezRuntimeConfiguration.getRuntimeAdditionalConfigKeySet()),
TezRuntimeConfiguration.getAllowedPrefixes())) {
if (value == null) {
this.conf.unset(key);
} else {
this.conf.set(key, value);
}
}
return this;
}
@Override
public Builder setAdditionalConfiguration(Map<String, String> confMap) {
Preconditions.checkNotNull(confMap, "ConfMap cannot be null");
Map<String, String> map = ConfigUtils.extractConfigurationMap(confMap,
Lists.newArrayList(OrderedGroupedKVInput.getConfigurationKeySet(),
TezRuntimeConfiguration.getRuntimeAdditionalConfigKeySet()), TezRuntimeConfiguration.getAllowedPrefixes());
ConfigUtils.addConfigMapToConfiguration(this.conf, map);
return this;
}
@Override
public Builder setFromConfiguration(Configuration conf) {
// Maybe ensure this is the first call ? Otherwise this can end up overriding other parameters
Preconditions.checkArgument(conf != null, "Configuration cannot be null");
Map<String, String> map = ConfigUtils.extractConfigurationMap(conf,
Lists.newArrayList(OrderedGroupedKVInput.getConfigurationKeySet(),
TezRuntimeConfiguration.getRuntimeAdditionalConfigKeySet()), TezRuntimeConfiguration.getAllowedPrefixes());
ConfigUtils.addConfigMapToConfiguration(this.conf, map);
return this;
}
public Builder setCompression(boolean enabled, @Nullable String compressionCodec,
@Nullable Map<String, String> codecConf) {
this.conf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS, enabled);
if (enabled && compressionCodec != null) {
this.conf
.set(TezRuntimeConfiguration.TEZ_RUNTIME_COMPRESS_CODEC, compressionCodec);
}
if (codecConf != null) {
// Merging the confs for now. Change to be specific in the future.
ConfigUtils.mergeConfsWithExclusions(this.conf, codecConf,
TezRuntimeConfiguration.getRuntimeConfigKeySet());
}
return this;
}
/**
* Set serialization class and the relevant comparator to be used for sorting.
* Providing custom serialization class could change the way, keys needs to be compared in
* sorting. Providing invalid comparator here could create invalid results.
*
* @param serializationClassName
* @param comparatorClassName
* @param serializerConf the serializer configuration. This can be null, and is a
* {@link java.util.Map} of key-value pairs. The keys should be limited
* to the ones required by the comparator.
* @return this object for further chained method calls
*/
public Builder setKeySerializationClass(String serializationClassName,
String comparatorClassName, @Nullable Map<String, String> serializerConf) {
Preconditions.checkArgument(serializationClassName != null,
"serializationClassName cannot be null");
Preconditions.checkArgument(comparatorClassName != null,
"comparator cannot be null");
this.conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, serializationClassName + ","
+ conf.get(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY));
setKeyComparatorClass(comparatorClassName, null);
if (serializerConf != null) {
// Merging the confs for now. Change to be specific in the future.
ConfigUtils.mergeConfsWithExclusions(this.conf, serializerConf,
TezRuntimeConfiguration.getRuntimeConfigKeySet());
}
return this;
}
/**
* Serialization class to be used for serializing values.
*
* @param serializationClassName the serialization class to prepend to the configured list
* @param serializerConf the serializer configuration. This can be null, and is a
* {@link java.util.Map} of key-value pairs. The keys should be limited
* to the ones required by the serializer.
* @return this object for further chained method calls
*/
public Builder setValueSerializationClass(String serializationClassName,
    @Nullable Map<String, String> serializerConf) {
  Preconditions.checkArgument(serializationClassName != null,
      "serializationClassName cannot be null");
  // Prepend the custom serialization ahead of whatever is already configured.
  String updatedSerializations = serializationClassName + ","
      + conf.get(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY);
  this.conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, updatedSerializations);
  if (serializerConf != null) {
    // Merging the confs for now. Change to be specific in the future.
    ConfigUtils.mergeConfsWithExclusions(this.conf, serializerConf,
        TezRuntimeConfiguration.getRuntimeConfigKeySet());
  }
  return this;
}
/**
* Create the actual configuration instance.
*
* @return a configured {@link OrderedGroupedKVInputConfig} backed by this builder's state
*/
public OrderedGroupedKVInputConfig build() {
  OrderedGroupedKVInputConfig builtConfig =
      new OrderedGroupedKVInputConfig(this.conf, this.useLegacyInput);
  return builtConfig;
}
}
}
| |
/*
* Copyright 2014 Ranjan Kumar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.restfiddle.controller.rest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import com.restfiddle.constant.NodeType;
import com.restfiddle.controller.util.NodeUtil;
import com.restfiddle.dao.ConversationRepository;
import com.restfiddle.dao.GenericEntityRepository;
import com.restfiddle.dao.NodeRepository;
import com.restfiddle.dao.ProjectRepository;
import com.restfiddle.dao.TagRepository;
import com.restfiddle.dao.util.TreeNodeBuilder;
import com.restfiddle.dto.NodeDTO;
import com.restfiddle.dto.TagDTO;
import com.restfiddle.entity.BaseNode;
import com.restfiddle.entity.Conversation;
import com.restfiddle.entity.GenericEntity;
import com.restfiddle.entity.Project;
import com.restfiddle.entity.Tag;
import com.restfiddle.util.EntityToDTO;
import com.restfiddle.util.TreeNode;
@RestController
@EnableAutoConfiguration
@ComponentScan
@Transactional
public class NodeController {
    private static final String PROJECT = "PROJECT";

    Logger logger = LoggerFactory.getLogger(NodeController.class);

    @Autowired
    private ProjectRepository projectRepository;

    @Autowired
    private NodeRepository nodeRepository;

    @Autowired
    private TagRepository tagRepository;

    @Autowired
    private ConversationRepository conversationRepository;

    @Autowired
    private GenericEntityRepository genericEntityRepository;

    @Autowired
    private GenerateApiController generateApiController;

    // Note : Creating a node requires parentId. Project-node is the root node and it is created during project creation.
    /**
     * Creates a new node as the last child of {@code parentId}. Optionally links an existing
     * conversation and/or generic entity, attaches tags, and triggers API generation when a
     * generic entity is attached.
     */
    @RequestMapping(value = "/api/nodes/{parentId}/children", method = RequestMethod.POST, headers = "Accept=application/json")
    public @ResponseBody
    NodeDTO create(@PathVariable("parentId") String parentId, @RequestBody NodeDTO nodeDTO) {
        logger.debug("Creating a new node with information: " + nodeDTO);
        BaseNode node = new BaseNode();
        node.setName(nodeDTO.getName());
        node.setDescription(nodeDTO.getDescription());
        node.setNodeType(nodeDTO.getNodeType());
        node.setStarred(nodeDTO.getStarred());
        BaseNode parentNode = nodeRepository.findOne(parentId);
        node.setWorkspaceId(parentNode.getWorkspaceId());
        node.setParentId(parentId);
        // Place the new node immediately after the parent's last existing child.
        long lastChildPosition = NodeUtil.findLastChildPosition(nodeRepository.getChildren(parentId));
        node.setPosition(lastChildPosition + 1);
        node = nodeRepository.save(node);
        if (nodeDTO.getConversationDTO() != null && nodeDTO.getConversationDTO().getId() != null) {
            // Link an already-persisted conversation to this node (bidirectional).
            Conversation conversation = conversationRepository.findOne(nodeDTO.getConversationDTO().getId());
            node.setConversation(conversation);
            conversation.setNodeId(node.getId());
            conversationRepository.save(conversation);
        }
        if (nodeDTO.getGenericEntityDTO() != null && nodeDTO.getGenericEntityDTO().getId() != null) {
            // Link an already-persisted generic entity to this node (bidirectional).
            GenericEntity genericEntity = genericEntityRepository.findOne(nodeDTO.getGenericEntityDTO().getId());
            genericEntity.setBaseNodeId(node.getId());
            genericEntityRepository.save(genericEntity);
            node.setGenericEntity(genericEntity);
        }
        Project project = projectRepository.findOne(nodeDTO.getProjectId());
        node.setProjectId(project.getId());
        BaseNode savedNode = nodeRepository.save(node);
        // set tags
        List<Tag> tags = new ArrayList<Tag>();
        List<TagDTO> tagDTOs = nodeDTO.getTags();
        if (tagDTOs != null && !tagDTOs.isEmpty()) {
            List<String> tagIds = new ArrayList<String>();
            for (TagDTO tagDTO : tagDTOs) {
                tagIds.add(tagDTO.getId());
            }
            tags = (List<Tag>) tagRepository.findAll(tagIds);
        }
        savedNode.setTags(tags);
        savedNode = nodeRepository.save(savedNode);
        // Generate APIs for Entity
        if (nodeDTO.getGenericEntityDTO() != null && nodeDTO.getGenericEntityDTO().getId() != null) {
            generateApiController.generateApi(savedNode);
        }
        return EntityToDTO.toDTO(savedNode);
    }

    /**
     * Deletes the node with the given id (recursively, including its children) and closes
     * the position gap left behind among the remaining siblings.
     */
    @RequestMapping(value = "/api/nodes/{id}", method = RequestMethod.DELETE, headers = "Accept=application/json")
    public @ResponseBody
    void delete(@PathVariable("id") String id) {
        logger.debug("Deleting node with id: " + id);
        BaseNode nodeToDelete = nodeRepository.findOne(id);
        Long deletedNodePosition = nodeToDelete.getPosition();
        deleteNodesRecursively(nodeToDelete);
        BaseNode parent = nodeRepository.findOne(nodeToDelete.getParentId());
        if (parent == null) {
            // Deleted a node whose parentId does not resolve to a persisted node (e.g. a
            // root-level node) : there are no sibling positions to re-pack.
            return;
        }
        List<BaseNode> children = nodeRepository.getChildren(parent.getId());
        if (children != null && !children.isEmpty()) {
            for (BaseNode baseNode : children) {
                // Shift up every sibling positioned after the deleted node.
                if (baseNode.getPosition() > deletedNodePosition) {
                    baseNode.setPosition(baseNode.getPosition() - 1);
                    nodeRepository.save(baseNode);
                }
            }
        }
    }

    /**
     * Copies the node with the given id (and, for container types, its subtree) under the
     * same parent, using the name/description supplied in the request body.
     */
    @RequestMapping(value = "/api/nodes/{id}/copy", method = RequestMethod.POST, headers = "Accept=application/json")
    public @ResponseBody
    void copy(@PathVariable("id") String id, @RequestBody NodeDTO nodeDTO) {
        BaseNode node = nodeRepository.findOne(id);
        // The in-memory rename only affects the copy created below; the original entity is
        // never saved with these values.
        node.setName(nodeDTO.getName());
        node.setDescription(nodeDTO.getDescription());
        copyNodesRecursively(node, node.getParentId());
    }

    /**
     * Creates a copy of {@code node} under {@code parentId}; recurses into children for
     * container node types (FOLDER, PROJECT, ENTITY).
     */
    public void copyNodesRecursively(BaseNode node, String parentId) {
        NodeDTO dto = EntityToDTO.toDTO(node);
        NodeDTO newNode = create(parentId, dto);
        String nodeType = node.getNodeType();
        if (nodeType != null
                && (NodeType.FOLDER.name().equalsIgnoreCase(nodeType) || NodeType.PROJECT.name().equalsIgnoreCase(nodeType) || NodeType.ENTITY.name()
                        .equalsIgnoreCase(nodeType))) {
            List<BaseNode> children = getChildren(node.getId());
            if (children != null && !children.isEmpty()) {
                for (BaseNode childNode : children) {
                    copyNodesRecursively(childNode, newNode.getId());
                }
            }
        }
        // This is just a workaround added for now.
        if (nodeType != null && NodeType.FOLDER.name().equalsIgnoreCase(nodeType)) {
            if (node.getGenericEntity() != null) {
                // TODO : genericEntityRepository.delete(node.getGenericEntity());
            }
        } else if (nodeType != null && NodeType.ENTITY.name().equalsIgnoreCase(nodeType)) {
            // TODO : genericEntityRepository.delete(node.getGenericEntity());
        }
    }

    /**
     * Deletes {@code node} and, for container node types, all of its descendants. Also
     * removes the generic entity attached to FOLDER/ENTITY nodes.
     */
    public void deleteNodesRecursively(BaseNode node) {
        String nodeType = node.getNodeType();
        if (nodeType != null
                && (NodeType.FOLDER.name().equalsIgnoreCase(nodeType) || NodeType.PROJECT.name().equalsIgnoreCase(nodeType) || NodeType.ENTITY.name()
                        .equalsIgnoreCase(nodeType))) {
            List<BaseNode> children = getChildren(node.getId());
            if (children != null && !children.isEmpty()) {
                for (BaseNode childNode : children) {
                    deleteNodesRecursively(childNode);
                }
            }
        }
        // This is just a workaround added for now.
        if (nodeType != null && NodeType.FOLDER.name().equalsIgnoreCase(nodeType)) {
            if (node.getGenericEntity() != null) {
                genericEntityRepository.delete(node.getGenericEntity());
            }
        } else if (nodeType != null && NodeType.ENTITY.name().equalsIgnoreCase(nodeType)) {
            genericEntityRepository.delete(node.getGenericEntity());
        }
        nodeRepository.delete(node);
    }

    /** Returns every node in the store. */
    @RequestMapping(value = "/api/nodes", method = RequestMethod.GET)
    public @ResponseBody
    List<BaseNode> findAll() {
        logger.debug("Finding all nodes");
        return nodeRepository.findAll();
    }

    /** Returns the node with the given id. */
    @RequestMapping(value = "/api/nodes/{id}", method = RequestMethod.GET)
    public @ResponseBody
    BaseNode findById(@PathVariable("id") String id) {
        logger.debug("Finding node by id: " + id);
        BaseNode baseNode = nodeRepository.findOne(id);
        // NOTE(review): call looks like it is meant to touch the association before the
        // node is serialized — confirm it is still required.
        baseNode.getConversation();
        return baseNode;
    }

    /** Returns the direct children of the given parent node. */
    @RequestMapping(value = "/api/nodes/{parentId}/children", method = RequestMethod.GET)
    public @ResponseBody
    List<BaseNode> getChildren(@PathVariable("parentId") String parentId) {
        logger.debug("Finding children nodes");
        return nodeRepository.getChildren(parentId);
    }

    /**
     * Partially updates a node: only non-null name/description/starred values from the
     * request body are applied.
     */
    @RequestMapping(value = "/api/nodes/{id}", method = RequestMethod.PUT, headers = "Accept=application/json")
    public @ResponseBody
    BaseNode update(@PathVariable("id") String id, @RequestBody NodeDTO updated) {
        logger.debug("Updating node with information: " + updated);
        // NOTE(review): looks up by the body's id rather than the {id} path variable —
        // behavior preserved, but confirm the two are always expected to match.
        BaseNode node = nodeRepository.findOne(updated.getId());
        if (updated.getName() != null) {
            node.setName(updated.getName());
        }
        if (updated.getDescription() != null) {
            node.setDescription(updated.getDescription());
        }
        if (updated.getStarred() != null) {
            node.setStarred(updated.getStarred());
        }
        nodeRepository.save(node);
        return node;
    }

    /** Convenience overload: tree with no search filter and default (position) sorting. */
    public TreeNode getProjectTree(String id) {
        return getProjectTree(id, null, null);
    }

    // Get tree-structure for a project. Id parameter is the project-reference node-id.
    /**
     * Builds the project tree rooted at the project node, optionally filtered by a search
     * term and sorted by the given field (prefix with '-' for descending order).
     */
    @RequestMapping(value = "/api/nodes/{id}/tree", method = RequestMethod.GET)
    public @ResponseBody
    TreeNode getProjectTree(@PathVariable("id") String id, @RequestParam(value = "search", required = false) String search,
            @RequestParam(value = "sort", required = false) String sort) {
        // Note : There must be a better way of doing it. This method is written in a hurry.
        // Get project Id from the reference node
        BaseNode projectRefNode = nodeRepository.findOne(id);
        String projectId = projectRefNode.getProjectId();
        // Get the list of nodes for a project.
        List<BaseNode> listOfNodes = nodeRepository.searchNodesFromAProject(projectId, search != null ? search : "");
        // Creating a map of nodes with node-id as key
        Map<String, BaseNode> baseNodeMap = new HashMap<String, BaseNode>();
        Map<String, TreeNode> treeNodeMap = new HashMap<String, TreeNode>();
        TreeNode rootNode = null;
        TreeNode treeNode;
        TreeNode parentTreeNode;
        for (BaseNode baseNode : listOfNodes) {
            String nodeId = baseNode.getId();
            baseNodeMap.put(nodeId, baseNode);
            // Reset per node. Previously declared outside the loop, so a node without a
            // conversation silently inherited the method type of the previous node.
            String methodType = "";
            if (baseNode.getConversation() != null) {
                methodType = baseNode.getConversation().getRfRequest().getMethodType();
            }
            treeNode = TreeNodeBuilder.createTreeNode(nodeId, baseNode.getName(), baseNode.getDescription(), baseNode.getWorkspaceId(), baseNode.getParentId(), baseNode.getPosition(), baseNode.getNodeType(),
                    baseNode.getStarred(), methodType, baseNode.getLastModifiedDate(), baseNode.getLastModifiedBy());
            treeNode.setProjectId(projectId);
            treeNodeMap.put(nodeId, treeNode);
        }
        for (BaseNode baseNode : listOfNodes) {
            String nodeId = baseNode.getId();
            String parentId = baseNode.getParentId();
            treeNode = treeNodeMap.get(nodeId);
            if (NodeType.PROJECT.name().equals(baseNode.getNodeType())) {
                // Identify root node for a project
                rootNode = treeNode;
            } else {
                // Build parent node
                parentTreeNode = treeNodeMap.get(parentId);
                // Set parent tree node
                treeNode.setParent(parentTreeNode);
                // Add child node to the parent
                parentTreeNode.getChildren().add(treeNode);
            }
        }
        if (search != null && !search.trim().equals("")) {
            // Prune childless non-project nodes so only matches (and their ancestors) remain.
            for (BaseNode baseNode : listOfNodes) {
                if (baseNode.getNodeType() != null && !NodeType.PROJECT.name().equals(baseNode.getNodeType())) {
                    TreeNode node = treeNodeMap.get(baseNode.getId());
                    if (node.getChildren().isEmpty()) {
                        TreeNode parent = treeNodeMap.get(baseNode.getParentId());
                        parent.getChildren().remove(node);
                    }
                }
            }
        }
        int order = 1;
        if (sort != null) {
            // A leading '-' requests descending order, e.g. "-name".
            if (sort.trim().charAt(0) == '-') {
                order = -1;
                sort = sort.substring(1);
            }
            sortTree(rootNode, sort, order);
        } else {
            sortTree(rootNode, "position", order);
        }
        return rootNode;
    }

    /**
     * Sorts the whole tree under {@code rootNode} by the given field ("lastModified",
     * "name", or position by default); {@code order} is 1 for ascending, -1 for descending.
     */
    private void sortTree(TreeNode rootNode, String sort, final int order) {
        if (rootNode != null && rootNode.getChildren() != null) {
            Comparator<TreeNode> comparator;
            switch (sort) {
            case "lastModified":
                comparator = new Comparator<TreeNode>() {
                    @Override
                    public int compare(TreeNode o1, TreeNode o2) {
                        // Nodes with a date sort after nodes without one (before negation).
                        int val = 0;
                        if (o1.getLastModifiedDate() != null && o2.getLastModifiedDate() != null) {
                            val = o1.getLastModifiedDate().compareTo(o2.getLastModifiedDate());
                        } else if (o1.getLastModifiedDate() != null) {
                            val = 1;
                        } else if (o2.getLastModifiedDate() != null) {
                            val = -1;
                        }
                        return order * val;
                    }
                };
                break;
            case "name":
                comparator = new Comparator<TreeNode>() {
                    @Override
                    public int compare(TreeNode o1, TreeNode o2) {
                        return order * o1.getName().compareTo(o2.getName());
                    }
                };
                break;
            default:
                comparator = new Comparator<TreeNode>() {
                    @Override
                    public int compare(TreeNode o1, TreeNode o2) {
                        return order * o1.getPosition().compareTo(o2.getPosition());
                    }
                };
                break;
            }
            sortTreeNodes(rootNode, comparator);
        }
    }

    /**
     * Recursively sorts children bottom-up and bubbles up the first child's
     * last-modified date onto the parent.
     */
    private void sortTreeNodes(TreeNode rootNode, Comparator<TreeNode> comparator) {
        if (rootNode != null && rootNode.getChildren() != null) {
            List<TreeNode> childs = rootNode.getChildren();
            for (TreeNode node : childs) {
                sortTreeNodes(node, comparator);
            }
            Collections.sort(childs, comparator);
            if (!childs.isEmpty()) {
                rootNode.setLastModifiedDate(childs.get(0).getLastModifiedDate());
            }
        }
    }

    /**
     * Returns a page of starred nodes for a workspace, optionally filtered by a search term
     * and sorted by name or last-modified date (default: last-modified, descending).
     */
    @RequestMapping(value = "/api/workspaces/{workspaceId}/nodes/starred", method = RequestMethod.GET)
    public @ResponseBody
    List<NodeDTO> findStarredNodes(@PathVariable("workspaceId") String workspaceId, @RequestParam(value = "page", required = false) Integer page,
            @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "search", required = false) String search,
            @RequestParam(value = "sortBy", required = false) String sortBy) {
        logger.debug("Finding starred nodes.");
        int pageNo = 0;
        if (page != null && page > 0) {
            pageNo = page;
        }
        int numberOfRecords = 10;
        if (limit != null && limit > 0) {
            numberOfRecords = limit;
        }
        Sort sort = new Sort(Direction.DESC, "lastModifiedDate");
        if ("name".equals(sortBy)) {
            sort = new Sort(Direction.ASC, "name");
        } else if ("lastRun".equals(sortBy)) {
            sort = new Sort(Direction.DESC, "lastModifiedDate");
        } else if ("nameDesc".equals(sortBy)) {
            sort = new Sort(Direction.DESC, "name");
        }
        Pageable pageable = new PageRequest(pageNo, numberOfRecords, sort);
        Page<BaseNode> paginatedStarredNodes = nodeRepository.findStarredNodes(workspaceId, search != null ? search : "", pageable);
        List<BaseNode> starredNodes = paginatedStarredNodes.getContent();
        long totalElements = paginatedStarredNodes.getTotalElements();
        List<NodeDTO> response = new ArrayList<NodeDTO>();
        for (BaseNode item : starredNodes) {
            response.add(EntityToDTO.toDTO(item));
        }
        // Use the logger instead of writing directly to stdout.
        logger.debug("totalElements : " + totalElements);
        return response;
    }

    /** Replaces the node's tag list with the tags referenced in the request body. */
    @RequestMapping(value = "/api/nodes/{id}/tags", method = RequestMethod.POST, headers = "Accept=application/json")
    public @ResponseBody
    Boolean addTags(@PathVariable("id") String id, @RequestBody List<TagDTO> tagDTOs) {
        logger.debug("Adding the following tags: " + tagDTOs);
        BaseNode node = nodeRepository.findOne(id);
        List<Tag> tags = new ArrayList<Tag>();
        if (tagDTOs != null && !tagDTOs.isEmpty()) {
            List<String> tagIds = new ArrayList<String>();
            for (TagDTO tagDTO : tagDTOs) {
                tagIds.add(tagDTO.getId());
            }
            tags = (List<Tag>) tagRepository.findAll(tagIds);
        }
        node.setTags(tags);
        nodeRepository.save(node);
        return Boolean.TRUE;
    }

    /**
     * Returns the request nodes of a project, optionally filtered by a search term.
     * NOTE(review): page/limit/sort parameters are accepted but currently unused.
     */
    @RequestMapping(value = "/api/nodes/{id}/requests", method = RequestMethod.GET)
    public @ResponseBody
    List<BaseNode> getProjectRequests(@PathVariable("id") String id, @RequestParam(value = "page", required = false) Integer page,
            @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "search", required = false) String search,
            @RequestParam(value = "sort", required = false) String sortBy) {
        // Note : There must be a better way of doing it. This method is written in a hurry.
        // Get project Id from the reference node
        BaseNode projectRefNode = nodeRepository.findOne(id);
        String projectId = projectRefNode.getProjectId();
        List<BaseNode> requestsNodes = nodeRepository.findRequestsFromAProject(projectId, search != null ? search : "");
        return requestsNodes;
    }

    /**
     * Moves a node relative to a reference node ("over" = first child, "before"/"after" =
     * sibling) and re-packs sibling positions in both the old and new parent folders.
     */
    @RequestMapping(value = "/api/nodes/{id}/move", method = RequestMethod.POST, headers = "Accept=application/json")
    public @ResponseBody
    void move(@PathVariable("id") String id, @RequestParam(value = "newRefNodeId", required = true) String newRefNodeId,
            @RequestParam(value = "position", required = true) String position) {
        BaseNode node = nodeRepository.findOne(id);
        Long oldPosition = node.getPosition();
        BaseNode newRefNode = nodeRepository.findOne(newRefNodeId);
        BaseNode oldParentNode = nodeRepository.findOne(node.getParentId());
        Long newPosition;
        String newParentId;
        if (position.equals("over")) {
            newParentId = newRefNode.getId();
            newPosition = (long) 1;
        } else if (position.equals("before")) {
            newParentId = newRefNode.getParentId();
            newPosition = newRefNode.getPosition();
        } else {
            newParentId = newRefNode.getParentId();
            newPosition = newRefNode.getPosition() + 1;
        }
        // Not allowed to save request under a request
        if (position.equals("over") && !(newRefNode.getNodeType().equalsIgnoreCase(PROJECT) || newRefNode.getNodeType().equalsIgnoreCase("FOLDER"))) {
            return;
        }
        // Special case where -1 getting saved for non-project node
        if (!(node.getNodeType() != null && node.getNodeType().equalsIgnoreCase(PROJECT)) && newParentId.equals("-1")) {
            return;
        }
        // update new folder
        List<BaseNode> newFolderChildren;
        if (newRefNode.getNodeType() != null && (newRefNode.getNodeType().equalsIgnoreCase(PROJECT) || newRefNode.getNodeType().equalsIgnoreCase("FOLDER"))) {
            newFolderChildren = nodeRepository.getChildren(newRefNode.getId());
        } else {
            newFolderChildren = nodeRepository.getChildren(newRefNode.getParentId());
        }
        node.setParentId(newParentId);
        node.setPosition(newPosition);
        nodeRepository.save(node);
        for (BaseNode newFolderChild : newFolderChildren) {
            // Fixed: ids were previously compared with '!=', which compares String
            // references and effectively always held, so the moved node itself could be
            // shifted too. Use equals() for value comparison.
            if (newFolderChild.getPosition() >= newPosition && !newFolderChild.getId().equals(id)) {
                newFolderChild.setPosition(newFolderChild.getPosition() + 1);
                nodeRepository.save(newFolderChild);
            }
        }
        // If node is moved within the same folder, updating new folder is sufficient
        if (oldParentNode.getId().equals(newParentId)) {
            return;
        }
        // update old folder
        List<BaseNode> oldFolderChildren = nodeRepository.getChildren(oldParentNode.getId());
        if (oldFolderChildren != null && !oldFolderChildren.isEmpty()) {
            for (BaseNode oldFolderChild : oldFolderChildren) {
                if (oldFolderChild.getPosition() >= oldPosition) {
                    oldFolderChild.setPosition(oldFolderChild.getPosition() - 1);
                    nodeRepository.save(oldFolderChild);
                }
            }
        }
    }

    /** Returns the projects of a workspace, optionally filtered by a search term. */
    @RequestMapping(value = "/api/workspaces/{workspaceId}/projects", method = RequestMethod.GET)
    public @ResponseBody
    List<BaseNode> findProjectsFromAWorkspace(@PathVariable("workspaceId") String workspaceId,
            @RequestParam(value = "search", required = false) String search) {
        return nodeRepository.findProjectsfromAWorkspace(workspaceId, search != null ? search : "");
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.createDescriptionArgWithDefaults;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.Either;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.Label;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.junit.Test;
public class AppleBuildRulesTest {
@Test
public void testAppleLibraryIsXcodeTargetBuildRuleType() throws Exception {
    // apple_library rules should be reported as Xcode build targets.
    boolean isXcodeTarget =
        AppleBuildRules.isXcodeTargetBuildRuleType(AppleLibraryDescription.TYPE);
    assertTrue(isXcodeTarget);
}
@Test
public void testIosResourceIsNotXcodeTargetBuildRuleType() throws Exception {
    // apple_resource rules should not be reported as Xcode build targets.
    boolean isXcodeTarget =
        AppleBuildRules.isXcodeTargetBuildRuleType(AppleResourceDescription.TYPE);
    assertFalse(isXcodeTarget);
}
@Test
public void testAppleTestIsXcodeTargetTestBuildRuleType() throws Exception {
    // Build an apple_test rule and verify it is classified as an Xcode test target.
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    AppleTestBuilder testBuilder = new AppleTestBuilder(
        BuildTargetFactory.newInstance("//foo:xctest#iphoneos-i386"))
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
        .setContacts(Optional.of(ImmutableSortedSet.<String>of()))
        .setLabels(Optional.of(ImmutableSortedSet.<Label>of()))
        .setDeps(Optional.of(ImmutableSortedSet.<BuildTarget>of()));
    TargetNode<?> testNode = testBuilder.build();
    BuildRule builtTestRule = testBuilder.build(
        ruleResolver,
        new FakeProjectFilesystem(),
        TargetGraphFactory.newInstance(ImmutableSet.<TargetNode<?>>of(testNode)));
    assertTrue(AppleBuildRules.isXcodeTargetTestBuildRule(builtTestRule));
}
@Test
public void testAppleLibraryIsNotXcodeTargetTestBuildRuleType() throws Exception {
    // A plain apple_library build rule must not be classified as a test build rule.
    BuildRuleParams libraryParams = new FakeBuildRuleParamsBuilder("//foo:lib").build();
    AppleLibraryDescription.Arg defaultArg =
        createDescriptionArgWithDefaults(FakeAppleRuleDescriptions.LIBRARY_DESCRIPTION);
    BuildRuleResolver emptyResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    BuildRule libraryRule = FakeAppleRuleDescriptions
        .LIBRARY_DESCRIPTION
        .createBuildRule(
            TargetGraph.EMPTY,
            libraryParams,
            emptyResolver,
            defaultArg);
    assertFalse(AppleBuildRules.isXcodeTargetTestBuildRule(libraryRule));
}
@Test
public void testXctestIsTestBundleExtension() throws Exception {
    // .xctest bundles should be recognized as test bundles.
    boolean isTestBundle =
        AppleBuildRules.isXcodeTargetTestBundleExtension(AppleBundleExtension.XCTEST);
    assertTrue(isTestBundle);
}
@Test
public void testOctestIsTestBundleExtension() throws Exception {
    // .octest bundles should be recognized as test bundles.
    boolean isTestBundle =
        AppleBuildRules.isXcodeTargetTestBundleExtension(AppleBundleExtension.OCTEST);
    assertTrue(isTestBundle);
}
@Test
public void testRecursiveTargetsIncludesBundleBinaryFromOutsideBundle() throws Exception {
    // Graph: root depends on a library and on a bundle whose binary is that same library.
    BuildTarget libraryTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?> libraryNode = AppleLibraryBuilder
        .createBuilder(libraryTarget)
        .build();
    BuildTarget bundleTarget = BuildTargetFactory.newInstance("//foo:bundle");
    TargetNode<?> bundleNode = AppleBundleBuilder
        .createBuilder(bundleTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
        .setBinary(libraryTarget)
        .build();
    BuildTarget rootTarget = BuildTargetFactory.newInstance("//foo:root");
    TargetNode<?> rootNode = AppleLibraryBuilder
        .createBuilder(rootTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(libraryTarget, bundleTarget)))
        .build();
    // In BUILDING mode the traversal from root should yield both direct deps — the
    // library is included even though it is also the bundle's binary.
    Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
        TargetGraphFactory.newInstance(ImmutableSet.of(libraryNode, bundleNode, rootNode)),
        AppleBuildRules.RecursiveDependenciesMode.BUILDING,
        rootNode,
        Optional.<ImmutableSet<BuildRuleType>>absent());
    assertTrue(Iterables.elementsEqual(ImmutableSortedSet.of(libraryNode, bundleNode), rules));
}
@Test
public void exportedDepsOfDylibsAreCollectedForLinking() throws Exception {
    // Graph: root -> bar:framework (binary bar:lib), and bar:lib both depends on and
    // exports foo:framework (binary foo:lib).
    BuildTarget fooLibTarget =
        BuildTargetFactory.newInstance("//foo:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> fooLibNode = AppleLibraryBuilder
        .createBuilder(fooLibTarget)
        .build();
    BuildTarget fooFrameworkTarget = BuildTargetFactory.newInstance("//foo:framework");
    TargetNode<?> fooFrameworkNode = AppleBundleBuilder
        .createBuilder(fooFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(fooLibTarget)
        .build();
    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> barLibNode = AppleLibraryBuilder
        .createBuilder(barLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(fooFrameworkTarget)))
        .setExportedDeps(Optional.of(ImmutableSortedSet.of(fooFrameworkTarget)))
        .build();
    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?> barFrameworkNode = AppleBundleBuilder
        .createBuilder(barFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(barLibTarget)
        .build();
    BuildTarget rootTarget = BuildTargetFactory.newInstance("//foo:root");
    TargetNode<?> rootNode = AppleLibraryBuilder
        .createBuilder(rootTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(barFrameworkTarget)))
        .build();
    // LINKING mode should pick up foo:framework via bar:lib's exported deps, in
    // addition to the directly-depended-on bar:framework.
    Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
        TargetGraphFactory.newInstance(
            ImmutableSet.of(
                rootNode,
                fooLibNode,
                fooFrameworkNode,
                barLibNode,
                barFrameworkNode)),
        AppleBuildRules.RecursiveDependenciesMode.LINKING,
        rootNode,
        Optional.<ImmutableSet<BuildRuleType>>absent());
    assertEquals(
        ImmutableSortedSet.of(
            barFrameworkNode,
            fooFrameworkNode),
        ImmutableSortedSet.copyOf(rules));
}
@Test
public void exportedDepsAreCollectedForCopying() throws Exception {
    // Graph: baz:framework (binary baz:lib) -> bar:framework (binary bar:lib), where
    // bar:lib both depends on and exports foo:framework (binary foo:lib).
    BuildTarget fooLibTarget =
        BuildTargetFactory.newInstance("//foo:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> fooLibNode = AppleLibraryBuilder
        .createBuilder(fooLibTarget)
        .build();
    BuildTarget fooFrameworkTarget = BuildTargetFactory.newInstance("//foo:framework");
    TargetNode<?> fooFrameworkNode = AppleBundleBuilder
        .createBuilder(fooFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(fooLibTarget)
        .build();
    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> barLibNode = AppleLibraryBuilder
        .createBuilder(barLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(fooFrameworkTarget)))
        .setExportedDeps(Optional.of(ImmutableSortedSet.of(fooFrameworkTarget)))
        .build();
    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?> barFrameworkNode = AppleBundleBuilder
        .createBuilder(barFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(barLibTarget)
        .build();
    BuildTarget bazLibTarget =
        BuildTargetFactory.newInstance("//baz:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> bazLibNode = AppleLibraryBuilder
        .createBuilder(bazLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(barFrameworkTarget)))
        .build();
    BuildTarget bazFrameworkTarget = BuildTargetFactory.newInstance("//baz:framework");
    TargetNode<?> bazFrameworkNode = AppleBundleBuilder
        .createBuilder(bazFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(bazLibTarget)
        .build();
    ImmutableSet<TargetNode<?>> targetNodes =
        ImmutableSet.<TargetNode<?>>builder()
            .add(
                fooLibNode,
                fooFrameworkNode,
                barLibNode,
                barFrameworkNode,
                bazLibNode,
                bazFrameworkNode)
            .build();
    // COPYING mode from baz:framework should collect bar:framework plus foo:framework
    // (reached through bar:lib's exported deps).
    Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
        TargetGraphFactory.newInstance(targetNodes),
        AppleBuildRules.RecursiveDependenciesMode.COPYING,
        bazFrameworkNode,
        Optional.<ImmutableSet<BuildRuleType>>absent());
    assertEquals(
        ImmutableSortedSet.of(
            barFrameworkNode,
            fooFrameworkNode),
        ImmutableSortedSet.copyOf(rules));
}
@Test
public void linkingStopsAtGenruleDep() throws Exception {
    // Pass a random static lib in a genrule and make sure a framework
    // depending on the genrule doesn't link against or copy in the static lib.
    BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?> fooLibNode = AppleLibraryBuilder
        .createBuilder(fooLibTarget)
        .build();
    // The genrule consumes foo:lib's output as a source.
    BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
    TargetNode<?> fooGenruleNode = GenruleBuilder
        .newGenruleBuilder(fooGenruleTarget)
        .setOut("foo")
        .setCmd("echo hi > $OUT")
        .setSrcs(ImmutableList.<SourcePath>of(new BuildTargetSourcePath(fooLibTarget)))
        .build();
    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> barLibNode = AppleLibraryBuilder
        .createBuilder(barLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(fooGenruleTarget)))
        .build();
    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?> barFrameworkNode = AppleBundleBuilder
        .createBuilder(barFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(barLibTarget)
        .build();
    ImmutableSet<TargetNode<?>> targetNodes =
        ImmutableSet.<TargetNode<?>>builder()
            .add(
                fooLibNode,
                fooGenruleNode,
                barLibNode,
                barFrameworkNode)
            .build();
    // LINKING mode should include the genrule itself but not traverse through it to
    // reach foo:lib.
    Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
        TargetGraphFactory.newInstance(targetNodes),
        AppleBuildRules.RecursiveDependenciesMode.LINKING,
        barFrameworkNode,
        Optional.<ImmutableSet<BuildRuleType>>absent());
    assertEquals(
        ImmutableSortedSet.of(fooGenruleNode),
        ImmutableSortedSet.copyOf(rules));
}
@Test
public void copyingStopsAtGenruleDep() throws Exception {
    // Pass a random static lib in a genrule and make sure a framework
    // depending on the genrule doesn't link against or copy in the static lib.
    BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?> fooLibNode = AppleLibraryBuilder
        .createBuilder(fooLibTarget)
        .build();
    // The genrule consumes foo:lib's output as a source.
    BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
    TargetNode<?> fooGenruleNode = GenruleBuilder
        .newGenruleBuilder(fooGenruleTarget)
        .setOut("foo")
        .setCmd("echo hi > $OUT")
        .setSrcs(ImmutableList.<SourcePath>of(new BuildTargetSourcePath(fooLibTarget)))
        .build();
    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?> barLibNode = AppleLibraryBuilder
        .createBuilder(barLibTarget)
        .setDeps(Optional.of(ImmutableSortedSet.of(fooGenruleTarget)))
        .build();
    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?> barFrameworkNode = AppleBundleBuilder
        .createBuilder(barFrameworkTarget)
        .setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
        .setBinary(barLibTarget)
        .build();
    ImmutableSet<TargetNode<?>> targetNodes =
        ImmutableSet.<TargetNode<?>>builder()
            .add(
                fooLibNode,
                fooGenruleNode,
                barLibNode,
                barFrameworkNode)
            .build();
    // COPYING mode should include the genrule itself but not traverse through it to
    // reach foo:lib.
    Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
        TargetGraphFactory.newInstance(targetNodes),
        AppleBuildRules.RecursiveDependenciesMode.COPYING,
        barFrameworkNode,
        Optional.<ImmutableSet<BuildRuleType>>absent());
    assertEquals(
        ImmutableSortedSet.of(fooGenruleNode),
        ImmutableSortedSet.copyOf(rules));
}
@Test
public void buildingStopsAtGenruleDepButNotAtBundleDep() throws Exception {
// Pass a random static lib in a genrule and make sure a framework
// depending on the genrule doesn't build the dependencies of that genrule.
// Graph shape: bazFramework -> bazLib -> barFramework -> barLib -> fooGenrule,
// with fooLib referenced only via the genrule's srcs.
BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
TargetNode<?> fooLibNode = AppleLibraryBuilder
.createBuilder(fooLibTarget)
.build();
BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
TargetNode<?> fooGenruleNode = GenruleBuilder
.newGenruleBuilder(fooGenruleTarget)
.setOut("foo")
.setCmd("echo hi > $OUT")
.setSrcs(ImmutableList.<SourcePath>of(new BuildTargetSourcePath(fooLibTarget)))
.build();
BuildTarget barLibTarget =
BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
TargetNode<?> barLibNode = AppleLibraryBuilder
.createBuilder(barLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooGenruleTarget)))
.build();
BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
TargetNode<?> barFrameworkNode = AppleBundleBuilder
.createBuilder(barFrameworkTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
.setBinary(barLibTarget)
.build();
// A second framework layered on top of the first, to verify traversal
// continues through bundle deps even though it stops at genrule deps.
BuildTarget bazLibTarget =
BuildTargetFactory.newInstance("//baz:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
TargetNode<?> bazLibNode = AppleLibraryBuilder
.createBuilder(bazLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barFrameworkTarget)))
.build();
BuildTarget bazFrameworkTarget = BuildTargetFactory.newInstance("//baz:framework");
TargetNode<?> bazFrameworkNode = AppleBundleBuilder
.createBuilder(bazFrameworkTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.FRAMEWORK))
.setBinary(bazLibTarget)
.build();
ImmutableSet<TargetNode<?>> targetNodes =
ImmutableSet.<TargetNode<?>>builder()
.add(
fooLibNode,
fooGenruleNode,
barLibNode,
barFrameworkNode,
bazLibNode,
bazFrameworkNode)
.build();
// BUILDING mode: expect the intermediate framework and the genrule, but not
// the genrule's own src lib (fooLib) nor the libs hidden behind bundles.
Iterable<TargetNode<?>> rules = AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
TargetGraphFactory.newInstance(targetNodes),
AppleBuildRules.RecursiveDependenciesMode.BUILDING,
bazFrameworkNode,
Optional.<ImmutableSet<BuildRuleType>>absent());
assertEquals(
ImmutableSortedSet.of(barFrameworkNode, fooGenruleNode),
ImmutableSortedSet.copyOf(rules));
}
}
| |
package com.marklogic.semantics.sesame;
// https://github.com/marklogic/marklogic-sesame/issues/282
//
import static java.util.Arrays.asList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import com.marklogic.client.DatabaseClient;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.repository.RepositoryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.marklogic.client.DatabaseClientFactory;
import com.marklogic.client.semantics.GraphManager;
/**
 * Exercises concurrent persistence against a MarkLogic repository in three
 * configurations (connection-per-call, single shared connection, and
 * connection-per-thread with explicit transactions), reproducing the
 * intermittent transaction failures reported in marklogic-sesame issue #282.
 */
public class MultiThreadedPersistenceTest extends SesameTestBase {

    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Fixed set of subject identifiers; each one gets its own graph plus an
    // entry in the shared http://graph/meta graph (see entitiesFor()).
    List<String> identifiers = asList(
            "subject1", "subject2", "subject3","subject4","subject5",
            "subject6","subject7","subject8","subject9","subject10",
            "subject11","subject12","subject13","subject14","subject15",
            "subject16", "subject17", "subject18","subject19","subject20",
            "subject21", "subject22", "subject23","subject24","subject25",
            "subject26", "subject27", "subject28","subject29","subject30",
            "subject31", "subject32", "subject33","subject34","subject35",
            "subject36", "subject37", "subject38","subject39","subject40");

    /** Removes the meta graph and every per-identifier graph written by a test. */
    @After
    public void tearDown()
            throws Exception {
        GraphManager gmgr = adminClient.newGraphManager();
        gmgr.delete("http://graph/meta");
        for (String id : identifiers)
        {
            gmgr.delete("http://foo/graph/" + id);
        }
    }

    /**
     * Persists via a service that opens a fresh connection per persist() call,
     * hammered by 10 threads x (40 identifiers x 20 repeats) tasks.
     */
    @Test
    public void multiThreadedPersist() throws RepositoryException, InterruptedException {
        final PersistenceService persistenceService = new PersistenceService(SesameTestBase.host, SesameTestBase.port, SesameTestBase.adminUser, SesameTestBase.adminPassword, DatabaseClientFactory.Authentication.DIGEST.toString());
        //persist data with multiple threads against the persistence service - simulate multiple concurrent requests against a tomcat deployed ingestion service
        //results in intermittent MarkLogicTransactionExceptions in executor threads
        ExecutorService executorService = Executors.newFixedThreadPool(10);
        try {
            for(final String identifier: identifiers) {
                for (int i=0; i<20; i++) {
                    executorService.submit(
                            new Runnable() {
                                @Override
                                public void run() {
                                    persistenceService.persist(entitiesFor(identifier));
                                }
                            }
                    );
                }
            }
            executorService.shutdown();
            executorService.awaitTermination(60, TimeUnit.SECONDS);
        } finally {
            if(!executorService.isTerminated()) {
                System.out.println("cancel non finished tasks");
            }
            executorService.shutdownNow();
            System.out.println("shut down finished");
        }
    }

    /**
     * Opens a new {@link MarkLogicRepositoryConnection} for every persist()
     * call and closes it afterwards.
     */
    class PersistenceService {
        private MarkLogicRepository markLogicRepository;

        public PersistenceService(String host, int port, String user, String password, String digest) {
            markLogicRepository = new MarkLogicRepository(host, port, user, password, digest);
            try {
                markLogicRepository.initialize();
            } catch (RepositoryException e) {
                throw new RuntimeException("error initialising repo", e);
            }
        }

        /**
         * Writes each entity's statements into its graph on a dedicated
         * connection; the connection is always closed, even on failure.
         */
        public void persist(List<Entity> entities) {
            MarkLogicRepositoryConnection connection = null;
            try {
                connection = markLogicRepository.getConnection();
                for(Entity e : entities) {
                    connection.add(e.getStatements(), e.getGraph());
                }
            } catch (RepositoryException e) {
                //print to sysout as thread exceptions are not propagated up to main thread
                e.printStackTrace();
                throw new RuntimeException(e);
            } finally {
                // Guard against NPE: getConnection() may have thrown before
                // 'connection' was assigned.
                if (connection != null) {
                    try {
                        connection.close();
                    } catch (RepositoryException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    /**
     * Persists through one shared connection wrapped in a single transaction
     * opened/committed on the main thread while workers add concurrently.
     */
    @Test
    public void singleConnectionMultiThreadedPersist() throws RepositoryException, InterruptedException {
        final SingleConnectionPersistenceService persistenceService = new SingleConnectionPersistenceService(SesameTestBase.host, SesameTestBase.port, SesameTestBase.adminUser, SesameTestBase.adminPassword, DatabaseClientFactory.Authentication.DIGEST.toString());
        //persist data with multiple threads against singleConnectionPersistence service - simulate multiple concurrent requests against a tomcat deployed ingestion service
        //results in intermittent MarkLogicTransactionExceptions in executor threads
        ExecutorService executorService = Executors.newFixedThreadPool(10);
        try {
            persistenceService.connection.begin();
            for(final String identifier: identifiers) {
                for (int i=0; i<20; i++) {
                    executorService.submit(
                            new Runnable() {
                                @Override
                                public void run() {
                                    persistenceService.persist(entitiesFor(identifier));
                                }
                            }
                    );
                }
            }
            // NOTE(review): commit() runs as soon as all tasks are *submitted*,
            // not when they complete — part of the race this test reproduces.
            persistenceService.connection.commit();
            executorService.shutdown();
            executorService.awaitTermination(60, TimeUnit.SECONDS);
        } finally {
            if(!executorService.isTerminated()) {
                System.out.println("cancel non finished tasks");
            }
            executorService.shutdownNow();
            System.out.println("shut down finished");
        }
    }

    /** Shares one connection across all persist() calls; flushes via sync(). */
    class SingleConnectionPersistenceService {
        private MarkLogicRepositoryConnection connection;

        public SingleConnectionPersistenceService(String host, int port, String user, String password, String digest) {
            MarkLogicRepository markLogicRepository = new MarkLogicRepository(host, port, user, password, digest);
            try {
                markLogicRepository.initialize();
                connection = markLogicRepository.getConnection();
            } catch (RepositoryException e) {
                throw new RuntimeException("error initialising repo", e);
            }
        }

        public void persist(List<Entity> entities){
            try {
                for(Entity e : entities) {
                    connection.add(e.getStatements(), e.getGraph());
                }
            } catch (RepositoryException e) {
                //print to sysout as thread exceptions are not propagated up to main thread
                e.printStackTrace();
                throw new RuntimeException(e);
            } finally {
                try {
                    connection.sync();
                } catch (MarkLogicSesameException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Persists with a connection per call, each wrapped in its own
     * begin/commit transaction.
     */
    @Test
    public void multipleConnectionMultiThreadedPersist() throws RepositoryException, InterruptedException {
        final MultipleConnectionPersistenceService persistenceService = new MultipleConnectionPersistenceService(SesameTestBase.host, SesameTestBase.port, SesameTestBase.adminUser, SesameTestBase.adminPassword, DatabaseClientFactory.Authentication.DIGEST.toString());
        //persist data with multiple threads against singleConnectionPersistence service - simulate multiple concurrent requests against a tomcat deployed ingestion service
        //results in intermittent MarkLogicTransactionExceptions in executor threads
        ExecutorService executorService = Executors.newFixedThreadPool(10);
        try {
            for(final String identifier: identifiers) {
                for (int i=0; i<20; i++) {
                    executorService.submit(
                            new Runnable() {
                                @Override
                                public void run() {
                                    try {
                                        persistenceService.persist(entitiesFor(identifier));
                                    } catch (RepositoryException e) {
                                        e.printStackTrace();
                                    }
                                }
                            }
                    );
                }
            }
            executorService.shutdown();
            executorService.awaitTermination(60, TimeUnit.SECONDS);
        } finally {
            if(!executorService.isTerminated()) {
                System.out.println("cancel non finished tasks");
            }
            executorService.shutdownNow();
            System.out.println("shut down finished");
        }
    }

    /** One repository, a fresh transactional connection per persist() call. */
    class MultipleConnectionPersistenceService {
        MarkLogicRepository markLogicRepository;

        public MultipleConnectionPersistenceService(String host, int port, String user, String password, String digest) {
            try {
                DatabaseClient databaseClient = DatabaseClientFactory.newClient(host, port, user, password, DatabaseClientFactory.Authentication.valueOf("DIGEST"));
                markLogicRepository = new MarkLogicRepository(databaseClient);
                markLogicRepository.initialize();
            } catch (RepositoryException e) {
                throw new RuntimeException("error initialising repo", e);
            }
        }

        public void persist(List<Entity> entities) throws RepositoryException {
            MarkLogicRepositoryConnection connection = markLogicRepository.getConnection();
            try {
                connection.begin();
                for(Entity e : entities) {
                    connection.add(e.getStatements(), e.getGraph());
                }
                connection.commit();
            } catch (RepositoryException e) {
                //print to sysout as thread exceptions are not propagated up to main thread
                e.printStackTrace();
                throw new RuntimeException(e);
            } finally {
                // Close in finally so a failed add/commit no longer leaks the
                // connection (it was previously closed only on success).
                try {
                    connection.close();
                } catch (RepositoryException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /** Value object pairing a set of statements with the target graph(s). */
    class Entity {
        private Collection<Statement> statements;
        private Resource[] graph;

        Entity(Collection<Statement> statements, Resource[] graph) {
            this.statements = statements;
            this.graph = graph;
        }

        public Collection<Statement> getStatements() {
            return statements;
        }

        public Resource[] getGraph() {
            return graph;
        }
    }

    /**
     * Builds two entities for an identifier: three statements destined for the
     * identifier's own graph, plus timestamp/version metadata statements
     * destined for the shared http://graph/meta graph.
     */
    private List<Entity> entitiesFor(String identifier) {
        ValueFactoryImpl vf = ValueFactoryImpl.getInstance();
        Collection<Statement> statements = asList(
                vf.createStatement(
                        vf.createURI("http://" + identifier),
                        vf.createURI("http://predicate/a"),
                        vf.createLiteral("object value a")
                ),
                vf.createStatement(
                        vf.createURI("http://" + identifier),
                        vf.createURI("http://predicate/b"),
                        vf.createLiteral("object value b")
                ),
                vf.createStatement(
                        vf.createURI("http://" + identifier),
                        vf.createURI("http://predicate/c"),
                        vf.createLiteral("object value c")
                )
        );
        Resource[] graphs = new Resource[]{vf.createURI("http://foo/graph/" + identifier)};
        Entity entity = new Entity(statements, graphs);

        Collection<Statement> graphStatements = asList(
                vf.createStatement(
                        vf.createURI("http://foo/graph/" + identifier),
                        vf.createURI("http://graph/timestamp"),
                        vf.createLiteral(System.currentTimeMillis())
                ),
                vf.createStatement(
                        vf.createURI("http://foo/graph/" + identifier),
                        vf.createURI("http://graph/version"),
                        vf.createLiteral("the graph version")
                )
        );
        Resource[] graphContext = new Resource[]{vf.createURI("http://graph/meta")};
        Entity entity1 = new Entity(graphStatements, graphContext);

        return asList(entity, entity1);
    }
}
| |
/*
* Copyright (c) 2017 manavista. All rights reserved.
*/
package jp.manavista.lessonmanager.fragment;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.google.firebase.analytics.FirebaseAnalytics;
import com.loopeer.itemtouchhelperextension.ItemTouchHelperExtension;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import io.github.luizgrp.sectionedrecyclerviewadapter.SectionedRecyclerViewAdapter;
import io.reactivex.disposables.Disposable;
import io.reactivex.disposables.Disposables;
import jp.manavista.lessonmanager.R;
import jp.manavista.lessonmanager.activity.MemberActivity;
import jp.manavista.lessonmanager.activity.MemberLessonScheduleListActivity;
import jp.manavista.lessonmanager.constants.analytics.ContentType;
import jp.manavista.lessonmanager.constants.analytics.Event;
import jp.manavista.lessonmanager.constants.analytics.Param;
import jp.manavista.lessonmanager.facade.MemberListFacade;
import jp.manavista.lessonmanager.injector.DependencyInjector;
import jp.manavista.lessonmanager.model.vo.MemberVo;
import jp.manavista.lessonmanager.service.MemberService;
import jp.manavista.lessonmanager.view.decoration.ItemDecoration;
import jp.manavista.lessonmanager.view.helper.SwipeDeleteTouchHelperCallback;
import jp.manavista.lessonmanager.view.operation.MemberOperation;
import jp.manavista.lessonmanager.view.section.MemberSection;
import lombok.Getter;
import static com.google.firebase.analytics.FirebaseAnalytics.Param.CONTENT_TYPE;
import static jp.manavista.lessonmanager.activity.MemberLessonScheduleListActivity.Extra.MEMBER_ID;
import static jp.manavista.lessonmanager.activity.MemberLessonScheduleListActivity.Extra.MEMBER_NAME;
/**
*
* Member List Fragment
*
* <p>
* Overview:<br>
* Display a categoriesList of members. <br>
* Provide interface for editing and creating new.
* </p>
*/
public final class MemberListFragment extends Fragment {

    /** Logger tag string */
    private static final String TAG = MemberListFragment.class.getSimpleName();

    /** Activity Contents */
    private Activity contents;
    /** Member list RecyclerView */
    private RecyclerView view;
    /** Shown instead of the list when there are no members */
    private ViewGroup emptyState;

    /** Member RecyclerView Adapter */
    @Getter
    private SectionedRecyclerViewAdapter sectionAdapter;
    /** Member Adapter Section */
    private MemberSection memberSection;
    /** Item Touch Helper */
    private ItemTouchHelperExtension itemTouchHelper;
    /** Member categoriesList disposable */
    private Disposable disposable;

    @Inject
    SharedPreferences preferences;
    @Inject
    MemberService memberService;
    @Inject
    MemberListFacade facade;

    private FirebaseAnalytics analytics;

    /** Constructor */
    public MemberListFragment() {
        // Required empty public constructor
    }

    /**
     *
     * New Instance
     *
     * <p>
     * Overview:<br>
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     * </p>
     *
     * @return A new instance of fragment MemberListFragment.
     */
    public static MemberListFragment newInstance() {
        MemberListFragment fragment = new MemberListFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        disposable = Disposables.empty();
        analytics = FirebaseAnalytics.getInstance(getContext());
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_member_list, container, false);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        DependencyInjector.appComponent().inject(this);
        this.contents = getActivity();

        view = contents.findViewById(R.id.rv);
        emptyState = contents.findViewById(R.id.empty_state);

        view.setHasFixedSize(true);
        LinearLayoutManager manager = new LinearLayoutManager(contents);
        view.setLayoutManager(manager);
        view.addItemDecoration(new ItemDecoration(contents));

        sectionAdapter = new SectionedRecyclerViewAdapter();
        memberSection = MemberSection.newInstance(memberOperation);
        sectionAdapter.addSection(memberSection);
        view.setAdapter(sectionAdapter);

        final ItemTouchHelperExtension.Callback callback = new SwipeDeleteTouchHelperCallback();
        itemTouchHelper = new ItemTouchHelperExtension(callback);
        itemTouchHelper.setClickToRecoverAnimation(false);
        itemTouchHelper.attachToRecyclerView(view);
    }

    /**
     * Reloads the member list on every resume and toggles between the list
     * and the empty-state view depending on the result.
     */
    @Override
    public void onResume() {
        super.onResume();

        final List<MemberVo> list = new ArrayList<>();
        final String key = getString(R.string.key_preference_member_name_display);
        final String defaultValue = getString(R.string.value_preference_member_name_display);
        // parseInt avoids the needless boxing of Integer.valueOf().
        final int displayNameCode = Integer.parseInt(preferences.getString(key, defaultValue));

        // Dispose the previous subscription so repeated resume cycles do not
        // leak still-active streams before replacing it.
        disposable.dispose();
        disposable = memberService.getVoListAll(displayNameCode).subscribe(list::add, throwable -> Log.e(TAG, "Can not get member List all.", throwable), () -> {
            memberSection.setList(list);
            sectionAdapter.notifyDataSetChanged();
            if( list.isEmpty() ) {
                view.setVisibility(View.GONE);
                emptyState.setVisibility(View.VISIBLE);
            } else {
                view.setVisibility(View.VISIBLE);
                emptyState.setVisibility(View.GONE);
            }
        });
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        disposable.dispose();
    }

    /** Row-level callbacks: open schedule list, edit, and delete (with confirm). */
    private final MemberOperation memberOperation = new MemberOperation() {

        @Override
        public void lessonList(MemberVo dto, int position) {
            itemTouchHelper.closeOpened();
            final Intent intent = new Intent(contents, MemberLessonScheduleListActivity.class);
            intent.putExtra(MEMBER_ID, dto.getId());
            intent.putExtra(MEMBER_NAME, dto.getDisplayName());
            contents.startActivity(intent);
        }

        @Override
        public void edit(final long id, final int position) {
            itemTouchHelper.closeOpened();
            final Intent intent = new Intent(contents, MemberActivity.class);
            intent.putExtra(MemberActivity.EXTRA_MEMBER_ID, id);
            contents.startActivityForResult(intent, MemberActivity.RequestCode.EDIT);
        }

        @Override
        public void delete(final long id, final int position) {
            final String key = getString(R.string.key_preference_general_delete_confirm);
            // Confirmation dialog is skippable via preference (defaults to on).
            if( preferences.getBoolean(key, true) ) {
                final AlertDialog.Builder builder = new AlertDialog.Builder(contents);
                builder.setTitle(R.string.title_member_list_dialog_delete_confirm)
                        .setIcon(R.drawable.ic_delete_black)
                        .setMessage(R.string.message_member_list_dialog_delete_confirm)
                        .setPositiveButton(android.R.string.ok, onOkListener(id, position))
                        .setNegativeButton(android.R.string.cancel, onCancelListener)
                        .show();
            } else {
                execDelete(id, position);
            }
        }

        private final DialogInterface.OnClickListener onCancelListener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i) {
                itemTouchHelper.closeOpened();
            }
        };

        private DialogInterface.OnClickListener onOkListener(final long id, final int position) {
            return (dialogInterface, i) -> {
                itemTouchHelper.closeOpened();
                execDelete(id, position);
            };
        }

        /**
         * Deletes the member, updates the adapter/empty-state, shows a toast,
         * and logs a delete event to Firebase Analytics.
         */
        private void execDelete(final long id, final int position) {
            // Dispose any in-flight subscription before starting the delete.
            disposable.dispose();
            disposable = facade.delete(id).subscribe(rows -> {

                memberSection.getList().remove(position);
                sectionAdapter.notifyItemRemoved(position);

                if( memberSection.getList().isEmpty() ) {
                    view.setVisibility(View.GONE);
                    emptyState.setVisibility(View.VISIBLE);
                } else {
                    view.setVisibility(View.VISIBLE);
                    emptyState.setVisibility(View.GONE);
                }

                final String message = getString(R.string.message_member_list_delete_member);
                Toast.makeText(contents, message, Toast.LENGTH_SHORT).show();

                final Bundle bundle = new Bundle();
                bundle.putString(CONTENT_TYPE, ContentType.Member.label());
                bundle.putInt(Param.Rows.label(), rows);
                analytics.logEvent(Event.Delete.label(), bundle);
            }, throwable -> Log.e(TAG, "Can not delete a member!", throwable));
        }
    };
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HBaseFileSystem;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableInfoMissingException;
/**
* Implementation of {@link TableDescriptors} that reads descriptors from the
* passed filesystem. It expects descriptors to be in a file under the
* table's directory in FS. Can be read-only -- i.e. does not modify
* the filesystem or can be read and write.
*
* <p>Also has utility for keeping up the table descriptors tableinfo file.
* The table schema file is kept under the table directory in the filesystem.
* It has a {@link #TABLEINFO_NAME} prefix and then a suffix that is the
* edit sequenceid: e.g. <code>.tableinfo.0000000003</code>. This sequenceid
* is always increasing. It starts at zero. The table schema file with the
* highest sequenceid has the most recent schema edit. Usually there is one file
* only, the most recent but there may be short periods where there are more
* than one file. Old files are eventually cleaned. Presumption is that there
* will not be lots of concurrent clients making table schema edits. If so,
* the below needs a bit of a reworking and perhaps some supporting api in hdfs.
*/
public class FSTableDescriptors implements TableDescriptors {
private static final Log LOG = LogFactory.getLog(FSTableDescriptors.class);
private final FileSystem fs;
// Root of the HBase installation in the filesystem; tables live beneath it.
private final Path rootdir;
// When true, remove()/add() never mutate the filesystem, only the cache.
private final boolean fsreadonly;
// Advisory instrumentation counters (not synchronized).
long cachehits = 0;
long invocations = 0;
/** The file name used to store HTD in HDFS */
public static final String TABLEINFO_NAME = ".tableinfo";
// This cache does not age out the old stuff. Thinking is that the amount
// of data we keep up in here is so small, no need to do occasional purge.
// TODO.
private final Map<String, TableDescriptorModtime> cache =
new ConcurrentHashMap<String, TableDescriptorModtime>();
/**
 * Data structure to hold modification time and table descriptor.
 * Immutable pair cached per table so get() can skip re-reading the
 * descriptor when the tableinfo file's modtime has not advanced.
 */
static class TableDescriptorModtime {
private final HTableDescriptor descriptor;
private final long modtime;
TableDescriptorModtime(final long modtime, final HTableDescriptor htd) {
this.descriptor = htd;
this.modtime = modtime;
}
long getModtime() {
return this.modtime;
}
HTableDescriptor getTableDescriptor() {
return this.descriptor;
}
}
/**
 * Read-write constructor; delegates to the main constructor with
 * fsreadOnly=false.
 * @param fs filesystem to read descriptors from
 * @param rootdir root directory of the HBase installation
 */
public FSTableDescriptors(final FileSystem fs, final Path rootdir) {
this(fs, rootdir, false);
}
/**
 * @param fs
 * @param rootdir
 * @param fsreadOnly True if we are read-only when it comes to filesystem
 * operations; i.e. on remove, we do not do delete in fs.
 */
public FSTableDescriptors(final FileSystem fs, final Path rootdir,
final boolean fsreadOnly) {
super();
this.fs = fs;
this.rootdir = rootdir;
this.fsreadonly = fsreadOnly;
}
/* (non-Javadoc)
 * @see org.apache.hadoop.hbase.TableDescriptors#getHTableDescriptor(java.lang.String)
 */
// Convenience overload: converts the byte[] name and delegates to get(String).
@Override
public HTableDescriptor get(final byte [] tablename)
throws IOException {
return get(Bytes.toString(tablename));
}
/* (non-Javadoc)
 * @see org.apache.hadoop.hbase.TableDescriptors#getTableDescriptor(byte[])
 */
// Returns the descriptor for a table, serving -ROOT-/.META. from compiled-in
// constants, refusing non-user directories, and otherwise consulting the
// modtime-validated cache before re-reading from the filesystem.
@Override
public HTableDescriptor get(final String tablename)
throws IOException {
invocations++;
// Catalog tables have fixed, compiled-in descriptors; never read from fs.
if (HTableDescriptor.ROOT_TABLEDESC.getNameAsString().equals(tablename)) {
cachehits++;
return HTableDescriptor.ROOT_TABLEDESC;
}
if (HTableDescriptor.META_TABLEDESC.getNameAsString().equals(tablename)) {
cachehits++;
return HTableDescriptor.META_TABLEDESC;
}
// .META. and -ROOT- is already handled. If some one tries to get the descriptor for
// .logs, .oldlogs or .corrupt throw an exception.
if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(tablename)) {
throw new IOException("No descriptor found for table = " + tablename);
}
// Look in cache of descriptors.
TableDescriptorModtime cachedtdm = this.cache.get(tablename);
if (cachedtdm != null) {
// Check mod time has not changed (this is trip to NN).
if (getTableInfoModtime(this.fs, this.rootdir, tablename) <= cachedtdm.getModtime()) {
cachehits++;
return cachedtdm.getTableDescriptor();
}
}
// Cache miss or stale entry: re-read descriptor + modtime from the fs.
TableDescriptorModtime tdmt = null;
try {
tdmt = getTableDescriptorModtime(this.fs, this.rootdir, tablename);
// NOTE(review): catching NullPointerException looks deliberate here
// (legacy guard around readTableDescriptor internals) — confirm before
// narrowing it.
} catch (NullPointerException e) {
LOG.debug("Exception during readTableDecriptor. Current table name = "
+ tablename, e);
} catch (IOException ioe) {
LOG.debug("Exception during readTableDecriptor. Current table name = "
+ tablename, ioe);
}
if (tdmt == null) {
LOG.warn("The following folder is in HBase's root directory and " +
"doesn't contain a table descriptor, " +
"do consider deleting it: " + tablename);
} else {
this.cache.put(tablename, tdmt);
}
return tdmt == null ? null : tdmt.getTableDescriptor();
}
/* (non-Javadoc)
 * @see org.apache.hadoop.hbase.TableDescriptors#getTableDescriptors(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path)
 */
// Walks every table directory under rootdir and collects the descriptors
// that can be read; directories whose descriptor is missing are skipped.
@Override
public Map<String, HTableDescriptor> getAll()
throws IOException {
  Map<String, HTableDescriptor> htds = new TreeMap<String, HTableDescriptor>();
  for (Path tableDir : FSUtils.getTableDirs(fs, rootdir)) {
    String tableName = tableDir.getName();
    try {
      HTableDescriptor htd = get(tableName);
      if (htd != null) {
        htds.put(tableName, htd);
      }
    } catch (FileNotFoundException fnfe) {
      // inability of retrieving one HTD shouldn't stop getting the remaining
      LOG.warn("Trouble retrieving htd", fnfe);
    }
  }
  return htds;
}
// Adds (or replaces) a user-table descriptor: writes it to the filesystem
// (unless read-only) and refreshes the cache entry with the new modtime.
// Catalog tables and non-user directories are rejected.
@Override
public void add(HTableDescriptor htd) throws IOException {
if (Bytes.equals(HConstants.ROOT_TABLE_NAME, htd.getName())) {
throw new NotImplementedException();
}
if (Bytes.equals(HConstants.META_TABLE_NAME, htd.getName())) {
throw new NotImplementedException();
}
if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(htd.getNameAsString())) {
throw new NotImplementedException();
}
if (!this.fsreadonly) updateHTableDescriptor(this.fs, this.rootdir, htd);
long modtime = getTableInfoModtime(this.fs, this.rootdir, htd.getNameAsString());
this.cache.put(htd.getNameAsString(), new TableDescriptorModtime(modtime, htd));
}
// Removes a table: deletes its directory from the filesystem (skipped when
// read-only) and evicts the cache entry. Returns the previously cached
// descriptor, or null if none was cached.
@Override
public HTableDescriptor remove(final String tablename)
throws IOException {
if (!this.fsreadonly) {
Path tabledir = FSUtils.getTablePath(this.rootdir, tablename);
if (this.fs.exists(tabledir)) {
if (!HBaseFileSystem.deleteDirFromFileSystem(fs, tabledir)) {
throw new IOException("Failed delete of " + tabledir.toString());
}
}
}
TableDescriptorModtime tdm = this.cache.remove(tablename);
return tdm == null ? null : tdm.getTableDescriptor();
}
/**
 * Checks if <code>.tableinfo<code> exists for given table
 *
 * @param fs file system
 * @param rootdir root directory of HBase installation
 * @param tableName name of table
 * @return true if exists
 * @throws IOException
 */
public static boolean isTableInfoExists(FileSystem fs, Path rootdir,
String tableName) throws IOException {
FileStatus status = getTableInfoPath(fs, rootdir, tableName);
return status == null? false: fs.exists(status.getPath());
}
// Resolves the table's directory under rootdir and delegates to the
// tabledir-based overload.
private static FileStatus getTableInfoPath(final FileSystem fs,
final Path rootdir, final String tableName)
throws IOException {
Path tabledir = FSUtils.getTablePath(rootdir, tableName);
return getTableInfoPath(fs, tabledir);
}
/**
 * Looks under the table directory in the filesystem for files with a
 * {@link #TABLEINFO_NAME} prefix. Returns reference to the 'latest' instance
 * (highest sequenceid) and deletes any older versions found alongside it.
 * @param fs filesystem to search
 * @param tabledir table directory containing the tableinfo file(s)
 * @return The 'current' tableinfo file, or null if none exists.
 * @throws IOException
 */
public static FileStatus getTableInfoPath(final FileSystem fs,
    final Path tabledir)
throws IOException {
  FileStatus [] status = FSUtils.listStatus(fs, tabledir, new PathFilter() {
    @Override
    public boolean accept(Path p) {
      // Accept any file that starts with TABLEINFO_NAME
      return p.getName().startsWith(TABLEINFO_NAME);
    }
  });
  if (status == null || status.length < 1) return null;
  // Reverse order: the newest (highest sequenceid) file sorts first.
  Arrays.sort(status, new FileStatusFileNameComparator());
  if (status.length > 1) {
    // Clean away old versions of .tableinfo
    for (int i = 1; i < status.length; i++) {
      Path p = status[i].getPath();
      // Clean up old versions
      if (!HBaseFileSystem.deleteFileFromFileSystem(fs, p)) {
        // Bug fix: log the path that failed, not the FileStatus[] array
        // reference (which prints as e.g. "[Lorg.apache.hadoop.fs.FileStatus;@...").
        LOG.warn("Failed cleanup of " + p);
      } else {
        LOG.debug("Cleaned up old tableinfo file " + p);
      }
    }
  }
  return status[0];
}
/**
 * Compare {@link FileStatus} instances by {@link Path#getName()}.
 * Returns in reverse order.
 * NOTE(review): FileStatus's natural ordering compares by path, which here
 * differs only in the tableinfo sequenceid suffix — confirm that the javadoc's
 * "by getName" claim matches the natural order in all cases.
 */
static class FileStatusFileNameComparator
implements Comparator<FileStatus> {
  @Override
  public int compare(FileStatus left, FileStatus right) {
    // Reverse by swapping operands instead of negating compareTo():
    // -compareTo() is broken if the result is Integer.MIN_VALUE.
    return right.compareTo(left);
  }
}
/**
 * Width of the sequenceid that is a suffix on a tableinfo file.
 */
static final int WIDTH_OF_SEQUENCE_ID = 10;
/*
 * @param number Number to use as suffix.
 * @return Returns zero-prefixed WIDTH_OF_SEQUENCE_ID-byte wide decimal
 * version of passed number (Does absolute in case number is negative).
 * (The old comment said "5-byte"; the actual width is WIDTH_OF_SEQUENCE_ID.)
 */
static String formatTableInfoSequenceId(final int number) {
byte [] b = new byte[WIDTH_OF_SEQUENCE_ID];
int d = Math.abs(number);
// Fill digits right-to-left; leading positions become '0'.
for (int i = b.length - 1; i >= 0; i--) {
b[i] = (byte)((d % 10) + '0');
d /= 10;
}
return Bytes.toString(b);
}
/**
 * Regex to eat up sequenceid suffix on a .tableinfo file.
 * Use regex because we may encounter old-style .tableinfo files where there
 * is no sequenceid on the end; group 2 captures the bare digits when the
 * suffix is present.
 */
private static final Pattern SUFFIX =
  Pattern.compile(TABLEINFO_NAME + "(\\.([0-9]{" + WIDTH_OF_SEQUENCE_ID + "}))?$");
/**
 * Extracts the sequenceid suffix from a tableinfo file name.
 * @param p Path to a <code>.tableinfo</code> file.
 * @return The current editid or 0 if none found.
 * @throws IllegalArgumentException if the file name is not a tableinfo name
 */
static int getTableInfoSequenceid(final Path p) {
  if (p == null) {
    return 0;
  }
  final Matcher matcher = SUFFIX.matcher(p.getName());
  if (!matcher.matches()) {
    throw new IllegalArgumentException(p.toString());
  }
  // Group 2 holds the bare digits; old-style names carry no suffix at all.
  final String sequence = matcher.group(2);
  return (sequence == null || sequence.isEmpty()) ? 0 : Integer.parseInt(sequence);
}
/**
 * Builds the path of a tableinfo file under the given table directory.
 * @param tabledir table directory the file lives under
 * @param sequenceid sequenceid to encode as the file-name suffix
 * @return Name of tableinfo file.
 */
static Path getTableInfoFileName(final Path tabledir, final int sequenceid) {
  final String fileName =
    TABLEINFO_NAME + "." + formatTableInfoSequenceId(sequenceid);
  return new Path(tabledir, fileName);
}
/**
 * @param fs filesystem to search
 * @param rootdir hbase root directory
 * @param tableName name of the table
 * @return Modification time for the table {@link #TABLEINFO_NAME} file
 * or <code>0</code> if no tableinfo file found.
 * @throws IOException if listing the table directory fails
 */
static long getTableInfoModtime(final FileSystem fs, final Path rootdir,
    final String tableName)
throws IOException {
  final FileStatus status = getTableInfoPath(fs, rootdir, tableName);
  if (status == null) {
    return 0;
  }
  return status.getModificationTime();
}
/**
 * Get HTD from HDFS.
 * @param fs filesystem to read from
 * @param hbaseRootDir hbase root directory
 * @param tableName name of the table, as bytes
 * @return Descriptor or null if none found.
 * @throws IOException on read failure
 */
public static HTableDescriptor getTableDescriptor(FileSystem fs,
    Path hbaseRootDir, byte[] tableName)
throws IOException {
  HTableDescriptor htd = null;
  try {
    TableDescriptorModtime tdmt =
      getTableDescriptorModtime(fs, hbaseRootDir, Bytes.toString(tableName));
    htd = tdmt == null ? null : tdmt.getTableDescriptor();
  } catch (NullPointerException e) {
    // Deliberate: getTableDescriptorModtime declares NullPointerException;
    // this overload swallows it and returns null rather than propagating.
    LOG.debug("Exception during readTableDecriptor. Current table name = "
      + Bytes.toString(tableName), e);
  }
  return htd;
}
/**
 * Get HTD from HDFS by table name, propagating any exception.
 * @param fs filesystem to read from
 * @param hbaseRootDir hbase root directory
 * @param tableName name of the table
 * @return Descriptor or null if the table is a catalog table.
 * @throws IOException on read failure
 */
static HTableDescriptor getTableDescriptor(FileSystem fs,
    Path hbaseRootDir, String tableName) throws NullPointerException, IOException {
  final TableDescriptorModtime tdmt =
    getTableDescriptorModtime(fs, hbaseRootDir, tableName);
  if (tdmt == null) {
    return null;
  }
  return tdmt.getTableDescriptor();
}
/**
 * Reads the descriptor plus tableinfo modtime for a table by name.
 * Catalog tables (-ROOT- and .META.) keep no tableinfo file, so they
 * yield null.
 * @param fs filesystem to read from
 * @param hbaseRootDir hbase root directory
 * @param tableName name of the table
 * @return descriptor/modtime pair, or null for catalog tables
 * @throws IOException on read failure
 */
static TableDescriptorModtime getTableDescriptorModtime(FileSystem fs,
    Path hbaseRootDir, String tableName) throws NullPointerException, IOException{
  // ignore both -ROOT- and .META. tables
  final byte [] tableBytes = Bytes.toBytes(tableName);
  if (Bytes.compareTo(tableBytes, HConstants.ROOT_TABLE_NAME) == 0
      || Bytes.compareTo(tableBytes, HConstants.META_TABLE_NAME) == 0) {
    return null;
  }
  return getTableDescriptorModtime(fs, FSUtils.getTablePath(hbaseRootDir, tableName));
}
/**
 * Reads the current tableinfo file under {@code tableDir} and deserializes
 * it into an {@link HTableDescriptor} paired with the file's modification
 * time.
 * @param fs filesystem to read from
 * @param tableDir table directory; must not be null
 * @return descriptor plus tableinfo-file modtime
 * @throws NullPointerException if tableDir is null
 * @throws TableInfoMissingException if no .tableinfo file exists
 * @throws IOException on read failure
 */
static TableDescriptorModtime getTableDescriptorModtime(FileSystem fs, Path tableDir)
throws NullPointerException, IOException {
  if (tableDir == null) throw new NullPointerException();
  FileStatus status = getTableInfoPath(fs, tableDir);
  if (status == null) {
    throw new TableInfoMissingException("No .tableinfo file under "
      + tableDir.toUri());
  }
  FSDataInputStream fsDataInputStream = fs.open(status.getPath());
  HTableDescriptor hTableDescriptor = null;
  try {
    hTableDescriptor = new HTableDescriptor();
    hTableDescriptor.readFields(fsDataInputStream);
  } finally {
    // Always release the stream, even if deserialization fails.
    fsDataInputStream.close();
  }
  return new TableDescriptorModtime(status.getModificationTime(), hTableDescriptor);
}
/**
 * Get HTD from the tableinfo file under the given table directory.
 * @param fs filesystem to read from
 * @param tableDir table directory
 * @return Descriptor, or null if none was resolved.
 * @throws IOException on read failure
 */
public static HTableDescriptor getTableDescriptor(FileSystem fs, Path tableDir)
throws IOException, NullPointerException {
  final TableDescriptorModtime tdmt = getTableDescriptorModtime(fs, tableDir);
  if (tdmt == null) {
    return null;
  }
  return tdmt.getTableDescriptor();
}
/**
 * Update table descriptor: writes a new tableinfo file for the table,
 * superseding the current one.
 * @param fs filesystem to write to
 * @param rootdir hbase root directory
 * @param hTableDescriptor descriptor to persist
 * @return Path of the new tableinfo file (never null; failure throws).
 * @throws IOException Thrown if the update failed (the write returned null).
 */
static Path updateHTableDescriptor(FileSystem fs, Path rootdir,
    HTableDescriptor hTableDescriptor)
throws IOException {
  Path tableDir = FSUtils.getTablePath(rootdir, hTableDescriptor.getName());
  // Pass the current tableinfo file so the old version is cleaned up.
  Path p = writeTableDescriptor(fs, hTableDescriptor, tableDir,
    getTableInfoPath(fs, tableDir));
  if (p == null) throw new IOException("Failed update");
  LOG.info("Updated tableinfo=" + p);
  return p;
}
/**
 * Deletes a table's tableinfo file from the file system if it exists. Used
 * in unit tests.
 * @param tableName name of the table whose descriptor should be removed
 * @param conf configuration used to locate the filesystem and root dir
 * @throws IOException on filesystem error
 */
public static void deleteTableDescriptorIfExists(String tableName,
    Configuration conf) throws IOException {
  final FileSystem fs = FSUtils.getCurrentFileSystem(conf);
  final FileStatus status =
    getTableInfoPath(fs, FSUtils.getRootDir(conf), tableName);
  if (status == null) {
    return;
  }
  // deleteDirectory works for either a file or a directory.
  if (fs.exists(status.getPath())) {
    FSUtils.deleteDirectory(fs, status.getPath());
  }
}
/**
 * Writes out a new tableinfo file with the next sequenceid, retrying on
 * name clashes, and deletes the superseded file on success.
 * @param fs filesystem to write to
 * @param hTableDescriptor descriptor to serialize
 * @param tableDir table directory to write into
 * @param status current tableinfo file, or null if none exists yet
 * @return Descriptor file path, or null if we failed the write.
 * @throws IOException on filesystem error
 */
private static Path writeTableDescriptor(final FileSystem fs,
    final HTableDescriptor hTableDescriptor, final Path tableDir,
    final FileStatus status)
throws IOException {
  // Get temporary dir into which we'll first write a file to avoid
  // half-written file phenomenon.
  Path tmpTableDir = new Path(tableDir, ".tmp");
  // What is current sequenceid? We read the current sequenceid from
  // the current file. After we read it, another thread could come in and
  // compete with us writing out next version of file. The below retries
  // should help in this case some but its hard to do guarantees in face of
  // concurrent schema edits.
  int currentSequenceid =
    status == null? 0: getTableInfoSequenceid(status.getPath());
  int sequenceid = currentSequenceid;
  // Put arbitrary upperbound on how often we retry
  int retries = 10;
  int retrymax = currentSequenceid + retries;
  Path tableInfoPath = null;
  do {
    sequenceid += 1;
    // Write under .tmp first, then rename into place.
    Path p = getTableInfoFileName(tmpTableDir, sequenceid);
    if (fs.exists(p)) {
      LOG.debug(p + " exists; retrying up to " + retries + " times");
      continue;
    }
    try {
      writeHTD(fs, p, hTableDescriptor);
      tableInfoPath = getTableInfoFileName(tableDir, sequenceid);
      if (!HBaseFileSystem.renameDirForFileSystem(fs, p, tableInfoPath)) {
        throw new IOException("Failed rename of " + p + " to " + tableInfoPath);
      }
    } catch (IOException ioe) {
      // Presume clash of names or something; go around again.
      LOG.debug("Failed write and/or rename; retrying", ioe);
      if (!FSUtils.deleteDirectory(fs, p)) {
        LOG.warn("Failed cleanup of " + p);
      }
      tableInfoPath = null;
      continue;
    }
    // Cleanup old schema file.
    if (status != null) {
      if (!FSUtils.deleteDirectory(fs, status.getPath())) {
        LOG.warn("Failed delete of " + status.getPath() + "; continuing");
      }
    }
    break;
  } while (sequenceid < retrymax);
  return tableInfoPath;
}
/**
 * Serializes the descriptor to {@code p}: the Writable form first, then a
 * blank line and a human-readable toString dump for debugging.
 * @param fs filesystem to write to
 * @param p destination path
 * @param htd descriptor to write
 * @throws IOException on write failure
 */
private static void writeHTD(final FileSystem fs, final Path p,
    final HTableDescriptor htd)
throws IOException {
  FSDataOutputStream out = HBaseFileSystem.createPathOnFileSystem(fs, p, false);
  try {
    htd.write(out);
    out.write('\n');
    out.write('\n');
    // Append readable version of the descriptor for eyeballing the file.
    out.write(Bytes.toBytes(htd.toString()));
  } finally {
    // Always close the stream, even if serialization fails.
    out.close();
  }
}
/**
 * Create new HTableDescriptor in HDFS. Happens when we are creating table.
 * Convenience overload that never overwrites an existing descriptor.
 *
 * @param htableDescriptor descriptor to persist
 * @param conf configuration used to locate the filesystem and root dir
 * @return true if the descriptor file was created
 * @throws IOException on filesystem error
 */
public static boolean createTableDescriptor(final HTableDescriptor htableDescriptor,
    Configuration conf)
throws IOException {
  return createTableDescriptor(htableDescriptor, conf, false);
}
/**
 * Create new HTableDescriptor in HDFS. Happens when we are creating table. If
 * forceCreation is true then even if a previous table descriptor is present it
 * will be overwritten.
 *
 * @param htableDescriptor descriptor to persist
 * @param conf configuration used to locate the filesystem and root dir
 * @param forceCreation True if we are to overwrite existing file.
 * @return true if the descriptor file was created
 * @throws IOException on filesystem error
 */
static boolean createTableDescriptor(final HTableDescriptor htableDescriptor,
    final Configuration conf, boolean forceCreation)
throws IOException {
  final FileSystem fs = FSUtils.getCurrentFileSystem(conf);
  final Path rootdir = FSUtils.getRootDir(conf);
  return createTableDescriptor(fs, rootdir, htableDescriptor, forceCreation);
}
/**
 * Create new HTableDescriptor in HDFS. Happens when we are creating table.
 * Used by tests. Never overwrites an existing descriptor.
 * @param fs filesystem to write to
 * @param rootdir hbase root directory
 * @param htableDescriptor descriptor to persist
 * @return true if the descriptor file was created
 * @throws IOException on filesystem error
 */
public static boolean createTableDescriptor(FileSystem fs, Path rootdir,
    HTableDescriptor htableDescriptor)
throws IOException {
  final boolean forceCreation = false;
  return createTableDescriptor(fs, rootdir, htableDescriptor, forceCreation);
}
/**
 * Create new HTableDescriptor in HDFS. Happens when we are creating table. If
 * forceCreation is true then even if a previous table descriptor is present it
 * will be overwritten.
 *
 * @param fs filesystem to write to
 * @param rootdir hbase root directory
 * @param htableDescriptor descriptor to persist
 * @param forceCreation overwrite an existing descriptor when true
 * @return True if we successfully created file.
 * @throws IOException on filesystem error
 */
public static boolean createTableDescriptor(FileSystem fs, Path rootdir,
    HTableDescriptor htableDescriptor, boolean forceCreation)
throws IOException {
  final Path tabledir =
    FSUtils.getTablePath(rootdir, htableDescriptor.getNameAsString());
  return createTableDescriptorForTableDirectory(
    fs, tabledir, htableDescriptor, forceCreation);
}
/**
 * Create a new HTableDescriptor in HDFS in the specified table directory.
 * Happens when we create a new table or snapshot a table.
 * @param fs filesystem where the descriptor should be written
 * @param tabledir directory under which we should write the file
 * @param htableDescriptor description of the table to write
 * @param forceCreation if <tt>true</tt>, then even if a previous table
 * descriptor is present it will be overwritten
 * @return <tt>true</tt> if we successfully created the file, <tt>false</tt>
 * if the file already exists and we weren't forcing the descriptor creation.
 * @throws IOException if a filesystem error occurs
 */
public static boolean createTableDescriptorForTableDirectory(FileSystem fs, Path tabledir,
    HTableDescriptor htableDescriptor, boolean forceCreation) throws IOException {
  final FileStatus status = getTableInfoPath(fs, tabledir);
  if (status != null) {
    LOG.info("Current tableInfoPath = " + status.getPath());
    // Only probe the filesystem when not forcing; preserves short-circuit
    // order of the original checks.
    if (!forceCreation && fs.exists(status.getPath()) && status.getLen() > 0) {
      LOG.info("TableInfo already exists.. Skipping creation");
      return false;
    }
  }
  return writeTableDescriptor(fs, htableDescriptor, tabledir, status) != null;
}
}
| |
/*
* Copyright (c) 2009-2010, Sergey Karakovskiy and Julian Togelius
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of the Mario AI nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package ch.idsia.tools;
import ch.idsia.agents.Agent;
import ch.idsia.agents.AgentsPool;
import ch.idsia.benchmark.mario.engine.GlobalOptions;
import ch.idsia.benchmark.mario.engine.MarioVisualComponent;
import ch.idsia.benchmark.mario.engine.level.LevelGenerator;
import ch.idsia.benchmark.mario.simulation.SimulationOptions;
import javax.swing.*;
import javax.swing.border.TitledBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.basic.BasicArrowButton;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.Random;
import java.util.Set;
/**
* Created by IntelliJ IDEA.
* User: Sergey Karakovskiy
* Date: Mar 29, 2009
* Time: 6:27:25 PM
* Package: .Tools
*/
/**
 * Swing configuration window for the Mario AI benchmark tools. Lets the user
 * pick an agent, level options (type, seed, difficulty, length), FPS and
 * visualization settings, then launch a play/simulate run. Also the
 * command-line entry point ({@link #main}).
 */
public class ToolsConfigurator extends JFrame {
  private static final String strPlay = "-> Play! ->";
  private static final String strSimulate = "Simulate! ->";
  public static MarioVisualComponent marioVisualComponent = null;
  private static MarioAIOptions marioAIOptions = null;
  private static JFrame marioComponentFrame = null;
  private static GameViewer gameViewer = null; //new GameViewer(null, null);
  public Checkbox CheckboxShowGameViewer = new Checkbox("Show Game Viewer", true);
  public Label LabelConsole = new Label("Console:");
  public TextArea TextAreaConsole = new TextArea("Console:"/*, 8,40*/); // Verbose all, keys, events, actions, observations
  public Checkbox CheckboxShowVizualization = new Checkbox("Enable Visualization", GlobalOptions.isVisualization);
  public Checkbox CheckboxMaximizeFPS = new Checkbox("Maximize FPS");
  public Choice ChoiceAgent = new Choice();
  public Choice ChoiceLevelType = new Choice();
  public JSpinner JSpinnerLevelRandomizationSeed = new JSpinner();
  // public Checkbox CheckboxEnableTimer = new Checkbox("Enable Timer", GlobalOptions.isTimer);
  public JSpinner JSpinnerLevelDifficulty = new JSpinner();
  public Checkbox CheckboxPauseWorld = new Checkbox("Pause World");
  public Checkbox CheckboxPauseMario = new Checkbox("Pause Mario");
  public Checkbox CheckboxPowerRestoration = new Checkbox("Power Restoration");
  public JSpinner JSpinnerLevelLength = new JSpinner();
  public JSpinner JSpinnerMaxAttempts = new JSpinner();
  public Checkbox CheckboxExitOnFinish = new Checkbox("Exit on finish");
  public TextField TextFieldMatLabFileName = new TextField("FileName of output for Matlab");
  public Choice ChoiceVerbose = new Choice();
  // public Checkbox CheckboxStopSimulationIfWin = new Checkbox("Stop simulation If Win");
  public JButton JButtonPlaySimulate = new JButton(strPlay);
  public JButton JButtonResetEvaluationSummary = new JButton("Reset");
  Dimension defaultSize = new Dimension(330, 100);
  Point defaultLocation = new Point(0, 320);
  private Evaluator evaluator;
  private BasicArrowButton
    upFPS = new BasicArrowButton(BasicArrowButton.NORTH),
    downFPS = new BasicArrowButton(BasicArrowButton.SOUTH);
  // Last non-maximized FPS value, restored when "Maximize FPS" is unchecked.
  private int prevFPS = 24;

  /**
   * Builds the window and wires all listeners.
   * @param location window position, or null for the default
   * @param size window size, or null for the default
   */
  public ToolsConfigurator(Point location, Dimension size) {
    super("Tools Configurator");
    setSize((size == null) ? defaultSize : size);
    setLocation((location == null) ? defaultLocation : location);
    setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    // Universal Listener: one handler instance serves every control.
    ToolsConfiguratorActions toolsConfiguratorActions = new ToolsConfiguratorActions();
    Container ToolsConfiguratorOptionsPanel = getContentPane();
    // CheckboxShowGameViewer
    CheckboxShowGameViewer.addItemListener(toolsConfiguratorActions);
    // CheckboxShowVizualization
    CheckboxShowVizualization.addItemListener(toolsConfiguratorActions);
    // CheckboxMaximizeFPS
    CheckboxMaximizeFPS.addItemListener(toolsConfiguratorActions);
    // ChoiceAgent: populated from the agent pool.
    ChoiceAgent.addItemListener(toolsConfiguratorActions);
    Set<String> AgentsNames = AgentsPool.getAgentsNames();
    for (String s : AgentsNames)
      ChoiceAgent.addItem(s);
    // ChoiceLevelType: indices 0..2 are concrete types, index 3 is "Random".
    ChoiceLevelType.addItem("Overground");
    ChoiceLevelType.addItem("Underground");
    ChoiceLevelType.addItem("Castle");
    ChoiceLevelType.addItem("Random");
    ChoiceLevelType.addItemListener(toolsConfiguratorActions);
    // JSpinnerLevelRandomizationSeed
    JSpinnerLevelRandomizationSeed.setToolTipText("Hint: levels with same seed are identical for in observation");
    JSpinnerLevelRandomizationSeed.setValue(1);
    JSpinnerLevelRandomizationSeed.addChangeListener(toolsConfiguratorActions);
    // JSpinnerLevelDifficulty
    JSpinnerLevelDifficulty.addChangeListener(toolsConfiguratorActions);
    // CheckboxPauseWorld
    CheckboxPauseWorld.addItemListener(toolsConfiguratorActions);
    // CheckboxPauseMario (disabled; its handler is a stub)
    CheckboxPauseMario.addItemListener(toolsConfiguratorActions);
    CheckboxPauseMario.setEnabled(false);
    // CheckboxPowerRestoration
    CheckboxPowerRestoration.addItemListener(toolsConfiguratorActions);
    CheckboxPowerRestoration.setEnabled(true);
    // JButtonPlaySimulate
    JButtonPlaySimulate.addActionListener(toolsConfiguratorActions);
    // JSpinnerLevelLength
    JSpinnerLevelLength.setValue(320);
    JSpinnerLevelLength.addChangeListener(toolsConfiguratorActions);
    // JSpinnerMaxAttempts
    JSpinnerMaxAttempts.setValue(5);
    JSpinnerMaxAttempts.addChangeListener(toolsConfiguratorActions);
    // CheckboxExitOnFinish
    CheckboxExitOnFinish.addItemListener(toolsConfiguratorActions);
    // ChoiceVerbose
    ChoiceVerbose.addItem("Nothing");
    ChoiceVerbose.addItem("All");
    ChoiceVerbose.addItem("Keys pressed");
    ChoiceVerbose.addItem("Selected Actions");
    // JPanel, ArrowButtons ++FPS, --FPS
    JPanel JPanelFPSFineTune = new JPanel();
    JPanelFPSFineTune.setBorder(new TitledBorder("++FPS/--FPS"));
    JPanelFPSFineTune.setToolTipText("Hint: Use '+' or '=' for ++FPS and '-' for --FPS from your keyboard");
    JPanelFPSFineTune.add(upFPS);
    JPanelFPSFineTune.add(downFPS);
    upFPS.addActionListener(toolsConfiguratorActions);
    downFPS.addActionListener(toolsConfiguratorActions);
    upFPS.setToolTipText("Hint: Use '+' or '=' for ++FPS and '-' for --FPS from your keyboard");
    downFPS.setToolTipText("Hint: Use '+' or '=' for ++FPS and '-' for --FPS from your keyboard");
    // JPanelLevelOptions
    JPanel JPanelLevelOptions = new JPanel();
    JPanelLevelOptions.setLayout(new BoxLayout(JPanelLevelOptions, BoxLayout.Y_AXIS));
    JPanelLevelOptions.setBorder(new TitledBorder("Level Options"));
    JPanelLevelOptions.add(new Label("Level Type:"));
    JPanelLevelOptions.add(ChoiceLevelType);
    JPanelLevelOptions.add(new Label("Level Randomization Seed:"));
    JPanelLevelOptions.add(JSpinnerLevelRandomizationSeed);
    JPanelLevelOptions.add(new Label("Level Difficulty:"));
    JPanelLevelOptions.add(JSpinnerLevelDifficulty);
    JPanelLevelOptions.add(new Label("Level Length:"));
    JPanelLevelOptions.add(JSpinnerLevelLength);
    JPanelLevelOptions.add(CheckboxPauseWorld);
    JPanelLevelOptions.add(CheckboxPauseMario);
    JPanelLevelOptions.add(CheckboxPowerRestoration);
    JPanelLevelOptions.add(JButtonPlaySimulate);
    // JPanelMiscellaneousOptions
    JPanel JPanelMiscellaneousOptions = new JPanel();
    JPanelMiscellaneousOptions.setLayout(new BoxLayout(JPanelMiscellaneousOptions, BoxLayout.Y_AXIS));
    JPanelMiscellaneousOptions.setBorder(new TitledBorder("Miscellaneous Options"));
    JPanelMiscellaneousOptions.add(CheckboxShowGameViewer);
    JPanelMiscellaneousOptions.add(CheckboxShowVizualization);
    JPanelMiscellaneousOptions.add(CheckboxMaximizeFPS);
    JPanelMiscellaneousOptions.add(JPanelFPSFineTune);
    JPanelMiscellaneousOptions.add(new Label("Current Agent:"));
    JPanelMiscellaneousOptions.add(ChoiceAgent);
    JPanelMiscellaneousOptions.add(new Label("Verbose:"));
    JPanelMiscellaneousOptions.add(ChoiceVerbose);
    JPanelMiscellaneousOptions.add(new Label("Evaluation Summary: "));
    JPanelMiscellaneousOptions.add(JButtonResetEvaluationSummary);
    JPanelMiscellaneousOptions.add(new Label("Max # of attemps:"));
    JPanelMiscellaneousOptions.add(JSpinnerMaxAttempts);
    JPanelMiscellaneousOptions.add(CheckboxExitOnFinish);
    // Console panel (green-on-black terminal look)
    JPanel JPanelConsole = new JPanel(new FlowLayout());
    JPanelConsole.setBorder(new TitledBorder("Console"));
    TextAreaConsole.setFont(new Font("Courier New", Font.PLAIN, 12));
    TextAreaConsole.setBackground(Color.BLACK);
    TextAreaConsole.setForeground(Color.GREEN);
    JPanelConsole.add(TextAreaConsole);
    ToolsConfiguratorOptionsPanel.add(BorderLayout.WEST, JPanelLevelOptions);
    ToolsConfiguratorOptionsPanel.add(BorderLayout.CENTER, JPanelMiscellaneousOptions);
    ToolsConfiguratorOptionsPanel.add(BorderLayout.SOUTH, JPanelConsole);
    JPanel borderPanel = new JPanel();
    borderPanel.add(BorderLayout.NORTH, ToolsConfiguratorOptionsPanel);
    setContentPane(borderPanel);
    // autosize:
    this.pack();
  }

  /**
   * Entry point: parses options, builds the configurator and game viewer,
   * and either shows the UI or immediately starts a run.
   */
  public static void main(String[] args) {
    marioAIOptions = new MarioAIOptions(args);
    ToolsConfigurator toolsConfigurator = new ToolsConfigurator(null, null);
    toolsConfigurator.setVisible(marioAIOptions.isToolsConfigurator());
    // Seed the controls from the parsed command-line options.
    toolsConfigurator.ChoiceLevelType.select(marioAIOptions.getLevelType());
    toolsConfigurator.JSpinnerLevelDifficulty.setValue(marioAIOptions.getLevelDifficulty());
    toolsConfigurator.JSpinnerLevelRandomizationSeed.setValue(marioAIOptions.getLevelRandSeed());
    toolsConfigurator.JSpinnerLevelLength.setValue(marioAIOptions.getLevelLength());
    toolsConfigurator.CheckboxShowVizualization.setState(marioAIOptions.isVisualization());
    toolsConfigurator.ChoiceAgent.select(AgentsPool.getCurrentAgent().getName());
    toolsConfigurator.CheckboxMaximizeFPS.setState(marioAIOptions.getFPS() > GlobalOptions.MaxFPS - 1);
    toolsConfigurator.CheckboxPowerRestoration.setState(marioAIOptions.isPowerRestoration());
    toolsConfigurator.CheckboxExitOnFinish.setState(marioAIOptions.isExitProgramWhenFinished());
    gameViewer = new GameViewer(marioAIOptions);
    CreateMarioComponentFrame(marioAIOptions);
    toolsConfigurator.setGameViewer(gameViewer);
    gameViewer.setAlwaysOnTop(false);
    gameViewer.setToolsConfigurator(toolsConfigurator);
    gameViewer.setVisible(marioAIOptions.isGameViewer());
    if (!marioAIOptions.isToolsConfigurator()) {
      // Headless-ish mode: skip the configurator and run straight away.
      toolsConfigurator.simulateOrPlay();
    }
  }

  /**
   * Lazily creates the frame that hosts the Mario visual component and
   * applies the view-related options to it.
   */
  @Deprecated
  static void CreateMarioComponentFrame(SimulationOptions simulationOptions) {
    if (marioComponentFrame == null) {
      marioComponentFrame = new JFrame( "Mario AI benchmark-tools" + GlobalOptions.getVersionUID());
      marioComponentFrame.pack();
      marioComponentFrame.setResizable(false);
      marioComponentFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    }
    marioComponentFrame.setAlwaysOnTop(simulationOptions.isViewAlwaysOnTop());
    marioComponentFrame.setLocation(simulationOptions.getViewLocation());
    marioComponentFrame.setVisible(simulationOptions.isVisualization());
  }

  /** Collects the current UI state into simulation options and runs. */
  public void simulateOrPlay() {
    //Simulate or Play!
    SimulationOptions simulationOptions = prepareSimulationOptions();
    assert (simulationOptions != null);
  }

  /**
   * Transfers the state of every UI control into the shared
   * {@link MarioAIOptions} instance.
   * @return the populated simulation options (same object as marioAIOptions)
   */
  private SimulationOptions prepareSimulationOptions() {
    SimulationOptions simulationOptions = marioAIOptions;
    Agent agent = AgentsPool.getAgentByName(ChoiceAgent.getSelectedItem());
    simulationOptions.setAgent(agent);
    int type = ChoiceLevelType.getSelectedIndex();
    // BUGFIX: "Random" is the 4th item of ChoiceLevelType, i.e. index 3
    // (Choice indices are 0-based). The old check `type == 4` could never
    // match, so selecting "Random" silently passed 3 through as the level
    // type. Pick one of the three concrete types (0..2) at random instead.
    if (type == 3)
      type = (new Random()).nextInt(3);
    simulationOptions.setLevelType(type);
    simulationOptions.setLevelDifficulty(Integer.parseInt(JSpinnerLevelDifficulty.getValue().toString()));
    simulationOptions.setLevelRandSeed(Integer.parseInt(JSpinnerLevelRandomizationSeed.getValue().toString()));
    simulationOptions.setLevelLength(Integer.parseInt(JSpinnerLevelLength.getValue().toString()));
    simulationOptions.setVisualization(CheckboxShowVizualization.getState());
    simulationOptions.setPowerRestoration(CheckboxPowerRestoration.getState());
    simulationOptions.setExitProgramWhenFinished(CheckboxExitOnFinish.getState());
    return simulationOptions;
  }

  public void setGameViewer(GameViewer gameViewer) {
    // gameViewer is a static field; assign it as such rather than through
    // 'this' (the old code compiled but was misleading).
    ToolsConfigurator.gameViewer = gameViewer;
  }

  public MarioVisualComponent getMarioVisualComponent() {
    return marioVisualComponent;
  }

  public void setConsoleText(String text) {
    // NOTE(review): the 'text' argument is ignored; only the label changes.
    LabelConsole.setText("Console got message:");
  }

  enum INTERFACE_TYPE {
    CONSOLE, GUI
  }

  /**
   * Single listener for every control in the window: buttons
   * (ActionListener), checkboxes/choices (ItemListener) and spinners
   * (ChangeListener).
   */
  public class ToolsConfiguratorActions implements ActionListener, ItemListener, ChangeListener {
    public void actionPerformed(ActionEvent ae) {
      Object ob = ae.getSource();
      if (ob == JButtonPlaySimulate) {
        simulateOrPlay();
      } else if (ob == upFPS) {
        // Bump FPS; once we hit the cap, flip the Maximize checkbox on.
        if (++GlobalOptions.FPS >= GlobalOptions.MaxFPS) {
          GlobalOptions.FPS = GlobalOptions.MaxFPS;
          CheckboxMaximizeFPS.setState(true);
        }
      } else if (ob == downFPS) {
        // Drop FPS, clamped at 1; any decrease leaves "maximized" state.
        if (--GlobalOptions.FPS < 1)
          GlobalOptions.FPS = 1;
        CheckboxMaximizeFPS.setState(false);
      } else if (ob == JButtonResetEvaluationSummary) {
        evaluator = null;
      }
    }

    public void itemStateChanged(ItemEvent ie) {
      Object ob = ie.getSource();
      if (ob == CheckboxShowGameViewer) {
        gameViewer.setVisible(CheckboxShowGameViewer.getState());
      } else if (ob == CheckboxShowVizualization) {
        GlobalOptions.isVisualization = CheckboxShowVizualization.getState();
        marioComponentFrame.setVisible(GlobalOptions.isVisualization);
      } else if (ob == CheckboxMaximizeFPS) {
        // Remember the last non-maxed FPS so unchecking restores it.
        prevFPS = (GlobalOptions.FPS == GlobalOptions.MaxFPS) ? prevFPS : GlobalOptions.FPS;
        // NOTE(review): uses the literal 100 rather than GlobalOptions.MaxFPS;
        // confirm the two agree before changing.
        GlobalOptions.FPS = CheckboxMaximizeFPS.getState() ? 100 : prevFPS;
      } else if (ob == CheckboxPauseWorld) {
        // Intentionally a no-op: pause handling is currently disabled.
      } else if (ob == CheckboxPauseMario) {
        TextAreaConsole.setText("1\n2\n3\n");
      } else if (ob == CheckboxPowerRestoration) {
        GlobalOptions.isPowerRestoration = CheckboxPowerRestoration.getState();
      } else if (ob == ChoiceAgent) {
        // Agent changed from UI: switch the action button to simulate mode.
        JButtonPlaySimulate.setText(strSimulate);
      } else if (ob == ChoiceLevelType) {
        // No immediate action; read when a run is started.
      } else if (ob == ChoiceVerbose) {
        // No immediate action; read when a run is started.
      }
    }

    public void stateChanged(ChangeEvent changeEvent) {
      Object ob = changeEvent.getSource();
      if (ob == JSpinnerLevelRandomizationSeed) {
        // No immediate action; read when a run is started.
      } else if (ob == JSpinnerLevelDifficulty) {
        // No immediate action; read when a run is started.
      } else if (ob == JSpinnerLevelLength) {
        // Enforce the generator's minimum level length.
        if (Integer.parseInt(JSpinnerLevelLength.getValue().toString()) < LevelGenerator.LevelLengthMinThreshold)
          JSpinnerLevelLength.setValue(LevelGenerator.LevelLengthMinThreshold);
      }
    }
  }
}
| |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm.init.api;
import static com.google.gerrit.common.FileUtil.modified;
import com.google.common.io.ByteStreams;
import com.google.gerrit.common.Die;
import org.eclipse.jgit.internal.storage.file.LockFile;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.SystemReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Arrays;
/** Utility functions to help initialize a site. */
public class InitUtil {
public static Die die(String why) {
return new Die(why);
}
public static Die die(String why, Throwable cause) {
return new Die(why, cause);
}
public static void savePublic(final FileBasedConfig sec) throws IOException {
if (modified(sec)) {
sec.save();
}
}
public static void mkdir(File file) {
mkdir(file.toPath());
}
public static void mkdir(Path path) {
if (Files.isDirectory(path)) {
return;
}
try {
Files.createDirectory(path);
} catch (IOException e) {
throw die("Cannot make directory " + path, e);
}
}
public static String version() {
return com.google.gerrit.common.Version.getVersion();
}
public static String username() {
return System.getProperty("user.name");
}
public static String hostname() {
return SystemReader.getInstance().getHostname();
}
public static boolean isLocal(final String hostname) {
try {
return InetAddress.getByName(hostname).isLoopbackAddress();
} catch (UnknownHostException e) {
return false;
}
}
public static String dnOf(String name) {
if (name != null) {
int p = name.indexOf("://");
if (0 < p) {
name = name.substring(p + 3);
}
p = name.indexOf(".");
if (0 < p) {
name = name.substring(p + 1);
name = "DC=" + name.replaceAll("\\.", ",DC=");
} else {
name = null;
}
}
return name;
}
public static String domainOf(String name) {
if (name != null) {
int p = name.indexOf("://");
if (0 < p) {
name = name.substring(p + 3);
}
p = name.indexOf(".");
if (0 < p) {
name = name.substring(p + 1);
}
}
return name;
}
public static void extract(Path dst, Class<?> sibling, String name)
throws IOException {
try (InputStream in = open(sibling, name)) {
if (in != null) {
copy(dst, ByteStreams.toByteArray(in));
}
}
}
private static InputStream open(final Class<?> sibling, final String name) {
final InputStream in = sibling.getResourceAsStream(name);
if (in == null) {
String pkg = sibling.getName();
int end = pkg.lastIndexOf('.');
if (0 < end) {
pkg = pkg.substring(0, end + 1);
pkg = pkg.replace('.', '/');
} else {
pkg = "";
}
System.err.println("warn: Cannot read " + pkg + name);
return null;
}
return in;
}
/**
 * Atomically writes {@code buf} to {@code dst} under a JGit {@code LockFile}.
 *
 * <p>If the file already contains exactly {@code buf} it is left untouched,
 * avoiding needless churn on repeated init runs. Parent directories are
 * created as needed.
 *
 * @throws IOException if the lock cannot be acquired, the write fails, or
 *     the lock file cannot be committed into place
 */
public static void copy(Path dst, byte[] buf)
throws FileNotFoundException, IOException {
// If the file already has the content we want to put there,
// don't attempt to overwrite the file.
//
try (InputStream in = Files.newInputStream(dst)) {
if (Arrays.equals(buf, ByteStreams.toByteArray(in))) {
return;
}
} catch (NoSuchFileException notFound) {
// Fall through and write the file.
}
Files.createDirectories(dst.getParent());
// Take an exclusive lock so a concurrent writer cannot corrupt dst.
LockFile lf = new LockFile(dst.toFile());
if (!lf.lock()) {
throw new IOException("Cannot lock " + dst);
}
try {
// Write the new content into the lock file, not dst itself.
try (InputStream in = new ByteArrayInputStream(buf);
OutputStream out = lf.getOutputStream()) {
ByteStreams.copy(in, out);
}
// commit() atomically renames the lock file over dst; on failure dst
// keeps its previous content.
if (!lf.commit()) {
throw new IOException("Cannot commit " + dst);
}
} finally {
lf.unlock();
}
}
/**
 * Parses {@code url} into a URI, replacing a wildcard host ("*", meaning
 * all local addresses) with this machine's concrete hostname.
 *
 * @throws URISyntaxException if {@code url} (or its substituted form)
 *     cannot be parsed
 */
public static URI toURI(String url) throws URISyntaxException {
  final URI parsed = new URI(url);
  if (!isAnyAddress(parsed)) {
    return parsed;
  }
  // Substitute the single '*' with the real hostname and re-parse.
  final int star = url.indexOf('*');
  final String concrete =
      url.substring(0, star) + hostname() + url.substring(star + 1);
  return new URI(concrete);
}
/**
 * Returns true if {@code u} uses the wildcard authority "*" (or "*:port"),
 * meaning "listen on every local address".
 *
 * <p>URIs such as "http://*:8080" have no parseable host, so the raw
 * authority string is inspected instead.
 *
 * @param u parsed URI to inspect
 * @return true only for a wildcard authority; false for normal hosts and
 *     for URIs with no authority at all (e.g. "file:///path")
 */
public static boolean isAnyAddress(final URI u) {
  if (u.getHost() != null) {
    return false;
  }
  final String authority = u.getAuthority();
  // Bug fix: the previous code dereferenced getAuthority() directly and
  // threw NullPointerException for URIs with neither host nor authority.
  return authority != null
      && (authority.equals("*") || authority.startsWith("*:"));
}
/**
 * Returns the port of {@code uri}, defaulting to 443 for https and 80 for
 * any other scheme when no explicit port is present.
 */
public static int portOf(final URI uri) {
  final int explicit = uri.getPort();
  if (explicit >= 0) {
    return explicit;
  }
  return "https".equals(uri.getScheme()) ? 443 : 80;
}
/** Private constructor: static-utility class, never instantiated. */
private InitUtil() {
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.compute;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.management.SubResource;
import com.azure.resourcemanager.compute.models.KnownLinuxVirtualMachineImage;
import com.azure.resourcemanager.compute.models.OperatingSystemTypes;
import com.azure.resourcemanager.compute.models.PowerState;
import com.azure.resourcemanager.compute.models.ResourceIdentityType;
import com.azure.resourcemanager.compute.models.Sku;
import com.azure.resourcemanager.compute.models.UpgradeMode;
import com.azure.resourcemanager.compute.models.VaultCertificate;
import com.azure.resourcemanager.compute.models.VaultSecretGroup;
import com.azure.resourcemanager.compute.models.VirtualMachineEvictionPolicyTypes;
import com.azure.resourcemanager.compute.models.VirtualMachineImage;
import com.azure.resourcemanager.compute.models.VirtualMachinePriorityTypes;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSet;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetExtension;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetPublicIpAddressConfiguration;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetSkuTypes;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetVM;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetVMs;
import com.azure.resourcemanager.authorization.models.BuiltInRole;
import com.azure.resourcemanager.authorization.models.RoleAssignment;
import com.azure.resourcemanager.keyvault.models.Secret;
import com.azure.resourcemanager.keyvault.models.Vault;
import com.azure.resourcemanager.network.models.ApplicationSecurityGroup;
import com.azure.resourcemanager.network.models.LoadBalancer;
import com.azure.resourcemanager.network.models.LoadBalancerBackend;
import com.azure.resourcemanager.network.models.LoadBalancerInboundNatRule;
import com.azure.resourcemanager.network.models.LoadBalancerSkuType;
import com.azure.resourcemanager.network.models.LoadBalancingRule;
import com.azure.resourcemanager.network.models.Network;
import com.azure.resourcemanager.network.models.NetworkSecurityGroup;
import com.azure.resourcemanager.network.models.PublicIpAddress;
import com.azure.resourcemanager.network.models.SecurityRuleProtocol;
import com.azure.resourcemanager.network.models.VirtualMachineScaleSetNetworkInterface;
import com.azure.resourcemanager.network.models.VirtualMachineScaleSetNicIpConfiguration;
import com.azure.resourcemanager.resources.fluentcore.utils.ResourceManagerUtils;
import com.azure.resourcemanager.resources.models.ResourceGroup;
import com.azure.resourcemanager.test.utils.TestUtilities;
import com.azure.resourcemanager.resources.fluentcore.arm.AvailabilityZoneId;
import com.azure.core.management.Region;
import com.azure.core.management.profile.AzureProfile;
import com.azure.resourcemanager.storage.models.StorageAccount;
import com.azure.resourcemanager.storage.models.StorageAccountKey;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.BlobServiceClientBuilder;
import com.azure.storage.blob.specialized.BlockBlobClient;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
public class VirtualMachineScaleSetOperationsTests extends ComputeManagementTest {
private String rgName = "";
private final Region region = Region.US_WEST;
@Override
// Generates a fresh resource-group name for this test run before delegating
// client setup to the base class.
protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
rgName = generateRandomResourceName("javacsmrg", 15);
super.initializeClients(httpPipeline, profile);
}
@Override
// Deletes the per-test resource group, cascading deletion of everything the
// test created inside it. (rgName is initialized to "" and reassigned in
// initializeClients, so the null check is defensive.)
protected void cleanUpResources() {
if (rgName != null) {
resourceManager.resourceGroups().deleteByName(rgName);
}
}
/**
 * Verifies that a scale set created with a CustomScript extension carrying
 * protected (encrypted, write-only) settings can be created, fetched, and
 * scaled up while its public settings remain intact. Only public settings
 * are asserted, since protected settings never round-trip from the service.
 */
@Test
public void canUpdateVirtualMachineScaleSetWithExtensionProtectedSettings() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String uname = "jvuser";
final String password = password();
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
StorageAccount storageAccount =
this
.storageManager
.storageAccounts()
.define(generateRandomResourceName("stg", 15))
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.create();
List<StorageAccountKey> keys = storageAccount.getKeys();
Assertions.assertNotNull(keys);
Assertions.assertTrue(keys.size() > 0);
String storageAccountKey = keys.get(0).value();
// Upload the script file as block blob
//
URI fileUri;
if (isPlaybackMode()) {
// Recorded (playback) run: no live storage account exists, use a dummy URI.
fileUri = new URI("http://nonexisting.blob.core.windows.net/scripts/install_apache.sh");
} else {
final String storageConnectionString = ResourceManagerUtils.getStorageConnectionString(
storageAccount.name(), storageAccountKey, storageManager.environment());
// Get the script to upload
//
String filePath = VirtualMachineScaleSetOperationsTests.class.getResource("/install_apache.sh").getPath();
File file = new File(filePath);
InputStream inputStream = VirtualMachineScaleSetOperationsTests.class
.getResourceAsStream("/install_apache.sh");
inputStream = new BufferedInputStream(inputStream);
inputStream.mark((int) file.length());
BlobServiceClient storageClient = new BlobServiceClientBuilder()
.connectionString(storageConnectionString)
.httpClient(storageManager.httpPipeline().getHttpClient())
.buildClient();
BlobContainerClient blobContainerClient = storageClient.getBlobContainerClient("scripts");
blobContainerClient.create();
BlockBlobClient blockBlobClient = blobContainerClient.getBlobClient("install_apache.sh")
.getBlockBlobClient();
blockBlobClient.upload(inputStream, file.length());
fileUri = new URI(blockBlobClient.getBlobUrl());
}
List<String> fileUris = new ArrayList<>();
fileUris.add(fileUri.toString());
Network network =
this
.networkManager
.networks()
.define(generateRandomResourceName("vmssvnet", 15))
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
// Create the scale set with one extension; the command and storage
// credentials go into protected settings.
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withoutPrimaryInternetFacingLoadBalancer()
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername(uname)
.withRootPassword(password)
.withUnmanagedDisks()
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.withExistingStorageAccount(storageAccount)
.defineNewExtension("CustomScriptForLinux")
.withPublisher("Microsoft.OSTCExtensions")
.withType("CustomScriptForLinux")
.withVersion("1.4")
.withMinorVersionAutoUpgrade()
.withPublicSetting("fileUris", fileUris)
.withProtectedSetting("commandToExecute", "bash install_apache.sh")
.withProtectedSetting("storageAccountName", storageAccount.name())
.withProtectedSetting("storageAccountKey", storageAccountKey)
.attach()
.create();
// Validate extensions after create
//
Map<String, VirtualMachineScaleSetExtension> extensions = virtualMachineScaleSet.extensions();
Assertions.assertNotNull(extensions);
Assertions.assertEquals(1, extensions.size());
Assertions.assertTrue(extensions.containsKey("CustomScriptForLinux"));
VirtualMachineScaleSetExtension extension = extensions.get("CustomScriptForLinux");
Assertions.assertNotNull(extension.publicSettings());
Assertions.assertEquals(1, extension.publicSettings().size());
Assertions.assertNotNull(extension.publicSettingsAsJsonString());
// Retrieve scale set
VirtualMachineScaleSet scaleSet =
this.computeManager.virtualMachineScaleSets().getById(virtualMachineScaleSet.id());
// Validate extensions after get
//
// NOTE(review): these assertions re-check virtualMachineScaleSet, not the
// freshly fetched scaleSet — confirm whether scaleSet.extensions() was
// intended here.
extensions = virtualMachineScaleSet.extensions();
Assertions.assertNotNull(extensions);
Assertions.assertEquals(1, extensions.size());
Assertions.assertTrue(extensions.containsKey("CustomScriptForLinux"));
extension = extensions.get("CustomScriptForLinux");
Assertions.assertNotNull(extension.publicSettings());
Assertions.assertEquals(1, extension.publicSettings().size());
Assertions.assertNotNull(extension.publicSettingsAsJsonString());
// Update VMSS capacity
//
int newCapacity = scaleSet.capacity() + 1;
virtualMachineScaleSet.update().withCapacity(newCapacity).apply();
// Validate extensions after update
//
extensions = virtualMachineScaleSet.extensions();
Assertions.assertNotNull(extensions);
Assertions.assertEquals(1, extensions.size());
Assertions.assertTrue(extensions.containsKey("CustomScriptForLinux"));
extension = extensions.get("CustomScriptForLinux");
Assertions.assertNotNull(extension.publicSettings());
Assertions.assertEquals(1, extension.publicSettings().size());
Assertions.assertNotNull(extension.publicSettingsAsJsonString());
}
/**
 * Creates a VMSS behind an internet-facing HTTP load balancer with a
 * CustomScriptForLinux extension that installs Apache, then verifies HTTP
 * reachability through the load balancer and SSH reachability to each
 * instance via its inbound NAT rule.
 */
@Test
public void canCreateVirtualMachineScaleSetWithCustomScriptExtension() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String uname = "jvuser";
final String password = password();
final String apacheInstallScript =
"https://raw.githubusercontent.com/Azure/azure-sdk-for-java/master/sdk/resourcemanager/azure-resourcemanager-compute/src/test/resources/install_apache.sh";
final String installCommand = "bash install_apache.sh Abc.123x(";
List<String> fileUris = new ArrayList<>();
fileUris.add(apacheInstallScript);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define(generateRandomResourceName("vmssvnet", 15))
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer = createHttpLoadBalancers(region, resourceGroup, "1");
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername(uname)
.withRootPassword(password)
.withUnmanagedDisks()
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.defineNewExtension("CustomScriptForLinux")
.withPublisher("Microsoft.OSTCExtensions")
.withType("CustomScriptForLinux")
.withVersion("1.4")
.withMinorVersionAutoUpgrade()
.withPublicSetting("fileUris", fileUris)
.withPublicSetting("commandToExecute", installCommand)
.attach()
.withUpgradeMode(UpgradeMode.MANUAL)
.create();
checkVMInstances(virtualMachineScaleSet);
List<String> publicIPAddressIds = virtualMachineScaleSet.primaryPublicIpAddressIds();
PublicIpAddress publicIPAddress = this.networkManager.publicIpAddresses().getById(publicIPAddressIds.get(0));
String fqdn = publicIPAddress.fqdn();
// Assert public load balancing connection
if (!isPlaybackMode()) {
HttpClient client = new NettyAsyncHttpClientBuilder().build();
HttpRequest request = new HttpRequest(HttpMethod.GET, "http://" + fqdn);
HttpResponse response = client.send(request).block();
// NOTE(review): arguments are swapped relative to the JUnit convention
// assertEquals(expected, actual); intended form would be
// assertEquals(200, response.getStatusCode()).
Assertions.assertEquals(response.getStatusCode(), 200);
}
// Check SSH to VM instances via Nat rule
//
for (VirtualMachineScaleSetVM vm : virtualMachineScaleSet.virtualMachines().list()) {
PagedIterable<VirtualMachineScaleSetNetworkInterface> networkInterfaces = vm.listNetworkInterfaces();
Assertions.assertEquals(TestUtilities.getSize(networkInterfaces), 1);
VirtualMachineScaleSetNetworkInterface networkInterface = networkInterfaces.iterator().next();
VirtualMachineScaleSetNicIpConfiguration primaryIpConfig = null;
primaryIpConfig = networkInterface.primaryIPConfiguration();
Assertions.assertNotNull(primaryIpConfig);
Integer sshFrontendPort = null;
// Locate the NAT rule that forwards to SSH (backend port 22).
List<LoadBalancerInboundNatRule> natRules = primaryIpConfig.listAssociatedLoadBalancerInboundNatRules();
for (LoadBalancerInboundNatRule natRule : natRules) {
if (natRule.backendPort() == 22) {
sshFrontendPort = natRule.frontendPort();
break;
}
}
Assertions.assertNotNull(sshFrontendPort);
this.sleep(1000 * 60); // Wait some time for VM to be available
this.ensureCanDoSsh(fqdn, sshFrontendPort, uname, password);
}
}
/**
 * Creates a VMSS with per-instance public IPs and an application security
 * group, then exercises update/refresh round-trips of the optional network
 * settings: public-IP idle timeout, IP forwarding, accelerated networking,
 * and network security group association/removal.
 */
@Test
public void canCreateVirtualMachineScaleSetWithOptionalNetworkSettings() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String vmssVmDnsLabel = generateRandomResourceName("pip", 10);
final String nsgName = generateRandomResourceName("nsg", 10);
final String asgName = generateRandomResourceName("asg", 8);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
ApplicationSecurityGroup asg =
this
.networkManager
.applicationSecurityGroups()
.define(asgName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.create();
// Create VMSS with instance public ip
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withoutPrimaryInternetFacingLoadBalancer()
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withRootPassword(password())
.withVirtualMachinePublicIp(vmssVmDnsLabel)
.withExistingApplicationSecurityGroup(asg)
.create();
VirtualMachineScaleSetPublicIpAddressConfiguration currentIpConfig =
virtualMachineScaleSet.virtualMachinePublicIpConfig();
Assertions.assertNotNull(currentIpConfig);
Assertions.assertNotNull(currentIpConfig.dnsSettings());
Assertions.assertNotNull(currentIpConfig.dnsSettings().domainNameLabel());
// Bump the idle timeout and confirm it survives both update and refresh.
currentIpConfig.withIdleTimeoutInMinutes(20);
virtualMachineScaleSet.update().withVirtualMachinePublicIp(currentIpConfig).apply();
currentIpConfig = virtualMachineScaleSet.virtualMachinePublicIpConfig();
Assertions.assertNotNull(currentIpConfig);
Assertions.assertNotNull(currentIpConfig.idleTimeoutInMinutes());
Assertions.assertEquals((long) 20, (long) currentIpConfig.idleTimeoutInMinutes());
virtualMachineScaleSet.refresh();
currentIpConfig = virtualMachineScaleSet.virtualMachinePublicIpConfig();
Assertions.assertNotNull(currentIpConfig);
Assertions.assertNotNull(currentIpConfig.idleTimeoutInMinutes());
Assertions.assertEquals((long) 20, (long) currentIpConfig.idleTimeoutInMinutes());
List<String> asgIds = virtualMachineScaleSet.applicationSecurityGroupIds();
Assertions.assertNotNull(asgIds);
Assertions.assertEquals(1, asgIds.size());
NetworkSecurityGroup nsg =
networkManager
.networkSecurityGroups()
.define(nsgName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.defineRule("rule1")
.allowOutbound()
.fromAnyAddress()
.fromPort(80)
.toAnyAddress()
.toPort(80)
.withProtocol(SecurityRuleProtocol.TCP)
.attach()
.create();
// Deallocate, then toggle networking features on via update.
virtualMachineScaleSet.deallocate();
virtualMachineScaleSet
.update()
.withIpForwarding()
.withAcceleratedNetworking()
.withExistingNetworkSecurityGroup(nsg)
.apply();
Assertions.assertTrue(virtualMachineScaleSet.isIpForwardingEnabled());
Assertions.assertTrue(virtualMachineScaleSet.isAcceleratedNetworkingEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.networkSecurityGroupId());
//
virtualMachineScaleSet.refresh();
//
Assertions.assertTrue(virtualMachineScaleSet.isIpForwardingEnabled());
Assertions.assertTrue(virtualMachineScaleSet.isAcceleratedNetworkingEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.networkSecurityGroupId());
// Toggle the same features back off and verify removal.
virtualMachineScaleSet
.update()
.withoutIpForwarding()
.withoutAcceleratedNetworking()
.withoutNetworkSecurityGroup()
.apply();
Assertions.assertFalse(virtualMachineScaleSet.isIpForwardingEnabled());
Assertions.assertFalse(virtualMachineScaleSet.isAcceleratedNetworkingEnabled());
Assertions.assertNull(virtualMachineScaleSet.networkSecurityGroupId());
}
/**
 * Deploys a VMSS whose OS profile pulls a certificate/secret from Key Vault
 * via a VaultSecretGroup, then strips the secrets again with an update.
 *
 * <p>Disabled because the record/playback framework cannot capture the Key
 * Vault data-plane calls used to create the secret.
 */
@Test
@Disabled("Mock framework doesn't support data plane")
public void canCreateVirtualMachineScaleSetWithSecret() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String vaultName = generateRandomResourceName("vlt", 10);
final String secretName = generateRandomResourceName("srt", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
// Vault with deployment enabled so the compute service may read secrets.
Vault vault =
this
.keyVaultManager
.vaults()
.define(vaultName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.defineAccessPolicy()
.forServicePrincipal(clientIdFromFile())
.allowSecretAllPermissions()
.attach()
.withDeploymentEnabled()
.create();
final InputStream embeddedJsonConfig =
VirtualMachineExtensionOperationsTests.class.getResourceAsStream("/myTest.txt");
String secretValue = IOUtils.toString(embeddedJsonConfig);
Secret secret = vault.secrets().define(secretName).withValue(secretValue).create();
List<VaultCertificate> certs = new ArrayList<>();
certs.add(new VaultCertificate().withCertificateUrl(secret.id()));
List<VaultSecretGroup> group = new ArrayList<>();
group
.add(
new VaultSecretGroup()
.withSourceVault(new SubResource().withId(vault.id()))
.withVaultCertificates(certs));
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withRootPassword(password())
.withSecrets(group)
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.withNewStorageAccount(generateRandomResourceName("stg3", 15))
.withUpgradeMode(UpgradeMode.MANUAL)
.create();
// Every instance's OS profile should carry the injected secret group.
for (VirtualMachineScaleSetVM vm : virtualMachineScaleSet.virtualMachines().list()) {
Assertions.assertTrue(vm.osProfile().secrets().size() > 0);
}
// Removing secrets via update should clear them from all instances.
virtualMachineScaleSet.update().withoutSecrets().apply();
for (VirtualMachineScaleSetVM vm : virtualMachineScaleSet.virtualMachines().list()) {
Assertions.assertTrue(vm.osProfile().secrets().size() == 0);
}
}
/**
 * End-to-end VMSS create/update validation against an internet-facing BASIC
 * load balancer: checks LB backends/NAT pools, NICs and their ip
 * configurations, default upgrade mode and capacity; then attaches an
 * internal load balancer, removes one NAT pool via update, and re-validates.
 *
 * <p>NOTE(review): unlike the sibling tests, this method has no {@code @Test}
 * annotation and therefore never runs — confirm whether that is intentional.
 */
public void canCreateVirtualMachineScaleSet() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
// NOTE(review): hard-coded password; sibling tests use the password() helper.
.withRootPassword("123OData!@#123")
.withUnmanagedDisks()
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.withNewStorageAccount(generateRandomResourceName("stg", 15))
.create();
// Validate Network specific properties (LB, VNet, NIC, IPConfig etc..)
//
Assertions.assertNull(virtualMachineScaleSet.getPrimaryInternalLoadBalancer());
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternalLoadBalancerBackends().size() == 0);
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternalLoadBalancerInboundNatPools().size() == 0);
Assertions.assertNotNull(virtualMachineScaleSet.getPrimaryInternetFacingLoadBalancer());
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerBackends().size() == 2);
Assertions
.assertTrue(virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerInboundNatPools().size() == 2);
Network primaryNetwork = virtualMachineScaleSet.getPrimaryNetwork();
Assertions.assertNotNull(primaryNetwork.id());
PagedIterable<VirtualMachineScaleSetNetworkInterface> nics = virtualMachineScaleSet.listNetworkInterfaces();
int nicCount = 0;
for (VirtualMachineScaleSetNetworkInterface nic : nics) {
nicCount++;
Assertions.assertNotNull(nic.id());
Assertions
.assertTrue(nic.virtualMachineId().toLowerCase().startsWith(virtualMachineScaleSet.id().toLowerCase()));
Assertions.assertNotNull(nic.macAddress());
Assertions.assertNotNull(nic.dnsServers());
Assertions.assertNotNull(nic.appliedDnsServers());
Map<String, VirtualMachineScaleSetNicIpConfiguration> ipConfigs = nic.ipConfigurations();
Assertions.assertEquals(ipConfigs.size(), 1);
for (Map.Entry<String, VirtualMachineScaleSetNicIpConfiguration> entry : ipConfigs.entrySet()) {
VirtualMachineScaleSetNicIpConfiguration ipConfig = entry.getValue();
Assertions.assertNotNull(ipConfig);
Assertions.assertTrue(ipConfig.isPrimary());
Assertions.assertNotNull(ipConfig.subnetName());
Assertions.assertTrue(primaryNetwork.id().toLowerCase().equalsIgnoreCase(ipConfig.networkId()));
Assertions.assertNotNull(ipConfig.privateIpAddress());
Assertions.assertNotNull(ipConfig.privateIpAddressVersion());
Assertions.assertNotNull(ipConfig.privateIpAllocationMethod());
List<LoadBalancerBackend> lbBackends = ipConfig.listAssociatedLoadBalancerBackends();
// VMSS is created with a internet facing LB with two Backend pools so there will be two
// backends in ip-config as well
Assertions.assertEquals(lbBackends.size(), 2);
for (LoadBalancerBackend lbBackend : lbBackends) {
Map<String, LoadBalancingRule> lbRules = lbBackend.loadBalancingRules();
Assertions.assertEquals(lbRules.size(), 1);
for (Map.Entry<String, LoadBalancingRule> ruleEntry : lbRules.entrySet()) {
LoadBalancingRule rule = ruleEntry.getValue();
Assertions.assertNotNull(rule);
Assertions
.assertTrue(
(rule.frontendPort() == 80 && rule.backendPort() == 80)
|| (rule.frontendPort() == 443 && rule.backendPort() == 443));
}
}
List<LoadBalancerInboundNatRule> lbNatRules = ipConfig.listAssociatedLoadBalancerInboundNatRules();
// VMSS is created with a internet facing LB with two nat pools so there will be two
// nat rules in ip-config as well
Assertions.assertEquals(lbNatRules.size(), 2);
for (LoadBalancerInboundNatRule lbNatRule : lbNatRules) {
Assertions
.assertTrue(
(lbNatRule.frontendPort() >= 5000 && lbNatRule.frontendPort() <= 5099)
|| (lbNatRule.frontendPort() >= 6000 && lbNatRule.frontendPort() <= 6099));
Assertions.assertTrue(lbNatRule.backendPort() == 22 || lbNatRule.backendPort() == 23);
}
}
}
Assertions.assertTrue(nicCount > 0);
// Validate other properties
//
Assertions.assertEquals(virtualMachineScaleSet.vhdContainers().size(), 2);
Assertions.assertEquals(virtualMachineScaleSet.sku(), VirtualMachineScaleSetSkuTypes.STANDARD_A0);
// Check defaults
Assertions.assertTrue(virtualMachineScaleSet.upgradeModel() == UpgradeMode.AUTOMATIC);
Assertions.assertEquals(virtualMachineScaleSet.capacity(), 2);
// Fetch the primary Virtual network
primaryNetwork = virtualMachineScaleSet.getPrimaryNetwork();
// Pick an arbitrary internet-facing NAT pool to remove in the update below.
String inboundNatPoolToRemove = null;
for (String inboundNatPoolName
: virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerInboundNatPools().keySet()) {
inboundNatPoolToRemove = inboundNatPoolName;
break;
}
LoadBalancer internalLoadBalancer = createInternalLoadBalancer(region, resourceGroup, primaryNetwork, "1");
virtualMachineScaleSet
.update()
.withExistingPrimaryInternalLoadBalancer(internalLoadBalancer)
.withoutPrimaryInternetFacingLoadBalancerNatPools(inboundNatPoolToRemove) // Remove one NatPool
.apply();
virtualMachineScaleSet = this.computeManager.virtualMachineScaleSets().getByResourceGroup(rgName, vmssName);
// Check LB after update
//
Assertions.assertNotNull(virtualMachineScaleSet.getPrimaryInternetFacingLoadBalancer());
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerBackends().size() == 2);
Assertions
.assertTrue(virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerInboundNatPools().size() == 1);
Assertions.assertNotNull(virtualMachineScaleSet.getPrimaryInternalLoadBalancer());
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternalLoadBalancerBackends().size() == 2);
Assertions.assertTrue(virtualMachineScaleSet.listPrimaryInternalLoadBalancerInboundNatPools().size() == 2);
// Check NIC + IpConfig after update
//
nics = virtualMachineScaleSet.listNetworkInterfaces();
nicCount = 0;
for (VirtualMachineScaleSetNetworkInterface nic : nics) {
nicCount++;
Map<String, VirtualMachineScaleSetNicIpConfiguration> ipConfigs = nic.ipConfigurations();
Assertions.assertEquals(ipConfigs.size(), 1);
for (Map.Entry<String, VirtualMachineScaleSetNicIpConfiguration> entry : ipConfigs.entrySet()) {
VirtualMachineScaleSetNicIpConfiguration ipConfig = entry.getValue();
Assertions.assertNotNull(ipConfig);
List<LoadBalancerBackend> lbBackends = ipConfig.listAssociatedLoadBalancerBackends();
Assertions.assertNotNull(lbBackends);
// Updated VMSS has a internet facing LB with two backend pools and a internal LB with two
// backend pools so there should be 4 backends in ip-config
// #1: But this is not always happening, it seems update is really happening only
// for subset of vms [TODO: Report this to network team]
// Assertions.True(lbBackends.Count == 4);
// Assertions.assertEquals(lbBackends.size(), 4);
for (LoadBalancerBackend lbBackend : lbBackends) {
Map<String, LoadBalancingRule> lbRules = lbBackend.loadBalancingRules();
Assertions.assertEquals(lbRules.size(), 1);
for (Map.Entry<String, LoadBalancingRule> ruleEntry : lbRules.entrySet()) {
LoadBalancingRule rule = ruleEntry.getValue();
Assertions.assertNotNull(rule);
Assertions
.assertTrue(
(rule.frontendPort() == 80 && rule.backendPort() == 80)
|| (rule.frontendPort() == 443 && rule.backendPort() == 443)
|| (rule.frontendPort() == 1000 && rule.backendPort() == 1000)
|| (rule.frontendPort() == 1001 && rule.backendPort() == 1001));
}
}
List<LoadBalancerInboundNatRule> lbNatRules = ipConfig.listAssociatedLoadBalancerInboundNatRules();
// Updated VMSS has a internet facing LB with one nat pool and a internal LB with two
// nat pools so there should be 3 nat rule in ip-config
// Same issue as above #1
// But this is not always happening, it seems update is really happening only
// for subset of vms [TODO: Report this to network team]
// Assertions.assertEquals(lbNatRules.size(), 3);
for (LoadBalancerInboundNatRule lbNatRule : lbNatRules) {
// As mentioned above, some changes are not propagating to all VM instances; 6000+ should be there
Assertions
.assertTrue(
(lbNatRule.frontendPort() >= 6000 && lbNatRule.frontendPort() <= 6099)
|| (lbNatRule.frontendPort() >= 5000 && lbNatRule.frontendPort() <= 5099)
|| (lbNatRule.frontendPort() >= 8000 && lbNatRule.frontendPort() <= 8099)
|| (lbNatRule.frontendPort() >= 9000 && lbNatRule.frontendPort() <= 9099));
// Same as above
Assertions
.assertTrue(
lbNatRule.backendPort() == 23
|| lbNatRule.backendPort() == 22
|| lbNatRule.backendPort() == 44
|| lbNatRule.backendPort() == 45);
}
}
}
Assertions.assertTrue(nicCount > 0);
}
/**
 * Verifies that two regional (non-zonal) scale sets can each be associated with a
 * different backend pool (and a different inbound NAT pool) of a single
 * zone-resilient (STANDARD SKU) internet-facing load balancer.
 */
@Test
public void
    canCreateTwoRegionalVirtualMachineScaleSetsAndAssociateEachWithDifferentBackendPoolOfZoneResilientLoadBalancer()
    throws Exception {
    // Zone resilient resource -> resources deployed in all zones by the service and it will be served by all AZs
    // all the time.
    // ZoneResilientLoadBalancer -> STANDARD LB -> [Since service deploy them to all zones, user don't have to set
    // zone explicitly, even if he does its a constrain as user can set only one zone at this time]
    Region region2 = Region.US_EAST2;
    ResourceGroup resourceGroup =
        this.resourceManager.resourceGroups().define(rgName).withRegion(region2).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region2)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    // Creates a STANDARD LB with one public frontend ip configuration with two backend pools
    // Each address pool of STANDARD LB can hold different VMSS resource.
    //
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region2, resourceGroup, "1", LoadBalancerSkuType.STANDARD);
    // With default LB SKU BASIC, an attempt to associate two different VMSS to different
    // backend pool will cause below error (more accurately, while trying to put second VMSS)
    // {
    //   "startTime": "2017-09-06T14:27:22.1849435+00:00",
    //   "endTime": "2017-09-06T14:27:45.8885142+00:00",
    //   "status": "Failed",
    //   "error": {
    //     "code": "VmIsNotInSameAvailabilitySetAsLb",
    //     "message": "Virtual Machine
    // /subscriptions/<sub-id>/resourceGroups/<rg-name>/providers/Microsoft.Compute/virtualMachines/|providers|Microsoft.Compute|virtualMachineScaleSets|<vm-ss-name>|virtualMachines|<instance-id> is using different Availability Set than other Virtual Machines connected to the Load Balancer(s) <lb-name>."
    //   },
    //   "name": "97531d64-db37-4d21-a1cb-9c53aad7c342"
    // }

    // Snapshot the backend-pool and NAT-pool names; the scale-set definitions
    // below refer to them by index (one pool per scale set).
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());
    List<String> natpools = new ArrayList<>(publicLoadBalancer.inboundNatPools().keySet());
    Assertions.assertEquals(2, natpools.size());

    final String vmssName1 = generateRandomResourceName("vmss1", 10);
    // HTTP goes to this virtual machine scale set
    //
    VirtualMachineScaleSet virtualMachineScaleSet1 =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName1)
            .withRegion(region2)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0)) // This VMSS in the first backend pool
            .withPrimaryInternetFacingLoadBalancerInboundNatPools(natpools.get(0))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .create();

    final String vmssName2 = generateRandomResourceName("vmss2", 10);
    // HTTPS goes to this virtual machine scale set
    //
    VirtualMachineScaleSet virtualMachineScaleSet2 =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName2)
            .withRegion(region2)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(1)) // This VMSS in the second backend pool
            .withPrimaryInternetFacingLoadBalancerInboundNatPools(natpools.get(1))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .create();

    // Validate Network specific properties (LB, VNet, NIC, IPConfig etc..)
    // Each scale set should be internet-facing only, attached to exactly one backend pool.
    //
    Assertions.assertNull(virtualMachineScaleSet1.getPrimaryInternalLoadBalancer());
    Assertions.assertEquals(0, virtualMachineScaleSet1.listPrimaryInternalLoadBalancerBackends().size());
    Assertions.assertEquals(0, virtualMachineScaleSet1.listPrimaryInternalLoadBalancerInboundNatPools().size());
    Assertions.assertNotNull(virtualMachineScaleSet1.getPrimaryInternetFacingLoadBalancer());
    Assertions.assertEquals(1, virtualMachineScaleSet1.listPrimaryInternetFacingLoadBalancerBackends().size());
    Assertions.assertNull(virtualMachineScaleSet2.getPrimaryInternalLoadBalancer());
    Assertions.assertEquals(0, virtualMachineScaleSet2.listPrimaryInternalLoadBalancerBackends().size());
    Assertions.assertEquals(0, virtualMachineScaleSet2.listPrimaryInternalLoadBalancerInboundNatPools().size());
    Assertions.assertNotNull(virtualMachineScaleSet2.getPrimaryInternetFacingLoadBalancer());
    Assertions.assertEquals(1, virtualMachineScaleSet2.listPrimaryInternetFacingLoadBalancerBackends().size());
}
/**
 * Verifies that a zone-redundant scale set (instances spread over zones 1 and 2)
 * can be created behind a zone-resilient (STANDARD SKU) load balancer, attached
 * to both of its backend pools.
 */
@Test
public void canCreateZoneRedundantVirtualMachineScaleSetWithZoneResilientLoadBalancer() throws Exception {
    // Zone redundant VMSS is the one with multiple zones
    //
    Region region2 = Region.US_EAST2;
    ResourceGroup resourceGroup =
        this.resourceManager.resourceGroups().define(rgName).withRegion(region2).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region2)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    // Zone redundant VMSS requires STANDARD LB
    //
    // Creates a STANDARD LB with one public frontend ip configuration with two backend pools
    // Each address pool of STANDARD LB can hold different VMSS resource.
    //
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region2, resourceGroup, "1", LoadBalancerSkuType.STANDARD);
    // Snapshot the backend-pool names; the scale set is attached to both pools below.
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());

    final String vmssName = generateRandomResourceName("vmss", 10);
    // HTTP & HTTPS traffic on port 80, 443 of Internet-facing LB goes to corresponding port in virtual machine
    // scale set
    //
    VirtualMachineScaleSet virtualMachineScaleSet =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName)
            .withRegion(region2)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .withAvailabilityZone(AvailabilityZoneId.ZONE_1) // Zone redundant - zone 1 + zone 2
            .withAvailabilityZone(AvailabilityZoneId.ZONE_2)
            .create();

    // Check zones
    //
    Assertions.assertNotNull(virtualMachineScaleSet.availabilityZones());
    Assertions.assertEquals(2, virtualMachineScaleSet.availabilityZones().size());

    // Validate Network specific properties (LB, VNet, NIC, IPConfig etc..)
    //
    Assertions.assertNull(virtualMachineScaleSet.getPrimaryInternalLoadBalancer());
    Assertions.assertEquals(0, virtualMachineScaleSet.listPrimaryInternalLoadBalancerBackends().size());
    Assertions.assertEquals(0, virtualMachineScaleSet.listPrimaryInternalLoadBalancerInboundNatPools().size());
    Assertions.assertNotNull(virtualMachineScaleSet.getPrimaryInternetFacingLoadBalancer());
    Assertions.assertEquals(2, virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerBackends().size());
    Assertions
        .assertEquals(2, virtualMachineScaleSet.listPrimaryInternetFacingLoadBalancerInboundNatPools().size());
    Network primaryNetwork = virtualMachineScaleSet.getPrimaryNetwork();
    Assertions.assertNotNull(primaryNetwork.id());
}
/**
 * Verifies that enabling a system-assigned managed identity on a scale set
 * WITHOUT requesting any role assignment does not silently create a role
 * assignment on the resource group.
 */
@Test
public void canEnableMSIOnVirtualMachineScaleSetWithoutRoleAssignment() throws Exception {
    final String vmssName = generateRandomResourceName("vmss", 10);
    ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
    // Snapshot the backend-pool names; the scale set is attached to both pools below.
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());
    VirtualMachineScaleSet virtualMachineScaleSet =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .withSystemAssignedManagedServiceIdentity() // identity only; no role assignment requested
            .create();

    // Validate service created service principal
    //
    // TODO: Renable the below code snippet: https://github.com/Azure/azure-libraries-for-net/issues/739
    //        ServicePrincipal servicePrincipal = authorizationManager
    //                .servicePrincipals()
    //                .getById(virtualMachineScaleSet.systemAssignedManagedServiceIdentityPrincipalId());
    //
    //        Assertions.assertNotNull(servicePrincipal);
    //        Assertions.assertNotNull(servicePrincipal.inner());

    // Ensure NO role was assigned for the resource group: scan all assignments in
    // scope for one whose principal matches the scale set's MSI principal.
    //
    PagedIterable<RoleAssignment> rgRoleAssignments = authorizationManager.roleAssignments().listByScope(resourceGroup.id());
    Assertions.assertNotNull(rgRoleAssignments);
    boolean found = false;
    for (RoleAssignment roleAssignment : rgRoleAssignments) {
        if (roleAssignment.principalId() != null
            && roleAssignment
                .principalId()
                .equalsIgnoreCase(virtualMachineScaleSet.systemAssignedManagedServiceIdentityPrincipalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertFalse(
            found, "Resource group should not have a role assignment with virtual machine scale set MSI principal");
}
/**
 * Verifies that a system-assigned managed identity with two role assignments
 * (CONTRIBUTOR on the resource group and on a storage account) results in a
 * role assignment for the MSI principal at both scopes.
 */
@Test
public void canEnableMSIOnVirtualMachineScaleSetWithMultipleRoleAssignment() throws Exception {
    final String vmssName = generateRandomResourceName("vmss", 10);
    ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
    // Snapshot the backend-pool names; the scale set is attached to both pools below.
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());
    // Storage account used as the target of the second role assignment.
    StorageAccount storageAccount =
        this
            .storageManager
            .storageAccounts()
            .define(generateRandomResourceName("jvcsrg", 10))
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .create();
    VirtualMachineScaleSet virtualMachineScaleSet =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .withSystemAssignedManagedServiceIdentity()
            .withSystemAssignedIdentityBasedAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR)
            .withSystemAssignedIdentityBasedAccessTo(storageAccount.id(), BuiltInRole.CONTRIBUTOR)
            .create();
    Assertions.assertNotNull(virtualMachineScaleSet.managedServiceIdentityType());
    Assertions
        .assertEquals(
            ResourceIdentityType.SYSTEM_ASSIGNED, virtualMachineScaleSet.managedServiceIdentityType());

    // Validate service created service principal
    //
    // TODO: Renable the below code snippet: https://github.com/Azure/azure-libraries-for-net/issues/739
    //        ServicePrincipal servicePrincipal = authorizationManager
    //                .servicePrincipals()
    //                .getById(virtualMachineScaleSet.systemAssignedManagedServiceIdentityPrincipalId());
    //
    //        Assertions.assertNotNull(servicePrincipal);
    //        Assertions.assertNotNull(servicePrincipal.inner());

    // Ensure role assigned for resource group
    //
    PagedIterable<RoleAssignment> rgRoleAssignments = authorizationManager.roleAssignments().listByScope(resourceGroup.id());
    Assertions.assertNotNull(rgRoleAssignments);
    boolean found = false;
    for (RoleAssignment roleAssignment : rgRoleAssignments) {
        if (roleAssignment.principalId() != null
            && roleAssignment
                .principalId()
                .equalsIgnoreCase(virtualMachineScaleSet.systemAssignedManagedServiceIdentityPrincipalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found, "Resource group should have a role assignment with virtual machine scale set MSI principal");

    // Ensure role assigned for storage account
    //
    PagedIterable<RoleAssignment> stgRoleAssignments =
        authorizationManager.roleAssignments().listByScope(storageAccount.id());
    Assertions.assertNotNull(stgRoleAssignments);
    found = false;
    for (RoleAssignment roleAssignment : stgRoleAssignments) {
        if (roleAssignment.principalId() != null
            && roleAssignment
                .principalId()
                .equalsIgnoreCase(virtualMachineScaleSet.systemAssignedManagedServiceIdentityPrincipalId())) {
            found = true;
            break;
        }
    }
    Assertions
        .assertTrue(
            found, "Storage account should have a role assignment with virtual machine scale set MSI principal");
}
/**
 * Verifies that a single scale-set VM instance can be fetched by instance id,
 * both synchronously and asynchronously, and matches the listed instance.
 */
@Test
public void canGetSingleVMSSInstance() throws Exception {
    final String vmssName = generateRandomResourceName("vmss", 10);
    ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
    // Snapshot the backend-pool names; the scale set is attached to both pools below.
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());
    this
        .computeManager
        .virtualMachineScaleSets()
        .define(vmssName)
        .withRegion(region)
        .withExistingResourceGroup(resourceGroup)
        .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
        .withExistingPrimaryNetworkSubnet(network, "subnet1")
        .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
        .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
        .withoutPrimaryInternalLoadBalancer()
        .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
        .withRootUsername("jvuser")
        .withRootPassword("123OData!@#123")
        .withNewStorageAccount(generateRandomResourceName("stg", 15))
        .withNewStorageAccount(generateRandomResourceName("stg3", 15))
        .withUpgradeMode(UpgradeMode.MANUAL)
        .create();

    // Re-fetch the scale set and compare the first listed instance with the same
    // instance retrieved directly by id (sync and async paths).
    VirtualMachineScaleSet virtualMachineScaleSet =
        this.computeManager.virtualMachineScaleSets().getByResourceGroup(rgName, vmssName);
    VirtualMachineScaleSetVMs virtualMachineScaleSetVMs = virtualMachineScaleSet.virtualMachines();
    VirtualMachineScaleSetVM firstVm = virtualMachineScaleSetVMs.list().iterator().next();
    VirtualMachineScaleSetVM fetchedVm = virtualMachineScaleSetVMs.getInstance(firstVm.instanceId());
    this.checkVmsEqual(firstVm, fetchedVm);
    VirtualMachineScaleSetVM fetchedAsyncVm =
        virtualMachineScaleSetVMs.getInstanceAsync(firstVm.instanceId()).block();
    this.checkVmsEqual(firstVm, fetchedAsyncVm);
}
/**
 * Verifies that a low-priority (spot-precursor) scale set can be created with a
 * DEALLOCATE eviction policy and an unrestricted max price (-1), and that the
 * max price can subsequently be raised via update.
 */
@Test
public void canCreateLowPriorityVMSSInstance() throws Exception {
    final String vmssName = generateRandomResourceName("vmss", 10);
    ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
    Network network =
        this
            .networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();
    LoadBalancer publicLoadBalancer =
        createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.STANDARD);
    // Snapshot the backend-pool names; the scale set is attached to both pools below.
    List<String> backends = new ArrayList<>(publicLoadBalancer.backends().keySet());
    Assertions.assertEquals(2, backends.size());
    VirtualMachineScaleSet vmss =
        this
            .computeManager
            .virtualMachineScaleSets()
            .define(vmssName)
            .withRegion(region)
            .withExistingResourceGroup(resourceGroup)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
            .withExistingPrimaryNetworkSubnet(network, "subnet1")
            .withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
            .withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .withNewStorageAccount(generateRandomResourceName("stg", 15))
            .withNewStorageAccount(generateRandomResourceName("stg3", 15))
            .withUpgradeMode(UpgradeMode.MANUAL)
            .withLowPriorityVirtualMachine(VirtualMachineEvictionPolicyTypes.DEALLOCATE)
            .withMaxPrice(-1.0) // -1 means "pay up to the on-demand price" (no cap)
            .create();
    // JUnit assertEquals takes (expected, actual) in that order.
    Assertions.assertEquals(VirtualMachinePriorityTypes.LOW, vmss.virtualMachinePriority());
    Assertions.assertEquals(VirtualMachineEvictionPolicyTypes.DEALLOCATE, vmss.virtualMachineEvictionPolicy());
    Assertions.assertEquals((Double) (-1.0), vmss.billingProfile().maxPrice());
    // The max price is mutable on an existing scale set.
    vmss.update().withMaxPrice(2000.0).apply();
    Assertions.assertEquals((Double) 2000.0, vmss.billingProfile().maxPrice());
}
/**
 * Verifies that simulate-eviction can be invoked on every instance of a spot
 * scale set, and that after the eviction grace period each deallocated instance
 * reports an OS disk size of zero.
 */
@Test
public void canPerformSimulateEvictionOnSpotVMSSInstance() {
    final String scaleSetName = generateRandomResourceName("vmss", 10);

    // Resource group and virtual network hosting the scale set.
    ResourceGroup group =
        this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
    Network vnet =
        this.networkManager
            .networks()
            .define("vmssvnet")
            .withRegion(region)
            .withExistingResourceGroup(group)
            .withAddressSpace("10.0.0.0/28")
            .withSubnet("subnet1", "10.0.0.0/28")
            .create();

    // Spot-priority scale set with no load balancer; evicted VMs are deallocated.
    VirtualMachineScaleSet scaleSet =
        computeManager.virtualMachineScaleSets()
            .define(scaleSetName)
            .withRegion(region)
            .withExistingResourceGroup(rgName)
            .withSku(VirtualMachineScaleSetSkuTypes.STANDARD_D3_V2)
            .withExistingPrimaryNetworkSubnet(vnet, "subnet1")
            .withoutPrimaryInternetFacingLoadBalancer()
            .withoutPrimaryInternalLoadBalancer()
            .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
            .withRootUsername("jvuser")
            .withRootPassword("123OData!@#123")
            .withSpotPriorityVirtualMachine(VirtualMachineEvictionPolicyTypes.DEALLOCATE)
            .create();

    PagedIterable<VirtualMachineScaleSetVM> instances = scaleSet.virtualMachines().list();
    for (VirtualMachineScaleSetVM vm : instances) {
        // A running instance must have a provisioned OS disk.
        Assertions.assertTrue(vm.osDiskSizeInGB() > 0);
        // call simulate eviction
        scaleSet.virtualMachines().simulateEviction(vm.instanceId());
    }

    // Give the service time to carry out the simulated evictions.
    ResourceManagerUtils.sleep(Duration.ofMinutes(30));

    for (VirtualMachineScaleSetVM vm : instances) {
        vm.refresh();
        // Deallocated (evicted) instances report a zero-sized OS disk.
        Assertions.assertTrue(vm.osDiskSizeInGB() == 0);
    }
}
/**
 * Asserts that two views of the same scale-set VM instance expose identical
 * properties.
 *
 * <p>Fixes two copy/paste bugs in the original: the Linux-password-auth check
 * compared against {@code fetched.isLatestScaleSetUpdateApplied()} (wrong
 * getter), and the Windows-VM-agent check compared {@code original} against
 * itself, so {@code fetched} was never validated.
 *
 * @param original the instance obtained from the list operation
 * @param fetched the same instance obtained via a get-by-id operation
 */
private void checkVmsEqual(VirtualMachineScaleSetVM original, VirtualMachineScaleSetVM fetched) {
    Assertions.assertEquals(original.administratorUserName(), fetched.administratorUserName());
    Assertions.assertEquals(original.availabilitySetId(), fetched.availabilitySetId());
    Assertions.assertEquals(original.bootDiagnosticEnabled(), fetched.bootDiagnosticEnabled());
    Assertions.assertEquals(original.bootDiagnosticStorageAccountUri(), fetched.bootDiagnosticStorageAccountUri());
    Assertions.assertEquals(original.computerName(), fetched.computerName());
    Assertions.assertEquals(original.dataDisks().size(), fetched.dataDisks().size());
    Assertions.assertEquals(original.extensions().size(), fetched.extensions().size());
    Assertions.assertEquals(original.instanceId(), fetched.instanceId());
    Assertions.assertEquals(original.isLatestScaleSetUpdateApplied(), fetched.isLatestScaleSetUpdateApplied());
    // BUGFIX: compare like-for-like getters (was fetched.isLatestScaleSetUpdateApplied()).
    Assertions
        .assertEquals(original.isLinuxPasswordAuthenticationEnabled(), fetched.isLinuxPasswordAuthenticationEnabled());
    Assertions.assertEquals(original.isManagedDiskEnabled(), fetched.isManagedDiskEnabled());
    Assertions.assertEquals(original.isOSBasedOnCustomImage(), fetched.isOSBasedOnCustomImage());
    Assertions.assertEquals(original.isOSBasedOnPlatformImage(), fetched.isOSBasedOnPlatformImage());
    Assertions.assertEquals(original.isOSBasedOnStoredImage(), fetched.isOSBasedOnStoredImage());
    Assertions.assertEquals(original.isWindowsAutoUpdateEnabled(), fetched.isWindowsAutoUpdateEnabled());
    // BUGFIX: was comparing original against itself, never validating `fetched`.
    Assertions.assertEquals(original.isWindowsVMAgentProvisioned(), fetched.isWindowsVMAgentProvisioned());
    Assertions.assertEquals(original.networkInterfaceIds().size(), fetched.networkInterfaceIds().size());
    Assertions.assertEquals(original.osDiskCachingType(), fetched.osDiskCachingType());
    Assertions.assertEquals(original.osDiskId(), fetched.osDiskId());
    Assertions.assertEquals(original.osDiskName(), fetched.osDiskName());
    Assertions.assertEquals(original.osDiskSizeInGB(), fetched.osDiskSizeInGB());
    Assertions.assertEquals(original.osType(), fetched.osType());
    Assertions.assertEquals(original.osUnmanagedDiskVhdUri(), fetched.osUnmanagedDiskVhdUri());
    Assertions.assertEquals(original.powerState(), fetched.powerState());
    Assertions.assertEquals(original.primaryNetworkInterfaceId(), fetched.primaryNetworkInterfaceId());
    Assertions.assertEquals(original.size(), fetched.size());
    Assertions.assertEquals(original.sku().name(), fetched.sku().name());
    Assertions.assertEquals(original.storedImageUnmanagedVhdUri(), fetched.storedImageUnmanagedVhdUri());
    Assertions.assertEquals(original.unmanagedDataDisks().size(), fetched.unmanagedDataDisks().size());
    Assertions.assertEquals(original.windowsTimeZone(), fetched.windowsTimeZone());
}
/**
 * Validates the VM instances of an unmanaged-disk Linux scale set: per-instance
 * properties, power-state actions (restart, power off, start) and per-instance
 * network interfaces.
 *
 * <p>Fixes: the computer-name prefix check used {@code assertNotNull} on a
 * {@code boolean} (auto-boxed, never null, so it always passed) instead of
 * {@code assertTrue}; and the NIC/VM id comparison applied a redundant
 * {@code toLowerCase()} before {@code equalsIgnoreCase}.
 *
 * @param vmScaleSet the scale set whose instances are validated
 */
private void checkVMInstances(VirtualMachineScaleSet vmScaleSet) {
    VirtualMachineScaleSetVMs virtualMachineScaleSetVMs = vmScaleSet.virtualMachines();
    PagedIterable<VirtualMachineScaleSetVM> virtualMachines = virtualMachineScaleSetVMs.list();
    Assertions.assertEquals(TestUtilities.getSize(virtualMachines), vmScaleSet.capacity());
    Assertions.assertTrue(TestUtilities.getSize(virtualMachines) > 0);
    virtualMachineScaleSetVMs.updateInstances(virtualMachines.iterator().next().instanceId());
    for (VirtualMachineScaleSetVM vm : virtualMachines) {
        Assertions.assertNotNull(vm.size());
        Assertions.assertEquals(vm.osType(), OperatingSystemTypes.LINUX);
        // BUGFIX: assertNotNull on a boxed boolean always passes; assertTrue is intended.
        Assertions.assertTrue(vm.computerName().startsWith(vmScaleSet.computerNamePrefix()));
        Assertions.assertTrue(vm.isLinuxPasswordAuthenticationEnabled());
        Assertions.assertTrue(vm.isOSBasedOnPlatformImage());
        Assertions.assertNull(vm.osDiskId()); // VMSS is un-managed, so osDiskId must be null
        Assertions.assertNotNull(vm.osUnmanagedDiskVhdUri()); // VMSS is un-managed, so osVhd should not be null
        Assertions.assertNull(vm.storedImageUnmanagedVhdUri());
        Assertions.assertFalse(vm.isWindowsAutoUpdateEnabled());
        Assertions.assertFalse(vm.isWindowsVMAgentProvisioned());
        Assertions.assertTrue(vm.administratorUserName().equalsIgnoreCase("jvuser"));
        VirtualMachineImage vmImage = vm.getOSPlatformImage();
        Assertions.assertNotNull(vmImage);
        Assertions.assertEquals(vm.extensions().size(), vmScaleSet.extensions().size());
        Assertions.assertNotNull(vm.powerState());
        vm.refreshInstanceView();
    }

    // Check actions: restart, power off (verify STOPPED), then start again.
    VirtualMachineScaleSetVM virtualMachineScaleSetVM = virtualMachines.iterator().next();
    Assertions.assertNotNull(virtualMachineScaleSetVM);
    virtualMachineScaleSetVM.restart();
    virtualMachineScaleSetVM.powerOff();
    virtualMachineScaleSetVM.refreshInstanceView();
    Assertions.assertEquals(virtualMachineScaleSetVM.powerState(), PowerState.STOPPED);
    virtualMachineScaleSetVM.start();

    // Check Instance NICs
    //
    for (VirtualMachineScaleSetVM vm : virtualMachines) {
        PagedIterable<VirtualMachineScaleSetNetworkInterface> nics =
            vmScaleSet.listNetworkInterfacesByInstanceId(vm.instanceId());
        Assertions.assertNotNull(nics);
        Assertions.assertEquals(TestUtilities.getSize(nics), 1);
        VirtualMachineScaleSetNetworkInterface nic = nics.iterator().next();
        Assertions.assertNotNull(nic.virtualMachineId());
        // equalsIgnoreCase already performs a case-insensitive comparison; the
        // former toLowerCase() call was redundant.
        Assertions.assertTrue(nic.virtualMachineId().equalsIgnoreCase(vm.id()));
        Assertions.assertNotNull(vm.listNetworkInterfaces());
        VirtualMachineScaleSetNetworkInterface nicA =
            vmScaleSet.getNetworkInterfaceByInstanceId(vm.instanceId(), nic.name());
        Assertions.assertNotNull(nicA);
        VirtualMachineScaleSetNetworkInterface nicB = vm.getNetworkInterface(nic.name());
        Assertions.assertNotNull(nicB);
    }
}
/**
 * Verifies that {@code VirtualMachineScaleSetSkuTypes#sku()} returns a fresh
 * copy each call, so mutating one copy's capacity affects neither the shared
 * SKU type nor other copies.
 *
 * <p>Fixes: replaces the deprecated {@code new Long(n)} boxing constructor with
 * plain long literals, and corrects the JUnit (expected, actual) argument order.
 */
@Test
public void testVirtualMachineScaleSetSkuTypes() {
    rgName = null; // no Azure resources created; prevents cleanup from looking for a group
    VirtualMachineScaleSetSkuTypes skuType = VirtualMachineScaleSetSkuTypes.STANDARD_A0;
    Assertions.assertNull(skuType.sku().capacity());
    // first copy of sku
    Sku sku1 = skuType.sku();
    Assertions.assertNull(sku1.capacity());
    sku1.withCapacity(1L);
    Assertions.assertEquals(1, sku1.capacity().longValue());
    // Ensure the original is not affected
    Assertions.assertNull(skuType.sku().capacity());
    // second copy of sku
    Sku sku2 = skuType.sku();
    Assertions.assertNull(sku2.capacity());
    sku2.withCapacity(2L);
    Assertions.assertEquals(2, sku2.capacity().longValue());
    // Ensure the original is not affected
    Assertions.assertNull(skuType.sku().capacity());
    // Ensure previous copy is not affected due to change in first copy
    Assertions.assertEquals(1, sku1.capacity().longValue());
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.inspector.model;
import java.io.Serializable;
/**
*
*/
/**
 * Result of a DescribeAssessmentRuns request: the assessment runs that could be
 * described, plus a per-item error code for every run that could not.
 */
public class DescribeAssessmentRunsResult implements Serializable, Cloneable {

    /**
     * <p>
     * Information about the assessment run.
     * </p>
     */
    private java.util.List<AssessmentRun> assessmentRuns;

    /**
     * <p>
     * Assessment run details that cannot be described. An error code is
     * provided for each failed item.
     * </p>
     */
    private java.util.Map<String, FailedItemDetails> failedItems;

    /**
     * <p>
     * Information about the assessment run.
     * </p>
     *
     * @return Information about the assessment run.
     */
    public java.util.List<AssessmentRun> getAssessmentRuns() {
        return assessmentRuns;
    }

    /**
     * <p>
     * Information about the assessment run.
     * </p>
     *
     * @param assessmentRuns
     *        Information about the assessment run.
     */
    public void setAssessmentRuns(
            java.util.Collection<AssessmentRun> assessmentRuns) {
        // Defensive copy: later mutation of the caller's collection must not
        // affect this object. A null argument clears the field.
        this.assessmentRuns = (assessmentRuns == null)
                ? null
                : new java.util.ArrayList<AssessmentRun>(assessmentRuns);
    }

    /**
     * <p>
     * Information about the assessment run.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setAssessmentRuns(java.util.Collection)} or
     * {@link #withAssessmentRuns(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param assessmentRuns
     *        Information about the assessment run.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeAssessmentRunsResult withAssessmentRuns(
            AssessmentRun... assessmentRuns) {
        if (this.assessmentRuns == null) {
            setAssessmentRuns(new java.util.ArrayList<AssessmentRun>(
                    assessmentRuns.length));
        }
        for (AssessmentRun ele : assessmentRuns) {
            this.assessmentRuns.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Information about the assessment run.
     * </p>
     *
     * @param assessmentRuns
     *        Information about the assessment run.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeAssessmentRunsResult withAssessmentRuns(
            java.util.Collection<AssessmentRun> assessmentRuns) {
        setAssessmentRuns(assessmentRuns);
        return this;
    }

    /**
     * <p>
     * Assessment run details that cannot be described. An error code is
     * provided for each failed item.
     * </p>
     *
     * @return Assessment run details that cannot be described. An error code is
     *         provided for each failed item.
     */
    public java.util.Map<String, FailedItemDetails> getFailedItems() {
        return failedItems;
    }

    /**
     * <p>
     * Assessment run details that cannot be described. An error code is
     * provided for each failed item.
     * </p>
     *
     * @param failedItems
     *        Assessment run details that cannot be described. An error code is
     *        provided for each failed item.
     */
    public void setFailedItems(
            java.util.Map<String, FailedItemDetails> failedItems) {
        this.failedItems = failedItems;
    }

    /**
     * <p>
     * Assessment run details that cannot be described. An error code is
     * provided for each failed item.
     * </p>
     *
     * @param failedItems
     *        Assessment run details that cannot be described. An error code is
     *        provided for each failed item.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeAssessmentRunsResult withFailedItems(
            java.util.Map<String, FailedItemDetails> failedItems) {
        setFailedItems(failedItems);
        return this;
    }

    /**
     * Adds a single entry to the FailedItems map, creating the map on first
     * use.
     *
     * @param key the failed item's key
     * @param value the error details for the failed item
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException if the key is already present
     */
    public DescribeAssessmentRunsResult addFailedItemsEntry(String key,
            FailedItemDetails value) {
        if (null == this.failedItems) {
            this.failedItems = new java.util.HashMap<String, FailedItemDetails>();
        }
        // BUGFIX: the former key.toString() threw NullPointerException for a
        // duplicate null key; string concatenation is null-safe.
        if (this.failedItems.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key
                    + ") are provided.");
        this.failedItems.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into FailedItems. &lt;p&gt; Returns a
     * reference to this object so that method calls can be chained together.
     */
    public DescribeAssessmentRunsResult clearFailedItemsEntries() {
        this.failedItems = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format (including the trailing comma after AssessmentRuns)
        // intentionally matches the generated SDK style.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAssessmentRuns() != null)
            sb.append("AssessmentRuns: " + getAssessmentRuns() + ",");
        if (getFailedItems() != null)
            sb.append("FailedItems: " + getFailedItems());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeAssessmentRunsResult))
            return false;
        DescribeAssessmentRunsResult other = (DescribeAssessmentRunsResult) obj;
        // Objects.equals is null-safe and matches the original xor-based logic.
        return java.util.Objects.equals(getAssessmentRuns(), other.getAssessmentRuns())
                && java.util.Objects.equals(getFailedItems(), other.getFailedItems());
    }

    @Override
    public int hashCode() {
        // Produces the same value as the hand-rolled 31-based accumulation the
        // generated code used (Objects.hash uses the identical algorithm).
        return java.util.Objects.hash(getAssessmentRuns(), getFailedItems());
    }

    @Override
    public DescribeAssessmentRunsResult clone() {
        try {
            return (DescribeAssessmentRunsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* Runs a job multiple times and takes average of all runs.
*/
public class MRBench extends Configured implements Tool{
private static final Log LOG = LogFactory.getLog(MRBench.class);
private static Path BASE_DIR =
new Path(System.getProperty("test.build.data","/benchmarks/MRBench"));
private static Path INPUT_DIR = new Path(BASE_DIR, "mr_input");
private static Path OUTPUT_DIR = new Path(BASE_DIR, "mr_output");
public static enum Order {RANDOM, ASCENDING, DESCENDING};
/**
* Takes input format as text lines, runs some processing on it and
* writes out data as text again.
*/
public static class Map extends MapReduceBase
implements Mapper<WritableComparable, Text, UTF8, UTF8> {
public void map(WritableComparable key, Text value,
OutputCollector<UTF8, UTF8> output,
Reporter reporter) throws IOException
{
String line = value.toString();
output.collect(new UTF8(process(line)), new UTF8(""));
}
public String process(String line) {
return line;
}
}
/**
* Ignores the key and writes values to the output.
*/
public static class Reduce extends MapReduceBase
implements Reducer<UTF8, UTF8, UTF8, UTF8> {
public void reduce(UTF8 key, Iterator<UTF8> values,
OutputCollector<UTF8, UTF8> output, Reporter reporter) throws IOException
{
while(values.hasNext()) {
output.collect(key, new UTF8(values.next().toString()));
}
}
}
/**
* Generate a text file on the given filesystem with the given path name.
* The text file will contain the given number of lines of generated data.
* The generated data are string representations of numbers. Each line
* is the same length, which is achieved by padding each number with
* an appropriate number of leading '0' (zero) characters. The order of
* generated data is one of ascending, descending, or random.
*/
public void generateTextFile(FileSystem fs, Path inputFile,
long numLines, Order sortOrder) throws IOException
{
LOG.info("creating control file: "+numLines+" numLines, "+sortOrder+" sortOrder");
PrintStream output = null;
try {
output = new PrintStream(fs.create(inputFile));
int padding = String.valueOf(numLines).length();
switch(sortOrder) {
case RANDOM:
for (long l = 0; l < numLines; l++) {
output.println(pad((new Random()).nextLong(), padding));
}
break;
case ASCENDING:
for (long l = 0; l < numLines; l++) {
output.println(pad(l, padding));
}
break;
case DESCENDING:
for (long l = numLines; l > 0; l--) {
output.println(pad(l, padding));
}
break;
}
} finally {
if (output != null)
output.close();
}
LOG.info("created control file: " + inputFile);
}
/**
* Convert the given number to a string and pad the number with
* leading '0' (zero) characters so that the string is exactly
* the given length.
*/
private static String pad(long number, int length) {
String str = String.valueOf(number);
StringBuffer value = new StringBuffer();
for (int i = str.length(); i < length; i++) {
value.append("0");
}
value.append(str);
return value.toString();
}
/**
* Create the job configuration.
*/
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
JobConf jobConf = new JobConf(getConf());
jobConf.setJarByClass(MRBench.class);
FileInputFormat.addInputPath(jobConf, INPUT_DIR);
jobConf.setInputFormat(TextInputFormat.class);
jobConf.setOutputFormat(TextOutputFormat.class);
jobConf.setOutputValueClass(UTF8.class);
jobConf.setMapOutputKeyClass(UTF8.class);
jobConf.setMapOutputValueClass(UTF8.class);
if (null != jarFile) {
jobConf.setJar(jarFile);
}
jobConf.setMapperClass(Map.class);
jobConf.setReducerClass(Reduce.class);
jobConf.setNumMapTasks(numMaps);
jobConf.setNumReduceTasks(numReduces);
return jobConf;
}
/**
* Runs a MapReduce task, given number of times. The input to each run
* is the same file.
*/
private ArrayList<Long> runJobInSequence(JobConf masterJobConf, int numRuns) throws IOException {
Random rand = new Random();
ArrayList<Long> execTimes = new ArrayList<Long>();
for (int i = 0; i < numRuns; i++) {
// create a new job conf every time, reusing same object does not work
JobConf jobConf = new JobConf(masterJobConf);
// reset the job jar because the copy constructor doesn't
jobConf.setJar(masterJobConf.getJar());
// give a new random name to output of the mapred tasks
FileOutputFormat.setOutputPath(jobConf,
new Path(OUTPUT_DIR, "output_" + rand.nextInt()));
LOG.info("Running job " + i + ":" +
" input=" + FileInputFormat.getInputPaths(jobConf)[0] +
" output=" + FileOutputFormat.getOutputPath(jobConf));
// run the mapred task now
long curTime = System.currentTimeMillis();
JobClient.runJob(jobConf);
execTimes.add(new Long(System.currentTimeMillis() - curTime));
}
return execTimes;
}
/**
* <pre>
* Usage: mrbench
* [-baseDir <base DFS path for output/input, default is /benchmarks/MRBench>]
* [-jar <local path to job jar file containing Mapper and Reducer implementations, default is current jar file>]
* [-numRuns <number of times to run the job, default is 1>]
* [-maps <number of maps for each run, default is 2>]
* [-reduces <number of reduces for each run, default is 1>]
* [-inputLines <number of input lines to generate, default is 1>]
* [-inputType <type of input to generate, one of ascending (default), descending, random>]
* [-verbose]
* </pre>
*/
public static void main (String[] args) throws Exception {
int res = ToolRunner.run(new MRBench(), args);
System.exit(res);
}
@Override
public int run(String[] args) throws Exception {
String version = "MRBenchmark.0.0.2";
System.out.println(version);
String usage =
"Usage: mrbench " +
"[-baseDir <base DFS path for output/input, default is /benchmarks/MRBench>] " +
"[-jar <local path to job jar file containing Mapper and Reducer implementations, default is current jar file>] " +
"[-numRuns <number of times to run the job, default is 1>] " +
"[-maps <number of maps for each run, default is 2>] " +
"[-reduces <number of reduces for each run, default is 1>] " +
"[-inputLines <number of input lines to generate, default is 1>] " +
"[-inputType <type of input to generate, one of ascending (default), descending, random>] " +
"[-verbose]";
String jarFile = null;
int inputLines = 1;
int numRuns = 1;
int numMaps = 2;
int numReduces = 1;
boolean verbose = false;
Order inputSortOrder = Order.ASCENDING;
for (int i = 0; i < args.length; i++) { // parse command line
if (args[i].equals("-jar")) {
jarFile = args[++i];
} else if (args[i].equals("-numRuns")) {
numRuns = Integer.parseInt(args[++i]);
} else if (args[i].equals("-baseDir")) {
BASE_DIR = new Path(args[++i]);
} else if (args[i].equals("-maps")) {
numMaps = Integer.parseInt(args[++i]);
} else if (args[i].equals("-reduces")) {
numReduces = Integer.parseInt(args[++i]);
} else if (args[i].equals("-inputLines")) {
inputLines = Integer.parseInt(args[++i]);
} else if (args[i].equals("-inputType")) {
String s = args[++i];
if (s.equalsIgnoreCase("ascending")) {
inputSortOrder = Order.ASCENDING;
} else if (s.equalsIgnoreCase("descending")) {
inputSortOrder = Order.DESCENDING;
} else if (s.equalsIgnoreCase("random")) {
inputSortOrder = Order.RANDOM;
} else {
inputSortOrder = null;
}
} else if (args[i].equals("-verbose")) {
verbose = true;
} else {
System.err.println(usage);
System.exit(-1);
}
}
if (numRuns < 1 || // verify args
numMaps < 1 ||
numReduces < 1 ||
inputLines < 0 ||
inputSortOrder == null)
{
System.err.println(usage);
return -1;
}
JobConf jobConf = setupJob(numMaps, numReduces, jarFile);
FileSystem fs = FileSystem.get(jobConf);
Path inputFile = new Path(INPUT_DIR, "input_" + (new Random()).nextInt() + ".txt");
generateTextFile(fs, inputFile, inputLines, inputSortOrder);
// setup test output directory
fs.mkdirs(BASE_DIR);
ArrayList<Long> execTimes = new ArrayList<Long>();
try {
execTimes = runJobInSequence(jobConf, numRuns);
} finally {
// delete output -- should we really do this?
fs.delete(BASE_DIR, true);
}
if (verbose) {
// Print out a report
System.out.println("Total MapReduce jobs executed: " + numRuns);
System.out.println("Total lines of data per job: " + inputLines);
System.out.println("Maps per job: " + numMaps);
System.out.println("Reduces per job: " + numReduces);
}
int i = 0;
long totalTime = 0;
for (Long time : execTimes) {
totalTime += time.longValue();
if (verbose) {
System.out.println("Total milliseconds for task: " + (++i) +
" = " + time);
}
}
long avgTime = totalTime / numRuns;
System.out.println("DataLines\tMaps\tReduces\tAvgTime (milliseconds)");
System.out.println(inputLines + "\t\t" + numMaps + "\t" +
numReduces + "\t" + avgTime);
return 0;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/RestoreTableFromBackup" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RestoreTableFromBackupRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the new table to which the backup must be restored.
     * </p>
     */
    private String targetTableName;
    /**
     * <p>
     * The Amazon Resource Name (ARN) associated with the backup.
     * </p>
     */
    private String backupArn;
    /**
     * <p>
     * The billing mode of the restored table.
     * </p>
     */
    private String billingModeOverride;
    /**
     * <p>
     * List of global secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     */
    private java.util.List<GlobalSecondaryIndex> globalSecondaryIndexOverride;
    /**
     * <p>
     * List of local secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     */
    private java.util.List<LocalSecondaryIndex> localSecondaryIndexOverride;
    /**
     * <p>
     * Provisioned throughput settings for the restored table.
     * </p>
     */
    private ProvisionedThroughput provisionedThroughputOverride;
    /**
     * <p>
     * The new server-side encryption settings for the restored table.
     * </p>
     */
    private SSESpecification sSESpecificationOverride;

    /**
     * <p>
     * The name of the new table to which the backup must be restored.
     * </p>
     *
     * @param targetTableName
     *        The name of the new table to which the backup must be restored.
     */
    public void setTargetTableName(String targetTableName) {
        this.targetTableName = targetTableName;
    }

    /**
     * <p>
     * The name of the new table to which the backup must be restored.
     * </p>
     *
     * @return The name of the new table to which the backup must be restored.
     */
    public String getTargetTableName() {
        return this.targetTableName;
    }

    /**
     * <p>
     * The name of the new table to which the backup must be restored.
     * </p>
     *
     * @param targetTableName
     *        The name of the new table to which the backup must be restored.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withTargetTableName(String targetTableName) {
        setTargetTableName(targetTableName);
        return this;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) associated with the backup.
     * </p>
     *
     * @param backupArn
     *        The Amazon Resource Name (ARN) associated with the backup.
     */
    public void setBackupArn(String backupArn) {
        this.backupArn = backupArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) associated with the backup.
     * </p>
     *
     * @return The Amazon Resource Name (ARN) associated with the backup.
     */
    public String getBackupArn() {
        return this.backupArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) associated with the backup.
     * </p>
     *
     * @param backupArn
     *        The Amazon Resource Name (ARN) associated with the backup.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withBackupArn(String backupArn) {
        setBackupArn(backupArn);
        return this;
    }

    /**
     * <p>
     * The billing mode of the restored table.
     * </p>
     *
     * @param billingModeOverride
     *        The billing mode of the restored table.
     * @see BillingMode
     */
    public void setBillingModeOverride(String billingModeOverride) {
        this.billingModeOverride = billingModeOverride;
    }

    /**
     * <p>
     * The billing mode of the restored table.
     * </p>
     *
     * @return The billing mode of the restored table.
     * @see BillingMode
     */
    public String getBillingModeOverride() {
        return this.billingModeOverride;
    }

    /**
     * <p>
     * The billing mode of the restored table.
     * </p>
     *
     * @param billingModeOverride
     *        The billing mode of the restored table.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BillingMode
     */
    public RestoreTableFromBackupRequest withBillingModeOverride(String billingModeOverride) {
        setBillingModeOverride(billingModeOverride);
        return this;
    }

    /**
     * <p>
     * The billing mode of the restored table.
     * </p>
     *
     * @param billingModeOverride
     *        The billing mode of the restored table.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see BillingMode
     */
    public RestoreTableFromBackupRequest withBillingModeOverride(BillingMode billingModeOverride) {
        // Enum overload stores the enum's string form directly.
        this.billingModeOverride = billingModeOverride.toString();
        return this;
    }

    /**
     * <p>
     * List of global secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @return List of global secondary indexes for the restored table. The indexes provided should match existing
     *         secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     */
    public java.util.List<GlobalSecondaryIndex> getGlobalSecondaryIndexOverride() {
        return globalSecondaryIndexOverride;
    }

    /**
     * <p>
     * List of global secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @param globalSecondaryIndexOverride
     *        List of global secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     */
    public void setGlobalSecondaryIndexOverride(java.util.Collection<GlobalSecondaryIndex> globalSecondaryIndexOverride) {
        if (globalSecondaryIndexOverride == null) {
            this.globalSecondaryIndexOverride = null;
            return;
        }
        // Defensive copy: later mutation of the caller's collection has no effect here.
        this.globalSecondaryIndexOverride = new java.util.ArrayList<GlobalSecondaryIndex>(globalSecondaryIndexOverride);
    }

    /**
     * <p>
     * List of global secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setGlobalSecondaryIndexOverride(java.util.Collection)} or
     * {@link #withGlobalSecondaryIndexOverride(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param globalSecondaryIndexOverride
     *        List of global secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withGlobalSecondaryIndexOverride(GlobalSecondaryIndex... globalSecondaryIndexOverride) {
        // Lazily create the backing list, then append each element.
        if (this.globalSecondaryIndexOverride == null) {
            setGlobalSecondaryIndexOverride(new java.util.ArrayList<GlobalSecondaryIndex>(globalSecondaryIndexOverride.length));
        }
        for (GlobalSecondaryIndex ele : globalSecondaryIndexOverride) {
            this.globalSecondaryIndexOverride.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * List of global secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @param globalSecondaryIndexOverride
     *        List of global secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withGlobalSecondaryIndexOverride(java.util.Collection<GlobalSecondaryIndex> globalSecondaryIndexOverride) {
        setGlobalSecondaryIndexOverride(globalSecondaryIndexOverride);
        return this;
    }

    /**
     * <p>
     * List of local secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @return List of local secondary indexes for the restored table. The indexes provided should match existing
     *         secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     */
    public java.util.List<LocalSecondaryIndex> getLocalSecondaryIndexOverride() {
        return localSecondaryIndexOverride;
    }

    /**
     * <p>
     * List of local secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @param localSecondaryIndexOverride
     *        List of local secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     */
    public void setLocalSecondaryIndexOverride(java.util.Collection<LocalSecondaryIndex> localSecondaryIndexOverride) {
        if (localSecondaryIndexOverride == null) {
            this.localSecondaryIndexOverride = null;
            return;
        }
        // Defensive copy: later mutation of the caller's collection has no effect here.
        this.localSecondaryIndexOverride = new java.util.ArrayList<LocalSecondaryIndex>(localSecondaryIndexOverride);
    }

    /**
     * <p>
     * List of local secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setLocalSecondaryIndexOverride(java.util.Collection)} or
     * {@link #withLocalSecondaryIndexOverride(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param localSecondaryIndexOverride
     *        List of local secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withLocalSecondaryIndexOverride(LocalSecondaryIndex... localSecondaryIndexOverride) {
        // Lazily create the backing list, then append each element.
        if (this.localSecondaryIndexOverride == null) {
            setLocalSecondaryIndexOverride(new java.util.ArrayList<LocalSecondaryIndex>(localSecondaryIndexOverride.length));
        }
        for (LocalSecondaryIndex ele : localSecondaryIndexOverride) {
            this.localSecondaryIndexOverride.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * List of local secondary indexes for the restored table. The indexes provided should match existing secondary
     * indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * </p>
     *
     * @param localSecondaryIndexOverride
     *        List of local secondary indexes for the restored table. The indexes provided should match existing
     *        secondary indexes. You can choose to exclude some or all of the indexes at the time of restore.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withLocalSecondaryIndexOverride(java.util.Collection<LocalSecondaryIndex> localSecondaryIndexOverride) {
        setLocalSecondaryIndexOverride(localSecondaryIndexOverride);
        return this;
    }

    /**
     * <p>
     * Provisioned throughput settings for the restored table.
     * </p>
     *
     * @param provisionedThroughputOverride
     *        Provisioned throughput settings for the restored table.
     */
    public void setProvisionedThroughputOverride(ProvisionedThroughput provisionedThroughputOverride) {
        this.provisionedThroughputOverride = provisionedThroughputOverride;
    }

    /**
     * <p>
     * Provisioned throughput settings for the restored table.
     * </p>
     *
     * @return Provisioned throughput settings for the restored table.
     */
    public ProvisionedThroughput getProvisionedThroughputOverride() {
        return this.provisionedThroughputOverride;
    }

    /**
     * <p>
     * Provisioned throughput settings for the restored table.
     * </p>
     *
     * @param provisionedThroughputOverride
     *        Provisioned throughput settings for the restored table.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withProvisionedThroughputOverride(ProvisionedThroughput provisionedThroughputOverride) {
        setProvisionedThroughputOverride(provisionedThroughputOverride);
        return this;
    }

    /**
     * <p>
     * The new server-side encryption settings for the restored table.
     * </p>
     *
     * @param sSESpecificationOverride
     *        The new server-side encryption settings for the restored table.
     */
    public void setSSESpecificationOverride(SSESpecification sSESpecificationOverride) {
        this.sSESpecificationOverride = sSESpecificationOverride;
    }

    /**
     * <p>
     * The new server-side encryption settings for the restored table.
     * </p>
     *
     * @return The new server-side encryption settings for the restored table.
     */
    public SSESpecification getSSESpecificationOverride() {
        return this.sSESpecificationOverride;
    }

    /**
     * <p>
     * The new server-side encryption settings for the restored table.
     * </p>
     *
     * @param sSESpecificationOverride
     *        The new server-side encryption settings for the restored table.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RestoreTableFromBackupRequest withSSESpecificationOverride(SSESpecification sSESpecificationOverride) {
        setSSESpecificationOverride(sSESpecificationOverride);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTargetTableName() != null)
            sb.append("TargetTableName: ").append(getTargetTableName()).append(",");
        if (getBackupArn() != null)
            sb.append("BackupArn: ").append(getBackupArn()).append(",");
        if (getBillingModeOverride() != null)
            sb.append("BillingModeOverride: ").append(getBillingModeOverride()).append(",");
        if (getGlobalSecondaryIndexOverride() != null)
            sb.append("GlobalSecondaryIndexOverride: ").append(getGlobalSecondaryIndexOverride()).append(",");
        if (getLocalSecondaryIndexOverride() != null)
            sb.append("LocalSecondaryIndexOverride: ").append(getLocalSecondaryIndexOverride()).append(",");
        if (getProvisionedThroughputOverride() != null)
            sb.append("ProvisionedThroughputOverride: ").append(getProvisionedThroughputOverride()).append(",");
        if (getSSESpecificationOverride() != null)
            sb.append("SSESpecificationOverride: ").append(getSSESpecificationOverride());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof RestoreTableFromBackupRequest == false)
            return false;
        RestoreTableFromBackupRequest other = (RestoreTableFromBackupRequest) obj;
        // For each field: XOR is true when exactly one side is null (unequal),
        // then non-null values are compared with equals().
        if (other.getTargetTableName() == null ^ this.getTargetTableName() == null)
            return false;
        if (other.getTargetTableName() != null && other.getTargetTableName().equals(this.getTargetTableName()) == false)
            return false;
        if (other.getBackupArn() == null ^ this.getBackupArn() == null)
            return false;
        if (other.getBackupArn() != null && other.getBackupArn().equals(this.getBackupArn()) == false)
            return false;
        if (other.getBillingModeOverride() == null ^ this.getBillingModeOverride() == null)
            return false;
        if (other.getBillingModeOverride() != null && other.getBillingModeOverride().equals(this.getBillingModeOverride()) == false)
            return false;
        if (other.getGlobalSecondaryIndexOverride() == null ^ this.getGlobalSecondaryIndexOverride() == null)
            return false;
        if (other.getGlobalSecondaryIndexOverride() != null && other.getGlobalSecondaryIndexOverride().equals(this.getGlobalSecondaryIndexOverride()) == false)
            return false;
        if (other.getLocalSecondaryIndexOverride() == null ^ this.getLocalSecondaryIndexOverride() == null)
            return false;
        if (other.getLocalSecondaryIndexOverride() != null && other.getLocalSecondaryIndexOverride().equals(this.getLocalSecondaryIndexOverride()) == false)
            return false;
        if (other.getProvisionedThroughputOverride() == null ^ this.getProvisionedThroughputOverride() == null)
            return false;
        if (other.getProvisionedThroughputOverride() != null
                && other.getProvisionedThroughputOverride().equals(this.getProvisionedThroughputOverride()) == false)
            return false;
        if (other.getSSESpecificationOverride() == null ^ this.getSSESpecificationOverride() == null)
            return false;
        if (other.getSSESpecificationOverride() != null && other.getSSESpecificationOverride().equals(this.getSSESpecificationOverride()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-multiplier accumulation over all fields, consistent with equals().
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getTargetTableName() == null) ? 0 : getTargetTableName().hashCode());
        hashCode = prime * hashCode + ((getBackupArn() == null) ? 0 : getBackupArn().hashCode());
        hashCode = prime * hashCode + ((getBillingModeOverride() == null) ? 0 : getBillingModeOverride().hashCode());
        hashCode = prime * hashCode + ((getGlobalSecondaryIndexOverride() == null) ? 0 : getGlobalSecondaryIndexOverride().hashCode());
        hashCode = prime * hashCode + ((getLocalSecondaryIndexOverride() == null) ? 0 : getLocalSecondaryIndexOverride().hashCode());
        hashCode = prime * hashCode + ((getProvisionedThroughputOverride() == null) ? 0 : getProvisionedThroughputOverride().hashCode());
        hashCode = prime * hashCode + ((getSSESpecificationOverride() == null) ? 0 : getSSESpecificationOverride().hashCode());
        return hashCode;
    }

    @Override
    public RestoreTableFromBackupRequest clone() {
        // AmazonWebServiceRequest supports clone(); no checked exception surfaces here.
        return (RestoreTableFromBackupRequest) super.clone();
    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.io;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;
import java.util.stream.Stream;
import io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueue;
import org.apache.activemq.artemis.ArtemisConstants;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.core.io.aio.AIOSequentialFileFactory;
import org.apache.activemq.artemis.core.io.mapped.MappedSequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.journal.EncodingSupport;
import org.apache.activemq.artemis.core.journal.Journal;
import org.apache.activemq.artemis.core.journal.RecordInfo;
import org.apache.activemq.artemis.core.journal.impl.JournalImpl;
import org.apache.activemq.artemis.jlibaio.LibaioContext;
import org.apache.activemq.artemis.utils.actors.ArtemisExecutor;
/**
* To benchmark Type.Aio you need to define -Djava.library.path=${project-root}/native/src/.libs when calling the JVM
*/
public class JournalTptBenchmark {
public static void main(String[] args) throws Exception {
final boolean useDefaultIoExecutor = true;
final int fileSize = 10 * 1024 * 1024;
final boolean dataSync = false;
final Type type = Type.Mapped;
final int tests = 10;
final int warmup = 20_000;
final int measurements = 100_000;
final int msgSize = 100;
final byte[] msgContent = new byte[msgSize];
Arrays.fill(msgContent, (byte) 1);
final int totalMessages = (measurements * tests + warmup);
final File tmpDirectory = new File("./");
//using the default configuration when the broker starts!
final SequentialFileFactory factory;
switch (type) {
case Mapped:
factory = new MappedSequentialFileFactory(tmpDirectory, fileSize, true, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_AIO, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_AIO, null)
.setDatasync(dataSync);
break;
case Nio:
factory = new NIOSequentialFileFactory(tmpDirectory, true, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_NIO, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_NIO, 1, false, null, null).setDatasync(dataSync);
break;
case Aio:
factory = new AIOSequentialFileFactory(tmpDirectory, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_AIO, ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_AIO, 500, false, null, null).setDatasync(dataSync);
//disable it when using directly the same buffer: ((AIOSequentialFileFactory)factory).disableBufferReuse();
if (!LibaioContext.isLoaded()) {
throw new IllegalStateException("lib AIO not loaded!");
}
break;
default:
throw new AssertionError("unsupported case");
}
int numFiles = (int) (totalMessages * factory.calculateBlockSize(msgSize)) / fileSize;
if (numFiles < 2) {
numFiles = 2;
}
ExecutorService service = null;
final Journal journal;
if (useDefaultIoExecutor) {
journal = new JournalImpl(fileSize, numFiles, numFiles, Integer.MAX_VALUE, 100, factory, "activemq-data", "amq", factory.getMaxIO());
journal.start();
} else {
final ArrayList<MpscArrayQueue<Runnable>> tasks = new ArrayList<>();
service = Executors.newSingleThreadExecutor();
journal = new JournalImpl(() -> new ArtemisExecutor() {
private final MpscArrayQueue<Runnable> taskQueue = new MpscArrayQueue<>(1024);
{
tasks.add(taskQueue);
}
@Override
public void execute(Runnable command) {
while (!taskQueue.offer(command)) {
LockSupport.parkNanos(1L);
}
}
}, fileSize, numFiles, numFiles, Integer.MAX_VALUE, 100, factory, "activemq-data", "amq", factory.getMaxIO(), 0);
journal.start();
service.execute(() -> {
final int size = tasks.size();
final int capacity = 1024;
while (!Thread.currentThread().isInterrupted()) {
for (int i = 0; i < size; i++) {
final MpscArrayQueue<Runnable> runnables = tasks.get(i);
for (int j = 0; j < capacity; j++) {
final Runnable task = runnables.poll();
if (task == null) {
break;
}
try {
task.run();
} catch (Throwable t) {
System.err.println(t);
}
}
}
}
});
}
try {
journal.load(new ArrayList<RecordInfo>(), null, null);
} catch (Exception e) {
throw new RuntimeException(e);
}
try {
final EncodingSupport encodingSupport = new EncodingSupport() {
@Override
public int getEncodeSize() {
return msgSize;
}
@Override
public void encode(ActiveMQBuffer buffer) {
final int writerIndex = buffer.writerIndex();
buffer.setBytes(writerIndex, msgContent);
buffer.writerIndex(writerIndex + msgSize);
}
@Override
public void decode(ActiveMQBuffer buffer) {
}
};
long id = 1;
{
final long elapsed = writeMeasurements(id, journal, encodingSupport, warmup);
id += warmup;
System.out.println("warmup:" + (measurements * 1000_000_000L) / elapsed + " ops/sec");
}
for (int t = 0; t < tests; t++) {
final long elapsed = writeMeasurements(id, journal, encodingSupport, measurements);
System.out.println((measurements * 1000_000_000L) / elapsed + " ops/sec");
id += warmup;
}
} finally {
journal.stop();
if (service != null) {
service.shutdown();
}
final File[] fileToDeletes = tmpDirectory.listFiles();
System.out.println("Files to deletes" + Arrays.toString(fileToDeletes));
Stream.of(fileToDeletes).forEach(File::delete);
}
}
/**
 * Times one batch of journal writes.
 *
 * <p>Triggers a GC and pauses briefly beforehand so collector noise from the
 * previous batch does not leak into the measurement, then appends
 * {@code measurements} records starting at {@code id}.
 *
 * @param id              first record id to write
 * @param journal         journal under test
 * @param encodingSupport payload encoder for each record
 * @param measurements    number of records to write
 * @return elapsed wall-clock time for the batch, in nanoseconds
 * @throws Exception if the journal rejects a write or the pause is interrupted
 */
private static long writeMeasurements(long id,
                                      Journal journal,
                                      EncodingSupport encodingSupport,
                                      int measurements) throws Exception {
    // Settle the JVM before starting the clock.
    System.gc();
    TimeUnit.SECONDS.sleep(2);
    final long begin = System.nanoTime();
    long recordId = id;
    int remaining = measurements;
    while (remaining > 0) {
        write(recordId, journal, encodingSupport);
        recordId++;
        remaining--;
    }
    return System.nanoTime() - begin;
}
// Performs one logical benchmark write: an add record followed by an update
// of the same id. The final boolean presumably is the sync/durability flag
// (false on add, true on update) so each iteration pays for exactly one
// durable flush — TODO confirm against the Journal append* API.
private static void write(long id, Journal journal, EncodingSupport encodingSupport) throws Exception {
    journal.appendAddRecord(id, (byte) 1, encodingSupport, false);
    journal.appendUpdateRecord(id, (byte) 1, encodingSupport, true);
}
// File-factory flavour exercised by the benchmark: memory-mapped files,
// plain Java NIO, or Linux libaio (Aio requires LibaioContext to be loaded,
// see the switch that constructs the factory earlier in this class).
private enum Type {
    Mapped, Nio, Aio
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CustomPacingCurve.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
 * A curve consisting of {@link CustomPacingGoal} objects that is
 * used to pace line item delivery.
 *
 * <p>NOTE: this is an auto-generated Apache Axis 1.4 bean (see file header).
 * Do not hand-edit its bean shape, the static {@code typeDesc} metadata, or
 * the serializer hooks — regenerate from the WSDL instead.
 */
public class CustomPacingCurve implements java.io.Serializable {
    /* The unit of the {@link CustomPacingGoalDto#amount} values. */
    private com.google.api.ads.admanager.axis.v202111.CustomPacingGoalUnit customPacingGoalUnit;

    /* The list of goals that make up the custom pacing curve. */
    private com.google.api.ads.admanager.axis.v202111.CustomPacingGoal[] customPacingGoals;

    // No-arg constructor — presumably required by the Axis bean deserializer.
    public CustomPacingCurve() {
    }

    // Convenience constructor populating every field at once.
    public CustomPacingCurve(
           com.google.api.ads.admanager.axis.v202111.CustomPacingGoalUnit customPacingGoalUnit,
           com.google.api.ads.admanager.axis.v202111.CustomPacingGoal[] customPacingGoals) {
        this.customPacingGoalUnit = customPacingGoalUnit;
        this.customPacingGoals = customPacingGoals;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("customPacingGoalUnit", getCustomPacingGoalUnit())
            .add("customPacingGoals", getCustomPacingGoals())
            .toString();
    }

    /**
     * Gets the customPacingGoalUnit value for this CustomPacingCurve.
     *
     * @return customPacingGoalUnit * The unit of the {@link CustomPacingGoalDto#amount} values.
     */
    public com.google.api.ads.admanager.axis.v202111.CustomPacingGoalUnit getCustomPacingGoalUnit() {
        return customPacingGoalUnit;
    }

    /**
     * Sets the customPacingGoalUnit value for this CustomPacingCurve.
     *
     * @param customPacingGoalUnit * The unit of the {@link CustomPacingGoalDto#amount} values.
     */
    public void setCustomPacingGoalUnit(com.google.api.ads.admanager.axis.v202111.CustomPacingGoalUnit customPacingGoalUnit) {
        this.customPacingGoalUnit = customPacingGoalUnit;
    }

    /**
     * Gets the customPacingGoals value for this CustomPacingCurve.
     *
     * @return customPacingGoals * The list of goals that make up the custom pacing curve.
     */
    public com.google.api.ads.admanager.axis.v202111.CustomPacingGoal[] getCustomPacingGoals() {
        return customPacingGoals;
    }

    /**
     * Sets the customPacingGoals value for this CustomPacingCurve.
     *
     * @param customPacingGoals * The list of goals that make up the custom pacing curve.
     */
    public void setCustomPacingGoals(com.google.api.ads.admanager.axis.v202111.CustomPacingGoal[] customPacingGoals) {
        this.customPacingGoals = customPacingGoals;
    }

    // Indexed accessor for the customPacingGoals array (Axis bean convention).
    public com.google.api.ads.admanager.axis.v202111.CustomPacingGoal getCustomPacingGoals(int i) {
        return this.customPacingGoals[i];
    }

    // Indexed mutator for the customPacingGoals array (Axis bean convention).
    public void setCustomPacingGoals(int i, com.google.api.ads.admanager.axis.v202111.CustomPacingGoal _value) {
        this.customPacingGoals[i] = _value;
    }

    // Re-entrancy guard: while equals(obj) is in flight, holds obj so a
    // recursive call on a cyclic object graph degrades to an identity check
    // instead of recursing forever.
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof CustomPacingCurve)) return false;
        CustomPacingCurve other = (CustomPacingCurve) obj;
        // Redundant: instanceof above already rejects null — kept by the generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.customPacingGoalUnit==null && other.getCustomPacingGoalUnit()==null) ||
             (this.customPacingGoalUnit!=null &&
              this.customPacingGoalUnit.equals(other.getCustomPacingGoalUnit()))) &&
            ((this.customPacingGoals==null && other.getCustomPacingGoals()==null) ||
             (this.customPacingGoals!=null &&
              java.util.Arrays.equals(this.customPacingGoals, other.getCustomPacingGoals())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard for hashCode(), analogous to __equalsCalc: a recursive
    // call during an in-flight computation contributes 0 instead of recursing.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getCustomPacingGoalUnit() != null) {
            _hashCode += getCustomPacingGoalUnit().hashCode();
        }
        if (getCustomPacingGoals() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomPacingGoals());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomPacingGoals(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps this bean and its fields onto the corresponding
    // XML type/elements in the v202111 Ad Manager namespace.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(CustomPacingCurve.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomPacingCurve"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("customPacingGoalUnit");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "customPacingGoalUnit"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomPacingGoalUnit"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("customPacingGoals");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "customPacingGoals"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomPacingGoal"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.admin;
import com.google.gerrit.client.Dispatcher;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.ScreenLoadCallback;
import com.google.gerrit.client.ui.FancyFlexTable;
import com.google.gerrit.client.ui.Hyperlink;
import com.google.gerrit.client.ui.SmallHeading;
import com.google.gerrit.common.data.ApprovalType;
import com.google.gerrit.common.data.GerritConfig;
import com.google.gerrit.common.data.InheritedRefRight;
import com.google.gerrit.common.data.ProjectDetail;
import com.google.gerrit.reviewdb.AccountGroup;
import com.google.gerrit.reviewdb.ApprovalCategoryValue;
import com.google.gerrit.reviewdb.Project;
import com.google.gerrit.reviewdb.RefRight;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.FlexTable.FlexCellFormatter;
import com.google.gwtexpui.safehtml.client.SafeHtml;
import com.google.gwtexpui.safehtml.client.SafeHtmlBuilder;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
 * Admin screen for a single project's access rights: shows the parent
 * project, a table of granted ref rights (optionally including rights
 * inherited from parent projects), a delete button for checked rows, and an
 * editor for granting new rights.
 */
public class ProjectAccessScreen extends ProjectScreen {
    private Panel parentPanel;
    private Hyperlink parentName;
    private RightsTable rights;
    private Button delRight;
    private AccessRightEditor rightEditor;
    private CheckBox showInherited;

    public ProjectAccessScreen(final Project.NameKey toShow) {
        super(toShow);
    }

    @Override
    protected void onInitUI() {
        super.onInitUI();
        // Widget construction order matters: the parent header is added above
        // the rights table and editor.
        initParent();
        initRights();
    }

    @Override
    protected void onLoad() {
        super.onLoad();
        // Fetch project detail asynchronously; the screen body is populated
        // just before first display.
        Util.PROJECT_SVC.projectDetail(getProjectKey(),
            new ScreenLoadCallback<ProjectDetail>(this) {
              public void preDisplay(final ProjectDetail result) {
                enableForm(true);
                display(result);
              }
            });
    }

    // Enables/disables the delete button and the rights editor together.
    private void enableForm(final boolean on) {
        delRight.setEnabled(on);
        rightEditor.enableForm(on);
    }

    // Builds the parent-project header row with the "show inherited" toggle.
    private void initParent() {
        parentName = new Hyperlink("", "");
        // NOTE(review): setChecked/isChecked are the legacy GWT CheckBox
        // accessors (setValue/getValue supersede them) — confirm which GWT
        // version this targets before modernizing.
        showInherited = new CheckBox();
        showInherited.setChecked(true);
        showInherited.addClickHandler(new ClickHandler() {
          public void onClick(ClickEvent event) {
            rights.showInherited(showInherited.isChecked());
          }
        });
        Grid g = new Grid(2, 3);
        g.setWidget(0, 0, new SmallHeading(Util.C.headingParentProjectName()));
        g.setWidget(1, 0, parentName);
        g.setWidget(1, 1, showInherited);
        g.setText(1, 2, Util.C.headingShowInherited());
        parentPanel = new VerticalPanel();
        parentPanel.add(g);
        add(parentPanel);
    }

    // Builds the rights table, the delete button, and the new-right editor.
    private void initRights() {
        rights = new RightsTable();
        // NOTE(review): the label constant is buttonDeleteGroupMembers() but
        // this button deletes access rights — verify the constant's text is
        // actually appropriate here.
        delRight = new Button(Util.C.buttonDeleteGroupMembers());
        delRight.addClickHandler(new ClickHandler() {
          @Override
          public void onClick(final ClickEvent event) {
            final HashSet<RefRight.Key> refRightIds = rights.getRefRightIdsChecked();
            doDeleteRefRights(refRightIds);
          }
        });
        rightEditor = new AccessRightEditor(getProjectKey());
        rightEditor.addValueChangeHandler(new ValueChangeHandler<ProjectDetail>() {
          @Override
          public void onValueChange(ValueChangeEvent<ProjectDetail> event) {
            // The editor returns a fresh ProjectDetail after a grant; redraw.
            display(event.getValue());
          }
        });
        add(new SmallHeading(Util.C.headingAccessRights()));
        add(rights);
        add(delRight);
        add(rightEditor);
    }

    // Repaints the whole screen from a ProjectDetail snapshot.
    void display(final ProjectDetail result) {
        final Project project = result.project;
        final Project.NameKey wildKey = Gerrit.getConfig().getWildProject();
        final boolean isWild = wildKey.equals(project.getNameKey());
        Project.NameKey parent = project.getParent();
        if (parent == null) {
          // Projects without an explicit parent fall back to the wild project.
          parent = wildKey;
        }
        // The wild project is the root: it has no meaningful parent row.
        parentPanel.setVisible(!isWild);
        parentName.setTargetHistoryToken(Dispatcher.toProjectAdmin(parent, ACCESS));
        parentName.setText(parent.get());
        rights.display(result.groups, result.rights);
        rightEditor.setVisible(result.canModifyAccess);
        delRight.setVisible(rights.getCanDelete());
    }

    // Issues the delete RPC for the checked rights; no-op on an empty set.
    private void doDeleteRefRights(final HashSet<RefRight.Key> refRightIds) {
        if (!refRightIds.isEmpty()) {
          Util.PROJECT_SVC.deleteRight(getProjectKey(), refRightIds,
              new GerritCallback<ProjectDetail>() {
                @Override
                public void onSuccess(final ProjectDetail result) {
                  //The user could no longer modify access after deleting a ref right.
                  display(result);
                }
              });
        }
    }

    /**
     * Table of {@link InheritedRefRight} rows. Column layout: 1 = delete
     * checkbox (deletable rows only), 2 = origin project (inherited rows),
     * 3 = approval category, 4 = group, 5 = ref pattern, 6 = value range.
     */
    private class RightsTable extends FancyFlexTable<InheritedRefRight> {
        // True once at least one displayed row is deletable by this user.
        boolean canDelete;
        // Group lookup for resolving group ids in rows; set by display().
        Map<AccountGroup.Id, AccountGroup> groups;

        RightsTable() {
          table.setWidth("");
          table.setText(0, 2, Util.C.columnRightOrigin());
          table.setText(0, 3, Util.C.columnApprovalCategory());
          table.setText(0, 4, Util.C.columnGroupName());
          table.setText(0, 5, Util.C.columnRefName());
          table.setText(0, 6, Util.C.columnRightRange());

          final FlexCellFormatter fmt = table.getFlexCellFormatter();
          fmt.addStyleName(0, 1, Gerrit.RESOURCES.css().iconHeader());
          fmt.addStyleName(0, 2, Gerrit.RESOURCES.css().dataHeader());
          fmt.addStyleName(0, 3, Gerrit.RESOURCES.css().dataHeader());
          fmt.addStyleName(0, 4, Gerrit.RESOURCES.css().dataHeader());
          fmt.addStyleName(0, 5, Gerrit.RESOURCES.css().dataHeader());
          fmt.addStyleName(0, 6, Gerrit.RESOURCES.css().dataHeader());

          table.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(final ClickEvent event) {
              onOpenRow(table.getCellForEvent(event).getRowIndex());
            }
          });
        }

        // Collects the keys of all rows whose delete checkbox is ticked.
        HashSet<RefRight.Key> getRefRightIdsChecked() {
          final HashSet<RefRight.Key> refRightIds = new HashSet<RefRight.Key>();
          for (int row = 1; row < table.getRowCount(); row++) {
            RefRight r = getRowItem(row).getRight();
            if (r != null && table.getWidget(row, 1) instanceof CheckBox
                && ((CheckBox) table.getWidget(row, 1)).getValue()) {
              refRightIds.add(r.getKey());
            }
          }
          return refRightIds;
        }

        // Rebuilds the table body from scratch; inherited rows start hidden
        // when the "show inherited" box is unchecked.
        void display(final Map<AccountGroup.Id, AccountGroup> grps,
            final List<InheritedRefRight> refRights) {
          groups = grps;
          canDelete = false;

          while (1 < table.getRowCount())
            table.removeRow(table.getRowCount() - 1);

          for (final InheritedRefRight r : refRights) {
            final int row = table.getRowCount();
            table.insertRow(row);
            if (! showInherited.isChecked() && r.isInherited()) {
              table.getRowFormatter().setVisible(row, false);
            }
            applyDataRowStyle(row);
            populate(row, r);
          }
        }

        // Row click: load the clicked right into the editor (header row is 0).
        protected void onOpenRow(final int row) {
          if (row > 0) {
            RefRight right = getRowItem(row).getRight();
            rightEditor.load(right, groups.get(right.getAccountGroupId()));
          }
        }

        // Fills one table row from an InheritedRefRight.
        void populate(final int row, final InheritedRefRight r) {
          final GerritConfig config = Gerrit.getConfig();
          final RefRight right = r.getRight();
          final ApprovalType ar =
              config.getApprovalTypes().getApprovalType(
                  right.getApprovalCategoryId());
          final AccountGroup group = groups.get(right.getAccountGroupId());

          // Only locally-granted rights owned by this user get a delete box.
          if (r.isInherited() || !r.isOwner()) {
            table.setText(row, 1, "");
          } else {
            table.setWidget(row, 1, new CheckBox());
            canDelete = true;
          }

          if (r.isInherited()) {
            // Link back to the project the right was inherited from.
            Project.NameKey fromProject = right.getKey().getProjectNameKey();
            table.setWidget(row, 2, new Hyperlink(fromProject.get(), Dispatcher
                .toProjectAdmin(fromProject, ACCESS)));
          } else {
            table.setText(row, 2, "");
          }

          // Fall back to the raw category id when the category is unknown.
          table.setText(row, 3, ar != null ? ar.getCategory().getName()
                                           : right.getApprovalCategoryId().get() );

          if (group != null) {
            table.setWidget(row, 4, new Hyperlink(group.getName(), Dispatcher
                .toAccountGroup(group.getId())));
          } else {
            table.setText(row, 4, Util.M.deletedGroup(right.getAccountGroupId()
                .get()));
          }

          table.setText(row, 5, right.getRefPatternForDisplay());

          {
            // Render "min<br>max" for ranged categories, otherwise just max.
            final SafeHtmlBuilder m = new SafeHtmlBuilder();
            final ApprovalCategoryValue min, max;
            min = ar != null ? ar.getValue(right.getMinValue()) : null;
            max = ar != null ? ar.getValue(right.getMaxValue()) : null;
            if (ar != null && ar.getCategory().isRange()) {
              formatValue(m, right.getMinValue(), min);
              m.br();
            }
            formatValue(m, right.getMaxValue(), max);
            SafeHtml.set(table, row, 6, m);
          }

          final FlexCellFormatter fmt = table.getFlexCellFormatter();
          fmt.addStyleName(row, 1, Gerrit.RESOURCES.css().iconCell());
          fmt.addStyleName(row, 2, Gerrit.RESOURCES.css().dataCell());
          fmt.addStyleName(row, 3, Gerrit.RESOURCES.css().dataCell());
          fmt.addStyleName(row, 4, Gerrit.RESOURCES.css().dataCell());
          fmt.addStyleName(row, 5, Gerrit.RESOURCES.css().dataCell());
          fmt.addStyleName(row, 6, Gerrit.RESOURCES.css().dataCell());
          fmt.addStyleName(row, 6, Gerrit.RESOURCES.css()
              .projectAdminApprovalCategoryRangeLine());

          setRowItem(row, r);
        }

        // Toggles visibility of every inherited row (header row has no item).
        public void showInherited(boolean visible) {
          for (int r = 0; r < table.getRowCount(); r++) {
            if (getRowItem(r) != null && getRowItem(r).isInherited()) {
              table.getRowFormatter().setVisible(r, visible);
            }
          }
        }

        // Renders a single approval value as "+N: Name" / " N" / "-N: Name".
        private void formatValue(final SafeHtmlBuilder m, final short v,
            final ApprovalCategoryValue e) {
          m.openSpan();
          m
              .setStyleName(Gerrit.RESOURCES.css()
                  .projectAdminApprovalCategoryValue());
          if (v == 0) {
            m.append(' ');
          } else if (v > 0) {
            m.append('+');
          }
          m.append(v);
          m.closeSpan();
          if (e != null) {
            m.append(": ");
            m.append(e.getName());
          }
        }

        private boolean getCanDelete() {
          return canDelete;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
import org.apache.http.nio.protocol.HttpAsyncResponseConsumer;
import org.junit.After;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import static org.elasticsearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode;
import static org.elasticsearch.client.RestClientTestUtil.randomErrorRetryStatusCode;
import static org.elasticsearch.client.RestClientTestUtil.randomHttpMethod;
import static org.elasticsearch.client.RestClientTestUtil.randomOkStatusCode;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Tests for {@link RestClient} behaviour against multiple hosts: fail-over, blacklisting etc.
* Relies on a mock http client to intercept requests and return desired responses based on request path.
*/
public class RestClientMultipleHostsTests extends RestClientTestCase {
private ExecutorService exec = Executors.newFixedThreadPool(1);
private List<Node> nodes;
private HostsTrackingFailureListener failureListener;
@SuppressWarnings("unchecked")
public RestClient createRestClient(NodeSelector nodeSelector) {
CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class);
when(httpClient.<HttpResponse>execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class),
any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer<Future<HttpResponse>>() {
@Override
public Future<HttpResponse> answer(InvocationOnMock invocationOnMock) throws Throwable {
HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0];
final HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest();
final HttpHost httpHost = requestProducer.getTarget();
HttpClientContext context = (HttpClientContext) invocationOnMock.getArguments()[2];
assertThat(context.getAuthCache().get(httpHost), instanceOf(BasicScheme.class));
final FutureCallback<HttpResponse> futureCallback = (FutureCallback<HttpResponse>) invocationOnMock.getArguments()[3];
//return the desired status code or exception depending on the path
exec.execute(new Runnable() {
@Override
public void run() {
if (request.getURI().getPath().equals("/soe")) {
futureCallback.failed(new SocketTimeoutException(httpHost.toString()));
} else if (request.getURI().getPath().equals("/coe")) {
futureCallback.failed(new ConnectTimeoutException(httpHost.toString()));
} else if (request.getURI().getPath().equals("/ioe")) {
futureCallback.failed(new IOException(httpHost.toString()));
} else {
int statusCode = Integer.parseInt(request.getURI().getPath().substring(1));
StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, "");
futureCallback.completed(new BasicHttpResponse(statusLine));
}
}
});
return null;
}
});
int numNodes = RandomNumbers.randomIntBetween(getRandom(), 2, 5);
nodes = new ArrayList<>(numNodes);
for (int i = 0; i < numNodes; i++) {
nodes.add(new Node(new HttpHost("localhost", 9200 + i)));
}
nodes = Collections.unmodifiableList(nodes);
failureListener = new HostsTrackingFailureListener();
return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector, false);
}
/**
* Shutdown the executor so we don't leak threads into other test runs.
*/
@After
public void shutdownExec() {
exec.shutdown();
}
public void testRoundRobinOkStatusCodes() throws IOException {
RestClient restClient = createRestClient(NodeSelector.ANY);
int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
for (int i = 0; i < numIters; i++) {
Set<HttpHost> hostsSet = hostsSet();
for (int j = 0; j < nodes.size(); j++) {
int statusCode = randomOkStatusCode(getRandom());
Response response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode));
assertEquals(statusCode, response.getStatusLine().getStatusCode());
assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost()));
}
assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size());
}
failureListener.assertNotCalled();
}
public void testRoundRobinNoRetryErrors() throws IOException {
RestClient restClient = createRestClient(NodeSelector.ANY);
int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5);
for (int i = 0; i < numIters; i++) {
Set<HttpHost> hostsSet = hostsSet();
for (int j = 0; j < nodes.size(); j++) {
String method = randomHttpMethod(getRandom());
int statusCode = randomErrorNoRetryStatusCode(getRandom());
try {
Response response = restClient.performRequest(new Request(method, "/" + statusCode));
if (method.equals("HEAD") && statusCode == 404) {
//no exception gets thrown although we got a 404
assertEquals(404, response.getStatusLine().getStatusCode());
assertEquals(statusCode, response.getStatusLine().getStatusCode());
assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost()));
} else {
fail("request should have failed");
}
} catch (ResponseException e) {
if (method.equals("HEAD") && statusCode == 404) {
throw e;
}
Response response = e.getResponse();
assertEquals(statusCode, response.getStatusLine().getStatusCode());
assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost()));
assertEquals(0, e.getSuppressed().length);
}
}
assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size());
}
failureListener.assertNotCalled();
}
public void testRoundRobinRetryErrors() throws IOException {
RestClient restClient = createRestClient(NodeSelector.ANY);
String retryEndpoint = randomErrorRetryEndpoint();
try {
restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint));
fail("request should have failed");
} catch (ResponseException e) {
/*
* Unwrap the top level failure that was added so the stack trace contains
* the caller. It wraps the exception that contains the failed hosts.
*/
e = (ResponseException) e.getCause();
Set<HttpHost> hostsSet = hostsSet();
//first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each
failureListener.assertCalled(nodes);
do {
Response response = e.getResponse();
assertEquals(Integer.parseInt(retryEndpoint.substring(1)), response.getStatusLine().getStatusCode());
assertTrue("host [" + response.getHost() + "] not found, most likely used multiple times",
hostsSet.remove(response.getHost()));
if (e.getSuppressed().length > 0) {
assertEquals(1, e.getSuppressed().length);
Throwable suppressed = e.getSuppressed()[0];
assertThat(suppressed, instanceOf(ResponseException.class));
e = (ResponseException)suppressed;
} else {
e = null;
}
} while(e != null);
assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size());
} catch (IOException e) {
/*
* Unwrap the top level failure that was added so the stack trace contains
* the caller. It wraps the exception that contains the failed hosts.
*/
e = (IOException) e.getCause();
Set<HttpHost> hostsSet = hostsSet();
//first request causes all the hosts to be blacklisted, the returned exception holds one suppressed exception each
failureListener.assertCalled(nodes);
do {
HttpHost httpHost = HttpHost.create(e.getMessage());
assertTrue("host [" + httpHost + "] not found, most likely used multiple times", hostsSet.remove(httpHost));
if (e.getSuppressed().length > 0) {
assertEquals(1, e.getSuppressed().length);
Throwable suppressed = e.getSuppressed()[0];
assertThat(suppressed, instanceOf(IOException.class));
e = (IOException) suppressed;
} else {
e = null;
}
} while(e != null);
assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size());
}
int numIters = RandomNumbers.randomIntBetween(getRandom(), 2, 5);
for (int i = 1; i <= numIters; i++) {
//check that one different host is resurrected at each new attempt
Set<HttpHost> hostsSet = hostsSet();
for (int j = 0; j < nodes.size(); j++) {
retryEndpoint = randomErrorRetryEndpoint();
try {
restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint));
fail("request should have failed");
} catch (ResponseException e) {
Response response = e.getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(Integer.parseInt(retryEndpoint.substring(1))));
assertTrue("host [" + response.getHost() + "] not found, most likely used multiple times",
hostsSet.remove(response.getHost()));
//after the first request, all hosts are blacklisted, a single one gets resurrected each time
failureListener.assertCalled(response.getHost());
assertEquals(0, e.getSuppressed().length);
} catch (IOException e) {
/*
* Unwrap the top level failure that was added so the stack trace contains
* the caller. It wraps the exception that contains the failed hosts.
*/
e = (IOException) e.getCause();
HttpHost httpHost = HttpHost.create(e.getMessage());
assertTrue("host [" + httpHost + "] not found, most likely used multiple times", hostsSet.remove(httpHost));
//after the first request, all hosts are blacklisted, a single one gets resurrected each time
failureListener.assertCalled(httpHost);
assertEquals(0, e.getSuppressed().length);
}
}
assertEquals("every host should have been used but some weren't: " + hostsSet, 0, hostsSet.size());
if (getRandom().nextBoolean()) {
//mark one host back alive through a successful request and check that all requests after that are sent to it
HttpHost selectedHost = null;
int iters = RandomNumbers.randomIntBetween(getRandom(), 2, 10);
for (int y = 0; y < iters; y++) {
int statusCode = randomErrorNoRetryStatusCode(getRandom());
Response response;
try {
response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode));
} catch (ResponseException e) {
response = e.getResponse();
}
assertThat(response.getStatusLine().getStatusCode(), equalTo(statusCode));
if (selectedHost == null) {
selectedHost = response.getHost();
} else {
assertThat(response.getHost(), equalTo(selectedHost));
}
}
failureListener.assertNotCalled();
//let the selected host catch up on number of failures, it gets selected a consecutive number of times as it's the one
//selected to be retried earlier (due to lower number of failures) till all the hosts have the same number of failures
for (int y = 0; y < i + 1; y++) {
retryEndpoint = randomErrorRetryEndpoint();
try {
restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint));
fail("request should have failed");
} catch (ResponseException e) {
Response response = e.getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(Integer.parseInt(retryEndpoint.substring(1))));
assertThat(response.getHost(), equalTo(selectedHost));
failureListener.assertCalled(selectedHost);
} catch(IOException e) {
/*
* Unwrap the top level failure that was added so the stack trace contains
* the caller. It wraps the exception that contains the failed hosts.
*/
e = (IOException) e.getCause();
HttpHost httpHost = HttpHost.create(e.getMessage());
assertThat(httpHost, equalTo(selectedHost));
failureListener.assertCalled(selectedHost);
}
}
}
}
}
public void testNodeSelector() throws IOException {
NodeSelector firstPositionOnly = new NodeSelector() {
@Override
public void select(Iterable<Node> restClientNodes) {
boolean found = false;
for (Iterator<Node> itr = restClientNodes.iterator(); itr.hasNext();) {
if (nodes.get(0) == itr.next()) {
found = true;
} else {
itr.remove();
}
}
assertTrue(found);
}
};
RestClient restClient = createRestClient(firstPositionOnly);
int rounds = between(1, 10);
for (int i = 0; i < rounds; i++) {
/*
* Run the request more than once to verify that the
* NodeSelector overrides the round robin behavior.
*/
Request request = new Request("GET", "/200");
Response response = restClient.performRequest(request);
assertEquals(nodes.get(0).getHost(), response.getHost());
}
}
/**
 * Checks that {@link RestClient#setNodes} replaces the node set: after the swap,
 * with {@link NodeSelector#SKIP_DEDICATED_MASTERS} in place, only the single
 * non-dedicated-master replacement node ever serves requests.
 */
public void testSetNodes() throws IOException {
    RestClient restClient = createRestClient(NodeSelector.SKIP_DEDICATED_MASTERS);
    List<Node> newNodes = new ArrayList<>(nodes.size());
    for (int position = 0; position < nodes.size(); position++) {
        // First replacement node keeps data/ingest roles; all others are dedicated masters
        // that SKIP_DEDICATED_MASTERS must filter out.
        Node.Roles roles;
        if (position == 0) {
            roles = new Node.Roles(false, true, true);
        } else {
            roles = new Node.Roles(true, false, false);
        }
        newNodes.add(new Node(nodes.get(position).getHost(), null, null, null, roles, null));
    }
    restClient.setNodes(newNodes);
    int rounds = between(1, 10);
    for (int round = 0; round < rounds; round++) {
        // Repeat the request to show every round is routed to the sole selectable node.
        Response response = restClient.performRequest(new Request("GET", "/200"));
        assertEquals(newNodes.get(0).getHost(), response.getHost());
    }
}
/**
 * Picks a random endpoint that makes the test server produce a retryable failure:
 * either a retry-worthy error status code, or one of the special endpoints
 * {@code /coe}, {@code /soe}, {@code /ioe} (presumably connect / socket / generic
 * I/O exceptions -- see the test server setup).
 */
private static String randomErrorRetryEndpoint() {
    int choice = RandomNumbers.randomIntBetween(getRandom(), 0, 3);
    switch (choice) {
        case 0:
            return "/" + randomErrorRetryStatusCode(getRandom());
        case 1:
            return "/coe";
        case 2:
            return "/soe";
        case 3:
            return "/ioe";
        default:
            // Unreachable: randomIntBetween is bounded to [0, 3].
            throw new UnsupportedOperationException();
    }
}
/**
 * Builds a mutable {@link Set} containing every {@link Node#getHost() host}
 * in use by the test.
 */
private Set<HttpHost> hostsSet() {
    Set<HttpHost> hosts = new HashSet<>();
    nodes.forEach(node -> hosts.add(node.getHost()));
    return hosts;
}
}
| |
// Copyright (c) 2012 Jeff Ichnowski
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above
// copyright notice, this list of conditions and the following
// disclaimer.
//
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials
// provided with the distribution.
//
// * Neither the name of the OWASP nor the names of its
// contributors may be used to endorse or promote products
// derived from this software without specific prior written
// permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
package org.owasp.encoder;
import java.io.IOException;
import java.io.Writer;
import java.nio.CharBuffer;
import java.nio.charset.CoderResult;
/**
 * EncodedWriter -- A writer that encodes all input for a specific context and writes the encoded output to another
 * writer.
 *
 * @author Jeff Ichnowski
 */
public class EncodedWriter extends Writer {
    /**
     * Size (in chars) of the intermediate buffer that holds encoded output before it is flushed to the wrapped
     * writer.
     */
    static final int BUFFER_SIZE = 1024;
    /**
     * Buffer to use for handling characters remaining in the input buffer after an encode. The value is set high
     * enough to handle the lookaheads of all the encoders in the package.
     */
    static final int LEFT_OVER_BUFFER = 16;
    /**
     * The wrapped writer.
     */
    private Writer _out;
    /**
     * The encoder used to encode input to the output writer.
     */
    private Encoder _encoder;
    /**
     * Where encoded output is buffered before sending on to the output writer.
     */
    private CharBuffer _buffer = CharBuffer.allocate(BUFFER_SIZE);
    /**
     * Some encoders require more input or an explicit end-of-input flag before they will process the remaining
     * characters of an input buffer. Because the writer API cannot pass this information on to the caller (e.g. by
     * returning how many bytes were actually written), this writer implementation must buffer up the remaining
     * characters between calls. The <code>_hasLeftOver</code> boolean is a flag used to indicate that there are
     * left over characters in the buffer.
     */
    private boolean _hasLeftOver;
    /**
     * See comment on _hasLeftOver. This buffer is created on-demand once. Whether it has anything to flush is
     * determined by the _hasLeftOver flag.
     */
    private CharBuffer _leftOverBuffer;

    /**
     * Creates an EncodedWriter that uses the specified encoder to encode all input before sending it to the wrapped
     * writer.
     *
     * @param out the target for all writes
     * @param encoder the encoder to use
     * @throws NullPointerException if out or encoder is null
     */
    public EncodedWriter(Writer out, Encoder encoder) {
        super(out);
        // Redundant null check, super(out) checks for null and throws NPE.
        // if (out == null) {
        // throw new NullPointerException("writer must not be null");
        // }
        if (encoder == null) {
            throw new NullPointerException("encoder must not be null");
        }
        _out = out;
        _encoder = encoder;
    }

    /**
     * Creates an EncodedWriter that uses the specified encoder to encode all input before sending it to the wrapped
     * writer. This method is equivalent to calling:
     * <pre>
     * new EncodedWriter(out, Encoders.forName(contextName));
     * </pre>
     *
     * @param out the target for all writes
     * @param contextName the encoding context name.
     * @throws UnsupportedContextException if the contextName is unrecognized or not supported.
     */
    public EncodedWriter(Writer out, String contextName) throws UnsupportedContextException {
        this(out, Encoders.forName(contextName));
    }

    @Override
    public void write(char[] cbuf, int off, int len) throws IOException {
        synchronized (lock) {
            // View the caller's array as a CharBuffer spanning [off, off+len).
            CharBuffer input = CharBuffer.wrap(cbuf);
            input.limit(off + len).position(off);
            // First let any characters left over from the previous write() be encoded
            // now that more input is available.
            flushLeftOver(input);
            for (;;) {
                CoderResult cr = _encoder.encode(input, _buffer, false);
                if (cr.isUnderflow()) {
                    // Encoder consumed all it could without seeing end-of-input.
                    if (input.hasRemaining()) {
                        // Stash the unconsumed tail (encoder lookahead) until the next
                        // write() or close(). LEFT_OVER_BUFFER is sized for the largest
                        // lookahead of the encoders in this package.
                        if (_leftOverBuffer == null) {
                            _leftOverBuffer = CharBuffer.allocate(LEFT_OVER_BUFFER);
                        }
                        _leftOverBuffer.put(input);
                        _hasLeftOver = true;
                    }
                    return;
                }
                if (cr.isOverflow()) {
                    // Output buffer is full -- push it to the wrapped writer and retry.
                    flushBufferToWriter();
                }
            }
        }
    }

    /**
     * Flushes the contents of the buffer to the writer and resets the buffer to make room for more input.
     *
     * @throws IOException thrown by the wrapped output.
     */
    private void flushBufferToWriter() throws IOException {
        _out.write(_buffer.array(), 0, _buffer.position());
        _buffer.clear();
    }

    /**
     * Flushes the left-over buffer. Characters from the input buffer are used to add more data to the
     * _leftOverBuffer in order to make the flush happen.
     *
     * @param input the next input to encode, or null if at end of file.
     * @throws IOException from the underlying writer.
     */
    private void flushLeftOver(CharBuffer input) throws IOException {
        if (!_hasLeftOver) {
            return;
        }
        for (;;) {
            // Feed one character at a time so the encoder can get past its lookahead.
            if (input != null && input.hasRemaining()) {
                _leftOverBuffer.put(input.get());
            }
            _leftOverBuffer.flip();
            // input == null signals end-of-input, forcing the encoder to drain everything.
            CoderResult cr = _encoder.encode(_leftOverBuffer, _buffer, input == null);
            if (cr.isUnderflow()) {
                if (_leftOverBuffer.hasRemaining()) {
                    // Not enough context yet -- restore write mode, keeping unread chars.
                    _leftOverBuffer.compact();
                } else {
                    break;
                }
            }
            if (cr.isOverflow()) {
                // NOTE(review): on this path the loop re-enters with _leftOverBuffer still
                // flipped (no compact()), so the following put()/flip() could clobber unread
                // characters if the output buffer fills mid-flush -- verify against the
                // encoders' lookahead guarantees.
                flushBufferToWriter();
            }
        }
        _hasLeftOver = false;
        _leftOverBuffer.clear();
    }

    @Override
    public void flush() throws IOException {
        synchronized (lock) {
            // Push buffered encoded output (but not unencoded left-over chars) downstream.
            flushBufferToWriter();
            _out.flush();
        }
    }

    @Override
    public void close() throws IOException {
        synchronized (lock) {
            // null input = end-of-input: force the encoder to emit any held-back characters.
            flushLeftOver(null);
            flushBufferToWriter();
            _out.close();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.update;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.HashMap;
import java.util.LinkedHashMap;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
 * Integration tests for updating the {@code _all} and {@code _timestamp} meta field mappings on a live
 * cluster: legal updates must be acknowledged and visible on every node, while conflicting updates must
 * be rejected and leave the pre-existing mapping untouched cluster-wide.
 */
public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest {
    // Index and type names shared by most tests in this class.
    private static final String INDEX = "index";
    private static final String TYPE = "type";

    /** Flipping {@code _all.enabled} from false to true must be rejected as a merge conflict. */
    @Test
    public void test_all_enabled() throws Exception {
        XContentBuilder mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").field("enabled", "false").endObject().endObject().endObject().endObject();
        XContentBuilder mappingUpdate = jsonBuilder().startObject().startObject("_all").field("enabled", "true").endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject();
        String errorMessage = "[_all] enabled is false now encountering true";
        testConflict(mapping.string(), mappingUpdate.string(), errorMessage);
    }

    /** Every conflicting {@code _all} sub-setting in the fixture update must be reported. */
    @Test
    public void test_all_conflicts() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_create_index.json");
        String mappingUpdate = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json");
        String[] errorMessage = {"[_all] enabled is true now encountering false",
                "[_all] cannot enable norms (`norms.enabled`)",
                "[_all] has different store values",
                "[_all] has different store_term_vector values",
                "[_all] has different store_term_vector_offsets values",
                "[_all] has different store_term_vector_positions values",
                "[_all] has different store_term_vector_payloads values",
                "[_all] has different analyzer",
                "[_all] has different similarity"};
        // fielddata and search_analyzer should not report conflict
        testConflict(mapping, mappingUpdate, errorMessage);
    }

    /**
     * A type created without an explicit {@code _all} setting must inherit {@code enabled: false} from
     * {@code _default_}, while a type explicitly enabling {@code _all} must keep it enabled.
     */
    @Test
    public void test_all_with_default() throws Exception {
        String defaultMapping = jsonBuilder().startObject().startObject("_default_")
                .startObject("_all")
                .field("enabled", false)
                .endObject()
                .endObject().endObject().string();
        client().admin().indices().prepareCreate("index").addMapping("_default_", defaultMapping).get();
        String docMapping = jsonBuilder().startObject()
                .startObject("doc")
                .endObject()
                .endObject().string();
        PutMappingResponse response = client().admin().indices().preparePutMapping("index").setType("doc").setSource(docMapping).get();
        assertTrue(response.isAcknowledged());
        String docMappingUpdate = jsonBuilder().startObject().startObject("doc")
                .startObject("properties")
                .startObject("text")
                .field("type", "string")
                .endObject()
                .endObject()
                .endObject()
                .endObject().string();
        response = client().admin().indices().preparePutMapping("index").setType("doc").setSource(docMappingUpdate).get();
        assertTrue(response.isAcknowledged());
        String docMappingAllExplicitEnabled = jsonBuilder().startObject()
                .startObject("doc_all_enabled")
                .startObject("_all")
                .field("enabled", true)
                .endObject()
                .endObject()
                .endObject().string();
        response = client().admin().indices().preparePutMapping("index").setType("doc_all_enabled").setSource(docMappingAllExplicitEnabled).get();
        assertTrue(response.isAcknowledged());
        GetMappingsResponse mapping = client().admin().indices().prepareGetMappings("index").get();
        // "doc" inherited _all.enabled=false from _default_...
        HashMap props = (HashMap)mapping.getMappings().get("index").get("doc").getSourceAsMap().get("_all");
        assertThat((Boolean)props.get("enabled"), equalTo(false));
        // ...and its own properties survived the update.
        props = (HashMap)mapping.getMappings().get("index").get("doc").getSourceAsMap().get("properties");
        assertNotNull(props);
        assertNotNull(props.get("text"));
        // The explicitly enabled type keeps _all on despite the default.
        props = (HashMap)mapping.getMappings().get("index").get("doc_all_enabled").getSourceAsMap().get("_all");
        assertThat((Boolean)props.get("enabled"), equalTo(true));
        props = (HashMap)mapping.getMappings().get("index").get("_default_").getSourceAsMap().get("_all");
        assertThat((Boolean)props.get("enabled"), equalTo(false));
    }

    /** {@code _all} with doc_values fielddata must be rejected at index creation time. */
    @Test
    public void test_doc_valuesInvalidMapping() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("mappings").startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().endObject().string();
        try {
            prepareCreate(INDEX).setSource(mapping).get();
            fail();
        } catch (MapperParsingException e) {
            assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
        }
    }

    /**
     * {@code _all} with doc_values fielddata must also be rejected on a mapping update, and the
     * rejected update must not leak into any node's cluster state.
     */
    @Test
    public void test_doc_valuesInvalidMappingOnUpdate() throws Exception {
        String mapping = jsonBuilder().startObject().startObject(TYPE).startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject().string();
        prepareCreate(INDEX).addMapping(TYPE, mapping).get();
        String mappingUpdate = jsonBuilder().startObject().startObject(TYPE).startObject("_all").startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject().endObject().string();
        GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
        try {
            client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mappingUpdate).get();
            fail();
        } catch (MapperParsingException e) {
            assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
        }
        // make sure all nodes have same cluster state
        compareMappingOnNodes(mappingsBeforeUpdateResponse);
    }

    // checks if the setting for timestamp and size are kept even if disabled
    @Test
    public void testDisabledSizeTimestampIndexDoNotLooseMappings() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
        prepareCreate(INDEX).addMapping(TYPE, mapping).get();
        GetMappingsResponse mappingsBeforeGreen = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
        ensureGreen(INDEX);
        // make sure all nodes have same cluster state
        compareMappingOnNodes(mappingsBeforeGreen);
    }

    /**
     * Creates INDEX from {@code mapping}, applies {@code mappingUpdate}, and asserts the update fails
     * with every expected error message while the original mapping stays identical on all nodes.
     *
     * @param mapping the mapping the index is created with
     * @param mappingUpdate the conflicting mapping update to apply
     * @param errorMessages fragments that must all appear in the merge failure
     * @throws InterruptedException if waiting for green cluster health is interrupted
     */
    protected void testConflict(String mapping, String mappingUpdate, String... errorMessages) throws InterruptedException {
        assertAcked(prepareCreate(INDEX).setSource(mapping).get());
        ensureGreen(INDEX);
        GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
        try {
            client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mappingUpdate).get();
            fail();
        } catch (MergeMappingException e) {
            for (String errorMessage : errorMessages) {
                assertThat(e.getDetailedMessage(), containsString(errorMessage));
            }
        }
        compareMappingOnNodes(mappingsBeforeUpdateResponse);
    }

    /** Asserts that every node's local mapping for INDEX/TYPE equals {@code previousMapping}. */
    private void compareMappingOnNodes(GetMappingsResponse previousMapping) {
        // make sure all nodes have same cluster state
        for (Client client : cluster()) {
            GetMappingsResponse currentMapping = client.admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).setLocal(true).get();
            assertThat(previousMapping.getMappings().get(INDEX).get(TYPE).source(), equalTo(currentMapping.getMappings().get(INDEX).get(TYPE).source()));
        }
    }

    /**
     * Non-conflicting {@code _timestamp} fielddata settings (loading, format) must be updatable
     * regardless of whether {@code _timestamp} is enabled.
     */
    @Test
    public void testUpdateTimestamp() throws Exception {
        boolean enabled = randomBoolean();
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_timestamp").field("enabled", enabled).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "no").endObject()
                .endObject().endObject();
        client().admin().indices().prepareCreate("test").addMapping("type", mapping).get();
        GetMappingsResponse appliedMappings = client().admin().indices().prepareGetMappings("test").get();
        LinkedHashMap timestampMapping = (LinkedHashMap) appliedMappings.getMappings().get("test").get("type").getSourceAsMap().get("_timestamp");
        assertThat((Boolean) timestampMapping.get("store"), equalTo(false));
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("loading"), equalTo("lazy"));
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("format"), equalTo("doc_values"));
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_timestamp").field("enabled", enabled).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "no").endObject()
                .endObject().endObject();
        // NOTE(review): the put-mapping response is never checked -- presumably this should
        // assert putMappingResponse.isAcknowledged(); confirm before relying on this test.
        PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
        appliedMappings = client().admin().indices().prepareGetMappings("test").get();
        timestampMapping = (LinkedHashMap) appliedMappings.getMappings().get("test").get("type").getSourceAsMap().get("_timestamp");
        assertThat((Boolean) timestampMapping.get("store"), equalTo(false));
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("loading"), equalTo("eager"));
        assertThat((String)((LinkedHashMap) timestampMapping.get("fielddata")).get("format"), equalTo("array"));
    }

    /** Conflicting {@code _timestamp} settings (index, store, default, path) must all be reported. */
    @Test
    @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/10297")
    public void testTimestampMergingConflicts() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject(TYPE)
                .startObject("_timestamp").field("enabled", true)
                .startObject("fielddata").field("format", "doc_values").endObject()
                .field("store", "yes")
                .field("index", "analyzed")
                .field("path", "foo")
                .field("default", "1970-01-01")
                .endObject()
                .endObject().endObject().string();
        client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping).get();
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_timestamp").field("enabled", false)
                .startObject("fielddata").field("format", "array").endObject()
                .field("store", "no")
                .field("index", "no")
                .field("path", "bar")
                .field("default", "1970-01-02")
                .endObject()
                .endObject().endObject().string();
        GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
        try {
            client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mapping).get();
            fail("This should result in conflicts when merging the mapping");
        } catch (MergeMappingException e) {
            String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different store values", "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"};
            for (String conflict : expectedConflicts) {
                assertThat(e.getDetailedMessage(), containsString(conflict));
            }
        }
        compareMappingOnNodes(mappingsBeforeUpdateResponse);
    }
}
| |
/*
* Copyright 2015-2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.impl.tinkerpop.sql.impl;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.sql.Statement;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import javax.sql.DataSource;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.MapConfiguration;
import com.tinkerpop.blueprints.CloseableIterable;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Features;
import com.tinkerpop.blueprints.KeyIndexableGraph;
import com.tinkerpop.blueprints.Parameter;
import com.tinkerpop.blueprints.ThreadedTransactionalGraph;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;
/**
* @author Lukas Krejci
* @since 0.13.0
*/
public final class SqlGraph implements ThreadedTransactionalGraph, KeyIndexableGraph {
// Static, immutable declaration of the Blueprints capabilities of this implementation;
// returned unchanged by getFeatures().
private static final Features FEATURES = new Features();
static {
    FEATURES.supportsSerializableObjectProperty = false;
    FEATURES.supportsBooleanProperty = true;
    FEATURES.supportsDoubleProperty = true;
    FEATURES.supportsFloatProperty = true;
    FEATURES.supportsIntegerProperty = true;
    FEATURES.supportsPrimitiveArrayProperty = false;
    FEATURES.supportsUniformListProperty = false;
    FEATURES.supportsMixedListProperty = false;
    FEATURES.supportsLongProperty = true;
    FEATURES.supportsMapProperty = false;
    FEATURES.supportsStringProperty = true;
    FEATURES.supportsDuplicateEdges = true;
    FEATURES.supportsSelfLoops = true;
    FEATURES.isPersistent = true;
    FEATURES.isWrapper = false;
    FEATURES.supportsVertexIteration = true;
    FEATURES.supportsEdgeIteration = true;
    FEATURES.supportsVertexIndex = false;
    FEATURES.supportsEdgeIndex = false;
    FEATURES.ignoresSuppliedIds = true;
    FEATURES.supportsTransactions = true;
    FEATURES.supportsIndices = false;
    FEATURES.supportsKeyIndices = true;
    FEATURES.supportsVertexKeyIndex = true;
    FEATURES.supportsEdgeKeyIndex = true;
    FEATURES.supportsEdgeRetrieval = true;
    FEATURES.supportsVertexProperties = true;
    FEATURES.supportsEdgeProperties = true;
    FEATURES.supportsThreadedTransactions = true;
    FEATURES.supportsThreadIsolatedTransactions = false;
}
// Source of JDBC connections for this graph and for per-transaction clones.
private final DataSource dataSource;
// Lazily opened connection backing this graph instance (see initConnection/ensureConnection,
// defined elsewhere in this class).
private Connection connection;
// Prepared-statement factory bound to this graph's connection and table names.
private Statements statements;
// True once this instance has uncommitted modifications; influences newTransaction().
private boolean dirty;
// Configurable table names (defaults applied in the private constructor).
private final String verticesTableName;
private final String edgesTableName;
private final String vertexPropertiesTableName;
private final String edgePropertiesTableName;
// Derived table names for key-index bookkeeping and unique-property enforcement.
private final String vertexIndicesTableName;
private final String edgeIndicesTableName;
private final String uniqueVertexPropertiesTableName;
private final String uniqueEdgePropertiesTableName;
// Behavioral switches, all configurable; defaults: eager=true, close-on-end=false, cache=true.
private final boolean loadPropertiesEagerly;
private final boolean closeConnectionOnTransactionEnd;
private final boolean cacheStatements;
// Number of transactions seen so far -- NOTE(review): usage not visible in this chunk; confirm.
private long transactionCount;
// Cached names of indexed property keys, lazily filled by refreshIndices().
private Set<String> vertexPropertyIndices;
private Set<String> edgePropertyIndices;
// Helper maintaining the key-index tables when elements/properties change.
private final IndexUpdater indexUpdater;
// Weak per-id cache so concurrent lookups of the same vertex share one instance
// without preventing garbage collection.
private final WeakHashMap<Long, WeakReference<SqlVertex>> vertexCache = new WeakHashMap<>();
/**
 * Instantiates a new SQL graph configured from the provided configuration object.
 * The following properties are supported:
 * <ul>
 * <li><code>sql.datasource.class</code> - the name of the datasource class from some JDBC driver on the
 * classpath</li>
 * <li><code>sql.datasource.*</code> - any properties of the datasource can be passed using this prefix.
 * E.g. <code>sql.datasource.portNumber</code>, <code>sql.datasource.serverName</code>, etc. See the documentation
 * of the datasource for the list of available properties.</li>
 * <li><code>sql.verticesTable</code> - the name of the vertices table. Defaults to "vertices".</li>
 * <li><code>sql.edgesTable</code> - the name of the edges table. Defaults to "edges".</li>
 * <li><code>sql.vertexPropertiesTable</code> - the name of the table for vertex properties. Defaults to
 * "vertex_properties".</li>
 * <li><code>sql.edgePropertiesTable</code> - the name of the table for edge properties. Defaults to
 * "edge_properties".</li>
 * </ul>
 *
 * @param configuration the configuration to use
 *
 * @throws Exception if the datasource class cannot be found, instantiated or configured
 */
public SqlGraph(Configuration configuration) throws Exception {
    // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
    // which silently propagates undeclared checked exceptions from the datasource's
    // no-arg constructor. The class must still expose a no-arg constructor.
    this((DataSource) Class.forName(configuration.getString("sql.datasource.class"))
            .getDeclaredConstructor().newInstance(), configuration);
    // Apply the sql.datasource.* bean properties after the delegated constructor has
    // stored the datasource reference.
    setupDataSource(dataSource, configuration);
}
/**
 * Convenience constructor: wraps the map in a {@link MapConfiguration} and delegates to
 * {@link #SqlGraph(Configuration)}; the same configuration keys are supported.
 *
 * @param configuration configuration key/value pairs
 * @throws Exception if the datasource cannot be instantiated or configured
 */
public SqlGraph(Map<String, Object> configuration) throws Exception {
    this(new MapConfiguration(configuration));
}
/**
 * Applies all <code>sql.datasource.*</code> configuration entries to the datasource through its
 * JavaBean setters, matching property names case-insensitively. Keys without a matching writable
 * bean property are silently skipped.
 *
 * @param dataSource the datasource instance to configure
 * @param configuration the configuration holding <code>sql.datasource.*</code> keys
 * @throws Exception on bean introspection or setter invocation failure
 */
private void setupDataSource(DataSource dataSource, Configuration configuration) throws Exception {
    BeanInfo beanInfo = Introspector.getBeanInfo(dataSource.getClass());
    PropertyDescriptor[] properties = beanInfo.getPropertyDescriptors();
    // Index the bean properties by lower-cased name for case-insensitive lookup below.
    Map<String, PropertyDescriptor> propsByName = new HashMap<>();
    for (PropertyDescriptor p : properties) {
        propsByName.put(p.getName().toLowerCase(), p);
    }
    // Raw Iterator kept for compatibility with the commons-configuration version in use;
    // getKeys returns the keys under the "sql.datasource" prefix.
    Iterator it = configuration.getKeys("sql.datasource");
    while (it.hasNext()) {
        String key = (String) it.next();
        String property = key.substring("sql.datasource.".length()).toLowerCase();
        PropertyDescriptor d = propsByName.get(property);
        if (d == null) {
            // No such bean property on this datasource implementation -- ignore the key.
            continue;
        }
        Method write = d.getWriteMethod();
        if (write != null) {
            write.invoke(dataSource, configuration.getProperty(key));
        }
    }
}
/**
 * Creates a graph over the given datasource with all table names and behavioral switches
 * at their defaults (see the private constructor).
 *
 * @param dataSource the datasource to obtain connections from
 */
public SqlGraph(DataSource dataSource) {
    this(dataSource, null, null, null, null, null, null, null);
}
/**
 * Creates a graph over the given datasource, reading table names and behavioral switches from
 * the configuration. Missing keys yield null and fall back to the defaults applied in the
 * private constructor.
 *
 * @param dataSource the datasource to obtain connections from
 * @param configuration the configuration holding the <code>sql.*</code> keys
 * @throws Exception on configuration access failure
 */
public SqlGraph(DataSource dataSource, Configuration configuration) throws Exception {
    this(dataSource, configuration.getString("sql.verticesTable"),
        configuration.getString("sql.edgesTable"),
        configuration.getString("sql.vertexPropertiesTable"),
        configuration.getString("sql.edgePropertiesTable"),
        configuration.getBoolean("sql.loadPropertiesEagerly", null),
        configuration.getBoolean("sql.closeConnectionOnTransactionEnd", null),
        configuration.getBoolean("sql.cacheStatements", null));
}
/**
 * Canonical constructor: applies defaults for any null argument (table names "vertices",
 * "edges", "vertex_properties", "edge_properties"; eager loading on, close-on-transaction-end
 * off, statement caching on) and derives the index/unique-property table names.
 */
private SqlGraph(DataSource dataSource, String vTable, String eTable, String vpTable, String epTable,
    Boolean loadPropertiesEagerly, Boolean closeConnectionOnTransactionEnd, Boolean cacheStatements) {
    this.dataSource = dataSource;
    verticesTableName = vTable == null ? "vertices" : vTable;
    edgesTableName = eTable == null ? "edges" : eTable;
    vertexPropertiesTableName = vpTable == null ? "vertex_properties" : vpTable;
    edgePropertiesTableName = epTable == null ? "edge_properties" : epTable;
    this.loadPropertiesEagerly = loadPropertiesEagerly == null ? true : loadPropertiesEagerly;
    this.closeConnectionOnTransactionEnd =
        closeConnectionOnTransactionEnd == null ? false : closeConnectionOnTransactionEnd;
    this.cacheStatements = cacheStatements == null ? true : cacheStatements;
    // Derived table names: key-index bookkeeping and unique-property constraint tables.
    this.vertexIndicesTableName = verticesTableName + "_uidxs";
    this.edgeIndicesTableName = edgesTableName + "_uidxs";
    this.uniqueVertexPropertiesTableName = vertexPropertiesTableName + "_uq";
    this.uniqueEdgePropertiesTableName = edgePropertiesTableName + "_uq";
    this.indexUpdater = new IndexUpdater();
}
/**
 * Creates the graph schema in the database unless it already exists. Existence is probed by
 * selecting from the vertices table; on failure the DDL script is loaded from the classpath
 * (preferring a database-product-specific "&lt;product&gt;-schema.sql", falling back to
 * "schema.sql"), table-name placeholders are substituted, and each statement is executed.
 * The whole operation is committed on success and rolled back on any failure.
 *
 * @throws SQLException on database errors while creating the schema
 * @throws IOException on failure to read the schema creation script
 */
public synchronized void createSchemaIfNeeded() throws SQLException, IOException {
    ensureConnection();
    try (Statement st = connection.createStatement()) {
        st.execute("SELECT 1 FROM " + getVerticesTableName());
        // The probe succeeded, so the schema already exists -- nothing to do.
        return;
    } catch (SQLException ignored) {
        //good, the schema doesn't exist. Let's continue
        connection.rollback();
    }
    try {
        String dbName = connection.getMetaData().getDatabaseProductName();
        String script = dbName + "-schema.sql";
        InputStream schemaStream = getClass().getClassLoader().getResourceAsStream(script);
        if (schemaStream == null) {
            schemaStream = getClass().getClassLoader().getResourceAsStream("schema.sql");
        }
        if (schemaStream == null) {
            throw new AssertionError("Could not load the schema creation script.");
        }
        String contents = null;
        // Decode with an explicit charset: the previous platform-default decoding could
        // mis-read the script on hosts whose default encoding is not UTF-8.
        try (InputStreamReader rdr = new InputStreamReader(schemaStream, StandardCharsets.UTF_8)) {
            StringBuilder bld = new StringBuilder();
            char[] buffer = new char[512];
            int cnt;
            while ((cnt = rdr.read(buffer)) != -1) {
                bld.append(buffer, 0, cnt);
            }
            contents = bld.toString();
        }
        // Substitute the configurable table names into the script template.
        contents = contents.replace("%VERTICES%", verticesTableName);
        contents = contents.replace("%VERTEX_PROPERTIES%", vertexPropertiesTableName);
        contents = contents.replace("%EDGES%", edgesTableName);
        contents = contents.replace("%EDGE_PROPERTIES%", edgePropertiesTableName);
        String[] inst = contents.split(";");
        try (Statement st = connection.createStatement()) {
            for (int i = 0; i < inst.length; i++) {
                // we ensure that there is no spaces before or after the request string
                // in order to not execute empty statements
                if (!inst[i].trim().equals("")) {
                    st.executeUpdate(inst[i]);
                }
            }
        }
        refreshIndices();
        connection.commit();
    } catch (Throwable t) {
        connection.rollback();
        throw t;
    }
}
/**
 * Lazily populates the in-memory vertex/edge key-index caches from the database; subsequent
 * calls are no-ops once the caches exist.
 */
private synchronized void refreshIndices() {
    if (vertexPropertyIndices == null) {
        vertexPropertyIndices = new HashSet<>();
        edgePropertyIndices = new HashSet<>();
        indexUpdater.on(Vertex.class).refreshIndices();
        indexUpdater.on(Edge.class).refreshIndices();
    }
}
/**
 * Starts a new threaded transaction. This instance is reused while it has no uncommitted work
 * and connections are kept open across transactions; otherwise a fresh graph (and therefore a
 * fresh connection) with identical configuration is handed out.
 */
@Override
public synchronized TransactionalGraph newTransaction() {
    if (!dirty && !closeConnectionOnTransactionEnd) {
        return this;
    }
    Log.LOG.debug("Opening new transaction...");
    return new SqlGraph(dataSource, verticesTableName, edgesTableName, vertexPropertiesTableName,
        edgePropertiesTableName, loadPropertiesEagerly, closeConnectionOnTransactionEnd, cacheStatements);
}
/**
 * Ends the current transaction: commits on {@link Conclusion#SUCCESS}, rolls back otherwise.
 *
 * @param conclusion how the transaction should be concluded
 */
@Override
public void stopTransaction(Conclusion conclusion) {
    boolean succeeded = conclusion == Conclusion.SUCCESS;
    if (succeeded) {
        commit();
    } else {
        rollback();
    }
}
/**
 * Commits the current transaction on the underlying connection, wrapping any
 * {@link SQLException} in a {@link SqlGraphException}.
 */
@Override
public synchronized void commit() {
    try {
        initConnection();
        Log.LOG.debug("Committing...");
        connection.commit();
        // accountForTransactionEnd() is defined elsewhere in this class; presumably it
        // updates transaction bookkeeping / closes the connection when configured -- confirm.
        accountForTransactionEnd();
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * Rolls back the current transaction on the underlying connection, wrapping any
 * {@link SQLException} in a {@link SqlGraphException}.
 */
@Override
public synchronized void rollback() {
    try {
        initConnection();
        connection.rollback();
        // Same post-transaction bookkeeping as in commit(); defined elsewhere in this class.
        accountForTransactionEnd();
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * @return the static Blueprints feature declaration of this implementation
 */
@Override
public Features getFeatures() {
    return FEATURES;
}
/**
 * Inserts a new vertex row inside a savepoint. The supplied id is ignored (the features declare
 * {@code ignoresSuppliedIds}); the database generates the key.
 *
 * @param id ignored
 * @return the newly created, cached vertex, or null if the insert affected no rows
 */
@Override
public synchronized Vertex addVertex(Object id) {
    return withSavePoint(() -> {
        PreparedStatement stmt = statements.getAddVertex();
        if (stmt.executeUpdate() == 0) {
            return null;
        }
        try (ResultSet rs = stmt.getGeneratedKeys()) {
            Vertex ret = cache(statements.fromVertexResultSet(rs));
            // Mark this graph as having uncommitted work (affects newTransaction()).
            dirty = true;
            return ret;
        }
    });
}
/**
 * Loads a vertex by id, consulting the weak per-id cache first and only querying the database
 * on a miss.
 *
 * @param id the vertex id (anything getId() can convert to a Long)
 * @return the vertex, or null if the id is unusable or the statement produced no result set
 */
@Override
public synchronized SqlVertex getVertex(Object id) {
    Long realId = getId(id);
    if (realId == null) {
        return null;
    }
    // The weakly referenced instance may have been collected; fall through to the DB then.
    WeakReference<SqlVertex> ref = vertexCache.get(realId);
    SqlVertex v = ref == null ? null : ref.get();
    if (v != null) {
        return v;
    }
    initConnection();
    try {
        PreparedStatement stmt = statements.getGetVertex(realId);
        if (!stmt.execute()) {
            return null;
        }
        try (ResultSet rs = stmt.getResultSet()) {
            return cache(statements.fromVertexResultSet(rs));
        }
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * Deletes the vertex's row inside a savepoint and evicts it from the cache.
 *
 * @param vertex the vertex to remove; its id must be a Long
 * @throws IllegalStateException if no row with the vertex's id exists
 */
@Override
public synchronized void removeVertex(Vertex vertex) {
    withSavePoint(() -> {
        PreparedStatement stmt = statements.getRemoveVertex((Long) vertex.getId());
        if (stmt.executeUpdate() == 0) {
            throw new IllegalStateException("Vertex with id " + vertex.getId() + " doesn't exist.");
        }
        vertexCache.remove(vertex.getId());
        dirty = true;
        return null;
    });
}
/**
 * Streams all vertices from the database. The returned iterable wraps the open statement and
 * must be closed by the caller (it is a {@link CloseableIterable}).
 */
@Override
public synchronized CloseableIterable<Vertex> getVertices() {
    initConnection();
    try {
        PreparedStatement stmt = statements.getAllVertices();
        if (!stmt.execute()) {
            return StatementIterable.empty();
        }
        return new StatementIterable<Vertex>(SqlVertex.GENERATOR, this, stmt);
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * Finds vertices having the given property value by delegating to {@link #query()}.
 */
@Override
public synchronized CloseableIterable<Vertex> getVertices(String key, Object value) {
    return query().has(key, value).vertices();
}
/**
 * Inserts a new edge row between the two vertices and re-reads the stored row.
 *
 * <p>The supplied {@code id} is ignored — ids are database-generated. The edge
 * is re-fetched after the INSERT so the returned object reflects what the
 * database actually stored.
 *
 * @param id ignored
 * @param outVertex source vertex
 * @param inVertex target vertex
 * @param label edge label; required
 * @return the new edge, or {@code null} if the INSERT or the re-read produced nothing
 * @throws IllegalArgumentException if {@code label} is {@code null}
 * @throws SqlGraphException wrapping any {@link SQLException}
 */
@Override
public synchronized SqlEdge addEdge(Object id, Vertex outVertex, Vertex inVertex, String label) {
    if (label == null) {
        throw new IllegalArgumentException("null label");
    }
    return withSavePoint(() -> {
        PreparedStatement stmt = statements
            .getAddEdge((Long) inVertex.getId(), (Long) outVertex.getId(), label);
        if (stmt.executeUpdate() == 0) {
            return null;
        }
        long eid = -1;
        try (ResultSet rs = stmt.getGeneratedKeys()) {
            if (!rs.next()) {
                return null;
            }
            // The generated primary key of the freshly inserted edge row.
            eid = rs.getLong(1);
        }
        try (ResultSet rs = statements.getGetEdge(eid).executeQuery()) {
            if (!rs.next()) {
                return null;
            }
            SqlEdge ret = SqlEdge.GENERATOR.generate(this, rs);
            dirty = true;
            return ret;
        }
    });
}
/**
 * Looks up an edge by id.
 *
 * @param id a {@link Number} or its decimal string form; any other value
 *           (or an unparseable string) yields {@code null}
 * @return the edge, or {@code null} when the id is invalid or no row matches
 * @throws SqlGraphException wrapping any {@link SQLException}
 */
@Override
public synchronized SqlEdge getEdge(Object id) {
    Long eid = getId(id);
    if (eid == null) {
        return null;
    }
    initConnection();
    try {
        PreparedStatement stmt = statements.getGetEdge(eid);
        if (!stmt.execute()) {
            return null;
        }
        try (ResultSet rs = stmt.getResultSet()) {
            if (!rs.next()) {
                return null;
            }
            return SqlEdge.GENERATOR.generate(this, rs);
        }
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * Deletes the given edge's row.
 *
 * @throws IllegalStateException if no row with the edge's id exists
 * @throws SqlGraphException wrapping any {@link SQLException}
 */
@Override
public synchronized void removeEdge(Edge edge) {
    withSavePoint(() -> {
        PreparedStatement stmt = statements.getRemoveEdge((Long) edge.getId());
        if (stmt.executeUpdate() == 0) {
            throw new IllegalStateException("Edge with id " + edge.getId() + " doesn't exist.");
        }
        dirty = true;
        return null;
    });
}
/**
 * Streams all edges from the database.
 *
 * <p>NOTE(review): unlike {@link #getVertices()} this neither checks
 * {@code stmt.execute()} nor declares {@code CloseableIterable} — presumably
 * {@code StatementIterable} executes the statement itself; confirm and align
 * the two methods.
 *
 * @throws SqlGraphException wrapping any {@link SQLException}
 */
@Override
public synchronized Iterable<Edge> getEdges() {
    initConnection();
    try {
        PreparedStatement stmt = statements.getAllEdges();
        return new StatementIterable<Edge>(SqlEdge.GENERATOR, this, stmt);
    } catch (SQLException e) {
        throw new SqlGraphException(e);
    }
}
/**
 * Finds edges having the given property value by delegating to {@link #query()}.
 */
@Override
public synchronized CloseableIterable<Edge> getEdges(String key, Object value) {
    return query().has(key, value).edges();
}
/**
 * @return a fresh query builder bound to this graph
 */
@Override
public synchronized SqlGraphQuery query() {
    return new SqlGraphQuery(this);
}
/**
 * Shuts the graph down: commits any pending work on the open connection,
 * closes it, and drops the cached prepared statements.
 *
 * <p>The connection is closed in a {@code finally} block so it is released
 * even when the final commit fails; the previous code leaked the JDBC
 * connection in that case.
 *
 * @throws SqlGraphException wrapping any {@link SQLException} from the
 *         commit or the close
 */
@Override
public synchronized void shutdown() {
    if (connection != null) {
        try {
            try {
                connection.commit();
            } finally {
                // Always release the JDBC connection, even if the commit throws.
                connection.close();
            }
            statements.clearCache();
            Log.LOG.debugf("Shut down. Closed connection %s", connection);
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
    }
}
/**
 * Creates a key index for the given element type; existing property values for
 * that key are migrated into the indexed property table by the updater.
 *
 * @param key property key to index
 * @param elementClass {@code Vertex} or {@code Edge} subtype selecting the target tables
 * @param indexParameters ignored by this implementation (never consulted)
 * @throws IllegalArgumentException if {@code key} or {@code elementClass} is {@code null}
 */
@Override
public synchronized <T extends Element> void createKeyIndex(String key, Class<T> elementClass, Parameter...
                                                            indexParameters) {
    if (key == null || elementClass == null) {
        throw new IllegalArgumentException();
    }
    initConnection();
    indexUpdater.on(elementClass).createIndex(key);
}
/**
 * Drops the key index for the given element type; indexed property values are
 * migrated back into the non-indexed property table by the updater.
 *
 * @throws IllegalArgumentException if {@code key} or {@code elementClass} is {@code null}
 */
@Override public <T extends Element> void dropKeyIndex(String key, Class<T> elementClass) {
    if (key == null || elementClass == null) {
        throw new IllegalArgumentException();
    }
    initConnection();
    indexUpdater.on(elementClass).dropIndex(key);
}
/**
 * Returns the indexed property keys for the given element type.
 *
 * <p>The sets are exposed as unmodifiable views: the previous code returned
 * the live internal sets, allowing callers to corrupt the graph's index
 * bookkeeping.
 *
 * @param elementClass {@code Vertex} or {@code Edge} subtype; anything else
 *                     yields an empty set
 * @return read-only view of the indexed keys
 * @throws IllegalArgumentException if {@code elementClass} is {@code null}
 */
@Override public <T extends Element> Set<String> getIndexedKeys(Class<T> elementClass) {
    if (elementClass == null) {
        throw new IllegalArgumentException();
    }
    initConnection();
    if (Vertex.class.isAssignableFrom(elementClass)) {
        return Collections.unmodifiableSet(vertexPropertyIndices);
    } else if (Edge.class.isAssignableFrom(elementClass)) {
        return Collections.unmodifiableSet(edgePropertyIndices);
    } else {
        return Collections.emptySet();
    }
}
/**
 * Human-readable identifier of this graph, embedding its backing data source.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("sqlgraph(");
    sb.append(dataSource.toString());
    return sb.append(")").toString();
}
/** @return the open JDBC connection, lazily (re)opening one if needed */
synchronized Connection getConnection() {
    initConnection();
    return connection;
}

/** @return the prepared-statement cache bound to the current connection */
synchronized Statements getStatements() {
    initConnection();
    return statements;
}

/** @return number of transactions (commits or rollbacks) completed so far */
synchronized long getTransactionCount() {
    return transactionCount;
}
/** @return name of the vertices table */
String getVerticesTableName() {
    return verticesTableName;
}

/** @return name of the edges table */
String getEdgesTableName() {
    return edgesTableName;
}

/** @return name of the non-indexed vertex-properties table */
String getVertexPropertiesTableName() {
    return vertexPropertiesTableName;
}

/** @return name of the indexed ("unique") edge-properties table */
String getUniqueEdgePropertiesTableName() {
    return uniqueEdgePropertiesTableName;
}

/** @return name of the indexed ("unique") vertex-properties table */
String getUniqueVertexPropertiesTableName() {
    return uniqueVertexPropertiesTableName;
}

/** @return name of the non-indexed edge-properties table */
String getEdgePropertiesTableName() {
    return edgePropertiesTableName;
}

/** Marks the graph as having uncommitted changes. */
synchronized void setDirty() {
    dirty = true;
}

/** @return whether element properties are loaded together with the element */
boolean isLoadPropertiesEagerly() {
    return loadPropertiesEagerly;
}

/** @return whether prepared statements are cached across calls */
boolean isCacheStatements() {
    return cacheStatements;
}
/**
 * Best-effort rollback to the given savepoint; failures are logged, never thrown.
 *
 * @param sp savepoint to roll back to and release; {@code null} is a no-op
 */
void rollbackToSavePoint(Savepoint sp) {
    if (sp != null) {
        try {
            connection.rollback(sp);
            connection.releaseSavepoint(sp);
        } catch (SQLException e) {
            // Deliberately swallowed: savepoint rollback is best-effort cleanup.
            Log.LOG.wRollbackToSavepointFailed(e);
        }
    }
}
/**
 * Runs a JDBC payload against an open connection, translating any
 * {@link SQLException} into a {@link SqlGraphException}.
 *
 * <p>Despite the name, savepoint handling is currently disabled (see the
 * commented-out lines); the payload runs directly in the ambient transaction.
 * TODO(review): decide whether to restore savepoints or remove the remnants.
 */
<R> R withSavePoint(SQLCallable<R> payload) throws SqlGraphException {
    initConnection();
    //Savepoint sp = null;
    try {
        //sp = connection.setSavepoint();
        return payload.call();
    } catch (SQLException e) {
        //rollbackToSavePoint(sp);
        throw new SqlGraphException(e);
    }
}
/**
 * Opens a connection from the data source if none is currently open:
 * auto-commit off, READ_COMMITTED isolation, and a fresh {@link Statements}
 * cache bound to the new connection.
 *
 * @throws SqlGraphException wrapping any {@link SQLException}
 */
private void ensureConnection() {
    if (connection == null) {
        try {
            connection = dataSource.getConnection();
            connection.setAutoCommit(false);
            connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
            Log.LOG.debugf("Opened connection %s", connection);
            statements = new Statements(this);
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
    }
}
/**
 * Ensures a connection is open and refreshes the in-memory index-key sets
 * from the database before any operation uses them.
 */
private void initConnection() {
    ensureConnection();
    refreshIndices();
}
/**
 * Normalizes an external element id to the internal {@link Long} form.
 *
 * @param id candidate id; must not be {@code null}
 * @return the numeric id, or {@code null} when the value is neither a number
 *         nor a parseable decimal string (callers treat that as "not found")
 * @throws IllegalArgumentException if {@code id} is {@code null}
 */
private Long getId(Object id) {
    if (id == null) {
        throw new IllegalArgumentException("null id");
    }
    if (id instanceof Number) {
        return ((Number) id).longValue();
    }
    if (id instanceof String) {
        try {
            return Long.valueOf((String) id);
        } catch (NumberFormatException ignored) {
            // Not a decimal string — treated the same as an unknown id type.
            return null;
        }
    }
    return null;
}
/**
 * Stores the vertex in the weak-reference cache (keyed by id) and returns it.
 *
 * @param v vertex to cache; {@code null} is passed through untouched
 * @return the same {@code v}, for call chaining
 */
private SqlVertex cache(SqlVertex v) {
    if (v != null) {
        vertexCache.put(v.getId(), new WeakReference<>(v));
    }
    return v;
}
/**
 * Bookkeeping shared by commit and rollback: optionally closes the connection
 * (when the graph was configured to do so), clears the dirty flag, and bumps
 * the transaction counter.
 *
 * @throws SQLException if closing the connection fails
 */
private void accountForTransactionEnd() throws SQLException {
    if (closeConnectionOnTransactionEnd) {
        connection.close();
        Log.LOG.debugf("Closed connection %s", connection);
        connection = null;
        statements.clearCache();
    }
    dirty = false;
    transactionCount++;
}
/**
 * A {@link java.util.concurrent.Callable}-style unit of JDBC work that may
 * throw {@link SQLException}; used by {@code withSavePoint} to run statement
 * payloads under the graph's exception-translation logic. Annotated
 * {@code @FunctionalInterface} because it is implemented with lambdas
 * throughout this class.
 *
 * @param <R> the payload's result type
 */
@FunctionalInterface
interface SQLCallable<R> {
    /**
     * Executes the payload.
     *
     * @return the computed result (may be {@code null})
     * @throws SQLException on any JDBC failure
     */
    R call() throws SQLException;
}
/**
 * Helper that maintains the key-index bookkeeping tables and moves property
 * rows between the indexed ("unique") and non-indexed property tables when an
 * index is created or dropped. Call {@link #on(Class)} first to select the
 * vertex or edge structures; not independently thread-safe — relies on the
 * enclosing graph's synchronization.
 */
private class IndexUpdater {
    // State selected by on(): which element kind's tables/sets we operate on.
    Set<String> indices;    // live in-memory set of indexed key names
    String indexTable;      // table listing indexed key names
    String uPropTable;      // property table for indexed ("unique") keys
    String nuPropTable;     // property table for non-indexed keys
    String propTableFk;     // FK column pointing at the owning element

    /**
     * Points this updater at the vertex or edge structures.
     *
     * @throws IllegalArgumentException for any other element class
     */
    public IndexUpdater on(Class<?> elementClass) {
        if (Vertex.class.isAssignableFrom(elementClass)) {
            indices = vertexPropertyIndices;
            indexTable = vertexIndicesTableName;
            uPropTable = uniqueVertexPropertiesTableName;
            nuPropTable = vertexPropertiesTableName;
            propTableFk = "vertex_id";
        } else if (Edge.class.isAssignableFrom(elementClass)) {
            indices = edgePropertyIndices;
            indexTable = edgeIndicesTableName;
            uPropTable = uniqueEdgePropertiesTableName;
            nuPropTable = edgePropertiesTableName;
            propTableFk = "edge_id";
        } else {
            throw new IllegalArgumentException("Invalid elementClass: " + elementClass);
        }
        return this;
    }

    /**
     * Registers {@code key} as indexed and migrates its existing property rows
     * into the indexed table.
     *
     * @throws IllegalArgumentException if the index already exists
     * @throws SqlGraphException wrapping any {@link SQLException}
     */
    public IndexUpdater createIndex(String key) {
        refreshIndices();
        if (indices.contains(key)) {
            throw new IllegalArgumentException("Index '" + key + "' already exists");
        }
        String sql = "INSERT INTO " + indexTable + "(name) VALUES (?)";
        try {
            PreparedStatement st = statements.get(sql);
            st.setString(1, key);
            st.executeUpdate();
            indices.add(key);
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
        //now copy over the data from the non-unique table, if any
        sql = "INSERT INTO " + uPropTable + "(" + propTableFk + ", name, string_value, numeric_value, value_type)"
            + " SELECT " + propTableFk + ", name, string_value, numeric_value, value_type FROM " + nuPropTable
            + " WHERE name = ?";
        boolean moved = isRowsUpdated(sql, key);
        //now delete from the non-unique table
        if (moved) {
            deleteOldKeyValues(nuPropTable, key);
        }
        return this;
    }

    /**
     * Unregisters {@code key} and migrates its property rows back into the
     * non-indexed table. Dropping a non-existent index is a no-op.
     *
     * @throws SqlGraphException wrapping any {@link SQLException}
     */
    public IndexUpdater dropIndex(String key) {
        refreshIndices();
        if (!indices.contains(key)) {
            return this;
        }
        String sql = "DELETE FROM " + indexTable + " WHERE name = ?";
        try {
            PreparedStatement st = statements.get(sql);
            st.setString(1, key);
            st.executeUpdate();
            indices.remove(key);
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
        //now copy over the data from the unique table, if any
        sql = "INSERT INTO " + nuPropTable + "(" + propTableFk + ", name, string_value, numeric_value, value_type)"
            + " SELECT " + propTableFk + ", name, string_value, numeric_value, value_type FROM " + uPropTable
            + " WHERE name = ?";
        boolean moved = isRowsUpdated(sql, key);
        //now delete from the unique table
        if (moved) {
            deleteOldKeyValues(uPropTable, key);
        }
        return this;
    }

    /**
     * Reloads the in-memory key set from the index bookkeeping table.
     *
     * @throws SqlGraphException wrapping any {@link SQLException}
     */
    public void refreshIndices() {
        String sql = "SELECT name FROM " + indexTable;
        try (ResultSet rs = statements.get(sql).executeQuery()) {
            indices.clear();
            while (rs.next()) {
                indices.add(rs.getString(1));
            }
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
    }

    /**
     * Runs an UPDATE/INSERT with a single string parameter.
     *
     * @return {@code true} when at least one row was affected
     */
    private boolean isRowsUpdated(String sql, String key) {
        try {
            PreparedStatement st = statements.get(sql);
            st.setString(1, key);
            return st.executeUpdate() > 0;
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
    }

    /** Deletes every property row named {@code key} from the given table. */
    private void deleteOldKeyValues(String oldTableName, String key) {
        String sql = "DELETE FROM " + oldTableName + " WHERE name = ?";
        try {
            PreparedStatement st = statements.get(sql);
            st.setString(1, key);
            st.executeUpdate();
        } catch (SQLException e) {
            throw new SqlGraphException(e);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.p2p;
import java.net.URL;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.configuration.DeploymentMode;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.GridTestExternalClassLoader;
import org.apache.ignite.testframework.config.GridTestProperties;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.junit.Test;
/**
 * Tests peer class loading behavior when the missed-resources cache is
 * disabled (size 0): a class that cannot be loaded must fail the task, and a
 * subsequent attempt after the class becomes loadable must succeed (i.e. the
 * failure is not cached).
 */
@GridCommonTest(group = "P2P")
public class GridP2PMissedResourceCacheSizeSelfTest extends GridCommonAbstractTest {
    /** Task name. */
    private static final String TASK_NAME1 = "org.apache.ignite.tests.p2p.P2PTestTaskExternalPath1";

    /** Task name. */
    private static final String TASK_NAME2 = "org.apache.ignite.tests.p2p.P2PTestTaskExternalPath2";

    /** Filter name. */
    private static final String FILTER_NAME1 = "org.apache.ignite.tests.p2p.P2PEventFilterExternalPath1";

    /** Filter name. */
    private static final String FILTER_NAME2 = "org.apache.ignite.tests.p2p.P2PEventFilterExternalPath2";

    /** Current deployment mode. Used in {@link #getConfiguration(String)}. */
    private DeploymentMode depMode;

    /** Missed-resources cache size applied in {@link #getConfiguration(String)}. */
    private int missedRsrcCacheSize;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
        // Override P2P configuration to exclude Task and Job classes
        cfg.setPeerClassLoadingLocalClassPathExclude(GridP2PTestTask.class.getName(), GridP2PTestJob.class.getName());
        cfg.setDeploymentMode(depMode);
        cfg.setPeerClassLoadingMissedResourcesCacheSize(missedRsrcCacheSize);
        cfg.setCacheConfiguration();
        return cfg;
    }

    /**
     * Task execution here throws {@link IgniteException}.
     * This is correct behavior.
     *
     * @param g1 Grid 1.
     * @param g2 Grid 2.
     * @param task Task to execute.
     */
    @SuppressWarnings({"unchecked"})
    private void executeFail(Ignite g1, Ignite g2, Class task) {
        try {
            g1.compute().execute(task, g2.cluster().localNode().id());
            assert false; // Exception must be thrown.
        }
        catch (IgniteException e) {
            // Throwing exception is a correct behaviour.
            info("Received correct exception: " + e);
        }
    }

    /**
     * Querying events here throws {@link IgniteException}.
     * This is correct behavior.
     *
     * @param g Grid.
     * @param filter Event filter.
     */
    private void executeFail(ClusterGroup g, IgnitePredicate<Event> filter) {
        try {
            g.ignite().events(g).remoteQuery(filter, 0);
            assert false; // Exception must be thrown.
        }
        catch (IgniteException e) {
            // Throwing exception is a correct behaviour.
            info("Received correct exception: " + e);
        }
    }

    /**
     * With a missed-resources cache of size 0, a failed class load must not be
     * remembered: the same task must succeed once the class becomes loadable.
     *
     * @param depMode deployment mode.
     * @throws Exception If failed.
     */
    @SuppressWarnings("unchecked")
    private void processSize0Test(DeploymentMode depMode) throws Exception {
        this.depMode = depMode;
        missedRsrcCacheSize = 0;
        try {
            Ignite ignite1 = startGrid(1);
            Ignite ignite2 = startGrid(2);
            String path = GridTestProperties.getProperty("p2p.uri.cls");
            info("Using path: " + path);
            GridTestExternalClassLoader ldr = new GridTestExternalClassLoader(new URL[] {
                new URL(path)
            });
            Class task = ldr.loadClass(TASK_NAME1);
            ignite1.compute().localDeployTask(task, task.getClassLoader());
            // Make the class unloadable from the peer: execution must fail.
            ldr.setExcludeClassNames(TASK_NAME1);
            executeFail(ignite1, ignite2, task);
            // Make it loadable again: with no missed-resource caching this must now succeed.
            ldr.setExcludeClassNames();
            ignite1.compute().execute(task, ignite2.cluster().localNode().id());
        }
        finally {
            stopGrid(1);
            stopGrid(2);
        }
    }

    /**
     * TODO https://issues.apache.org/jira/browse/IGNITE-603
     * @param depMode deployment mode.
     * @throws Exception If failed.
     */
//    private void processSize2Test(GridDeploymentMode depMode) throws Exception {
//        this.depMode = depMode;
//
//        missedResourceCacheSize = 2;
//
//        try {
//            Grid g1 = startGrid(1);
//            Grid g2 = startGrid(2);
//
//            String path = GridTestProperties.getProperty("p2p.uri.cls");
//
//            GridTestExternalClassLoader ldr = new GridTestExternalClassLoader(new URL[] {new URL(path)});
//
//            Class task1 = ldr.loadClass(TASK_NAME1);
//            Class task2 = ldr.loadClass(TASK_NAME2);
//            GridPredicate<GridEvent> filter1 = (GridPredicate<GridEvent>)ldr.loadClass(FILTER_NAME1).newInstance();
//            GridPredicate<GridEvent> filter2 = (GridPredicate<GridEvent>)ldr.loadClass(FILTER_NAME2).newInstance();
//
//            g1.execute(GridP2PTestTask.class, 777).get(); // Create events.
//
//            g1.deployTask(task1);
//            g1.deployTask(task2);
//            g1.queryEvents(filter1, 0, F.<ClusterNode>localNode(g1)); // Deploy filter1.
//            g1.queryEvents(filter2, 0, F.<ClusterNode>localNode(g2)); // Deploy filter2.
//
//            ldr.setExcludeClassNames(TASK_NAME1, TASK_NAME2, FILTER_NAME1, FILTER_NAME2);
//
//            executeFail(g1, filter1);
//            executeFail(g1, g2, task1);
//
//            ldr.setExcludeClassNames();
//
//            executeFail(g1, filter1);
//            executeFail(g1, g2, task1);
//
//            ldr.setExcludeClassNames(TASK_NAME1, TASK_NAME2, FILTER_NAME1, FILTER_NAME2);
//
//            executeFail(g1, filter2);
//            executeFail(g1, g2, task2);
//
//            ldr.setExcludeClassNames();
//
//            executeFail(g1, filter2);
//            executeFail(g1, g2, task2);
//
//            g1.queryEvents(filter1, 0, F.<ClusterNode>alwaysTrue());
//
//            g1.execute(task1, g2.localNode().id()).get();
//        }
//        finally {
//            stopGrid(1);
//            stopGrid(2);
//        }
//    }

    /**
     * Test GridDeploymentMode.PRIVATE mode.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize0PrivateMode() throws Exception {
        processSize0Test(DeploymentMode.PRIVATE);
    }

    /**
     * Test GridDeploymentMode.ISOLATED mode.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize0IsolatedMode() throws Exception {
        processSize0Test(DeploymentMode.ISOLATED);
    }

    /**
     * Test GridDeploymentMode.CONTINUOUS mode.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize0ContinuousMode() throws Exception {
        processSize0Test(DeploymentMode.CONTINUOUS);
    }

    /**
     * Test GridDeploymentMode.SHARED mode.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize0SharedMode() throws Exception {
        processSize0Test(DeploymentMode.SHARED);
    }

    /**
     * Test GridDeploymentMode.PRIVATE mode.
     * Disabled pending https://issues.apache.org/jira/browse/IGNITE-603.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize2PrivateMode() throws Exception {
//        processSize2Test(GridDeploymentMode.PRIVATE);
    }

    /**
     * Test GridDeploymentMode.ISOLATED mode.
     * Disabled pending https://issues.apache.org/jira/browse/IGNITE-603.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize2IsolatedMode() throws Exception {
//        processSize2Test(GridDeploymentMode.ISOLATED);
    }

    /**
     * Test GridDeploymentMode.CONTINUOUS mode.
     * Disabled pending https://issues.apache.org/jira/browse/IGNITE-603.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize2ContinuousMode() throws Exception {
//        processSize2Test(GridDeploymentMode.CONTINUOUS);
    }

    /**
     * Test GridDeploymentMode.SHARED mode.
     * Disabled pending https://issues.apache.org/jira/browse/IGNITE-603.
     *
     * @throws Exception if error occur.
     */
    @Test
    public void testSize2SharedMode() throws Exception {
//        processSize2Test(GridDeploymentMode.SHARED);
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.user.volume;
import org.apache.log4j.Logger;
import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiCommandJobType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseAsyncCreateCustomIdCmd;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ResponseObject.ResponseView;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.DiskOfferingResponse;
import org.apache.cloudstack.api.response.DomainResponse;
import org.apache.cloudstack.api.response.ProjectResponse;
import org.apache.cloudstack.api.response.SnapshotResponse;
import org.apache.cloudstack.api.response.UserVmResponse;
import org.apache.cloudstack.api.response.VolumeResponse;
import org.apache.cloudstack.api.response.ZoneResponse;
import org.apache.cloudstack.context.CallContext;
import com.cloud.event.EventTypes;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.storage.Snapshot;
import com.cloud.storage.Volume;
import com.cloud.vm.VirtualMachine;
@APICommand(name = "createVolume", responseObject = VolumeResponse.class, description = "Creates a disk volume from a disk offering. This disk volume must still be attached to a virtual machine to make use of it.", responseView = ResponseView.Restricted, entityType = {
        Volume.class, VirtualMachine.class},
        requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class CreateVolumeCmd extends BaseAsyncCreateCustomIdCmd {
    public static final Logger s_logger = Logger.getLogger(CreateVolumeCmd.class.getName());
    private static final String s_name = "createvolumeresponse";

    /////////////////////////////////////////////////////
    //////////////// API parameters /////////////////////
    /////////////////////////////////////////////////////

    @Parameter(name = ApiConstants.ACCOUNT,
               type = BaseCmd.CommandType.STRING,
               description = "the account associated with the disk volume. Must be used with the domainId parameter.")
    private String accountName;

    @Parameter(name = ApiConstants.PROJECT_ID,
               type = CommandType.UUID,
               entityType = ProjectResponse.class,
               description = "the project associated with the volume. Mutually exclusive with account parameter")
    private Long projectId;

    @Parameter(name = ApiConstants.DOMAIN_ID,
               type = CommandType.UUID,
               entityType = DomainResponse.class,
               description = "the domain ID associated with the disk offering. If used with the account parameter"
                   + " returns the disk volume associated with the account for the specified domain.")
    private Long domainId;

    @Parameter(name = ApiConstants.DISK_OFFERING_ID,
               required = false,
               type = CommandType.UUID,
               entityType = DiskOfferingResponse.class,
               description = "the ID of the disk offering. Either diskOfferingId or snapshotId must be passed in.")
    private Long diskOfferingId;

    @Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "the name of the disk volume")
    private String volumeName;

    @Parameter(name = ApiConstants.SIZE, type = CommandType.LONG, description = "Arbitrary volume size")
    private Long size;

    @Parameter(name = ApiConstants.MIN_IOPS, type = CommandType.LONG, description = "min iops")
    private Long minIops;

    @Parameter(name = ApiConstants.MAX_IOPS, type = CommandType.LONG, description = "max iops")
    private Long maxIops;

    @Parameter(name = ApiConstants.SNAPSHOT_ID,
               type = CommandType.UUID,
               entityType = SnapshotResponse.class,
               description = "the snapshot ID for the disk volume. Either diskOfferingId or snapshotId must be passed in.")
    private Long snapshotId;

    @Parameter(name = ApiConstants.ZONE_ID, type = CommandType.UUID, entityType = ZoneResponse.class, description = "the ID of the availability zone")
    private Long zoneId;

    @Parameter(name = ApiConstants.DISPLAY_VOLUME, type = CommandType.BOOLEAN, description = "an optional field, whether to display the volume to the end user or not.", authorized = {RoleType.Admin})
    private Boolean displayVolume;

    @Parameter(name = ApiConstants.VIRTUAL_MACHINE_ID,
               type = CommandType.UUID,
               entityType = UserVmResponse.class,
               description = "the ID of the virtual machine; to be used with snapshot Id, VM to which the volume gets attached after creation")
    private Long virtualMachineId;

    /////////////////////////////////////////////////////
    /////////////////// Accessors ///////////////////////
    /////////////////////////////////////////////////////

    public String getAccountName() {
        return accountName;
    }

    public Long getDiskOfferingId() {
        return diskOfferingId;
    }

    public Long getDomainId() {
        return domainId;
    }

    public String getVolumeName() {
        return volumeName;
    }

    public Long getSize() {
        return size;
    }

    public Long getMinIops() {
        return minIops;
    }

    public Long getMaxIops() {
        return maxIops;
    }

    public Long getSnapshotId() {
        return snapshotId;
    }

    public Long getZoneId() {
        return zoneId;
    }

    private Long getProjectId() {
        return projectId;
    }

    public Boolean getDisplayVolume() {
        return displayVolume;
    }

    @Override
    public boolean isDisplay() {
        // Default to visible when the optional displayvolume parameter was not supplied.
        return displayVolume == null || displayVolume;
    }

    public Long getVirtualMachineId() {
        return virtualMachineId;
    }

    /////////////////////////////////////////////////////
    /////////////// API Implementation///////////////////
    /////////////////////////////////////////////////////

    @Override
    public String getCommandName() {
        return s_name;
    }

    public static String getResultObjectName() {
        return "volume";
    }

    @Override
    public ApiCommandJobType getInstanceType() {
        return ApiCommandJobType.Volume;
    }

    /**
     * Resolves the owning account from the account/domain/project parameters,
     * falling back to the caller's own account when none can be determined.
     */
    @Override
    public long getEntityOwnerId() {
        Long accountId = _accountService.finalyzeAccountId(accountName, domainId, projectId, true);
        if (accountId == null) {
            return CallContext.current().getCallingAccount().getId();
        }
        return accountId;
    }

    @Override
    public String getEventType() {
        return EventTypes.EVENT_VOLUME_CREATE;
    }

    @Override
    public String getEventDescription() {
        return "creating volume: " + getVolumeName() + ((getSnapshotId() == null) ? "" : " from snapshot: " + this._uuidMgr.getUuid(Snapshot.class, getSnapshotId()));
    }

    /**
     * Async-create phase: allocates the volume record so the job framework has
     * an entity id/uuid to track before the actual creation runs.
     */
    @Override
    public void create() throws ResourceAllocationException {
        Volume volume = _volumeService.allocVolume(this);
        if (volume != null) {
            setEntityId(volume.getId());
            setEntityUuid(volume.getUuid());
        } else {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to create volume");
        }
    }

    /**
     * Async-execute phase: performs the actual volume creation and builds the
     * restricted-view API response.
     */
    @Override
    public void execute() {
        CallContext.current().setEventDetails("Volume Id: " + getEntityUuid() + ((getSnapshotId() == null) ? "" : " from snapshot: " + this._uuidMgr.getUuid(Snapshot.class, getSnapshotId())));
        Volume volume = _volumeService.createVolume(this);
        if (volume != null) {
            VolumeResponse response = _responseGenerator.createVolumeResponse(ResponseView.Restricted, volume);
            //FIXME - have to be moved to ApiResponseHelper
            if (getSnapshotId() != null) {
                Snapshot snap = _entityMgr.findById(Snapshot.class, getSnapshotId());
                if (snap != null) {
                    // If the volume was created from a snapshot, echo the
                    // snapshot uuid back in the response.
                    response.setSnapshotId(snap.getUuid());
                }
            }
            response.setResponseName(getCommandName());
            setResponseObject(response);
        } else {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to create a volume");
        }
    }
}
| |
/*
* Copyright (c) 2015 Lunci Hua
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package org.lunci.dumbthing.adapter;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import com.daimajia.swipe.SwipeLayout;
import com.daimajia.swipe.implments.SwipeItemAdapterMangerImpl;
import com.daimajia.swipe.interfaces.SwipeAdapterInterface;
import com.daimajia.swipe.interfaces.SwipeItemMangerInterface;
import com.daimajia.swipe.util.Attributes;
import org.lunci.dumbthing.R;
import org.lunci.dumbthing.dataModel.DumbModel;
import org.lunci.dumbthing.util.Utils;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
/**
 * List adapter showing {@link DumbModel} items inside swipe-to-reveal rows
 * (daimajia SwipeLayout). Each row exposes edit/delete/share/auto-share
 * actions that fire only while that row is the single open swipe item.
 *
 * Created by Lunci Hua on 2/2/2015.
 */
public class DumbItemListAdapter extends ArrayAdapter<DumbModel> implements SwipeItemMangerInterface, SwipeAdapterInterface {
    // NOTE(review): TAG is not referenced in this class — presumably kept for logging; confirm.
    private static final String TAG = DumbItemListAdapter.class.getSimpleName();
    // Delegate that tracks which swipe rows are open/closed.
    private SwipeItemAdapterMangerImpl mItemManger = new SwipeItemAdapterMangerImpl(this);

    /** Host callbacks for the per-row edit and delete actions. */
    public static interface DumbItemListAdapterCallbacks {
        void onItemEdit(int position, View view);

        void onItemDelete(int position, View view);
    }

    // Null-object default so mCallbacks can be invoked unconditionally.
    private static final DumbItemListAdapterCallbacks DummyCallbacks = new DumbItemListAdapterCallbacks() {
        @Override
        public void onItemEdit(int position, View view) {
        }

        @Override
        public void onItemDelete(int position, View view) {
        }
    };

    private DumbItemListAdapterCallbacks mCallbacks = DummyCallbacks;

    public DumbItemListAdapter(Context context) {
        // Resource id -1 is never used: getView() always inflates its own layout.
        super(context, -1);
        // Only one row may be swiped open at a time.
        mItemManger.setMode(Attributes.Mode.Single);
    }

    public DumbItemListAdapter(Context context, DumbItemListAdapterCallbacks callbacks) {
        this(context);
        mCallbacks = callbacks;
    }

    @Override
    public boolean areAllItemsEnabled() {
        return true;
    }

    @Override
    public boolean isEnabled(int position) {
        return true;
    }

    @Override
    public long getItemId(int position) {
        // Stable id comes from the model's own database id.
        return getItem(position).getId();
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }

    /**
     * Standard view-holder recycling: inflate and wire the row once, then
     * rebind model data on every call. The holder observes its model so field
     * changes update the row in place.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            final LayoutInflater mInflater = (LayoutInflater) this.getContext()
                .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = mInflater.inflate(R.layout.dumb_item_list_simple, parent, false);
            mItemManger.initialize(convertView, position);
            final ViewHolder holder = new ViewHolder();
            convertView.setTag(holder);
            final TextView mDataTime = (TextView) convertView.findViewById(R.id.textView_datetime);
            final TextView mContent = (TextView) convertView.findViewById(R.id.textView_content);
            holder.mContent = mContent;
            holder.mDate = mDataTime;
            holder.mRoot = convertView;
            final View buttonEdit = convertView.findViewById(R.id.imageButton_edit);
            final View buttonDelete = convertView.findViewById(R.id.imageButton_delete);
            final View buttonShare = convertView.findViewById(R.id.imageView_share);
            final View buttonAutoShare = convertView.findViewById(R.id.imageView_auto_share);
            holder.mDeleteButton = buttonDelete;
            holder.mEditButton = buttonEdit;
            holder.mShareButton = buttonShare;
            holder.mAutoShareButton = buttonAutoShare;
            final SwipeLayout swipe = (SwipeLayout) convertView;
            swipe.setDragEdge(SwipeLayout.DragEdge.Left);
            swipe.setShowMode(SwipeLayout.ShowMode.PullOut);
            swipe.setSwipeEnabled(true);
            try {
                holder.setup();
            } catch (NullPointerException ex) {
                // NOTE(review): swallowed so a missing sub-view doesn't crash the
                // list; the row's buttons simply stay unwired. Confirm intended.
                ex.printStackTrace();
            }
        }
        mItemManger.updateConvertView(convertView, position);
        if (convertView.getTag() instanceof ViewHolder) {
            final DumbModel model = getItem(position);
            // Drop observers left over from a previously recycled holder.
            model.deleteObservers();
            final ViewHolder holder = (ViewHolder) convertView.getTag();
            holder.mDate.setText(model.getCreatedAt());
            holder.mContent.setText(model.getContent());
            holder.mCurrentIndex = position;
            model.addObserver(holder);
        }
        return convertView;
    }

    @Override
    public int getSwipeLayoutResourceId(int i) {
        return R.id.swipeLayout_root;
    }

    // The SwipeItemMangerInterface methods below are pure delegation to mItemManger.

    @Override
    public void openItem(int position) {
        mItemManger.openItem(position);
    }

    @Override
    public void closeItem(int position) {
        mItemManger.closeItem(position);
    }

    @Override
    public void closeAllExcept(SwipeLayout layout) {
        mItemManger.closeAllExcept(layout);
    }

    @Override
    public void closeAllItems() {
        mItemManger.closeAllItems();
    }

    @Override
    public List<Integer> getOpenItems() {
        return mItemManger.getOpenItems();
    }

    @Override
    public List<SwipeLayout> getOpenLayouts() {
        return mItemManger.getOpenLayouts();
    }

    @Override
    public void removeShownLayouts(SwipeLayout layout) {
        mItemManger.removeShownLayouts(layout);
    }

    @Override
    public boolean isOpen(int position) {
        return mItemManger.isOpen(position);
    }

    @Override
    public Attributes.Mode getMode() {
        return mItemManger.getMode();
    }

    @Override
    public void setMode(Attributes.Mode mode) {
        mItemManger.setMode(mode);
    }

    public void setCallbacks(DumbItemListAdapterCallbacks callbacks) {
        mCallbacks = callbacks;
    }

    /**
     * Per-row view cache. Also an {@link Observer} of the bound model so
     * content/date changes refresh the row without a full rebind.
     */
    private final class ViewHolder implements Observer {
        public TextView mDate;
        public TextView mContent;
        public View mDeleteButton;
        public View mEditButton;
        public View mShareButton;
        public View mAutoShareButton;
        public View mRoot;
        // NOTE(review): mSlideIndicator is never assigned or read here — confirm it is still needed.
        public View mSlideIndicator;
        public int mCurrentIndex;

        /**
         * Wires the action buttons. Each handler acts on the single open swipe
         * row (mode is {@code Single}), not on this holder's own position.
         *
         * @throws NullPointerException if a required sub-view was not found
         */
        public void setup() throws NullPointerException {
            mDeleteButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mItemManger.getOpenItems().size() == 1) {
                        final int pos = mItemManger.getOpenItems().get(0);
                        mCallbacks.onItemDelete(pos, mRoot);
                    }
                }
            });
            mEditButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mItemManger.getOpenItems().size() == 1) {
                        final int pos = mItemManger.getOpenItems().get(0);
                        mCallbacks.onItemEdit(pos, mRoot);
                    }
                }
            });
            mShareButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mItemManger.getOpenItems().size() == 1) {
                        final int pos = mItemManger.getOpenItems().get(0);
                        Utils.shareText(getContext(), getItem(pos).getContent());
                    }
                }
            });
            mAutoShareButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mItemManger.getOpenItems().size() == 1) {
                        final int pos = mItemManger.getOpenItems().get(0);
                        Utils.autoShareText(getContext(), getItem(pos).getContent());
                    }
                }
            });
        }

        /** Refreshes the affected row field when the observed model reports a change. */
        @Override
        public void update(Observable observable, Object data) {
            if (observable instanceof DumbModel) {
                final DumbModel model = (DumbModel) observable;
                if (data instanceof String) {
                    switch ((String) data) {
                        case DumbModel.Content_Field:
                            mContent.setText(model.getContent());
                            break;
                        case DumbModel.CreatedAt_Field:
                            mDate.setText(model.getCreatedAt());
                            break;
                        default:
                            break;
                    }
                }
            }
        }
    }
}
| |
package ca.uhn.fhir.util;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import javax.xml.namespace.NamespaceContext;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
/**
 * An {@link XMLStreamWriter} decorator that collapses runs of insignificant
 * whitespace in character content down to single spaces, so pretty-printed
 * XML is re-serialized compactly. Text anywhere inside a {@code <pre>}
 * element subtree is passed through verbatim. All other events are simply
 * delegated to the wrapped writer.
 */
public class NonPrettyPrintWriterWrapper implements XMLStreamWriter {

	// Local name of the element whose subtree keeps its whitespace untouched.
	private static final String PRE = "pre";

	private XMLStreamWriter myTarget;

	// Nesting depth within a <pre> subtree; whitespace is preserved while > 0.
	private int myInsidePre = 0;

	/**
	 * Constructor.
	 *
	 * @param target the writer that actually receives every event
	 */
	public NonPrettyPrintWriterWrapper(XMLStreamWriter target) {
		myTarget = target;
	}

	// Pure delegation.
	@Override
	public void flush() throws XMLStreamException {
		myTarget.flush();
	}

	// Pure delegation.
	@Override
	public void close() throws XMLStreamException {
		myTarget.close();
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public String getPrefix(String theUri) throws XMLStreamException {
		return myTarget.getPrefix(theUri);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void setPrefix(String thePrefix, String theUri) throws XMLStreamException {
		myTarget.setPrefix(thePrefix, theUri);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void setDefaultNamespace(String theUri) throws XMLStreamException {
		myTarget.setDefaultNamespace(theUri);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void setNamespaceContext(NamespaceContext theContext) throws XMLStreamException {
		myTarget.setNamespaceContext(theContext);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public NamespaceContext getNamespaceContext() {
		return myTarget.getNamespaceContext();
	}

	// Entering a <pre>, or any element while already inside one: bump the
	// depth counter so the matching end-element decrements balance out.
	@Override
	public void writeStartElement(String theLocalName) throws XMLStreamException {
		if (PRE.equals(theLocalName) || myInsidePre > 0) {
			myInsidePre++;
		}
		myTarget.writeStartElement(theLocalName);
	}

	// Same <pre>-depth tracking as the single-argument overload.
	@Override
	public void writeStartElement(String theNamespaceURI, String theLocalName) throws XMLStreamException {
		if (PRE.equals(theLocalName) || myInsidePre > 0) {
			myInsidePre++;
		}
		myTarget.writeStartElement(theNamespaceURI, theLocalName);
	}

	// Same <pre>-depth tracking as the single-argument overload.
	@Override
	public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException {
		if (PRE.equals(theLocalName) || myInsidePre > 0) {
			myInsidePre++;
		}
		myTarget.writeStartElement(thePrefix, theLocalName, theNamespaceURI);
	}

	// Pure delegation (empty elements cannot contain text, so no depth change).
	@Override
	@CoverageIgnore
	public void writeEmptyElement(String theNamespaceURI, String theLocalName) throws XMLStreamException {
		myTarget.writeEmptyElement(theNamespaceURI, theLocalName);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException {
		myTarget.writeEmptyElement(thePrefix, theLocalName, theNamespaceURI);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeEmptyElement(String theLocalName) throws XMLStreamException {
		myTarget.writeEmptyElement(theLocalName);
	}

	// Leaving one level of a <pre> subtree; the counter only moves while inside one.
	@Override
	public void writeEndElement() throws XMLStreamException {
		if (myInsidePre > 0) {
			myInsidePre--;
		}
		myTarget.writeEndElement();
	}

	// Pure delegation.
	@Override
	public void writeEndDocument() throws XMLStreamException {
		myTarget.writeEndDocument();
	}

	// Pure delegation.
	@Override
	public void writeAttribute(String theLocalName, String theValue) throws XMLStreamException {
		myTarget.writeAttribute(theLocalName, theValue);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeAttribute(String thePrefix, String theNamespaceURI, String theLocalName, String theValue) throws XMLStreamException {
		myTarget.writeAttribute(thePrefix, theNamespaceURI, theLocalName, theValue);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeAttribute(String theNamespaceURI, String theLocalName, String theValue) throws XMLStreamException {
		myTarget.writeAttribute(theNamespaceURI, theLocalName, theValue);
	}

	// Pure delegation.
	@Override
	public void writeNamespace(String thePrefix, String theNamespaceURI) throws XMLStreamException {
		myTarget.writeNamespace(thePrefix, theNamespaceURI);
	}

	// Pure delegation.
	@Override
	public void writeDefaultNamespace(String theNamespaceURI) throws XMLStreamException {
		myTarget.writeDefaultNamespace(theNamespaceURI);
	}

	// Pure delegation.
	@Override
	public void writeComment(String theData) throws XMLStreamException {
		myTarget.writeComment(theData);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeProcessingInstruction(String theTarget) throws XMLStreamException {
		myTarget.writeProcessingInstruction(theTarget);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeProcessingInstruction(String theTarget, String theData) throws XMLStreamException {
		myTarget.writeProcessingInstruction(theTarget, theData);
	}

	// Pure delegation. NOTE(review): CDATA is written verbatim even outside
	// <pre> — presumably intentional, since CDATA is explicit literal text.
	@Override
	@CoverageIgnore
	public void writeCData(String theData) throws XMLStreamException {
		myTarget.writeCData(theData);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeDTD(String theDtd) throws XMLStreamException {
		myTarget.writeDTD(theDtd);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeEntityRef(String theName) throws XMLStreamException {
		myTarget.writeEntityRef(theName);
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeStartDocument() throws XMLStreamException {
		myTarget.writeStartDocument();
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public void writeStartDocument(String theVersion) throws XMLStreamException {
		myTarget.writeStartDocument(theVersion);
	}

	// Pure delegation.
	@Override
	public void writeStartDocument(String theEncoding, String theVersion) throws XMLStreamException {
		myTarget.writeStartDocument(theEncoding, theVersion);
	}

	/**
	 * Writes character content. Inside a {@code <pre>} subtree the text is
	 * forwarded verbatim; otherwise it goes through the whitespace-collapsing
	 * char[] path below.
	 */
	@Override
	public void writeCharacters(String theText) throws XMLStreamException {
		if (myInsidePre > 0) {
			myTarget.writeCharacters(theText);
		} else {
			writeCharacters(theText.toCharArray(), 0, theText.length());
		}
	}

	@Override
	public void writeCharacters(char[] theText, int theStart, int theLen) throws XMLStreamException {
		writeCharacters(theText, theStart, theLen, myTarget, myInsidePre);
	}

	/**
	 * Shared whitespace-collapsing write. Outside a {@code <pre>} subtree
	 * ({@code insidePre == 0}), a leading and/or trailing whitespace run is
	 * collapsed to at most one space each; inside, text passes through
	 * verbatim. A zero-length write is a no-op.
	 */
	static void writeCharacters(char[] theText, int theStart, int theLen, XMLStreamWriter target, int insidePre) throws XMLStreamException {
		if (theLen > 0) {
			if (insidePre > 0) {
				target.writeCharacters(theText, theStart, theLen);
			} else {
				// Inclusive index of the last character of the input span.
				int initialEnd = theStart + (theLen - 1);
				int start = theStart;
				int end = initialEnd;
				// Skip leading whitespace; start never passes end, so the
				// array accesses stay in bounds.
				while (Character.isWhitespace(theText[start]) && start < end) {
					start++;
				}
				// Skip trailing whitespace.
				while (Character.isWhitespace(theText[end]) && end > start) {
					end--;
				}
				if (start == end) {
					// Span collapsed to a single character: if it is itself
					// whitespace, the input was all-whitespace — emit one
					// separator space and stop.
					if (Character.isWhitespace(theText[start])) {
						target.writeCharacters(" ");
						return;
					}
				}
				// One space stands in for any trimmed leading whitespace.
				if (start > theStart) {
					target.writeCharacters(" ");
				}
				target.writeCharacters(theText, start, (end - start) + 1);
				// Likewise for trimmed trailing whitespace.
				if (end < initialEnd) {
					target.writeCharacters(" ");
				}
			}
		}
	}

	// Pure delegation.
	@Override
	@CoverageIgnore
	public Object getProperty(String theName) throws IllegalArgumentException {
		return myTarget.getProperty(theName);
	}
}
| |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.autofill;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ListAdapter;
import android.widget.ListPopupWindow;
import android.widget.TextView;
import java.util.ArrayList;
import org.chromium.chrome.R;
import org.chromium.content.browser.ContainerViewDelegate;
import org.chromium.ui.gfx.NativeWindow;
/**
* The Autofill suggestion popup that lists relevant suggestions.
*/
/**
 * The Autofill suggestion popup that lists relevant suggestions. Attaches
 * itself to an invisible {@link AnchorView} placed over the focused text
 * field, and reports selections/dismissal through
 * {@link AutofillPopupDelegate}.
 */
public class AutofillPopup extends ListPopupWindow implements AdapterView.OnItemClickListener {

    /**
     * Constants defining types of Autofill suggestion entries.
     * Have to be kept in sync with the enum in WebAutofillClient.h
     *
     * Not supported: MenuItemIDWarningMessage, MenuItemIDSeparator, MenuItemIDClearForm, and
     * MenuItemIDAutofillOptions.
     */
    private static final int ITEM_ID_AUTOCOMPLETE_ENTRY = 0;
    private static final int ITEM_ID_PASSWORD_ENTRY = -2;
    private static final int ITEM_ID_DATA_LIST_ENTRY = -6;

    // Horizontal padding (in dp) added to the measured width of the widest row.
    private static final int TEXT_PADDING_DP = 30;

    private final AutofillPopupDelegate mAutofillCallback;
    private final NativeWindow mNativeWindow;
    private final ContainerViewDelegate mContainerViewDelegate;
    private AnchorView mAnchorView;
    // Screen rectangle of the text field this popup anchors to; set via setAnchorRect().
    private Rect mAnchorRect;
    // Paints used to measure suggestion text widths; lazily created in getDesiredWidth().
    private Paint mNameViewPaint;
    private Paint mLabelViewPaint;

    /**
     * An interface to handle the touch interaction with an AutofillPopup object.
     */
    public interface AutofillPopupDelegate {
        /**
         * Confirms the dismissal of the java AutofillPopup object.
         */
        public void dismissed();

        /**
         * Handles the selection of an Autofill suggestion from an AutofillPopup.
         * @param listIndex The index of the selected Autofill suggestion.
         * @param value The value of the selected Autofill suggestion.
         * @param uniqueId The unique id of the selected Autofill suggestion.
         */
        public void suggestionSelected(int listIndex, String value, int uniqueId);
    }

    // ListPopupWindow needs an anchor view to determine its size and position. We create a view
    // with the given desired width at the text edit area as a stand-in. This is "Fake" in the
    // sense that it draws nothing, accepts no input, and thwarts all attempts at laying it out
    // "properly".
    private static class AnchorView extends View {
        // Orientation at creation time; the popup is dismissed when it changes.
        private int mCurrentOrientation;
        private AutofillPopup mAutofillPopup;

        AnchorView(Context c, AutofillPopup autofillPopup) {
            super(c);
            mAutofillPopup = autofillPopup;
            mCurrentOrientation = getResources().getConfiguration().orientation;
            // Show the popup only once this anchor has actually been laid out,
            // so ListPopupWindow can measure against a real position.
            addOnLayoutChangeListener(new OnLayoutChangeListener() {
                @Override
                public void onLayoutChange(View v, int left, int top, int right, int bottom,
                        int oldLeft, int oldTop, int oldRight, int oldBottom) {
                    if (v instanceof AnchorView) mAutofillPopup.show();
                }
            });
        }

        @Override
        protected void onConfigurationChanged(Configuration newConfig) {
            super.onConfigurationChanged(newConfig);
            // A rotation invalidates the anchor rectangle, so drop the popup.
            if (newConfig.orientation != mCurrentOrientation) mAutofillPopup.dismiss();
        }

        // Positions and sizes this anchor over the text field described by r,
        // clamped to the screen and compensated for the popup's background padding.
        public void setSize(Rect r, int desiredWidth) {
            int width = Math.max(desiredWidth, r.right - r.left);
            // Make sure that the anchor view does not go outside the screen.
            Point size = new Point();
            WindowManager wm = (WindowManager) getContext().getSystemService(
                    Context.WINDOW_SERVICE);
            wm.getDefaultDisplay().getSize(size);
            if (r.left + width > size.x) width = size.x - r.left;
            int height = r.bottom - r.top;
            // Get rid of the padding added by ListPopupWindow class.
            Drawable popupBackground = mAutofillPopup.getBackground();
            Rect paddingRect = new Rect();
            if (popupBackground != null) popupBackground.getPadding(paddingRect);
            width += paddingRect.left + paddingRect.right;
            FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(width, height);
            lp.leftMargin = r.left - paddingRect.left;
            lp.topMargin = r.top;
            setLayoutParams(lp);
        }
    }

    /**
     * Creates an AutofillWindow with specified parameters.
     * @param nativeWindow NativeWindow used to get application context.
     * @param containerViewDelegate View delegate used to add and remove views.
     * @param autofillCallback An object that handles the calls to the native AutofillPopupView.
     */
    public AutofillPopup(NativeWindow nativeWindow, ContainerViewDelegate containerViewDelegate,
            AutofillPopupDelegate autofillCallback) {
        super(nativeWindow.getContext());
        mNativeWindow = nativeWindow;
        mContainerViewDelegate = containerViewDelegate;
        mAutofillCallback = autofillCallback;
        setOnItemClickListener(this);
        // The anchor lives in the container view for the popup's whole lifetime;
        // it is removed again in dismiss().
        mAnchorView = new AnchorView(mNativeWindow.getContext(), this);
        mContainerViewDelegate.addViewToContainerView(mAnchorView);
        setAnchorView(mAnchorView);
    }

    /**
     * Sets the location and the size of the anchor view that the AutofillPopup will use to attach
     * itself.
     * @param x X coordinate of the top left corner of the anchor view.
     * @param y Y coordinate of the top left corner of the anchor view.
     * @param width The width of the anchor view.
     * @param height The height of the anchor view.
     */
    public void setAnchorRect(float x, float y, float width, float height) {
        mAnchorRect = new Rect((int) x, (int) y, (int) (x + width), (int) (y + height));
    }

    /**
     * Sets the Autofill suggestions to display in the popup and shows the popup.
     * @param suggestions Autofill suggestion data.
     */
    public void show(AutofillSuggestion[] suggestions) {
        // Remove the AutofillSuggestions with IDs that are not supported by Android
        ArrayList<AutofillSuggestion> cleanedData = new ArrayList<AutofillSuggestion>();
        for (int i = 0; i < suggestions.length; i++) {
            int itemId = suggestions[i].mUniqueId;
            if (itemId > 0 || itemId == ITEM_ID_AUTOCOMPLETE_ENTRY ||
                    itemId == ITEM_ID_PASSWORD_ENTRY || itemId == ITEM_ID_DATA_LIST_ENTRY) {
                cleanedData.add(suggestions[i]);
            }
        }
        setAdapter(new AutofillListAdapter(mNativeWindow.getContext(), cleanedData));
        // Once the mAnchorRect is resized and placed correctly, it will show the Autofill popup.
        // NOTE(review): width is measured from the raw suggestions array, not cleanedData,
        // so filtered-out entries can still widen the popup — confirm this is intended.
        mAnchorView.setSize(mAnchorRect, getDesiredWidth(suggestions));
    }

    /**
     * Dismisses the popup and calls to mAutofillCallback.dismissed().
     */
    @Override
    public void dismiss() {
        super.dismiss();
        // Tear down the fake anchor added in the constructor before notifying.
        mContainerViewDelegate.removeViewFromContainerView(mAnchorView);
        mAutofillCallback.dismissed();
    }

    /**
     * Get desired popup window width by calculating the maximum text length from Autofill data.
     * @param data Autofill suggestion data.
     * @return The popup window width.
     */
    private int getDesiredWidth(AutofillSuggestion[] data) {
        if (mNameViewPaint == null || mLabelViewPaint == null) {
            // Lazily inflate the row layout once, purely to borrow its text paints
            // for measurement.
            LayoutInflater inflater =
                    (LayoutInflater) mNativeWindow.getContext().getSystemService(
                            Context.LAYOUT_INFLATER_SERVICE);
            View layout = inflater.inflate(R.layout.autofill_text, null);
            TextView nameView = (TextView) layout.findViewById(R.id.autofill_name);
            mNameViewPaint = nameView.getPaint();
            TextView labelView = (TextView) layout.findViewById(R.id.autofill_label);
            mLabelViewPaint = labelView.getPaint();
        }
        int maxTextWidth = 0;
        Rect bounds = new Rect();
        // A row's width is the name width plus the label width; track the maximum.
        for (int i = 0; i < data.length; ++i) {
            bounds.setEmpty();
            String name = data[i].mName;
            int width = 0;
            mNameViewPaint.getTextBounds(name, 0, name.length(), bounds);
            width += bounds.width();
            bounds.setEmpty();
            String label = data[i].mLabel;
            mLabelViewPaint.getTextBounds(label, 0, label.length(), bounds);
            width += bounds.width();
            maxTextWidth = Math.max(width, maxTextWidth);
        }
        // Adding padding.
        return maxTextWidth + (int) (TEXT_PADDING_DP *
                mNativeWindow.getContext().getResources().getDisplayMetrics().density);
    }

    // Forwards a row tap to the delegate; a non-AutofillSuggestion item would
    // indicate a programming error (logged and asserted, not crashed on).
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        try {
            ListAdapter adapter = (ListAdapter) parent.getAdapter();
            AutofillSuggestion data = (AutofillSuggestion) adapter.getItem(position);
            mAutofillCallback.suggestionSelected(position, data.mName, data.mUniqueId);
        } catch (ClassCastException e) {
            Log.w("AutofillWindow", "error in onItemClick", e);
            assert false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.direct;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Executors;
import org.apache.beam.runners.core.DoFnRunners;
import org.apache.beam.runners.core.DoFnRunners.OutputManager;
import org.apache.beam.runners.core.KeyedWorkItem;
import org.apache.beam.runners.core.OutputAndTimeBoundedSplittableProcessElementInvoker;
import org.apache.beam.runners.core.OutputWindowedValue;
import org.apache.beam.runners.core.PushbackSideInputDoFnRunner;
import org.apache.beam.runners.core.ReadyCheckingSideInputReader;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems.ProcessElements;
import org.apache.beam.runners.core.SplittableParDoViaKeyedWorkItems.ProcessFn;
import org.apache.beam.runners.core.StateInternals;
import org.apache.beam.runners.core.StateInternalsFactory;
import org.apache.beam.runners.core.TimerInternals;
import org.apache.beam.runners.core.TimerInternalsFactory;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.Duration;
import org.joda.time.Instant;
/**
 * A {@link TransformEvaluatorFactory} for the direct runner's expansion of splittable
 * {@code DoFn}s. It evaluates {@link ProcessElements} applications by delegating to a
 * {@link ParDoEvaluatorFactory} built around a {@link ProcessFn}, and wires that process
 * function up with direct-runner state internals, timer internals, output routing, and a
 * time/output-bounded checkpointing invoker.
 */
class SplittableProcessElementsEvaluatorFactory<
        InputT, OutputT, RestrictionT, TrackerT extends RestrictionTracker<RestrictionT>>
    implements TransformEvaluatorFactory {
  private final ParDoEvaluatorFactory<KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT>
      delegateFactory;
  private final EvaluationContext evaluationContext;

  SplittableProcessElementsEvaluatorFactory(EvaluationContext evaluationContext) {
    this.evaluationContext = evaluationContext;
    this.delegateFactory =
        new ParDoEvaluatorFactory<>(
            evaluationContext,
            SplittableProcessElementsEvaluatorFactory
                .<InputT, OutputT, RestrictionT>processFnRunnerFactory(),
            ParDoEvaluatorFactory.basicDoFnCacheLoader());
  }

  // Erases the generics of createEvaluator's result; the raw casts are confined here.
  @Override
  public <T> TransformEvaluator<T> forApplication(
      AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) throws Exception {
    @SuppressWarnings({"unchecked", "rawtypes"})
    TransformEvaluator<T> evaluator =
        (TransformEvaluator<T>)
            createEvaluator((AppliedPTransform) application, (CommittedBundle) inputBundle);
    return evaluator;
  }

  @Override
  public void cleanup() throws Exception {
    delegateFactory.cleanup();
  }

  /**
   * Builds the evaluator for one bundle: obtains the managed {@link ProcessFn}, creates the
   * underlying ParDo evaluator with it, then injects state/timer factories, an output sink,
   * and a checkpointing process-element invoker before wrapping the result for lifecycle
   * cleanup. The ordering matters: the fn must come from the lifecycle manager before the
   * evaluator is created, and the factories are set on that same instance afterwards.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  private TransformEvaluator<KeyedWorkItem<String, KV<InputT, RestrictionT>>> createEvaluator(
      AppliedPTransform<
              PCollection<KeyedWorkItem<String, KV<InputT, RestrictionT>>>, PCollectionTuple,
              ProcessElements<InputT, OutputT, RestrictionT, TrackerT>>
          application,
      CommittedBundle<InputT> inputBundle)
      throws Exception {
    final ProcessElements<InputT, OutputT, RestrictionT, TrackerT> transform =
        application.getTransform();
    ProcessFn<InputT, OutputT, RestrictionT, TrackerT> processFn =
        transform.newProcessFn(transform.getFn());
    DoFnLifecycleManager fnManager = DoFnLifecycleManager.of(processFn);
    // Use the manager-owned instance from here on so teardown is handled correctly.
    processFn =
        ((ProcessFn<InputT, OutputT, RestrictionT, TrackerT>)
            fnManager.<KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT>get());
    String stepName = evaluationContext.getStepName(application);
    final DirectExecutionContext.DirectStepContext stepContext =
        evaluationContext
            .getExecutionContext(application, inputBundle.getKey())
            .getStepContext(stepName);
    final ParDoEvaluator<KeyedWorkItem<String, KV<InputT, RestrictionT>>>
        parDoEvaluator =
            delegateFactory.createParDoEvaluator(
                application,
                inputBundle.getKey(),
                (PCollection<KeyedWorkItem<String, KV<InputT, RestrictionT>>>)
                    inputBundle.getPCollection(),
                transform.getSideInputs(),
                transform.getMainOutputTag(),
                transform.getAdditionalOutputTags().getAll(),
                stepContext,
                processFn,
                fnManager);
    // State and timers are keyed per step; the String key is unused for lookup here.
    processFn.setStateInternalsFactory(
        new StateInternalsFactory<String>() {
          @SuppressWarnings({"unchecked", "rawtypes"})
          @Override
          public StateInternals stateInternalsForKey(String key) {
            return (StateInternals) stepContext.stateInternals();
          }
        });
    processFn.setTimerInternalsFactory(
        new TimerInternalsFactory<String>() {
          @Override
          public TimerInternals timerInternalsForKey(String key) {
            return stepContext.timerInternals();
          }
        });
    // Routes windowed outputs into the evaluator's output manager, for both the
    // main output tag and additional output tags.
    OutputWindowedValue<OutputT> outputWindowedValue =
        new OutputWindowedValue<OutputT>() {
          private final OutputManager outputManager = parDoEvaluator.getOutputManager();

          @Override
          public void outputWindowedValue(
              OutputT output,
              Instant timestamp,
              Collection<? extends BoundedWindow> windows,
              PaneInfo pane) {
            outputManager.output(
                transform.getMainOutputTag(), WindowedValue.of(output, timestamp, windows, pane));
          }

          @Override
          public <AdditionalOutputT> void outputWindowedValue(
              TupleTag<AdditionalOutputT> tag,
              AdditionalOutputT output,
              Instant timestamp,
              Collection<? extends BoundedWindow> windows,
              PaneInfo pane) {
            outputManager.output(tag, WindowedValue.of(output, timestamp, windows, pane));
          }
        };
    processFn.setProcessElementInvoker(
        new OutputAndTimeBoundedSplittableProcessElementInvoker<
            InputT, OutputT, RestrictionT, TrackerT>(
            transform.getFn(),
            evaluationContext.getPipelineOptions(),
            outputWindowedValue,
            evaluationContext.createSideInputReader(transform.getSideInputs()),
            // TODO: For better performance, use a higher-level executor?
            // TODO: (BEAM-723) Create a shared ExecutorService for maintenance tasks in the
            // DirectRunner.
            Executors.newSingleThreadScheduledExecutor(
                new ThreadFactoryBuilder()
                    .setThreadFactory(MoreExecutors.platformThreadFactory())
                    .setDaemon(true)
                    .setNameFormat("direct-splittable-process-element-checkpoint-executor")
                    .build()),
            // Setting small values here to stimulate frequent checkpointing and better exercise
            // splittable DoFn's in that respect.
            100,
            Duration.standardSeconds(1)));
    return DoFnLifecycleManagerRemovingTransformEvaluator.wrapping(parDoEvaluator, fnManager);
  }

  /**
   * Returns the runner factory the delegate uses to build a {@link PushbackSideInputDoFnRunner}
   * around a {@link ProcessFn}. The supplied {@code fn} is expected to be a ProcessFn (it is
   * cast unchecked).
   */
  private static <InputT, OutputT, RestrictionT>
      ParDoEvaluator.DoFnRunnerFactory<KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT>
          processFnRunnerFactory() {
    return new ParDoEvaluator.DoFnRunnerFactory<
        KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT>() {
      @Override
      public PushbackSideInputDoFnRunner<
              KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT>
          createRunner(
              PipelineOptions options,
              DoFn<KeyedWorkItem<String, KV<InputT, RestrictionT>>, OutputT> fn,
              List<PCollectionView<?>> sideInputs,
              ReadyCheckingSideInputReader sideInputReader,
              OutputManager outputManager,
              TupleTag<OutputT> mainOutputTag,
              List<TupleTag<?>> additionalOutputTags,
              DirectExecutionContext.DirectStepContext stepContext,
              WindowingStrategy<?, ? extends BoundedWindow> windowingStrategy) {
        ProcessFn<InputT, OutputT, RestrictionT, ?> processFn =
            (ProcessFn) fn;
        return DoFnRunners.newProcessFnRunner(
            processFn,
            options,
            sideInputs,
            sideInputReader,
            outputManager,
            mainOutputTag,
            additionalOutputTags,
            stepContext,
            windowingStrategy);
      }
    };
  }
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.scapemod.bytecode.asm;
/**
* Defines the JVM opcodes, access flags and array type codes. This interface
* does not define all the JVM opcodes because some opcodes are automatically
* handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
* by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
* opcodes are therefore not defined in this interface. Likewise for LDC,
* automatically replaced by LDC_W or LDC2_W when necessary, WIDE, GOTO_W and
* JSR_W.
*
* @author Eric Bruneton
* @author Eugene Kuleshov
*/
/**
 * Defines the JVM opcodes, access flags and array type codes. This interface
 * does not define all the JVM opcodes because some opcodes are automatically
 * handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
 * by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
 * opcodes are therefore not defined in this interface. Likewise for LDC,
 * automatically replaced by LDC_W or LDC2_W when necessary, WIDE, GOTO_W and
 * JSR_W.
 *
 * @author Eric Bruneton
 * @author Eugene Kuleshov
 */
public interface Opcodes {

    // ASM API versions

    int ASM4 = 4 << 16 | 0 << 8 | 0;

    // versions

    int V1_1 = 3 << 16 | 45;
    int V1_2 = 0 << 16 | 46;
    int V1_3 = 0 << 16 | 47;
    int V1_4 = 0 << 16 | 48;
    int V1_5 = 0 << 16 | 49;
    int V1_6 = 0 << 16 | 50;
    int V1_7 = 0 << 16 | 51;

    // access flags

    int ACC_PUBLIC = 0x0001; // class, field, method
    int ACC_PRIVATE = 0x0002; // class, field, method
    int ACC_PROTECTED = 0x0004; // class, field, method
    int ACC_STATIC = 0x0008; // field, method
    int ACC_FINAL = 0x0010; // class, field, method
    int ACC_SUPER = 0x0020; // class
    int ACC_SYNCHRONIZED = 0x0020; // method
    int ACC_VOLATILE = 0x0040; // field
    int ACC_BRIDGE = 0x0040; // method
    int ACC_VARARGS = 0x0080; // method
    int ACC_TRANSIENT = 0x0080; // field
    int ACC_NATIVE = 0x0100; // method
    int ACC_INTERFACE = 0x0200; // class
    int ACC_ABSTRACT = 0x0400; // class, method
    int ACC_STRICT = 0x0800; // method
    int ACC_SYNTHETIC = 0x1000; // class, field, method
    int ACC_ANNOTATION = 0x2000; // class
    int ACC_ENUM = 0x4000; // class(?) field inner

    // ASM specific pseudo access flags

    int ACC_DEPRECATED = 0x20000; // class, field, method

    // types for NEWARRAY

    int T_BOOLEAN = 4;
    int T_CHAR = 5;
    int T_FLOAT = 6;
    int T_DOUBLE = 7;
    int T_BYTE = 8;
    int T_SHORT = 9;
    int T_INT = 10;
    int T_LONG = 11;

    // tags for Handle

    int H_GETFIELD = 1;
    int H_GETSTATIC = 2;
    int H_PUTFIELD = 3;
    int H_PUTSTATIC = 4;
    int H_INVOKEVIRTUAL = 5;
    int H_INVOKESTATIC = 6;
    int H_INVOKESPECIAL = 7;
    int H_NEWINVOKESPECIAL = 8;
    int H_INVOKEINTERFACE = 9;

    // stack map frame types

    /**
     * Represents an expanded frame. See {@link ClassReader#EXPAND_FRAMES}.
     */
    int F_NEW = -1;

    /**
     * Represents a compressed frame with complete frame data.
     */
    int F_FULL = 0;

    /**
     * Represents a compressed frame where locals are the same as the locals in
     * the previous frame, except that additional 1-3 locals are defined, and
     * with an empty stack.
     */
    int F_APPEND = 1;

    /**
     * Represents a compressed frame where locals are the same as the locals in
     * the previous frame, except that the last 1-3 locals are absent and with
     * an empty stack.
     */
    int F_CHOP = 2;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with an empty stack.
     */
    int F_SAME = 3;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with a single value on the stack.
     */
    int F_SAME1 = 4;

    // Verification type markers used in stack map frames (visitFrame).
    // Integer.valueOf is used instead of the deprecated Integer(int)
    // constructor; each constant remains a stable, shared instance.
    Integer TOP = Integer.valueOf(0);
    Integer INTEGER = Integer.valueOf(1);
    Integer FLOAT = Integer.valueOf(2);
    Integer DOUBLE = Integer.valueOf(3);
    Integer LONG = Integer.valueOf(4);
    Integer NULL = Integer.valueOf(5);
    Integer UNINITIALIZED_THIS = Integer.valueOf(6);

    // opcodes // visit method (- = idem)

    int NOP = 0; // visitInsn
    int ACONST_NULL = 1; // -
    int ICONST_M1 = 2; // -
    int ICONST_0 = 3; // -
    int ICONST_1 = 4; // -
    int ICONST_2 = 5; // -
    int ICONST_3 = 6; // -
    int ICONST_4 = 7; // -
    int ICONST_5 = 8; // -
    int LCONST_0 = 9; // -
    int LCONST_1 = 10; // -
    int FCONST_0 = 11; // -
    int FCONST_1 = 12; // -
    int FCONST_2 = 13; // -
    int DCONST_0 = 14; // -
    int DCONST_1 = 15; // -
    int BIPUSH = 16; // visitIntInsn
    int SIPUSH = 17; // -
    int LDC = 18; // visitLdcInsn
    // int LDC_W = 19; // -
    // int LDC2_W = 20; // -
    int ILOAD = 21; // visitVarInsn
    int LLOAD = 22; // -
    int FLOAD = 23; // -
    int DLOAD = 24; // -
    int ALOAD = 25; // -
    // int ILOAD_0 = 26; // -
    // int ILOAD_1 = 27; // -
    // int ILOAD_2 = 28; // -
    // int ILOAD_3 = 29; // -
    // int LLOAD_0 = 30; // -
    // int LLOAD_1 = 31; // -
    // int LLOAD_2 = 32; // -
    // int LLOAD_3 = 33; // -
    // int FLOAD_0 = 34; // -
    // int FLOAD_1 = 35; // -
    // int FLOAD_2 = 36; // -
    // int FLOAD_3 = 37; // -
    // int DLOAD_0 = 38; // -
    // int DLOAD_1 = 39; // -
    // int DLOAD_2 = 40; // -
    // int DLOAD_3 = 41; // -
    // int ALOAD_0 = 42; // -
    // int ALOAD_1 = 43; // -
    // int ALOAD_2 = 44; // -
    // int ALOAD_3 = 45; // -
    int IALOAD = 46; // visitInsn
    int LALOAD = 47; // -
    int FALOAD = 48; // -
    int DALOAD = 49; // -
    int AALOAD = 50; // -
    int BALOAD = 51; // -
    int CALOAD = 52; // -
    int SALOAD = 53; // -
    int ISTORE = 54; // visitVarInsn
    int LSTORE = 55; // -
    int FSTORE = 56; // -
    int DSTORE = 57; // -
    int ASTORE = 58; // -
    // int ISTORE_0 = 59; // -
    // int ISTORE_1 = 60; // -
    // int ISTORE_2 = 61; // -
    // int ISTORE_3 = 62; // -
    // int LSTORE_0 = 63; // -
    // int LSTORE_1 = 64; // -
    // int LSTORE_2 = 65; // -
    // int LSTORE_3 = 66; // -
    // int FSTORE_0 = 67; // -
    // int FSTORE_1 = 68; // -
    // int FSTORE_2 = 69; // -
    // int FSTORE_3 = 70; // -
    // int DSTORE_0 = 71; // -
    // int DSTORE_1 = 72; // -
    // int DSTORE_2 = 73; // -
    // int DSTORE_3 = 74; // -
    // int ASTORE_0 = 75; // -
    // int ASTORE_1 = 76; // -
    // int ASTORE_2 = 77; // -
    // int ASTORE_3 = 78; // -
    int IASTORE = 79; // visitInsn
    int LASTORE = 80; // -
    int FASTORE = 81; // -
    int DASTORE = 82; // -
    int AASTORE = 83; // -
    int BASTORE = 84; // -
    int CASTORE = 85; // -
    int SASTORE = 86; // -
    int POP = 87; // -
    int POP2 = 88; // -
    int DUP = 89; // -
    int DUP_X1 = 90; // -
    int DUP_X2 = 91; // -
    int DUP2 = 92; // -
    int DUP2_X1 = 93; // -
    int DUP2_X2 = 94; // -
    int SWAP = 95; // -
    int IADD = 96; // -
    int LADD = 97; // -
    int FADD = 98; // -
    int DADD = 99; // -
    int ISUB = 100; // -
    int LSUB = 101; // -
    int FSUB = 102; // -
    int DSUB = 103; // -
    int IMUL = 104; // -
    int LMUL = 105; // -
    int FMUL = 106; // -
    int DMUL = 107; // -
    int IDIV = 108; // -
    int LDIV = 109; // -
    int FDIV = 110; // -
    int DDIV = 111; // -
    int IREM = 112; // -
    int LREM = 113; // -
    int FREM = 114; // -
    int DREM = 115; // -
    int INEG = 116; // -
    int LNEG = 117; // -
    int FNEG = 118; // -
    int DNEG = 119; // -
    int ISHL = 120; // -
    int LSHL = 121; // -
    int ISHR = 122; // -
    int LSHR = 123; // -
    int IUSHR = 124; // -
    int LUSHR = 125; // -
    int IAND = 126; // -
    int LAND = 127; // -
    int IOR = 128; // -
    int LOR = 129; // -
    int IXOR = 130; // -
    int LXOR = 131; // -
    int IINC = 132; // visitIincInsn
    int I2L = 133; // visitInsn
    int I2F = 134; // -
    int I2D = 135; // -
    int L2I = 136; // -
    int L2F = 137; // -
    int L2D = 138; // -
    int F2I = 139; // -
    int F2L = 140; // -
    int F2D = 141; // -
    int D2I = 142; // -
    int D2L = 143; // -
    int D2F = 144; // -
    int I2B = 145; // -
    int I2C = 146; // -
    int I2S = 147; // -
    int LCMP = 148; // -
    int FCMPL = 149; // -
    int FCMPG = 150; // -
    int DCMPL = 151; // -
    int DCMPG = 152; // -
    int IFEQ = 153; // visitJumpInsn
    int IFNE = 154; // -
    int IFLT = 155; // -
    int IFGE = 156; // -
    int IFGT = 157; // -
    int IFLE = 158; // -
    int IF_ICMPEQ = 159; // -
    int IF_ICMPNE = 160; // -
    int IF_ICMPLT = 161; // -
    int IF_ICMPGE = 162; // -
    int IF_ICMPGT = 163; // -
    int IF_ICMPLE = 164; // -
    int IF_ACMPEQ = 165; // -
    int IF_ACMPNE = 166; // -
    int GOTO = 167; // -
    int JSR = 168; // -
    int RET = 169; // visitVarInsn
    int TABLESWITCH = 170; // visiTableSwitchInsn
    int LOOKUPSWITCH = 171; // visitLookupSwitch
    int IRETURN = 172; // visitInsn
    int LRETURN = 173; // -
    int FRETURN = 174; // -
    int DRETURN = 175; // -
    int ARETURN = 176; // -
    int RETURN = 177; // -
    int GETSTATIC = 178; // visitFieldInsn
    int PUTSTATIC = 179; // -
    int GETFIELD = 180; // -
    int PUTFIELD = 181; // -
    int INVOKEVIRTUAL = 182; // visitMethodInsn
    int INVOKESPECIAL = 183; // -
    int INVOKESTATIC = 184; // -
    int INVOKEINTERFACE = 185; // -
    int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
    int NEW = 187; // visitTypeInsn
    int NEWARRAY = 188; // visitIntInsn
    int ANEWARRAY = 189; // visitTypeInsn
    int ARRAYLENGTH = 190; // visitInsn
    int ATHROW = 191; // -
    int CHECKCAST = 192; // visitTypeInsn
    int INSTANCEOF = 193; // -
    int MONITORENTER = 194; // visitInsn
    int MONITOREXIT = 195; // -
    // int WIDE = 196; // NOT VISITED
    int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
    int IFNULL = 198; // visitJumpInsn
    int IFNONNULL = 199; // -
    // int GOTO_W = 200; // -
    // int JSR_W = 201; // -
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.gamekit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSBundle;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.gamekit.protocol.GKGameCenterControllerDelegate;
import apple.uikit.UINavigationController;
import apple.uikit.UIViewController;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * Generated Multi-OS Engine (MOE / NatJ) Java binding for GameKit's
 * {@code GKGameCenterViewController}, the navigation controller that presents
 * the Game Center UI (leaderboards, achievements, player profile, ...).
 * <p>
 * Every {@code native} method here is bridged to the Objective-C selector named
 * in its {@code @Selector} annotation; the annotations and signatures must stay
 * exactly in sync with the GameKit headers, so do not edit them by hand.
 */
@Generated
@Library("GameKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class GKGameCenterViewController extends UINavigationController {

    // Register this binding class with the NatJ runtime before any bridged
    // member is touched.
    static {
        NatJ.register();
    }

    /** Wraps an existing native peer; used by the runtime, not by application code. */
    @Generated
    protected GKGameCenterViewController(Pointer peer) {
        super(peer);
    }

    // --- Standard NSObject / UIViewController class-level bridge methods ---

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    /** Allocates an uninitialized instance; pair with one of the {@code init*} methods. */
    @Generated
    @Owned
    @Selector("alloc")
    public static native GKGameCenterViewController alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native GKGameCenterViewController allocWithZone(VoidPtr zone);

    @Generated
    @Selector("attemptRotationToDeviceOrientation")
    public static native void attemptRotationToDeviceOrientation();

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("clearTextInputContextIdentifier:")
    public static native void clearTextInputContextIdentifier(String identifier);

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    /** Bridges Objective-C {@code +new} (alloc + init); renamed because {@code new} is a Java keyword. */
    @Generated
    @Owned
    @Selector("new")
    public static native GKGameCenterViewController new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    // --- Instance-level GameKit API ---

    /** Returns the delegate notified when the Game Center UI is finished. */
    @Generated
    @Selector("gameCenterDelegate")
    @MappedReturn(ObjCObjectMapper.class)
    public native GKGameCenterControllerDelegate gameCenterDelegate();

    @Generated
    @Selector("init")
    public native GKGameCenterViewController init();

    @Generated
    @Selector("initWithCoder:")
    public native GKGameCenterViewController initWithCoder(NSCoder coder);

    @Generated
    @Selector("initWithNavigationBarClass:toolbarClass:")
    public native GKGameCenterViewController initWithNavigationBarClassToolbarClass(Class navigationBarClass,
            Class toolbarClass);

    @Generated
    @Selector("initWithNibName:bundle:")
    public native GKGameCenterViewController initWithNibNameBundle(String nibNameOrNil, NSBundle nibBundleOrNil);

    @Generated
    @Selector("initWithRootViewController:")
    public native GKGameCenterViewController initWithRootViewController(UIViewController rootViewController);

    /** @deprecated superseded by {@link #leaderboardIdentifier()} in the GameKit API. */
    @Generated
    @Deprecated
    @Selector("leaderboardCategory")
    public native String leaderboardCategory();

    @Generated
    @Selector("leaderboardIdentifier")
    public native String leaderboardIdentifier();

    @Generated
    @Selector("leaderboardTimeScope")
    @NInt
    public native long leaderboardTimeScope();

    /**
     * Raw bridged setter for the delegate; does NOT manage the Java/ObjC
     * association, so prefer {@link #setGameCenterDelegate} instead.
     */
    @Generated
    @Selector("setGameCenterDelegate:")
    public native void setGameCenterDelegate_unsafe(
            @Mapped(ObjCObjectMapper.class) GKGameCenterControllerDelegate value);

    /**
     * Type-safe delegate setter. Associates the new delegate with this
     * controller on the Objective-C side (keeping it alive for the
     * controller's lifetime), installs it, then dissociates the previous
     * delegate so it can be collected.
     */
    @Generated
    public void setGameCenterDelegate(@Mapped(ObjCObjectMapper.class) GKGameCenterControllerDelegate value) {
        Object __old = gameCenterDelegate();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setGameCenterDelegate_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    /** @deprecated superseded by {@link #setLeaderboardIdentifier} in the GameKit API. */
    @Generated
    @Deprecated
    @Selector("setLeaderboardCategory:")
    public native void setLeaderboardCategory(String value);

    @Generated
    @Selector("setLeaderboardIdentifier:")
    public native void setLeaderboardIdentifier(String value);

    @Generated
    @Selector("setLeaderboardTimeScope:")
    public native void setLeaderboardTimeScope(@NInt long value);

    // viewState selects which Game Center screen is shown; the long value maps
    // to the GKGameCenterViewControllerState enum on the native side.
    @Generated
    @Selector("setViewState:")
    public native void setViewState(@NInt long value);

    @Generated
    @Selector("viewState")
    @NInt
    public native long viewState();

    /**
     * Use this to display the details associated with the specified achievementID
     */
    @Generated
    @Selector("initWithAchievementID:")
    public native GKGameCenterViewController initWithAchievementID(String achievementID);

    /**
     * Use this to display the scores for the specified leaderboard and player scope. Both classic and recurring leaderboards can use this method to initialize the view with their scores.
     */
    @Generated
    @Selector("initWithLeaderboard:playerScope:")
    public native GKGameCenterViewController initWithLeaderboardPlayerScope(GKLeaderboard leaderboard,
            @NInt long playerScope);

    /**
     * Use this to display the scores for the specified leaderboardID, player scope and time scope. The time scope is only applicable to classic leaderboards. Recurring leaderboards will always be displayed initially with the results (scores) associated with the current instance of the leaderboard.
     */
    @Generated
    @Selector("initWithLeaderboardID:playerScope:timeScope:")
    public native GKGameCenterViewController initWithLeaderboardIDPlayerScopeTimeScope(String leaderboardID,
            @NInt long playerScope, @NInt long timeScope);

    /**
     * Use this to display content associated with the specified state. For example setting the state to GKGameCenterViewControllerStateLeaderboards will display a list of leaderboard sets or leaderboards (if no sets). Setting state to GKGameCenterViewControllerStateAchievements will display a list of achievements.
     */
    @Generated
    @Selector("initWithState:")
    public native GKGameCenterViewController initWithState(@NInt long state);
}
| |
/* Copyright 1995-2014 Esri
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For additional information, contact:
* Environmental Systems Research Institute, Inc.
* Attn: Contracts Dept
* 380 New York Street
* Redlands, California, USA 92373
*
* email: contracts@esri.com
*
*/
package com.esri.android.mapsapp.location;
import android.app.DialogFragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SearchView;
import android.widget.SearchView.OnQueryTextListener;
import com.esri.android.mapsapp.R;
/**
 * Dialog that collects a start point and an end point for a routing request
 * and forwards them to a {@link RoutingDialogListener} supplied by the host
 * activity. The start field defaults to {@link #MY_LOCATION}; the end field
 * may be pre-populated via the {@link #ARG_END_POINT_DEFAULT} argument.
 */
public class RoutingDialogFragment extends DialogFragment {

    /** Arguments key: optional default text for the end-point field. */
    public static final String ARG_END_POINT_DEFAULT = "EndPointDefault";
    /** Sentinel query text meaning "route from the device's current location". */
    public static final String MY_LOCATION = "My Location";
    private static final String SEARCH_FROM = "From";
    private static final String SEARCH_TO = "To";

    private String mEndPointDefault;
    private SearchView mStartText;
    private SearchView mEndText;
    private RoutingDialogListener mRoutingDialogListener;

    // Mandatory empty constructor for fragment manager to recreate fragment
    // after it's destroyed.
    public RoutingDialogFragment() {
    }

    /**
     * Sets listener for click on Get Route button.
     *
     * @param listener callback invoked with the start/end point strings
     */
    public void setRoutingDialogListener(RoutingDialogListener listener) {
        mRoutingDialogListener = listener;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setStyle(DialogFragment.STYLE_NORMAL, 0);
        // getArguments() is null when the fragment was created without
        // setArguments(); the original unguarded call NPE'd in that case.
        Bundle args = getArguments();
        if (args != null && args.containsKey(ARG_END_POINT_DEFAULT)) {
            mEndPointDefault = args.getString(ARG_END_POINT_DEFAULT);
        } else {
            mEndPointDefault = null;
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.routing_layout, container, false);
        getDialog().setTitle(R.string.title_routing_dialog);

        // Initialize searchviews
        mStartText = (SearchView) view.findViewById(R.id.startPoint);
        mEndText = (SearchView) view.findViewById(R.id.endPoint);
        mStartText.setIconifiedByDefault(false);
        mEndText.setIconifiedByDefault(false);

        // Set hint for searchviews
        mStartText.setQueryHint(SEARCH_FROM);
        mEndText.setQueryHint(SEARCH_TO);

        // Replace the default magnifier icons with colored pins.
        setSearchIcon(mStartText, R.drawable.pin_circle_red);
        setSearchIcon(mEndText, R.drawable.pin_circle_blue);

        mStartText.setQuery(MY_LOCATION, false);
        mStartText.clearFocus();
        mEndText.requestFocus();
        if (mEndPointDefault != null) {
            mEndText.setQuery(mEndPointDefault, false);
        }

        ImageView swap = (ImageView) view.findViewById(R.id.iv_interchange);
        Button routeButton = (Button) view.findViewById(R.id.getRouteButton);

        // Set up onClick listener for the "Get Route" button
        routeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (submitRoute()) {
                    dismiss();
                }
            }
        });

        // Interchange the text in the searchviews
        swap.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Swap the places
                String temp = mStartText.getQuery().toString();
                mStartText.setQuery(mEndText.getQuery().toString(), false);
                mEndText.setQuery(temp, false);
            }
        });

        // Setup listener when the search button is clicked on the keyboard for
        // the searchviews
        mEndText.setOnQueryTextListener(new OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                if (mStartText.getQuery().length() > 0) {
                    if (submitRoute()) {
                        dismiss();
                    }
                } else {
                    // "From" text is empty; send focus there instead.
                    mEndText.clearFocus();
                    mStartText.requestFocus();
                }
                return true;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                return false;
            }
        });

        mStartText.setOnQueryTextListener(new OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                if (mEndText.getQuery().length() > 0) {
                    if (submitRoute()) {
                        dismiss();
                    }
                } else {
                    // "To" text is empty; send focus there instead.
                    mStartText.clearFocus();
                    mEndText.requestFocus();
                }
                return true;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                return false;
            }
        });

        return view;
    }

    /**
     * Swaps the platform magnifier icon inside a SearchView for a custom pin.
     * Resources.getIdentifier returns 0 when the internal id is not found
     * (platform-dependent), in which case findViewById yields null; the
     * original code NPE'd there, so both cases are guarded.
     */
    private void setSearchIcon(SearchView searchView, int drawableId) {
        int iconId = searchView.getContext().getResources().getIdentifier("android:id/search_mag_icon", null, null);
        if (iconId != 0) {
            ImageView icon = (ImageView) searchView.findViewById(iconId);
            if (icon != null) {
                icon.setImageResource(drawableId);
            }
        }
    }

    /**
     * Forwards the current start/end text to the listener.
     *
     * @return true if the listener accepted the request (dialog should close);
     *         false if no listener is set or the parameters were rejected.
     */
    private boolean submitRoute() {
        if (mRoutingDialogListener == null) {
            // No listener registered; the original code NPE'd here.
            return false;
        }
        String startPoint = mStartText.getQuery().toString();
        String endPoint = mEndText.getQuery().toString();
        return mRoutingDialogListener.onGetRoute(startPoint, endPoint);
    }

    /**
     * A callback interface that all activities containing this fragment must
     * implement, to receive a routing request from this fragment.
     */
    public interface RoutingDialogListener {
        /**
         * Callback for when the Get Route button is pressed.
         *
         * @param startPoint
         *            String entered by user to define start point.
         * @param endPoint
         *            String entered by user to define end point.
         * @return true if routing task executed, false if parameters rejected.
         *         If this method rejects the parameters it must display an
         *         explanatory Toast to the user before returning.
         */
        boolean onGetRoute(String startPoint, String endPoint);
    }
}
| |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.asseteditor.drools.modeldriven.ui;
import java.util.HashMap;
import java.util.Map;
import org.drools.guvnor.client.asseteditor.drools.modeldriven.HumanReadable;
import org.drools.guvnor.client.common.ClickableLabel;
import org.drools.guvnor.client.common.DirtyableFlexTable;
import org.drools.guvnor.client.common.FormStylePopup;
import org.drools.guvnor.client.messages.Constants;
import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine;
import org.drools.ide.common.client.modeldriven.brl.FactPattern;
import org.drools.ide.common.client.modeldriven.brl.FreeFormLine;
import org.drools.ide.common.client.modeldriven.brl.FromAccumulateCompositeFactPattern;
import org.drools.ide.common.client.modeldriven.brl.FromCollectCompositeFactPattern;
import org.drools.ide.common.client.modeldriven.brl.FromCompositeFactPattern;
import org.drools.ide.common.client.modeldriven.brl.FromEntryPointFactPattern;
import org.drools.ide.common.client.modeldriven.brl.IPattern;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.Widget;
/**
 * Rule-modeller widget for a DRL "from collect" composite fact pattern.
 * The left side is the pattern being bound (a Collection/List/Set or a model
 * fact type); the right side is the pattern whose matches are collected.
 */
public class FromCollectCompositeFactPatternWidget extends FromCompositeFactPatternWidget {

    // Lazily-built map of display name -> fully-qualified class name for the
    // collection types offered on the left side; see getExtraLeftSidePatternFactTypes().
    private Map<String, String> extraLeftSidePatternFactTypes = null;

    public FromCollectCompositeFactPatternWidget(RuleModeller modeller,
                                                 EventBus eventBus,
                                                 FromCollectCompositeFactPattern pattern) {
        super( modeller,
               eventBus,
               pattern );
    }

    public FromCollectCompositeFactPatternWidget(RuleModeller modeller,
                                                 EventBus eventBus,
                                                 FromCollectCompositeFactPattern pattern,
                                                 Boolean readOnly) {
        super( modeller,
               eventBus,
               pattern,
               readOnly );
    }

    /** Populates the left-side collection-type choices (display name -> FQCN). */
    private void initExtraLeftSidePatternFactTypes() {
        extraLeftSidePatternFactTypes = new HashMap<String, String>();
        extraLeftSidePatternFactTypes.put( "Collection",
                                           "java.util.Collection" );
        extraLeftSidePatternFactTypes.put( "List",
                                           "java.util.List" );
        extraLeftSidePatternFactTypes.put( "Set",
                                           "java.util.Set" );
    }

    /**
     * Builds the composite label: left pattern placeholder (if unset), the
     * "from collect" caption, then either a placeholder for the right pattern
     * or a widget rendering the configured right pattern.
     */
    @Override
    protected Widget getCompositeLabel() {
        // Placeholder click handlers open the respective pattern selectors.
        ClickHandler leftPatternclick = new ClickHandler() {
            public void onClick(ClickEvent event) {
                Widget w = (Widget) event.getSource();
                showFactTypeSelector( w );
            }
        };
        ClickHandler rightPatternclick = new ClickHandler() {
            public void onClick(ClickEvent event) {
                Widget w = (Widget) event.getSource();
                showRightPatternSelector( w );
            }
        };

        String lbl = "<div class='form-field'>" + HumanReadable.getCEDisplayName( "from collect" ) + "</div>";

        DirtyableFlexTable panel = new DirtyableFlexTable();
        int r = 0;

        if ( pattern.getFactPattern() == null ) {
            panel.setWidget( r++,
                             0,
                             new ClickableLabel( "<br> <font color='red'>" + Constants.INSTANCE.clickToAddPatterns() + "</font>",
                                                 leftPatternclick,
                                                 !this.readOnly ) );
        }

        panel.setWidget( r++,
                         0,
                         new HTML( lbl ) );

        if ( this.getFromCollectPattern().getRightPattern() == null ) {
            panel.setWidget( r++,
                             0,
                             new ClickableLabel( "<br> <font color='red'>" + Constants.INSTANCE.clickToAddPatterns() + "</font>",
                                                 rightPatternclick,
                                                 !this.readOnly ) );
        } else {
            // Dispatch on the concrete right-pattern type to pick its widget.
            IPattern rPattern = this.getFromCollectPattern().getRightPattern();

            RuleModellerWidget patternWidget = null;
            if ( rPattern instanceof FactPattern ) {
                patternWidget = new FactPatternWidget( this.getModeller(),
                                                       this.getEventBus(),
                                                       rPattern,
                                                       true,
                                                       true,
                                                       this.readOnly );
            } else if ( rPattern instanceof FromAccumulateCompositeFactPattern ) {
                patternWidget = new FromAccumulateCompositeFactPatternWidget( this.getModeller(),
                                                                              this.getEventBus(),
                                                                              (FromAccumulateCompositeFactPattern) rPattern,
                                                                              this.readOnly );
            } else if ( rPattern instanceof FromCollectCompositeFactPattern ) {
                patternWidget = new FromCollectCompositeFactPatternWidget( this.getModeller(),
                                                                           this.getEventBus(),
                                                                           (FromCollectCompositeFactPattern) rPattern,
                                                                           this.readOnly );
            } else if ( rPattern instanceof FromEntryPointFactPattern ) {
                patternWidget = new FromEntryPointFactPatternWidget( this.getModeller(),
                                                                     this.getEventBus(),
                                                                     (FromEntryPointFactPattern) rPattern,
                                                                     this.readOnly );
            } else if ( rPattern instanceof FromCompositeFactPattern ) {
                // NOTE: must come after the more specific From* subtypes above.
                patternWidget = new FromCompositeFactPatternWidget( this.getModeller(),
                                                                    this.getEventBus(),
                                                                    (FromCompositeFactPattern) rPattern,
                                                                    this.readOnly );
            } else if ( rPattern instanceof FreeFormLine ) {
                patternWidget = new FreeFormLineWidget( this.getModeller(),
                                                        this.getEventBus(),
                                                        (FreeFormLine) rPattern,
                                                        this.readOnly );
            } else {
                throw new IllegalArgumentException( "Unsupported pattern " + rPattern + " for right side of FROM COLLECT" );
            }

            // Propagate any nested edit as a modification of this widget.
            patternWidget.addOnModifiedCommand( new Command() {
                public void execute() {
                    setModified( true );
                }
            } );

            panel.setWidget( r++,
                             0,
                             addRemoveButton( patternWidget,
                                              new ClickHandler() {
                                                  public void onClick(ClickEvent event) {
                                                      if ( Window.confirm( Constants.INSTANCE.RemoveThisBlockOfData() ) ) {
                                                          setModified( true );
                                                          getFromCollectPattern().setRightPattern( null );
                                                          getModeller().refreshWidget();
                                                      }
                                                  }
                                              } ) );
        }

        return panel;
    }

    /**
     * Pops up the left-side fact-type selector, offering the collection types
     * from {@link #getExtraLeftSidePatternFactTypes()}. The item's hidden
     * value (the fully-qualified class name) becomes the fact type.
     */
    @Override
    protected void showFactTypeSelector(final Widget w) {
        final FormStylePopup popup = new FormStylePopup();
        popup.setTitle( Constants.INSTANCE.NewFactPattern() );

        final ListBox box = new ListBox();
        box.addItem( Constants.INSTANCE.Choose() );
        for ( Map.Entry<String, String> entry : this.getExtraLeftSidePatternFactTypes().entrySet() ) {
            box.addItem( entry.getKey(),
                         entry.getValue() );
        }
        //TODO: Add Facts that extends Collection
        //        box.addItem("...");
        //        box.addItem("TODO: Add Facts that extends Collection");
        box.setSelectedIndex( 0 );

        box.addChangeHandler( new ChangeHandler() {
            public void onChange(ChangeEvent event) {
                // getValue() yields the FQCN stored as the item's value.
                pattern.setFactPattern( new FactPattern( box.getValue( box.getSelectedIndex() ) ) );
                setModified( true );
                getModeller().refreshWidget();
                popup.hide();
            }
        } );
        popup.addAttribute( Constants.INSTANCE.chooseFactType(),
                            box );
        popup.show();
    }

    /**
     * Pops up the fact selector for the right side: either a model fact type
     * (by display text) or one of the composite/free-form pattern buttons.
     */
    protected void showRightPatternSelector(final Widget w) {
        final ListBox box = new ListBox();

        SuggestionCompletionEngine completions = this.getModeller().getSuggestionCompletions();
        String[] facts = completions.getFactTypes();

        box.addItem( Constants.INSTANCE.Choose() );
        for ( int i = 0; i < facts.length; i++ ) {
            box.addItem( facts[i] );
        }
        box.setSelectedIndex( 0 );

        final FormStylePopup popup = new FormStylePopup();
        popup.setTitle( Constants.INSTANCE.NewFactPattern() );
        popup.addAttribute( Constants.INSTANCE.chooseFactType(),
                            box );
        box.addChangeHandler( new ChangeHandler() {
            public void onChange(ChangeEvent event) {
                getFromCollectPattern().setRightPattern( new FactPattern( box.getItemText( box.getSelectedIndex() ) ) );
                setModified( true );
                getModeller().refreshWidget();
                popup.hide();
            }
        } );

        final Button freeFormDRLBtn = new Button( Constants.INSTANCE.FreeFormDrl() );
        final Button fromBtn = new Button( Constants.INSTANCE.From() );
        final Button fromAccumulateBtn = new Button( Constants.INSTANCE.FromAccumulate() );
        final Button fromCollectBtn = new Button( Constants.INSTANCE.FromCollect() );
        final Button fromEntryPointBtn = new Button( Constants.INSTANCE.FromEntryPoint() );

        // One shared handler; the event source identifies the chosen pattern kind.
        ClickHandler btnsClickHandler = new ClickHandler() {
            public void onClick(ClickEvent event) {
                Widget sender = (Widget) event.getSource();
                if ( sender == fromBtn ) {
                    getFromCollectPattern().setRightPattern( new FromCompositeFactPattern() );
                } else if ( sender == fromAccumulateBtn ) {
                    getFromCollectPattern().setRightPattern( new FromAccumulateCompositeFactPattern() );
                } else if ( sender == fromCollectBtn ) {
                    getFromCollectPattern().setRightPattern( new FromCollectCompositeFactPattern() );
                } else if ( sender == freeFormDRLBtn ) {
                    getFromCollectPattern().setRightPattern( new FreeFormLine() );
                } else if ( sender == fromEntryPointBtn ) {
                    getFromCollectPattern().setRightPattern( new FromEntryPointFactPattern() );
                } else {
                    throw new IllegalArgumentException( "Unknown sender: " + sender );
                }
                setModified( true );
                getModeller().refreshWidget();
                popup.hide();
            }
        };

        freeFormDRLBtn.addClickHandler( btnsClickHandler );
        fromBtn.addClickHandler( btnsClickHandler );
        fromAccumulateBtn.addClickHandler( btnsClickHandler );
        fromCollectBtn.addClickHandler( btnsClickHandler );
        fromEntryPointBtn.addClickHandler( btnsClickHandler );

        popup.addAttribute( "",
                            freeFormDRLBtn );
        popup.addAttribute( "",
                            fromBtn );
        popup.addAttribute( "",
                            fromAccumulateBtn );
        popup.addAttribute( "",
                            fromCollectBtn );
        popup.addAttribute( "",
                            fromEntryPointBtn );

        popup.show();
    }

    /** Convenience downcast of the inherited pattern field. */
    private FromCollectCompositeFactPattern getFromCollectPattern() {
        return (FromCollectCompositeFactPattern) this.pattern;
    }

    /**
     * Read-only unless the left fact type is one of the known collection
     * types or a fact type known to the suggestion-completion engine.
     */
    @Override
    protected void calculateReadOnly() {
        if ( this.pattern.factPattern != null ) {
            this.readOnly = !(this.getExtraLeftSidePatternFactTypes().containsValue( this.pattern.factPattern.getFactType() )
                    || this.getModeller().getSuggestionCompletions().containsFactType( this.pattern.factPattern.getFactType() ));
        }
    }

    /** Lazy accessor for the left-side collection-type map. */
    private Map<String, String> getExtraLeftSidePatternFactTypes() {
        if ( this.extraLeftSidePatternFactTypes == null ) {
            this.initExtraLeftSidePatternFactTypes();
        }
        return this.extraLeftSidePatternFactTypes;
    }
}
| |
package com.huawei.esdk.fusionmanager.local.impl.autogen.storage;
import javax.xml.bind.annotation.XmlRegistry;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the com.huawei.esdk.fusionmanager.local.impl.autogen.storage package.
* <p>An ObjectFactory allows you to programatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
@XmlRegistry
public class ObjectFactory {
/**
* Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: com.huawei.esdk.fusionmanager.local.impl.autogen.storage
*
*/
public ObjectFactory() {
}
/**
* Create an instance of {@link CreateBSReq }
*
*/
public CreateBSReq createCreateBSReq() {
return new CreateBSReq();
}
/**
* Create an instance of {@link DeleteBSReq }
*
*/
public DeleteBSReq createDeleteBSReq() {
return new DeleteBSReq();
}
/**
* Create an instance of {@link DetachBSReq }
*
*/
public DetachBSReq createDetachBSReq() {
return new DetachBSReq();
}
/**
* Create an instance of {@link BSSet }
*
*/
public BSSet createBSSet() {
return new BSSet();
}
/**
* Create an instance of {@link AttachBSReq }
*
*/
public AttachBSReq createAttachBSReq() {
return new AttachBSReq();
}
/**
* Create an instance of {@link QosInfoSet }
*
*/
public QosInfoSet createQosInfoSet() {
return new QosInfoSet();
}
/**
* Create an instance of {@link BSQosInfoSet }
*
*/
public BSQosInfoSet createBSQosInfoSet() {
return new BSQosInfoSet();
}
/**
* Create an instance of {@link ModifyIOpropertyReq }
*
*/
public ModifyIOpropertyReq createModifyIOpropertyReq() {
return new ModifyIOpropertyReq();
}
/**
* Create an instance of {@link BSParameterSet }
*
*/
public BSParameterSet createBSParameterSet() {
return new BSParameterSet();
}
/**
* Create an instance of {@link QueryStorageUnitReq }
*
*/
public QueryStorageUnitReq createQueryStorageUnitReq() {
return new QueryStorageUnitReq();
}
/**
* Create an instance of {@link DeleteStorageResourceReq }
*
*/
public DeleteStorageResourceReq createDeleteStorageResourceReq() {
return new DeleteStorageResourceReq();
}
/**
* Create an instance of {@link CreateStorageResourceReq }
*
*/
public CreateStorageResourceReq createCreateStorageResourceReq() {
return new CreateStorageResourceReq();
}
/**
* Create an instance of {@link QueryStorageResourceByIdReq }
*
*/
public QueryStorageResourceByIdReq createQueryStorageResourceByIdReq() {
return new QueryStorageResourceByIdReq();
}
/**
* Create an instance of {@link DisconnectStorageResourceReq }
*
*/
public DisconnectStorageResourceReq createDisconnectStorageResourceReq() {
return new DisconnectStorageResourceReq();
}
/**
* Create an instance of {@link QueryStorageResourceReq }
*
*/
public QueryStorageResourceReq createQueryStorageResourceReq() {
return new QueryStorageResourceReq();
}
/**
* Create an instance of {@link ConnectStorageResourceReq }
*
*/
public ConnectStorageResourceReq createConnectStorageResourceReq() {
return new ConnectStorageResourceReq();
}
/**
* Create an instance of {@link QueryDataStoreRes }
*
*/
public QueryDataStoreRes createQueryDataStoreRes() {
return new QueryDataStoreRes();
}
/**
* Create an instance of {@link QueryDataStoreResReq }
*
*/
public QueryDataStoreResReq createQueryDataStoreResReq() {
return new QueryDataStoreResReq();
}
/**
* Create an instance of {@link QueryStorageResourceResponse }
*
*/
public QueryStorageResourceResponse createQueryStorageResourceResponse() {
return new QueryStorageResourceResponse();
}
/**
* Create an instance of {@link QueryStorageResourceResp }
*
*/
public QueryStorageResourceResp createQueryStorageResourceResp() {
return new QueryStorageResourceResp();
}
/**
* Create an instance of {@link DiscoverDisk }
*
*/
/** Create an instance of {@link DiscoverDisk}. */
public DiscoverDisk createDiscoverDisk() {
    return new DiscoverDisk();
}

/** Create an instance of {@link DiscoverDiskRequest}. */
public DiscoverDiskRequest createDiscoverDiskRequest() {
    return new DiscoverDiskRequest();
}

/** Create an instance of {@link DeleteStorageResource}. */
public DeleteStorageResource createDeleteStorageResource() {
    return new DeleteStorageResource();
}

/** Create an instance of {@link QueryBS}. */
public QueryBS createQueryBS() {
    return new QueryBS();
}

/** Create an instance of {@link QueryBSReq}. */
public QueryBSReq createQueryBSReq() {
    return new QueryBSReq();
}

/** Create an instance of {@link QueryStorageResource}. */
public QueryStorageResource createQueryStorageResource() {
    return new QueryStorageResource();
}

/** Create an instance of {@link CreateStorageResource}. */
public CreateStorageResource createCreateStorageResource() {
    return new CreateStorageResource();
}

/** Create an instance of {@link ConnectStorageResourceResponse}. */
public ConnectStorageResourceResponse createConnectStorageResourceResponse() {
    return new ConnectStorageResourceResponse();
}

/** Create an instance of {@link ConnectStorageResourceResp}. */
public ConnectStorageResourceResp createConnectStorageResourceResp() {
    return new ConnectStorageResourceResp();
}

/** Create an instance of {@link DeleteBS}. */
public DeleteBS createDeleteBS() {
    return new DeleteBS();
}

/** Create an instance of {@link QueryStorageResourceById}. */
public QueryStorageResourceById createQueryStorageResourceById() {
    return new QueryStorageResourceById();
}

/** Create an instance of {@link CancelObjStorage}. */
public CancelObjStorage createCancelObjStorage() {
    return new CancelObjStorage();
}

/** Create an instance of {@link CancelObjStorageReq}. */
public CancelObjStorageReq createCancelObjStorageReq() {
    return new CancelObjStorageReq();
}

/** Create an instance of {@link AddDataStorage}. */
public AddDataStorage createAddDataStorage() {
    return new AddDataStorage();
}

/** Create an instance of {@link AddDataStorageReq}. */
public AddDataStorageReq createAddDataStorageReq() {
    return new AddDataStorageReq();
}

/** Create an instance of {@link QueryObjStorageResponse}. */
public QueryObjStorageResponse createQueryObjStorageResponse() {
    return new QueryObjStorageResponse();
}

/** Create an instance of {@link QueryObjStorageResp}. */
public QueryObjStorageResp createQueryObjStorageResp() {
    return new QueryObjStorageResp();
}

/** Create an instance of {@link CreateObsAccessID}. */
public CreateObsAccessID createCreateObsAccessID() {
    return new CreateObsAccessID();
}

/** Create an instance of {@link CreateObsAccessIDReq}. */
public CreateObsAccessIDReq createCreateObsAccessIDReq() {
    return new CreateObsAccessIDReq();
}

/** Create an instance of {@link DetachBS}. */
public DetachBS createDetachBS() {
    return new DetachBS();
}

/** Create an instance of {@link AddDataStorageResponse}. */
public AddDataStorageResponse createAddDataStorageResponse() {
    return new AddDataStorageResponse();
}

/** Create an instance of {@link AddDataStorageResp}. */
public AddDataStorageResp createAddDataStorageResp() {
    return new AddDataStorageResp();
}
/** Create an instance of {@link UpdateObjStorage}. */
public UpdateObjStorage createUpdateObjStorage() {
    return new UpdateObjStorage();
}

/** Create an instance of {@link UpdateObjStorageReq}. */
public UpdateObjStorageReq createUpdateObjStorageReq() {
    return new UpdateObjStorageReq();
}

/** Create an instance of {@link QueryDataStoreResponse}. */
public QueryDataStoreResponse createQueryDataStoreResponse() {
    return new QueryDataStoreResponse();
}

/** Create an instance of {@link QueryDataStoreResp}. */
public QueryDataStoreResp createQueryDataStoreResp() {
    return new QueryDataStoreResp();
}

/** Create an instance of {@link QueryStorageUnitResponse}. */
public QueryStorageUnitResponse createQueryStorageUnitResponse() {
    return new QueryStorageUnitResponse();
}

/** Create an instance of {@link QueryStorageUnitResp}. */
public QueryStorageUnitResp createQueryStorageUnitResp() {
    return new QueryStorageUnitResp();
}

/** Create an instance of {@link QueryObjStorage}. */
public QueryObjStorage createQueryObjStorage() {
    return new QueryObjStorage();
}

/** Create an instance of {@link QueryObjStorageReq}. */
public QueryObjStorageReq createQueryObjStorageReq() {
    return new QueryObjStorageReq();
}

/** Create an instance of {@link DeleteStorageResourceResponse}. */
public DeleteStorageResourceResponse createDeleteStorageResourceResponse() {
    return new DeleteStorageResourceResponse();
}

/** Create an instance of {@link DeleteStorageResourceResp}. */
public DeleteStorageResourceResp createDeleteStorageResourceResp() {
    return new DeleteStorageResourceResp();
}

/** Create an instance of {@link DeleteDataStore}. */
public DeleteDataStore createDeleteDataStore() {
    return new DeleteDataStore();
}

/** Create an instance of {@link DeleteDataStoreReq}. */
public DeleteDataStoreReq createDeleteDataStoreReq() {
    return new DeleteDataStoreReq();
}

/** Create an instance of {@link DeleteObsAccessIDResponse}. */
public DeleteObsAccessIDResponse createDeleteObsAccessIDResponse() {
    return new DeleteObsAccessIDResponse();
}

/** Create an instance of {@link DeleteObsAccessIDResp}. */
public DeleteObsAccessIDResp createDeleteObsAccessIDResp() {
    return new DeleteObsAccessIDResp();
}

/** Create an instance of {@link CreateStorageResourceResponse}. */
public CreateStorageResourceResponse createCreateStorageResourceResponse() {
    return new CreateStorageResourceResponse();
}

/** Create an instance of {@link CreateStorageResourceResp}. */
public CreateStorageResourceResp createCreateStorageResourceResp() {
    return new CreateStorageResourceResp();
}

/** Create an instance of {@link CreateBSResponse}. */
public CreateBSResponse createCreateBSResponse() {
    return new CreateBSResponse();
}

/** Create an instance of {@link CreateBSResp}. */
public CreateBSResp createCreateBSResp() {
    return new CreateBSResp();
}

/** Create an instance of {@link QueryDataStoreById}. */
public QueryDataStoreById createQueryDataStoreById() {
    return new QueryDataStoreById();
}

/** Create an instance of {@link QueryDataStoreByIdReq}. */
public QueryDataStoreByIdReq createQueryDataStoreByIdReq() {
    return new QueryDataStoreByIdReq();
}

/** Create an instance of {@link ApplyObjStorageResponse}. */
public ApplyObjStorageResponse createApplyObjStorageResponse() {
    return new ApplyObjStorageResponse();
}

/** Create an instance of {@link ApplyObjStorageResp}. */
public ApplyObjStorageResp createApplyObjStorageResp() {
    return new ApplyObjStorageResp();
}
/** Create an instance of {@link DisconnectStorageResourceResponse}. */
public DisconnectStorageResourceResponse createDisconnectStorageResourceResponse() {
    return new DisconnectStorageResourceResponse();
}

/** Create an instance of {@link DisconnectStorageResourceResp}. */
public DisconnectStorageResourceResp createDisconnectStorageResourceResp() {
    return new DisconnectStorageResourceResp();
}

/** Create an instance of {@link ConnectStorageResource}. */
public ConnectStorageResource createConnectStorageResource() {
    return new ConnectStorageResource();
}

/** Create an instance of {@link AttachBS}. */
public AttachBS createAttachBS() {
    return new AttachBS();
}

/** Create an instance of {@link QueryStorageUnit}. */
public QueryStorageUnit createQueryStorageUnit() {
    return new QueryStorageUnit();
}

/** Create an instance of {@link DeleteDataStoreResponse}. */
public DeleteDataStoreResponse createDeleteDataStoreResponse() {
    return new DeleteDataStoreResponse();
}

/** Create an instance of {@link DeleteDataStoreResp}. */
public DeleteDataStoreResp createDeleteDataStoreResp() {
    return new DeleteDataStoreResp();
}

/** Create an instance of {@link DetachBSResponse}. */
public DetachBSResponse createDetachBSResponse() {
    return new DetachBSResponse();
}

/** Create an instance of {@link DetachBSResp}. */
public DetachBSResp createDetachBSResp() {
    return new DetachBSResp();
}

/** Create an instance of {@link UpdateObjStorageResponse}. */
public UpdateObjStorageResponse createUpdateObjStorageResponse() {
    return new UpdateObjStorageResponse();
}

/** Create an instance of {@link UpdateObjStorageResp}. */
public UpdateObjStorageResp createUpdateObjStorageResp() {
    return new UpdateObjStorageResp();
}

/** Create an instance of {@link ModifyIOpropertyResponse}. */
public ModifyIOpropertyResponse createModifyIOpropertyResponse() {
    return new ModifyIOpropertyResponse();
}

/** Create an instance of {@link ModifyIOpropertyResp}. */
public ModifyIOpropertyResp createModifyIOpropertyResp() {
    return new ModifyIOpropertyResp();
}

/** Create an instance of {@link QueryStorageResourceByIdResponse}. */
public QueryStorageResourceByIdResponse createQueryStorageResourceByIdResponse() {
    return new QueryStorageResourceByIdResponse();
}

/** Create an instance of {@link QueryStorageResourceByIdResp}. */
public QueryStorageResourceByIdResp createQueryStorageResourceByIdResp() {
    return new QueryStorageResourceByIdResp();
}

/** Create an instance of {@link QueryDataStore}. */
public QueryDataStore createQueryDataStore() {
    return new QueryDataStore();
}

/** Create an instance of {@link QueryDataStoreReq}. */
public QueryDataStoreReq createQueryDataStoreReq() {
    return new QueryDataStoreReq();
}

/** Create an instance of {@link DeleteObsAccessID}. */
public DeleteObsAccessID createDeleteObsAccessID() {
    return new DeleteObsAccessID();
}

/** Create an instance of {@link DeleteObsAccessIDReq}. */
public DeleteObsAccessIDReq createDeleteObsAccessIDReq() {
    return new DeleteObsAccessIDReq();
}

/** Create an instance of {@link QueryDataStoreByIdResponse}. */
public QueryDataStoreByIdResponse createQueryDataStoreByIdResponse() {
    return new QueryDataStoreByIdResponse();
}

/** Create an instance of {@link QueryDataStoreByIdResp}. */
public QueryDataStoreByIdResp createQueryDataStoreByIdResp() {
    return new QueryDataStoreByIdResp();
}
/** Create an instance of {@link QueryDataStoreResResponse}. */
public QueryDataStoreResResponse createQueryDataStoreResResponse() {
    return new QueryDataStoreResResponse();
}

/** Create an instance of {@link QueryDataStoreResResp}. */
public QueryDataStoreResResp createQueryDataStoreResResp() {
    return new QueryDataStoreResResp();
}

/** Create an instance of {@link QueryBSResponse}. */
public QueryBSResponse createQueryBSResponse() {
    return new QueryBSResponse();
}

/** Create an instance of {@link QueryBSResp}. */
public QueryBSResp createQueryBSResp() {
    return new QueryBSResp();
}

/** Create an instance of {@link ModifyIOproperty}. */
public ModifyIOproperty createModifyIOproperty() {
    return new ModifyIOproperty();
}

/** Create an instance of {@link DisconnectStorageResource}. */
public DisconnectStorageResource createDisconnectStorageResource() {
    return new DisconnectStorageResource();
}

/** Create an instance of {@link CreateBS}. */
public CreateBS createCreateBS() {
    return new CreateBS();
}

/** Create an instance of {@link DiscoverDiskResponse}. */
public DiscoverDiskResponse createDiscoverDiskResponse() {
    return new DiscoverDiskResponse();
}

/** Create an instance of {@link DiscoverDiskResponse2}. */
public DiscoverDiskResponse2 createDiscoverDiskResponse2() {
    return new DiscoverDiskResponse2();
}

/** Create an instance of {@link DeleteBSResponse}. */
public DeleteBSResponse createDeleteBSResponse() {
    return new DeleteBSResponse();
}

/** Create an instance of {@link DeleteBSResp}. */
public DeleteBSResp createDeleteBSResp() {
    return new DeleteBSResp();
}

/** Create an instance of {@link CreateObsAccessIDResponse}. */
public CreateObsAccessIDResponse createCreateObsAccessIDResponse() {
    return new CreateObsAccessIDResponse();
}

/** Create an instance of {@link CreateObsAccessIDResp}. */
public CreateObsAccessIDResp createCreateObsAccessIDResp() {
    return new CreateObsAccessIDResp();
}

/** Create an instance of {@link CancelObjStorageResponse}. */
public CancelObjStorageResponse createCancelObjStorageResponse() {
    return new CancelObjStorageResponse();
}

/** Create an instance of {@link CancelObjStorageResp}. */
public CancelObjStorageResp createCancelObjStorageResp() {
    return new CancelObjStorageResp();
}

/** Create an instance of {@link AttachBSResponse}. */
public AttachBSResponse createAttachBSResponse() {
    return new AttachBSResponse();
}

/** Create an instance of {@link AttachBSResp}. */
public AttachBSResp createAttachBSResp() {
    return new AttachBSResp();
}

/** Create an instance of {@link ApplyObjStorage}. */
public ApplyObjStorage createApplyObjStorage() {
    return new ApplyObjStorage();
}

/** Create an instance of {@link ApplyObjStorageReq}. */
public ApplyObjStorageReq createApplyObjStorageReq() {
    return new ApplyObjStorageReq();
}

/** Create an instance of {@link StorageUnit}. */
public StorageUnit createStorageUnit() {
    return new StorageUnit();
}

/** Create an instance of {@link BSRespSet}. */
public BSRespSet createBSRespSet() {
    return new BSRespSet();
}

/** Create an instance of {@link DataChannel}. */
public DataChannel createDataChannel() {
    return new DataChannel();
}

/** Create an instance of {@link HostUrn}. */
public HostUrn createHostUrn() {
    return new HostUrn();
}

/** Create an instance of {@link StorageResource}. */
public StorageResource createStorageResource() {
    return new StorageResource();
}

/** Create an instance of {@link OBSParameter}. */
public OBSParameter createOBSParameter() {
    return new OBSParameter();
}

/** Create an instance of {@link Datastore}. */
public Datastore createDatastore() {
    return new Datastore();
}

/** Create an instance of {@link DatastoreRes}. */
public DatastoreRes createDatastoreRes() {
    return new DatastoreRes();
}
}
| |
package com.jetbrains.edu.learning.courseGeneration;
import com.google.gson.*;
import com.google.gson.stream.JsonReader;
import com.intellij.facet.ui.ValidationResult;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.platform.templates.github.ZipUtil;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.edu.learning.StudySerializationUtils;
import com.jetbrains.edu.learning.StudyTaskManager;
import com.jetbrains.edu.learning.StudyUtils;
import com.jetbrains.edu.learning.core.EduNames;
import com.jetbrains.edu.learning.core.EduUtils;
import com.jetbrains.edu.learning.courseFormat.Course;
import com.jetbrains.edu.learning.courseFormat.Lesson;
import com.jetbrains.edu.learning.courseFormat.Task;
import com.jetbrains.edu.learning.courseFormat.TaskFile;
import com.jetbrains.edu.learning.editor.StudyEditor;
import com.jetbrains.edu.learning.statistics.EduUsagesCollector;
import com.jetbrains.edu.learning.stepic.CourseInfo;
import com.jetbrains.edu.learning.stepic.EduStepicConnector;
import com.jetbrains.edu.learning.stepic.StepicUpdateSettings;
import com.jetbrains.edu.learning.stepic.StepicUser;
import org.apache.commons.codec.binary.Base64;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.util.*;
import static com.jetbrains.edu.learning.StudyUtils.execCancelable;
public class StudyProjectGenerator {
    // Json attribute names used when parsing a course meta file.
    public static final String AUTHOR_ATTRIBUTE = "authors";
    public static final String LANGUAGE_ATTRIBUTE = "language";
    // Directory-name prefix used for per-user adaptive course caches (see getCourse()).
    public static final String ADAPTIVE_COURSE_PREFIX = "__AdaptivePyCharmPython__";
    // Root directory where downloaded courses are cached on disk.
    public static final File OUR_COURSES_DIR = new File(PathManager.getConfigPath(), "courses");
    private static final Logger LOG = Logger.getInstance(StudyProjectGenerator.class.getName());
    private static final String COURSE_NAME_ATTRIBUTE = "name";
    private static final String COURSE_DESCRIPTION = "description";
    // Cache file inside OUR_COURSES_DIR holding one CourseInfo json object per line.
    private static final String CACHE_NAME = "courseNames.txt";
    private final List<SettingsListener> myListeners = ContainerUtil.newArrayList();
    private List<CourseInfo> myCourses = new ArrayList<>();
    private List<Integer> myEnrolledCoursesIds = new ArrayList<>();
    // Course the user picked in the generator UI; read by most operations below.
    protected CourseInfo mySelectedCourseInfo;
/**
 * Replaces the in-memory course list with {@code courses}.
 */
public void setCourses(List<CourseInfo> courses) {
    myCourses = courses;
}
/**
 * @return {@code true} when the current Stepik user has an access token
 */
public boolean isLoggedIn() {
    return StepicUpdateSettings.getInstance().getUser().getAccessToken() != null;
}
/**
 * Stores the ids of the courses the user is enrolled in on Stepik.
 */
public void setEnrolledCoursesIds(@NotNull final List<Integer> coursesIds) {
    myEnrolledCoursesIds = coursesIds;
}
/**
 * @return ids of the Stepik courses the user is enrolled in (never null)
 */
@NotNull
public List<Integer> getEnrolledCoursesIds() {
    return myEnrolledCoursesIds;
}
/**
 * Remembers the course picked in the generator UI.
 */
public void setSelectedCourse(@NotNull final CourseInfo courseInfo) {
    mySelectedCourseInfo = courseInfo;
}
/**
 * @return the course currently selected in the generator UI, may be null
 */
public CourseInfo getSelectedCourseInfo() {
    return mySelectedCourseInfo;
}
/**
 * Generates the selected course inside {@code baseDir} and wires it into the IDE:
 * registers the course with {@link StudyTaskManager}, creates the course files,
 * shows the study tool window, opens the first task and reports usage statistics.
 * Shows a warning dialog and aborts when the course could not be obtained.
 *
 * @param project project the course is generated for
 * @param baseDir project base directory that receives the course files
 */
public void generateProject(@NotNull final Project project, @NotNull final VirtualFile baseDir) {
    final Course course = getCourse(project);
    if (course == null) {
        LOG.warn("Course is null");
        Messages.showWarningDialog("Some problems occurred while creating the course", "Error in Course Creation");
        return;
    }
    final File courseDirectory = StudyUtils.getCourseDirectory(course);
    StudyTaskManager.getInstance(project).setCourse(course);
    // File creation and VFS refresh must run inside a write action.
    ApplicationManager.getApplication().runWriteAction(() -> {
        StudyGenerator.createCourse(course, baseDir, courseDirectory, project);
        course.setCourseDirectory(courseDirectory.getAbsolutePath());
        // Files were created outside the file watcher; force a full refresh before opening them.
        VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
        StudyUtils.registerStudyToolWindow(course, project);
        openFirstTask(course, project);
        EduUsagesCollector.projectTypeCreated(course.isAdaptive() ? EduNames.ADAPTIVE : EduNames.STUDY);
    });
}
/**
 * Resolves the selected course: prefers the local on-disk cache when it exists
 * and is up to date, checks the per-user adaptive cache for logged-in users,
 * and otherwise downloads the course from Stepik.
 *
 * @return the resolved course, or null when it could not be obtained
 */
@Nullable
public Course getCourse(@NotNull final Project project) {
    final File courseFile = new File(new File(OUR_COURSES_DIR, mySelectedCourseInfo.getName()), EduNames.COURSE_META_FILE);
    if (courseFile.exists()) {
        final Course course = readCourseFromCache(courseFile, false);
        if (course != null && course.isUpToDate()) {
            return course;
        }
        // Cached copy is stale or unreadable: re-download.
        return getCourseFromStepic(project);
    }
    else {
        final StepicUser user = StepicUpdateSettings.getInstance().getUser();
        if (user.getAccessToken() != null) {
            // Adaptive courses are cached per user under a prefixed directory name.
            final File adaptiveCourseFile = new File(new File(OUR_COURSES_DIR, ADAPTIVE_COURSE_PREFIX +
                                                                              mySelectedCourseInfo.getName() + "_" +
                                                                              user.getEmail()), EduNames.COURSE_META_FILE);
            if (adaptiveCourseFile.exists()) {
                return readCourseFromCache(adaptiveCourseFile, true);
            }
        }
    }
    // No usable cache entry at all: fetch from Stepik.
    return getCourseFromStepic(project);
}
/**
 * Downloads the selected course from Stepik under a cancellable modal progress
 * dialog, flushes it to the local cache and initializes it.
 * Returns null when the download fails or is cancelled.
 */
private Course getCourseFromStepic(@NotNull Project project) {
    return ProgressManager.getInstance().runProcessWithProgressSynchronously(() -> {
        ProgressManager.getInstance().getProgressIndicator().setIndeterminate(true);
        return execCancelable(() -> {
            final Course course = EduStepicConnector.getCourse(project, mySelectedCourseInfo);
            if (course != null) {
                flushCourse(course); // persist for later cache hits in getCourse()
                course.initCourse(false);
            }
            return course;
        });
    }, "Creating Course", true, project);
}
/**
 * Deserializes a cached course from its json meta file.
 *
 * @param courseFile the course meta file (course.json) to read
 * @param isAdaptive whether the course should be initialized as adaptive
 * @return the course, or null when the file cannot be read or is empty
 */
@Nullable
private static Course readCourseFromCache(@NotNull File courseFile, boolean isAdaptive) {
    // try-with-resources replaces the manual reader/closeSilently dance and
    // cannot leak the stream.
    try (Reader reader = new InputStreamReader(new FileInputStream(courseFile), "UTF-8")) {
        final Gson gson =
            new GsonBuilder().registerTypeAdapter(Course.class, new StudySerializationUtils.Json.CourseTypeAdapter(courseFile)).create();
        final Course course = gson.fromJson(reader, Course.class);
        // fromJson returns null for an empty file; the old code then NPE'd on initCourse.
        if (course != null) {
            course.initCourse(isAdaptive);
        }
        return course;
    }
    catch (IOException e) {
        // Covers the original UnsupportedEncodingException/FileNotFoundException
        // plus failures on read/close.
        LOG.warn(e.getMessage());
    }
    return null;
}
/**
 * Opens an editor for the first task of the course. Prefers a task file that
 * contains active answer placeholders (selecting the first placeholder);
 * otherwise falls back to the first task file by name. Silently returns when
 * the course has no lessons/tasks or the task directory cannot be found.
 */
public static void openFirstTask(@NotNull final Course course, @NotNull final Project project) {
    LocalFileSystem.getInstance().refresh(false);
    final Lesson firstLesson = StudyUtils.getFirst(course.getLessons());
    if (firstLesson == null) return;
    final Task firstTask = StudyUtils.getFirst(firstLesson.getTaskList());
    if (firstTask == null) return;
    final VirtualFile taskDir = firstTask.getTaskDir(project);
    if (taskDir == null) return;
    final Map<String, TaskFile> taskFiles = firstTask.getTaskFiles();
    // Scan all task files for one that actually has placeholders to work on;
    // the last such file wins.
    VirtualFile activeVirtualFile = null;
    for (Map.Entry<String, TaskFile> entry : taskFiles.entrySet()) {
        final String relativePath = entry.getKey();
        final TaskFile taskFile = entry.getValue();
        taskDir.refresh(false, true);
        final VirtualFile virtualFile = taskDir.findFileByRelativePath(relativePath);
        if (virtualFile != null) {
            if (!taskFile.getActivePlaceholders().isEmpty()) {
                activeVirtualFile = virtualFile;
            }
        }
    }
    if (activeVirtualFile != null) {
        final PsiFile file = PsiManager.getInstance(project).findFile(activeVirtualFile);
        ProjectView.getInstance(project).select(file, activeVirtualFile, false);
        final FileEditor[] editors = FileEditorManager.getInstance(project).openFile(activeVirtualFile, true);
        if (editors.length == 0) {
            return;
        }
        final FileEditor studyEditor = editors[0];
        if (studyEditor instanceof StudyEditor) {
            StudyUtils.selectFirstAnswerPlaceholder((StudyEditor)studyEditor, project);
        }
        FileEditorManager.getInstance(project).openFile(activeVirtualFile, true);
    }
    else {
        // No placeholders anywhere: just open the first task file.
        String first = StudyUtils.getFirst(taskFiles.keySet());
        if (first != null) {
            NewVirtualFile firstFile = ((VirtualDirectoryImpl)taskDir).refreshAndFindChild(first);
            if (firstFile != null) {
                FileEditorManager.getInstance(project).openFile(firstFile, true);
            }
        }
    }
}
/**
 * Serializes the whole course to its cache directory on disk: writes the
 * course json, each regular lesson under a numbered lesson directory, and the
 * contents of the special "additional materials" lesson into the course root.
 * Additional-materials lessons are removed from the course afterwards.
 */
public static void flushCourse(@NotNull final Course course) {
    final File courseDirectory = StudyUtils.getCourseDirectory(course);
    FileUtil.createDirectory(courseDirectory);
    flushCourseJson(course, courseDirectory);

    final List<Lesson> additionalLessons = new ArrayList<>();
    int lessonIndex = 1;
    for (Lesson lesson : course.getLessons()) {
        if (lesson.getName().equals(EduNames.PYCHARM_ADDITIONAL)) {
            flushAdditionalFiles(courseDirectory, lesson);
            additionalLessons.add(lesson);
        }
        else {
            flushLesson(new File(courseDirectory, EduNames.LESSON + lessonIndex), lesson);
            lessonIndex++;
        }
    }
    // Removal happens after the iteration to avoid a ConcurrentModificationException.
    for (Lesson extra : additionalLessons) {
        course.getLessons().remove(extra);
    }
}
/**
 * Writes the files of the "additional materials" lesson directly into the
 * course directory. The lesson is expected to contain exactly one task whose
 * tests-text map holds (file name -> content) pairs; image content is stored
 * base64-encoded.
 */
private static void flushAdditionalFiles(File courseDirectory, Lesson lesson) {
    final List<Task> taskList = lesson.getTaskList();
    if (taskList.size() != 1) return;
    final Task task = taskList.get(0);
    for (Map.Entry<String, String> entry : task.getTestsText().entrySet()) {
        final String name = entry.getKey();
        final String text = entry.getValue();
        final File file = new File(courseDirectory, name);
        FileUtil.createIfDoesntExist(file);
        try {
            if (EduUtils.isImage(name)) {
                FileUtil.writeToFile(file, Base64.decodeBase64(text));
            }
            else {
                FileUtil.writeToFile(file, text);
            }
        }
        catch (IOException e) {
            // Fix: keep the exception (stack trace); the bare message made
            // write failures impossible to diagnose.
            LOG.error("ERROR copying file " + name, e);
        }
    }
}
/**
 * Writes every task of {@code lesson} into {@code lessonDirectory}, each task
 * under a numbered "taskN" subdirectory (1-based).
 */
public static void flushLesson(@NotNull final File lessonDirectory, @NotNull final Lesson lesson) {
    FileUtil.createDirectory(lessonDirectory);
    int taskIndex = 1;
    for (Task task : lesson.taskList) {
        flushTask(task, new File(lessonDirectory, EduNames.TASK + taskIndex));
        taskIndex++;
    }
}
/**
 * Writes one task to {@code taskDirectory}: every task file (image files are
 * stored base64-encoded and get decoded), the test files, and the task
 * description — either a generated task.html when the task has no separate
 * text files, or the individual text files otherwise.
 */
public static void flushTask(@NotNull final Task task, @NotNull final File taskDirectory) {
    FileUtil.createDirectory(taskDirectory);
    for (Map.Entry<String, TaskFile> taskFileEntry : task.taskFiles.entrySet()) {
        final String name = FileUtil.toSystemDependentName(taskFileEntry.getKey());
        final TaskFile taskFile = taskFileEntry.getValue();
        final File file = new File(taskDirectory, name);
        FileUtil.createIfDoesntExist(file);
        try {
            if (EduUtils.isImage(taskFile.name)) {
                // Image contents travel base64-encoded in the task file text.
                FileUtil.writeToFile(file, Base64.decodeBase64(taskFile.text));
            }
            else {
                FileUtil.writeToFile(file, taskFile.text);
            }
        }
        catch (IOException e) {
            LOG.error(e);
        }
    }
    createFiles(taskDirectory, task.getTestsText());
    if (task.getTaskTexts().isEmpty()) {
        // No per-file task texts: serialize the aggregated text as task.html.
        createTaskHtml(task, taskDirectory);
        return;
    }
    // Per-file texts win; drop the aggregated text before writing them.
    task.setText(null);
    createFiles(taskDirectory, task.getTaskTexts());
}
/**
 * Writes the task's aggregated description text into a task.html file inside
 * {@code taskDirectory}.
 */
private static void createTaskHtml(@NotNull Task task, @NotNull File taskDirectory) {
    final File htmlFile = new File(taskDirectory, EduNames.TASK_HTML);
    FileUtil.createIfDoesntExist(htmlFile);
    try {
        FileUtil.writeToFile(htmlFile, task.getText());
    }
    catch (IOException e) {
        LOG.error(e);
    }
}
/**
 * Writes each (relative name -> content) entry of {@code files} into
 * {@code taskDirectory}, replacing any file that already exists.
 */
private static void createFiles(@NotNull File taskDirectory, Map<String, String> files) {
    for (Map.Entry<String, String> entry : files.entrySet()) {
        final File target = new File(taskDirectory, entry.getKey());
        if (target.exists()) {
            FileUtil.delete(target);
        }
        FileUtil.createIfDoesntExist(target);
        try {
            FileUtil.writeToFile(target, entry.getValue());
        }
        catch (IOException e) {
            LOG.error(e);
        }
    }
}
/**
 * Serializes {@code course} (only @Expose-annotated fields, pretty-printed)
 * into the course meta file inside {@code courseDirectory}. Shows an error
 * dialog and logs when the file cannot be written.
 */
public static void flushCourseJson(@NotNull final Course course, @NotNull final File courseDirectory) {
    final Gson gson = new GsonBuilder().setPrettyPrinting().
        excludeFieldsWithoutExposeAnnotation().create();
    final String json = gson.toJson(course);
    final File courseJson = new File(courseDirectory, EduNames.COURSE_META_FILE);
    // try-with-resources fixes a leak in the old code: the FileOutputStream was
    // never closed when the OutputStreamWriter constructor threw, and close()
    // exceptions were handled in a separate nested try block.
    try (OutputStreamWriter outputStreamWriter = new OutputStreamWriter(new FileOutputStream(courseJson), "UTF-8")) {
        outputStreamWriter.write(json);
    }
    catch (IOException e) {
        Messages.showErrorDialog(e.getMessage(), "Failed to Generate Json");
        LOG.info(e);
    }
}
/**
 * Writes {@code courses} to the cache file {@link StudyProjectGenerator#CACHE_NAME},
 * merging them with the entries already cached on disk.
 */
@SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
public static void flushCache(List<CourseInfo> courses) {
    flushCache(courses, true);
}
/**
 * Writes {@code courses} to the {@link StudyProjectGenerator#CACHE_NAME} cache
 * file, one json object per line. Duplicates are removed via a set; when
 * {@code preserveOld} is true the entries already cached on disk are merged in.
 * Errors are logged and swallowed (best-effort cache).
 */
public static void flushCache(List<CourseInfo> courses, boolean preserveOld) {
    final File cacheFile = new File(OUR_COURSES_DIR, CACHE_NAME);
    try {
        if (!createCacheFile(cacheFile)) return;
        final Gson gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES).create();
        final Set<CourseInfo> courseInfos = new HashSet<>(courses);
        if (preserveOld) {
            courseInfos.addAll(getCoursesFromCache());
        }
        // try-with-resources fixes the old double-close: the writer was closed
        // both in an inner finally and again in the outer finally.
        try (PrintWriter writer = new PrintWriter(cacheFile, "UTF-8")) {
            for (CourseInfo courseInfo : courseInfos) {
                writer.println(gson.toJson(courseInfo));
            }
        }
    }
    catch (IOException e) {
        LOG.error(e);
    }
}
/**
 * Ensures the courses directory and the cache file exist, creating them on
 * demand.
 *
 * @return {@code true} when the cache file is ready to be written
 * @throws IOException when the cache file cannot be created
 */
private static boolean createCacheFile(File cacheFile) throws IOException {
    if (!OUR_COURSES_DIR.exists() && !OUR_COURSES_DIR.mkdirs()) {
        LOG.error("Cannot flush courses cache. Can't create courses directory");
        return false;
    }
    if (!cacheFile.exists() && !cacheFile.createNewFile()) {
        LOG.error("Cannot flush courses cache. Can't create " + CACHE_NAME + " file");
        return false;
    }
    return true;
}
// Supposed to be called under progress: may block on network I/O via EduStepicConnector.
public List<CourseInfo> getCourses(boolean force) {
    // Use the on-disk cache unless a refresh is forced.
    if (OUR_COURSES_DIR.exists() && !force) {
        myCourses = getCoursesFromCache();
    }
    if (force || myCourses.isEmpty()) {
        myCourses = execCancelable(() -> EduStepicConnector.getCourses(StepicUpdateSettings.getInstance().getUser()));
        flushCache(myCourses);
    }
    // Fall back to the bundled introduction course when nothing usable was loaded.
    if (myCourses.isEmpty() || (myCourses.size() == 1 && myCourses.contains(CourseInfo.INVALID_COURSE))) {
        myCourses = Collections.singletonList(getBundledIntro());
    }
    sortCourses(myCourses);
    return myCourses;
}
/**
 * Sorts {@code result} in place so the currently selected course (if any)
 * comes first and non-adaptive courses precede adaptive ones; otherwise the
 * relative order is kept (the sort is stable).
 */
public void sortCourses(List<CourseInfo> result) {
    Collections.sort(result, (c1, c2) -> {
        if (mySelectedCourseInfo != null) {
            final boolean firstSelected = mySelectedCourseInfo.equals(c1);
            final boolean secondSelected = mySelectedCourseInfo.equals(c2);
            // Fix: the old code returned -1 whenever c1 matched, even when c2
            // matched too — a comparator-contract violation that TimSort can
            // reject with "Comparison method violates its general contract!".
            if (firstSelected != secondSelected) {
                return firstSelected ? -1 : 1;
            }
        }
        // false < true, so non-adaptive courses sort before adaptive ones.
        return Boolean.compare(c1.isAdaptive(), c2.isAdaptive());
    });
}
/**
 * Runs {@link #getCourses(boolean)} under a cancellable modal progress dialog.
 *
 * @return the loaded courses; on any runtime failure (including cancellation
 *         surfacing as an exception) a singleton list containing
 *         {@code CourseInfo.INVALID_COURSE}
 */
@NotNull
public List<CourseInfo> getCoursesUnderProgress(boolean force, @NotNull final String progressTitle, @NotNull final Project project) {
    try {
        return ProgressManager.getInstance()
            .runProcessWithProgressSynchronously(() -> {
                ProgressManager.getInstance().getProgressIndicator().setIndeterminate(true);
                return getCourses(force);
            }, progressTitle, true, project);
    }
    catch (RuntimeException e) {
        return Collections.singletonList(CourseInfo.INVALID_COURSE);
    }
}
/**
 * Subscribes {@code listener} to settings-validation state changes
 * (see {@link #fireStateChanged}).
 */
public void addSettingsStateListener(@NotNull SettingsListener listener) {
    myListeners.add(listener);
}
/**
 * Listener notified when the validation state of the generator settings changes.
 */
public interface SettingsListener {
    void stateChanged(ValidationResult result);
}
/**
 * Notifies all registered {@link SettingsListener}s, in registration order,
 * about a new validation state.
 */
public void fireStateChanged(ValidationResult result) {
    myListeners.forEach(listener -> listener.stateChanged(result));
}
/**
 * @return info for the bundled "Introduction to Python" course when its
 *         directory exists in the courses cache, otherwise null
 */
public static CourseInfo getBundledIntro() {
    final File introCourse = new File(OUR_COURSES_DIR, "Introduction to Python");
    return introCourse.exists() ? getCourseInfo(introCourse) : null;
}
/**
 * Reads the cached course list from {@link StudyProjectGenerator#CACHE_NAME}
 * (one json object per line). Read/parse errors are logged and whatever was
 * parsed so far is returned; a missing cache file yields an empty list.
 */
public static List<CourseInfo> getCoursesFromCache() {
    final List<CourseInfo> courses = new ArrayList<>();
    final File cacheFile = new File(OUR_COURSES_DIR, CACHE_NAME);
    if (!cacheFile.exists()) {
        return courses;
    }
    // Gson is configuration-only here; build it once instead of once per line
    // as the old loop did.
    final Gson gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES).create();
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(cacheFile), "UTF-8"))) {
        String line;
        while ((line = reader.readLine()) != null) {
            courses.add(gson.fromJson(line, CourseInfo.class));
        }
    }
    catch (IOException | JsonSyntaxException e) {
        // Covers the separately-caught FileNotFound/UnsupportedEncoding cases too.
        LOG.error(e.getMessage());
    }
    return courses;
}
/**
 * Adds a course from a zip archive to the known courses: unzips it into the
 * courses cache directory, registers it and renames the directory to the
 * course's declared name when they differ.
 *
 * @param zipFilePath path to the course zip archive
 * @return added course info or null if the course is invalid
 */
@Nullable
public CourseInfo addLocalCourse(String zipFilePath) {
    final File file = new File(zipFilePath);
    try {
        final String fileName = file.getName();
        // Fix: indexOf(".") returned -1 for extension-less file names, which made
        // substring() throw StringIndexOutOfBoundsException. Fall back to the
        // full name when there is no dot.
        final int dotIndex = fileName.indexOf('.');
        final String unzippedName = dotIndex >= 0 ? fileName.substring(0, dotIndex) : fileName;
        final File courseDir = new File(OUR_COURSES_DIR, unzippedName);
        ZipUtil.unzip(null, courseDir, file, null, null, true);
        final CourseInfo courseName = addCourse(myCourses, courseDir);
        flushCache(myCourses);
        if (courseName != null && !courseName.getName().equals(unzippedName)) {
            final File dest = new File(OUR_COURSES_DIR, courseName.getName());
            if (dest.exists()) {
                FileUtil.delete(dest);
            }
            //noinspection ResultOfMethodCallIgnored
            courseDir.renameTo(dest);
            //noinspection ResultOfMethodCallIgnored
            courseDir.delete();
        }
        return courseName;
    }
    catch (IOException e) {
        LOG.error("Failed to unzip course archive");
        LOG.error(e);
    }
    return null;
}
/**
 * Registers the course located in {@code courseDir} at the head of
 * {@code courses}.
 *
 * @param courses   list receiving the parsed course info
 * @param courseDir directory expected to contain exactly one course meta file
 * @return the added course info, or null when the course is invalid
 */
@Nullable
private static CourseInfo addCourse(List<CourseInfo> courses, File courseDir) {
    if (!courseDir.isDirectory()) {
        return null;
    }
    final File[] courseFiles = courseDir.listFiles((dir, name) -> name.equals(EduNames.COURSE_META_FILE));
    if (courseFiles == null || courseFiles.length != 1) {
        LOG.info("User tried to add course with more than one or without course files");
        return null;
    }
    final CourseInfo courseInfo = getCourseInfo(courseFiles[0]);
    if (courseInfo != null) {
        courses.add(0, courseInfo);
    }
    return courseInfo;
}
/**
 * Parses a course json meta file and extracts its name, description, language
 * and authors. Accepts either the meta file itself or a directory containing
 * exactly one meta file.
 *
 * @return information about the course or null if the course file is invalid
 */
@Nullable
private static CourseInfo getCourseInfo(File courseFile) {
    if (courseFile.isDirectory()) {
        File[] courseFiles = courseFile.listFiles((dir, name) -> name.equals(EduNames.COURSE_META_FILE));
        if (courseFiles == null || courseFiles.length != 1) {
            LOG.info("More than one or without course files");
            return null;
        }
        courseFile = courseFiles[0];
    }
    CourseInfo courseInfo = null;
    BufferedReader reader = null;
    try {
        if (courseFile.getName().equals(EduNames.COURSE_META_FILE)) {
            reader = new BufferedReader(new InputStreamReader(new FileInputStream(courseFile), "UTF-8"));
            JsonReader r = new JsonReader(reader);
            JsonParser parser = new JsonParser();
            JsonElement el = parser.parse(r);
            String courseName = el.getAsJsonObject().get(COURSE_NAME_ATTRIBUTE).getAsString();
            String courseDescription = el.getAsJsonObject().get(COURSE_DESCRIPTION).getAsString();
            JsonArray courseAuthors = el.getAsJsonObject().get(AUTHOR_ATTRIBUTE).getAsJsonArray();
            String language = el.getAsJsonObject().get(LANGUAGE_ATTRIBUTE).getAsString();
            courseInfo = new CourseInfo();
            courseInfo.setName(courseName);
            courseInfo.setDescription(courseDescription);
            courseInfo.setType("pycharm " + language);
            final ArrayList<StepicUser> authors = new ArrayList<>();
            for (JsonElement author : courseAuthors) {
                final JsonObject authorAsJsonObject = author.getAsJsonObject();
                final StepicUser stepicUser = new StepicUser();
                stepicUser.setFirstName(authorAsJsonObject.get("first_name").getAsString());
                stepicUser.setLastName(authorAsJsonObject.get("last_name").getAsString());
                authors.add(stepicUser);
            }
            courseInfo.setAuthors(authors);
        }
    }
    catch (Exception e) {
        // The user-visible error is reported by the UI; still record the cause
        // for diagnostics instead of swallowing it completely as before.
        LOG.info(e);
    }
    finally {
        StudyUtils.closeSilently(reader);
    }
    return courseInfo;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.client.console.reports;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.collections4.Transformer;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.syncope.client.console.SyncopeConsoleSession;
import org.apache.syncope.client.console.commons.Constants;
import org.apache.syncope.client.console.commons.DirectoryDataProvider;
import org.apache.syncope.client.console.commons.SortableDataProviderComparator;
import org.apache.syncope.client.console.pages.BasePage;
import org.apache.syncope.client.console.panels.DirectoryPanel;
import org.apache.syncope.client.console.panels.ModalPanel;
import org.apache.syncope.client.console.panels.search.SearchClause;
import org.apache.syncope.client.console.reports.ReportletDirectoryPanel.ReportletWrapper;
import org.apache.syncope.client.console.rest.ReportRestClient;
import org.apache.syncope.client.console.wicket.extensions.markup.html.repeater.data.table.ActionColumn;
import org.apache.syncope.client.console.wicket.markup.html.bootstrap.dialog.BaseModal;
import org.apache.syncope.client.console.wicket.markup.html.form.ActionLink;
import org.apache.syncope.client.console.wicket.markup.html.form.ActionLink.ActionType;
import org.apache.syncope.client.console.wicket.markup.html.form.ActionLinksPanel;
import org.apache.syncope.client.console.wizards.AjaxWizard;
import org.apache.syncope.common.lib.types.StandardEntitlement;
import org.apache.syncope.common.lib.SyncopeClientException;
import org.apache.syncope.common.lib.report.AbstractReportletConf;
import org.apache.syncope.common.lib.report.ReportletConf;
import org.apache.syncope.common.lib.search.AbstractFiqlSearchConditionBuilder;
import org.apache.syncope.common.lib.to.ReportTO;
import org.apache.wicket.PageReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.authroles.authorization.strategies.role.metadata.MetaDataRoleAuthorizationStrategy;
import org.apache.wicket.event.Broadcast;
import org.apache.wicket.event.IEvent;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.sort.SortOrder;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.model.StringResourceModel;
/**
 * Reportlets page: directory panel listing the reportlet configurations of a
 * single report, with clone / edit / delete actions.
 */
public class ReportletDirectoryPanel extends DirectoryPanel<
        ReportletWrapper, ReportletWrapper, DirectoryDataProvider<ReportletWrapper>, ReportRestClient>
        implements ModalPanel {

    private static final long serialVersionUID = 4984337552918213290L;

    /** Modal window hosting this panel; closed when an ExitEvent arrives. */
    private final BaseModal<ReportTO> baseModal;

    /** Key of the report whose reportlet configurations are managed here. */
    private final String report;

    protected ReportletDirectoryPanel(
            final BaseModal<ReportTO> baseModal, final String report, final PageReference pageRef) {

        super(BaseModal.CONTENT_ID, pageRef, false);
        disableCheckBoxes();

        this.baseModal = baseModal;
        this.report = report;
        this.restClient = new ReportRestClient();

        enableExitButton();

        this.addNewItemPanelBuilder(new ReportletWizardBuilder(report, new ReportletWrapper(), pageRef), true);
        MetaDataRoleAuthorizationStrategy.authorize(addAjaxLink, ENABLE, StandardEntitlement.REPORT_UPDATE);

        initResultTable();
    }

    @Override
    protected List<IColumn<ReportletWrapper, String>> getColumns() {
        final List<IColumn<ReportletWrapper, String>> columns = new ArrayList<>();

        columns.add(new PropertyColumn<ReportletWrapper, String>(
                new StringResourceModel("reportlet", this, null), "name", "name"));

        // Shows the fully-qualified class name of each reportlet configuration.
        columns.add(new AbstractColumn<ReportletWrapper, String>(
                new StringResourceModel("configuration", this, null)) {

            private static final long serialVersionUID = -4008579357070833846L;

            @Override
            public void populateItem(
                    final Item<ICellPopulator<ReportletWrapper>> cellItem,
                    final String componentId,
                    final IModel<ReportletWrapper> rowModel) {
                cellItem.add(new Label(componentId, rowModel.getObject().getConf().getClass().getName()));
            }
        });

        columns.add(new ActionColumn<ReportletWrapper, String>(new ResourceModel("actions")) {

            private static final long serialVersionUID = 2054811145491901166L;

            @Override
            public ActionLinksPanel<ReportletWrapper> getActions(final String componentId,
                    final IModel<ReportletWrapper> model) {

                final ActionLinksPanel<ReportletWrapper> panel = ActionLinksPanel.<ReportletWrapper>builder().
                        add(new ActionLink<ReportletWrapper>() {

                            private static final long serialVersionUID = -3722207913631435501L;

                            @Override
                            public void onClick(final AjaxRequestTarget target, final ReportletWrapper ignore) {
                                // Clone: copy the configuration, clear its name and
                                // open the wizard on the fresh wrapper.
                                AbstractReportletConf clone = SerializationUtils.clone(model.getObject().getConf());
                                clone.setName(null);

                                send(ReportletDirectoryPanel.this, Broadcast.EXACT,
                                        new AjaxWizard.EditItemActionEvent<>(
                                                new ReportletWrapper().setConf(clone),
                                                target));
                            }
                        }, ActionLink.ActionType.CLONE, StandardEntitlement.REPORT_UPDATE).
                        add(new ActionLink<ReportletWrapper>() {

                            private static final long serialVersionUID = -3722207913631435501L;

                            @Override
                            public void onClick(final AjaxRequestTarget target, final ReportletWrapper ignore) {
                                send(ReportletDirectoryPanel.this, Broadcast.EXACT,
                                        new AjaxWizard.EditItemActionEvent<>(model.getObject(), target));
                            }
                        }, ActionLink.ActionType.EDIT, StandardEntitlement.REPORT_UPDATE).
                        add(new ActionLink<ReportletWrapper>() {

                            private static final long serialVersionUID = -3722207913631435501L;

                            @Override
                            public void onClick(final AjaxRequestTarget target, final ReportletWrapper ignore) {
                                // Delete: re-read the report, drop the reportlet by
                                // name and push the update back to the server.
                                final ReportletConf reportlet = model.getObject().getConf();
                                try {
                                    final ReportTO actual = restClient.read(report);
                                    CollectionUtils.filter(actual.getReportletConfs(), new Predicate<ReportletConf>() {

                                        @Override
                                        public boolean evaluate(final ReportletConf object) {
                                            return !object.getName().equals(reportlet.getName());
                                        }
                                    });
                                    restClient.update(actual);

                                    SyncopeConsoleSession.get().info(getString(Constants.OPERATION_SUCCEEDED));
                                    customActionOnFinishCallback(target);
                                } catch (SyncopeClientException e) {
                                    LOG.error("While deleting {}", reportlet.getName(), e);
                                    SyncopeConsoleSession.get().error(StringUtils.isBlank(e.getMessage())
                                            ? e.getClass().getName() : e.getMessage());
                                }
                                ((BasePage) pageRef.getPage()).getNotificationPanel().refresh(target);
                            }
                        }, ActionLink.ActionType.DELETE, StandardEntitlement.REPORT_UPDATE).build(componentId);

                return panel;
            }

            @Override
            public ActionLinksPanel<ReportletWrapper> getHeader(final String componentId) {
                final ActionLinksPanel.Builder<ReportletWrapper> panel = ActionLinksPanel.builder();

                return panel.add(new ActionLink<ReportletWrapper>() {

                    private static final long serialVersionUID = -7978723352517770644L;

                    @Override
                    public void onClick(final AjaxRequestTarget target, final ReportletWrapper ignore) {
                        if (target != null) {
                            customActionOnFinishCallback(target);
                        }
                    }
                    // NOTE(review): RELOAD is guarded by TASK_LIST — confirm a
                    // report-related entitlement was not intended instead.
                }, ActionLink.ActionType.RELOAD, StandardEntitlement.TASK_LIST).build(componentId);
            }
        });

        return columns;
    }

    @Override
    protected Collection<ActionType> getBulkActions() {
        final List<ActionType> bulkActions = new ArrayList<>();
        bulkActions.add(ActionType.DELETE);
        return bulkActions;
    }

    @Override
    protected ReportDataProvider dataProvider() {
        return new ReportDataProvider(rows);
    }

    @Override
    protected String paginatorRowsKey() {
        return Constants.PREF_REPORTLET_TASKS_PAGINATOR_ROWS;
    }

    /**
     * Data provider backed by the report's current reportlet configurations,
     * fetched from the REST layer on every page request.
     */
    protected class ReportDataProvider extends DirectoryDataProvider<ReportletWrapper> {

        private static final long serialVersionUID = 4725679400450513556L;

        private final SortableDataProviderComparator<ReportletWrapper> comparator;

        public ReportDataProvider(final int paginatorRows) {
            super(paginatorRows);

            // Default sorting
            setSort("name", SortOrder.DESCENDING);
            comparator = new SortableDataProviderComparator<>(this);
        }

        @Override
        public Iterator<ReportletWrapper> iterator(final long first, final long count) {
            final ReportTO actual = restClient.read(report);

            final ArrayList<ReportletWrapper> reportlets = CollectionUtils.collect(
                    actual.getReportletConfs(),
                    new Transformer<AbstractReportletConf, ReportletWrapper>() {

                        @Override
                        public ReportletWrapper transform(final AbstractReportletConf input) {
                            return new ReportletWrapper(input.getName()).setName(input.getName()).setConf(input);
                        }
                    }, new ArrayList<ReportletWrapper>());

            Collections.sort(reportlets, comparator);
            // Clamp the upper bound so a partial last page cannot trigger an
            // IndexOutOfBoundsException from subList().
            return reportlets.subList(
                    (int) first, (int) Math.min(first + count, reportlets.size())).iterator();
        }

        @Override
        public long size() {
            final ReportTO actual = restClient.read(report);
            return actual.getReportletConfs().size();
        }

        @Override
        public IModel<ReportletWrapper> model(final ReportletWrapper object) {
            return new CompoundPropertyModel<>(object);
        }
    }

    /**
     * Serializable view over a reportlet configuration, tracking the original
     * name (to detect renames) plus per-field search-condition state.
     */
    public static class ReportletWrapper implements Serializable {

        private static final long serialVersionUID = 2472755929742424558L;

        // Name the reportlet had when loaded; null means the wrapper is new.
        // Assigned only in the constructor, hence final.
        private final String oldname;

        private String name;

        private AbstractReportletConf conf;

        private final Map<String, Pair<AbstractFiqlSearchConditionBuilder, List<SearchClause>>> scondWrapper;

        public ReportletWrapper() {
            this(null);
        }

        public ReportletWrapper(final String name) {
            this.oldname = name;
            this.scondWrapper = new HashMap<>();
        }

        public boolean isNew() {
            return oldname == null;
        }

        public String getOldName() {
            return this.oldname;
        }

        public String getName() {
            return this.name;
        }

        public ReportletWrapper setName(final String name) {
            this.name = name;
            return this;
        }

        public AbstractReportletConf getConf() {
            return conf;
        }

        public ReportletWrapper setConf(final AbstractReportletConf conf) {
            this.conf = conf;
            return this;
        }

        public Map<String, Pair<AbstractFiqlSearchConditionBuilder, List<SearchClause>>> getSCondWrapper() {
            return scondWrapper;
        }
    }

    @Override
    public void onEvent(final IEvent<?> event) {
        super.onEvent(event);

        // Close the hosting modal when the wizard signals exit.
        if (event.getPayload() instanceof ExitEvent && modal != null) {
            final AjaxRequestTarget target = ExitEvent.class.cast(event.getPayload()).getTarget();
            baseModal.show(false);
            baseModal.close(target);
        }
    }
}
| |
/*
* $Id: LuaStateErrorTest.java 121 2012-01-22 01:40:14Z andre@naef.com $
* See LICENSE.txt for license terms.
*/
package com.naef.jnlua.test;
import com.naef.jnlua.JavaFunction;
import com.naef.jnlua.LuaRuntimeException;
import com.naef.jnlua.LuaState;
import com.naef.jnlua.LuaValueProxy;
import com.naef.jnlua.test.fixture.TestObject;
import org.junit.Test;
import java.io.*;
import static org.junit.Assert.assertEquals;
/**
* Throws illegal arguments at the Lua state for error testing.
*/
public class LuaStateErrorTest extends AbstractLuaTest {
// -- Static
private static final int HIGH = 10;
private static final int LOW = -10;
private static final int EXTREMELY_HIGH = Integer.MAX_VALUE / 8;
private static final int EXTREMELY_LOW = Integer.MIN_VALUE / 8;
/**
 * setClassLoader(ClassLoader) with null class loader.
 */
@Test(expected = NullPointerException.class)
public void setNullClassLoader() {
luaState.setClassLoader(null);
}
/**
* setJavaReflector(JavaReflector) with null Java reflector.
*/
@Test(expected = NullPointerException.class)
public void setNullJavaReflector() {
luaState.setJavaReflector(null);
}
/**
* getMetamethod(Object, Metamethod) with null metamethod.
*/
@Test(expected = NullPointerException.class)
public void testNullGetMetamethod() {
luaState.getMetamethod(null, null);
}
/**
* setConverter(Converter) with null converter.
*/
@Test(expected = NullPointerException.class)
public void setNullConverter() {
luaState.setConverter(null);
}
/**
* openLib(Library) with null library.
*/
@Test(expected = NullPointerException.class)
public void testNullOpenLib() {
luaState.openLib(null);
}
/**
* Tests invoking a method after the Lua state has been closed.
*/
@Test(expected = IllegalStateException.class)
public void testClosed() {
luaState.close();
luaState.pushInteger(1);
}
/**
* Tests closing the Lua state while running.
*/
@Test(expected = LuaRuntimeException.class)
public void testIllegalClose() {
luaState.pushJavaObject(new JavaFunction() {
@Override
public int invoke(LuaState luaState) {
luaState.close();
return 0;
}
});
luaState.call(0, 0);
}
/**
* Off-index (low)
*/
@Test(expected = IllegalArgumentException.class)
public void testLowIndex() {
luaState.toNumber(LOW);
}
/**
* Off-index (extremely low)
*/
@Test(expected = IllegalArgumentException.class)
public void testExtremelyLowIndex() {
luaState.toNumber(EXTREMELY_LOW);
}
/**
* Off-index (high)
*/
@Test(expected = IllegalArgumentException.class)
public void testHighIndex() {
luaState.toNumber(HIGH);
}
/**
* Off-index (extremely high)
*/
@Test(expected = IllegalArgumentException.class)
public void testExtremelyHighIndex() {
luaState.toNumber(EXTREMELY_HIGH);
}
/**
* gc(GcAction, int) null action
*/
@Test(expected = NullPointerException.class)
public void testNullGc() {
luaState.gc(null, 0);
}
/**
* register(JavaFunction[]) with null function.
*/
@Test(expected = NullPointerException.class)
public void testNullFunctionRegister() {
luaState.register(null);
}
/**
* register(String, JavaFunction[]) with null string.
*/
@Test(expected = NullPointerException.class)
public void testNullNameRegister() {
luaState.register(null, new JavaFunction[0]);
}
/**
* register(String, JavaFunction[]) with null functions.
*/
@Test(expected = NullPointerException.class)
public void testNullFunctionsRegister() {
luaState.register("", null);
}
/**
 * load(InputStream, String) with null input stream.
 *
 * NOTE(review): the @Test annotation below is commented out, so this test is
 * effectively disabled and never runs. Confirm whether that is intentional
 * (e.g. the native call misbehaves instead of throwing NullPointerException)
 * before re-enabling it.
 */
//@Test(expected = NullPointerException.class)
public void testNullStreamLoad() throws Exception {
luaState.load(null, "=testNullStreamLoad", "bt");
}
/**
* load(InputStream, String) with null string.
*/
@Test(expected = NullPointerException.class)
public void testNullChunkLoad1() throws Exception {
luaState.load(new ByteArrayInputStream(new byte[0]), null, "t");
}
/**
* load(String, String) with null string 1.
*/
@Test(expected = NullPointerException.class)
public void testNullStringLoad() throws Exception {
luaState.load(null, "");
}
/**
* load(String, String) with null string 2.
*/
@Test(expected = NullPointerException.class)
public void testNullChunkLoad2() throws Exception {
luaState.load("", null);
}
/**
 * load(InputStream, String) with input stream throwing IO exception.
 */
@Test(expected = IOException.class)
public void testIoExceptionLoad() throws Exception {
luaState.load(new InputStream() {
@Override
public int read() throws IOException {
throw new IOException();
}
}, "=testIoExceptionLoad", "bt");
}
/**
* dump(OutputStream) with null output stream.
*/
@Test(expected = NullPointerException.class)
public void testNullDump() throws Exception {
luaState.load("return 0", "nullDump");
luaState.dump(null);
}
/**
* dump(OutputStream) with an output stream throwing a IO exception.
*/
@Test(expected = IOException.class)
public void testIoExceptionDump() throws Exception {
luaState.load("return 0", "ioExceptionDump");
luaState.dump(new OutputStream() {
@Override
public void write(int b) throws IOException {
throw new IOException();
}
});
}
/**
* dump(OutputStream) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowDump() throws Exception {
luaState.dump(new ByteArrayOutputStream());
}
/**
* Call(int, int) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowCall() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.call(1, 1);
}
/**
* Call(int, int) with an extremely high number of returns.
*/
@Test(expected = IllegalStateException.class)
public void testOverflowCall() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.pushString("");
luaState.call(1, Integer.MAX_VALUE);
}
/**
* Call(int, int) with an illegal number of arguments.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalCall1() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.call(-1, 1);
}
/**
* Call(int, int) with an illegal number of returns.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalCall2() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.pushString("");
luaState.call(1, -2);
assertEquals(0, luaState.getTop());
}
/**
* getGlobal(String) with null.
*/
@Test(expected = NullPointerException.class)
public void testNullGetGlobal() {
luaState.getGlobal(null);
}
/**
* setGlobal(String) with null.
*/
@Test(expected = NullPointerException.class)
public void testNullSetGlobal() {
luaState.pushNumber(0.0);
luaState.setGlobal(null);
}
/**
* setGlobal(String) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowSetGlobal() {
luaState.setGlobal("global");
}
/**
* setGlobal(String) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testIllegalSetGlobal() {
luaState.setGlobal("illegal");
}
/**
* pushJavaFunction(JavaFunction) with null argument.
*/
@Test(expected = NullPointerException.class)
public void testNullPushJavaFunction() {
luaState.pushJavaFunction(null);
}
/**
 * pushNumber(Double) until stack overflow.
 */
@Test(expected = IllegalStateException.class)
public void testStackOverflow() {
    // Keep pushing until the Lua stack refuses to grow any further; the
    // expected IllegalStateException terminates the loop long before the
    // counter could reach its bound.
    int pushes = 0;
    while (pushes < Integer.MAX_VALUE) {
        luaState.pushNumber(0.0);
        pushes++;
    }
}
/**
* lessThan(int, int) with illegal types.
*/
@Test(expected = LuaRuntimeException.class)
public void testIllegalLessThan() {
luaState.pushNil();
luaState.pushNumber(0.0);
luaState.lessThan(1, 2);
}
/**
* length(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalLength() {
luaState.length(getIllegalIndex());
}
/**
* rawEqual(int, int) with illegal indexes.
*/
@Test
public void testIllegalRawEqual() {
luaState.rawEqual(getIllegalIndex(), getIllegalIndex());
}
/**
* toInteger(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToInteger() {
luaState.toInteger(getIllegalIndex());
}
/**
* toJavaFunction(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToIJavaFunction() {
luaState.toJavaFunction(getIllegalIndex());
}
/**
* toJavaObject(int) with illegal index and LuaValueProxy type.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToIJavaObject() {
luaState.toJavaObject(getIllegalIndex(), LuaValueProxy.class);
}
/**
* toJavaObjectRaw(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToIJavaObjectRaw() {
luaState.toJavaObjectRaw(getIllegalIndex());
}
/**
* toNumber(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToNumber() {
luaState.toNumber(getIllegalIndex());
}
/**
* toNumber(int) with maximum index.
*/
@Test(expected = IllegalArgumentException.class)
public void testMaxToNumber() {
luaState.toNumber(Integer.MAX_VALUE);
}
/**
* toNumber(int) with minimum index.
*/
@Test(expected = IllegalArgumentException.class)
public void testMinToNumbern() {
luaState.toNumber(Integer.MIN_VALUE);
}
/**
* toPointer(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToPointer() {
luaState.toPointer(getIllegalIndex());
}
/**
* toString(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalToString() {
luaState.toString(getIllegalIndex());
}
/**
* concat(int) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowConcat1() {
luaState.concat(1);
}
/**
* concat(int) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowConcat2() {
luaState.pushString("");
luaState.pushString("");
luaState.concat(3);
}
/**
* concat(int) with an illegal number of arguments.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalConcat() {
luaState.concat(-1);
}
/**
* copy(int, int) with two illegal indexes.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalCopy1() {
luaState.copy(getIllegalIndex(), getIllegalIndex());
}
/**
* copy(int, int) with one illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalCopy2() {
luaState.pushInteger(1);
luaState.copy(1, getIllegalIndex());
}
/**
* insert(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalInsert() {
luaState.insert(getIllegalIndex());
}
/**
* pop(int) with insufficient arguments.
*/
@Test(expected = IllegalArgumentException.class)
public void testUnderflowPop() {
luaState.pop(1);
}
/**
* pop(int) with an illegal number of arguments.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalPop() {
luaState.pop(-1);
}
/**
* pushValue(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalPushValue() {
luaState.pushValue(getIllegalIndex());
}
/**
* remove(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRemove() {
luaState.remove(getIllegalIndex());
}
/**
* replace(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalReplace() {
luaState.replace(getIllegalIndex());
}
/**
* setTop(int) with an illegal argument.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetTop() {
luaState.setTop(-1);
}
// -- Table tests
/**
* getTable(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetTable1() {
luaState.pushString("");
luaState.getTable(getIllegalIndex());
}
/**
* getTable(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetTable2() {
luaState.pushNumber(0.0);
luaState.pushString("");
luaState.getTable(1);
}
/**
* getField(int, String) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetField1() {
luaState.getField(getIllegalIndex(), "");
}
/**
* getField(int, String) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetField2() {
luaState.pushNumber(0.0);
luaState.getField(1, "");
}
/**
* newTable(int, int) with negative record count.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalNewTable2() {
luaState.newTable(0, -1);
}
/**
* next(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalNext1() {
luaState.pushNil();
luaState.next(getIllegalIndex());
}
/**
* next(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalNext2() {
luaState.pushNumber(0.0);
luaState.pushNil();
luaState.next(1);
}
/**
* rawGet(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawGet1() {
luaState.rawGet(getIllegalIndex());
}
/**
* rawGet(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawGet2() {
luaState.pushNumber(0.0);
luaState.pushString("");
luaState.rawGet(1);
}
/**
* rawGet(int, int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawGet3() {
luaState.rawGet(getIllegalIndex(), 1);
}
/**
* rawGet(int, int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawGet4() {
luaState.pushNumber(0.0);
luaState.rawGet(1, 1);
}
/**
* rawSet(int) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowRawSet() {
luaState.newTable();
luaState.rawSet(1);
}
/**
 * rawSet(int) with nil index.
 *
 * NOTE(review): the first rawSet(1) is the call expected to throw
 * IllegalArgumentException (nil key), so the pushJavaObject/pushString/rawSet
 * statements after it are unreachable — confirm whether they were meant to be
 * a separate test case.
 */
@Test(expected = IllegalArgumentException.class)
public void testNilRawSet() {
luaState.newTable();
luaState.pushNil();
luaState.pushString("value");
luaState.rawSet(1);
luaState.pushJavaObject(new TestObject());
luaState.pushString("Java");
luaState.rawSet(1);
}
/**
* rawSet(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawSet1() {
luaState.pushString("key");
luaState.pushString("value");
luaState.rawSet(getIllegalIndex());
}
/**
* rawSet(int, int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawSet3() {
luaState.pushNumber(0.0);
luaState.pushString("value");
luaState.rawSet(1, 1);
}
/**
* setTable(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetTable() {
luaState.pushNil();
luaState.pushString("");
luaState.pushString("");
luaState.setTable(1);
}
/**
* setTable(int) with nil index.
*/
@Test(expected = IllegalArgumentException.class)
public void testNilSetTable() {
luaState.newTable();
luaState.pushNil();
luaState.pushString("");
luaState.setTable(1);
}
/**
* setTable(int) with insufficient arguments.
*/
@Test(expected = IllegalArgumentException.class)
public void testUnderflowSetTable() {
luaState.newTable();
luaState.setTable(1);
}
/**
* setField(int, String) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetField() {
luaState.pushNumber(0.0);
luaState.pushString("");
luaState.setField(1, "key");
}
/**
* setField(int, String) with null key.
*/
@Test(expected = NullPointerException.class)
public void testNullSetField() {
luaState.newTable();
luaState.pushString("value");
luaState.setField(1, null);
}
/**
 * rawSet(int, int) with invalid table.
 *
 * NOTE(review): this is an exact duplicate of testIllegalRawSet3 (same body,
 * different name) — one of the two can likely be removed.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalRawSet2() {
luaState.pushNumber(0.0);
luaState.pushString("value");
luaState.rawSet(1, 1);
}
/**
* next(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalNext() {
luaState.pushNumber(0.0);
luaState.pushNil();
luaState.next(1);
}
/**
* setMetaTable(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetMetaTable() {
luaState.newTable();
luaState.pushNumber(0.0);
luaState.setMetatable(1);
}
/**
* setFEnv(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetFEnv() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.pushNumber(0.0);
luaState.setFEnv(1);
}
/**
* setField(int, String) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetField1() {
luaState.pushString("");
luaState.setField(getIllegalIndex(), "key");
}
/**
* setField(int, String) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetField2() {
luaState.pushNumber(0.0);
luaState.pushString("");
luaState.setField(1, "key");
}
// -- Metatable tests
/**
* setMetatable(int) with invalid table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalSetMetatable() {
luaState.newTable();
luaState.pushNumber(0.0);
luaState.setMetatable(1);
}
// -- Thread tests
/**
* resume(int, int) with insufficient arguments.
*/
@Test(expected = IllegalStateException.class)
public void testUnderflowResume() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.newThread();
luaState.resume(1, 1);
}
/**
* resume(int, int) with invalid thread.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalResume1() {
luaState.pushNumber(0.0);
luaState.resume(1, 0);
}
/**
* resume(int, int) with an illegal number of returns.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalResume2() {
luaState.openLibs();
luaState.getGlobal("print");
luaState.newThread();
luaState.resume(1, -1);
}
/**
* yield(int) with no running thread.
*/
@Test(expected = IllegalStateException.class)
public void testIllegalYield() {
luaState.pushNumber(0.0);
luaState.yield(0);
}
/**
* ref(int) with illegal table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRef() {
luaState.pushNumber(0.0);
luaState.pushNumber(0.0);
luaState.ref(1);
}
/**
* unref(int, int) with illegal table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalUnref() {
luaState.pushNumber(0.0);
luaState.pushNumber(0.0);
luaState.unref(1, 1);
}
/**
* getProxy(int, Class<?>) with illegal table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalProxy() {
luaState.pushNumber(0.0);
luaState.getProxy(1, Runnable.class);
}
// -- Private methods
/**
 * Returns a randomly chosen stack index that is guaranteed to be invalid,
 * i.e. outside the range [-15, 15] covered by the tests' small stacks.
 */
private int getIllegalIndex() {
    // Half of the time use the full int range, otherwise a narrow band.
    int span = Math.random() >= 0.5 ? Integer.MAX_VALUE : 1000;
    while (true) {
        int candidate = Math.round((float) ((Math.random() - 0.5) * span));
        if (candidate < -15 || candidate > 15) {
            return candidate;
        }
    }
}
/**
* status(int) with illegal thread.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalStatus() {
luaState.pushNumber(0.0);
luaState.status(1);
}
/**
* yield(int) with no running thread.
*/
@Test(expected = LuaRuntimeException.class)
public void testIllegalYield1() {
luaState.register(new JavaFunction() {
@Override
public int invoke(LuaState luaState) {
return luaState.yield(0);
}
@Override
public String getName() {
return "yieldfunc";
}
});
luaState.load("return yieldfunc()", "=testIllegalYield1");
luaState.call(0, 0);
}
/**
* yield across C-call boundary.
*/
@Test(expected = LuaRuntimeException.class)
public void testIllegalYield2() {
JavaFunction yieldFunction = new JavaFunction() {
@Override
public int invoke(LuaState luaState) {
luaState.load("return coroutine.yield()", "=testIllegalYield2");
luaState.call(0, 0);
return 0;
}
};
luaState.pushJavaObject(yieldFunction);
luaState.newThread();
luaState.resume(1, 0);
}
/**
* yield(int) with insufficient arguments.
*/
@Test(expected = LuaRuntimeException.class)
public void testUnderflowYield() {
luaState.register(new JavaFunction() {
@Override
public int invoke(LuaState luaState) {
return luaState.yield(1);
}
@Override
public String getName() {
return "yieldfunc";
}
});
luaState.load("yieldfunc()", "=testUnderflowYield");
luaState.newThread();
luaState.resume(1, 0);
}
// -- Reference tests
/**
* ref(int) with illegal index.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRef1() {
luaState.pushNumber(0.0);
luaState.ref(getIllegalIndex());
}
/**
* ref(int) with illegal table.
*/
@Test(expected = IllegalArgumentException.class)
public void testIllegalRef2() {
luaState.pushNumber(0.0);
luaState.pushNumber(0.0);
luaState.ref(1);
}
/**
 * unref(int, int) with an illegal (out-of-range) table index.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalUnref1() {
    luaState.newTable();
    luaState.pushNumber(0.0);
    int reference = luaState.ref(1); // obtain a valid reference first
    luaState.unref(getIllegalIndex(), reference);
}
/**
 * unref(int, int) with an index that does not hold a table.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalUnref2() {
    luaState.pushNumber(0.0); // slot 1: a number where a table is required
    luaState.pushNumber(0.0);
    luaState.unref(1, 1);
}
// -- Optimization tests
/**
 * tableSize(int) with an index that does not hold a table.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalTableSize1() {
    luaState.pushNumber(0.0);
    luaState.tableSize(1);
}
/**
 * tableSize(int) with an illegal index (empty stack).
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalTableSize2() {
    luaState.tableSize(1);
}
/**
 * tableMove(int, int, int, int) with an illegal table index.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalTableMove1() {
    luaState.tableMove(getIllegalIndex(), 1, 1, 0);
}
/**
 * tableMove(int, int, int, int) with an illegal (negative) count.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalTableMove2() {
    luaState.newTable();
    luaState.tableMove(1, 1, 1, -1);
}
// -- Argument checking tests
/**
 * checkArg(int, boolean, String) with a false condition raises a Lua
 * argument error.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckArg() {
    luaState.pushBoolean(false);
    luaState.checkArg(1, false, "");
}
/**
 * checkInteger(int) with an argument that is not a number.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckInteger1() {
    luaState.pushBoolean(false);
    luaState.checkInteger(1);
}
/**
 * checkInteger(int, int) with an argument that is not a number; the
 * default applies only to absent arguments, not wrongly-typed ones.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckInteger2() {
    luaState.pushBoolean(false);
    luaState.checkInteger(1, 2);
}
/**
 * checkJavaObject(int, Class) with an argument not convertible to the
 * requested type.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckJavaObject1() {
    luaState.pushBoolean(false);
    luaState.checkJavaObject(1, Integer.class);
}
/**
 * checkJavaObject(int, Class, Object) with an argument not convertible to
 * the requested type; the default applies only to absent arguments.
 */
// Renamed from testIllegalCheckJavaFunction2: the method exercises
// checkJavaObject (see testIllegalCheckJavaObject1), not a Java function.
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckJavaObject2() {
    luaState.pushBoolean(false);
    luaState.checkJavaObject(1, Integer.class, Integer.valueOf(0));
}
/**
 * checkNumber(int) with an argument that is not a number.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckNumber1() {
    luaState.pushBoolean(false);
    luaState.checkNumber(1);
}
/**
 * checkNumber(int, double) with an argument that is not a number; the
 * default applies only to absent arguments, not wrongly-typed ones.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckNumber2() {
    luaState.pushBoolean(false);
    luaState.checkNumber(1, 2.0);
}
/**
 * checkOption(int, String[]) with a null options array.
 */
@Test(expected = NullPointerException.class)
public void testNullCheckOption1() {
    luaState.pushInteger(1);
    luaState.checkOption(1, null);
}
/**
 * checkOption(int, String[], String) with a null options array.
 */
@Test(expected = NullPointerException.class)
public void testNullCheckOption2() {
    luaState.pushInteger(1);
    luaState.checkOption(1, null, "");
}
/**
 * checkOption(int, String[]) with an argument that is not one of the
 * accepted options.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckOption1() {
    luaState.pushInteger(1); // a number, not the string "test"
    luaState.checkOption(1, new String[]{"test"});
}
/**
 * checkOption(int, String[], String) with an argument that is not one of
 * the accepted options; the default applies only to absent arguments.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckOption2() {
    luaState.pushInteger(1); // a number, not the string "test"
    luaState.checkOption(1, new String[]{"test"}, "test");
}
/**
 * checkOption(int, String[], String) where the argument is absent and the
 * default option is itself not in the options array.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckOption3() {
    luaState.checkOption(1, new String[]{"test"}, "");
}
/**
 * checkString(int) with an argument that is not a string.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckString1() {
    luaState.pushBoolean(false);
    luaState.checkString(1);
}
/**
 * checkString(int, String) with an argument that is not a string; the
 * default applies only to absent arguments, not wrongly-typed ones.
 */
@Test(expected = LuaRuntimeException.class)
public void testIllegalCheckString2() {
    luaState.pushBoolean(false);
    luaState.checkString(1, "");
}
// -- Proxy tests
/**
 * getProxy(int, Class[]) with a null element in the interfaces array.
 */
@Test(expected = NullPointerException.class)
public void testNullGetProxy() {
    luaState.newTable();
    luaState.getProxy(1, new Class<?>[]{null});
}
/**
 * getProxy(int) with an illegal (out-of-range) index.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetProxy1() {
    luaState.getProxy(getIllegalIndex());
}
/**
 * getProxy(int, Class&lt;?&gt;) with an illegal (out-of-range) index.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetProxy2() {
    luaState.getProxy(getIllegalIndex(), Runnable.class);
}
/**
 * getProxy(int, Class&lt;?&gt;) with an index that does not hold a table.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetProxy3() {
    luaState.pushNumber(0.0);
    luaState.getProxy(1, Runnable.class);
}
/**
 * getProxy(int, Class&lt;?&gt;[]) with an illegal (out-of-range) index.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetProxy4() {
    luaState.getProxy(getIllegalIndex(), new Class<?>[]{Runnable.class});
}
/**
 * getProxy(int, Class&lt;?&gt;[]) with an index that does not hold a table.
 */
@Test(expected = IllegalArgumentException.class)
public void testIllegalGetProxy5() {
    luaState.pushNumber(0.0);
    luaState.getProxy(1, new Class<?>[]{Runnable.class});
}
}
| |
package com.github.twoleds.configuration;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Streaming parser for a simple nginx-like configuration syntax:
 * {@code name value;} entries, {@code name value { ... }} nested blocks,
 * {@code #}-comments, and quoted values with C-style escapes.
 *
 * <p>Not thread-safe; a parser instance is intended for a single
 * {@link #parse()} call over one stream.</p>
 */
public class ConfigurationParser implements AutoCloseable {

    /** Underlying character source, wrapped for buffered single-character reads. */
    private final Reader reader;

    /** Accumulates characters of the name or value token currently being scanned. */
    private final StringBuilder buffer;

    /** Current scanner state. */
    private State state;

    /** 1-based column of the character being processed (for error messages). */
    private int column;

    /** 1-based line of the character being processed (for error messages). */
    private int line;

    public ConfigurationParser(Reader reader) {
        this.reader = new BufferedReader(reader);
        this.buffer = new StringBuilder();
        this.state = State.START;
        this.column = 1;
        this.line = 1;
    }

    /**
     * Closes the underlying reader.
     *
     * @throws ConfigurationException if closing the reader fails
     */
    @Override
    public void close() throws ConfigurationException {
        try {
            this.reader.close();
        } catch (IOException e) {
            throw new ConfigurationException("An I/O error occurred.", e);
        }
    }

    /**
     * Parses the whole stream and returns the unnamed root node.
     *
     * @throws ConfigurationException on I/O failure or a syntax error
     */
    public Configuration parse() throws ConfigurationException {
        return this.parse("", "");
    }

    /**
     * Parses one nesting level; {@code name}/{@code value} identify the
     * enclosing block. Returns on {@code '}'} or at end of stream.
     */
    private Configuration parse(String name, String value) throws ConfigurationException {
        String tmpName = "";
        String tmpValue = "";
        List<Configuration> tmpChildren = new ArrayList<>();
        try {
            for (int c = this.reader.read(); c >= 0; c = this.reader.read()) {
                switch (this.state) {
                    case COMMENT:
                        // Skip everything up to end of line.
                        if (c == '\n') {
                            this.state = State.START;
                        }
                        break;
                    case NAME:
                        if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '_')) {
                            this.buffer.append((char) c);
                            break;
                        }
                        if ((c == ' ') || (c == '\t')) {
                            tmpName = this.buffer.toString();
                            // BUGFIX: clear the value left over from a previous
                            // sibling entry; otherwise "a \"x\"; b { ... }" gave
                            // block b the stale value "x" instead of "".
                            tmpValue = "";
                            this.buffer.setLength(0);
                            this.state = State.VALUE_START;
                            break;
                        }
                        throw new ConfigurationException(
                            String.format(
                                "An invalid character \"%c\" on line %d at column %d.",
                                c, this.line, this.column
                            )
                        );
                    case START:
                        if (c == ' ' || c == '\t' || c == '\r' || c == '\n') {
                            break;
                        }
                        if ((c == '#')) {
                            this.state = State.COMMENT;
                            break;
                        }
                        if ((c == '}')) {
                            // End of the current nesting level.
                            return new Configuration(name, value, tmpChildren);
                        }
                        if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_')) {
                            this.state = State.NAME;
                            this.buffer.setLength(0);
                            this.buffer.append((char) c);
                            break;
                        }
                        throw new ConfigurationException(
                            String.format(
                                "An invalid character \"%c\" on line %d at column %d.",
                                c, this.line, this.column
                            )
                        );
                    case VALUE:
                        // Inside a quoted value.
                        if ((c == '"')) {
                            tmpValue = this.buffer.toString();
                            this.buffer.setLength(0);
                            this.state = State.VALUE_END;
                            break;
                        }
                        if (c == '\\') {
                            this.state = State.VALUE_ESCAPE;
                            break;
                        }
                        this.buffer.append((char) c);
                        break;
                    case VALUE_DIRECT:
                        // Inside an unquoted (bare) value.
                        if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-') || (c == '.') || (c == '_')) {
                            this.buffer.append((char) c);
                            break;
                        }
                        if (c == ';') {
                            tmpValue = this.buffer.toString();
                            this.buffer.setLength(0);
                            tmpChildren.add(new Configuration(tmpName, tmpValue, null));
                            this.state = State.START;
                            break;
                        }
                        if (c == '{') {
                            tmpValue = this.buffer.toString();
                            this.buffer.setLength(0);
                            this.state = State.START;
                            tmpChildren.add(this.parse(tmpName, tmpValue));
                            break;
                        }
                        if (c == ' ' || c == '\t' || c == '\r' || c == '\n') {
                            tmpValue = this.buffer.toString();
                            this.buffer.setLength(0);
                            this.state = State.VALUE_END;
                            break;
                        }
                        throw new ConfigurationException(
                            String.format(
                                "An invalid character \"%c\" on line %d at column %d.",
                                c, this.line, this.column
                            )
                        );
                    case VALUE_END:
                        // Value complete; expect ';' (leaf) or '{' (block).
                        if ((c == ' ') || (c == '\t') || (c == '\r') || (c == '\n')) {
                            break;
                        }
                        if (c == '{') {
                            this.state = State.START;
                            tmpChildren.add(this.parse(tmpName, tmpValue));
                            break;
                        }
                        if ((c == ';')) {
                            tmpChildren.add(new Configuration(tmpName, tmpValue, null));
                            this.state = State.START;
                            break;
                        }
                        throw new ConfigurationException(
                            String.format(
                                "An invalid character \"%c\" on line %d at column %d.",
                                c, this.line, this.column
                            )
                        );
                    case VALUE_ESCAPE:
                        // Character after a backslash inside a quoted value.
                        switch ((char) c) {
                            case '0':
                                this.buffer.append('\0');
                                break;
                            case '"':
                                this.buffer.append('"');
                                break;
                            case '\\':
                                this.buffer.append('\\');
                                break;
                            case 'b':
                                this.buffer.append('\b');
                                break;
                            case 'f':
                                this.buffer.append('\f');
                                break;
                            case 'n':
                                this.buffer.append('\n');
                                break;
                            case 'r':
                                this.buffer.append('\r');
                                break;
                            case 't':
                                this.buffer.append('\t');
                                break;
                            default:
                                throw new ConfigurationException(
                                    String.format(
                                        "An invalid character \"%c\" on line %d at column %d.",
                                        c, this.line, this.column
                                    )
                                );
                        }
                        this.state = State.VALUE;
                        break;
                    case VALUE_START:
                        // Between the name and its value.
                        if ((c == ' ') || (c == '\t')) {
                            break;
                        }
                        if ((c == '"')) {
                            this.state = State.VALUE;
                            break;
                        }
                        if ((c == '{')) {
                            // Block without a value, e.g. "name { ... }".
                            this.state = State.START;
                            tmpChildren.add(this.parse(tmpName, tmpValue));
                            break;
                        }
                        if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-') || (c == '.') || (c == '_')) {
                            this.state = State.VALUE_DIRECT;
                            this.buffer.setLength(0);
                            this.buffer.append((char) c);
                            break;
                        }
                        throw new ConfigurationException(
                            String.format(
                                "An invalid character \"%c\" on line %d at column %d.",
                                c, this.line, this.column
                            )
                        );
                }
                // Maintain the position counters shared across recursion levels.
                if (c == '\n') {
                    this.column = 1;
                    this.line++;
                } else {
                    this.column++;
                }
            }
        } catch (IOException ex) {
            throw new ConfigurationException(
                String.format("An I/O error occurred on line %d at column %d.", this.line, this.column), ex
            );
        }
        // End of stream: return whatever was collected at this level.
        return new Configuration(name, value, tmpChildren);
    }

    /** Scanner states of the hand-written state machine. */
    private enum State {
        START,
        COMMENT,
        NAME,
        VALUE,
        VALUE_DIRECT,
        VALUE_START,
        VALUE_END,
        VALUE_ESCAPE,
    }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
/**
 * Request object for method compute.regionOperations.get. Retrieves the specified region-specific
 * Operations resource.
 *
 * <p>GAPIC-generated code: immutable value object plus a mutable {@link Builder}. Do not edit by
 * hand beyond regeneration-safe changes.
 */
@Generated("by GAPIC")
@BetaApi
public final class GetRegionOperationHttpRequest implements ApiMessage {
    private final String access_token;
    private final String callback;
    private final String fields;
    private final String key;
    private final String operation;
    private final String prettyPrint;
    private final String quotaUser;
    private final String userIp;

    // All-null instance used as the shared default; see DEFAULT_INSTANCE below.
    private GetRegionOperationHttpRequest() {
        this.access_token = null;
        this.callback = null;
        this.fields = null;
        this.key = null;
        this.operation = null;
        this.prettyPrint = null;
        this.quotaUser = null;
        this.userIp = null;
    }

    // Full constructor, reachable only through Builder.build().
    private GetRegionOperationHttpRequest(
        String access_token,
        String callback,
        String fields,
        String key,
        String operation,
        String prettyPrint,
        String quotaUser,
        String userIp) {
        this.access_token = access_token;
        this.callback = callback;
        this.fields = fields;
        this.key = key;
        this.operation = operation;
        this.prettyPrint = prettyPrint;
        this.quotaUser = quotaUser;
        this.userIp = userIp;
    }

    /** Reflective field access by wire name; returns null for unknown field names. */
    @Override
    public Object getFieldValue(String fieldName) {
        if ("access_token".equals(fieldName)) {
            return access_token;
        }
        if ("callback".equals(fieldName)) {
            return callback;
        }
        if ("fields".equals(fieldName)) {
            return fields;
        }
        if ("key".equals(fieldName)) {
            return key;
        }
        if ("operation".equals(fieldName)) {
            return operation;
        }
        if ("prettyPrint".equals(fieldName)) {
            return prettyPrint;
        }
        if ("quotaUser".equals(fieldName)) {
            return quotaUser;
        }
        if ("userIp".equals(fieldName)) {
            return userIp;
        }
        return null;
    }

    /** This GET request carries no body. */
    @Nullable
    @Override
    public ApiMessage getApiMessageRequestBody() {
        return null;
    }

    /**
     * The fields that should be serialized (even if they have empty values). If the containing
     * message object has a non-null fieldmask, then all the fields in the field mask (and only those
     * fields in the field mask) will be serialized. If the containing object does not have a
     * fieldmask, then only non-empty fields will be serialized.
     */
    @Nullable
    @Override
    public List<String> getFieldMask() {
        return null;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
        return access_token;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
        return callback;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
        return fields;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
        return key;
    }

    /**
     * Name of the Operations resource to return. It must have the format
     * `{project}/regions/{region}/operations/{operation}`. \`{operation}\` must start with a letter,
     * and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), * underscores
     * (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent * signs (\`%\`). It must
     * be between 3 and 255 characters in length, and it * must not start with \`"goog"\`.
     */
    public String getOperation() {
        return operation;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
        return prettyPrint;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
        return quotaUser;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
        return userIp;
    }

    public static Builder newBuilder() {
        return DEFAULT_INSTANCE.toBuilder();
    }

    public static Builder newBuilder(GetRegionOperationHttpRequest prototype) {
        return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }

    public Builder toBuilder() {
        // The default instance yields an empty builder; anything else is copied.
        return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }

    public static GetRegionOperationHttpRequest getDefaultInstance() {
        return DEFAULT_INSTANCE;
    }

    // Shared all-null sentinel; identity-compared in toBuilder()/mergeFrom().
    private static final GetRegionOperationHttpRequest DEFAULT_INSTANCE;

    static {
        DEFAULT_INSTANCE = new GetRegionOperationHttpRequest();
    }

    /** Mutable companion for assembling a {@link GetRegionOperationHttpRequest}. */
    public static class Builder {
        private String access_token;
        private String callback;
        private String fields;
        private String key;
        private String operation;
        private String prettyPrint;
        private String quotaUser;
        private String userIp;

        Builder() {}

        /** Copies every non-null field of {@code other} into this builder. */
        public Builder mergeFrom(GetRegionOperationHttpRequest other) {
            if (other == GetRegionOperationHttpRequest.getDefaultInstance()) return this;
            if (other.getAccessToken() != null) {
                this.access_token = other.access_token;
            }
            if (other.getCallback() != null) {
                this.callback = other.callback;
            }
            if (other.getFields() != null) {
                this.fields = other.fields;
            }
            if (other.getKey() != null) {
                this.key = other.key;
            }
            if (other.getOperation() != null) {
                this.operation = other.operation;
            }
            if (other.getPrettyPrint() != null) {
                this.prettyPrint = other.prettyPrint;
            }
            if (other.getQuotaUser() != null) {
                this.quotaUser = other.quotaUser;
            }
            if (other.getUserIp() != null) {
                this.userIp = other.userIp;
            }
            return this;
        }

        Builder(GetRegionOperationHttpRequest source) {
            this.access_token = source.access_token;
            this.callback = source.callback;
            this.fields = source.fields;
            this.key = source.key;
            this.operation = source.operation;
            this.prettyPrint = source.prettyPrint;
            this.quotaUser = source.quotaUser;
            this.userIp = source.userIp;
        }

        /** OAuth 2.0 token for the current user. */
        public String getAccessToken() {
            return access_token;
        }

        /** OAuth 2.0 token for the current user. */
        public Builder setAccessToken(String access_token) {
            this.access_token = access_token;
            return this;
        }

        /** Name of the JavaScript callback function that handles the response. */
        public String getCallback() {
            return callback;
        }

        /** Name of the JavaScript callback function that handles the response. */
        public Builder setCallback(String callback) {
            this.callback = callback;
            return this;
        }

        /** Selector specifying a subset of fields to include in the response. */
        public String getFields() {
            return fields;
        }

        /** Selector specifying a subset of fields to include in the response. */
        public Builder setFields(String fields) {
            this.fields = fields;
            return this;
        }

        /** API key. Required unless you provide an OAuth 2.0 token. */
        public String getKey() {
            return key;
        }

        /** API key. Required unless you provide an OAuth 2.0 token. */
        public Builder setKey(String key) {
            this.key = key;
            return this;
        }

        /**
         * Name of the Operations resource to return. It must have the format
         * `{project}/regions/{region}/operations/{operation}`. \`{operation}\` must start with a
         * letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), *
         * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent * signs
         * (\`%\`). It must be between 3 and 255 characters in length, and it * must not start with
         * \`"goog"\`.
         */
        public String getOperation() {
            return operation;
        }

        /**
         * Name of the Operations resource to return. It must have the format
         * `{project}/regions/{region}/operations/{operation}`. \`{operation}\` must start with a
         * letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), *
         * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent * signs
         * (\`%\`). It must be between 3 and 255 characters in length, and it * must not start with
         * \`"goog"\`.
         */
        public Builder setOperation(String operation) {
            this.operation = operation;
            return this;
        }

        /** Returns response with indentations and line breaks. */
        public String getPrettyPrint() {
            return prettyPrint;
        }

        /** Returns response with indentations and line breaks. */
        public Builder setPrettyPrint(String prettyPrint) {
            this.prettyPrint = prettyPrint;
            return this;
        }

        /** Alternative to userIp. */
        public String getQuotaUser() {
            return quotaUser;
        }

        /** Alternative to userIp. */
        public Builder setQuotaUser(String quotaUser) {
            this.quotaUser = quotaUser;
            return this;
        }

        /** IP address of the end user for whom the API call is being made. */
        public String getUserIp() {
            return userIp;
        }

        /** IP address of the end user for whom the API call is being made. */
        public Builder setUserIp(String userIp) {
            this.userIp = userIp;
            return this;
        }

        /**
         * Builds the request.
         *
         * @throws IllegalStateException if the required {@code operation} field is unset
         */
        public GetRegionOperationHttpRequest build() {
            String missing = "";
            if (operation == null) {
                missing += " operation";
            }
            if (!missing.isEmpty()) {
                throw new IllegalStateException("Missing required properties:" + missing);
            }
            return new GetRegionOperationHttpRequest(
                access_token, callback, fields, key, operation, prettyPrint, quotaUser, userIp);
        }

        // NOTE(review): field-by-field copy; does not call super.clone() and
        // Builder does not implement Cloneable — generated-code convention.
        public Builder clone() {
            Builder newBuilder = new Builder();
            newBuilder.setAccessToken(this.access_token);
            newBuilder.setCallback(this.callback);
            newBuilder.setFields(this.fields);
            newBuilder.setKey(this.key);
            newBuilder.setOperation(this.operation);
            newBuilder.setPrettyPrint(this.prettyPrint);
            newBuilder.setQuotaUser(this.quotaUser);
            newBuilder.setUserIp(this.userIp);
            return newBuilder;
        }
    }

    @Override
    public String toString() {
        return "GetRegionOperationHttpRequest{"
            + "access_token="
            + access_token
            + ", "
            + "callback="
            + callback
            + ", "
            + "fields="
            + fields
            + ", "
            + "key="
            + key
            + ", "
            + "operation="
            + operation
            + ", "
            + "prettyPrint="
            + prettyPrint
            + ", "
            + "quotaUser="
            + quotaUser
            + ", "
            + "userIp="
            + userIp
            + "}";
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o instanceof GetRegionOperationHttpRequest) {
            GetRegionOperationHttpRequest that = (GetRegionOperationHttpRequest) o;
            return Objects.equals(this.access_token, that.getAccessToken())
                && Objects.equals(this.callback, that.getCallback())
                && Objects.equals(this.fields, that.getFields())
                && Objects.equals(this.key, that.getKey())
                && Objects.equals(this.operation, that.getOperation())
                && Objects.equals(this.prettyPrint, that.getPrettyPrint())
                && Objects.equals(this.quotaUser, that.getQuotaUser())
                && Objects.equals(this.userIp, that.getUserIp());
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hash(
            access_token, callback, fields, key, operation, prettyPrint, quotaUser, userIp);
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Educational
* Community License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at:
*
* http://opensource.org/licenses/ecl2.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.signup.tool.jsf.organizer;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.StringTokenizer;
import javax.faces.event.ActionEvent;
import javax.faces.event.ValueChangeEvent;
import javax.faces.model.SelectItem;
import org.apache.commons.lang.StringUtils;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.signup.logic.SignupUser;
import org.sakaiproject.signup.logic.SignupUserActionException;
import org.sakaiproject.signup.model.SignupAttachment;
import org.sakaiproject.signup.model.SignupAttendee;
import org.sakaiproject.signup.model.SignupMeeting;
import org.sakaiproject.signup.model.SignupTimeslot;
import org.sakaiproject.signup.tool.jsf.SignupMeetingWrapper;
import org.sakaiproject.signup.tool.jsf.SignupSiteWrapper;
import org.sakaiproject.signup.tool.jsf.SignupUIBaseBean;
import org.sakaiproject.signup.tool.jsf.TimeslotWrapper;
import org.sakaiproject.signup.tool.jsf.organizer.action.CreateMeetings;
import org.sakaiproject.signup.tool.jsf.organizer.action.CreateSitesGroups;
import org.sakaiproject.signup.tool.util.Utilities;
//import com.sun.corba.se.spi.legacy.connection.GetEndPointInfoAgainException;
/**
* <p>
* This JSF UIBean class will handle information exchanges between Organizer's
* copy meeting page:<b>copyMeeting.jsp</b> and backbone system.
*
* @author Peter Liu
*
* </P>
*/
public class CopyMeetingSignupMBean extends SignupUIBaseBean {
// Fresh copy of the meeting being duplicated, reloaded in reset().
private SignupMeeting signupMeeting;

// Whether current attendees are carried over to the copied event.
private boolean keepAttendees;

private int maxNumOfAttendees;

// True when time slots accept an unlimited number of attendees.
private boolean unlimited;

private String signupBeginsType;

/* signup can start this many minutes/hours/days before the event */
private int signupBegins;

private String deadlineTimeType;

/* signup deadline this many minutes/hours/days before the event ends */
private int deadlineTime;

// Location selected from the dropdown
private String selectedLocation;

// Category selected from the dropdown
private String selectedCategory;

// Creator selected from the dropdown
private String creatorUserId;

private Date repeatUntil;

private String repeatType;

/* "0" for number of repeats, "1" for an end-date choice */
private String recurLengthChoice;

private int occurrences;

// private int timeSlotDuration;
private int numberOfSlots;

private boolean showAttendeeName;

private SignupSiteWrapper currentSite;

private List<SignupSiteWrapper> otherSites;

private List<SignupUser> allowedUserList;

// Set when the original meeting references sites/groups the copier cannot see.
private boolean missingSitGroupWarning;

private List<String> missingSites;

private List<String> missingGroups;

// NOTE(review): identifier kept as-is ("Paricitpants" typo) — referenced elsewhere.
private boolean assignParicitpantsToAllRecurEvents;

private boolean validationError;

private boolean repeatTypeUnknown = true;

private List<SelectItem> meetingTypeRadioBttns;

private UserDefineTimeslotBean userDefineTimeslotBean;

// discontinued (custom/user-defined) time slots case
private List<TimeslotWrapper> customTimeSlotWrpList;

private boolean userDefinedTS = false;

// Config flag: when true, email notification is mandatory for new meetings.
protected static boolean NEW_MEETING_SEND_EMAIL = "true".equalsIgnoreCase(Utilities.getSignupConfigParamVal(
    "signup.email.notification.mandatory.for.newMeeting", "true")) ? true : false;

private boolean mandatorySendEmail = NEW_MEETING_SEND_EMAIL;

// Lazily populated dropdown models; null forces a rebuild.
private List<SelectItem> categories = null;

private List<SelectItem> locations = null;
/**
 * Resets this session-scoped UI bean to a fresh "copy meeting" state:
 * clears the previous form choices, reloads the meeting being copied from
 * the service, re-attaches a copy of its attachments, and re-derives the
 * time-slot, location/category, recurrence and site/group data shown on
 * the copy page.
 */
public void reset() {
    unlimited = false;
    keepAttendees = false;
    assignParicitpantsToAllRecurEvents = false;
    sendEmail = DEFAULT_SEND_EMAIL;
    if (NEW_MEETING_SEND_EMAIL) {
        // mandatory email notification overrides the default
        sendEmail = true;
    }
    // sendEmailAttendeeOnly = false;
    sendEmailToSelectedPeopleOnly = SEND_EMAIL_ALL_PARTICIPANTS;
    publishToCalendar = DEFAULT_EXPORT_TO_CALENDAR_TOOL;
    // Default "repeat until" to the current hour (minutes/seconds zeroed).
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(new Date());
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    repeatUntil = calendar.getTime();
    recurLengthChoice = "1";// 0 for num of repeat, 1 for date choice
    occurrences = 0;
    repeatType = ONCE_ONLY;
    repeatTypeUnknown = true;
    showAttendeeName = false;
    missingSitGroupWarning = false;
    /* clean up previously unused attachment copies in content hosting (CHS) */
    if (this.signupMeeting != null)
        cleanUpUnusedAttachmentCopies(this.signupMeeting.getSignupAttachments());
    /* refresh copy of the original meeting from the service */
    this.signupMeeting = signupMeetingService.loadSignupMeeting(meetingWrapper.getMeeting().getId(), sakaiFacade
        .getCurrentUserId(), sakaiFacade.getCurrentLocationId());
    /* prepare new attachments */
    assignMainAttachmentsCopyToSignupMeeting();
    // TODO not consider copy time slot attachment yet
    List<SignupTimeslot> signupTimeSlots = signupMeeting.getSignupTimeSlots();
    if (signupTimeSlots != null && !signupTimeSlots.isEmpty()) {
        // Derive per-slot settings from the first slot (all slots share them).
        SignupTimeslot ts = (SignupTimeslot) signupTimeSlots.get(0);
        maxNumOfAttendees = ts.getMaxNoOfAttendees();
        this.unlimited = ts.isUnlimitedAttendee();
        showAttendeeName = ts.isDisplayAttendees();
        this.numberOfSlots = signupTimeSlots.size();
    } else {// announcement meeting type
        setNumberOfSlots(1);
    }
    // populate location and category data for the new meeting;
    // since it's a copy, the dropdown selections should already contain them.
    this.selectedLocation = this.signupMeeting.getLocation();
    this.selectedCategory = this.signupMeeting.getCategory();
    this.customLocation = "";
    this.customCategory = "";
    this.categories = null;
    this.locations = null;
    populateDataForBeginDeadline(this.signupMeeting);
    /* Case: recurrence events */
    prepareRecurredEvents();
    /* Initialize site/groups for current organizer */
    initializeSitesGroups();
    /* custom time-slot case */
    this.customTimeSlotWrpList = null;
    this.userDefinedTS = false;
    /* populate timeslot data */
    updateTimeSlotWrappers(this.meetingWrapper);
    if (CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType())) {
        this.userDefinedTS = true;
        this.customTimeSlotWrpList = getTimeslotWrappers();
        markerTimeslots(this.customTimeSlotWrpList);
    }
    getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL, this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
}
/*
 * Derives the relative sign-up window (type + amount before the meeting)
 * from the absolute begin/deadline timestamps of the source meeting.
 * Missing timestamps default to "now".
 */
private void populateDataForBeginDeadline(SignupMeeting sMeeting) {
    long signupBeginsTime = sMeeting.getSignupBegins() == null ? new Date().getTime() : sMeeting.getSignupBegins()
        .getTime();
    long signupDeadline = sMeeting.getSignupDeadline() == null ? new Date().getTime() : sMeeting
        .getSignupDeadline().getTime();
    /* get signup begin & deadline relative times in minutes */
    long signupBeginBeforMeeting = (sMeeting.getStartTime().getTime() - signupBeginsTime) / MINUTE_IN_MILLISEC;
    long signupDeadLineBeforMeetingEnd = (sMeeting.getEndTime().getTime() - signupDeadline) / MINUTE_IN_MILLISEC;
    this.signupBeginsType = Utilities.getTimeScaleType(signupBeginBeforMeeting);
    this.signupBegins = Utilities.getRelativeTimeValue(signupBeginsType, signupBeginBeforMeeting);
    this.deadlineTimeType = Utilities.getTimeScaleType(signupDeadLineBeforMeetingEnd);
    this.deadlineTime = Utilities.getRelativeTimeValue(deadlineTimeType, signupDeadLineBeforMeetingEnd);
    /* user-readability case for large numbers of minutes */
    if (MINUTES.equals(this.signupBeginsType) && sMeeting.getSignupBegins().before(new Date())
        && this.signupBegins > 500) {
        /* we assume it was effectively 'start now' and round it to days */
        this.signupBeginsType = DAYS;
        this.signupBegins = Utilities.getRelativeTimeValue(DAYS, signupBeginBeforMeeting);
        if (this.signupBegins == 0)
            this.signupBegins = 1; // round up to at least one day
    }
}
/**
 * Overrides the parent accessor to expose the wrapper of the meeting
 * currently being copied.
 *
 * @return the current {@link SignupMeetingWrapper}
 */
public SignupMeetingWrapper getMeetingWrapper() {
    return meetingWrapper;
}
/**
 * JSF action that saves the copied event/meeting (including recurrences)
 * via {@link CreateMeetings}.
 *
 * @return a JSF navigation outcome: the main events list on success, the
 *         copy page on a validation/user error, or the organizer page on
 *         permission or unexpected failure
 */
// TODO: what to do if timeslot is locked or canceled
public String processSaveCopy() {
    if (validationError) {
        // validateCopyMeeting(...) flagged a problem; stay on the copy page.
        validationError = false;
        return COPTY_MEETING_PAGE_URL;
    }
    SignupMeeting sMeeting = getSignupMeeting();
    try {
        prepareCopy(sMeeting);
        sMeeting.setRepeatUntil(getRepeatUntil());
        int repeatNum = getOccurrences();
        if ("1".equals(getRecurLengthChoice())) {
            // End-date choice: derive the occurrence count from the date range.
            repeatNum = CreateMeetings.getNumOfRecurrence(getRepeatType(), sMeeting.getStartTime(),
                getRepeatUntil());
        }
        sMeeting.setRepeatNum(repeatNum);
        sMeeting.setRepeatType(getRepeatType());
        if (CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType())) {
            boolean multipleCalBlocks = getUserDefineTimeslotBean().getPutInMultipleCalendarBlocks();
            sMeeting.setInMultipleCalendarBlocks(multipleCalBlocks);
        }
        /* pass who will receive emails */
        sMeeting.setSendEmailToSelectedPeopleOnly(getSendEmailToSelectedPeopleOnly());
        CreateMeetings createMeeting = new CreateMeetings(sMeeting, sendEmail, keepAttendees
            && !assignParicitpantsToAllRecurEvents, keepAttendees && assignParicitpantsToAllRecurEvents,
            getSignupBegins(), getSignupBeginsType(), getDeadlineTime(), getDeadlineTimeType(), getRecurLengthChoice(), sakaiFacade,
            signupMeetingService, getAttachmentHandler(), sakaiFacade.getCurrentUserId(), sakaiFacade.getCurrentLocationId(), true);
        createMeeting.setPublishToCalendar(isPublishToCalendar());
        createMeeting.processSaveMeetings();
        /* make sure the attachments don't get cleaned up in CHS when saved successfully */
        this.signupMeeting.getSignupAttachments().clear();
    } catch (PermissionException e) {
        logger.info(Utilities.rb.getString("no.permission_create_event") + " - " + e.getMessage());
        Utilities.addErrorMessage(Utilities.rb.getString("no.permission_create_event"));
        return ORGANIZER_MEETING_PAGE_URL;
    } catch (SignupUserActionException ue) {
        // User-level problem: show the message and let the user retry the copy.
        Utilities.addErrorMessage(ue.getMessage());
        return COPTY_MEETING_PAGE_URL;
    } catch (Exception e) {
        logger.error(Utilities.rb.getString("error.occurred_try_again") + " - " + e.getMessage());
        Utilities.addErrorMessage(Utilities.rb.getString("error.occurred_try_again"));
        return ORGANIZER_MEETING_PAGE_URL;
    }
    getUserDefineTimeslotBean().reset(UserDefineTimeslotBean.COPY_MEETING);
    return MAIN_EVENTS_LIST_PAGE_URL;
}
/**
 * This is a validator to make sure that the event/meeting starting time is
 * before ending time, the recurrence setup is sane, at least one site or
 * group is selected, and a location is given before the copy is saved.
 * Any failure sets {@code validationError} and queues a user-facing error
 * message.
 *
 * @param e
 *            an ActionEvent object.
 */
public void validateCopyMeeting(ActionEvent e) {
	Date eventEndTime = signupMeeting.getEndTime();
	Date eventStartTime = signupMeeting.getStartTime();

	/* user defined own TS case */
	if (isUserDefinedTS()) {
		eventEndTime = getUserDefineTimeslotBean().getEventEndTime();
		eventStartTime = getUserDefineTimeslotBean().getEventStartTime();
		/* pass the value since they may be null */
		this.signupMeeting.setStartTime(eventStartTime);
		this.signupMeeting.setEndTime(eventEndTime);

		if (getUserDefineTimeslotBean().getDestTSwrpList() == null || getUserDefineTimeslotBean().getDestTSwrpList().isEmpty()) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("event.create_custom_defined_TS_blocks"));
			return;
		}
	}

	/* end must be strictly after start */
	if (eventEndTime.before(eventStartTime) || eventStartTime.equals(eventEndTime)) {
		validationError = true;
		Utilities.addErrorMessage(Utilities.rb.getString("event.endTime_should_after_startTime"));
		return;
	}

	/* recurrence checks: the event may not span longer than its repeat period */
	if (!(getRepeatType().equals(ONCE_ONLY))) {
		int repeatNum = getOccurrences();
		if ("1".equals(getRecurLengthChoice())) {
			/* recurrence bounded by an end date: derive the occurrence count */
			repeatNum = CreateMeetings.getNumOfRecurrence(getRepeatType(), eventStartTime,
					getRepeatUntil());
		}

		if ((DAILY.equals(getRepeatType()) || WEEKDAYS.equals(getRepeatType())) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 1)) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.daily.problem"));
			return;
		}

		if (WEEKLY.equals(getRepeatType()) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 7)) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.weekly.problem"));
			return;
		}

		if (BIWEEKLY.equals(getRepeatType()) && isMeetingOverRepeatPeriod(eventStartTime, eventEndTime, 14)) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("crossDay.event.repeat.biweekly.problem"));
			return;
		}

		if (repeatNum < 1) {
			validationError = true;
			if ("1".equals(getRecurLengthChoice()))
				Utilities.addErrorMessage(Utilities.rb.getString("event.repeatbeforestart"));
			else
				Utilities.addErrorMessage(Utilities.rb.getString("event.repeatNnum.bigger.than.one"));
			return;
		}
	}

	if (!CreateSitesGroups.isAtleastASiteOrGroupSelected(this.getCurrentSite(), this.getOtherSites())) {
		validationError = true;
		Utilities.addErrorMessage(Utilities.rb.getString("select.atleast.oneGroup.for.copyMeeting"));
	}

	/* for custom defined time slot case */
	if (!validationError && isUserDefinedTS()) {
		this.signupMeeting.setStartTime(eventStartTime);
		this.signupMeeting.setEndTime(eventEndTime);
		this.signupMeeting.setMeetingType(CUSTOM_TIMESLOTS);
	}

	// Set Location
	if (StringUtils.isBlank(getCustomLocation())) {
		if (StringUtils.isBlank(selectedLocation) || selectedLocation.equals(Utilities.rb.getString("select_location"))) {
			validationError = true;
			Utilities.addErrorMessage(Utilities.rb.getString("event.location_not_assigned"));
			return;
		}
		this.signupMeeting.setLocation(selectedLocation);
	}
	else {
		/* the free-text location wins over the dropdown */
		this.signupMeeting.setLocation(getCustomLocation());
	}
	//clear the location fields???
	this.selectedLocation = "";

	// Set Category
	//if textfield is blank, check the dropdown
	if (StringUtils.isBlank(getCustomCategory())) {
		//if dropdown is not the default, then use its value
		if (!StringUtils.equals(selectedCategory, Utilities.rb.getString("select_category"))) {
			this.signupMeeting.setCategory(selectedCategory);
		}
	}
	else {
		this.signupMeeting.setCategory(getCustomCategory());
	}
	//clear the category fields???
	this.selectedCategory = "";

	//set the creator/organiser
	this.signupMeeting.setCreatorUserId(creatorUserId);
	this.creatorUserId = "";
}
/**
 * This method is called by JSP page for adding/removing attachments action.
 *
 * @return null so the current JSF view is kept while the attachment helper
 *         takes over rendering.
 */
public String addRemoveAttachments() {
	getAttachmentHandler().processAddAttachRedirect(this.signupMeeting.getSignupAttachments(), null, true);
	return null;
}

/**
 * Cancels the copy-meeting flow: discards the unused attachment copies,
 * resets the custom time slot bean and clears the location/category
 * dropdown selections.
 *
 * @return the organizer meeting page URL.
 */
public String doCancelAction() {
	cleanUpUnusedAttachmentCopies(this.signupMeeting.getSignupAttachments());
	getUserDefineTimeslotBean().reset(UserDefineTimeslotBean.COPY_MEETING);
	this.selectedLocation = null; //Reset selected option
	this.selectedCategory = null; //Reset selected option
	return ORGANIZER_MEETING_PAGE_URL;
}
/**
 * ValueChange listener fired when the user toggles the 'unlimited
 * attendee' choice. When switched on, the max-attendees field is reset to
 * a default placeholder value of 10.
 *
 * @param vce
 *            a ValueChangeEvent object.
 * @return an empty outcome string (stay on the current page).
 */
public String processGroup(ValueChangeEvent vce) {
	final Boolean selection = (Boolean) vce.getNewValue();
	if (selection == null) {
		return "";
	}
	unlimited = selection.booleanValue();
	if (unlimited) {
		maxNumOfAttendees = 10;
	}
	return "";
}
/**
 * Modify the existing time slot blocks.
 *
 * @return String object for the next page url (the custom time slot editor
 *         page, or the organizer page when the bean state is stale).
 */
public String editUserDefTimeSlots() {
	if (this.customTimeSlotWrpList == null) {
		/* initialize when it comes from other meeting type */
		this.customTimeSlotWrpList = getTimeslotWrappers();
		/* Mark the time slot sequence for recurring events changes issues */
		markerTimeslots(this.customTimeSlotWrpList);
		getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL, this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
	} else {
		/* guard against stale bean state, e.g. editing from another browser tab */
		if (!Utilities.isDataIntegritySafe(isUserDefinedTS(), UserDefineTimeslotBean.COPY_MEETING, getUserDefineTimeslotBean())) {
			return ORGANIZER_MEETING_PAGE_URL;
		}
		/* re-init with the latest user-edited time slots */
		this.customTimeSlotWrpList = getUserDefineTimeslotBean().getDestTSwrpList();
		getUserDefineTimeslotBean().init(this.signupMeeting, COPTY_MEETING_PAGE_URL, this.customTimeSlotWrpList, UserDefineTimeslotBean.COPY_MEETING);
	}
	return CUSTOM_DEFINED_TIMESLOT_PAGE_URL;
}
/**
 * Prepares a copied {@link SignupMeeting} for saving as a brand-new event:
 * clears db ids, re-applies the selected sites/groups, rebuilds the time
 * slots for the announcement / fixed-slot / custom-slot cases, and screens
 * attendees and coordinators against the current permission set.
 *
 * @param meeting
 *            the meeting copy to populate.
 * @throws Exception
 *             if the custom time slot state is stale (e.g. multiple
 *             browser tabs) or any preparation step fails.
 */
private void prepareCopy(SignupMeeting meeting) throws Exception {

	meeting.setId(null);// to save as new meeting in db
	meeting.setRecurrenceId(null);

	meeting.setSignupSites(CreateSitesGroups.getSelectedSignupSites(getCurrentSite(), getOtherSites()));
	this.allowedUserList = LoadAllowedUsers(meeting);

	List<SignupTimeslot> timeslots = meeting.getSignupTimeSlots();
	boolean lockOrCanceledTimeslot = false;

	Calendar calendar = Calendar.getInstance();
	calendar.setTime(meeting.getStartTime());
	/* Announcement type */
	if (getAnnouncementType() || timeslots == null || timeslots.isEmpty()) {
		calendar.add(Calendar.MINUTE, getTimeSlotDuration());
		meeting.setMeetingType(ANNOUNCEMENT);
		meeting.setSignupTimeSlots(null);
	} else {
		List<SignupTimeslot> cpTimeslotList = new ArrayList<SignupTimeslot>();
		List<SignupTimeslot> origTsList = null;
		/* fixed-duration slots: individual and group meeting types */
		if (!isUserDefinedTS() && (meeting.getMeetingType().equals(INDIVIDUAL) || meeting.getMeetingType().equals(GROUP))) {
			origTsList = meeting.getSignupTimeSlots();
			SignupTimeslot origTs = null;
			for (int i = 0; i < getNumberOfSlots(); i++) {
				SignupTimeslot cpTs = new SignupTimeslot();
				int maxAttendees = (unlimited) ? SignupTimeslot.UNLIMITED : maxNumOfAttendees;
				cpTs.setMaxNoOfAttendees(maxAttendees);
				cpTs.setDisplayAttendees(showAttendeeName);
				cpTs.setStartTime(calendar.getTime());
				calendar.add(Calendar.MINUTE, getTimeSlotDuration());
				cpTs.setEndTime(calendar.getTime());

				/* pass attendees */
				if (i < origTsList.size()) {
					origTs = origTsList.get(i);
					List<SignupAttendee> attList = origTs.getAttendees();
					/* screening attendees */
					removeNotAllowedAttedees(attList);

					/* drop surplus attendees when the copy has a smaller capacity */
					if (!unlimited && attList != null && attList.size() > maxAttendees) {
						/* attendee may be truncated */
						//this.truncateAttendee = true; validate by javaScript
						for (int j = attList.size(); j > maxAttendees; j--)
							attList.remove(j - 1);
					}

					cpTs.setAttendees(attList);
					origTs.setAttendees(null);// cleanup,may not necessary
					cpTs.setLocked(origTs.isLocked());
					cpTs.setCanceled(origTs.isCanceled());

					if (origTs.isCanceled() || origTs.isLocked())
						lockOrCanceledTimeslot = true;
				}

				cpTimeslotList.add(cpTs);
			}
		}

		/* User defined time slots case */
		if (meeting.getMeetingType().equals(CUSTOM_TIMESLOTS) || isUserDefinedTS()) {
			UserDefineTimeslotBean uBean = getUserDefineTimeslotBean();
			/* reject stale bean state coming from a different page or tab */
			if (uBean == null || !uBean.COPY_MEETING.equals(uBean.getPlaceOrderBean())) {
				throw new SignupUserActionException(MessageFormat.format(Utilities.rb.getString("you.have.multiple.tabs.in.browser"),
						new Object[] { getSakaiFacade().getServerConfigurationService().getServerName() }));
			}
			List<TimeslotWrapper> tsWrpList = uBean.getDestTSwrpList();
			if (tsWrpList != null) {
				for (TimeslotWrapper wrapper : tsWrpList) {
					SignupTimeslot slot = wrapper.getTimeSlot();
					List<SignupAttendee> attList = slot.getAttendees();
					/* screening attendees */
					removeNotAllowedAttedees(attList);
					/* each custom slot enforces its own capacity */
					if (attList != null && attList.size() > slot.getMaxNoOfAttendees()) {
						/* attendee may be truncated */
						for (int j = attList.size(); j > slot.getMaxNoOfAttendees(); j--)
							attList.remove(j - 1);
					}

					if (slot.isLocked() || slot.isCanceled())
						lockOrCanceledTimeslot = true;

					cpTimeslotList.add(slot);
				}
			}

			/* for end time purpose */
			int duration = getUserDefineTimeslotBean().getEventDuration();
			calendar.add(Calendar.MINUTE, duration);
		}

		meeting.setSignupTimeSlots(cpTimeslotList);// pass over

		if (lockOrCanceledTimeslot)
			Utilities.addErrorMessage(Utilities.rb.getString("warning.some_timeslot_may_locked_canceled"));
	}
	meeting.setEndTime(calendar.getTime());

	/* setup signup begin / deadline */
	setSignupBeginDeadlineData(meeting, getSignupBegins(), getSignupBeginsType(), getDeadlineTime(),
			getDeadlineTimeType());

	// copySites(meeting);

	/*Remove the coordinates who are not in the meeting any more due to the site group changes
	 * we are simplify and just copy over coordinators over and user can change it via modify meeting page*/
	//TODO later we may add the coordinators ability in the copy page too and need ajax to the trick.
	meeting.setCoordinatorIds(getValidatedMeetingCoordinators(meeting));
}
/**
 * Lazily builds the list of all locations for the location dropdown, with
 * a "select location" prompt inserted as the first entry. Cached after the
 * first call.
 *
 * @return list of all locations as SelectItem objects
 */
public List<SelectItem> getAllLocations() {
	if (locations == null) {
		final List<SelectItem> items = new ArrayList<SelectItem>();
		items.addAll(Utilities.getSignupMeetingsBean().getAllLocations());
		items.add(0, new SelectItem(Utilities.rb.getString("select_location")));
		locations = items;
	}
	return locations;
}
/**
 * Lazily builds the list of all categories for the category dropdown. The
 * leading 'All' option coming from the shared bean is replaced by a
 * "select category" prompt. Cached after the first call.
 *
 * @return list of categories as SelectItem objects
 */
public List<SelectItem> getAllCategories() {
	if (categories == null) {
		final List<SelectItem> items = new ArrayList<SelectItem>(Utilities.getSignupMeetingsBean().getAllCategories());
		/* swap the leading 'All' entry for the selection prompt */
		items.set(0, new SelectItem(Utilities.rb.getString("select_category")));
		categories = items;
	}
	return categories;
}
/**
 * check if the attendees in the event/meeting should be copied along with
 * it
 *
 * @return true if the attendees in the event/meeting is copied along with
 *         it
 */
public boolean isKeepAttendees() {
	return keepAttendees;
}

/**
 * this is a setter for UI
 *
 * @param keepAttendees
 *            whether the attendees should be copied along with the meeting
 */
public void setKeepAttendees(boolean keepAttendees) {
	this.keepAttendees = keepAttendees;
}

/**
 * this is a getter method
 *
 * @return the maximum number of attendees per time slot
 */
public int getMaxNumOfAttendees() {
	return maxNumOfAttendees;
}

/**
 * this is a setter
 *
 * @param maxNumOfAttendees
 *            an integer number
 */
public void setMaxNumOfAttendees(int maxNumOfAttendees) {
	this.maxNumOfAttendees = maxNumOfAttendees;
}

/**
 * this is a getter method for UI
 *
 * @return a SignupMeeting object
 */
public SignupMeeting getSignupMeeting() {
	return signupMeeting;
}

/**
 * this is a setter
 *
 * @param signupMeeting
 *            a SignupMeeting object
 */
public void setSignupMeeting(SignupMeeting signupMeeting) {
	this.signupMeeting = signupMeeting;
}

/**
 * check to see if the attendance is unlimited in the event/meeting
 *
 * @return true if the number of attendees is unlimited in the event/meeting
 */
public boolean isUnlimited() {
	return unlimited;
}

/**
 * this is a setter for UI
 *
 * @param unlimited
 *            a boolean value
 */
public void setUnlimited(boolean unlimited) {
	this.unlimited = unlimited;
}

/**
 * this is a getter method to provide a relative time
 *
 * @return an integer number
 */
public int getDeadlineTime() {
	return deadlineTime;
}

/**
 * this is a setter
 *
 * @param deadlineTime
 *            an integer number, which represents a relative time to meeting
 *            starting time
 */
public void setDeadlineTime(int deadlineTime) {
	this.deadlineTime = deadlineTime;
}

/**
 * this is a getter method for UI
 *
 * @return a string value (the time unit of the deadline offset)
 */
public String getDeadlineTimeType() {
	return deadlineTimeType;
}

/**
 * this is a setter for UI
 *
 * @param deadlineTimeType
 *            a string value (the time unit of the deadline offset)
 */
public void setDeadlineTimeType(String deadlineTimeType) {
	this.deadlineTimeType = deadlineTimeType;
}
/**
 * This is a getter method to provide selected location.
 *
 * @return String
 */
public String getselectedLocation() {
	return selectedLocation;
}

/**
 * This is a setter.
 *
 * @param selectedLocation
 *            String that represents the selected location
 */
public void setselectedLocation(String selectedLocation) {
	this.selectedLocation = selectedLocation;
}

/** @return the category currently picked in the dropdown. */
public String getselectedCategory() {
	return selectedCategory;
}

/** Setter for the category dropdown selection. */
public void setselectedCategory(String selectedCategory) {
	this.selectedCategory = selectedCategory;
}

/**
 * @return the chosen organizer's user id, defaulting to the current user
 *         when none has been selected yet.
 */
public String getcreatorUserId() {
	if (this.creatorUserId == null) {
		//set current user as default meeting organizer if case people forget to select one
		return sakaiFacade.getCurrentUserId();
	}
	return creatorUserId;
}

/** Setter for the meeting organizer's user id. */
public void setcreatorUserId(String creatorUserId) {
	this.creatorUserId = creatorUserId;
}
/**
 * this is a getter method for UI
 *
 * @return an integer number
 */
public int getSignupBegins() {
	return signupBegins;
}

/**
 * this is a setter for UI
 *
 * @param signupBegins
 *            an integer number
 */
public void setSignupBegins(int signupBegins) {
	this.signupBegins = signupBegins;
}

/**
 * this is a getter method for UI
 *
 * @return a string value (the time unit for the signup-begins offset)
 */
public String getSignupBeginsType() {
	return signupBeginsType;
}

/**
 * this is a setter for UI
 *
 * @param signupBeginsType
 *            a string value (the time unit for the signup-begins offset)
 */
public void setSignupBeginsType(String signupBeginsType) {
	this.signupBeginsType = signupBeginsType;
}

/** @return the date until which the event recurs. */
public Date getRepeatUntil() {
	return repeatUntil;
}

/** Setter for the recurrence end date. */
public void setRepeatUntil(Date repeatUntil) {
	this.repeatUntil = repeatUntil;
}

/** @return the repeat type (e.g. daily/weekly/biweekly or once-only). */
public String getRepeatType() {
	return repeatType;
}

/** Setter for the repeat type. */
public void setRepeatType(String repeatType) {
	this.repeatType = repeatType;
}
/**
 * Computes the duration of a single time slot, in minutes, by dividing the
 * meeting's overall start/end span evenly over the number of slots.
 * NOTE(review): integer division truncates, and a numberOfSlots of 0 would
 * throw an ArithmeticException — assumed to be validated upstream; confirm.
 *
 * @return the time slot duration in minutes
 */
public int getTimeSlotDuration() {
	long duration = (getSignupMeeting().getEndTime().getTime() - getSignupMeeting().getStartTime().getTime())
			/ (MINUTE_IN_MILLISEC * getNumberOfSlots());
	return (int) duration;
}

/*public void setTimeSlotDuration(int timeSlotDuration) {
	this.timeSlotDuration = timeSlotDuration;
}*/

/**
 * This is a getter method for UI.
 *
 * @return the number of time slots
 */
public int getNumberOfSlots() {
	return numberOfSlots;
}

/**
 * This is a setter method for UI.
 *
 * @param numberOfSlots
 *            an int value
 */
public void setNumberOfSlots(int numberOfSlots) {
	this.numberOfSlots = numberOfSlots;
}
/**
 * This is a getter method for UI.
 *
 * @return a list of SignupSiteWrapper objects for the sites other than the
 *         current one.
 */
public List<SignupSiteWrapper> getOtherSites() {
	return otherSites;
}

/**
 * This is a setter method for UI.
 *
 * @param signupSiteWrapperList
 *            a list of SignupSiteWrapper object.
 */
public void setOtherSites(List<SignupSiteWrapper> signupSiteWrapperList) {
	this.otherSites = signupSiteWrapperList;
}

/**
 * This is a getter method for UI.
 *
 * @return the SignupSiteWrapper object for the current site.
 */
public SignupSiteWrapper getCurrentSite() {
	return currentSite;
}

/**
 * This is a setter for UI.
 *
 * @param currentSite
 *            a SignupSiteWrapper object.
 */
public void setCurrentSite(SignupSiteWrapper currentSite) {
	this.currentSite = currentSite;
}
/**
 * Populates the current/other site wrappers and the missing-site/group
 * warnings from the shared CreateSitesGroups bean.
 */
private void initializeSitesGroups() {
	/*
	 * Temporary bug fix for AuthZ code ( isAllowed(..) ), which gives wrong
	 * permission for the first time at 'Create new or Copy meeting pages'.
	 * The bug will be gone by second time go into it. Once it's fixed,
	 * remove this below and other places and make it into a more clean way
	 * by not sharing the same CreateSitesGroups Object. new
	 * CreateSitesGroups(getSignupMeeting(),sakaiFacade,signupMeetingService);
	 */
	CreateSitesGroups createSiteGroups = Utilities.getSignupMeetingsBean().getCreateSitesGroups();
	createSiteGroups.resetSiteGroupCheckboxMark();
	createSiteGroups.setSignupMeeting(this.getSignupMeeting());
	createSiteGroups.processSiteGroupSelectionMarks();
	setCurrentSite(createSiteGroups.getCurrentSite());
	setOtherSites(createSiteGroups.getOtherSites());
	setMissingSitGroupWarning(createSiteGroups.isSiteOrGroupTruncated());
	setMissingSites(createSiteGroups.getMissingSites());
	setMissingGroups(createSiteGroups.getMissingGroups());
}
/**
 * Loads the users who are allowed to attend this meeting.
 * NOTE(review): the {@code meeting} parameter is ignored — the lookup uses
 * {@code getSignupMeeting()} instead. Confirm both always refer to the
 * same object at the call site (prepareCopy passes this.signupMeeting).
 */
private List<SignupUser> LoadAllowedUsers(SignupMeeting meeting) {
	return sakaiFacade.getAllUsers(getSignupMeeting());
}
/**
 * Screens an attendee list in place, dropping every attendee whose user id
 * is not present in the current {@code allowedUserList}.
 *
 * @param screenAttendeeList
 *            the attendee list to screen; may be null or empty.
 */
private void removeNotAllowedAttedees(List<SignupAttendee> screenAttendeeList) {
	if (screenAttendeeList == null || screenAttendeeList.isEmpty()) {
		return;
	}
	/* walk backwards so removal does not disturb the remaining indexes */
	for (int index = screenAttendeeList.size() - 1; index >= 0; index--) {
		final String attendeeUserId = screenAttendeeList.get(index).getAttendeeUserId();
		boolean allowed = false;
		for (SignupUser candidate : allowedUserList) {
			if (candidate.getInternalUserId().equals(attendeeUserId)) {
				allowed = true;
				break;
			}
		}
		if (!allowed) {
			screenAttendeeList.remove(index);
		}
	}
}
/**
 * Re-validates the meeting's coordinator ids against the current create
 * permission and joins the surviving ids with '|'.
 *
 * @param meeting
 *            the meeting whose coordinators are validated.
 * @return the pipe-separated coordinator ids, or null when none survive.
 */
private String getValidatedMeetingCoordinators(SignupMeeting meeting) {
	final StringBuilder joined = new StringBuilder();
	boolean first = true;
	for (String coordinatorId : getExistingCoordinatorIds(meeting)) {
		if (this.sakaiFacade.hasPermissionToCreate(meeting, coordinatorId)) {
			if (first) {
				joined.append(coordinatorId);
				first = false;
			} else if (joined.length() < 1000) {
				/* safeguard for the db column size; meetings rarely have many coordinators */
				joined.append("|").append(coordinatorId);
			}
		}
	}
	return joined.length() < 1 ? null : joined.toString();
}
/**
 * Splits the meeting's stored pipe-separated coordinator id string into a
 * list of ids; empty when nothing is stored.
 *
 * @param meeting
 *            the meeting whose coordinator ids are read.
 * @return a (possibly empty) list of coordinator user ids.
 */
private List<String> getExistingCoordinatorIds(SignupMeeting meeting) {
	final List<String> coordinatorIds = new ArrayList<String>();
	final String rawIds = meeting.getCoordinatorIds();
	if (rawIds != null && rawIds.trim().length() > 0) {
		final StringTokenizer tokens = new StringTokenizer(rawIds, "|");
		while (tokens.hasMoreTokens()) {
			coordinatorIds.add(tokens.nextToken());
		}
	}
	return coordinatorIds;
}
/**
 * It's a getter method for UI.
 *
 * @return true if some sites/groups were truncated from the selection list
 */
public boolean isMissingSitGroupWarning() {
	return missingSitGroupWarning;
}

private void setMissingSitGroupWarning(boolean missingSitGroupWarning) {
	this.missingSitGroupWarning = missingSitGroupWarning;
}

/** @return names of sites that could not be shown to the user. */
public List<String> getMissingSites() {
	return missingSites;
}

private void setMissingSites(List<String> missingSites) {
	this.missingSites = missingSites;
}

/**
 * It's a getter method for UI.
 *
 * @return true if there is at least one missing site
 */
public boolean isMissingSitesThere() {
	if (this.missingSites == null || this.missingSites.isEmpty())
		return false;
	return true;
}

/** @return names of groups that could not be shown to the user. */
public List<String> getMissingGroups() {
	return missingGroups;
}

private void setMissingGroups(List<String> missingGroups) {
	this.missingGroups = missingGroups;
}

/** @return true if there is at least one missing group. */
public boolean isMissingGroupsThere() {
	if (this.missingGroups == null || this.missingGroups.isEmpty())
		return false;
	return true;
}
/**
 * It's a getter method for UI.
 *
 * @return true if the copied attendees should be assigned to all recurring
 *         events rather than only the first one
 */
public boolean isAssignParicitpantsToAllRecurEvents() {
	return assignParicitpantsToAllRecurEvents;
}

/**
 * It's a setter for UI
 *
 * @param assignParicitpantsToAllRecurEvents
 *            a boolean value
 */
public void setAssignParicitpantsToAllRecurEvents(boolean assignParicitpantsToAllRecurEvents) {
	this.assignParicitpantsToAllRecurEvents = assignParicitpantsToAllRecurEvents;
}

/**
 * It's a getter method for UI; the list is rebuilt on every call from the
 * meeting's current type.
 *
 * @return a list of SelectItem objects for radio buttons.
 */
public List<SelectItem> getMeetingTypeRadioBttns() {
	this.meetingTypeRadioBttns = Utilities.getMeetingTypeSelectItems(getSignupMeeting().getMeetingType(), true);
	return meetingTypeRadioBttns;
}
/**
 * Loads the sibling meetings of a recurring event so that the recurrence
 * settings (repeat type, repeat-until date) can be reconstructed for the
 * copy. Does nothing for a one-off meeting (no recurrence id).
 */
private void prepareRecurredEvents() {
	Long recurrenceId = this.signupMeeting.getRecurrenceId();
	if (recurrenceId != null && recurrenceId.longValue() > 0) {
		Calendar cal = Calendar.getInstance();
		cal.setTime(this.signupMeeting.getStartTime());
		/*backward to one month and make sure we could get some recurrence events
		 * if it's not the only one existed
		 * */
		cal.add(Calendar.HOUR, -24 * 31);
		List<SignupMeeting> recurredMeetings = signupMeetingService.getRecurringSignupMeetings(getSakaiFacade().getCurrentLocationId(), getSakaiFacade().getCurrentUserId(), recurrenceId,
				cal.getTime());
		retrieveRecurrenceData(recurredMeetings);
	}
}
/*This method only provide a most possible repeatType, not with 100% accuracy*/
/**
 * Reconstructs the recurrence settings (repeat-until date and repeat type)
 * from the up-to-date list of recurring meetings. Newer meetings carry an
 * explicit repeat type; for legacy data the type is estimated from the gap
 * between the last two occurrences.
 *
 * @param upTodateOrginMeetings
 *            the recurring meetings ordered by start time; may be
 *            null/empty.
 */
private void retrieveRecurrenceData(List<SignupMeeting> upTodateOrginMeetings) {
	/* to see if the recurring events have a 'Start_Now' type already */
	if (Utilities.testSignupBeginStartNowType(upTodateOrginMeetings)) {
		setSignupBeginsType(START_NOW);// overwrite previous value
		setSignupBegins(6);// default value; not used
	}

	if (upTodateOrginMeetings == null || upTodateOrginMeetings.isEmpty())
		return;

	/* default: treat this meeting's own start time as the recurrence end */
	Calendar cal = Calendar.getInstance();
	cal.setTime(this.signupMeeting.getStartTime());
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	setRepeatUntil(cal.getTime());

	int listSize = upTodateOrginMeetings.size();
	if (listSize > 1) {
		/* set last recurred Date for recurrence events */
		Date lastDate = upTodateOrginMeetings.get(listSize - 1).getStartTime();
		cal.setTime(lastDate);
		cal.set(Calendar.MINUTE, 0);
		cal.set(Calendar.SECOND, 0);
		setRepeatUntil(cal.getTime());

		/* newer data stores the repeat type explicitly — trust it */
		String repeatType = upTodateOrginMeetings.get(listSize - 1).getRepeatType();
		if (repeatType != null && !ONCE_ONLY.equals(repeatType)) {
			setRepeatType(repeatType);
			setRepeatTypeUnknown(false);
			return;
		}

		/*
		 * Legacy data (no stored repeat type): we can only get an
		 * approximate estimation by assuming the last two meetings are
		 * consecutive occurrences.
		 */
		Calendar calFirst = Calendar.getInstance();
		Calendar calSecond = Calendar.getInstance();
		calFirst.setTime(upTodateOrginMeetings.get(listSize - 2).getStartTime());
		calFirst.set(Calendar.SECOND, 0);
		calFirst.set(Calendar.MILLISECOND, 0);
		calSecond.setTime(upTodateOrginMeetings.get(listSize - 1).getStartTime());
		calSecond.set(Calendar.SECOND, 0);
		calSecond.set(Calendar.MILLISECOND, 0);

		/*
		 * Divide in long arithmetic BEFORE narrowing to int: the previous
		 * code cast the millisecond difference to int first, which
		 * overflows for gaps over ~24 days and could misclassify the type.
		 */
		int daysDiff = (int) ((calSecond.getTimeInMillis() - calFirst.getTimeInMillis()) / DAY_IN_MILLISEC);

		setRepeatTypeUnknown(false);
		if (daysDiff == perDay)// could have weekdays get into this one, not very accurate.
			setRepeatType(DAILY);
		else if (daysDiff == perWeek)
			setRepeatType(WEEKLY);
		else if (daysDiff == perBiweek)
			setRepeatType(BIWEEKLY);
		else if (daysDiff == 3 && calFirst.get(Calendar.DAY_OF_WEEK) == Calendar.FRIDAY)
			setRepeatType(WEEKDAYS);
		else {
			/* case: unknown repeatType */
			setRepeatTypeUnknown(true);
		}
	}
}
/**
 * This is a getter for UI and it is used for controlling the
 * recurring meeting warning message.
 * @return true if the repeatType is unknown for a repeatable event.
 */
public boolean getRepeatTypeUnknown() {
	return repeatTypeUnknown;
}

/** Setter for the unknown-repeat-type flag (set by retrieveRecurrenceData). */
public void setRepeatTypeUnknown(boolean repeatTypeUnknown) {
	this.repeatTypeUnknown = repeatTypeUnknown;
}
/**
 * Replaces the meeting's attachment list with fresh copies of its
 * meeting-level attachments (no time slot id, visible to all). Time slot
 * level attachments are intentionally not copied yet.
 */
private void assignMainAttachmentsCopyToSignupMeeting() {
	List<SignupAttachment> mainAttachments = new ArrayList<SignupAttachment>();
	/*
	 * The previous version null-checked the freshly constructed list (always
	 * non-null) while iterating getSignupAttachments() unguarded; guard the
	 * actual nullable source instead.
	 */
	List<SignupAttachment> currentAttachments = this.signupMeeting.getSignupAttachments();
	if (currentAttachments != null) {
		for (SignupAttachment attach : currentAttachments) {
			/* only meeting-level attachments that everybody may view are copied */
			if (attach.getTimeslotId() == null && attach.getViewByAll())
				mainAttachments.add(attach);
			//TODO Later: how about time slot attachment?.
		}
	}

	List<SignupAttachment> cpList = new ArrayList<SignupAttachment>();
	for (SignupAttachment attach : mainAttachments) {
		cpList.add(getAttachmentHandler().copySignupAttachment(this.signupMeeting, true, attach, ATTACH_COPY + this.signupMeeting.getId().toString()));
	}

	this.signupMeeting.setSignupAttachments(cpList);
}
/*Overwrite default one*/
/**
 * @return true when the meeting is missing or carries no attachments.
 */
public boolean getSignupAttachmentEmpty() {
	if (this.signupMeeting == null) {
		return true;
	}
	final List<SignupAttachment> attachments = this.signupMeeting.getSignupAttachments();
	return attachments == null || attachments.isEmpty();
}
/** @return the shared bean that manages user-defined time slots. */
public UserDefineTimeslotBean getUserDefineTimeslotBean() {
	return userDefineTimeslotBean;
}

public void setUserDefineTimeslotBean(UserDefineTimeslotBean userDefineTimeslotBean) {
	this.userDefineTimeslotBean = userDefineTimeslotBean;
}

/** @return true when the user chose to define custom time slots. */
public boolean isUserDefinedTS() {
	return userDefinedTS;
}

public void setUserDefinedTS(boolean userDefinedTS) {
	this.userDefinedTS = userDefinedTS;
}

/** @return the wrappers of the custom-defined time slots. */
public List<TimeslotWrapper> getCustomTimeSlotWrpList() {
	return customTimeSlotWrpList;
}

public void setCustomTimeSlotWrpList(List<TimeslotWrapper> customTimeSlotWrpList) {
	this.customTimeSlotWrpList = customTimeSlotWrpList;
}

/**
 * This is only for UI purpose to check if the event/meeting is an
 * custom-ts style (many time slots) and it requires signup.
 */
public boolean getCustomTsType() {
	return CUSTOM_TIMESLOTS.equals(this.signupMeeting.getMeetingType());
}

/**
 * @return the recurrence-length choice code; "1" means the recurrence is
 *         bounded by an end date rather than an occurrence count.
 *         NOTE(review): meaning inferred from its use in
 *         validateCopyMeeting — confirm against the JSP.
 */
public String getRecurLengthChoice() {
	return recurLengthChoice;
}

public void setRecurLengthChoice(String recurLengthChoice) {
	this.recurLengthChoice = recurLengthChoice;
}

/** @return the number of occurrences when recurrence is count-based. */
public int getOccurrences() {
	return occurrences;
}

public void setOccurrences(int occurrences) {
	this.occurrences = occurrences;
}

/**
 * @return true if sakai property signup.enableAttendance is true, else will return false
 */
public boolean isAttendanceOn() {
	return Utilities.getSignupMeetingsBean().isAttendanceOn();
}

/**
 * Get a list of users that have permission, but format it as a SelectItem
 * list for the dropdown. Since this is a new item there will be no current
 * instructor, so the current user appears at the top of the list.
 * NOTE(review): the meeting field (not null) is passed through — earlier
 * doc claimed null was sent; confirm which the bean expects.
 */
public List<SelectItem> getInstructors() {
	return Utilities.getSignupMeetingsBean().getInstructors(signupMeeting);
}

/**
 * This is for UI page to determine whether the email checkbox should be checked and disabled to change
 * @return true when sending email notifications is mandatory
 */
public boolean isMandatorySendEmail() {
	return this.mandatorySendEmail;
}
}
| |
/*
* $Id: EditorActions.java,v 1.1 2012/11/15 13:26:46 gaudenz Exp $
* Copyright (c) 2001-2012, JGraph Ltd
*/
package com.mxgraph.examples.swing.editor;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.awt.print.PageFormat;
import java.awt.print.Paper;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import com.mxgraph.util.*;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.swing.*;
import javax.swing.filechooser.FileFilter;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLDocument;
import javax.swing.text.html.HTMLEditorKit;
import org.w3c.dom.Document;
import com.mxgraph.analysis.mxDistanceCostFunction;
import com.mxgraph.analysis.mxGraphAnalysis;
import com.mxgraph.canvas.mxGraphics2DCanvas;
import com.mxgraph.canvas.mxICanvas;
import com.mxgraph.canvas.mxSvgCanvas;
import com.mxgraph.io.mxCodec;
import com.mxgraph.io.mxGdCodec;
import com.mxgraph.model.mxCell;
import com.mxgraph.model.mxGraphModel;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.shape.mxStencilShape;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.swing.mxGraphOutline;
import com.mxgraph.swing.handler.mxConnectionHandler;
import com.mxgraph.swing.util.mxGraphActions;
import com.mxgraph.swing.view.mxCellEditor;
import com.mxgraph.util.mxCellRenderer;
import com.mxgraph.util.mxCellRenderer.CanvasFactory;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxDomUtils;
import com.mxgraph.util.mxResources;
import com.mxgraph.util.mxUtils;
import com.mxgraph.util.mxXmlUtils;
import com.mxgraph.util.png.mxPngEncodeParam;
import com.mxgraph.util.png.mxPngImageEncoder;
import com.mxgraph.util.png.mxPngTextDecoder;
import com.mxgraph.view.mxGraph;
/**
*
*/
public class EditorActions
{
/**
 * Walks up the component hierarchy from the event source to find the
 * enclosing editor.
 *
 * @param e
 * @return Returns the editor for the given action event, or null when the
 *         source is not a component inside a BasicGraphEditor.
 */
public static final BasicGraphEditor getEditor(ActionEvent e)
{
	Object source = e.getSource();

	if (!(source instanceof Component))
	{
		return null;
	}

	Component candidate = (Component) source;

	while (candidate != null && !(candidate instanceof BasicGraphEditor))
	{
		candidate = candidate.getParent();
	}

	return (BasicGraphEditor) candidate;
}
/**
 * Action that inserts a new green ellipse vertex (100x100) at the last
 * mouse-click location of the enclosing editor.
 */
@SuppressWarnings("serial")
public static class NewVertexAction extends AbstractAction
{
	/**
	 * Adds the vertex to the default parent of the editor's graph.
	 */
	public void actionPerformed(ActionEvent e)
	{
		BasicGraphEditor editor = getEditor(e);

		/*
		 * getEditor may return null when the event source is not inside an
		 * editor; the previous version dereferenced it unguarded (NPE).
		 */
		if (editor == null)
		{
			return;
		}

		mxGraph graph = editor.getGraphComponent().getGraph();

		if (graph != null)
		{
			Point pt = editor.mouseClickLoc;
			graph.setCellStyle("Rounded");
			graph.insertVertex(graph.getDefaultParent(), null, "", pt.x, pt.y, 100, 100,
					"shape=ellipse;perimeter=100;fillColor=green");
		}
	}
}
/**
 * Menu item that toggles the horizontal and vertical rulers around the
 * graph component.
 */
@SuppressWarnings("serial")
public static class ToggleRulersItem extends JCheckBoxMenuItem
{
	/**
	 * Constructs the checkbox item; initially selected when the editor's
	 * graph component already has a column header (i.e. rulers installed).
	 */
	public ToggleRulersItem(final BasicGraphEditor editor, String name)
	{
		super(name);
		setSelected(editor.getGraphComponent().getColumnHeader() != null);

		addActionListener(new ActionListener()
		{
			/**
			 * Removes the rulers when present, otherwise installs a fresh
			 * horizontal/vertical EditorRuler pair.
			 */
			public void actionPerformed(ActionEvent e)
			{
				mxGraphComponent graphComponent = editor
						.getGraphComponent();

				if (graphComponent.getColumnHeader() != null)
				{
					graphComponent.setColumnHeader(null);
					graphComponent.setRowHeader(null);
				}
				else
				{
					graphComponent.setColumnHeaderView(new EditorRuler(
							graphComponent,
							EditorRuler.ORIENTATION_HORIZONTAL));
					graphComponent.setRowHeaderView(new EditorRuler(
							graphComponent,
							EditorRuler.ORIENTATION_VERTICAL));
				}
			}
		});
	}
}
/**
 * Menu item that toggles grid snapping on the graph model and grid
 * visibility on the component. Starts selected (grid on).
 */
@SuppressWarnings("serial")
public static class ToggleGridItem extends JCheckBoxMenuItem
{
	/**
	 * Constructs the checkbox item for the given editor.
	 */
	public ToggleGridItem(final BasicGraphEditor editor, String name)
	{
		super(name);
		setSelected(true);

		addActionListener(new ActionListener()
		{
			/**
			 * Flips the grid-enabled flag and keeps the component's grid
			 * display and this item's selected state in sync.
			 */
			public void actionPerformed(ActionEvent e)
			{
				mxGraphComponent graphComponent = editor
						.getGraphComponent();
				mxGraph graph = graphComponent.getGraph();
				boolean enabled = !graph.isGridEnabled();

				graph.setGridEnabled(enabled);
				graphComponent.setGridVisible(enabled);
				graphComponent.repaint();
				setSelected(enabled);
			}
		});
	}
}
/**
 * Menu item that shows/hides the graph outline panel. When the outline is
 * hosted in a JSplitPane, the divider is collapsed or restored to match.
 */
@SuppressWarnings("serial")
public static class ToggleOutlineItem extends JCheckBoxMenuItem
{
	/**
	 * Constructs the checkbox item; the outline starts visible.
	 */
	public ToggleOutlineItem(final BasicGraphEditor editor, String name)
	{
		super(name);
		setSelected(true);

		addActionListener(new ActionListener()
		{
			/**
			 * Toggles the outline's visibility, then adjusts the enclosing
			 * split pane later on the EDT, after layout has revalidated.
			 */
			public void actionPerformed(ActionEvent e)
			{
				final mxGraphOutline outline = editor.getGraphOutline();
				outline.setVisible(!outline.isVisible());
				outline.revalidate();

				SwingUtilities.invokeLater(new Runnable()
				{
					/*
					 * (non-Javadoc)
					 * @see java.lang.Runnable#run()
					 */
					public void run()
					{
						if (outline.getParent() instanceof JSplitPane)
						{
							if (outline.isVisible())
							{
								/* restore a 300px-high outline area */
								((JSplitPane) outline.getParent())
										.setDividerLocation(editor
												.getHeight() - 300);
								((JSplitPane) outline.getParent())
										.setDividerSize(6);
							}
							else
							{
								/* hide the divider while the outline is hidden */
								((JSplitPane) outline.getParent())
										.setDividerSize(0);
							}
						}
					}
				});
			}
		});
	}
}
/**
 * Action that closes the application through the enclosing editor.
 */
@SuppressWarnings("serial")
public static class ExitAction extends AbstractAction
{
	/**
	 * Delegates to the editor's exit routine; does nothing when the event
	 * did not originate inside an editor.
	 */
	public void actionPerformed(ActionEvent e)
	{
		final BasicGraphEditor editor = getEditor(e);

		if (editor == null)
		{
			return;
		}

		editor.exit();
	}
}
/**
 * Action that applies a stylesheet, loaded from a classpath resource, to
 * the graph of the mxGraphComponent that fired the event.
 */
@SuppressWarnings("serial")
public static class StylesheetAction extends AbstractAction
{
	/** Classpath resource path of the stylesheet XML to apply. */
	protected String stylesheet;

	/**
	 * Constructs a new action for the given stylesheet resource.
	 */
	public StylesheetAction(String stylesheet)
	{
		this.stylesheet = stylesheet;
	}

	/**
	 * Decodes the stylesheet XML into the graph's stylesheet and refreshes
	 * the graph. Only acts when the event source is an mxGraphComponent.
	 * NOTE(review): a missing resource makes getResource return null and
	 * would throw an NPE here — assumed the resource always exists.
	 */
	public void actionPerformed(ActionEvent e)
	{
		if (e.getSource() instanceof mxGraphComponent)
		{
			mxGraphComponent graphComponent = (mxGraphComponent) e
					.getSource();
			mxGraph graph = graphComponent.getGraph();
			mxCodec codec = new mxCodec();
			Document doc = mxUtils.loadDocument(EditorActions.class
					.getResource(stylesheet).toString());

			if (doc != null)
			{
				codec.decode(doc.getDocumentElement(),
						graph.getStylesheet());
				graph.refresh();
			}
		}
	}
}
/**
 * Action that applies a fixed zoom policy to the source graph component.
 */
@SuppressWarnings("serial")
public static class ZoomPolicyAction extends AbstractAction
{
    /**
     * Zoom policy constant passed to the graph component.
     */
    protected int zoomPolicy;

    /**
     * @param zoomPolicy Policy constant to apply on action.
     */
    public ZoomPolicyAction(int zoomPolicy)
    {
        this.zoomPolicy = zoomPolicy;
    }

    /**
     * Makes the page visible and installs the configured zoom policy.
     */
    public void actionPerformed(ActionEvent e)
    {
        Object source = e.getSource();

        if (source instanceof mxGraphComponent)
        {
            mxGraphComponent component = (mxGraphComponent) source;
            component.setPageVisible(true);
            component.setZoomPolicy(zoomPolicy);
        }
    }
}
/**
 * Action that switches the grid rendering style of the source graph
 * component and repaints it.
 */
@SuppressWarnings("serial")
public static class GridStyleAction extends AbstractAction
{
    /**
     * Grid style constant passed to the graph component.
     */
    protected int style;

    /**
     * @param style Grid style constant to apply on action.
     */
    public GridStyleAction(int style)
    {
        this.style = style;
    }

    /**
     * Applies the grid style and triggers a repaint.
     */
    public void actionPerformed(ActionEvent e)
    {
        Object source = e.getSource();

        if (source instanceof mxGraphComponent)
        {
            mxGraphComponent component = (mxGraphComponent) source;
            component.setGridStyle(style);
            component.repaint();
        }
    }
}
/**
 * Action that lets the user pick a new grid color for the source graph
 * component via a color chooser dialog.
 */
@SuppressWarnings("serial")
public static class GridColorAction extends AbstractAction
{
    /**
     * Shows the chooser preset to the current grid color; applies the
     * picked color and repaints, or does nothing on cancel.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        Color picked = JColorChooser.showDialog(component,
                mxResources.get("gridColor"), component.getGridColor());

        if (picked != null)
        {
            component.setGridColor(picked);
            component.repaint();
        }
    }
}
/**
 * Action that zooms the source graph component to a given scale. A
 * configured scale of 0 prompts the user for a percentage value.
 */
@SuppressWarnings("serial")
public static class ScaleAction extends AbstractAction
{
    /**
     * Fixed scale factor (1 = 100%), or 0 to prompt the user.
     */
    protected double scale;

    /**
     * @param scale Scale factor to apply, or 0 to ask the user.
     */
    public ScaleAction(double scale)
    {
        this.scale = scale;
    }

    /**
     * Applies the scale to the source graph component. Malformed user
     * input is ignored instead of throwing a NumberFormatException out
     * of the event handler (bug fix).
     */
    public void actionPerformed(ActionEvent e)
    {
        if (e.getSource() instanceof mxGraphComponent)
        {
            mxGraphComponent graphComponent = (mxGraphComponent) e
                    .getSource();
            double scale = this.scale;

            if (scale == 0)
            {
                String value = (String) JOptionPane.showInputDialog(
                        graphComponent, mxResources.get("value"),
                        mxResources.get("scale") + " (%)",
                        JOptionPane.PLAIN_MESSAGE, null, null, "");

                if (value != null)
                {
                    try
                    {
                        // Accepts both "150" and "150%".
                        scale = Double.parseDouble(value.replace("%", "")) / 100;
                    }
                    catch (NumberFormatException ex)
                    {
                        // Previously a malformed entry propagated an
                        // unchecked exception; now the input is ignored.
                        scale = 0;
                    }
                }
            }

            if (scale > 0)
            {
                graphComponent.zoomTo(scale, graphComponent.isCenterZoom());
            }
        }
    }
}
/**
 * Action that opens the printer page-setup dialog and stores the chosen
 * page format on the source graph component.
 */
@SuppressWarnings("serial")
public static class PageSetupAction extends AbstractAction
{
    /**
     * Shows the page dialog preset to the current format; on confirm,
     * applies the new format and re-centers the zoom.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        PageFormat chosen = PrinterJob.getPrinterJob().pageDialog(
                component.getPageFormat());

        if (chosen != null)
        {
            component.setPageFormat(chosen);
            component.zoomAndCenter();
        }
    }
}
/**
 * Action that prints the source graph component with a fixed 36pt
 * margin on all sides.
 */
@SuppressWarnings("serial")
public static class PrintAction extends AbstractAction
{
    /**
     * Delegates to {@link #printComp(mxGraphComponent)}. Previously this
     * method duplicated that code verbatim; the duplication is removed.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (e.getSource() instanceof mxGraphComponent)
        {
            printComp((mxGraphComponent) e.getSource());
        }
    }

    /**
     * Shows the system print dialog and, if confirmed, prints the given
     * graph component using its current page format with a 36pt margin.
     *
     * @param mxgc The graph component to print.
     */
    public static void printComp(mxGraphComponent mxgc)
    {
        PrinterJob pj = PrinterJob.getPrinterJob();

        if (pj.printDialog())
        {
            PageFormat pf = mxgc.getPageFormat();

            // Applies the same margin to every edge of a fresh Paper.
            Paper paper = new Paper();
            double margin = 36;
            paper.setImageableArea(margin, margin, paper.getWidth()
                    - margin * 2, paper.getHeight() - margin * 2);
            pf.setPaper(paper);
            pj.setPrintable(mxgc, pf);

            try
            {
                pj.print();
            }
            catch (PrinterException e2)
            {
                System.out.println(e2);
            }
        }
    }
}
/**
 * Action that saves the current graph to a file. The target format is
 * chosen by the filename extension: PNG+XML, .mxe/.xml model XML, .txt
 * (GD), .svg, .html/VML, or any image format supported by ImageIO.
 */
@SuppressWarnings("serial")
public static class SaveAction extends AbstractAction
{
/**
 * Whether a file chooser is always shown ("Save As") instead of reusing
 * the editor's current file.
 */
protected boolean showDialog;
/**
 * Directory of the last save; used as the chooser's starting directory.
 */
protected String lastDir = null;
/**
 * @param showDialog true for "Save As" behaviour, false for plain save.
 */
public SaveAction(boolean showDialog)
{
this.showDialog = showDialog;
}
/**
 * Saves XML+PNG format.
 *
 * Renders the graph to a PNG and embeds the URL-encoded model XML as a
 * compressed text chunk so the file is both an image and reloadable.
 */
protected void saveXmlPng(BasicGraphEditor editor, String filename,
Color bg) throws IOException
{
mxGraphComponent graphComponent = editor.getGraphComponent();
mxGraph graph = graphComponent.getGraph();
// Creates the image for the PNG file
BufferedImage image = mxCellRenderer.createBufferedImage(graph,
null, 1, bg, graphComponent.isAntiAlias(), null,
graphComponent.getCanvas());
// Creates the URL-encoded XML data
mxCodec codec = new mxCodec();
String xml = URLEncoder.encode(
mxXmlUtils.getXml(codec.encode(graph.getModel())), "UTF-8");
mxPngEncodeParam param = mxPngEncodeParam
.getDefaultEncodeParam(image);
// The "mxGraphModel" key is what OpenAction.openXmlPng reads back.
param.setCompressedText(new String[] { "mxGraphModel", xml });
// Saves as a PNG file
FileOutputStream outputStream = new FileOutputStream(new File(
filename));
try
{
mxPngImageEncoder encoder = new mxPngImageEncoder(outputStream,
param);
if (image != null)
{
encoder.encode(image);
editor.setModified(false);
editor.setCurrentFile(new File(filename));
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noImageData"));
}
}
finally
{
outputStream.close();
}
}
/**
 * Determines the destination file (showing a chooser when configured or
 * when the editor has no current file) and writes the graph in the
 * format implied by the extension. Errors are reported in a dialog.
 */
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
mxGraphComponent graphComponent = editor.getGraphComponent();
mxGraph graph = graphComponent.getGraph();
FileFilter selectedFilter = null;
DefaultFileFilter xmlPngFilter = new DefaultFileFilter(".png",
"PNG+XML " + mxResources.get("file") + " (.png)");
FileFilter vmlFileFilter = new DefaultFileFilter(".html",
"VML " + mxResources.get("file") + " (.html)");
String filename = null;
boolean dialogShown = false;
// Prompts for a destination on "Save As" or first save.
if (showDialog || editor.getCurrentFile() == null)
{
String wd;
if (lastDir != null)
{
wd = lastDir;
}
else if (editor.getCurrentFile() != null)
{
wd = editor.getCurrentFile().getParent();
}
else
{
wd = System.getProperty("user.dir");
}
JFileChooser fc = new JFileChooser(wd);
// Adds the default file format
FileFilter defaultFilter = xmlPngFilter;
fc.addChoosableFileFilter(defaultFilter);
// Adds special vector graphics formats and HTML
fc.addChoosableFileFilter(new DefaultFileFilter(".mxe",
"mxGraph Editor " + mxResources.get("file")
+ " (.mxe)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".txt",
"Graph Drawing " + mxResources.get("file")
+ " (.txt)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".svg",
"SVG " + mxResources.get("file") + " (.svg)"));
fc.addChoosableFileFilter(vmlFileFilter);
fc.addChoosableFileFilter(new DefaultFileFilter(".html",
"HTML " + mxResources.get("file") + " (.html)"));
// Adds a filter for each supported image format
Object[] imageFormats = ImageIO.getReaderFormatNames();
// Finds all distinct extensions
HashSet<String> formats = new HashSet<String>();
for (int i = 0; i < imageFormats.length; i++)
{
String ext = imageFormats[i].toString().toLowerCase();
formats.add(ext);
}
imageFormats = formats.toArray();
for (int i = 0; i < imageFormats.length; i++)
{
String ext = imageFormats[i].toString();
fc.addChoosableFileFilter(new DefaultFileFilter("."
+ ext, ext.toUpperCase() + " "
+ mxResources.get("file") + " (." + ext + ")"));
}
// Adds filter that accepts all supported image formats
fc.addChoosableFileFilter(new DefaultFileFilter.ImageFileFilter(
mxResources.get("allImages")));
fc.setFileFilter(defaultFilter);
int rc = fc.showDialog(null, mxResources.get("save"));
dialogShown = true;
if (rc != JFileChooser.APPROVE_OPTION)
{
return;
}
else
{
lastDir = fc.getSelectedFile().getParent();
}
filename = fc.getSelectedFile().getAbsolutePath();
selectedFilter = fc.getFileFilter();
// Appends the filter's extension when the user omitted it.
if (selectedFilter instanceof DefaultFileFilter)
{
String ext = ((DefaultFileFilter) selectedFilter)
.getExtension();
if (!filename.toLowerCase().endsWith(ext))
{
filename += ext;
}
}
if (new File(filename).exists()
&& JOptionPane.showConfirmDialog(graphComponent,
mxResources.get("overwriteExistingFile")) != JOptionPane.YES_OPTION)
{
return;
}
}
else
{
filename = editor.getCurrentFile().getAbsolutePath();
}
try
{
// Dispatches on the filename extension.
String ext = filename
.substring(filename.lastIndexOf('.') + 1);
if (ext.equalsIgnoreCase("svg"))
{
mxSvgCanvas canvas = (mxSvgCanvas) mxCellRenderer
.drawCells(graph, null, 1, null,
new CanvasFactory()
{
public mxICanvas createCanvas(
int width, int height)
{
mxSvgCanvas canvas = new mxSvgCanvas(
mxDomUtils.createSvgDocument(
width, height));
canvas.setEmbedded(true);
return canvas;
}
});
mxUtils.writeFile(mxXmlUtils.getXml(canvas.getDocument()),
filename);
}
else if (selectedFilter == vmlFileFilter)
{
mxUtils.writeFile(mxXmlUtils.getXml(mxCellRenderer
.createVmlDocument(graph, null, 1, null, null)
.getDocumentElement()), filename);
}
else if (ext.equalsIgnoreCase("html"))
{
mxUtils.writeFile(mxXmlUtils.getXml(mxCellRenderer
.createHtmlDocument(graph, null, 1, null, null)
.getDocumentElement()), filename);
}
else if (ext.equalsIgnoreCase("mxe")
|| ext.equalsIgnoreCase("xml"))
{
mxCodec codec = new mxCodec();
String xml = mxXmlUtils.getXml(codec.encode(graph
.getModel()));
mxUtils.writeFile(xml, filename);
editor.setModified(false);
editor.setCurrentFile(new File(filename));
}
else if (ext.equalsIgnoreCase("txt"))
{
String content = mxGdCodec.encode(graph);
mxUtils.writeFile(content, filename);
}
else
{
Color bg = null;
// Only gif/png support transparency; otherwise (or when the
// user declines) the component background is used.
if ((!ext.equalsIgnoreCase("gif") && !ext
.equalsIgnoreCase("png"))
|| JOptionPane.showConfirmDialog(
graphComponent, mxResources
.get("transparentBackground")) != JOptionPane.YES_OPTION)
{
bg = graphComponent.getBackground();
}
// PNG+XML is used for the explicit filter or for a plain
// re-save of an existing .png file.
if (selectedFilter == xmlPngFilter
|| (editor.getCurrentFile() != null
&& ext.equalsIgnoreCase("png") && !dialogShown))
{
saveXmlPng(editor, filename, bg);
}
else
{
BufferedImage image = mxCellRenderer
.createBufferedImage(graph, null, 1, bg,
graphComponent.isAntiAlias(), null,
graphComponent.getCanvas());
if (image != null)
{
ImageIO.write(image, ext, new File(filename));
}
else
{
JOptionPane.showMessageDialog(graphComponent,
mxResources.get("noImageData"));
}
}
}
}
catch (Throwable ex)
{
ex.printStackTrace();
JOptionPane.showMessageDialog(graphComponent,
ex.toString(), mxResources.get("error"),
JOptionPane.ERROR_MESSAGE);
}
}
}
}
/**
 * Action that selects the shortest path between the first two selected
 * vertices, using edge length as the cost function.
 */
@SuppressWarnings("serial")
public static class SelectShortestPathAction extends AbstractAction
{
    /**
     * Whether edge direction is respected by the search.
     */
    protected boolean directed;

    /**
     * @param directed true to treat edges as directed.
     */
    public SelectShortestPathAction(boolean directed)
    {
        this.directed = directed;
    }

    /**
     * Picks the first two selected vertices as endpoints and replaces
     * the selection with the shortest path between them; shows a dialog
     * when fewer than two vertices are selected.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        mxGraph graph = component.getGraph();
        mxIGraphModel model = graph.getModel();

        Object from = null;
        Object to = null;

        for (Object cell : graph.getSelectionCells())
        {
            if (model.isVertex(cell))
            {
                if (from == null)
                {
                    from = cell;
                }
                else if (to == null)
                {
                    to = cell;
                }
            }

            if (from != null && to != null)
            {
                break;
            }
        }

        if (from != null && to != null)
        {
            // The total edge count bounds the search depth.
            int steps = graph.getChildEdges(graph.getDefaultParent()).length;
            Object[] path = mxGraphAnalysis.getInstance().getShortestPath(
                    graph, from, to, new mxDistanceCostFunction(), steps,
                    directed);
            graph.setSelectionCells(path);
        }
        else
        {
            JOptionPane.showMessageDialog(component,
                    mxResources.get("noSourceAndTargetSelected"));
        }
    }
}
/**
 * Action that selects a minimum spanning tree of the child vertices of
 * the first selected cell with children (or the default parent).
 */
@SuppressWarnings("serial")
public static class SelectSpanningTreeAction extends AbstractAction
{
    /**
     * Whether edge direction is respected by the algorithm.
     */
    protected boolean directed;

    /**
     * @param directed true to treat edges as directed.
     */
    public SelectSpanningTreeAction(boolean directed)
    {
        this.directed = directed;
    }

    /**
     * Computes the minimum spanning tree and replaces the selection.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        mxGraph graph = component.getGraph();
        mxIGraphModel model = graph.getModel();

        // Uses the first selected cell that has children as the parent,
        // falling back to the default parent.
        Object parent = graph.getDefaultParent();

        for (Object cell : graph.getSelectionCells())
        {
            if (model.getChildCount(cell) > 0)
            {
                parent = cell;
                break;
            }
        }

        Object[] vertices = graph.getChildVertices(parent);
        Object[] tree = mxGraphAnalysis.getInstance()
                .getMinimumSpanningTree(graph, vertices,
                        new mxDistanceCostFunction(), directed);
        graph.setSelectionCells(tree);
    }
}
/**
 * Debug action that toggles display of the dirty (repaint) rectangle on
 * the source graph component.
 */
@SuppressWarnings("serial")
public static class ToggleDirtyAction extends AbstractAction
{
    /**
     * Flips the public showDirtyRectangle flag of the component.
     */
    public void actionPerformed(ActionEvent e)
    {
        Object source = e.getSource();

        if (source instanceof mxGraphComponent)
        {
            mxGraphComponent component = (mxGraphComponent) source;
            component.showDirtyRectangle = !component.showDirtyRectangle;
        }
    }
}
/**
 * Action that toggles the connect handle of the connection handler on
 * the source graph component.
 */
@SuppressWarnings("serial")
public static class ToggleConnectModeAction extends AbstractAction
{
    /**
     * Inverts the handle-enabled flag of the connection handler.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxConnectionHandler handler = ((mxGraphComponent) e.getSource())
                .getConnectionHandler();
        handler.setHandleEnabled(!handler.isHandleEnabled());
    }
}
/**
 * Checkbox menu item that toggles whether dragging a connection into
 * empty space creates a new target cell.
 */
@SuppressWarnings("serial")
public static class ToggleCreateTargetItem extends JCheckBoxMenuItem
{
    /**
     * Constructs the item; create-target starts out selected.
     */
    public ToggleCreateTargetItem(final BasicGraphEditor editor, String name)
    {
        super(name);
        setSelected(true);
        addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent e)
            {
                mxGraphComponent component = editor.getGraphComponent();

                if (component != null)
                {
                    mxConnectionHandler handler = component
                            .getConnectionHandler();

                    // Toggle, then mirror the handler state back into
                    // the checkbox.
                    handler.setCreateTarget(!handler.isCreateTarget());
                    setSelected(handler.isCreateTarget());
                }
            }
        });
    }
}
/**
 * Action that prompts the user for a new value of an integer bean
 * property on a target object and applies it via reflection using the
 * property's get/set pair.
 */
@SuppressWarnings("serial")
public static class PromptPropertyAction extends AbstractAction
{
/**
 * Object whose get&lt;fieldname&gt;/set&lt;fieldname&gt; pair is invoked.
 */
protected Object target;
/**
 * Capitalized property name and the dialog title shown to the user.
 */
protected String fieldname, message;
/**
 * Convenience constructor using the message as the property name.
 */
public PromptPropertyAction(Object target, String message)
{
this(target, message, message);
}
/**
 * @param target Object exposing the property.
 * @param message Dialog title.
 * @param fieldname Capitalized property name (appended to "get"/"set").
 */
public PromptPropertyAction(Object target, String message,
String fieldname)
{
this.target = target;
this.message = message;
this.fieldname = fieldname;
}
/**
 * Reads the current value, prompts for a replacement and writes it
 * back. Only Integer-valued properties are supported; reflection and
 * parse errors are printed and otherwise ignored.
 */
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof Component)
{
try
{
Method getter = target.getClass().getMethod(
"get" + fieldname);
Object current = getter.invoke(target);
// TODO: Support other atomic types
if (current instanceof Integer)
{
Method setter = target.getClass().getMethod(
"set" + fieldname, new Class[] { int.class });
String value = (String) JOptionPane.showInputDialog(
(Component) e.getSource(), "Value", message,
JOptionPane.PLAIN_MESSAGE, null, null, current);
if (value != null)
{
setter.invoke(target, Integer.parseInt(value));
}
}
}
catch (Exception ex)
{
ex.printStackTrace();
}
}
// Repaints the graph component
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
graphComponent.repaint();
}
}
}
/**
 * Checkbox menu item bound to a boolean bean property (an "is"/"set"
 * pair) of a target object via reflection. When the target is an
 * mxGraph or mxGraphComponent the checkbox also tracks property-change
 * events fired by the target.
 */
@SuppressWarnings("serial")
public static class TogglePropertyItem extends JCheckBoxMenuItem
{
    /**
     * Convenience constructor without refresh or extra listener.
     */
    public TogglePropertyItem(Object target, String name, String fieldname)
    {
        this(target, name, fieldname, false);
    }

    /**
     * Convenience constructor without an extra listener.
     */
    public TogglePropertyItem(Object target, String name, String fieldname,
            boolean refresh)
    {
        this(target, name, fieldname, refresh, null);
    }

    /**
     * @param target Object exposing is&lt;fieldname&gt;/set&lt;fieldname&gt;.
     * @param name Menu item label.
     * @param fieldname Capitalized property name.
     * @param refresh Whether to refresh the graph after toggling.
     * @param listener Optional listener, invoked after the toggle below.
     */
    public TogglePropertyItem(final Object target, String name,
            final String fieldname, final boolean refresh,
            ActionListener listener)
    {
        super(name);

        // Since action listeners are processed last to first we add the given
        // listener here which means it will be processed after the one below
        if (listener != null)
        {
            addActionListener(listener);
        }

        addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent e)
            {
                execute(target, fieldname, refresh);
            }
        });

        PropertyChangeListener propertyChangeListener = new PropertyChangeListener()
        {
            /**
             * Keeps the checkbox in sync with external property changes.
             */
            public void propertyChange(PropertyChangeEvent evt)
            {
                if (evt.getPropertyName().equalsIgnoreCase(fieldname))
                {
                    update(target, fieldname);
                }
            }
        };

        if (target instanceof mxGraphComponent)
        {
            ((mxGraphComponent) target)
                    .addPropertyChangeListener(propertyChangeListener);
        }
        else if (target instanceof mxGraph)
        {
            ((mxGraph) target)
                    .addPropertyChangeListener(propertyChangeListener);
        }

        // Initializes the checkbox from the current property value.
        update(target, fieldname);
    }

    /**
     * Reads the property via its "is" getter and updates the checkbox
     * selection. Reflection failures are silently ignored.
     */
    public void update(Object target, String fieldname)
    {
        if (target != null && fieldname != null)
        {
            try
            {
                Method getter = target.getClass().getMethod(
                        "is" + fieldname);

                if (getter != null)
                {
                    Object current = getter.invoke(target);

                    if (current instanceof Boolean)
                    {
                        setSelected(((Boolean) current).booleanValue());
                    }
                }
            }
            catch (Exception e)
            {
                // ignore
            }
        }
    }

    /**
     * Toggles the property via its getter/setter pair and optionally
     * refreshes the associated graph. Reflection failures are silently
     * ignored.
     */
    public void execute(Object target, String fieldname, boolean refresh)
    {
        if (target != null && fieldname != null)
        {
            try
            {
                Method getter = target.getClass().getMethod(
                        "is" + fieldname);
                Method setter = target.getClass().getMethod(
                        "set" + fieldname, new Class[] { boolean.class });

                Object current = getter.invoke(target);

                if (current instanceof Boolean)
                {
                    boolean value = !((Boolean) current).booleanValue();
                    setter.invoke(target, value);
                    setSelected(value);
                }

                if (refresh)
                {
                    mxGraph graph = null;

                    if (target instanceof mxGraph)
                    {
                        graph = (mxGraph) target;
                    }
                    else if (target instanceof mxGraphComponent)
                    {
                        graph = ((mxGraphComponent) target).getGraph();
                    }

                    // Bug fix: previously graph.refresh() threw an NPE
                    // (swallowed by the catch below) when the target was
                    // neither an mxGraph nor an mxGraphComponent.
                    if (graph != null)
                    {
                        graph.refresh();
                    }
                }
            }
            catch (Exception e)
            {
                // ignore
            }
        }
    }
}
/**
 * Action that performs either an undo or a redo step on the editor's
 * undo manager.
 */
@SuppressWarnings("serial")
public static class HistoryAction extends AbstractAction
{
    /**
     * true for undo, false for redo.
     */
    protected boolean undo;

    /**
     * @param undo true to undo, false to redo.
     */
    public HistoryAction(boolean undo)
    {
        this.undo = undo;
    }

    /**
     * Applies the configured history operation when an editor can be
     * resolved from the event.
     */
    public void actionPerformed(ActionEvent e)
    {
        BasicGraphEditor editor = getEditor(e);

        if (editor == null)
        {
            return;
        }

        if (undo)
        {
            editor.getUndoManager().undo();
        }
        else
        {
            editor.getUndoManager().redo();
        }
    }
}
/**
 * Action that toggles bold or italic. While an in-place HTML cell
 * editor is active the style is applied to the current text selection;
 * otherwise the font-style flag is toggled on the selected cells.
 */
@SuppressWarnings("serial")
public static class FontStyleAction extends AbstractAction
{
/**
 * true toggles bold, false toggles italic.
 */
protected boolean bold;
/**
 * @param bold true for bold, false for italic.
 */
public FontStyleAction(boolean bold)
{
this.bold = bold;
}
/**
 * Applies the style either to the editor selection or to the cells.
 */
public void actionPerformed(ActionEvent e)
{
if (e.getSource() instanceof mxGraphComponent)
{
mxGraphComponent graphComponent = (mxGraphComponent) e
.getSource();
Component editorComponent = null;
// Resolves the Swing component of the active in-place editor.
if (graphComponent.getCellEditor() instanceof mxCellEditor)
{
editorComponent = ((mxCellEditor) graphComponent
.getCellEditor()).getEditor();
}
if (editorComponent instanceof JEditorPane)
{
JEditorPane editorPane = (JEditorPane) editorComponent;
int start = editorPane.getSelectionStart();
int ende = editorPane.getSelectionEnd();
String text = editorPane.getSelectedText();
if (text == null)
{
text = "";
}
try
{
// Replaces the selected text with the same text wrapped
// in <b>...</b> or <i>...</i> tags.
HTMLEditorKit editorKit = new HTMLEditorKit();
HTMLDocument document = (HTMLDocument) editorPane
.getDocument();
document.remove(start, (ende - start));
editorKit.insertHTML(document, start, ((bold) ? "<b>"
: "<i>") + text + ((bold) ? "</b>" : "</i>"),
0, 0, (bold) ? HTML.Tag.B : HTML.Tag.I);
}
catch (Exception ex)
{
ex.printStackTrace();
}
// Restores focus and the original selection range.
editorPane.requestFocus();
editorPane.select(start, ende);
}
else
{
mxIGraphModel model = graphComponent.getGraph().getModel();
model.beginUpdate();
try
{
// Commits any pending in-place edit before changing styles.
graphComponent.stopEditing(false);
graphComponent.getGraph().toggleCellStyleFlags(
mxConstants.STYLE_FONTSTYLE,
(bold) ? mxConstants.FONT_BOLD
: mxConstants.FONT_ITALIC);
}
finally
{
model.endUpdate();
}
}
}
}
}
/**
 * Action that prompts for a warning message and attaches it to every
 * selected cell of the source graph component.
 */
@SuppressWarnings("serial")
public static class WarningAction extends AbstractAction
{
    /**
     * Prompts for the message and applies it; shows an info dialog when
     * nothing is selected.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        Object[] selection = component.getGraph().getSelectionCells();

        if (selection == null || selection.length == 0)
        {
            JOptionPane.showMessageDialog(component,
                    mxResources.get("noCellSelected"));
            return;
        }

        String warning = JOptionPane.showInputDialog(mxResources
                .get("enterWarningMessage"));

        for (Object cell : selection)
        {
            component.setCellWarning(cell, warning);
        }
    }
}
/**
 * Action that replaces the current diagram with an empty one, after the
 * user confirms discarding unsaved changes.
 */
@SuppressWarnings("serial")
public static class NewAction extends AbstractAction
{
    /**
     * Installs a fresh empty model root and resets the editor state.
     */
    public void actionPerformed(ActionEvent e)
    {
        BasicGraphEditor editor = getEditor(e);

        if (editor == null)
        {
            return;
        }

        boolean proceed = !editor.isModified()
                || JOptionPane.showConfirmDialog(editor,
                        mxResources.get("loseChanges")) == JOptionPane.YES_OPTION;

        if (proceed)
        {
            mxGraph graph = editor.getGraphComponent().getGraph();

            // New root with a single default layer.
            mxCell root = new mxCell();
            root.insert(new mxCell());
            graph.getModel().setRoot(root);

            editor.setModified(false);
            editor.setCurrentFile(null);
            editor.getGraphComponent().zoomAndCenter();
        }
    }
}
/**
 * Action that imports Dia .shape stencil files — either a single file
 * or every .shape file in a chosen directory — registering each shape
 * in the canvas shape registry and, for directories, adding them to a
 * new editor palette.
 */
@SuppressWarnings("serial")
public static class ImportAction extends AbstractAction
{
/**
 * Directory of the last import; used as the chooser's starting dir.
 */
protected String lastDir;
/**
 * Loads and registers the shape as a new shape in mxGraphics2DCanvas and
 * adds a new entry to use that shape in the specified palette
 * @param palette The palette to add the shape to.
 * @param nodeXml The raw XML of the shape
 * @param path The path to the directory the shape exists in
 * @return the string name of the shape
 */
public static String addStencilShape(EditorPalette palette,
String nodeXml, String path)
{
// Some editors place a 3 byte BOM at the start of files
// Ensure the first char is a "<"
int lessthanIndex = nodeXml.indexOf("<");
nodeXml = nodeXml.substring(lessthanIndex);
mxStencilShape newShape = new mxStencilShape(nodeXml);
String name = newShape.getName();
ImageIcon icon = null;
if (path != null)
{
// The icon file referenced by the shape is resolved relative to
// the given directory path.
String iconPath = path + newShape.getIconPath();
icon = new ImageIcon(iconPath);
}
// Registers the shape in the canvas shape registry
mxGraphics2DCanvas.putShape(name, newShape);
if (palette != null && icon != null)
{
palette.addTemplate(name, icon, "shape=" + name, 80, 80, "");
}
return name;
}
/**
 * Shows a file chooser and imports the selected .shape file, or all
 * .shape files of the selected directory into a new palette. I/O
 * errors are printed to stderr.
 */
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
String wd = (lastDir != null) ? lastDir : System
.getProperty("user.dir");
JFileChooser fc = new JFileChooser(wd);
fc.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
// Adds file filter for Dia shape import
fc.addChoosableFileFilter(new DefaultFileFilter(".shape",
"Dia Shape " + mxResources.get("file") + " (.shape)"));
int rc = fc.showDialog(null, mxResources.get("importStencil"));
if (rc == JFileChooser.APPROVE_OPTION)
{
lastDir = fc.getSelectedFile().getParent();
try
{
if (fc.getSelectedFile().isDirectory())
{
// One palette per imported directory, named after it.
EditorPalette palette = editor.insertPalette(fc
.getSelectedFile().getName());
for (File f : fc.getSelectedFile().listFiles(
new FilenameFilter()
{
public boolean accept(File dir,
String name)
{
return name.toLowerCase().endsWith(
".shape");
}
}))
{
String nodeXml = mxUtils.readFile(f
.getAbsolutePath());
addStencilShape(palette, nodeXml, f.getParent()
+ File.separator);
}
JComponent scrollPane = (JComponent) palette
.getParent().getParent();
editor.getLibraryPane().setSelectedComponent(
scrollPane);
// FIXME: Need to update the size of the palette to force a layout
// update. Re/in/validate of palette or parent does not work.
//editor.getLibraryPane().revalidate();
}
else
{
String nodeXml = mxUtils.readFile(fc
.getSelectedFile().getAbsolutePath());
String name = addStencilShape(null, nodeXml, null);
JOptionPane.showMessageDialog(editor, mxResources
.get("stencilImported",
new String[] { name }));
}
}
catch (IOException e1)
{
e1.printStackTrace();
}
}
}
}
}
/**
 * Action that opens a diagram file. Supports PNG+XML (.png with an
 * embedded model), model XML (.mxe and similar) and Graph Drawing
 * (.txt) formats.
 */
@SuppressWarnings("serial")
public static class OpenAction extends AbstractAction
{
/**
 * Directory of the last opened file; used as the chooser's start dir.
 */
protected String lastDir;
/**
 * Clears modified flag and undo history and re-centers the view after
 * a successful load.
 */
protected void resetEditor(BasicGraphEditor editor)
{
editor.setModified(false);
editor.getUndoManager().clear();
editor.getGraphComponent().zoomAndCenter();
}
/**
 * Reads XML+PNG format.
 *
 * Extracts the URL-encoded "mxGraphModel" text chunk written by
 * SaveAction.saveXmlPng and decodes it into the graph model; shows a
 * dialog when the PNG contains no diagram data.
 */
protected void openXmlPng(BasicGraphEditor editor, File file)
throws IOException
{
Map<String, String> text = mxPngTextDecoder
.decodeCompressedText(new FileInputStream(file));
if (text != null)
{
String value = text.get("mxGraphModel");
if (value != null)
{
Document document = mxXmlUtils.parseXml(URLDecoder.decode(
value, "UTF-8"));
mxCodec codec = new mxCodec(document);
codec.decode(document.getDocumentElement(), editor
.getGraphComponent().getGraph().getModel());
editor.setCurrentFile(file);
resetEditor(editor);
return;
}
}
JOptionPane.showMessageDialog(editor,
mxResources.get("imageContainsNoDiagramData"));
}
/**
 * @throws IOException
 *
 * Decodes Graph Drawing text into the (cleared) graph model and marks
 * the diagram as an .mxe file next to the original for later saving.
 */
protected void openGD(BasicGraphEditor editor, File file,
String gdText)
{
mxGraph graph = editor.getGraphComponent().getGraph();
// Replaces file extension with .mxe
String filename = file.getName();
filename = filename.substring(0, filename.length() - 4) + ".mxe";
if (new File(filename).exists()
&& JOptionPane.showConfirmDialog(editor,
mxResources.get("overwriteExistingFile")) != JOptionPane.YES_OPTION)
{
return;
}
((mxGraphModel) graph.getModel()).clear();
mxGdCodec.decode(gdText, graph);
editor.getGraphComponent().zoomAndCenter();
editor.setCurrentFile(new File(lastDir + "/" + filename));
}
/**
 * Confirms discarding unsaved changes, shows a file chooser with the
 * supported filters and dispatches on the chosen file's extension:
 * .png via openXmlPng, .txt via openGD, everything else as model XML.
 * Errors are reported in a dialog.
 */
public void actionPerformed(ActionEvent e)
{
BasicGraphEditor editor = getEditor(e);
if (editor != null)
{
if (!editor.isModified()
|| JOptionPane.showConfirmDialog(editor,
mxResources.get("loseChanges")) == JOptionPane.YES_OPTION)
{
mxGraph graph = editor.getGraphComponent().getGraph();
if (graph != null)
{
String wd = (lastDir != null) ? lastDir : System
.getProperty("user.dir");
JFileChooser fc = new JFileChooser(wd);
// Adds file filter for supported file format
DefaultFileFilter defaultFilter = new DefaultFileFilter(
".mxe", mxResources.get("allSupportedFormats")
+ " (.mxe, .png, .vdx)")
{
public boolean accept(File file)
{
String lcase = file.getName().toLowerCase();
return super.accept(file)
|| lcase.endsWith(".png")
|| lcase.endsWith(".vdx");
}
};
fc.addChoosableFileFilter(defaultFilter);
fc.addChoosableFileFilter(new DefaultFileFilter(".mxe",
"mxGraph Editor " + mxResources.get("file")
+ " (.mxe)"));
fc.addChoosableFileFilter(new DefaultFileFilter(".png",
"PNG+XML " + mxResources.get("file")
+ " (.png)"));
// Adds file filter for VDX import
fc.addChoosableFileFilter(new DefaultFileFilter(".vdx",
"XML Drawing " + mxResources.get("file")
+ " (.vdx)"));
// Adds file filter for GD import
fc.addChoosableFileFilter(new DefaultFileFilter(".txt",
"Graph Drawing " + mxResources.get("file")
+ " (.txt)"));
fc.setFileFilter(defaultFilter);
int rc = fc.showDialog(null,
mxResources.get("openFile"));
if (rc == JFileChooser.APPROVE_OPTION)
{
lastDir = fc.getSelectedFile().getParent();
try
{
if (fc.getSelectedFile().getAbsolutePath()
.toLowerCase().endsWith(".png"))
{
openXmlPng(editor, fc.getSelectedFile());
}
else if (fc.getSelectedFile().getAbsolutePath()
.toLowerCase().endsWith(".txt"))
{
openGD(editor, fc.getSelectedFile(),
mxUtils.readFile(fc
.getSelectedFile()
.getAbsolutePath()));
}
else
{
Document document = mxXmlUtils
.parseXml(mxUtils.readFile(fc
.getSelectedFile()
.getAbsolutePath()));
mxCodec codec = new mxCodec(document);
codec.decode(
document.getDocumentElement(),
graph.getModel());
editor.setCurrentFile(fc
.getSelectedFile());
resetEditor(editor);
}
}
catch (IOException ex)
{
ex.printStackTrace();
JOptionPane.showMessageDialog(
editor.getGraphComponent(),
ex.toString(),
mxResources.get("error"),
JOptionPane.ERROR_MESSAGE);
}
}
}
}
}
}
}
/**
 * Action that toggles a boolean style key on the selected cells of the
 * graph resolved from the event.
 */
@SuppressWarnings("serial")
public static class ToggleAction extends AbstractAction
{
    /**
     * Style key to toggle.
     */
    protected String key;

    /**
     * Value assumed when the style does not define the key yet.
     */
    protected boolean defaultValue;

    /**
     * Constructs a toggle with a default value of false.
     *
     * @param key Style key to toggle.
     */
    public ToggleAction(String key)
    {
        this(key, false);
    }

    /**
     * @param key Style key to toggle.
     * @param defaultValue Value assumed for undefined keys.
     */
    public ToggleAction(String key, boolean defaultValue)
    {
        this.key = key;
        this.defaultValue = defaultValue;
    }

    /**
     * Toggles the style on the graph's current selection.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null)
        {
            return;
        }

        graph.toggleCellStyles(key, defaultValue);
    }
}
/**
 * Action that sets the label position and alignment styles on the
 * selected cells, using the horizontal or vertical style keys depending
 * on the given position constant.
 */
@SuppressWarnings("serial")
public static class SetLabelPositionAction extends AbstractAction
{
    /**
     * Label position constant and matching alignment constant.
     */
    protected String labelPosition, alignment;

    /**
     * @param labelPosition One of the mxConstants.ALIGN_* constants.
     * @param alignment Alignment to pair with the position.
     */
    public SetLabelPositionAction(String labelPosition, String alignment)
    {
        this.labelPosition = labelPosition;
        this.alignment = alignment;
    }

    /**
     * Applies the position/alignment pair inside a single model update.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        // Left/center/right are horizontal positions and use the plain
        // style keys; anything else uses the VERTICAL_* variants.
        boolean horizontal = labelPosition.equals(mxConstants.ALIGN_LEFT)
                || labelPosition.equals(mxConstants.ALIGN_CENTER)
                || labelPosition.equals(mxConstants.ALIGN_RIGHT);

        graph.getModel().beginUpdate();
        try
        {
            if (horizontal)
            {
                graph.setCellStyles(mxConstants.STYLE_LABEL_POSITION,
                        labelPosition);
                graph.setCellStyles(mxConstants.STYLE_ALIGN, alignment);
            }
            else
            {
                graph.setCellStyles(
                        mxConstants.STYLE_VERTICAL_LABEL_POSITION,
                        labelPosition);
                graph.setCellStyles(mxConstants.STYLE_VERTICAL_ALIGN,
                        alignment);
            }
        }
        finally
        {
            graph.getModel().endUpdate();
        }
    }
}
/**
 * Action that replaces the entire style string of the selected cells.
 */
@SuppressWarnings("serial")
public static class SetStyleAction extends AbstractAction
{
    /**
     * Style string to apply.
     */
    protected String value;

    /**
     * @param value Complete style string to set on the selection.
     */
    public SetStyleAction(String value)
    {
        this.value = value;
    }

    /**
     * Sets the style on the current selection, if any.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        graph.setCellStyle(value);
    }
}
/**
 * Action that sets a single style key to a fixed value on the selected
 * cells (a null value removes the key).
 */
@SuppressWarnings("serial")
public static class KeyValueAction extends AbstractAction
{
    /**
     * Style key and the value to assign to it.
     */
    protected String key, value;

    /**
     * Constructs an action that removes the key (null value).
     *
     * @param key Style key to modify.
     */
    public KeyValueAction(String key)
    {
        this(key, null);
    }

    /**
     * @param key Style key to modify.
     * @param value Value to set, or null to remove the key.
     */
    public KeyValueAction(String key, String value)
    {
        this.key = key;
        this.value = value;
    }

    /**
     * Applies the key/value pair to the current selection, if any.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        graph.setCellStyles(key, value);
    }
}
/**
 * Action that prompts the user for a value and assigns it to a style
 * key on the selected cells; entering "none" removes the key.
 */
@SuppressWarnings("serial")
public static class PromptValueAction extends AbstractAction
{
    /**
     * Style key to modify and the dialog title shown to the user.
     */
    protected String key, message;

    /**
     * @param key Style key to modify.
     * @param message Dialog title.
     */
    public PromptValueAction(String key, String message)
    {
        this.key = key;
        this.message = message;
    }

    /**
     * Shows the input dialog and applies the entered value.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof Component))
        {
            return;
        }

        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        String value = (String) JOptionPane.showInputDialog(
                (Component) e.getSource(), mxResources.get("value"),
                message, JOptionPane.PLAIN_MESSAGE, null, null, "");

        if (value != null)
        {
            // The literal "none" clears the key from the style.
            if (value.equals(mxConstants.NONE))
            {
                value = null;
            }

            graph.setCellStyles(key, value);
        }
    }
}
/**
 * Action that aligns the selected cells along a given edge or axis.
 */
@SuppressWarnings("serial")
public static class AlignCellsAction extends AbstractAction
{
    /**
     * Alignment constant passed to mxGraph.alignCells.
     */
    protected String align;

    /**
     * @param align Alignment constant to apply.
     */
    public AlignCellsAction(String align)
    {
        this.align = align;
    }

    /**
     * Aligns the current selection, if any.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        graph.alignCells(align);
    }
}
/**
 * Action that resizes every selected cell to fit its label, inside a
 * single model update.
 */
@SuppressWarnings("serial")
public static class AutosizeAction extends AbstractAction
{
    /**
     * Updates the size of each selected cell.
     */
    public void actionPerformed(ActionEvent e)
    {
        mxGraph graph = mxGraphActions.getGraph(e);

        if (graph == null || graph.isSelectionEmpty())
        {
            return;
        }

        mxIGraphModel model = graph.getModel();
        model.beginUpdate();
        try
        {
            for (Object cell : graph.getSelectionCells())
            {
                graph.updateCellSize(cell);
            }
        }
        finally
        {
            model.endUpdate();
        }
    }
}
/**
 * Action that lets the user pick a color and assigns its hex value to a
 * style key on the selected cells.
 */
@SuppressWarnings("serial")
public static class ColorAction extends AbstractAction
{
    /**
     * Chooser dialog title and the style key to receive the color.
     */
    protected String name, key;

    /**
     * @param name Title of the color chooser dialog.
     * @param key Style key to set.
     */
    public ColorAction(String name, String key)
    {
        this.name = name;
        this.key = key;
    }

    /**
     * Shows the chooser and applies the picked color, if confirmed.
     */
    public void actionPerformed(ActionEvent e)
    {
        if (!(e.getSource() instanceof mxGraphComponent))
        {
            return;
        }

        mxGraphComponent component = (mxGraphComponent) e.getSource();
        mxGraph graph = component.getGraph();

        if (graph.isSelectionEmpty())
        {
            return;
        }

        Color picked = JColorChooser.showDialog(component, name, null);

        if (picked != null)
        {
            graph.setCellStyles(key, mxUtils.hexString(picked));
        }
    }
}
/**
 * Action that prompts for an image URL and installs it as the background
 * image of the graph component.
 */
@SuppressWarnings("serial")
public static class BackgroundImageAction extends AbstractAction
{
	/**
	 * Asks for a URL: an empty string clears the background image, a
	 * loadable URL replaces it, and an unloadable URL leaves it unchanged.
	 */
	@Override
	public void actionPerformed(ActionEvent e)
	{
		if (!(e.getSource() instanceof mxGraphComponent))
		{
			return;
		}

		mxGraphComponent graphComponent = (mxGraphComponent) e.getSource();
		String value = (String) JOptionPane.showInputDialog(graphComponent,
				mxResources.get("backgroundImage"), "URL",
				JOptionPane.PLAIN_MESSAGE, null, null,
				"http://www.callatecs.com/images/background2.JPG");

		if (value == null)
		{
			// Dialog was cancelled; keep everything as-is.
			return;
		}

		if (value.length() == 0)
		{
			graphComponent.setBackgroundImage(null);
		}
		else
		{
			// Incorrect URLs will result in no image.
			// TODO provide feedback that the URL is not correct
			Image background = mxUtils.loadImage(value);

			if (background != null)
			{
				graphComponent.setBackgroundImage(new ImageIcon(background));
			}
		}

		// Forces a repaint of the outline
		graphComponent.getGraph().repaint();
	}
}
/**
 * Action that changes the background color of the graph component's
 * viewport via a color chooser dialog.
 */
@SuppressWarnings("serial")
public static class BackgroundAction extends AbstractAction
{
	/**
	 * Shows a color chooser and applies the result to the viewport.
	 */
	@Override
	public void actionPerformed(ActionEvent e)
	{
		Object source = e.getSource();

		if (source instanceof mxGraphComponent)
		{
			mxGraphComponent graphComponent = (mxGraphComponent) source;
			Color newColor = JColorChooser.showDialog(graphComponent,
					mxResources.get("background"), null);

			if (newColor != null)
			{
				// Viewport must be opaque for the background to show.
				graphComponent.getViewport().setOpaque(true);
				graphComponent.getViewport().setBackground(newColor);
			}

			// Forces a repaint of the outline
			graphComponent.getGraph().repaint();
		}
	}
}
/**
 * Action that changes the page background color of the graph component
 * via a color chooser dialog.
 */
@SuppressWarnings("serial")
public static class PageBackgroundAction extends AbstractAction
{
	/**
	 * Shows a color chooser and applies the result as the page background.
	 */
	@Override
	public void actionPerformed(ActionEvent e)
	{
		Object source = e.getSource();

		if (source instanceof mxGraphComponent)
		{
			mxGraphComponent graphComponent = (mxGraphComponent) source;
			Color newColor = JColorChooser.showDialog(graphComponent,
					mxResources.get("pageBackground"), null);

			if (newColor != null)
			{
				graphComponent.setPageBackgroundColor(newColor);
			}

			// Forces a repaint of the component
			graphComponent.repaint();
		}
	}
}
/**
 * Action that lets the user edit the raw style string of the current
 * selection cell and applies the entered value to all selected cells.
 */
@SuppressWarnings("serial")
public static class StyleAction extends AbstractAction
{
	/**
	 * Prompts for a style string, pre-filled with the style of the current
	 * selection cell, and applies it to the selection via
	 * mxGraph.setCellStyle. Does nothing when the selection is empty,
	 * consistent with the other selection-based actions in this file
	 * (previously the dialog was shown even though setCellStyle could not
	 * affect anything without a selection).
	 */
	@Override
	public void actionPerformed(ActionEvent e)
	{
		if (e.getSource() instanceof mxGraphComponent)
		{
			mxGraphComponent graphComponent = (mxGraphComponent) e
					.getSource();
			mxGraph graph = graphComponent.getGraph();

			// Guard added for consistency with sibling actions: without a
			// selection the dialog would be a no-op for the user.
			if (graph.isSelectionEmpty())
			{
				return;
			}

			String initial = graph.getModel().getStyle(
					graph.getSelectionCell());
			String value = (String) JOptionPane.showInputDialog(
					graphComponent, mxResources.get("style"),
					mxResources.get("style"), JOptionPane.PLAIN_MESSAGE,
					null, null, initial);

			if (value != null)
			{
				graph.setCellStyle(value);
			}
		}
	}
}
}