answer
stringlengths
17
10.2M
import java.io.*;

/**
 * A rectangular grid of boolean cells ("fachada" = facade) that can be loaded
 * from a text file or created empty. The file format is: first line
 * "rows cols" separated by a space, then one line per row with one character
 * per column. Cells matching {@link #ON} are true; everything else is false.
 */
public class Fachada {
    // NOTE(review): the original source was corrupted here — the character that
    // marks an "on" cell was lost in extraction. '*' is assumed; TODO confirm
    // against the actual data files.
    private static final char ON = '*';

    private int filas;       // number of rows
    private int columnas;    // number of columns
    private int contador;    // count of true cells seen while loading
    private boolean[][] fachada;

    /**
     * Loads a facade from the given file.
     *
     * @param fichero path of the file to read
     */
    public Fachada(String fichero) {
        // try-with-resources guarantees the reader is closed even if parsing throws
        try (BufferedReader br = new BufferedReader(new FileReader(fichero))) {
            String sCurrentLine = br.readLine();
            String[] cr = sCurrentLine.split(" ");
            this.filas = Integer.parseInt(cr[0]);
            this.columnas = Integer.parseInt(cr[1]);
            this.fachada = new boolean[this.filas][this.columnas];
            int i = 0;
            while ((sCurrentLine = br.readLine()) != null) {
                rellenar(sCurrentLine, i, this.getColumnas());
                i++;
            }
        } catch (IOException e) {
            // Best-effort load, matching the original behavior: report and
            // leave the object partially initialized.
            e.printStackTrace();
        }
    }

    /**
     * Creates an empty (all-false) facade of the given dimensions.
     *
     * @param filas    number of rows
     * @param columnas number of columns
     */
    public Fachada(int filas, int columnas) {
        this.filas = filas;
        this.columnas = columnas;
        this.fachada = new boolean[filas][columnas];
    }

    /** @return number of rows */
    public int getFilas() {
        return this.filas;
    }

    /** @return number of columns */
    public int getColumnas() {
        return this.columnas;
    }

    /**
     * Fills one row of the grid from its textual representation, counting
     * the cells that are switched on.
     *
     * @param cadena  the row's text, at least {@code columna} characters long
     * @param fila    row index to fill
     * @param columna number of columns to parse
     */
    private void rellenar(String cadena, int fila, int columna) {
        for (int i = 0; i < columna; i++) {
            if (cadena.charAt(i) == ON) {
                this.fachada[fila][i] = true;
                this.contador++;
            } else {
                this.fachada[fila][i] = false;
            }
        }
    }

    /**
     * Renders the grid, one row per line: {@link #ON} for true cells,
     * '.' for false cells.
     */
    public String toString() {
        // StringBuilder avoids O(n^2) string concatenation in the nested loop
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < this.fachada.length; i++) {
            for (int j = 0; j < this.fachada[0].length; j++) {
                sb.append(fachada[i][j] ? ON : '.');
            }
            sb.append('\n');
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Fachada test = new Fachada(args[0]);
        System.out.println(test);
    }
}
package reborncore;

import me.modmuss50.jsonDestroyer.JsonDestroyer;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.network.NetworkRegistry;
import reborncore.common.IModInfo;
import reborncore.common.RebornCoreConfig;
import reborncore.common.packets.PacketHandler;
import reborncore.common.powerSystem.tesla.TeslaManager;
import reborncore.common.util.LogHelper;
import reborncore.common.util.OreUtil;
import reborncore.shields.RebornCoreShields;
import reborncore.shields.json.ShieldJsonLoader;

import java.io.IOException;

/**
 * Forge mod entry point for RebornCore. Wires up config, proxies, the packet
 * channel and the shield JSON loader across the FML lifecycle events.
 */
@Mod(modid = RebornCore.MOD_ID, name = RebornCore.MOD_NAME, version = RebornCore.MOD_VERSION, acceptedMinecraftVersions = "[1.9.4,1.9]")
public class RebornCore implements IModInfo {
    public static final String MOD_NAME = "RebornCore";
    public static final String MOD_ID = "reborncore";
    public static final String MOD_VERSION = "@MODVERSION@";

    public static LogHelper logHelper;
    public static JsonDestroyer jsonDestroyer = new JsonDestroyer();
    public static RebornCoreConfig config;

    @SidedProxy(clientSide = "reborncore.ClientProxy", serverSide = "reborncore.CommonProxy")
    public static CommonProxy proxy;

    public RebornCore() {
        logHelper = new LogHelper(this);
    }

    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event) {
        config = RebornCoreConfig.initialize(event.getSuggestedConfigurationFile());
        proxy.preInit(event);
        // Shield JSON parsing is loaded off-thread so it does not block pre-init.
        new Thread(() -> {
            try {
                ShieldJsonLoader.load(event);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }).start();
    }

    @Mod.EventHandler
    public void init(FMLInitializationEvent event) {
        jsonDestroyer.load();
        TeslaManager.load();
        // Register the mod's network packet channel.
        PacketHandler.setChannels(NetworkRegistry.INSTANCE.newChannel(MOD_ID + "_packets", new PacketHandler()));
        OreUtil.scanForOres();
        RebornCoreShields.init();
        proxy.init(event);
    }

    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent event) {
        proxy.postInit(event);
    }

    public String MOD_NAME() {
        return MOD_NAME;
    }

    @Override
    public String MOD_ID() {
        return MOD_ID;
    }

    @Override
    public String MOD_VERSION() {
        return MOD_VERSION;
    }

    @Override
    public String MOD_DEPENDENCIES() {
        return "";
    }
}
package net.fortuna.ical4j.data;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;
import java.util.Arrays;

import net.fortuna.ical4j.util.CompatibilityHints;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * A reader which performs iCalendar unfolding as it reads. Note that unfolding rules may be "relaxed" to allow
 * unfolding of non-conformant *.ics files. By specifying the system property "ical4j.unfolding.relaxed=true" iCalendar
 * files created with Mozilla Calendar/Sunbird may be correctly unfolded.
 *
 * To wrap this reader with a {@link BufferedReader} you must ensure you specify an identical buffer size
 * to that used in the {@link BufferedReader}.
 *
 * @author Ben Fortuna
 */
public class UnfoldingReader extends PushbackReader {

    private Log log = LogFactory.getLog(UnfoldingReader.class);

    /** The pattern used to identify a fold in an iCalendar data stream. */
    private static final char[] DEFAULT_FOLD_PATTERN = { '\r', '\n', ' ' };

    /** The pattern used to identify a fold in Mozilla Calendar/Sunbird and KOrganizer. */
    private static final char[] RELAXED_FOLD_PATTERN_1 = { '\n', ' ' };

    /** The pattern used to identify a fold in Microsoft Outlook 2007. */
    private static final char[] RELAXED_FOLD_PATTERN_2 = { '\r', '\n', '\t' };

    /** The pattern used to identify a fold in Microsoft Outlook 2007. */
    private static final char[] RELAXED_FOLD_PATTERN_3 = { '\n', '\t' };

    // Active fold patterns (1 when strict, 4 when relaxed) and a scratch
    // buffer per pattern for look-ahead comparison in unfold().
    private char[][] patterns;
    private char[][] buffers;

    private int linesUnfolded;
    private int maxPatternLength = 0;

    /**
     * Creates a new unfolding reader instance. Relaxed unfolding flag is read from system property.
     *
     * @param in the reader to unfold from
     */
    public UnfoldingReader(final Reader in) {
        this(in, DEFAULT_FOLD_PATTERN.length,
                CompatibilityHints.isHintEnabled(CompatibilityHints.KEY_RELAXED_UNFOLDING));
    }

    /**
     * @param in reader to unfold from
     * @param size pushback buffer size
     */
    public UnfoldingReader(final Reader in, int size) {
        this(in, size, CompatibilityHints.isHintEnabled(CompatibilityHints.KEY_RELAXED_UNFOLDING));
    }

    /**
     * Creates a new unfolding reader instance.
     *
     * @param in a reader to read from
     * @param size pushback buffer size
     * @param relaxed specifies whether unfolding is relaxed
     */
    public UnfoldingReader(final Reader in, int size, final boolean relaxed) {
        super(in, size);
        if (relaxed) {
            patterns = new char[4][];
            patterns[0] = DEFAULT_FOLD_PATTERN;
            patterns[1] = RELAXED_FOLD_PATTERN_1;
            patterns[2] = RELAXED_FOLD_PATTERN_2;
            patterns[3] = RELAXED_FOLD_PATTERN_3;
        }
        else {
            patterns = new char[1][];
            patterns[0] = DEFAULT_FOLD_PATTERN;
        }
        buffers = new char[patterns.length][];
        for (int i = 0; i < patterns.length; i++) {
            buffers[i] = new char[patterns[i].length];
            maxPatternLength = Math.max(maxPatternLength, patterns[i].length);
        }
    }

    /**
     * @return number of lines unfolded so far while reading
     */
    public final int getLinesUnfolded() {
        return linesUnfolded;
    }

    /**
     * @see java.io.PushbackReader#read()
     */
    public final int read() throws IOException {
        int c = super.read();
        boolean doUnfold = false;
        for (int i = 0; i < patterns.length; i++) {
            if (c == patterns[i][0]) {
                doUnfold = true;
                break;
            }
        }
        if (!doUnfold) {
            return c;
        }
        else {
            // Push the fold-start candidate back so unfold() sees the whole pattern.
            unread(c);
        }
        unfold();
        return super.read();
    }

    /**
     * @see java.io.PushbackReader#read(char[], int, int)
     */
    public int read(char[] cbuf, int off, int len) throws IOException {
        int read = super.read(cbuf, off, len);
        boolean doUnfold = false;
        for (int i = 0; i < patterns.length; i++) {
            // NOTE(review): this inspects cbuf[0] rather than cbuf[off] — looks
            // like it assumes off == 0; confirm against upstream ical4j before
            // changing.
            if (read > 0 && cbuf[0] == patterns[i][0]) {
                doUnfold = true;
                break;
            }
            else {
                // A fold might start mid-buffer: return the prefix and push the
                // rest back for the next read() call to handle.
                for (int j = 0; j < read; j++) {
                    if (cbuf[j] == patterns[i][0]) {
                        unread(cbuf, j, read - j);
                        return j;
                    }
                }
            }
        }
        if (!doUnfold) {
            return read;
        }
        else {
            unread(cbuf, off, read);
        }
        unfold();
        return super.read(cbuf, off, maxPatternLength);
    }

    /**
     * Consumes consecutive fold sequences from the underlying stream,
     * incrementing {@link #linesUnfolded} for each one removed.
     *
     * @throws IOException on read failure
     */
    private void unfold() throws IOException {
        // Loop: one line fold might be directly followed by another.
        boolean didUnfold;
        do {
            didUnfold = false;
            for (int i = 0; i < buffers.length; i++) {
                int read = super.read(buffers[i], 0, buffers[i].length);
                if (read > 0) {
                    // NOTE(review): a short read (read < pattern length) compares
                    // a partially-filled buffer; preserved as-is from upstream.
                    if (!Arrays.equals(patterns[i], buffers[i])) {
                        unread(buffers[i], 0, read);
                    }
                    else {
                        if (log.isTraceEnabled()) {
                            log.trace("Unfolding...");
                        }
                        linesUnfolded++;
                        didUnfold = true;
                    }
                }
            }
        }
        while (didUnfold);
    }
}
package water;
import java.lang.management.ManagementFactory;
import javax.management.*;
import water.persist.Persist;
import water.util.LinuxProcFileReader;
import water.util.Log;

/**
 * Starts a thread publishing multicast HeartBeats to the local subnet: the
 * Leader of this Cloud.
 *
 * @author <a href="mailto:cliffc@0xdata.com"></a>
 * @version 1.0
 */
public class HeartBeatThread extends Thread {
  public HeartBeatThread() {
    super("Heartbeat");
    // Daemon thread: must not keep the JVM alive on shutdown.
    setDaemon(true);
  }

  // Time between heartbeats.  Strictly several iterations less than the
  // timeout.
  static final int SLEEP = 1000;

  // Timeout in msec before we decide to not include a Node in the next round
  // of Paxos Cloud Membership voting.
  static public final int TIMEOUT = 60000;

  // Timeout in msec before we decide a Node is suspect, and call for a vote
  // to remove him.  This must be strictly greater than the TIMEOUT.
  static final int SUSPECT = TIMEOUT+500;

  // My Histogram. Called from any thread calling into the MM.
  // Singleton, allocated now so I do not allocate during an OOM event.
  static private final H2O.Cleaner.Histo myHisto = new H2O.Cleaner.Histo();

  // uniquely number heartbeats for better timelines
  static private int HB_VERSION;

  // The Run Method.
  // Started by main() on a single thread, this code publishes Cloud membership
  // to the Cloud once a second (across all members).  If anybody disagrees
  // with the membership Heartbeat, they will start a round of Paxos group
  // discovery.
  public void run() {
    // JMX OS bean is used below to scrape the system load average.
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    ObjectName os;
    try {
      os = new ObjectName("java.lang:type=OperatingSystem");
    } catch( MalformedObjectNameException e ) {
      throw Log.errRTExcept(e);
    }
    Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
    int counter = 0;
    while( true ) {
      // Once per second, for the entire cloud a Node will multi-cast publish
      // itself, so other unrelated Clouds discover each other and form up.
      try { Thread.sleep(SLEEP); } // Only once-sec per entire Cloud
      catch( InterruptedException ignore ) { }

      // Update the interesting health self-info for publication also
      H2O cloud = H2O.CLOUD;
      HeartBeat hb = H2O.SELF._heartbeat;
      hb._hb_version = HB_VERSION++;
      hb._jvm_boot_msec= TimeLine.JVM_BOOT_MSEC;
      // JVM memory statistics.
      final Runtime run = Runtime.getRuntime();
      hb.set_free_mem  (run. freeMemory());
      hb.set_max_mem   (run.  maxMemory());
      hb.set_tot_mem   (run.totalMemory());
      hb._keys       = (H2O.STORE.size ());
      hb.set_valsz     (myHisto.histo(false)._cached);
      hb._num_cpus   = (char)run.availableProcessors();

      // Run mini-benchmark every 5 mins.  However, on startup - do not have
      // all JVMs immediately launch a all-core benchmark - they will fight
      // with each other.  Stagger them using the hashcode.
      if( (counter+Math.abs(H2O.SELF.hashCode())) % 300 == 0) {
        hb._gflops = Linpack.run(hb._cpus_allowed);
        hb._membw  = MemoryBandwidth.run(hb._cpus_allowed);
      }

      // System load average via JMX; 0 if the attribute is unavailable.
      Object load = null;
      try {
        load = mbs.getAttribute(os, "SystemLoadAverage");
      } catch( Exception e ) {
        // Ignore, data probably not available on this VM
      }
      hb._system_load_average = load instanceof Double ? ((Double) load).floatValue() : 0;

      // Count outstanding RPCs across all cloud members.
      int rpcs = 0;
      for( H2ONode h2o : cloud._memary )
        rpcs += h2o.taskSize();
      hb._rpcs       = (char)rpcs;

      // Scrape F/J pool counts, one entry per priority level.
      hb._fjthrds = new short[H2O.MAX_PRIORITY+1];
      hb._fjqueue = new short[H2O.MAX_PRIORITY+1];
      for( int i=0; i<hb._fjthrds.length; i++ ) {
        hb._fjthrds[i] = (short)H2O.getWrkThrPoolSize(i);
        hb._fjqueue[i] = (short)H2O.getWrkQueueSize(i);
      }
      hb._tcps_active= (char)H2ONode.TCPS.get();

      // get the usable and total disk storage for the partition where the
      // persistent KV pairs are stored
      hb.set_free_disk(Persist.getIce().getUsableSpace());
      hb.set_max_disk(Persist.getIce().getTotalSpace());

      // get cpu utilization for the system and for this process. (linux only.)
      // -1 sentinels signal "unavailable" to consumers on non-Linux platforms.
      LinuxProcFileReader lpfr = new LinuxProcFileReader();
      lpfr.read();
      if (lpfr.valid()) {
        hb._system_idle_ticks = lpfr.getSystemIdleTicks();
        hb._system_total_ticks = lpfr.getSystemTotalTicks();
        hb._process_total_ticks = lpfr.getProcessTotalTicks();
        hb._process_num_open_fds = lpfr.getProcessNumOpenFds();
      }
      else {
        hb._system_idle_ticks = -1;
        hb._system_total_ticks = -1;
        hb._process_total_ticks = -1;
        hb._process_num_open_fds = -1;
      }
      hb._cpus_allowed = lpfr.getProcessCpusAllowed();
      hb._pid = lpfr.getProcessID();

      // Announce what Cloud we think we are in.
      // Publish our health as well.
      UDPHeartbeat.build_and_multicast(cloud, hb);

      // If we have no internet connection, then the multicast goes
      // nowhere and we never receive a heartbeat from ourselves!
      // Fake it now.
      long now = System.currentTimeMillis();
      H2O.SELF._last_heard_from = now;

      // Look for napping Nodes & propose removing from Cloud
      for( H2ONode h2o : cloud._memary ) {
        long delta = now - h2o._last_heard_from;
        if( delta > SUSPECT ) {// We suspect this Node has taken a dirt nap
          if( !h2o._announcedLostContact ) {
            Paxos.print("hart: announce suspect node",cloud._memary,h2o.toString());
            h2o._announcedLostContact = true;
          }
        } else if( h2o._announcedLostContact ) {
          Paxos.print("hart: regained contact with node",cloud._memary,h2o.toString());
          h2o._announcedLostContact = false;
        }
      }
      counter++;
    }
  }
}
package dr.evomodel.substmodel;

import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.evolution.datatype.Microsatellite;
import dr.math.ModifiedBesselFirstKind;

/**
 * @author Chieh-Hsi Wu
 * Implementation of models by Watkins (2007)
 */
public class NewMicrosatelliteModel extends AbstractSubstitutionModel {

    public NewMicrosatelliteModel(Microsatellite msat, FrequencyModel rootFreqModel) {
        super("NewMicrosatelliteModel", msat, rootFreqModel);
    }

    /**
     * Fills {@code matrix} (row-major, stateCount x stateCount) with transition
     * probabilities for the given distance, then normalizes each row to sum to 1.
     */
    public void getTransitionProbabilities(double distance, double[] matrix) {
        double[] rowSums = new double[stateCount];

        // First pass: unnormalized entries exp(-t) * I_{|i-j|}(t) and row totals.
        int cell = 0;
        for (int row = 0; row < stateCount; row++) {
            for (int col = 0; col < stateCount; col++) {
                matrix[cell] = Math.exp(-distance) * ModifiedBesselFirstKind.bessi(distance, Math.abs(row - col));
                rowSums[row] += matrix[cell];
                cell++;
            }
        }

        // Second pass: divide each entry by its row total.
        cell = 0;
        for (int row = 0; row < stateCount; row++) {
            for (int col = 0; col < stateCount; col++) {
                matrix[cell] = matrix[cell] / rowSums[row];
                cell++;
            }
        }
    }

    protected void ratesChanged() {}

    protected void setupRelativeRates() {}

    protected void frequenciesChanged() {}

    /** Smoke test: prints the normalized transition matrix for t = 1.2. */
    public static void main(String[] args) {
        Microsatellite msat = new Microsatellite(1, 30);
        NewMicrosatelliteModel nmsatModel = new NewMicrosatelliteModel(msat, null);
        double[] probs = new double[msat.getStateCount() * msat.getStateCount()];
        nmsatModel.getTransitionProbabilities(1.2, probs);
        int k = 0;
        for (int i = 0; i < msat.getStateCount(); i++) {
            for (int j = 0; j < msat.getStateCount(); j++) {
                System.out.print(probs[k++] + " ");
            }
            System.out.println();
        }
    }
}
/* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ //Make sure to "Commit" code on your local computer with relevant comments //At end of session, "Push" code to the remote repository online. package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.Relay; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.Talon; import com.sun.squawk.util.*; import edu.wpi.first.wpilibj.Compressor; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Preferences; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj.SensorBase; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class RobotTemplate extends IterativeRobot { double STARTINGTHRESHOLD = 0.0; Talon frontleft; Talon frontright; Talon backleft; Talon backright; RobotDrive drive; Joystick operatorStick; Joystick driverStick; Talon winch1; Talon winch2; Compressor compressor; Talon collector; Relay compressorRelay; Relay pancakeRelay; Relay oceanbluePistons; Relay skydivePistons; Timer pancakeTimer; boolean isPancakeTimerOn = false; boolean isPS2Joystick = false; DigitalInput dropitlowSensor; /** * This function is run when the robot is first started up and should be * used for any initialization code. 
*/ public void robotInit() { driverStick= new Joystick(1); operatorStick = new Joystick(2); collector = new Talon (5); frontleft = new Talon (1); frontright = new Talon (2); backleft = new Talon (3); backright = new Talon (4); winch1 = new Talon (6); winch2 = new Talon (7); drive = new RobotDrive (frontleft,backleft, frontright,backright); compressorRelay = new Relay(1, Relay.Direction.kForward); //compressorRelay.set(Relay.Value.kOff); //compressorRelaySwitchOn(); //compressor = new Compressor(5,1); //compressor.start(); pancakeRelay = new Relay(4, Relay.Direction.kBoth); //pancakeRelay.setDirection(Relay.Direction.kForward); pancakeTimer = new Timer(); oceanbluePistons = new Relay (2, Relay.Direction.kBoth); skydivePistons = new Relay (3, Relay.Direction.kBoth); dropitlowSensor = new DigitalInput (4); //boolean dropitlowSensor = false; isPS2Joystick = SmartDashboard.getBoolean("usePS2Joystick", false); SmartDashboard.putString("Collector", "disengaged"); SmartDashboard.putString("Pancake", "disengaged"); SmartDashboard.putString("Winch", "OFF"); SmartDashboard.putString("Anchor", "UP"); SmartDashboard.putString("cArm", "RVRSE"); SmartDashboard.putString("Compressor", "ON"); } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { } /** * This function is called periodically during operator control */ public void teleopPeriodic() { checkDrive(); checkWinch(); checkCollector(); checkCompressor(); checkAnchor(); checkCollectorAngles(); checkPancake(); } public void checkCollector() { if(operatorStick.getRawButton(8)) { SmartDashboard.putString("Collector", "FWD"); collector.set(1.0); } else if(operatorStick.getRawButton(9)) { SmartDashboard.putString("Collector", "RVRSE"); collector.set(-1.0); } else { collector.set(0.0); SmartDashboard.putString("Collector", "OFF"); } } /*public void compressorRelaySwitchOn() { compressorRelay.set(Relay.Value.kOn); //System.out.println("Compressor Relay Value Now: " + 
compressorRelay.get().value); } */ public void checkCollectorAngles () { if(operatorStick.getRawButton(6)) { skydivePistons.set(Relay.Value.kReverse); SmartDashboard.putString("cArm", "Retracted"); } if (operatorStick.getRawButton(7)) { skydivePistons.set(Relay.Value.kForward); SmartDashboard.putString("cArm", "Extended"); } } public void checkCompressor() { if (driverStick.getRawButton(3)) { SmartDashboard.putString("Compressor", "ON"); compressorRelay.set(Relay.Value.kOn); } if(driverStick.getRawButton(4)) { SmartDashboard.putString("Compressor", "OFF"); compressorRelay.set(Relay.Value.kOff); } } public void checkAnchor() { if(driverStick.getRawButton(7)){ SmartDashboard.putString("Anchor", "DOWN"); oceanbluePistons.set(Relay.Value.kForward); } if (driverStick.getRawButton(8)){ SmartDashboard.putString("Anchor", "UP"); oceanbluePistons.set(Relay.Value.kReverse); } } public void checkWinch() { if(operatorStick.getRawButton(11) /*&& dropitlowSensor.get() == false*/) { SmartDashboard.putString("Winch", "ON"); winch1.set(1.0); winch2.set(1.0); } else { SmartDashboard.putString("Winch", "OFF"); winch1.set(0.0); winch2.set(0.0); } } public void checkPancake() { if(operatorStick.getRawButton(10) && isPancakeTimerOn == false) { pancakeTimer.start(); isPancakeTimerOn = true; SmartDashboard.putString("Pancake", "engaged"); pancakeRelay.set(Relay.Value.kReverse); } // after 2 secs if(pancakeTimer.get() >= 2) { SmartDashboard.putString("Pancake", "disengaged"); pancakeRelay.set(Relay.Value.kForward); pancakeTimer.stop(); pancakeTimer.reset(); isPancakeTimerOn = false; } } private void checkDrive(){ double x = driverStick.getRawAxis(1); if(x < 0.1 && x > -0.1){ x = 0; } if(x > 0){ x = (1-STARTINGTHRESHOLD) * MathUtils.pow(x,2) + STARTINGTHRESHOLD; } else if(x < 0){ x = -1 * ((1-STARTINGTHRESHOLD) * MathUtils.pow(x,2) + STARTINGTHRESHOLD); } double y = driverStick.getRawAxis(2); if(y < 0.1 && y > -0.1){ y = 0; } if(y > 0){ y = (1-STARTINGTHRESHOLD) * MathUtils.pow(y,2) + 
STARTINGTHRESHOLD; } else if(y < 0){ y = -1 * ((1-STARTINGTHRESHOLD) * MathUtils.pow(y,2) + STARTINGTHRESHOLD); } double rotation = 0; if(isPS2Joystick) { rotation = operatorStick.getRawAxis(4); } else { rotation = driverStick.getRawAxis(3); } if(rotation < 0.05 && rotation > -0.05){ rotation = 0; } if(rotation > 0){ rotation = (1-STARTINGTHRESHOLD) * MathUtils.pow(rotation,2) + STARTINGTHRESHOLD; } else if (rotation < 0){ rotation = -1 * ((1-STARTINGTHRESHOLD) * MathUtils.pow(rotation,2) + STARTINGTHRESHOLD); } double gyroAngle = 0; drive.mecanumDrive_Cartesian(x, y, rotation, 0.0); } /** * This function is called periodically during test mode */ public void testPeriodic() { } }
package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.SimpleRobot; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.Relay; //import edu.wpi.first.wpilibj.Jaguar; //import edu.wpi.first.wpilibj.DriverStationLCD; public class RobotTemplate extends SimpleRobot { /** * This function is called once each time the robot enters autonomous mode. */ public RobotDrive drivetrain; public Relay spikeA; public Joystick leftStick; public Joystick rightStick; //public DriverStationLCD lcd; //public Victor gearMotor; public RobotTemplate() { //Instantialize objects for RobotTemplate getWatchdog().setEnabled(false); leftStick = new Joystick(1); rightStick = new Joystick(2); //gearMotor = new Victor(5); //initialize speed controller //2-Wheel tank drive spikeA = new Relay(1); drivetrain = new RobotDrive(1,2); //4-Wheel tank drive //Motors must be set in the following order: //LeftFront=1; LeftRear=2; RightFront=3; RightRear=4; //drivetrain = new RobotDrive(1,2,3,4); //drivetrain.tankDrive(leftStick, rightStick); } public void autonomous() { for (int i = 0; i < 4; i++){ drivetrain.drive(0.5, 0.0); // drive 50% fwd 0% turn Timer.delay(2.0); // wait 2 seconds drivetrain.drive(0.0, 0.75); // drive 0% fwd, 75% turn } drivetrain.drive(0.0, 0.0); // drive 0% forward, 0% turn } public void operatorControl() { drivetrain.setSafetyEnabled(true); while(isOperatorControl() && isEnabled() ){ drivetrain.tankDrive(leftStick, rightStick); Timer.delay(0.01); if(rightStick.getTrigger()){ spikeA.set(Relay.Value.kForward); } else{ spikeA.set(Relay.Value.kOff); } /*if(leftStick.getTrigger()){ lcd.println(DriverStationLCD.Line.kUser2, 1, motor.get()); Jaguar motor; motor = new Jaguar(1); if(motor.get() > -1){ motor.set(motor.get() - .1); } //gearMotor.set(.5);//if right stick trigger is pressed, set motor to 50% speed } if(rightStick.getTrigger()){ Jaguar motor; motor = new Jaguar(2); 
if(motor.get() > -1){ motor.set(motor.get() - .1); } //gearMotor.set(.5);//if right stick trigger is pressed, set motor to 50% speed } else{ //gearMotor.set(0); }*/ } } }
/* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ //Make sure to "Commit" code on your local computer with relevant comments //At end of session, "Push" code to the remote repository online. package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.Relay; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.Encoder; import com.sun.squawk.util.*; import edu.wpi.first.wpilibj.Compressor; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Gyro; import edu.wpi.first.wpilibj.Preferences; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj.SensorBase; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. 
*/ public class RobotTemplate extends IterativeRobot { /** * JOYSTICK BUTTONS * */ //DRIVER JOYSTICK final static int ANCHOR_UP_BUTTON = 0; final static int ANCHOR_DOWN_BUTTON = 2; //OPERATOR JOYSTICK final static int PANCAKE_BUTTON = 7; final static int WINCH_BUTTON = 6; final static int COLLECTOR_DOWN_BUTTON = 0; final static int COLLECTOR_UP_BUTTON = 2; final static int COLLECTOR_BUTTON = 1; final static int COLLECTOR_BUTTON_REVERSE = 3; /** * CHANNELS * */ //PWM CHANNEL final static int FRONT_LEFT_CHANNEL = 1; final static int FRONT_RIGHT_CHANNEL = 2; final static int BACK_LEFT_CHANNEL = 3; final static int BACK_RIGHT_CHANNEL = 4; final static int COLLECTOR_CHANNEL = 5; final static int WINCH_2_CHANNEL = 7; final static int WINCH_1_CHANNEL = 6; //RELAYS final static int ANCHOR_CHANNEL = 4; final static int COLLECTOR_ANGLE_CHANNEL = 8; final static int COMPRESSOR_RELAY_CHANNEL = 1; final static int PANCAKE_CHANNEL = 2; // DIGITAL I/O final static int WINCH_LIMIT_SWITCH_CHANNEL = 4; double STARTINGTHRESHOLD = 0.0; Talon frontleft; Talon frontright; Talon backleft; Talon backright; RobotDrive drive; Joystick driverOne; Joystick driverTwo; Joystick operatorStick; Talon winch1; Talon winch2; Compressor compressor; Talon collector; Relay compressorRelay; Relay pancakeRelay; Relay oceanbluePistons; Relay skydivePistons; Timer pancakeTimer; Timer autoTimer; boolean isPancakeTimerOn = false; boolean isPS3Joystick = true; DigitalInput dropitlowSensor; Encoder encoder; Gyro gyroScope; /** * This function is run when the robot is first started up and should be * used for any initialization code. 
*/ public void robotInit() { driverOne = new Joystick(1); if (isPS3Joystick == true) { operatorStick = new Joystick(2); } else { driverTwo = new Joystick(2); operatorStick = new Joystick(3); } collector = new Talon(COLLECTOR_CHANNEL); frontleft = new Talon(FRONT_LEFT_CHANNEL); frontright = new Talon(FRONT_RIGHT_CHANNEL); backleft = new Talon(BACK_LEFT_CHANNEL); backright = new Talon(BACK_RIGHT_CHANNEL); winch1 = new Talon(WINCH_1_CHANNEL); winch2 = new Talon(WINCH_2_CHANNEL); drive = new RobotDrive(frontleft, backleft, frontright, backright); compressorRelay = new Relay(COMPRESSOR_RELAY_CHANNEL, Relay.Direction.kForward); compressorRelay.set(Relay.Value.kOn); //compressorRelaySwitchOn(); //compressor = new Compressor(5,1); //compressor.start(); pancakeRelay = new Relay(PANCAKE_CHANNEL, Relay.Direction.kBoth); //pancakeRelay.setDirection(Relay.Direction.kForward); pancakeTimer = new Timer(); oceanbluePistons = new Relay(ANCHOR_CHANNEL, Relay.Direction.kBoth); skydivePistons = new Relay(COLLECTOR_ANGLE_CHANNEL, Relay.Direction.kBoth); dropitlowSensor = new DigitalInput(WINCH_LIMIT_SWITCH_CHANNEL); encoder = new Encoder(2, 3); //int Evalue = Encoder.getRaw(); boolean dropitlowSensor = false; gyroScope = new Gyro(5); isPS3Joystick = SmartDashboard.getBoolean("usePS3Joystick", true); SmartDashboard.putString("Collector", "disengaged"); SmartDashboard.putString("Pancake", "disengaged"); SmartDashboard.putString("Winch", "OFF"); SmartDashboard.putString("Anchor", "UP"); SmartDashboard.putString("cArm", "RVRSE"); SmartDashboard.putString("Compressor", "ON"); // SmartDashboard.putString("displayAngle",angle); // SmartDashboard.putDouble("Angle", angle); } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { moveForward(0, 3, 1.0); setPancake(3, 5, false); setPancake(5, 7, true); winchDownInAuto(7,9); } public void moveForward (double startTime, double endTime, double speed) { if(autoTimer.get() > startTime && autoTimer.get() < 
endTime) { drive.drive(speed, 0); } else{ drive.drive(0, 0); } } void setPancake(double startTime, double endTime, boolean engaged){ if(autoTimer.get() > startTime && autoTimer.get() < endTime){ if(engaged){ engagePancake(); }else{ disengagePancake(); } } } public void disengagePancake() { SmartDashboard.putString("Pancake", "disengaged"); pancakeRelay.set(Relay.Value.kReverse); } public void engagePancake() { SmartDashboard.putString("Pancake", "engaged"); pancakeRelay.set(Relay.Value.kReverse); } public void winchDownInAuto(double startTime, double endTime) { if (autoTimer.get() > startTime && autoTimer.get() < endTime) { moveWinch(1.0); } else { moveWinch(0.0); } } /** * This function is called periodically during operator control */ public void teleopPeriodic() { checkDrive(); checkWinch(); checkCollector(); checkCompressor(); checkAnchor(); checkCollectorAngles(); checkPancake(); // double displayAngle = gyro.getAngle(); } public void checkCollector() { if (operatorStick.getRawButton(COLLECTOR_BUTTON)) { SmartDashboard.putString("Collector", "FWD"); collector.set(1.0); } else if (operatorStick.getRawButton(COLLECTOR_BUTTON_REVERSE)) { SmartDashboard.putString("Collector", "RVRSE"); collector.set(-1.0); } else { collector.set(0.0); SmartDashboard.putString("Collector", "OFF"); } } /*public void compressorRelaySwitchOn() { compressorRelay.set(Relay.Value.kOn); //System.out.println("Compressor Relay Value Now: " + compressorRelay.get().value); } */ public void checkCollectorAngles() { if (operatorStick.getRawButton(COLLECTOR_UP_BUTTON)) { skydivePistons.set(Relay.Value.kReverse); SmartDashboard.putString("cArm", "Retracted"); } if (operatorStick.getRawButton(COLLECTOR_DOWN_BUTTON)) { skydivePistons.set(Relay.Value.kForward); SmartDashboard.putString("cArm", "Extended"); } } public void checkCompressor() { if (driverOne.getRawButton(6)) { SmartDashboard.putString("Compressor", "ON"); compressorRelay.set(Relay.Value.kOn); } if (driverOne.getRawButton(4)) { 
SmartDashboard.putString("Compressor", "OFF");
compressorRelay.set(Relay.Value.kOff);
}
}

/**
 * Drives the anchor pistons from the two anchor buttons on driverOne and
 * mirrors the state to the SmartDashboard.
 * NOTE(review): if both buttons are held, the UP branch runs last and wins.
 */
public void checkAnchor() {
    if (driverOne.getRawButton(ANCHOR_DOWN_BUTTON)) {
        SmartDashboard.putString("Anchor", "DOWN");
        oceanbluePistons.set(Relay.Value.kReverse);
    }
    if (driverOne.getRawButton(ANCHOR_UP_BUTTON)) {
        SmartDashboard.putString("Anchor", "UP");
        oceanbluePistons.set(Relay.Value.kForward);
    }
}

/** True while the winch encoder reports any nonzero rate. */
boolean isWinchMoving() {
    return encoder.getRate() != 0;
}

/** True when the bottom limit sensor is tripped. */
boolean atBottom() {
    return dropitlowSensor.get();
}

/**
 * Runs both winch motors at the given speed, but forces them to 0 once the
 * bottom limit sensor trips so the winch cannot drive past its lower stop.
 *
 * @param speed motor output in [-1, 1]
 */
void moveWinch(double speed) {
    if (!atBottom()){
        winch1.set(speed);
        winch2.set(speed);
    } else {
        winch1.set(0.0);
        winch2.set(0.0);
    }
}

/** Polls the operator winch button and drives the winch full-on or off. */
void checkWinch() {
    //SmartDashboard.putNumber("Encoder", encoder.getRate());
    SmartDashboard.putBoolean("WINCH_MOVING", isWinchMoving());
    if (operatorStick.getRawButton(WINCH_BUTTON)) {
        SmartDashboard.putString("Winch", "ON");
        moveWinch(1.0);
    } else {
        SmartDashboard.putString("Winch", "OFF");
        moveWinch(0.0);
    }
}

/**
 * One-shot pancake actuation: pressing the button starts a timer and engages
 * the pancake; it is automatically disengaged once 2 seconds have elapsed.
 */
public void checkPancake() {
    if (operatorStick.getRawButton(PANCAKE_BUTTON) && isPancakeTimerOn == false) {
        pancakeTimer.start();
        isPancakeTimerOn = true;
        engagePancake();
    }
    // after 2 secs
    if (pancakeTimer.get() >= 2) {
        disengagePancake();
        pancakeTimer.stop();
        pancakeTimer.reset();
        isPancakeTimerOn = false;
    }
}

/**
 * Reads the driver joysticks, applies a deadband and a squared response
 * curve offset by STARTINGTHRESHOLD, and feeds the result to the mecanum
 * drive. Axis/sign selection differs for PS3 vs. other joysticks.
 */
private void checkDrive() {
    //Set X motion based on Joystick Type
    double x = 0;
    if (isPS3Joystick) {
        x = driverOne.getRawAxis(1);
        x = x * -1;
    } else {
        // NOTE(review): identical to the PS3 branch — possibly intentional,
        // possibly a copy/paste remnant; confirm against the Y handling below.
        x = driverOne.getRawAxis(1);
        x = x * -1;
    }
    // 0.1 deadband around center
    if (x < 0.1 && x > -0.1) {
        x = 0;
    }
    // squared response curve, rescaled so output starts at STARTINGTHRESHOLD
    if (x > 0) {
        x = (1 - STARTINGTHRESHOLD) * MathUtils.pow(x, 2) + STARTINGTHRESHOLD;
    } else if (x < 0) {
        x = -1 * ((1 - STARTINGTHRESHOLD) * MathUtils.pow(x, 2) + STARTINGTHRESHOLD);
    }
    //Set Y-motion (STRAFE) based on Joystick Type
    double y = 0;
    if (isPS3Joystick) {
        y = driverOne.getRawAxis(2);
        y = y * -1;
    } else {
        y = driverOne.getRawAxis(2);
    }
    if (y < 0.1 && y > -0.1) {
        y = 0;
    }
    if (y > 0) {
        y = (1 - STARTINGTHRESHOLD) * MathUtils.pow(y, 2) + STARTINGTHRESHOLD;
    } else if (y < 0) {
        y = -1 * ((1 - STARTINGTHRESHOLD) * MathUtils.pow(y, 2) + STARTINGTHRESHOLD);
    }
    double rotation = 0;
    //Set Rotation Angle based on Joystick Type
    if (isPS3Joystick) {
        rotation = driverOne.getRawAxis(3);
        rotation = rotation * -1;
    } else {
        rotation = driverTwo.getRawAxis(1);
        rotation = rotation * -1;
    }
    // tighter 0.05 deadband for rotation
    if (rotation < 0.05 && rotation > -0.05) {
        rotation = 0;
    }
    if (rotation > 0) {
        rotation = (1 - STARTINGTHRESHOLD) * MathUtils.pow(rotation, 2) + STARTINGTHRESHOLD;
    } else if (rotation < 0) {
        rotation = -1 * ((1 - STARTINGTHRESHOLD) * MathUtils.pow(rotation, 2) + STARTINGTHRESHOLD);
    }
    // gyroAngle is declared but never used; drive is field-oriented off (0.0)
    double gyroAngle = 0;
    drive.mecanumDrive_Cartesian(x, y, rotation, 0.0);
}

/**
 * This function is called periodically during test mode
 */
public void testPeriodic() {
}
}
/* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.SimpleRobot; import edu.wpi.first.wpilibj.CANJaguar; import edu.wpi.first.wpilibj.DriverStationLCD; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Victor; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.Servo; import edu.wpi.first.wpilibj.can.CANTimeoutException; import edu.wpi.first.wpilibj.camera.AxisCamera; import edu.wpi.first.wpilibj.camera.AxisCameraException; import edu.wpi.first.wpilibj.image.BinaryImage; import edu.wpi.first.wpilibj.image.ColorImage; import edu.wpi.first.wpilibj.image.CriteriaCollection; import edu.wpi.first.wpilibj.image.NIVision; import edu.wpi.first.wpilibj.image.NIVisionException; import edu.wpi.first.wpilibj.image.ParticleAnalysisReport; import team1517.aerialassist.mecanum.MecanumDrive; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the SimpleRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. 
*/
public class RobotTemplate extends SimpleRobot {

    // Latched state indicating the catapult has been winched into firing position.
    boolean catapultArmed = false;
    // Minimum particle area (px) accepted by the vision particle filter.
    final int AREA_MINIMUM = 100;
    // Camera servo setpoints and the adjustable winch output magnitude.
    double tiltValue = 0.5, rotValue = 0.85, winchPower = -0.9;
    AxisCamera camera; // NOTE(review): never initialized (ctor line commented out) — vision methods will NPE; confirm.
    CriteriaCollection cc;
    CANJaguar aF, aB, bF, bB; // drive motor controllers (front/back, left/right)
    DigitalInput in1, in3, in4, in5, armedSwitch;
    Victor rotRod1, rotRod2, angle1;
    Talon winchMotor;
    Servo tiltServo, rotServo; // camera pan/tilt servos
    Joystick xyStick, steerStick, auxStick;
    DriverStationLCD lcd;
    MecanumDrive mDrive;

    /** Wires up all sensors, actuators, and joysticks, then brings up the CAN drive. */
    public RobotTemplate() {
        //camera = AxisCamera.getInstance();
        cc = new CriteriaCollection(); // create the criteria for the particle filter
        cc.addCriteria(NIVision.MeasurementType.IMAQ_MT_AREA, AREA_MINIMUM, 215472, false);
        in1 = new DigitalInput(1);
        armedSwitch = new DigitalInput(2);
        in3 = new DigitalInput(3);
        in4 = new DigitalInput(4);
        in5 = new DigitalInput(5);
        rotRod1 = new Victor(8);
        rotRod2 = new Victor(9);
        angle1 = new Victor(3);
        winchMotor = new Talon(4);
        tiltServo = new Servo(5);
        rotServo = new Servo(6);
        xyStick = new Joystick(1);
        steerStick = new Joystick(2);
        auxStick = new Joystick(3);
        lcd = DriverStationLCD.getInstance();
        initCANJaguars();
    }

    /**
     * This function is called once each time the robot enters autonomous mode.
     * Waits briefly, then drives forward until the bF encoder reports the
     * target position, re-initializing the CAN bus if a timeout occurs.
     */
    public void autonomous() {
        Timer timer = new Timer();
        timer.start();
        Timer.delay(0.7); //Delays a amount of time in order for the hot goal vision targets to rotate into position.
        boolean isHotGoalStarting = false;
        //getHotGoal();
        try {
            if(!isHotGoalStarting) {
                /*while(Math.abs(bF.getPosition()) < 8.91) {
                    mDrive.drive(0, -0.7, 0);
                    lcd.println(DriverStationLCD.Line.kUser1, 1, "" + aF.getPosition());
                    lcd.updateLCD();
                }
                mDrive.drive(0, 0, 0);
                Timer.delay(5 - timer.get());*/
                Timer.delay(2);
            }
            // Drive forward until the back-front encoder passes 10.18 rotations.
            while(Math.abs(bF.getPosition()) < 10.18) {
                mDrive.drive(0, -0.7, 0);
                lcd.println(DriverStationLCD.Line.kUser1, 1, "" + aF.getPosition());
                lcd.updateLCD();
            }
            mDrive.drive(0, 0, 0);
        } catch(CANTimeoutException ex) {
            ex.printStackTrace();
            initCANJaguars();
        }
        timer.stop();
    }

    /**
     * This function is called once each time the robot enters operator control.
     * Main teleop loop: drive base, rot-rod arm, catapult winch, and camera servos.
     */
    public void operatorControl() {
        boolean exceptionFree = true;
        double x = 0, y = 0, t = 0;
        int i = 0; // NOTE(review): declared but never used in this loop
        while(isOperatorControl() && isEnabled()) {
            if(xyStick.getRawButton(1)) {
                // Button 1: raw in-place rotation using twist, bypassing tDrive.
                try {
                    aF.setX(xyStick.getTwist());
                    bB.setX(xyStick.getTwist());
                    aB.setX(-1 * xyStick.getTwist());
                    bF.setX(-1 * xyStick.getTwist());
                    exceptionFree = true;
                } catch(CANTimeoutException ex) {
                    ex.printStackTrace();
                    exceptionFree = false;
                }
            } else {
                /*
                 * Controls the drive base and also handles exceptions.
                 */
                x = filterJoystickInput(steerStick.getX());
                y = filterJoystickInput(xyStick.getY());
                t = filterJoystickInput(xyStick.getTwist());
                exceptionFree = tDrive(x, y, t);
            }
            // Re-initialize the CAN bus after any drive exception.
            if(!exceptionFree /*|| getCANJaguarsPowerCycled()*/) {
                initCANJaguars();
            }
            if(auxStick.getRawButton(1)) {
                angle1.set(auxStick.getY());
            } else {
                /*
                 * Sets the output to the angle motor of the rot rods to the value of the y axis of the auxStick scaled by a factor of 0.7.
                 */
                angle1.set(auxStick.getY() * 0.7);
            }
            /*
             * Controls the rot rods.
             */
            if(auxStick.getRawButton(3)) {
                rotRod1.set(-0.7);
                rotRod2.set(0.7);
            } else if(auxStick.getRawButton(5)) {
                rotRod1.set(0.7);
                rotRod2.set(-0.7);
            } else {
                rotRod1.set(0);
                rotRod2.set(0);
            }
            /*
             * Manual control of the catapult winch.
             */
            if(auxStick.getRawButton(2)) {
                winchMotor.set(winchPower);
            } else if(auxStick.getRawButton(4)) {
                //winchMotor.set(-0.3);
                armCatapult();
            } else if(auxStick.getRawButton(6)) {
                //winchMotor.set(0.5);
                fireCatapult();
            } else {
                winchMotor.set(0);
            }
            // Axis 6 nudges the winch power setpoint in 0.05 steps within [-0.95, 0.95].
            if(auxStick.getRawAxis(6) > 0 && winchPower <= 0.95) {
                winchPower = winchPower + 0.05;
            } else if(auxStick.getRawAxis(6) < 0 && winchPower >= -0.95) {
                winchPower = winchPower - 0.05;
            }
            /*
             * Sets the output values of the camera axis servos.
             */
            tiltServo.set(tiltValue);
            rotServo.set(rotValue);
            // /*
            // * Allows the user to adjust the value set to the tiltServo.
            // */
            // if(auxStick.getRawAxis(6) > 0 && tiltValue <= 0.95)
            // tiltValue = tiltValue + 0.05;
            // else if(auxStick.getRawAxis(6) < 0 && tiltValue >= 0.05)
            // tiltValue = tiltValue - 0.05;
            /*
             * Allows the user to adjust the value set to the rotServo.
             */
            if(auxStick.getRawAxis(5) > 0 && rotValue <= 0.95) {
                rotValue = rotValue + 0.05;
            } else if(auxStick.getRawAxis(5) < 0 && rotValue >= 0.05) {
                rotValue = rotValue - 0.05;
            }
            lcd.println(DriverStationLCD.Line.kUser1, 1, "winch " + winchPower + " ");
            lcd.println(DriverStationLCD.Line.kUser3, 1, "input2" + armedSwitch.get() + " ");
            Timer.delay(0.01);
            lcd.updateLCD();
        }
    }

    /**
     * This function is called once each time the robot enters test mode.
     * Buttons 1/2 print hot-goal detection and vision distance to the LCD.
     */
    public void test() {
        while(isTest() && isEnabled()) {
            if(auxStick.getRawButton(1)) {
                lcd.println(DriverStationLCD.Line.kUser1, 1, " " + getHotGoal());
            }
            if(auxStick.getRawButton(2)) {
                lcd.println(DriverStationLCD.Line.kUser2, 1, " " + getVisionDistance());
            }
            lcd.updateLCD();
        }
    }

    /**
     * Moves the catapult into armed position.
     * Runs the winch until the armed limit switch trips (called repeatedly
     * from the teleop loop — one step per call).
     */
    private void armCatapult() {
        if(!armedSwitch.get()) {
            winchMotor.set(-0.7);
        } else {
            winchMotor.set(0);
        }
    }

    /**
     * Fires the catapult.
     * NOTE(review): busy-waits on the limit switch inside the teleop loop,
     * blocking all other control until the switch releases — confirm intended.
     */
    private void fireCatapult() {
        if(armedSwitch.get()) {
            while(armedSwitch.get()) {
                winchMotor.set(-0.7);
            }
            winchMotor.set(0);
        }
    }

    /**
     * Added to abstract the drive method so that CAN can be switched to PWM easier and more simply.
* @param mX The X value of the drive vector. * @param mY The Y value of the drive vector. * @param twist The turn added to the output of the drive vector. * @return True if successful, false if exceptions are thrown. */ private boolean tDrive(double mX, double mY, double twist) { try { return mDrive.drive(mX, mY, twist); } catch(NullPointerException ex) { ex.printStackTrace(); return true; } } /** * Detects whether the hot goal is visible. * @return True if the hot goal is visible. False if the hot goal is not visible or an exception has been thrown. */ private boolean getHotGoal() { try { ColorImage image = camera.getImage(); BinaryImage thresholdImage = image.thresholdHSV(80, 140, 165, 255, 200, 255); image.free(); BinaryImage hulledImage = thresholdImage.convexHull(false); thresholdImage.free(); if(hulledImage.getNumberParticles() > 0) { lcd.println(DriverStationLCD.Line.kUser2,1, "" + hulledImage.getNumberParticles()); lcd.updateLCD(); ParticleAnalysisReport report; for(int i = 0; i < hulledImage.getNumberParticles(); i++) { report = hulledImage.getParticleAnalysisReport(i); if((report.boundingRectHeight / report.boundingRectWidth) < 1) { return true; } } report = null; } hulledImage.free(); } catch (AxisCameraException ex) { ex.printStackTrace(); return false; } catch (NIVisionException ex) { ex.printStackTrace(); return false; } return false; } /** * Used to initialize the CANJaguars. It can also be called to reinitialize them if an exception is thrown. 
* @return Success */ private boolean initCANJaguars() { boolean successful = true; mDrive = null; while(aF == null || bF == null || aB == null || bB == null) { try { aF = null; bF = null; aB = null; bB = null; aF = new CANJaguar(1); bF = new CANJaguar(2); aB = new CANJaguar(3); bB = new CANJaguar(4); aF.changeControlMode(CANJaguar.ControlMode.kPercentVbus); bF.changeControlMode(CANJaguar.ControlMode.kPercentVbus); aB.changeControlMode(CANJaguar.ControlMode.kPercentVbus); bB.changeControlMode(CANJaguar.ControlMode.kPercentVbus); aF.configNeutralMode(CANJaguar.NeutralMode.kBrake); bF.configNeutralMode(CANJaguar.NeutralMode.kBrake); aB.configNeutralMode(CANJaguar.NeutralMode.kBrake); bB.configNeutralMode(CANJaguar.NeutralMode.kBrake); aF.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); bF.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); aB.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); bB.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); aF.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); bF.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); aB.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); bB.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); aF.configEncoderCodesPerRev(100); bF.configEncoderCodesPerRev(100); aB.configEncoderCodesPerRev(100); bB.configEncoderCodesPerRev(100); //aF.setX(0); //bF.setX(0); //aB.setX(0); //bB.setX(0); } catch(CANTimeoutException ex) { ex.printStackTrace(); successful = true; } } mDrive = new MecanumDrive(aF, aB, bF, bB); return successful; } /** * Detects whether one or more of the CANJaguars has lost and then regained power. * @return True if power to one or more of the CANJaguars has been cycled or if a timeout exception has occurred. False otherwise. 
*/ private boolean getCANJaguarsPowerCycled() { try { if(aF.getPowerCycled() || aB.getPowerCycled() || bF.getPowerCycled() || bB.getPowerCycled()) { return true; } } catch(CANTimeoutException ex) { ex.printStackTrace(); return true; } return false; } /** * Filters out noise from the input of the joysticks. * @param joystickValue The raw input value from the joystick. * @return The filtered value. */ double filterJoystickInput(double joystickValue) { if(Math.abs(joystickValue) > 0.1) { return (joystickValue * joystickValue * joystickValue); } else { if(xyStick.getTwist() != 0) { return 0.0000000000001; } else { return 0; } } } double getVisionDistance() { try { ColorImage image = camera.getImage(); BinaryImage thresholdImage = image.thresholdHSV(80, 140, 165, 255, 200, 255); image.free(); BinaryImage hulledImage = thresholdImage.convexHull(false); thresholdImage.free(); if(hulledImage.getNumberParticles() > 0) { ParticleAnalysisReport report; for(int i = 0; i < hulledImage.getNumberParticles(); i++) { report = hulledImage.getParticleAnalysisReport(i); if(report.boundingRectWidth / report.boundingRectHeight < 1) //1 can be reduced. { //do distance calculations. //return distance. return report.center_mass_y_normalized + 1; } } } } catch (AxisCameraException ex) { ex.printStackTrace(); return -2; } catch (NIVisionException ex) { ex.printStackTrace(); return -3; } return -1; } }
package org.voltcore.messaging; import java.net.InetSocketAddress; import java.net.SocketException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Queue; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.zookeeper_voltpatches.CreateMode; import org.apache.zookeeper_voltpatches.KeeperException; import org.apache.zookeeper_voltpatches.ZooDefs.Ids; import org.apache.zookeeper_voltpatches.ZooKeeper; import org.json_voltpatches.JSONArray; import org.json_voltpatches.JSONObject; import org.json_voltpatches.JSONStringer; import org.voltcore.agreement.AgreementSite; import org.voltcore.agreement.InterfaceToMessenger; import org.voltcore.logging.VoltLogger; import org.voltcore.network.VoltNetworkPool; import org.voltcore.utils.COWMap; import org.voltcore.utils.CoreUtils; import org.voltcore.utils.InstanceId; import org.voltcore.utils.PortGenerator; import org.voltcore.zk.CoreZK; import org.voltcore.zk.ZKUtil; import org.voltdb.VoltDB; import org.voltdb.utils.MiscUtils; import com.google.common.primitives.Longs; /** * Host messenger contains all the code necessary to join a cluster mesh, and create mailboxes * that are addressable from anywhere within that mesh. Host messenger also provides * a ZooKeeper instance that is maintained within the mesh that can be used for distributed coordination * and failure detection. */ public class HostMessenger implements SocketJoiner.JoinHandler, InterfaceToMessenger { private static final VoltLogger logger = new VoltLogger("NETWORK"); /** * Configuration for a host messenger. The leader binds to the coordinator ip and * not the internal interface or port. 
Nodes that fail to become the leader will * connect to the leader using any interface, and will then advertise using the specified * internal interface/port. * * By default all interfaces are used, if one is specified then only that interface will be used. * */ public static class Config { public InetSocketAddress coordinatorIp; public String zkInterface = "127.0.0.1:2181"; public String internalInterface = ""; public int internalPort = 3021; public int deadHostTimeout = 10000; public long backwardsTimeForgivenessWindow = 1000 * 60 * 60 * 24 * 7; public VoltMessageFactory factory = new VoltMessageFactory(); public int networkThreads = Math.max(2, CoreUtils.availableProcessors() / 4); public Queue<String> coreBindIds;; public Config(String coordIp, int coordPort) { if (coordIp == null || coordIp.length() == 0) { coordinatorIp = new InetSocketAddress(coordPort); } else { coordinatorIp = new InetSocketAddress(coordIp, coordPort); } initNetworkThreads(); } public Config() { this(null, 3021); } public Config(PortGenerator ports) { this(null, 3021); zkInterface = "127.0.0.1:" + ports.next(); internalPort = ports.next(); } public int getZKPort() { return MiscUtils.getPortFromHostnameColonPort(zkInterface, VoltDB.DEFAULT_ZK_PORT); } private void initNetworkThreads() { try { logger.info("Default network thread count: " + this.networkThreads); Integer networkThreadConfig = Integer.getInteger("networkThreads"); if ( networkThreadConfig != null ) { this.networkThreads = networkThreadConfig; logger.info("Overridden network thread count: " + this.networkThreads); } } catch (Exception e) { logger.error("Error setting network thread count", e); } } @Override public String toString() { JSONStringer js = new JSONStringer(); try { js.object(); js.key("coordinatorip").value(coordinatorIp.toString()); js.key("zkinterface").value(zkInterface); js.key("internalinterface").value(internalInterface); js.key("internalport").value(internalPort); 
js.key("deadhosttimeout").value(deadHostTimeout); js.key("backwardstimeforgivenesswindow").value(backwardsTimeForgivenessWindow); js.key("networkThreads").value(networkThreads); js.endObject(); return js.toString(); } catch (Exception e) { throw new RuntimeException(e); } } } private static final VoltLogger m_logger = new VoltLogger("org.voltdb.messaging.impl.HostMessenger"); private static final VoltLogger hostLog = new VoltLogger("HOST"); public static final int AGREEMENT_SITE_ID = -1; public static final int STATS_SITE_ID = -2; public static final int ASYNC_COMPILER_SITE_ID = -3; public static final int CLIENT_INTERFACE_SITE_ID = -4; // we should never hand out this site ID. Use it as an empty message destination public static final int VALHALLA = Integer.MIN_VALUE; int m_localHostId; private final Config m_config; private final SocketJoiner m_joiner; private final VoltNetworkPool m_network; private volatile boolean m_localhostReady = false; // memoized InstanceId private InstanceId m_instanceId = null; /* * References to other hosts in the mesh. * Updates via COW */ final COWMap<Integer, ForeignHost> m_foreignHosts = new COWMap<Integer, ForeignHost>(); /* * References to all the local mailboxes * Updates via COW */ final COWMap<Long, Mailbox> m_siteMailboxes = new COWMap<Long, Mailbox>(); /* * All failed hosts that have ever been seen. * Used to dedupe failures so that they are only processed once. 
 */
private final Set<Integer> m_knownFailedHosts = Collections.synchronizedSet(new HashSet<Integer>());

private AgreementSite m_agreementSite;
private ZooKeeper m_zk;
private final AtomicInteger m_nextSiteId = new AtomicInteger(0);

/** Look up the local mailbox registered for the given HSId, or null. */
public Mailbox getMailbox(long hsId) {
    return m_siteMailboxes.get(hsId);
}

/**
 *
 * @param network
 * @param coordinatorIp
 * @param expectedHosts
 * @param catalogCRC
 * @param hostLog
 */
public HostMessenger(
        Config config)
{
    m_config = config;
    m_network = new VoltNetworkPool( m_config.networkThreads, m_config.coreBindIds);
    m_joiner = new SocketJoiner(
            m_config.coordinatorIp,
            m_config.internalInterface,
            m_config.internalPort,
            this);
}

/**
 * Synchronization protects m_knownFailedHosts and ensures that every failed host is only reported
 * once
 */
@Override
public synchronized void reportForeignHostFailed(int hostId) {
    if (m_knownFailedHosts.contains(hostId)) {
        return;
    }
    m_knownFailedHosts.add(hostId);
    long initiatorSiteId = CoreUtils.getHSIdFromHostAndSite(hostId, AGREEMENT_SITE_ID);
    // Drop the connection first, then tell the agreement site about the fault.
    removeForeignHost(hostId);
    m_agreementSite.reportFault(initiatorSiteId);
    logger.warn(String.format("Host %d failed", hostId));
}

/**
 * Start the host messenger and connect to the leader, or become the leader
 * if necessary.
 */
public void start() throws Exception {
    /*
     * SJ uses this barrier if this node becomes the leader to know when ZooKeeper
     * has been finished bootstrapping.
     */
    CountDownLatch zkInitBarrier = new CountDownLatch(1);

    /*
     * If start returns true then this node is the leader, it bound to the coordinator address
     * It needs to bootstrap its agreement site so that other nodes can join
     */
    if(m_joiner.start(zkInitBarrier)) {
        m_network.start();

        /*
         * m_localHostId is 0 of course.
         */
        long agreementHSId = getHSIdForLocalSite(AGREEMENT_SITE_ID);

        /*
         * A set containing just the leader (this node)
         */
        HashSet<Long> agreementSites = new HashSet<Long>();
        agreementSites.add(agreementHSId);

        /*
         * A basic site mailbox for the agreement site
         */
        SiteMailbox sm = new SiteMailbox(this, agreementHSId);
        createMailbox(agreementHSId, sm);

        /*
         * Construct the site with just this node
         */
        m_agreementSite =
            new AgreementSite(
                    agreementHSId,
                    agreementSites,
                    0,
                    sm,
                    new InetSocketAddress(
                            m_config.zkInterface.split(":")[0],
                            Integer.parseInt(m_config.zkInterface.split(":")[1])),
                    m_config.backwardsTimeForgivenessWindow);
        m_agreementSite.start();
        m_agreementSite.waitForRecovery();
        m_zk = org.voltcore.zk.ZKUtil.getClient(m_config.zkInterface, 60 * 1000);
        if (m_zk == null) {
            throw new Exception("Timed out trying to connect local ZooKeeper instance");
        }

        CoreZK.createHierarchy(m_zk);

        /*
         * This creates the ephemeral sequential node with host id 0 which
         * this node already used for itself. Just recording that fact.
         */
        final int selectedHostId = selectNewHostId(m_config.coordinatorIp.toString());
        if (selectedHostId != 0) {
            org.voltdb.VoltDB.crashLocalVoltDB("Selected host id for coordinator was not 0, " + selectedHostId, false, null);
        }

        // Store the components of the instance ID in ZK
        JSONObject instance_id = new JSONObject();
        instance_id.put("coord",
                ByteBuffer.wrap(m_config.coordinatorIp.getAddress().getAddress()).getInt());
        instance_id.put("timestamp", System.currentTimeMillis());
        hostLog.debug("Cluster will have instance ID:\n" + instance_id.toString(4));
        byte[] payload = instance_id.toString(4).getBytes("UTF-8");
        m_zk.create(CoreZK.instance_id, payload, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);

        /*
         * Store all the hosts and host ids here so that waitForGroupJoin
         * knows the size of the mesh. This part only registers this host
         */
        byte hostInfoBytes[] = m_config.coordinatorIp.toString().getBytes("UTF-8");
        m_zk.create(CoreZK.hosts_host + selectedHostId, hostInfoBytes, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
    }
    zkInitBarrier.countDown();
}

//For test only
protected HostMessenger() {
    this(new Config());
}

/*
 * The network is only available after start() finishes
 */
public VoltNetworkPool getNetwork() {
    return m_network;
}

public VoltMessageFactory getMessageFactory() {
    return m_config.factory;
}

/**
 * Get a unique ID for this cluster
 * @return
 */
public InstanceId getInstanceId() {
    if (m_instanceId == null)
    {
        try
        {
            byte[] data =
                m_zk.getData(CoreZK.instance_id, false, null);
            JSONObject idJSON = new JSONObject(new String(data, "UTF-8"));
            m_instanceId = new InstanceId(idJSON.getInt("coord"),
                    idJSON.getLong("timestamp"));
        }
        catch (Exception e)
        {
            String msg = "Unable to get instance ID info from " + CoreZK.instance_id;
            hostLog.error(msg);
            throw new RuntimeException(msg, e);
        }
    }
    return m_instanceId;
}

/*
 * Take the new connection (member of the mesh) and create a foreign host for it
 * and put it in the map of foreign hosts
 */
@Override
public void notifyOfJoin(int hostId, SocketChannel socket, InetSocketAddress listeningAddress) {
    System.out.println(getHostId() + " notified of " + hostId);
    prepSocketChannel(socket);
    ForeignHost fhost = null;
    try {
        fhost = new ForeignHost(this, hostId, socket, m_config.deadHostTimeout, listeningAddress);
        fhost.register(this);
        putForeignHost(hostId, fhost);
        fhost.enableRead();
    } catch (java.io.IOException e) {
        org.voltdb.VoltDB.crashLocalVoltDB("", true, e);
    }
}

/*
 * Set all the default options for sockets
 */
private void prepSocketChannel(SocketChannel sc) {
    try {
        sc.socket().setSendBufferSize(1024*1024*2);
        sc.socket().setReceiveBufferSize(1024*1024*2);
    } catch (SocketException e) {
        e.printStackTrace();
    }
}

/*
 * Convenience method for doing the verbose COW insert into the map
 */
private void putForeignHost(int hostId,
ForeignHost fh) {
    m_foreignHosts.put(hostId, fh);
}

/*
 * Convenience method for doing the verbose COW remove from the map
 */
private void removeForeignHost(int hostId) {
    ForeignHost fh = m_foreignHosts.remove(hostId);
    if (fh != null) {
        fh.close();
    }
}

/*
 * Any node can serve a request to join. The coordination of generating a new host id
 * is done via ZK
 */
@Override
public void requestJoin(SocketChannel socket, InetSocketAddress listeningAddress) throws Exception {
    /*
     * Generate the host id via creating an ephemeral sequential node
     */
    Integer hostId = selectNewHostId(socket.socket().getInetAddress().getHostAddress());
    prepSocketChannel(socket);
    ForeignHost fhost = null;
    try {
        try {
            /*
             * Write the response that advertises the cluster topology
             */
            writeRequestJoinResponse( hostId, socket);

            /*
             * Wait for the a response from the joining node saying that it connected
             * to all the nodes we just advertised. Use a timeout so that the cluster can't be stuck
             * on failed joins.
             */
            ByteBuffer finishedJoining = ByteBuffer.allocate(1);
            socket.configureBlocking(false);
            long start = System.currentTimeMillis();
            // 120-second overall deadline; poll the socket every 5ms.
            while (finishedJoining.hasRemaining() && System.currentTimeMillis() - start < 120000) {
                int read = socket.read(finishedJoining);
                if (read == -1) {
                    hostLog.info("New connection was unable to establish mesh");
                    return;
                } else if (read < 1) {
                    Thread.sleep(5);
                }
            }

            /*
             * Now add the host to the mailbox system
             */
            fhost = new ForeignHost(this, hostId, socket, m_config.deadHostTimeout, listeningAddress);
            fhost.register(this);
            putForeignHost(hostId, fhost);
            fhost.enableRead();
        } catch (Exception e) {
            logger.error("Error joining new node", e);
            m_knownFailedHosts.add(hostId);
            removeForeignHost(hostId);
            return;
        }

        /*
         * And the last step is to wait for the new node to join ZooKeeper.
         * This node is the one to create the txn that will add the new host to the list of hosts
         * with agreement sites across the cluster.
         */
        long hsId = CoreUtils.getHSIdFromHostAndSite(hostId, AGREEMENT_SITE_ID);
        if (!m_agreementSite.requestJoin(hsId).await(60, TimeUnit.SECONDS)) {
            reportForeignHostFailed(hostId);
        }
    } catch (Throwable e) {
        org.voltdb.VoltDB.crashLocalVoltDB("", true, e);
    }
}

/*
 * Generate a new host id by creating a persistent sequential node
 */
private Integer selectNewHostId(String address) throws Exception {
    String node =
        m_zk.create(CoreZK.hostids_host,
                address.getBytes("UTF-8"), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL);

    // The sequence suffix appended by ZK is a fixed-width 10-digit number.
    return Integer.valueOf(node.substring(node.length() - 10));
}

/*
 * Advertise to a newly connecting node the topology of the cluster so that it can connect to
 * the rest of the nodes
 */
private void writeRequestJoinResponse(int hostId, SocketChannel socket) throws Exception {
    JSONObject jsObj = new JSONObject();

    /*
     * Tell the new node what its host id is
     */
    jsObj.put("newHostId", hostId);

    /*
     * Echo back the address that the node connected from
     */
    jsObj.put("reportedAddress",
            ((InetSocketAddress)socket.socket().getRemoteSocketAddress()).getAddress().getHostAddress());

    /*
     * Create an array containing an ad for every node including this one
     * even though the connection has already been made
     */
    JSONArray jsArray = new JSONArray();
    JSONObject hostObj = new JSONObject();
    hostObj.put("hostId", getHostId());
    hostObj.put("address",
            m_config.internalInterface.isEmpty() ?
                    socket.socket().getLocalAddress().getHostAddress() : m_config.internalInterface);
    hostObj.put("port", m_config.internalPort);
    jsArray.put(hostObj);
    for (Map.Entry<Integer, ForeignHost> entry : m_foreignHosts.entrySet()) {
        if (entry.getValue() == null) continue;
        int hsId = entry.getKey();
        ForeignHost fh = entry.getValue();
        hostObj = new JSONObject();
        hostObj.put("hostId", hsId);
        hostObj.put("address", fh.m_listeningAddress.getAddress().getHostAddress());
        hostObj.put("port", fh.m_listeningAddress.getPort());
        jsArray.put(hostObj);
    }
    jsObj.put("hosts", jsArray);
    // Length-prefixed framing: 4-byte big-endian length, then the JSON payload.
    byte messageBytes[] = jsObj.toString(4).getBytes("UTF-8");
    ByteBuffer message = ByteBuffer.allocate(4 + messageBytes.length);
    message.putInt(messageBytes.length);
    message.put(messageBytes).flip();
    while (message.hasRemaining()) {
        socket.write(message);
    }
}

/*
 * SJ invokes this method after a node finishes connecting to the entire cluster.
 * This method constructs all the hosts and puts them in the map
 */
@Override
public void notifyOfHosts(
        int yourHostId,
        int[] hosts,
        SocketChannel[] sockets,
        InetSocketAddress listeningAddresses[]) throws Exception {
    m_localHostId = yourHostId;
    long agreementHSId = getHSIdForLocalSite(AGREEMENT_SITE_ID);

    /*
     * Construct the set of agreement sites based on all the hosts that are connected
     */
    HashSet<Long> agreementSites = new HashSet<Long>();
    agreementSites.add(agreementHSId);

    m_network.start();//network must be running for register to work

    for (int ii = 0; ii < hosts.length; ii++) {
        System.out.println(yourHostId + " Notified of host " + hosts[ii]);
        agreementSites.add(CoreUtils.getHSIdFromHostAndSite(hosts[ii], AGREEMENT_SITE_ID));
        prepSocketChannel(sockets[ii]);
        ForeignHost fhost = null;
        try {
            fhost = new ForeignHost(this, hosts[ii], sockets[ii], m_config.deadHostTimeout, listeningAddresses[ii]);
            fhost.register(this);
            putForeignHost(hosts[ii], fhost);
        } catch (java.io.IOException e) {
            org.voltdb.VoltDB.crashLocalVoltDB("", true, e);
        }
    }

    /*
     * Create the local agreement site. It knows that it is recovering because the number of
     * prexisting sites is > 0
     */
    SiteMailbox sm = new SiteMailbox(this, agreementHSId);
    createMailbox(agreementHSId, sm);
    m_agreementSite =
        new AgreementSite(
                agreementHSId,
                agreementSites,
                yourHostId,
                sm,
                new InetSocketAddress(
                        m_config.zkInterface.split(":")[0],
                        Integer.parseInt(m_config.zkInterface.split(":")[1])),
                m_config.backwardsTimeForgivenessWindow);

    /*
     * Now that the agreement site mailbox has been created it is safe
     * to enable read
     */
    for (ForeignHost fh : m_foreignHosts.values()) {
        fh.enableRead();
    }
    m_agreementSite.start();

    /*
     * Do the usual thing of waiting for the agreement site
     * to join the cluster and creating the client
     */
    m_agreementSite.waitForRecovery();
    m_zk = org.voltcore.zk.ZKUtil.getClient(m_config.zkInterface, 60 * 1000);
    if (m_zk == null) {
        throw new Exception("Timed out trying to connect local ZooKeeper instance");
    }

    /*
     * Publish the address of this node to ZK as seen by the leader
     * Also allows waitForGroupJoin to know the number of nodes in the cluster
     */
    byte hostInfoBytes[];
    if (m_config.internalInterface.isEmpty()) {
        InetSocketAddress addr = new InetSocketAddress(m_joiner.m_reportedInternalInterface, m_config.internalPort);
        hostInfoBytes = addr.toString().getBytes("UTF-8");
    } else {
        InetSocketAddress addr = new InetSocketAddress(m_config.internalInterface, m_config.internalPort);
        hostInfoBytes = addr.toString().getBytes("UTF-8");
    }
    m_zk.create(CoreZK.hosts_host + getHostId(), hostInfoBytes, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
}

/**
 * Wait until all the nodes have built a mesh.
 */
public void waitForGroupJoin(int expectedHosts) {
    try {
        while (true) {
            ZKUtil.FutureWatcher fw = new ZKUtil.FutureWatcher();
            final int numChildren = m_zk.getChildren(CoreZK.hosts, fw).size();

            /*
             * If the target number of hosts has been reached
             * break out
             */
            if ( numChildren == expectedHosts) {
                break;
            }

            /*
             * If there are extra hosts that means too many Volt procs were started.
             * Kill this node based on the assumption that we are the extra one. In most
             * cases this is correct and fine and in the worst case the cluster will hang coming up
             * because two or more hosts killed themselves
             */
            if ( numChildren > expectedHosts) {
                org.voltdb.VoltDB.crashLocalVoltDB("Expected to find " + expectedHosts +
                        " hosts in cluster at startup but found " + numChildren +
                        ".  Terminating this host.", false, null);
            }
            // Block until the watcher fires, then re-check the child count.
            fw.get();
        }
    } catch (Exception e) {
        org.voltdb.VoltDB.crashLocalVoltDB("Error waiting for hosts to be ready", false, e);
    }
}

public int getHostId() {
    return m_localHostId;
}

/** Combine the local host id with the given site id into an HSId. */
public long getHSIdForLocalSite(int site) {
    return CoreUtils.getHSIdFromHostAndSite(getHostId(), site);
}

public String getHostname() {
    String hostname = org.voltcore.utils.CoreUtils.getHostnameOrAddress();
    return hostname;
}

/** Parse the live host ids out of the ZK children of the hosts node. */
public List<Integer> getLiveHostIds() throws KeeperException, InterruptedException
{
    List<Integer> hostids = new ArrayList<Integer>();
    for (String host : m_zk.getChildren(CoreZK.hosts, false, null))
    {
        hostids.add(Integer.parseInt(host.substring(host.indexOf("host") + "host".length())));
    }
    return hostids;
}

/**
 * Given a hostid, return the hostname for it
 */
@Override
public String getHostnameForHostID(int hostId) {
    ForeignHost fh = m_foreignHosts.get(hostId);
    return fh == null ? "UNKNOWN" : fh.hostname();
}

/**
 *
 * @param siteId
 * @param mailboxId
 * @param message
 * @return null if message was delivered locally or a ForeignHost
 * reference if a message is read to be delivered remotely.
 */
ForeignHost presend(long hsId, VoltMessage message)
{
    int hostId = (int)hsId;

    // the local machine case
    if (hostId == m_localHostId) {
        Mailbox mbox = m_siteMailboxes.get(hsId);
        if (mbox != null) {
            mbox.deliver(message);
            return null;
        } else {
            hostLog.warn("Mailbox is not registered for site id " + CoreUtils.getSiteIdFromHSId(hsId));
            return null;
        }
    }

    // the foreign machine case
    ForeignHost fhost = m_foreignHosts.get(hostId);

    if (fhost == null)
    {
        if (!m_knownFailedHosts.contains(hostId)) {
            hostLog.warn(
                    "Attempted to send a message to foreign host with id " +
                    hostId + " but there is no such host.");
        }
        return null;
    }

    if (!fhost.isUp())
    {
        //Throwable t = new Throwable();
        //java.io.StringWriter sw = new java.io.StringWriter();
        //java.io.PrintWriter pw = new java.io.PrintWriter(sw);
        //t.printStackTrace(pw);
        //pw.flush();
        m_logger.warn("Attempted delivery of message to failed site: " + CoreUtils.hsIdToString(hsId));
        //m_logger.warn(sw.toString());
        return null;
    }
    return fhost;
}

/** Replace the no-op placeholder at an already-generated HSId with a real mailbox. */
public void registerMailbox(Mailbox mailbox) {
    if (!m_siteMailboxes.containsKey(mailbox.getHSId())) {
            throw new RuntimeException("Can only register a mailbox with an hsid alreadly generated");
    }
    m_siteMailboxes.put(mailbox.getHSId(), mailbox);
}

/*
 * Generate a slot for the mailbox and put a noop box there. Can also
 * supply a value
 */
public long generateMailboxId(Long mailboxId) {
    final long hsId = mailboxId == null ? getHSIdForLocalSite(m_nextSiteId.getAndIncrement()) : mailboxId;
    // Placeholder mailbox that logs and drops anything delivered to it.
    m_siteMailboxes.put(hsId, new Mailbox() {

        @Override
        public void send(long hsId, VoltMessage message) {}

        @Override
        public void send(long[] hsIds, VoltMessage message) {}

        @Override
        public void deliver(VoltMessage message) {
            hostLog.info("No-op mailbox(" + CoreUtils.hsIdToString(hsId) + ") dropped message " + message);
        }

        @Override
        public void deliverFront(VoltMessage message) {}

        @Override
        public VoltMessage recv() {return null;}

        @Override
        public VoltMessage recvBlocking() {return null;}

        @Override
        public VoltMessage recvBlocking(long timeout) {return null;}

        @Override
        public VoltMessage recv(Subject[] s) {return null;}

        @Override
        public VoltMessage recvBlocking(Subject[] s) {return null;}

        @Override
        public VoltMessage recvBlocking(Subject[] s, long timeout) {
            return null;
        }

        @Override
        public long getHSId() {return 0L;}

        @Override
        public void setHSId(long hsId) {}

    });
    return hsId;
}

/*
 * Create a site mailbox with a generated host id
 */
public Mailbox createMailbox() {
    final int siteId = m_nextSiteId.getAndIncrement();
    long hsId = getHSIdForLocalSite(siteId);
    SiteMailbox sm = new SiteMailbox( this, hsId);
    m_siteMailboxes.put(hsId, sm);
    return sm;
}

/**
 * Discard a mailbox
 */
public void removeMailbox(long hsId) {
    m_siteMailboxes.remove(hsId);
}

/** Deliver a message locally, or hand it to the owning ForeignHost. */
public void send(final long destinationHSId, final VoltMessage message)
{
    assert(message != null);

    ForeignHost host = presend(destinationHSId, message);
    if (host != null) {
        host.send(new long [] { destinationHSId }, message);
    }
}

/** Multi-destination send: groups destinations by ForeignHost to batch the writes. */
public void send(long[] destinationHSIds, final VoltMessage message)
{
    assert(message != null);
    assert(destinationHSIds != null);
    final HashMap<ForeignHost, ArrayList<Long>> foreignHosts =
        new HashMap<ForeignHost, ArrayList<Long>>(32);
    for (long hsId : destinationHSIds) {
        ForeignHost host = presend(hsId, message);
        if (host == null) continue;
        ArrayList<Long> bundle = foreignHosts.get(host);
        if (bundle == null) {
            bundle = new ArrayList<Long>();
            foreignHosts.put(host, bundle);
        }
        bundle.add(hsId);
    }

    if (foreignHosts.size() == 0) return;

    for (Entry<ForeignHost, ArrayList<Long>> e : foreignHosts.entrySet()) {
        e.getKey().send(Longs.toArray(e.getValue()), message);
    }
}

/**
 * Block on this call until the number of ready hosts is
 * equal to the number of expected hosts.
 *
 * @return True if returning with all hosts ready. False if error.
 */
public void waitForAllHostsToBeReady(int expectedHosts) {
    m_localhostReady = true;
    try {
        m_zk.create(CoreZK.readyhosts_host, null, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
        while (true) {
            ZKUtil.FutureWatcher fw = new ZKUtil.FutureWatcher();
            if (m_zk.getChildren(CoreZK.readyhosts, fw).size() == expectedHosts) {
                break;
            }
            fw.get();
        }
    } catch (Exception e) {
        org.voltdb.VoltDB.crashLocalVoltDB("Error waiting for hosts to be ready", false, e);
    }
}

public synchronized boolean isLocalHostReady() {
    return m_localhostReady;
}

/** Tear down ZK, the agreement site, all foreign hosts, the joiner, and the network. */
public void shutdown() throws InterruptedException
{
    m_zk.close();
    m_agreementSite.shutdown();
    for (ForeignHost host : m_foreignHosts.values())
    {
        // null is OK. It means this host never saw this host id up
        if (host != null)
        {
            host.close();
        }
    }
    m_joiner.shutdown();
    m_network.shutdown();
}

/*
 * Register a custom mailbox, optionally specifying what the hsid should be.
 */
public void createMailbox(Long proposedHSId, Mailbox mailbox) {
    long hsId = 0;
    if (proposedHSId != null) {
        if (m_siteMailboxes.containsKey(proposedHSId)) {
            org.voltdb.VoltDB.crashLocalVoltDB(
                    "Attempted to create a mailbox for site " +
                    CoreUtils.hsIdToString(proposedHSId) + " twice", true, null);
        }
        hsId = proposedHSId;
    } else {
        hsId = getHSIdForLocalSite(m_nextSiteId.getAndIncrement());
        mailbox.setHSId(hsId);
    }

    m_siteMailboxes.put(hsId, mailbox);
}

/**
 * Get the number of up foreign hosts. Used for test purposes.
 * @return The number of up foreign hosts.
*/ public int countForeignHosts() { int retval = 0; for (ForeignHost host : m_foreignHosts.values()) if ((host != null) && (host.isUp())) retval++; return retval; } /** * Kill a foreign host socket by id. * @param hostId The id of the foreign host to kill. */ public void closeForeignHostSocket(int hostId) { ForeignHost fh = m_foreignHosts.get(hostId); if (fh != null && fh.isUp()) { fh.killSocket(); } reportForeignHostFailed(hostId); } public ZooKeeper getZK() { return m_zk; } public void sendPoisonPill(String err) { for (ForeignHost fh : m_foreignHosts.values()) { if (fh != null && fh.isUp()) { fh.sendPoisonPill(err); } } } }
package org.voltdb.messaging;

import com.google_voltpatches.common.collect.ImmutableSet;
import com.google_voltpatches.common.collect.Sets;
import org.voltcore.messaging.VoltMessage;
import org.voltdb.StoredProcedureInvocation;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Set;

/**
 * A message sent from the involved partitions to the
 * multipart replayer during command log replay. Carries the transaction and
 * unique ids, the sending partition, the set of all involved partitions, and
 * optionally the stored procedure invocation to replay.
 */
public class MpReplayMessage extends VoltMessage {
    long m_txnId;
    long m_uniqueId;
    int m_partitionId;
    Set<Integer> m_involvedPartitions;
    StoredProcedureInvocation m_invocation; // optional; may be null

    /** Empty constructor for de-serialization */
    MpReplayMessage() {
        super();
    }

    public MpReplayMessage(long txnId, long uniqueId, int partitionId,
                           Collection<Integer> involvedPartitions,
                           StoredProcedureInvocation invocation) {
        super();
        m_txnId = txnId;
        m_uniqueId = uniqueId;
        m_partitionId = partitionId;
        // Immutable snapshot of the caller's partition collection.
        m_involvedPartitions = ImmutableSet.copyOf(involvedPartitions);
        m_invocation = invocation;
    }

    public long getTxnId() {
        return m_txnId;
    }

    public long getUniqueId() {
        return m_uniqueId;
    }

    public int getPartitionId() {
        return m_partitionId;
    }

    public Set<Integer> getInvolvedPartitions() {
        return m_involvedPartitions;
    }

    public StoredProcedureInvocation getInvocation() {
        return m_invocation;
    }

    @Override
    public int getSerializedSize() {
        // Fixed-width fields plus one int per involved partition.
        int total = super.getSerializedSize()
                + 8                                 // m_txnId
                + 8                                 // m_uniqueId
                + 4                                 // m_partitionId
                + 4                                 // m_involvedPartitions.size()
                + 4 * m_involvedPartitions.size();  // partition ids
        return (m_invocation == null) ? total : total + m_invocation.getSerializedSize();
    }

    @Override
    protected void initFromBuffer(ByteBuffer buf) throws IOException {
        m_txnId = buf.getLong();
        m_uniqueId = buf.getLong();
        m_partitionId = buf.getInt();

        final int count = buf.getInt();
        m_involvedPartitions = Sets.newHashSet();
        for (int idx = 0; idx < count; idx++) {
            m_involvedPartitions.add(buf.getInt());
        }

        // The invocation is optional; its presence is signalled by trailing bytes.
        m_invocation = null;
        if (buf.remaining() > 0) {
            StoredProcedureInvocation invocation = new StoredProcedureInvocation();
            invocation.initFromBuffer(buf);
            m_invocation = invocation;
        }
    }

    @Override
    public void flattenToBuffer(ByteBuffer buf) throws IOException {
        buf.put(VoltDbMessageFactory.MP_REPLAY_ID);
        buf.putLong(m_txnId);
        buf.putLong(m_uniqueId);
        buf.putInt(m_partitionId);
        buf.putInt(m_involvedPartitions.size());
        for (int partition : m_involvedPartitions) {
            buf.putInt(partition);
        }
        if (m_invocation != null) {
            m_invocation.flattenToBuffer(buf);
        }
        // The buffer must have been sized exactly by getSerializedSize().
        assert(buf.capacity() == buf.position());
        buf.limit(buf.position());
    }
}
package com.yahoo.squidb.data;

import android.annotation.TargetApi;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.text.TextUtils;

import com.yahoo.squidb.Beta;
import com.yahoo.squidb.data.adapter.DefaultOpenHelperWrapper;
import com.yahoo.squidb.data.adapter.SQLExceptionWrapper;
import com.yahoo.squidb.data.adapter.SQLiteDatabaseWrapper;
import com.yahoo.squidb.data.adapter.SQLiteOpenHelperWrapper;
import com.yahoo.squidb.data.adapter.SquidTransactionListener;
import com.yahoo.squidb.sql.CompiledStatement;
import com.yahoo.squidb.sql.Criterion;
import com.yahoo.squidb.sql.Delete;
import com.yahoo.squidb.sql.Index;
import com.yahoo.squidb.sql.Insert;
import com.yahoo.squidb.sql.Property;
import com.yahoo.squidb.sql.Property.PropertyVisitor;
import com.yahoo.squidb.sql.Query;
import com.yahoo.squidb.sql.SqlStatement;
import com.yahoo.squidb.sql.SqlTable;
import com.yahoo.squidb.sql.Table;
import com.yahoo.squidb.sql.TableStatement;
import com.yahoo.squidb.sql.Update;
import com.yahoo.squidb.sql.View;
import com.yahoo.squidb.sql.VirtualTable;
import com.yahoo.squidb.utility.Logger;
import com.yahoo.squidb.utility.VersionCode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * SquidDatabase is a database abstraction which wraps a SQLite database.
 * <p>
 * Use this class to control the lifecycle of your database where you would normally use a
 * {@link android.database.sqlite.SQLiteOpenHelper}. The first call to a read or write operation will open the database.
 * You can close it again using {@link #close()}.
 * <p>
 * SquidDatabase provides typesafe reads and writes using model classes. For example, rather than using rawQuery to
 * get a Cursor, use {@link #query(Class, Query)}.
 * <p>
 * By convention, methods beginning with "try" (e.g. {@link #tryCreateTable(Table) tryCreateTable}) return true
 * if the operation succeeded and false if it failed for any reason. If it fails, there will also be a call to
 * {@link #onError(String, Throwable) onError}.
 * <p>
 * Methods that use String arrays for where clause arguments ({@link #update(String, ContentValues, String, String[])
 * update}, {@link #updateWithOnConflict(String, ContentValues, String, String[], int) updateWithOnConflict}, and
 * {@link #delete(String, String, String[]) delete}) are wrappers around Android's {@link SQLiteDatabase} methods.
 * However, Android's default behavior of binding all arguments as strings can have unexpected bugs, particularly when
 * working with SQLite functions. For example:
 *
 * <pre>
 * select * from t where _id = '1'; // Returns the first row
 * select * from t where abs(_id) = '1'; // Always returns empty set
 * </pre>
 *
 * For this reason, these methods are protected rather than public. You can choose to expose them in your database
 * subclass if you wish, but we recommend that you instead use the typesafe, public, model-based methods, such as
 * {@link #update(Criterion, TableModel)}, {@link #updateWithOnConflict(Criterion, TableModel,
 * TableStatement.ConflictAlgorithm)}, {@link #delete(Class, long)}, and {@link #deleteWhere(Class, Criterion)}.
 * <p>
 * As a convenience, when calling the {@link #query(Class, Query) query} and {@link #fetchByQuery(Class, Query)
 * fetchByQuery} methods, if the <code>query</code> argument does not have a FROM clause, the table or view to select
 * from will be inferred from the provided <code>modelClass</code> argument (if possible). This allows for invocations
 * where {@link Query#from(com.yahoo.squidb.sql.SqlTable) Query.from} is never explicitly called:
 *
 * <pre>
 * SquidCursor&lt;Person&gt; cursor =
 *         db.query(Person.class, Query.select().orderBy(Person.NAME.asc()));
 * </pre>
 *
 * By convention, the <code>fetch...</code> methods return a single model instance corresponding to the first record
 * found, or null if no records are found for that particular form of fetch.
 */
public abstract class SquidDatabase {

    /**
     * @return the database name
     */
    public abstract String getName();

    /**
     * @return the database version
     */
    protected abstract int getVersion();

    /**
     * @return all {@link Table Tables} and {@link VirtualTable VirtualTables} that should be created when the
     * database is created
     */
    protected abstract Table[] getTables();

    /**
     * @return all {@link View Views} that should be created when the database is created. Views will be created after
     * all Tables have been created. The default implementation returns null (no views).
     */
    protected View[] getViews() {
        return null;
    }

    /**
     * @return all {@link Index Indexes} that should be created when the database is created. Indexes will be created
     * after Tables and Views have been created. The default implementation returns null (no indexes).
     */
    protected Index[] getIndexes() {
        return null;
    }

    /**
     * Called after the database has been created. At this time, all {@link Table Tables} and {@link
     * VirtualTable VirtualTables} returned from {@link #getTables()}, all {@link View Views} from {@link #getViews()},
     * and all {@link Index Indexes} from {@link #getIndexes()} will have been created. Any additional database setup
     * should be done here, e.g. creating other views, indexes, triggers, or inserting data.
     *
     * @param db the {@link SQLiteDatabaseWrapper} being created
     */
    protected void onTablesCreated(SQLiteDatabaseWrapper db) {
    }

    /**
     * Called when the database should be upgraded from one version to another.
* The most common pattern to use is a
* fall-through switch statement with calls to the tryAdd/Create/Drop methods:
*
* <pre>
* switch(oldVersion) {
*     case 1:
*         tryAddColumn(MyModel.NEW_COL_1);
*     case 2:
*         tryCreateTable(MyNewModel.TABLE);
* </pre>
*
* @param db the {@link SQLiteDatabaseWrapper} being upgraded
* @param oldVersion the current database version
* @param newVersion the database version being upgraded to
* @return true if the upgrade was handled successfully, false otherwise
*/
    protected abstract boolean onUpgrade(SQLiteDatabaseWrapper db, int oldVersion, int newVersion);

    /**
     * Called when the database should be downgraded from one version to another
     *
     * @param db the {@link SQLiteDatabaseWrapper} being downgraded
     * @param oldVersion the current database version
     * @param newVersion the database version being downgraded to
     * @return true if the downgrade was handled successfully, false otherwise. The default implementation returns true.
     */
    protected boolean onDowngrade(SQLiteDatabaseWrapper db, int oldVersion, int newVersion) {
        return true;
    }

    /**
     * Called to notify of a failure in {@link #onUpgrade(SQLiteDatabaseWrapper, int, int) onUpgrade()} or
     * {@link #onDowngrade(SQLiteDatabaseWrapper, int, int) onDowngrade()}, either because it returned false or because
     * an unexpected exception occurred. Subclasses can take drastic corrective action here, e.g. recreating the
     * database with {@link #recreate()}. The default implementation throws an exception.
     * <p>
     * Note that taking no action here leaves the database in whatever state it was in when the error occurred, which
     * can result in unexpected errors if callers are allowed to invoke further operations on the database.
     *
     * @param failure details about the upgrade or downgrade that failed
     */
    protected void onMigrationFailed(MigrationFailedException failure) {
        throw failure;
    }

    /**
     * Called when the database connection is being configured, to enable features such as write-ahead logging or
     * foreign key support.
     * <p>
     * This method may be called at different points in the database lifecycle depending on the environment. When using
     * a custom SQLite build with the squidb-sqlite-bindings project, or when running on Android API >= 16, it is
     * called before {@link #onTablesCreated(SQLiteDatabaseWrapper) onTablesCreated},
     * {@link #onUpgrade(SQLiteDatabaseWrapper, int, int) onUpgrade},
     * {@link #onDowngrade(SQLiteDatabaseWrapper, int, int) onDowngrade},
     * and {@link #onOpen(SQLiteDatabaseWrapper) onOpen}. If it is running on stock Android SQLite and API < 16, it
     * is called immediately before onOpen but after the other callbacks. The discrepancy is because onConfigure was
     * only introduced as a callback in API 16, but the ordering should not matter much for most use cases.
     * <p>
     * This method should only call methods that configure the parameters of the database connection, such as
     * {@link SQLiteDatabaseWrapper#enableWriteAheadLogging}, {@link SQLiteDatabaseWrapper#setForeignKeyConstraintsEnabled},
     * {@link SQLiteDatabaseWrapper#setLocale}, {@link SQLiteDatabaseWrapper#setMaximumSize}, or executing PRAGMA statements.
     *
     * @param db the {@link SQLiteDatabaseWrapper} being configured
     */
    protected void onConfigure(SQLiteDatabaseWrapper db) {
    }

    /**
     * Called when the database has been opened. This method is called after the database connection has been
     * configured and after the database schema has been created, upgraded, or downgraded as necessary.
     *
     * @param db the {@link SQLiteDatabaseWrapper} being opened
     */
    protected void onOpen(SQLiteDatabaseWrapper db) {
    }

    /**
     * Called when an error occurs. This is primarily for clients to log notable errors, not for taking corrective
     * action on them. The default implementation prints a warning log.
*
* @param message an error message
* @param error the error that was encountered
*/
    protected void onError(String message, Throwable error) {
        Logger.w(getClass().getSimpleName() + " -- " + message, error);
    }

    // Initial capacity used when building SQL strings — presumably sized to
    // avoid StringBuilder regrowth for typical statements (usage not in view).
    private static final int STRING_BUILDER_INITIAL_CAPACITY = 128;

    // Application context (never an Activity context; see the constructor).
    private final Context context;

    // The SquidDatabase this one is currently ATTACHed to, or null when not attached.
    private SquidDatabase attachedTo = null;

    // Read lock = ordinary (non-exclusive) database use; write lock = exclusive
    // use of the connection (e.g. while attached to another database).
    private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    /**
     * SQLiteOpenHelperWrapper that takes care of database operations
     */
    private SQLiteOpenHelperWrapper helper = null;

    /**
     * Internal pointer to open database. Hides the fact that there is a database and a wrapper by making a single
     * monolithic interface
     */
    private SQLiteDatabaseWrapper database = null;

    /**
     * Cached version code
     */
    private VersionCode sqliteVersion = null;

    /**
     * Map of class objects to corresponding tables
     */
    private Map<Class<? extends AbstractModel>, SqlTable<?>> tableMap;

    // True while a migration callback is running; recreate() consults this so it
    // can defer the recreate instead of tearing the database down mid-migration.
    private boolean isInMigration;

    /**
     * Create a new SquidDatabase
     *
     * @param context the Context, must not be null
     */
    public SquidDatabase(Context context) {
        if (context == null) {
            throw new NullPointerException("Null context creating SquidDatabase");
        }
        // Hold only the application context to avoid leaking an Activity.
        this.context = context.getApplicationContext();
        initializeTableMap();
    }

    // Builds the model-class -> SqlTable lookup from getTables() and getViews().
    private void initializeTableMap() {
        tableMap = new HashMap<Class<? extends AbstractModel>, SqlTable<?>>();
        registerTableModels(getTables());
        registerTableModels(getViews());
    }

    // Registers each table's model class in tableMap; the first registration for
    // a given model class wins.
    private <T extends SqlTable<?>> void registerTableModels(T[] tables) {
        if (tables != null) {
            for (SqlTable<?> table : tables) {
                if (table.getModelClass() != null && !tableMap.containsKey(table.getModelClass())) {
                    tableMap.put(table.getModelClass(), table);
                }
            }
        }
    }

    /**
     * @return the path to the underlying database file.
     */
    public String getDatabasePath() {
        return context.getDatabasePath(getName()).getAbsolutePath();
    }

    /**
     * Return the {@link SqlTable} corresponding to the specified model type
     *
     * @param modelClass the model class
     * @return the corresponding data source for the model. May be a table, view, or subquery
     * @throws UnsupportedOperationException if the model class is unknown to this database
     */
    protected final SqlTable<?> getSqlTable(Class<? extends AbstractModel> modelClass) {
        Class<?> type = modelClass;
        SqlTable<?> table;
        // Walk up the class hierarchy until a registered table is found or the
        // model base classes are reached.
        //noinspection SuspiciousMethodCalls
        while ((table = tableMap.get(type)) == null && type != AbstractModel.class && type != Object.class) {
            type = type.getSuperclass();
        }
        if (table != null) {
            return table;
        }
        throw new UnsupportedOperationException("Unknown model class " + modelClass);
    }

    /**
     * Return the {@link Table} corresponding to the specified TableModel class
     *
     * @param modelClass the model class
     * @return the corresponding table for the model
     * @throws UnsupportedOperationException if the model class is unknown to this database
     */
    protected final Table getTable(Class<? extends TableModel> modelClass) {
        return (Table) getSqlTable(modelClass);
    }

    /**
     * Gets the underlying SQLiteDatabaseWrapper instance. Most users should not need to call this. If you call this
     * from your AbstractDatabase subclass with the intention of executing SQL, you should wrap the calls with a lock,
     * probably the non-exclusive one:
     *
     * <pre>
     * public void execSql(String sql) {
     *     acquireNonExclusiveLock();
     *     try {
     *         getDatabase().execSQL(sql);
     *     } finally {
     *         releaseNonExclusiveLock();
     *     }
     * }
     * </pre>
     *
     * You only need to acquire the exclusive lock if you truly need exclusive access to the database connection.
*
* @return the underlying {@link SQLiteDatabaseWrapper}, which will be opened if it is not yet opened
* @see #acquireExclusiveLock()
* @see #acquireNonExclusiveLock()
*/
    protected synchronized final SQLiteDatabaseWrapper getDatabase() {
        // Lazily open the database on first access.
        if (database == null) {
            openForWriting();
        }
        return database;
    }

    /**
     * Attach another SquidDatabase to this one using SQLite ATTACH. When write-ahead
     * logging is enabled, this database's exclusive lock is held for the duration of
     * the ATTACH so no other transactions are active.
     *
     * @param other the database to attach to this one
     * @return the alias the other database was attached as, or null if the ATTACH failed
     * @throws IllegalStateException if this database is itself attached, or if the
     *         calling thread is in a transaction
     */
    @Beta
    @TargetApi(VERSION_CODES.JELLY_BEAN)
    public final String attachDatabase(SquidDatabase other) {
        if (attachedTo != null) {
            throw new IllegalStateException("Can't attach a database to a database that is itself attached");
        }
        if (inTransaction()) {
            throw new IllegalStateException("Can't attach a database while in a transaction on the current thread");
        }

        boolean walEnabled = (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) &&
                getDatabase().isWriteAheadLoggingEnabled();
        if (walEnabled) {
            // need to wait for transactions to finish
            acquireExclusiveLock();
        }
        try {
            return other.attachTo(this);
        } finally {
            if (walEnabled) {
                releaseExclusiveLock();
            }
        }
    }

    /**
     * Detaches a database previously attached with {@link #attachDatabase(SquidDatabase)}
     *
     * @return true if the other database was successfully detached
     */
    @Beta
    public final boolean detachDatabase(SquidDatabase other) {
        if (other.attachedTo != this) {
            throw new IllegalArgumentException("Database " + other.getName() + " is not attached to " + getName());
        }

        return other.detachFrom(this);
    }

    // Attach THIS database to attachTo. Acquires this database's exclusive lock and
    // holds it for the whole time it is attached; released in detachFrom() (or
    // immediately below if the ATTACH statement fails).
    private String attachTo(SquidDatabase attachTo) {
        if (attachedTo != null) {
            throw new IllegalArgumentException(
                    "Database " + getName() + " is already attached to " + attachedTo.getName());
        }
        if (inTransaction()) {
            throw new IllegalStateException(
                    "Cannot attach database " + getName() + " to " + attachTo.getName() + " -- " + getName()
                            + " is in a transaction on the calling thread");
        }

        acquireExclusiveLock();
        String attachedAs = getAttachedName();
        if (!attachTo.tryExecSql("ATTACH '" + getDatabasePath() + "' AS '" + attachedAs + "'")) {
            releaseExclusiveLock(); // Failed
            return null;
        } else {
            attachedTo = attachTo;
            return attachedAs;
        }
    }

    // Detach THIS database from detachFrom; releases the exclusive lock taken in attachTo().
    private boolean detachFrom(SquidDatabase detachFrom) {
        if (detachFrom.tryExecSql("DETACH '" + getAttachedName() + "'")) {
            attachedTo = null;
            releaseExclusiveLock();
            return true;
        }
        return false;
    }

    // Alias used for ATTACH: the database name with '.' replaced by '_'.
    private String getAttachedName() {
        return getName().replace('.', '_');
    }

    /**
     * Open the database for writing. Handles the case where {@link #recreate()} was
     * called from inside a migration by deferring the recreate until the failed
     * open has fully unwound.
     */
    private void openForWriting() {
        initializeHelper();

        boolean performRecreate = false;
        try {
            setDatabase(helper.openForWriting());
        } catch (RecreateDuringMigrationException recreate) {
            // recreate() was invoked mid-migration; do it after unwinding.
            performRecreate = true;
        } catch (MigrationFailedException fail) {
            onError(fail.getMessage(), fail);
            onMigrationFailed(fail);
        } catch (RuntimeException e) {
            onError("Failed to open database: " + getName(), e);
            throw e;
        }

        if (performRecreate) {
            recreate();
        }
    }

    // Lazily construct the open helper via the (overridable) factory method.
    private void initializeHelper() {
        if (helper == null) {
            helper = getOpenHelper(context, getName(), new OpenHelperDelegate(), getVersion());
        }
    }

    /**
     * Subclasses can override this method to enable connecting to a different version of SQLite than the default
     * version shipped with Android. For example, the squidb-sqlite-bindings project provides a class
     * SQLiteBindingsDatabaseOpenHelper to facilitate binding to a custom native build of SQLite. Overriders of this
     * method could simply <code>return new SQLiteBindingsDatabaseOpenHelper(context, databaseName, delegate, version);</code>
     * if they wanted to bypass Android's version of SQLite and use the version included with that project.
     * <p>
     * If you don't override this method, the stock Android SQLite build will be used. This is generally fine unless you
     * have a specific reason to prefer some other version of SQLite.
*/
    protected SQLiteOpenHelperWrapper getOpenHelper(Context context, String databaseName,
            OpenHelperDelegate delegate, int version) {
        return new DefaultOpenHelperWrapper(context, databaseName, delegate, version);
    }

    /**
     * @return true if a connection to the {@link SQLiteDatabase} is open, false otherwise
     */
    public synchronized final boolean isOpen() {
        return database != null && database.isOpen();
    }

    /**
     * Close the database if it has been opened previously. Also discards the open
     * helper so the next access builds a fresh one.
     */
    public synchronized final void close() {
        if (isOpen()) {
            database.close();
        }
        helper = null;
        setDatabase(null);
    }

    /**
     * Clear all data in the database.
     * <p>
     * WARNING: Any open database resources will be abruptly closed. Do not call this method if other threads may be
     * accessing the database. The existing database file will be deleted and all data will be lost.
     */
    public synchronized final void clear() {
        close();
        context.deleteDatabase(getName());
    }

    /**
     * Clears the database and recreates an empty version of it.
     * <p>
     * WARNING: Any open connections to the database will be abruptly closed. Do not call this method if other threads
     * may be accessing the database.
     *
     * @see #clear()
     */
    public synchronized final void recreate() {
        if (isInMigration) {
            // Can't tear the database down mid-migration; signal openForWriting()
            // to perform the recreate after the open call unwinds.
            throw new RecreateDuringMigrationException();
        } else {
            clear();
            getDatabase();
        }
    }

    /**
     * @return a human-readable database name for debugging
     */
    @Override
    public String toString() {
        return "DB:" + getName();
    }

    /**
     * Execute a raw sqlite query. This method takes an Object[] for the arguments because Android's default behavior
     * of binding all arguments as strings can have unexpected bugs, particularly when working with functions. For
     * example:
     *
     * <pre>
     * select * from t where _id = '1'; // Returns the first row
     * select * from t where abs(_id) = '1'; // Always returns empty set
     * </pre>
     *
     * To eliminate this class of bugs, we bind all arguments as their native types, not as strings. Any object in the
     * array that is not a basic type (Number, String, Boolean, etc.) will be converted to a sanitized string before
     * binding.
     *
     * @param sql a sql statement
     * @param sqlArgs arguments to bind to the sql statement
     * @return a {@link Cursor} containing results of the query
     */
    public Cursor rawQuery(String sql, Object[] sqlArgs) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().rawQuery(sql, sqlArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    // For use only when validating queries
    private void compileStatement(String sql) {
        acquireNonExclusiveLock();
        try {
            getDatabase().ensureSqlCompiles(sql);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * @see SQLiteDatabase#insert(String table, String nullColumnHack, ContentValues values)
     */
    protected long insert(String table, String nullColumnHack, ContentValues values) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().insert(table, nullColumnHack, values);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * @see SQLiteDatabase#insertOrThrow(String table, String nullColumnHack, ContentValues values)
     */
    protected long insertOrThrow(String table, String nullColumnHack, ContentValues values) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().insertOrThrow(table, nullColumnHack, values);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * @see SQLiteDatabase#insertWithOnConflict(String, String, android.content.ContentValues, int)
     */
    protected long insertWithOnConflict(String table, String nullColumnHack, ContentValues values,
            int conflictAlgorithm) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().insertWithOnConflict(table, nullColumnHack, values, conflictAlgorithm);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * Execute a SQL {@link com.yahoo.squidb.sql.Insert} statement
     *
     * @return the row id of the last row inserted on success, -1 on failure
     */
    private long insertInternal(Insert insert) {
        CompiledStatement compiled = insert.compile(getSqliteVersion());
        acquireNonExclusiveLock();
        try {
            return getDatabase().executeInsert(compiled.sql, compiled.sqlArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * See the note at the top of this file about the potential bugs when using String[] whereArgs
     *
     * @see SQLiteDatabase#delete(String, String, String[])
     */
    protected int delete(String table, String whereClause, String[] whereArgs) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().delete(table, whereClause, whereArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * Execute a SQL {@link com.yahoo.squidb.sql.Delete} statement
     *
     * @return the number of rows deleted on success, -1 on failure
     */
    private int deleteInternal(Delete delete) {
        CompiledStatement compiled = delete.compile(getSqliteVersion());
        acquireNonExclusiveLock();
        try {
            return getDatabase().executeUpdateDelete(compiled.sql, compiled.sqlArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * See the note at the top of this file about the potential bugs when using String[] whereArgs
     *
     * @see SQLiteDatabase#update(String table, ContentValues values, String whereClause, String[] whereArgs)
     */
    protected int update(String table, ContentValues values, String whereClause, String[] whereArgs) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().update(table, values, whereClause, whereArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * See the note at the top of this file about the potential bugs when using String[] whereArgs
     *
     * @see SQLiteDatabase#updateWithOnConflict(String table, ContentValues values, String whereClause, String[]
     * whereArgs, int conflictAlgorithm)
     */
    protected int updateWithOnConflict(String table, ContentValues values, String whereClause, String[] whereArgs,
            int conflictAlgorithm) {
        acquireNonExclusiveLock();
        try {
            return getDatabase().updateWithOnConflict(table, values, whereClause, whereArgs, conflictAlgorithm);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * Execute a SQL {@link com.yahoo.squidb.sql.Update} statement
     *
     * @return the number of rows updated on success, -1 on failure
     */
    private int updateInternal(Update update) {
CompiledStatement compiled = update.compile(getSqliteVersion());
        acquireNonExclusiveLock();
        try {
            return getDatabase().executeUpdateDelete(compiled.sql, compiled.sqlArgs);
        } finally {
            releaseNonExclusiveLock();
        }
    }

    /**
     * Begin a transaction. This acquires a non-exclusive lock.
     *
     * @see #acquireNonExclusiveLock()
     * @see SQLiteDatabase#beginTransaction()
     */
    public void beginTransaction() {
        acquireNonExclusiveLock();
        getDatabase().beginTransaction();
        // Track success state for this (possibly nested) transaction level.
        transactionSuccessState.get().beginTransaction();
    }

    /**
     * Begin a non-exclusive transaction. This acquires a non-exclusive lock.
     *
     * @see #acquireNonExclusiveLock()
     * @see SQLiteDatabase#beginTransactionNonExclusive()
     */
    public void beginTransactionNonExclusive() {
        acquireNonExclusiveLock();
        getDatabase().beginTransactionNonExclusive();
        transactionSuccessState.get().beginTransaction();
    }

    /**
     * Begin a transaction with a listener. This acquires a non-exclusive lock.
     *
     * @param listener the transaction listener
     * @see #acquireNonExclusiveLock()
     * @see SQLiteDatabase#beginTransactionWithListener(android.database.sqlite.SQLiteTransactionListener)
     */
    public void beginTransactionWithListener(SquidTransactionListener listener) {
        acquireNonExclusiveLock();
        getDatabase().beginTransactionWithListener(listener);
        transactionSuccessState.get().beginTransaction();
    }

    /**
     * Begin a non-exclusive transaction with a listener. This acquires a non-exclusive lock.
     *
     * @param listener the transaction listener
     * @see #acquireNonExclusiveLock()
     * @see SQLiteDatabase#beginTransactionWithListenerNonExclusive(android.database.sqlite.SQLiteTransactionListener)
     */
    public void beginTransactionWithListenerNonExclusive(SquidTransactionListener listener) {
        acquireNonExclusiveLock();
        getDatabase().beginTransactionWithListenerNonExclusive(listener);
        transactionSuccessState.get().beginTransaction();
    }

    /**
     * Mark the current transaction as successful
     *
     * @see SQLiteDatabase#setTransactionSuccessful()
     */
    public void setTransactionSuccessful() {
        getDatabase().setTransactionSuccessful();
        transactionSuccessState.get().setTransactionSuccessful();
    }

    /**
     * @return true if a transaction is active
     * @see SQLiteDatabase#inTransaction()
     */
    public synchronized boolean inTransaction() {
        return database != null && database.inTransaction();
    }

    /**
     * End the current transaction. Releases the non-exclusive lock taken by the
     * matching begin call; when the outermost transaction ends, accumulated
     * notifications are flushed (as successful only if every nesting level
     * called {@link #setTransactionSuccessful()}).
     *
     * @see SQLiteDatabase#endTransaction()
     */
    public void endTransaction() {
        getDatabase().endTransaction();
        releaseNonExclusiveLock();

        TransactionSuccessState successState = transactionSuccessState.get();
        successState.endTransaction();
        if (!inTransaction()) {
            flushAccumulatedNotifications(successState.outerTransactionSuccess);
            successState.reset();
        }
    }

    // Tracks nested transaction success or failure state. If any
    // nested transaction fails, the entire outer transaction
    // is also considered to have failed.
private static class TransactionSuccessState {

    // One success flag per nested transaction level. A level starts out false
    // and is flipped to true by setTransactionSuccessful().
    Deque<Boolean> nestedSuccessStack = new LinkedList<Boolean>();
    boolean outerTransactionSuccess = true;

    private void beginTransaction() {
        nestedSuccessStack.push(false);
    }

    private void setTransactionSuccessful() {
        // Replace the current level's flag with true
        nestedSuccessStack.pop();
        nestedSuccessStack.push(true);
    }

    private void endTransaction() {
        Boolean mostRecentTransactionSuccess = nestedSuccessStack.pop();
        if (!mostRecentTransactionSuccess) {
            // Any failed nested transaction fails the whole outer transaction
            outerTransactionSuccess = false;
        }
    }

    private void reset() {
        nestedSuccessStack.clear();
        outerTransactionSuccess = true;
    }
}

// One success-state tracker per thread, since transactions are associated with
// the thread they run on
private ThreadLocal<TransactionSuccessState> transactionSuccessState =
        new ThreadLocal<TransactionSuccessState>() {
            @Override // fixed: was missing, hiding typos in the method signature from the compiler
            protected TransactionSuccessState initialValue() {
                return new TransactionSuccessState();
            }
        };

/**
 * Yield the current transaction
 *
 * @see SQLiteDatabase#yieldIfContendedSafely()
 */
public boolean yieldIfContendedSafely() {
    return getDatabase().yieldIfContendedSafely();
}

/**
 * Convenience method for calling {@link ContentResolver#notifyChange(Uri, android.database.ContentObserver)
 * ContentResolver.notifyChange(uri, null)}.
 *
 * @param uri the Uri to notify
 */
public void notifyChange(Uri uri) {
    context.getContentResolver().notifyChange(uri, null);
}

/**
 * Convenience method for calling {@link ContentResolver#notifyChange(Uri, android.database.ContentObserver)
 * ContentResolver.notifyChange(uri, null)} on all the provided Uris.
 *
 * @param uris the Uris to notify; null or empty collections are a no-op
 */
public void notifyChange(Collection<Uri> uris) {
    if (uris != null && !uris.isEmpty()) {
        ContentResolver resolver = context.getContentResolver();
        for (Uri uri : uris) {
            resolver.notifyChange(uri, null);
        }
    }
}

/**
 * Acquires an exclusive lock on the database.
 * This is semantically similar to acquiring a write lock in a {@link
 * java.util.concurrent.locks.ReadWriteLock ReadWriteLock} but it is not generally necessary for protecting actual
 * database writes--it's only necessary when exclusive use of the database connection is required (e.g. while the
 * database is attached to another database).
 * <p>
 * Only one thread can hold an exclusive lock at a time. Calling this while on a thread that already holds a non-
 * exclusive lock is an error and will deadlock! We will throw an exception if this method is called while the
 * calling thread is in a transaction. Otherwise, this method will block until all non-exclusive locks
 * acquired with {@link #acquireNonExclusiveLock()} have been released, but will prevent any new non-exclusive
 * locks from being acquired while it blocks.
 */
@Beta
protected void acquireExclusiveLock() {
    // Transactions hold the non-exclusive (read) lock; attempting a
    // read -> write upgrade on the same thread would deadlock, so fail fast
    if (inTransaction()) {
        throw new IllegalStateException(
                "Can't acquire an exclusive lock when the calling thread is in a transaction");
    }
    readWriteLock.writeLock().lock();
}

/**
 * Release the exclusive lock acquired by {@link #acquireExclusiveLock()}
 */
@Beta
protected void releaseExclusiveLock() {
    readWriteLock.writeLock().unlock();
}

/**
 * Acquire a non-exclusive lock on the database. This is semantically similar to acquiring a read lock in a {@link
 * java.util.concurrent.locks.ReadWriteLock ReadWriteLock} but may also be used in most cases to protect database
 * writes (see {@link #acquireExclusiveLock()} for why this is true). This will block if the exclusive lock is held
 * by some other thread. Many threads can hold non-exclusive locks as long as no thread holds the exclusive lock.
 */
@Beta
protected void acquireNonExclusiveLock() {
    readWriteLock.readLock().lock();
}

/**
 * Releases a non-exclusive lock acquired with {@link #acquireNonExclusiveLock()}
 */
@Beta
protected void releaseNonExclusiveLock() {
    readWriteLock.readLock().unlock();
}

/**
 * Delegate class passed to a {@link SQLiteOpenHelperWrapper} instance that allows the SQLiteOpenHelperWrapper to call back
 * into its owning SquidDatabase after the database has been created or opened.
 */
public final class OpenHelperDelegate {

    private OpenHelperDelegate() {
        // No public instantiation
    }

    /**
     * Called to create the database tables
     */
    public void onCreate(SQLiteDatabaseWrapper db) {
        setDatabase(db);
        StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
        SqlConstructorVisitor sqlVisitor = new SqlConstructorVisitor();

        // create tables
        Table[] tables = getTables();
        if (tables != null) {
            for (Table table : tables) {
                table.appendCreateTableSql(getSqliteVersion(), sql, sqlVisitor);
                db.execSQL(sql.toString());
                // reuse the same builder for every statement
                sql.setLength(0);
            }
        }

        View[] views = getViews();
        if (views != null) {
            for (View view : views) {
                view.createViewSql(getSqliteVersion(), sql);
                db.execSQL(sql.toString());
                sql.setLength(0);
            }
        }

        Index[] indexes = getIndexes();
        if (indexes != null) {
            for (Index idx : indexes) {
                tryCreateIndex(idx);
            }
        }

        // post-table-creation
        SquidDatabase.this.onTablesCreated(db);
    }

    /**
     * Called to upgrade the database to a new version
     */
    public void onUpgrade(SQLiteDatabaseWrapper db, int oldVersion, int newVersion) {
        setDatabase(db);
        boolean success = false;
        Throwable thrown = null;
        // Flag that a migration is in progress so other code paths can tell
        isInMigration = true;
        try {
            success = SquidDatabase.this.onUpgrade(db, oldVersion, newVersion);
        } catch (Throwable t) {
            thrown = t;
            success = false;
        } finally {
            isInMigration = false;
        }

        // Pass the two known exception types through unchanged; anything else
        // (or a plain false return) becomes a MigrationFailedException
        if (thrown instanceof RecreateDuringMigrationException) {
            throw (RecreateDuringMigrationException) thrown;
        } else if (thrown instanceof MigrationFailedException) {
            throw (MigrationFailedException) thrown;
        } else if (!success) {
            throw
new MigrationFailedException(getName(), oldVersion, newVersion, thrown);
        }
    }

    /**
     * Called to downgrade the database to an older version
     */
    public void onDowngrade(SQLiteDatabaseWrapper db, int oldVersion, int newVersion) {
        setDatabase(db);
        boolean success = false;
        Throwable thrown = null;
        isInMigration = true;
        try {
            success = SquidDatabase.this.onDowngrade(db, oldVersion, newVersion);
        } catch (Throwable t) {
            thrown = t;
            success = false;
        } finally {
            isInMigration = false;
        }

        // Same error contract as onUpgrade: pass through the known exception
        // types, wrap anything else
        if (thrown instanceof RecreateDuringMigrationException) {
            throw (RecreateDuringMigrationException) thrown;
        } else if (thrown instanceof MigrationFailedException) {
            throw (MigrationFailedException) thrown;
        } else if (!success) {
            throw new MigrationFailedException(getName(), oldVersion, newVersion, thrown);
        }
    }

    public void onConfigure(SQLiteDatabaseWrapper db) {
        setDatabase(db);
        SquidDatabase.this.onConfigure(db);
    }

    public void onOpen(SQLiteDatabaseWrapper db) {
        setDatabase(db);
        SquidDatabase.this.onOpen(db);
    }
}

// Caches the db reference and (re)reads the SQLite version whenever the
// underlying connection actually changes
private synchronized void setDatabase(SQLiteDatabaseWrapper db) {
    // If we're already holding a reference to the same object, don't need to update or recalculate the version
    if (database != null && db != null && db.getWrappedDatabase() == database.getWrappedDatabase()) {
        return;
    }
    database = db;
    sqliteVersion = database != null ? readSqliteVersion() : null;
}

/**
 * Add a column to a table by specifying the corresponding {@link Property}
 *
 * @param property the Property associated with the column to add
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryAddColumn(Property<?> property) {
    if (!(property.table instanceof Table)) {
        throw new IllegalArgumentException("Can't alter table: property does not belong to a Table");
    }
    SqlConstructorVisitor visitor = new SqlConstructorVisitor();
    StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
    sql.append("ALTER TABLE ").append(property.table.getExpression()).append(" ADD ");
    // The visitor appends the column name/type/definition for this property
    property.accept(visitor, sql);
    return tryExecSql(sql.toString());
}

/**
 * Create a new {@link Table} or {@link VirtualTable} in the database
 *
 * @param table the Table or VirtualTable to create
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryCreateTable(Table table) {
    SqlConstructorVisitor sqlVisitor = new SqlConstructorVisitor();
    StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
    table.appendCreateTableSql(getSqliteVersion(), sql, sqlVisitor);
    return tryExecSql(sql.toString());
}

/**
 * Drop a {@link Table} or {@link VirtualTable} in the database if it exists
 *
 * @param table the Table or VirtualTable to drop
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryDropTable(Table table) {
    return tryExecSql("DROP TABLE IF EXISTS " + table.getExpression());
}

/**
 * Create a new {@link View} in the database
 *
 * @param view the View to create
 * @return true if the statement executed without error, false otherwise
 * @see com.yahoo.squidb.sql.View#fromQuery(com.yahoo.squidb.sql.Query, String)
 * @see com.yahoo.squidb.sql.View#temporaryFromQuery(com.yahoo.squidb.sql.Query, String)
 */
public boolean tryCreateView(View view) {
    StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
view.createViewSql(getSqliteVersion(), sql);
    return tryExecSql(sql.toString());
}

/**
 * Drop a {@link View} in the database if it exists
 *
 * @param view the View to drop
 * @return true if the statement executed without error, false otherwise
 */
public boolean tryDropView(View view) {
    return tryExecSql("DROP VIEW IF EXISTS " + view.getExpression());
}

/**
 * Create a new {@link Index} in the database
 *
 * @param index the Index to create
 * @return true if the statement executed without error, false otherwise
 * @see com.yahoo.squidb.sql.Table#index(String, com.yahoo.squidb.sql.Property[])
 * @see com.yahoo.squidb.sql.Table#uniqueIndex(String, com.yahoo.squidb.sql.Property[])
 */
protected boolean tryCreateIndex(Index index) {
    return tryCreateIndex(index.getName(), index.getTable(), index.isUnique(), index.getProperties());
}

/**
 * Create a new {@link Index} in the database
 *
 * @param indexName name for the Index
 * @param table the table to create the index on
 * @param unique true if the index is a unique index on the specified columns
 * @param properties the columns to create the index on
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryCreateIndex(String indexName, Table table, boolean unique, Property<?>... properties) {
    if (properties == null || properties.length == 0) {
        onError(String.format("Cannot create index %s: no properties specified", indexName), null);
        return false;
    }
    StringBuilder sql = new StringBuilder(STRING_BUILDER_INITIAL_CAPACITY);
    sql.append("CREATE ");
    if (unique) {
        sql.append("UNIQUE ");
    }
    sql.append("INDEX IF NOT EXISTS ").append(indexName).append(" ON ").append(table.getExpression())
            .append("(");
    for (Property<?> p : properties) {
        sql.append(p.getName()).append(",");
    }
    // Drop the trailing comma (safe: properties is non-empty at this point)
    sql.deleteCharAt(sql.length() - 1);
    sql.append(")");
    return tryExecSql(sql.toString());
}

/**
 * Drop an {@link Index} if it exists
 *
 * @param index the Index to drop
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryDropIndex(Index index) {
    return tryDropIndex(index.getName());
}

/**
 * Drop an {@link Index} if it exists
 *
 * @param indexName the name of the Index to drop
 * @return true if the statement executed without error, false otherwise
 */
protected boolean tryDropIndex(String indexName) {
    return tryExecSql("DROP INDEX IF EXISTS " + indexName);
}

/**
 * Execute a {@link SqlStatement}
 *
 * @param statement the statement to execute
 * @return true if the statement executed without error, false otherwise
 */
public boolean tryExecStatement(SqlStatement statement) {
    CompiledStatement compiled = statement.compile(getSqliteVersion());
    return tryExecSql(compiled.sql, compiled.sqlArgs);
}

/**
 * Execute a raw SQL statement
 *
 * @param sql the statement to execute
 * @return true if the statement executed without an error
 * @see SQLiteDatabase#execSQL(String)
 */
public boolean tryExecSql(String sql) {
    acquireNonExclusiveLock();
    try {
        getDatabase().execSQL(sql);
        return true;
    } catch (SQLExceptionWrapper e) {
        // Best-effort contract: report the failure and return false rather
        // than propagating
        onError("Failed to execute statement: " + sql, e);
        return false;
    } finally {
        releaseNonExclusiveLock();
    }
}

/**
 * Execute a raw SQL statement
 *
 * @param sql the statement to execute
 * @throws SQLExceptionWrapper if there is an error parsing the
SQL or some other error
 * @see SQLiteDatabase#execSQL(String)
 */
public void execSqlOrThrow(String sql) throws SQLExceptionWrapper {
    acquireNonExclusiveLock();
    try {
        getDatabase().execSQL(sql);
    } finally {
        releaseNonExclusiveLock();
    }
}

/**
 * Execute a raw SQL statement with optional arguments. The sql string may contain '?' placeholders for the
 * arguments.
 *
 * @param sql the statement to execute
 * @param bindArgs the arguments to bind to the statement
 * @return true if the statement executed without an error
 * @see SQLiteDatabase#execSQL(String, Object[])
 */
public boolean tryExecSql(String sql, Object[] bindArgs) {
    acquireNonExclusiveLock();
    try {
        getDatabase().execSQL(sql, bindArgs);
        return true;
    } catch (SQLExceptionWrapper e) {
        onError("Failed to execute statement: " + sql, e);
        return false;
    } finally {
        releaseNonExclusiveLock();
    }
}

/**
 * Execute a raw SQL statement with optional arguments. The sql string may contain '?' placeholders for the
 * arguments.
 *
 * @param sql the statement to execute
 * @param bindArgs the arguments to bind to the statement
 * @throws SQLExceptionWrapper if there is an error parsing the SQL or some other error
 * @see SQLiteDatabase#execSQL(String, Object[])
 */
public void execSqlOrThrow(String sql, Object[] bindArgs) throws SQLExceptionWrapper {
    acquireNonExclusiveLock();
    try {
        getDatabase().execSQL(sql, bindArgs);
    } finally {
        releaseNonExclusiveLock();
    }
}

/**
 * @return the current SQLite version as a {@link VersionCode}
 * @throws RuntimeException if the version could not be read
 */
public VersionCode getSqliteVersion() {
    // Lazily initialized with double-checked locking.
    // NOTE(review): for this pattern to be safe under the Java memory model,
    // the sqliteVersion field should be declared volatile -- the field
    // declaration is not visible in this chunk, confirm.
    if (sqliteVersion == null) {
        synchronized (this) {
            if (sqliteVersion == null) {
                sqliteVersion = readSqliteVersion();
            }
        }
    }
    return sqliteVersion;
}

// Queries SQLite itself for its version string and parses it
private VersionCode readSqliteVersion() {
    acquireNonExclusiveLock();
    try {
        String versionString = getDatabase().simpleQueryForString("select sqlite_version()", null);
        return VersionCode.parse(versionString);
    } catch (RuntimeException e) {
        onError("Failed to read sqlite version", e);
        throw new RuntimeException("Failed to read sqlite version", e);
    } finally {
        releaseNonExclusiveLock();
    }
}

/**
 * Visitor that builds column definitions for {@link Property}s
 */
private static class SqlConstructorVisitor implements PropertyVisitor<Void, StringBuilder> {

    // Appends "<name> <type>[ <extra column definition>]" to sql
    private Void appendColumnDefinition(String type, Property<?> property, StringBuilder sql) {
        sql.append(property.getName()).append(" ").append(type);
        if (!TextUtils.isEmpty(property.getColumnDefinition())) {
            sql.append(" ").append(property.getColumnDefinition());
        }
        return null;
    }

    @Override
    public Void visitDouble(Property<Double> property, StringBuilder sql) {
        return appendColumnDefinition("REAL", property, sql);
    }

    @Override
    public Void visitInteger(Property<Integer> property, StringBuilder sql) {
        return appendColumnDefinition("INTEGER", property, sql);
    }

    @Override
    public Void visitLong(Property<Long> property, StringBuilder sql) {
        return appendColumnDefinition("INTEGER", property, sql);
    }

    @Override
    public Void visitString(Property<String> property, StringBuilder sql) {
        return appendColumnDefinition("TEXT", property, sql);
    }

    @Override
    public Void visitBoolean(Property<Boolean> property, StringBuilder sql) {
        // SQLite has no boolean type; booleans are stored as INTEGER
        return appendColumnDefinition("INTEGER", property, sql);
    }

    @Override
    public Void visitBlob(Property<byte[]> property, StringBuilder sql) {
        return appendColumnDefinition("BLOB", property, sql);
    }
}

// Internal control-flow exception used to signal that the database should be
// recreated during a migration; never propagated to clients
private static class RecreateDuringMigrationException extends RuntimeException {

    /* suppress compiler warning */
    private static final long serialVersionUID = 480910684116077495L;
}

/**
 * Exception thrown when an upgrade or downgrade fails for any reason.
Clients that want to provide more * information about why an upgrade or downgrade failed can subclass this class and throw it intentionally in * {@link #onUpgrade(SQLiteDatabaseWrapper, int, int) onUpgrade()} or * {@link #onDowngrade(SQLiteDatabaseWrapper, int, int) onDowngrade()}, and it will be forwarded to * {@link #onMigrationFailed(MigrationFailedException) onMigrationFailed()}. */ public static class MigrationFailedException extends RuntimeException { /* suppress compiler warning */ private static final long serialVersionUID = 2949995666882182744L; public final String dbName; public final int oldVersion; public final int newVersion; public MigrationFailedException(String dbName, int oldVersion, int newVersion) { this(dbName, oldVersion, newVersion, null); } public MigrationFailedException(String dbName, int oldVersion, int newVersion, Throwable throwable) { super("Failed to migrate db " + dbName + " from version " + oldVersion + "to " + newVersion, throwable); this.dbName = dbName; this.oldVersion = oldVersion; this.newVersion = newVersion; } } /** * Query the database * * @param modelClass the type to parameterize the cursor by. If the query does not contain a FROM clause, the table * or view corresponding to this model class will be used. 
* @param query the query to execute
 * @return a {@link SquidCursor} containing the query results
 */
public <TYPE extends AbstractModel> SquidCursor<TYPE> query(Class<TYPE> modelClass, Query query) {
    // If the query has no FROM clause, infer the table from the model class
    if (!query.hasTable() && modelClass != null) {
        SqlTable<?> table = getSqlTable(modelClass);
        if (table == null) {
            throw new IllegalArgumentException("Query has no FROM clause and model class "
                    + modelClass.getSimpleName() + " has no associated table");
        }
        query = query.from(table); // If argument was frozen, we may get a new object
    }
    CompiledStatement compiled = query.compile(getSqliteVersion());
    if (compiled.needsValidation) {
        String validateSql = query.sqlForValidation(getSqliteVersion());
        compileStatement(validateSql); // throws if the statement fails to compile
    }
    Cursor cursor = rawQuery(compiled.sql, compiled.sqlArgs);
    return new SquidCursor<TYPE>(cursor, query.getFields());
}

/**
 * Fetch the specified model object with the given row ID
 *
 * @param modelClass the model class to fetch
 * @param id the row ID of the item
 * @param properties the {@link Property properties} to read
 * @return an instance of the model with the given ID, or null if no record was found
 */
public <TYPE extends TableModel> TYPE fetch(Class<TYPE> modelClass, long id, Property<?>... properties) {
    SquidCursor<TYPE> cursor = fetchItemById(modelClass, id, properties);
    return returnFetchResult(modelClass, cursor);
}

/**
 * Fetch the first model matching the given {@link Criterion}. This is useful if you expect uniqueness of models
 * with respect to the given criterion.
 *
 * @param modelClass the model class to fetch
 * @param properties the {@link Property properties} to read
 * @param criterion the criterion to match
 * @return an instance of the model matching the given criterion, or null if no record was found
 */
public <TYPE extends AbstractModel> TYPE fetchByCriterion(Class<TYPE> modelClass, Criterion criterion,
        Property<?>... properties) {
    SquidCursor<TYPE> cursor = fetchFirstItem(modelClass, criterion, properties);
    return returnFetchResult(modelClass, cursor);
}

/**
 * Fetch the first model matching the query. This is useful if you expect uniqueness of models with respect to the
 * given query.
 *
 * @param modelClass the model class to fetch
 * @param query the query to execute
 * @return an instance of the model returned by the given query, or null if no record was found
 */
public <TYPE extends AbstractModel> TYPE fetchByQuery(Class<TYPE> modelClass, Query query) {
    SquidCursor<TYPE> cursor = fetchFirstItem(modelClass, query);
    return returnFetchResult(modelClass, cursor);
}

// Materializes the first row of the cursor into a model instance (or null if
// the cursor is empty); always closes the cursor
protected <TYPE extends AbstractModel> TYPE returnFetchResult(Class<TYPE> modelClass, SquidCursor<TYPE> cursor) {
    try {
        if (cursor.getCount() == 0) {
            return null;
        }
        // Reflectively instantiate the model, then populate it from the cursor
        TYPE toReturn = modelClass.newInstance();
        toReturn.readPropertiesFromCursor(cursor);
        return toReturn;
    } catch (SecurityException e) {
        throw new RuntimeException(e);
    } catch (IllegalArgumentException e) {
        throw new RuntimeException(e);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
    } finally {
        cursor.close();
    }
}

/**
 * Delete the row with the given row ID
 *
 * @param modelClass the model class corresponding to the table to delete from
 * @param id the row ID of the record
 * @return true if delete was successful
 */
public boolean delete(Class<? extends TableModel> modelClass, long id) {
    Table table = getTable(modelClass);
    int rowsUpdated = deleteInternal(Delete.from(table).where(table.getIdProperty().eq(id)));
    if (rowsUpdated > 0) {
        notifyForTable(DataChangedNotifier.DBOperation.DELETE, null, table, id);
    }
    return rowsUpdated > 0;
}

/**
 * Delete all rows matching the given {@link Criterion}
 *
 * @param modelClass model class for the table to delete from
 * @param where the Criterion to match. Note: passing null will delete all rows!
* @return the number of deleted rows
 */
public int deleteWhere(Class<? extends TableModel> modelClass, Criterion where) {
    Table table = getTable(modelClass);
    Delete delete = Delete.from(table);
    if (where != null) {
        delete.where(where);
    }
    int rowsUpdated = deleteInternal(delete);
    if (rowsUpdated > 0) {
        // NO_ID: this notification covers potentially many rows, not one
        notifyForTable(DataChangedNotifier.DBOperation.DELETE, null, table, TableModel.NO_ID);
    }
    return rowsUpdated;
}

/**
 * Delete all rows for table corresponding to the given model class
 *
 * @param modelClass model class for the table to delete from
 * @return the number of deleted rows
 */
public int deleteAll(Class<? extends TableModel> modelClass) {
    return deleteWhere(modelClass, null);
}

/**
 * Executes a {@link Delete} statement.
 * <p>
 * Note: Generally speaking, you should prefer to use {@link #delete(Class, long) delete} or
 * {@link #deleteWhere(Class, Criterion) deleteWhere} for deleting database rows. This is provided as a convenience
 * in case there exists a non-ORM case where a more traditional SQL delete statement is required.
 *
 * @param delete the statement to execute
 * @return the number of rows deleted on success, -1 on failure
 */
public int delete(Delete delete) {
    int result = deleteInternal(delete);
    if (result > 0) {
        notifyForTable(DataChangedNotifier.DBOperation.DELETE, null, delete.getTable(), TableModel.NO_ID);
    }
    return result;
}

/**
 * Update all rows matching the given {@link Criterion}, setting values based on the provided template model. For
 * example, this code would change all persons' names from "joe" to "bob":
 *
 * <pre>
 * Person template = new Person();
 * template.setName(&quot;bob&quot;);
 * update(Person.NAME.eq(&quot;joe&quot;), template);
 * </pre>
 *
 * @param where the criterion to match. Note: passing null will update all rows!
 * @param template a model containing new values for the properties (columns) that should be updated. The template
 * class implicitly defines the table to be updated.
 * @return the number of updated rows
 */
public int update(Criterion where, TableModel template) {
    return updateWithOnConflict(where, template, null);
}

/**
 * Update all rows in the table corresponding to the class of the given template
 *
 * @param template a model containing new values for the properties (columns) that should be updated. The template
 * class implicitly defines the table to be updated.
 * @return the number of updated rows
 */
public int updateAll(TableModel template) {
    return update(null, template);
}

/**
 * Update all rows matching the given {@link Criterion}, setting values based on the provided template model. Any
 * constraint violations will be resolved using the specified {@link TableStatement.ConflictAlgorithm}.
 *
 * @param where the criterion to match. Note: passing null will update all rows!
 * @param template a model containing new values for the properties (columns) that should be updated
 * @param conflictAlgorithm the conflict algorithm to use
 * @return the number of updated rows
 * @see #update(Criterion, TableModel)
 */
public int updateWithOnConflict(Criterion where, TableModel template,
        TableStatement.ConflictAlgorithm conflictAlgorithm) {
    Class<? extends TableModel> modelClass = template.getClass();
    Table table = getTable(modelClass);
    Update update = Update.table(table).fromTemplate(template);
    if (where != null) {
        update.where(where);
    }
    if (conflictAlgorithm != null) {
        update.onConflict(conflictAlgorithm);
    }
    int rowsUpdated = updateInternal(update);
    if (rowsUpdated > 0) {
        notifyForTable(DataChangedNotifier.DBOperation.UPDATE, template, table, TableModel.NO_ID);
    }
    return rowsUpdated;
}

/**
 * Update all rows in the table corresponding to the class of the given template
 *
 * @param template a model containing new values for the properties (columns) that should be updated. The template
 * class implicitly defines the table to be updated.
* @param conflictAlgorithm the conflict algorithm to use
 * @return the number of updated rows
 */
public int updateAllWithOnConflict(TableModel template, TableStatement.ConflictAlgorithm conflictAlgorithm) {
    return updateWithOnConflict(null, template, conflictAlgorithm);
}

/**
 * Executes an {@link Update} statement.
 * <p>
 * Note: Generally speaking, you should prefer to use {@link #update(Criterion, TableModel)}
 * or {@link #updateWithOnConflict(Criterion, TableModel, com.yahoo.squidb.sql.TableStatement.ConflictAlgorithm)}
 * for bulk database updates. This is provided as a convenience in case there exists a non-ORM case where a more
 * traditional SQL update statement is required for some reason.
 *
 * @param update statement to execute
 * @return the number of rows updated on success, -1 on failure
 */
public int update(Update update) {
    int result = updateInternal(update);
    if (result > 0) {
        notifyForTable(DataChangedNotifier.DBOperation.UPDATE, null, update.getTable(), TableModel.NO_ID);
    }
    return result;
}

/**
 * Save a model to the database. Creates a new row if the model does not have an ID, otherwise updates the row with
 * the corresponding row ID. If a new row is inserted, the model will have its ID set to the corresponding row ID.
 *
 * @param item the model to save
 * @return true if current the model data is stored in the database
 */
public boolean persist(TableModel item) {
    return persistWithOnConflict(item, null);
}

/**
 * Save a model to the database. Creates a new row if the model does not have an ID, otherwise updates the row with
 * the corresponding row ID. If a new row is inserted, the model will have its ID set to the corresponding row ID.
 * Any constraint violations will be resolved using the specified {@link TableStatement.ConflictAlgorithm}.
 *
 * @param item the model to save
 * @param conflictAlgorithm the conflict algorithm to use
 * @return true if current the model data is stored in the database
 * @see #persist(TableModel)
 */
public boolean persistWithOnConflict(TableModel item, TableStatement.ConflictAlgorithm conflictAlgorithm) {
    // No ID yet -> insert; has ID but nothing changed -> trivially done
    if (!item.isSaved()) {
        return insertRow(item, conflictAlgorithm);
    }
    if (!item.isModified()) {
        return true;
    }
    return updateRow(item, conflictAlgorithm);
}

/**
 * Save a model to the database. This method always inserts a new row and sets the ID of the model to the
 * corresponding row ID.
 *
 * @param item the model to save
 * @return true if current the model data is stored in the database
 */
public boolean createNew(TableModel item) {
    // Clearing the ID forces an insert even if the model was previously saved
    item.setId(TableModel.NO_ID);
    return insertRow(item, null);
}

/**
 * Save a model to the database. This method always updates an existing row with a row ID corresponding to the
 * model's ID. If the model doesn't have an ID, or the corresponding row no longer exists in the database, this
 * will return false.
 *
 * @param item the model to save
 * @return true if current the model data is stored in the database
 */
public boolean saveExisting(TableModel item) {
    return updateRow(item, null);
}

/**
 * Inserts a new row using the item's merged values into the DB.
 * <p>
 * Note: unlike {@link #createNew(TableModel)}, which will always create a new row even if an id is set on the
 * model, this method will blindly attempt to insert the primary key id value if it is provided. This may cause
 * conflicts, throw exceptions, etc. if the row id already exists, so be sure to check for such cases if you
 * expect they may happen.
* * @param item the model to insert
 * @return true if success, false otherwise
 */
protected final boolean insertRow(TableModel item) {
    return insertRow(item, null);
}

/**
 * Same as {@link #insertRow(TableModel)} with the ability to specify a ConflictAlgorithm for handling constraint
 * violations
 *
 * @param item the model to insert
 * @param conflictAlgorithm the conflict algorithm to use
 * @return true if success, false otherwise
 */
protected final boolean insertRow(TableModel item, TableStatement.ConflictAlgorithm conflictAlgorithm) {
    Class<? extends TableModel> modelClass = item.getClass();
    Table table = getTable(modelClass);
    ValuesStorage mergedValues = item.getMergedValues();
    // Nothing to insert
    if (mergedValues.size() == 0) {
        return false;
    }
    Insert insert = Insert.into(table).fromValues(mergedValues);
    if (conflictAlgorithm != null) {
        insert.onConflict(conflictAlgorithm);
    }
    long newRow = insertInternal(insert);
    boolean result = newRow > 0;
    if (result) {
        notifyForTable(DataChangedNotifier.DBOperation.INSERT, item, table, newRow);
        // Reflect the assigned row ID back into the model and clear its dirty state
        item.setId(newRow);
        item.markSaved();
    }
    return result;
}

/**
 * Update an existing row in the database using the item's setValues. The item must have the primary key id set;
 * if it does not, the method will return false.
 *
 * @param item the model to save
 * @return true if success, false otherwise
 */
protected final boolean updateRow(TableModel item) {
    return updateRow(item, null);
}

/**
 * Same as {@link #updateRow(TableModel)} with the ability to specify a ConflictAlgorithm for handling constraint
 * violations
 *
 * @param item the model to save
 * @param conflictAlgorithm the conflict algorithm to use
 * @return true if success, false otherwise
 */
protected final boolean updateRow(TableModel item, TableStatement.ConflictAlgorithm conflictAlgorithm) {
    if (!item.isModified()) { // nothing changed
        return true;
    }
    if (!item.isSaved()) {
        return false;
    }

    Class<? extends TableModel> modelClass = item.getClass();
    Table table = getTable(modelClass);
    Update update = Update.table(table).fromTemplate(item).where(table.getIdProperty().eq(item.getId()));
    if (conflictAlgorithm != null) {
        update.onConflict(conflictAlgorithm);
    }
    boolean result = updateInternal(update) > 0;
    if (result) {
        notifyForTable(DataChangedNotifier.DBOperation.UPDATE, item, table, item.getId());
        item.markSaved();
    }
    return result;
}

/**
 * Executes an {@link Insert} statement.
 * <p>
 * Note: Generally speaking, you should prefer to use {@link #persist(TableModel) persist} or
 * {@link #createNew(TableModel) createNew} for inserting database rows. This is provided as a convenience in case
 * there exists a non-ORM case where a more traditional SQL insert statement is required.
 *
 * @param insert the statement to execute
 * @return the row id of the last row inserted on success, 0 on failure
 */
public long insert(Insert insert) {
    long result = insertInternal(insert);
    if (result > TableModel.NO_ID) {
        int numInserted = insert.getNumRows();
        // Only report a specific row ID when exactly one row was inserted
        notifyForTable(DataChangedNotifier.DBOperation.INSERT, null, insert.getTable(),
                numInserted == 1 ? result : TableModel.NO_ID);
    }
    return result;
}

// Fetches the single row with the given ID as a one-row cursor
protected <TYPE extends TableModel> SquidCursor<TYPE> fetchItemById(Class<TYPE> modelClass, long id,
        Property<?>... properties) {
    Table table = getTable(modelClass);
    return fetchFirstItem(modelClass, table.getIdProperty().eq(id), properties);
}

protected <TYPE extends AbstractModel> SquidCursor<TYPE> fetchFirstItem(Class<TYPE> modelClass,
        Criterion criterion, Property<?>...
properties) {
    return fetchFirstItem(modelClass, Query.select(properties).where(criterion));
}

// Runs the query limited to a single row and positions the cursor on it; the
// caller's query object is restored to its original limit/table afterwards
protected <TYPE extends AbstractModel> SquidCursor<TYPE> fetchFirstItem(Class<TYPE> modelClass, Query query) {
    int beforeLimit = query.getLimit();
    SqlTable<?> beforeTable = query.getTable();
    query = query.limit(1); // If argument was frozen, we may get a new object
    SquidCursor<TYPE> cursor = query(modelClass, query);
    query.limit(beforeLimit); // Reset for user
    query.from(beforeTable); // Reset for user
    cursor.moveToFirst();
    return cursor;
}

/**
 * Count the number of rows matching a given {@link Criterion}. Use null to count all rows.
 *
 * @param modelClass the model class corresponding to the table
 * @param criterion the criterion to match
 * @return the number of rows matching the given criterion
 */
public int count(Class<? extends AbstractModel> modelClass, Criterion criterion) {
    // SELECT COUNT(*) expressed through the ORM's property mechanism
    Property.IntegerProperty countProperty = Property.IntegerProperty.countProperty();
    Query query = Query.select(countProperty);
    if (criterion != null) {
        query.where(criterion);
    }
    SquidCursor<?> cursor = query(modelClass, query);
    try {
        cursor.moveToFirst();
        return cursor.get(countProperty);
    } finally {
        cursor.close();
    }
}

/**
 * Count the number of rows in the given table.
 *
 * @param modelClass the model class corresponding to the table
 * @return the number of rows in the table
 */
public int countAll(Class<?
extends AbstractModel> modelClass) { return count(modelClass, null); } private final Object notifiersLock = new Object(); private boolean dataChangedNotificationsEnabled = true; private List<DataChangedNotifier<?>> globalNotifiers = new ArrayList<DataChangedNotifier<?>>(); private Map<SqlTable<?>, List<DataChangedNotifier<?>>> tableNotifiers = new HashMap<SqlTable<?>, List<DataChangedNotifier<?>>>(); // Using a ThreadLocal makes it easy to have one accumulator set per transaction, since // transactions are also associated with the thread they run on private ThreadLocal<Set<DataChangedNotifier<?>>> notifierAccumulator = new ThreadLocal<Set<DataChangedNotifier<?>>>() { protected Set<DataChangedNotifier<?>> initialValue() { return new HashSet<DataChangedNotifier<?>>(); } }; /** * Register a {@link DataChangedNotifier} to listen for database changes. The DataChangedNotifier object will be * notified whenever a table it is interested is modified, and can accumulate a set of notifications to send when * the current transaction or statement completes successfully. 
* * @param notifier the DataChangedNotifier to register */ public void registerDataChangedNotifier(DataChangedNotifier<?> notifier) { if (notifier == null) { return; } synchronized (notifiersLock) { Collection<SqlTable<?>> tables = notifier.whichTables(); if (tables == null || tables.isEmpty()) { globalNotifiers.add(notifier); } else { for (SqlTable<?> table : tables) { List<DataChangedNotifier<?>> notifiersForTable = tableNotifiers.get(table); if (notifiersForTable == null) { notifiersForTable = new ArrayList<DataChangedNotifier<?>>(); tableNotifiers.put(table, notifiersForTable); } notifiersForTable.add(notifier); } } } } /** * Unregister a {@link DataChangedNotifier} previously registered by * {@link #registerDataChangedNotifier(DataChangedNotifier)} * * @param notifier the DataChangedNotifier to unregister */ public void unregisterDataChangedNotifier(DataChangedNotifier<?> notifier) { if (notifier == null) { return; } synchronized (notifiersLock) { Collection<SqlTable<?>> tables = notifier.whichTables(); if (tables == null || tables.isEmpty()) { globalNotifiers.remove(notifier); } else { for (SqlTable<?> table : tables) { List<DataChangedNotifier<?>> notifiersForTable = tableNotifiers.get(table); if (notifiersForTable != null) { notifiersForTable.remove(notifier); } } } } } /** * Unregister all {@link DataChangedNotifier}s previously registered by * {@link #registerDataChangedNotifier(DataChangedNotifier)} */ public void unregisterAllDataChangedNotifiers() { synchronized (notifiersLock) { globalNotifiers.clear(); tableNotifiers.clear(); } } /** * Set a flag to enable or disable data change notifications. No {@link DataChangedNotifier}s will be notified * (or accumulated during transactions) while the flag is set to false. 
*/ public void setDataChangedNotificationsEnabled(boolean enabled) { dataChangedNotificationsEnabled = enabled; } private void notifyForTable(DataChangedNotifier.DBOperation op, AbstractModel modelValues, SqlTable<?> table, long rowId) { if (!dataChangedNotificationsEnabled) { return; } synchronized (notifiersLock) { onDataChanged(globalNotifiers, op, modelValues, table, rowId); onDataChanged(tableNotifiers.get(table), op, modelValues, table, rowId); } if (!inTransaction()) { flushAccumulatedNotifications(true); } } private void onDataChanged(List<DataChangedNotifier<?>> notifiers, DataChangedNotifier.DBOperation op, AbstractModel modelValues, SqlTable<?> table, long rowId) { if (notifiers != null) { for (DataChangedNotifier<?> notifier : notifiers) { if (notifier.onDataChanged(table, this, op, modelValues, rowId)) { notifierAccumulator.get().add(notifier); } } } } private void flushAccumulatedNotifications(boolean transactionSuccess) { Set<DataChangedNotifier<?>> accumulatedNotifiers = notifierAccumulator.get(); if (!accumulatedNotifiers.isEmpty()) { for (DataChangedNotifier<?> notifier : accumulatedNotifiers) { notifier.flushAccumulatedNotifications(this, transactionSuccess && dataChangedNotificationsEnabled); } accumulatedNotifiers.clear(); } } }
import java.util.Arrays;
import java.util.Vector;

/**
 * Describes one logical file known to the catalog: its logical file name,
 * its size, and the names of the storage elements (SEs) that hold a replica.
 */
public class LFCFile {
    // TODO Should we add a type to distinguish REGULAR, INPUT, and MERGE files?

    private String logicalFileName;          // logical file name (LFN)
    private long logicalFileSize;            // file size
    private Vector<String> SENames;          // storage elements holding a replica

    /** Creates a file entry with a single replica location. */
    public LFCFile(String logicalFileName, long logicalFileSize, String SEName) {
        super();
        this.logicalFileName = logicalFileName;
        this.logicalFileSize = logicalFileSize;
        this.SENames = new Vector<String>();
        this.SENames.add(SEName);
    }

    /** Creates a file entry replicated on several storage elements. */
    public LFCFile(String logicalFileName, long logicalFileSize, String[] seNames) {
        super();
        this.logicalFileName = logicalFileName;
        this.logicalFileSize = logicalFileSize;
        this.SENames = new Vector<String>(Arrays.asList(seNames));
    }

    public String getLogicalFileName() {
        return logicalFileName;
    }

    public void setLogicalFileName(String logicalFileName) {
        this.logicalFileName = logicalFileName;
    }

    public long getLogicalFileSize() {
        return logicalFileSize;
    }

    public void setLogicalFileSize(long logicalFileSize) {
        this.logicalFileSize = logicalFileSize;
    }

    /**
     * Returns one storage element holding this file.
     * TODO return the first SE for now. Might be interesting to implement
     * some load balancing strategy
     */
    public String getSEName() {
        return SENames.get(0);
    }

    /** Records an additional storage element holding a replica (despite the "set" name, this appends). */
    public void setSEName(String SEName) {
        SENames.add(SEName);
    }

    /** Returns the live list of storage elements (not a copy). */
    public Vector<String> getSEs() {
        return SENames;
    }

    @Override
    public String toString() {
        return String.format("File '%s' of size %d stored on %s",
                logicalFileName, logicalFileSize, SENames);
    }
}
package jade.domain.FIPAAgentManagement; /** * * @see jade.domain.FIPAAgentManagement.FIPAAgentManagementOntology * @author Fabio Bellifemine - CSELT S.p.A. * @version $Date$ $Revision$ */ public class InternalError extends FailureException { public InternalError(String msg) { super("(internal-error "+msg+")"); s1=msg; } public InternalError() { this("unknown-error"); } /** @serial */ String s1; public void set_0(String s){ s1=s; setMessage("(internal-error "+s1+")");} public String get_0() {return s1;} }
package uk.co.johnjtaylor;

import java.util.ArrayList;

import uk.co.johnjtaylor.enums.DataStructure;
import uk.co.johnjtaylor.structures.DataStructures;
import uk.co.johnjtaylor.structures.LinkedListNode;

/**
 * Benchmark driver for sorting algorithms: repeatedly runs a configured
 * {@code SortTest} over doubling input sizes, timing each run, and offers a
 * validity check on sorted output.
 *
 * @param <T> element type of generated data; must be comparable to itself
 */
public class Tester<T extends Comparable<T>> {
    // Stopwatch wrapped around each individual sort call.
    private Time timer;
    // Produces the arrays / linked lists that are fed to the sort under test.
    private DataGenerator<T> gen;
    @SuppressWarnings("unused")
    private ScaleParser parser;  // constructed but never read in this class

    public Tester() {
        timer = new Time();
        gen = new DataGenerator<T>();
        parser = new ScaleParser();
    }

    /**
     * Runs and times a sorting algorithm, doubling the input size on each
     * iteration and recording one {@code SortResult} per run.
     *
     * @param test A SortTest object to be tested as specified
     * @return all results accumulated on the test object, including any from
     *         earlier invocations
     */
    public ArrayList<SortResult> run(SortTest test) {
        //TODO throw events to be handled for data printouts etc @ start; sort finish; overall end etc
        switch(test.getDataStructure()) {
        case ARRAY:
            T[] array = gen.makeArray(test.getInitialSize(), test.getDataType());
            for(int i = 0; i < test.getIterations(); i++) {
                // Time only the sort itself, not data generation.
                timer.start();
                Object res = test.getSort().sort(array);  // NOTE(review): return value unused — presumably sorts in place; confirm
                timer.stop();
                test.addResult(new SortResult(timer.getTime(), "ms", array.length,
                        test.getDataType(), test.getDataStructure(), true));
                System.out.println("Result stored");
                // Regenerate at double the size for the next iteration; the
                // previous (sorted) array is discarded so each run sorts fresh data.
                array = gen.makeArray(array.length * 2, test.getDataType());
            }
            break;
        case LINKEDLIST:
            LinkedListNode<T> head = gen.makeLinkedList(test.getInitialSize(), test.getDataType());
            // Track length manually — the list node itself does not expose it here.
            int currentLength = test.getInitialSize();
            for(int i = 0; i < test.getIterations(); i++) {
                timer.start();
                Object result = test.getSort().sort(head);  // NOTE(review): return value unused — confirm in-place semantics
                timer.stop();
                test.addResult(new SortResult(timer.getTime(), "ms", currentLength,
                        test.getDataType(), test.getDataStructure(), true));
                currentLength *= 2;
                head = gen.makeLinkedList(currentLength, test.getDataType());
            }
            break;
        default:
            // Other structures (e.g. trees) are not benchmarked yet.
            System.out.println("Implementation pending for " + test.getDataStructure().toString());
        }
        return test.getAllResults();
    }

    /**
     * Validates that a sorting algorithm produced correct results by scanning
     * for any adjacent out-of-order pair. Strings are compared
     * case-insensitively; everything else via {@code compareTo}.
     *
     * @param values Any array/custom data structure that implements toArray()
     * @return true if the sort is valid; false otherwise
     * @throws UnsupportedOperationException when the values given couldn't be
     *         converted to an array
     */
    public boolean validateSort(Object values) {
        // Unwrap custom structures into a plain array first.
        if(values instanceof DataStructures<?>) {
            @SuppressWarnings("unchecked") // Checked as best as possible, can't do a <T> check though so
            DataStructures<T> valueStructure = (DataStructures<T>) values;
            values = valueStructure.toArray(values);
        }
        if(values instanceof String[]) {
            String[] valuesToCheck = (String[]) values;
            for(int i=1; i < valuesToCheck.length; i++) {
                if(valuesToCheck[i-1].compareToIgnoreCase(valuesToCheck[i]) > 0) {
                    return false;
                }
            }
            return true;
        } else if(values instanceof Comparable<?>[]) {
            // NOTE(review): unchecked cast — erasure means this succeeds for any
            // Comparable[] regardless of T; a heterogeneous array would throw
            // ClassCastException inside compareTo below.
            T[] valuesToCheck = (T[]) values;
            for(int i=1; i < valuesToCheck.length; i++) {
                if(valuesToCheck[i-1].compareTo(valuesToCheck[i]) > 0) {
                    return false;
                }
            }
            return true;
        } else {
            throw new UnsupportedOperationException("The given data couldn't be converted to an array for processing");
        }
    }
}
// $Id: PuzzleContext.java,v 1.3 2003/11/26 23:16:13 mdb Exp $ package com.threerings.puzzle.util; import com.threerings.util.KeyDispatcher; import com.threerings.util.KeyboardManager; import com.threerings.util.MessageManager; import com.threerings.media.FrameManager; import com.threerings.media.sound.SoundManager; import com.threerings.crowd.chat.client.ChatDirector; import com.threerings.parlor.util.ParlorContext; /** * Provides access to entities needed by the puzzle services. */ public interface PuzzleContext extends ParlorContext { /** * Returns the username of the local user. */ public String getUsername (); /** * Returns a reference to the message manager used by the client. */ public MessageManager getMessageManager (); /** * Provides access to the frame manager. */ public FrameManager getFrameManager (); /** * Provides access to the keyboard manager. */ public KeyboardManager getKeyboardManager (); /** * Provides access to the key dispatcher. */ public KeyDispatcher getKeyDispatcher (); /** * Provides access to the sound manager. */ public SoundManager getSoundManager (); /** * Provides access to the chat director. */ public ChatDirector getChatDirector (); }
package org.jsmpp.examples;

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.log4j.PropertyConfigurator;
import org.jsmpp.InvalidResponseException;
import org.jsmpp.PDUStringException;
import org.jsmpp.bean.Alphabet;
import org.jsmpp.bean.BindType;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.GeneralDataCoding;
import org.jsmpp.bean.MessageClass;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.session.SMPPSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This stress client is an example of submit bulk messages asynchronously.
 *
 * <table border="1">
 * <tr><td><strong>Name</strong></td><td><strong>Description</strong></td><td><strong>Default value</strong></td></tr>
 * <tr><td>jsmpp.client.host</td><td>Server host address</td><td>localhost</td></tr>
 * <tr><td>jsmpp.client.port</td><td>Server port</td><td>8056</td></tr>
 * <tr><td>jsmpp.client.systemId</td><td>System Identifier</td><td>j</td></tr>
 * <tr><td>jsmpp.client.password</td><td>Password</td><td>jpwd</td></tr>
 * <tr><td>jsmpp.client.sourceAddr</td><td>Submit Source Address</td><td>1616</td></tr>
 * <tr><td>jsmpp.client.destinationAddr</td><td>Submit Destination Address</td><td>62161616</td></tr>
 * <tr><td>jsmpp.client.transactionTimer</td><td>Transaction timer</td><td>2000</td></tr>
 * <tr><td>jsmpp.client.bulkSize</td><td>Amount of bulk messages</td><td>100000</td></tr>
 * <tr><td>jsmpp.client.procDegree</td><td>Max parallel processor for PDU reading</td><td>3</td></tr>
 * <tr><td>jsmpp.client.maxOutstanding</td><td>Maximum outstanding messages</td><td>10</td></tr>
 * <tr><td>jsmpp.client.log4jPath</td><td>Log4j configuration</td><td>conf/client-log4j.properties</td></tr>
 * </table>
 *
 * @author uudashr
 *
 */
public class StressClient implements Runnable {
    private static final String DEFAULT_PASSWORD = "jpwd";
    private static final String DEFAULT_SYSID = "j";
    private static final String DEFAULT_DESTADDR = "62161616";
    private static final String DEFAULT_SOURCEADDR = "1616";
    private static final Logger logger = LoggerFactory.getLogger(StressClient.class);
    private static final String DEFAULT_LOG4J_PATH = "stress/client-log4j.properties";
    private static final String DEFAULT_HOST = "localhost";
    private static final Integer DEFAULT_PORT = 8056;
    private static final Long DEFAULT_TRANSACTIONTIMER = 2000L;
    private static final Integer DEFAULT_BULK_SIZE = 100000;
    private static final Integer DEFAULT_PROCESSOR_DEGREE = 3;
    private static final Integer DEFAULT_MAX_OUTSTANDING = 10;

    // Per-second counters, reset every tick by TrafficWatcherThread.
    private AtomicInteger requestCounter = new AtomicInteger();
    private AtomicInteger totalRequestCounter = new AtomicInteger();
    private AtomicInteger responseCounter = new AtomicInteger();
    private AtomicInteger totalResponseCounter = new AtomicInteger();
    // Largest request->response round-trip observed in the current second (ms).
    private AtomicLong maxDelay = new AtomicLong();
    // Bounded pool: its size caps the number of outstanding submits.
    private ExecutorService execService;
    private String host;
    private int port;
    private int bulkSize;
    // NOTE(review): one SMPPSession shared by all pool threads — assumes
    // submitShortMessage is safe for concurrent use; confirm against jsmpp docs.
    private SMPPSession smppSession = new SMPPSession();
    // Signals both the main wait-loop and the watcher thread to stop.
    private AtomicBoolean exit = new AtomicBoolean();
    private int id;
    private String systemId;
    private String password;
    private String sourceAddr;
    private String destinationAddr;

    /**
     * @param id                 client identifier embedded in each message body
     * @param host               SMSC host
     * @param port               SMSC port
     * @param bulkSize           total number of messages to submit
     * @param systemId           bind system id
     * @param password           bind password
     * @param sourceAddr         submit source address
     * @param destinationAddr    submit destination address
     * @param transactionTimer   jsmpp transaction timer (ms)
     * @param pduProcessorDegree parallelism for PDU reading
     * @param maxOutstanding     max concurrent in-flight submits (pool size)
     */
    public StressClient(int id, String host, int port, int bulkSize,
            String systemId, String password, String sourceAddr,
            String destinationAddr, long transactionTimer,
            int pduProcessorDegree, int maxOutstanding) {
        this.id = id;
        this.host = host;
        this.port = port;
        this.bulkSize = bulkSize;
        this.systemId = systemId;
        this.password = password;
        this.sourceAddr = sourceAddr;
        this.destinationAddr = destinationAddr;
        smppSession.setPduProcessorDegree(pduProcessorDegree);
        smppSession.setTransactionTimer(transactionTimer);
        execService = Executors.newFixedThreadPool(maxOutstanding);
    }

    // Stops accepting new tasks and flags all loops to terminate.
    private void shutdown() {
        execService.shutdown();
        exit.set(true);
    }

    /**
     * Binds to the SMSC, fires off bulkSize submit tasks through the bounded
     * pool, then blocks until shutdown() is triggered (by the watcher or by a
     * submit failure) before unbinding.
     */
    public void run() {
        try {
            smppSession.connectAndBind(host, port, BindType.BIND_TRX,
                    systemId, password, "cln", TypeOfNumber.UNKNOWN,
                    NumberingPlanIndicator.UNKNOWN, null);
            logger.info("Bound to " + host + ":" + port);
        } catch (IOException e) {
            logger.error("Failed initialize connection or bind", e);
            return;
        }

        new TrafficWatcherThread().start();

        logger.info("Starting send " + bulkSize + " bulk message...");
        for (int i = 0; i < bulkSize && !exit.get(); i++) {
            execService.execute(newSendTask("Hello " + id + " idx=" + i));
        }

        // Park until the watcher (or an error path) sets the exit flag.
        while (!exit.get()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt swallowed; consider
                // Thread.currentThread().interrupt() to preserve the flag.
            }
        }
        logger.info("Done");
        smppSession.unbindAndClose();
    }

    /**
     * Builds a task that synchronously submits one short message and records
     * request/response counts and the observed round-trip delay. Any failure
     * logs the error and triggers a full shutdown.
     */
    private Runnable newSendTask(final String message) {
        return new Runnable() {
            public void run() {
                try {
                    requestCounter.incrementAndGet();
                    long startTime = System.currentTimeMillis();
                    smppSession.submitShortMessage(null,
                            TypeOfNumber.UNKNOWN, NumberingPlanIndicator.UNKNOWN, sourceAddr,
                            TypeOfNumber.UNKNOWN, NumberingPlanIndicator.UNKNOWN, destinationAddr,
                            new ESMClass(), (byte)0, (byte)0,
                            null, null, new RegisteredDelivery(0),
                            (byte)0,
                            new GeneralDataCoding(true, true, MessageClass.CLASS1, Alphabet.ALPHA_DEFAULT),
                            (byte)0, message.getBytes());
                    long delay = System.currentTimeMillis() - startTime;
                    responseCounter.incrementAndGet();
                    // Racy check-then-set is acceptable here: value is advisory only.
                    if (maxDelay.get() < delay) {
                        maxDelay.set(delay);
                    }
                } catch (PDUStringException e) {
                    logger.error("Failed submit short message '" + message + "'", e);
                    shutdown();
                } catch (ResponseTimeoutException e) {
                    logger.error("Failed submit short message '" + message + "'", e);
                    shutdown();
                } catch (InvalidResponseException e) {
                    logger.error("Failed submit short message '" + message + "'", e);
                    shutdown();
                } catch (NegativeResponseException e) {
                    logger.error("Failed submit short message '" + message + "'", e);
                    shutdown();
                } catch (IOException e) {
                    logger.error("Failed submit short message '" + message + "'", e);
                    shutdown();
                }
            }
        };
    }

    /**
     * Once per second: logs throughput, folds the per-second counters into the
     * running totals, and shuts the client down once all responses arrived.
     */
    private class TrafficWatcherThread extends Thread {
        @Override
        public void run() {
            logger.info("Starting traffic watcher...");
            while (!exit.get()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt swallowed (see run() above).
                }
                int requestPerSecond = requestCounter.getAndSet(0);
                int responsePerSecond = responseCounter.getAndSet(0);
                long maxDelayPerSecond = maxDelay.getAndSet(0);
                totalRequestCounter.addAndGet(requestPerSecond);
                int total = totalResponseCounter.addAndGet(responsePerSecond);
                logger.info("Request/Response per second : "
                        + requestPerSecond + "/" + responsePerSecond
                        + " of " + total + " maxDelay=" + maxDelayPerSecond);
                // All expected responses received -> done.
                if (total == bulkSize) {
                    shutdown();
                }
            }
        }
    }

    /**
     * Reads configuration from system properties (falling back to the defaults
     * documented on the class), configures log4j, and runs one client inline.
     */
    public static void main(String[] args) {
        String host = System.getProperty("jsmpp.client.host", DEFAULT_HOST);
        String systemId = System.getProperty("jsmpp.client.systemId", DEFAULT_SYSID);
        String password = System.getProperty("jsmpp.client.password", DEFAULT_PASSWORD);
        String sourceAddr = System.getProperty("jsmpp.client.sourceAddr", DEFAULT_SOURCEADDR);
        String destinationAddr = System.getProperty("jsmpp.client.destinationAddr", DEFAULT_DESTADDR);

        int port;
        try {
            port = Integer.parseInt(System.getProperty("jsmpp.client.port", DEFAULT_PORT.toString()));
        } catch (NumberFormatException e) {
            port = DEFAULT_PORT;
        }

        long transactionTimer;
        try {
            transactionTimer = Integer.parseInt(System.getProperty("jsmpp.client.transactionTimer", DEFAULT_TRANSACTIONTIMER.toString()));
        } catch (NumberFormatException e) {
            transactionTimer = DEFAULT_TRANSACTIONTIMER;
        }

        int bulkSize;
        try {
            bulkSize = Integer.parseInt(System.getProperty("jsmpp.client.bulkSize", DEFAULT_BULK_SIZE.toString()));
        } catch (NumberFormatException e) {
            bulkSize = DEFAULT_BULK_SIZE;
        }

        int processorDegree;
        try {
            processorDegree = Integer.parseInt(System.getProperty("jsmpp.client.procDegree", DEFAULT_PROCESSOR_DEGREE.toString()));
        } catch (NumberFormatException e) {
            processorDegree = DEFAULT_PROCESSOR_DEGREE;
        }

        int maxOutstanding;
        try {
            maxOutstanding = Integer.parseInt(System.getProperty("jsmpp.client.maxOutstanding", DEFAULT_MAX_OUTSTANDING.toString()));
        } catch (NumberFormatException e) {
            maxOutstanding = DEFAULT_MAX_OUTSTANDING;
        }

        String log4jPath = System.getProperty("jsmpp.client.log4jPath", DEFAULT_LOG4J_PATH);
        PropertyConfigurator.configure(log4jPath);

        logger.info("Target server {}:{}", host, port);
        logger.info("System ID: {}", systemId);
        logger.info("Password: {}", password);
        logger.info("Source address: {}", sourceAddr);
        logger.info("Destination address: {}", destinationAddr);
        logger.info("Transaction timer: {}", transactionTimer);
        logger.info("Bulk size: {}", bulkSize);
        logger.info("Max outstanding: {}", maxOutstanding);
        logger.info("Processor degree: {}", processorDegree);

        StressClient stressClient = new StressClient(0, host, port, bulkSize,
                systemId, password, sourceAddr, destinationAddr,
                transactionTimer, processorDegree, maxOutstanding);
        stressClient.run();
    }
}
//This library is free software; you can redistribute it and/or //modify it under the terms of the GNU Lesser General Public //This library is distributed in the hope that it will be useful, //MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //You should have received a copy of the GNU Lesser General Public //Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. package opennlp.tools.coref.mention; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import opennlp.tools.lang.english.NameFinder; import opennlp.tools.parser.Parse; import opennlp.tools.parser.chunking.Parser; import opennlp.tools.util.Span; /** * This class is a wrapper for {@link opennlp.tools.parser.Parse} mapping it to the API specified in {@link opennlp.tools.coref.mention.Parse}. * This allows coreference to be done on the output of the parser. */ public class DefaultParse extends AbstractParse { private Parse parse; private int sentenceNumber; private static Set entitySet = new HashSet(Arrays.asList(NameFinder.NAME_TYPES)); /** * Initializes the current instance. 
* * @param parse * @param sentenceNumber */ public DefaultParse(Parse parse, int sentenceNumber) { this.parse = parse; this.sentenceNumber = sentenceNumber; } public int getSentenceNumber() { return sentenceNumber; } public List getNamedEntities() { List names = new ArrayList(); List kids = new LinkedList(Arrays.asList(parse.getChildren())); while (kids.size() > 0) { Parse p = (Parse) kids.remove(0); if (entitySet.contains(p.getType())) { names.add(p); } else { kids.addAll(Arrays.asList(p.getChildren())); } } return createParses((Parse[]) names.toArray(new Parse[names.size()])); } public List getChildren() { return createParses(parse.getChildren()); } public List getSyntacticChildren() { List kids = new ArrayList(Arrays.asList(parse.getChildren())); for (int ci = 0; ci < kids.size(); ci++) { Parse kid = (Parse) kids.get(ci); if (entitySet.contains(kid.getType())) { kids.remove(ci); kids.addAll(ci, Arrays.asList(kid.getChildren())); ci } } return createParses((Parse[]) kids.toArray(new Parse[kids.size()])); } public List getTokens() { List tokens = new ArrayList(); List kids = new LinkedList(Arrays.asList(parse.getChildren())); while (kids.size() > 0) { Parse p = (Parse) kids.remove(0); if (p.isPosTag()) { tokens.add(p); } else { kids.addAll(0,Arrays.asList(p.getChildren())); } } return createParses((Parse[]) tokens.toArray(new Parse[tokens.size()])); } public String getSyntacticType() { if (entitySet.contains(parse.getType())) { return null; } else { return parse.getType(); } } private List createParses(Parse[] parses) { List newParses = new ArrayList(parses.length); for (int pi=0,pn=parses.length;pi<pn;pi++) { newParses.add(new DefaultParse(parses[pi],sentenceNumber)); } return newParses; } public String getEntityType() { if (entitySet.contains(parse.getType())) { return parse.getType(); } else { return null; } } public boolean isParentNAC() { Parse parent = parse.getParent(); while(parent != null) { if (parent.getType().equals("NAC")) { return true; } parent = 
parent.getParent(); } return false; } public opennlp.tools.coref.mention.Parse getParent() { Parse parent = parse.getParent(); if (parent == null) { return null; } else { return new DefaultParse(parent,sentenceNumber); } } public boolean isNamedEntity() { if (entitySet.contains(parse.getType())) { return true; } else { return false; } } public boolean isNounPhrase() { return parse.getType().equals("NP"); } public boolean isSentence() { return parse.getType().equals(Parser.TOP_NODE); } public boolean isToken() { return parse.isPosTag(); } public int getEntityId() { return -1; } public Span getSpan() { return parse.getSpan(); } public int compareTo(Object o) { if ( o == this) { return 0; } DefaultParse p = (DefaultParse) o; if (sentenceNumber < p.sentenceNumber) { return -1; } else if (sentenceNumber > p.sentenceNumber) { return 1; } else { return parse.getSpan().compareTo(p.getSpan()); } } public String toString() { return parse.toString(); } public opennlp.tools.coref.mention.Parse getPreviousToken() { Parse parent = parse.getParent(); Parse node = parse; int index=-1; //find parent with previous children while(parent != null && index < 0) { index = parent.indexOf(node)-1; if (index < 0) { node = parent; parent = parent.getParent(); } } //find right-most child which is a token if (index < 0) { return null; } else { Parse p = parent.getChildren()[index]; while (!p.isPosTag()) { Parse[] kids = p.getChildren(); p = kids[kids.length-1]; } return new DefaultParse(p,sentenceNumber); } } public opennlp.tools.coref.mention.Parse getNextToken() { Parse parent = parse.getParent(); Parse node = parse; int index=-1; //find parent with subsequent children while(parent != null) { index = parent.indexOf(node)+1; if (index == parent.getChildCount()) { node = parent; parent = parent.getParent(); } else { break; } } //find left-most child which is a token if (parent == null) { return null; } else { Parse p = parent.getChildren()[index]; while (!p.isPosTag()) { p = 
p.getChildren()[0]; } return new DefaultParse(p,sentenceNumber); } } public boolean equals(Object o) { boolean result; if (o == this) { result = true; } else if (o instanceof DefaultParse) { result = parse == ((DefaultParse) o).parse; } else { result = false; } return result; } public int hashCode() { return parse.hashCode(); } /** * Retrives the {@link Parse}. * * @return the {@link Parse} */ public Parse getParse() { return parse; } }
package graph; import generaltools.ArrayTools; import graphtools.EdmondsKarpMaxFlowMinCut; import hashtools.TwoKeyHash; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.Vector; import mathtools.MapleTools; import mathtools.Subsets; import org.jgrapht.WeightedGraph; import org.jgrapht.alg.EdmondsKarpMaximumFlow; import org.jgrapht.alg.MinSourceSinkCut; import org.jgrapht.alg.StoerWagnerMinimumCut; import org.jgrapht.graph.DefaultWeightedEdge; import org.jgrapht.graph.DirectedWeightedMultigraph; import filetools.WriteFile; import returntools.Tuple2; import syst.Instance; import syst.Systm; import syst.Variable; public class BinaryGraph { private Systm sys; private DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> graph; private final Variable source = new Variable("S"); private final Variable sink = new Variable("T"); private final String[] biLabels; private boolean isAE; //is from an alpha expansion /* * * * * * * * * * * CONSTRUCTORS * * * * * * * * * * */ /** * @param s A binary Systm */ public BinaryGraph(Systm s, boolean ae){ // Instantiate the Systm and make the hats sys = s; sys.makeHats(); if (ae){ biLabels = Systm.binaryLabels; if (!sys.getIsBinary()){ System.out.println("Must have a binary Systm to make a BinaryGraph!"); System.exit(0); }else{ // Make the flow network from the normal form energy createNonSubmodularFlowNetworkAE(); //printGraphCompact(); } }else{ biLabels = null; if (!sys.getIsBinary()){ System.out.println("Must have a binary Systm to make a BinaryGraph!"); System.exit(0); }else{ // Make the flow network from the normal form energy createNonSubmodularFlowNetworkNonAE(); printGraphCompact(); } } } public BinaryGraph(Systm s, boolean ae, WriteFile wr){ // Instantiate the Systm and make the hats sys = s; sys.makeHats(); if (ae){ biLabels = Systm.binaryLabels; if 
(!sys.getIsBinary()){ System.out.println("Must have a binary Systm to make a BinaryGraph!"); System.exit(0); }else{ // Make the flow network from the normal form energy createNonSubmodularFlowNetworkAE(); //printGraphCompact(); } }else{ biLabels = null; if (!sys.getIsBinary()){ System.out.println("Must have a binary Systm to make a BinaryGraph!"); System.exit(0); }else{ // Make the flow network from the normal form energy createNonSubmodularFlowNetworkNonAE(); printGraphCompact(wr); } } } /* * * * * * * * THE CUT * * * * * * * */ /** * Does the graph cut on the flow network to find the minimum * energy configuration. If some Variables are not assigned we * do the brute force minimization on just those unassigned * Variables. * @return The array of {0,1} indicating the output of * the cut. There may be some -1 in the array if the cut * algorithm failed to label everything and what it didn't * label was too large for brute force (currently >25 elements) */ public HashMap<Variable,Integer> doCut(){ System.out.println("Doing the cut!"); EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge> F = new EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge>(graph); Vector<Variable> vars = sys.getVars(); Double constantNF = sys.getConstantNF(); F.calculateMinimumCut(source, sink); F.calculateMinimumCutValue(); System.out.println(); Map <Variable,Integer> minCut = F.getMinimumCut(); Double minCutValue = F.getMinimumCutValue(); Vector<Variable> S = new Vector<Variable>(); Vector<Variable> T = new Vector<Variable>(); Vector<Variable> U = new Vector<Variable>(); Iterator<Variable> it = vars.iterator(); int[] varSeq = new int[sys.getVars().size()]; int ind = 0; HashMap<Variable,Integer> varHM = new HashMap<Variable,Integer>(); while (it.hasNext()){ Variable gr = it.next(); if (minCut.get(gr) == 0 && minCut.get(gr.getHat()) == 1){ S.add(gr); varSeq[ind] = 0; varHM.put(gr, 0); }else if (minCut.get(gr) == 1 && minCut.get(gr.getHat())==0){ T.add(gr); varSeq[ind] = 1; varHM.put(gr, 
1); }else{ U.add(gr); varSeq[ind] = -1; varHM.put(gr, -1); } ind++; } //System.out.println("flow: "+F.getMaximumFlow().toString()); System.out.println("flow value: "+F.getMaximumFlowValue().toString()); System.out.println("cut: "+minCut.toString()); System.out.println("cut value: "+minCutValue); //System.out.println("cut constant: "+constantNF); System.out.println("cut + constant: " + (minCutValue+constantNF)); System.out.println(); printST(S,T); System.out.println("UNKNOWN groups: "+Arrays.deepToString(U.toArray())); System.out.println(" Tuple2<Double,int[]> better = bruteLeftOver(varSeq); if (better != null){ Double minValue = better.getFirst(); int[] varSeq2 = better.getSecond(); HashMap<Variable,Integer> varHM2 = new HashMap<Variable,Integer>(); Vector<Variable> S2 = new Vector<Variable>(); Vector<Variable> T2 = new Vector<Variable>(); Vector<Variable> U2 = new Vector<Variable>(); //TODO: Here I'm just picking the first of the minimizers // since they are all the same minimum value. 
Should probably // pick more carefully //for (int i=0; i<varSeq2.length; i++){ for (int i=0; i<vars.size(); i++){ Variable gr = vars.get(i); if (varSeq2[i] == 0){ S2.add(gr); //System.out.println("S"+gr.toString()); //protSeq[ind] = 0; varHM2.put(gr, 0); }else if (varSeq2[i] == 1){ T2.add(gr); //System.out.println("T"+gr.toString()); //protSeq[ind] = 1; varHM2.put(gr, 1); }else{ U2.add(gr); //System.out.println("U"+gr.toString()); //protSeq[ind] = -1; varHM2.put(gr, -1); } ind++; } System.out.println(); System.out.println("After brute force on the leftover we got..."); System.out.println("minimum energy: "+minValue); printST(S2,T2); System.out.println("UNKNOWN groups: "+Arrays.deepToString(U2.toArray())); System.out.println("====================================================="); System.out.println(); return varHM2; }else{ System.out.println("====================================================="); return varHM; } } public HashMap<Variable,Instance> doCutNonAE(){ System.out.println("Doing the cut!"); EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge> F = new EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge>(graph); Vector<Variable> vars = sys.getVars(); Double constantNF = sys.getConstantNF(); F.calculateMinimumCut(source, sink); F.calculateMinimumCutValue(); System.out.println(); Map <Variable,Integer> minCut = F.getMinimumCut(); Double minCutValue = F.getMinimumCutValue(); Vector<Variable> S = new Vector<Variable>(); Vector<Variable> T = new Vector<Variable>(); Vector<Variable> U = new Vector<Variable>(); Iterator<Variable> it = vars.iterator(); int[] varSeq = new int[sys.getVars().size()]; int ind = 0; HashMap<Variable,Instance> varHM = new HashMap<Variable,Instance>(); while (it.hasNext()){ Variable gr = it.next(); if (minCut.get(gr) == 0 && minCut.get(gr.getHat()) == 1){ S.add(gr); varSeq[ind] = 0; varHM.put(gr, gr.getInstances().get(0)); }else if (minCut.get(gr) == 1 && minCut.get(gr.getHat())==0){ T.add(gr); varSeq[ind] = 1; varHM.put(gr, 
gr.getInstances().get(1)); }else{ U.add(gr); varSeq[ind] = -1; varHM.put(gr, null); } ind++; } System.out.println("flow: "+F.getMaximumFlow().toString()); System.out.println("flow value: "+F.getMaximumFlowValue().toString()); System.out.println("cut: "+minCut.toString()); System.out.println("cut value: "+minCutValue); //System.out.println("cut constant: "+constantNF); System.out.println("cut + constant: " + (minCutValue+constantNF)); System.out.println(); printST(S,T); System.out.println("UNKNOWN groups: "+Arrays.deepToString(U.toArray())); System.out.println(" Tuple2<Double,int[]> better = bruteLeftOver(varSeq); if (better != null){ Double minValue = better.getFirst(); int[] varSeq2 = better.getSecond(); HashMap<Variable,Instance> varHM2 = new HashMap<Variable,Instance>(); Vector<Variable> S2 = new Vector<Variable>(); Vector<Variable> T2 = new Vector<Variable>(); Vector<Variable> U2 = new Vector<Variable>(); //TODO: Here I'm just picking the first of the minimizers // since they are all the same minimum value. 
Should probably // pick more carefully //for (int i=0; i<varSeq2.length; i++){ for (int i=0; i<vars.size(); i++){ Variable gr = vars.get(i); if (varSeq2[i] == 0){ S2.add(gr); //System.out.println("S"+gr.toString()); //protSeq[ind] = 0; varHM2.put(gr, gr.getInstances().get(0)); }else if (varSeq2[i] == 1){ T2.add(gr); //System.out.println("T"+gr.toString()); //protSeq[ind] = 1; varHM2.put(gr, gr.getInstances().get(1)); }else{ U2.add(gr); //System.out.println("U"+gr.toString()); //protSeq[ind] = -1; varHM2.put(gr, null); } ind++; } System.out.println(); System.out.println("After brute force on the leftover we got..."); System.out.println("minimum energy: "+minValue); printST(S2,T2); System.out.println("UNKNOWN groups: "+Arrays.deepToString(U2.toArray())); System.out.println("====================================================="); System.out.println(); return varHM2; }else{ System.out.println("====================================================="); return varHM; } } public HashMap<Variable,Instance> doCutNonAE(WriteFile wr){ System.out.println("Doing the cut!"); EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge> F = new EdmondsKarpMaxFlowMinCut<Variable, DefaultWeightedEdge>(graph); Vector<Variable> vars = sys.getVars(); Double constantNF = sys.getConstantNF(); F.calculateMinimumCut(source, sink); F.calculateMinimumCutValue(); System.out.println(); Map <Variable,Integer> minCut = F.getMinimumCut(); Double minCutValue = F.getMinimumCutValue(); Vector<Variable> S = new Vector<Variable>(); Vector<Variable> T = new Vector<Variable>(); Vector<Variable> U = new Vector<Variable>(); Iterator<Variable> it = vars.iterator(); int[] varSeq = new int[sys.getVars().size()]; int ind = 0; HashMap<Variable,Instance> varHM = new HashMap<Variable,Instance>(); while (it.hasNext()){ Variable gr = it.next(); if (minCut.get(gr) == 0 && minCut.get(gr.getHat()) == 1){ S.add(gr); varSeq[ind] = 0; varHM.put(gr, gr.getInstances().get(0)); }else if (minCut.get(gr) == 1 && 
minCut.get(gr.getHat())==0){ T.add(gr); varSeq[ind] = 1; varHM.put(gr, gr.getInstances().get(1)); }else{ U.add(gr); varSeq[ind] = -1; varHM.put(gr, null); } ind++; } wr.writeln("RESULTS"); wr.writeln("flow: "+F.getMaximumFlow().toString()); wr.writeln("flow value: "+F.getMaximumFlowValue().toString()); wr.writeln("cut: "+minCut.toString()); wr.writeln("cut value: "+minCutValue); //System.out.println("cut constant: "+constantNF); wr.writeln("cut + constant: " + (minCutValue+constantNF)); wr.writeln(); printST(S,T,wr); wr.writeln("UNKNOWN groups: "+Arrays.deepToString(U.toArray())); wr.writeln(" Tuple2<Double,int[]> better = bruteLeftOver(varSeq, wr); if (better != null){ Double minValue = better.getFirst(); int[] varSeq2 = better.getSecond(); HashMap<Variable,Instance> varHM2 = new HashMap<Variable,Instance>(); Vector<Variable> S2 = new Vector<Variable>(); Vector<Variable> T2 = new Vector<Variable>(); Vector<Variable> U2 = new Vector<Variable>(); //TODO: Here I'm just picking the first of the minimizers // since they are all the same minimum value. 
Should probably // pick more carefully //for (int i=0; i<varSeq2.length; i++){ for (int i=0; i<vars.size(); i++){ Variable gr = vars.get(i); if (varSeq2[i] == 0){ S2.add(gr); //System.out.println("S"+gr.toString()); //protSeq[ind] = 0; varHM2.put(gr, gr.getInstances().get(0)); }else if (varSeq2[i] == 1){ T2.add(gr); //System.out.println("T"+gr.toString()); //protSeq[ind] = 1; varHM2.put(gr, gr.getInstances().get(1)); }else{ U2.add(gr); //System.out.println("U"+gr.toString()); //protSeq[ind] = -1; varHM2.put(gr, null); } ind++; } wr.writeln(); wr.writeln("After brute force on the leftover we got..."); wr.writeln("minimum energy: "+minValue); printST(S2,T2,wr); wr.writeln("UNKNOWN groups: "+Arrays.deepToString(U2.toArray())); wr.writeln("====================================================="); wr.writeln(); return varHM2; }else{ wr.writeln("====================================================="); return varHM; } } // THE ONE THAT YOU WANT public HashMap<Variable,Instance> doCutNonAE(WriteFile main, WriteFile sec){ System.out.println("Doing the cut!"); MinSourceSinkCut<Variable, DefaultWeightedEdge> MSSC = new MinSourceSinkCut<Variable, DefaultWeightedEdge>(graph); EdmondsKarpMaximumFlow<Variable, DefaultWeightedEdge> F = new EdmondsKarpMaximumFlow<Variable, DefaultWeightedEdge>(graph); Vector<Variable> vars = sys.getVars(); Double constantNF = sys.getConstantNF(); // F.calculateMinimumCut(source, sink); // F.calculateMinimumCutValue(); F.calculateMaximumFlow(source, sink); MSSC.computeMinCut(source, sink); System.out.println(); // Map <Variable,Integer> minCut = F.getMinimumCut(); Set<Variable> SRC = MSSC.getSourcePartition(); Set<Variable> SNK = MSSC.getSinkPartition(); // Double minCutValue = F.getMinimumCutValue(); Double minCutValue = MSSC.getCutWeight(); if (minCutValue - F.getMaximumFlowValue() > 0.00001){ throw new IllegalArgumentException( "maxflow != mincut"); } Vector<Variable> S = new Vector<Variable>(); Vector<Variable> T = new Vector<Variable>(); 
Vector<Variable> U = new Vector<Variable>(); Iterator<Variable> it = vars.iterator(); int[] varSeq = new int[sys.getVars().size()]; int ind = 0; HashMap<Variable,Instance> varHM = new HashMap<Variable,Instance>(); while (it.hasNext()){ Variable gr = it.next(); // if (minCut.get(gr) == 0 && minCut.get(gr.getHat()) == 1){ if (SRC.contains(gr) && SNK.contains(gr.getHat())){ S.add(gr); varSeq[ind] = 0; varHM.put(gr, gr.getInstances().get(0)); // }else if (minCut.get(gr) == 1 && minCut.get(gr.getHat())==0){ }else if (SNK.contains(gr) && SRC.contains(gr.getHat())){ T.add(gr); varSeq[ind] = 1; varHM.put(gr, gr.getInstances().get(1)); }else{ U.add(gr); varSeq[ind] = -1; varHM.put(gr, null); } ind++; } main.writeln("RESULTS"); // main.writeln("flow: "+F.getMaximumFlow().toString()); main.writeln("flow: "+F.getMaximumFlow().toString()); main.writeln("flow value: "+F.getMaximumFlowValue().toString()); // main.writeln("cut: "+minCut.toString()); main.writeln("cut:"); main.writeln(" SRC = "+SRC.toString()); main.writeln(" SNK = "+SNK.toString()); main.writeln("cut value: "+minCutValue); //System.out.println("cut constant: "+constantNF); main.writeln("cut + constant: " + (minCutValue+constantNF)); main.writeln(); printST(S,T,main); main.writeln("UNKNOWN groups: "+Arrays.deepToString(U.toArray())); main.writeln(" Tuple2<Double,int[]> better = bruteLeftOver(varSeq, main, sec); if (better != null){ Double minValue = better.getFirst(); int[] varSeq2 = better.getSecond(); HashMap<Variable,Instance> varHM2 = new HashMap<Variable,Instance>(); Vector<Variable> S2 = new Vector<Variable>(); Vector<Variable> T2 = new Vector<Variable>(); Vector<Variable> U2 = new Vector<Variable>(); //TODO: Here I'm just picking the first of the minimizers // since they are all the same minimum value. 
Should probably // pick more carefully //for (int i=0; i<varSeq2.length; i++){ for (int i=0; i<vars.size(); i++){ Variable gr = vars.get(i); if (varSeq2[i] == 0){ S2.add(gr); //System.out.println("S"+gr.toString()); //protSeq[ind] = 0; varHM2.put(gr, gr.getInstances().get(0)); }else if (varSeq2[i] == 1){ T2.add(gr); //System.out.println("T"+gr.toString()); //protSeq[ind] = 1; varHM2.put(gr, gr.getInstances().get(1)); }else{ U2.add(gr); //System.out.println("U"+gr.toString()); //protSeq[ind] = -1; varHM2.put(gr, null); } ind++; } main.writeln(); main.writeln("After brute force on the leftover we got..."); main.writeln("minimum energy: "+minValue); printST(S2,T2,main); main.writeln("UNKNOWN groups: "+Arrays.deepToString(U2.toArray())); main.writeln("====================================================="); main.writeln(); return varHM2; }else{ main.writeln("====================================================="); return varHM; } } /** * @param varSeq Array of {-1,0,1} indicating the output of the cut algorithm * @return The result of doing a brute force minimization on the * unassigned Variables (indicated by "-1" in varSeq). Outputs the new * minimum energy as well as the configuration that realizes this minimum. */ private Tuple2<Double, int[]> bruteLeftOver(int[] varSeq) { int[] unassigned = MapleTools.select(-1, varSeq); int n = unassigned.length; System.out.println("There are "+n+" unassigned residues."); if (n==0){ //System.out.println("There are no unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("NO "); return null; } if (n>25){ System.out.println("Can't do brute that high! 
There are "+n+" unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("XX "); // wr.write(n+" "); return null; } Subsets S = new Subsets(n); double currentMin = Double.POSITIVE_INFINITY; int[] currentMinimizer = new int[n]; while (S.hasNext()){ int[] subset = S.next(); int[] fullSub = varSeq.clone(); for (int j=0; j<n; j++){ fullSub[unassigned[j]] = subset[j]; } double energy; if (isAE){ energy = sys.evaluateBinaryEnergy(fullSub,true); //System.out.println(Arrays.toString(subsetVectors[i])+" "+energy); }else{ energy = sys.evaluateEnergy(fullSub, true); } //if (verb){ // System.out.println(energy + " " + Arrays.toString(subsetVectors[i])); if (energy<currentMin){ currentMin = energy; currentMinimizer = fullSub.clone(); }else if (energy == currentMin){ currentMinimizer = ArrayTools.concat(currentMinimizer,fullSub); } } Tuple2<Double,int[]> Ret = new Tuple2<Double,int[]>(currentMin, currentMinimizer); return Ret; } private Tuple2<Double, int[]> bruteLeftOver(int[] varSeq, WriteFile wr) { int[] unassigned = MapleTools.select(-1, varSeq); int n = unassigned.length; wr.writeln("There are "+n+" unassigned residues."); if (n==0){ //System.out.println("There are no unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("NO "); return null; } if (n>25){ wr.writeln("Can't do brute that high! 
There are "+n+" unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("XX "); // wr.write(n+" "); return null; } Subsets S = new Subsets(n); double currentMin = Double.POSITIVE_INFINITY; int[] currentMinimizer = new int[n]; while (S.hasNext()){ int[] subset = S.next(); int[] fullSub = varSeq.clone(); for (int j=0; j<n; j++){ fullSub[unassigned[j]] = subset[j]; } double energy; if (isAE){ energy = sys.evaluateBinaryEnergy(fullSub,true); //System.out.println(Arrays.toString(subsetVectors[i])+" "+energy); }else{ energy = sys.evaluateEnergy(fullSub, true); } //if (verb){ // System.out.println(energy + " " + Arrays.toString(subsetVectors[i])); if (energy<currentMin){ currentMin = energy; currentMinimizer = fullSub.clone(); }else if (energy == currentMin){ currentMinimizer = ArrayTools.concat(currentMinimizer,fullSub); } } Tuple2<Double,int[]> Ret = new Tuple2<Double,int[]>(currentMin, currentMinimizer); return Ret; } private Tuple2<Double, int[]> bruteLeftOver(int[] varSeq, WriteFile wr, WriteFile sec) { int[] unassigned = MapleTools.select(-1, varSeq); int n = unassigned.length; wr.writeln("There are "+n+" unassigned residues."); sec.writeln(sys.getVars().size()+" "+n); if (n==0){ //System.out.println("There are no unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("NO "); return null; } if (n>25){ wr.writeln("Can't do brute that high! 
There are "+n+" unassigned residues."); // long endBrute = System.currentTimeMillis(); // System.out.println("Took "+(endBrute - startBrute)+" ms to do the brute force minimization on the unassigned residues."); // //time.write("brute "+(endBrute-startBrute)+" "); // time.write("XX "); // wr.write(n+" "); return null; } Subsets S = new Subsets(n); double currentMin = Double.POSITIVE_INFINITY; int[] currentMinimizer = new int[n]; while (S.hasNext()){ int[] subset = S.next(); int[] fullSub = varSeq.clone(); for (int j=0; j<n; j++){ fullSub[unassigned[j]] = subset[j]; } double energy; if (isAE){ energy = sys.evaluateBinaryEnergy(fullSub,true); //System.out.println(Arrays.toString(subsetVectors[i])+" "+energy); }else{ energy = sys.evaluateEnergy(fullSub, true); } //if (verb){ // System.out.println(energy + " " + Arrays.toString(subsetVectors[i])); if (energy<currentMin){ currentMin = energy; currentMinimizer = fullSub.clone(); }else if (energy == currentMin){ currentMinimizer = ArrayTools.concat(currentMinimizer,fullSub); } } Tuple2<Double,int[]> Ret = new Tuple2<Double,int[]>(currentMin, currentMinimizer); return Ret; } /* * * * * * * * * MAKE GRAPH * * * * * * * * */ /** * Creates the flow network for the general binary Systm * of this BinaryGraph object. */ private void createNonSubmodularFlowNetworkNonAE(){ System.out.println("Making the binary energy flow network for the binary Systm."); // Get the variables and normal form binary energy Vector<Variable> vars = sys.getVars(); TwoKeyHash<Instance,Double> matrixNF = sys.getBinaryNF(); // This will be the flow network DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> g = new DirectedWeightedMultigraph<Variable, DefaultWeightedEdge>(DefaultWeightedEdge.class); int n = vars.size(); g.addVertex(source); g.addVertex(sink); // Weight the edges as indicated in the Minimizing Non-Submodular Energy paper. 
for(int i = 0; i<=n-1; i++){ Variable vi = vars.get(i); //System.out.println(vi); Variable viHat = vi.getHat(); //System.out.println(viHat); Vector<Instance> viInsts = vi.getInstances(); Instance vi0 = viInsts.get(0); Instance vi1 = viInsts.get(1); g.addVertex(vi); g.addVertex(viHat); Double E0 = vi0.getEnergyNF(); Double E1 = vi1.getEnergyNF(); g.addEdge(vi,sink); g.setEdgeWeight(g.getEdge(vi, sink),0.5*E0); g.addEdge(source,viHat); g.setEdgeWeight(g.getEdge(source,viHat), 0.5*E0); g.addEdge(source,vi); g.setEdgeWeight(g.getEdge(source,vi),0.5*E1); g.addEdge(viHat, sink); g.setEdgeWeight(g.getEdge(viHat,sink),0.5*E1); } for(int p=0; p<=n-1; p++){ for(int q=p+1; q<=n-1 && p!=q; q++){ Variable vp = vars.get(p); Vector<Instance> vpInsts = vp.getInstances(); Instance vp0 = vpInsts.get(0); Instance vp1 = vpInsts.get(1); Variable vpHat = vp.getHat(); Variable vq = vars.get(q); Vector<Instance> vqInsts = vq.getInstances(); Instance vq0 = vqInsts.get(0); Instance vq1 = vqInsts.get(1); Variable vqHat = vq.getHat(); //System.out.println(vp.toString()+" ["+vp0.toString()+", "+vp1.toString()+"] "+vq.toString()+" ["+vq0.toString()+", "+vq1.toString()+"]"); Double E01 = matrixNF.get(vp0, vq1); Double E10 = matrixNF.get(vp1, vq0); Double E00 = matrixNF.get(vp0, vq0); Double E11 = matrixNF.get(vp1, vq1); g.addEdge(vp,vq); g.addEdge(vqHat,vpHat); g.setEdgeWeight(g.getEdge(vp,vq), 0.5*E01); g.setEdgeWeight(g.getEdge(vqHat,vpHat), 0.5*E01); g.addEdge(vq,vp); g.addEdge(vpHat,vqHat); g.setEdgeWeight(g.getEdge(vq,vp),0.5*E10); g.setEdgeWeight(g.getEdge(vpHat,vqHat), 0.5*E10); g.addEdge(vp,vqHat); g.addEdge(vq,vpHat); g.setEdgeWeight(g.getEdge(vp,vqHat),0.5*E00); g.setEdgeWeight(g.getEdge(vq,vpHat), 0.5*E00); g.addEdge(vqHat,vp); g.addEdge(vpHat,vq); g.setEdgeWeight(g.getEdge(vqHat,vp),0.5*E11); g.setEdgeWeight(g.getEdge(vpHat,vq),0.5*E11); } } graph = addWeights(g); } /** * Creates the flow network for the binary Systm (created for * the alpha-expansion) of this BinaryGraph object. 
*/ private void createNonSubmodularFlowNetworkAE() { System.out.println("Making the binary energy flow network for the alpha-expansion."); // Get the variables and normal form binary energy Vector<Variable> vars = sys.getVars(); TwoKeyHash<Instance,Double> matrixNF = sys.getBinaryNF(); // This will be the flow network DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> g = new DirectedWeightedMultigraph<Variable, DefaultWeightedEdge>(DefaultWeightedEdge.class); int n = vars.size(); g.addVertex(source); g.addVertex(sink); // Weight the edges as indicated in the Minimizing Non-Submodular Energy paper. for(int i = 0; i<=n-1; i++){ Variable vi = vars.get(i); //System.out.println(vi); Variable viHat = vi.getHat(); //System.out.println(viHat); Instance vi0 = vi.getInstance(biLabels[0]); Instance vi1 = vi.getInstance(biLabels[1]); g.addVertex(vi); g.addVertex(viHat); Double E0 = vi0.getEnergyNF(); Double E1 = vi1.getEnergyNF(); g.addEdge(vi,sink); g.setEdgeWeight(g.getEdge(vi, sink),0.5*E0); g.addEdge(source,viHat); g.setEdgeWeight(g.getEdge(source,viHat), 0.5*E0); g.addEdge(source,vi); g.setEdgeWeight(g.getEdge(source,vi),0.5*E1); g.addEdge(viHat, sink); g.setEdgeWeight(g.getEdge(viHat,sink),0.5*E1); } for(int p=0; p<=n-1; p++){ for(int q=p+1; q<=n-1 && p!=q; q++){ Variable vp = vars.get(p); Instance vp0 = vp.getInstance(biLabels[0]); Instance vp1 = vp.getInstance(biLabels[1]); Variable vpHat = vp.getHat(); Variable vq = vars.get(q); Instance vq0 = vq.getInstance(biLabels[0]); Instance vq1 = vq.getInstance(biLabels[1]); Variable vqHat = vq.getHat(); Double E01 = matrixNF.get(vp0, vq1); Double E10 = matrixNF.get(vp1, vq0); Double E00 = matrixNF.get(vp0, vq0); Double E11 = matrixNF.get(vp1, vq1); g.addEdge(vp,vq); g.addEdge(vqHat,vpHat); g.setEdgeWeight(g.getEdge(vp,vq), 0.5*E01); g.setEdgeWeight(g.getEdge(vqHat,vpHat), 0.5*E01); g.addEdge(vq,vp); g.addEdge(vpHat,vqHat); g.setEdgeWeight(g.getEdge(vq,vp),0.5*E10); g.setEdgeWeight(g.getEdge(vpHat,vqHat), 0.5*E10); 
g.addEdge(vp,vqHat); g.addEdge(vq,vpHat); g.setEdgeWeight(g.getEdge(vp,vqHat),0.5*E00); g.setEdgeWeight(g.getEdge(vq,vpHat), 0.5*E00); g.addEdge(vqHat,vp); g.addEdge(vpHat,vq); g.setEdgeWeight(g.getEdge(vqHat,vp),0.5*E11); g.setEdgeWeight(g.getEdge(vpHat,vq),0.5*E11); } } graph = addWeights(g); } /** * @param g A flow network with possible multi-edges * @return The same flow network with all multi-edges * collapsed into a single edge having weight equal * to the sum of the original weights. */ private DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> addWeights( DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> g) { DirectedWeightedMultigraph<Variable, DefaultWeightedEdge> g1 = new DirectedWeightedMultigraph<Variable, DefaultWeightedEdge>(DefaultWeightedEdge.class); Set<Variable> vertices = g.vertexSet(); for(Variable v: vertices){ for(Variable w: vertices){ g1.addVertex(v); g1.addVertex(w); Set<DefaultWeightedEdge> edges = g.getAllEdges(v, w); Double newWt = new Double(0); for(DefaultWeightedEdge e: edges){ newWt += g.getEdgeWeight(e); } if(newWt != 0){ g1.addEdge(v,w); g1.setEdgeWeight(g1.getEdge(v,w), newWt); } } } return g1; } /* * * * * * * * * PRINTING * * * * * * * * */ /** * Prints out the number of vertices as well as the edges with weights * Number of vertices first followed by each edge and weight * on subsequent lines (one line per edge) */ public void printGraph() { Set<Variable> vertices = graph.vertexSet(); System.out.println("There are "+vertices.size() +" vertices."); System.out.println("The edges with weights are:"); for(Variable v: vertices){ for(Variable w: vertices){ Set<DefaultWeightedEdge> edges = graph.getAllEdges(v, w); for(DefaultWeightedEdge e: edges){ double wt = graph.getEdgeWeight(e); System.out.println(v.toString()+", "+ w.toString() +", "+ wt); } } } } /** * Prints out the vertex set and edge set with weights on one line. 
* [vertex set], {(v1,v2)=wt...} * where (v1,v2) is an edge with weight wt */ public void printGraphCompact(){ Set<Variable> vertices = graph.vertexSet(); String toPrint = " "; toPrint = toPrint.concat(vertices.toString()+", {"); for(Variable v: vertices){ for(Variable w: vertices){ Set<DefaultWeightedEdge> edges = graph.getAllEdges(v, w); for(DefaultWeightedEdge e: edges){ double wt = graph.getEdgeWeight(e); toPrint = toPrint.concat("("+v.toString()+", "+ w.toString() +")= "+ wt+", "); } } } toPrint = toPrint.substring(0,toPrint.length()-2); toPrint = toPrint.concat("}"); System.out.println(toPrint); } public void printGraphCompact(WriteFile wr){ Set<Variable> vertices = graph.vertexSet(); // sort the vertices alphabetically Comparator<Variable> comparator = new Comparator<Variable>() { public int compare(Variable o1, Variable o2) { return o1.getName().toString().compareTo(o2.getName().toString()); } }; SortedSet<Variable> sorted_keys = new TreeSet<Variable>(comparator); sorted_keys.addAll(vertices); // run through the vertices to print String toPrintVerts = "Vertices:\n"; String toPrintEdges = "Edges:\n"; for(Variable vtx1: sorted_keys){ String vname = vtx1.toString(); String vnameTranslate = ""; if (vname.equals("S")) vnameTranslate = "S"; else if (vname.equals("T")) vnameTranslate = "T"; else if (vname.substring(vname.length()-2).equals("_H")){ vnameTranslate = vname.substring(0, vname.length()-2)+"_DEPROTONATED"; }else{ vnameTranslate = vname+"_PROTONATED"; } toPrintVerts += (vnameTranslate+"\n"); for (Variable vtx2 : sorted_keys){ if (vtx1.equals(vtx2)) continue; String wname = vtx2.toString(); String wnameTranslate = ""; if (wname.equals("S")) wnameTranslate = "S"; else if (wname.equals("T")) wnameTranslate = "T"; else if (wname.substring(wname.length()-2).equals("_H")){ wnameTranslate = wname.substring(0, wname.length()-2)+"_DEPROTONATED"; }else{ wnameTranslate = wname+"_PROTONATED"; } Set<DefaultWeightedEdge> edges = graph.getAllEdges(vtx1, vtx2); 
for(DefaultWeightedEdge e: edges){ double wt = graph.getEdgeWeight(e); toPrintEdges += ("("+vnameTranslate+", "+ wnameTranslate +")= "+ Math.round(wt*10000.0)/10000.0+"\n"); } } } // String toPrint = " "; // toPrint = toPrint.concat(vertices.toString()+", {"); // for(Variable v: vertices){ // for(Variable w: vertices){ // Set<DefaultWeightedEdge> edges = graph.getAllEdges(v, w); // for(DefaultWeightedEdge e: edges){ // double wt = graph.getEdgeWeight(e); // toPrint = toPrint.concat("("+v.toString()+", "+ w.toString() +")= "+ wt+", "); // toPrint = toPrint.substring(0,toPrint.length()-2); // toPrint = toPrint.concat("}"); wr.writeln("Flow network:"); wr.writeln(toPrintVerts); wr.writeln(toPrintEdges); } /** * Prints out the configuration given by the S and T sets * @param S Vector of Variables assigned to "0" * @param T Vector of Variables assigned to "1" */ private void printST(Vector<Variable> S, Vector<Variable> T) { System.out.print("0 instances: "); for (int i=0; i<S.size(); i++){ Variable iGrp = S.get(i); Instance iGrp0; if (isAE){ iGrp0 = iGrp.getInstance(biLabels[0]); }else{ iGrp0 = iGrp.getInstances().get(0); } System.out.print(iGrp0.toString()+", "); } System.out.println(); System.out.print("1 instances: "); for (int i=0; i<T.size(); i++){ Variable iGrp = T.get(i); Instance iGrp1; if (isAE){ iGrp1 = iGrp.getInstance(biLabels[1]); }else{ iGrp1 = iGrp.getInstances().get(1); } System.out.print(iGrp1.toString()+", "); } System.out.println(); } private void printST(Vector<Variable> S, Vector<Variable> T, WriteFile wr) { wr.write("0 instances: "); for (int i=0; i<S.size(); i++){ Variable iGrp = S.get(i); Instance iGrp0; if (isAE){ iGrp0 = iGrp.getInstance(biLabels[0]); }else{ iGrp0 = iGrp.getInstances().get(0); } wr.write(iGrp0.toString()+", "); } wr.writeln(); wr.write("1 instances: "); for (int i=0; i<T.size(); i++){ Variable iGrp = T.get(i); Instance iGrp1; if (isAE){ iGrp1 = iGrp.getInstance(biLabels[1]); }else{ iGrp1 = iGrp.getInstances().get(1); } 
wr.write(iGrp1.toString()+", "); } wr.writeln(); } }
package org.apache.commons.fileupload;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.beanutils.MethodUtils;

// Parses multipart/form-data servlet requests into FileItem objects.
// FileItem instances are created reflectively via the configurable
// fileItemClassName (see createItem / getNewInstanceMethod below).
public class FileUpload
{
    /**
     * HTTP content type header name.
     */
    public static final String CONTENT_TYPE = "Content-type";

    /**
     * HTTP content disposition header name.
     */
    public static final String CONTENT_DISPOSITION = "Content-disposition";

    /**
     * Content-disposition value for form data.
     */
    public static final String FORM_DATA = "form-data";

    /**
     * Content-disposition value for file attachment.
     */
    public static final String ATTACHMENT = "attachment";

    /**
     * Part of HTTP content type header.
     */
    private static final String MULTIPART = "multipart/";

    /**
     * HTTP content type header for multipart forms.
     */
    public static final String MULTIPART_FORM_DATA = "multipart/form-data";

    /**
     * HTTP content type header for multiple uploads.
     */
    public static final String MULTIPART_MIXED = "multipart/mixed";

    /**
     * The maximum length of a single header line that will be parsed
     * (1024 bytes).
     */
    public static final int MAX_HEADER_SIZE = 1024;

    /**
     * The maximum size permitted for an uploaded file.
     */
    private int sizeMax;

    /**
     * The threshold above which uploads will be stored on disk.
     */
    private int sizeThreshold;

    /**
     * The path to which uploaded files will be stored, if stored on disk.
     */
    private String repositoryPath;

    /**
     * The name of the class to use for <code>FileItem</code>s.
     */
    // Default implementation; replaceable via setFileItemClassName().
    private String fileItemClassName =
        "org.apache.commons.fileupload.DefaultFileItem";

    /**
     * The cached method for obtaining a new <code>FileItem</code> instance.
     */
    // Lazily resolved and cached by getNewInstanceMethod(); reset to null
    // whenever fileItemClassName changes.
    private Method newInstanceMethod;

    /**
     * Returns the maximum allowed upload size.
     *
     * @return The maximum allowed size, in bytes.
     *
     * @see #setSizeMax(int)
     *
     */
    public int getSizeMax()
    {
        return sizeMax;
    }

    /**
     * Sets the maximum allowed upload size. If negative, there is no maximum.
     *
     * @param sizeMax The maximum allowed size, in bytes, or -1 for no maximum.
     *
     * @see #getSizeMax()
     *
     */
    public void setSizeMax(int sizeMax)
    {
        this.sizeMax = sizeMax;
    }

    /**
     * Returns the size threshold beyond which files are written directly to
     * disk. The default value is 1024 bytes.
     *
     * @return The size threshold, in bytes.
     *
     * @see #setSizeThreshold(int)
     *
     *
     */
    public int getSizeThreshold()
    {
        return sizeThreshold;
    }

    /**
     * Sets the size threshold beyond which files are written directly to disk.
     *
     * @param sizeThreshold The size threshold, in bytes.
     *
     * @see #getSizeThreshold()
     *
     */
    public void setSizeThreshold(int sizeThreshold)
    {
        this.sizeThreshold = sizeThreshold;
    }

    /**
     * Returns the location used to temporarily store files that are larger
     * than the configured size threshold.
     *
     * @return The path to the temporary file location.
     *
     * @see #setRepositoryPath(String)
     *
     */
    public String getRepositoryPath()
    {
        return repositoryPath;
    }

    /**
     * Sets the location used to temporarily store files that are larger
     * than the configured size threshold.
     *
     * @param repositoryPath The path to the temporary file location.
     *
     * @see #getRepositoryPath()
     *
     */
    public void setRepositoryPath(String repositoryPath)
    {
        this.repositoryPath = repositoryPath;
    }

    /**
     * Returns the fully qualified name of the class which will be used to
     * instantiate <code>FileItem</code> instances when a request is parsed.
     *
     * @return The fully qualified name of the Java class.
     *
     * @see #setFileItemClassName(String)
     *
     */
    public String getFileItemClassName()
    {
        return fileItemClassName;
    }

    /**
     * Sets the fully qualified name of the class which will be used to
     * instantiate <code>FileItem</code> instances when a request is parsed.
     *
     * @param fileItemClassName The fully qualified name of the Java class.
* * @see #getFileItemClassName() * */ public void setFileItemClassName(String fileItemClassName) { this.fileItemClassName = fileItemClassName; this.newInstanceMethod = null; } public List /* FileItem */ parseRequest(HttpServletRequest req) throws FileUploadException { return parseRequest(req, getSizeThreshold(), getSizeMax(), getRepositoryPath()); } public List /* FileItem */ parseRequest(HttpServletRequest req, int sizeThreshold, int sizeMax, String path) throws FileUploadException { if (null == req) { throw new NullPointerException("req parameter"); } ArrayList items = new ArrayList(); String contentType = req.getHeader(CONTENT_TYPE); if (!contentType.startsWith(MULTIPART)) { throw new FileUploadException("the request doesn't contain a " + MULTIPART_FORM_DATA + " or " + MULTIPART_MIXED + " stream"); } int requestSize = req.getContentLength(); if (requestSize == -1) { throw new FileUploadException("the request was rejected because " + "it's size is unknown"); } if (sizeMax >= 0 && requestSize > sizeMax) { throw new FileUploadException("the request was rejected because " + "it's size exceeds allowed range"); } try { byte[] boundary = contentType.substring( contentType.indexOf("boundary=") + 9).getBytes(); InputStream input = (InputStream) req.getInputStream(); MultipartStream multi = new MultipartStream(input, boundary); boolean nextPart = multi.skipPreamble(); while (nextPart) { Map headers = parseHeaders(multi.readHeaders()); String fieldName = getFieldName(headers); if (fieldName != null) { String subContentType = getHeader(headers, CONTENT_TYPE); if (subContentType != null && subContentType .startsWith(MULTIPART_MIXED)) { // Multiple files. 
byte[] subBoundary = subContentType.substring( subContentType .indexOf("boundary=") + 9).getBytes(); multi.setBoundary(subBoundary); boolean nextSubPart = multi.skipPreamble(); while (nextSubPart) { headers = parseHeaders(multi.readHeaders()); if (getFileName(headers) != null) { FileItem item = createItem(sizeThreshold, path, headers, requestSize); OutputStream os = ((DefaultFileItem) item).getOutputStream(); try { multi.readBodyData(os); } finally { os.close(); } item.setFieldName(getFieldName(headers)); items.add(item); } else { // Ignore anything but files inside // multipart/mixed. multi.discardBodyData(); } nextSubPart = multi.readBoundary(); } multi.setBoundary(boundary); } else { if (getFileName(headers) != null) { // A single file. FileItem item = createItem(sizeThreshold, path, headers, requestSize); OutputStream os = ((DefaultFileItem) item).getOutputStream(); try { multi.readBodyData(os); } finally { os.close(); } item.setFieldName(getFieldName(headers)); items.add(item); } else { // A form field. FileItem item = createItem(sizeThreshold, path, headers, requestSize); OutputStream os = ((DefaultFileItem) item).getOutputStream(); try { multi.readBodyData(os); } finally { os.close(); } item.setFieldName(getFieldName(headers)); item.setIsFormField(true); items.add(item); } } } else { // Skip this part. multi.discardBodyData(); } nextPart = multi.readBoundary(); } } catch (IOException e) { throw new FileUploadException( "Processing of " + MULTIPART_FORM_DATA + " request failed. " + e.getMessage()); } return items; } /** * Retrieves the file name from the <code>Content-disposition</code> * header. * * @param headers A <code>Map</code> containing the HTTP request headers. * * @return The file name for the current <code>encapsulation</code>. 
*/ protected String getFileName(Map /* String, String */ headers) { String fileName = null; String cd = getHeader(headers, CONTENT_DISPOSITION); if (cd.startsWith(FORM_DATA) || cd.startsWith(ATTACHMENT)) { int start = cd.indexOf("filename=\""); int end = cd.indexOf('"', start + 10); if (start != -1 && end != -1) { fileName = cd.substring(start + 10, end).trim(); } } return fileName; } /** * Retrieves the field name from the <code>Content-disposition</code> * header. * * @param headers A <code>Map</code> containing the HTTP request headers. * * @return The field name for the current <code>encapsulation</code>. */ protected String getFieldName(Map /* String, String */ headers) { String fieldName = null; String cd = getHeader(headers, CONTENT_DISPOSITION); if (cd != null && cd.startsWith(FORM_DATA)) { int start = cd.indexOf("name=\""); int end = cd.indexOf('"', start + 6); if (start != -1 && end != -1) { fieldName = cd.substring(start + 6, end); } } return fieldName; } /** * Creates a new {@link org.apache.commons.fileupload.FileItem} instance. * * @param sizeThreshold The max size in bytes to be stored in memory. * @param path The path for the FileItem. * @param headers A <code>Map</code> containing the HTTP request * headers. * @param requestSize The total size of the request, in bytes. * * @return A newly created <code>FileItem</code> instance. * * @exception FileUploadException if an error occurs. 
 */
protected FileItem createItem(int sizeThreshold,
                              String path,
                              Map /* String, String */ headers,
                              int requestSize)
    throws FileUploadException
{
    Method newInstanceMethod = getNewInstanceMethod();
    // Argument order must match FileItem.newInstance(String, String,
    // String, int, int): path, file name, content type, request size,
    // size threshold.
    Object[] args = new Object[] {
        path, getFileName(headers), getHeader(headers, CONTENT_TYPE),
        new Integer(requestSize), new Integer(sizeThreshold) };
    FileItem fileItem = null;
    try
    {
        // newInstance() is static, hence the null invocation target.
        fileItem = (FileItem) newInstanceMethod.invoke(null, args);
    }
    catch (Exception e)
    {
        throw new FileUploadException(e.toString());
    }
    return fileItem;
}


/**
 * <p> Returns the <code>Method</code> object to be used to obtain a new
 * <code>FileItem</code> instance.
 *
 * <p> For performance reasons, we cache the method once it has been
 * looked up, since method lookup is one of the more expensive aspects
 * of reflection.
 *
 * @return The <code>newInstance()</code> method to be invoked.
 *
 * @exception FileUploadException if an error occurs.
 */
protected Method getNewInstanceMethod() throws FileUploadException
{
    // If the method is already cached, just return it.
    // NOTE(review): this lazy cache is not synchronized; concurrent first
    // calls may each perform the lookup, which appears benign since the
    // result is the same — confirm if this class is shared across threads.
    if (this.newInstanceMethod != null)
    {
        return this.newInstanceMethod;
    }

    // Load the FileUpload implementation class, preferring the thread
    // context class loader (for container environments) and falling back
    // to the loader of this class.
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    Class fileItemClass = null;
    if (classLoader == null)
    {
        classLoader = getClass().getClassLoader();
    }
    try
    {
        fileItemClass = classLoader.loadClass(fileItemClassName);
    }
    catch (Exception e)
    {
        throw new FileUploadException(e.toString());
    }
    if (fileItemClass == null)
    {
        throw new FileUploadException(
            "Failed to load FileItem class: " + fileItemClassName);
    }

    // Find the newInstance() method.
    Class[] parameterTypes = new Class[] {
        String.class, String.class, String.class,
        Integer.TYPE, Integer.TYPE };
    Method newInstanceMethod = MethodUtils.getAccessibleMethod(
        fileItemClass, "newInstance", parameterTypes);
    if (newInstanceMethod == null)
    {
        throw new FileUploadException(
            "Failed find newInstance() method in FileItem class: "
            + fileItemClassName);
    }

    // Cache the method so that all this only happens once.
    this.newInstanceMethod = newInstanceMethod;
    return newInstanceMethod;
}


/**
 * <p> Parses the <code>header-part</code> and returns as key/value
 * pairs.
 *
 * <p> If there are multiple headers of the same names, the name
 * will map to a comma-separated list containing the values.
 *
 * @param headerPart The <code>header-part</code> of the current
 *                   <code>encapsulation</code>.
 *
 * @return A <code>Map</code> containing the parsed HTTP request headers.
 */
protected Map /* String, String */ parseHeaders(String headerPart)
{
    Map headers = new HashMap();
    // Scratch buffer for one header line; a line longer than
    // MAX_HEADER_SIZE overruns it and is handled by the catch below.
    char buffer[] = new char[MAX_HEADER_SIZE];
    boolean done = false;
    int j = 0;
    int i;
    String header, headerName, headerValue;
    try
    {
        while (!done)
        {
            i = 0;
            // Copy a single line of characters into the buffer,
            // omitting trailing CRLF.
            while (i < 2
                || buffer[i - 2] != '\r'
                || buffer[i - 1] != '\n')
            {
                buffer[i++] = headerPart.charAt(j++);
            }
            header = new String(buffer, 0, i - 2);
            if (header.equals(""))
            {
                // Empty line terminates the header block.
                done = true;
            }
            else
            {
                if (header.indexOf(':') == -1)
                {
                    // This header line is malformed, skip it.
                    continue;
                }
                // Header names are lower-cased so that lookups via
                // getHeader() are case-insensitive.
                headerName = header.substring(0, header.indexOf(':'))
                    .trim().toLowerCase();
                headerValue =
                    header.substring(header.indexOf(':') + 1).trim();
                if (getHeader(headers, headerName) != null)
                {
                    // More than one header of that name exists,
                    // append to the comma-separated list.
                    headers.put(headerName,
                                getHeader(headers, headerName) + ','
                                + headerValue);
                }
                else
                {
                    headers.put(headerName, headerValue);
                }
            }
        }
    }
    catch (IndexOutOfBoundsException e)
    {
        // Headers were malformed (ran off the end of headerPart or
        // overran the line buffer). Continue with all that was parsed.
    }
    return headers;
}


/**
 * Returns the header with the specified name from the supplied map. The
 * header lookup is case-insensitive.
 *
 * @param headers A <code>Map</code> containing the HTTP request headers.
 * @param name    The name of the header to return.
 *
 * @return The value of specified header, or a comma-separated list if
 *         there were multiple headers of that name.
 */
protected final String getHeader(Map /* String, String */ headers,
                                 String name)
{
    return (String) headers.get(name.toLowerCase());
}

}
package org.jdesktop.swingx.painter;

import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;

import javax.swing.JComponent;

import org.jdesktop.swingx.util.Resize;

/**
 * <p>A Painter that paints Shapes. It uses a stroke and a paint to do so. The
 * shape is painted as is, at a specific location. If no Shape is specified, nothing
 * will be painted. If no stroke is specified, the default for the Graphics2D
 * will be used. If no paint is specified, the component background color
 * will be used. And if no location is specified, then the shape will be draw
 * at the origin (0,0)</p>
 *
 * <p>Here is an example that draws a lowly rectangle:
 * <pre><code>
 *  Rectangle2D.Double rect = new Rectangle2D.Double(0, 0, 50, 50);
 *  ShapePainter p = new ShapePainter(rect);
 *  p.setLocation(new Point2D.Double(20, 10));
 * </code></pre>
 *
 * @author rbair
 */
public class ShapePainter extends AbstractPainter {
    /**
     * The Shape to paint. If null, nothing is painted.
     */
    private Shape shape;
    /**
     * The Stroke to use when painting. If null, the default Stroke for
     * the Graphics2D is used
     */
    private Stroke stroke;
    /**
     * The Paint to use when painting the shape. If null, then the component
     * background color is used
     */
    private Paint paint;
    /**
     * The location at which to draw the shape. If null, 0,0 is used
     */
    private Point2D location = new Point2D.Double(0, 0);
    /**
     * Specifies if/how resizing (relocating) the location should occur.
     */
    private Resize resizeLocation = Resize.NONE;
    /**
     * Specifies if/how resizing of the shape should occur
     */
    private Resize resize = Resize.NONE;
    /**
     * Indicates whether the shape should be filled or drawn.
     */
    private boolean isFilled;

    /**
     * Create a new ShapePainter
     */
    public ShapePainter() {
        super();
    }

    /**
     * Create a new ShapePainter for the given shape
     *
     * @param s the Shape to use
     */
    public ShapePainter(Shape s) {
        this.shape = s;
    }

    /**
     * Sets the shape to paint. This shape is not resized when the component
     * bounds are. To do that, create a custom shape that is bound to the
     * component width/height
     *
     * @param s the Shape to paint. May be null
     */
    public void setShape(Shape s) {
        Shape old = getShape();
        this.shape = s;
        firePropertyChange("shape", old, getShape());
    }

    /**
     * @return the Shape to paint. May be null
     */
    public Shape getShape() {
        return shape;
    }

    /**
     * Sets the stroke to use for painting. If null, then the default Graphics2D
     * stroke use used
     *
     * @param s the Stroke to paint with
     */
    public void setStroke(Stroke s) {
        Stroke old = getStroke();
        this.stroke = s;
        firePropertyChange("stroke", old, getStroke());
    }

    /**
     * @return the Stroke to use for painting
     */
    public Stroke getStroke() {
        return stroke;
    }

    /**
     * The Paint to use for painting the shape. Can be a Color, GradientPaint,
     * TexturePaint, or any other kind of Paint. If null, the component
     * background is used.
     *
     * @param p the Paint to use for painting the shape. May be null.
     */
    public void setPaint(Paint p) {
        Paint old = getPaint();
        this.paint = p;
        firePropertyChange("paint", old, getPaint());
    }

    /**
     * @return the Paint used when painting the shape. May be null
     */
    public Paint getPaint() {
        return paint;
    }

    /**
     * Specifies the location at which to place the shape prior to painting.
     * If null, the origin (0,0) is used
     *
     * @param location the Point2D at which to paint the shape. may be null
     */
    public void setLocation(Point2D location) {
        Point2D old = getLocation();
        this.location = location == null ? new Point2D.Double(0, 0) : location;
        firePropertyChange("location", old, getLocation());
    }

    /**
     * @return the Point2D location at which to paint the shape. Will never be null
     *         (if it was null, new Point2D.Double(0,0) will be returned)
     */
    public Point2D getLocation() {
        return location;
    }

    /**
     * The shape can be filled or simply stroked. By default, the shape is
     * stroked. Setting this property to true fills the shape upon drawing.
     *
     * @param isFilled true if the shape must be filled, false otherwise.
     */
    public void setFilled(boolean isFilled) {
        boolean old = isFilled();
        this.isFilled = isFilled;
        // BUGFIX: was fired under the name "paint", which clashed with the
        // paint property; the property being changed here is "filled".
        firePropertyChange("filled", old, isFilled());
    }

    /**
     * @return true is the shape is filled, false if stroked
     */
    public boolean isFilled() {
        return isFilled;
    }

    /**
     * Specifies the resize behavior for the location property. If r is
     * Resize.HORIZONTAL or Resize.BOTH, then the x value of the location
     * will be treated as if it were a percentage of the width of the component.
     * Likewise, Resize.VERTICAL or Resize.BOTH will affect the y value. For
     * example, if I had a location (.3, .8) then the X will be situated at
     * 30% of the width and the Y will be situated at 80% of the height.
     *
     * @param r value indicating whether/how to resize the Location property when
     *          painting. If null, Resize.NONE will be used
     */
    public void setResizeLocation(Resize r) {
        Resize old = getResizeLocation();
        // Access the constant via the class, not a (possibly null) reference.
        this.resizeLocation = r == null ? Resize.NONE : r;
        firePropertyChange("resizeLocation", old, getResizeLocation());
    }

    /**
     * @return value indication whether/how to resize the location property.
     *         This will never be null
     */
    public Resize getResizeLocation() {
        return resizeLocation;
    }

    /**
     * Specifies the resize behavior of the shape. As with all other properties
     * that rely on Resize, the value of the width/height of the shape will
     * represent a percentage of the width/height of the component, as a value
     * between 0 and 1
     *
     * @param r value indication whether/how to resize the shape. If null,
     *          Resize.NONE will be used
     */
    public void setResize(Resize r) {
        Resize old = getResize();
        this.resize = r == null ? Resize.NONE : r;
        firePropertyChange("resize", old, getResize());
    }

    /**
     * @return value indication whether/how to resize the shape. Will never be null
     */
    public Resize getResize() {
        // BUGFIX: previously returned resizeLocation (copy/paste error).
        return resize;
    }

    /**
     * @inheritDoc
     */
    public void paintBackground(Graphics2D g, JComponent component) {
        Shape shape = getShape();
        if (shape == null) {
            // Per the class contract: no shape, nothing painted.
            return;
        }

        //set the paint
        Paint p = getPaint();
        if (p == null) {
            p = component.getBackground();
        }
        g.setPaint(p);

        //set the stroke if it is not null
        Stroke s = getStroke();
        if (s != null) {
            g.setStroke(s);
        }

        //handle the location: x/y may be fractions of the component size
        //depending on the resizeLocation property
        Point2D location = getLocation();
        Resize resizeLocation = getResizeLocation();
        double x = location.getX();
        double y = location.getY();
        if (resizeLocation == Resize.HORIZONTAL || resizeLocation == Resize.BOTH) {
            x = x * component.getWidth();
        }
        if (resizeLocation == Resize.VERTICAL || resizeLocation == Resize.BOTH) {
            y = y * component.getHeight();
        }
        // BUGFIX: the computed (possibly resize-adjusted) x/y were unused and
        // the graphics was translated by the negated raw location; translate
        // to the computed location instead so the shape is drawn there.
        g.translate(x, y);

        //resize the shape if necessary
        double width = 1;
        double height = 1;
        Resize resize = getResize();
        if (resize == Resize.HORIZONTAL || resize == Resize.BOTH) {
            width = component.getWidth();
        }
        if (resize == Resize.VERTICAL || resize == Resize.BOTH) {
            height = component.getHeight();
        }
        shape = AffineTransform.getScaleInstance(
                width, height).createTransformedShape(shape);

        //draw/fill the shape
        if (!isFilled()) {
            g.draw(shape);
        } else {
            g.fill(shape);
        }
    }
}
package org.opentdc.wtt.file;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;

import javax.servlet.ServletContext;

import org.opentdc.service.exception.DuplicateException;
import org.opentdc.service.exception.InternalServerErrorException;
import org.opentdc.service.exception.NotAllowedException;
import org.opentdc.service.exception.NotFoundException;
import org.opentdc.util.PrettyPrinter;
import org.opentdc.wtt.CompanyModel;
import org.opentdc.wtt.ProjectModel;
import org.opentdc.wtt.ResourceModel;
import org.opentdc.wtt.ServiceProvider;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;

/**
 * File-backed {@link ServiceProvider} for the WTT (working time tracking)
 * service. Companies/projects/resources are held in in-memory stores and
 * persisted as JSON: data is read from {@code data.json} if present,
 * otherwise seeded from {@code seed.json}, and written back to
 * {@code data.json} after every mutating operation (when persistent).
 */
public class FileServiceProvider implements ServiceProvider {
	private static final String SEED_FN = "/seed.json";
	private static final String DATA_FN = "/data.json";
	// Backing files, resolved once from the servlet context in the constructor.
	private static File dataF = null;
	private static File seedF = null;

	// NOTE(review): all stores below are static, so they are shared by every
	// instance of this provider in the same classloader — confirm this
	// single-tenant behavior is intended.
	protected static ArrayList<CompanyModel> companies = null;
	protected static Map<String, CompanyModel> companyIndex = null;
	protected static Map<String, ProjectModel> projectIndex = null;
	protected static ArrayList<String> resources = null;

	// instance variables
	private static final Logger logger = Logger.getLogger(FileServiceProvider.class.getName());

	/** Lazily creates the shared in-memory stores on first use. */
	private void initStorageProvider(
	) {
		logger.info("> initStorageProvider()");
		if (companies == null) {
			companies = new ArrayList<CompanyModel>();
		}
		if (companyIndex == null) {
			companyIndex = new HashMap<String, CompanyModel>();
		}
		if (projectIndex == null) {
			projectIndex = new HashMap<String, ProjectModel>();
		}
		if (resources == null) {
			resources = new ArrayList<String>();
		}
		logger.info("initStorageProvider() initialized");
	}

	// instance variables
	// When true, every mutating operation is mirrored to dataF via exportJson().
	private boolean isPersistent = true;

	/**
	 * Constructor. Resolves the data/seed file locations relative to the
	 * web application and imports the JSON data on first use (i.e. when
	 * the shared company index is still empty).
	 */
	public FileServiceProvider(
		ServletContext context,
		String prefix
	) {
		logger.info("> FileImpl()");
		initStorageProvider();
		if (dataF == null) {
			dataF = new File(context.getRealPath("/" + prefix + DATA_FN));
		}
		if (seedF == null) {
			seedF = new File(context.getRealPath("/" + prefix + SEED_FN));
		}
		if (companyIndex.size() == 0) {
			importJson();
		}
		logger.info("FileImpl() initialized");
	}

	/**
	 * List all companies.
	 *
	 * @return a list containing the companies.
	 */
	@Override
	public ArrayList<CompanyModel> listCompanies(
		boolean asTree,
		String query,
		String queryType,
		long position,
		long size) {
		logger.info("listCompanies(" + asTree + ") -> " + countCompanies()
				+ " companies");
		// internally, we keep the full data structure with all children
		// if the client want a flat structure without children, we need to filter accordingly
		ArrayList<CompanyModel> _companies = companies;
		if (asTree == false) {
			_companies = new ArrayList<CompanyModel>();
			for (CompanyModel _c : companies) {
				_companies.add(new CompanyModel(_c, false));
			}
		}
		// NOTE(review): when asTree == true, _companies aliases the shared
		// static list, so this sort reorders the shared store in place —
		// confirm this side effect is acceptable.
		Collections.sort(_companies, CompanyModel.CompanyComparator);
		for (CompanyModel _c : _companies) {
			logger.info(PrettyPrinter.prettyPrintAsJSON(_c));
		}
		return _companies;
	}

	/**
	 * Create a new company.
	 *
	 * @param newCompany
	 * @return the newly created company
	 * @throws DuplicateException if a company with the same ID already exists
	 */
	@Override
	public CompanyModel createCompany(
		CompanyModel newCompany)
		throws DuplicateException {
		if (companyIndex.get(newCompany.getId()) != null) {
			throw new DuplicateException("company with ID " + newCompany.getId()
					+ " exists already.");
		}
		companies.add(newCompany);
		indexCompany(newCompany);
		logger.info("createCompany() -> " + PrettyPrinter.prettyPrintAsJSON(newCompany));
		if (isPersistent) {
			exportJson(dataF);
		}
		return newCompany;
	}

	/**
	 * Find a company by ID.
	 *
	 * @param id
	 *            the company ID
	 * @return the company
	 * @throws NotFoundException
	 *             if there exists no company with this ID
	 */
	@Override
	public CompanyModel readCompany(
		String id)
		throws NotFoundException {
		CompanyModel _company = companyIndex.get(id);
		if (_company == null) {
			throw new NotFoundException("company with ID <" + id
					+ "> was not found.");
		}
		logger.info("readCompany(" + id + ") -> "
				+ PrettyPrinter.prettyPrintAsJSON(_company));
		return _company;
	}

	/**
	 * Update a company with new attribute values.
	 *
	 * @param newCompany
	 *            the new version of the company
	 * @return the new company
	 * @throws NotFoundException
	 *             if an object with the same ID did not exist
	 */
	@Override
	public CompanyModel updateCompany(
		String compId,
		CompanyModel newCompany
	) throws NotFoundException {
		logger.info("updateCompany() -> "
				+ PrettyPrinter.prettyPrintAsJSON(newCompany));
		// NOTE(review): lookup is by compId but the error message reports
		// newCompany.getId(); also the method returns newCompany rather than
		// the updated _oldCompany — confirm both are intended.
		CompanyModel _oldCompany = companyIndex.get(compId);
		if (_oldCompany == null) {
			throw new NotFoundException("company with ID <" + newCompany.getId()
					+ "> was not found.");
		} else {
			_oldCompany.setXri(newCompany.getXri());
			_oldCompany.setTitle(newCompany.getTitle());
			_oldCompany.setDescription(newCompany.getDescription());
			// Re-index the project subtree: drop old entries, add new ones.
			removeProjectsRecursively(_oldCompany.getProjects());
			for (ProjectModel _p : newCompany.getProjects()) {
				indexProjectRecursively(_p);
			}
		}
		logger.info("updateCompany() -> "
				+ PrettyPrinter.prettyPrintAsJSON(_oldCompany));
		if (isPersistent) {
			exportJson(dataF);
		}
		return newCompany;
	}

	/**
	 * Delete the company with the given ID, including all of its projects.
	 *
	 * @param id the company ID
	 * @throws NotFoundException if no company with this ID exists
	 * @throws InternalServerErrorException if the company could not be removed
	 */
	@Override
	public void deleteCompany(
		String id)
		throws NotFoundException, InternalServerErrorException {
		CompanyModel _company = companyIndex.get(id);
		if (_company == null) {
			throw new NotFoundException("company with ID <" + id
					+ "> was not found.");
		}
		removeProjectsRecursively(_company.getProjects());
		companyIndex.remove(id);
		if (companies.remove(_company) == false) {
			throw new InternalServerErrorException("could not remove company "
					+ id);
		}
		logger.info("deleteCompany(" + id + ")");
		if (isPersistent) {
			exportJson(dataF);
		}
	}

	/**
	 * Count all companies.
	 *
	 * @return amount of companies or -1 if the store is not existing.
	 */
	@Override
	public int countCompanies() {
		int _count = -1;
		if (companies != null) {
			_count = companies.size();
		}
		logger.info("countCompanies() = " + _count);
		return _count;
	}

	/** Registers the company in the ID index. */
	private void indexCompany(CompanyModel company) {
		companyIndex.put(company.getId(), company);
	}

	/**
	 * Return the top-level projects of a company without subprojects.
	 *
	 * @param compId the company ID
	 * @return all top-level projects of a company
	 */
	@Override
	public ArrayList<ProjectModel> listProjects(
		String compId,
		String query,
		String queryType,
		long position,
		long size) {
		ArrayList<ProjectModel> _projects = new ArrayList<ProjectModel>();
		// Shallow copies (childless) of the top-level projects only.
		for (ProjectModel _p : readCompany(compId).getProjects()) {
			_projects.add(new ProjectModel(_p, false));
		}
		Collections.sort(_projects, ProjectModel.ProjectComparator);
		logger.info("listProjects(" + compId + ") -> " + _projects.size()
				+ " values");
		return _projects;
	}

	/**
	 * Return all projects of a company
	 *
	 * @param compId the company ID
	 * @param asTree return the projects either as a hierarchical tree or as a flat list
	 * @return all projects of a company
	 */
	@Override
	public ArrayList<ProjectModel> listAllProjects(
		String compId,
		boolean asTree,
		String query,
		String queryType,
		long position,
		long size) {
		ArrayList<ProjectModel> _projects = readCompany(compId).getProjects();
		if (asTree == false) {
			_projects = new ArrayList<ProjectModel>();
			_projects = flatten(_projects, readCompany(compId).getProjects());
		}
		// NOTE(review): when asTree == true, this sorts the company's own
		// project list in place (same aliasing as in listCompanies()).
		Collections.sort(_projects, ProjectModel.ProjectComparator);
		logger.info("listProjects(" + compId + ") -> " + _projects.size()
				+ " values");
		return _projects;
	}

	/** Recursively appends childless copies of all projects to flatList. */
	private ArrayList<ProjectModel> flatten(
		ArrayList<ProjectModel> flatList,
		List<ProjectModel> list) {
		for (ProjectModel _p : list) {
			flatList.add(new ProjectModel(_p, false));
			flatList = flatten(flatList, _p.getProjects());
		}
		return flatList;
	}

	/**
	 * Create a new top-level project within a company.
	 *
	 * @param compId the company ID
	 * @param newProject the project to add
	 * @return the newly created project
	 * @throws DuplicateException if a project with the same ID already exists
	 */
	@Override
	public ProjectModel createProject(
		String compId,
		ProjectModel newProject)
		throws DuplicateException {
		logger.info("createProject(" + compId + ", "
				+ PrettyPrinter.prettyPrintAsJSON(newProject) + ")");
		if (projectIndex.get(newProject.getId()) != null) {
			// project with same ID exists already
			throw new DuplicateException("Project with ID "
					+ newProject.getId() + " exists already.");
		}
		indexProjectRecursively(newProject);
		readCompany(compId).addProject(newProject);
		if (isPersistent) {
			exportJson(dataF);
		}
		return newProject;
	}

	/**
	 * Create a new project as a subproject of an existing project.
	 *
	 * @param compId the company ID
	 * @param projId the parent project ID
	 * @param newProject the project to add
	 * @return the newly created project
	 * @throws DuplicateException if a project with the same ID already exists
	 */
	@Override
	public ProjectModel createProjectAsSubproject(
		String compId,
		String projId,
		ProjectModel newProject)
		throws DuplicateException {
		logger.info("createProjectAsSubproject(" + compId + ", " + projId + ", "
				+ PrettyPrinter.prettyPrintAsJSON(newProject) + ")");
		if (projectIndex.get(newProject.getId()) != null) {
			// project with same ID exists already
			throw new DuplicateException("Project with ID "
					+ newProject.getId() + " exists already.");
		}
		indexProjectRecursively(newProject);
		readProject(projId).addProject(newProject);
		if (isPersistent) {
			exportJson(dataF);
		}
		return newProject;
	}

	/**
	 * Find a project by ID (at any nesting level).
	 *
	 * @param projId the project ID
	 * @return the project
	 * @throws NotFoundException if no project with this ID exists
	 */
	@Override
	public ProjectModel readProject(
		String projId)
		throws NotFoundException {
		ProjectModel _project = projectIndex.get(projId);
		if (_project == null) {
			throw new NotFoundException("project with ID <" + projId
					+ "> was not found.");
		}
		logger.info("readProject(" + projId + ") -> "
				+ PrettyPrinter.prettyPrintAsJSON(_project));
		return _project;
	}

	/**
	 * Update a project with new attribute values.
	 *
	 * @param compId the company ID
	 * @param projId the project ID
	 * @param newProject the new version of the project
	 * @return the new project
	 * @throws NotFoundException if no project with this ID exists
	 */
	@Override
	public ProjectModel updateProject(
		String compId,
		String projId,
		ProjectModel newProject
	) throws NotFoundException {
		ProjectModel _oldProject = projectIndex.get(projId);
		if (_oldProject == null) {
			throw new NotFoundException();
		} else {
			_oldProject.setXri(newProject.getXri());
			_oldProject.setTitle(newProject.getTitle());
			_oldProject.setDescription(newProject.getDescription());
			// Re-index the subproject tree: drop old entries, add new ones.
			removeProjectsRecursively(_oldProject.getProjects());
			for (ProjectModel _p : newProject.getProjects()) {
				indexProjectRecursively(_p);
			}
			_oldProject.setResources(newProject.getResources());
		}
		logger.info("updateProject(" + compId + ", "
				+ PrettyPrinter.prettyPrintAsJSON(_oldProject) + ") -> OK");
		if (isPersistent) {
			exportJson(dataF);
		}
		// NOTE(review): returns the caller-supplied newProject, not the
		// updated _oldProject — confirm this is intended.
		return newProject;
	}

	/**
	 * Delete a project (and all of its subprojects) wherever it is nested.
	 *
	 * @param compId the company ID
	 * @param projId the project ID
	 * @throws NotFoundException if company or project do not exist
	 */
	@Override
	public void deleteProject(
		String compId,
		String projId)
		throws NotFoundException {
		CompanyModel _company = readCompany(compId);
		ProjectModel _project = readProject(projId);

		// 1) remove all subprojects from this project
		removeProjectsRecursively(_project.getProjects());
		// 2) remove the project from the index
		projectIndex.remove(projId);
		// 3) remove the project from its company (if projId is a top-level project)
		_company.removeProject(projId);
		// 4) remove the subproject from its parent-project (if projId is a subproject)
		for (ProjectModel _p : projectIndex.values()) {
			_p.removeProject(projId);
		}
		logger.info("deleteProject(" + compId + ", " + projId + ") -> OK");
		if (isPersistent) {
			exportJson(dataF);
		}
	}

	/**
	 * Count the top-level projects of a company.
	 *
	 * @param compId the company ID
	 * @return the number of top-level projects
	 */
	@Override
	public int countProjects(
		String compId) {
		int _count = readCompany(compId).getProjects().size();
		logger.info("countProjects(" + compId + ") -> " + _count);
		return _count;
	}

	/**
	 * List the resources assigned to a project.
	 *
	 * @param projId the project ID
	 * @return the project's resources
	 */
	@Override
	public ArrayList<ResourceModel> listResources(
		String projId,
		String query,
		String queryType,
		long position,
		long size) {
		ProjectModel _project = readProject(projId);
		logger.info("listResources(" + projId + ") -> ");
		logger.info(PrettyPrinter.prettyPrintAsJSON(_project.getResources()));
		return _project.getResources();
	}

	// this _adds_ an existing resource to the resource list in project projId.
	// it does not create a new resource
	// the idea is to get (and administer) a resource in a separate service
	// (e.g. AddressBook)
	@Override
	public String addResource(
		String projId,
		String resourceId)
		throws NotFoundException, DuplicateException {
		ProjectModel _project = readProject(projId);
		// check on duplicate
		for (ResourceModel _resource : _project.getResources()) {
			if (_resource.getId().equals(resourceId)) {
				throw new DuplicateException("Resource " + resourceId
						+ " already exists in project" + _project.getId());
			}
		}
		// add the resource
		_project.getResources().add(new ResourceModel(resourceId));
		if (isPersistent) {
			exportJson(dataF);
		}
		return resourceId;
	}

	/**
	 * Remove a resource from a project's resource list.
	 *
	 * @param projId the project ID
	 * @param resourceId the resource to remove
	 * @throws NotFoundException if the resource is not in the project
	 */
	@Override
	public void removeResource(
		String projId,
		String resourceId)
		throws NotFoundException {
		ProjectModel _project = readProject(projId);
		// get the resource resourceId from resources
		// (returning from inside the loop avoids concurrent modification)
		for (ResourceModel _resource : _project.getResources()) {
			if (_resource.getId().equals(resourceId)) {
				_project.getResources().remove(_resource);
				if (isPersistent) {
					exportJson(dataF);
				}
				logger.info("removeResource(" + projId + ", " + resourceId
						+ ") -> resource removed.");
				return;
			}
		}
		throw new NotFoundException("Resource " + resourceId
				+ " was not found in project " + projId);
	}

	/**
	 * Count the resources assigned to a project.
	 *
	 * @param projId the project ID
	 * @return the number of resources
	 */
	@Override
	public int countResources(
		String projId)
		throws NotFoundException {
		int _retVal = 0;
		ProjectModel _project = readProject(projId);
		_retVal = _project.getResources().size();
		logger.info("countResources(" + projId + ") -> " + _retVal);
		return _retVal;
	}

	/**
	 * Recursively add all subprojects to the index.
	 *
	 * @param project
	 *            the new entry
	 */
	private void indexProjectRecursively(
		ProjectModel project) {
		projectIndex.put(project.getId(), project);
		for (ProjectModel _childProject : project.getProjects()) {
			indexProjectRecursively(_childProject);
		}
	}

	/**
	 * Recursively delete all subprojects from the index.
	 *
	 * @param childProjects
	 */
	private void removeProjectsRecursively(
		List<ProjectModel> childProjects) {
		for (ProjectModel _project : childProjects) {
			removeProjectsRecursively(_project.getProjects());
			projectIndex.remove(_project.getId());
		}
	}

	/** Loads the stores from disk, seeding and creating dataF if needed. */
	private void importJson() {
		// read the data file
		// either read persistent data from DATA_FN
		// or seed data from SEED_FN if no persistent data exists
		if (dataF.exists()) {
			logger.info("persistent data in file " + dataF.getName()
					+ " exists.");
			companies = importJson(dataF);
		} else { // seeding the data
			logger.info("persistent data in file " + dataF.getName()
					+ " is missing -> seeding from " + seedF.getName());
			companies = importJson(seedF);
		}
		// printProjectsRecursively(_wttData, "");
		// load the data into the local transient storage recursively
		int _companiesBefore = companyIndex.size();
		int _projectsBefore = projectIndex.size();
		int _resourcesBefore = resources.size();
		for (CompanyModel _company : companies) {
			indexCompany(_company);
			for (ProjectModel _project : _company.getProjects()) {
				indexProjectRecursively(_project);
			}
		}
		logger.info("added "
				+ (companyIndex.size() - _companiesBefore) + " Companies, "
				+ (projectIndex.size() - _projectsBefore) + " Projects,"
				+ (resources.size() - _resourcesBefore) + " Resources");

		// create the persistent data if it did not exist
		if (isPersistent && !dataF.exists()) {
			try {
				dataF.createNewFile();
			} catch (IOException e) {
				logger.severe("importJson(): IO exception when creating file "
						+ dataF.getName());
				e.printStackTrace();
			}
			exportJson(dataF);
		}
		logger.info("importJson(): imported " + companies.size()
				+ " wtt objects");
	}

	/**
	 * Deserializes a list of companies from the given JSON file.
	 *
	 * @param f the JSON file to read
	 * @return the deserialized companies
	 * @throws NotFoundException if the file does not exist
	 * @throws NotAllowedException if the file is not readable
	 */
	private ArrayList<CompanyModel> importJson(
		File f)
		throws NotFoundException, NotAllowedException {
		logger.info("importJson(" + f.getName() + "): importing CompanyData");
		if (!f.exists()) {
			logger.severe("importJson(" + f.getName()
					+ "): file does not exist.");
			throw new NotFoundException("File " + f.getName()
					+ " does not exist.");
		}
		if (!f.canRead()) {
			logger.severe("importJson(" + f.getName()
					+ "): file is not readable");
			throw new NotAllowedException("File " + f.getName()
					+ " is not readable.");
		}
		logger.info("importJson(" + f.getName() + "): can read the file.");

		Reader _reader = null;
		ArrayList<CompanyModel> _companies = null;
		try {
			_reader = new InputStreamReader(new FileInputStream(f));
			Gson _gson = new GsonBuilder().create();
			Type _collectionType = new TypeToken<ArrayList<CompanyModel>>() {
			}.getType();
			// NOTE(review): if the file is empty or holds invalid JSON,
			// _companies may remain null and the size() call below would
			// throw NPE — confirm inputs are always well-formed.
			_companies = _gson.fromJson(_reader, _collectionType);
			logger.info("importJson(" + f.getName()
					+ "): json data converted");
		} catch (FileNotFoundException e1) {
			logger.severe("importJson(" + f.getName()
					+ "): file does not exist (2).");
			e1.printStackTrace();
		} finally {
			try {
				if (_reader != null) {
					_reader.close();
				}
			} catch (IOException e) {
				logger.severe("importJson(" + f.getName()
						+ "): IOException when closing the reader.");
				e.printStackTrace();
			}
		}
		logger.info("importJson(" + f.getName() + "): " + _companies.size()
				+ " wtt objects imported.");
		return _companies;
	}

	/** Serializes the shared company list to the given file as pretty JSON. */
	private void exportJson(File f) {
		logger.info("exportJson(" + f.getName() + "): exporting wtt objects");
		Writer _writer = null;
		try {
			_writer = new OutputStreamWriter(new FileOutputStream(f));
			Gson _gson = new GsonBuilder().setPrettyPrinting().create();
			_gson.toJson(companies, _writer);
		} catch (FileNotFoundException e) {
			logger.severe("exportJson(" + f.getName() + "): file not found.");
			e.printStackTrace();
		} finally {
			if (_writer != null) {
				try {
					_writer.close();
				} catch (IOException e) {
					logger.severe("exportJson(" + f.getName()
							+ "): IOException when closing the reader.");
					e.printStackTrace();
				}
			}
		}
	}
}
package org.smoothbuild.task.exec; import javax.inject.Inject; import org.smoothbuild.command.CommandLineArguments; import org.smoothbuild.function.base.Function; import org.smoothbuild.function.base.Module; import org.smoothbuild.function.base.Name; import org.smoothbuild.message.listen.ErrorMessageException; import org.smoothbuild.message.message.CodeLocation; import org.smoothbuild.task.base.Result; import org.smoothbuild.task.base.Task; import org.smoothbuild.task.base.Taskable; import org.smoothbuild.task.exec.err.UnknownFunctionError; import org.smoothbuild.util.Empty; public class SmoothExecutor { private final TaskGenerator taskGenerator; @Inject public SmoothExecutor(TaskGenerator taskGenerator) { this.taskGenerator = taskGenerator; } public void execute(ExecutionData executionData) { CommandLineArguments args = executionData.args(); Module module = executionData.module(); Name name = args.functionToRun(); Function function = module.getFunction(name); if (function == null) { throw new ErrorMessageException(new UnknownFunctionError(name, module.availableNames())); } Result result = taskGenerator.generateTask(new TaskableCall(function)); try { result.result(); } catch (BuildInterruptedException e) { // Nothing to do. Just quit the build process. } } private static class TaskableCall implements Taskable { private final Function function; public TaskableCall(Function function) { this.function = function; } @Override public Task generateTask(TaskGenerator taskGenerator) { CodeLocation ignoredCodeLocation = null; return function.generateTask(taskGenerator, Empty.stringTaskResultMap(), ignoredCodeLocation); } } }
package org.dvb.event;

import java.awt.BDJHelper;
import java.util.Iterator;
import java.util.LinkedList;

import javax.tv.xlet.XletContext;

import org.davic.resources.ResourceClient;
import org.davic.resources.ResourceServer;
import org.davic.resources.ResourceStatusEvent;
import org.davic.resources.ResourceStatusListener;
import org.havi.ui.HScene;
import org.videolan.BDJAction;
import org.videolan.BDJAction;
import org.videolan.BDJActionManager;
import org.videolan.BDJXletContext;
import org.videolan.GUIManager;

/**
 * Singleton that routes incoming key events to BD-J xlets. Events can be
 * delivered three ways, checked in priority order by
 * {@link #receiveKeyEventN}:
 * <ol>
 *   <li>exclusive AWT access for the focused HScene's xlet,</li>
 *   <li>exclusive {@link UserEventListener} reservations,</li>
 *   <li>shared (non-exclusive) listeners, plus normal AWT dispatch.</li>
 * </ol>
 * Exclusive reservations are DAVIC resources: acquiring one may force other
 * xlets to release overlapping reservations, and status listeners are told
 * via {@link UserEventUnavailableEvent}/{@link UserEventAvailableEvent}.
 */
public class EventManager implements ResourceServer {
    /** Lazily creates and returns the process-wide instance. */
    public static EventManager getInstance() {
        synchronized (EventManager.class) {
            if (instance == null)
                instance = new EventManager();
        }
        return instance;
    }

    /**
     * Registers an exclusive listener for the given events, evicting
     * conflicting reservations held by other xlets first.
     *
     * @return false when a conflicting holder refused to release.
     * @throws IllegalArgumentException when client is null.
     */
    public boolean addUserEventListener(UserEventListener listener, ResourceClient client,
                                        UserEventRepository userEvents)
            throws IllegalArgumentException {
        if (client == null)
            throw new IllegalArgumentException();
        synchronized (this) {
            if (!cleanupReservedEvents(userEvents))
                return false;
            exclusiveUserEventListener.add(
                new UserEventItem(BDJXletContext.getCurrentContext(), listener, client, userEvents));
            sendResourceStatusEvent(new UserEventUnavailableEvent(userEvents));
            return true;
        }
    }

    /** Registers a shared (non-exclusive) listener for the given events. */
    public void addUserEventListener(UserEventListener listener, UserEventRepository userEvents) {
        if (listener == null || userEvents == null)
            throw new NullPointerException();
        synchronized (this) {
            sharedUserEventListener.add(
                new UserEventItem(BDJXletContext.getCurrentContext(), listener, null, userEvents));
        }
    }

    /**
     * Removes all (shared and exclusive) registrations of this listener made
     * by the calling xlet; released exclusive events are announced as
     * available again.
     */
    public void removeUserEventListener(UserEventListener listener) {
        BDJXletContext context = BDJXletContext.getCurrentContext();
        synchronized (this) {
            for (Iterator it = sharedUserEventListener.iterator(); it.hasNext(); ) {
                UserEventItem item = (UserEventItem)it.next();
                if ((item.context == context) && (item.listener == listener))
                    it.remove();
            }
            for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
                UserEventItem item = (UserEventItem)it.next();
                if ((item.context == context) && (item.listener == listener)) {
                    sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
                    it.remove();
                }
            }
        }
    }

    /**
     * Reserves the given events for exclusive AWT delivery to the calling
     * xlet, evicting conflicting reservations first.
     *
     * @return false when a conflicting holder refused to release.
     * @throws IllegalArgumentException when client is null.
     */
    public boolean addExclusiveAccessToAWTEvent(ResourceClient client, UserEventRepository userEvents)
            throws IllegalArgumentException {
        if (client == null)
            throw new IllegalArgumentException();
        synchronized (this) {
            if (!cleanupReservedEvents(userEvents))
                return false;
            exclusiveAWTEventListener.add(
                new UserEventItem(BDJXletContext.getCurrentContext(), null, client, userEvents));
            sendResourceStatusEvent(new UserEventUnavailableEvent(userEvents));
            return true;
        }
    }

    /** Drops the calling xlet's exclusive-AWT reservations held by this client. */
    public void removeExclusiveAccessToAWTEvent(ResourceClient client) {
        BDJXletContext context = BDJXletContext.getCurrentContext();
        synchronized (this) {
            for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
                UserEventItem item = (UserEventItem)it.next();
                if ((item.context == context) && (item.client == client)) {
                    sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
                    it.remove();
                }
            }
        }
    }

    public void addResourceStatusEventListener(ResourceStatusListener listener) {
        synchronized (this) {
            resourceStatusEventListeners.add(listener);
        }
    }

    public void removeResourceStatusEventListener(ResourceStatusListener listener) {
        synchronized (this) {
            resourceStatusEventListeners.remove(listener);
        }
    }

    /** Notifies every registered status listener of a reservation change. */
    private void sendResourceStatusEvent(ResourceStatusEvent event) {
        for (Iterator it = resourceStatusEventListeners.iterator(); it.hasNext(); )
            ((ResourceStatusListener)it.next()).statusChanged(event);
    }

    public void receiveKeyEvent(int type, int modifiers, int keyCode) {
        receiveKeyEventN(type, modifiers, keyCode);
    }

    /**
     * Dispatches one key event. Checks, in order: exclusive-AWT reservation
     * of the focused HScene's xlet, exclusive user-event reservations, then
     * AWT delivery plus all matching shared listeners.
     *
     * @return true when the event was delivered to at least one consumer.
     */
    public boolean receiveKeyEventN(int type, int modifiers, int keyCode) {
        // Family 1 is the key-event family used throughout this class.
        UserEvent ue = new UserEvent(this, 1, type, keyCode, modifiers, System.currentTimeMillis());

        HScene focusHScene = GUIManager.getInstance().getFocusHScene();
        boolean result = false;
        if (focusHScene != null) {
            BDJXletContext context = focusHScene.getXletContext();
            for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
                UserEventItem item = (UserEventItem)it.next();
                if (item.context == context) {
                    if (item.userEvents.contains(ue)) {
                        result = BDJHelper.postKeyEvent(type, modifiers, keyCode);
                        return result;
                    }
                }
            }
        }

        for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
            UserEventItem item = (UserEventItem)it.next();
            if (item.userEvents.contains(ue)) {
                BDJActionManager.getInstance().putCallback(new UserEventAction(item, ue));
                return true;
            }
        }

        result = BDJHelper.postKeyEvent(type, modifiers, keyCode);
        for (Iterator it = sharedUserEventListener.iterator(); it.hasNext(); ) {
            UserEventItem item = (UserEventItem)it.next();
            if (item.userEvents.contains(ue)) {
                BDJActionManager.getInstance().putCallback(new UserEventAction(item, ue));
                result = true;
            }
        }
        return result;
    }

    /**
     * Evicts other xlets' exclusive reservations that overlap the requested
     * repository, asking each holder's ResourceClient for permission.
     *
     * @return false as soon as one holder refuses to release.
     */
    private boolean cleanupReservedEvents(UserEventRepository userEvents) {
        BDJXletContext context = BDJXletContext.getCurrentContext();
        for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
            UserEventItem item = (UserEventItem)it.next();
            if (item.context == context)
                continue;  // the caller's own reservations never conflict
            if (hasOverlap(userEvents, item.userEvents)) {
                if (!item.client.requestRelease(item.userEvents, null))
                    return false;
                sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
                it.remove();
            }
        }
        for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
            UserEventItem item = (UserEventItem)it.next();
            if (item.context == context)
                continue;
            if (hasOverlap(userEvents, item.userEvents)) {
                if (!item.client.requestRelease(item.userEvents, null))
                    return false;
                sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
                it.remove();
            }
        }
        return true;
    }

    /**
     * Two repositories overlap when they contain the same event: same family
     * AND same code.
     *
     * <p>FIX: the code comparison previously used {@code !=}, so repositories
     * reserving the identical event were NOT treated as conflicting, while
     * any two different codes in the same family were — inverting the
     * exclusive-reservation conflict test.
     */
    private boolean hasOverlap(UserEventRepository userEvents1, UserEventRepository userEvents2) {
        UserEvent[] evts1 = userEvents1.getUserEvent();
        UserEvent[] evts2 = userEvents2.getUserEvent();
        for (int i = 0; i < evts1.length; i++) {
            UserEvent evt1 = evts1[i];
            for (int j = 0; j < evts2.length; j++) {
                UserEvent evt2 = evts2[j];
                if ((evt1.getFamily() == evt2.getFamily()) &&
                    (evt1.getCode() == evt2.getCode()))
                    return true;
            }
        }
        return false;
    }

    /** One registration: owning xlet, listener (null for AWT access), client (null for shared), events. */
    private class UserEventItem {
        public UserEventItem(BDJXletContext context, UserEventListener listener,
                             ResourceClient client, UserEventRepository userEvents) {
            this.context = context;
            this.listener = listener;
            this.client = client;
            // Snapshot the repository so later caller mutations don't affect the registration.
            this.userEvents = userEvents.getNewInstance();
        }

        public BDJXletContext context;
        public UserEventListener listener;
        public ResourceClient client;
        public UserEventRepository userEvents;
    }

    /** Deferred callback that delivers one event to one listener on the xlet's action queue. */
    private class UserEventAction extends BDJAction {
        public UserEventAction(UserEventItem item, UserEvent event) {
            super(item.context);
            this.listener = item.listener;
            this.event = event;
        }

        protected void doAction() {
            listener.userEventReceived(event);
        }

        private UserEventListener listener;
        private UserEvent event;
    }

    private LinkedList exclusiveUserEventListener = new LinkedList();
    private LinkedList sharedUserEventListener = new LinkedList();
    private LinkedList exclusiveAWTEventListener = new LinkedList();
    private LinkedList resourceStatusEventListeners = new LinkedList();

    private static EventManager instance = null;
}
package pro.taskana.impl;

import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import pro.taskana.BulkOperationResults;
import pro.taskana.TaskanaEngine;
import pro.taskana.TaskanaTransactionProvider;
import pro.taskana.exceptions.SystemException;
import pro.taskana.impl.util.LoggerUtils;
import pro.taskana.mappings.JobMapper;

/**
 * This is the runner for all jobs scheduled in the Job table.
 *
 * <p>Jobs are fetched and executed until none remain. Each job runs in its
 * own transaction when a {@link TaskanaTransactionProvider} has been
 * registered; otherwise everything runs on the engine's plain connection.
 * A failing job is bisected into two half-sized jobs (up to
 * {@code maxRetryCount} attempts) before being marked FAILED.
 *
 * @author bbr
 */
public class JobRunner {

    private static final Logger LOGGER = LoggerFactory.getLogger(JobRunner.class);
    private TaskanaEngineImpl taskanaEngine;
    // Mapper bound to the engine's session at construction time; helper
    // methods below re-fetch the mapper per call instead of reusing this.
    private JobMapper jobMapper;
    private int maxRetryCount;
    // Optional; null means "no surrounding transaction management".
    private TaskanaTransactionProvider<BulkOperationResults<String, Exception>> txProvider;

    public JobRunner(TaskanaEngine taskanaEngine) {
        this.taskanaEngine = (TaskanaEngineImpl) taskanaEngine;
        jobMapper = this.taskanaEngine.getSqlSession().getMapper(JobMapper.class);
        maxRetryCount = taskanaEngine.getConfiguration().getMaxNumberOfRetriesOfFailedTaskUpdates();
        txProvider = null;
    }

    /** Installs the transaction provider used to wrap each job in its own transaction. */
    public void registerTransactionProvider(
        TaskanaTransactionProvider<BulkOperationResults<String, Exception>> txProvider) {
        this.txProvider = txProvider;
    }

    /**
     * Runs all runnable jobs until the job table yields none, accumulating
     * per-object errors into the returned result. Never throws: unexpected
     * exceptions are recorded and, when attributable, the offending job is
     * marked FAILED.
     */
    public BulkOperationResults<String, Exception> runJobs() {
        LOGGER.info("entry to runJobs()");
        BulkOperationResults<String, Exception> bulkLog = new BulkOperationResults<>();
        Job currentlyProcessedJob = null;
        try {
            List<Job> jobs = findJobsToRun();
            while (!jobs.isEmpty()) {
                // run as long as Jobs are available for processing
                for (Job job : jobs) {
                    currentlyProcessedJob = job;
                    processAJob(bulkLog, job);
                }
                jobs = findJobsToRun();
            }
            return bulkLog;
        } catch (Exception e) {
            if (currentlyProcessedJob != null) {
                bulkLog.addError("JobId:" + currentlyProcessedJob.getJobId(), e);
                setJobFailed(currentlyProcessedJob, bulkLog);
                return bulkLog;
            } else {
                // NOTE(review): the "{}" placeholder consumes the throwable as a
                // string; SLF4J prints the full stack trace only when the exception
                // is passed without a matching placeholder.
                LOGGER.error("tried to run jobs and caught exception {} ", e);
                bulkLog.addError("unknown", e);
                return bulkLog;
            }
        } finally {
            taskanaEngine.returnConnection();
            LOGGER.info("exit from runJobs(). Returning result {} ", bulkLog);
        }
    }

    /** Fetches the currently runnable jobs, inside a transaction when available. */
    private List<Job> findJobsToRun() {
        final List<Job> result = new ArrayList<>();
        if (txProvider != null) {
            txProvider.executeInTransaction(() -> {
                // each job in its own transaction
                try {
                    taskanaEngine.openConnection();
                    doFindJobsToRun(result);
                    return null;
                } finally {
                    taskanaEngine.returnConnection();
                }
            });
        } else {
            doFindJobsToRun(result);
        }
        return result;
    }

    // Return type matches the transaction callable's signature; always returns null.
    private BulkOperationResults<String, Exception> doFindJobsToRun(List<Job> jobs) {
        List<Job> found = taskanaEngine.getSqlSession().getMapper(JobMapper.class).findJobsToRun();
        jobs.addAll(found);
        return null;
    }

    /**
     * Executes one job. Per-task errors are retried via
     * {@link #handleRetryForFailuresFromBulkOperationResult}; a rolled-back
     * transaction triggers bisection, or — when retries are exhausted —
     * marks every object of the job as failed.
     */
    private void processAJob(BulkOperationResults<String, Exception> bulkLog, Job job) {
        BulkOperationResults<String, Exception> log;
        try {
            if (txProvider != null) {
                log = txProvider.executeInTransaction(() -> {
                    // each job in its own transaction
                    try {
                        taskanaEngine.openConnection();
                        return runSingleJob(job);
                    } finally {
                        taskanaEngine.returnConnection();
                    }
                });
            } else {
                log = runSingleJob(job);
            }
            if (log != null && log.containsErrors() && Job.Type.UPDATETASKSJOB.equals(job.getType())) {
                handleRetryForFailuresFromBulkOperationResult(bulkLog, job, log);
            }
        } catch (Exception e) {
            // transaction was rolled back -> split job into 2 half sized jobs
            LOGGER.warn("Processing of job " + job.getJobId() + " failed. Trying to split it up into two pieces...", e);
            if (job.getRetryCount() < maxRetryCount) {
                rescheduleBisectedJob(bulkLog, job);
            } else {
                // Retries exhausted: attribute the exception to every object the job covers.
                List<String> objectIds;
                if (job.getType().equals(Job.Type.UPDATETASKSJOB)) {
                    String taskIdsAsString = job.getArguments().get(SingleJobExecutor.TASKIDS);
                    objectIds = Arrays.asList(taskIdsAsString.split(","));
                } else if (job.getType().equals(Job.Type.CLASSIFICATIONCHANGEDJOB)) {
                    String classificationId = job.getArguments().get(SingleJobExecutor.CLASSIFICATION_ID);
                    objectIds = Arrays.asList(classificationId);
                } else {
                    throw new SystemException("Unknown Jobtype " + job.getType() + " encountered.");
                }
                for (String objectId : objectIds) {
                    bulkLog.addError(objectId, e);
                }
                setJobFailed(job, bulkLog);
            }
        }
    }

    /** Marks the job FAILED (own transaction when available); never throws. */
    private void setJobFailed(Job job, BulkOperationResults<String, Exception> bulkLog) {
        try {
            if (txProvider != null) {
                txProvider.executeInTransaction(() -> {
                    // each job in its own transaction
                    try {
                        taskanaEngine.openConnection();
                        return doSetJobFailed(job, bulkLog);
                    } finally {
                        taskanaEngine.returnConnection();
                    }
                });
            } else {
                doSetJobFailed(job, bulkLog);
            }
        } catch (Exception e) {
            // transaction was rolled back -> log an Error
            LOGGER.error("attempted to set job {} to failed, but caught Exception {}", job, e);
        }
    }

    /** Persists FAILED state plus a textual dump of the accumulated errors. */
    private BulkOperationResults<String, Exception> doSetJobFailed(Job job,
        BulkOperationResults<String, Exception> bulkLog) {
        job.setState(Job.State.FAILED);
        if (job.getStarted() == null) {
            // Job may have failed before ever being started.
            job.setStarted(Instant.now());
        }
        if (bulkLog.containsErrors()) {
            Map<String, Exception> errors = bulkLog.getErrorMap();
            job.setErrors(LoggerUtils.mapToString(errors));
        }
        taskanaEngine.getSqlSession().getMapper(JobMapper.class).update(job);
        return null;
    }

    /**
     * For partially-failed UPDATETASKSJOBs: schedule a retry job covering
     * only the failed task ids, or — when retries are exhausted — merge the
     * errors into bulkLog and mark the job FAILED.
     */
    private void handleRetryForFailuresFromBulkOperationResult(BulkOperationResults<String, Exception> bulkLog,
        Job job, BulkOperationResults<String, Exception> errorLogForThisJob) {
        if (job.getRetryCount() < maxRetryCount) {
            if (errorLogForThisJob.containsErrors()) {
                List<String> failedTasks = errorLogForThisJob.getFailedIds();
                if (!failedTasks.isEmpty()) {
                    // some tasks failed to be processed
                    LOGGER.error("Errors occurred when running job {}. Processing will be retried", job);
                    scheduleRetryJob(job, failedTasks);
                }
            }
        } else {
            bulkLog.addAllErrors(errorLogForThisJob);
            setJobFailed(job, errorLogForThisJob);
        }
    }

    /** Replaces a rolled-back job by two half-sized jobs (own transaction when available). */
    private void rescheduleBisectedJob(BulkOperationResults<String, Exception> bulkLog, Job job) {
        // the transaction that processed the job was rolled back.
        try {
            if (txProvider != null) {
                txProvider.executeInTransaction(() -> {
                    // each job in its own transaction
                    try {
                        taskanaEngine.openConnection();
                        return doRescheduleBisectedJob(job);
                    } finally {
                        taskanaEngine.returnConnection();
                    }
                });
            } else {
                doRescheduleBisectedJob(job);
            }
        } catch (Exception e) {
            // transaction was rolled back -> log an Error
            LOGGER.error("attempted to reschedule bisected jobs for {}, but caught Exception {}", job, e);
        }
    }

    /**
     * Splits an UPDATETASKSJOB with >= 2 task ids into two new READY jobs and
     * deletes the original; any other job type is simply reset to READY with
     * an incremented retry count.
     */
    private BulkOperationResults<String, Exception> doRescheduleBisectedJob(Job job) {
        if (job.getType().equals(Job.Type.UPDATETASKSJOB)) {
            // split the job in halves
            Map<String, String> args = job.getArguments();
            String taskIdsString = args.get(SingleJobExecutor.TASKIDS);
            List<String> taskIds = Arrays.asList(taskIdsString.split(","));
            int size = taskIds.size();
            if (size >= 2) {
                // ceil(size / 2) so the first partition holds the extra element
                int halfSize = size % 2 == 0 ? size / 2 : (size / 2 + 1);
                List<List<String>> taskIdListsForNewJobs = partition(taskIds, halfSize);
                // now schedule new tasks
                for (List<String> halfSizedTaskIds : taskIdListsForNewJobs) {
                    Job newJob = new Job();
                    newJob.setCreated(Instant.now());
                    if (halfSize > 1) {
                        // Still splittable: give the new job a fresh retry budget.
                        newJob.setRetryCount(0);
                    } else {
                        // Down to single tasks: count this as one more retry.
                        newJob.setRetryCount(job.getRetryCount() + 1);
                    }
                    newJob.setState(Job.State.READY);
                    newJob.setType(job.getType());
                    // NOTE(review): both new jobs are handed the SAME args map
                    // instance, mutated per iteration — presumably insertJob
                    // serializes the map immediately so each row gets its own
                    // TASKIDS value; confirm, otherwise the in-memory jobs alias.
                    args.put(SingleJobExecutor.TASKIDS, String.join(",", halfSizedTaskIds));
                    newJob.setArguments(args);
                    newJob.setCreated(Instant.now());
                    newJob.setExecutor(job.getExecutor());
                    taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(newJob);
                }
                LOGGER.debug("doRescheduleBisectedJob deleting job {} ", job);
                taskanaEngine.getSqlSession().getMapper(JobMapper.class).delete(job);
            }
            // NOTE(review): a single-task UPDATETASKSJOB (size < 2) is left
            // untouched here — verify that this is handled elsewhere.
        } else {
            // take care that the job is re-executed
            job.setState(Job.State.READY);
            job.setRetryCount(job.getRetryCount() + 1);
            taskanaEngine.getSqlSession().getMapper(JobMapper.class).update(job);
        }
        return null;
    }

    /** Schedules a retry job for the failed tasks of an UPDATETASKSJOB; never throws. */
    private void scheduleRetryJob(Job job, List<String> failedTasks) {
        if (job.getType().equals(Job.Type.UPDATETASKSJOB)) {
            try {
                if (txProvider != null) {
                    txProvider.executeInTransaction(() -> {
                        // each job in its own transaction
                        try {
                            taskanaEngine.openConnection();
                            return doScheduleRetryJob(job, failedTasks);
                        } finally {
                            taskanaEngine.returnConnection();
                        }
                    });
                } else {
                    doScheduleRetryJob(job, failedTasks);
                }
            } catch (Exception e) {
                // transaction was rolled back -> log an Error
                LOGGER.error("attempted to reschedule bisected jobs for {}, but caught Exception {}", job, e);
            }
        }
    }

    /** Inserts a READY clone of the job restricted to the failed task ids, then deletes the original. */
    private BulkOperationResults<String, Exception> doScheduleRetryJob(Job job, List<String> failedTasks) {
        LOGGER.debug("entry to doScheduleRetryJob for job {} and failedTasks {}", job,
            LoggerUtils.listToString(failedTasks));
        Map<String, String> args = job.getArguments();
        Job newJob = new Job();
        newJob.setCreated(Instant.now());
        newJob.setRetryCount(job.getRetryCount() + 1);
        newJob.setState(Job.State.READY);
        newJob.setType(job.getType());
        // NOTE(review): mutates the original job's argument map in place.
        args.put(SingleJobExecutor.TASKIDS, String.join(",", failedTasks));
        newJob.setArguments(args);
        newJob.setExecutor(job.getExecutor());
        taskanaEngine.getSqlSession().getMapper(JobMapper.class).insertJob(newJob);
        LOGGER.debug("doScheduleRetryJob deleting job {} and scheduling {} ", job, newJob);
        taskanaEngine.getSqlSession().getMapper(JobMapper.class).delete(job);
        return null;
    }

    /**
     * Marks the job RUNNING, instantiates its executor class reflectively and
     * runs it. The job row is deleted on a fully clean run; otherwise it is
     * left for the retry machinery.
     *
     * @throws SystemException when the executor class cannot be loaded/instantiated.
     */
    private BulkOperationResults<String, Exception> runSingleJob(Job job) {
        LOGGER.debug("entry to runSingleJob(job = {})", job);
        BulkOperationResults<String, Exception> bulkLog;
        if (job.getStarted() == null) {
            job.setStarted(Instant.now());
        }
        job.setState(Job.State.RUNNING);
        jobMapper.update(job);
        SingleJobExecutor executor;
        try {
            executor = (SingleJobExecutor) Class.forName(job.getExecutor()).newInstance();
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
            LOGGER.error("When attempting to load class {} caught Exception {} ", job.getExecutor(), e);
            throw new SystemException(
                "When attempting to load class " + job.getExecutor() + " caught Exception " + e.getMessage(), e);
        }
        bulkLog = executor.runSingleJob(job, taskanaEngine);

        if (!bulkLog.containsErrors()) {
            // NOTE(review): "deletin" is a typo in this log message.
            LOGGER.debug("runSingleJob deletin job {} ", job);
            jobMapper.delete(job);
        }
        LOGGER.debug("exit from runSingleJob");
        return bulkLog;
    }

    /**
     * Splits {@code members} into consecutive chunks of at most
     * {@code maxSize} elements, preserving iteration order; the last chunk
     * may be shorter.
     */
    static <T> List<List<T>> partition(Collection<T> members, int maxSize) {
        List<List<T>> result = new ArrayList<>();
        List<T> internal = new ArrayList<>();
        for (T member : members) {
            internal.add(member);
            if (internal.size() == maxSize) {
                result.add(internal);
                internal = new ArrayList<>();
            }
        }
        if (!internal.isEmpty()) {
            result.add(internal);
        }
        return result;
    }
}
package org.wysaid.view; import android.content.Context; import android.content.pm.PackageManager; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.PixelFormat; import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.util.AttributeSet; import android.util.Log; import android.view.SurfaceHolder; import org.wysaid.camera.CameraInstance; import org.wysaid.myUtils.Common; import org.wysaid.myUtils.FrameBufferObject; import org.wysaid.nativePort.CGEFrameRecorder; import org.wysaid.nativePort.CGENativeLibrary; import org.wysaid.texUtils.TextureRenderer; import org.wysaid.texUtils.TextureRendererDrawOrigin; import java.nio.IntBuffer; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; public class FilterGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener { public static final String LOG_TAG = Common.LOG_TAG; public int maxTextureSize = 0; public int viewWidth; public int viewHeight; private int mRecordWidth = 480; private int mRecordHeight = 640; private SurfaceTexture mSurfaceTexture; private int mTextureID; private CGEFrameRecorder mFrameRecorder; public CGEFrameRecorder getRecorder() { return mFrameRecorder; } private Context mContext; public int maxPreviewWidth = 1280; public int maxPreviewHeight = 1280; //preview void setMaxPreviewSize(int w, int h) { maxPreviewWidth = w; maxPreviewHeight = h; } public class ClearColor { public float r, g, b, a; } public ClearColor clearColor; private TextureRenderer.Viewport mDrawViewport; private boolean mIsUsingMask = false; public boolean isUsingMask() { return mIsUsingMask; } private float mMaskAspectRatio = 1.0f; private float[] mTransformMatrix = new float[16]; private boolean mIsCameraBackForward = true; public boolean 
isCameraBackForward() { return mIsCameraBackForward; } public void setClearColor(float r, float g, float b, float a) { clearColor.r = r; clearColor.g = g; clearColor.b = b; clearColor.a = a; queueEvent(new Runnable() { @Override public void run() { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); GLES20.glClearColor(clearColor.r, clearColor.g, clearColor.b, clearColor.a); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); } }); } private CameraInstance cameraInstance() { return CameraInstance.getInstance(); } //onSurfaceCreated public void presetCameraForward(boolean isBackForward) { mIsCameraBackForward = isBackForward; } // preview //widthheight //onSurfaceCreated public void presetRecordingSize(int width, int height) { if(width > maxPreviewWidth || height > maxPreviewHeight) { float scaling = Math.min(maxPreviewWidth / (float)width, maxPreviewHeight / (float)height); width = (int)(width * scaling); height = (int)(height * scaling); } mRecordWidth = width; mRecordHeight = height; cameraInstance().setPreferPreviewSize(width, height); } public synchronized void switchCamera() { mIsCameraBackForward = !mIsCameraBackForward; if(mFrameRecorder != null) { queueEvent(new Runnable() { @Override public void run() { cameraInstance().stopCamera(); int facing = mIsCameraBackForward ? 
Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT; mFrameRecorder.setSrcRotation((float) (Math.PI / 2.0)); mFrameRecorder.setRenderFlipScale(1.0f, -1.0f); if (mIsUsingMask) { mFrameRecorder.setMaskTextureRatio(mMaskAspectRatio); } cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() { @Override public void cameraReady() { if (!cameraInstance().isPreviewing()) { Log.i(LOG_TAG, "## switch camera -- start preview..."); cameraInstance().startPreview(mSurfaceTexture); mFrameRecorder.srcResize(cameraInstance().previewHeight(), cameraInstance().previewWidth()); } } }, facing); requestRender(); } }); } } // Camera.Parameters.FLASH_MODE_AUTO; // Camera.Parameters.FLASH_MODE_OFF; // Camera.Parameters.FLASH_MODE_ON; // Camera.Parameters.FLASH_MODE_RED_EYE // Camera.Parameters.FLASH_MODE_TORCH public synchronized boolean setFlashLightMode(String mode) { if(!mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) { Log.e(LOG_TAG, "!"); return false; } if(!mIsCameraBackForward) { return false; } Camera.Parameters parameters = cameraInstance().getParams(); try { if(!parameters.getSupportedFlashModes().contains(mode)) { Log.e(LOG_TAG, "Invalid Flash Light Mode!!!"); return false; } parameters.setFlashMode(mode); cameraInstance().setParams(parameters); } catch (Exception e) { Log.e(LOG_TAG, ", ?"); return false; } return true; } public synchronized void setFilterWithConfig(final String config) { queueEvent(new Runnable() { @Override public void run() { assert mFrameRecorder != null && config != null : "Recorder & Config must not be null!"; mFrameRecorder.setFilterWidthConfig(config); } }); } public void setFilterIntensity(final float intensity) { queueEvent(new Runnable() { @Override public void run() { assert mFrameRecorder != null : "Recorder must not be null!"; mFrameRecorder.setFilterIntensity(intensity); } }); } public interface SetMaskBitmapCallback { void setMaskOK(CGEFrameRecorder recorder); } public 
void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle) { setMaskBitmap(bmp, shouldRecycle, null); } // bmpnull SetMaskBitmapCallback . public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle, final SetMaskBitmapCallback callback) { queueEvent(new Runnable() { @Override public void run() { if (bmp == null) { mFrameRecorder.setMaskTexture(0, 1.0f); mIsUsingMask = false; calcViewport(); return; } int texID = Common.genNormalTextureID(bmp, GLES20.GL_NEAREST, GLES20.GL_CLAMP_TO_EDGE); mFrameRecorder.setMaskTexture(texID, bmp.getWidth() / (float) bmp.getHeight()); mIsUsingMask = true; mMaskAspectRatio = bmp.getWidth() / (float) bmp.getHeight(); if (callback != null) { callback.setMaskOK(mFrameRecorder); } if (shouldRecycle) bmp.recycle(); calcViewport(); } }); } public interface SetBackgroundImageCallback { void setBackgroundImageOK(); } TextureRendererDrawOrigin mBackgroundRenderer; int mBackgroundTexture; public void setBackgroundImage(final Bitmap bmp, final boolean shouldRecycle, final SetBackgroundImageCallback callback) { queueEvent(new Runnable() { @Override public void run() { if(bmp == null) { if(mBackgroundRenderer != null) mBackgroundRenderer.release(); mBackgroundRenderer = null; if(mBackgroundTexture != 0) GLES20.glDeleteTextures(1, new int[] {mBackgroundTexture}, 0); mBackgroundTexture = 0; if(callback != null) callback.setBackgroundImageOK(); return; } if(mBackgroundTexture != 0) { GLES20.glDeleteTextures(1, new int[]{mBackgroundTexture}, 0); } mBackgroundTexture = Common.genNormalTextureID(bmp, GLES20.GL_NEAREST, GLES20.GL_CLAMP_TO_EDGE); if(mBackgroundRenderer == null) { mBackgroundRenderer = TextureRendererDrawOrigin.create(false); mBackgroundRenderer.setFlipscale(1.0f, -1.0f); } if(shouldRecycle) bmp.recycle(); if(callback != null) callback.setBackgroundImageOK(); } }); } public interface OnCreateCallback { void createOK(); } private OnCreateCallback mOnCreateCallback; public void setOnCreateCallback(final OnCreateCallback 
callback) { assert callback != null : "!"; if(mFrameRecorder == null) { mOnCreateCallback = callback; } else { queueEvent(new Runnable() { @Override public void run() { callback.createOK(); } }); } } public FilterGLSurfaceView(Context context, AttributeSet attrs) { super(context, attrs); Log.i(LOG_TAG, "MyGLSurfaceView Construct..."); setEGLContextClientVersion(2); setEGLConfigChooser(8, 8, 8, 8, 8, 0); getHolder().setFormat(PixelFormat.RGBA_8888); setRenderer(this); // setRenderMode(RENDERMODE_WHEN_DIRTY); setRenderMode(RENDERMODE_CONTINUOUSLY); setZOrderOnTop(true); // setZOrderMediaOverlay(true); clearColor = new ClearColor(); mContext = context; } @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.i(LOG_TAG, "onSurfaceCreated..."); GLES20.glDisable(GLES20.GL_DEPTH_TEST); GLES20.glDisable(GLES20.GL_STENCIL_TEST); GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); int texSize[] = new int[1]; GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, texSize, 0); maxTextureSize = texSize[0]; mTextureID = Common.genSurfaceTextureID(); mSurfaceTexture = new SurfaceTexture(mTextureID); mSurfaceTexture.setOnFrameAvailableListener(this); mFrameRecorder = new CGEFrameRecorder(); if(!mFrameRecorder.init(mRecordWidth, mRecordHeight, mRecordWidth, mRecordHeight)) { Log.e(LOG_TAG, "Frame Recorder init failed!"); } mFrameRecorder.setSrcRotation((float) (Math.PI / 2.0)); mFrameRecorder.setSrcFlipScale(1.0f, -1.0f); mFrameRecorder.setRenderFlipScale(1.0f, -1.0f); requestRender(); if(!cameraInstance().isCameraOpened()) { int facing = mIsCameraBackForward ? 
Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT; cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() { @Override public void cameraReady() { Log.i(LOG_TAG, "tryOpenCamera OK..."); } }, facing); } if(mOnCreateCallback != null) { mOnCreateCallback.createOK(); } } private void calcViewport() { float scaling; if(mIsUsingMask) { scaling = mMaskAspectRatio; } else { scaling = mRecordWidth / (float)mRecordHeight; } float viewRatio = viewWidth / (float)viewHeight; float s = scaling / viewRatio; int w, h; if(s > 1.0) { w = viewWidth; h = (int)(viewWidth / scaling); } else { h = viewHeight; w = (int)(viewHeight * scaling); } mDrawViewport = new TextureRenderer.Viewport(); mDrawViewport.width = w; mDrawViewport.height = h; mDrawViewport.x = (viewWidth - mDrawViewport.width) / 2; mDrawViewport.y = (viewHeight - mDrawViewport.height) / 2; Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height)); } public interface ReleaseOKCallback { void releaseOK(); } public void release(final ReleaseOKCallback callback) { synchronized (this) { if(mFrameRecorder != null) { queueEvent(new Runnable() { @Override public void run() { mFrameRecorder.release(); mFrameRecorder = null; GLES20.glDeleteTextures(1, new int[]{mTextureID}, 0); mTextureID = 0; mSurfaceTexture.release(); mSurfaceTexture = null; if(mBackgroundRenderer != null) { mBackgroundRenderer.release(); mBackgroundRenderer = null; } if(mBackgroundTexture != 0) { GLES20.glDeleteTextures(1, new int[]{mBackgroundTexture}, 0); mBackgroundTexture = 0; } Log.i(LOG_TAG, "glsurfaceview release..."); if(callback != null) callback.releaseOK(); } }); } } } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { Log.i(LOG_TAG, String.format("onSurfaceChanged: %d x %d", width, height)); GLES20.glClearColor(clearColor.r, clearColor.g, clearColor.b, clearColor.a); viewWidth = width; viewHeight = height; 
calcViewport(); if(!cameraInstance().isPreviewing()) { cameraInstance().startPreview(mSurfaceTexture); mFrameRecorder.srcResize(cameraInstance().previewHeight(), cameraInstance().previewWidth()); } } @Override public void surfaceDestroyed(SurfaceHolder holder) { super.surfaceDestroyed(holder); cameraInstance().stopCamera(); } private long mTimeCount = 0; private long mFramesCount = 0; private long mLastTimestamp = 0; public void stopPreview() { queueEvent(new Runnable() { @Override public void run() { cameraInstance().stopPreview(); } }); } public synchronized void resumePreview() { if(!cameraInstance().isCameraOpened()) { int facing = mIsCameraBackForward ? Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT; cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() { @Override public void cameraReady() { Log.i(LOG_TAG, "tryOpenCamera OK..."); } }, facing); } if(!cameraInstance().isPreviewing()) { cameraInstance().startPreview(mSurfaceTexture); mFrameRecorder.srcResize(cameraInstance().previewHeight(), cameraInstance().previewWidth()); } requestRender(); } @Override public void onDrawFrame(GL10 gl) { if(mSurfaceTexture == null || !cameraInstance().isPreviewing()) { if(mFrameRecorder != null) { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); mFrameRecorder.render(mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height); } return; } mSurfaceTexture.updateTexImage(); mSurfaceTexture.getTransformMatrix(mTransformMatrix); mFrameRecorder.update(mTextureID, mTransformMatrix); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); synchronized (mThunbnailLock) { if(mTakeThunbnailCallback != null) { GLES20.glViewport(0, 0, mThunbnailWidth, mThunbnailHeight); mFrameRecorder.drawCache(); mThunbnailBuffer.position(0); GLES20.glReadPixels(0, 0, mThunbnailWidth, mThunbnailHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mThunbnailBuffer); 
mThunbnailBmp.copyPixelsFromBuffer(mThunbnailBuffer); post(new Runnable() { @Override public void run() { synchronized (mThunbnailLock) { if(mTakeThunbnailCallback != null) mTakeThunbnailCallback.takeThunbnailOK(mThunbnailBmp); } } }); } } GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); if(mBackgroundRenderer != null) { GLES20.glViewport(0, 0, viewWidth, viewHeight); mBackgroundRenderer.renderTexture(mBackgroundTexture, null); } GLES20.glEnable(GLES20.GL_BLEND); mFrameRecorder.render(mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height); GLES20.glDisable(GLES20.GL_BLEND); if(mLastTimestamp == 0) mLastTimestamp = System.currentTimeMillis(); long currentTimestamp = System.currentTimeMillis(); ++mFramesCount; mTimeCount += currentTimestamp - mLastTimestamp; mLastTimestamp = currentTimestamp; if(mTimeCount >= 1000) { Log.i(LOG_TAG, String.format(": %d", mFramesCount)); mTimeCount %= 1000; mFramesCount = 0; } } @Override public void onResume() { super.onResume(); Log.i(LOG_TAG, "onResume..."); // int facing = mIsCameraBackForward ? 
Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT; // cameraInstance().tryOpenCamera(null, facing); } @Override public void onPause() { Log.i(LOG_TAG, "surfaceview onPause in..."); cameraInstance().stopCamera(); super.onPause(); Log.i(LOG_TAG, "surfaceview onPause out..."); } private long mTimeCount2 = 0; private long mFramesCount2 = 0; private long mLastTimestamp2 = 0; @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { // Log.i(LOG_TAG, "onFrameAvailable..."); // requestRender(); if(mLastTimestamp2 == 0) mLastTimestamp2 = System.currentTimeMillis(); long currentTimestamp = System.currentTimeMillis(); ++mFramesCount2; mTimeCount2 += currentTimestamp - mLastTimestamp2; mLastTimestamp2 = currentTimestamp; if(mTimeCount2 >= 1000) { Log.i(LOG_TAG, String.format(": %d", mFramesCount2)); mTimeCount2 %= 1000; mFramesCount2 = 0; } } public interface TakeThunbnailCallback { void takeThunbnailOK(Bitmap bmp); } private Bitmap mThunbnailBmp; private TakeThunbnailCallback mTakeThunbnailCallback; private final int[] mThunbnailLock = new int[0]; private int mThunbnailWidth, mThunbnailHeight; private IntBuffer mThunbnailBuffer; public boolean isTakingThunbnail() { boolean status; synchronized (mThunbnailLock) { status = mTakeThunbnailCallback != null; } return status; } public void startThunbnailCliping(Bitmap cache, TakeThunbnailCallback callback) { synchronized (mThunbnailLock) { mTakeThunbnailCallback = callback; mThunbnailBmp = cache; mThunbnailWidth = cache.getWidth(); mThunbnailHeight = cache.getHeight(); mThunbnailBuffer = IntBuffer.allocate(mThunbnailWidth * mThunbnailHeight); } } public void startThunbnailCliping(int width, int height, TakeThunbnailCallback callback) { synchronized (mThunbnailLock) { mTakeThunbnailCallback = callback; mThunbnailBmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); mThunbnailWidth = width; mThunbnailHeight = height; mThunbnailBuffer = IntBuffer.allocate(width * height); } } 
// Stop thumbnail clipping and release the bitmap and readback buffer.
public void stopThunbnailCliping() {
    synchronized (mThunbnailLock) {
        mTakeThunbnailCallback = null;
        mThunbnailBmp.recycle();
        mThunbnailBmp = null;
        mThunbnailBuffer = null;
    }
}

// Receiver of a captured picture. Ownership of the bitmap transfers to the
// receiver, which is responsible for recycling it.
public interface TakePictureCallback {
    void takePictureOK(Bitmap bmp);
}

// Convenience overload: capture without applying the mask.
public void takeShot(final TakePictureCallback callback) {
    takeShot(callback, true);
}

// Captures the current (filtered) frame off-screen into a Bitmap on the GL thread
// and delivers it via the callback. Passes null to the callback if the recorder
// is not yet initialized.
public synchronized void takeShot(final TakePictureCallback callback, final boolean noMask) {
    assert callback != null : "callback must not be null!";

    if(mFrameRecorder == null) {
        Log.e(LOG_TAG, "Recorder not initialized!");
        callback.takePictureOK(null);
        return;
    }

    // All GL work must run on the render thread.
    queueEvent(new Runnable() {
        @Override
        public void run() {
            FrameBufferObject frameBufferObject = new FrameBufferObject();
            int bufferTexID;
            IntBuffer buffer;
            Bitmap bmp;

            if(noMask || !mIsUsingMask) {
                // Unmasked path: draw the cached frame at record resolution and read it back.
                bufferTexID = Common.genBlankTextureID(mRecordWidth, mRecordHeight);
                frameBufferObject.bindTexture(bufferTexID);
                GLES20.glViewport(0, 0, mRecordWidth, mRecordHeight);
                mFrameRecorder.drawCache();
                buffer = IntBuffer.allocate(mRecordWidth * mRecordHeight);
                GLES20.glReadPixels(0, 0, mRecordWidth, mRecordHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
                bmp = Bitmap.createBitmap(mRecordWidth, mRecordHeight, Bitmap.Config.ARGB_8888);
                bmp.copyPixelsFromBuffer(buffer);
                Log.i(LOG_TAG, String.format("w: %d, h: %d", mRecordWidth, mRecordHeight));
            } else {
                // Masked path: render with the mask applied at viewport resolution,
                // temporarily using an unflipped scale, then restore the flip state.
                bufferTexID = Common.genBlankTextureID(mDrawViewport.width, mDrawViewport.height);
                frameBufferObject.bindTexture(bufferTexID);
                int w = Math.min(mDrawViewport.width, viewWidth);
                int h = Math.min(mDrawViewport.height, viewHeight);
                mFrameRecorder.setRenderFlipScale(1.0f, 1.0f);
                mFrameRecorder.setMaskTextureRatio(mMaskAspectRatio);
                mFrameRecorder.render(0, 0, w, h);
                // Restore the on-screen flip state.
                mFrameRecorder.setRenderFlipScale(1.0f, -1.0f);
                mFrameRecorder.setMaskTextureRatio(mMaskAspectRatio);
                Log.i(LOG_TAG, String.format("w: %d, h: %d", w, h));
                buffer = IntBuffer.allocate(w * h);
                GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
                bmp = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
                bmp.copyPixelsFromBuffer(buffer);
            }

            // Release the temporary FBO and texture before leaving the GL thread task.
            frameBufferObject.release();
            GLES20.glDeleteTextures(1, new int[]{bufferTexID}, 0);
            callback.takePictureOK(bmp);
        }
    });
}

// Sets the camera picture size. Width and height are swapped on purpose:
// presumably because the sensor is rotated 90 degrees relative to the view
// (see the setRotation(90) in takePicture) — TODO confirm. If isBigger is true
// the smallest size not below the request is chosen, otherwise the largest size
// not above it — assumption based on the parameter name; verify in CameraInstance.
public void setPictureSize(int width, int height, boolean isBigger) {
    cameraInstance().setPictureSize(height, width, isBigger);
}

// Takes a full-resolution picture via the Camera API, rotates/mirrors the JPEG
// into an upright bitmap, optionally applies the given filter config in place,
// then delivers the result and restarts the preview.
public synchronized void takePicture(final TakePictureCallback photoCallback, Camera.ShutterCallback shutterCallback, final String config, final float intensity, final boolean isFrontMirror) {
    assert photoCallback != null : "photoCallback must not be null!!";

    // Ask the camera to record a 90-degree rotation in the JPEG.
    Camera.Parameters params = cameraInstance().getParams();
    params.setRotation(90);
    cameraInstance().setParams(params);

    cameraInstance().getCameraDevice().takePicture(shutterCallback, null, new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(final byte[] data, Camera camera) {
            // Decode the JPEG bytes delivered by the camera.
            Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
            int width = bmp.getWidth(), height = bmp.getHeight();
            if(width > maxTextureSize || height > maxTextureSize) {
                // Downscale so the image fits within the GL max texture size,
                // preserving aspect ratio. (Log label appears stripped of non-ASCII text.)
                float scaling = Math.max(width / (float) maxTextureSize, height / (float) maxTextureSize);
                Log.i(LOG_TAG, String.format("(%d x %d)OpenGL (%d x %d) !", width, height, maxTextureSize, maxTextureSize));
                bmp = Bitmap.createScaledBitmap(bmp, (int)(width / scaling), (int)(height / scaling), false);
                width = bmp.getWidth();
                height = bmp.getHeight();
            }

            // Target bitmap with swapped dimensions: the source is rotated 90
            // degrees into it below.
            Bitmap bmp2 = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
            Canvas canvas = new Canvas(bmp2);

            if (cameraInstance().getFacing() == Camera.CameraInfo.CAMERA_FACING_BACK) {
                Matrix mat = new Matrix();
                int halfLen = Math.min(width, height) / 2;
                mat.setRotate(90, halfLen, halfLen);
                canvas.drawBitmap(bmp, mat, null);
            } else {
                // Front camera: optionally mirror horizontally before rotating.
                Matrix mat = new Matrix();
                if (isFrontMirror) {
                    mat.postTranslate(-width / 2, -height / 2);
                    mat.postScale(-1.0f, 1.0f);
                    mat.postTranslate(width / 2, height / 2);
                    int halfLen = Math.min(width, height) / 2;
                    mat.postRotate(90, halfLen, halfLen);
                } else {
                    // NOTE(review): uses max() here vs min() in the mirrored branch —
                    // looks intentional for the opposite rotation pivot, but confirm.
                    int halfLen = Math.max(width, height) / 2;
                    mat.postRotate(-90, halfLen, halfLen);
                }
                canvas.drawBitmap(bmp, mat, null);
            }
            bmp.recycle();

            // Optionally apply the filter chain in place on the rotated bitmap.
            if (config != null) {
                CGENativeLibrary.filterImage_MultipleEffectsWriteBack(bmp2, config, intensity);
            }

            photoCallback.takePictureOK(bmp2);
            // takePicture() stops the preview; restart it for the live view.
            cameraInstance().getCameraDevice().startPreview();
        }
    });
}
}
package com.opengamma.web.jetty; import java.io.File; import java.io.IOException; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.nio.SelectChannelConnector; import org.springframework.context.ApplicationContext; import org.springframework.context.support.FileSystemXmlApplicationContext; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.PlatformConfigUtils; import com.opengamma.util.PlatformConfigUtils.MarketDataSource; import com.opengamma.util.PlatformConfigUtils.RunMode; /** * Starts a jetty server configured from spring. */ public class JettyServer { /** * The run mode. */ private final RunMode _runMode; /** * The market data source. */ private final MarketDataSource _marketDataSource; /** * Creates an instance. * * @param runMode the run mode, not null * @param marketDataSource the market data source, not null */ public JettyServer(RunMode runMode, MarketDataSource marketDataSource) { ArgumentChecker.notNull(runMode, "runMode"); ArgumentChecker.notNull(marketDataSource, "marketDataSource"); _runMode = runMode; _marketDataSource = marketDataSource; } /** * Loads and runs the Spring config file. 
* * @param springConfig the config file, not null * @throws IOException if an error occurs */ public void run(final String springConfig) throws IOException { ArgumentChecker.notNull(springConfig, "spring config"); // Logging if (System.getProperty("logback.configurationFile") == null) { System.setProperty("logback.configurationFile", "jetty-logback.xml"); } PlatformConfigUtils.configureSystemProperties(_runMode, _marketDataSource); // server try { process(getRelativePath(springConfig)); System.exit(0); } catch (Throwable ex) { ex.printStackTrace(); System.exit(1); } } private static String getRelativePath(final String springConfig) throws IOException { String absolutePath = new File(springConfig).getCanonicalPath(); String baseDir = new File(".").getCanonicalPath(); return absolutePath.substring(baseDir.length() + 1); } private static void process(String springConfig) throws Exception { System.out.println("================================== JETTY START BEGINS ======================================="); ApplicationContext appContext = new FileSystemXmlApplicationContext(springConfig); Server server = appContext.getBean("server", Server.class); System.out.println(server.dump()); server.start(); System.out.println(); System.out.println("Server started on port " + getServerPort(server)); System.out.println(); System.out.println("================================== JETTY START COMPLETE ====================================="); System.out.println(); server.join(); } private static String getServerPort(final Server server) { Connector[] connectors = server.getConnectors(); for (Connector connector : connectors) { if (connector instanceof SelectChannelConnector) { return Integer.toString(connector.getPort()); } } return "Unknown"; } }
package com.ironz.binaryprefs.util;

import org.junit.Test;

import java.util.HashSet;
import java.util.Set;

import static org.junit.Assert.assertEquals;

/**
 * Round-trip and flag-validation tests for the {@code Bits} byte-level
 * serialization helpers. Each value type has a "convert" test (serialize,
 * deserialize, compare) and an "incorrect flag" test that corrupts the leading
 * type-flag byte and expects a {@link ClassCastException}.
 */
public class BitsTest {

    @Test
    public void stringSetConvert() {
        Set<String> strings = new HashSet<>();
        strings.add("One");
        strings.add("Two");
        strings.add("Three");
        strings.add("");
        strings.add(null);

        byte[] bytes = Bits.stringSetToBytes(strings);
        Set<String> restored = Bits.stringSetFromBytes(bytes);

        assertEquals(37, bytes.length);
        assertEquals(Bits.FLAG_STRING_SET, bytes[0]);
        assertEquals(strings, restored);
    }

    @Test(expected = ClassCastException.class)
    public void stringSetIncorrectFlag() {
        Set<String> strings = new HashSet<>();
        strings.add("One");
        strings.add("Two");
        strings.add("Three");
        strings.add("");
        strings.add(null);

        byte[] bytes = Bits.stringSetToBytes(strings);
        bytes[0] = 0; // corrupt the type flag

        Bits.stringSetFromBytes(bytes);
    }

    @Test
    public void stringConvert() {
        String someString = "Some String";
        byte[] bytes = Bits.stringToBytes(someString);
        String restored = Bits.stringFromBytes(bytes);

        assertEquals(12, bytes.length);
        assertEquals(Bits.FLAG_STRING, bytes[0]);
        assertEquals(someString, restored);
    }

    @Test(expected = ClassCastException.class)
    public void stringIncorrectFlag() {
        // Fixed: this test previously called the int converters (copy/paste from
        // integerIncorrectFlag), so the String flag check was never exercised.
        byte[] bytes = Bits.stringToBytes("Some String");
        bytes[0] = 0; // corrupt the type flag

        Bits.stringFromBytes(bytes);
    }

    @Test
    public void integerConvert() {
        byte[] bytes = Bits.intToBytes(Integer.MAX_VALUE);
        int restored = Bits.intFromBytes(bytes);

        assertEquals(5, bytes.length);
        assertEquals(Bits.FLAG_INT, bytes[0]);
        assertEquals(Integer.MAX_VALUE, restored);
    }

    @Test(expected = ClassCastException.class)
    public void integerIncorrectFlag() {
        byte[] bytes = Bits.intToBytes(Integer.MAX_VALUE);
        bytes[0] = 0; // corrupt the type flag

        Bits.intFromBytes(bytes);
    }

    @Test
    public void floatConvert() {
        byte[] bytes = Bits.floatToBytes(Float.MAX_VALUE);
        float restored = Bits.floatFromBytes(bytes);

        assertEquals(5, bytes.length);
        assertEquals(Bits.FLAG_FLOAT, bytes[0]);
        assertEquals(Float.MAX_VALUE, restored, .0);
    }

    @Test(expected = ClassCastException.class)
    public void floatIncorrectFlag() {
        byte[] bytes = Bits.floatToBytes(Float.MAX_VALUE);
        bytes[0] = 0; // corrupt the type flag

        Bits.floatFromBytes(bytes);
    }

    @Test
    public void longConvert() {
        byte[] bytes = Bits.longToBytes(Long.MAX_VALUE);
        long restored = Bits.longFromBytes(bytes);

        assertEquals(9, bytes.length);
        assertEquals(Bits.FLAG_LONG, bytes[0]);
        assertEquals(Long.MAX_VALUE, restored);
    }

    @Test(expected = ClassCastException.class)
    public void longIncorrectFlag() {
        byte[] bytes = Bits.longToBytes(Long.MAX_VALUE);
        bytes[0] = 0; // corrupt the type flag

        Bits.longFromBytes(bytes);
    }

    @Test
    public void booleanConvert() {
        byte[] bytes = Bits.booleanToBytes(true);
        boolean restored = Bits.booleanFromBytes(bytes);

        assertEquals(2, bytes.length);
        assertEquals(Bits.FLAG_BOOLEAN, bytes[0]);
        assertEquals(true, restored);
    }

    @Test(expected = ClassCastException.class)
    public void booleanIncorrectFlag() {
        byte[] bytes = Bits.booleanToBytes(true);
        bytes[0] = 0; // corrupt the type flag

        Bits.booleanFromBytes(bytes);
    }
}
package org.batfish.main; import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Verify.verify; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.regex.Pattern.CASE_INSENSITIVE; import static java.util.stream.Collectors.toMap; import static org.batfish.bddreachability.BDDMultipathInconsistency.computeMultipathInconsistencies; import static org.batfish.bddreachability.BDDReachabilityUtils.constructFlows; import static org.batfish.common.runtime.SnapshotRuntimeData.EMPTY_SNAPSHOT_RUNTIME_DATA; import static org.batfish.common.util.CompletionMetadataUtils.getFilterNames; import static org.batfish.common.util.CompletionMetadataUtils.getInterfaces; import static org.batfish.common.util.CompletionMetadataUtils.getIps; import static org.batfish.common.util.CompletionMetadataUtils.getLocationCompletionMetadata; import static org.batfish.common.util.CompletionMetadataUtils.getMlagIds; import static org.batfish.common.util.CompletionMetadataUtils.getNodes; import static org.batfish.common.util.CompletionMetadataUtils.getPrefixes; import static org.batfish.common.util.CompletionMetadataUtils.getRoutingPolicyNames; import static org.batfish.common.util.CompletionMetadataUtils.getStructureNames; import static org.batfish.common.util.CompletionMetadataUtils.getVrfs; import static org.batfish.common.util.CompletionMetadataUtils.getZones; import static org.batfish.common.util.isp.IspModelingUtils.INTERNET_HOST_NAME; import static org.batfish.datamodel.acl.AclLineMatchExprs.not; import static org.batfish.main.ReachabilityParametersResolver.resolveReachabilityParameters; import static org.batfish.main.StreamDecoder.decodeStreamAndAppendNewline; import static org.batfish.specifier.LocationInfoUtils.computeLocationInfo; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import 
com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Throwables; import com.google.common.cache.Cache; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.collect.Table; import com.google.common.hash.Hashing; import com.google.errorprone.annotations.MustBeClosed; import io.opentracing.References; import io.opentracing.Scope; import io.opentracing.Span; import io.opentracing.SpanContext; import io.opentracing.util.GlobalTracer; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import net.sf.javabdd.BDD; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTreeWalker; import 
org.apache.commons.configuration2.ImmutableConfiguration; import org.apache.commons.lang3.SerializationUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.batfish.bddreachability.BDDLoopDetectionAnalysis; import org.batfish.bddreachability.BDDReachabilityAnalysis; import org.batfish.bddreachability.BDDReachabilityAnalysisFactory; import org.batfish.bddreachability.BidirectionalReachabilityAnalysis; import org.batfish.bddreachability.IpsRoutedOutInterfacesFactory; import org.batfish.common.Answerer; import org.batfish.common.BatfishException; import org.batfish.common.BatfishException.BatfishStackTrace; import org.batfish.common.BatfishLogger; import org.batfish.common.BfConsts; import org.batfish.common.CleanBatfishException; import org.batfish.common.CompletionMetadata; import org.batfish.common.CoordConsts; import org.batfish.common.CoordConstsV2; import org.batfish.common.ErrorDetails; import org.batfish.common.NetworkSnapshot; import org.batfish.common.Warning; import org.batfish.common.Warnings; import org.batfish.common.bdd.BDDPacket; import org.batfish.common.plugin.BgpTablePlugin; import org.batfish.common.plugin.DataPlanePlugin; import org.batfish.common.plugin.DataPlanePlugin.ComputeDataPlaneResult; import org.batfish.common.plugin.ExternalBgpAdvertisementPlugin; import org.batfish.common.plugin.IBatfish; import org.batfish.common.plugin.PluginClientType; import org.batfish.common.plugin.PluginConsumer; import org.batfish.common.plugin.TracerouteEngine; import org.batfish.common.runtime.SnapshotRuntimeData; import org.batfish.common.topology.Layer1Edge; import org.batfish.common.topology.Layer1Topology; import org.batfish.common.topology.TopologyContainer; import org.batfish.common.topology.TopologyProvider; import org.batfish.common.util.BatfishObjectMapper; import org.batfish.common.util.CommonUtil; import org.batfish.common.util.isp.IspModelingUtils; import 
org.batfish.common.util.isp.IspModelingUtils.ModeledNodes; import org.batfish.config.Settings; import org.batfish.datamodel.AbstractRoute; import org.batfish.datamodel.AnnotatedRoute; import org.batfish.datamodel.BgpAdvertisement; import org.batfish.datamodel.Bgpv4Route; import org.batfish.datamodel.Configuration; import org.batfish.datamodel.ConfigurationFormat; import org.batfish.datamodel.DataPlane; import org.batfish.datamodel.DeviceType; import org.batfish.datamodel.Edge; import org.batfish.datamodel.EvpnRoute; import org.batfish.datamodel.Fib; import org.batfish.datamodel.Flow; import org.batfish.datamodel.FlowDisposition; import org.batfish.datamodel.ForwardingAnalysis; import org.batfish.datamodel.GenericRib; import org.batfish.datamodel.IntegerSpace; import org.batfish.datamodel.Interface; import org.batfish.datamodel.Interface.Dependency; import org.batfish.datamodel.Interface.DependencyType; import org.batfish.datamodel.InterfaceType; import org.batfish.datamodel.NetworkConfigurations; import org.batfish.datamodel.Prefix; import org.batfish.datamodel.SubRange; import org.batfish.datamodel.SwitchportMode; import org.batfish.datamodel.Topology; import org.batfish.datamodel.acl.AclLineMatchExpr; import org.batfish.datamodel.answers.Answer; import org.batfish.datamodel.answers.AnswerElement; import org.batfish.datamodel.answers.AnswerMetadataUtil; import org.batfish.datamodel.answers.AnswerStatus; import org.batfish.datamodel.answers.AnswerSummary; import org.batfish.datamodel.answers.ConvertConfigurationAnswerElement; import org.batfish.datamodel.answers.ConvertStatus; import org.batfish.datamodel.answers.DataPlaneAnswerElement; import org.batfish.datamodel.answers.InitInfoAnswerElement; import org.batfish.datamodel.answers.InitStepAnswerElement; import org.batfish.datamodel.answers.ParseAnswerElement; import org.batfish.datamodel.answers.ParseEnvironmentBgpTablesAnswerElement; import org.batfish.datamodel.answers.ParseStatus; import 
org.batfish.datamodel.answers.ParseVendorConfigurationAnswerElement; import org.batfish.datamodel.answers.RunAnalysisAnswerElement; import org.batfish.datamodel.collections.BgpAdvertisementsByVrf; import org.batfish.datamodel.collections.NodeInterfacePair; import org.batfish.datamodel.eigrp.EigrpMetricValues; import org.batfish.datamodel.eigrp.EigrpTopologyUtils; import org.batfish.datamodel.flow.Trace; import org.batfish.datamodel.flow.TraceWrapperAsAnswerElement; import org.batfish.datamodel.isp_configuration.IspConfiguration; import org.batfish.datamodel.ospf.OspfTopologyUtils; import org.batfish.datamodel.questions.InvalidReachabilityParametersException; import org.batfish.datamodel.questions.Question; import org.batfish.datamodel.vxlan.Layer2Vni; import org.batfish.dataplane.TracerouteEngineImpl; import org.batfish.grammar.BatfishCombinedParser; import org.batfish.grammar.BatfishParseException; import org.batfish.grammar.BatfishParseTreeWalker; import org.batfish.grammar.BgpTableFormat; import org.batfish.grammar.GrammarSettings; import org.batfish.grammar.NopFlattener; import org.batfish.grammar.ParseTreePrettyPrinter; import org.batfish.grammar.VendorConfigurationFormatDetector; import org.batfish.grammar.flattener.Flattener; import org.batfish.grammar.juniper.JuniperCombinedParser; import org.batfish.grammar.juniper.JuniperFlattener; import org.batfish.grammar.palo_alto_nested.PaloAltoNestedCombinedParser; import org.batfish.grammar.palo_alto_nested.PaloAltoNestedFlattener; import org.batfish.grammar.vyos.VyosCombinedParser; import org.batfish.grammar.vyos.VyosFlattener; import org.batfish.identifiers.AnalysisId; import org.batfish.identifiers.AnswerId; import org.batfish.identifiers.IdResolver; import org.batfish.identifiers.NetworkId; import org.batfish.identifiers.NodeRolesId; import org.batfish.identifiers.QuestionId; import org.batfish.identifiers.SnapshotId; import org.batfish.identifiers.StorageBasedIdResolver; import 
org.batfish.job.BatfishJobExecutor; import org.batfish.job.ConvertConfigurationJob; import org.batfish.job.ParseEnvironmentBgpTableJob; import org.batfish.job.ParseResult; import org.batfish.job.ParseVendorConfigurationJob; import org.batfish.job.ParseVendorConfigurationResult; import org.batfish.question.ReachabilityParameters; import org.batfish.question.ResolvedReachabilityParameters; import org.batfish.question.SrcNattedConstraint; import org.batfish.question.bidirectionalreachability.BidirectionalReachabilityResult; import org.batfish.question.differentialreachability.DifferentialReachabilityParameters; import org.batfish.question.differentialreachability.DifferentialReachabilityResult; import org.batfish.question.multipath.MultipathConsistencyParameters; import org.batfish.referencelibrary.ReferenceLibrary; import org.batfish.representation.aws.AwsConfiguration; import org.batfish.representation.host.HostConfiguration; import org.batfish.representation.iptables.IptablesVendorConfiguration; import org.batfish.role.InferRoles; import org.batfish.role.NodeRoleDimension; import org.batfish.role.NodeRolesData; import org.batfish.role.NodeRolesData.Type; import org.batfish.role.RoleMapping; import org.batfish.specifier.AllInterfaceLinksLocationSpecifier; import org.batfish.specifier.AllInterfacesLocationSpecifier; import org.batfish.specifier.InferFromLocationIpSpaceSpecifier; import org.batfish.specifier.IpSpaceAssignment; import org.batfish.specifier.Location; import org.batfish.specifier.LocationInfo; import org.batfish.specifier.SpecifierContext; import org.batfish.specifier.SpecifierContextImpl; import org.batfish.specifier.UnionLocationSpecifier; import org.batfish.storage.FileBasedStorage; import org.batfish.storage.StorageProvider; import org.batfish.symbolic.IngressLocation; import org.batfish.topology.TopologyProviderImpl; import org.batfish.vendor.VendorConfiguration; import org.batfish.version.BatfishVersion; import 
org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.SimpleDirectedGraph;
import org.jgrapht.traverse.TopologicalOrderIterator;

/** This class encapsulates the main control logic for Batfish. */
public class Batfish extends PluginConsumer implements IBatfish {

  // Interface names that, by naming convention, denote out-of-band management interfaces.
  private static final Pattern MANAGEMENT_INTERFACES =
      Pattern.compile(
          "(\\Amgmt)|(\\Amanagement)|(\\Afxp0)|(\\Aem0)|(\\Ame0)|(\\Avme)|(\\Awlan-ap)",
          CASE_INSENSITIVE);

  // VRF names that, by naming convention, denote management VRFs.
  private static final Pattern MANAGEMENT_VRFS =
      Pattern.compile("(\\Amgmt)|(\\Amanagement)", CASE_INSENSITIVE);

  /**
   * Validates that every edge in {@code topology} references nodes present in
   * {@code configurations} and interfaces present on those nodes.
   *
   * @throws BatfishException if any edge references a missing node or interface
   */
  static void checkTopology(Map<String, Configuration> configurations, Topology topology) {
    for (Edge edge : topology.getEdges()) {
      if (!configurations.containsKey(edge.getNode1())) {
        throw new BatfishException(
            String.format("Topology contains a non-existent node '%s'", edge.getNode1()));
      }
      if (!configurations.containsKey(edge.getNode2())) {
        throw new BatfishException(
            String.format("Topology contains a non-existent node '%s'", edge.getNode2()));
      }
      // nodes are valid, now checking corresponding interfaces
      Configuration config1 = configurations.get(edge.getNode1());
      Configuration config2 = configurations.get(edge.getNode2());
      if (!config1.getAllInterfaces().containsKey(edge.getInt1())) {
        throw new BatfishException(
            String.format(
                "Topology contains a non-existent interface '%s' on node '%s'",
                edge.getInt1(), edge.getNode1()));
      }
      if (!config2.getAllInterfaces().containsKey(edge.getInt2())) {
        throw new BatfishException(
            String.format(
                "Topology contains a non-existent interface '%s' on node '%s'",
                edge.getInt2(), edge.getNode2()));
      }
    }
  }

  /**
   * Flattens a hierarchical vendor configuration into the single-line form used downstream.
   * Formats without a hierarchical syntax fall through to a {@link NopFlattener}.
   * <p>
   * NOTE(review): the {@code header} parameter is currently unused; each branch uses its
   * own format-specific header constant instead — confirm whether it can be removed.
   */
  public static @Nonnull Flattener flatten(
      String input,
      BatfishLogger logger,
      GrammarSettings settings,
      Warnings warnings,
      ConfigurationFormat format,
      String header) {
    switch (format) {
      case PALO_ALTO_NESTED:
        {
          PaloAltoNestedCombinedParser parser = new PaloAltoNestedCombinedParser(input, settings);
          ParserRuleContext tree = parse(parser, logger, settings);
          PaloAltoNestedFlattener flattener =
              new PaloAltoNestedFlattener(
                  VendorConfigurationFormatDetector.BATFISH_FLATTENED_PALO_ALTO_HEADER);
          ParseTreeWalker walker = new BatfishParseTreeWalker(parser);
          try {
            walker.walk(flattener, tree);
          } catch (BatfishParseException e) {
            // preserve parser error details before rethrowing with context
            warnings.setErrorDetails(e.getErrorDetails());
            throw new BatfishException(
                String.format("Error flattening %s config", format.getVendorString()), e);
          }
          return flattener;
        }
      case JUNIPER:
        {
          JuniperCombinedParser parser = new JuniperCombinedParser(input, settings);
          ParserRuleContext tree = parse(parser, logger, settings);
          JuniperFlattener flattener =
              new JuniperFlattener(
                  VendorConfigurationFormatDetector.BATFISH_FLATTENED_JUNIPER_HEADER, input);
          ParseTreeWalker walker = new BatfishParseTreeWalker(parser);
          try {
            walker.walk(flattener, tree);
          } catch (BatfishParseException e) {
            // preserve parser error details before rethrowing with context
            warnings.setErrorDetails(e.getErrorDetails());
            throw new BatfishException(
                String.format("Error flattening %s config", format.getVendorString()), e);
          }
          return flattener;
        }
      case VYOS:
        {
          VyosCombinedParser parser = new VyosCombinedParser(input, settings);
          ParserRuleContext tree = parse(parser, logger, settings);
          VyosFlattener flattener =
              new VyosFlattener(VendorConfigurationFormatDetector.BATFISH_FLATTENED_VYOS_HEADER);
          ParseTreeWalker walker = new BatfishParseTreeWalker(parser);
          try {
            walker.walk(flattener, tree);
          } catch (BatfishParseException e) {
            // preserve parser error details before rethrowing with context
            warnings.setErrorDetails(e.getErrorDetails());
            throw new BatfishException(
                String.format("Error flattening %s config", format.getVendorString()), e);
          }
          return flattener;
        }
        // $CASES-OMITTED$
      default:
        return new NopFlattener(input);
    }
  }

  // Populates the snapshot identifiers from CLI settings. When storage settings are
  // absent (tests), leaves the identifiers unset; otherwise -testrig is mandatory.
  private void initLocalSettings(Settings settings) {
    if (settings == null || settings.getStorageBase() == null || settings.getContainer() == null) {
      // This should only happen in tests.
      return;
    }
    _snapshot = settings.getTestrig();
    if (_snapshot == null) {
      throw new CleanBatfishException("Must supply argument to -" + BfConsts.ARG_TESTRIG);
    }
    _referenceSnapshot = settings.getDeltaTestrig();
  }

  /**
   * Reads the snapshot input objects corresponding to the provided keys, and returns a map from
   * each object's key to its contents.
   */
  private @Nonnull SortedMap<String, String> readAllInputObjects(
      Stream<String> keys, NetworkSnapshot snapshot) {
    return keys.map(
            key -> {
              _logger.debugf("Reading: \"%s\"\n", key);
              try (InputStream inputStream =
                  _storage.loadSnapshotInputObject(
                      snapshot.getNetwork(), snapshot.getSnapshot(), key)) {
                return new SimpleEntry<>(key, decodeStreamAndAppendNewline(inputStream));
              } catch (IOException e) {
                // wrap so the checked exception can cross the stream boundary
                throw new UncheckedIOException(e);
              }
            })
        .collect(
            ImmutableSortedMap.toImmutableSortedMap(
                Ordering.natural(), SimpleEntry::getKey, SimpleEntry::getValue));
  }

  /** Routes each warning category in {@code warnings} to the matching level on {@code logger}. */
  public static void logWarnings(BatfishLogger logger, Warnings warnings) {
    for (Warning warning : warnings.getRedFlagWarnings()) {
      logger.redflag(logWarningsHelper(warning));
    }
    for (Warning warning : warnings.getUnimplementedWarnings()) {
      logger.unimplemented(logWarningsHelper(warning));
    }
    for (Warning warning : warnings.getPedanticWarnings()) {
      logger.pedantic(logWarningsHelper(warning));
    }
  }

  // Formats a single warning as an indented "TAG: text" log line.
  private static String logWarningsHelper(Warning warning) {
    return " " + warning.getTag() + ": " + warning.getText() + "\n";
  }

  /**
   * Returns the parse tree for the given parser, logging to the given logger and using the given
   * settings to control the parse tree printing, if applicable.
*/ public static ParserRuleContext parse( BatfishCombinedParser<?, ?> parser, BatfishLogger logger, GrammarSettings settings) { ParserRuleContext tree; try { tree = parser.parse(); } catch (BatfishException e) { throw new ParserBatfishException("Parser error", e); } List<String> errors = parser.getErrors(); int numErrors = errors.size(); if (numErrors > 0) { throw new ParserBatfishException("Parser error(s)", errors); } else if (!settings.getPrintParseTree()) { logger.info("OK\n"); } else { logger.info("OK, PRINTING PARSE TREE:\n"); logger.info( ParseTreePrettyPrinter.print(tree, parser, settings.getPrintParseTreeLineNums()) + "\n\n"); } return tree; } private final Map<String, AnswererCreator> _answererCreators; private SnapshotId _snapshot; private SortedMap<BgpTableFormat, BgpTablePlugin> _bgpTablePlugins; private final Cache<NetworkSnapshot, SortedMap<String, Configuration>> _cachedConfigurations; private final Cache<NetworkSnapshot, DataPlane> _cachedDataPlanes; private final Map<NetworkSnapshot, SortedMap<String, BgpAdvertisementsByVrf>> _cachedEnvironmentBgpTables; private final Cache<NetworkSnapshot, Map<String, VendorConfiguration>> _cachedVendorConfigurations; private SnapshotId _referenceSnapshot; private Set<ExternalBgpAdvertisementPlugin> _externalBgpAdvertisementPlugins; private IdResolver _idResolver; private BatfishLogger _logger; private Settings _settings; private final StorageProvider _storage; // this variable is used communicate with parent thread on how the job // finished (null if job finished successfully) private String _terminatingExceptionMessage; private Map<String, DataPlanePlugin> _dataPlanePlugins; private final TopologyProvider _topologyProvider; public Batfish( Settings settings, Cache<NetworkSnapshot, SortedMap<String, Configuration>> cachedConfigurations, Cache<NetworkSnapshot, DataPlane> cachedDataPlanes, Map<NetworkSnapshot, SortedMap<String, BgpAdvertisementsByVrf>> cachedEnvironmentBgpTables, Cache<NetworkSnapshot, Map<String, 
VendorConfiguration>> cachedVendorConfigurations, @Nullable StorageProvider alternateStorageProvider, @Nullable IdResolver alternateIdResolver) { _settings = settings; _bgpTablePlugins = new TreeMap<>(); _cachedConfigurations = cachedConfigurations; _cachedDataPlanes = cachedDataPlanes; _cachedEnvironmentBgpTables = cachedEnvironmentBgpTables; _cachedVendorConfigurations = cachedVendorConfigurations; _externalBgpAdvertisementPlugins = new TreeSet<>(); initLocalSettings(settings); _logger = _settings.getLogger(); _terminatingExceptionMessage = null; _answererCreators = new HashMap<>(); _dataPlanePlugins = new HashMap<>(); _storage = alternateStorageProvider != null ? alternateStorageProvider : new FileBasedStorage(_settings.getStorageBase(), _logger, this::newBatch); _idResolver = alternateIdResolver != null ? alternateIdResolver : new StorageBasedIdResolver(_storage); _topologyProvider = new TopologyProviderImpl(this, _storage); loadPlugins(); } private Answer analyze() { try { Answer answer = new Answer(); AnswerSummary summary = new AnswerSummary(); AnalysisId analysisName = _settings.getAnalysisName(); NetworkId containerName = _settings.getContainer(); RunAnalysisAnswerElement ae = new RunAnalysisAnswerElement(); _idResolver .listQuestions(containerName, analysisName) .forEach( questionName -> { Optional<QuestionId> questionIdOpt = _idResolver.getQuestionId(questionName, containerName, analysisName); checkArgument( questionIdOpt.isPresent(), "Question '%s' for analysis '%s' for network '%s' was deleted in the middle of" + " this operation", questionName, containerName, analysisName); _settings.setQuestionName(questionIdOpt.get()); Answer currentAnswer; Span span = GlobalTracer.get().buildSpan("Getting answer to analysis question").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning span.setTag("analysis-name", analysisName.getId()); currentAnswer = answer(); } finally { span.finish(); } 
// Ensuring that question was parsed successfully if (currentAnswer.getQuestion() != null) { try { // TODO: This can be represented much cleanly and easily with a Json _logger.infof( "Ran question:%s from analysis:%s in container:%s; work-id:%s, status:%s, " + "computed dataplane:%s, parameters:%s\n", questionName, analysisName, containerName, getTaskId(), currentAnswer.getSummary().getNumFailed() > 0 ? "failed" : "passed", currentAnswer.getQuestion().getDataPlane(), BatfishObjectMapper.writeString( currentAnswer.getQuestion().getInstance().getVariables())); } catch (JsonProcessingException e) { throw new BatfishException( String.format( "Error logging question %s in analysis %s", questionName, analysisName), e); } } try { outputAnswer(currentAnswer); outputAnswerMetadata(currentAnswer); ae.getAnswers().put(questionName, currentAnswer); } catch (Exception e) { Answer errorAnswer = new Answer(); errorAnswer.addAnswerElement( new BatfishStackTrace(new BatfishException("Failed to output answer", e))); ae.getAnswers().put(questionName, errorAnswer); } ae.getAnswers().put(questionName, currentAnswer); summary.combine(currentAnswer.getSummary()); }); answer.addAnswerElement(ae); answer.setSummary(summary); return answer; } finally { // ensure question name is null so logger does not try to write analysis answer into a // question's answer folder _settings.setQuestionName(null); } } public Answer answer() { Question question = null; // return right away if we cannot parse the question successfully Span span = GlobalTracer.get().buildSpan("Parse question span").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning String rawQuestionStr; try { rawQuestionStr = _storage.loadQuestion( _settings.getContainer(), _settings.getQuestionName(), _settings.getAnalysisName()); } catch (Exception e) { Answer answer = new Answer(); BatfishException exception = new BatfishException("Could not read question", e); 
answer.setStatus(AnswerStatus.FAILURE); answer.addAnswerElement(exception.getBatfishStackTrace()); return answer; } try { question = Question.parseQuestion(rawQuestionStr); } catch (Exception e) { Answer answer = new Answer(); BatfishException exception = new BatfishException("Could not parse question", e); answer.setStatus(AnswerStatus.FAILURE); answer.addAnswerElement(exception.getBatfishStackTrace()); return answer; } } finally { span.finish(); } LOGGER.info("Answering question {}", question.getClass().getSimpleName()); if (GlobalTracer.get().scopeManager().activeSpan() != null) { Span activeSpan = GlobalTracer.get().scopeManager().activeSpan(); activeSpan .setTag("container-name", getContainerName().getId()) .setTag("testrig_name", getSnapshot().getSnapshot().getId()); if (question.getInstance() != null) { activeSpan.setTag("question-name", question.getInstance().getInstanceName()); } } if (_settings.getDifferential()) { question.setDifferential(true); } boolean dp = question.getDataPlane(); boolean diff = question.getDifferential(); _settings.setDiffQuestion(diff); // Ensures configurations are parsed and ready loadConfigurations(getSnapshot()); if (diff) { loadConfigurations(getReferenceSnapshot()); } Span initQuestionSpan = GlobalTracer.get().buildSpan("Init question env").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning prepareToAnswerQuestions(diff, dp); } finally { initQuestionSpan.finish(); } AnswerElement answerElement = null; BatfishException exception = null; Span getAnswerSpan = GlobalTracer.get().buildSpan("Get answer").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning if (question.getDifferential()) { answerElement = Answerer.create(question, this).answerDiff(getSnapshot(), getReferenceSnapshot()); } else { answerElement = Answerer.create(question, this).answer(getSnapshot()); } } catch (Exception 
e) { exception = new BatfishException("Failed to answer question", e); } finally { getAnswerSpan.finish(); } Answer answer = new Answer(); answer.setQuestion(question); if (exception == null) { LOGGER.info("Question answered successfully"); // success answer.setStatus(AnswerStatus.SUCCESS); answer.addAnswerElement(answerElement); } else { LOGGER.warn("Question execution failed", exception); // failure answer.setStatus(AnswerStatus.FAILURE); answer.addAnswerElement(exception.getBatfishStackTrace()); } return answer; } private static void computeAggregatedInterfaceBandwidths(Map<String, Interface> interfaces) { // Set bandwidths for aggregate interfaces interfaces.values().stream() .filter(iface -> iface.getInterfaceType() == InterfaceType.AGGREGATED) .forEach( iface -> { /* If interface has dependencies, bandwidth should be sum of their bandwidths. */ if (!iface.getDependencies().isEmpty()) { iface.setBandwidth( iface.getDependencies().stream() .map(dependency -> interfaces.get(dependency.getInterfaceName())) .filter(Objects::nonNull) .filter(Interface::getActive) .map(Interface::getBandwidth) .filter(Objects::nonNull) .mapToDouble(Double::doubleValue) .sum()); } else { /* Bandwidth should be sum of bandwidth of channel-group members. */ iface.setBandwidth( iface.getChannelGroupMembers().stream() .mapToDouble(ifaceName -> interfaces.get(ifaceName).getBandwidth()) .sum()); } }); // Now that aggregate interfaces have bandwidths, set bandwidths for aggregate child interfaces interfaces.values().stream() .filter(iface -> iface.getInterfaceType() == InterfaceType.AGGREGATE_CHILD) .forEach( iface -> { /* Bandwidth for aggregate child interfaces (e.g. units) should be inherited from parent. 
*/
double bandwidth =
    iface.getDependencies().stream()
        .filter(d -> d.getType() == DependencyType.BIND)
        .findFirst()
        .map(Dependency::getInterfaceName)
        .map(interfaces::get)
        .map(Interface::getBandwidth)
        .orElse(0.0);
iface.setBandwidth(bandwidth);
});
}

private static void computeRedundantInterfaceBandwidths(Map<String, Interface> interfaces) {
  // Set bandwidths for redundant interfaces
  interfaces.values().stream()
      .filter(iface -> iface.getInterfaceType() == InterfaceType.REDUNDANT)
      .forEach(
          iface -> {
            /* If interface has dependencies, bandwidth should be bandwidth of any active dependency. */
            // NOTE(review): "any" is implemented as the minimum bandwidth among active,
            // non-null-bandwidth dependencies; 0.0 when there are none.
            iface.setBandwidth(
                iface.getDependencies().stream()
                    .map(dependency -> interfaces.get(dependency.getInterfaceName()))
                    .filter(Objects::nonNull)
                    .filter(Interface::getActive)
                    .map(Interface::getBandwidth)
                    .filter(Objects::nonNull)
                    .mapToDouble(Double::doubleValue)
                    .min()
                    .orElse(0.0));
          });
  // Now that redundant interfaces have bandwidths, set bandwidths for redundant child interfaces
  interfaces.values().stream()
      .filter(iface -> iface.getInterfaceType() == InterfaceType.REDUNDANT_CHILD)
      .forEach(
          iface -> {
            /* Bandwidth for redundant child interfaces (e.g. units) should be inherited from parent.
*/
            // Child inherits bandwidth through its BIND dependency; defaults to 0.0 if absent.
            double bandwidth =
                iface.getDependencies().stream()
                    .filter(d -> d.getType() == DependencyType.BIND)
                    .findFirst()
                    .map(Dependency::getInterfaceName)
                    .map(interfaces::get)
                    .map(Interface::getBandwidth)
                    .orElse(0.0);
            iface.setBandwidth(bandwidth);
          });
}

public static Warnings buildWarnings(Settings settings) {
  return Warnings.forLogger(settings.getLogger());
}

// Placeholder snapshot/dataplane pair used only to reserve cache space (see computeDataPlane).
private static final NetworkSnapshot DUMMY_SNAPSHOT =
    new NetworkSnapshot(
        new NetworkId("__BATFISH_DUMMY_NETWORK"), new SnapshotId("__BATFISH_DUMMY_SNAPSHOT"));

// Every accessor throws: this object must never be read, only inserted and invalidated.
private static final DataPlane DUMMY_DATAPLANE =
    new DataPlane() {
      @Override
      public Table<String, String, Set<Bgpv4Route>> getBgpRoutes() {
        throw new UnsupportedOperationException();
      }

      @Override
      public Table<String, String, Set<Bgpv4Route>> getBgpBackupRoutes() {
        throw new UnsupportedOperationException();
      }

      @Override
      public Table<String, String, Set<EvpnRoute<?, ?>>> getEvpnRoutes() {
        throw new UnsupportedOperationException();
      }

      @Override
      public Table<String, String, Set<EvpnRoute<?, ?>>> getEvpnBackupRoutes() {
        throw new UnsupportedOperationException();
      }

      @Override
      public Map<String, Map<String, Fib>> getFibs() {
        throw new UnsupportedOperationException();
      }

      @Override
      public ForwardingAnalysis getForwardingAnalysis() {
        throw new UnsupportedOperationException();
      }

      @Override
      public SortedMap<String, SortedMap<String, GenericRib<AnnotatedRoute<AbstractRoute>>>>
          getRibs() {
        throw new UnsupportedOperationException();
      }

      @Override
      public SortedMap<String, SortedMap<String, Map<Prefix, Map<String, Set<String>>>>>
          getPrefixTracingInfoSummary() {
        throw new UnsupportedOperationException();
      }

      @Override
      public Table<String, String, Set<Layer2Vni>> getLayer2Vnis() {
        throw new UnsupportedOperationException();
      }
    };

@Override
public DataPlaneAnswerElement computeDataPlane(NetworkSnapshot snapshot) {
  // If already present, invalidate a dataplane for this snapshot.
// (unlikely, only when devs force recomputation)
_cachedDataPlanes.invalidate(snapshot);
// Reserve space for the new dataplane in the in-memory cache by inserting and invalidating a
// dummy value.
_cachedDataPlanes.put(DUMMY_SNAPSHOT, DUMMY_DATAPLANE);
_cachedDataPlanes.invalidate(DUMMY_SNAPSHOT);
ComputeDataPlaneResult result = getDataPlanePlugin().computeDataPlane(snapshot);
DataPlaneAnswerElement answerElement = result._answerElement;
DataPlane dataplane = result._dataPlane;
TopologyContainer topologyContainer = result._topologies;
result = null; // let it be garbage collected.
saveDataPlane(snapshot, dataplane, topologyContainer);
return answerElement;
}

/* Write the dataplane to disk and cache, and write the answer element to disk. */
private void saveDataPlane(
    NetworkSnapshot snapshot, DataPlane dataplane, TopologyContainer topologies) {
  _cachedDataPlanes.put(snapshot, dataplane);
  _logger.resetTimer();
  newBatch("Writing data plane to disk", 0);
  Span writeDataplane = GlobalTracer.get().buildSpan("Writing data plane").start();
  try (Scope scope = GlobalTracer.get().scopeManager().activate(writeDataplane)) {
    assert scope != null; // avoid unused warning
    // Persist the dataplane plus every derived topology so later questions can skip recompute.
    LOGGER.info("Storing DataPlane");
    _storage.storeDataPlane(dataplane, snapshot);
    LOGGER.info("Storing BGP Topology");
    _storage.storeBgpTopology(topologies.getBgpTopology(), snapshot);
    LOGGER.info("Storing EIGRP Topology");
    _storage.storeEigrpTopology(topologies.getEigrpTopology(), snapshot);
    LOGGER.info("Storing L3 Adjacencies");
    _storage.storeL3Adjacencies(topologies.getL3Adjacencies(), snapshot);
    LOGGER.info("Storing Layer3 Topology");
    _storage.storeLayer3Topology(topologies.getLayer3Topology(), snapshot);
    LOGGER.info("Storing OSPF Topology");
    _storage.storeOspfTopology(topologies.getOspfTopology(), snapshot);
    LOGGER.info("Storing VxLAN Topology");
    _storage.storeVxlanTopology(topologies.getVxlanTopology(), snapshot);
  } catch (IOException e) {
    throw new BatfishException("Failed to save data plane", e);
  } finally {
    writeDataplane.finish();
  }
  _logger.printElapsedTime();
}

// Runs one ConvertConfigurationJob per vendor config and collects results into a hostname map.
private Map<String, Configuration> convertConfigurations(
    Map<String, VendorConfiguration> vendorConfigurations,
    SnapshotRuntimeData runtimeData,
    ConvertConfigurationAnswerElement answerElement) {
  _logger.info("\n*** CONVERTING VENDOR CONFIGURATIONS TO INDEPENDENT FORMAT ***\n");
  _logger.resetTimer();
  Map<String, Configuration> configurations = new TreeMap<>();
  List<ConvertConfigurationJob> jobs = new ArrayList<>();
  for (Entry<String, VendorConfiguration> config : vendorConfigurations.entrySet()) {
    VendorConfiguration vc = config.getValue();
    ConvertConfigurationJob job =
        new ConvertConfigurationJob(_settings, runtimeData, vc, config.getKey());
    jobs.add(job);
  }
  BatfishJobExecutor.runJobsInExecutor(
      _settings,
      _logger,
      jobs,
      configurations,
      answerElement,
      _settings.getHaltOnConvertError(),
      "Convert configurations to vendor-independent format");
  _logger.printElapsedTime();
  return configurations;
}

@Override
public boolean debugFlagEnabled(String flag) {
  return _settings.debugFlagEnabled(flag);
}

@Override
public Map<Location, LocationInfo> getLocationInfo(NetworkSnapshot snapshot) {
  return computeLocationInfo(
      getTopologyProvider().getIpOwners(snapshot), loadConfigurations(snapshot));
}

// Deactivates VLAN interfaces that have auto-state enabled but no assigned switch ports.
private void disableUnusableVlanInterfaces(Map<String, Configuration> configurations) {
  for (Configuration c : configurations.values()) {
    String hostname = c.getHostname();
    Map<Integer, Interface> vlanInterfaces = new HashMap<>();
    Map<Integer, Integer> vlanMemberCounts = new HashMap<>();
    Set<Interface> nonVlanInterfaces = new HashSet<>();
    Integer vlanNumber = null;
    // Populate vlanInterface and nonVlanInterfaces, and initialize
    // vlanMemberCounts:
    for (Interface iface : c.getActiveInterfaces().values()) {
      if ((iface.getInterfaceType() == InterfaceType.VLAN)
          && ((vlanNumber = CommonUtil.getInterfaceVlanNumber(iface.getName())) != null)) {
        vlanInterfaces.put(vlanNumber, iface);
        vlanMemberCounts.put(vlanNumber, 0);
      } else
{
        nonVlanInterfaces.add(iface);
      }
    }
    // Update vlanMemberCounts:
    for (Interface iface : nonVlanInterfaces) {
      IntegerSpace.Builder vlans = IntegerSpace.builder();
      if (iface.getSwitchportMode() == SwitchportMode.TRUNK) { // vlan trunked interface
        IntegerSpace allowed = iface.getAllowedVlans();
        if (!allowed.isEmpty()) {
          // Explicit list of allowed VLANs
          vlans.including(allowed);
        } else {
          // No explicit list, so all VLANs are allowed.
          vlanInterfaces.keySet().forEach(vlans::including);
        }
        // Add the native VLAN as well.
        vlanNumber = iface.getNativeVlan();
        if (vlanNumber != null) {
          vlans.including(vlanNumber);
        }
      } else if (iface.getSwitchportMode() == SwitchportMode.ACCESS) { // access mode ACCESS
        vlanNumber = iface.getAccessVlan();
        if (vlanNumber != null) {
          vlans.including(vlanNumber);
        }
        // Any other Switch Port mode is unsupported
      } else if (iface.getSwitchportMode() != SwitchportMode.NONE) {
        _logger.warnf(
            "WARNING: Unsupported switch port mode %s, assuming no VLANs allowed: \"%s:%s\"\n",
            iface.getSwitchportMode(), hostname, iface.getName());
      }
      // Count this interface as a member of each VLAN it can carry.
      vlans.build().stream()
          .forEach(
              vlanId -> vlanMemberCounts.compute(vlanId, (k, v) -> (v == null) ? 1 : (v + 1)));
    }
    // Disable all "normal" vlan interfaces with zero member counts:
    SubRange normalVlanRange = c.getNormalVlanRange();
    for (Map.Entry<Integer, Integer> entry : vlanMemberCounts.entrySet()) {
      if (entry.getValue() == 0) {
        vlanNumber = entry.getKey();
        if ((vlanNumber >= normalVlanRange.getStart())
            && (vlanNumber <= normalVlanRange.getEnd())) {
          Interface iface = vlanInterfaces.get(vlanNumber);
          if ((iface != null) && iface.getAutoState()) {
            _logger.warnf(
                "Disabling unusable vlan interface because no switch port is assigned to it: %s",
                NodeInterfacePair.of(iface));
            iface.blacklist();
          }
        }
      }
    }
  }
}

/** Returns a map of hostname to VI {@link Configuration} */
public Map<String, Configuration> getConfigurations(
    Map<String, VendorConfiguration> vendorConfigurations,
    SnapshotRuntimeData runtimeData,
    ConvertConfigurationAnswerElement answerElement) {
  Map<String, Configuration> configurations =
      convertConfigurations(vendorConfigurations, runtimeData, answerElement);
  identifyDeviceTypes(configurations.values());
  return configurations;
}

@Override
public NetworkId getContainerName() {
  return _settings.getContainer();
}

@Override
public DataPlanePlugin getDataPlanePlugin() {
  DataPlanePlugin plugin = _dataPlanePlugins.get(_settings.getDataPlaneEngineName());
  if (plugin == null) {
    throw new BatfishException(
        String.format(
            "Dataplane engine %s is unavailable or unsupported",
            _settings.getDataPlaneEngineName()));
  }
  return plugin;
}

// Reads environment BGP table inputs from storage and parses them into per-host tables.
private SortedMap<String, BgpAdvertisementsByVrf> getEnvironmentBgpTables(
    NetworkSnapshot snapshot, ParseEnvironmentBgpTablesAnswerElement answerElement) {
  _logger.info("\n*** READING Environment BGP Tables ***\n");
  SortedMap<String, String> inputData;
  try (Stream<String> keys = _storage.listInputEnvironmentBgpTableKeys(snapshot)) {
    inputData = readAllInputObjects(keys, snapshot);
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  SortedMap<String, BgpAdvertisementsByVrf> bgpTables =
      parseEnvironmentBgpTables(snapshot, inputData,
answerElement);
  return bgpTables;
}

@Override
public BatfishLogger getLogger() {
  return _logger;
}

/**
 * Gets the {@link NodeRolesData} for the testrig
 *
 * @return The {@link NodeRolesData} object.
 */
@Override
public NodeRolesData getNodeRolesData() {
  try {
    NetworkId networkId = _settings.getContainer();
    Optional<NodeRolesId> networkNodeRolesIdOpt = _idResolver.getNetworkNodeRolesId(networkId);
    if (!networkNodeRolesIdOpt.isPresent()) {
      // No roles id registered for this network: treat as "no roles data".
      return null;
    }
    return BatfishObjectMapper.mapper()
        .readValue(_storage.loadNodeRoles(networkNodeRolesIdOpt.get()), NodeRolesData.class);
  } catch (IOException e) {
    _logger.errorf("Could not read roles data: %s", e);
    return null;
  }
}

/**
 * Gets the {@link NodeRoleDimension} object given dimension name. If {@code dimension} is null,
 * returns the default dimension.
 *
 * @param dimension The dimension name
 * @return An {@link Optional} that has the requested NodeRoleDimension or empty otherwise.
 */
@Override
public Optional<NodeRoleDimension> getNodeRoleDimension(@Nullable String dimension) {
  // NOTE(review): getNodeRolesData() can return null (missing id or read error), which would
  // NPE on the next line — verify callers, or add a null check returning Optional.empty().
  NodeRolesData nodeRolesData = getNodeRolesData();
  return nodeRolesData.nodeRoleDimensionFor(dimension);
}

@Override
public Map<String, String> getQuestionTemplates(boolean verbose) {
  if (_settings.getCoordinatorHost() == null) {
    throw new BatfishException("Cannot get question templates: coordinator host is not set");
  }
  String url =
      String.format(
          "http://%s:%s%s/%s",
          _settings.getCoordinatorHost(),
          _settings.getCoordinatorPoolPort(),
          CoordConsts.SVC_CFG_POOL_MGR,
          CoordConsts.SVC_RSC_POOL_GET_QUESTION_TEMPLATES);
  Map<String, String> params = new HashMap<>();
  params.put(CoordConsts.SVC_KEY_VERSION, BatfishVersion.getVersionStatic());
  params.put(CoordConstsV2.QP_VERBOSE, String.valueOf(verbose));
  JSONObject response =
      (JSONObject) CoordinatorClient.talkToCoordinator(url, params, _settings, _logger);
  if (response == null) {
    throw new BatfishException("Could not get question templates: Got null response");
  }
  if (!response.has(CoordConsts.SVC_KEY_QUESTION_LIST)) {
    throw new BatfishException("Could not get question templates: Response lacks question list");
  }
  try {
    Map<String, String> templates =
        BatfishObjectMapper.mapper()
            .readValue(
                response.get(CoordConsts.SVC_KEY_QUESTION_LIST).toString(),
                new TypeReference<Map<String, String>>() {});
    return templates;
  } catch (JSONException | IOException e) {
    throw new BatfishException("Could not cast response to Map: ", e);
  }
}

/** Gets the {@link ReferenceLibrary} for the network */
@Override
public @Nullable ReferenceLibrary getReferenceLibraryData() {
  try {
    return _storage
        .loadReferenceLibrary(_settings.getContainer())
        .orElse(new ReferenceLibrary(null));
  } catch (IOException e) {
    _logger.errorf(
        "Could not read reference library data for network %s: %s", _settings.getContainer(), e);
    return null;
  }
}

public Settings getSettings() {
  return _settings;
}

@Override
public ImmutableConfiguration getSettingsConfiguration() {
  return _settings.getImmutableConfiguration();
}

@Override
public NetworkSnapshot getSnapshot() {
  return new NetworkSnapshot(_settings.getContainer(), _snapshot);
}

@Override
public NetworkSnapshot getReferenceSnapshot() {
  return new NetworkSnapshot(_settings.getContainer(), _referenceSnapshot);
}

@Override
public String getTaskId() {
  return _settings.getTaskId();
}

public String getTerminatingExceptionMessage() {
  return _terminatingExceptionMessage;
}

@Nonnull
@Override
public TopologyProvider getTopologyProvider() {
  return _topologyProvider;
}

@Override
public PluginClientType getType() {
  return PluginClientType.BATFISH;
}

@Override
public InitInfoAnswerElement initInfo(
    NetworkSnapshot snapshot, boolean summary, boolean verboseError) {
  LOGGER.info("Getting snapshot initialization info");
  ParseVendorConfigurationAnswerElement parseAnswer =
      loadParseVendorConfigurationAnswerElement(snapshot);
  InitInfoAnswerElement answerElement = mergeParseAnswer(summary, verboseError, parseAnswer);
ConvertConfigurationAnswerElement convertAnswer =
      loadConvertConfigurationAnswerElementOrReparse(snapshot);
  mergeConvertAnswer(summary, verboseError, convertAnswer, answerElement);
  _logger.info(answerElement.toString());
  return answerElement;
}

@Override
public InitInfoAnswerElement initInfoBgpAdvertisements(
    NetworkSnapshot snapshot, boolean summary, boolean verboseError) {
  ParseEnvironmentBgpTablesAnswerElement parseAnswer =
      loadParseEnvironmentBgpTablesAnswerElement(snapshot);
  InitInfoAnswerElement answerElement = mergeParseAnswer(summary, verboseError, parseAnswer);
  _logger.info(answerElement.toString());
  return answerElement;
}

// Ensures BGP tables (and, if requested, the dataplane) exist for the given snapshot.
private void prepareToAnswerQuestions(NetworkSnapshot snapshot, boolean dp) {
  try {
    if (!_storage.hasParseEnvironmentBgpTablesAnswerElement(snapshot)) {
      computeEnvironmentBgpTables(snapshot);
    }
    if (dp && _cachedDataPlanes.getIfPresent(snapshot) == null) {
      if (!_storage.hasDataPlane(snapshot)) {
        computeDataPlane(snapshot);
      }
    }
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
}

// Prepares the base snapshot, and the reference snapshot too for differential questions.
private void prepareToAnswerQuestions(boolean diff, boolean dp) {
  prepareToAnswerQuestions(getSnapshot(), dp);
  if (diff) {
    prepareToAnswerQuestions(getReferenceSnapshot(), dp);
  }
}

@Override
public SortedMap<String, Configuration> loadConfigurations(NetworkSnapshot snapshot) {
  Span span = GlobalTracer.get().buildSpan("Load configurations").start();
  try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) {
    assert scope != null; // avoid unused warning
    _logger.debugf("Loading configurations for %s\n", snapshot);
    // Do we already have configurations in the cache?
    SortedMap<String, Configuration> configurations =
        _cachedConfigurations.getIfPresent(snapshot);
    if (configurations != null) {
      return configurations;
    }
    _logger.debugf("Loading configurations for %s, cache miss", snapshot);
    // Next, see if we have an up-to-date configurations on disk.
    configurations = _storage.loadConfigurations(snapshot.getNetwork(), snapshot.getSnapshot());
    if (configurations != null) {
      _logger.debugf("Loaded configurations for %s off disk", snapshot);
    } else {
      // Otherwise, we have to parse the configurations. Fall back to old, hacky code.
      configurations = actuallyParseConfigurations(snapshot);
    }
    // Apply things like blacklist and aggregations before installing in the cache.
    postProcessSnapshot(snapshot, configurations);
    _cachedConfigurations.put(snapshot, configurations);
    return configurations;
  } finally {
    span.finish();
  }
}

@Nonnull
private SortedMap<String, Configuration> actuallyParseConfigurations(NetworkSnapshot snapshot) {
  _logger.infof("Repairing configurations for testrig %s", snapshot.getSnapshot());
  repairConfigurations(snapshot);
  SortedMap<String, Configuration> configurations =
      _storage.loadConfigurations(snapshot.getNetwork(), snapshot.getSnapshot());
  verify(
      configurations != null,
      "Configurations should not be null when loaded immediately after repair.");
  assert configurations != null;
  return configurations;
}

@Override
public ConvertConfigurationAnswerElement loadConvertConfigurationAnswerElementOrReparse(
    NetworkSnapshot snapshot) {
  ConvertConfigurationAnswerElement ccae =
      _storage.loadConvertConfigurationAnswerElement(
          snapshot.getNetwork(), snapshot.getSnapshot());
  if (ccae != null) {
    return ccae;
  }
  // Stored element missing or unreadable: re-parse once, then retry the load.
  repairConfigurations(snapshot);
  ccae =
      _storage.loadConvertConfigurationAnswerElement(
          snapshot.getNetwork(), snapshot.getSnapshot());
  if (ccae != null) {
    return ccae;
  } else {
    throw new BatfishException(
        "Version error repairing configurations for convert configuration answer element");
  }
}

@Override
public DataPlane loadDataPlane(NetworkSnapshot snapshot) {
  Span span = GlobalTracer.get().buildSpan("Load data plane").start();
  try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) {
    assert scope != null; // avoid unused warning
    DataPlane dp = _cachedDataPlanes.getIfPresent(snapshot);
    if (dp ==
null) {
      newBatch("Loading data plane from disk", 0);
      dp = _storage.loadDataPlane(snapshot);
      _cachedDataPlanes.put(snapshot, dp);
    }
    return dp;
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  } finally {
    span.finish();
  }
}

@Override
public SortedMap<String, BgpAdvertisementsByVrf> loadEnvironmentBgpTables(
    NetworkSnapshot snapshot) {
  SortedMap<String, BgpAdvertisementsByVrf> environmentBgpTables =
      _cachedEnvironmentBgpTables.get(snapshot);
  if (environmentBgpTables == null) {
    // Ensure the parse answer element exists (repairing if needed) before loading tables.
    loadParseEnvironmentBgpTablesAnswerElement(snapshot);
    try {
      environmentBgpTables =
          ImmutableSortedMap.copyOf(_storage.loadEnvironmentBgpTables(snapshot));
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
    _cachedEnvironmentBgpTables.put(snapshot, environmentBgpTables);
  }
  return environmentBgpTables;
}

public ParseEnvironmentBgpTablesAnswerElement loadParseEnvironmentBgpTablesAnswerElement(
    NetworkSnapshot snapshot) {
  return loadParseEnvironmentBgpTablesAnswerElement(snapshot, true);
}

// firstAttempt guards against infinite recursion: one repair retry, then fail.
private ParseEnvironmentBgpTablesAnswerElement loadParseEnvironmentBgpTablesAnswerElement(
    NetworkSnapshot snapshot, boolean firstAttempt) {
  try {
    if (!_storage.hasParseEnvironmentBgpTablesAnswerElement(snapshot)) {
      repairEnvironmentBgpTables(snapshot);
    }
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  try {
    return _storage.loadParseEnvironmentBgpTablesAnswerElement(snapshot);
  } catch (Exception e) {
    /* Do nothing, this is expected on serialization or other errors.
*/
    _logger.warn("Unable to load prior parse data");
  }
  if (firstAttempt) {
    repairEnvironmentBgpTables(snapshot);
    return loadParseEnvironmentBgpTablesAnswerElement(snapshot, false);
  } else {
    throw new BatfishException(
        "Version error repairing environment BGP tables for parse environment BGP tables "
            + "answer element");
  }
}

@Override
public ParseVendorConfigurationAnswerElement loadParseVendorConfigurationAnswerElement(
    NetworkSnapshot snapshot) {
  return loadParseVendorConfigurationAnswerElement(snapshot, true);
}

// Same one-retry repair pattern as the BGP-tables loader above.
private ParseVendorConfigurationAnswerElement loadParseVendorConfigurationAnswerElement(
    NetworkSnapshot snapshot, boolean firstAttempt) {
  try {
    if (_storage.hasParseVendorConfigurationAnswerElement(snapshot)) {
      try {
        return _storage.loadParseVendorConfigurationAnswerElement(snapshot);
      } catch (Exception e) {
        /* Do nothing, this is expected on serialization or other errors. */
        _logger.warn("Unable to load prior parse data");
      }
    }
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  if (firstAttempt) {
    repairVendorConfigurations(snapshot);
    return loadParseVendorConfigurationAnswerElement(snapshot, false);
  } else {
    throw new BatfishException(
        "Version error repairing vendor configurations for parse configuration answer element");
  }
}

@Override
public Map<String, VendorConfiguration> loadVendorConfigurations(NetworkSnapshot snapshot) {
  Span span = GlobalTracer.get().buildSpan("Load vendor configurations").start();
  try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) {
    assert scope != null; // avoid unused warning
    _logger.debugf("Loading vendor configurations for %s\n", snapshot);
    // Do we already have configurations in the cache?
Map<String, VendorConfiguration> vendorConfigurations =
        _cachedVendorConfigurations.getIfPresent(snapshot);
    if (vendorConfigurations == null) {
      _logger.debugf("Loading vendor configurations for %s, cache miss", snapshot);
      // Ensure the parse answer element exists (repairing if needed) before loading configs.
      loadParseVendorConfigurationAnswerElement(snapshot);
      try {
        vendorConfigurations = _storage.loadVendorConfigurations(snapshot);
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
      _cachedVendorConfigurations.put(snapshot, vendorConfigurations);
    }
    return vendorConfigurations;
  } finally {
    span.finish();
  }
}

// Folds conversion results into the init-info answer; failed conversions become FAILED parses.
private void mergeConvertAnswer(
    boolean summary,
    boolean verboseError,
    ConvertConfigurationAnswerElement convertAnswer,
    InitInfoAnswerElement answerElement) {
  mergeInitStepAnswer(answerElement, convertAnswer, summary, verboseError);
  convertAnswer.getConvertStatus().entrySet().stream()
      .filter(s -> s.getValue() == ConvertStatus.FAILED)
      .forEach(s -> answerElement.getParseStatus().put(s.getKey(), ParseStatus.FAILED));
}

// Copies per-host errors (when verbose) and all warning categories into the init-info answer.
private void mergeInitStepAnswer(
    InitInfoAnswerElement initInfoAnswerElement,
    InitStepAnswerElement initStepAnswerElement,
    boolean summary,
    boolean verboseError) {
  if (!summary) {
    if (verboseError) {
      SortedMap<String, List<BatfishStackTrace>> errors = initInfoAnswerElement.getErrors();
      initStepAnswerElement
          .getErrors()
          .forEach(
              (hostname, initStepErrors) ->
                  errors.computeIfAbsent(hostname, k -> new ArrayList<>()).add(initStepErrors));
    }
    SortedMap<String, Warnings> warnings = initInfoAnswerElement.getWarnings();
    initStepAnswerElement
        .getWarnings()
        .forEach(
            (hostname, initStepWarnings) -> {
              Warnings combined =
                  warnings.computeIfAbsent(hostname, h -> buildWarnings(_settings));
              combined.getParseWarnings().addAll(initStepWarnings.getParseWarnings());
              combined.getPedanticWarnings().addAll(initStepWarnings.getPedanticWarnings());
              combined.getRedFlagWarnings().addAll(initStepWarnings.getRedFlagWarnings());
              combined
                  .getUnimplementedWarnings()
                  .addAll(initStepWarnings.getUnimplementedWarnings());
            });
  }
}

private InitInfoAnswerElement mergeParseAnswer(
    boolean summary, boolean verboseError, ParseAnswerElement parseAnswer) {
  InitInfoAnswerElement answerElement = new InitInfoAnswerElement();
  mergeInitStepAnswer(answerElement, parseAnswer, summary, verboseError);
  answerElement.setParseStatus(parseAnswer.getParseStatus());
  answerElement.setParseTrees(parseAnswer.getParseTrees());
  return answerElement;
}

@Override
public AtomicInteger newBatch(String description, int jobs) {
  return BatchManager.get().newBatch(_settings, description, jobs);
}

private void outputAnswer(Answer answer) {
  outputAnswer(answer, /* log */ false);
}

void outputAnswerWithLog(Answer answer) {
  outputAnswer(answer, /* log */ true);
}

private void outputAnswer(Answer answer, boolean writeLog) {
  try {
    // Write answer to work json log if caller requested.
    // Summarize that answer if all of the following are true:
    // - answering a question
    // - question successful
    // - client did not request full successful answers
    String answerString = BatfishObjectMapper.writeString(answer);
    boolean summarizeWorkJsonLogAnswer =
        writeLog
            && _settings.getQuestionName() != null
            && !_settings.getAlwaysIncludeAnswerInWorkJsonLog()
            && answer.getStatus() == AnswerStatus.SUCCESS;
    String workJsonLogAnswerString;
    if (summarizeWorkJsonLogAnswer) {
      Answer summaryAnswer = new Answer();
      summaryAnswer.setQuestion(answer.getQuestion());
      summaryAnswer.setStatus(answer.getStatus());
      summaryAnswer.setSummary(answer.getSummary());
      // do not include answer elements
      workJsonLogAnswerString = BatfishObjectMapper.writeString(summaryAnswer);
    } else {
      workJsonLogAnswerString = answerString;
    }
    _logger.debug(answerString);
    writeJsonAnswerWithLog(answerString, workJsonLogAnswerString, writeLog);
  } catch (Exception e) {
    // Serialization/write failed: emit a failure answer instead, then rethrow.
    BatfishException be = new BatfishException("Error in sending answer", e);
    try {
      Answer failureAnswer = Answer.failureAnswer(e.toString(), answer.getQuestion());
      failureAnswer.addAnswerElement(be.getBatfishStackTrace());
      String answerString
= BatfishObjectMapper.writeString(failureAnswer);
      _logger.error(answerString);
      // write "answer" to work json log if caller requested
      writeJsonAnswerWithLog(answerString, answerString, writeLog);
    } catch (Exception e1) {
      _logger.errorf(
          "Could not serialize failure answer. %s", Throwables.getStackTraceAsString(e1));
    }
    throw be;
  }
}

// Computes and stores answer metadata keyed by the fully-resolved answer id; no-op for ad-hoc
// (question-less) runs.
void outputAnswerMetadata(Answer answer) throws IOException {
  QuestionId questionId = _settings.getQuestionName();
  if (questionId == null) {
    return;
  }
  SnapshotId referenceSnapshot = _settings.getDiffQuestion() ? _referenceSnapshot : null;
  NetworkId networkId = _settings.getContainer();
  AnalysisId analysisId = _settings.getAnalysisName();
  NodeRolesId networkNodeRolesId =
      _idResolver
          .getNetworkNodeRolesId(networkId)
          .orElse(NodeRolesId.DEFAULT_NETWORK_NODE_ROLES_ID);
  AnswerId baseAnswerId =
      _idResolver.getAnswerId(
          networkId, _snapshot, questionId, networkNodeRolesId, referenceSnapshot, analysisId);
  _storage.storeAnswerMetadata(
      AnswerMetadataUtil.computeAnswerMetadata(answer, _logger), baseAnswerId);
}

/** Parse AWS configurations for a single account (possibly with multiple regions) */
@VisibleForTesting
@Nonnull
public static AwsConfiguration parseAwsConfigurations(
    Map<String, String> configurationData, ParseVendorConfigurationAnswerElement pvcae) {
  AwsConfiguration config = new AwsConfiguration();
  for (Entry<String, String> configFile : configurationData.entrySet()) {
    // Using path for convenience for now to handle separators and key hierarchcially gracefully
    Path path = Paths.get(configFile.getKey());
    // Find the place in the path where "aws_configs" starts
    // NOTE(review): if "aws_configs" is absent, awsRootIndex == pathLength and the subpath()
    // call below would throw — presumably inputs always contain that directory; verify.
    int awsRootIndex = 0;
    for (Path value : path) {
      if (value.toString().equals(BfConsts.RELPATH_AWS_CONFIGS_DIR)) {
        break;
      }
      awsRootIndex++;
    }
    int pathLength = path.getNameCount();
    String regionName;
    String accountName;
    if (pathLength == 2) {
      // Currently happens for tests, but probably shouldn't be allowed
      regionName = AwsConfiguration.DEFAULT_REGION_NAME;
      accountName = AwsConfiguration.DEFAULT_ACCOUNT_NAME;
    } else if (pathLength == 3) {
      // If we are processing old-style packaging, just put everything in to one "default"
      // account.
      regionName = path.getName(pathLength - 2).toString(); // parent dir name
      accountName = AwsConfiguration.DEFAULT_ACCOUNT_NAME;
    } else if (pathLength > 3) {
      regionName = path.getName(pathLength - 2).toString(); // parent dir name
      accountName = path.getName(pathLength - 3).toString(); // account dir name
    } else {
      pvcae.addRedFlagWarning(
          BfConsts.RELPATH_AWS_CONFIGS_FILE,
          new Warning(String.format("Unexpected AWS configuration path: %s", path), "AWS"));
      continue;
    }
    String fileName = path.subpath(awsRootIndex, pathLength).toString();
    pvcae.getFileMap().put(BfConsts.RELPATH_AWS_CONFIGS_FILE, fileName);
    try {
      JsonNode json = BatfishObjectMapper.mapper().readTree(configFile.getValue());
      config.addConfigElement(regionName, json, fileName, pvcae, accountName);
    } catch (IOException e) {
      pvcae.addRedFlagWarning(
          BfConsts.RELPATH_AWS_CONFIGS_FILE,
          new Warning(String.format("Unexpected content in AWS file %s", fileName), "AWS"));
    }
  }
  return config;
}

// Parses raw environment BGP table text into per-hostname advertisement tables; inputs whose
// hostname has no matching configuration are skipped.
private SortedMap<String, BgpAdvertisementsByVrf> parseEnvironmentBgpTables(
    NetworkSnapshot snapshot,
    SortedMap<String, String> inputData,
    ParseEnvironmentBgpTablesAnswerElement answerElement) {
  _logger.info("\n*** PARSING ENVIRONMENT BGP TABLES ***\n");
  _logger.resetTimer();
  SortedMap<String, BgpAdvertisementsByVrf> bgpTables = new TreeMap<>();
  List<ParseEnvironmentBgpTableJob> jobs = new ArrayList<>();
  SortedMap<String, Configuration> configurations = loadConfigurations(snapshot);
  for (Entry<String, String> bgpObject : inputData.entrySet()) {
    String currentKey = bgpObject.getKey();
    String objectText = bgpObject.getValue();
    String hostname = Paths.get(currentKey).getFileName().toString();
    String optionalSuffix = ".bgp";
    if (hostname.endsWith(optionalSuffix)) {
      hostname = hostname.substring(0, hostname.length() - optionalSuffix.length());
    }
    if (!configurations.containsKey(hostname)) {
      continue;
    }
    Warnings warnings = buildWarnings(_settings);
    ParseEnvironmentBgpTableJob job =
        new ParseEnvironmentBgpTableJob(
            _settings, snapshot, objectText, hostname, currentKey, warnings, _bgpTablePlugins);
    jobs.add(job);
  }
  BatfishJobExecutor.runJobsInExecutor(
      _settings,
      _logger,
      jobs,
      bgpTables,
      answerElement,
      _settings.getHaltOnParseError(),
      "Parse environment BGP tables");
  _logger.printElapsedTime();
  return bgpTables;
}

/**
 * Returns a list of {@link ParseVendorConfigurationJob} to parse each file.
 *
 * <p>{@code expectedFormat} specifies the type of files expected in the {@code keyedFileText}
 * map, or is set to {@link ConfigurationFormat#UNKNOWN} to trigger format detection.
 */
private List<ParseVendorConfigurationJob> makeParseVendorConfigurationsJobs(
    NetworkSnapshot snapshot,
    Map<String, String> keyedFileText,
    ConfigurationFormat expectedFormat) {
  List<ParseVendorConfigurationJob> jobs = new ArrayList<>(keyedFileText.size());
  for (Entry<String, String> vendorFile : keyedFileText.entrySet()) {
    // Propagate the active tracing span (if any) into each parse job.
    @Nullable
    SpanContext parseVendorConfigurationSpanContext =
        GlobalTracer.get().activeSpan() == null
            ? null
            : GlobalTracer.get().activeSpan().context();
    ParseVendorConfigurationJob job =
        new ParseVendorConfigurationJob(
            _settings,
            snapshot,
            vendorFile.getValue(),
            vendorFile.getKey(),
            buildWarnings(_settings),
            expectedFormat,
            HashMultimap.create(),
            parseVendorConfigurationSpanContext);
    jobs.add(job);
  }
  return jobs;
}

/**
 * Parses the given configuration files and returns a map keyed by hostname representing the
 * {@link VendorConfiguration vendor-specific configurations}.
 *
 * <p>{@code expectedFormat} specifies the type of files expected in the {@code keyedFileText}
 * map, or is set to {@link ConfigurationFormat#UNKNOWN} to trigger format detection.
*/ private SortedMap<String, VendorConfiguration> parseVendorConfigurations( NetworkSnapshot snapshot, Map<String, String> keyedConfigurationText, ParseVendorConfigurationAnswerElement answerElement, ConfigurationFormat expectedFormat) { _logger.info("\n*** PARSING VENDOR CONFIGURATION FILES ***\n"); _logger.resetTimer(); SortedMap<String, VendorConfiguration> vendorConfigurations = new TreeMap<>(); List<ParseVendorConfigurationJob> jobs = makeParseVendorConfigurationsJobs(snapshot, keyedConfigurationText, expectedFormat); BatfishJobExecutor.runJobsInExecutor( _settings, _logger, jobs, vendorConfigurations, answerElement, _settings.getHaltOnParseError(), "Parse configurations"); _logger.printElapsedTime(); return vendorConfigurations; } private void populateChannelGroupMembers( Map<String, Interface> interfaces, String ifaceName, Interface iface) { String portChannelName = iface.getChannelGroup(); if (portChannelName == null) { return; } Interface portChannel = interfaces.get(portChannelName); if (portChannel == null) { return; } portChannel.setChannelGroupMembers( ImmutableSortedSet.<String>naturalOrder() .addAll(portChannel.getChannelGroupMembers()) .add(ifaceName) .build()); } private void postProcessAggregatedInterfaces(Map<String, Configuration> configurations) { configurations .values() .forEach(c -> postProcessAggregatedInterfacesHelper(c.getAllInterfaces())); } private void postProcessAggregatedInterfacesHelper(Map<String, Interface> interfaces) { /* Populate aggregated interfaces with members referring to them. */ interfaces.forEach( (ifaceName, iface) -> populateChannelGroupMembers(interfaces, ifaceName, iface)); /* Compute bandwidth for aggregated interfaces. 
*/
    computeAggregatedInterfaceBandwidths(interfaces);
  }

  /** Runs redundant-interface post-processing for each VRF of each configuration. */
  private void postProcessRedundantInterfaces(Map<String, Configuration> configurations) {
    configurations
        .values()
        .forEach(
            c ->
                c.getVrfs()
                    .values()
                    .forEach(
                        v ->
                            postProcessRedundantInterfacesHelper(
                                c.getAllInterfaces(v.getName()))));
  }

  private void postProcessRedundantInterfacesHelper(Map<String, Interface> interfaces) {
    /* Compute bandwidth for redundant interfaces. */
    computeRedundantInterfaceBandwidths(interfaces);
  }

  /**
   * Assigns a {@link DeviceType} to each configuration that does not already have one: HOST for
   * host-format configs, ROUTER when any VRF runs a routing process (BGP, EIGRP, IS-IS, OSPF, or
   * RIP), otherwise SWITCH.
   */
  private void identifyDeviceTypes(Collection<Configuration> configurations) {
    for (Configuration c : configurations) {
      if (c.getDeviceType() != null) {
        continue;
      }
      // Set device type to host iff the configuration format is HOST
      if (c.getConfigurationFormat() == ConfigurationFormat.HOST) {
        c.setDeviceType(DeviceType.HOST);
      } else if (c.getVrfs().values().stream()
          .anyMatch(
              vrf ->
                  vrf.getBgpProcess() != null
                      || !vrf.getEigrpProcesses().isEmpty()
                      || vrf.getIsisProcess() != null
                      || !vrf.getOspfProcesses().isEmpty()
                      || vrf.getRipProcess() != null)) {
        c.setDeviceType(DeviceType.ROUTER);
      } else {
        // If device was not a host or router, call it a switch
        c.setDeviceType(DeviceType.SWITCH);
      }
    }
  }

  /**
   * Builds a per-node dependency graph of interfaces and deactivates, in topological order, any
   * interface whose dependencies are missing or inactive.
   */
  @VisibleForTesting
  static void postProcessInterfaceDependencies(Map<String, Configuration> configurations) {
    configurations
        .values()
        .forEach(
            config -> {
              Map<String, Interface> allInterfaces = config.getAllInterfaces();
              Graph<String, DefaultEdge> graph = new SimpleDirectedGraph<>(DefaultEdge.class);
              allInterfaces.keySet().forEach(graph::addVertex);
              allInterfaces
                  .values()
                  .forEach(
                      iface ->
                          iface
                              .getDependencies()
                              .forEach(
                                  dependency -> {
                                    // JGraphT crashes if there is an edge to an undeclared vertex.
                                    // We add every edge target as a vertex, and code later will
                                    // still disable the child.
graph.addVertex(dependency.getInterfaceName()); graph.addEdge( // Reverse edge direction to aid topological sort dependency.getInterfaceName(), iface.getName()); })); // Traverse interfaces in topological order and deactivate if necessary for (TopologicalOrderIterator<String, DefaultEdge> iterator = new TopologicalOrderIterator<>(graph); iterator.hasNext(); ) { String ifaceName = iterator.next(); @Nullable Interface iface = allInterfaces.get(ifaceName); if (iface == null) { // A missing dependency. continue; } deactivateInterfaceIfNeeded(iface); } }); } /** Deactivate an interface if it is blacklisted or its dependencies are not active */ private static void deactivateInterfaceIfNeeded(@Nonnull Interface iface) { Configuration config = iface.getOwner(); Set<Dependency> dependencies = iface.getDependencies(); if (dependencies.stream() // Look at bind dependencies .filter(d -> d.getType() == DependencyType.BIND) .map(d -> config.getAllInterfaces().get(d.getInterfaceName())) // Find any missing or inactive interfaces .anyMatch(parent -> parent == null || !parent.getActive())) { iface.setActive(false); } // Look at aggregate dependencies only now if ((iface.getInterfaceType() == InterfaceType.AGGREGATED || iface.getInterfaceType() == InterfaceType.REDUNDANT) && dependencies.stream() .filter(d1 -> d1.getType() == DependencyType.AGGREGATE) // Extract existing and active interfaces .map(d -> config.getAllInterfaces().get(d.getInterfaceName())) .filter(Objects::nonNull) .noneMatch(Interface::getActive)) { iface.setActive(false); } } private void postProcessEigrpCosts(Map<String, Configuration> configurations) { configurations.values().stream() .flatMap(c -> c.getAllInterfaces().values().stream()) .filter( iface -> iface.getEigrp() != null && (iface.getInterfaceType() == InterfaceType.AGGREGATED || iface.getInterfaceType() == InterfaceType.AGGREGATE_CHILD)) .forEach( iface -> { EigrpMetricValues metricValues = iface.getEigrp().getMetric().getValues(); if 
(metricValues.getBandwidth() == null) { // only set bandwidth if it's not explicitly configured for EIGRP Double bw = iface.getBandwidth(); assert bw != null; // all bandwidths should be finalized at this point metricValues.setBandwidth(bw.longValue() / 1000); // convert to kbps } }); } private void postProcessOspfCosts(Map<String, Configuration> configurations) { configurations .values() .forEach( c -> c.getVrfs() .values() .forEach( vrf -> { // Compute OSPF interface costs where they are missing vrf.getOspfProcesses().values().forEach(p -> p.initInterfaceCosts(c)); })); } @Override public Set<BgpAdvertisement> loadExternalBgpAnnouncements( NetworkSnapshot snapshot, Map<String, Configuration> configurations) { Set<BgpAdvertisement> advertSet = new LinkedHashSet<>(); for (ExternalBgpAdvertisementPlugin plugin : _externalBgpAdvertisementPlugins) { Set<BgpAdvertisement> currentAdvertisements = plugin.loadExternalBgpAdvertisements(snapshot); advertSet.addAll(currentAdvertisements); } return advertSet; } /** * Builds the {@link Trace}s for a {@link Set} of {@link Flow}s. 
* * @param flows {@link Set} of {@link Flow} for which {@link Trace}s are to be found * @param ignoreFilters if true, will ignore ACLs * @return {@link SortedMap} of {@link Flow}s to {@link List} of {@link Trace}s */ @Override public SortedMap<Flow, List<Trace>> buildFlows( NetworkSnapshot snapshot, Set<Flow> flows, boolean ignoreFilters) { return getTracerouteEngine(snapshot).computeTraces(flows, ignoreFilters); } @Override public TracerouteEngine getTracerouteEngine(NetworkSnapshot snapshot) { return new TracerouteEngineImpl( loadDataPlane(snapshot), _topologyProvider.getLayer3Topology(snapshot), loadConfigurations(snapshot)); } /** Function that processes an interface blacklist across all configurations */ private static void processInterfaceBlacklist( Set<NodeInterfacePair> interfaceBlacklist, NetworkConfigurations configurations) { interfaceBlacklist.stream() .map(iface -> configurations.getInterface(iface.getHostname(), iface.getInterface())) .filter(Optional::isPresent) .map(Optional::get) .forEach(Interface::blacklist); } @VisibleForTesting static Set<NodeInterfacePair> nodeToInterfaceBlacklist( SortedSet<String> blacklistNodes, NetworkConfigurations configurations) { return blacklistNodes.stream() // Get all valid/present node configs .map(configurations::get) .filter(Optional::isPresent) .map(Optional::get) // All interfaces in each config .flatMap(c -> c.getAllInterfaces().values().stream()) .map(NodeInterfacePair::of) .collect(ImmutableSet.toImmutableSet()); } @VisibleForTesting static void processManagementInterfaces(Map<String, Configuration> configurations) { configurations .values() .forEach( configuration -> { for (Interface iface : configuration.getAllInterfaces().values()) { if (MANAGEMENT_INTERFACES.matcher(iface.getName()).find() || MANAGEMENT_VRFS.matcher(iface.getVrfName()).find()) { iface.blacklist(); } } }); } @Override @Nullable public String readExternalBgpAnnouncementsFile(NetworkSnapshot snapshot) { try { return 
_storage.loadExternalBgpAnnouncementsFile(snapshot).orElse(null); } catch (IOException e) { throw new UncheckedIOException(e); } } /** * Read Iptable Files for each host in the keyset of {@code hostConfigurations}, and store the * contents in {@code iptablesData}. Each task fails if the Iptables file specified by host does * not exist. * * @throws BatfishException if there is a failed task and either {@link * Settings#getExitOnFirstError()} or {@link Settings#getHaltOnParseError()} is set. */ void readIptablesFiles( NetworkSnapshot snapshot, SortedMap<String, VendorConfiguration> hostConfigurations, SortedMap<String, String> iptablesData, ParseVendorConfigurationAnswerElement answerElement) { List<BatfishException> failureCauses = new ArrayList<>(); for (VendorConfiguration vc : hostConfigurations.values()) { HostConfiguration hostConfig = (HostConfiguration) vc; String iptablesFile = hostConfig.getIptablesFile(); if (iptablesFile == null) { continue; } // ensure that the iptables file is not taking us outside of the // testrig try { if (!_storage.hasSnapshotInputObject(iptablesFile, snapshot)) { String failureMessage = String.format( "Iptables file %s for host %s is not contained within the snapshot", hostConfig.getIptablesFile(), hostConfig.getHostname()); BatfishException bfc; if (answerElement.getErrors().containsKey(hostConfig.getHostname())) { bfc = new BatfishException( failureMessage, answerElement.getErrors().get(hostConfig.getHostname()).getException()); answerElement.getErrors().put(hostConfig.getHostname(), bfc.getBatfishStackTrace()); answerElement .getErrorDetails() .put( hostConfig.getHostname(), new ErrorDetails(Throwables.getStackTraceAsString(bfc))); } else { bfc = new BatfishException(failureMessage); if (_settings.getExitOnFirstError()) { throw bfc; } else { failureCauses.add(bfc); answerElement.getErrors().put(hostConfig.getHostname(), bfc.getBatfishStackTrace()); answerElement.getParseStatus().put(hostConfig.getIptablesFile(), 
ParseStatus.FAILED); answerElement .getErrorDetails() .put( hostConfig.getHostname(), new ErrorDetails(Throwables.getStackTraceAsString(bfc))); } } } else { try (InputStream inputStream = _storage.loadSnapshotInputObject( snapshot.getNetwork(), snapshot.getSnapshot(), iptablesFile)) { iptablesData.put(iptablesFile, decodeStreamAndAppendNewline(inputStream)); } } } catch (IOException e) { throw new BatfishException("Could not get canonical path", e); } } if (_settings.getHaltOnParseError() && !failureCauses.isEmpty()) { BatfishException e = new BatfishException( "Fatal exception due to at least one Iptables file is" + " not contained within the testrig"); failureCauses.forEach(e::addSuppressed); throw e; } } @Override public void registerAnswerer( String questionName, String questionClassName, BiFunction<Question, IBatfish, Answerer> answererCreator) { AnswererCreator oldAnswererCreator = _answererCreators.putIfAbsent( questionName, new AnswererCreator(questionClassName, answererCreator)); if (oldAnswererCreator != null) { // Error: questionName collision. String oldQuestionClassName = _answererCreators.get(questionClassName).getQuestionClassName(); throw new IllegalArgumentException( String.format( "questionName %s already exists.\n" + " old questionClassName: %s\n" + " new questionClassName: %s", questionName, oldQuestionClassName, questionClassName)); } } @Override public void registerBgpTablePlugin(BgpTableFormat format, BgpTablePlugin bgpTablePlugin) { _bgpTablePlugins.put(format, bgpTablePlugin); } @Override public void registerExternalBgpAdvertisementPlugin( ExternalBgpAdvertisementPlugin externalBgpAdvertisementPlugin) { _externalBgpAdvertisementPlugins.add(externalBgpAdvertisementPlugin); } private void repairConfigurations(NetworkSnapshot snapshot) { // Needed to ensure vendor configs are written loadParseVendorConfigurationAnswerElement(snapshot); serializeIndependentConfigs(snapshot); } /** * Post-process the configuration in the current snapshot. 
Post-processing includes: * * <ul> * <li>Applying node and interface blacklists. * <li>Process interface dependencies and deactivate interfaces that cannot be up * </ul> */ private void updateBlacklistedAndInactiveConfigs( NetworkSnapshot snapshot, Map<String, Configuration> configurations) { NetworkConfigurations nc = NetworkConfigurations.of(configurations); NetworkId networkId = snapshot.getNetwork(); SnapshotId snapshotId = snapshot.getSnapshot(); SortedSet<String> blacklistedNodes = _storage.loadNodeBlacklist(networkId, snapshotId); if (blacklistedNodes != null) { processInterfaceBlacklist(nodeToInterfaceBlacklist(blacklistedNodes, nc), nc); } // If interface blacklist was provided, it was converted to runtime data file by WorkMgr SnapshotRuntimeData runtimeData = _storage.loadRuntimeData(networkId, snapshotId); if (runtimeData != null) { processInterfaceBlacklist(runtimeData.getBlacklistedInterfaces(), nc); } if (_settings.ignoreManagementInterfaces()) { processManagementInterfaces(configurations); } postProcessInterfaceDependencies(configurations); // We do not process the edge blacklist here. Instead, we rely on these edges being explicitly // deleted from the Topology (aka list of edges) that is used along with configurations in // answering questions. // TODO: take this out once dependencies are *the* definitive way to disable interfaces disableUnusableVlanInterfaces(configurations); } /** * Ensures that the current configurations for the current snapshot are correct by performing some * post-processing on the vendor-independent datamodel. Among other things, this includes: * * <ul> * <li>Invalidating cached configs if the in-memory copy has been changed by question * processing. * <li>Re-loading configurations from disk, including re-parsing if the configs were parsed on a * previous version of Batfish. * <li>Ensuring that blacklists are honored. 
* </ul> */ private void postProcessSnapshot( NetworkSnapshot snapshot, Map<String, Configuration> configurations) { updateBlacklistedAndInactiveConfigs(snapshot, configurations); postProcessAggregatedInterfaces(configurations); postProcessRedundantInterfaces(configurations); NetworkConfigurations nc = NetworkConfigurations.of(configurations); OspfTopologyUtils.initNeighborConfigs(nc); postProcessOspfCosts(configurations); postProcessEigrpCosts(configurations); // must be after postProcessAggregatedInterfaces EigrpTopologyUtils.initNeighborConfigs(nc); } private void computeAndStoreCompletionMetadata( NetworkSnapshot snapshot, Map<String, Configuration> configurations) { try { _storage.storeCompletionMetadata( computeCompletionMetadata(snapshot, configurations), _settings.getContainer(), snapshot.getSnapshot()); } catch (IOException e) { _logger.errorf("Error storing CompletionMetadata: %s", e); } } private CompletionMetadata computeCompletionMetadata( NetworkSnapshot snapshot, Map<String, Configuration> configurations) { return new CompletionMetadata( getFilterNames(configurations), getInterfaces(configurations), getIps(configurations), getLocationCompletionMetadata(getLocationInfo(snapshot), configurations), getMlagIds(configurations), getNodes(configurations), getPrefixes(configurations), getRoutingPolicyNames(configurations), getStructureNames(configurations), getVrfs(configurations), getZones(configurations)); } @MustBeClosed @Nonnull @Override public InputStream getNetworkObject(NetworkId networkId, String key) throws IOException { return _storage.loadNetworkObject(networkId, key); } @MustBeClosed @Nonnull @Override public InputStream getSnapshotInputObject(NetworkSnapshot snapshot, String key) throws IOException { return _storage.loadSnapshotInputObject(snapshot.getNetwork(), snapshot.getSnapshot(), key); } private void repairEnvironmentBgpTables(NetworkSnapshot snapshot) { try { _storage.deleteParseEnvironmentBgpTablesAnswerElement(snapshot); 
_storage.deleteEnvironmentBgpTables(snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } computeEnvironmentBgpTables(snapshot); } private void repairVendorConfigurations(NetworkSnapshot snapshot) { try { _storage.deleteParseVendorConfigurationAnswerElement(snapshot); _storage.deleteVendorConfigurations(snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } serializeVendorConfigs(snapshot); } public Answer run(NetworkSnapshot snapshot) { newBatch("Begin job", 0); boolean action = false; Answer answer = new Answer(); if (_settings.getSerializeVendor()) { answer.append(serializeVendorConfigs(snapshot)); action = true; } if (_settings.getSerializeIndependent()) { answer.append(serializeIndependentConfigs(snapshot)); // TODO: compute topology on initialization in cleaner way initializeTopology(snapshot); updateSnapshotNodeRoles(snapshot); action = true; } if (_settings.getInitInfo()) { InitInfoAnswerElement initInfoAnswerElement = initInfo(snapshot, true, false); // In this context we can remove parse trees because they will be returned in preceding answer // element. Note that parse trees are not removed when asking initInfo as its own question. initInfoAnswerElement.setParseTrees(Collections.emptySortedMap()); answer.addAnswerElement(initInfoAnswerElement); action = true; } if (_settings.getAnswer()) { Span span = GlobalTracer.get().buildSpan("Getting answer to question").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning answer.append(answer()); action = true; } finally { span.finish(); } } if (_settings.getAnalyze()) { answer.append(analyze()); action = true; } if (_settings.getDataPlane()) { answer.addAnswerElement(computeDataPlane(snapshot)); action = true; } if (!action) { throw new CleanBatfishException("No task performed! 
Run with -help flag to see usage\n"); } return answer; } /** Initialize topologies, commit {raw, raw pojo, pruned} layer-3 topologies to storage. */ @VisibleForTesting void initializeTopology(NetworkSnapshot networkSnapshot) { Map<String, Configuration> configurations = loadConfigurations(networkSnapshot); LOGGER.info("Initializing topology"); Topology rawLayer3Topology = _topologyProvider.getRawLayer3Topology(networkSnapshot); checkTopology(configurations, rawLayer3Topology); org.batfish.datamodel.pojo.Topology pojoTopology = org.batfish.datamodel.pojo.Topology.create( _settings.getSnapshotName(), configurations, rawLayer3Topology); try { _storage.storePojoTopology( pojoTopology, networkSnapshot.getNetwork(), networkSnapshot.getSnapshot()); } catch (IOException e) { throw new BatfishException("Could not serialize layer-3 POJO topology", e); } Topology layer3Topology = _topologyProvider.getInitialLayer3Topology(networkSnapshot); try { _storage.storeInitialTopology( layer3Topology, networkSnapshot.getNetwork(), networkSnapshot.getSnapshot()); } catch (IOException e) { throw new BatfishException("Could not serialize layer-3 topology", e); } } /** Returns {@code true} iff AWS configuration data is found. 
*/ private boolean serializeAwsConfigs( NetworkSnapshot snapshot, ParseVendorConfigurationAnswerElement pvcae) { _logger.info("\n*** READING AWS CONFIGS ***\n"); AwsConfiguration awsConfiguration; boolean found = false; Span span = GlobalTracer.get().buildSpan("Parse AWS configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning Map<String, String> awsConfigurationData; // Try to parse all accounts as one vendor configuration try (Stream<String> keys = _storage.listInputAwsMultiAccountKeys(snapshot)) { awsConfigurationData = readAllInputObjects(keys, snapshot); } if (awsConfigurationData.isEmpty()) { // No multi-account data, so try to parse as single-account try (Stream<String> keys = _storage.listInputAwsSingleAccountKeys(snapshot)) { awsConfigurationData = readAllInputObjects(keys, snapshot); } } found = !awsConfigurationData.isEmpty(); awsConfiguration = parseAwsConfigurations(awsConfigurationData, pvcae); } catch (IOException e) { throw new UncheckedIOException(e); } finally { span.finish(); } _logger.info("\n*** SERIALIZING AWS CONFIGURATION STRUCTURES ***\n"); _logger.resetTimer(); _logger.debugf("Serializing AWS"); try { _storage.storeVendorConfigurations( ImmutableMap.of(BfConsts.RELPATH_AWS_CONFIGS_FILE, awsConfiguration), snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } _logger.debug("OK\n"); _logger.printElapsedTime(); return found; } private Answer computeEnvironmentBgpTables(NetworkSnapshot snapshot) { Answer answer = new Answer(); ParseEnvironmentBgpTablesAnswerElement answerElement = new ParseEnvironmentBgpTablesAnswerElement(); answerElement.setVersion(BatfishVersion.getVersionStatic()); answer.addAnswerElement(answerElement); SortedMap<String, BgpAdvertisementsByVrf> bgpTables = getEnvironmentBgpTables(snapshot, answerElement); try { _storage.storeEnvironmentBgpTables(bgpTables, snapshot); 
_storage.storeParseEnvironmentBgpTablesAnswerElement(answerElement, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } return answer; } private SortedMap<String, VendorConfiguration> serializeHostConfigs( NetworkSnapshot snapshot, ParseVendorConfigurationAnswerElement answerElement) { _logger.info("\n*** READING HOST CONFIGS ***\n"); Map<String, String> keyedHostText; try (Stream<String> keys = _storage.listInputHostConfigurationsKeys(snapshot)) { keyedHostText = readAllInputObjects(keys, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } // read the host files SortedMap<String, VendorConfiguration> allHostConfigurations; Span span = GlobalTracer.get().buildSpan("Parse host configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning allHostConfigurations = parseVendorConfigurations( snapshot, keyedHostText, answerElement, ConfigurationFormat.HOST); } finally { span.finish(); } if (allHostConfigurations == null) { throw new BatfishException("Exiting due to parser errors"); } _logger.infof( "Testrig:%s in container:%s has total number of host configs:%d", snapshot.getSnapshot(), snapshot.getNetwork(), allHostConfigurations.size()); // split into hostConfigurations and overlayConfigurations SortedMap<String, VendorConfiguration> overlayConfigurations = allHostConfigurations.entrySet().stream() .filter(e -> ((HostConfiguration) e.getValue()).getOverlay()) .collect(toMap(Entry::getKey, Entry::getValue, (v1, v2) -> v1, TreeMap::new)); SortedMap<String, VendorConfiguration> nonOverlayHostConfigurations = allHostConfigurations.entrySet().stream() .filter(e -> !((HostConfiguration) e.getValue()).getOverlay()) .collect(toMap(Entry::getKey, Entry::getValue, (v1, v2) -> v1, TreeMap::new)); // read and associate iptables files for specified hosts SortedMap<String, String> keyedIptablesText = new TreeMap<>(); readIptablesFiles(snapshot, allHostConfigurations, 
keyedIptablesText, answerElement); SortedMap<String, VendorConfiguration> iptablesConfigurations = parseVendorConfigurations( snapshot, keyedIptablesText, answerElement, ConfigurationFormat.IPTABLES); for (VendorConfiguration vc : allHostConfigurations.values()) { HostConfiguration hostConfig = (HostConfiguration) vc; if (hostConfig.getIptablesFile() != null) { String iptablesKeyFromHost = hostConfig.getIptablesFile(); if (iptablesConfigurations.containsKey(iptablesKeyFromHost)) { hostConfig.setIptablesVendorConfig( (IptablesVendorConfiguration) iptablesConfigurations.get(iptablesKeyFromHost)); } } } // now, serialize _logger.info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); _logger.resetTimer(); try { _storage.storeVendorConfigurations(nonOverlayHostConfigurations, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } // serialize warnings try { _storage.storeParseVendorConfigurationAnswerElement(answerElement, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } _logger.printElapsedTime(); return overlayConfigurations; } private Answer serializeIndependentConfigs(NetworkSnapshot snapshot) { Span span = GlobalTracer.get().buildSpan("serializeIndependentConfigs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning Answer answer = new Answer(); ConvertConfigurationAnswerElement answerElement = new ConvertConfigurationAnswerElement(); answerElement.setVersion(BatfishVersion.getVersionStatic()); if (_settings.getVerboseParse()) { answer.addAnswerElement(answerElement); } SnapshotRuntimeData runtimeData = firstNonNull( _storage.loadRuntimeData(snapshot.getNetwork(), snapshot.getSnapshot()), EMPTY_SNAPSHOT_RUNTIME_DATA); Map<String, VendorConfiguration> vendorConfigs; Map<String, Configuration> configurations; LOGGER.info( "Converting the Vendor-Specific configurations to Vendor-Independent configurations"); Span convertSpan = 
GlobalTracer.get().buildSpan("convert VS to VI").start(); try (Scope childScope = GlobalTracer.get().scopeManager().activate(span)) { assert childScope != null; // avoid unused warning vendorConfigs = _storage.loadVendorConfigurations(snapshot); configurations = getConfigurations(vendorConfigs, runtimeData, answerElement); } catch (IOException e) { throw new UncheckedIOException(e); } finally { convertSpan.finish(); } Set<Layer1Edge> layer1Edges = vendorConfigs.values().stream() .flatMap(vc -> vc.getLayer1Edges().stream()) .collect(Collectors.toSet()); Warnings internetWarnings = answerElement .getWarnings() .computeIfAbsent(INTERNET_HOST_NAME, i -> buildWarnings(_settings)); ModeledNodes modeledNodes = getInternetAndIspNodes(snapshot, configurations, vendorConfigs, internetWarnings); mergeInternetAndIspNodes(modeledNodes, configurations, layer1Edges, internetWarnings); LOGGER.info("Serializing Vendor-Independent configurations"); Span storeSpan = GlobalTracer.get().buildSpan("store VI configs").start(); try (Scope childScope = GlobalTracer.get().scopeManager().activate(span)) { assert childScope != null; // avoid unused warning try { _storage.storeConfigurations( configurations, answerElement, // we don't write anything if no Layer1 edges were produced // empty topologies are currently dangerous for L1 computation layer1Edges.isEmpty() ? 
null : new Layer1Topology(layer1Edges), snapshot.getNetwork(), snapshot.getSnapshot()); } catch (IOException e) { throw new BatfishException("Could not store vendor independent configs to disk: %s", e); } } finally { storeSpan.finish(); } LOGGER.info("Post-processing the Vendor-Independent devices"); Span ppSpan = GlobalTracer.get().buildSpan("Post-process vendor-independent configs").start(); try (Scope childScope = GlobalTracer.get().scopeManager().activate(span)) { assert childScope != null; // avoid unused warning postProcessSnapshot(snapshot, configurations); } finally { ppSpan.finish(); } LOGGER.info("Computing completion metadata"); Span metadataSpan = GlobalTracer.get().buildSpan("Compute and store completion metadata").start(); try (Scope childScope = GlobalTracer.get().scopeManager().activate(span)) { assert childScope != null; // avoid unused warning computeAndStoreCompletionMetadata(snapshot, configurations); } finally { metadataSpan.finish(); } return answer; } finally { span.finish(); } } /** * Merges modeled nodes into {@code configurations} and {@code layer1Edges}. Nothing is done if * the input configurations have a node in common with modeled nodes. */ @VisibleForTesting static void mergeInternetAndIspNodes( ModeledNodes modeledNodes, Map<String, Configuration> configurations, Set<Layer1Edge> layer1Edges, Warnings internetWarnings) { Map<String, Configuration> modeledConfigs = modeledNodes.getConfigurations(); Set<String> commonNodes = Sets.intersection(configurations.keySet(), modeledConfigs.keySet()); if (!commonNodes.isEmpty()) { internetWarnings.redFlag( String.format( "Cannot add internet and ISP nodes because nodes with the following names already" + " exist in the snapshot: %s", commonNodes)); return; } configurations.putAll(modeledConfigs); layer1Edges.addAll(modeledNodes.getLayer1Edges()); } /** * Creates and returns ISP and Internet nodes. 
* * <p>If a node named 'internet' already exists in input {@code configurations} an empty {@link * ModeledNodes} object is returned. */ @Nonnull private ModeledNodes getInternetAndIspNodes( NetworkSnapshot snapshot, Map<String, Configuration> configurations, Map<String, VendorConfiguration> vendorConfigs, Warnings internetWarnings) { if (configurations.containsKey(INTERNET_HOST_NAME)) { internetWarnings.redFlag( "Cannot model internet because a node with the name 'internet' already exists"); return new ModeledNodes(); } ImmutableList.Builder<IspConfiguration> ispConfigurations = new ImmutableList.Builder<>(); IspConfiguration ispConfiguration = _storage.loadIspConfiguration(snapshot.getNetwork(), snapshot.getSnapshot()); if (ispConfiguration != null) { LOGGER.info("Loading Batfish ISP Configuration"); ispConfigurations.add(ispConfiguration); } vendorConfigs.values().stream() .map(VendorConfiguration::getIspConfiguration) .filter(Objects::nonNull) .forEach(ispConfigurations::add); return IspModelingUtils.getInternetAndIspNodes( configurations, ispConfigurations.build(), _logger, internetWarnings); } private void updateSnapshotNodeRoles(NetworkSnapshot snapshot) { // Compute new auto role data and updates existing auto data with it NodeRolesId snapshotNodeRolesId = _idResolver.getSnapshotNodeRolesId(snapshot.getNetwork(), snapshot.getSnapshot()); Set<String> nodeNames = loadConfigurations(snapshot).keySet(); Topology rawLayer3Topology = _topologyProvider.getRawLayer3Topology(snapshot); Optional<RoleMapping> autoRoles = new InferRoles(nodeNames, rawLayer3Topology).inferRoles(); NodeRolesData.Builder snapshotNodeRoles = NodeRolesData.builder(); try { if (autoRoles.isPresent()) { snapshotNodeRoles.setDefaultDimension(NodeRoleDimension.AUTO_DIMENSION_PRIMARY); snapshotNodeRoles.setRoleMappings(ImmutableList.of(autoRoles.get())); snapshotNodeRoles.setType(Type.AUTO); } _storage.storeNodeRoles(snapshotNodeRoles.build(), snapshotNodeRolesId); } catch (IOException e) { 
_logger.warnf("Could not update node roles: %s", e); } } private ParseVendorConfigurationResult getOrParse( ParseVendorConfigurationJob job, @Nullable SpanContext span, GrammarSettings settings) { String filename = job.getFilename(); String filetext = job.getFileText(); Span parseNetworkConfigsSpan = GlobalTracer.get() .buildSpan("Parse " + job.getFilename()) .addReference(References.FOLLOWS_FROM, span) .start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(parseNetworkConfigsSpan)) { assert scope != null; // avoid unused warning // Short-circuit all cache-related code. if (!_settings.getParseReuse()) { long startTime = System.currentTimeMillis(); ParseResult result = job.parse(); long elapsed = System.currentTimeMillis() - startTime; return job.fromResult(result, elapsed); } String id = Hashing.murmur3_128() .newHasher() .putString("Cached Parse Result", UTF_8) .putString(filename, UTF_8) .putString(filetext, UTF_8) .putBoolean(settings.getDisableUnrecognized()) .putInt(settings.getMaxParserContextLines()) .putInt(settings.getMaxParserContextTokens()) .putInt(settings.getMaxParseTreePrintLength()) .putBoolean(settings.getPrintParseTreeLineNums()) .putBoolean(settings.getPrintParseTree()) .putBoolean(settings.getThrowOnLexerError()) .putBoolean(settings.getThrowOnParserError()) .hash() .toString(); long startTime = System.currentTimeMillis(); boolean cached = false; ParseResult result; try (InputStream in = _storage.loadNetworkBlob(getContainerName(), id)) { result = SerializationUtils.deserialize(in); // sanity-check filenames. In the extremely unlikely event of a collision, we'll lose reuse // for this input. 
cached = result.getFilename().equals(filename); } catch (FileNotFoundException e) { result = job.parse(); } catch (Exception e) { _logger.warnf( "Error deserializing cached parse result for %s: %s", filename, Throwables.getStackTraceAsString(e)); result = job.parse(); } if (!cached) { try { byte[] serialized = SerializationUtils.serialize(result); _storage.storeNetworkBlob(new ByteArrayInputStream(serialized), getContainerName(), id); } catch (Exception e) { _logger.warnf( "Error caching parse result for %s: %s", filename, Throwables.getStackTraceAsString(e)); } } long elapsed = System.currentTimeMillis() - startTime; return job.fromResult(result, elapsed); } finally { parseNetworkConfigsSpan.finish(); } } /** * Parses configuration files for networking devices from the uploaded user data and produces * {@link VendorConfiguration vendor-specific configurations} serialized to the given output path. * Returns {@code true} iff at least one network configuration was found. * * <p>This function should be named better, but it's called by the {@link * #serializeVendorConfigs(NetworkSnapshot)}, so leaving as-is for now. */ private boolean serializeNetworkConfigs( NetworkSnapshot snapshot, ParseVendorConfigurationAnswerElement answerElement, SortedMap<String, VendorConfiguration> overlayHostConfigurations) { if (!overlayHostConfigurations.isEmpty()) { // Not able to cache with overlays. 
return oldSerializeNetworkConfigs(snapshot, answerElement, overlayHostConfigurations); } boolean found = false; _logger.info("\n*** READING DEVICE CONFIGURATION FILES ***\n"); List<ParseVendorConfigurationResult> parseResults; Span parseNetworkConfigsSpan = GlobalTracer.get().buildSpan("Parse network configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(parseNetworkConfigsSpan)) { assert scope != null; // avoid unused warning List<ParseVendorConfigurationJob> jobs; Span makeJobsSpan = GlobalTracer.get().buildSpan("Read files and make jobs").start(); try (Scope makeJobsScope = GlobalTracer.get().scopeManager().activate(makeJobsSpan)) { assert makeJobsScope != null; // avoid unused warning Map<String, String> keyedConfigText; // user filename (configs/foo) -> text of configs/foo try (Stream<String> keys = _storage.listInputNetworkConfigurationsKeys(snapshot)) { keyedConfigText = readAllInputObjects(keys, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } if (!keyedConfigText.isEmpty()) { found = true; } jobs = makeParseVendorConfigurationsJobs( snapshot, keyedConfigText, ConfigurationFormat.UNKNOWN); // Java parallel streams are not self-balancing in large networks, so shuffle the jobs. 
Collections.shuffle(jobs); } finally { makeJobsSpan.finish(); } AtomicInteger batch = newBatch("Parse network configs", jobs.size()); LOGGER.info("Parsing {} configuration files", jobs.size()); parseResults = jobs.parallelStream() .map( j -> { ParseVendorConfigurationResult result = getOrParse(j, parseNetworkConfigsSpan.context(), _settings); int done = batch.incrementAndGet(); if (done % 100 == 0) { LOGGER.info( "Successfully parsed {}/{} configuration files", done, jobs.size()); } return result; }) .collect(ImmutableList.toImmutableList()); LOGGER.info("Done parsing {} configuration files", jobs.size()); } finally { parseNetworkConfigsSpan.finish(); } if (_settings.getHaltOnParseError() && parseResults.stream().anyMatch(r -> r.getFailureCause() != null)) { BatfishException e = new BatfishException("Exiting due to parser errors"); parseResults.stream() .map(ParseVendorConfigurationResult::getFailureCause) .filter(Objects::nonNull) .forEach(e::addSuppressed); throw e; } _logger.infof( "Snapshot %s in network %s has total number of network configs:%d", snapshot.getSnapshot(), snapshot.getNetwork(), parseResults.size()); /* Assemble answer. 
*/ SortedMap<String, VendorConfiguration> vendorConfigurations = new TreeMap<>(); parseResults.forEach(pvcr -> pvcr.applyTo(vendorConfigurations, _logger, answerElement)); LOGGER.info("Serializing Vendor-Specific configurations"); Span serializeNetworkConfigsSpan = GlobalTracer.get().buildSpan("Serialize network configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(serializeNetworkConfigsSpan)) { assert scope != null; // avoid unused warning _logger.info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); _logger.resetTimer(); Map<String, VendorConfiguration> output = new TreeMap<>(); vendorConfigurations.forEach( (name, vc) -> { if (name.contains(File.separator)) { // iptables will get a hostname like configs/iptables-save if they // are not set up correctly using host files _logger.errorf("Cannot serialize configuration with bad hostname %s\n", name); answerElement.addRedFlagWarning( name, new Warning( "Cannot serialize network config. Bad hostname " + name.replace("\\", "/"), "MISCELLANEOUS")); } else { output.put(name, vc); } }); _storage.storeVendorConfigurations(output, snapshot); _logger.printElapsedTime(); } catch (IOException e) { throw new UncheckedIOException(e); } finally { serializeNetworkConfigsSpan.finish(); } return found; } /** Returns {@code true} iff at least one network configuration was found. 
*/ private boolean oldSerializeNetworkConfigs( NetworkSnapshot snapshot, ParseVendorConfigurationAnswerElement answerElement, SortedMap<String, VendorConfiguration> overlayHostConfigurations) { boolean found = false; _logger.info("\n*** READING DEVICE CONFIGURATION FILES ***\n"); Map<String, VendorConfiguration> vendorConfigurations; Span parseNetworkConfigsSpan = GlobalTracer.get().buildSpan("Parse network configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(parseNetworkConfigsSpan)) { assert scope != null; // avoid unused warning Map<String, String> keyedConfigText; try (Stream<String> keys = _storage.listInputNetworkConfigurationsKeys(snapshot)) { keyedConfigText = readAllInputObjects(keys, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } if (!keyedConfigText.isEmpty()) { found = true; } vendorConfigurations = parseVendorConfigurations( snapshot, keyedConfigText, answerElement, ConfigurationFormat.UNKNOWN); } finally { parseNetworkConfigsSpan.finish(); } _logger.infof( "Snapshot %s in network %s has total number of network configs:%d", snapshot.getSnapshot(), snapshot.getNetwork(), vendorConfigurations.size()); Span serializeNetworkConfigsSpan = GlobalTracer.get().buildSpan("Serialize network configs").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(serializeNetworkConfigsSpan)) { assert scope != null; // avoid unused warning _logger.info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n"); _logger.resetTimer(); Map<String, VendorConfiguration> output = new TreeMap<>(); vendorConfigurations.forEach( (name, vc) -> { if (name.contains(File.separator)) { // iptables will get a hostname like configs/iptables-save if they // are not set up correctly using host files _logger.errorf("Cannot serialize configuration with hostname %s\n", name); answerElement.addRedFlagWarning( name, new Warning( "Cannot serialize network config. 
Bad hostname " + name.replace("\\", "/"), "MISCELLANEOUS")); } else { // apply overlay if it exists VendorConfiguration overlayConfig = overlayHostConfigurations.get(name); if (overlayConfig != null) { vc.setOverlayConfiguration(overlayConfig); overlayHostConfigurations.remove(name); } output.put(name, vc); } }); // warn about unused overlays overlayHostConfigurations.forEach( (name, overlay) -> answerElement.getParseStatus().put(overlay.getFilename(), ParseStatus.ORPHANED)); _storage.storeVendorConfigurations(output, snapshot); _logger.printElapsedTime(); } catch (IOException e) { throw new UncheckedIOException(e); } finally { serializeNetworkConfigsSpan.finish(); } return found; } /** * Parses configuration files from the uploaded user data and produces {@link VendorConfiguration * vendor-specific configurations} serialized to the given output path. * * <p>This function should be named better, but it's called by the {@code -sv} argument to Batfish * so leaving as-is for now. */ private Answer serializeVendorConfigs(NetworkSnapshot snapshot) { Answer answer = new Answer(); boolean configsFound = false; ParseVendorConfigurationAnswerElement answerElement = new ParseVendorConfigurationAnswerElement(); answerElement.setVersion(BatfishVersion.getVersionStatic()); if (_settings.getVerboseParse()) { answer.addAnswerElement(answerElement); } // look for host configs and overlay configs in the `hosts/` subfolder of the upload. SortedMap<String, VendorConfiguration> overlayHostConfigurations = new TreeMap<>(); if (hasHostConfigs(snapshot)) { overlayHostConfigurations.putAll(serializeHostConfigs(snapshot, answerElement)); configsFound = true; } // look for network configs in the `configs/` subfolder of the upload. if (serializeNetworkConfigs(snapshot, answerElement, overlayHostConfigurations)) { configsFound = true; } // look for AWS VPC configs in the `aws_configs/` subfolder of the upload. 
if (serializeAwsConfigs(snapshot, answerElement)) { configsFound = true; } if (!configsFound) { throw new BatfishException("No valid configurations found in snapshot"); } // serialize warnings try { _storage.storeParseVendorConfigurationAnswerElement(answerElement, snapshot); } catch (IOException e) { throw new UncheckedIOException(e); } return answer; } private boolean hasHostConfigs(NetworkSnapshot snapshot) { try (Stream<String> keys = _storage.listInputHostConfigurationsKeys(snapshot)) { return keys.findAny().isPresent(); } catch (IOException e) { throw new UncheckedIOException(e); } } @Override public void registerDataPlanePlugin(DataPlanePlugin plugin, String name) { _dataPlanePlugins.put(name, plugin); } public void setTerminatingExceptionMessage(String terminatingExceptionMessage) { _terminatingExceptionMessage = terminatingExceptionMessage; } @Override public SpecifierContext specifierContext(NetworkSnapshot networkSnapshot) { return new SpecifierContextImpl(this, networkSnapshot); } @Override public BidirectionalReachabilityResult bidirectionalReachability( NetworkSnapshot snapshot, BDDPacket bddPacket, ReachabilityParameters parameters) { ResolvedReachabilityParameters params; try { params = resolveReachabilityParameters(this, parameters, snapshot); } catch (InvalidReachabilityParametersException e) { throw new BatfishException("Error resolving reachability parameters", e); } DataPlane dataPlane = loadDataPlane(snapshot); return new BidirectionalReachabilityAnalysis( bddPacket, loadConfigurations(snapshot), dataPlane.getForwardingAnalysis(), new IpsRoutedOutInterfacesFactory(dataPlane.getFibs()), params.getSourceIpAssignment(), params.getHeaderSpace(), params.getForbiddenTransitNodes(), params.getRequiredTransitNodes(), params.getFinalNodes(), params.getActions()) .getResult(); } @Override public AnswerElement standard( NetworkSnapshot snapshot, ReachabilityParameters reachabilityParameters) { return bddSingleReachability(snapshot, 
reachabilityParameters); } public AnswerElement bddSingleReachability( NetworkSnapshot snapshot, ReachabilityParameters parameters) { Span span = GlobalTracer.get().buildSpan("bddSingleReachability").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning ResolvedReachabilityParameters params; try { params = resolveReachabilityParameters(this, parameters, snapshot); } catch (InvalidReachabilityParametersException e) { return e.getInvalidParametersAnswer(); } checkArgument( params.getSrcNatted() == SrcNattedConstraint.UNCONSTRAINED, "Requiring or forbidding Source NAT is currently unsupported"); BDDPacket pkt = new BDDPacket(); boolean ignoreFilters = params.getIgnoreFilters(); BDDReachabilityAnalysisFactory bddReachabilityAnalysisFactory = getBddReachabilityAnalysisFactory(snapshot, pkt, ignoreFilters); Map<IngressLocation, BDD> reachableBDDs = bddReachabilityAnalysisFactory.getAllBDDs( params.getSourceIpAssignment(), params.getHeaderSpace(), params.getForbiddenTransitNodes(), params.getRequiredTransitNodes(), params.getFinalNodes(), params.getActions()); Set<Flow> flows = constructFlows(pkt, reachableBDDs); return new TraceWrapperAsAnswerElement(buildFlows(snapshot, flows, ignoreFilters)); } finally { span.finish(); } } @Override public Set<Flow> bddLoopDetection(NetworkSnapshot snapshot) { Span span = GlobalTracer.get().buildSpan("bddLoopDetection").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning BDDPacket pkt = new BDDPacket(); // TODO add ignoreFilters parameter boolean ignoreFilters = false; BDDReachabilityAnalysisFactory bddReachabilityAnalysisFactory = getBddReachabilityAnalysisFactory(snapshot, pkt, ignoreFilters); BDDLoopDetectionAnalysis analysis = bddReachabilityAnalysisFactory.bddLoopDetectionAnalysis( getAllSourcesInferFromLocationIpSpaceAssignment(snapshot)); Map<IngressLocation, BDD> loopBDDs = 
analysis.detectLoops(); Span span1 = GlobalTracer.get().buildSpan("bddLoopDetection.computeResultFlows").start(); try (Scope scope1 = GlobalTracer.get().scopeManager().activate(span)) { assert scope1 != null; // avoid unused warning return loopBDDs.entrySet().stream() .map( entry -> pkt.getFlow(entry.getValue()) .map( fb -> { IngressLocation loc = entry.getKey(); fb.setIngressNode(loc.getNode()); switch (loc.getType()) { case INTERFACE_LINK: fb.setIngressInterface(loc.getInterface()); break; case VRF: fb.setIngressVrf(loc.getVrf()); break; default: throw new BatfishException( "Unknown Location Type: " + loc.getType()); } return fb.build(); })) .flatMap(optional -> optional.map(Stream::of).orElse(Stream.empty())) .collect(ImmutableSet.toImmutableSet()); } finally { span1.finish(); } } finally { span.finish(); } } @Override public Set<Flow> bddMultipathConsistency( NetworkSnapshot snapshot, MultipathConsistencyParameters parameters) { Span span = GlobalTracer.get().buildSpan("bddMultipathConsistency").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning BDDPacket pkt = new BDDPacket(); // TODO add ignoreFilters parameter boolean ignoreFilters = false; BDDReachabilityAnalysisFactory bddReachabilityAnalysisFactory = getBddReachabilityAnalysisFactory(snapshot, pkt, ignoreFilters); IpSpaceAssignment srcIpSpaceAssignment = parameters.getSrcIpSpaceAssignment(); Set<String> finalNodes = parameters.getFinalNodes(); Set<FlowDisposition> failureDispositions = ImmutableSet.of( FlowDisposition.DENIED_IN, FlowDisposition.DENIED_OUT, FlowDisposition.LOOP, FlowDisposition.INSUFFICIENT_INFO, FlowDisposition.NEIGHBOR_UNREACHABLE, FlowDisposition.NO_ROUTE, FlowDisposition.NULL_ROUTED); Set<FlowDisposition> successDispositions = ImmutableSet.of( FlowDisposition.ACCEPTED, FlowDisposition.DELIVERED_TO_SUBNET, FlowDisposition.EXITS_NETWORK); Set<String> forbiddenTransitNodes = parameters.getForbiddenTransitNodes(); 
Set<String> requiredTransitNodes = parameters.getRequiredTransitNodes(); Map<IngressLocation, BDD> successBdds = bddReachabilityAnalysisFactory.getAllBDDs( srcIpSpaceAssignment, parameters.getHeaderSpace(), forbiddenTransitNodes, requiredTransitNodes, finalNodes, successDispositions); Map<IngressLocation, BDD> failureBdds = bddReachabilityAnalysisFactory.getAllBDDs( srcIpSpaceAssignment, parameters.getHeaderSpace(), forbiddenTransitNodes, requiredTransitNodes, finalNodes, failureDispositions); return ImmutableSet.copyOf(computeMultipathInconsistencies(pkt, successBdds, failureBdds)); } finally { span.finish(); } } @Nonnull public IpSpaceAssignment getAllSourcesInferFromLocationIpSpaceAssignment( NetworkSnapshot snapshot) { SpecifierContextImpl specifierContext = new SpecifierContextImpl(this, snapshot); Set<Location> locations = new UnionLocationSpecifier( AllInterfacesLocationSpecifier.INSTANCE, AllInterfaceLinksLocationSpecifier.INSTANCE) .resolve(specifierContext); return InferFromLocationIpSpaceSpecifier.INSTANCE.resolve(locations, specifierContext); } @Nonnull private BDDReachabilityAnalysisFactory getBddReachabilityAnalysisFactory( NetworkSnapshot snapshot, BDDPacket pkt, boolean ignoreFilters) { Span span = GlobalTracer.get().buildSpan("getBddReachabilityAnalysisFactory").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning DataPlane dataPlane = loadDataPlane(snapshot); return new BDDReachabilityAnalysisFactory( pkt, loadConfigurations(snapshot), dataPlane.getForwardingAnalysis(), new IpsRoutedOutInterfacesFactory(dataPlane.getFibs()), ignoreFilters, false); } finally { span.finish(); } } public BDDReachabilityAnalysis getBddReachabilityAnalysis( NetworkSnapshot snapshot, BDDPacket pkt, IpSpaceAssignment srcIpSpaceAssignment, AclLineMatchExpr initialHeaderSpace, Set<String> forbiddenTransitNodes, Set<String> requiredTransitNodes, Set<String> finalNodes, Set<FlowDisposition> actions, 
boolean ignoreFilters, boolean useInterfaceRoots) { BDDReachabilityAnalysisFactory factory = getBddReachabilityAnalysisFactory(snapshot, pkt, ignoreFilters); return factory.bddReachabilityAnalysis( srcIpSpaceAssignment, initialHeaderSpace, forbiddenTransitNodes, requiredTransitNodes, finalNodes, actions, useInterfaceRoots); } /** * Return a set of flows (at most 1 per source {@link Location}) for which reachability has been * reduced by the change from base to delta snapshot. */ @Override public DifferentialReachabilityResult bddDifferentialReachability( NetworkSnapshot snapshot, NetworkSnapshot reference, DifferentialReachabilityParameters parameters) { Span span = GlobalTracer.get().buildSpan("bddDifferentialReachability").start(); try (Scope scope = GlobalTracer.get().scopeManager().activate(span)) { assert scope != null; // avoid unused warning checkArgument( !parameters.getFlowDispositions().isEmpty(), "Must specify at least one FlowDisposition"); BDDPacket pkt = new BDDPacket(); AclLineMatchExpr headerSpace = parameters.getInvertSearch() ? not(parameters.getHeaderSpace()) : parameters.getHeaderSpace(); /* * TODO should we have separate parameters for base and delta? * E.g. suppose we add a host subnet in the delta network. This would be a source of * differential reachability, but we currently won't find it because it won't be in the * IpSpaceAssignment. 
*/ Map<IngressLocation, BDD> baseAcceptBDDs = getBddReachabilityAnalysisFactory(snapshot, pkt, parameters.getIgnoreFilters()) .getAllBDDs( parameters.getIpSpaceAssignment(), headerSpace, parameters.getForbiddenTransitNodes(), parameters.getRequiredTransitNodes(), parameters.getFinalNodes(), parameters.getFlowDispositions()); Map<IngressLocation, BDD> deltaAcceptBDDs = getBddReachabilityAnalysisFactory(reference, pkt, parameters.getIgnoreFilters()) .getAllBDDs( parameters.getIpSpaceAssignment(), headerSpace, parameters.getForbiddenTransitNodes(), parameters.getRequiredTransitNodes(), parameters.getFinalNodes(), parameters.getFlowDispositions()); Set<IngressLocation> commonSources = Sets.intersection(baseAcceptBDDs.keySet(), deltaAcceptBDDs.keySet()); Set<Flow> decreasedFlows = getDifferentialFlows(pkt, commonSources, baseAcceptBDDs, deltaAcceptBDDs); Set<Flow> increasedFlows = getDifferentialFlows(pkt, commonSources, deltaAcceptBDDs, baseAcceptBDDs); return new DifferentialReachabilityResult(increasedFlows, decreasedFlows); } finally { span.finish(); } } private static Set<Flow> getDifferentialFlows( BDDPacket pkt, Set<IngressLocation> commonSources, Map<IngressLocation, BDD> includeBDDs, Map<IngressLocation, BDD> excludeBDDs) { return commonSources.stream() .flatMap( source -> { BDD difference = includeBDDs.get(source).diff(excludeBDDs.get(source)); if (difference.isZero()) { return Stream.of(); } Flow.Builder flow = pkt.getFlow(difference) .orElseThrow(() -> new BatfishException("Error getting flow from BDD")); // set flow parameters flow.setIngressNode(source.getNode()); switch (source.getType()) { case VRF: flow.setIngressVrf(source.getVrf()); break; case INTERFACE_LINK: flow.setIngressInterface(source.getInterface()); break; default: throw new BatfishException("Unexpected IngressLocationType: " + source.getType()); } return Stream.of(flow.build()); }) .collect(ImmutableSet.toImmutableSet()); } private void writeJsonAnswer(String structuredAnswerString) throws 
IOException { SnapshotId referenceSnapshot = _settings.getDiffQuestion() ? _referenceSnapshot : null; NetworkId networkId = _settings.getContainer(); QuestionId questionId = _settings.getQuestionName(); AnalysisId analysisId = _settings.getAnalysisName(); NodeRolesId networkNodeRolesId = _idResolver .getNetworkNodeRolesId(networkId) .orElse(NodeRolesId.DEFAULT_NETWORK_NODE_ROLES_ID); AnswerId baseAnswerId = _idResolver.getAnswerId( networkId, _snapshot, questionId, networkNodeRolesId, referenceSnapshot, analysisId); _storage.storeAnswer(structuredAnswerString, baseAnswerId); } private void writeJsonAnswerWithLog( String answerOutput, String workJsonLogAnswerString, boolean writeLog) throws IOException { if (writeLog && _settings.getTaskId() != null) { _storage.storeWorkJson( workJsonLogAnswerString, _settings.getContainer(), _settings.getTestrig(), _settings.getTaskId()); } // Write answer if WorkItem was answering a question if (_settings.getQuestionName() != null) { writeJsonAnswer(answerOutput); } } @Override public @Nullable Answerer createAnswerer(@Nonnull Question question) { AnswererCreator creator = _answererCreators.get(question.getName()); return creator != null ? creator.create(question, this) : null; } private static final Logger LOGGER = LogManager.getLogger(Batfish.class); }
import java.util.Scanner;

/**
 * <dl>
 * <dt>Purpose:
 * <dd>Simple example program
 *
 * <dt>Description:
 * <dd>This program asks the user for first and last name and prints them out
 * </dl>
 */
public class InputOutput {

    /**
     * Entry point: reads the user's first and last name from standard input and
     * prints a greeting containing both.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // try-with-resources closes the Scanner when done (fixes the resource
        // leak in the original, which never closed it)
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.println("What is your first name?");
            String firstName = scanner.nextLine();

            System.out.println("What is your last name?");
            String lastName = scanner.nextLine();

            // print welcome message on screen including the saved input
            System.out.println("Hello! I thought that your name might be "
                    + firstName + " " + lastName + "!");
        }
    }
}
package controllers;

import com.mashape.unirest.http.exceptions.UnirestException;
import factories.BidirectionalLoginDataCustomFactory;
import factories.BidirectionalPendingPasswordResetFactory;
import factories.BidirectionalQuestionFactory;
import factories.BidirectionalUserFactory;
import funWebMailer.FunWebMailer;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import pojos.LoginDataCustom;
import pojos.PendingPasswordReset;
import pojos.Question;
import pojos.User;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

/**
 * Main Spring MVC controller: registration, login/logout, password recovery/reset,
 * page routing guarded by the "username" session attribute, and a few JSON admin endpoints.
 *
 * <p>NOTE(review): passwords are "hashed" with {@code String.hashCode()} throughout this
 * class — that is trivially reversible/collidable and must be replaced with a real password
 * hash (bcrypt/scrypt/argon2) before production use.
 */
@Controller
@SessionAttributes(value = "username")
public class MainController {

    // NOTE(review): never assigned in the visible code; referenced only by commented-out
    // endpoints below. A shared mutable field on a singleton controller is also not
    // request-safe — confirm before reviving those endpoints.
    User loggedInUser = null;

    /** Shows the statistics page; requires a logged-in session. */
    @RequestMapping(value ="statistics", method = RequestMethod.GET)
    public String getStatistics(HttpServletRequest request) {
        String username = (String) request.getSession().getAttribute("username");
        if (username == null) {
            return "error";
        }
        return "statistics";
    }

    /** Shows the registration page (no auth required). */
    @RequestMapping(value = "/register", method = RequestMethod.GET)
    public String getRegisterPage(HttpServletRequest request) {
        return "register";
    }

    /** Shows the change-password page. NOTE(review): no session check here, unlike siblings. */
    @RequestMapping(value = "/change_password", method = RequestMethod.GET)
    public String getChangePassword(HttpServletRequest request) {
        return "change_password";
    }

    /** Shows the recover-password page. */
    @RequestMapping(value = "/recover_password", method = RequestMethod.GET)
    public String getRecoverPasswordPage() {
        return "recover_password";
    }

    /**
     * Creates a password-reset token for {@code username}, persists it, and mails the
     * reset link.
     *
     * <p>NOTE(review): if the user lookup fails, {@code user} stays null and
     * {@code user.getName()} below throws NPE — needs a guard / error view.
     */
    @RequestMapping(
            value = "/recoverPassword",
            method = RequestMethod.POST
    )
    public ModelAndView postRecoverPassword(
            @RequestParam(name = "username") String username) {
        User user = null;
        try {
            user = BidirectionalUserFactory.newInstance(username);
        } catch (UnirestException e) {
            e.printStackTrace();
        }

        String recoverUrlToken = UUID.randomUUID().toString();
        // send mail with reset link for the password
        String recoverUrl = String.format("localhost:8089/reset_password/%s", recoverUrlToken);

        PendingPasswordReset pendingPasswordReset = new PendingPasswordReset();
        pendingPasswordReset.setId(0l); // Dummy
        pendingPasswordReset.setToken(recoverUrlToken);
        pendingPasswordReset.setUsername(user.getName());
        try {
            BidirectionalPendingPasswordResetFactory.persist(pendingPasswordReset);
        } catch (UnirestException e) {
            e.printStackTrace();
        }

        FunWebMailer.setResetPasswordLink(user.getName(), user.getEmail(), recoverUrl);
        return new ModelAndView("success_recover");
    }

    /** Shows the reset form if {@code token} matches a pending reset; else returns null. */
    @RequestMapping(value = "reset_password/{token}", method = RequestMethod.GET)
    public ModelAndView getResetPassword(@PathVariable String token) {
        PendingPasswordReset pendingPasswordReset = null;
        try {
            pendingPasswordReset = BidirectionalPendingPasswordResetFactory.newInstance(token);
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        if (pendingPasswordReset == null) {
            return null; // some error page
        }
        return new ModelAndView("reset_password");
    }

    /**
     * Applies a new password for the user associated with the reset {@code token}.
     *
     * <p>NOTE(review): {@code pendingPasswordReset} may still be null here (lookup failure or
     * bad token) — {@code getUsername()} would NPE. Password is stored as
     * {@code String.valueOf(newPassword1.hashCode())} — see class-level security note.
     */
    @RequestMapping(value = "reset_password/{token}", method = RequestMethod.POST)
    public ModelAndView postResetPassword(
            @PathVariable String token,
            @RequestParam(name = "new_password1") String newPassword1,
            @RequestParam(name = "new_password2") String newPassword2) {
        if (!newPassword1.equals(newPassword2)) {
            return null;
        }
        PendingPasswordReset pendingPasswordReset = null;
        try {
            pendingPasswordReset = BidirectionalPendingPasswordResetFactory.newInstance(token);
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        User user = null;
        try {
            user = BidirectionalUserFactory.newInstance(pendingPasswordReset.getUsername());
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        try {
            BidirectionalLoginDataCustomFactory.update(user.getId(),
                    String.valueOf(newPassword1.hashCode()));
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        return new ModelAndView("reset_password_success");
    }

    /**
     * Authenticates by comparing {@code password.hashCode()} with the stored value and, on
     * match, marks the session as logged in.
     *
     * <p>NOTE(review): redirects to /main_menu even when authentication FAILS (the redirect is
     * unconditional); null {@code user} from a failed lookup would NPE at
     * {@code user.getId()}. Confirm intended flow.
     */
    @RequestMapping(value = "/login", method = RequestMethod.POST)
    public ModelAndView doLogin(
            HttpServletRequest request,
            HttpServletResponse response,
            @RequestParam(name = "username") String username,
            @RequestParam(name = "password") String password) {
        User user = null;
        try {
            user = BidirectionalUserFactory.newInstance(username);
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        String actualPassword = null;
        // That's not the way it supposed to be
        try {
            actualPassword = BidirectionalLoginDataCustomFactory.getPassword(user.getId());
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        if (String.valueOf(password.hashCode()).equals(actualPassword)) {
            request.getSession().setAttribute("loggedIn", Boolean.TRUE);
            request.getSession().setAttribute("username", user.getName());
        }
        return new ModelAndView("redirect:/main_menu");
    }

    /** Shows the player-vs-player page for a logged-in session. */
    @RequestMapping(value="/pvp", method = RequestMethod.GET)
    public ModelAndView getPvpPage(HttpServletRequest request) {
        String username = (String) request.getSession().getAttribute("username");
        if (username == null) {
            return new ModelAndView("error");
        }
        ModelAndView modelAndView = new ModelAndView();
        modelAndView.addObject("username", request.getSession().getAttribute("username"));
        return modelAndView;
    }

    /** Shows the main menu for a logged-in session. */
    @RequestMapping(value="/main_menu", method = RequestMethod.GET)
    public String getMainMenuPage(HttpServletRequest request){
        String username = (String) request.getSession().getAttribute("username");
        if (username == null) {
            return "error";
        }
        return "main_menu";
    }

    /**
     * Shows the chat room.
     *
     * <p>NOTE(review): unlike the sibling pages this compares the raw attribute with
     * {@code .equals("")} — if the attribute is absent (null) this throws NPE instead of
     * rendering the error page. Should use the null check like getMainMenuPage.
     */
    @RequestMapping(value="/chat_room", method = RequestMethod.GET)
    public String getChatRoomPage(HttpServletRequest request){
        String username = (String) request.getSession().getAttribute("username");
        if (request.getSession().getAttribute("username").equals("")) {
            return "error";
        }
        return "chat_room";
    }

    /**
     * Shows the add-question page.
     *
     * <p>NOTE(review): same NPE hazard as getChatRoomPage — null session attribute.
     */
    @RequestMapping(value="/add_question", method = RequestMethod.GET)
    public String getAddQuestionPage(HttpServletRequest request) {
        if (request.getSession().getAttribute("username").equals("")) {
            return "error";
        }
        return "add_question";
    }

    /**
     * Username-availability check.
     *
     * <p>NOTE(review): implementation is entirely commented out; endpoint currently returns
     * null (empty response body) for every request.
     */
    @ResponseBody
    @RequestMapping(value = "/checkUsernameAvailable", method = RequestMethod.POST)
    public String checkValidUsername(@RequestParam String username) {
//        JSONObject json = new JSONObject();
//        JSONArray jsonArray = new JSONArray();
//        String suggestion = dao.checkIfValidUsername(username);
//        if (suggestion != null) {
//            try {
//                json.put("status", "taken");
//                json.put("suggestion", suggestion);
//            } catch (JSONException e) {
//                e.printStackTrace();
//            }
//            return json.toString();
//        } else {
//            try {
//                json.put("status", "ok");
//            } catch (JSONException e) {
//                e.printStackTrace();
//            }
//            return json.toString();
        return null;
    }

    /** Password-strength check. NOTE(review): stubbed out — always returns null. */
    @ResponseBody
    @RequestMapping(value = "/checkPasswordStrength", method = RequestMethod.POST)
    public String checkPasswordStrength(@RequestParam String password) {
//        JSONObject json = new JSONObject();
//        int strength = dao.checkPasswordStrengthness(password);
//        try {
//            json.put("strength", strength);
//        } catch (JSONException e) {
//            e.printStackTrace();
//        }
//        return json.toString();
        return null;
    }

    /**
     * Registers a new user and their login data, then redirects to the landing page.
     *
     * <p>NOTE(review): the duplicate-username check looks inverted — it reports
     * "Username already taken" when {@code newInstance(username) == null}, i.e. when the
     * lookup found NO existing user. Confirm factory semantics and likely flip the condition.
     * Also: the error ModelAndView objects built in the catch blocks are discarded
     * (never returned), and the registration notification mail is sent before validation
     * succeeds.
     */
    @RequestMapping(value = "/validateRegistration", method = RequestMethod.POST)
    public ModelAndView validateRegistration(
            @RequestParam(name = "email") String email,
            @RequestParam(name = "username") String username,
            @RequestParam(name = "password") String password) {
        if (email.equals("") || username.equals("") || password.equals("")) {
            ModelAndView modelAndView = new ModelAndView("redirect:/register");
            modelAndView.addObject("error", "Invalid credentials");
            return modelAndView;
        }
        FunWebMailer.sendTextRegisterNotification(username, email);
        try {
            if (BidirectionalUserFactory.newInstance(username) == null) {
                ModelAndView modelAndView = new ModelAndView("redirect:/register");
                modelAndView.addObject("error", "Username already taken");
                return modelAndView;
            }
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        User user = new User();
        user.setName(username);
        user.setUserRole("user");
        user.setEmail(email);
        user.setLoginType("custom");
        user.setLevel(0);
        user.setHintsLeft(0);
        user.setGoldLeft(0);
        user.setAvatarPath("/home");
        user.setId(0l);
        try {
            BidirectionalUserFactory.persist(user);
        } catch (UnirestException e) {
            e.printStackTrace();
            ModelAndView modelAndView = new ModelAndView("redirect:/register");
            modelAndView.addObject("error", "Error storing the new user");
        }
        try {
            user = BidirectionalUserFactory.newInstance(username);
        } catch (UnirestException e) {
            ModelAndView modelAndView = new ModelAndView("redirect:/register");
            modelAndView.addObject("error", "Error accessing the db service");
            e.printStackTrace();
        }
        LoginDataCustom loginDataCustom = new LoginDataCustom();
        loginDataCustom.setPassword(String.valueOf(password.hashCode()));
        loginDataCustom.setId(0l);
        loginDataCustom.setUserId(user.getId());
        try {
            BidirectionalLoginDataCustomFactory.persist(loginDataCustom);
        } catch (UnirestException e) {
            ModelAndView modelAndView = new ModelAndView("redirect:/register");
            modelAndView.addObject("error", "Error accesing the db service");
            e.printStackTrace();
        }
        return new ModelAndView("redirect:/");
    }

    /** Weakest-chapter stats. NOTE(review): stubbed out — always returns null. */
    @ResponseBody
    @RequestMapping(value = "/weakestChapter", method = RequestMethod.POST)
    public String getWeakestChapter() {
//        JSONObject json = new JSONObject();
//        try {
//            json.put("weakestChapter", dao.weakestChapter((int) loggedInUser.getId()));
//        } catch (JSONException e) {
//            e.printStackTrace();
//        }
//        return json.toString();
        return null;
    }

    /** Question-relevance check. NOTE(review): stubbed out — always returns null. */
    @ResponseBody
    @RequestMapping(value = "/isRelevant", method = RequestMethod.POST)
    public String getRelevance(@RequestParam(name ="id") Long id){
//        JSONObject json = new JSONObject();
//        try{
//            json.put("relevance", qDao.isRelevant(id));
//            if (qDao.getError() != null) {
//                json.put("error", "yes");
//                json.put("errorMessage", qDao.getError());
//            } else {
//                json.put("error", "no");
//            }
//        } catch (JSONException e){
//            e.printStackTrace();
//        }
//        return json.toString();
        return null;
    }

    /** Shows the admin panel for a logged-in session. */
    @RequestMapping(value = "/adminPanel", method = RequestMethod.GET)
    public ModelAndView getAdminPannel(HttpServletRequest request) {
        String username = (String) request.getSession().getAttribute("username");
        if (username == null) {
            return new ModelAndView("error");
        }
        return new ModelAndView("admin");
    }

    /** Logs out by dropping the "username" session attribute and shows the register page. */
    @RequestMapping(value = "/logout", method = RequestMethod.GET)
    public ModelAndView logout(HttpServletRequest request) {
        request.getSession().removeAttribute("username");
        return new ModelAndView("register");
    }

    /** Returns a JSON array of all usernames. NOTE(review): no auth/role check on this. */
    @ResponseBody
    @RequestMapping(value="/getUsersList" , method = RequestMethod.POST)
    public String getUsersList(){
        JSONArray jsonArray = new JSONArray();
        List<String> users = new ArrayList<String>();
        try {
            users = BidirectionalUserFactory.getAll();
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        for (String user : users) {
            JSONObject jsonUser = new JSONObject();
            try {
                jsonUser.put("username", user);
                jsonArray.put(jsonUser);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        return jsonArray.toString();
    }

    /** Removes the named user. NOTE(review): no auth/role check; returns null body. */
    @ResponseBody
    @RequestMapping(value="/banUser" , method = RequestMethod.POST)
    public String banUser(@RequestParam(name = "username") String username) {
        User toBan = new User();
        toBan.setName(username);
        System.out.println(toBan.getName());
        try {
            BidirectionalUserFactory.remove(toBan);
        } catch (UnirestException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Password update. NOTE(review): stubbed out — always returns null. */
    @ResponseBody
    @RequestMapping(value = "/updatePassword", method = RequestMethod.POST)
    public String updatePassword(@RequestParam(name = "newPassword") String newPassword) {
//        JSONObject json = new JSONObject();
//        dao.updateUserPassword(loggedInUser, newPassword);
//        try {
//            json.put("status", "success");
//        } catch (JSONException e) {
//            e.printStackTrace();
//        }
//        return json.toString();
        return null;
    }

    // NOTE(review): method body continues beyond this file chunk — incomplete from this view.
    @ResponseBody
    @RequestMapping(value = "/checkAlreadyReceived", method = RequestMethod.POST)
    public String checkAlreadyReceived(@RequestParam(name = "id") String id) {
        JSONObject json = new JSONObject();
try { json.put("receivedStatus", null); } catch (JSONException e) { e.printStackTrace(); } return null; } @RequestMapping(value = "/arena", method = RequestMethod.GET) public ModelAndView getArena(HttpServletRequest request) { String username = (String) request.getSession().getAttribute("username"); if (username == null) { return new ModelAndView("error"); } return new ModelAndView("arena"); } @RequestMapping(value = "/quick_chat", method = RequestMethod.GET) public ModelAndView quickChatPage(HttpServletRequest request, HttpServletResponse response) { String username = (String) request.getSession().getAttribute("username"); if (username == null) { return new ModelAndView("error"); } ModelAndView modelAndView = new ModelAndView("quick_chat"); modelAndView.addObject("username", username); return modelAndView; } }
package com.facebook.litho; import javax.annotation.CheckReturnValue; import javax.annotation.concurrent.GuardedBy; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.ref.WeakReference; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import android.content.Context; import android.content.ContextWrapper; import android.graphics.Rect; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.os.Message; import android.os.Process; import android.support.annotation.IntDef; import android.support.annotation.Keep; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.VisibleForTesting; import android.util.Log; import android.view.View; import android.view.ViewParent; import com.facebook.infer.annotation.ReturnsOwnership; import com.facebook.infer.annotation.ThreadConfined; import com.facebook.infer.annotation.ThreadSafe; import static com.facebook.litho.ComponentLifecycle.StateUpdate; import static com.facebook.litho.FrameworkLogEvents.EVENT_LAYOUT_CALCULATE; import static com.facebook.litho.FrameworkLogEvents.EVENT_PRE_ALLOCATE_MOUNT_CONTENT; import static com.facebook.litho.FrameworkLogEvents.PARAM_IS_BACKGROUND_LAYOUT; import static com.facebook.litho.FrameworkLogEvents.PARAM_LOG_TAG; import static com.facebook.litho.FrameworkLogEvents.PARAM_TREE_DIFF_ENABLED; import static com.facebook.litho.ThreadUtils.assertHoldsLock; import static com.facebook.litho.ThreadUtils.assertMainThread; import static com.facebook.litho.ThreadUtils.isMainThread; /** * Represents a tree of components and controls their life cycle. ComponentTree takes in a single * root component and recursively invokes its OnCreateLayout to create a tree of components. * ComponentTree is responsible for refreshing the mounted state of a component with new props. 
*
 * The usual use case for {@link ComponentTree} is:
 * <code>
 * ComponentTree component = ComponentTree.create(context, MyComponent.create());
 * myHostView.setRoot(component);
 * </code>
 */
@ThreadSafe
public class ComponentTree {

  private static final String TAG = ComponentTree.class.getSimpleName();
  // Sentinel for "no width/height spec has been set yet".
  private static final int SIZE_UNINITIALIZED = -1;

  // MainThread Looper messages:
  private static final int MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED = 1;

  private static final String DEFAULT_LAYOUT_THREAD_NAME = "ComponentLayoutThread";
  private static final int DEFAULT_LAYOUT_THREAD_PRIORITY = Process.THREAD_PRIORITY_BACKGROUND;

  // Values for mScheduleLayoutAfterMeasure: what (if any) layout to run once
  // the in-flight measure() finishes.
  private static final int SCHEDULE_NONE = 0;
  private static final int SCHEDULE_LAYOUT_ASYNC = 1;
  private static final int SCHEDULE_LAYOUT_SYNC = 2;

  // NOTE(review): these two fields are not read or written anywhere in the
  // visible portion of this class — confirm they are used elsewhere.
  private LithoDebugInfo mLithoDebugInfo;
  private boolean mReleased;

  @IntDef({SCHEDULE_NONE, SCHEDULE_LAYOUT_ASYNC, SCHEDULE_LAYOUT_SYNC})
  @Retention(RetentionPolicy.SOURCE)
  private @interface PendingLayoutCalculation {}

  private static final AtomicInteger sIdGenerator = new AtomicInteger(0);
  private static final Handler sMainThreadHandler = new ComponentMainThreadHandler();
  // Do not access sDefaultLayoutThreadLooper directly, use getDefaultLayoutThreadLooper().
  @GuardedBy("ComponentTree.class")
  private static volatile Looper sDefaultLayoutThreadLooper;

  // Per-thread Handler used by updateState() for posting sync state updates
  // on the caller's own looper.
  private static final ThreadLocal<WeakReference<Handler>> sSyncStateUpdatesHandler =
      new ThreadLocal<>();

  // Helpers to track view visibility when we are incrementally
  // mounting and partially invalidating. Main-thread scratch buffers, hence static.
  private static final int[] sCurrentLocation = new int[2];
  private static final int[] sParentLocation = new int[2];
  private static final Rect sParentBounds = new Rect();

  // Runnables posted to mLayoutThreadHandler / sync-update handlers.
  private final Runnable mCalculateLayoutRunnable = new Runnable() {
    @Override
    public void run() {
      calculateLayout(null, false);
    }
  };

  private final Runnable mAnimatedCalculateLayoutRunnable = new Runnable() {
    @Override
    public void run() {
      calculateLayout(null, true);
    }
  };

  private final Runnable mPreAllocateMountContentRunnable = new Runnable() {
    @Override
    public void run() {
      preAllocateMountContent();
    }
  };

  private final Runnable mUpdateStateSyncRunnable = new Runnable() {
    @Override
    public void run() {
      updateStateInternal(false);
    }
  };

  private final ComponentContext mContext;
  private final boolean mCanPrefetchDisplayLists;
  private final boolean mCanCacheDrawingDisplayLists;
  private final boolean mShouldClipChildren;

  // These variables are only accessed from the main thread.
  @ThreadConfined(ThreadConfined.UI)
  private boolean mIsMounting;
  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIncrementalMountEnabled;
  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIsLayoutDiffingEnabled;
  @ThreadConfined(ThreadConfined.UI)
  private boolean mIsAttached;
  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIsAsyncUpdateStateEnabled;
  @ThreadConfined(ThreadConfined.UI)
  private LithoView mLithoView;
  @ThreadConfined(ThreadConfined.UI)
  private LayoutHandler mLayoutThreadHandler;

  @GuardedBy("this")
  private boolean mHasViewMeasureSpec;

  // TODO(6606683): Enable recycling of mComponent. We will need to ensure there
  // are no background threads referencing mComponent. We'll need to keep a
  // reference count or something. :-/
  @GuardedBy("this")
  private @Nullable Component<?> mRoot;

  @GuardedBy("this")
  private int mWidthSpec = SIZE_UNINITIALIZED;

  @GuardedBy("this")
  private int mHeightSpec = SIZE_UNINITIALIZED;

  // This is written to only by the main thread with the lock held, read from the main thread with
  // no lock held, or read from any other thread with the lock held.
  private LayoutState mMainThreadLayoutState;

  // The semantics here are tricky. Whenever you transfer mBackgroundLayoutState to a local that
  // will be accessed outside of the lock, you must set mBackgroundLayoutState to null to ensure
  // that the current thread alone has access to the LayoutState, which is single-threaded.
  @GuardedBy("this")
  private LayoutState mBackgroundLayoutState;

  @GuardedBy("this")
  private StateHandler mStateHandler;

  @ThreadConfined(ThreadConfined.UI)
  private RenderState mPreviousRenderState;

  @ThreadConfined(ThreadConfined.UI)
  private boolean mPreviousRenderStateSetFromBuilder = false;

  private final Object mLayoutLock;

  protected final int mId;

  @GuardedBy("this")
  private boolean mIsMeasuring;
  @PendingLayoutCalculation
  @GuardedBy("this")
  private int mScheduleLayoutAfterMeasure;

  // This flag is so we use the correct shouldAnimateTransitions flag when calculating
  // the LayoutState in measure -- we should respect the most recent setRoot* call.
  private volatile boolean mLastShouldAnimateTransitions;

  /** Creates a ComponentTree builder for the given context and root builder. */
  public static Builder create(ComponentContext context, Component.Builder<?> root) {
    return create(context, root.build());
  }

  /** Creates a ComponentTree builder for the given context and root component. */
  public static Builder create(ComponentContext context, Component<?> root) {
    return ComponentsPools.acquireComponentTreeBuilder(context, root);
  }

  /** Copies configuration from the builder; see {@code create} for the entry point. */
  protected ComponentTree(Builder builder) {
    mContext = ComponentContext.withComponentTree(builder.context, this);
    mRoot = builder.root;

    mIncrementalMountEnabled = builder.incrementalMountEnabled;
    mIsLayoutDiffingEnabled = builder.isLayoutDiffingEnabled;
    mLayoutThreadHandler = builder.layoutThreadHandler;
    mLayoutLock = builder.layoutLock;
    mIsAsyncUpdateStateEnabled = builder.asyncStateUpdates;
    mCanPrefetchDisplayLists = builder.canPrefetchDisplayLists;
    mCanCacheDrawingDisplayLists = builder.canCacheDrawingDisplayLists;
    mShouldClipChildren = builder.shouldClipChildren;

    // Fall back to the shared default background layout looper when the builder
    // did not supply a handler.
    if (mLayoutThreadHandler == null) {
      mLayoutThreadHandler = new DefaultLayoutHandler(getDefaultLayoutThreadLooper());
    }

    final StateHandler builderStateHandler = builder.stateHandler;
    mStateHandler = builderStateHandler == null
        ? StateHandler.acquireNewInstance(null)
        : builderStateHandler;

    if (builder.previousRenderState != null) {
      mPreviousRenderState = builder.previousRenderState;
      mPreviousRenderStateSetFromBuilder = true;
    }

    if (builder.overrideComponentTreeId != -1) {
      mId = builder.overrideComponentTreeId;
    } else {
      mId = generateComponentTreeId();
    }
  }

  @ThreadConfined(ThreadConfined.UI)
  LayoutState getMainThreadLayoutState() {
    return mMainThreadLayoutState;
  }

  @VisibleForTesting
  protected LayoutState getBackgroundLayoutState() {
    return mBackgroundLayoutState;
  }

  /**
   * Picks the best LayoutState and sets it in mMainThreadLayoutState. The return value
   * is a LayoutState that must be released (after the lock is released). This
   * awkward contract is necessary to ensure thread-safety.
*/
  @CheckReturnValue
  @ReturnsOwnership
  @ThreadConfined(ThreadConfined.UI)
  private LayoutState setBestMainThreadLayoutAndReturnOldLayout() {
    assertHoldsLock(this);

    // If everything matches perfectly then we prefer mMainThreadLayoutState
    // because that means we don't need to remount.
    boolean isMainThreadLayoutBest;
    if (isCompatibleComponentAndSpec(mMainThreadLayoutState)) {
      isMainThreadLayoutBest = true;
    } else if (isCompatibleSpec(mBackgroundLayoutState, mWidthSpec, mHeightSpec)
        || !isCompatibleSpec(mMainThreadLayoutState, mWidthSpec, mHeightSpec)) {
      // If mMainThreadLayoutState isn't a perfect match, we'll prefer
      // mBackgroundLayoutState since it will have the more recent create.
      isMainThreadLayoutBest = false;
    } else {
      // If the main thread layout is still compatible size-wise, and the
      // background one is not, then we'll do nothing. We want to keep the same
      // main thread layout so that we don't force main thread re-layout.
      isMainThreadLayoutBest = true;
    }

    if (isMainThreadLayoutBest) {
      // We don't want to hold onto mBackgroundLayoutState since it's unlikely
      // to ever be used again. We return mBackgroundLayoutState to indicate it
      // should be released after exiting the lock.
      LayoutState toRelease = mBackgroundLayoutState;
      mBackgroundLayoutState = null;
      return toRelease;
    } else {
      // Since we are changing layout states we'll need to remount.
      if (mLithoView != null) {
        mLithoView.setMountStateDirty();
      }

      LayoutState toRelease = mMainThreadLayoutState;
      mMainThreadLayoutState = mBackgroundLayoutState;
      mBackgroundLayoutState = null;

      return toRelease;
    }
  }

  /**
   * Main-thread callback after a background layout completes: promotes the best
   * layout state, releases the loser, then either requests an Android layout
   * (size changed) or remounts in place.
   */
  private void backgroundLayoutStateUpdated() {
    assertMainThread();

    // If we aren't attached, then we have nothing to do. We'll handle
    // everything in onAttach.
    if (!mIsAttached) {
      return;
    }

    LayoutState toRelease;
    boolean layoutStateUpdated;
    int componentRootId;
    synchronized (this) {
      if (mRoot == null) {
        // We have been released. Abort.
        return;
      }

      LayoutState oldMainThreadLayoutState = mMainThreadLayoutState;
      toRelease = setBestMainThreadLayoutAndReturnOldLayout();
      layoutStateUpdated = (mMainThreadLayoutState != oldMainThreadLayoutState);
      componentRootId = mRoot.getId();
    }

    // Release outside the lock, per the contract of
    // setBestMainThreadLayoutAndReturnOldLayout().
    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    if (!layoutStateUpdated) {
      return;
    }

    // We defer until measure if we don't yet have a width/height
    int viewWidth = mLithoView.getMeasuredWidth();
    int viewHeight = mLithoView.getMeasuredHeight();
    if (viewWidth == 0 && viewHeight == 0) {
      // The host view has not been measured yet.
      return;
    }

    final boolean needsAndroidLayout = !isCompatibleComponentAndSize(
        mMainThreadLayoutState,
        componentRootId,
        viewWidth,
        viewHeight);

    if (needsAndroidLayout) {
      mLithoView.requestLayout();
    } else {
      mountComponentIfDirty();
    }
  }

  /**
   * Called when the hosting LithoView is attached to a window. Promotes the best
   * layout state and either requests layout or rebinds the existing mount.
   */
  void attach() {
    assertMainThread();

    if (mLithoView == null) {
      throw new IllegalStateException("Trying to attach a ComponentTree without a set View");
    }

    LayoutState toRelease;
    int componentRootId;
    synchronized (this) {
      // We need to track that we are attached regardless...
      mIsAttached = true;

      // ... and then we do state transfer
      toRelease = setBestMainThreadLayoutAndReturnOldLayout();
      componentRootId = mRoot.getId();
    }

    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    // We defer until measure if we don't yet have a width/height
    int viewWidth = mLithoView.getMeasuredWidth();
    int viewHeight = mLithoView.getMeasuredHeight();
    if (viewWidth == 0 && viewHeight == 0) {
      // The host view has not been measured yet.
      return;
    }

    final boolean needsAndroidLayout = !isCompatibleComponentAndSize(
        mMainThreadLayoutState,
        componentRootId,
        viewWidth,
        viewHeight);

    if (needsAndroidLayout || mLithoView.isMountStateDirty()) {
      mLithoView.requestLayout();
    } else {
      mLithoView.rebind();
    }
  }

  /** True when both contexts unwrap to the same base Context instance. */
  private static boolean hasSameBaseContext(Context context1, Context context2) {
    return getBaseContext(context1) == getBaseContext(context2);
  }

  /** Unwraps ContextWrapper chains down to the base Context. */
  private static Context getBaseContext(Context context) {
    Context baseContext = context;
    while (baseContext instanceof ContextWrapper) {
      baseContext = ((ContextWrapper) baseContext).getBaseContext();
    }
    return baseContext;
  }

  @ThreadConfined(ThreadConfined.UI)
  boolean isMounting() {
    return mIsMounting;
  }

  /** Mounts (incrementally if enabled) when the mount state is dirty; returns whether it did. */
  private boolean mountComponentIfDirty() {
    if (mLithoView.isMountStateDirty()) {
      if (mIncrementalMountEnabled) {
        incrementalMountComponent();
      } else {
        mountComponent(null);
      }
      return true;
    }
    return false;
  }

  /** Mounts only the portion of the tree that is currently visible on screen. */
  void incrementalMountComponent() {
    assertMainThread();

    if (!mIncrementalMountEnabled) {
      throw new IllegalStateException("Calling incrementalMountComponent() but incremental mount"
          + " is not enabled");
    }

    // Per ComponentTree visible area. Because LithoViews can be nested and mounted
    // not in "depth order", this variable cannot be static.
    final Rect currentVisibleArea = ComponentsPools.acquireRect();
    if (getVisibleRect(currentVisibleArea)) {
      mountComponent(currentVisibleArea);
    }
    // if false: no-op, doesn't have visible area, is not ready or not attached
    ComponentsPools.release(currentVisibleArea);
  }

  /**
   * Computes the LithoView's visible rect (in its own coordinates) by intersecting
   * with its parent's on-screen bounds. Returns false when nothing is visible.
   */
  private boolean getVisibleRect(Rect visibleBounds) {
    assertMainThread();

    getLocationAndBoundsOnScreen(mLithoView, sCurrentLocation, visibleBounds);

    final ViewParent viewParent = mLithoView.getParent();
    if (viewParent instanceof View) {
      View parent = (View) viewParent;
      getLocationAndBoundsOnScreen(parent, sParentLocation, sParentBounds);
      if (!visibleBounds.setIntersect(visibleBounds, sParentBounds)) {
        return false;
      }
    }

    // Translate from screen coordinates into the LithoView's own coordinates.
    visibleBounds.offset(-sCurrentLocation[0], -sCurrentLocation[1]);

    return true;
  }

  /** Fills location with the view's screen position and bounds with its screen rect. */
  private static void getLocationAndBoundsOnScreen(View view, int[] location, Rect bounds) {
    assertMainThread();

    view.getLocationOnScreen(location);
    bounds.set(
        location[0],
        location[1],
        location[0] + view.getWidth(),
        location[1] + view.getHeight());
  }

  /**
   * Mounts the current main-thread layout state into the LithoView, applying and
   * recording previous-render info around a dirty mount.
   */
  void mountComponent(Rect currentVisibleArea) {
    assertMainThread();

    final boolean isDirtyMount = mLithoView.isMountStateDirty();

    mIsMounting = true;

    if (isDirtyMount) {
      applyPreviousRenderInfo(mMainThreadLayoutState);
    }

    // currentVisibleArea null or empty => mount all
    mLithoView.mount(mMainThreadLayoutState, currentVisibleArea);

    if (isDirtyMount) {
      recordRenderInfo(mMainThreadLayoutState);
    }

    mIsMounting = false;
  }

  /** Applies saved previous-render info to components that asked for it, if any. */
  private void applyPreviousRenderInfo(LayoutState layoutState) {
    final List<Component> components = layoutState.getComponentsNeedingPreviousRenderInfo();
    if (components == null || components.isEmpty()) {
      return;
    }

    if (mPreviousRenderState == null) {
      return;
    }

    mPreviousRenderState.applyPreviousRenderInfo(components);
  }

  /** Records render info for components that need it, allocating the RenderState lazily. */
  private void recordRenderInfo(LayoutState layoutState) {
    final List<Component> components = layoutState.getComponentsNeedingPreviousRenderInfo();
    if (components == null || components.isEmpty()) {
      return;
    }

    if (mPreviousRenderState == null) {
mPreviousRenderState = ComponentsPools.acquireRenderState();
    }

    mPreviousRenderState.recordRenderInfo(components);
  }

  /** Called when the hosting LithoView is detached from its window. */
  void detach() {
    assertMainThread();

    synchronized (this) {
      mIsAttached = false;
      mHasViewMeasureSpec = false;
    }
  }

  /**
   * Set a new LithoView to this ComponentTree checking that they have the same context and
   * clear the ComponentTree reference from the previous LithoView if any.
   * Be sure this ComponentTree is detach first.
   */
  void setLithoView(@NonNull LithoView view) {
    assertMainThread();

    // It's possible that the view associated with this ComponentTree was recycled but was
    // never detached. In all cases we have to make sure that the old references between
    // lithoView and componentTree are reset.
    if (mIsAttached) {
      if (mLithoView != null) {
        mLithoView.setComponentTree(null);
      } else {
        detach();
      }
    } else if (mLithoView != null) {
      // Remove the ComponentTree reference from a previous view if any.
      mLithoView.clearComponentTree();
    }

    if (!hasSameBaseContext(view.getContext(), mContext)) {
      // This would indicate bad things happening, like leaking a context.
      throw new IllegalArgumentException(
          "Base view context differs, view context is: " + view.getContext()
              + ", ComponentTree context is: " + mContext);
    }

    mLithoView = view;
  }

  /** Drops the LithoView reference. Must not be called while still attached. */
  void clearLithoView() {
    assertMainThread();

    // Crash if the ComponentTree is mounted to a view.
    if (mIsAttached) {
      throw new IllegalStateException(
          "Clearing the LithoView while the ComponentTree is attached");
    }

    mLithoView = null;
  }

  /**
   * Main-thread measure pass: records the new specs, promotes the best layout
   * state, computes a layout synchronously if neither was compatible, writes the
   * resulting width/height into measureOutput, and finally runs any layout that
   * a state update scheduled while measuring.
   */
  void measure(int widthSpec, int heightSpec, int[] measureOutput, boolean forceLayout) {
    assertMainThread();

    Component component = null;
    LayoutState toRelease;
    synchronized (this) {
      mIsMeasuring = true;

      // This widthSpec/heightSpec is fixed until the view gets detached.
      mWidthSpec = widthSpec;
      mHeightSpec = heightSpec;
      mHasViewMeasureSpec = true;

      toRelease = setBestMainThreadLayoutAndReturnOldLayout();

      if (forceLayout || !isCompatibleComponentAndSpec(mMainThreadLayoutState)) {
        // Neither layout was compatible and we have to perform a layout.
        // Since outputs get set on the same object during the lifecycle calls,
        // we need to copy it in order to use it concurrently.
        component = mRoot.makeShallowCopy();
      }
    }

    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    if (component != null) {
      // TODO: We should re-use the existing CSSNodeDEPRECATED tree instead of re-creating it.
      if (mMainThreadLayoutState != null) {
        // It's beneficial to delete the old layout state before we start creating a new one since
        // we'll be able to re-use some of the layout nodes.
        LayoutState localLayoutState;
        synchronized (this) {
          localLayoutState = mMainThreadLayoutState;
          mMainThreadLayoutState = null;
        }
        localLayoutState.releaseRef();
      }

      // We have no layout that matches the given spec, so we need to compute it on the main thread.
      LayoutState localLayoutState = calculateLayoutState(
          mLayoutLock,
          mContext,
          component,
          widthSpec,
          heightSpec,
          mIsLayoutDiffingEnabled,
          mLastShouldAnimateTransitions,
          null);

      final StateHandler layoutStateStateHandler = localLayoutState.consumeStateHandler();
      synchronized (this) {
        if (layoutStateStateHandler != null) {
          mStateHandler.commit(layoutStateStateHandler);
          ComponentsPools.release(layoutStateStateHandler);
        }

        mMainThreadLayoutState = localLayoutState;
        localLayoutState = null;
      }

      // We need to force remount on layout
      mLithoView.setMountStateDirty();
    }

    measureOutput[0] = mMainThreadLayoutState.getWidth();
    measureOutput[1] = mMainThreadLayoutState.getHeight();

    int layoutScheduleType = SCHEDULE_NONE;
    Component root = null;

    synchronized (this) {
      mIsMeasuring = false;

      if (mScheduleLayoutAfterMeasure != SCHEDULE_NONE) {
        layoutScheduleType = mScheduleLayoutAfterMeasure;
        mScheduleLayoutAfterMeasure = SCHEDULE_NONE;
        root = mRoot.makeShallowCopy();
      }
    }

    if (layoutScheduleType != SCHEDULE_NONE) {
      // shouldAnimateTransitions - This is a scheduled layout from a state update, so we animate it
      setRootAndSizeSpecInternal(
          root,
          SIZE_UNINITIALIZED,
          SIZE_UNINITIALIZED,
          layoutScheduleType == SCHEDULE_LAYOUT_ASYNC,
          true /* = shouldAnimateTransitions */,
          null /*output */);
    }
  }

  /**
   * Returns {@code true} if the layout call mounted the component.
   */
  boolean layout() {
    assertMainThread();

    return mountComponentIfDirty();
  }

  /**
   * Returns whether incremental mount is enabled or not in this component.
   */
  public boolean isIncrementalMountEnabled() {
    return mIncrementalMountEnabled;
  }

  synchronized Component getRoot() {
    return mRoot;
  }

  /**
   * Update the root component. This can happen in both attached and detached states. In each case
   * we will run a layout and then proxy a message to the main thread to cause a
   * relayout/invalidate.
*/
  public void setRoot(Component<?> rootComponent) {
    setRoot(rootComponent, false);
  }

  /**
   * Sets a new component root, specifying whether to animate transitions where transition
   * animations have been specified.
   *
   * @see #setRoot
   */
  public void setRoot(Component<?> rootComponent, boolean shouldAnimateTransitions) {
    if (rootComponent == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        rootComponent,
        SIZE_UNINITIALIZED,
        SIZE_UNINITIALIZED,
        false /* isAsync */,
        shouldAnimateTransitions,
        null /* output */);
  }

  /**
   * Schedule to asynchronously pre-allocate the mount content of all MountSpec in this tree.
   * Must be called after layout is created, or after async layout is scheduled.
   */
  @ThreadSafe(enableChecks = false)
  public void preAllocateMountContentAsync() {
    // De-dupe: drop any previously queued preallocation before posting a new one.
    mLayoutThreadHandler.removeCallbacks(mPreAllocateMountContentRunnable);
    mLayoutThreadHandler.post(mPreAllocateMountContentRunnable);
  }

  /**
   * Pre-allocate the mount content of all MountSpec in this tree.
   * Must be called after layout is created.
   */
  @ThreadSafe(enableChecks = false)
  public void preAllocateMountContent() {
    final LayoutState toPrePopulate;

    // Cancel any scheduled preallocate requests we might have in the background queue
    // since we are starting the preallocation.
    mLayoutThreadHandler.removeCallbacks(mPreAllocateMountContentRunnable);

    synchronized (this) {
      // Prefer the main-thread layout; fall back to the background one.
      if (mMainThreadLayoutState != null) {
        toPrePopulate = mMainThreadLayoutState;
      } else {
        toPrePopulate = mBackgroundLayoutState;
      }
    }
    if (toPrePopulate == null) {
      return;
    }
    // Hold a ref for the duration of the preallocation.
    toPrePopulate.acquireRef();

    final ComponentsLogger logger = mContext.getLogger();
    LogEvent event = null;
    if (logger != null) {
      event = logger.newPerformanceEvent(EVENT_PRE_ALLOCATE_MOUNT_CONTENT);
      event.addParam(PARAM_LOG_TAG, mContext.getLogTag());
    }

    toPrePopulate.preAllocateMountContent();

    if (logger != null) {
      logger.log(event);
    }

    toPrePopulate.releaseRef();
  }

  /** Asynchronous variant of {@link #setRoot(Component)}. */
  public void setRootAsync(Component<?> rootComponent) {
    if (rootComponent == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        rootComponent,
        SIZE_UNINITIALIZED,
        SIZE_UNINITIALIZED,
        true /* isAsync */,
        false /* shouldAnimateTransitions */,
        null /* output */);
  }

  /** Queues a state update without triggering a layout calculation. */
  synchronized void updateStateLazy(String componentKey, StateUpdate stateUpdate) {
    if (mRoot == null) {
      return;
    }

    mStateHandler.queueStateUpdate(componentKey, stateUpdate);
  }

  /**
   * Queues a state update and schedules a synchronous layout on the caller's own
   * looper (or the background layout thread when the caller has no looper).
   */
  void updateState(String componentKey, StateUpdate stateUpdate) {
    synchronized (this) {
      if (mRoot == null) {
        return;
      }

      mStateHandler.queueStateUpdate(componentKey, stateUpdate);
    }

    Looper looper = Looper.myLooper();

    if (looper == null) {
      Log.w(
          TAG,
          "You cannot update state synchronously from a thread without a looper, "
              + "using the default background layout thread instead");
      mLayoutThreadHandler.removeCallbacks(mUpdateStateSyncRunnable);
      mLayoutThreadHandler.post(mUpdateStateSyncRunnable);
      return;
    }

    // Reuse (or lazily create) the per-thread handler so repeated updates on the
    // same thread coalesce via removeCallbacks before re-posting.
    Handler handler;
    synchronized (this) {
      final WeakReference<Handler> handlerWr = sSyncStateUpdatesHandler.get();
      if (handlerWr != null && handlerWr.get() != null) {
        handler = handlerWr.get();
        handler.removeCallbacks(mUpdateStateSyncRunnable);
      } else {
        handler = new Handler(looper);
        sSyncStateUpdatesHandler.set(new WeakReference<>(handler));
      }
    }

    handler.post(mUpdateStateSyncRunnable);
  }

  /** Queues a state update and triggers an asynchronous layout; requires the feature enabled. */
  void updateStateAsync(String componentKey, StateUpdate stateUpdate) {
    if (!mIsAsyncUpdateStateEnabled) {
      throw new RuntimeException("Triggering async state updates on this component tree is "
          + "disabled, use sync state updates.");
    }

    synchronized (this) {
      if (mRoot == null) {
        return;
      }

      mStateHandler.queueStateUpdate(componentKey, stateUpdate);
    }

    updateStateInternal(true);
  }

  /**
   * Kicks off the layout for queued state updates, or defers it until the
   * in-flight measure() completes (recording the requested sync/async mode).
   */
  void updateStateInternal(boolean isAsync) {
    final Component<?> root;
    synchronized (this) {
      if (mRoot == null) {
        return;
      }

      if (mIsMeasuring) {
        // If the layout calculation was already scheduled to happen synchronously let's just go
        // with a sync layout calculation.
        if (mScheduleLayoutAfterMeasure == SCHEDULE_LAYOUT_SYNC) {
          return;
        }

        mScheduleLayoutAfterMeasure = isAsync ? SCHEDULE_LAYOUT_ASYNC : SCHEDULE_LAYOUT_SYNC;
        return;
      }

      root = mRoot.makeShallowCopy();
    }

    setRootAndSizeSpecInternal(
        root,
        SIZE_UNINITIALIZED,
        SIZE_UNINITIALIZED,
        isAsync,
        true /* shouldAnimateTransitions */,
        null /*output */);
  }

  /**
   * Update the width/height spec. This is useful if you are currently detached and are responding
   * to a configuration change. If you are currently attached then the HostView is the source of
   * truth for width/height, so this call will be ignored.
   */
  public void setSizeSpec(int widthSpec, int heightSpec) {
    setSizeSpec(widthSpec, heightSpec, null);
  }

  /**
   * Same as {@link #setSizeSpec(int, int)} but fetches the resulting width/height
   * in the given {@link Size}.
*/
  public void setSizeSpec(int widthSpec, int heightSpec, Size output) {
    setRootAndSizeSpecInternal(
        null,
        widthSpec,
        heightSpec,
        false /* isAsync */,
        false /* shouldAnimateTransitions */,
        output /* output */);
  }

  /** Asynchronous variant of {@link #setSizeSpec(int, int)}. */
  public void setSizeSpecAsync(int widthSpec, int heightSpec) {
    setRootAndSizeSpecInternal(
        null,
        widthSpec,
        heightSpec,
        true /* isAsync */,
        false /* shouldAnimateTransitions */,
        null /* output */);
  }

  /**
   * Compute asynchronously a new layout with the given component root and sizes
   */
  public void setRootAndSizeSpecAsync(Component<?> root, int widthSpec, int heightSpec) {
    setRootAndSizeSpecAsync(root, widthSpec, heightSpec, false);
  }

  /**
   * Like {@link #setRootAndSizeSpecAsync}, allowing specification of whether transitions should be
   * animated where transition animations have been specified.
   */
  public void setRootAndSizeSpecAsync(
      Component<?> root,
      int widthSpec,
      int heightSpec,
      boolean shouldAnimateTransitions) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root,
        widthSpec,
        heightSpec,
        true /* isAsync */,
        shouldAnimateTransitions,
        null /* output */);
  }

  /**
   * Compute a new layout with the given component root and sizes
   */
  public void setRootAndSizeSpec(Component<?> root, int widthSpec, int heightSpec) {
    setRootAndSizeSpec(root, widthSpec, heightSpec, false);
  }

  /**
   * Like {@link #setRootAndSizeSpec}, allowing specification of whether transitions should be
   * animated where transition animations have been specified.
   */
  public void setRootAndSizeSpec(
      Component<?> root,
      int widthSpec,
      int heightSpec,
      boolean shouldAnimateTransitions) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root,
        widthSpec,
        heightSpec,
        false /* isAsync */,
        shouldAnimateTransitions,
        null /* output */);
  }

  /** Synchronous setRootAndSizeSpec that reports the computed size via {@code output}. */
  public void setRootAndSizeSpec(Component<?> root, int widthSpec, int heightSpec, Size output) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root,
        widthSpec,
        heightSpec,
        false /* isAsync */,
        false /* shouldAnimateTransitions */,
        output);
  }

  /**
   * @return the {@link LithoView} associated with this ComponentTree if any.
   */
  @Keep
  @Nullable
  public LithoView getLithoView() {
    assertMainThread();
    return mLithoView;
  }

  /**
   * Provides a new instance from the StateHandler pool that is initialized with the information
   * from the StateHandler currently held by the ComponentTree. Once the state updates have been
   * applied and we are back in the main thread the state handler gets released to the pool.
   * @return a copy of the state handler instance held by ComponentTree.
   */
  public synchronized StateHandler getStateHandler() {
    return StateHandler.acquireNewInstance(mStateHandler);
  }

  /**
   * Takes ownership of the {@link RenderState} object from this ComponentTree - this allows the
   * RenderState to be persisted somewhere and then set back on another ComponentTree using the
   * {@link Builder}. See {@link RenderState} for more information on the purpose of this object.
   */
  @ThreadConfined(ThreadConfined.UI)
  public RenderState consumePreviousRenderState() {
    final RenderState previousRenderState = mPreviousRenderState;

    mPreviousRenderState = null;
    mPreviousRenderStateSetFromBuilder = false;
    return previousRenderState;
  }

  /**
   * Shared implementation behind all setRoot*/setSizeSpec* entry points. Records
   * the new root/specs under the lock, bails out early when nothing relevant
   * changed (reporting the current size via {@code output} if requested), and
   * then either posts an async layout to the layout thread or computes one
   * synchronously.
   */
  private void setRootAndSizeSpecInternal(
      Component<?> root,
      int widthSpec,
      int heightSpec,
      boolean isAsync,
      boolean shouldAnimateTransitions,
      Size output) {
    synchronized (this) {
      mLastShouldAnimateTransitions = shouldAnimateTransitions;

      final Map<String, List<StateUpdate>> pendingStateUpdates =
          mStateHandler.getPendingStateUpdates();
      // Pending state updates must not be short-circuited by the "root didn't
      // change" check below, so force a new root id.
      if (pendingStateUpdates != null && pendingStateUpdates.size() > 0 && root != null) {
        root = root.makeShallowCopyWithNewId();
      }

      final boolean rootInitialized = root != null;
      final boolean widthSpecInitialized = widthSpec != SIZE_UNINITIALIZED;
      final boolean heightSpecInitialized = heightSpec != SIZE_UNINITIALIZED;

      if (mHasViewMeasureSpec && !rootInitialized) {
        // It doesn't make sense to specify the width/height while the HostView is attached and it
        // has been measured. We do not throw an Exception only because there can be race conditions
        // that can cause this to happen. In such race conditions, ignoring the setSizeSpec call is
        // the right thing to do.
        return;
      }

      final boolean widthSpecDidntChange = !widthSpecInitialized || widthSpec == mWidthSpec;
      final boolean heightSpecDidntChange = !heightSpecInitialized || heightSpec == mHeightSpec;
      final boolean sizeSpecDidntChange = widthSpecDidntChange && heightSpecDidntChange;
      final LayoutState mostRecentLayoutState =
          mBackgroundLayoutState != null ? mBackgroundLayoutState : mMainThreadLayoutState;
      final boolean allSpecsWereInitialized =
          widthSpecInitialized
              && heightSpecInitialized
              && mWidthSpec != SIZE_UNINITIALIZED
              && mHeightSpec != SIZE_UNINITIALIZED;
      final boolean sizeSpecsAreCompatible =
          sizeSpecDidntChange
              || (allSpecsWereInitialized
                  && mostRecentLayoutState != null
                  && LayoutState.hasCompatibleSizeSpec(
                      mWidthSpec,
                      mHeightSpec,
                      widthSpec,
                      heightSpec,
                      mostRecentLayoutState.getWidth(),
                      mostRecentLayoutState.getHeight()));
      final boolean rootDidntChange = !rootInitialized || root.getId() == mRoot.getId();

      if (rootDidntChange && sizeSpecsAreCompatible) {
        // The spec and the root haven't changed. Either we have a layout already, or we're
        // currently computing one on another thread.
        if (output != null) {
          output.height = mostRecentLayoutState.getHeight();
          output.width = mostRecentLayoutState.getWidth();
        }
        return;
      }

      if (widthSpecInitialized) {
        mWidthSpec = widthSpec;
      }

      if (heightSpecInitialized) {
        mHeightSpec = heightSpec;
      }

      if (rootInitialized) {
        mRoot = root;
      }
    }

    if (isAsync && output != null) {
      throw new IllegalArgumentException("The layout can't be calculated asynchronously if"
          + " we need the Size back");
    } else if (isAsync) {
      // De-dupe any previously queued layout before posting the new one.
      mLayoutThreadHandler.removeCallbacks(mCalculateLayoutRunnable);
      mLayoutThreadHandler.removeCallbacks(mAnimatedCalculateLayoutRunnable);
      mLayoutThreadHandler.post(
          shouldAnimateTransitions ? mAnimatedCalculateLayoutRunnable : mCalculateLayoutRunnable);
    } else {
      calculateLayout(output, shouldAnimateTransitions);
    }
  }

  /**
   * Calculates the layout.
   * @param output a destination where the size information should be saved
   * @param shouldAnimateTransitions whether component transitions should be animated
   */
  private void calculateLayout(Size output, boolean shouldAnimateTransitions) {
    int widthSpec;
    int heightSpec;
    Component<?> root;
    LayoutState previousLayoutState = null;

    // Cancel any scheduled layout requests we might have in the background queue
    // since we are starting a new layout computation.
    mLayoutThreadHandler.removeCallbacks(mCalculateLayoutRunnable);
    mLayoutThreadHandler.removeCallbacks(mAnimatedCalculateLayoutRunnable);

    synchronized (this) {
      // Can't compute a layout if specs or root are missing
      if (!hasSizeSpec() || mRoot == null) {
        return;
      }

      // Check if we already have a compatible layout.
      if (hasCompatibleComponentAndSpec()) {
        if (output != null) {
          final LayoutState mostRecentLayoutState =
              mBackgroundLayoutState != null ? mBackgroundLayoutState : mMainThreadLayoutState;
          output.width = mostRecentLayoutState.getWidth();
          output.height = mostRecentLayoutState.getHeight();
        }
        return;
      }

      // Snapshot the inputs under the lock; the actual calculation runs unlocked below.
      widthSpec = mWidthSpec;
      heightSpec = mHeightSpec;
      root = mRoot.makeShallowCopy();

      if (mMainThreadLayoutState != null) {
        previousLayoutState = mMainThreadLayoutState.acquireRef();
      }
    }

    final ComponentsLogger logger = mContext.getLogger();
    LogEvent layoutEvent = null;
    if (logger != null) {
      layoutEvent = logger.newPerformanceEvent(EVENT_LAYOUT_CALCULATE);
      layoutEvent.addParam(PARAM_LOG_TAG, mContext.getLogTag());
      layoutEvent.addParam(PARAM_TREE_DIFF_ENABLED, String.valueOf(mIsLayoutDiffingEnabled));
      layoutEvent.addParam(PARAM_IS_BACKGROUND_LAYOUT, String.valueOf(!ThreadUtils.isMainThread()));
    }

    LayoutState localLayoutState = calculateLayoutState(
        mLayoutLock,
        mContext,
        root,
        widthSpec,
        heightSpec,
        mIsLayoutDiffingEnabled,
        shouldAnimateTransitions,
        previousLayoutState != null ? previousLayoutState.getDiffTree() : null);

    if (output != null) {
      output.width = localLayoutState.getWidth();
      output.height = localLayoutState.getHeight();
    }

    if (previousLayoutState != null) {
      previousLayoutState.releaseRef();
      previousLayoutState = null;
    }

    boolean layoutStateUpdated = false;
    synchronized (this) {
      // Make sure some other thread hasn't computed a compatible layout in the meantime.
      if (!hasCompatibleComponentAndSpec() &&
          isCompatibleSpec(localLayoutState, mWidthSpec, mHeightSpec)) {

        if (localLayoutState != null) {
          final StateHandler layoutStateStateHandler =
              localLayoutState.consumeStateHandler();
          if (layoutStateStateHandler != null) {
            if (mStateHandler != null) { // we could have been released
              mStateHandler.commit(layoutStateStateHandler);
            }

            ComponentsPools.release(layoutStateStateHandler);
          }
        }

        // Set the new layout state, and remember the old layout state so we
        // can release it.
        LayoutState tmp = mBackgroundLayoutState;
        mBackgroundLayoutState = localLayoutState;
        localLayoutState = tmp;
        layoutStateUpdated = true;
      }
    }

    // Release whichever layout state lost the race (the old background one, or ours).
    if (localLayoutState != null) {
      localLayoutState.releaseRef();
      localLayoutState = null;
    }

    if (layoutStateUpdated) {
      postBackgroundLayoutStateUpdated();
    }

    if (logger != null) {
      logger.log(layoutEvent);
    }
  }

  /**
   * Transfer mBackgroundLayoutState to mMainThreadLayoutState. This will proxy
   * to the main thread if necessary. If the component/size-spec changes in the
   * meantime, then the transfer will be aborted.
   */
  private void postBackgroundLayoutStateUpdated() {
    if (isMainThread()) {
      // We need to possibly update mMainThreadLayoutState. This call will
      // cause the host view to be invalidated and re-laid out, if necessary.
      backgroundLayoutStateUpdated();
    } else {
      // If we aren't on the main thread, we send a message to the main thread
      // to invoke backgroundLayoutStateUpdated.
      sMainThreadHandler.obtainMessage(MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED, this)
          .sendToTarget();
    }
  }

  /**
   * The contract is that in order to release a ComponentTree, you must do so from the main
   * thread, or guarantee that it will never be accessed from the main thread again. Usually
   * HostView will handle releasing, but if you never attach to a host view, then you should call
   * release yourself.
   */
  public void release() {
    LayoutState mainThreadLayoutState;
    LayoutState backgroundLayoutState;
    synchronized (this) {
      mReleased = true;

      if (mLithoView != null) {
        mLithoView.setComponentTree(null);
      }
      mRoot = null;

      // Swap the layout-state references out under the lock; release them below, unlocked,
      // because releaseRef may do non-trivial work.
      mainThreadLayoutState = mMainThreadLayoutState;
      mMainThreadLayoutState = null;
      backgroundLayoutState = mBackgroundLayoutState;
      mBackgroundLayoutState = null;

      // TODO t15532529
      mStateHandler = null;

      // Only recycle the render state if we own it (i.e. it wasn't handed to us by a Builder).
      if (mPreviousRenderState != null && !mPreviousRenderStateSetFromBuilder) {
        ComponentsPools.release(mPreviousRenderState);
      }
      mPreviousRenderState = null;
      mPreviousRenderStateSetFromBuilder = false;
    }

    if (mainThreadLayoutState != null) {
      mainThreadLayoutState.releaseRef();
      mainThreadLayoutState = null;
    }

    if (backgroundLayoutState != null) {
      backgroundLayoutState.releaseRef();
      backgroundLayoutState = null;
    }
  }

  // Checks whether the given layout state matches the current root component and size specs.
  // Caller must hold the tree lock.
  private boolean isCompatibleComponentAndSpec(LayoutState layoutState) {
    assertHoldsLock(this);

    return mRoot != null && isCompatibleComponentAndSpec(
        layoutState, mRoot.getId(), mWidthSpec, mHeightSpec);
  }

  // Either the MainThreadLayout or the BackgroundThreadLayout is compatible with the current state.
  // True when either the main-thread or background layout state already matches the current
  // root/specs. Caller must hold the tree lock.
  private boolean hasCompatibleComponentAndSpec() {
    assertHoldsLock(this);

    return isCompatibleComponentAndSpec(mMainThreadLayoutState)
        || isCompatibleComponentAndSpec(mBackgroundLayoutState);
  }

  // True once both width and height specs have been set. Caller must hold the tree lock.
  private boolean hasSizeSpec() {
    assertHoldsLock(this);

    return mWidthSpec != SIZE_UNINITIALIZED
        && mHeightSpec != SIZE_UNINITIALIZED;
  }

  // Lazily creates the shared layout-thread Looper used when no custom handler is supplied.
  private static synchronized Looper getDefaultLayoutThreadLooper() {
    if (sDefaultLayoutThreadLooper == null) {
      HandlerThread defaultThread =
          new HandlerThread(DEFAULT_LAYOUT_THREAD_NAME, DEFAULT_LAYOUT_THREAD_PRIORITY);
      defaultThread.start();
      sDefaultLayoutThreadLooper = defaultThread.getLooper();
    }

    return sDefaultLayoutThreadLooper;
  }

  // A layout state is compatible when it matches the given specs AND the current
  // accessibility configuration.
  private static boolean isCompatibleSpec(
      LayoutState layoutState, int widthSpec, int heightSpec) {
    return layoutState != null
        && layoutState.isCompatibleSpec(widthSpec, heightSpec)
        && layoutState.isCompatibleAccessibility();
  }

  // As above, but additionally requires the layout state to belong to the given component id.
  private static boolean isCompatibleComponentAndSpec(
      LayoutState layoutState, int componentId, int widthSpec, int heightSpec) {
    return layoutState != null
        && layoutState.isCompatibleComponentAndSpec(componentId, widthSpec, heightSpec)
        && layoutState.isCompatibleAccessibility();
  }

  // Compatibility check against concrete pixel sizes rather than measure specs.
  private static boolean isCompatibleComponentAndSize(
      LayoutState layoutState, int componentId, int width, int height) {
    return layoutState != null
        && layoutState.isComponentId(componentId)
        && layoutState.isCompatibleSize(width, height)
        && layoutState.isCompatibleAccessibility();
  }

  public synchronized boolean isReleased() {
    return mReleased;
  }

  public ComponentContext getContext() {
    return mContext;
  }

  // Main-thread handler used to transfer a freshly computed background layout state to the UI.
  private static class ComponentMainThreadHandler extends Handler {
    private ComponentMainThreadHandler() {
      super(Looper.getMainLooper());
    }

    @Override
    public void handleMessage(Message msg) {
      switch (msg.what) {
        case MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED:
          ComponentTree that = (ComponentTree) msg.obj;
          that.backgroundLayoutStateUpdated();
          break;
        default:
          throw new IllegalArgumentException();
      }
    }
  }

  /**
   * Computes a new {@link LayoutState} for the given root and specs, optionally serialized on
   * an external lock. A fresh ComponentContext wrapping a copy of the current StateHandler is
   * created under the tree lock so state updates are captured consistently.
   *
   * @param lock optional external lock to serialize layout calculation on; may be null
   * @param diffNode previous layout's diff tree for incremental diffing; may be null
   */
  protected LayoutState calculateLayoutState(
      @Nullable Object lock,
      ComponentContext context,
      Component<?> root,
      int widthSpec,
      int heightSpec,
      boolean diffingEnabled,
      boolean shouldAnimateTransitions,
      @Nullable DiffNode diffNode) {
    final ComponentContext contextWithStateHandler;
    synchronized (this) {
      contextWithStateHandler =
          new ComponentContext(context, StateHandler.acquireNewInstance(mStateHandler));
    }

    if (lock != null) {
      synchronized (lock) {
        return LayoutState.calculate(
            contextWithStateHandler,
            root,
            mId,
            widthSpec,
            heightSpec,
            diffingEnabled,
            shouldAnimateTransitions,
            diffNode,
            mCanPrefetchDisplayLists,
            mCanCacheDrawingDisplayLists,
            mShouldClipChildren);
      }
    } else {
      return LayoutState.calculate(
          contextWithStateHandler,
          root,
          mId,
          widthSpec,
          heightSpec,
          diffingEnabled,
          shouldAnimateTransitions,
          diffNode,
          mCanPrefetchDisplayLists,
          mCanCacheDrawingDisplayLists,
          mShouldClipChildren);
    }
  }

  /**
   * A default {@link LayoutHandler} that will use a {@link Handler} with a {@link Thread}'s
   * {@link Looper}.
   */
  private static class DefaultLayoutHandler extends Handler implements LayoutHandler {
    private DefaultLayoutHandler(Looper threadLooper) {
      super(threadLooper);
    }
  }

  public static int generateComponentTreeId() {
    return sIdGenerator.getAndIncrement();
  }

  /**
   * A builder class that can be used to create a {@link ComponentTree}.
   */
  public static class Builder {

    // required
    private ComponentContext context;
    private Component<?> root;

    // optional
    private boolean incrementalMountEnabled = true;
    private boolean isLayoutDiffingEnabled = true;
    private LayoutHandler layoutThreadHandler;
    private Object layoutLock;
    private StateHandler stateHandler;
    private RenderState previousRenderState;
    private boolean asyncStateUpdates = true;
    private int overrideComponentTreeId = -1;
    private boolean canPrefetchDisplayLists = false;
    private boolean canCacheDrawingDisplayLists = false;
    private boolean shouldClipChildren = true;

    protected Builder() {
    }

    protected Builder(ComponentContext context, Component<?> root) {
      init(context, root);
    }

    protected void init(ComponentContext context, Component<?> root) {
      this.context = context;
      this.root = root;
    }

    // Resets every field to its default so the instance can be safely recycled via a pool.
    protected void release() {
      context = null;
      root = null;

      incrementalMountEnabled = true;
      isLayoutDiffingEnabled = true;
      layoutThreadHandler = null;
      layoutLock = null;
      stateHandler = null;
      previousRenderState = null;
      asyncStateUpdates = true;
      overrideComponentTreeId = -1;
      canPrefetchDisplayLists = false;
      canCacheDrawingDisplayLists = false;
      shouldClipChildren = true;
    }

    /**
     * Whether or not to enable the incremental mount optimization. True by default.
     * In order to use incremental mount you should disable mount diffing.
     *
     * @deprecated We will remove this option soon, please consider turning it on (which is on by
     * default)
     */
    public Builder incrementalMount(boolean isEnabled) {
      incrementalMountEnabled = isEnabled;
      return this;
    }

    /**
     * Whether or not to enable layout tree diffing. This will reduce the cost of
     * updates at the expense of using extra memory. True by default.
     *
     * @deprecated We will remove this option soon, please consider turning it on (which is on by
     * default)
     */
    public Builder layoutDiffing(boolean enabled) {
      isLayoutDiffingEnabled = enabled;
      return this;
    }

    /**
     * Specify the looper to use for running layouts on. Note that in rare cases
     * layout must run on the UI thread. For example, if you rotate the screen,
     * we must measure on the UI thread. If you don't specify a Looper here, the
     * Components default Looper will be used.
     */
    public Builder layoutThreadLooper(Looper looper) {
      if (looper != null) {
        layoutThreadHandler = new DefaultLayoutHandler(looper);
      }

      return this;
    }

    /**
     * Specify the looper to use for running layouts on. Note that in rare cases
     * layout must run on the UI thread. For example, if you rotate the screen,
     * we must measure on the UI thread. If you don't specify a Looper here, the
     * Components default Looper will be used.
     */
    public Builder layoutThreadHandler(LayoutHandler handler) {
      layoutThreadHandler = handler;
      return this;
    }

    /**
     * Specify a lock to be acquired during layout. This is an advanced feature
     * that can lead to deadlock if you don't know what you are doing.
     */
    public Builder layoutLock(Object layoutLock) {
      this.layoutLock = layoutLock;
      return this;
    }

    /**
     * Specify an initial state handler object that the ComponentTree can use to set the current
     * values for states.
     */
    public Builder stateHandler(StateHandler stateHandler) {
      this.stateHandler = stateHandler;
      return this;
    }

    /**
     * Specify an existing previous render state that the ComponentTree can use to set the current
     * values for providing previous versions of @Prop/@State variables.
     */
    public Builder previousRenderState(RenderState previousRenderState) {
      this.previousRenderState = previousRenderState;
      return this;
    }

    /**
     * Specify whether the ComponentTree allows async state updates. This is enabled by default.
     */
    public Builder asyncStateUpdates(boolean enabled) {
      this.asyncStateUpdates = enabled;
      return this;
    }

    /**
     * Gives the ability to override the auto-generated ComponentTree id: this is generally not
     * useful in the majority of circumstances, so don't use it unless you really know what you're
     * doing.
     */
    public Builder overrideComponentTreeId(int overrideComponentTreeId) {
      this.overrideComponentTreeId = overrideComponentTreeId;
      return this;
    }

    /**
     * Specify whether the ComponentTree allows to prefetch display lists of its components
     * on idle time of UI thread.
     *
     * NOTE: To make display lists prefetching work, besides setting this flag
     * {@link com.facebook.litho.utils.DisplayListUtils#prefetchDisplayLists(View)}
     * should be called on scrollable surfaces like {@link android.support.v7.widget.RecyclerView}
     * during scrolling.
     */
    public Builder canPrefetchDisplayLists(boolean canPrefetch) {
      this.canPrefetchDisplayLists = canPrefetch;
      return this;
    }

    /**
     * Specify whether the ComponentTree allows to cache display lists of the components after it
     * was first drawing.
     *
     * NOTE: To make display lists caching work, {@link #canPrefetchDisplayLists(boolean)} should
     * be set to true.
     */
    public Builder canCacheDrawingDisplayLists(boolean canCacheDrawingDisplayLists) {
      this.canCacheDrawingDisplayLists = canCacheDrawingDisplayLists;
      return this;
    }

    public Builder shouldClipChildren(boolean shouldClipChildren) {
      this.shouldClipChildren = shouldClipChildren;
      return this;
    }

    /**
     * Builds a {@link ComponentTree} using the parameters specified in this builder.
     */
    public ComponentTree build() {
      ComponentTree componentTree = new ComponentTree(this);
      // The builder is pooled; recycle it once the tree has copied its configuration.
      ComponentsPools.release(this);
      return componentTree;
    }
  }
}
// $Id: Config.java,v 1.7 2001/10/04 00:24:16 mdb Exp $ // samskivert library - useful routines for java programs // This library is free software; you can redistribute it and/or modify it // (at your option) any later version. // This library is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // You should have received a copy of the GNU Lesser General Public // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.samskivert.util; import java.io.IOException; import java.util.*; import com.samskivert.Log; /** * The config class provides a unified interaface to application * configuration information. It takes care of loading properties files * from locations in the classpath and binding the properties in those * files into the global config namespace. It also provides access to more * datatypes than simply strings, handling the parsing of ints as well as * int arrays and string arrays. * * <p> An application should construct a single instance of * <code>Config</code> and use it to access all of its configuration * information. */ public class Config { /** * Constructs a new config object which can be used immediately by * binding properties files into the namespace and subsequently * requesting values. */ public Config () { } /** * Binds the specified properties file into the namespace with the * specified name. If the properties file in question contains a * property of the name <code>foo.bar</code> and the file is bound * into the namespace under <code>baz</code>, then that property would * be accessed as <code>baz.foo.bar</code>. * * @param name the root name for all properties in this file. * @param path the path to the properties file which must live * somewhere in the classpath. For example: <code>foo/bar/baz</code> * would indicate a file named "foo/bar/baz.properties" living in the * classpath. 
* @param inherit if true, the properties file will be loaded using * {@link ConfigUtil#loadInheritedProperties} rather than {@link * ConfigUtil#loadProperties}. * * @exception IOException thrown if an error occurrs loading the * properties file (like it doesn't exist or cannot be accessed). */ public void bindProperties (String name, String path, boolean inherit) throws IOException { // append the file suffix onto the path path += PROPS_SUFFIX; // load the properties file Properties props = ConfigUtil.loadProperties(path); if (props == null) { throw new IOException("Unable to load properties file: " + path); } // bind the properties instance _props.put(name, props); } /** * A backwards compatibility method that does not use inherited * properties loading. * * @see #bindProperties(String,String,boolean) */ public void bindProperties (String name, String path) throws IOException { bindProperties(name, path, false); } /** * Fetches and returns the value for the specified configuration * property. If the value is not specified in the associated * properties file, the supplied default value is returned instead. If * the property specified in the file is poorly formatted (not and * integer, not in proper array specification), a warning message will * be logged and the default value will be returned. * * @param the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defval the value to return if the property is not specified * in the config file. * * @return the value of the requested property. 
*/ public int getValue (String name, int defval) { String val = resolveProperty(name); // if it's not specified, we return the default if (val == null) { return defval; } // otherwise parse it into an integer try { return Integer.parseInt(val); } catch (NumberFormatException nfe) { Log.warning("Malformed integer property [fqn=" + name + ", value=" + val + "]."); return defval; } } /** * Fetches and returns the value for the specified configuration * property. If the value is not specified in the associated * properties file, the supplied default value is returned instead. * * @param the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defval the value to return if the property is not specified * in the config file. * * @return the value of the requested property. */ public String getValue (String name, String defval) { String val = resolveProperty(name); // if it's not specified, we return the default return (val == null) ? defval : val; } /** * Fetches and returns the value for the specified configuration * property. If the value is not specified in the associated * properties file, the supplied default value is returned * instead. The returned value will be <code>false</code> if the * config value is <code>"false"</code> (case-insensitive), else * the return value will be true. * * @param the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defval the value to return if the property is not specified * in the config file. * * @return the value of the requested property. */ public boolean getValue (String name, boolean defval) { String val = resolveProperty(name); // if it's not specified, we return the default val = val.toLowerCase(); return (val == null) ? 
defval : (!val.equals("false")); } /** * Fetches and returns the value for the specified configuration * property. If the value is not specified in the associated * properties file, the supplied default value is returned instead. If * the property specified in the file is poorly formatted (not and * integer, not in proper array specification), a warning message will * be logged and the default value will be returned. * * @param the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defval the value to return if the property is not specified * in the config file. * * @return the value of the requested property. */ public int[] getValue (String name, int[] defval) { String val = resolveProperty(name); // if it's not specified, we return the default if (val == null) { return defval; } // otherwise parse it into an array of ints int[] result = StringUtil.parseIntArray(val); if (result == null) { Log.warning("Malformed int array property [fqn=" + name + ", value=" + val + "]."); return defval; } return result; } /** * Fetches and returns the value for the specified configuration * property. If the value is not specified in the associated * properties file, the supplied default value is returned instead. If * the property specified in the file is poorly formatted (not and * integer, not in proper array specification), a warning message will * be logged and the default value will be returned. * * @param key the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defval the value to return if the property is not specified * in the config file. * * @return the value of the requested property. 
*/ public String[] getValue (String name, String[] defval) { String val = resolveProperty(name); // if it's not specified, we return the default if (val == null) { return defval; } // otherwise parse it into an array of strings String[] result = StringUtil.parseStringArray(val); if (result == null) { Log.warning("Malformed string array property [fqn=" + name + ", value=" + val + "]."); return defval; } return result; } /** * Looks up the specified string-valued configuration entry, * loads the class with that name and instantiates a new instance * of that class, which is returned. * * @param key the fully qualified name of the property (fully qualified * meaning that it contains the namespace identifier as well), for * example: <code>foo.bar.baz</code>. * @param defcname the class name to use if the property is not * specified in the config file. * * @exception Exception thrown if any error occurs while loading * or instantiating the class. */ public Object instantiateValue (String name, String defcname) throws Exception { return Class.forName(getValue(name, defcname)).newInstance(); } /** * Returns the entire properties instance bound to a particular * namespace identifier. * * @return a properties instance that was bound to the specified * namespace identifier or null if nothing is bound to that * identifier. */ public Properties getProperties (String name) { return (Properties)_props.get(name); } /** * Returns an iterator that returns all of the configuration keys that * match the specified prefix. The prefix should at least contain a * namespace identifier but can contain further path components to * restrict the iteration. For example: <code>foo</code> would iterate * over every property key in the properties file that was bound to * <code>foo</code>. <code>foo.bar</code> would iterate over every * property key in the <code>foo</code> property file that began with * the string <code>bar</code>. 
* * <p> If an invalid or non-existent namespace identifier is supplied, * a warning will be logged and an empty iterator. */ public Iterator keys (String prefix) { String id = prefix; String key = ""; // parse the key prefix if one was provided int didx = prefix.indexOf("."); if (didx != -1) { id = prefix.substring(0, didx); key = prefix.substring(didx+1); } Properties props = (Properties)_props.get(id); if (props == null) { Log.warning("No property file bound to top-level name " + "[name=" + id + ", key=" + key + "]."); return new Iterator() { public boolean hasNext () { return false; } public Object next () { return null; } public void remove () { /* do nothing */ }; }; } return new PropertyIterator(key, props.keys()); } protected static class PropertyIterator implements Iterator { public PropertyIterator (String prefix, Enumeration enum) { _prefix = prefix; _enum = enum; scanToNext(); } public boolean hasNext () { return (_next != null); } public Object next () { String next = _next; scanToNext(); return next; } public void remove () { // not supported } protected void scanToNext () { // assume that nothing is left _next = null; while (_enum.hasMoreElements()) { String next = (String)_enum.nextElement(); if (next.startsWith(_prefix)) { _next = next; break; } } } protected String _prefix; protected Enumeration _enum; protected String _next; } protected String resolveProperty (String name) { int didx = name.indexOf("."); if (didx == -1) { Log.warning("Invalid fully qualified property name " + "[name=" + name + "]."); return null; } String id = name.substring(0, didx); String key = name.substring(didx+1); Properties props = (Properties)_props.get(id); if (props == null) { Log.warning("No property file bound to top-level name " + "[name=" + id + ", key=" + key + "]."); return null; } return props.getProperty(key); } public static void main (String[] args) { Config config = new Config(); try { config.bindProperties("test", "com/samskivert/util/test"); 
System.out.println("test.prop1: " + config.getValue("test.prop1", 1)); System.out.println("test.prop2: " + config.getValue("test.prop2", "two")); int[] ival = new int[] { 1, 2, 3 }; ival = config.getValue("test.prop3", ival); System.out.println("test.prop3: " + StringUtil.toString(ival)); String[] sval = new String[] { "one", "two", "three" }; sval = config.getValue("test.prop4", sval); System.out.println("test.prop4: " + StringUtil.toString(sval)); System.out.println("test.prop5: " + config.getValue("test.prop5", "undefined")); Iterator iter = config.keys("test.prop2"); while (iter.hasNext()) { System.out.println(iter.next()); } iter = config.keys("test.prop"); while (iter.hasNext()) { System.out.println(iter.next()); } iter = config.keys("test"); while (iter.hasNext()) { System.out.println(iter.next()); } } catch (IOException ioe) { ioe.printStackTrace(System.err); } } protected Hashtable _props = new Hashtable(); protected static final String PROPS_SUFFIX = ".properties"; }
package com.joelapenna.foursquared.util;

import com.joelapenna.foursquare.types.Checkin;
import com.joelapenna.foursquare.types.User;
import com.joelapenna.foursquare.types.Venue;

import android.text.TextUtils;
import android.text.format.DateUtils;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Static helpers for formatting checkin, venue and user data for display.
 *
 * NOTE: {@link SimpleDateFormat} is NOT thread-safe; these shared static
 * instances must only be used from a single thread (presumably the UI
 * thread) — TODO confirm with callers before using from background threads.
 *
 * @author Joe LaPenna (joe@joelapenna.com)
 * @author Mark Wyszomierski (markww@gmail.com)
 *         -Added date formats for today/yesterday/older contexts.
 */
public class StringFormatters {

    /** Parses the server's RFC-822-style timestamps. */
    public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(
            "EEE, dd MMM yy HH:mm:ss Z");

    /** Should look like "9:09 AM". */
    public static final SimpleDateFormat DATE_FORMAT_TODAY = new SimpleDateFormat(
            "h:mm a");

    /** Should look like "Sun 1:56 PM". */
    public static final SimpleDateFormat DATE_FORMAT_YESTERDAY = new SimpleDateFormat(
            "E h:mm a");

    /** Should look like "Sat Mar 20". */
    public static final SimpleDateFormat DATE_FORMAT_OLDER = new SimpleDateFormat(
            "E MMM d");

    /**
     * Returns the venue's cross street in parentheses if available, otherwise
     * "city, state zip" when all three are present, otherwise null.
     */
    public static String getVenueLocationCrossStreetOrCity(Venue venue) {
        if (!TextUtils.isEmpty(venue.getCrossstreet())) {
            return "(" + venue.getCrossstreet() + ")";
        } else if (!TextUtils.isEmpty(venue.getCity()) && !TextUtils.isEmpty(venue.getState())
                && !TextUtils.isEmpty(venue.getZip())) {
            return venue.getCity() + ", " + venue.getState() + " " + venue.getZip();
        } else {
            return null;
        }
    }

    /**
     * Returns the checkin's server-provided display string when present;
     * otherwise builds "First L." optionally followed by " @ VenueName".
     */
    public static String getCheckinMessage(Checkin checkin, boolean displayAtVenue) {
        if (checkin.getDisplay() != null) {
            return checkin.getDisplay();
        }
        // StringBuilder: local, single-threaded use — no need for StringBuffer's locking
        StringBuilder sb = new StringBuilder();
        sb.append(getUserAbbreviatedName(checkin.getUser()));
        if (checkin.getVenue() != null && displayAtVenue) {
            sb.append(" @ " + checkin.getVenue().getName());
        }
        return sb.toString();
    }

    /** Returns "Firstname Lastname", omitting the last name when absent/empty. */
    public static String getUserFullName(User user) {
        StringBuilder sb = new StringBuilder();
        sb.append(user.getFirstname());
        String lastName = user.getLastname();
        if (lastName != null && lastName.length() > 0) {
            sb.append(" ");
            sb.append(lastName);
        }
        return sb.toString();
    }

    /** Returns "Firstname L.", omitting the initial when the last name is absent/empty. */
    public static String getUserAbbreviatedName(User user) {
        StringBuilder sb = new StringBuilder();
        sb.append(user.getFirstname());
        String lastName = user.getLastname();
        if (lastName != null && lastName.length() > 0) {
            sb.append(" ");
            sb.append(lastName.substring(0, 1) + ".");
        }
        return sb.toString();
    }

    /**
     * Returns an abbreviated relative time span ("5 min. ago") for the given
     * server timestamp; falls back to the raw string if it cannot be parsed.
     */
    public static CharSequence getRelativeTimeSpanString(String created) {
        try {
            return DateUtils.getRelativeTimeSpanString(DATE_FORMAT.parse(created).getTime(),
                    new Date().getTime(), DateUtils.MINUTE_IN_MILLIS,
                    DateUtils.FORMAT_ABBREV_RELATIVE);
        } catch (ParseException e) {
            // unparseable input: show the raw value rather than crashing
            return created;
        }
    }

    /**
     * Returns a format that will look like: "9:09 AM".
     * Falls back to the raw string if the input cannot be parsed.
     */
    public static String getTodayTimeString(String created) {
        try {
            return DATE_FORMAT_TODAY.format(DATE_FORMAT.parse(created));
        } catch (ParseException e) {
            return created;
        }
    }

    /**
     * Returns a format that will look like: "Sun 1:56 PM".
     * Falls back to the raw string if the input cannot be parsed.
     */
    public static String getYesterdayTimeString(String created) {
        try {
            return DATE_FORMAT_YESTERDAY.format(DATE_FORMAT.parse(created));
        } catch (ParseException e) {
            return created;
        }
    }

    /**
     * Returns a format that will look like: "Sat Mar 20".
     * Falls back to the raw string if the input cannot be parsed.
     */
    public static String getOlderTimeString(String created) {
        try {
            return DATE_FORMAT_OLDER.format(DATE_FORMAT.parse(created));
        } catch (ParseException e) {
            return created;
        }
    }
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package fasam.global.entidades; import java.util.ArrayList; import java.util.List; /** * * @author Aluno */ public class Artigo { int _codigo; String _titulo; String _descricao; List<Comentario> comentarios; public int GetCodigo(){ return _codigo; } public void SetCodigo(int icodigo){ _codigo = icodigo; } public String GetTitulo(){ return _titulo; } public void SetTitulo(String stitulo){ _titulo = stitulo; } public String GetDescricao(){ return _descricao; } public void SetDescricao(String sdescricao){ _descricao = sdescricao; } public Comentario getComentario(int i) { return comentarios.get(i); } public void addComentarios(Comentario _comentario) { if(this.comentarios == null) this.comentarios = new ArrayList<Comentario>(); this.comentarios.add(_comentario); } public void remComentarios(Comentario _comentario) { if(this.comentarios == null) this.comentarios = new ArrayList<Comentario>(); this.comentarios.remove(_comentario); } public int inserir(){ return 0; } public int excluir(){ return 0; } public int atualizar(){ return 0; } }
package DataObjects;

/**
 * A position on a {@link Runway}: its maximum length, slope and centerline
 * offset, all stored as raw strings.
 *
 * @author garreola-gutierrez, mtdargen
 */
public class Positions {

    Runway parent;
    String positionMaximumLength;
    String positionSlope;
    String positionCenterlineOffset;

    /** Creates an empty position; all fields remain null until set. */
    public Positions() {
    }

    /**
     * Creates a fully populated position.
     *
     * @param owner     the Runway this position belongs to
     * @param maxLength the position's maximum length
     * @param slope     the position's slope
     * @param offset    the position's centerline offset
     */
    public Positions(Runway owner, String maxLength, String slope, String offset) {
        this.parent = owner;
        this.positionMaximumLength = maxLength;
        this.positionSlope = slope;
        this.positionCenterlineOffset = offset;
    }

    /** Displays a position as its maximum length. */
    @Override
    public String toString() {
        return this.positionMaximumLength;
    }

    /** @param owner the Runway this position belongs to */
    public void setParent(Runway owner) {
        this.parent = owner;
    }

    /** @return the Runway object this position belongs to */
    public Runway getParent() {
        return this.parent;
    }

    /** @param maxLength the new maximum length for this position */
    public void setPositionMaximumLength(String maxLength) {
        this.positionMaximumLength = maxLength;
    }

    /** @return this position's maximum length */
    public String getPositionMaximumLength() {
        return this.positionMaximumLength;
    }

    /** @param slope the new slope for this position */
    public void setPositionSlope(String slope) {
        this.positionSlope = slope;
    }

    /** @return this position's slope */
    public String getPositionSlope() {
        return this.positionSlope;
    }

    /** @param offset the new centerline offset for this position */
    public void setPositionCenterlineOffset(String offset) {
        this.positionCenterlineOffset = offset;
    }

    /** @return this position's centerline offset */
    public String getPositionCenterlineOffset() {
        return this.positionCenterlineOffset;
    }
}
package com.panaton.mgoneup; import android.net.Uri; import android.os.Bundle; import android.app.Activity; import android.content.Intent; import android.view.KeyEvent; import android.webkit.WebChromeClient; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.ProgressBar; public class MainActivity extends Activity { private class OneUpWebViewClient extends WebViewClient { @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { if (url.equals("https://gameServerLocation")) { Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); startActivity(intent); return true; } return false; } } private WebView myWebView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); myWebView = (WebView) findViewById(R.id.pyramid_webview); myWebView.loadUrl(getResources().getString(R.string.gameUrl)); WebSettings webSettings = myWebView.getSettings(); webSettings.setJavaScriptEnabled(true); myWebView.setWebViewClient(new OneUpWebViewClient()); final ProgressBar progressWebView = (ProgressBar) findViewById(R.id.progress_webview); myWebView.setWebChromeClient(new WebChromeClient() { @Override public void onProgressChanged(WebView view, int progress) { if(progress < 80){ progressWebView.setProgress(progress); } if(progress < 100 && progressWebView.getVisibility() == ProgressBar.GONE){ progressWebView.setVisibility(ProgressBar.VISIBLE); } if(progress == 100) { progressWebView.setVisibility(ProgressBar.GONE); } } }); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { // Check if the key event was the Back button and if there's history if ((keyCode == KeyEvent.KEYCODE_BACK) && myWebView.canGoBack()) { myWebView.goBack(); return true; } // If it wasn't the Back key or there's no web page history, bubble up to the default // system behavior (probably exit the activity) return 
super.onKeyDown(keyCode, event); } }
/**
 * A simulated process for CPU-scheduling experiments: an arrival time, an
 * expected (service) time, a scheduling priority and a display name.
 *
 * @author (your name)
 * @version (a version number or a date)
 */
public class Process {
    private float arrivalTime;   // between 0 and 99
    private float expectedTime;  // between 0.1 and 10
    private int priority;        // 1, 2, 3, or 4
    private String name;         // stored wrapped in brackets, e.g. "[p1]"
    // Scheduling statistics; declared but not yet computed anywhere in this class.
    private double responseTime = 0;
    private double waitTime = 0;
    private double turnaroundTime = 0;

    /**
     * Constructor for objects of class Process.
     *
     * @param arrivalTime  time the process enters the system (0-99)
     * @param expectedTime expected service time (0.1-10)
     * @param priority     scheduling priority (1, 2, 3, or 4)
     * @param name         display name; stored wrapped in square brackets
     */
    public Process(float arrivalTime, float expectedTime, int priority, String name) {
        this.arrivalTime = arrivalTime;
        this.expectedTime = expectedTime;
        this.priority = priority;
        this.name = "[" + name + "]";
    }

    /**
     * This returns the arrival time value.
     *
     * @return value of arrivalTime
     */
    public float getArrivalTime() {
        return arrivalTime;
    }

    /**
     * This returns the expected time value.
     * (Fixed: previous doc wrongly said it returned arrivalTime.)
     *
     * @return value of expectedTime
     */
    public float getExpectedTime() {
        return expectedTime;
    }

    /**
     * This returns the priority value.
     *
     * @return value of priority
     */
    public int getPriority() {
        return priority;
    }

    /**
     * This sets the arrival time value.
     *
     * @param arrivalTime the new arrival time
     */
    public void setArrivalTime(float arrivalTime) {
        this.arrivalTime = arrivalTime;
    }

    /**
     * This sets the expected time value.
     * (Fixed: removed a stray double semicolon.)
     *
     * @param expectedTime the new expected time
     */
    public void setExpectedTime(float expectedTime) {
        this.expectedTime = expectedTime;
    }

    /**
     * This sets the priority value.
     *
     * @param priority the new priority
     */
    public void setPriority(int priority) {
        this.priority = priority;
    }

    /**
     * Renders the process as a single tab-separated line ending in "\n".
     *
     * @return processString is a string made up of the values in the object
     */
    @Override
    public String toString() {
        String processString;
        processString = name + "\tArrival time is: " + arrivalTime
            + ",\tExpected time is: " + expectedTime
            + ",\tPriority is: " + priority + "\n";
        return processString;
    }
}
package edu.cs4730.qrDemo; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import androidx.appcompat.app.AppCompatActivity; public class MainActivity extends AppCompatActivity { MainFragment myMainFragment; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); if (myMainFragment == null) { myMainFragment = new MainFragment(); } if (savedInstanceState == null) { getSupportFragmentManager().beginTransaction() .add(R.id.container, myMainFragment).commit(); } } //wait for result from startActivityForResult calls. public void onActivityResult(int requestCode, int resultCode, Intent intent) { //code to handle the intentintegrator, then IntentResult scanResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, intent); if (scanResult != null) { // handle scan result String contents = scanResult.getContents(); if (contents != null) { myMainFragment.logthis("[II] Scan result is " + scanResult.toString()); } else { myMainFragment.logthis("[II] Scan failed or canceled"); } } else if (requestCode == 0) { //normal intent return codes. if (resultCode == Activity.RESULT_OK) { String contents = intent.getStringExtra("SCAN_RESULT"); String format = intent.getStringExtra("SCAN_RESULT_FORMAT"); // Handle successful scan myMainFragment.logthis("[I] scan Result is " + contents); myMainFragment.logthis("[I] scan Format is " + format); } else if (resultCode == Activity.RESULT_CANCELED) { // Handle cancel myMainFragment.logthis("[I] scan cancel"); } } } }
package sorting;

import java.util.Comparator;

import exceptions.EmptyHeapException;

/**
 * Heap sort implemented on top of the project's {@code Heap} abstraction.
 */
public class HeapSort {

    /**
     * Sorts {@code array} in place according to {@code comparator} and returns
     * the same array. Builds a heap from the whole array, then pops one
     * element per iteration, writing from the last slot backwards (assumes
     * {@code Heap.pop()} returns the extreme element — verify against the
     * Heap implementation).
     *
     * Fixed: the for-loop's update clause was truncated to just {@code index}
     * (a syntax error); restored the intended {@code index--}.
     *
     * @param array      the array to sort in place
     * @param comparator ordering used by the heap
     * @return the same (now sorted) array
     */
    public static <T> T[] heapSort(T[] array, Comparator<T> comparator) {
        Heap<T> heap = new SimpleHeapImpl<T>(array, comparator);
        for (int index = array.length - 1; !heap.isEmpty(); index--) {
            try {
                array[index] = heap.pop();
            } catch (EmptyHeapException e) {
                // Cannot happen: the loop condition guarantees the heap is non-empty.
                throw new RuntimeException(e);
            }
        }
        return array;
    }
}
package org.hisp.dhis.dxf2.events.trackedentity.store;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.dxf2.events.aggregates.AggregateContext;
import org.hisp.dhis.dxf2.events.trackedentity.Attribute;
import org.hisp.dhis.dxf2.events.trackedentity.ProgramOwner;
import org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstance;
import org.hisp.dhis.dxf2.events.trackedentity.store.mapper.OwnedTeiMapper;
import org.hisp.dhis.dxf2.events.trackedentity.store.mapper.ProgramOwnerRowCallbackHandler;
import org.hisp.dhis.dxf2.events.trackedentity.store.mapper.TrackedEntityAttributeRowCallbackHandler;
import org.hisp.dhis.dxf2.events.trackedentity.store.mapper.TrackedEntityInstanceRowCallbackHandler;
import org.hisp.dhis.dxf2.events.trackedentity.store.query.TeiAttributeQuery;
import org.hisp.dhis.dxf2.events.trackedentity.store.query.TrackedEntityInstanceQuery;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.stereotype.Repository;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;

/**
 * Read-only JDBC store for tracked entity instances (TEIs): fetches TEIs,
 * their attributes, their program owners, and the subset of TEIs the current
 * user "owns" according to program access level. All id lists are processed
 * in partitions of PARITITION_SIZE to keep the SQL IN-clauses bounded.
 *
 * @author Luciano Fiandesio
 * @author Ameen Mohamed
 */
@Repository
public class DefaultTrackedEntityInstanceStore extends AbstractStore implements TrackedEntityInstanceStore
{
    // Base TEI fetch query; built externally so it can be decorated with ACL checks.
    private final static String GET_TEIS_SQL = TrackedEntityInstanceQuery.getQuery();

    private final static String GET_TEI_ATTRIBUTES = TeiAttributeQuery.getQuery();

    // Maps each TEI uid to its (program uid, org unit uid) ownership rows.
    private final static String GET_PROGRAM_OWNERS = "select tei.uid as key, p.uid as prguid, o.uid as ouuid "
        + "from trackedentityprogramowner teop "
        + "join program p on teop.programid = p.programid "
        + "join organisationunit o on teop.organisationunitid = o.organisationunitid "
        + "join trackedentityinstance tei on teop.trackedentityinstanceid = tei.trackedentityinstanceid "
        + "where teop.trackedentityinstanceid in (:ids)";

    // Ownership/access check across ALL programs: OPEN/AUDITED programs require
    // the owning org unit to fall under the user's search OR capture scope;
    // CLOSED/PROTECTED programs require the capture scope.
    private final static String GET_OWNERSHIP_DATA_FOR_TEIS_FOR_ALL_PROGRAM = "SELECT tei.uid as tei_uid,tpo.trackedentityinstanceid, tpo.programid, tpo.organisationunitid, p.accesslevel,p.uid as pgm_uid "
        + "FROM trackedentityprogramowner TPO "
        + "LEFT JOIN program P on P.programid = TPO.programid "
        + "LEFT JOIN organisationunit OU on OU.organisationunitid = TPO.organisationunitid "
        + "LEFT JOIN trackedentityinstance TEI on TEI.trackedentityinstanceid = tpo.trackedentityinstanceid "
        + "WHERE TPO.trackedentityinstanceid in (:ids) "
        + "AND p.programid in (SELECT programid FROM program) "
        + "GROUP BY tei.uid,tpo.trackedentityinstanceid, tpo.programid, tpo.organisationunitid, ou.path, p.accesslevel,p.uid "
        + "HAVING (P.accesslevel in ('OPEN', 'AUDITED') AND (EXISTS(SELECT SS.organisationunitid FROM userteisearchorgunits SS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = SS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')) OR EXISTS(SELECT CS.organisationunitid FROM usermembership CS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = CS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')))) "
        + "OR (P.accesslevel in ('CLOSED', 'PROTECTED') AND EXISTS(SELECT CS.organisationunitid FROM usermembership CS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = CS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')));";

    // Same ownership/access check, restricted to a single program (:programUid).
    private final static String GET_OWNERSHIP_DATA_FOR_TEIS_FOR_SPECIFIC_PROGRAM = "SELECT tei.uid as tei_uid,tpo.trackedentityinstanceid, tpo.programid, tpo.organisationunitid, p.accesslevel,p.uid as pgm_uid "
        + "FROM trackedentityprogramowner TPO "
        + "LEFT JOIN program P on P.programid = TPO.programid "
        + "LEFT JOIN organisationunit OU on OU.organisationunitid = TPO.organisationunitid "
        + "LEFT JOIN trackedentityinstance TEI on TEI.trackedentityinstanceid = tpo.trackedentityinstanceid "
        + "WHERE TPO.trackedentityinstanceid in (:ids) "
        + "AND p.uid = :programUid "
        + "GROUP BY tei.uid,tpo.trackedentityinstanceid, tpo.programid, tpo.organisationunitid, ou.path, p.accesslevel,p.uid "
        + "HAVING (P.accesslevel in ('OPEN', 'AUDITED') AND (EXISTS(SELECT SS.organisationunitid FROM userteisearchorgunits SS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = SS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')) OR EXISTS(SELECT CS.organisationunitid FROM usermembership CS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = CS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')))) "
        + "OR (P.accesslevel in ('CLOSED', 'PROTECTED') AND EXISTS(SELECT CS.organisationunitid FROM usermembership CS LEFT JOIN organisationunit OU2 ON OU2.organisationunitid = CS.organisationunitid WHERE userinfoid = :userInfoId AND OU.path LIKE CONCAT(OU2.path, '%')));";

    public DefaultTrackedEntityInstanceStore( @Qualifier( "readOnlyJdbcTemplate" ) JdbcTemplate jdbcTemplate )
    {
        super( jdbcTemplate );
    }

    @Override
    String getRelationshipEntityColumn()
    {
        return "trackedentityinstanceid";
    }

    /**
     * Fetches TEIs for the given primary keys, keyed by TEI uid. Ids are
     * partitioned so each query's IN-clause stays bounded.
     */
    @Override
    public Map<String, TrackedEntityInstance> getTrackedEntityInstances( List<Long> ids, AggregateContext ctx )
    {
        List<List<Long>> idPartitions = Lists.partition( ids, PARITITION_SIZE );

        Map<String, TrackedEntityInstance> trackedEntityMap = new HashMap<>();

        idPartitions
            .forEach( partition -> trackedEntityMap.putAll( getTrackedEntityInstancesPartitioned( partition, ctx ) ) );
        return trackedEntityMap;
    }

    // Runs the ACL-decorated TEI query for one id partition.
    private Map<String, TrackedEntityInstance> getTrackedEntityInstancesPartitioned( List<Long> ids,
        AggregateContext ctx )
    {
        TrackedEntityInstanceRowCallbackHandler handler = new TrackedEntityInstanceRowCallbackHandler();

        if ( !ctx.isSuperUser() && ctx.getTrackedEntityTypes().isEmpty() )
        {
            // If not super user and no tracked entity types are accessible,
            // then simply return an empty result.
            return new HashMap<>();
        }

        String sql = withAclCheck( GET_TEIS_SQL, ctx, "tei.trackedentitytypeid in (:teiTypeIds)" );

        jdbcTemplate.query( applySortOrder( sql, StringUtils.join( ids, "," ), "trackedentityinstanceid" ),
            createIdsParam( ids ).addValue( "teiTypeIds", ctx.getTrackedEntityTypes() ), handler );

        return handler.getItems();
    }

    /** Fetches attribute values for the given TEI ids, keyed by TEI uid. */
    @Override
    public Multimap<String, Attribute> getAttributes( List<Long> ids )
    {
        return fetch( GET_TEI_ATTRIBUTES, new TrackedEntityAttributeRowCallbackHandler(), ids );
    }

    /** Fetches program-owner rows for the given TEI ids, keyed by TEI uid. */
    public Multimap<String, ProgramOwner> getProgramOwners( List<Long> ids )
    {
        return fetch( GET_PROGRAM_OWNERS, new ProgramOwnerRowCallbackHandler(), ids );
    }

    /**
     * Returns, per TEI uid, the program uids the current user owns access to,
     * partitioning the id list as above.
     */
    @Override
    public Multimap<String, String> getOwnedTeis( List<Long> ids, AggregateContext ctx )
    {
        List<List<Long>> teiIds = Lists.partition( ids, PARITITION_SIZE );

        Multimap<String, String> ownedTeisMultiMap = ArrayListMultimap.create();

        teiIds.forEach( partition -> {
            ownedTeisMultiMap.putAll( getOwnedTeisPartitioned( partition, ctx ) );
        } );

        return ownedTeisMultiMap;
    }

    // Picks the program-specific or all-program ownership query; skips the
    // (expensive) ownership check entirely when no program is requested and no
    // enrollments/events/attributes are being included.
    private Multimap<String, String> getOwnedTeisPartitioned( List<Long> ids, AggregateContext ctx )
    {
        OwnedTeiMapper handler = new OwnedTeiMapper();

        MapSqlParameterSource paramSource = createIdsParam( ids ).addValue( "userInfoId", ctx.getUserId() );

        boolean checkForOwnership = ctx.getQueryParams().isIncludeAllAttributes()
            || ctx.getParams().isIncludeEnrollments() || ctx.getParams().isIncludeEvents();

        String sql;

        if ( ctx.getQueryParams().hasProgram() )
        {
            sql = GET_OWNERSHIP_DATA_FOR_TEIS_FOR_SPECIFIC_PROGRAM;
            paramSource.addValue( "programUid", ctx.getQueryParams().getProgram().getUid() );
        }
        else if ( checkForOwnership )
        {
            sql = GET_OWNERSHIP_DATA_FOR_TEIS_FOR_ALL_PROGRAM;
        }
        else
        {
            // Nothing ownership-sensitive requested: treat no TEI as owned.
            return ArrayListMultimap.create();
        }

        jdbcTemplate.query( sql, paramSource, handler );

        return handler.getItems();
    }
}
package upparse;

import java.io.*;
import java.util.*;

import static java.lang.Math.*;

/**
 * Class for evaluating chunker output against a gold-standard chunked corpus.
 * Collects one {@link Experiment} per evaluated output corpus and can print
 * precision/recall/F1 summaries in several formats.
 *
 * Fixed: the for-loop update clause in {@code ChunkSet.difference} was
 * truncated to just {@code i} (a syntax error); restored {@code i--}.
 *
 * @author ponvert@mail.utexas.edu (Elias Ponvert)
 */
public class ChunkingEval {

  private final ChunkedCorpus goldCorpus;
  private final String evalName;
  private final List<Experiment> experiments = new ArrayList<Experiment>();
  // When true, gold and output sentences must contain identical terms;
  // when false only the lengths are checked.
  private final boolean checkTerms;

  private ChunkingEval(
      final String name,
      final ChunkedCorpus chunkedCorpus,
      final boolean _checkTerms) {
    evalName = name;
    goldCorpus = chunkedCorpus;
    checkTerms = _checkTerms;
  }

  /** Creates an evaluator whose name is the gold corpus file's basename. */
  public static ChunkingEval fromCorpusFile(
      final String filename, final Alpha alpha, final boolean checkTerms)
      throws IOException {
    final String name = new File(filename).getName();
    return new ChunkingEval(
        name, ChunkedCorpus.fromFile(filename, alpha), checkTerms);
  }

  /** Scores one output corpus against the gold corpus and records the result. */
  public void eval(String string, ChunkedCorpus outputCorpus) throws EvalError {
    experiments.add(new Experiment(string, outputCorpus));
  }

  public void writeSummary(final String evalType) throws EvalError {
    writeSummary(evalType, System.out);
  }

  /**
   * Prints one summary line per recorded experiment in the requested format:
   * PR, PRLcsv, PRL, PRC or PRCL.
   *
   * @throws EvalError on an unrecognized eval type
   */
  public void writeSummary(String evalType, PrintStream out) throws EvalError {
    for (Experiment experiment : experiments) {
      if (evalType.equals("PR"))
        experiment.writeSummary(out);
      else if (evalType.equals("PRLcsv"))
        experiment.writeSummaryWithLenCSV(out);
      else if (evalType.equals("PRL"))
        experiment.writeSummaryWithLen(out);
      else if (evalType.equals("PRC"))
        experiment.writeSummaryWithCounts(out);
      else if (evalType.equals("PRCL"))
        experiment.writeSummaryWithCountsAndLen(out);
      else
        throw new EvalError("Unknown eval type: " + evalType);
    }
  }

  /**
   * A set of chunks over one sentence with an O(1) membership index:
   * index[start][end] holds 1-based position in {@code chunks}, 0 if absent.
   */
  private static class ChunkSet {

    final int index[][];
    final Chunk[] chunks;

    ChunkSet(final int n, final Chunk[] _chunks) {
      final int _n = n + 1;
      chunks = _chunks;
      index = new int[_n][_n];
      int i = 1;
      for (Chunk c : chunks)
        index[c.start][c.end] = i++;
    }

    @Override
    public String toString() {
      StringBuffer sb = new StringBuffer();
      sb.append(String.format("ChunkSet(%d,[", index.length));
      for (int i = 0; i < chunks.length; i++) {
        sb.append(chunks[i].toString());
        if (i != chunks.length - 1)
          sb.append(",");
      }
      sb.append("])");
      return sb.toString();
    }

    /** Returns this set minus the chunks also present (exactly) in {@code o}. */
    ChunkSet difference(final ChunkSet o) {
      int nToRemove = 0;
      List<Chunk> chunkList = new ArrayList<Chunk>(Arrays.asList(chunks));
      int[] toRemove = new int[chunks.length];
      for (Chunk c : o.chunks)
        if (contains(c))
          toRemove[nToRemove++] = indexOf(c);
      toRemove = Arrays.copyOf(toRemove, nToRemove);
      Arrays.sort(toRemove);
      // Remove by descending index so earlier removals don't shift later ones.
      // (Fixed: the update clause was truncated to "i" in the original.)
      for (int i = nToRemove - 1; i >= 0; i--)
        chunkList.remove(toRemove[i]);
      // Note: passes index.length (already n+1), so the derived set allocates
      // one extra row/column; harmless since chunk bounds never exceed n.
      return new ChunkSet(index.length, chunkList.toArray(new Chunk[0]));
    }

    /** Returns the chunks present (exactly) in both sets. */
    ChunkSet intersection(ChunkSet o) {
      List<Chunk> shared = new ArrayList<Chunk>();
      final ChunkSet checker, lister;
      // Iterate the smaller set, probe the larger one's O(1) index.
      if (o.chunks.length > chunks.length) {
        lister = this;
        checker = o;
      } else {
        lister = o;
        checker = this;
      }
      for (Chunk c : lister.chunks)
        if (checker.contains(c))
          shared.add(c);
      return new ChunkSet(index.length, shared.toArray(new Chunk[0]));
    }

    private int indexOf(Chunk c) {
      return index[c.start][c.end] - 1;
    }

    private boolean contains(Chunk c) {
      return index[c.start][c.end] != 0;
    }
  }

  /**
   * A [start, end) span within one sentence.
   * compareTo is overlap-based (0 for any overlapping pair) so that
   * Arrays.binarySearch can locate an overlapping chunk; it is deliberately
   * inconsistent with equals.
   */
  private static class Chunk implements Comparable<Chunk> {

    // provide actual sentence index if we ever want to put these in a set
    // or hashtable
    final int start, end;

    Chunk(int _start, int _end) {
      start = _start;
      end = _end;
    }

    @Override
    public boolean equals(Object obj) {
      Chunk c = (Chunk) obj;
      return start == c.start && end == c.end;
    }

    @Override
    public int hashCode() {
      return Arrays.hashCode(new int[] { start, end });
    }

    @Override
    public int compareTo(Chunk o) {
      if (o == null)
        throw new NullPointerException();
      if (end < o.start)
        return -1;
      if (o.end < start)
        return 1;
      return 0; // overlapping spans compare equal
    }

    @Override
    public String toString() {
      return String.format("Chunk(%d,%d)", start, end);
    }

    /** True when this span fully covers {@code c}. */
    public boolean contains(Chunk c) {
      return start <= c.start && c.end <= end;
    }
  }

  /**
   * The scoring of one output corpus: per-category (TP/FP/FN) counts broken
   * down by error type and chunk length, plus summed chunk lengths.
   */
  private class Experiment {

    // TP/FP/FN index the first dimension of counts and len; the error types
    // (NO_OVERLAP..NA) index the second; length (capped at MAXLEN) the third.
    private static final int
      TP = 0, FP = 1, FN = 2,
      NO_OVERLAP = 0, TP_SUB = 1, TP_SUP = 2, CROSSING = 3, NA = 4,
      MAXLEN = 5;

    private final int[][][] counts = new int[3][5][MAXLEN + 1];
    private final String expName;
    private final int[] len = new int[3];

    Experiment(String name, ChunkedCorpus outputCorpus) throws EvalError {
      expName = name;

      int[][][] goldIndices = goldCorpus.getArrays();
      int[][][] outpIndices = outputCorpus.getArrays();

      int[] terms, _terms;
      int[][] gold, outp;

      if (goldIndices.length != outpIndices.length) {
        // Diagnose before failing: check the sentences both corpora do share.
        for (int i = 0; i < Math.min(goldIndices.length, outpIndices.length); i++) {
          int[] g = termsFromSent(goldIndices[i]);
          int[] o = termsFromSent(outpIndices[i]);
          if (checkTerms)
            assert Arrays.equals(g, o);
          else
            assert g.length == o.length;
        }
        throw new EvalError(
            String.format("Different corpus len: Gold = %d Output = %d",
                goldIndices.length, outpIndices.length));
      }

      for (int i = 0; i < goldIndices.length; i++) {
        gold = goldIndices[i];
        outp = outpIndices[i];

        terms = termsFromSent(gold);
        _terms = termsFromSent(outp);
        if (checkTerms)
          assert Arrays.equals(terms, _terms) : String.format(
              "Terms do not match:\nGold: %s\nOutp: %s\n[%d]",
              termStr(terms), termStr(termsFromSent(outp)), i);
        else
          assert terms.length == _terms.length;

        final ChunkSet
          goldChunks = new ChunkSet(terms.length, chunks(gold)),
          outpChunks = new ChunkSet(terms.length, chunks(outp)),
          truePos = goldChunks.intersection(outpChunks),
          falsePos = outpChunks.difference(truePos),
          falseNeg = goldChunks.difference(truePos);

        int closestI, errorType;
        Chunk closest;

        for (Chunk c : truePos.chunks) {
          counts[TP][NA][lenNorm(c)]++;
          len[TP] += c.end - c.start;
        }

        // Classify each false positive by how it relates to an overlapping
        // true positive (binarySearch uses Chunk's overlap-based compareTo).
        for (Chunk c : falsePos.chunks) {
          closestI = Arrays.binarySearch(truePos.chunks, c);
          if (closestI < 0)
            errorType = NO_OVERLAP;
          else {
            closest = truePos.chunks[closestI];
            if (closest.contains(c))
              errorType = TP_SUP;
            else if (c.contains(closest))
              errorType = TP_SUB;
            else
              errorType = CROSSING;
          }
          counts[FP][errorType][lenNorm(c)]++;
          len[FP] += c.end - c.start;
        }

        // False negatives are classified against the false positives
        // (as in the original code — verify intent if this looks surprising).
        for (Chunk c : falseNeg.chunks) {
          closestI = Arrays.binarySearch(falsePos.chunks, c);
          if (closestI < 0)
            errorType = NO_OVERLAP;
          else {
            closest = falsePos.chunks[closestI];
            if (closest.contains(c))
              errorType = TP_SUB;
            else if (c.contains(closest))
              errorType = TP_SUP;
            else
              errorType = CROSSING;
          }
          counts[FN][errorType][lenNorm(c)]++;
          len[FN] += c.end - c.start;
        }
      }
    }

    // Renders term ids back to strings for assertion messages.
    private String termStr(int[] terms) {
      StringBuffer sb = new StringBuffer();
      for (int i = 0; i < terms.length; i++) {
        sb.append(goldCorpus.alpha.getString(terms[i]));
        if (i != terms.length - 1)
          sb.append(" ");
      }
      return sb.toString();
    }

    // Chunk length capped at MAXLEN for the histogram dimension.
    private int lenNorm(Chunk c) {
      return min(MAXLEN, c.end - c.start);
    }

    // Flattens a sentence (array of chunks) into its term sequence.
    private int[] termsFromSent(int[][] gold) {
      int n = 0;
      for (int[] c : gold)
        n += c.length;
      int[] terms = new int[n];
      int i = 0;
      for (int[] c : gold)
        for (int t : c)
          terms[i++] = t;
      return terms;
    }

    /** @return A sorted array of chunks len > 1 in the sentence */
    final Chunk[] chunks(int[][] sent) {
      int nChunks = 0;
      for (int[] chunk : sent)
        if (chunk.length > 1)
          nChunks++;
      Chunk[] ind = new Chunk[nChunks];
      int i = 0, start = 0, end;
      for (int[] chunk : sent) {
        if (chunk.length == 1)
          start++;
        else {
          end = start + chunk.length;
          ind[i++] = new Chunk(start, end);
          start = end;
        }
      }
      return ind;
    }

    /** CSV line: precision, recall, F1, avg gold length, avg predicted length. */
    public void writeSummaryWithLenCSV(PrintStream out) {
      int
        tp = sum(counts[TP]),
        fp = sum(counts[FP]),
        fn = sum(counts[FN]),
        goldCount = tp + fn,
        predCount = tp + fp,
        goldLen = len[TP] + len[FN],
        predLen = len[TP] + len[FP];
      double
        goldCountF = (double) goldCount,
        predCountF = (double) predCount,
        goldLenF = (double) goldLen,
        predLenF = (double) predLen,
        goldLenAvg = goldLenF / goldCountF,
        predLenAvg = predLenF / predCountF,
        tpF = (double) tp,
        fpF = (double) fp,
        fnF = (double) fn,
        prec = 100 * tpF / (tpF + fpF),
        rec = 100 * tpF / (tpF + fnF),
        f = 2 * prec * rec / (prec + rec);
      out.println(String.format(
          "%.1f,%.1f,%.1f,%.2f,%.2f", prec, rec, f, goldLenAvg, predLenAvg));
    }

    /** One line: eval name, experiment name, P / R / F1. */
    public void writeSummary(PrintStream out) {
      int tp = sum(counts[TP]), fp = sum(counts[FP]), fn = sum(counts[FN]);
      double
        tpF = (double) tp,
        fpF = (double) fp,
        fnF = (double) fn,
        prec = 100 * tpF / (tpF + fpF),
        rec = 100 * tpF / (tpF + fnF),
        f = 2 * prec * rec / (prec + rec);
      out.println(String.format("%25s %10s : %.1f / %.1f / %.1f ",
          evalName, expName, prec, rec, f));
    }

    /** Like writeSummary, plus average gold/predicted chunk lengths. */
    public void writeSummaryWithLen(PrintStream out) {
      int
        tp = sum(counts[TP]),
        fp = sum(counts[FP]),
        fn = sum(counts[FN]),
        goldCount = tp + fn,
        predCount = tp + fp,
        goldLen = len[TP] + len[FN],
        predLen = len[TP] + len[FP];
      double
        goldCountF = (double) goldCount,
        predCountF = (double) predCount,
        goldLenF = (double) goldLen,
        predLenF = (double) predLen,
        goldLenAvg = goldLenF / goldCountF,
        predLenAvg = predLenF / predCountF,
        tpF = (double) tp,
        fpF = (double) fp,
        fnF = (double) fn,
        prec = 100 * tpF / (tpF + fpF),
        rec = 100 * tpF / (tpF + fnF),
        f = 2 * prec * rec / (prec + rec);
      out.println(String.format(
          "%25s %10s : %.1f / %.1f / %.1f [G = %.2f, P = %.2f]",
          evalName, expName, prec, rec, f, goldLenAvg, predLenAvg));
    }

    /** Like writeSummary, plus raw TP/FP/FN counts. */
    public void writeSummaryWithCounts(PrintStream out) {
      int tp = sum(counts[TP]), fp = sum(counts[FP]), fn = sum(counts[FN]);
      double
        tpF = (double) tp,
        fpF = (double) fp,
        fnF = (double) fn,
        prec = 100 * tpF / (tpF + fpF),
        rec = 100 * tpF / (tpF + fnF),
        f = 2 * prec * rec / (prec + rec);
      out.println(String.format(
          "%25s %10s : %.1f / %.1f / %.1f ( %6d / %6d / %6d )",
          evalName, expName, prec, rec, f, tp, fp, fn));
    }

    /** Like writeSummary, plus counts and average chunk lengths. */
    public void writeSummaryWithCountsAndLen(PrintStream out) {
      int
        tp = sum(counts[TP]),
        fp = sum(counts[FP]),
        fn = sum(counts[FN]),
        goldCount = tp + fn,
        predCount = tp + fp,
        goldLen = len[TP] + len[FN],
        predLen = len[TP] + len[FP];
      double
        goldCountF = (double) goldCount,
        predCountF = (double) predCount,
        goldLenF = (double) goldLen,
        predLenF = (double) predLen,
        goldLenAvg = goldLenF / goldCountF,
        predLenAvg = predLenF / predCountF,
        tpF = (double) tp,
        fpF = (double) fp,
        fnF = (double) fn,
        prec = 100 * tpF / (tpF + fpF),
        rec = 100 * tpF / (tpF + fnF),
        f = 2 * prec * rec / (prec + rec);
      out.println(String.format(
          "%25s %10s : %.1f / %.1f / %.1f ( %6d / %6d / %6d ) [G = %.2f, P = %.2f]",
          evalName, expName, prec, rec, f, tp, fp, fn, goldLenAvg, predLenAvg));
    }

    private int sum(int[][] is) {
      int s = 0;
      for (int[] a : is)
        s += sum(a);
      return s;
    }

    private int sum(int[] a) {
      int s = 0;
      for (int n : a)
        s += n;
      return s;
    }
  }
}
package gosu.tools.ant;

import gosu.tools.ant.util.AntLoggingHelper;
import gw.lang.gosuc.simple.ICompilerDriver;
import gw.lang.gosuc.simple.IGosuCompiler;
import gw.lang.gosuc.simple.SoutCompilerDriver;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.Reference;
import org.apache.tools.ant.util.FileNameMapper;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.ant.util.SourceFileScanner;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Ant task for compiling Gosu files to disk.
 * <p>The following parameters are available:
 * <ul>
 *   <li>"srcdir" : A Path containing one or more source directories</li>
 *   <li>"destdir" : A File representing the output destination of the compilation</li>
 *   <li>"checkedarithmetic" : Compile with checked arithmetic if true. Defaults to {@code false}.</li>
 *   <li>"failonerror" : Ignore compile errors and continue if true. Defaults to {@code true}.</li>
 * </ul>
 */
public class Gosuc extends GosuMatchingTask {

  private final AntLoggingHelper log = new AntLoggingHelper(this);

  private Path _src;
  private File _destDir;
  private Path _compileClasspath;
  private boolean _failOnError = true;
  private boolean _checkedArithmetic = false;
  private boolean _force = true;
  // File extensions recognized as Gosu sources.
  private Set<String> _scriptExtensions = new HashSet<>(Arrays.asList("gs", "gsx", "gst", "gsp"));

  /** Accumulates the source files selected for compilation by {@link #scanDir}. */
  protected List<File> compileList = new ArrayList<>();

  /**
   * Adds a path for source compilation.
   *
   * @return a nested src element.
   */
  public Path createSrc() {
    if (_src == null) {
      _src = new Path(getProject());
    }
    return _src.createPath();
  }

  /**
   * Recreate src.
   *
   * @return a nested src element.
   */
  protected Path recreateSrc() {
    _src = null;
    return createSrc();
  }

  /**
   * Set the source directories to find the source Gosu files.
   *
   * @param srcDir the source directories as a path
   */
  public void setSrcdir(Path srcDir) {
    if (_src == null) {
      _src = srcDir;
    } else {
      _src.append(srcDir);
    }
  }

  /**
   * Gets the source dirs to find the source Gosu files.
   *
   * @return the source directories as a path
   */
  public Path getSrcdir() {
    return _src;
  }

  /**
   * Set the destination directory into which the Gosu source
   * files should be compiled.
   *
   * @param destDir the destination directory
   */
  public void setDestdir(File destDir) {
    _destDir = destDir;
  }

  /**
   * Gets the destination directory into which the Gosu source files
   * should be compiled.
   *
   * @return the destination directory
   */
  public File getDestdir() {
    return _destDir;
  }

  /**
   * Adds a path to the classpath.
   *
   * @return a classpath to be configured
   */
  public Path createClasspath() {
    if (_compileClasspath == null) {
      _compileClasspath = new Path(getProject());
    }
    return _compileClasspath.createPath();
  }

  /**
   * Adds a reference to a classpath defined elsewhere.
   *
   * @param ref a reference to a classpath
   */
  public void setClasspathRef(Reference ref) {
    createClasspath().setRefid(ref);
  }

  private Set<String> getScriptExtensions() {
    return _scriptExtensions;
  }

  /**
   * Indicates whether the build will continue
   * even if there are compilation errors; defaults to true.
   *
   * @param fail if true halt the build on failure
   */
  public void setFailOnError(boolean fail) {
    _failOnError = fail;
  }

  /**
   * Gets the FailOnError flag.
   *
   * @return the FailOnError flag
   */
  public boolean getFailOnError() {
    return _failOnError;
  }

  public boolean isCheckedArithmetic() {
    return _checkedArithmetic;
  }

  public void setCheckedArithmetic(boolean checkedArithmetic) {
    _checkedArithmetic = checkedArithmetic;
  }

  /**
   * Gets the Force flag.<br>
   * ant's directory scanner is timestamp based. Without proper &lt;depend&gt; tasks, this could
   * result in successful, but incomplete compilation.<br>
   * Therefore we default 'force' to true, which causes compilation of all matching source files,
   * regardless of the timestamp comparison between source and target.
   *
   * @return true if all matching sources are compiled unconditionally (the default)
   */
  public boolean isForce() {
    return _force;
  }

  public void setForce(boolean force) {
    _force = force;
  }

  /**
   * Scans the directory looking for source files to be compiled.
   * The results are returned in the class variable compileList
   *
   * @param srcDir  The source directory
   * @param destDir The destination directory
   * @param files   An array of filenames
   */
  protected void scanDir(File srcDir, File destDir, String[] files) {
    GlobPatternMapper m = new GlobPatternMapper();
    SourceFileScanner sfs = new SourceFileScanner(this);
    List<File> newFiles;
    if (!isForce()) {
      log.warn("Relying on ant's SourceFileScanner, which only looks at timestamps. If broken references result, try setting option 'force' to true.");
    }
    for (String extension : getScriptExtensions()) {
      m.setFrom("*." + extension);
      m.setTo("*.class");
      log.debug("Scanning for *." + extension + " files...");
      if (isForce()) {
        // Take every matching source, ignoring timestamps.
        newFiles = asFiles(srcDir, files, m);
      } else {
        // Timestamp-based selection: only sources newer than their .class output.
        newFiles = Arrays.asList(sfs.restrictAsFiles(files, srcDir, destDir, m));
      }
      log.debug("Found these files:");
      for (File newFile : newFiles) {
        log.debug('\t' + newFile.getAbsolutePath());
      }
      compileList.addAll(newFiles);
    }
  }

  /**
   * Converts an array of relative String filenames to a {@code List<File>}
   *
   * @param srcDir The root directory of all files
   * @param files  All files are relative to srcDir
   * @param m      mapper used only to check whether a file's extension is recognized
   * @return a List of Files by joining srcDir to each file
   */
  private List<File> asFiles(File srcDir, String[] files, FileNameMapper m) {
    List<File> newFiles = new ArrayList<>();
    for (String file : files) {
      // mapFileName returns null for extensions the mapper does not recognize.
      boolean hasMatchingExtension = m.mapFileName(file) != null;
      if (hasMatchingExtension) {
        newFiles.add(new File(srcDir, file));
      }
    }
    return newFiles;
  }

  /**
   * Gets the list of files to be compiled.
   *
   * @return the list of files
   */
  public List<File> getFileList() {
    return compileList;
  }

  /**
   * Executes the task: scans the source directories, initializes the Gosu
   * compiler, compiles every selected file, and reports warnings/errors.
   *
   * @throws BuildException if a source directory is missing, a compile call
   *                        throws, or compilation produced errors while
   *                        'failonerror' is true
   */
  @Override
  public void execute() throws BuildException {
    log.debug("src/srcdir=" + getSrcdir());
    log.debug("destdir=" + getDestdir());
    log.debug("failOnError=" + getFailOnError());
    log.debug("checkedArithmetic=" + isCheckedArithmetic());
    log.debug("_compileClasspath=" + _compileClasspath);

    if (isCheckedArithmetic()) {
      System.setProperty("checkedArithmetic", "true");
    }

    ICompilerDriver driver = new SoutCompilerDriver();
    IGosuCompiler gosuc = new gw.lang.gosuc.simple.GosuCompiler();

    List<String> classpath = new ArrayList<>();
    // Guard against NPE when no nested <classpath> element / classpathref was configured.
    if (_compileClasspath != null) {
      classpath.addAll(Arrays.asList(_compileClasspath.list()));
    }
    classpath.addAll(getJreJars());

    log.info("Initializing Gosu compiler...");
    log.debug("\tsourceFolders:" + Arrays.asList(getSrcdir().list()));
    log.debug("\tclasspath:" + classpath);
    log.debug("\toutputPath:" + getDestdir().getAbsolutePath());

    String[] list = getSrcdir().list();
    for (String filename : list) {
      File file = getProject().resolveFile(filename);
      if (!file.exists()) {
        throw new BuildException("srcdir \"" + file.getPath() + "\" does not exist!", getLocation());
      }
      DirectoryScanner ds = this.getDirectoryScanner(file);
      String[] files = ds.getIncludedFiles();
      // When no destdir is set, compare timestamps against the source dir itself.
      scanDir(file, _destDir != null ? _destDir : file, files);
    }

    gosuc.initializeGosu(Arrays.asList(getSrcdir().list()), classpath, getDestdir().getAbsolutePath());

    log.debug("About to compile these files:");
    for (File file : compileList) {
      log.debug("\t" + file.getAbsolutePath());
    }

    for (File file : compileList) {
      try {
        gosuc.compile(file, driver);
      } catch (Exception e) {
        log.error(e.getMessage());
        throw new BuildException(e);
      }
    }

    gosuc.unitializeGosu();

    List<String> warnings = ((SoutCompilerDriver) driver).getWarnings();
    boolean errorsInCompilation = ((SoutCompilerDriver) driver).hasErrors();
    List<String> errors = ((SoutCompilerDriver) driver).getErrors();

    List<String> warningMessages = new ArrayList<>();
    List<String> errorMessages = new ArrayList<>();
    warnings.forEach(warning -> warningMessages.add("[WARNING] " + warning));
    int numWarnings = warningMessages.size();
    int numErrors = 0;
    if (errorsInCompilation) {
      errors.forEach(error -> errorMessages.add("[ERROR] " + error));
      numErrors = errorMessages.size();
    }

    // Build a one-line summary such as "Gosu compilation completed with 2 warnings and 1 error".
    boolean hasWarningsOrErrors = numWarnings > 0 || errorsInCompilation;
    StringBuilder sb = new StringBuilder();
    sb.append("Gosu compilation completed");
    if (hasWarningsOrErrors) {
      sb.append(" with ");
      if (numWarnings > 0) {
        sb.append(numWarnings).append(" warning").append(numWarnings == 1 ? "" : 's');
      }
      if (errorsInCompilation) {
        sb.append(numWarnings > 0 ? " and " : "");
        sb.append(numErrors).append(" error").append(numErrors == 1 ? "" : 's');
      }
    } else {
      sb.append(" successfully.");
    }
    if (hasWarningsOrErrors) {
      log.warn(sb.toString());
    } else {
      log.info(sb.toString());
    }

    // Echo individual messages, capped to keep build logs readable.
    int ct = 0;
    for (String msg : warningMessages) {
      log.info(msg);
      ct++;
      if (ct > 100) {
        log.info("Total warnings exceeds 100; truncating output");
        break;
      }
    }
    ct = 0;
    for (String msg : errorMessages) {
      log.error(msg);
      ct++;
      if (ct > 100) {
        log.error("Total errors exceeds 100; truncating output");
        break;
      }
    }

    if (errorsInCompilation) {
      if (getFailOnError()) {
        buildError("Gosu compilation failed with errors; see compiler output for details.");
      } else {
        log.warn("Gosu Compiler: Ignoring compilation failure(s) as 'failOnError' was set to false");
      }
    }
  }

  /**
   * Get all JARs from the lib directory of the System's java.home property
   *
   * @return List of absolute paths to all JRE libraries
   */
  private List<String> getJreJars() {
    String javaHome = System.getProperty("java.home");
    java.nio.file.Path libsDir = FileSystems.getDefault().getPath(javaHome, "/lib");
    // Files.walk returns a lazily-populated Stream that must be closed;
    // try-with-resources prevents a file-handle leak on the directory walk.
    try (java.util.stream.Stream<java.nio.file.Path> paths = Files.walk(libsDir)) {
      return paths
          .filter(path -> path.toFile().isFile())
          .filter(path -> path.toString().endsWith(".jar"))
          .map(java.nio.file.Path::toString)
          .collect(Collectors.toList());
    } catch (SecurityException | IOException e) {
      // Preserve the cause for the caller; no printStackTrace side channel.
      throw new RuntimeException(e);
    }
  }
}
package org.nuxeo.ecm.platform.publisher.impl.service;

import java.util.List;
import org.junit.Before;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.*;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.storage.sql.SQLRepositoryTestCase;

/**
 * Tests {@code DomainsFinder.getDomainsFiltered()}: only live documents of
 * type "Domain" should be returned — custom domain subtypes ("SocialDomain")
 * and deleted domains are filtered out.
 */
public class TestDomainsFinder extends SQLRepositoryTestCase {

  DomainsFinder domainFinder;
  List<DocumentModel> result;

  @Before
  public void setUp() throws Exception {
    super.setUp();
    // Lifecycle + doc-type contributions needed for the "delete" transition
    // and the "SocialDomain" type used below.
    deployContrib("org.nuxeo.ecm.platform.publisher.test",
        "OSGI-INF/publisher-lifecycle-contrib.xml");
    deployContrib("org.nuxeo.ecm.platform.publisher.test",
        "OSGI-INF/core-types-contrib.xml");
    fireFrameworkStarted();
    openSession();
    domainFinder = new DomainsFinderTester("default", session);
  }

  @After
  public void tearDown() throws Exception {
    closeSession();
    super.tearDown();
  }

  @Test
  public void testDomainsFiltered() throws Exception {
    // Empty repository: no domains.
    result = domainFinder.getDomainsFiltered();
    assertEquals(0, result.size());

    DocumentModel domain1 = session.createDocumentModel("/", "dom1", "Domain");
    domain1 = session.createDocument(domain1);
    // Fixed copy-paste typo: the second domain was also named "dom1".
    DocumentModel domain2 = session.createDocumentModel("/", "dom2", "Domain");
    domain2 = session.createDocument(domain2);
    // A SocialDomain must NOT be picked up by the finder.
    DocumentModel socialdomain1 = session.createDocumentModel("/", "social", "SocialDomain");
    socialdomain1 = session.createDocument(socialdomain1);
    session.save();

    result = domainFinder.getDomainsFiltered();
    assertEquals(2, result.size());

    // Deleting a domain removes it from the filtered result.
    domain2.followTransition("delete");
    assertEquals("deleted", domain2.getCurrentLifeCycleState());
    session.saveDocument(domain2);
    session.save();
    result = domainFinder.getDomainsFiltered();
    assertEquals(1, result.size());
  }
}

/**
 * Test double that injects an already-open {@link CoreSession} instead of
 * letting {@code DomainsFinder} open its own.
 */
class DomainsFinderTester extends DomainsFinder {

  public DomainsFinderTester(String repositoryName) {
    super(repositoryName);
  }

  public DomainsFinderTester(String repositoryName, CoreSession session) {
    super(repositoryName);
    this.session = session;
  }
}
package edu.pdx.cs410J.grader;

import org.w3c.dom.*;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.time.LocalDateTime;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;

import static edu.pdx.cs410J.grader.GradeBook.LetterGradeRanges.LetterGradeRange;

/**
 * This class dumps the contents of a <code>GradeBook</code> to an XML
 * file.  By default, the students in the grade book are dumped to XML
 * files in the same directory as the grade book's XML file.
 *
 * @author David Whitlock
 * @since Fall 2000
 */
public class XmlDumper extends XmlHelper {

  private File studentDir = null;   // Where to dump student XML files
  PrintWriter pw = null;            // Where to dump grade book

  /**
   * Creates a new <code>XmlDumper</code> that dumps the contents of a
   * grade book to a given file.
   */
  public XmlDumper(File xmlFile) throws IOException {
    this(new PrintWriter(new FileWriter(xmlFile), true));
    // NOTE(review): the FileWriter above already created the file, so this
    // exists() check can never fire; kept for behavioral compatibility.
    if (!xmlFile.exists()) {
      if (!xmlFile.createNewFile()) {
        throw new IOException("Could not create file " + xmlFile);
      }
    }
    this.setStudentDir(xmlFile.getCanonicalFile().getParentFile());
  }

  /**
   * Creates a new <code>XmlDumper</code> that dumps the contents of a
   * grade book to a file of the given name.
   */
  public XmlDumper(String xmlFileName) throws IOException {
    this(new File(xmlFileName));
  }

  /**
   * Creates an <code>XmlDumper</code> that dumps the contents of a
   * grade book to a <code>PrintWriter</code>.  The location of the
   * student files is unspecified.
   */
  private XmlDumper(PrintWriter pw) {
    this.pw = pw;
  }

  /**
   * Sets the directory in which the XML files for students are
   * generated.
   *
   * @throws IllegalArgumentException if {@code dir} exists but is not a directory
   */
  public void setStudentDir(File dir) {
    if (dir.exists() && !dir.isDirectory()) {
      throw new IllegalArgumentException(dir + " is not a directory");
    }
    this.studentDir = dir;
  }

  /**
   * Dumps the contents of a <code>GradeBook</code> in XML format, then
   * dumps every dirty student and marks the book clean.
   */
  public void dump(GradeBook book) throws IOException {
    Document doc = dumpGradeBook(book, this);

    try {
      writeXmlToPrintWriter(doc, this.pw);
    } catch (TransformerException ex) {
      // NOTE(review): exiting the JVM from library code is drastic; kept to
      // preserve existing behavior.
      ex.printStackTrace(System.err);
      System.exit(1);
    }

    dumpDirtyStudents(book);

    // Mark the grade book as being clean
    book.makeClean();
  }

  /**
   * Builds the DOM document for a grade book: class name, assignments,
   * letter-grade ranges, and the list of student ids.
   */
  static Document dumpGradeBook(GradeBook book, XmlHelper helper) throws IOException {
    Document doc = createDocumentForGradeBook(helper);
    Element root = doc.getDocumentElement();

    appendXmlForClassName(book, doc, root);
    appendXmlForAssignments(book, doc, root);
    appendXmlForLetterGradeRanges(book, doc, root);
    appendXmlForStudents(book, doc, root);
    return doc;
  }

  /** Appends one letter-grade-ranges element per section of the book. */
  private static void appendXmlForLetterGradeRanges(GradeBook book, Document doc, Element root) {
    for (Student.Section section : book.getSections()) {
      Element lgrNode = appendXmlForLetterGradeRange(book, section, doc);
      root.appendChild(lgrNode);
    }
  }

  /** Creates the letter-grade-ranges element for a single section. */
  private static Element appendXmlForLetterGradeRange(GradeBook book, Student.Section section, Document doc) {
    Element lgrNode = doc.createElement("letter-grade-ranges");
    lgrNode.setAttribute("for-section", getSectionXmlAttributeValue(section));
    for (LetterGradeRange range : book.getLetterGradeRanges(section)) {
      appendXmlForLetterGradeRange(range, doc, lgrNode);
    }
    return lgrNode;
  }

  /** Returns the section's string form, or {@code null} for a null section. */
  private static String getSectionXmlAttributeValue(Student.Section section) {
    return Objects.toString(section, null);
  }

  /** Appends one letter-grade-range element (grade letter plus min/max score). */
  private static void appendXmlForLetterGradeRange(LetterGradeRange range, Document doc, Element parent) {
    Element node = doc.createElement("letter-grade-range");
    node.setAttribute("letter-grade", range.letterGrade().asString());
    node.setAttribute("minimum-score", String.valueOf(range.minimum()));
    node.setAttribute("maximum-score", String.valueOf(range.maximum()));
    parent.appendChild(node);
  }

  /**
   * Creates a validating DOM document with a "gradebook" root and the
   * grade-book DTD; the helper serves as error handler and entity resolver.
   */
  private static Document createDocumentForGradeBook(XmlHelper helper) {
    Document doc = null;

    try {
      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
      factory.setValidating(true);

      DocumentBuilder builder = factory.newDocumentBuilder();
      builder.setErrorHandler(helper);
      builder.setEntityResolver(helper);

      DOMImplementation dom = builder.getDOMImplementation();
      DocumentType docType = dom.createDocumentType("gradebook", publicID, systemID);
      doc = dom.createDocument(null, "gradebook", docType);
    } catch (ParserConfigurationException | DOMException ex) {
      ex.printStackTrace(System.err);
      System.exit(1);
    }
    return doc;
  }

  /** Appends the "students" element listing every student id in the book. */
  private static void appendXmlForStudents(GradeBook book, Document doc, Element root) {
    // Students
    Element studentsNode = doc.createElement("students");
    for (String id : book.getStudentIds()) {
      appendTextElementIfValueIsNotNull(studentsNode, "id", id);
    }
    root.appendChild(studentsNode);
  }

  /** Appends the class name element. */
  private static void appendXmlForClassName(GradeBook book, Document doc, Element root) {
    appendTextElementIfValueIsNotNull(root, "name", book.getClassName());
  }

  /** Appends one "assignment" element per assignment in the book. */
  private static void appendXmlForAssignments(GradeBook book, Document doc, Element root) {
    // assignment nodes
    Element assignments = doc.createElement("assignments");
    for (String assignmentName : book.getAssignmentNames()) {
      Assignment assign = book.getAssignment(assignmentName);
      Element assignNode = doc.createElement("assignment");
      setAssignmentTypeAttribute(assign, assignNode);

      appendTextElementIfValueIsNotNull(assignNode, "name", assign.getName());
      appendTextElementIfValueIsNotNull(assignNode, "description", assign.getDescription());
      appendTextElementIfValueIsNotNull(assignNode, "points", String.valueOf(assign.getPoints()));
      appendTextElementIfValueIsNotNull(assignNode, "due-date", assign.getDueDate());

      doNotes(doc, assignNode, assign.getNotes());

      assignments.appendChild(assignNode);
    }

    root.appendChild(assignments);
  }

  /** Appends a date/time element formatted with DATE_TIME_FORMAT, if non-null. */
  private static void appendTextElementIfValueIsNotNull(Element parent, String elementName, LocalDateTime dateTime) {
    if (dateTime != null) {
      appendTextElementIfValueIsNotNull(parent, elementName, dateTime.format(DATE_TIME_FORMAT));
    }
  }

  /** Sets the "type" attribute for an assignment, rejecting unknown types. */
  private static void setAssignmentTypeAttribute(Assignment assign, Element assignNode) {
    Assignment.AssignmentType type = assign.getType();
    switch (type) {
      case PROJECT:
        assignNode.setAttribute("type", "PROJECT");
        break;
      case QUIZ:
        assignNode.setAttribute("type", "QUIZ");
        break;
      case OTHER:
        assignNode.setAttribute("type", "OTHER");
        break;
      case OPTIONAL:
        assignNode.setAttribute("type", "OPTIONAL");
        break;
      case POA:
        assignNode.setAttribute("type", "POA");
        break;
      default:
        throw new IllegalArgumentException("Can't handle assignment " + "type " + type);
    }
  }

  /** Dumps every student whose in-memory state has unsaved changes. */
  private void dumpDirtyStudents(GradeBook book) throws IOException {
    book.forEachStudent(student -> {
      if (student.isDirty()) {
        dumpStudent(student);
      }
    });
  }

  /**
   * Creates a <code>notes</code> XML element for a given
   * <code>List</code> of notes.
   */
  private static void doNotes(Document doc, Element parent, List<String> notes) {
    Element notesNode = doc.createElement("notes");
    for (String note : notes) {
      appendTextElementIfValueIsNotNull(notesNode, "note", note);
    }
    parent.appendChild(notesNode);
  }

  /**
   * Dumps a <code>Student</code> out to an XML file whose name is
   * based on the student's id and resides in the
   * <code>studentDir</code>.
   */
  private void dumpStudent(Student student) {
    Document doc = toXml(student);

    // Now dump DOM tree to the file.  try-with-resources closes (and
    // flushes) the PrintWriter, fixing a file-handle leak in the original.
    File studentFile = new File(studentDir, student.getId() + ".xml");
    try (PrintWriter pw = new PrintWriter(newFileWriter(studentFile), true)) {
      writeXmlToPrintWriter(doc, pw);
    } catch (TransformerException ex) {
      ex.printStackTrace(System.err);
      System.exit(1);
    }
  }

  /** Opens a FileWriter, wrapping the checked IOException as unchecked. */
  private FileWriter newFileWriter(File studentFile) {
    try {
      return new FileWriter(studentFile);
    } catch (IOException ex) {
      throw new IllegalStateException("Couldn't create FileWriter for " + studentFile, ex);
    }
  }

  /**
   * Returns a DOM tree that represents a <code>Student</code>
   */
  static Document toXml(Student student) {
    Document doc = createXmlDocument();
    Element root = doc.getDocumentElement();

    appendStudentInformation(student, root);
    appendGradesInformation(student, root);
    appendLateInformation(student, root);
    appendResubmittedInformation(student, root);
    appendNotes(student, root);

    return doc;
  }

  /** Appends the student's notes, if any. */
  private static void appendNotes(Student student, Element parent) {
    List<String> notes = student.getNotes();
    if (!notes.isEmpty()) {
      doNotes(parent.getOwnerDocument(), parent, notes);
    }
  }

  /** Appends the names of resubmitted assignments, if any. */
  private static void appendResubmittedInformation(Student student, Element parent) {
    Document doc = parent.getOwnerDocument();
    List<String> resubmitted = student.getResubmitted();
    if (!resubmitted.isEmpty()) {
      Element resubNode = doc.createElement("resubmitted");
      for (String assignmentName : resubmitted) {
        appendTextElementIfValueIsNotNull(resubNode, "name", assignmentName);
      }
      parent.appendChild(resubNode);
    }
  }

  /** Appends the names of late assignments, if any. */
  private static void appendLateInformation(Student student, Element parent) {
    Document doc = parent.getOwnerDocument();
    List<String> late = student.getLate();
    if (!late.isEmpty()) {
      Element lateNode = doc.createElement("late");
      for (String assignmentName : late) {
        appendTextElementIfValueIsNotNull(lateNode, "name", assignmentName);
      }
      parent.appendChild(lateNode);
    }
  }

  /**
   * Appends a "grades" element containing one "grade" element per graded
   * assignment, including submissions, notes, and INCOMPLETE/NO_GRADE markers.
   */
  private static void appendGradesInformation(Student student, Element parent) {
    Document doc = parent.getOwnerDocument();
    Iterator<String> gradeNames = student.getGradeNames().iterator();
    if (gradeNames.hasNext()) {
      Element gradesNode = doc.createElement("grades");

      while (gradeNames.hasNext()) {
        String gradeName = gradeNames.next();   // cast removed: iterator is typed
        Grade grade = student.getGrade(gradeName);

        Element gradeNode = doc.createElement("grade");
        appendTextElementIfValueIsNotNull(gradeNode, "name", grade.getAssignmentName());
        appendTextElementIfValueIsNotNull(gradeNode, "score", String.valueOf(grade.getScore()));
        appendSubmissionsInformation(grade.getSubmissionInfos(), gradeNode);
        doNotes(doc, gradeNode, grade.getNotes());

        // Sentinel scores are flagged with a "type" attribute.
        if (grade.getScore() == Grade.INCOMPLETE) {
          gradeNode.setAttribute("type", "INCOMPLETE");
        } else if (grade.getScore() == Grade.NO_GRADE) {
          gradeNode.setAttribute("type", "NO_GRADE");
        }

        gradesNode.appendChild(gradeNode);
      }

      parent.appendChild(gradesNode);
    }
  }

  /** Appends a "submissions" element when the grade has submission records. */
  private static void appendSubmissionsInformation(List<Grade.SubmissionInfo> submissionInfos, Element parent) {
    if (!submissionInfos.isEmpty()) {
      Document doc = parent.getOwnerDocument();
      Element submissions = doc.createElement("submissions");
      parent.appendChild(submissions);

      submissionInfos.forEach(info -> {
        Element submissionInfo = doc.createElement("submission-info");
        submissions.appendChild(submissionInfo);
        appendSubmissionInformation(info, submissionInfo);
      });
    }
  }

  /** Appends the submission date of a single submission record. */
  private static void appendSubmissionInformation(Grade.SubmissionInfo info, Element parent) {
    appendTextElementIfValueIsNotNull(parent, "date", info.getSubmissionTime());
  }

  /** Appends the student's identity/profile fields to the root element. */
  private static void appendStudentInformation(Student student, Element root) {
    appendTextElementIfValueIsNotNull(root, "id", student.getId());
    appendTextElementIfValueIsNotNull(root, "firstName", student.getFirstName());
    appendTextElementIfValueIsNotNull(root, "lastName", student.getLastName());
    appendTextElementIfValueIsNotNull(root, "nickName", student.getNickName());
    appendTextElementIfValueIsNotNull(root, "email", student.getEmail());
    appendTextElementIfValueIsNotNull(root, "major", student.getMajor());
    appendTextElementIfValueIsNotNull(root, "d2l-id", student.getD2LId());
    appendTextElementIfValueIsNotNull(root, "letter-grade", Objects.toString(student.getLetterGrade(), null));
    setAttributeIfValueIsNotNull(root, "enrolled-section", getSectionXmlAttributeValue(student.getEnrolledSection()));
  }

  /** Sets an attribute only when its value is non-null. */
  private static void setAttributeIfValueIsNotNull(Element root, String name, String value) {
    if (value != null) {
      root.setAttribute(name, value);
    }
  }

  /**
   * Creates a validating DOM document with a "student" root and the
   * student DTD.
   */
  private static Document createXmlDocument() {
    Document doc = null;

    try {
      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
      factory.setValidating(true);

      DocumentBuilder builder = factory.newDocumentBuilder();
      DOMImplementation dom = builder.getDOMImplementation();
      DocumentType docType = dom.createDocumentType("student", publicID, systemID);
      doc = dom.createDocument(null, "student", docType);
    } catch (ParserConfigurationException | DOMException ex) {
      ex.printStackTrace(System.err);
      System.exit(1);
    }
    return doc;
  }

  /** Appends a child text element only when its value is non-null. */
  private static void appendTextElementIfValueIsNotNull(Element parent, String elementName, String textValue) {
    if (textValue != null) {
      Document doc = parent.getOwnerDocument();
      Element id = doc.createElement(elementName);
      id.appendChild(doc.createTextNode(textValue));
      parent.appendChild(id);
    }
  }
}
package org.jcryptool.crypto.flexiprovider.algorithms.ui.views;

import java.io.IOException;
import java.net.URL;
import java.util.Hashtable;
import java.util.List;

import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
import org.jcryptool.core.logging.utils.LogUtil;
import org.jcryptool.crypto.flexiprovider.algorithms.AlgorithmsManager;
import org.jcryptool.crypto.flexiprovider.algorithms.FlexiProviderAlgorithmsPlugin;
import org.jcryptool.crypto.flexiprovider.algorithms.ui.views.nodes.AlgorithmNode;
import org.jcryptool.crypto.flexiprovider.algorithms.ui.views.nodes.CategoryNode;
import org.jcryptool.crypto.flexiprovider.algorithms.ui.views.nodes.FolderNode;
import org.jcryptool.crypto.flexiprovider.algorithms.ui.views.providers.FlexiProviderAlgorithmsViewContentProvider;
import org.jcryptool.crypto.flexiprovider.algorithms.ui.views.providers.FlexiProviderAlgorithmsViewLabelProvider;
import org.jcryptool.crypto.flexiprovider.ui.nodes.ITreeNode;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;

/**
 * Workbench view that presents the FlexiProvider algorithms in a tree.
 * Double-clicking a category/folder node toggles its expansion; double-clicking
 * an algorithm node hands the algorithm to {@link AlgorithmsManager}. Selecting
 * a node also displays the best-matching context help (see {@code hookActions}).
 */
public class FlexiProviderAlgorithmsView extends ViewPart {
    /** Workbench id of this view, as registered in plugin.xml. */
    public static final String ID = "org.jcryptool.crypto.flexiprovider.algorithms.ui.views.FlexiProviderAlgorithmsView"; //$NON-NLS-1$

    // Executed on left-button double-click; installed by hookActions().
    private AbstractHandler doubleClickHandler;

    private TreeViewer viewer;

    /**
     * Builds the tree viewer, wires content/label providers, hooks the
     * double-click/selection listeners, and registers workbench help.
     */
    public void createPartControl(Composite parent) {
        GridData gridData = new GridData();
        gridData.horizontalAlignment = GridData.FILL;
        gridData.verticalAlignment = GridData.FILL;
        gridData.grabExcessHorizontalSpace = true;
        gridData.grabExcessVerticalSpace = true;

        viewer = new TreeViewer(parent, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER);
        viewer.setContentProvider(new FlexiProviderAlgorithmsViewContentProvider(this));
        viewer.setLabelProvider(new FlexiProviderAlgorithmsViewLabelProvider());
        viewer.setInput(getViewSite());
        viewer.getTree().setLayoutData(gridData);
        parent.setLayout(new GridLayout());

        // The XML file maps algorithm names (and aliases) to context-help ids.
        hookActions("/xml/help_algorithms.xml");
        contributeToActionBars();
        PlatformUI.getWorkbench().getHelpSystem()
                .setHelp(parent, FlexiProviderAlgorithmsPlugin.PLUGIN_ID + ".AlgorithmView"); //$NON-NLS-1$
    }

    private void contributeToActionBars() {
        IActionBars bars = getViewSite().getActionBars();
        fillLocalToolBar(bars.getToolBarManager());
    }

    // Intentionally empty: no local toolbar contributions (yet).
    private void fillLocalToolBar(IToolBarManager manager) {
    }

    public void setFocus() {
        viewer.getControl().setFocus();
    }

    /**
     * Adds a listener which provides the context help for a selected algorithm.
     * If no context help is available for a child node, the parent's context
     * help is used instead.
     *
     * @param xmlfile bundle-relative path of the help-entry mapping file
     */
    @SuppressWarnings("unchecked")
    private void hookActions(String xmlfile) {
        // Double-click: toggle expansion for category/folder nodes, launch the
        // algorithm for algorithm nodes.
        doubleClickHandler = new AbstractHandler() {
            public Object execute(ExecutionEvent event) {
                ISelection selection = viewer.getSelection();
                Object obj = ((IStructuredSelection) selection).getFirstElement();
                if (obj instanceof CategoryNode || obj instanceof FolderNode) {
                    if (viewer.getTree().getSelection()[0].getExpanded()) {
                        viewer.collapseToLevel(obj, 1);
                    } else {
                        viewer.expandToLevel(obj, 1);
                    }
                } else if (obj instanceof AlgorithmNode) {
                    AlgorithmsManager.getInstance().algorithmCalled(((AlgorithmNode) obj).getAlgorithm());
                }
                return(null);
            }
        };

        URL xmlFile = FlexiProviderAlgorithmsPlugin.getDefault().getBundle().getEntry(xmlfile);
        SAXBuilder builder = new SAXBuilder();
        // Build a lookup table for the names defined in xml/help_algorithms.xml
        // to the according context id (in $nl$/contexts_algorithms.xml)
        final Hashtable<String, String> contextIdLookup = new Hashtable<String, String>();
        try {
            Document doc = builder.build(xmlFile);
            Element root = doc.getRootElement();
            List<Element> helpEntries = root.getChildren("helpentry");
            for (Element helpEntry : helpEntries) {
                String mainname = helpEntry.getAttributeValue("mainname");
                String contextid = helpEntry.getAttributeValue("contextid");
                contextIdLookup.put(mainname, contextid);
                // Aliases of an algorithm share the main entry's context id.
                Element aliasesRoot = helpEntry.getChild("aliases");
                if (aliasesRoot != null) {
                    List<Element> aliases = aliasesRoot.getChildren("alias");
                    for (Element alias : aliases) {
                        contextIdLookup.put(alias.getValue(), contextid);
                    }
                }
            }
            viewer.getControl().addMouseListener(new MouseAdapter() {
                @Override
                public void mouseDoubleClick(final MouseEvent e) {
                    if (e.button == 1) { // only left button double clicks
                        try {
                            doubleClickHandler.execute(null); // run assigned action
                        } catch(ExecutionException ex) {
                            LogUtil.logError(FlexiProviderAlgorithmsPlugin.PLUGIN_ID, ex);
                        }
                    }
                }

                @Override
                public void mouseDown(final MouseEvent event) {
                    IStructuredSelection selection = (IStructuredSelection) viewer.getSelection();
                    Object obj = ((IStructuredSelection) selection).getFirstElement();
                    if (obj instanceof ITreeNode) {
                        ITreeNode node = (ITreeNode) obj;
                        // Display the best matching context help for a selected algorithm
                        // display help of parent entry if no context help is available
                        do {
                            String name = node.toString();
                            if (contextIdLookup.containsKey(name)) {
                                String contextId = FlexiProviderAlgorithmsPlugin.PLUGIN_ID + "." + contextIdLookup.get(name);
                                PlatformUI.getWorkbench().getHelpSystem().displayHelp(contextId);
                                break;
                            }
                            node = node.getParent();
                        } while (node != null);
                        // Displaying help steals focus; give it back to the tree
                        // and restore the selection.
                        viewer.getControl().setFocus();
                        viewer.setSelection(selection);
                    }
                }
            });
        } catch (JDOMException e) {
            LogUtil.logError(FlexiProviderAlgorithmsPlugin.PLUGIN_ID, e);
        } catch (IOException e) {
            LogUtil.logError(FlexiProviderAlgorithmsPlugin.PLUGIN_ID, e);
        }
    }
}
package org.hbase.async;

import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.util.CharsetUtil;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;

import static org.hbase.async.HBaseClient.EMPTY_ARRAY;

/**
 * Creates a scanner to read data sequentially from HBase.
 * <p>
 * This class is <strong>not synchronized</strong> as it's expected to be
 * used from a single thread at a time.  It's rarely (if ever?) useful to
 * scan concurrently from a shared scanner using multiple threads.  If you
 * want to optimize large table scans using extra parallelism, create a few
 * scanners and give each of them a partition of the table to scan.  Or use
 * MapReduce.
 * <p>
 * Unlike HBase's traditional client, there's no method in this class to
 * explicitly open the scanner.  It will open itself automatically when you
 * start scanning by calling {@link #nextRows()}.  Also, the scanner will
 * automatically call {@link #close} when it reaches the end key.  If, however,
 * you would like to stop scanning <i>before reaching the end key</i>, you
 * <b>must</b> call {@link #close} before disposing of the scanner.  Note that
 * it's always safe to call {@link #close} on a scanner.
 * <p>
 * If you keep your scanner open and idle for too long, the RegionServer will
 * close the scanner automatically for you after a timeout configured on the
 * server side.  When this happens, you'll get an
 * {@link UnknownScannerException} when you attempt to use the scanner again.
 * Also, if you scan too slowly (e.g. you take a long time between each call
 * to {@link #nextRows()}), you may prevent HBase from splitting the region if
 * the region is also actively being written to while you scan.  For heavy
 * processing you should consider using MapReduce.
 * <p>
 * A {@code Scanner} is not re-usable.  Should you want to scan the same rows
 * or the same table again, you must create a new one.
 *
 * <h1>A note on passing {@code byte} arrays in argument</h1>
 * None of the method that receive a {@code byte[]} in argument will copy it.
 * For more info, please refer to the documentation of {@link HBaseRpc}.
 * <h1>A note on passing {@code String}s in argument</h1>
 * All strings are assumed to use the platform's default charset.
 */
public final class Scanner {

  private static final Logger LOG = LoggerFactory.getLogger(Scanner.class);

  /**
   * The default maximum number of {@link KeyValue}s the server is allowed
   * to return in a single RPC response to a {@link Scanner}.
   * <p>
   * This default value is exposed only as a hint but the value itself
   * is not part of the API and is subject to change without notice.
   * @see #setMaxNumKeyValues
   */
  public static final int DEFAULT_MAX_NUM_KVS = 4096;

  /**
   * The default maximum number of rows to scan per RPC.
   * <p>
   * This default value is exposed only as a hint but the value itself
   * is not part of the API and is subject to change without notice.
   * @see #setMaxNumRows
   */
  public static final int DEFAULT_MAX_NUM_ROWS = 128;

  /** Special reference we use to indicate we're done scanning. */
  private static final RegionInfo DONE =
    new RegionInfo(EMPTY_ARRAY, EMPTY_ARRAY, EMPTY_ARRAY);

  /** Client through which every RPC of this scanner is sent. */
  private final HBaseClient client;
  /** Name of the table being scanned (never copied, see class javadoc). */
  private final byte[] table;

  /**
   * The key to start scanning from.  An empty array means "start from the
   * first key in the table".  This key is updated each time we move on to
   * another row, so that in the event of a failure, we know what was the
   * last key previously returned.  Note that this doesn't entail that the
   * full row was returned.  Depending on the failure, we may not know if
   * the last key returned was only a subset of a row or a full row, so it
   * may not be possible to gracefully recover from certain errors without
   * re-scanning and re-returning the same data twice.
   */
  private byte[] start_key = EMPTY_ARRAY;

  /**
   * The last key to scan up to (exclusive).
   * An empty array means "scan until the last key in the table".
   */
  private byte[] stop_key = EMPTY_ARRAY;

  private byte[] family;     // TODO(tsuna): Handle multiple families?
  private byte[] qualifier;  // TODO(tsuna): Handle multiple qualifiers?

  /** Pre-serialized filter to apply on the scanner. */
  private byte[] filter;

  /** @see #setServerBlockCache */
  private boolean populate_blockcache = true;

  /**
   * Maximum number of rows to fetch at a time.
   * @see #setMaxNumRows
   */
  private int max_num_rows = DEFAULT_MAX_NUM_ROWS;

  /**
   * Maximum number of KeyValues to fetch at a time.
   * @see #setMaxNumKeyValues
   */
  private int max_num_kvs = DEFAULT_MAX_NUM_KVS;

  /**
   * The region currently being scanned.
   * If null, we haven't started scanning.
   * If == DONE, then we're done scanning.
   * Otherwise it contains a proper region name, and we're currently scanning.
   */
  private RegionInfo region;

  /**
   * This is the scanner ID we got from the RegionServer.
   * It's generated randomly so any {@code long} value is possible.
   */
  private long scanner_id;

  /**
   * Request object we re-use to avoid generating too much garbage.
   * @see #getNextRowsRequest
   */
  private GetNextRowsRequest get_next_rows_request;

  /**
   * Constructor.
   * <strong>This byte array will NOT be copied.</strong>
   * @param client The client through which RPCs of this scanner are issued.
   * @param table The non-empty name of the table to use.
   */
  Scanner(final HBaseClient client, final byte[] table) {
    KeyValue.checkTable(table);
    this.client = client;
    this.table = table;
  }

  /**
   * Returns the row key this scanner is currently at.
   * <strong>Do not modify the byte array returned.</strong>
   */
  public byte[] getCurrentKey() {
    return start_key;
  }

  /**
   * Specifies the key from which to start scanning.
   * <strong>This byte array will NOT be copied.</strong>
   * @param start_key The row key to start scanning from (validated by
   * {@link KeyValue#checkKey}).
   * @throws IllegalStateException if scanning has already started.
   */
  public void setStartKey(final byte[] start_key) {
    KeyValue.checkKey(start_key);
    checkScanningNotStarted();
    this.start_key = start_key;
  }

  /** Specifies the key from which to start scanning (platform default charset). */
  public void setStartKey(final String start_key) {
    setStartKey(start_key.getBytes());
  }

  /**
   * Specifies the key at which to stop scanning.
   * <strong>This byte array will NOT be copied.</strong>
   * @param stop_key The last row key to scan up to (validated by
   * {@link KeyValue#checkKey}).
   * @throws IllegalStateException if scanning has already started.
   */
  public void setStopKey(final byte[] stop_key) {
    KeyValue.checkKey(stop_key);
    checkScanningNotStarted();
    this.stop_key = stop_key;
  }

  /** Specifies the key at which to stop scanning (platform default charset). */
  public void setStopKey(final String stop_key) {
    setStopKey(stop_key.getBytes());
  }

  /**
   * Specifies a particular column family to scan.
   * <strong>This byte array will NOT be copied.</strong>
   * @param family The column family (validated by {@link KeyValue#checkFamily}).
   * @throws IllegalStateException if scanning has already started.
   */
  public void setFamily(final byte[] family) {
    KeyValue.checkFamily(family);
    checkScanningNotStarted();
    this.family = family;
  }

  /** Specifies a particular column family to scan. */
  public void setFamily(final String family) {
    setFamily(family.getBytes());
  }

  /**
   * Specifies a particular column qualifier to scan.
   * <strong>This byte array will NOT be copied.</strong>
   * @param qualifier The qualifier (validated by {@link KeyValue#checkQualifier}).
   * @throws IllegalStateException if scanning has already started.
   */
  public void setQualifier(final byte[] qualifier) {
    KeyValue.checkQualifier(qualifier);
    checkScanningNotStarted();
    this.qualifier = qualifier;
  }

  /** Specifies a particular column qualifier to scan. */
  public void setQualifier(final String qualifier) {
    setQualifier(qualifier.getBytes());
  }

  /**
   * Sets a regular expression to filter results based on the row key.
   * <p>
   * This is equivalent to calling {@link #setKeyRegexp(String, Charset)}
   * with the ISO-8859-1 charset in argument.
   * @param regexp The regular expression with which to filter the row keys.
   */
  public void setKeyRegexp(final String regexp) {
    setKeyRegexp(regexp, CharsetUtil.ISO_8859_1);
  }

  // Pre-encoded class names used when hand-serializing the RowFilter below.
  private static final byte[] ROWFILTER =
    Bytes.ISO88591("org.apache.hadoop" + ".hbase.filter.RowFilter");
  private static final byte[] REGEXSTRINGCOMPARATOR =
    Bytes.ISO88591("org.apache.hadoop" + ".hbase.filter.RegexStringComparator");
  private static final byte[] EQUAL = new byte[] { 'E', 'Q', 'U', 'A', 'L' };

  /**
   * Sets a regular expression to filter results based on the row key.
   * <p>
   * This regular expression will be applied on the server-side, on the row
   * key.  Rows for which the key doesn't match will not be returned to this
   * scanner, which can be useful to carefully select which rows are matched
   * when you can't just do a prefix match, and cut down the amount of data
   * transfered on the network.
   * <p>
   * Don't use an expensive regular expression, because Java's implementation
   * uses backtracking and matching will happen on the server side, potentially
   * on many many row keys.  See <a href="su.pr/2xaY8D">Regular Expression
   * Matching Can Be Simple And Fast</a> for more details on regular expression
   * performance (or lack thereof) and what "backtracking" means.
   * @param regexp The regular expression with which to filter the row keys.
   * @param charset The charset used to decode the bytes of the row key into a
   * string.  The RegionServer must support this charset, otherwise it will
   * unexpectedly close the connection the first time you attempt to use this
   * scanner.
   */
  public void setKeyRegexp(final String regexp, final Charset charset) {
    final byte[] regex = Bytes.UTF8(regexp);
    final byte[] chars = Bytes.UTF8(charset.name());
    // Hand-built Writable serialization of a
    // RowFilter(EQUAL, RegexStringComparator(regexp, charset)).
    // The constants below are the byte sizes of the fixed parts of the
    // encoding (class-name lengths, writeUTF length prefixes, type codes).
    filter = new byte[(1 + 40 + 2 + 5 + 1 + 1 + 1 + 52
                       + 2 + regex.length + 2 + chars.length)];
    final ChannelBuffer buf = ChannelBuffers.wrappedBuffer(filter);
    buf.clear();  // Set the writerIndex to 0.
    buf.writeByte((byte) ROWFILTER.length);
    buf.writeBytes(ROWFILTER);
    // writeUTF of the comparison operator
    buf.writeShort(5);
    buf.writeBytes(EQUAL);
    // The comparator: a RegexStringComparator
    buf.writeByte(53);  // Code for WritableByteArrayComparable  // 1
    buf.writeByte(0);   // Code for "this has no code".          // 1
    buf.writeByte((byte) REGEXSTRINGCOMPARATOR.length);
    buf.writeBytes(REGEXSTRINGCOMPARATOR);
    // writeUTF the regexp
    buf.writeShort(regex.length);
    buf.writeBytes(regex);   // regex.length
    // writeUTF the charset
    buf.writeShort(chars.length);
    buf.writeBytes(chars);   // chars.length
  }

  /**
   * Sets whether or not the server should populate its block cache for this
   * scan.
   * @throws IllegalStateException if scanning has already started.
   */
  public void setServerBlockCache(final boolean populate_blockcache) {
    checkScanningNotStarted();
    this.populate_blockcache = populate_blockcache;
  }

  /**
   * Sets the maximum number of rows to fetch per RPC.
   * May be called even after scanning started (see {@link #nextRows(int)}).
   * @param max_num_rows A strictly positive number of rows.
   * @throws IllegalArgumentException if {@code max_num_rows <= 0}.
   */
  public void setMaxNumRows(final int max_num_rows) {
    if (max_num_rows <= 0) {
      throw new IllegalArgumentException("zero or negative argument: "
                                         + max_num_rows);
    }
    this.max_num_rows = max_num_rows;
  }

  /**
   * Sets the maximum number of {@link KeyValue}s to fetch per RPC.
   * @param max_num_kvs A non-zero batch size.
   * @throws IllegalArgumentException if {@code max_num_kvs == 0}.
   * @throws IllegalStateException if scanning has already started.
   */
  public void setMaxNumKeyValues(final int max_num_kvs) {
    // NOTE(review): only zero is rejected; negative values are accepted here,
    // presumably meaning "no batch limit" on the server side — confirm against
    // the RegionServer's Scan semantics.
    if (max_num_kvs == 0) {
      throw new IllegalArgumentException("batch size can't be zero");
    }
    checkScanningNotStarted();
    this.max_num_kvs = max_num_kvs;
  }

  /**
   * Scans a number of rows.  Calling this method is equivalent to:
   * <pre>
   *   this.{@link #setMaxNumRows setMaxNumRows}(nrows);
   *   this.{@link #nextRows() nextRows}();
   * </pre>
   * @param nrows The maximum number of rows to retrieve.
   * @return A deferred list of rows.
   * @see #setMaxNumRows
   * @see #nextRows()
   */
  public Deferred<ArrayList<ArrayList<KeyValue>>> nextRows(final int nrows) {
    setMaxNumRows(nrows);
    return nextRows();
  }

  /**
   * Scans a number of rows.
   * <p>
   * The last row returned may be partial if it's very wide and
   * {@link #setMaxNumKeyValues} wasn't called with a negative value in
   * argument.
   * <p>
   * Once this method returns {@code null} once (which indicates that this
   * {@code Scanner} is done scanning), calling it again leads to an undefined
   * behavior.
   * @return A deferred list of rows.  Each row is a list of {@link KeyValue}
   * and each element in the list returned represents a different row.  Rows
   * are returned in sequential order.  {@code null} is returned if there are
   * no more rows to scan.  Otherwise its {@link ArrayList#size size} is
   * guaranteed to be less than or equal to the value last given to
   * {@link #setMaxNumRows}.
* @see #setMaxNumRows * @see #setMaxNumKeyValues */ public Deferred<ArrayList<ArrayList<KeyValue>>> nextRows() { if (region == DONE) { // We're already done scanning. return Deferred.fromResult(null); } else if (region == null) { // We need to open the scanner first. return client.openScanner(this).addCallbackDeferring( new Callback<Deferred<ArrayList<ArrayList<KeyValue>>>, Long>() { public Deferred<ArrayList<ArrayList<KeyValue>>> call(final Long arg) { scanner_id = arg; if (LOG.isDebugEnabled()) { LOG.debug("Scanner " + Bytes.hex(arg) + " opened on " + region); } return nextRows(); // Restart the call. } public String toString() { return "scanner opened"; } }); } // Need to silence this warning because the callback `got_next_row' // declares its return type to be Object, because its return value // may or may not be deferred. @SuppressWarnings("unchecked") final Deferred<ArrayList<ArrayList<KeyValue>>> d = (Deferred) client.scanNextRows(this).addCallbacks(got_next_row, nextRowErrback()); return d; } /** * Singleton callback to handle responses of "next" RPCs. * This returns an {@code ArrayList<ArrayList<KeyValue>>} (possibly inside a * deferred one). */ private final Callback<Object, Object> got_next_row = new Callback<Object, Object>() { public Object call(final Object response) { if (response == null) { // We're done scanning this region. final byte[] region_stop_key = region.stopKey(); // Check to see if this region is the last we should scan (either // because (1) it's the last region or (3) because its stop_key is // greater than or equal to the stop_key of this scanner provided // that (2) we're not trying to scan until the end of the table). if (region_stop_key == EMPTY_ARRAY || (stop_key != EMPTY_ARRAY && Bytes.memcmp(stop_key, region_stop_key) <= 0)) { get_next_rows_request = null; // free(); family = qualifier = null; // free(); start_key = stop_key = EMPTY_ARRAY; // free() but mustn't be null. return close() // Auto-close the scanner. 
.addCallback(new Callback<ArrayList<ArrayList<KeyValue>>, Object>() { public ArrayList<ArrayList<KeyValue>> call(final Object arg) { return null; // Tell the user there's nothing more to scan. } public String toString() { return "auto-close scanner " + Bytes.hex(scanner_id); } }); } return continueScanOnNextRegion(); } else if (!(response instanceof ArrayList)) { throw new InvalidResponseException(ArrayList.class, response); } @SuppressWarnings("unchecked") // I 3>> generics. final ArrayList<ArrayList<KeyValue>> rows = (ArrayList<ArrayList<KeyValue>>) response; final ArrayList<KeyValue> lastrow = rows.get(rows.size() - 1); start_key = lastrow.get(0).key(); return rows; } public String toString() { return "get nextRows response"; } }; /** * Creates a new errback to handle errors while trying to get more rows. */ private final Callback<Object, Object> nextRowErrback() { return new Callback<Object, Object>() { public Object call(final Object error) { final RegionInfo old_region = region; // Save before invalidate(). invalidate(); // If there was an error, don't assume we're still OK. if (error instanceof NotServingRegionException) { // We'll resume scanning on another region, and we want to pick up // right after the last key we successfully returned. Padding the // last key with an extra 0 gives us the next possible key. // TODO(tsuna): If we get 2 NSRE in a row, well pad the key twice! start_key = Arrays.copyOf(start_key, start_key.length + 1); return nextRows(); // XXX dangerous endless retry } else if (error instanceof UnknownScannerException) { // This can happen when our scanner lease expires. Unfortunately // there's no way for us to distinguish between an expired lease // and a real problem, for 2 reasons: the server doesn't keep track // of recently expired scanners and the lease time is only known by // the server and never communicated to the client. 
The normal // HBase client assumes that the client will share the same // hbase-site.xml configuration so that both the client and the // server will know the same lease time, but this assumption is bad // as nothing guarantees that the client's configuration will be in // sync with the server's. This unnecessarily increases deployment // complexity and it's brittle. final Scanner scnr = Scanner.this; LOG.warn(old_region + " pretends to not know " + scnr + ". I will" + " retry to open a scanner but this is typically because you've" + " been holding the scanner open and idle for too long (possibly" + " due to a long GC pause on your side or in the RegionServer)", error); // Let's re-open ourselves and keep scanning. return nextRows(); // XXX dangerous endless retry } return error; // Let the error propagate. } public String toString() { return "NextRow errback"; } }; } /** * Closes this scanner (don't forget to call this when you're done with it!). * <p> * Closing a scanner already closed has no effect. The deferred returned * will be called back immediately. * @return A deferred object that indicates the completion of the request. * The {@link Object} has not special meaning and can be {@code null}. */ public Deferred<Object> close() { if (region == null) { return Deferred.fromResult(null); } return client.closeScanner(this).addBoth(closedCallback()); } /** Callback+Errback invoked when the RegionServer closed our scanner. */ private Callback<Object, Object> closedCallback() { return new Callback<Object, Object>() { public Object call(Object arg) { if (arg instanceof Exception) { final Exception error = (Exception) arg; // NotServingRegionException: // If the region isn't serving, then our scanner is already broken // somehow, because while it's open it holds a read lock on the // region, which prevents it from splitting (among other things). // So if we get this error, our scanner is already dead anyway. 
// UnknownScannerException: // If this region doesn't know anything about this scanner then we // don't have anything to do to close it! if (error instanceof NotServingRegionException || error instanceof UnknownScannerException) { if (LOG.isDebugEnabled()) { LOG.debug("Ignoring exception when closing " + Scanner.this, error); } arg = null; // Clear the error. } // else: the `return arg' below will propagate the error. } else if (LOG.isDebugEnabled()) { LOG.debug("Scanner " + Bytes.hex(scanner_id) + " closed on " + region); } region = DONE; scanner_id = 0xDEAD000CC000DEADL; // Make debugging easier. return arg; } public String toString() { return "scanner closed"; } }; } /** * Continues scanning on the next region. * <p> * This method is called when we tried to get more rows but we reached the * end of the current region and need to move on to the next region. * <p> * This method closes the scanner on the current region, updates the start * key of this scanner and resumes scanning on the next region. * @return The deferred results from the next region. */ private Deferred<ArrayList<ArrayList<KeyValue>>> continueScanOnNextRegion() { // Copy those into local variables so we can still refer to them in the // "closure" below even after we've changed them. final long old_scanner_id = scanner_id; final RegionInfo old_region = region; if (LOG.isDebugEnabled()) { LOG.debug("Scanner " + Bytes.hex(old_scanner_id) + " done scanning " + old_region); } client.closeScanner(this).addCallback(new Callback<Object, Object>() { public Object call(final Object arg) { if (LOG.isDebugEnabled()) { LOG.debug("Scanner " + Bytes.hex(old_scanner_id) + " closed on " + old_region); } return arg; } public String toString() { return "scanner moved"; } }); // Continue scanning from the next region's start key. start_key = region.stopKey(); scanner_id = 0xDEAD000AA000DEADL; // Make debugging easier. 
invalidate(); return nextRows(); } public String toString() { final String region = this.region == null ? "null" : this.region == DONE ? "none" : this.region.toString(); final StringBuilder buf = new StringBuilder(14 + 1 + table.length + 1 + 12 + 1 + start_key.length + 1 + 1 + stop_key.length + 1 + 9 + 1 + (family == null ? 4 : family.length) + 1 + 12 + 1 + (qualifier == null ? 4 : qualifier.length) + 1 + 22 + 5 + 15 + 5 + 14 + 6 + 14 + 1 + region.length() + 1 + 13 + 18 + 1); buf.append("Scanner(table="); Bytes.pretty(buf, table); buf.append(", start_key="); Bytes.pretty(buf, start_key); buf.append(", stop_key="); Bytes.pretty(buf, stop_key); buf.append(", family="); Bytes.pretty(buf, family); buf.append(", qualifier="); Bytes.pretty(buf, qualifier); buf.append(", populate_blockcache=").append(populate_blockcache) .append(", max_num_rows=").append(max_num_rows) .append(", max_num_kvs=").append(max_num_kvs) .append(", region=").append(region); buf.append(", scanner_id=").append(Bytes.hex(scanner_id)) .append(')'); return buf.toString(); } // Package private stuff. // byte[] table() { return table; } byte[] startKey() { return start_key; } /** * Sets the name of the region that's hosting {@code this.start_key}. * @param region The region we're currently supposed to be scanning. */ void setRegionName(final RegionInfo region) { this.region = region; } /** * Invalidates this scanner and makes it assume it's no longer opened. * When a RegionServer goes away while we're scanning it, or some other type * of access problem happens, this method should be called so that the * scanner will have to re-locate the RegionServer and re-open itself. */ void invalidate() { region = null; } /** * Returns the region currently being scanned, if any. */ RegionInfo currentRegion() { return region; } /** * Returns an RPC to fetch the next rows. 
   */
  HBaseRpc getNextRowsRequest() {
    // Lazily created and cached to avoid generating garbage on every fetch.
    if (get_next_rows_request == null) {
      get_next_rows_request = new GetNextRowsRequest();
    }
    return get_next_rows_request;
  }

  /**
   * Returns an RPC to open this scanner.
   */
  HBaseRpc getOpenRequest() {
    return new OpenScannerRequest();
  }

  /**
   * Returns an RPC to close this scanner.
   */
  HBaseRpc getCloseRequest() {
    return new CloseScannerRequest(scanner_id);
  }

  /**
   * Throws an exception if scanning already started.
   * @throws IllegalStateException if scanning already started.
   */
  private void checkScanningNotStarted() {
    if (region != null) {
      throw new IllegalStateException("scanning already started");
    }
  }

  private static final byte[] OPEN_SCANNER = new byte[] {
    'o', 'p', 'e', 'n', 'S', 'c', 'a', 'n', 'n', 'e', 'r'
  };

  /**
   * RPC sent out to open a scanner on a RegionServer.
   */
  private final class OpenScannerRequest extends HBaseRpc {

    public OpenScannerRequest() {
      super(OPEN_SCANNER, Scanner.this.table, start_key);
    }

    /**
     * Predicts a lower bound on the serialized size of this RPC.
     * This is to avoid using a dynamic buffer, to avoid re-sizing the buffer.
     * Since we use a static buffer, if the prediction is wrong and turns out
     * to be less than what we need, there will be an exception which will
     * prevent the RPC from being serialized.  That'd be a severe bug.
     */
    private int predictSerializedSize() {
      int size = 0;
      size += 4;  // int:  Number of parameters.
      size += 1;  // byte: Type of the 1st parameter.
      size += 3;  // vint: region name length (3 bytes => max length = 32768).
      size += super.region.name().length;  // The region name.
      size += 1;  // byte: Type of the 2nd parameter.
      size += 1;  // byte: Type again (see HBASE-2877).
      size += 1;  // byte: Version of Scan.
      size += 3;  // vint: start key length (3 bytes => max length = 32768).
      size += start_key.length;  // The start key.
      size += 3;  // vint: stop key length (3 bytes => max length = 32768).
      size += stop_key.length;  // The stop key.
      size += 4;  // int:  Max number of versions to return.
      size += 4;  // int:  Max number of KeyValues to get per RPC.
      size += 4;  // int:  Unused field only used by HBase's client.
      size += 1;  // bool: Whether or not to populate the blockcache.
      size += 1;  // byte: Whether or not to use a filter.
      if (filter != null) {
        size += filter.length;
      }
      size += 8;  // long: Minimum timestamp.
      size += 8;  // long: Maximum timestamp.
      size += 1;  // byte: Boolean: "all time".
      size += 4;  // int:  Number of families.
      if (family != null) {
        size += 1;  // vint: Family length (guaranteed on 1 byte).
        size += family.length;  // The family.
        size += 4;  // int:  How many qualifiers follow?
        if (qualifier != null) {
          size += 3;  // vint: Qualifier length.
          size += qualifier.length;  // The qualifier.
        }
      }
      return size;
    }

    /** Serializes this request.  */
    ChannelBuffer serialize(final byte unused_server_version) {
      final ChannelBuffer buf = newBuffer(predictSerializedSize());
      buf.writeInt(2);  // Number of parameters.

      // 1st param: byte array containing region name
      writeHBaseByteArray(buf, super.region.name());

      // 2nd param: Scan object
      buf.writeByte(39);   // Code for a `Scan' parameter.
      buf.writeByte(39);   // Code again (see HBASE-2877).
      buf.writeByte(1);    // Manual versioning of Scan.
      writeByteArray(buf, start_key);
      writeByteArray(buf, stop_key);
      buf.writeInt(1);     // Max number of versions to return.

      // Max number of KeyValues to get per RPC.
      buf.writeInt(max_num_kvs);

      // Unused field, only used by HBase's client.  This value should represent
      // how many rows per call the client will fetch, but the server doesn't
      // care about this value, neither do we, because we use a different API.
      buf.writeInt(0xDEADBA5E);

      // Whether or not to populate the blockcache.
      buf.writeByte(populate_blockcache ? 0x01 : 0x00);

      if (filter == null) {
        buf.writeByte(0x00); // boolean (false): don't use a filter.
      } else {
        buf.writeByte(0x01); // boolean (true): use a filter.
        buf.writeBytes(filter);
      }

      // TimeRange
      buf.writeLong(0);               // Minimum timestamp.
      buf.writeLong(Long.MAX_VALUE);  // Maximum timestamp.
      buf.writeByte(0x01);            // Boolean: "all time".
      // The "all time" boolean indicates whether or not this time range covers
      // all possible times.  Not sure why it's part of the serialized RPC...

      // Families.
      buf.writeInt(family != null ? 1 : 0);  // Number of families that follow.
      if (family != null) {
        // Each family is then written like so:
        writeByteArray(buf, family);  // Column family name.
        buf.writeInt(qualifier == null ? 0 : 1);  // How many qualifiers do we want?
        if (qualifier != null) {
          writeByteArray(buf, qualifier);  // Column qualifier name.
        }
      }

      // Save the region in the Scanner.  This kind of a kludge but it really
      // is the easiest way to give the Scanner the RegionInfo it needs.
      Scanner.this.region = super.region;
      return buf;
    }

    public String toString() {
      final StringBuilder buf = new StringBuilder(12 + start_key.length + 2
        + 11 + stop_key.length + 2 + 14 + 4 + 22 + 5);
      buf.append(", start_key=");
      Bytes.pretty(buf, start_key);
      buf.append(", stop_key=");
      Bytes.pretty(buf, stop_key);
      buf.append(", max_num_kvs=").append(max_num_kvs)
        .append(", populate_blockcache=").append(populate_blockcache);
      return super.toStringWithQualifier("OpenScannerRequest",
                                         family, qualifier,
                                         buf.toString());
    }

  }

  private static final byte[] NEXT = new byte[] { 'n', 'e', 'x', 't' };

  /**
   * RPC sent out to fetch the next rows from the RegionServer.
   */
  private final class GetNextRowsRequest extends HBaseRpc {

    public GetNextRowsRequest() {
      super(NEXT);  // "next"...  Great method name!
    }

    /** Serializes this request.  */
    ChannelBuffer serialize(final byte unused_server_version) {
      // 4 (param count) + 1 + 8 (typed long) + 1 + 4 (typed int).
      final ChannelBuffer buf = newBuffer(4 + 1 + 8 + 1 + 4);
      buf.writeInt(2);  // Number of parameters.
      writeHBaseLong(buf, scanner_id);
      writeHBaseInt(buf, max_num_rows);
      return buf;
    }

    public String toString() {
      return "GetNextRowsRequest(scanner_id=" + scanner_id
        + ", max_num_rows=" + max_num_rows
        + ", region=" + region
        + ", attempt=" + attempt + ')';
    }

  }

  /**
   * RPC sent out to close a scanner on a RegionServer.
   */
  private static final class CloseScannerRequest extends HBaseRpc {

    private static final byte[] CLOSE = new byte[] { 'c', 'l', 'o', 's', 'e' };

    private final long scanner_id;

    public CloseScannerRequest(final long scanner_id) {
      super(CLOSE);  // "close"...  Great method name!
      this.scanner_id = scanner_id;
    }

    /** Serializes this request.  */
    ChannelBuffer serialize(final byte unused_server_version) {
      final ChannelBuffer buf = newBuffer(4 + 1 + 8);
      buf.writeInt(1);  // Number of parameters.
      writeHBaseLong(buf, scanner_id);
      return buf;
    }

    public String toString() {
      return "CloseScannerRequest(scanner_id=" + scanner_id
        + ", attempt=" + attempt + ')';
    }

  }

}
package fr.openwide.core.imports.excel.location; import java.io.Serializable; import org.apache.commons.lang3.builder.ToStringBuilder; public class ExcelImportLocation implements Serializable { private static final long serialVersionUID = 6866449558201453287L; private final String fileName; private final String sheetName; private final Integer rowIndexZeroBased; private final String cellAddress; public ExcelImportLocation(String fileName, String sheetName, Integer rowIndexZeroBased, String cellAddress) { super(); this.fileName = fileName; this.sheetName = sheetName; this.rowIndexZeroBased = rowIndexZeroBased; this.cellAddress = cellAddress; } public String getFileName() { return fileName; } public String getSheetName() { return sheetName; } public Integer getRowIndexZeroBased() { return rowIndexZeroBased; } public Integer getRowIndexOneBased() { return rowIndexZeroBased == null ? null : rowIndexZeroBased + 1; } public String getCellAddress() { return cellAddress; } @Override public String toString() { return new ToStringBuilder(this) .append("fileName", fileName) .append("sheetName", sheetName) .append("rowIndex (1-based)", getRowIndexOneBased()) .append("cellAddress", cellAddress) .build(); } }
package whelk.export.servlet;

import whelk.Document;
import whelk.util.LegacyIntegrationTools;

import javax.servlet.http.HttpServletRequest;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.StringJoiner;

/**
 * Shared helpers for the OAI-PMH servlet: request-parameter validation,
 * ISO-8601 parsing and construction of the SQL statements used to select
 * matching documents.
 */
public class Helpers
{
    /**
     * Returns a comma-separated list of the request's parameters that are not
     * in {@code knownParameters} (the "verb" parameter is always considered
     * known), or {@code null} when every parameter is known.
     */
    public static String getUnknownParameters(HttpServletRequest request, String... knownParameters)
    {
        HashSet<String> knownParametersSet = new HashSet<String>();
        knownParametersSet.addAll(Arrays.asList(knownParameters));

        StringJoiner unknownParameters = new StringJoiner(", ");
        Enumeration parameterNames = request.getParameterNames();
        while (parameterNames.hasMoreElements())
        {
            String parameterName = (String) parameterNames.nextElement();
            if (!knownParametersSet.contains(parameterName) && !parameterName.equals("verb"))
            {
                unknownParameters.add(parameterName);
            }
        }

        if (unknownParameters.length() == 0)
            return null;
        return unknownParameters.toString();
    }

    /**
     * Parses an ISO-8601 timestamp.  A date-only value (exactly 10 chars,
     * e.g. "2018-01-01") is interpreted as midnight UTC of that day.
     * @return the parsed time, or null when {@code dateTimeString} is null.
     * @throws java.time.format.DateTimeParseException on malformed input.
     */
    public static ZonedDateTime parseISO8601(String dateTimeString)
    {
        if (dateTimeString == null)
            return null;
        if (dateTimeString.length() == 10) // Date only
            dateTimeString += "T00:00:00Z";
        return ZonedDateTime.parse(dateTimeString);
    }

    /**
     * Prepares a statement resolving a (legacy-normalized) IRI to the id of
     * the record it identifies, via the maintable's __identifiers table.
     * The id is bound as a parameter, never concatenated into the SQL.
     */
    public static PreparedStatement prepareSameAsStatement(Connection dbconn, String id)
            throws SQLException
    {
        id = LegacyIntegrationTools.fixUri(id);
        String tableName = OaiPmh.configuration.getProperty("sqlMaintable");
        String sql = "SELECT id FROM " + tableName + "__identifiers WHERE iri = ?";
        PreparedStatement preparedStatement = dbconn.prepareStatement(sql);
        preparedStatement.setString(1, id);
        return preparedStatement;
    }

    /**
     * Dispatches to the correct statement builder: bib:[location] sets need
     * a holdings join, everything else takes the normal path.
     */
    public static PreparedStatement getMatchingDocumentsStatement(Connection dbconn,
            ZonedDateTime fromDateTime, ZonedDateTime untilDateTime, SetSpec setSpec,
            String id, boolean includeDependenciesInTimeInterval)
            throws SQLException
    {
        if (setSpec != null && setSpec.getRootSet() != null &&
                setSpec.getRootSet().startsWith(SetSpec.SET_BIB) && setSpec.getSubset() != null)
        {
            // Using a bib:[location] set requires special handling
            return getBibSigelMatchingDocumentsStatement(dbconn, fromDateTime, untilDateTime,
                    setSpec, includeDependenciesInTimeInterval);
        }
        else
        {
            return getNormalMatchingDocumentsStatement(dbconn, fromDateTime, untilDateTime,
                    setSpec, id, includeDependenciesInTimeInterval);
        }
    }

    /**
     * Builds the bib:[sigel] query: bib records joined with their attached
     * holdings so the result can be filtered on the holding's heldBy sigel.
     */
    private static PreparedStatement getBibSigelMatchingDocumentsStatement(Connection dbconn,
            ZonedDateTime fromDateTime, ZonedDateTime untilDateTime, SetSpec setSpec,
            boolean includeDependenciesInTimeInterval)
            throws SQLException
    {
        // Construct the query
        String selectSQL = "WITH bib_with_heldby AS ("
                + " SELECT lddb.id, lddb.data, lddb.collection, lddb.modified, lddb.deleted, lddb.data#>>'{@graph,1,heldBy,@id}' AS sigel, lddb.data#>>'{@graph,1,itemOf,@id}' AS itemOf, lddb_attached_holdings.data#>>'{@graph,1,heldBy,@id}' as heldBy"
                + " FROM lddb ";
        selectSQL += " LEFT JOIN lddb lddb_attached_holdings ON lddb.data#>>'{@graph,1,@id}' = lddb_attached_holdings.data#>>'{@graph,1,itemOf,@id}' ";
        selectSQL += " WHERE lddb.collection <> 'definitions' ";
        if (fromDateTime != null)
        {
            if (includeDependenciesInTimeInterval)
                selectSQL += " AND lddb.depMaxModified >= ? ";
            else
                selectSQL += " AND lddb.modified >= ? ";
        }
        if (untilDateTime != null)
        {
            if (includeDependenciesInTimeInterval)
                selectSQL += " AND lddb.depMinModified <= ? ";
            else
                selectSQL += " AND lddb.modified <= ? ";
        }
        selectSQL += " AND lddb.collection = ? ";
        selectSQL += " ) ";
        selectSQL += " SELECT * FROM bib_with_heldby WHERE heldBy = ?";

        PreparedStatement preparedStatement = dbconn.prepareStatement(selectSQL);
        preparedStatement.setFetchSize(512);

        // Assign parameters (must mirror the order in which the "?" were added above).
        int parameterIndex = 1;
        // FIX: Timestamp.from preserves sub-second precision.  The previous
        // getEpochSecond() * 1000L silently truncated fractional seconds from
        // user-supplied from/until timestamps.
        if (fromDateTime != null)
            preparedStatement.setTimestamp(parameterIndex++, Timestamp.from(fromDateTime.toInstant()));
        if (untilDateTime != null)
            preparedStatement.setTimestamp(parameterIndex++, Timestamp.from(untilDateTime.toInstant()));
        preparedStatement.setString(parameterIndex++, setSpec.getRootSet());
        preparedStatement.setString(parameterIndex++, LegacyIntegrationTools.legacySigelToUri(setSpec.getSubset()));

        return preparedStatement;
    }

    /**
     * Builds the normal document-selection query, optionally constrained by
     * id, modification interval, collection (root set) and — for hold sets —
     * a heldBy sigel containment filter.
     */
    private static PreparedStatement getNormalMatchingDocumentsStatement(Connection dbconn,
            ZonedDateTime fromDateTime, ZonedDateTime untilDateTime, SetSpec setSpec,
            String id, boolean includeDependenciesInTimeInterval)
            throws SQLException
    {
        // Construct the query
        String selectSQL = "SELECT lddb.id, lddb.data, lddb.collection, lddb.modified, lddb.deleted, lddb.data#>>'{@graph,1,heldBy,@id}' AS sigel, lddb.data#>>'{@graph,1,itemOf,@id}' AS itemOf"
                + " FROM lddb ";
        selectSQL += " WHERE lddb.collection <> 'definitions' ";
        if (id != null)
            selectSQL += " AND lddb.id = ? ";
        if (fromDateTime != null)
        {
            if (includeDependenciesInTimeInterval)
                selectSQL += " AND lddb.depMaxModified >= ? ";
            else
                selectSQL += " AND lddb.modified >= ? ";
        }
        if (untilDateTime != null)
        {
            if (includeDependenciesInTimeInterval)
                selectSQL += " AND lddb.depMinModified <= ? ";
            else
                selectSQL += " AND lddb.modified <= ? ";
        }
        if (setSpec != null)
        {
            if (setSpec.getRootSet() != null)
                selectSQL += " AND lddb.collection = ? ";
            // FIX: guard getRootSet() against null before startsWith() — a
            // SetSpec with a subset but no root set used to NPE here.
            if (setSpec.getSubset() != null)
            {
                if (setSpec.getRootSet() != null && setSpec.getRootSet().startsWith(SetSpec.SET_HOLD))
                    selectSQL += " AND lddb.data->'@graph' @> ?";
            }
        }

        PreparedStatement preparedStatement = dbconn.prepareStatement(selectSQL);
        preparedStatement.setFetchSize(512);

        // Assign parameters (must mirror the order in which the "?" were added above).
        int parameterIndex = 1;
        if (id != null)
            preparedStatement.setString(parameterIndex++, id);
        // FIX: Timestamp.from preserves sub-second precision (see above).
        if (fromDateTime != null)
            preparedStatement.setTimestamp(parameterIndex++, Timestamp.from(fromDateTime.toInstant()));
        if (untilDateTime != null)
            preparedStatement.setTimestamp(parameterIndex++, Timestamp.from(untilDateTime.toInstant()));
        if (setSpec != null)
        {
            if (setSpec.getRootSet() != null)
                preparedStatement.setString(parameterIndex++, setSpec.getRootSet());
            if (setSpec.getSubset() != null)
            {
                // Same guard as in the SQL-building phase so binding stays in sync.
                if (setSpec.getRootSet() != null && setSpec.getRootSet().startsWith(SetSpec.SET_HOLD))
                {
                    String strMap = "[{\"heldBy\":{\"@id\": \""
                            + LegacyIntegrationTools.legacySigelToUri(setSpec.getSubset())
                            + "\"}}]";
                    preparedStatement.setObject(parameterIndex++, strMap, java.sql.Types.OTHER);
                }
            }
        }

        return preparedStatement;
    }
}
package com.jetbrains.python;

import com.intellij.codeInsight.folding.CodeFoldingSettings;
import com.intellij.lang.ASTNode;
import com.intellij.lang.folding.FoldingBuilder;
import com.intellij.lang.folding.FoldingDescriptor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.TokenType;
import com.intellij.psi.tree.IElementType;
import com.jetbrains.python.psi.PyFileElementType;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;

/**
 * Builds code-folding regions for Python files: one region covering the contiguous
 * import block at the top of the file, and one region per function/class body.
 *
 * @author yole
 */
public class PythonFoldingBuilder implements FoldingBuilder, DumbAware {
  @NotNull
  public FoldingDescriptor[] buildFoldRegions(@NotNull ASTNode node, @NotNull Document document) {
    List<FoldingDescriptor> descriptors = new ArrayList<FoldingDescriptor>();
    appendDescriptors(node, descriptors);
    return descriptors.toArray(new FoldingDescriptor[descriptors.size()]);
  }

  /**
   * Recursively collects folding descriptors for {@code node} and its whole subtree.
   */
  private static void appendDescriptors(ASTNode node, List<FoldingDescriptor> descriptors) {
    if (node.getElementType() instanceof PyFileElementType) {
      // Fold the leading run of import statements (possibly separated by whitespace)
      // into a single region.
      ASTNode firstImport = node.getFirstChildNode();
      while (firstImport != null && !isImport(firstImport, false)) {
        firstImport = firstImport.getTreeNext();
      }
      if (firstImport != null) {
        ASTNode lastImport = firstImport.getTreeNext();
        while (lastImport != null && isImport(lastImport.getTreeNext(), true)) {
          lastImport = lastImport.getTreeNext();
        }
        if (lastImport != null) {
          // Back up over trailing whitespace so the region ends on the last real import.
          while (lastImport.getElementType() == TokenType.WHITE_SPACE) {
            lastImport = lastImport.getTreePrev();
          }
          if (isImport(lastImport, false) && firstImport != lastImport) {
            descriptors.add(new FoldingDescriptor(firstImport,
                                                  new TextRange(firstImport.getStartOffset(),
                                                                lastImport.getTextRange().getEndOffset())));
          }
        }
      }
    }
    else if (node.getElementType() == PyElementTypes.STATEMENT_LIST) {
      IElementType elType = node.getTreeParent().getElementType();
      if (elType == PyElementTypes.FUNCTION_DECLARATION || elType == PyElementTypes.CLASS_DECLARATION) {
        // Fold from just after the ':' of the def/class header to the end of the body,
        // excluding trailing whitespace.
        ASTNode colon = node.getTreeParent().findChildByType(PyTokenTypes.COLON);
        if (colon != null && colon.getStartOffset() + 1 < node.getTextRange().getEndOffset() - 1) {
          final CharSequence chars = node.getChars();
          int nodeStart = node.getTextRange().getStartOffset();
          int endOffset = node.getTextRange().getEndOffset();
          // FIX: the loop body previously lacked the decrement ("{ endOffset }"),
          // which is not valid Java and would never terminate; restore "endOffset--".
          while (endOffset > colon.getStartOffset() + 2 && endOffset > nodeStart &&
                 Character.isWhitespace(chars.charAt(endOffset - nodeStart - 1))) {
            endOffset--;
          }
          descriptors.add(new FoldingDescriptor(node, new TextRange(colon.getStartOffset() + 1, endOffset)));
        }
        else {
          TextRange range = node.getTextRange();
          if (range.getStartOffset() < range.getEndOffset() - 1) { // only for ranges at least 1 char wide
            descriptors.add(new FoldingDescriptor(node, range));
          }
        }
      }
    }
    ASTNode child = node.getFirstChildNode();
    while (child != null) {
      appendDescriptors(child, descriptors);
      child = child.getTreeNext();
    }
  }

  /**
   * Returns true when {@code node} is an import statement.
   *
   * @param orWhitespace when true, whitespace nodes are also accepted (used while
   *                     scanning forward across the import block)
   */
  private static boolean isImport(ASTNode node, boolean orWhitespace) {
    if (node == null) return false;
    IElementType elementType = node.getElementType();
    if (orWhitespace && elementType == TokenType.WHITE_SPACE) {
      return true;
    }
    return elementType == PyElementTypes.IMPORT_STATEMENT || elementType == PyElementTypes.FROM_IMPORT_STATEMENT;
  }

  public String getPlaceholderText(@NotNull ASTNode node) {
    if (isImport(node, false)) {
      return "import ...";
    }
    return "...";
  }

  public boolean isCollapsedByDefault(@NotNull ASTNode node) {
    if (isImport(node, false)) {
      return CodeFoldingSettings.getInstance().COLLAPSE_IMPORTS;
    }
    return false;
  }
}
package com.google.auth.oauth2;

import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableMap;
import java.io.Serializable;
import java.util.Map;
import javax.annotation.Nullable;

/**
 * Immutable value class holding the standard JWT claims (audience, issuer, subject) plus
 * arbitrary additional string claims. Create instances via {@link #newBuilder()} and derive
 * overridden copies via {@link #merge(JwtClaims)}.
 */
@AutoValue
public abstract class JwtClaims implements Serializable {

  private static final long serialVersionUID = 4974444151019426702L;

  /** Returns the audience ("aud") claim, or null if unset. */
  @Nullable
  abstract String getAudience();

  /** Returns the issuer ("iss") claim, or null if unset. */
  @Nullable
  abstract String getIssuer();

  /** Returns the subject ("sub") claim, or null if unset. */
  @Nullable
  abstract String getSubject();

  /**
   * Returns additional claims for this object. The returned map is not guaranteed to be mutable.
   *
   * @return additional claims
   */
  abstract Map<String, String> getAdditionalClaims();

  /** Returns a builder with an empty additional-claims map preset. */
  public static Builder newBuilder() {
    return new AutoValue_JwtClaims.Builder().setAdditionalClaims(ImmutableMap.<String, String>of());
  }

  /**
   * Returns a new Claims instance with overridden fields.
   *
   * <p>Any non-null field will overwrite the value from the original claims instance.
   *
   * @param other claims to override
   * @return new claims
   */
  public JwtClaims merge(JwtClaims other) {
    // NOTE(review): ImmutableMap.Builder.build() throws IllegalArgumentException on duplicate
    // keys, so a key present in both additional-claims maps will fail here rather than being
    // overridden -- confirm whether override semantics were intended.
    ImmutableMap.Builder<String, String> newClaimsBuilder = ImmutableMap.builder();
    newClaimsBuilder.putAll(getAdditionalClaims());
    newClaimsBuilder.putAll(other.getAdditionalClaims());

    return newBuilder()
        .setAudience(other.getAudience() == null ? getAudience() : other.getAudience())
        .setIssuer(other.getIssuer() == null ? getIssuer() : other.getIssuer())
        .setSubject(other.getSubject() == null ? getSubject() : other.getSubject())
        .setAdditionalClaims(newClaimsBuilder.build())
        .build();
  }

  /**
   * Returns whether or not this set of claims is complete.
   *
   * <p>Audience, issuer, and subject are required to be set in order to use the claim set for a JWT
   * token. An incomplete Claims instance is useful for overriding claims when using {@link
   * ServiceAccountJwtAccessCredentials#jwtWithClaims(JwtClaims)} or {@link
   * JwtCredentials#jwtWithClaims(JwtClaims)}.
   *
   * @return true if all required fields have been set; false otherwise
   */
  public boolean isComplete() {
    return getAudience() != null && getIssuer() != null && getSubject() != null;
  }

  /** Builder for {@link JwtClaims}; implementation generated by AutoValue. */
  @AutoValue.Builder
  public abstract static class Builder {
    public abstract Builder setAudience(String audience);

    public abstract Builder setIssuer(String issuer);

    public abstract Builder setSubject(String subject);

    public abstract Builder setAdditionalClaims(Map<String, String> additionalClaims);

    public abstract JwtClaims build();
  }
}
package mod._sw;

import java.io.PrintWriter;

import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.AccessibilityTools;
import util.WriterTools;
import util.utils;

import com.sun.star.accessibility.AccessibleRole;
import com.sun.star.accessibility.XAccessible;
import com.sun.star.accessibility.XAccessibleContext;
import com.sun.star.accessibility.XAccessibleValue;
import com.sun.star.awt.XWindow;
import com.sun.star.frame.XModel;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.text.ControlCharacter;
import com.sun.star.text.XText;
import com.sun.star.text.XTextCursor;
import com.sun.star.text.XTextDocument;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;

/**
 * Test of accessible object for the text document.<p>
 * Object implements the following interfaces :
 * <ul>
 *  <li> <code>::com::sun::star::accessibility::XAccessible</code></li>
 * </ul>
 * @see com.sun.star.accessibility.XAccessible
 */
public class SwAccessibleDocumentView extends TestCase {

    // Text document under test; created in initialize(), closed in cleanup().
    XTextDocument xTextDoc = null;

    /**
     * Called to create an instance of <code>TestEnvironment</code>
     * with an object to test and related objects. The method is called from
     * <code>getTestEnvironment()</code>. Obtains accessible object for
     * text document.
     *
     * @param Param test parameters
     * @param log writer to log information while testing
     *
     * @see TestEnvironment
     * @see #getTestEnvironment()
     */
    protected TestEnvironment createTestEnvironment(
        TestParameters Param, PrintWriter log) {
        XInterface oObj = null;
        XText oText = xTextDoc.getText();
        XTextCursor oCursor = oText.createTextCursor();

        // Fill the document with several paragraphs so the accessible tree is non-trivial.
        log.println( "inserting some lines" );
        try {
            for (int i=0; i<5; i++){
                oText.insertString( oCursor,"Paragraph Number: " + i, false);
                oText.insertString( oCursor,
                    " The quick brown fox jumps over the lazy Dog: SwXParagraph",
                    false);
                oText.insertControlCharacter(
                    oCursor, ControlCharacter.PARAGRAPH_BREAK, false );
                oText.insertString( oCursor,
                    "THE QUICK BROWN FOX JUMPS OVER THE LAZY DOG: SwXParagraph",
                    false);
                oText.insertControlCharacter(oCursor,
                    ControlCharacter.PARAGRAPH_BREAK, false );
                oText.insertControlCharacter(
                    oCursor, ControlCharacter.LINE_BREAK, false );
            }
        } catch ( com.sun.star.lang.IllegalArgumentException e ){
            e.printStackTrace(log);
            throw new StatusException( "Couldn't insert lines", e );
        }

        XModel aModel = (XModel)
            UnoRuntime.queryInterface(XModel.class, xTextDoc);

        AccessibilityTools at = new AccessibilityTools();

        XWindow xWindow = at.getCurrentWindow((XMultiServiceFactory)Param.getMSF(), aModel);
        XAccessible xRoot = at.getAccessibleObject(xWindow);

        // Object under test: the DOCUMENT-role accessible (stored in the
        // AccessibilityTools static, not this class's SearchedContext).
        at.getAccessibleObjectForRole(xRoot, AccessibleRole.DOCUMENT);

        oObj = AccessibilityTools.SearchedContext;

        log.println("ImplementationName " + utils.getImplName(oObj));
        at.printAccessibleTree(log, xRoot, Param.getBool(util.PropertyName.DEBUG_IS_ACTIVE));

        TestEnvironment tEnv = new TestEnvironment(oObj);

        // Locate a scroll bar via this class's own static search (see the method's
        // second-match behaviour below) and use its value interface to fire events.
        getAccessibleObjectForRole(xRoot, AccessibleRole.SCROLL_BAR);

        final XAccessibleValue xAccVal = (XAccessibleValue) UnoRuntime.queryInterface
                (XAccessibleValue.class, SearchedContext) ;

        tEnv.addObjRelation("EventProducer",
            new ifc.accessibility._XAccessibleEventBroadcaster.EventProducer() {
                public void fireEvent() {
                    // Toggle the scroll-bar value min -> max so listeners receive events.
                    xAccVal.setCurrentValue(xAccVal.getMinimumValue());
                    xAccVal.setCurrentValue(xAccVal.getMaximumValue());
                }
            });

        return tEnv;
    }

    // NOTE(review): these statics are never reset between invocations, so repeated
    // searches reuse previous state -- confirm this is acceptable for the harness.
    public static boolean first = false;
    public static XAccessibleContext SearchedContext = null;

    // Depth-first search storing a matching context in SearchedContext. Because of the
    // "first" flag, the FIRST match only arms the flag and the SECOND match is stored --
    // presumably intended to skip the first scroll bar; verify against the harness.
    public static void getAccessibleObjectForRole(XAccessible xacc,short role) {
        XAccessibleContext ac = xacc.getAccessibleContext();
        if (ac.getAccessibleRole()==role) {
            if (first) SearchedContext = ac;
            else first=true;
        } else {
            int k = ac.getAccessibleChildCount();
            for (int i=0;i<k;i++) {
                try {
                    getAccessibleObjectForRole(ac.getAccessibleChild(i),role);
                    if (SearchedContext != null) return ;
                } catch (com.sun.star.lang.IndexOutOfBoundsException e) {
                    System.out.println("Couldn't get Child");
                }
            }
        }
    }

    /**
     * Called while disposing a <code>TestEnvironment</code>.
     * Disposes text document.
     * @param Param test parameters
     * @param log writer to log information while testing
     */
    protected void cleanup( TestParameters Param, PrintWriter log) {
        log.println("dispose text document");
        util.DesktopTools.closeDoc(xTextDoc);
    }

    /**
     * Called while the <code>TestCase</code> initialization.
     * Creates a text document.
     *
     * @param Param test parameters
     * @param log writer to log information while testing
     *
     * @see #initializeTestCase()
     */
    protected void initialize(TestParameters Param, PrintWriter log) {
        log.println( "creating a text document" );
        xTextDoc = WriterTools.createTextDoc((XMultiServiceFactory)Param.getMSF());
    }
}
package com.systematic.trading.maths.indicator.ema;

import static com.systematic.trading.maths.util.SystematicTradingMathsAssert.assertValues;
import static com.systematic.trading.maths.util.SystematicTradingMathsAssert.line;
import static com.systematic.trading.maths.util.SystematicTradingMathsAssert.point;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.util.SortedMap;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import com.systematic.trading.data.TradingDayPrices;
import com.systematic.trading.maths.indicator.Validator;
import com.systematic.trading.maths.util.TradingDayPricesBuilder;

/**
 * Test the ExponentialMovingAverageCalculator.
 *
 * @author CJ Hare
 */
@RunWith(MockitoJUnitRunner.class)
public class ClosingPriceExponentialMovingAverageCalculatorTest {

	/** Mocked argument validator, configured per test to accept or throw. */
	@Mock
	private Validator validator;

	/** Calculator instance being tested. */
	private ExponentialMovingAverageIndicator calculator;

	// A lookback of zero must be rejected by the validator.
	@Test(expected = IllegalArgumentException.class)
	public void invalidLookback() {
		setUpValidationErrorGreaterThan();
		setUpCalculator(0);
	}

	// Zero days of EMA values must be rejected by the validator.
	@Test(expected = IllegalArgumentException.class)
	public void invalidDaysOfEmaValues() {
		setUpValidationErrorGreaterThan();
		setUpCalculator(1, 0);
	}

	@Test(expected = IllegalArgumentException.class)
	public void emaNullInput() {
		setUpValidationErrorNullInput();
		setUpCalculator(1);

		ema(null);
	}

	@Test(expected = IllegalArgumentException.class)
	public void emaNullEntries() {
		setUpValidationErrorNullEntries();
		setUpCalculator(2);

		ema(new TradingDayPrices[] { null });
	}

	@Test(expected = IllegalArgumentException.class)
	public void emaNotEnoughValues() {
		setUpValidationErrorNullInput();
		setUpCalculator(1);

		ema(new TradingDayPrices[] {});
	}

	// Worked example using published Intel closing prices over thirty days.
	@Test
	public void emaIntelExample() {
		final int lookback = 10;
		final TradingDayPrices[] data = createIntelExamplePrices();
		setUpCalculator(lookback);

		final ExponentialMovingAverageLine ema = ema(data);

		verifyEma(ema,
		        line(point(LocalDate.of(2010, 4, 7), 22.22), point(LocalDate.of(2010, 4, 8), 22.21),
		                point(LocalDate.of(2010, 4, 9), 22.24), point(LocalDate.of(2010, 4, 12), 22.27),
		                point(LocalDate.of(2010, 4, 13), 22.33), point(LocalDate.of(2010, 4, 14), 22.52),
		                point(LocalDate.of(2010, 4, 15), 22.80), point(LocalDate.of(2010, 4, 16), 22.97),
		                point(LocalDate.of(2010, 4, 19), 23.13), point(LocalDate.of(2010, 4, 20), 23.28),
		                point(LocalDate.of(2010, 4, 21), 23.34), point(LocalDate.of(2010, 4, 22), 23.43),
		                point(LocalDate.of(2010, 4, 23), 23.51), point(LocalDate.of(2010, 4, 26), 23.53),
		                point(LocalDate.of(2010, 4, 27), 23.47), point(LocalDate.of(2010, 4, 28), 23.40),
		                point(LocalDate.of(2010, 4, 29), 23.39), point(LocalDate.of(2010, 4, 30), 23.26),
		                point(LocalDate.of(2010, 5, 3), 23.23), point(LocalDate.of(2010, 5, 4), 23.08),
		                point(LocalDate.of(2010, 5, 5), 22.92)));
		verifyValidation(data, lookback);
	}

	// Strictly increasing prices should yield a strictly increasing EMA.
	@Test
	public void emaIncreasing() {
		final int lookback = 5;
		final TradingDayPrices[] data = createIncreasingPrices();
		setUpCalculator(lookback);

		final ExponentialMovingAverageLine ema = ema(data);

		verifyEma(ema,
		        line(point(LocalDate.of(2017, 9, 15), 2), point(LocalDate.of(2017, 9, 18), 3),
		                point(LocalDate.of(2017, 9, 19), 4), point(LocalDate.of(2017, 9, 20), 5),
		                point(LocalDate.of(2017, 9, 21), 6), point(LocalDate.of(2017, 9, 22), 7)));
		verifyValidation(data, lookback);
	}

	// Constant prices should yield a constant EMA.
	@Test
	public void emaFlat() {
		final int lookback = 4;
		final TradingDayPrices[] data = createFlatPrices();
		setUpCalculator(lookback);

		final ExponentialMovingAverageLine ema = ema(data);

		// NOTE(review): LocalDate.of(2017, 5, 13) sits amid October dates -- looks like a typo
		// for month 10, but it matches createFlatPrices() so the test is self-consistent.
		verifyEma(ema,
		        line(point(LocalDate.of(2017, 10, 12), 4.5), point(LocalDate.of(2017, 5, 13), 4.5),
		                point(LocalDate.of(2017, 10, 16), 4.5), point(LocalDate.of(2017, 10, 17), 4.5),
		                point(LocalDate.of(2017, 10, 18), 4.5)));
		verifyValidation(data, lookback);
	}

	// Minimum price count = lookback + additional EMA days (here 4 + 5 = 9).
	@Test
	public void getMinimumNumberOfPrices() {
		setUpCalculator(4);

		final int requiredDays = calculator.getMinimumNumberOfPrices();

		assertEquals(9, requiredDays);
	}

	// Convenience wrapper around the calculator under test.
	private ExponentialMovingAverageLine ema( final TradingDayPrices[] data ) {
		return calculator.calculate(data);
	}

	private void setUpValidationErrorGreaterThan() {
		doThrow(new IllegalArgumentException()).when(validator).verifyGreaterThan(anyInt(), eq(0));
	}

	private void setUpValidationErrorNullInput() {
		doThrow(new IllegalArgumentException()).when(validator).verifyNotNull(any());
	}

	private void setUpValidationErrorNullEntries() {
		doThrow(new IllegalArgumentException()).when(validator).verifyZeroNullEntries(any(TradingDayPrices[].class));
	}

	// Asserts the calculated EMA line matches the expected date -> value points.
	private void verifyEma( final ExponentialMovingAverageLine actual,
	        final SortedMap<LocalDate, BigDecimal> expected ) {
		assertNotNull(actual);
		assertNotNull(actual.getEma());
		assertEquals(expected.size(), actual.getEma().size());
		assertValues(expected, actual.getEma());
	}

	// Creates the calculator with one additional day of EMA values.
	private void setUpCalculator( final int lookback ) {
		calculator = new ClosingPriceExponentialMovingAverageCalculator(lookback, 1, validator);
	}

	private void setUpCalculator( final int lookback, final int daysOfEmaValues ) {
		calculator = new ClosingPriceExponentialMovingAverageCalculator(lookback, daysOfEmaValues, validator);
	}

	// Confirms the calculator delegated every argument check to the validator.
	private void verifyValidation( final TradingDayPrices[] data, final int lookback ) {
		verify(validator).verifyGreaterThan(1, lookback);
		verify(validator).verifyNotNull(data);
		verify(validator).verifyEnoughValues(data, lookback);
		verify(validator).verifyZeroNullEntries(data);
	}

	/**
	 * Flat prices starting from LocalDate.of(2017, 10, 9).
	 */
	private TradingDayPrices[] createFlatPrices() {
		final LocalDate[] dates = { LocalDate.of(2017, 10, 9), LocalDate.of(2017, 10, 10), LocalDate.of(2017, 10, 11),
		        LocalDate.of(2017, 10, 12), LocalDate.of(2017, 5, 13), LocalDate.of(2017, 10, 16),
		        LocalDate.of(2017, 10, 17), LocalDate.of(2017, 10, 18) };
		final double[] close = { 4.5, 4.5, 4.5, 4.5, 4.5, 4.5, 4.5, 4.5 };

		return createPrices(dates, close);
	}

	/**
	 * Increasing prices starting from LocalDate.of(2017, 9, 11).
	 */
	private TradingDayPrices[] createIncreasingPrices() {
		final LocalDate[] dates = { LocalDate.of(2017, 9, 11), LocalDate.of(2017, 9, 12), LocalDate.of(2017, 9, 13),
		        LocalDate.of(2017, 9, 14), LocalDate.of(2017, 9, 15), LocalDate.of(2017, 9, 18),
		        LocalDate.of(2017, 9, 19), LocalDate.of(2017, 9, 20), LocalDate.of(2017, 9, 21),
		        LocalDate.of(2017, 9, 22) };
		final double[] close = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };

		return createPrices(dates, close);
	}

	/**
	 * Thirty days of price data for Intel starting from LocalDate.of(2010, 3, 24).
	 */
	private TradingDayPrices[] createIntelExamplePrices() {
		final LocalDate[] dates = { LocalDate.of(2010, 3, 24), LocalDate.of(2010, 3, 25), LocalDate.of(2010, 3, 26),
		        LocalDate.of(2010, 3, 29), LocalDate.of(2010, 3, 30), LocalDate.of(2010, 3, 31),
		        LocalDate.of(2010, 4, 1), LocalDate.of(2010, 4, 5), LocalDate.of(2010, 4, 6),
		        LocalDate.of(2010, 4, 7), LocalDate.of(2010, 4, 8), LocalDate.of(2010, 4, 9),
		        LocalDate.of(2010, 4, 12), LocalDate.of(2010, 4, 13), LocalDate.of(2010, 4, 14),
		        LocalDate.of(2010, 4, 15), LocalDate.of(2010, 4, 16), LocalDate.of(2010, 4, 19),
		        LocalDate.of(2010, 4, 20), LocalDate.of(2010, 4, 21), LocalDate.of(2010, 4, 22),
		        LocalDate.of(2010, 4, 23), LocalDate.of(2010, 4, 26), LocalDate.of(2010, 4, 27),
		        LocalDate.of(2010, 4, 28), LocalDate.of(2010, 4, 29), LocalDate.of(2010, 4, 30),
		        LocalDate.of(2010, 5, 3), LocalDate.of(2010, 5, 4), LocalDate.of(2010, 5, 5) };
		final double[] close = { 22.27, 22.19, 22.08, 22.17, 22.18, 22.13, 22.23, 22.43, 22.24, 22.29, 22.15, 22.39,
		        22.38, 22.61, 23.36, 24.05, 23.75, 23.83, 23.95, 23.63, 23.82, 23.87, 23.65, 23.19, 23.10, 23.33,
		        22.68, 23.10, 22.40, 22.17 };

		return createPrices(dates, close);
	}

	private TradingDayPrices[] createPrices( final LocalDate[] dates, final double[] close ) {
		final TradingDayPrices[] data = new TradingDayPrices[dates.length];

		// Only the close price is used in the EMA calculation
		for (int i = 0; i < data.length; i++) {
			data[i] = new TradingDayPricesBuilder().withTradingDate(dates[i]).withClosingPrice(close[i]).build();
		}

		return data;
	}
}
package com.voodoodyne.gstrap.lang; import java.util.Collection; import java.util.Map; /** * Utility methods for comparison and list testing that are typesafe. Gets around the annoying problem with * Set.contains(), Map.get(), etc which doesn't give you compiler checking if you change the type of the * value passed in. */ public class Safe { /** * Adds and checks for null. */ public static <T> boolean addNotNull(Collection<T> coll, T element) { if (element == null) throw new NullPointerException(); return coll.add(element); } /** * Collection.contains() but with type safety. */ public static <T> boolean contains(final Collection<T> coll, T element) { return coll.contains(element); } /** * Collection.remove() but with type safety. */ public static <T> boolean remove(final Collection<T> coll, T element) { return coll.remove(element); } /** * Map.remove() but with type safety. */ public static <K, V> V remove(final Map<K, V> map, K key) { return map.remove(key); } /** * Map.get() but with type safety. */ public static <K, V> V get(final Map<K, V> map, K key) { return map.get(key); } /** * Map.get() but with type safety. * @return the defaultValue if the result of get() is null */ public static <K, V> V get(final Map<K, V> map, K key, V defaultValue) { final V value = get(map, key); return value == null ? defaultValue : value; } }
package de.cooperateproject.eabridge.eaobjectmodel.test;

import static org.junit.Assert.assertEquals;

import java.util.List;

import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.compare.Comparison;
import org.eclipse.emf.compare.Diff;
import org.eclipse.emf.compare.EMFCompare;
import org.eclipse.emf.compare.scope.DefaultComparisonScope;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.junit.Assert;
import org.junit.Test;

import de.cooperateproject.eabridge.eaobjectmodel.Element;
import de.cooperateproject.eabridge.eaobjectmodel.Package;
import de.cooperateproject.eabridge.eaobjectmodel.PackageBase;
import de.cooperateproject.eabridge.eaobjectmodel.RootPackage;
import de.cooperateproject.eabridge.eaobjectmodel.test.util.TestResource;

// Verifies that the EA object model can be read back from the Teneo/Hibernate mapping.
public class EA2ObjectModelMappingTest extends TeneoMappingBaseTest {

	@Test
	public void testReadSimpleDiagram() throws Exception {
		// Populate the test database from the bundled changelog fixture.
		initTestDb(TestResource.SimpleClassModelWithSchemaChangelog);

		// NOTE(review): the session is opened but never closed -- potential resource leak
		// in the test harness; confirm whether TeneoMappingBaseTest cleans up.
		Session session = getDataStore().getSessionFactory().openSession();
		Transaction trans = session.getTransaction();
		trans.begin();
		Query query = session.createQuery("FROM RootPackage");
		List<RootPackage> results = query.list();
		trans.commit();

		assertEquals(1, results.size());
		RootPackage content = results.get(0);
		// Element element = content.getElements().get(0);

		RootPackage compareContent = loadModelFromResource("resources/SimpleClassModel.xmi");
		// NOTE(review): the model comparison is disabled, so this test currently only checks
		// that exactly one RootPackage is read; compareContent is otherwise unused. Confirm
		// whether the EMF Compare assertion should be re-enabled.
		// assertEqualsModel(content, compareContent);
	}

	// Asserts structural equality of the two models via EMF Compare, printing differences on failure.
	private static void assertEqualsModel(RootPackage content, RootPackage compareContent) {
		// Resolve proxies first so the comparison sees the full object graph.
		EcoreUtil.resolveAll(content);
		EList<Diff> diff = compare(content, compareContent);
		try {
			Assert.assertTrue(diff.isEmpty());
		} catch(AssertionError e) {
			System.out.println("Differences:");
			printDiff(diff);
			throw(e);
		}
	}

	private static EList<Diff> compare(RootPackage content, RootPackage compareContent) {
		DefaultComparisonScope scope = new DefaultComparisonScope(content, compareContent, null);
		Comparison comparison = EMFCompare.builder().build().compare(scope);
		return comparison.getDifferences();
	}

	private static void printDiff(EList<Diff> diff) {
		for(Diff d : diff) {
			System.out.println(d);
		}
	}
}
package org.openhab.binding.onewire.internal.handler;

import static org.openhab.binding.onewire.internal.OwBindingConstants.*;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.smarthome.config.core.Configuration;
import org.eclipse.smarthome.core.thing.Thing;
import org.eclipse.smarthome.core.thing.ThingStatus;
import org.eclipse.smarthome.core.thing.ThingStatusDetail;
import org.eclipse.smarthome.core.thing.ThingTypeUID;
import org.eclipse.smarthome.core.thing.binding.builder.ThingBuilder;
import org.openhab.binding.onewire.internal.DS2438Configuration;
import org.openhab.binding.onewire.internal.OwDynamicStateDescriptionProvider;
import org.openhab.binding.onewire.internal.OwException;
import org.openhab.binding.onewire.internal.SensorId;
import org.openhab.binding.onewire.internal.device.AbstractOwDevice;
import org.openhab.binding.onewire.internal.device.DS18x20;
import org.openhab.binding.onewire.internal.device.DS2406_DS2413;
import org.openhab.binding.onewire.internal.device.DS2438;
import org.openhab.binding.onewire.internal.device.DS2438.LightSensorType;
import org.openhab.binding.onewire.internal.device.OwChannelConfig;
import org.openhab.binding.onewire.internal.device.OwSensorType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The {@link AdvancedMultisensorThingHandler} is responsible for handling DS2438 based multisensors (modules)
 *
 * @author Jan N. Klug - Initial contribution
 */
@NonNullByDefault
public class AdvancedMultisensorThingHandler extends OwBaseThingHandler {
    public static final Set<ThingTypeUID> SUPPORTED_THING_TYPES = new HashSet<>(
            Arrays.asList(THING_TYPE_AMS, THING_TYPE_BMS));
    public static final Set<OwSensorType> SUPPORTED_SENSOR_TYPES = Collections
            .unmodifiableSet(Stream.of(OwSensorType.AMS, OwSensorType.AMS_S, OwSensorType.BMS, OwSensorType.BMS_S)
                    .collect(Collectors.toSet()));

    // Property keys holding the ids of the associated slave sensors of the module.
    private static final String PROPERTY_DS18B20 = "ds18b20";
    private static final String PROPERTY_DS2413 = "ds2413";
    private static final String PROPERTY_DS2438 = "ds2438";

    private static final Set<String> REQUIRED_PROPERTIES_AMS = Collections.unmodifiableSet(
            Stream.of(PROPERTY_HW_REVISION, PROPERTY_PROD_DATE, PROPERTY_DS18B20, PROPERTY_DS2438, PROPERTY_DS2413)
                    .collect(Collectors.toSet()));
    private static final Set<String> REQUIRED_PROPERTIES_BMS = Collections.unmodifiableSet(
            Stream.of(PROPERTY_HW_REVISION, PROPERTY_PROD_DATE, PROPERTY_DS18B20).collect(Collectors.toSet()));

    private final Logger logger = LoggerFactory.getLogger(AdvancedMultisensorThingHandler.class);

    private final ThingTypeUID thingType = this.thing.getThingTypeUID();
    private int hwRevision = 0;

    // Digital channels are refreshed on their own (usually faster) schedule; milliseconds.
    private int digitalRefreshInterval = 10 * 1000;
    private long digitalLastRefresh = 0;

    public AdvancedMultisensorThingHandler(Thing thing,
            OwDynamicStateDescriptionProvider dynamicStateDescriptionProvider) {
        super(thing, dynamicStateDescriptionProvider, SUPPORTED_SENSOR_TYPES,
                getRequiredProperties(thing.getThingTypeUID()));
    }

    @Override
    public void initialize() {
        Configuration configuration = getConfig();
        Map<String, String> properties = editProperties();

        if (!super.configureThingHandler()) {
            return;
        }

        // NOTE(review): throws NumberFormatException/NPE if the property is absent or malformed --
        // presumably guaranteed present by the required-properties check above; confirm.
        hwRevision = Integer.valueOf(properties.get(PROPERTY_HW_REVISION));

        // Sensor list order matters: index 0 = main DS2438, 1 = DS18x20,
        // and (AMS only) 2 = secondary DS2438, 3 = DS2406/DS2413 digital I/O.
        sensors.add(new DS2438(sensorId, this));
        sensors.add(new DS18x20(new SensorId(properties.get(PROPERTY_DS18B20)), this));
        if (THING_TYPE_AMS.equals(thingType)) {
            sensors.add(new DS2438(new SensorId(properties.get(PROPERTY_DS2438)), this));
            sensors.add(new DS2406_DS2413(new SensorId(properties.get(PROPERTY_DS2413)), this));
            if (configuration.containsKey(CONFIG_DIGITALREFRESH)) {
                digitalRefreshInterval = ((BigDecimal) configuration.get(CONFIG_DIGITALREFRESH)).intValue() * 1000;
            } else {
                digitalRefreshInterval = 10 * 1000;
            }
            // Zero forces a full refresh on the first cycle (see refresh()).
            digitalLastRefresh = 0;
        }

        scheduler.execute(() -> {
            configureThingChannels();
        });
    }

    @Override
    public void refresh(OwBaseBridgeHandler bridgeHandler, long now) {
        try {
            // NOTE(review): reference comparison (==) on ThingTypeUID here, while .equals() is
            // used elsewhere in this class -- confirm the UID instances are shared constants.
            if ((now >= (digitalLastRefresh + digitalRefreshInterval)) && (thingType == THING_TYPE_AMS)) {
                logger.trace("refreshing digital {}", this.thing.getUID());

                // digitalLastRefresh == 0 means first run: force a full read.
                Boolean forcedRefresh = digitalLastRefresh == 0;
                digitalLastRefresh = now;

                if (!sensors.get(3).checkPresence(bridgeHandler)) {
                    return;
                }

                sensors.get(3).refresh(bridgeHandler, forcedRefresh);
            }

            if (now >= (lastRefresh + refreshInterval)) {
                if (!sensors.get(0).checkPresence(bridgeHandler)) {
                    return;
                }

                logger.trace("refreshing analog {}", this.thing.getUID());

                Boolean forcedRefresh = lastRefresh == 0;
                lastRefresh = now;

                if (thingType.equals(THING_TYPE_AMS)) {
                    // Skip the last sensor (digital I/O); it is handled on its own schedule above.
                    for (int i = 0; i < sensors.size() - 1; i++) {
                        sensors.get(i).refresh(bridgeHandler, forcedRefresh);
                    }
                } else {
                    for (int i = 0; i < sensors.size(); i++) {
                        sensors.get(i).refresh(bridgeHandler, forcedRefresh);
                    }
                }
            }
        } catch (OwException e) {
            logger.debug("{}: refresh exception '{}'", this.thing.getUID(), e.getMessage());
            updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR, "refresh exception");
        }
    }

    @Override
    protected void configureThingChannels() {
        Configuration configuration = getConfig();
        ThingBuilder thingBuilder = editThing();

        // delete unwanted channels
        Set<String> existingChannelIds = thing.getChannels().stream().map(channel -> channel.getUID().getId())
                .collect(Collectors.toSet());
        Set<String> wantedChannelIds = SENSOR_TYPE_CHANNEL_MAP.get(sensorType).stream()
                .map(channelConfig -> channelConfig.channelId).collect(Collectors.toSet());
        wantedChannelIds.add(CHANNEL_TEMPERATURE);
        wantedChannelIds.add(CHANNEL_HUMIDITY);
        existingChannelIds.stream().filter(channelId -> !wantedChannelIds.contains(channelId))
                .forEach(channelId -> removeChannelIfExisting(thingBuilder, channelId));

        // add or update wanted channels
        SENSOR_TYPE_CHANNEL_MAP.get(sensorType).stream().forEach(channelConfig -> {
            addChannelIfMissingAndEnable(thingBuilder, channelConfig);
        });

        // temperature channel: the external DS18B20 offers higher resolution (POR/RES variant).
        if (configuration.containsKey(CONFIG_TEMPERATURESENSOR)
                && configuration.get(CONFIG_TEMPERATURESENSOR).equals("DS18B20")) {
            addChannelIfMissingAndEnable(thingBuilder,
                    new OwChannelConfig(CHANNEL_TEMPERATURE, CHANNEL_TYPE_UID_TEMPERATURE_POR_RES), 1);
        } else {
            addChannelIfMissingAndEnable(thingBuilder,
                    new OwChannelConfig(CHANNEL_TEMPERATURE, CHANNEL_TYPE_UID_TEMPERATURE));
        }

        // humidity channel (double-brace initialization: creates an anonymous HashMap subclass).
        addChannelIfMissingAndEnable(thingBuilder, new OwChannelConfig(CHANNEL_HUMIDITY, CHANNEL_TYPE_UID_HUMIDITY),
                new Configuration(new HashMap<String, Object>() {
                    {
                        put(CONFIG_HUMIDITY, "/HIH4000/humidity");
                    }
                }));

        // configure light channel: sensor board revision determines the conversion formula.
        if (sensorType == OwSensorType.AMS_S || sensorType == OwSensorType.BMS_S) {
            if (hwRevision <= 13) {
                ((DS2438) sensors.get(0)).setLightSensorType(LightSensorType.ELABNET_V1);
            } else {
                ((DS2438) sensors.get(0)).setLightSensorType(LightSensorType.ELABNET_V2);
            }
        }

        updateThing(thingBuilder.build());

        try {
            for (AbstractOwDevice sensor : sensors) {
                sensor.configureChannels();
            }
        } catch (OwException e) {
            updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR, e.getMessage());
            return;
        }

        validConfig = true;
        updateStatus(ThingStatus.UNKNOWN, ThingStatusDetail.NONE);
    }

    @Override
    public void updateSensorProperties(OwBaseBridgeHandler bridgeHandler) throws OwException {
        Map<String, String> properties = editProperties();
        DS2438Configuration ds2438configuration = new DS2438Configuration(bridgeHandler, sensorId);

        // Derive the module type from the main DS2438 subtype plus attached slave sensors.
        sensorType = DS2438Configuration.getMultisensorType(ds2438configuration.getSensorSubType(),
                ds2438configuration.getAssociatedSensorTypes());

        properties.put(PROPERTY_MODELID, sensorType.toString());
        properties.put(PROPERTY_VENDOR, ds2438configuration.getVendor());
        properties.put(PROPERTY_PROD_DATE, ds2438configuration.getProductionDate());
        properties.put(PROPERTY_HW_REVISION, ds2438configuration.getHardwareRevision());

        switch (sensorType) {
            case BMS:
            case BMS_S:
                properties.put(PROPERTY_DS18B20,
                        ds2438configuration.getAssociatedSensorIds(OwSensorType.DS18B20).get(0).getFullPath());
                break;
            case AMS:
            case AMS_S:
                properties.put(PROPERTY_DS18B20,
                        ds2438configuration.getAssociatedSensorIds(OwSensorType.DS18B20).get(0).getFullPath());
                properties.put(PROPERTY_DS2413,
                        ds2438configuration.getAssociatedSensorIds(OwSensorType.DS2413).get(0).getFullPath());
                properties.put(PROPERTY_DS2438,
                        ds2438configuration.getAssociatedSensorIds(OwSensorType.MS_TV).get(0).getFullPath());
                break;
            default:
                throw new OwException("sensorType " + sensorType.toString() + " not supported by this thing handler");
        }

        updateProperties(properties);
    }

    /**
     * used to determine the correct set of required properties
     *
     * @param thingType
     * @return
     */
    private static Set<String> getRequiredProperties(ThingTypeUID thingType) {
        if (THING_TYPE_AMS.equals(thingType)) {
            return REQUIRED_PROPERTIES_AMS;
        } else {
            return REQUIRED_PROPERTIES_BMS;
        }
    }
}
package io.hawt.web;

import java.io.IOException;
import java.io.PrintWriter;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.security.auth.Subject;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import io.hawt.system.ConfigManager;
import io.hawt.system.Helpers;
import org.jolokia.converter.Converters;
import org.jolokia.converter.json.JsonConvertOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Servlet that turns an already-authenticated JAAS {@link Subject} into a
 * hawtio HTTP session and answers with the subject's principals and public
 * credentials serialized as JSON.
 */
public class LoginServlet extends HttpServlet {

    private static final long serialVersionUID = 1L;

    private static final transient Logger LOG = LoggerFactory.getLogger(LoginServlet.class);

    /** Fallback {@link HttpSession} timeout in seconds (half an hour). */
    private static final int DEFAULT_SESSION_TIMEOUT = 1800;

    /**
     * Simple class names of Principal implementations whose {@code getName()}
     * is accepted as the login username.
     */
    private static final String KNOWN_PRINCIPALS[] = {"UserPrincipal", "KeycloakPrincipal", "JAASPrincipal", "SimplePrincipal"};

    protected Converters converters = new Converters();
    protected JsonConvertOptions options = JsonConvertOptions.DEFAULT;
    protected ConfigManager config;

    // Session timeout in seconds; may stay null-safe checked because the
    // field is an Integer (kept for backward compatibility).
    private Integer timeout = DEFAULT_SESSION_TIMEOUT;
    private List<String> knownPrincipalList;

    /**
     * Reads the optional {@code sessionTimeout} setting from the deployed
     * {@link ConfigManager}; any unparsable or zero value falls back to
     * {@link #DEFAULT_SESSION_TIMEOUT}.
     */
    @Override
    public void init(ServletConfig servletConfig) throws ServletException {
        knownPrincipalList = Arrays.asList(KNOWN_PRINCIPALS);
        config = (ConfigManager) servletConfig.getServletContext().getAttribute("ConfigManager");
        if (config != null) {
            String s = config.get("sessionTimeout", "" + DEFAULT_SESSION_TIMEOUT);
            if (s != null) {
                try {
                    timeout = Integer.parseInt(s);
                    // timeout of 0 means default timeout
                    if (timeout == 0) {
                        timeout = DEFAULT_SESSION_TIMEOUT;
                    }
                } catch (Exception e) {
                    // ignore and use our own default of 1/2 hour
                    timeout = DEFAULT_SESSION_TIMEOUT;
                }
            }
        }
        LOG.info("hawtio login is using " + (timeout != null ? timeout + " sec." : "default") + " HttpSession timeout");
    }

    /**
     * Answers an existing session directly; otherwise resolves the current
     * {@link Subject} from the {@link AccessControlContext}, creates a new
     * session populated with user attributes, and replies with the subject
     * details. Responds 403 when no authenticated subject is available.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        resp.setContentType("application/json");
        final PrintWriter out = resp.getWriter();
        HttpSession session = req.getSession(false);
        if (session != null) {
            Subject subject = (Subject) session.getAttribute("subject");
            if (subject == null) {
                // A session without a subject is unusable — force re-login.
                LOG.warn("No security subject stored in existing session, invalidating");
                session.invalidate();
                Helpers.doForbidden(resp);
                return;
            }
            sendResponse(session, subject, out);
            return;
        }
        AccessControlContext acc = AccessController.getContext();
        Subject subject = Subject.getSubject(acc);
        if (subject == null) {
            Helpers.doForbidden(resp);
            return;
        }
        // Pick the username from the last matching known principal type.
        Set<Principal> principals = subject.getPrincipals();
        String username = null;
        if (principals != null) {
            for (Principal principal : principals) {
                String principalClass = principal.getClass().getSimpleName();
                if (knownPrincipalList.contains(principalClass)) {
                    username = principal.getName();
                    LOG.debug("Authorizing user {}", username);
                }
            }
        }
        session = req.getSession(true);
        session.setAttribute("subject", subject);
        session.setAttribute("user", username);
        session.setAttribute("org.osgi.service.http.authentication.remote.user", username);
        session.setAttribute("org.osgi.service.http.authentication.type", HttpServletRequest.BASIC_AUTH);
        session.setAttribute("loginTime", GregorianCalendar.getInstance().getTimeInMillis());
        if (timeout != null) {
            session.setMaxInactiveInterval(timeout);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Http session timeout for user {} is {} sec.", username, session.getMaxInactiveInterval());
        }
        sendResponse(session, subject, out);
    }

    /**
     * Serializes the subject's principals and public credentials to JSON and
     * writes them to {@code out}.
     *
     * @param session current HTTP session (kept in the signature for
     *                subclasses/callers; not read here)
     * @param subject authenticated subject to describe
     * @param out     response writer
     */
    protected void sendResponse(HttpSession session, Subject subject, PrintWriter out) {
        Map<String, Object> answer = new HashMap<String, Object>();
        List<Object> principals = new ArrayList<Object>();
        for (Principal principal : subject.getPrincipals()) {
            Map<String, String> data = new HashMap<String, String>();
            data.put("type", principal.getClass().getName());
            data.put("name", principal.getName());
            principals.add(data);
        }
        List<Object> credentials = new ArrayList<Object>();
        for (Object credential : subject.getPublicCredentials()) {
            Map<String, Object> data = new HashMap<String, Object>();
            data.put("type", credential.getClass().getName());
            data.put("credential", credential);
            // FIX: the credential entry was previously built but never added,
            // so the "credentials" array in the response was always empty.
            credentials.add(data);
        }
        answer.put("principals", principals);
        answer.put("credentials", credentials);
        ServletHelpers.writeObject(converters, options, out, answer);
    }
}
package com.rgi.common.util.jdbc;

import com.mockrunner.mock.jdbc.MockConnection;
import com.rgi.common.Pair;
import com.sun.org.apache.xalan.internal.utils.XMLSecurityManager;
import com.sun.org.omg.CORBA.AttributeDescription;
import org.junit.Test;

import java.io.File;
import java.nio.file.FileSystems;
import java.sql.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Random;
import java.util.function.BinaryOperator;

import static org.junit.Assert.*;

/**
 * Argument-validation tests for {@code JdbcUtility}: each test passes a null
 * connection, SQL string, mapper, consumer or factory and expects an
 * {@link IllegalArgumentException} (or an {@link AssertionError} for invalid
 * SQL against the mock connection).
 */
public class JdbcUtilityTest {

    private static final String TEST_TABLE_NAME = "tiles";

    private final Random randomGenerator = new Random();

    //This portion tests the first SelectOne function block
    @Test(expected = IllegalArgumentException.class)
    public void selectOneNullConectionTest() throws SQLException {
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.selectOne(null,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("selectOne should have thrown an IllegalArgumentException for a null Connection.");
    }

    @Test(expected = IllegalArgumentException.class)
    public void selectOneNullStringTest() throws Exception {
        final File testFile = this.getRandomFile(4);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final boolean result = JdbcUtility.selectOne(con,
                null,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("selectOne should have thrown an IllegalArgumentException for a null or empty String.");
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = IllegalArgumentException.class)
    public void selectOneNullResultSetFunctionTest() throws Exception {
        final File testFile = this.getRandomFile(2);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.selectOne(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                null);
        fail("selectOne should have thrown an IllegalArgumentException for a null resultMapper.");
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = AssertionError.class)
    public void selectOneTryStatementTest() throws Exception {
        final File testFile = this.getRandomFile(2);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "This is not a sql command";
        JdbcUtility.selectOne(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1));
        fail("selectOne should have a sql command in the String str");
    }

    //this portion tests the select function block
    @Test(expected = IllegalArgumentException.class)
    public void selectNullConnectionTest() throws SQLException {
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.selectOne(null,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("select should have thrown an IllegalArgumentException for a null Connection.");
    }

    @Test(expected = IllegalArgumentException.class)
    public void selectNullStringTest() throws Exception {
        final File testFile = this.getRandomFile(4);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final boolean result = JdbcUtility.selectOne(con,
                null,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("select should have thrown an IllegalArgumentException for a null or empty String.");
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = IllegalArgumentException.class)
    public void selectNullResultSetFunctionTest() throws Exception {
        final File testFile = this.getRandomFile(2);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.selectOne(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                null);
        fail("selectOne should have thrown an IllegalArgumentException for a null resultMapper.");
    }

    //this portion tests the forEach function block
    @Test(expected = IllegalArgumentException.class)
    public void forEachNullConnectionTest() throws Exception {
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        JdbcUtility.forEach(null,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1));
        fail("select should have thrown an IllegalArgumentException for a null Connection.");
    }

    @Test(expected = IllegalArgumentException.class)
    public void forEachNullStringTest() throws Exception {
        final File testFile = this.getRandomFile(3);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        JdbcUtility.forEach(con,
                null,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1));
        fail("forEach should have thrown an IllegalArgumentException for a null String.");
    }

    @SuppressWarnings("ConstantConditions")
    @Test(expected = IllegalArgumentException.class)
    public void forEachNullResultSetConsumerFunctionTest() throws Exception {
        final File testFile = this.getRandomFile(8);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        JdbcUtility.forEach(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                null);
        fail("forEach should have thrown an IllegalArgumentException for a null resultMapper.");
    }

    //This portion tests the first update function block
    @Test(expected = IllegalArgumentException.class)
    public void update1NullDatabaseConnectionTest() throws Exception {
        final File testFile = this.getRandomFile(2);
        testFile.createNewFile();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        JdbcUtility.update(null, str);
        fail("update should have thrown an IllegalArgumentException for a null connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update1NullStringTest() throws Exception {
        final File testFile = this.getRandomFile(5);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        JdbcUtility.update(con, null);
        fail("update should have thrown an IllegalArgumentException for a null string");
    }

    @Test(expected = AssertionError.class)
    public void update1PreparedStatementCatchTest() throws Exception {
        final File testFile = this.getRandomFile(4);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "Hello! what is up my mans.";
        JdbcUtility.update(con, str);
        fail("update should return a sql string");
    }

    //this portion tests the second update funciton block
    @Test(expected = IllegalArgumentException.class)
    public void update2NullConnectionTest() throws Exception {
        final File testFile = this.getRandomFile(5);
        testFile.createNewFile();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        JdbcUtility.update(null,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1));
        fail("update should have thrown an IllegalArgumentException for a null connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update2NullStringTest() throws Exception {
        final File testFile = this.getRandomFile(7);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        JdbcUtility.update(con,
                null,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1));
        fail("update should have thrown an IllegalArgumentException for a null String");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update2NullResultSetFuncitonTest() throws Exception {
        final File testFile = this.getRandomFile(5);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        JdbcUtility.update(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                null);
        fail("update should have thrown an IllegalArgumentException for a null resultSetFunction");
    }

    //this portion tests the third update function block
    @Test(expected = IllegalArgumentException.class)
    public void update3NullConnectionTest() throws Exception {
        final File testFile = this.getRandomFile(4);
        testFile.createNewFile();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.update(null,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("update should have thrown an IllegalArgumentException for a null Connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update3NullStringTest() throws Exception {
        final File testFile = this.getRandomFile(4);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final boolean result = JdbcUtility.update(con,
                null,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                resultSet -> resultSet.getInt(1)) > 0;
        fail("update should have thrown an IllegalArgumentException for a null Connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update3NullKeyMapperTest() throws Exception {
        final File testFile = this.getRandomFile(8);
        testFile.createNewFile();
        final Connection con = new MockConnection();
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final boolean result = JdbcUtility.update(con,
                str,
                preparedStatement -> preparedStatement.setString(1, "tiles"),
                null);
        fail("update should have thrown an illegalArgumentException for a null resultMapper");
    }

    //this portion tests the fourth update function block
    @Test(expected = IllegalArgumentException.class)
    public void update4NullConnectionTest() throws SQLException {
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final Iterable<Pair<Integer, Integer>> edges = Arrays.asList(new Pair<>(12, 23),
                new Pair<>(12, 42),
                new Pair<>(34, 56));
        JdbcUtility.update(null,
                str,
                edges,
                (preparedStatement, edge) -> {
                    preparedStatement.setInt(1, edge.getLeft());
                    preparedStatement.setInt(2, edge.getRight());
                });
        fail("update should have thrown an illegalArgumentException for a null connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update4NullStringTest() throws SQLException {
        final Connection con = new MockConnection();
        final Iterable<Pair<Integer, Integer>> edges = Arrays.asList(new Pair<>(12, 23),
                new Pair<>(12, 42),
                new Pair<>(34, 56));
        JdbcUtility.update(con,
                null,
                edges,
                (preparedStatement, edge) -> {
                    preparedStatement.setInt(1, edge.getLeft());
                    preparedStatement.setInt(2, edge.getRight());
                });
        fail("update should have thrown an illegalArgumentException for a null connection");
    }

    @Test(expected = IllegalArgumentException.class)
    public void update4NullIterableTest() throws SQLException {
        final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
        final Connection con = new MockConnection();
        JdbcUtility.update(con,
                str,
                null,
                (preparedStatement, edge) -> {
                    preparedStatement.setInt(1, Integer.parseInt("tiles"));
                    preparedStatement.setInt(2, Integer.parseInt("tiles"));
                });
        fail("update should have thrown an illegalArgumentException for a null Iterable");
    }

    // // this portion tests the accumulate function block
    // public void accumulateNullConnectionTest() throws SQLException {
    //     final String str = "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;";
    //     final boolean initialValue = true;
    //     final boolean result = JdbcUtility.accumulate(null, str, preparedStatement -> preparedStatement.setString(1, "tiles"), initialValue,
    //             resultSet -> resultSet.getInt(1), true);

    //This portion tests the first map function block
    @Test(expected = IllegalArgumentException.class)
    public void map1NullResultSetTest() throws Exception {
        final Connection con = new MockConnection();
        final String query = "SELECT table_name, column_name, extension_name FROM %s;";
        try(Statement statement = con.createStatement();
            ResultSet tableNameColumnNameRS = statement.executeQuery(query)) {
            JdbcUtility.map(tableNameColumnNameRS, null);
            fail("map should have thrown an IllegalArgumentException for a null resultSet");
        }
    }

    //This portion tests arrayList map function block
    @Test(expected = IllegalArgumentException.class)
    public void map1NullResultSetFunctionTest() throws SQLException {
        final Connection con = new MockConnection();
        final String query = "SELECT table_name, column_name, extension_name FROM %s;";
        try(Statement statement = con.createStatement();
            ResultSet tableNameColumnNameRS = statement.executeQuery(query)) {
            JdbcUtility.map(null,
                    resultSet -> new JdbcUtilityTest.ExtensionData(tableNameColumnNameRS.getString("table_name"),
                            tableNameColumnNameRS.getString("column_name"),
                            tableNameColumnNameRS.getString("extension_name")));
            fail("map should have thrown an IllegalArgumentException for a null resultSetFunction");
        }
    }

    //This portion tests the Collection map function block
    @Test(expected = IllegalArgumentException.class)
    public void map2ResultSetNullTest() throws SQLException {
        // NOTE(review): dead local `str` removed — the query string was never used here.
        final Connection con = new MockConnection();
        JdbcUtility.map(null,
                resultSet -> resultSet.getString("table_name"),
                HashSet<String>::new);
        fail("map should have thrown an IllegalArgumentException for a null resultSet");
    }

    @Test(expected = IllegalArgumentException.class)
    public void map2ResultSetFunctionNullTest() throws SQLException {
        final String str = "SELECT table_name FROM %s WHERE data_type = 'tiles';";
        final Connection con = new MockConnection();
        try(final Statement createStmt2 = con.createStatement();
            final ResultSet contentsPyramidTables = createStmt2.executeQuery(str)) {
            JdbcUtility.map(contentsPyramidTables,
                    null,
                    HashSet<String>::new);
            fail("map should have thrown an IllegalArgumentException for a null resultSetFunction");
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void map2NullCollectionFactoryTest() throws SQLException {
        final String str = "SELECT table_name FROM %s WHERE data_type = 'tiles';";
        final Connection con = new MockConnection();
        try(final Statement createStmt2 = con.createStatement();
            final ResultSet contentsPyramidTables = createStmt2.executeQuery(str)) {
            JdbcUtility.map(contentsPyramidTables,
                    resultSet -> resultSet.getString("table_name"),
                    null);
            fail("map should have thrown an IllegalArgumentException for a null collectionFactory");
        }
    }

    //This portion tests the mapFilter function block
    @Test(expected = IllegalArgumentException.class)
    public void mapFilterNullResultSetTest() throws SQLException {
        // NOTE(review): dead local `str` removed — the query string was never used here.
        final Connection con = new MockConnection();
        JdbcUtility.mapFilter(null,
                resultSet -> resultSet.getString("table_name"),
                pyramidName -> JdbcUtilityTest.tableOrViewExists(con, pyramidName),
                HashSet<String>::new);
        fail("mapFilter should have thrown an IllegalArgumentException for a null resultSet");
    }

    @Test(expected = IllegalArgumentException.class)
    public void mapFilterNullFunctionTest() throws SQLException {
        final String str = "SELECT DISTINCT table_name FROM %s;";
        final Connection con = new MockConnection();
        try(Statement createStmt3 = con.createStatement();
            ResultSet tileMatrixPyramidTables = createStmt3.executeQuery(str)) {
            JdbcUtility.mapFilter(tileMatrixPyramidTables,
                    null,
                    pyramidName -> JdbcUtilityTest.tableOrViewExists(con, pyramidName),
                    HashSet<String>::new);
            fail("mapFIlter should have thrown an IllegalArgumentException for a null funciton");
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void mapFilterNullpredicateTest() throws SQLException {
        final String str = "SELECT DISTINCT table_name FROM %s;";
        final Connection con = new MockConnection();
        try(Statement createStmt3 = con.createStatement();
            ResultSet tileMatrixPyramidTables = createStmt3.executeQuery(str)) {
            JdbcUtility.mapFilter(tileMatrixPyramidTables,
                    resultSet -> resultSet.getString("table_name"),
                    null,
                    HashSet<String>::new);
            fail("mapFIlter should have thrown an IllegalArgumentException for a null predicate");
        }
    }

    //@TODO find out a way to test getObjects
    //// this portion tests the getObjects function block
    // public void getObjectsNullResultSetTest() throws SQLException
    //     final Connection con = new MockConnection();
    //     JdbcUtility.getObjects(null, 1, this.attributeDescriptions.size());

    /** Simple value holder used by the map tests. */
    private static final class ExtensionData {
        private ExtensionData(final String tableName,
                              final String columnName,
                              final String extensionName) {
            this.tableName = tableName;
            this.columnName = columnName;
            this.extensionName = extensionName;
        }

        private final String tableName;
        private final String columnName;
        private final String extensionName;
    }

    /**
     * Returns true when a table or view with the given name exists on the
     * connection's sqlite_master catalog.
     */
    @SuppressWarnings("ConstantConditions")
    public static boolean tableOrViewExists(final Connection connection, final String name) throws SQLException {
        // FIX: removed a dead `new MockConnection()` allocation that was never used.
        JdbcUtilityTest.verify(connection);
        if(name == null || name.isEmpty()) {
            throw new IllegalArgumentException("Table/view name cannot be null or empty");
        }
        return JdbcUtility.selectOne(connection,
                "SELECT COUNT(*) FROM sqlite_master WHERE (type = 'table' OR type = 'view') AND name = ? LIMIT 1;",
                preparedStatement -> preparedStatement.setString(1, name),
                resultSet -> resultSet.getInt(1)) > 0;
    }

    /** Rejects null or closed connections. */
    private static void verify(final Connection connection) throws SQLException {
        if(connection == null || connection.isClosed()) {
            throw new IllegalArgumentException("The connection cannot be null or closed.");
        }
    }

    /** Returns a .gpkg File with a random, not-yet-existing name. */
    private File getRandomFile(final int length) {
        File testFile;
        do {
            testFile = new File(FileSystems.getDefault().getPath(this.getRandomString(length)) + ".gpkg");
        } while (testFile.exists());
        return testFile;
    }

    /** Returns a random alphabetic string of the given length. */
    private String getRandomString(final int length) {
        final String characters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
        final char[] text = new char[length];
        for (int i = 0; i < length; i++) {
            text[i] = characters.charAt(this.randomGenerator.nextInt(characters.length()));
        }
        return new String(text);
    }
}
package org.deviceconnect.android.deviceplugin.host.profile;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.List;
import java.util.Random;

import org.deviceconnect.android.deviceplugin.host.R;
import org.deviceconnect.android.event.Event;
import org.deviceconnect.android.event.EventError;
import org.deviceconnect.android.event.EventManager;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.NotificationProfile;
import org.deviceconnect.android.profile.api.DConnectApi;
import org.deviceconnect.android.profile.api.DeleteApi;
import org.deviceconnect.android.profile.api.PostApi;
import org.deviceconnect.android.profile.api.PutApi;
import org.deviceconnect.message.DConnectMessage;
import org.deviceconnect.message.intent.message.IntentDConnectMessage;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.drawable.Icon;
import android.os.Build;
import androidx.core.app.NotificationCompat;
import android.widget.Toast;

/**
 * Host-device implementation of the Notification profile: posts and cancels
 * Android status-bar notifications and relays click/close/show events back to
 * registered DeviceConnect event listeners.
 *
 * @author NTT DOCOMO, INC.
 */
public class HostNotificationProfile extends NotificationProfile {

    /** Upper bound (exclusive) for randomly generated notification ids. */
    private static final int RANDOM_SEED = 1000000;

    /** Receives click/delete broadcasts from posted notifications. */
    private NotificationStatusReceiver mNotificationStatusReceiver;

    /** Broadcast action fired when a posted notification is clicked. */
    private static final String ACTON_CLICK_NOTIFICATION = "org.deviceconnect.android.intent.action.click.notifiy";

    /** Broadcast action fired when a posted notification is dismissed. */
    private static final String ACTON_DELETE_NOTIFICATION = "org.deviceconnect.android.intent.action.delete.notifiy";

    /** Source of notification ids. */
    private final Random mRandom = new Random();

    /**
     * POST /notification/notify — shows a notification (or a Toast on the
     * "M100" device model) and emits an onshow event.
     */
    private final DConnectApi mPostNotifyApi = new PostApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_NOTIFY;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            // Lazily register the receiver for click/delete broadcasts.
            if (mNotificationStatusReceiver == null) {
                mNotificationStatusReceiver = new NotificationStatusReceiver();
                IntentFilter filter = new IntentFilter();
                filter.addAction(ACTON_CLICK_NOTIFICATION);
                filter.addAction(ACTON_DELETE_NOTIFICATION);
                getContext().getApplicationContext().registerReceiver(mNotificationStatusReceiver, filter);
            }

            String serviceId = getServiceID(request);
            NotificationType type = getType(request);
            String body = getBody(request);

            // Map the notification type to an icon resource and a title.
            int iconType = 0;
            String title = "";
            if (type == NotificationType.PHONE) {
                iconType = Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP
                        ? R.drawable.notification_00 : R.drawable.notification_00_post_lollipop;
                title = "PHONE";
            } else if (type == NotificationType.MAIL) {
                iconType = Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP
                        ? R.drawable.notification_01 : R.drawable.notification_01_post_lollipop;
                title = "MAIL";
            } else if (type == NotificationType.SMS) {
                iconType = Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP
                        ? R.drawable.notification_02 : R.drawable.notification_02_post_lollipop;
                title = "SMS";
            } else if (type == NotificationType.EVENT) {
                iconType = Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP
                        ? R.drawable.notification_03 : R.drawable.notification_03_post_lollipop;
                title = "EVENT";
            } else {
                MessageUtils.setInvalidRequestParameterError(response, "type is invalid.");
                return true;
            }

            // The body arrives URL-encoded; decode it as UTF-8.
            String encodeBody = "";
            try {
                if (body != null) {
                    encodeBody = URLDecoder.decode(body, "UTF-8");
                }
            } catch (UnsupportedEncodingException e) {
                MessageUtils.setInvalidRequestParameterError(response, "body is invalid.");
                return true;
            }

            int notifyId = mRandom.nextInt(RANDOM_SEED);

            if (Build.MODEL.endsWith("M100")) {
                // This device model cannot show notifications; fall back to a Toast.
                Toast.makeText(getContext(), encodeBody, Toast.LENGTH_SHORT).show();
                response.putExtra(NotificationProfile.PARAM_NOTIFICATION_ID, notifyId);
                setResult(response, IntentDConnectMessage.RESULT_OK);
            } else {
                // Build intent for notification content
                Intent notifyClickIntent = new Intent(ACTON_CLICK_NOTIFICATION);
                notifyClickIntent.putExtra("notificationId", notifyId);
                notifyClickIntent.putExtra("serviceId", serviceId);
                PendingIntent pendingClickIntent = PendingIntent.getBroadcast(getContext(),
                        notifyId, notifyClickIntent, PendingIntent.FLAG_UPDATE_CURRENT);

                Intent notifyDeleteIntent = new Intent(ACTON_DELETE_NOTIFICATION);
                notifyDeleteIntent.putExtra("notificationId", notifyId);
                notifyDeleteIntent.putExtra("serviceId", serviceId);
                PendingIntent pendingDeleteIntent = PendingIntent.getBroadcast(getContext(),
                        notifyId, notifyDeleteIntent, PendingIntent.FLAG_UPDATE_CURRENT);

                Notification notification;

                // Get an instance of the NotificationManager service
                NotificationManager mNotification = (NotificationManager) getContext()
                        .getSystemService(Context.NOTIFICATION_SERVICE);

                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
                    NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(getContext())
                            .setSmallIcon(iconType)
                            .setContentTitle("" + title)
                            .setContentText(encodeBody)
                            .setContentIntent(pendingClickIntent)
                            .setDeleteIntent(pendingDeleteIntent);
                    notification = notificationBuilder.build();
                } else {
                    Notification.Builder notificationBuilder = new Notification.Builder(getContext())
                            .setSmallIcon(Icon.createWithResource(getContext(), iconType))
                            .setContentTitle("" + title)
                            .setContentText(encodeBody)
                            .setContentIntent(pendingClickIntent)
                            .setDeleteIntent(pendingDeleteIntent);
                    // Android O+ requires a notification channel.
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                        String channelId = getContext().getResources().getString(R.string.host_notification_channel_id);
                        NotificationChannel channel = new NotificationChannel(
                                channelId,
                                getContext().getResources().getString(R.string.host_notification_channel_title),
                                NotificationManager.IMPORTANCE_DEFAULT);
                        channel.setDescription(getContext().getResources().getString(R.string.host_notification_channel_desc));
                        mNotification.createNotificationChannel(channel);
                        notificationBuilder.setChannelId(channelId);
                    }
                    notification = notificationBuilder.build();
                }

                // Build the notification and issues it with notification manager.
                mNotification.notify(notifyId, notification);

                response.putExtra(NotificationProfile.PARAM_NOTIFICATION_ID, notifyId);
                setResult(response, IntentDConnectMessage.RESULT_OK);
            }

            // Notify all registered onshow listeners for this service.
            List<Event> events = EventManager.INSTANCE.getEventList(
                    serviceId,
                    HostNotificationProfile.PROFILE_NAME,
                    null,
                    HostNotificationProfile.ATTRIBUTE_ON_SHOW);
            synchronized (events) {
                for (Event event : events) {
                    Intent intent = EventManager.createEventMessage(event);
                    setNotificationId(intent, String.valueOf(notifyId));
                    sendEvent(intent, event.getAccessToken());
                }
            }
            return true;
        }
    };

    /**
     * DELETE /notification/notify — cancels a notification by id and emits an
     * onclose event.
     */
    private final DConnectApi mDeleteNotifyApi = new DeleteApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_NOTIFY;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            String serviceId = getServiceID(request);
            String notificationId = getNotificationId(request);

            int notifyId = 0;
            try {
                notifyId = Integer.parseInt(notificationId);
            } catch (NumberFormatException e) {
                MessageUtils.setInvalidRequestParameterError(response, "notificationId is invalid.");
                return true;
            }

            NotificationManager mNotificationManager = (NotificationManager) getContext().getSystemService(
                    Context.NOTIFICATION_SERVICE);
            mNotificationManager.cancel(notifyId);
            setResult(response, IntentDConnectMessage.RESULT_OK);

            List<Event> events = EventManager.INSTANCE.getEventList(
                    serviceId,
                    HostNotificationProfile.PROFILE_NAME,
                    null,
                    HostNotificationProfile.ATTRIBUTE_ON_CLOSE);
            synchronized (events) {
                for (Event event : events) {
                    Intent intent = EventManager.createEventMessage(event);
                    intent.putExtra(HostNotificationProfile.PARAM_NOTIFICATION_ID, notificationId);
                    sendEvent(intent, event.getAccessToken());
                }
            }
            return true;
        }
    };

    /** PUT /notification/onclick — registers a click-event listener. */
    private final DConnectApi mPutOnClickApi = new PutApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_CLICK;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            mNotificationStatusReceiver = new NotificationStatusReceiver();
            IntentFilter intentFilter = new IntentFilter(ACTON_CLICK_NOTIFICATION);
            getContext().registerReceiver(mNotificationStatusReceiver, intentFilter);
            EventError error = EventManager.INSTANCE.addEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    /** PUT /notification/onclose — registers a close-event listener. */
    private final DConnectApi mPutOnCloseApi = new PutApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_CLOSE;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            mNotificationStatusReceiver = new NotificationStatusReceiver();
            IntentFilter intentFilter = new IntentFilter(ACTON_DELETE_NOTIFICATION);
            getContext().registerReceiver(mNotificationStatusReceiver, intentFilter);
            EventError error = EventManager.INSTANCE.addEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    /** PUT /notification/onshow — registers a show-event listener. */
    private final DConnectApi mPutOnShowApi = new PutApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_SHOW;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            EventError error = EventManager.INSTANCE.addEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    /** DELETE /notification/onclick — unregisters a click-event listener. */
    private final DConnectApi mDeleteOnClickApi = new DeleteApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_CLICK;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            EventError error = EventManager.INSTANCE.removeEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                switch (error) {
                    case FAILED:
                        MessageUtils.setUnknownError(response, "Do not unregister event.");
                        break;
                    case INVALID_PARAMETER:
                        MessageUtils.setInvalidRequestParameterError(response);
                        break;
                    case NOT_FOUND:
                        MessageUtils.setUnknownError(response, "Event not found.");
                        break;
                    default:
                        MessageUtils.setUnknownError(response);
                        break;
                }
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    /** DELETE /notification/onclose — unregisters a close-event listener. */
    private final DConnectApi mDeleteOnCloseApi = new DeleteApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_CLOSE;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            EventError error = EventManager.INSTANCE.removeEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                switch (error) {
                    case FAILED:
                        MessageUtils.setUnknownError(response, "Do not unregister event.");
                        break;
                    case INVALID_PARAMETER:
                        MessageUtils.setInvalidRequestParameterError(response);
                        break;
                    case NOT_FOUND:
                        MessageUtils.setUnknownError(response, "Event not found.");
                        break;
                    default:
                        MessageUtils.setUnknownError(response);
                        break;
                }
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    /** DELETE /notification/onshow — unregisters a show-event listener. */
    private final DConnectApi mDeleteOnShowApi = new DeleteApi() {
        @Override
        public String getAttribute() {
            return ATTRIBUTE_ON_SHOW;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            EventError error = EventManager.INSTANCE.removeEvent(request);
            if (error == EventError.NONE) {
                setResult(response, DConnectMessage.RESULT_OK);
            } else {
                switch (error) {
                    case FAILED:
                        MessageUtils.setUnknownError(response, "Do not unregister event.");
                        break;
                    case INVALID_PARAMETER:
                        MessageUtils.setInvalidRequestParameterError(response);
                        break;
                    case NOT_FOUND:
                        MessageUtils.setUnknownError(response, "Event not found.");
                        break;
                    default:
                        MessageUtils.setUnknownError(response);
                        break;
                }
                setResult(response, DConnectMessage.RESULT_ERROR);
            }
            return true;
        }
    };

    public HostNotificationProfile() {
        addApi(mPostNotifyApi);
        addApi(mDeleteNotifyApi);
        addApi(mPutOnClickApi);
        addApi(mPutOnCloseApi);
        addApi(mPutOnShowApi);
        addApi(mDeleteOnClickApi);
        addApi(mDeleteOnCloseApi);
        addApi(mDeleteOnShowApi);
    }

    /**
     * Translates click/delete broadcasts from posted notifications into
     * DeviceConnect onclick/onclose events.
     */
    private class NotificationStatusReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(final Context context, final Intent intent) {
            String profile;

            if (intent.getAction() == null) {
                return;
            }

            // Map the broadcast action onto the event attribute that listeners
            // were registered under.
            if (intent.getAction().equals(ACTON_CLICK_NOTIFICATION)) {
                profile = HostNotificationProfile.ATTRIBUTE_ON_CLICK;
            } else if (intent.getAction().equals(ACTON_DELETE_NOTIFICATION)) {
                // FIX: previously the broadcast-action string
                // (ACTON_DELETE_NOTIFICATION) was used as the event attribute,
                // so getEventList() never matched the listeners registered
                // under ATTRIBUTE_ON_CLOSE and onclose events were dropped.
                profile = HostNotificationProfile.ATTRIBUTE_ON_CLOSE;
            } else {
                return;
            }

            synchronized (this) {
                int notificationId = intent.getIntExtra("notificationId", -1);
                String mServiceId = intent.getStringExtra("serviceId");
                List<Event> events = EventManager.INSTANCE.getEventList(
                        mServiceId,
                        HostNotificationProfile.PROFILE_NAME,
                        null,
                        profile);
                for (int i = 0; i < events.size(); i++) {
                    Event event = events.get(i);
                    Intent evtIntent = EventManager.createEventMessage(event);
                    evtIntent.putExtra(HostNotificationProfile.PARAM_NOTIFICATION_ID, "" + notificationId);
                    sendEvent(evtIntent, event.getAccessToken());
                }
                notifyAll();
            }
        }
    }
}
package music_thing;

import java.io.File;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.ResourceBundle;
import javafx.application.Platform;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.Slider;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.DragEvent;
import javafx.scene.input.Dragboard;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.TransferMode;
import javafx.scene.layout.HBox;
import javafx.scene.shape.Polygon;
import javax.swing.JFileChooser;
import javax.swing.SwingUtilities;
import javax.swing.filechooser.FileNameExtensionFilter;

/**
 * FXML controller for the music player's main window.
 *
 * Wires the song table, transport controls, volume slider, and file
 * import (menu, drag-and-drop) to the static {@code MusicLibrary} /
 * {@code MusicController} back end.
 *
 * Threading note: file-chooser and file-copy work is pushed onto the
 * Swing EDT via SwingUtilities.invokeLater, and UI updates are bounced
 * back to the JavaFX thread via Platform.runLater. The two toolkits run
 * on different threads — keep that hand-off pattern when editing.
 *
 * @author csstudent
 */
public class FXMLDocumentController implements Initializable {
    @FXML
    private Button play;
    @FXML
    private TableView<Track> songList;
    @FXML
    private MenuItem fileImport;
    @FXML
    private MenuItem quit;
    @FXML
    private Label songTime;
    @FXML
    private Slider songVolumeBar;
    @FXML
    private TableColumn<Track,String> songCol;
    @FXML
    private TableColumn<Track,String> artistCol;
    @FXML
    private TableColumn<Track,String> albumCol;
    @FXML
    private TableColumn<Track,String> genreCol;
    @FXML
    private TableColumn<Track,Double> ratingCol;
    @FXML
    private TableColumn<Track,Integer> playcountCol;
    @FXML
    private TableColumn<Track,Integer> timeCol;
    // Two overlapping transport glyphs; exactly one is visible at a time.
    @FXML
    private HBox pauseSymbol;
    @FXML
    private Polygon playSymbol;

    /**
     * Play/pause button handler. Three cases:
     * 1) nothing playing -> start the selected track;
     * 2) playing but a different track is selected -> switch to it;
     * 3) playing the selected track -> pause.
     * Afterwards re-selects the current track row and refocuses the table.
     * No-op when the library is empty.
     */
    @FXML
    private void play(ActionEvent event) {
        if(MusicLibrary.size()>0){
            if(!MusicController.getPlaying()){
                MusicController.play(MusicLibrary.getSelectedTrack(songList), songVolumeBar.getValue());
                pauseSymbol.setVisible(true);
                playSymbol.setVisible(false);
            }else if(MusicController.getPlaying() && MusicLibrary.getSelectedTrack(songList)!=MusicController.getCurrentTrack()){
                // Reference comparison is intentional here: "same Track object
                // as the one currently playing".
                MusicController.play(MusicLibrary.getSelectedTrack(songList), songVolumeBar.getValue());
                pauseSymbol.setVisible(true);
                playSymbol.setVisible(false);
            }else{
                MusicController.pause();
                pauseSymbol.setVisible(false);
                playSymbol.setVisible(true);
            }
            songList.getSelectionModel().select(MusicLibrary.getTrackNumber());
            songList.requestFocus();
        }
    }

    /** Menu handler: exits the JavaFX application. */
    @FXML
    private void quit(ActionEvent event){
        Platform.exit();
    }

    /** Pushes the slider's current value to the playback volume. */
    @FXML
    private void changeVolume(MouseEvent event){
        MusicController.setVolume(songVolumeBar.getValue());
    }

    /**
     * Deletes the selected track after a confirmation dialog: stops playback
     * if it is the current track, removes the file from the music/ folder,
     * drops it from the library, and persists the library.
     */
    @FXML
    private void deleteFile(ActionEvent event){
        Alert alert = new Alert(AlertType.CONFIRMATION);
        alert.setTitle("Delete?");
        alert.setHeaderText(null);
        alert.setContentText("Are you sure you want to delete?");
        Optional<ButtonType> result = alert.showAndWait();
        if (result.get() == ButtonType.OK){
            Platform.runLater(() -> {
                try{
                    Track toDelete = MusicLibrary.getSelectedTrack(songList);
                    if(MusicController.getCurrentTrack()==toDelete){
                        MusicController.stop();
                    }
                    Files.delete(Paths.get("music/"+toDelete.getPath()));
                    MusicLibrary.removeTrack(toDelete);
                    MusicLibrary.setTrack(songList.getFocusModel().getFocusedCell().getRow());
                }catch(Exception e){}
                // NOTE(review): deletion failures are silently swallowed above —
                // presumably deliberate best-effort, but consider logging.
                MusicLibrary.save();
            });
        } else {}
    }

    /** Drag-over handler: accept the drag only when it carries files. */
    @FXML
    private void fileDragged(DragEvent event) {
        Dragboard db = event.getDragboard();
        if (db.hasFiles()) {
            event.acceptTransferModes(TransferMode.COPY);
        } else {
            event.consume();
        }
    }

    /**
     * Drop handler: imports every dropped file on the Swing EDT, then shows
     * the "import complete" alert back on the JavaFX thread.
     */
    @FXML
    private void importFromDrag(DragEvent event){
        Dragboard db = event.getDragboard();
        boolean success = false;
        if (db.hasFiles()) {
            success = true;
            for (File file : db.getFiles()) {
                SwingUtilities.invokeLater(() -> {importFile(file);});
            }
        }
        event.setDropCompleted(success);
        event.consume();
        if(success){
            // Queued after the import jobs on the EDT, so the alert fires once
            // the copies queued above have been processed.
            SwingUtilities.invokeLater(() -> {Platform.runLater(FXMLDocumentController::alertImportComplete);});
        }
    }

    /**
     * Menu handler: opens a Swing JFileChooser (multi-select, files or
     * directories) filtered to the supported audio extensions and imports
     * every selection.
     */
    @FXML
    private void importFromMenu(ActionEvent event){
        SwingUtilities.invokeLater(() -> {
            JFileChooser chooser = new JFileChooser();
            FileNameExtensionFilter filter = new FileNameExtensionFilter(
                "Supported Audio Files", "mp3", "mid", "m4a", "wav", "aiff");
            chooser.setFileFilter(filter);
            chooser.setMultiSelectionEnabled(true);
            chooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
            int result = chooser.showOpenDialog(null);
            if(result == JFileChooser.APPROVE_OPTION) {
                File[] files = chooser.getSelectedFiles();
                if(files!=null){
                    for(File file: java.util.Arrays.asList(files)) importFile(file);
                    Platform.runLater(FXMLDocumentController::alertImportComplete);
                }
            }
        });
    }

    /** Shows the modal "Import Complete" information dialog (JavaFX thread). */
    public static void alertImportComplete(){
        Alert alert = new Alert(AlertType.INFORMATION);
        alert.setTitle("Finished Importing");
        alert.setHeaderText(null);
        alert.setContentText("Import Complete");
        alert.showAndWait();
    }

    /**
     * Imports one file or (recursively) one directory: copies a supported
     * audio file into music/ unless already present, registers it with the
     * library under its SongType, and schedules a library save. Unsupported
     * extensions are skipped; copy errors are swallowed.
     */
    private void importFile(File file){
        if(file.isFile()){
            File copyTo = new File("music/"+file.getName());
            try{
                if(!copyTo.exists()){
                    if(file.getName().toLowerCase().endsWith("mp3")){
                        Files.copy(file.toPath(), copyTo.toPath());
                        MusicLibrary.addSong(new Track(SongType.MP3, file.getName()));
                    }else if(file.getName().toLowerCase().endsWith("mid")){
                        Files.copy(file.toPath(), copyTo.toPath());
                        MusicLibrary.addSong(new Track(SongType.MIDI, file.getName()));
                    }else if(file.getName().toLowerCase().endsWith("m4a")){
                        Files.copy(file.toPath(), copyTo.toPath());
                        MusicLibrary.addSong(new Track(SongType.AAC, file.getName()));
                    }else if(file.getName().toLowerCase().endsWith("aiff")){
                        Files.copy(file.toPath(), copyTo.toPath());
                        MusicLibrary.addSong(new Track(SongType.AIFF, file.getName()));
                    }else if(file.getName().toLowerCase().endsWith("wav")){
                        Files.copy(file.toPath(), copyTo.toPath());
                        MusicLibrary.addSong(new Track(SongType.WAV, file.getName()));
                    }//else{
                    //add alert file not supported
                }
            }catch (Exception e){}
            // NOTE(review): copy/registration failures are silently ignored —
            // presumably deliberate best-effort; consider surfacing an alert.
        }else if(file.isDirectory()){
            for(File thing : file.listFiles()) importFile(thing);
        }
        Platform.runLater(MusicLibrary::save);
    }

    /**
     * Loads the persisted library, ensures the music/ folder exists, binds
     * each table column to the matching Track property, and attaches the
     * library as the table's backing list.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        MusicLibrary.load();
        File music = new File("music");
        if(!music.exists())music.mkdir();
        // NOTE(review): raw PropertyValueFactory — the property names must
        // match Track's getters (getName, getArtist, ...).
        songCol.setCellValueFactory(
                new PropertyValueFactory("name"));
        artistCol.setCellValueFactory(
                new PropertyValueFactory("artist"));
        albumCol.setCellValueFactory(
                new PropertyValueFactory("album"));
        genreCol.setCellValueFactory(
                new PropertyValueFactory("genre"));
        ratingCol.setCellValueFactory(
                new PropertyValueFactory("rating"));
        playcountCol.setCellValueFactory(
                new PropertyValueFactory("playCount"));
        timeCol.setCellValueFactory(
                new PropertyValueFactory("length"));
        songList.setItems(MusicLibrary.getLibrary());
    }
}
package org.fiteagle.core.repo; import java.util.HashMap; import java.util.Map; import java.util.logging.Logger; import org.fiteagle.api.core.MessageBusMsgFactory; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryParseException; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; public class QueryExecuter { private static final String FUSEKI_SERVICE = "http://localhost:3030/ds/query"; @SuppressWarnings("unused") private static Logger LOGGER = Logger.getLogger(QueryExecuter.class.toString()); public static ResultSet executeSparqlSelectQuery(String queryString) throws QueryParseException{ ResultSet rs = null; QueryExecution qe = QueryExecutionFactory.sparqlService(FUSEKI_SERVICE, queryString); rs = qe.execSelect(); return rs; } public static Model executeSparqlDescribeQuery(String queryString) throws QueryParseException{ Model rs = null; QueryExecution qe = QueryExecutionFactory.sparqlService(FUSEKI_SERVICE, queryString); rs = qe.execDescribe(); //todo: find a better way to set our own common known prefixes (e.g. omn, wgs, ...) Map<String, String> nsPrefix = new HashMap<>(); nsPrefix = rs.getNsPrefixMap(); for(Map.Entry<String, String> entry : nsPrefix.entrySet()){ if (entry.getKey().toString().contains("j.")){ rs.removeNsPrefix(entry.getKey()); } } // rs.removeNsPrefix("j.0"); // rs.removeNsPrefix("j.1"); // rs.removeNsPrefix("j.2"); rs.setNsPrefix("omn", "http://fiteagle.org/ontology rs.setNsPrefix("wgs", "http://www.w3.org/2003/01/geo/wgs84_pos rs.setNsPrefix("foaf", "http://xmlns.com/foaf/0.1/"); rs.setNsPrefix("mightyrobot", "http://fiteagle.org/ontology/adapter/mightyrobot //MessageBusMsgFactory.setCommonPrefixes(rs); return rs; } }
package Servlet; import java.io.IOException; import java.io.PrintWriter; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import Archivos.RegresarUsuarios; import Archivos.Usuario1; import java.util.ArrayList; import org.json.simple.*; public class RegresarUsuariosServlet extends HttpServlet { private static final long serialVersionUID = 1L; public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html"); PrintWriter out = response.getWriter(); String e = request.getParameter("username"); ArrayList<Usuario1> u = RegresarUsuarios.validate(e); if (u!=null) { String format = request.getParameter("format"); if (format.equals("json")) { JSONObject json = new JSONObject(); json.put("success", true); JSONArray users = new JSONArray(); for (int i = 0; i < u.size(); i++) { JSONObject user = new JSONObject(); user.put("idUsuario", u.get(i).getidUsuario()); user.put("usuario", u.get(i).getidUsuario()); user.put("password", u.get(i).getPassword()); user.put("nickname",u.get(i).getNickname()); user.put("email", u.get(i).getEmail()); user.put("status",u.get(i).getStatus()); users.add(user); } json.put("users", users); out.print(json); } } else { String format = request.getParameter("format"); if (format.equals("json")) { JSONObject json = new JSONObject(); json.put("success", false); json.put("message","ERROR"); out.print(json); } } out.close(); } }
package edu.cs4460.msd.visual.maps; import processing.core.PApplet; import ch.randelshofer.tree.NodeInfo; import ch.randelshofer.tree.circlemap.CirclemapNode; import ch.randelshofer.tree.circlemap.CirclemapTree; import de.fhpotsdam.unfolding.UnfoldingMap; import de.fhpotsdam.unfolding.geo.Location; import de.fhpotsdam.unfolding.providers.MBTilesMapProvider; import de.fhpotsdam.unfolding.utils.MapUtils; import de.fhpotsdam.unfolding.utils.ScreenPosition; import edu.cs4460.msd.backend.genre.GenreFilter; import edu.cs4460.msd.backend.maps_works.ContinentData; import edu.cs4460.msd.backend.utilities.PathHandler; import edu.cs4460.msd.backend.visual_abstract.AbstractMap; import edu.cs4460.msd.visual.circles.CirclemapDraw; public class GenreLocationMap extends AbstractMap { private PApplet parent; private int x, y, width, height; private CirclemapTree[] models; private CirclemapDraw[] cmDraws; private NodeInfo[] infos; private boolean[] drawContinent = {false, false, false, false, false, false}; private CirclemapNode hoverNode; public GenreLocationMap(PApplet p, int x, int y, int width, int height, CirclemapTree[] models) { this.parent = p; this.x = x; this.y = y; this.width = width; this.height = height; this.models = models; this.cmDraws = new CirclemapDraw[6]; this.infos = new NodeInfo[6]; for (int i = 0; i < 6; i++) { this.cmDraws[i] = new CirclemapDraw(this.models[i]); this.cmDraws[i].setRadius(50); ContinentData cd = new ContinentData(); Location loc = cd.getContinentCenter(ContinentData.getContinents()[i]); ScreenPosition pos = map.getScreenPosition(loc); this.cmDraws[i].setCX(pos.x); this.cmDraws[i].setCY(pos.y); this.infos[i] = this.models[i].getInfo(); } PathHandler ph = new PathHandler(); String mbTilesConnectionString = "jdbc:sqlite:" + ph.getPathToResource("blankLight-1-3.mbtiles"); map = new UnfoldingMap(parent, "detail", x, y, width, height, true, false, new MBTilesMapProvider(mbTilesConnectionString)); map.setPanningRestriction(getMapCenter(), 0); 
map.setZoomRange(1, 1); MapUtils.createDefaultEventDispatcher(parent, map); } public void draw() { map.draw(); for(int i = 0; i < 6; i++) { if (drawContinent[i]) { cmDraws[i].drawTree(parent); } } } public void flipDrawContinent(int index) { drawContinent[index] = !drawContinent[index]; } private Location getMapCenter() { return getMapLocation(x + width / 2,y + height / 2); } public Location getMapLocation(int mouseX, int mouseY) { return map.getLocation(mouseX, mouseY); } @Override public void updateFilter(GenreFilter filter) { // TODO Auto-generated method stub } }
package edu.cs4460.msd.visual.maps;

import java.awt.Color;

import processing.core.PApplet;
import ch.randelshofer.tree.NodeInfo;
import ch.randelshofer.tree.circlemap.CirclemapNode;
import ch.randelshofer.tree.circlemap.CirclemapTree;
import de.fhpotsdam.unfolding.UnfoldingMap;
import de.fhpotsdam.unfolding.geo.Location;
import de.fhpotsdam.unfolding.providers.MBTilesMapProvider;
import de.fhpotsdam.unfolding.utils.MapUtils;
import de.fhpotsdam.unfolding.utils.ScreenPosition;
import edu.cs4460.msd.backend.genre.GenreFilter;
import edu.cs4460.msd.backend.maps_works.ContinentData;
import edu.cs4460.msd.backend.utilities.PathHandler;
import edu.cs4460.msd.backend.visual_abstract.AbstractMap;
import edu.cs4460.msd.visual.circles.CirclemapDraw;
import edu.cs4460.msd.visual.controls.ToolTip;

/**
 * World map overlaid with one circle-map (genre hierarchy) per continent,
 * with hover highlighting and a tooltip for the hovered node.
 */
public class GenreLocationMap extends AbstractMap {
    private PApplet parent;
    private int x, y, width, height;
    private CirclemapTree[] models;
    private CirclemapDraw[] cmDraws;
    private NodeInfo[] infos;
    // One visibility flag per continent, indexed like ContinentData.getContinents().
    private boolean[] drawContinent = {false, false, false, false, false, false};
    // Node currently under the mouse (null when none) and which continent it belongs to.
    private CirclemapNode hoverNode;
    private int hoverIndex;
    private boolean isToolTipVisible = true;
    private ToolTip tooltip;

    /**
     * Builds the map first (the loop below needs map.getScreenPosition()),
     * then one CirclemapDraw per continent anchored at that continent's
     * geographic center, and finally the shared tooltip.
     *
     * @param p      parent Processing applet
     * @param x      panel x
     * @param y      panel y
     * @param width  panel width
     * @param height panel height
     * @param models one circle-map model per continent (length 6)
     */
    public GenreLocationMap(PApplet p, int x, int y, int width, int height, CirclemapTree[] models) {
        this.parent = p;
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
        PathHandler ph = new PathHandler();
        String mbTilesConnectionString = "jdbc:sqlite:" + ph.getPathToResource("blankLight-1-3.mbtiles");
        map = new UnfoldingMap(parent, "detail", x, y, width, height, true, false, new MBTilesMapProvider(mbTilesConnectionString));
        // Lock panning to the panel center and pin the zoom level.
        map.setPanningRestriction(getMapCenter(), 0);
        map.setZoomRange(1, 1);
        MapUtils.createDefaultEventDispatcher(parent, map);
        this.models = models;
        this.cmDraws = new CirclemapDraw[6];
        this.infos = new NodeInfo[6];
        for (int i = 0; i < 6; i++) {
            this.cmDraws[i] = new CirclemapDraw(this.models[i]);
            this.cmDraws[i].setRadius(40);
            ContinentData cd = new ContinentData();
            // Anchor each circle map at its continent's center on screen.
            Location loc = cd.getContinentCenter(ContinentData.getContinents()[i]);
            ScreenPosition pos = map.getScreenPosition(loc);
            this.cmDraws[i].setCX(pos.x);
            this.cmDraws[i].setCY(pos.y);
            this.infos[i] = this.models[i].getInfo();
        }
        this.tooltip = new ToolTip(parent, 0, 0);
    }

    /**
     * Draws the base map, every enabled continent, and — when a non-root
     * node with positive weight is hovered — a red highlight plus tooltip.
     */
    public void draw() {
        map.draw();
        for(int i = 0; i < 6; i++) {
            if (drawContinent[i]) {
                cmDraws[i].drawTree(parent);
            }
        }
        if (hoverNode != null) {
            // Only highlight nodes that actually carry weight.
            if(infos[hoverIndex].getWeight(hoverNode.getDataNodePath()) > 0) {
                cmDraws[hoverIndex].drawNodeBounds(parent, hoverNode, Color.red);
                // The tree root gets no tooltip.
                if (this.isToolTipVisible && hoverNode != cmDraws[hoverIndex].getRoot()) {
                    tooltip.draw();
                }
            }
        }
    }

    /**
     * Hit-tests the mouse against every visible continent; on the first hit,
     * records the hovered node/continent and positions the tooltip just
     * above the cursor. Note: hoverNode keeps the last continent's result
     * (possibly null) when nothing is hit.
     *
     * @param mx mouse x
     * @param my mouse y
     */
    public void mouseMoved(int mx, int my) {
        for (int i = 0; i < 6; i++) {
            if (drawContinent[i]) {
                hoverNode = cmDraws[i].getNodeAt(mx, my);
                if (hoverNode != null) {
                    hoverIndex = i;
                    tooltip.setText(infos[i].getTooltip(hoverNode.getDataNodePath()));
                    tooltip.setXpos(mx - 5);
                    tooltip.setYpos(my - 64);
                    break;
                }
            }
        }
    }

    /**
     * Sets visibility of one continent's circle map.
     *
     * @param index continent index (0-5)
     * @param bool  true to draw that continent
     */
    public void setDrawContinent(int index, boolean bool) {
        drawContinent[index] = bool;
    }

    /** @return the geographic location at the center of this panel */
    private Location getMapCenter() {
        return getMapLocation(x + width / 2,y + height / 2);
    }

    /**
     * Converts a screen coordinate to a geographic location.
     *
     * @param mouseX screen x
     * @param mouseY screen y
     * @return the location under that point
     */
    public Location getMapLocation(int mouseX, int mouseY) {
        return map.getLocation(mouseX, mouseY);
    }

    @Override
    public void updateFilter(GenreFilter filter) {
        // TODO Auto-generated method stub
    }
}
package com.rultor.web;

import com.jcabi.aspects.Loggable;
import com.rexsl.page.JaxbBundle;
import com.rexsl.page.PageBuilder;
import com.rultor.spi.Markdown;
import com.rultor.spi.Time;
import com.rultor.timeline.Event;
import com.rultor.timeline.Product;
import com.rultor.timeline.Tag;
import com.rultor.timeline.Timeline;
import com.rultor.timeline.Timelines;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.logging.Level;
import javax.validation.constraints.NotNull;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

/**
 * Front page of get.
 *
 * @author Yegor Bugayenko (yegor@tpc2.com)
 * @version $Id$
 * @since 1.0
 * @checkstyle MultipleStringLiterals (500 lines)
 * @checkstyle ClassDataAbstractionCoupling (500 lines)
 */
@Path("/t/{name:[a-z]+}")
@Loggable(Loggable.DEBUG)
public final class TimelineRs extends BaseRs {

    /**
     * Timeline.
     */
    private transient Timeline timeline;

    /**
     * Inject it from query.
     * Called by JAX-RS with the {name} path segment before any resource
     * method runs; an unknown name is translated into an HTTP 404.
     * @param name Name of get
     */
    @PathParam("name")
    public void setName(@NotNull(message = "unit name can't be NULL") final String name) {
        try {
            this.timeline = this.timelines().get(name);
        } catch (Timelines.TimelineNotFoundException ex) {
            // Preserve the cause while mapping "not found" to a 404 response.
            throw new WebApplicationException(
                ex, HttpURLConnection.HTTP_NOT_FOUND
            );
        }
    }

    /**
     * Get entrance page JAX-RS response.
     * Renders the timeline name plus all events from "now" backwards
     * through the timeline XSL stylesheet.
     * @return The JAX-RS response
     */
    @GET
    @Path("/")
    public Response index() {
        return new PageBuilder()
            .stylesheet("/xsl/timeline.xsl")
            .build(EmptyPage.class)
            .init(this)
            .append(new JaxbBundle("name", this.timeline.name()))
            .append(
                new JaxbBundle("events").add(
                    // new Time() == now; events are fetched relative to it.
                    new JaxbBundle.Group<Event>(
                        this.timeline.events(new Time())) {
                        @Override
                        public JaxbBundle bundle(final Event event) {
                            return TimelineRs.this.event(event);
                        }
                    }
                )
            )
            .render()
            .build();
    }

    /**
     * Post into it.
     * Creates an event with no tags/products, then exits via a flash
     * redirect (thrown as a WebApplicationException — the throw IS the
     * response here, not an error).
     * @param text Text of the event
     * @return The JAX-RS response
     */
    @POST
    @Path("/post")
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    public Response post(@FormParam("text") @NotNull final String text) {
        final Event event = this.timeline.post(
            text, new ArrayList<Tag>(0), new ArrayList<Product>(0)
        );
        throw this.flash().redirect(
            this.uriInfo().getBaseUri(),
            String.format("Event `%s` successfully posted", event.time()),
            Level.INFO
        );
    }

    /**
     * Convert event to JaxbBundle.
     * Serializes time, human-readable "when", text, tags, and products.
     * @param event The event to convert
     * @return Bundle
     */
    private JaxbBundle event(final Event event) {
        return new JaxbBundle("event")
            .add("time", event.time().toString())
            .up()
            .add("when", event.time().when())
            .up()
            .add("text", event.text())
            .up()
            .add("tags")
            .add(
                new JaxbBundle.Group<Tag>(event.tags()) {
                    @Override
                    public JaxbBundle bundle(final Tag tag) {
                        return new JaxbBundle("tag")
                            .add("label", tag.label())
                            .up()
                            .add("level", tag.level().toString())
                            .up();
                    }
                }
            )
            .up()
            .add(this.products(event.products()))
            .up();
    }

    /**
     * Convert products to JaxbBundle.
     * @param products Products to convert
     * @return Bundle
     */
    private JaxbBundle products(final Iterable<Product> products) {
        return new JaxbBundle("products").add(
            new JaxbBundle.Group<Product>(products) {
                @Override
                public JaxbBundle bundle(final Product product) {
                    return TimelineRs.this.product(product);
                }
            }
        );
    }

    /**
     * Convert product to JaxbBundle.
     * The product's markdown is rendered to HTML before serialization.
     * @param product Product to convert
     * @return Bundle
     */
    private JaxbBundle product(final Product product) {
        return new JaxbBundle("product")
            .add("name", product.name())
            .up()
            .add("html", new Markdown(product.markdown()).html())
            .up();
    }
}
// AutoPluginInvoker.java package imagej.plugin; import ij.IJ; import ij.ImagePlus; import ij.ImageStack; import ij.WindowManager; import ij.gui.GenericDialog; import ij.gui.ImageWindow; import ij.gui.NonBlockingGenericDialog; import ij.gui.Roi; import ij.plugin.filter.PlugInFilterRunner; import ij.plugin.frame.RoiManager; import ij.process.ByteProcessor; import ij.process.ColorProcessor; import ij.process.ImageProcessor; import imagej.io.ImageOpener; import java.awt.Canvas; import java.awt.Color; import java.awt.Rectangle; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.prefs.Preferences; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JPanel; import mpicbg.imglib.cursor.Cursor; import mpicbg.imglib.cursor.LocalizableByDimCursor; import mpicbg.imglib.image.Image; //import mpicbg.imglib.Type; import mpicbg.imglib.type.numeric.RealType; import net.java.sezpoz.Index; import net.java.sezpoz.IndexItem; /** * This class automatically invokes plugins when an image is loaded. * * Plugins should implement the IAutoPlugin interface and be labelled * with the "@Dimensions" annotation. They are matched on the basis of * these declared dimension names. * * The IAutoDisplayPlugin interface indicates an automatic plugin also * displays a representation of the image. There is no need to display * the default IJ representation of the image. * * @author Aivar Grislis */ public class AutoPluginInvoker { /** * Checks image dimensions against list of automatic plugins. * If there is a match, runs plugin. If there is more than one * match, pops up a dialog box to choose plugin, runs selection. 
* * @param img image just opened * @return whether a plugin was automatically run */ public boolean matchPlugin(ImagePlus imp) { boolean handled = false; // Get imglib Image from ImagePlus Image<?> image = null; ImageStack stack = imp.getStack(); if (stack != null) { image = stack.getStorage(); } // if successful if (null != image) { List<IndexItem> plugins = new ArrayList<IndexItem>(); // get set of dimension names Set<String> imageSet = dimensionSet(image.getName()); // look for matches for (final IndexItem<Dimensions, IAutoPlugin> item : Index.load(Dimensions.class, IAutoPlugin.class)) { // extract set of required and optional dimension names from annotation Set<String> requiredSet = getSet(item.annotation().required()); Set<String> optionalSet = getSet(item.annotation().optional()); // look for appropriate matches if (isAppropriate(imageSet, requiredSet, optionalSet)) { plugins.add(item); } } // select a plugin IndexItem<Dimensions, IAutoPlugin> selectedPlugin = null; if (plugins.size() > 0) { if (1 == plugins.size()) { // one and only match, so run it selectedPlugin = plugins.get(0); } else { // allow user to choose String choices[] = new String[plugins.size()]; for (int i = 0; i < plugins.size(); ++i) { choices[i] = getPluginNameFromClassName(plugins.get(i).className()); } // throw up a dialog box int index = selectPluginDialog(choices); if (-1 != index) { selectedPlugin = plugins.get(index); } } } // run selected plugin if (null != selectedPlugin) { // create an instance IAutoPlugin instance = null; try { instance = selectedPlugin.instance(); } catch (InstantiationException e) { System.out.println("Error instantiating plugin " + e.getMessage()); } if (null != instance) { // show the newly-loaded image if necessary if (!(instance instanceof IAutoDisplayPlugin)) { imp.show(); } // run the plugin ImagePlus temp = WindowManager.getTempCurrentImage(); WindowManager.setTempCurrentImage(imp); new PlugInFilterRunner(instance, "", ""); 
WindowManager.setTempCurrentImage(temp); ; handled = true; } } } return handled; } /* * This method parses a string of the format: * "Name [X Y Timebins]" and builds a set with * the dimensions 'X', 'Y', and 'Timebins'. * * Temporary kludge. */ private Set<String> dimensionSet(String name) { Set<String> set = new HashSet<String>(); int startIndex = name.indexOf('[') + 1; int endIndex = name.indexOf(']'); String coded = name.substring(startIndex, endIndex); String dimensions[] = coded.split(" "); for (String dimension : dimensions) { set.add(dimension); } return set; } /** * Builds a set of strings from comma separated values * in an input string. * * @param commaSeparated * @return */ private Set<String> getSet(String commaSeparated) { Set<String> set = new HashSet<String>(); String[] elements = commaSeparated.split(","); for (String element: elements) { set.add(element); } return set; } /** * Sees whether the image's set of dimension names is appropriate * for the plugin's required and optional sets of dimension names. * * @param imageSet * @param requiredSet changed as side effect * @param optionalSet * @return whether appropriate */ private boolean isAppropriate(Set<String> imageSet, Set<String> requiredSet, Set<String> optionalSet) { boolean match = false; // image must have all required dimensions if (imageSet.containsAll(requiredSet)) { requiredSet.addAll(optionalSet); // any additional dimensions must be optional dimensions if (requiredSet.containsAll(imageSet)) { match = true; } } return match; } /** * Given a class name builds the plugin name. * For example "loci.whatever.What_Ever" becomes "What Ever". * * @param class name * @returns plugin name */ private String getPluginNameFromClassName(String className) { return className.substring(className.lastIndexOf('.') + 1).replace('_', ' '); } /** * Shows a dialog of appropriate plugins so that the user * can select one. 
* * @param choices array of plugin names * @return index of choice or -1 for no choice */ private int selectPluginDialog(String[] choices) { GenericDialog dialog = new GenericDialog("Select a Plugin"); dialog.addChoice( "Plugin", choices, choices[0]); dialog.showDialog(); if (dialog.wasCanceled()) { return -1; } return dialog.getNextChoiceIndex(); } }
package com.amirarcane.sample; import android.app.Activity; import android.app.Dialog; import android.content.ContentUris; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.Matrix; import android.net.Uri; import android.os.Bundle; import android.os.Environment; import android.provider.MediaStore; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.Gravity; import android.view.View; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import com.amirarcane.recentimages.RecentImages; import com.amirarcane.recentimages.thumbnailOptions.ImageAdapter; import com.jess.ui.TwoWayAdapterView; import com.jess.ui.TwoWayGridView; import java.io.File; import java.io.IOException; import java.util.ArrayList; public class MainActivity extends AppCompatActivity { private Uri imageUri; ArrayList<MenuItem> menuItems = new ArrayList<>(); private TwoWayGridView mImageGrid; private ImageView image; private static final int TAKE_PICTURE = 0; private static final int SELECT_PHOTO = 1; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); final View bottomSheet = getLayoutInflater().inflate(R.layout.bottom_sheet, null); image = (ImageView) findViewById(R.id.imageView); final TwoWayGridView gridview = (TwoWayGridView) bottomSheet.findViewById(R.id.gridview); final Dialog mBottomSheetDialog = new Dialog(this, R.style.MaterialDialogSheet); mBottomSheetDialog.setContentView(bottomSheet); mBottomSheetDialog.setCancelable(true); mBottomSheetDialog.getWindow().setLayout(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT); mBottomSheetDialog.getWindow().setGravity(Gravity.BOTTOM); menuItems.add(new MenuItem("Camera", R.drawable.ic_local_see_black_48dp)); menuItems.add(new 
MenuItem("Gallery", R.drawable.ic_action_image)); RecyclerView menu = (RecyclerView) bottomSheet.findViewById(R.id.menu); MenuAdapter menuAdapter = new MenuAdapter(menuItems); menu.setLayoutManager(new LinearLayoutManager(this)); menu.setAdapter(menuAdapter); menu.addOnItemTouchListener(new RecyclerItemClickListener(this, menu, new RecyclerItemClickListener.OnItemClickListener() { @Override public void onItemClick(View view, int i) { if (i == 0) { takePhoto(view); mBottomSheetDialog.dismiss(); } else if (i == 1) { Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
// AutoPluginInvoker.java
package imagej.plugin;

import ij.IJ;
import ij.ImagePlus;
import ij.ImageStack;
import ij.WindowManager;
import ij.gui.GenericDialog;
import ij.gui.ImageWindow;
import ij.gui.NonBlockingGenericDialog;
import ij.gui.Roi;
import ij.plugin.filter.PlugInFilterRunner;
import ij.plugin.frame.RoiManager;
import ij.process.ByteProcessor;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;
import imagej.io.ImageOpener;
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.prefs.Preferences;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JPanel;
import mpicbg.imglib.cursor.Cursor;
import mpicbg.imglib.cursor.LocalizableByDimCursor;
import mpicbg.imglib.image.Image;
//import mpicbg.imglib.Type;
import mpicbg.imglib.type.numeric.RealType;
import net.java.sezpoz.Index;
import net.java.sezpoz.IndexItem;

/**
 * This class automatically invokes plugins when an image is loaded.
 *
 * Plugins should implement the IAutoPlugin interface and be labelled
 * with the "@Dimensions" annotation.  They are matched on the basis of
 * these declared dimension names.
 *
 * The IAutoDisplayPlugin interface indicates an automatic plugin also
 * displays a representation of the image.  There is no need to display
 * the default IJ representation of the image.
 *
 * @author Aivar Grislis
 */
public class AutoPluginInvoker {

    /**
     * Checks image dimensions against list of automatic plugins.
     * If there is a match, runs plugin.  If there is more than one
     * match, pops up a dialog box to choose plugin, runs selection.
     *
     * @param imp image just opened
     * @return whether a plugin was automatically run
     */
    public boolean matchPlugin(ImagePlus imp) {
        boolean handled = false;

        // Get imglib Image from ImagePlus
        Image<?> image = null;
        ImageStack stack = imp.getStack();
        if (stack != null) {
            image = stack.getStorage();
        }

        // if successful
        if (null != image) {
            // FIX: was a raw List<IndexItem>; parameterize to match Index.load()
            List<IndexItem<Dimensions, IAutoPlugin>> plugins =
                    new ArrayList<IndexItem<Dimensions, IAutoPlugin>>();

            // get set of dimension names encoded in the image name
            Set<String> imageSet = dimensionSet(image.getName());

            // look for matches among all annotated auto-plugins
            for (final IndexItem<Dimensions, IAutoPlugin> item
                    : Index.load(Dimensions.class, IAutoPlugin.class)) {
                // extract set of required and optional dimension names from annotation
                Set<String> requiredSet = getSet(item.annotation().required());
                Set<String> optionalSet = getSet(item.annotation().optional());

                // look for appropriate matches
                if (isAppropriate(imageSet, requiredSet, optionalSet)) {
                    plugins.add(item);
                }
            }

            // select a plugin
            IndexItem<Dimensions, IAutoPlugin> selectedPlugin = null;
            if (plugins.size() > 0) {
                if (1 == plugins.size()) {
                    // one and only match, so run it
                    selectedPlugin = plugins.get(0);
                }
                else {
                    // allow user to choose
                    String[] choices = new String[plugins.size()];
                    for (int i = 0; i < plugins.size(); ++i) {
                        choices[i] = getPluginNameFromClassName(plugins.get(i).className());
                    }

                    // throw up a dialog box
                    int index = selectPluginDialog(choices);
                    if (-1 != index) {
                        selectedPlugin = plugins.get(index);
                    }
                }
            }

            // run selected plugin
            if (null != selectedPlugin) {
                // create an instance
                IAutoPlugin instance = null;
                try {
                    instance = selectedPlugin.instance();
                }
                catch (InstantiationException e) {
                    System.out.println("Error instantiating plugin " + e.getMessage());
                }

                if (null != instance) {
                    // show the newly-loaded image if necessary; an
                    // IAutoDisplayPlugin is responsible for its own display
                    if (!(instance instanceof IAutoDisplayPlugin)) {
                        imp.show();
                    }

                    // run the plugin (FIX: removed a stray empty statement here)
                    WindowManager.setTempCurrentImage(imp);
                    new PlugInFilterRunner(instance, "", "");
                    WindowManager.setTempCurrentImage(null);

                    handled = true;
                }
            }
        }
        return handled;
    }

    /*
     * This method parses a string of the format:
     * "Name [X Y Timebins]" and builds a set with
     * the dimensions 'X', 'Y', and 'Timebins'.
     *
     * Temporary kludge.
     */
    private Set<String> dimensionSet(String name) {
        Set<String> set = new HashSet<String>();
        int startIndex = name.indexOf('[') + 1;
        int endIndex = name.indexOf(']');
        // FIX: a name without a well-formed "[...]" suffix previously caused
        // StringIndexOutOfBoundsException (substring(0, -1)); treat it as
        // having no dimensions so no auto-plugin matches.
        if (endIndex < startIndex) {
            return set;
        }
        String coded = name.substring(startIndex, endIndex);
        String[] dimensions = coded.split(" ");
        for (String dimension : dimensions) {
            set.add(dimension);
        }
        return set;
    }

    /**
     * Builds a set of strings from comma separated values
     * in an input string.
     *
     * @param commaSeparated comma-separated dimension names
     * @return set of the individual names
     */
    private Set<String> getSet(String commaSeparated) {
        Set<String> set = new HashSet<String>();
        String[] elements = commaSeparated.split(",");
        for (String element : elements) {
            set.add(element);
        }
        return set;
    }

    /**
     * Sees whether the image's set of dimension names is appropriate
     * for the plugin's required and optional sets of dimension names.
     *
     * @param imageSet dimension names of the image
     * @param requiredSet changed as side effect (optional names are merged in)
     * @param optionalSet dimension names the plugin tolerates
     * @return whether appropriate
     */
    private boolean isAppropriate(Set<String> imageSet, Set<String> requiredSet, Set<String> optionalSet) {
        boolean match = false;
        // image must have all required dimensions
        if (imageSet.containsAll(requiredSet)) {
            requiredSet.addAll(optionalSet);
            // any additional dimensions must be optional dimensions
            if (requiredSet.containsAll(imageSet)) {
                match = true;
            }
        }
        return match;
    }

    /**
     * Given a class name builds the plugin name.
     * For example "loci.whatever.What_Ever" becomes "What Ever".
     *
     * @param className fully-qualified class name
     * @return plugin name
     */
    private String getPluginNameFromClassName(String className) {
        return className.substring(className.lastIndexOf('.') + 1).replace('_', ' ');
    }

    /**
     * Shows a dialog of appropriate plugins so that the user
     * can select one.
     *
     * @param choices array of plugin names (must be non-empty; only called
     *                when more than one plugin matched)
     * @return index of choice or -1 for no choice
     */
    private int selectPluginDialog(String[] choices) {
        GenericDialog dialog = new GenericDialog("Select a Plugin");
        dialog.addChoice("Plugin", choices, choices[0]);
        dialog.showDialog();
        if (dialog.wasCanceled()) {
            return -1;
        }
        return dialog.getNextChoiceIndex();
    }
}
// package
package org.mskcc.cbio.importer;

// imports
import org.mskcc.cbio.importer.*;
import org.mskcc.cbio.importer.model.*;
import org.mskcc.cbio.portal.dao.DaoCancerStudy;
import org.apache.commons.cli.*;
import org.apache.commons.logging.*;
import org.apache.log4j.PropertyConfigurator;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.io.*;
import java.util.*;
import java.text.SimpleDateFormat;

/**
 * Class which provides command line admin capabilities
 * to the importer tool.
 */
public class Admin implements Runnable {

    // our context file
    public static final String contextFile = "classpath:applicationContext-importer.xml";

    // date format for run_date arguments.
    // NOTE(review): SimpleDateFormat is not thread-safe; acceptable here since
    // this command-line tool is single-threaded.
    public static final SimpleDateFormat PORTAL_DATE_FORMAT = new SimpleDateFormat("MM/dd/yyyy");

    // context
    private static final ApplicationContext context = new ClassPathXmlApplicationContext(contextFile);

    // our logger
    private static final Log LOG = LogFactory.getLog(Admin.class);

    // options var
    private static final Options options = initializeOptions();

    // identifiers for init db command
    private static final String PORTAL_DATABASE = "portal";
    private static final String IMPORTER_DATABASE = "importer";

    // parsed command line
    private CommandLine commandLine;

    /**
     * Method to get beans by id
     *
     * @param beanID String
     * @return Object
     */
    private static Object getBean(String beanID) {
        return context.getBean(beanID);
    }

    /**
     * Method to initialize our static options var
     *
     * @return Options
     */
    private static Options initializeOptions() {

        // create each option
        Option help = new Option("help", "Print this message.");

        Option initializeDatabase = (OptionBuilder.withArgName("db_name")
                                     .hasArg()
                                     .withDescription("Initialize database(s). Valid " +
                                                      "database identifiers are: " +
                                                      "\"" + PORTAL_DATABASE + "\" and \"" +
                                                      IMPORTER_DATABASE + "\" or " +
                                                      "\"" + Config.ALL + "\".")
                                     .create("init_db"));

        Option fetchData = (OptionBuilder.withArgName("data_source:run_date:send_notification")
                            .hasArgs(3)
                            .withValueSeparator(':')
                            .withDescription("Fetch data from the given data_source and the given run date (mm/dd/yyyy). " +
                                             "Use \"" + Fetcher.LATEST_RUN_INDICATOR + "\" to retrieve the most current run or " +
                                             "when fetching clinical data.")
                            .create("fetch_data"));

        Option fetchReferenceData = (OptionBuilder.withArgName("reference_data")
                                     .hasArg()
                                     .withDescription("Fetch the given reference data." +
                                                      " Use \"" + Config.ALL + "\" to retrieve all reference data.")
                                     .create("fetch_reference_data"));

        Option oncotateMAF = (OptionBuilder.withArgName("maf_file")
                              .hasArg()
                              .withDescription("Run the given MAF though the Oncotator and OMA tools.")
                              .create("oncotate_maf"));

        Option oncotateAllMAFs = (OptionBuilder.withArgName("data_source")
                                  .hasArg()
                                  .withDescription("Run all MAFs in the given datasource though the Oncotator and OMA tools.")
                                  .create("oncotate_mafs"));

        Option convertData = (OptionBuilder.withArgName("portal:run_date:apply_overrides")
                              .hasArgs(3)
                              .withValueSeparator(':')
                              .withDescription("Convert data within the importer database " +
                                               "from the given run date (mm/dd/yyyy), " +
                                               "for the given portal. If apply_overrides is 't', " +
                                               "overrides will be substituted for data_source data " +
                                               "before staging files are created.")
                              .create("convert_data"));

        Option applyOverrides = (OptionBuilder.withArgName("portal:exclude_datatype:apply_case_lists")
                                 .hasArgs(3)
                                 .withValueSeparator(':')
                                 .withDescription("Replace staging files for the given portal " +
                                                  "with any exisiting overrides. If exclude_datatype is set, " +
                                                  "the datatype provided will not have overrides applied. If " +
                                                  "apply_case_lists is 'f', case lists will not be copied into staging directory.")
                                 .create("apply_overrides"));

        Option generateCaseLists = (OptionBuilder.withArgName("portal")
                                    .hasArg()
                                    .withDescription("Generate case lists for existing " +
                                                     "staging files for the given portal.")
                                    .create("generate_case_lists"));

        Option importReferenceData = (OptionBuilder.withArgName("reference_type")
                                      .hasArg()
                                      .withDescription("Import reference data for the given reference_type. "+
                                                       "Use \"" + Config.ALL + "\" to import all reference data.")
                                      .create("import_reference_data"));

        Option importTypesOfCancer = (OptionBuilder.hasArg(false)
                                      .withDescription("Import types of cancer.")
                                      .create("import_types_of_cancer"));

        Option importData = (OptionBuilder.withArgName("portal:init_portal_db:init_tumor_types:ref_data")
                             .hasArgs(4)
                             .withValueSeparator(':')
                             .withDescription("Import data for the given portal. " +
                                              "If init_portal_db is 't' a portal db will be created (an existing one will be clobbered. " +
                                              "If init_tumor_types is 't' tumor types will be imported " +
                                              "If ref_data is 't', all reference data will be imported prior to importing staging files.")
                             .create("import_data"));

        Option updateStudyData = (OptionBuilder.withArgName("portal:update_worksheet")
                                  .hasArgs(2)
                                  .withValueSeparator(':')
                                  .withDescription("Updates study data for the given portal. if update_worksheet is 't' " +
                                                   "UPDATE_AVAILABLE and IMPORT columns in cancer_studies google worksheet are updated.")
                                  .create("update_study_data"));

        Option importCaseLists = (OptionBuilder.withArgName("portal")
                                  .hasArgs(1)
                                  .withDescription("Import case lists for the given portal.")
                                  .create("import_case_lists"));

        Option copySegFiles = (OptionBuilder.withArgName("portal:seg_datatype:remote_user_name")
                               .hasArgs(3)
                               .withValueSeparator(':')
                               .withDescription("Copy's given portal's .seg files to location used for linking to IGV " +
                                                "from cBio Portal web site. 'ssh-add' should be executed prior to this " +
                                                "command to add your identity to the authentication agent.")
                               .create("copy_seg_files"));

        Option redeployWar = (OptionBuilder.withArgName("portal")
                              .hasArg()
                              .withDescription("Redeploy war for given portal. " +
                                               "'ssh-add' should be executed prior to this " +
                                               "command to add your identity to the authentication agent.")
                              .create("redeploy_war"));

        Option deleteCancerStudy = (OptionBuilder.withArgName("cancer_study_id")
                                    .hasArg()
                                    .withDescription("Delete a cancer study matching the given cancer study id.")
                                    .create("delete_cancer_study"));

        // create an options instance
        Options toReturn = new Options();

        // add options
        toReturn.addOption(help);
        toReturn.addOption(initializeDatabase);
        toReturn.addOption(fetchData);
        toReturn.addOption(fetchReferenceData);
        toReturn.addOption(oncotateMAF);
        toReturn.addOption(oncotateAllMAFs);
        toReturn.addOption(convertData);
        toReturn.addOption(applyOverrides);
        toReturn.addOption(generateCaseLists);
        toReturn.addOption(importReferenceData);
        toReturn.addOption(importTypesOfCancer);
        toReturn.addOption(importData);
        toReturn.addOption(updateStudyData);
        toReturn.addOption(importCaseLists);
        toReturn.addOption(copySegFiles);
        toReturn.addOption(redeployWar);
        toReturn.addOption(deleteCancerStudy);

        // outta here
        return toReturn;
    }

    /**
     * Parses the arguments.  On a parse failure, usage is printed and
     * commandLine remains null (run() then becomes a no-op).
     *
     * @param args String[]
     */
    public void setCommandParameters(String[] args) {

        // create our parser
        CommandLineParser parser = new PosixParser();

        // parse
        try {
            commandLine = parser.parse(options, args);
        }
        catch (Exception e) {
            Admin.usage(new PrintWriter(System.out, true));
        }
    }

    /**
     * Executes the desired portal command.
     */
    @Override
    public void run() {

        // sanity check
        if (commandLine == null) {
            return;
        }

        try {
            // usage
            if (commandLine.hasOption("help")) {
                Admin.usage(new PrintWriter(System.out, true));
            }
            // initialize import database
            else if (commandLine.hasOption("init_db")) {
                initializeDatabase(commandLine.getOptionValue("init_db"));
            }
            // fetch
            else if (commandLine.hasOption("fetch_data")) {
                String[] values = commandLine.getOptionValues("fetch_data");
                fetchData(values[0], values[1], (values.length == 3) ? values[2] : "");
            }
            // fetch reference data
            else if (commandLine.hasOption("fetch_reference_data")) {
                fetchReferenceData(commandLine.getOptionValue("fetch_reference_data"));
            }
            // oncotate MAF
            else if (commandLine.hasOption("oncotate_maf")) {
                oncotateMAF(commandLine.getOptionValue("oncotate_maf"));
            }
            // oncotate MAFs
            else if (commandLine.hasOption("oncotate_mafs")) {
                oncotateAllMAFs(commandLine.getOptionValue("oncotate_mafs"));
            }
            // apply overrides
            else if (commandLine.hasOption("apply_overrides")) {
                String[] values = commandLine.getOptionValues("apply_overrides");
                applyOverrides(values[0], (values.length >= 2) ? values[1] : "", (values.length == 3) ? values[2] : "");
            }
            // convert data
            else if (commandLine.hasOption("convert_data")) {
                String[] values = commandLine.getOptionValues("convert_data");
                convertData(values[0], values[1], (values.length == 3) ? values[2] : "");
            }
            // generate case lists
            else if (commandLine.hasOption("generate_case_lists")) {
                generateCaseLists(commandLine.getOptionValue("generate_case_lists"));
            }
            // import reference data
            else if (commandLine.hasOption("import_reference_data")) {
                importReferenceData(commandLine.getOptionValue("import_reference_data"));
            }
            else if (commandLine.hasOption("import_types_of_cancer")) {
                importTypesOfCancer();
            }
            // import data
            else if (commandLine.hasOption("import_data")) {
                String[] values = commandLine.getOptionValues("import_data");
                importData(values[0], values[1], values[2], values[3]);
            }
            else if (commandLine.hasOption("update_study_data")) {
                String[] values = commandLine.getOptionValues("update_study_data");
                updateStudyData(values[0], values[1]);
            }
            // import case lists
            else if (commandLine.hasOption("import_case_lists")) {
                String[] values = commandLine.getOptionValues("import_case_lists");
                importCaseLists(values[0]);
            }
            // copy seg files
            else if (commandLine.hasOption("copy_seg_files")) {
                String[] values = commandLine.getOptionValues("copy_seg_files");
                copySegFiles(values[0], values[1], values[2]);
            }
            // redeploy war
            else if (commandLine.hasOption("redeploy_war")) {
                redeployWar(commandLine.getOptionValue("redeploy_war"));
            }
            else if (commandLine.hasOption("delete_cancer_study")) {
                deleteCancerStudy(commandLine.getOptionValue("delete_cancer_study"));
            }
            else {
                Admin.usage(new PrintWriter(System.out, true));
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Helper function to initialize import database.
     *
     * @param databaseName String
     * @throws Exception
     */
    private void initializeDatabase(String databaseName) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("initializeDatabase(): " + databaseName);
        }

        boolean unknownDB = true;
        DatabaseUtils databaseUtils = (DatabaseUtils)getBean("databaseUtils");
        if (databaseName.equals(Config.ALL) || databaseName.equals(IMPORTER_DATABASE)) {
            unknownDB = false;
            databaseUtils.createDatabase(databaseUtils.getImporterDatabaseName(), true);
        }
        if (databaseName.equals(Config.ALL) || databaseName.equals(PORTAL_DATABASE)) {
            unknownDB = false;
            databaseUtils.createDatabase(databaseUtils.getPortalDatabaseName(), false);
            boolean success = databaseUtils.executeScript(databaseUtils.getPortalDatabaseName(),
                                                          databaseUtils.getPortalDatabaseSchema(),
                                                          databaseUtils.getDatabaseUser(),
                                                          databaseUtils.getDatabasePassword());
            if (!success) {
                System.err.println("Error creating database schema.");
            }
        }
        if (unknownDB && LOG.isInfoEnabled()) {
            LOG.info("initializeDatabase(), unknown database: " + databaseName);
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("initializeDatabase(), complete");
        }
    }

    /**
     * Helper function to get data.
     *
     * @param dataSource String
     * @param runDate String
     * @param sendNotification String ('t' to send a notification)
     * @throws Exception
     */
    private void fetchData(String dataSource, String runDate, String sendNotification) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("fetchData(), dateSource:runDate: " + dataSource + ":" + runDate);
        }

        // create an instance of fetcher
        Boolean sendNotificationBool = getBoolean(sendNotification);
        DataSourcesMetadata dataSourcesMetadata = getDataSourcesMetadata(dataSource);
        // fetch the given data source
        Fetcher fetcher = (Fetcher)getBean(dataSourcesMetadata.getFetcherBeanID());
        fetcher.fetch(dataSource, runDate, sendNotificationBool);

        if (LOG.isInfoEnabled()) {
            LOG.info("fetchData(), complete");
        }
    }

    /**
     * Helper function to fetch reference data.
     *
     * @param referenceType String
     *
     * @throws Exception
     */
    private void fetchReferenceData(String referenceType) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("fetchReferenceData(), referenceType: " + referenceType);
        }

        // create an instance of fetcher
        Config config = (Config)getBean("config");
        Collection<ReferenceMetadata> referenceMetadatas = config.getReferenceMetadata(referenceType);
        if (referenceMetadatas.isEmpty()) {
            if (LOG.isInfoEnabled()) {
                LOG.info("fetchReferenceData(), unknown referenceType: " + referenceType);
            }
        }
        else {
            Fetcher fetcher = (Fetcher)getBean("referenceDataFetcher");
            for (ReferenceMetadata referenceMetadata : referenceMetadatas) {
                // for ALL, only fetch entries flagged for fetching; otherwise
                // fetch the single entry that matches the requested type
                if ((referenceType.equals(Config.ALL) && referenceMetadata.getFetch()) ||
                    referenceMetadata.getReferenceType().equals(referenceType)) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("fetchReferenceData(), calling fetcher for: " + referenceMetadata.getReferenceType());
                    }
                    fetcher.fetchReferenceData(referenceMetadata);
                }
            }
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("fetchReferenceData(), complete");
        }
    }

    /**
     * Helper function to oncotate the given MAF.
     *
     * @param mafFileName String
     *
     * @throws Exception
     */
    private void oncotateMAF(String mafFileName) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("oncotateMAF(), mafFile: " + mafFileName);
        }

        // sanity check
        File mafFile = new File(mafFileName);
        if (!mafFile.exists()) {
            throw new IllegalArgumentException("cannot find the give MAF: " + mafFileName);
        }

        // create fileUtils object
        Config config = (Config)getBean("config");
        FileUtils fileUtils = (FileUtils)getBean("fileUtils");

        // create tmp file for given MAF; the oncotator reads the copy and
        // writes its output back over the original file
        File tmpMAF = org.apache.commons.io.FileUtils.getFile(org.apache.commons.io.FileUtils.getTempDirectory(),
                                                              ""+System.currentTimeMillis()+".tmpMAF");
        org.apache.commons.io.FileUtils.copyFile(mafFile, tmpMAF);

        // oncotate the MAF (input is tmp maf, output is original maf)
        fileUtils.oncotateMAF(FileUtils.FILE_URL_PREFIX + tmpMAF.getCanonicalPath(),
                              FileUtils.FILE_URL_PREFIX + mafFile.getCanonicalPath());

        // clean up
        if (tmpMAF.exists()) {
            org.apache.commons.io.FileUtils.forceDelete(tmpMAF);
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("oncotateMAF(), complete");
        }
    }

    /**
     * Helper function to oncotate MAFs.
     *
     * @param dataSource String
     *
     * @throws Exception
     */
    private void oncotateAllMAFs(String dataSource) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("oncotateAllMAFs(), dataSource: " + dataSource);
        }

        // get the data source metadata object
        DataSourcesMetadata dataSourcesMetadata = getDataSourcesMetadata(dataSource);

        // oncotate all the files of the given data source
        FileUtils fileUtils = (FileUtils)getBean("fileUtils");
        fileUtils.oncotateAllMAFs(dataSourcesMetadata);

        if (LOG.isInfoEnabled()) {
            LOG.info("oncotateAllMAFs(), complete");
        }
    }

    /**
     * Helper function to convert data.
     *
     * @param portal String
     * @param runDate String
     * @param applyOverrides String
     *
     * @throws Exception
     */
    private void convertData(String portal, String runDate, String applyOverrides) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("convertData(), portal: " + portal);
            LOG.info("convertData(), run date: " + runDate);
            LOG.info("convertData(), apply overrides: " + applyOverrides);
        }

        Boolean applyOverridesBool = getBoolean(applyOverrides);

        // sanity check date format - doesn't work?
        PORTAL_DATE_FORMAT.setLenient(false);
        PORTAL_DATE_FORMAT.parse(runDate);

        // create an instance of Converter
        Converter converter = (Converter)getBean("converter");
        converter.convertData(portal, runDate, applyOverridesBool);

        if (LOG.isInfoEnabled()) {
            LOG.info("convertData(), complete");
        }
    }

    /**
     * Helper function to apply overrides to a given portal.
     *
     * @param portal String
     * @param excludeDatatype String
     * @param applyCaseLists String
     * @throws Exception
     */
    private void applyOverrides(String portal, String excludeDatatype, String applyCaseLists) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("applyOverrides(), portal: " + portal);
            LOG.info("applyOverrides(), exclude_datatype: " + excludeDatatype);
            LOG.info("applyOverrides(), apply_case_lists: " + applyCaseLists);
        }

        Converter converter = (Converter)getBean("converter");
        HashSet<String> excludeDatatypes = new HashSet<String>();
        if (excludeDatatype.length() > 0) excludeDatatypes.add(excludeDatatype);
        Boolean applyCaseListsBool = getBoolean(applyCaseLists);
        converter.applyOverrides(portal, excludeDatatypes, applyCaseListsBool);

        if (LOG.isInfoEnabled()) {
            LOG.info("applyOverrides(), complete");
        }
    }

    /**
     * Helper function to generate case lists.
     *
     * @param portal String
     *
     * @throws Exception
     */
    private void generateCaseLists(String portal) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("generateCaseLists(), portal: " + portal);
        }

        // create an instance of Converter
        Converter converter = (Converter)getBean("converter");
        converter.generateCaseLists(portal);

        if (LOG.isInfoEnabled()) {
            LOG.info("generateCaseLists(), complete");
        }
    }

    /**
     * Helper function to import reference data.
     *
     * @param referenceType String
     *
     * @throws Exception
     */
    private void importReferenceData(String referenceType) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("importReferenceData(), referenceType: " + referenceType);
        }

        // create an instance of Importer
        Config config = (Config)getBean("config");
        Collection<ReferenceMetadata> referenceMetadatas = config.getReferenceMetadata(referenceType);
        if (referenceMetadatas.isEmpty()) {
            if (LOG.isInfoEnabled()) {
                LOG.info("importReferenceData(), unknown referenceType: " + referenceType);
            }
        }
        else {
            Importer importer = (Importer)getBean("importer");
            for (ReferenceMetadata referenceMetadata : referenceMetadatas) {
                // for ALL, only import entries flagged for import; otherwise
                // import the single entry that matches the requested type
                if ((referenceType.equals(Config.ALL) && referenceMetadata.getImport()) ||
                    referenceMetadata.getReferenceType().equals(referenceType)) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("importReferenceData(), calling import for: " + referenceMetadata.getReferenceType());
                    }
                    importer.importReferenceData(referenceMetadata);
                }
            }
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("importReferenceData(), complete");
        }
    }

    /**
     * Helper function to import types of cancer.
     *
     * @throws Exception
     */
    private void importTypesOfCancer() throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("importTypesOfCancer()");
        }

        Importer importer = (Importer)getBean("importer");
        importer.importTypesOfCancer();

        if (LOG.isInfoEnabled()) {
            // FIX: previously logged "importReferenceData(), complete" (copy-paste bug)
            LOG.info("importTypesOfCancer(), complete");
        }
    }

    /**
     * Helper function to import data.
     *
     * @param portal String
     * @param initPortalDatabase String
     * @param initTumorTypes String
     * @param importReferenceData String
     *
     * @throws Exception
     */
    private void importData(String portal, String initPortalDatabase, String initTumorTypes, String importReferenceData) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("importData(), portal: " + portal);
            LOG.info("importData(), initPortalDatabase: " + initPortalDatabase);
            LOG.info("importData(), initTumorTypes: " + initTumorTypes);
            LOG.info("importData(), importReferenceData: " + importReferenceData);
        }

        // get booleans
        Boolean initPortalDatabaseBool = getBoolean(initPortalDatabase);
        Boolean initTumorTypesBool = getBoolean(initTumorTypes);
        Boolean importReferenceDataBool = getBoolean(importReferenceData);

        // create an instance of Importer
        Importer importer = (Importer)getBean("importer");
        importer.importData(portal, initPortalDatabaseBool, initTumorTypesBool, importReferenceDataBool);

        if (LOG.isInfoEnabled()) {
            LOG.info("importData(), complete");
        }
    }

    /**
     * Updates study data for the given portal; optionally updates the
     * cancer_studies google worksheet flags as each study is processed.
     *
     * @param portal String
     * @param updateWorksheet String ('t' to update the worksheet)
     * @throws Exception
     */
    private void updateStudyData(String portal, String updateWorksheet) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("updateStudyData(), portal: " + portal);
            LOG.info("updateStudyData(), update_worksheet: " + updateWorksheet);
        }

        Boolean updateWorksheetBool = getBoolean(updateWorksheet);
        Config config = (Config)getBean("config");
        Importer importer = (Importer)getBean("importer");
        Map<String,String> propertyMap = new HashMap<String,String>();
        for (CancerStudyMetadata cancerStudyMetadata : config.getCancerStudyMetadata(portal)) {
            propertyMap.clear();
            // if we are updating triage and this study is ready for update, then import it
            if (portal.equals(PortalMetadata.TRIAGE_PORTAL) && cancerStudyMetadata.updateTriage()) {
                importer.updateCancerStudy(portal, cancerStudyMetadata);
                // we've updated the study in triage, turn off update triage flag
                propertyMap.put(CancerStudyMetadata.UPDATE_TRIAGE_COLUMN_KEY, "false");
            }
            // otherwise, we only update studies that are ready for release
            else if (cancerStudyMetadata.readyForRelease()) {
                importer.updateCancerStudy(portal, cancerStudyMetadata);
                // turn off ready for release so that the next
                // fetch does not get imported before being vetted
                propertyMap.put(CancerStudyMetadata.READY_FOR_RELEASE_COLUMN_KEY, "false");
            }
            if (updateWorksheetBool) {
                config.updateCancerStudyAttributes(cancerStudyMetadata.getStudyPath(), propertyMap);
            }
        }
    }

    /**
     * Helper function to import case lists.
     *
     * @param portal String
     * @throws Exception
     */
    private void importCaseLists(String portal) throws Exception {

        if (LOG.isInfoEnabled()) {
            // FIX: previously logged "importData(), portal: " (copy-paste bug)
            LOG.info("importCaseLists(), portal: " + portal);
        }

        // create an instance of Importer
        Importer importer = (Importer)getBean("importer");
        importer.importCaseLists(portal);

        if (LOG.isInfoEnabled()) {
            LOG.info("importCaseLists(), complete");
        }
    }

    /**
     * Helper function to copy seg files for IGV linking.
     *
     * @param portalName String
     * @param segDatatype String
     * @param remoteUserName String
     *
     * @throws Exception
     */
    private void copySegFiles(String portalName, String segDatatype, String remoteUserName) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("copySegFiles(), portal: " + portalName);
            LOG.info("copySegFiles(), segDatatype: " + segDatatype);
            LOG.info("copySegFiles(), remoteUserName: " + remoteUserName);
        }

        Config config = (Config)getBean("config");
        Collection<PortalMetadata> portalMetadatas = config.getPortalMetadata(portalName);
        Collection<DatatypeMetadata> datatypeMetadatas = config.getDatatypeMetadata(segDatatype);

        // sanity check args
        if (remoteUserName.length() == 0 || portalMetadatas.isEmpty() || datatypeMetadatas.isEmpty()) {
            if (LOG.isInfoEnabled()) {
                LOG.info("copySegFiles(), error processing arguments, aborting....");
            }
        }
        else {
            // create an instance of Importer
            FileUtils fileUtils = (FileUtils)getBean("fileUtils");
            fileUtils.copySegFiles(portalMetadatas.iterator().next(),
                                   datatypeMetadatas.iterator().next(), remoteUserName);
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("copySegFiles(), complete");
        }
    }

    /**
     * Helper function to redeploy the war for the given portal.
     *
     * @param portalName String
     * @throws Exception
     */
    private void redeployWar(String portalName) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("redeployWar(), portal: " + portalName);
        }

        Config config = (Config)getBean("config");
        Collection<PortalMetadata> portalMetadatas = config.getPortalMetadata(portalName);

        // sanity check args
        if (portalMetadatas.isEmpty()) {
            if (LOG.isInfoEnabled()) {
                LOG.info("redeployWar(), error processing argument, aborting....");
            }
        }
        else {
            // create an instance of Importer
            FileUtils fileUtils = (FileUtils)getBean("fileUtils");
            fileUtils.redeployWar(portalMetadatas.iterator().next());
        }

        if (LOG.isInfoEnabled()) {
            LOG.info("redeployWar(), complete");
        }
    }

    /**
     * Deletes the cancer study matching the given stable id.
     *
     * @param cancerStudyStableId String
     * @throws Exception
     */
    private void deleteCancerStudy(String cancerStudyStableId) throws Exception {

        if (LOG.isInfoEnabled()) {
            LOG.info("deleteCancerStudy(), study id: " + cancerStudyStableId);
        }

        DaoCancerStudy.deleteCancerStudy(cancerStudyStableId);

        if (LOG.isInfoEnabled()) {
            LOG.info("deleteCancerStudy(), complete");
        }
    }

    /**
     * Helper function to get a DataSourcesMetadata from
     * a given datasource (name).
     *
     * @param dataSource String
     * @return DataSourcesMetadata
     */
    private DataSourcesMetadata getDataSourcesMetadata(String dataSource) {

        DataSourcesMetadata toReturn = null;
        Config config = (Config)getBean("config");
        Collection<DataSourcesMetadata> dataSources = config.getDataSourcesMetadata(dataSource);
        if (!dataSources.isEmpty()) {
            toReturn = dataSources.iterator().next();
        }

        // sanity check
        if (toReturn == null) {
            throw new IllegalArgumentException("cannot instantiate a proper DataSourcesMetadata object.");
        }

        // outta here
        return toReturn;
    }

    /**
     * Helper function to create boolean based on argument parameter.
     * 't' (case-insensitive) means true; empty or any other value means false.
     *
     * @param parameterValue String
     * @return Boolean
     */
    private Boolean getBoolean(String parameterValue) {
        // FIX: replaced the deprecated new Boolean(String) constructors with
        // Boolean.valueOf; behavior is identical (empty string -> FALSE).
        return Boolean.valueOf(parameterValue.equalsIgnoreCase("t"));
    }

    /**
     * Helper function - prints usage
     */
    public static void usage(PrintWriter writer) {

        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(writer, HelpFormatter.DEFAULT_WIDTH,
                            "Admin", "", options,
                            HelpFormatter.DEFAULT_LEFT_PAD, HelpFormatter.DEFAULT_DESC_PAD, "");
    }

    /**
     * The big deal main.
     *
     * @param args String[]
     */
    public static void main(String[] args) throws Exception {

        // sanity check
        if (args.length == 0) {
            System.err.println("Missing args to Admin.");
            Admin.usage(new PrintWriter(System.err, true));
            return;
        }

        // configure logging
        Properties props = new Properties();
        props.load(Admin.class.getResourceAsStream("/log4j.properties"));
        PropertyConfigurator.configure(props);

        // process
        Admin admin = new Admin();
        try {
            admin.setCommandParameters(args);
            admin.run();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
}
import zemberek.morphology.analysis.SentenceAnalysis; import zemberek.morphology.analysis.WordAnalysis; import zemberek.morphology.analysis.tr.TurkishSentenceAnalyzer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.TreeSet; /** * @author Fatih Barmanbay This class does morphological analysis operations * like disambiguation, POS tagging and stemming on given sentences. All * of this operations done in sentenceAnalyzer method. */ public class MorphologicalAnalyzer { private TurkishSentenceAnalyzer analyzer; private int unkCount; private Set<String> unknownWords; private WordCorrector wordCorrector; /** * @param analyzer * @param dictionary * Path of words file. Used for creating a dictionary Constructor * of the class. */ public MorphologicalAnalyzer(TurkishSentenceAnalyzer analyzer, String dictionary, String stopWords, String turkishWords) { this.analyzer = analyzer; unkCount = 0; unknownWords = new TreeSet<>(); wordCorrector = new WordCorrector(FileOperations.readFile(dictionary), new TreeSet<String>(FileOperations.readFile(stopWords)), new TreeSet<String>(FileOperations.readFile(turkishWords))); } /** * @param words: * A sentence tokenized into words. Disambiguates, POS tags and * finds stems of words on given sentence. 
*/ public String sentenceAnalyzer(List<String> words){ List<JsonBuilder> wordsAndTags = new ArrayList<>(); String rawSentence = String.join(" ", words); SentenceAnalysis analysis = analyzer.analyze(rawSentence); analyzer.disambiguate(analysis); for (SentenceAnalysis.Entry entry : analysis) { WordAnalysis wordAnalysis = entry.parses.get(0); String word = wordAnalysis.getSurfaceForm(); // Check if word is already in its root form if(wordCorrector.isWord(wordAnalysis.getSurfaceForm())) { // Check if the word is a stop word if (!wordCorrector.isStopWord(word)){ String posTag = wordAnalysis.getPos().toString(); String engWord = GCTranslate.getTranslation(word); // Check if the translation has more than one word if (engWord.contains(" ")) { List<String> engWords = Arrays.asList(engWord.split(" ")); engWords.forEach(e -> wordsAndTags.add(new JsonBuilder(e, posTag))); } else { wordsAndTags.add(new JsonBuilder(engWord, posTag)); } } // Word is not in its root form. Do stemming. } else { if (wordAnalysis.getLemma() == "UNK") { String unkWord = wordAnalysis.getSurfaceForm(); String correctWord = wordCorrector.correctWord(unkWord); //System.out.println("Before Correction: " + unkWord + "\nAfter Correction: " + correctWord); SentenceAnalysis analyzeAgain = analyzer.analyze(correctWord); analyzer.disambiguate(analyzeAgain); WordAnalysis wa = analyzeAgain.getEntry(0).parses.get(0); String root = wa.getLemma().toLowerCase(); // If root is still unknown, add it as UNK. if (root == "UNK") { String posTag = wa.dictionaryItem.primaryPos.toString(); wordsAndTags.add(new JsonBuilder(root, posTag)); unknownWords.add(wordAnalysis.root); unkCount++; // If word successfully corrected and it is not a stop word, // then translate the root of the word and add it to JSON. } else if (!wordCorrector.isStopWord(root)) { // If the word is root, then do not try to take the root of it. 
if (wordCorrector.isWord(correctWord)) { String engRoot = GCTranslate.getTranslation(correctWord); // Is there more than one word in translation? If there is, // than take the all the words and add them to the JSON if (engRoot.contains(" ")) { String[] translations = engRoot.split(" "); String posTag = wa.dictionaryItem.primaryPos.toString(); Arrays.asList(translations).forEach(e -> wordsAndTags.add(new JsonBuilder(e, posTag))); } else { String posTag = wa.dictionaryItem.primaryPos.toString(); wordsAndTags.add(new JsonBuilder(engRoot, posTag)); } } else { String engRoot = GCTranslate.getTranslation(root); String posTag = wa.dictionaryItem.primaryPos.toString(); wordsAndTags.add(new JsonBuilder(engRoot, posTag)); } } else { continue; } // If the word is correctly spelled, just take the root and POSTAG // and add it the JSON. } else { String root = wordAnalysis.getLemma().toLowerCase(); if (!wordCorrector.isStopWord(root)) { String engRoot = GCTranslate.getTranslation(root); String posTag = wordAnalysis.dictionaryItem.primaryPos.toString(); // Check if translation has more than one word if (engRoot.contains(" ")) { List<String> engWords = Arrays.asList(engRoot.split(" ")); engWords.forEach(e -> wordsAndTags.add(new JsonBuilder(e, posTag))); } else { wordsAndTags.add(new JsonBuilder(engRoot, posTag)); } } else { continue; } } } } return JsonBuilder.toJson(wordsAndTags); } /** * @return number of UNK tag in data set. */ public int unknownCount() { return unkCount; } /** * @return number of unique words that tagged as UNK. */ public int uniqueUnknownCount() { return unknownWords.size(); } /** * Prints all unique words tagged as UNK. */ public void printUnknownWords() { unknownWords.forEach(e -> System.out.println(e)); } }
package agentgui.core.network; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.List; import agentgui.core.application.Application; /** * This class can be used in order to evaluate and compare configured JADE URL's and its ports. * * @author Christian Derksen - DAWIS - ICB - University of Duisburg - Essen */ public class JadeUrlConfiguration { private String currURLorIP; private InetAddress currInetAddress; private int currPort = -1; private int currPort4MTP = -1; private boolean errors=false; private List<InetAddress> inetAddresses; private List<InetAddress> ip4InetAddresses; private List<InetAddress> ip6InetAddresses; /** * Instantiates a new JadeUrlConfiguration. * * @param urlOrIPtoCheck the URL or IP on which the JADE platform is running or acting (e.g "localhost:1099/JADE") */ public JadeUrlConfiguration(String urlOrIPtoCheck) { try { if (urlOrIPtoCheck==null || urlOrIPtoCheck.trim().equals("")) { this.currURLorIP = InetAddress.getLocalHost().getCanonicalHostName(); this.currInetAddress = InetAddress.getLocalHost(); } else { this.currURLorIP = this.filterPort(urlOrIPtoCheck); this.currInetAddress = InetAddress.getByName(currURLorIP); } this.errors=false; } catch (UnknownHostException err) { String appTitle = Application.getGlobalInfo().getApplicationTitle(); System.err.println(""); System.err.println("=> [" + appTitle + "] Error while trying to receive network address '" + this.currURLorIP + "' !" ); System.err.println("=> Please, check your " + appTitle + "-options for server settings (e.g. 
for the server.master)."); System.err.println("=> Also make sure that the local machine has a working network connection."); System.err.println(""); this.errors=true; this.currURLorIP = null; this.currInetAddress = null; } } /** * Filters the Port on which JADE is running or acting. * @param url The URL on which JADE is running or acting (e.g "localhost:1099/JADE") * @return The URL or IP, which is used by JADE (e.g "localhost") */ private String filterPort(String url){ String workURL = url; String workPort = null; String workPortNew = ""; if (workURL==null) return null; if ( url.contains(":")) { workURL = url.substring(0, url.indexOf(":")); workPort = url.substring(url.indexOf(":")+1).trim(); String workPortArr[] = workPort.split(""); for (int i = 0; i < workPortArr.length; i++) { if ( workPortArr[i].equalsIgnoreCase("")==false ) { String sngChar = workPortArr[i]; if ( sngChar.matches( "[0-9]" )==true ) { workPortNew += sngChar; } else { break; } } } this.currPort = Integer.parseInt(workPortNew); } return workURL; } /** * Check for errors. * @return true, if errors occurred with the current URL or IP-address */ public boolean hasErrors() { return errors; } /** * Checks if the specified instance equal to the current one. * * @param jucToCompare the JadeUrlChecker to compare * @return true, if the jade platform configuration is equal */ public boolean isEqualJadePlatform(JadeUrlConfiguration jucToCompare) { if (jucToCompare.getHostIP().equals(this.getHostIP())==true) { if (jucToCompare.getPort()==this.getPort()) { return true; } } return false; } /** * Returns true, if the current JADE instance is located on a local machine. * @return True, if the JADE URL is pointing to the local machine */ public boolean isLocalhost() { for (InetAddress inetAddress : this.getLocalInetAddresses()) { if (inetAddress.equals(this.currInetAddress)) { return true; } } return false; } /** * Provides the JADE URL out of the analysed subcomponents like IP, Port and "/JADE"-Suffix. 
* * @return JADE URL */ public String getJadeURL(){ if (currInetAddress!=null && currPort!=-1) { return this.currURLorIP + ":" + currPort + "/JADE"; } else { return null; } } /** * return the URL for the MTP of JADE. * @return JADE URL for MTP */ public String getJadeURL4MTP() { if (currInetAddress!=null && currPort4MTP!=-1) { return "http://" + this.currURLorIP + ":" + currPort4MTP + "/acc"; } else { return null; } } /** * * Provides the IP-Address of the current JADE-URL. * @return IP address (e.g. 127.0.0.1) */ public String getHostIP() { if (currInetAddress!=null) { return currInetAddress.getHostAddress(); } return null; } /** * Provides the host name of the current JADE-URL. * @return host name (e.g. 'localhost') */ public String getHostName() { if (currInetAddress!=null) { return currInetAddress.getHostName(); } return null; } /** * Provides the port number of JADE. * @return The port number */ public int getPort() { return currPort; } /** * Can be used to set the port number for JADE. * @param newPort The port to be used */ public void setPort(int newPort) { currPort = newPort; } /** * Provides the port number of JADE, that is used for the MTP of the MainContainer. * @return the port4 mtp */ public int getPort4MTP() { return currPort4MTP; } /** * Can be used to set the port number for the MTP of the Main-Container. * @param newPort4MTP the new port4 mtp */ public void setPort4MTP(int newPort4MTP) { this.currPort4MTP = newPort4MTP; } /** * Returns all local InetAddress's. * @return the local InetAddress's */ private List<InetAddress> getLocalInetAddresses() { if (inetAddresses==null) { inetAddresses = new ArrayList<InetAddress>(); inetAddresses.addAll(this.getLocalIP4InetAddresses()); inetAddresses.addAll(this.getLocalIP6InetAddresses()); } return inetAddresses; } /** * Returns all local IP4 InetAddress's. 
* @return the local IP4 InetAddress's */ private List<InetAddress> getLocalIP4InetAddresses() { if (ip4InetAddresses==null) { ip4InetAddresses = new ArrayList<InetAddress>(); this.setLocalInetAddresses(); } return ip4InetAddresses; } /** * Returns all local IP6 InetAddress's. * @return the local IP6 InetAddress's */ private List<InetAddress> getLocalIP6InetAddresses() { if (ip6InetAddresses==null) { ip6InetAddresses = new ArrayList<InetAddress>(); this.setLocalInetAddresses(); } return ip6InetAddresses; } /** * Sets the local InetAddress's. */ private void setLocalInetAddresses() { try { Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces(); for (NetworkInterface netint : Collections.list(nets)) { Enumeration<InetAddress> inetAddresses = netint.getInetAddresses(); if (inetAddresses.hasMoreElements()) { // System.out.printf("Display name: %s\n", // netint.getDisplayName()); // System.out.printf("Name: %s\n", netint.getName()); for (InetAddress inetAddress : Collections.list(inetAddresses)) { if (inetAddress instanceof Inet4Address) { if (this.getLocalIP4InetAddresses().contains(inetAddress)==false) { this.getLocalIP4InetAddresses().add(inetAddress); } } else if (inetAddress instanceof Inet6Address) { if (this.getLocalIP6InetAddresses().contains(inetAddress)==false) { this.getLocalIP6InetAddresses().add(inetAddress); } } } } } } catch (SocketException e) { System.err.println("Error retrieving local network addresses list ... "); } } }
package ru.pinkponies.app; import java.io.IOException; import java.lang.ref.WeakReference; import java.net.InetSocketAddress; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.security.InvalidParameterException; import java.util.Iterator; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import ru.pinkponies.protocol.LoginPacket; import ru.pinkponies.protocol.Packet; import ru.pinkponies.protocol.Protocol; import ru.pinkponies.protocol.SayPacket; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.os.Message; public class NetworkingThread extends Thread { private final String SERVER_IP = "10.55.87.47"; private final int SERVER_PORT = 4266; private static final int BUFFER_SIZE = 8192; private final static Logger logger = Logger.getLogger(NetworkingThread.class.getName()); private Protocol protocol; private WeakReference<MainActivity> mainActivity; public MessageHandler messageHandler; private SocketChannel socket; private Selector selector; private ByteBuffer incomingData = ByteBuffer.allocate(BUFFER_SIZE); private ByteBuffer outgoingData = ByteBuffer.allocate(BUFFER_SIZE); NetworkingThread(MainActivity activity) { mainActivity = new WeakReference<MainActivity>(activity); protocol = new Protocol(); } public void run() { try { Looper.prepare(); messageHandler = new MessageHandler(this); sendMessageToUIThread("initialized"); Looper.loop(); } catch (Exception e) { logger.log(Level.SEVERE, "Exception", e); } } private void connect() throws IOException { logger.info("Connecting to " + SERVER_IP + ":" + SERVER_PORT + "..."); socket = SocketChannel.open(); socket.configureBlocking(false); socket.connect(new InetSocketAddress(SERVER_IP, SERVER_PORT)); selector = Selector.open(); socket.register(selector, SelectionKey.OP_CONNECT); logger.info("Connection 
initiated, waiting for finishing..."); } private void service() throws IOException { if (selector.select() > 0) { Set<SelectionKey> keys = selector.selectedKeys(); Iterator<SelectionKey> iterator = keys.iterator(); while (iterator.hasNext()) { SelectionKey key = iterator.next(); iterator.remove(); if (!key.isValid()) { continue; } if (key.isConnectable()) { finishConnection(key); } else if (key.isReadable()) { read(key); } else if (key.isWritable()) { write(key); } } } } private void finishConnection(SelectionKey key) throws IOException { SocketChannel channel = (SocketChannel) key.channel(); if (channel.isConnectionPending()) { channel.finishConnect(); channel.register(selector, SelectionKey.OP_READ); channel.register(selector, SelectionKey.OP_WRITE); sendMessageToUIThread("connected"); } } private void close(SelectionKey key) throws IOException { SocketChannel channel = (SocketChannel) key.channel(); channel.close(); key.cancel(); } private void read(SelectionKey key) throws IOException { SocketChannel channel = (SocketChannel) key.channel(); incomingData.limit(incomingData.capacity()); int numRead = -1; try { numRead = channel.read(incomingData); } catch (IOException e) { close(key); sendMessageToUIThread("Exception: " + e.getMessage()); return; } if (numRead == -1) { close(key); return; } Packet packet = null; incomingData.flip(); try { packet = protocol.unpack(incomingData); } catch (Exception e) { e.printStackTrace(); sendMessageToUIThread("Exception: " + e.getMessage()); } incomingData.compact(); if (packet == null) { return; } if (packet instanceof SayPacket) { SayPacket sayPacket = (SayPacket) packet; logger.info("Server: " + sayPacket.toString()); } } private void write(SelectionKey key) throws IOException { SocketChannel channel = (SocketChannel) key.channel(); outgoingData.flip(); channel.write(outgoingData); outgoingData.compact(); } private void sendPacket(Packet packet) throws IOException { try { outgoingData.put(protocol.pack(packet)); } 
catch(BufferOverflowException e) { logger.log(Level.SEVERE, "Exception", e); } } private void login() throws IOException { LoginPacket packet = new LoginPacket(Build.BOARD, Build.BOOTLOADER, Build.BRAND, Build.CPU_ABI, Build.CPU_ABI2, Build.DEVICE); sendPacket(packet); } private void say(String message) throws IOException { SayPacket packet = new SayPacket(message); sendPacket(packet); } private void onMessageFromUIThread(Object message) { try { logger.info("MA: " + message.toString()); if (message.equals("connect")) { connect(); } else if (message.equals("service")) { service(); } else if (message.equals("login")) { login(); } else if (message instanceof Packet) { sendPacket((Packet) message); } else if (message instanceof String) { say((String) message); } else { throw new InvalidParameterException("Unknown message type."); } } catch (Exception e) { logger.log(Level.SEVERE, "Exception", e); } } private void sendMessageToUIThread(String message) { try { Message msg = mainActivity.get().messageHandler.obtainMessage(); msg.obj = message; mainActivity.get().messageHandler.sendMessage(msg); } catch (Exception e) { logger.log(Level.SEVERE, "Exception", e); } } static public class MessageHandler extends Handler { private WeakReference<NetworkingThread> thread; MessageHandler(NetworkingThread networkingThread) { thread = new WeakReference<NetworkingThread>(networkingThread); } @Override public void handleMessage(Message msg) { thread.get().onMessageFromUIThread(msg.obj); } }; }
package com.poco;

/**
 * An execution that queries its children strictly in order. A cursor marks the
 * child currently allowed to respond; the cursor advances once that child has
 * answered, unless the child carries a zero-plus ('*') or one-plus ('+')
 * modifier, in which case it may keep responding. When the cursor moves past
 * the last child the execution becomes exhausted — except when this sequence
 * itself is '*'/'+' modified, in which case the cursor wraps around.
 */
public class SequentialExecution extends AbstractExecution implements Queryable, Matchable {

    // Index of the child currently eligible to handle queries.
    protected int currentCursor = 0;
    // Set once the cursor has moved past the last child (non-looping case).
    protected boolean exhausted = false;
    // Cached modifier flags of the child at currentCursor ('*' resp. '+').
    private boolean currentChildIsZeroPlus = false;
    private boolean currentChildIsOnePlus = false;

    public SequentialExecution(String modifier) throws PoCoException {
        super(modifier);
    }

    // Used to set the cached modifier flags for the first child before a query
    // starts, and later to refresh them whenever the cursor advances.
    // NOTE(review): despite the "get" name, this method only mutates the two
    // cached flags — renaming would change the public interface, so it stays.
    public void getCurrentChildModifier() {
        if (this.children.size() > 0 && currentCursor < this.children.size()) {
            Class<AbstractExecution> classAE = AbstractExecution.class;
            Class<? extends EventResponder> classChild = children.get(
                    this.currentCursor).getClass();
            // Only AbstractExecution children can carry '*'/'+' modifiers.
            if (classAE.isAssignableFrom(classChild)) {
                currentChildIsZeroPlus = ((AbstractExecution) this.children
                        .get(this.currentCursor)).isZeroPlus();
                currentChildIsOnePlus = ((AbstractExecution) this.children
                        .get(this.currentCursor)).isOnePlus();
            } else {
                // The current child is an exchange (not an AbstractExecution):
                // it has no modifier, so the cached flags are left as they are.
            }
        }
    }

    /** @return the index of the child currently being queried */
    public int getCurrentCursor() {
        return currentCursor;
    }

    /** @return true once the sequence has no child left to query */
    public boolean isExhausted() {
        return exhausted;
    }

    /** @return true if the current child carries the zero-plus ('*') modifier */
    public boolean isCurrentChildIsZeroPlus() {
        return currentChildIsZeroPlus;
    }

    /** @return true if the current child carries the one-plus ('+') modifier */
    public boolean isCurrentChildIsOnePlus() {
        return currentChildIsOnePlus;
    }

    /**
     * Advances the cursor pointing to the current child to be queried. For
     * special cases (i.e. the * modifier on this sequence itself), the cursor
     * loops back around to the front when we reach the end. While advancing
     * the cursor, the cached modifier flags are refreshed so that they always
     * describe the current child.
     */
    private void advanceCursor() {
        if (isZeroPlus || isOnePlus)
            // looping sequence: wrap around instead of becoming exhausted
            currentCursor = (currentCursor + 1) % children.size();
        else
            currentCursor++;
        if (currentCursor >= children.size()) {
            exhausted = true;
        } else {
            // keep the cached modifier flags in sync with the new current child
            getCurrentChildModifier();
        }
    }

    @Override
    public SRE query(Event event) {
        // Don't do anything without children
        if (children.size() == 0) {
            return null;
        }
        // Also don't do anything if no more children left
        if (exhausted) {
            return null;
        }
        getCurrentChildModifier();
        EventResponder currentChild = children.get(currentCursor);
        if (currentChild.accepts(event)) {
            // An unmodified child answers exactly once, so advance before
            // delegating the query to it.
            if (!currentChildIsZeroPlus && !currentChildIsOnePlus)
                advanceCursor();
            SRE res = currentChild.query(event);
            currentChild.resetIsQueried();
            return res;
        } else { // not accepting
            if (currentChildIsZeroPlus) {
                // We can skip a zero-plus (*) modifier and retry with the next child
                advanceCursor();
                return this.query(event);
            } else {
                // CurrentChild doesn't accept and can't be skipped
                currentChild.resetIsQueried();
                return null;
            }
        }
    }

    @Override
    public boolean accepts(Event event) {
        if (children.size() == 0) {
            return false;
        }
        // The first child of a sequential execution must accept for its parent
        // to accept
        return children.get(0).accepts(event);
    }

    @Override
    public String toString() {
        return "SequentialExecution [currentCursor=" + currentCursor
                + ", exhausted=" + exhausted + ", isZeroPlus=" + isZeroPlus
                + ", isOnePlus=" + isOnePlus + ", children=" + children + "]";
    }
}
package org.bimserver.plugins; import java.io.Closeable; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.JarInputStream; import java.util.zip.ZipEntry; import javax.xml.bind.JAXBException; import org.apache.maven.artifact.versioning.ArtifactVersion; import org.apache.maven.artifact.versioning.DefaultArtifactVersion; import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException; import org.apache.maven.artifact.versioning.VersionRange; import org.apache.maven.model.Model; import org.apache.maven.model.Repository; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.bimserver.interfaces.objects.SPluginBundle; import org.bimserver.interfaces.objects.SPluginBundleType; import org.bimserver.interfaces.objects.SPluginBundleVersion; import org.bimserver.interfaces.objects.SPluginInformation; import org.bimserver.plugins.classloaders.DelegatingClassLoader; import org.bimserver.plugins.classloaders.EclipsePluginClassloader; import org.bimserver.plugins.classloaders.FileJarClassLoader; import org.bimserver.plugins.classloaders.JarClassLoader; import org.bimserver.plugins.classloaders.PublicFindClassClassLoader; import org.bimserver.plugins.web.WebModulePlugin; import org.bimserver.shared.exceptions.PluginException; import org.bimserver.shared.exceptions.UserException; import org.bimserver.utils.FakeClosingInputStream; import org.bimserver.utils.PathUtils; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.eclipse.aether.artifact.Artifact; import 
org.eclipse.aether.artifact.DefaultArtifact; import org.eclipse.aether.collection.CollectRequest; import org.eclipse.aether.collection.CollectResult; import org.eclipse.aether.collection.DependencyCollectionException; import org.eclipse.aether.graph.Dependency; import org.eclipse.aether.graph.DependencyNode; import org.eclipse.aether.repository.RemoteRepository; import org.eclipse.aether.resolution.ArtifactDescriptorException; import org.eclipse.aether.resolution.ArtifactDescriptorRequest; import org.eclipse.aether.resolution.ArtifactDescriptorResult; import org.eclipse.aether.resolution.ArtifactRequest; import org.eclipse.aether.resolution.ArtifactResolutionException; import org.eclipse.aether.resolution.ArtifactResult; import org.eclipse.aether.resolution.DependencyRequest; import org.eclipse.aether.resolution.DependencyResolutionException; import org.eclipse.aether.util.graph.visitor.PreorderNodeListGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PluginBundleManager implements AutoCloseable { private static final Logger LOGGER = LoggerFactory.getLogger(PluginBundleManager.class); private final Map<PluginBundleIdentifier, PluginBundle> pluginBundleIdentifierToPluginBundle = new HashMap<>(); private final Map<PluginBundleVersionIdentifier, PluginBundle> pluginBundleVersionIdentifierToPluginBundle = new HashMap<>(); private final Map<PluginBundleIdentifier, PluginBundleVersionIdentifier> pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier = new HashMap<>(); private PluginManager pluginManager; private final Path pluginsDir; private MavenPluginRepository mavenPluginRepository; private final List<FileJarClassLoader> jarClassLoaders = new ArrayList<>(); public PluginBundleManager(PluginManager pluginManager, MavenPluginRepository mavenPluginRepository, Path pluginsDir) { this.pluginManager = pluginManager; this.mavenPluginRepository = mavenPluginRepository; this.pluginsDir = pluginsDir; if (pluginsDir != null) { if 
(!Files.isDirectory(pluginsDir)) { try { Files.createDirectories(pluginsDir); } catch (IOException e) { e.printStackTrace(); } } } } public PluginBundle install(MavenPluginBundle mavenPluginBundle, boolean strictDependencyChecking) throws Exception { return install(mavenPluginBundle, null, strictDependencyChecking); } public PluginBundle install(MavenPluginBundle mavenPluginBundle, List<SPluginInformation> plugins, boolean strictDependencyChecking) throws Exception { PluginBundleVersionIdentifier pluginBundleVersionIdentifier = mavenPluginBundle.getPluginVersionIdentifier(); MavenXpp3Reader mavenreader = new MavenXpp3Reader(); Model model = null; try (InputStream pomInputStream = mavenPluginBundle.getPomInputStream()) { model = mavenreader.read(pomInputStream); } if (plugins == null) { try (InputStream inputStream = mavenPluginBundle.getJarInputStream()) { try (JarInputStream jarInputStream = new JarInputStream(inputStream)) { JarEntry nextJarEntry = jarInputStream.getNextJarEntry(); while (nextJarEntry != null) { if (nextJarEntry.getName().equals("plugin/plugin.xml")) { // Install all plugins PluginDescriptor pluginDescriptor = pluginManager.getPluginDescriptor(new FakeClosingInputStream(jarInputStream)); plugins = new ArrayList<>(); pluginManager.processPluginDescriptor(pluginDescriptor, plugins); for (SPluginInformation info : plugins) { info.setInstallForAllUsers(true); info.setInstallForNewUsers(true); } break; } nextJarEntry = jarInputStream.getNextJarEntry(); } } } } DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader()); loadDependencies(mavenPluginBundle.getVersion(), strictDependencyChecking, model, delegatingClassLoader); Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName()); if (Files.exists(target)) { throw new PluginException("This plugin has already been installed " + target.getFileName().toString()); } InputStream jarInputStream = mavenPluginBundle.getJarInputStream(); try { 
Files.copy(jarInputStream, target); } finally { jarInputStream.close(); } return loadPlugin(pluginBundleVersionIdentifier, target, mavenPluginBundle.getPluginBundle(), mavenPluginBundle.getPluginBundleVersion(), plugins, delegatingClassLoader); } private void loadDependencies(String pluginBundleVersion, boolean strictDependencyChecking, Model model, DelegatingClassLoader delegatingClassLoader) throws DependencyCollectionException, InvalidVersionSpecificationException, Exception { if (model.getRepositories() != null) { for (Repository repository : model.getRepositories()) { RemoteRepository remoteRepository = new RemoteRepository.Builder(repository.getId(), "default", repository.getUrl()).build(); mavenPluginRepository.addRepository(remoteRepository); } } List<Dependency> dependenciesToResolve = new ArrayList<>(); for (org.apache.maven.model.Dependency dependency2 : model.getDependencies()) { if (dependency2.getGroupId().contentEquals("org.opensourcebim") && (dependency2.getArtifactId().contentEquals("shared") || dependency2.getArtifactId().contentEquals("pluginbase"))) { // We don't need to load BIMserver dependencies (and all their dependencies!) 
continue; } dependenciesToResolve.add(new Dependency(new DefaultArtifact(dependency2.getGroupId(), dependency2.getArtifactId(), "pom", dependency2.getVersion()), dependency2.getScope())); } CollectRequest collectRequest = new CollectRequest(dependenciesToResolve, null, null); CollectResult collectDependencies = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest); PreorderNodeListGenerator nlg = new PreorderNodeListGenerator(); DependencyNode rootDep = collectDependencies.getRoot(); rootDep.accept(nlg); for (Dependency dependency : nlg.getDependencies(true)) { Artifact dependencyArtifact = dependency.getArtifact(); PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependencyArtifact.getGroupId(), dependencyArtifact.getArtifactId()); if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) { if (strictDependencyChecking) { String version = dependencyArtifact.getVersion(); if (!version.contains("[") && !version.contains("(")) { version = "[" + version + "]"; } VersionRange versionRange = VersionRange.createFromVersionSpec(version); // String version = // pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion(); ArtifactVersion artifactVersion = new DefaultArtifactVersion(pluginBundleVersion); if (versionRange.containsVersion(artifactVersion)) { } else { throw new Exception( "Required dependency " + pluginBundleIdentifier + " is installed, but it's version (" + pluginBundleVersion + ") does not comply to the required version (" + dependencyArtifact.getVersion() + ")"); } } else { LOGGER.info("Skipping strict dependency checking for dependency " + dependencyArtifact.getArtifactId()); } } else { try { MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(dependencyArtifact.getGroupId(), dependencyArtifact.getArtifactId()); Path depJarFile = 
mavenPluginLocation.getVersionJar(dependencyArtifact.getVersion()); FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile); jarClassLoaders.add(jarClassLoader); delegatingClassLoader.add(jarClassLoader); } catch (Exception e) { throw new Exception("Required dependency " + pluginBundleIdentifier + " is not installed"); } } } } public PluginBundle loadFromPluginDir(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, SPluginBundleVersion pluginBundleVersion, List<SPluginInformation> plugins, boolean strictDependencyChecking) throws Exception { Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName()); if (!Files.exists(target)) { throw new PluginException(target.toString() + " not found"); } SPluginBundle sPluginBundle = new SPluginBundle(); MavenXpp3Reader mavenreader = new MavenXpp3Reader(); Model model = null; try (JarFile jarFile = new JarFile(target.toFile())) { ZipEntry entry = jarFile.getEntry("META-INF/maven/" + pluginBundleVersion.getGroupId() + "/" + pluginBundleVersion.getArtifactId() + "/pom.xml"); try (InputStream inputStream = jarFile.getInputStream(entry)) { model = mavenreader.read(inputStream); } } sPluginBundle.setOrganization(model.getOrganization().getName()); sPluginBundle.setName(model.getName()); DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader()); loadDependencies(model.getVersion(), strictDependencyChecking, model, delegatingClassLoader); for (org.apache.maven.model.Dependency dependency : model.getDependencies()) { if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) { // TODO Skip, we should also check the version though } else { PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependency.getGroupId(), dependency.getArtifactId()); if 
(pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) { if (strictDependencyChecking) { VersionRange versionRange = VersionRange.createFromVersion(dependency.getVersion()); String version = pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion(); ArtifactVersion artifactVersion = new DefaultArtifactVersion(version); if (versionRange.containsVersion(artifactVersion)) { } else { throw new Exception("Required dependency " + pluginBundleIdentifier + " is installed, but it's version (" + version + ") does not comply to the required version (" + dependency.getVersion() + ")"); } } else { LOGGER.info("Skipping strict dependency checking for dependency " + dependency.getArtifactId()); } } else { if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) { } else { MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(dependency.getGroupId(), dependency.getArtifactId()); try { Path depJarFile = mavenPluginLocation.getVersionJar(dependency.getVersion()); FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile); jarClassLoaders.add(jarClassLoader); delegatingClassLoader.add(jarClassLoader); } catch (Exception e) { } } } } } return loadPlugin(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, plugins, delegatingClassLoader); } @SuppressWarnings({ "unchecked", "rawtypes" }) private PluginBundle loadPlugins(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, ResourceLoader resourceLoader, ClassLoader classLoader, URI location, String classLocation, PluginDescriptor pluginDescriptor, PluginSourceType pluginType, Set<org.bimserver.plugins.Dependency> dependencies, SPluginBundle sPluginBundle, SPluginBundleVersion sPluginBundleVersion) throws PluginException { 
        sPluginBundle.setInstalledVersion(sPluginBundleVersion);
        PluginBundle pluginBundle = new PluginBundleImpl(pluginBundleVersionIdentifier, sPluginBundle, sPluginBundleVersion, pluginDescriptor);
        // Tie the classloader's lifetime to the bundle so it is closed on uninstall.
        if (classLoader != null && classLoader instanceof Closeable) {
            pluginBundle.addCloseable((Closeable) classLoader);
        }
        for (AbstractPlugin pluginImplementation : pluginDescriptor.getPlugins()) {
            if (pluginImplementation instanceof JavaPlugin) {
                JavaPlugin javaPlugin = (JavaPlugin) pluginImplementation;
                // Descriptor values may contain stray whitespace/newlines from the XML.
                String interfaceClassName = javaPlugin.getInterfaceClass().trim().replace("\n", "");
                try {
                    // The interface must be visible to the server's own classloader.
                    Class interfaceClass = getClass().getClassLoader().loadClass(interfaceClassName);
                    if (javaPlugin.getImplementationClass() != null) {
                        String implementationClassName = javaPlugin.getImplementationClass().trim().replace("\n", "");
                        try {
                            // The implementation is loaded from the bundle's own classloader.
                            Class implementationClass = classLoader.loadClass(implementationClassName);
                            Plugin plugin = (Plugin) implementationClass.newInstance();
                            pluginBundle.add(pluginManager.loadPlugin(pluginBundle, interfaceClass, location, classLocation, plugin, classLoader, pluginType, pluginImplementation, dependencies, plugin.getClass().getName()));
                        } catch (NoClassDefFoundError e) {
                            throw new PluginException("Implementation class '" + implementationClassName + "' not found", e);
                        } catch (ClassNotFoundException e) {
                            throw new PluginException("Implementation class '" + e.getMessage() + "' not found in " + location, e);
                        } catch (InstantiationException e) {
                            throw new PluginException(e);
                        } catch (IllegalAccessException e) {
                            throw new PluginException(e);
                        }
                    }
                } catch (ClassNotFoundException e) {
                    throw new PluginException("Interface class '" + interfaceClassName + "' not found", e);
                } catch (Error e) {
                    throw new PluginException(e);
                }
            } else if (pluginImplementation instanceof org.bimserver.plugins.WebModulePlugin) {
                // Web modules are wrapped in a JsonWebModule rather than instantiated by reflection.
                org.bimserver.plugins.WebModulePlugin webModulePlugin = (org.bimserver.plugins.WebModulePlugin) pluginImplementation;
                JsonWebModule jsonWebModule = new JsonWebModule(webModulePlugin);
                pluginBundle.add(pluginManager.loadPlugin(pluginBundle, WebModulePlugin.class, location, classLocation, jsonWebModule, classLoader, pluginType, pluginImplementation, dependencies, webModulePlugin.getIdentifier()));
            }
        }
        // Register the bundle under all three lookup keys.
        pluginBundleIdentifierToPluginBundle.put(pluginBundleVersionIdentifier.getPluginBundleIdentifier(), pluginBundle);
        pluginBundleVersionIdentifierToPluginBundle.put(pluginBundleVersionIdentifier, pluginBundle);
        pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.put(pluginBundleVersionIdentifier.getPluginBundleIdentifier(), pluginBundleVersionIdentifier);
        return pluginBundle;
    }

    /**
     * Loads a plugin bundle from an arbitrary JAR file (not necessarily in pluginsDir).
     * The JAR must contain a {@code plugin/plugin.xml} descriptor.
     *
     * @throws PluginException when the bundle is already loaded, the file is missing,
     *         the descriptor is absent/unreadable, or plugin instantiation fails
     */
    public PluginBundle loadPluginsFromJar(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, Path file, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, ClassLoader parentClassLoader) throws PluginException {
        PluginBundleIdentifier pluginBundleIdentifier = pluginBundleVersionIdentifier.getPluginBundleIdentifier();
        if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) {
            throw new PluginException("Plugin " + pluginBundleIdentifier.getHumanReadable() + " already loaded (version " + pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion() + ")");
        }
        LOGGER.debug("Loading plugins from " + file.toString());
        if (!Files.exists(file)) {
            throw new PluginException("Not a file: " + file.toString());
        }
        FileJarClassLoader jarClassLoader = null;
        try {
            jarClassLoader = new FileJarClassLoader(pluginManager, parentClassLoader, file);
            jarClassLoaders.add(jarClassLoader);
            final JarClassLoader finalLoader = jarClassLoader;
            URL resource = jarClassLoader.findResource("plugin/plugin.xml");
            if (resource == null) {
                throw new PluginException("No plugin/plugin.xml found in " + file.getFileName().toString());
            }
            PluginDescriptor pluginDescriptor = null;
            try (InputStream pluginStream = resource.openStream()) {
                pluginDescriptor =
pluginManager.getPluginDescriptor(pluginStream); if (pluginDescriptor == null) { jarClassLoader.close(); throw new PluginException("No plugin descriptor could be created"); } } LOGGER.debug(pluginDescriptor.toString()); URI fileUri = file.toAbsolutePath().toUri(); URI jarUri = new URI("jar:" + fileUri.toString()); ResourceLoader resourceLoader = new ResourceLoader() { @Override public InputStream load(String name) { return finalLoader.getResourceAsStream(name); } }; return loadPlugins(pluginBundleVersionIdentifier, resourceLoader, jarClassLoader, jarUri, file.toAbsolutePath().toString(), pluginDescriptor, PluginSourceType.JAR_FILE, new HashSet<org.bimserver.plugins.Dependency>(), sPluginBundle, pluginBundleVersion); } catch (Exception e) { if (jarClassLoader != null) { try { jarClassLoader.close(); } catch (IOException e1) { LOGGER.error("", e1); } } throw new PluginException(e); } } public PluginBundle loadJavaProject(Path projectRoot, Path pomFile, Path pluginFolder, PluginDescriptor pluginDescriptor) throws PluginException, FileNotFoundException, IOException, XmlPullParserException { MavenXpp3Reader mavenreader = new MavenXpp3Reader(); Model model = null; try (FileReader reader = new FileReader(pomFile.toFile())) { model = mavenreader.read(reader); } PluginBundleVersionIdentifier pluginBundleVersionIdentifier = new PluginBundleVersionIdentifier(model.getGroupId(), model.getArtifactId(), model.getVersion()); if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleVersionIdentifier.getPluginBundleIdentifier())) { throw new PluginException("Plugin " + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getHumanReadable() + " already loaded (version " + pluginBundleIdentifierToPluginBundle.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier()).getPluginBundleVersion().getVersion() + ")"); } DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader()); PublicFindClassClassLoader previous = new 
PublicFindClassClassLoader(getClass().getClassLoader()) { @Override public Class<?> findClass(String name) throws ClassNotFoundException { return null; } @Override public URL findResource(String name) { return null; } @Override public void dumpStructure(int indent) { } }; Set<org.bimserver.plugins.Dependency> bimServerDependencies = new HashSet<>(); pluginBundleVersionIdentifier = new PluginBundleVersionIdentifier(new PluginBundleIdentifier(model.getGroupId(), model.getArtifactId()), model.getVersion()); previous = loadDependencies(bimServerDependencies, model, previous); delegatingClassLoader.add(previous); // Path libFolder = projectRoot.resolve("lib"); // loadDependencies(libFolder, delegatingClassLoader); EclipsePluginClassloader pluginClassloader = new EclipsePluginClassloader(delegatingClassLoader, projectRoot); // pluginClassloader.dumpStructure(0); ResourceLoader resourceLoader = new ResourceLoader() { @Override public InputStream load(String name) { try { return Files.newInputStream(pluginFolder.resolve(name)); } catch (IOException e) { e.printStackTrace(); } return null; } }; SPluginBundle sPluginBundle = new SPluginBundle(); if (model.getOrganization() == null) { throw new PluginException("Plugis are required to have an organization in the pom.xml file"); } sPluginBundle.setOrganization(model.getOrganization().getName()); sPluginBundle.setName(model.getName()); SPluginBundleVersion sPluginBundleVersion = createPluginBundleVersionFromMavenModel(model, true); Path icon = projectRoot.resolve("icon.png"); if (Files.exists(icon)) { byte[] iconBytes = Files.readAllBytes(icon); sPluginBundleVersion.setIcon(iconBytes); } sPluginBundle.setInstalledVersion(sPluginBundleVersion); return loadPlugins(pluginBundleVersionIdentifier, resourceLoader, pluginClassloader, projectRoot.toUri(), projectRoot.resolve("target/classes").toString(), pluginDescriptor, PluginSourceType.ECLIPSE_PROJECT, bimServerDependencies, sPluginBundle, sPluginBundleVersion); } public PluginBundle 
loadPluginsFromEclipseProject(Path projectRoot) throws PluginException {
        try {
            // Validate the expected project layout: plugin/ folder, plugin.xml, pom.xml.
            if (!Files.isDirectory(projectRoot)) {
                throw new PluginException("No directory: " + projectRoot.toString());
            }
            final Path pluginFolder = projectRoot.resolve("plugin");
            if (!Files.isDirectory(pluginFolder)) {
                throw new PluginException("No 'plugin' directory found in " + projectRoot.toString());
            }
            Path pluginFile = pluginFolder.resolve("plugin.xml");
            if (!Files.exists(pluginFile)) {
                throw new PluginException("No 'plugin.xml' found in " + pluginFolder.toString());
            }
            PluginDescriptor pluginDescriptor = null;
            try (InputStream newInputStream = Files.newInputStream(pluginFile)) {
                pluginDescriptor = pluginManager.getPluginDescriptor(newInputStream);
            }
            Path pomFile = projectRoot.resolve("pom.xml");
            if (!Files.exists(pomFile)) {
                throw new PluginException("No pom.xml found in " + projectRoot);
            }
            // Path packageFile = projectRoot.resolve("package.json");
            // if (Files.exists(packageFile)) {
            // return loadJavaScriptProject(projectRoot, packageFile,
            // pluginFolder, pluginDescriptor);
            // } else if (Files.exists(pomFile)) {
            PluginBundle pluginBundle = loadJavaProject(projectRoot, pomFile, pluginFolder, pluginDescriptor);
            // } else {
            // throw new PluginException("No pom.xml or package.json found in "
            // + projectRoot.toString());
            List<SPluginInformation> plugins = new ArrayList<>();
            pluginManager.processPluginDescriptor(pluginDescriptor, plugins);
            for (SPluginInformation sPluginInformation : plugins) {
                if (sPluginInformation.isEnabled()) {
                    // For local plugins, we assume to install for all users
                    sPluginInformation.setInstallForAllUsers(true);
                    sPluginInformation.setInstallForNewUsers(true);
                    PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
                    if (pluginContext == null) {
                        throw new PluginException("No plugin context found for " + sPluginInformation.getIdentifier());
                    }
                }
            }
            try {
                // Notify listeners; failures here abort the load with a PluginException.
                long pluginBundleVersionId = pluginManager.pluginBundleInstalled(pluginBundle);
                for (SPluginInformation sPluginInformation : plugins) {
                    if (sPluginInformation.isEnabled()) {
                        PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
                        // PluginConfiguration pluginConfiguration = PluginConfiguration.fromDefaults(pluginContext.getPlugin().getSystemSettingsDefinition());
                        // pluginContext.initialize(pluginConfiguration);
                        pluginManager.pluginInstalled(pluginBundleVersionId, pluginContext, sPluginInformation);
                    }
                }
            } catch (Exception e) {
                LOGGER.error("", e);
                throw new PluginException(e);
            }
            return pluginBundle;
        } catch (JAXBException e) {
            throw new PluginException(e);
        } catch (FileNotFoundException e) {
            throw new PluginException(e);
        } catch (IOException e) {
            throw new PluginException(e);
        } catch (XmlPullParserException e) {
            throw new PluginException(e);
        }
    }

    /**
     * Resolves the Maven dependencies of {@code model} via the plugin repository (Aether),
     * appending each resolved JAR to {@code bimServerDependencies} and chaining a classloader
     * per dependency on top of {@code previous}.
     *
     * @return the new head of the classloader chain
     */
    private PublicFindClassClassLoader loadDependencies(Set<org.bimserver.plugins.Dependency> bimServerDependencies, Model model, PublicFindClassClassLoader previous) throws FileNotFoundException, IOException {
        List<org.apache.maven.model.Dependency> dependencies = model.getDependencies();
        Iterator<org.apache.maven.model.Dependency> it = dependencies.iterator();
        // Workspace-relative classpaths for the server's own modules.
        Path workspaceDir = Paths.get("..");
        bimServerDependencies.add(new org.bimserver.plugins.Dependency(workspaceDir.resolve("PluginBase/target/classes")));
        bimServerDependencies.add(new org.bimserver.plugins.Dependency(workspaceDir.resolve("Shared/target/classes")));
        while (it.hasNext()) {
            org.apache.maven.model.Dependency depend = it.next();
            try {
                if (depend.getGroupId().equals("org.opensourcebim") && (depend.getArtifactId().equals("shared") || depend.getArtifactId().equals("pluginbase") || depend.getArtifactId().equals("ifcplugins"))) {
                    // Skip this one, because we have already
                    // TODO we might want to check the version though
                    continue;
                }
                if (depend.isOptional() || "test".equals(depend.getScope())) {
                    continue;
                }
                Dependency dependency2 = new Dependency(new DefaultArtifact(depend.getGroupId() + ":" +
depend.getArtifactId() + ":jar:" + depend.getVersion()), "compile");
                DelegatingClassLoader depDelLoader = new DelegatingClassLoader(previous);
                if (!dependency2.getArtifact().isSnapshot()) {
                    if (dependency2.getArtifact().getFile() != null) {
                        // Artifact already resolved to a local file.
                        bimServerDependencies.add(new org.bimserver.plugins.Dependency(dependency2.getArtifact().getFile().toPath()));
                        loadDependencies(dependency2.getArtifact().getFile().toPath(), depDelLoader);
                    } else {
                        // Resolve the release artifact from the configured repositories.
                        ArtifactRequest request = new ArtifactRequest();
                        request.setArtifact(dependency2.getArtifact());
                        request.setRepositories(mavenPluginRepository.getRepositoriesAsList());
                        try {
                            ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), request);
                            if (resolveArtifact.getArtifact().getFile() != null) {
                                bimServerDependencies.add(new org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
                                loadDependencies(resolveArtifact.getArtifact().getFile().toPath(), depDelLoader);
                            } else {
                                // TODO error?
                            }
                        } catch (ArtifactResolutionException e) {
                            e.printStackTrace();
                        }
                    }
                } else {
                    // Snapshot projects linked in Eclipse
                    ArtifactRequest request = new ArtifactRequest();
                    // NOTE(review): this guard can never be true inside the snapshot branch — the body below is dead code; confirm intent.
                    if ((!"test".equals(dependency2.getScope()) && !dependency2.getArtifact().isSnapshot())) {
                        request.setArtifact(dependency2.getArtifact());
                        request.setRepositories(mavenPluginRepository.getLocalRepositories());
                        try {
                            ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), request);
                            if (resolveArtifact.getArtifact().getFile() != null) {
                                bimServerDependencies.add(new org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
                                loadDependencies(resolveArtifact.getArtifact().getFile().toPath(), depDelLoader);
                            } else {
                                // TODO error?
                            }
                        } catch (Exception e) {
                            LOGGER.info(dependency2.getArtifact().toString());
                            e.printStackTrace();
                        }
                        // bimServerDependencies.add(new
                        // org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
                    }
                }
                // Walk the transitive dependency graph of this artifact.
                ArtifactDescriptorRequest descriptorRequest = new ArtifactDescriptorRequest();
                descriptorRequest.setArtifact(dependency2.getArtifact());
                descriptorRequest.setRepositories(mavenPluginRepository.getRepositoriesAsList());
                ArtifactDescriptorResult descriptorResult = mavenPluginRepository.getSystem().readArtifactDescriptor(mavenPluginRepository.getSession(), descriptorRequest);
                CollectRequest collectRequest = new CollectRequest();
                collectRequest.setRootArtifact(descriptorResult.getArtifact());
                collectRequest.setDependencies(descriptorResult.getDependencies());
                collectRequest.setManagedDependencies(descriptorResult.getManagedDependencies());
                collectRequest.setRepositories(descriptorResult.getRepositories());
                DependencyNode node = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest).getRoot();
                DependencyRequest dependencyRequest = new DependencyRequest();
                dependencyRequest.setRoot(node);
                CollectResult collectResult = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest);
                PreorderNodeListGenerator nlg = new PreorderNodeListGenerator();
                // collectResult.getRoot().accept(new
                // ConsoleDependencyGraphDumper());
                collectResult.getRoot().accept(nlg);
                try {
                    mavenPluginRepository.getSystem().resolveDependencies(mavenPluginRepository.getSession(), dependencyRequest);
                } catch (DependencyResolutionException e) {
                    // Ignore
                }
                for (DependencyNode dependencyNode : nlg.getNodes()) {
                    ArtifactRequest newRequest = new ArtifactRequest(dependencyNode);
                    newRequest.setRepositories(mavenPluginRepository.getRepositoriesAsList());
                    ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), newRequest);
                    Artifact artifact = resolveArtifact.getArtifact();
                    Path jarFile = Paths.get(artifact.getFile().getAbsolutePath());
                    loadDependencies(jarFile, depDelLoader);
                    Artifact versionArtifact = new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "pom", artifact.getVersion());
                    ArtifactRequest request = new ArtifactRequest();
                    request.setArtifact(versionArtifact);
                    request.setRepositories(mavenPluginRepository.getRepositoriesAsList());
                    // try {
                    // ArtifactResult resolveArtifact =
                    // mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(),
                    // request);
                    // File depPomFile =
                    // resolveArtifact.getArtifact().getFile();
                    // if (depPomFile != null) {
                    // MavenXpp3Reader mavenreader = new MavenXpp3Reader();
                    // Model depModel = null;
                    // try (FileReader reader = new FileReader(depPomFile)) {
                    // try {
                    // depModel = mavenreader.read(reader);
                    // } catch (XmlPullParserException e) {
                    // e.printStackTrace();
                    // previous = loadDependencies(bimServerDependencies,
                    // depModel, previous);
                    // } else {
                    // LOGGER.info("Artifact not found " + versionArtifact);
                    // } catch (ArtifactResolutionException e1) {
                    // LOGGER.error(e1.getMessage());
                    // EclipsePluginClassloader depLoader = new
                    // EclipsePluginClassloader(depDelLoader, projectRoot);
                    bimServerDependencies.add(new org.bimserver.plugins.Dependency(jarFile));
                }
                // This dependency's loader becomes the parent of the next one.
                previous = depDelLoader;
            } catch (DependencyCollectionException e) {
                e.printStackTrace();
            } catch (ArtifactDescriptorException e2) {
                e2.printStackTrace();
            } catch (ArtifactResolutionException e) {
                e.printStackTrace();
            }
        }
        return previous;
    }

    /**
     * Wraps a single dependency JAR in a FileJarClassLoader and attaches it to the chain.
     * Non-JAR paths are ignored.
     */
    private void loadDependencies(Path libFile, DelegatingClassLoader classLoader) throws FileNotFoundException, IOException {
        if (libFile.getFileName().toString().toLowerCase().endsWith(".jar")) {
            FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, classLoader, libFile);
            jarClassLoaders.add(jarClassLoader);
            classLoader.add(jarClassLoader);
        }
    }

    /** Same as {@link #loadPluginsFromEclipseProject(Path)} but logs instead of throwing. */
    public void loadPluginsFromEclipseProjectNoExceptions(Path
projectRoot) {
        try {
            loadPluginsFromEclipseProject(projectRoot);
        } catch (PluginException e) {
            LOGGER.error("", e);
        }
    }

    /**
     * Scans a workspace directory and loads every child project that has a plugin/plugin.xml.
     *
     * @param showExceptions when true, the first failing project aborts with a PluginException;
     *        when false, failures are logged and scanning continues
     */
    public void loadAllPluginsFromEclipseWorkspace(Path file, boolean showExceptions) throws PluginException, IOException {
        if (file != null && Files.isDirectory(file)) {
            for (Path project : PathUtils.list(file)) {
                if (Files.isDirectory(project)) {
                    Path pluginDir = project.resolve("plugin");
                    if (Files.exists(pluginDir)) {
                        Path pluginFile = pluginDir.resolve("plugin.xml");
                        if (Files.exists(pluginFile)) {
                            if (showExceptions) {
                                loadPluginsFromEclipseProject(project);
                            } else {
                                loadPluginsFromEclipseProjectNoExceptions(project);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Loads plugins from {@code directory} itself (if it is a project), then treats it and
     * each subdirectory as a workspace to scan.
     */
    public void loadAllPluginsFromEclipseWorkspaces(Path directory, boolean showExceptions) throws PluginException, IOException {
        if (!Files.isDirectory(directory)) {
            return;
        }
        if (Files.exists(directory.resolve("plugin/plugin.xml"))) {
            if (showExceptions) {
                loadPluginsFromEclipseProject(directory);
            } else {
                loadPluginsFromEclipseProjectNoExceptions(directory);
            }
        }
        loadAllPluginsFromEclipseWorkspace(directory, showExceptions);
        for (Path workspace : PathUtils.list(directory)) {
            if (Files.isDirectory(workspace)) {
                loadAllPluginsFromEclipseWorkspace(workspace, showExceptions);
            }
        }
    }

    /**
     * Two-stage plugin load: stage 1 loads and initializes the bundle from the JAR and rolls
     * back (close, unregister, delete JAR) on failure; stage 2 notifies listeners and
     * uninstalls the bundle if a listener fails.
     */
    public PluginBundle loadPlugin(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, Path target, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, List<SPluginInformation> plugins, ClassLoader parentClassLoader) throws Exception {
        PluginBundle pluginBundle = null;
        // Stage 1, load all plugins from the JAR file and initialize them
        try {
            pluginBundle = loadPluginsFromJar(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, parentClassLoader);
            if (plugins.isEmpty()) {
                LOGGER.warn("No plugins given to install for bundle " + sPluginBundle.getName());
            }
            for (SPluginInformation sPluginInformation : plugins) {
                if (sPluginInformation.isEnabled()) {
                    PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
                    if (pluginContext == null) {
                        LOGGER.info("No plugin context found for " + sPluginInformation.getIdentifier());
                    } else {
                        PluginConfiguration pluginConfiguration = PluginConfiguration.fromDefaults(pluginContext.getPlugin().getSystemSettingsDefinition());
                        pluginContext.initialize(pluginConfiguration);
                    }
                }
            }
        } catch (Exception e) {
            // Roll back everything stage 1 did before rethrowing.
            if (pluginBundle != null) {
                pluginBundle.close();
            }
            pluginBundleVersionIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier);
            pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
            Files.deleteIfExists(target);
            throw e;
        }
        // Stage 2, if all went well, notify the listeners of this plugin, if
        // anything goes wrong in the notifications, the plugin bundle will be
        // uninstalled
        try {
            long pluginBundleVersionId = pluginManager.pluginBundleInstalled(pluginBundle);
            for (SPluginInformation sPluginInformation : plugins) {
                if (sPluginInformation.isEnabled()) {
                    PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
                    if (pluginContext != null) {
                        pluginManager.pluginInstalled(pluginBundleVersionId, pluginContext, sPluginInformation);
                    }
                }
            }
            return pluginBundle;
        } catch (Exception e) {
            uninstall(pluginBundleVersionIdentifier);
            LOGGER.error("", e);
            throw e;
        }
    }

    /**
     * Uninstalls a bundle version: closes it, unregisters it, deletes its JAR from
     * pluginsDir, and notifies listeners. IO problems are logged, not thrown.
     */
    public void uninstall(PluginBundleVersionIdentifier pluginBundleVersionIdentifier) {
        PluginBundle pluginBundle = pluginBundleVersionIdentifierToPluginBundle.get(pluginBundleVersionIdentifier);
        if (pluginBundle == null) {
            return;
        }
        try {
            pluginBundle.close();
            pluginBundleVersionIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier);
            pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
            pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
            for (PluginContext pluginContext : pluginBundle) {
pluginManager.removeImplementation(pluginContext); } Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName()); Files.delete(target); for (PluginContext pluginContext : pluginBundle) { pluginManager.pluginUninstalled(pluginContext); } pluginManager.pluginBundleUninstalled(pluginBundle); } catch (IOException e) { LOGGER.error("", e); } } public PluginBundle update(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, Path jarFile, Path pomFile, List<SPluginInformation> plugins) throws Exception { PluginBundle existingPluginBundle = pluginBundleIdentifierToPluginBundle.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier()); if (existingPluginBundle == null) { throw new UserException("No previous version of plugin bundle " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " found"); } try { existingPluginBundle.close(); if (pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier()) == null) { LOGGER.warn("Previous version of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found"); } PluginBundleVersionIdentifier currentVersion = pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier()); if (pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier()) == null) { LOGGER.warn("Previous version of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found"); } if (pluginBundleVersionIdentifierToPluginBundle.remove(currentVersion) == null) { LOGGER.warn("Previous version (" + currentVersion + ") of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found"); } for (PluginContext pluginContext : existingPluginBundle) { pluginManager.removeImplementation(pluginContext); } // TODO in case the update fails (new plugin does not load 
successfully), we need to be able to replace the removed file... So we should not remove it here but rename it and then remove it later on if (existingPluginBundle.getPluginBundle().getInstalledVersion().getType() == SPluginBundleType.MAVEN) { Path target = pluginsDir.resolve(currentVersion.getFileName()); Files.delete(target); } // for (PluginContext pluginContext : existingPluginBundle) { // pluginChangeListener.pluginUninstalled(pluginContext); } catch (IOException e) { LOGGER.error("", e); } Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName()); if (Files.exists(target)) { throw new PluginException("This plugin has already been installed " + target.getFileName().toString()); } Files.copy(jarFile, target); MavenXpp3Reader mavenreader = new MavenXpp3Reader(); Model model = null; try (FileReader fileReader = new FileReader(pomFile.toFile())) { model = mavenreader.read(fileReader); } DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader()); for (org.apache.maven.model.Dependency dependency : model.getDependencies()) { if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) { // TODO Skip, we should also check the version though } else { PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependency.getGroupId(), dependency.getArtifactId()); if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) { // if (false) { // VersionRange versionRange = // VersionRange.createFromVersion(dependency.getVersion()); // String version = // pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion(); // ArtifactVersion artifactVersion = new // DefaultArtifactVersion(version); // if (versionRange.containsVersion(artifactVersion)) { // } else { // throw new Exception("Required dependency " + // pluginBundleIdentifier + " is 
installed, but it's version // (" + version + ") does not comply to the required version // (" + dependency.getVersion() + ")"); // } else { LOGGER.info("Skipping strict dependency checking for dependency " + dependency.getArtifactId()); } else { if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) { throw new Exception("Required dependency " + pluginBundleIdentifier + " is not installed"); } else { MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(model.getRepositories().get(0).getUrl(), dependency.getGroupId(), dependency.getArtifactId()); try { Path depJarFile = mavenPluginLocation.getVersionJar(dependency.getVersion()); FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile); jarClassLoaders.add(jarClassLoader); delegatingClassLoader.add(jarClassLoader); } catch (Exception e) { } } } } } PluginBundle pluginBundle = null; // Stage 1, load all plugins from the JAR file and initialize them try { pluginBundle = loadPluginsFromJar(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, delegatingClassLoader); for (SPluginInformation sPluginInformation : plugins) { if (sPluginInformation.isEnabled()) { PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier()); PluginContext previousContext = existingPluginBundle.getPluginContext(pluginContext.getIdentifier()); // TODO when there was no previous plugin (new plugin in bundle for example), we should use the default system settings of the particular plugin... not null pluginContext.getPlugin().init(pluginContext, previousContext == null ? 
null : previousContext.getSystemSettings()); } } } catch (Exception e) { Files.delete(target); LOGGER.error("", e); throw e; } // Stage 2, if all went well, notify the listeners of this plugin, if // anything goes wrong in the notifications, the plugin bundle will be // uninstalled try { long pluginBundleVersionId = pluginManager.pluginBundleUpdated(pluginBundle); for (SPluginInformation sPluginInformation : plugins) { if (sPluginInformation.isEnabled()) { PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier()); pluginManager.pluginUpdated(pluginBundleVersionId, pluginContext, sPluginInformation); } } return pluginBundle; } catch (Exception e) { uninstall(pluginBundleVersionIdentifier); LOGGER.error("", e); throw e; } } public PluginBundle getPluginBundle(PluginBundleIdentifier pluginIdentifier) { return pluginBundleIdentifierToPluginBundle.get(pluginIdentifier); } public Collection<PluginBundle> getPluginBundles() { return pluginBundleVersionIdentifierToPluginBundle.values(); } public void close() { for (FileJarClassLoader fileJarClassLoader : jarClassLoaders) { try { fileJarClassLoader.close(); } catch (IOException e) { LOGGER.error("", e); } } } private SPluginBundleVersion createPluginBundleVersionFromMavenModel(Model model, boolean isLocalDev) { SPluginBundleVersion sPluginBundleVersion = new SPluginBundleVersion(); sPluginBundleVersion.setType(isLocalDev ? 
SPluginBundleType.LOCAL_DEV : SPluginBundleType.MAVEN); sPluginBundleVersion.setGroupId(model.getGroupId()); sPluginBundleVersion.setArtifactId(model.getArtifactId()); sPluginBundleVersion.setVersion(model.getVersion()); sPluginBundleVersion.setDescription(model.getDescription()); sPluginBundleVersion.setRepository("local"); sPluginBundleVersion.setMismatch(false); // TODO sPluginBundleVersion.setOrganization(model.getOrganization().getName()); sPluginBundleVersion.setName(model.getName()); return sPluginBundleVersion; } public SPluginBundle extractPluginBundleFromJar(Path jarFilePath) throws PluginException { String filename = jarFilePath.getFileName().toString(); PluginBundleVersionIdentifier pluginBundleVersionIdentifier = PluginBundleVersionIdentifier.fromFileName(filename); try (JarFile jarFile = new JarFile(jarFilePath.toFile())) { String pomLocation = "META-INF/maven/" + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getGroupId() + "/" + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getArtifactId() + "/" + "pom.xml"; ZipEntry pomEntry = jarFile.getEntry(pomLocation); if (pomEntry == null) { throw new PluginException("No pom.xml found in JAR file " + jarFilePath.toString() + ", " + pomLocation); } MavenXpp3Reader mavenreader = new MavenXpp3Reader(); Model model = mavenreader.read(jarFile.getInputStream(pomEntry)); SPluginBundle sPluginBundle = new SPluginBundle(); sPluginBundle.setOrganization(model.getOrganization().getName()); sPluginBundle.setName(model.getName()); return sPluginBundle; } catch (IOException e) { throw new PluginException(e); } catch (XmlPullParserException e) { throw new PluginException(e); } } }
package sample.usecase; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import sample.context.actor.Actor; import sample.context.lock.IdLockHandler.LockType; import sample.model.asset.CashInOut; import sample.model.asset.CashInOut.RegCashOut; import sample.usecase.mail.AssetMailDeliver; @Service public class AssetService extends ServiceSupport { @Autowired private AssetMailDeliver mail; /** Actor */ @Override protected Actor actor() { return ServiceUtils.actorUser(super.actor()); } /** * * low: @Transactional * low: CashInOut * UI */ public List<CashInOut> findUnprocessedCashOut() { final String accId = actor().getId(); return tx(accId, LockType.Read, () -> { return CashInOut.findUnprocessed(rep, accId); }); } /** * * low: UI * low: * low: * @return ID */ public Long withdraw(final RegCashOut p) { return audit.audit("", () -> { p.setAccountId(actor().getId()); // low: ID(WRITE) CashInOut cio = tx(actor().getId(), LockType.Write, () -> { return CashInOut.withdraw(rep, businessDay(), p); }); // low: mail.sendWithdrawal(cio); return cio.getId(); }); } }
package de.gurkenlabs.litiengine.gui;

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.event.KeyEvent;
import java.awt.geom.Rectangle2D;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.IntConsumer;

import de.gurkenlabs.litiengine.Align;
import de.gurkenlabs.litiengine.graphics.ShapeRenderer;
import de.gurkenlabs.litiengine.graphics.Spritesheet;
import de.gurkenlabs.litiengine.input.Input;

/**
 * A scrollable list GUI component that displays a 2D array of content as a grid
 * of {@link ImageComponent} entries. Content is addressed column-first:
 * {@code content[column][row]}. Only {@code shownColumns} x {@code shownRows}
 * entries are visible at once; vertical/horizontal sliders (created on demand)
 * shift the visible window by adjusting the lower bounds.
 */
public class ListField extends GuiComponent {
  private boolean arrowKeyNavigation;
  private Spritesheet buttonSprite;
  private Spritesheet entrySprite;
  private final List<IntConsumer> changeConsumer;
  private final Object[][] content;
  private int nbOfColumns;

  private final CopyOnWriteArrayList<CopyOnWriteArrayList<ImageComponent>> listEntries;

  // Index of the first visible row/column (the scroll offset).
  private int verticalLowerBound = 0;
  private int horizontalLowerBound = 0;

  private ImageComponent selectedComponent;

  private int selectionColumn;
  private int selectionRow;

  private boolean selectEntireColumn = false;
  private boolean selectEntireRow = false;

  private final int shownRows;
  private final int shownColumns;

  private VerticalSlider verticalSlider;
  private HorizontalSlider horizontalSlider;
  private final double sliderSize = 100.0;
  private boolean sliderInside = false;

  /**
   * Creates a vertical list field whose content is a single column
   * (only {@code content[0][...]} is accessible).
   *
   * @param x x-coordinate of the ListField
   * @param y y-coordinate of the ListField
   * @param width width of the ListField
   * @param height height of the ListField
   * @param content the 1-dimensional array to show
   * @param shownRows number of rows visible before scrolling is required
   * @param entrySprite the entry sprite
   * @param buttonSprite the button sprite
   * @see #ListField(double, double, double, double, Object[], int, boolean, Spritesheet, Spritesheet)
   */
  public ListField(final double x, final double y, final double width, final double height, final Object[] content, final int shownRows, final Spritesheet entrySprite, final Spritesheet buttonSprite) {
    this(x, y, width, height, new Object[][] {content}, shownRows, 1, false, entrySprite, buttonSprite);
  }

  /**
   * Creates a vertical list field whose content is a single column
   * (only {@code content[0][...]} is accessible).
   *
   * @param x x-coordinate of the ListField
   * @param y y-coordinate of the ListField
   * @param width width of the ListField
   * @param height height of the ListField
   * @param content the 1-dimensional array to show
   * @param shownRows number of rows visible before scrolling is required
   * @param sliderInside if true, sliders are drawn inside the ListField bounds
   *        (useful when the ListField's width matches the screen's width)
   * @param entrySprite the entry sprite
   * @param buttonSprite the button sprite
   */
  public ListField(final double x, final double y, final double width, final double height, final Object[] content, final int shownRows, final boolean sliderInside, final Spritesheet entrySprite, final Spritesheet buttonSprite) {
    this(x, y, width, height, new Object[][] {content}, shownRows, 1, sliderInside, entrySprite, buttonSprite);
  }

  /**
   * Creates a 2D vertical list field. Content is arranged as columns of
   * elements: {@code content[column][row]}.
   *
   * @param x x-coordinate of the ListField
   * @param y y-coordinate of the ListField
   * @param width width of the ListField
   * @param height height of the ListField
   * @param content the 2-dimensional array to show
   * @param shownRows number of rows visible before scrolling is required
   * @param shownColumns number of columns visible before scrolling is required
   * @param entrySprite the entry sprite
   * @param buttonSprite the button sprite
   * @see #ListField(double, double, double, double, Object[][], int, int, boolean, Spritesheet, Spritesheet)
   */
  public ListField(final double x, final double y, final double width, final double height, final Object[][] content, final int shownRows, final int shownColumns, final Spritesheet entrySprite, final Spritesheet buttonSprite) {
    this(x, y, width, height, content, shownRows, shownColumns, false, entrySprite, buttonSprite);
  }

  /**
   * Creates a 2D vertical list field. Content is arranged as columns of
   * elements: {@code content[column][row]}.
   *
   * @param x x-coordinate of the ListField
   * @param y y-coordinate of the ListField
   * @param width width of the ListField
   * @param height height of the ListField
   * @param content the 2-dimensional array to show
   * @param shownRows number of rows visible before scrolling is required
   * @param shownColumns number of columns visible before scrolling is required
   * @param sliderInside if true, sliders are drawn inside the ListField bounds
   * @param entrySprite the entry sprite
   * @param buttonSprite the button sprite
   */
  public ListField(final double x, final double y, final double width, final double height, final Object[][] content, final int shownRows, final int shownColumns, final boolean sliderInside, final Spritesheet entrySprite, final Spritesheet buttonSprite) {
    super(x, y, width, height);
    this.changeConsumer = new CopyOnWriteArrayList<>();
    // FIX: removed leftover debug output (System.out.println(Arrays.deepToString(content))).
    this.content = content;
    this.nbOfColumns = this.content.length;
    this.listEntries = new CopyOnWriteArrayList<>();
    this.buttonSprite = buttonSprite;
    this.entrySprite = entrySprite;
    this.sliderInside = sliderInside;
    this.shownRows = shownRows;
    this.shownColumns = shownColumns;
    this.initSliders();
    this.initContentList();
    this.prepareInput();
  }

  public Spritesheet getButtonSprite() {
    return this.buttonSprite;
  }

  public List<IntConsumer> getChangeConsumer() {
    return this.changeConsumer;
  }

  public Object[][] getContent() {
    return this.content;
  }

  public Spritesheet getEntrySprite() {
    return this.entrySprite;
  }

  public int getHorizontalLowerBound() {
    return this.horizontalLowerBound;
  }

  public HorizontalSlider getHorizontalSlider() {
    return this.horizontalSlider;
  }

  /**
   * Returns all visible list entry components of a specified column.
   *
   * @param column the column
   * @return the column's entry components, or null if the column index is out of range
   */
  public List<ImageComponent> getListEntry(final int column) {
    if (column < 0 || column >= this.listEntries.size()) {
      return null;
    }
    return this.listEntries.get(column);
  }

  /**
   * Returns the number of rows of the tallest column.
   *
   * @return the length of the tallest column
   */
  public int getMaxRows() {
    int result = 0;
    for (Object[] o : this.getContent()) {
      if (o.length > result) {
        result = o.length;
      }
    }
    return result;
  }

  public int getNumberOfShownRows() {
    return this.shownRows;
  }

  public int getNumberOfShownColumns() {
    return this.shownColumns;
  }

  public ImageComponent getSelectedComponent() {
    return this.selectedComponent;
  }

  public Object getSelectedObject() {
    return this.getContent()[this.selectionColumn][this.selectionRow];
  }

  /**
   * Returns the selected column.
   *
   * @return number of the column; -1 if {@link #isEntireRowSelected()} is true
   */
  public int getSelectionColumn() {
    if (this.isEntireRowSelected()) {
      return -1;
    }
    return this.selectionColumn;
  }

  /**
   * Returns the selected row.
   *
   * @return number of the row
   */
  public int getSelectionRow() {
    return this.selectionRow;
  }

  public int getVerticalLowerBound() {
    return this.verticalLowerBound;
  }

  public VerticalSlider getVerticalSlider() {
    return this.verticalSlider;
  }

  public boolean isArrowKeyNavigation() {
    return this.arrowKeyNavigation;
  }

  /** Registers a consumer invoked with the selected row whenever the selection changes. */
  public void onChange(final IntConsumer c) {
    this.getChangeConsumer().add(c);
  }

  /**
   * Re-synchronizes the visible entry texts with the content window defined by
   * the lower bounds, and re-resolves {@link #getSelectedComponent()}.
   */
  public void refresh() {
    for (int column = 0; column < this.getNumberOfShownColumns(); column++) {
      for (int row = 0; row < this.getNumberOfShownRows(); row++) {
        // NOTE(review): bounds check uses the visible column index, while the text lookup
        // below applies the scroll offsets — verify short columns behave when scrolled.
        if (this.getContent()[column].length <= row) {
          continue;
        }
        if (this.getListEntry(column).get(row) != null) {
          this.getListEntry(column).get(row).setText(this.getContent()[column + this.getHorizontalLowerBound()][row + this.getVerticalLowerBound()].toString());
        }
      }
    }

    // Resolve the visible component matching the logical selection, if it is on screen.
    if (!this.isEntireRowSelected() && this.selectionColumn >= this.getHorizontalLowerBound() && this.selectionColumn < this.getHorizontalLowerBound() + this.getNumberOfShownColumns() && this.selectionRow >= this.getVerticalLowerBound()
        && this.selectionRow < this.getVerticalLowerBound() + this.getNumberOfShownRows()) {
      this.selectedComponent = this.getListEntry(this.selectionColumn - this.getHorizontalLowerBound()).get(this.selectionRow - this.getVerticalLowerBound());
    } else if (this.isEntireRowSelected() && this.selectionColumn >= 0 && this.selectionColumn < this.nbOfColumns && this.selectionRow >= this.getVerticalLowerBound() && this.selectionRow < this.getVerticalLowerBound() + this.getNumberOfShownRows()) {
      this.selectedComponent = this.getListEntry(0).get(this.selectionRow - this.getVerticalLowerBound());
    } else {
      this.selectedComponent = null;
    }

    if (this.selectedComponent != null) {
      this.selectedComponent.setSelected(true);
    }
  }

  @Override
  public void render(final Graphics2D g) {
    super.render(g);
    // Draw a white outline around the selected entry (or the whole row/column).
    if (this.selectedComponent != null) {
      Rectangle2D border;
      double borderWidth = this.selectedComponent.getWidth() + 2;
      double borderHeight = this.selectedComponent.getHeight() + 2;
      if (this.isEntireRowSelected()) {
        borderWidth = this.getWidth() + 2;
      }
      if (this.getVerticalSlider() != null && this.getVerticalSlider().isVisible() && this.isSliderInside()) {
        borderWidth = borderWidth - this.getVerticalSlider().getWidth();
      }
      if (this.isEntireColumnSelected()) {
        borderHeight = this.getHeight() + 2;
      }
      border = new Rectangle2D.Double(this.getX() - 1, this.selectedComponent.getY() - 1, borderWidth, borderHeight);
      g.setColor(Color.WHITE);
      ShapeRenderer.renderOutline(g, border, 2);
    }
  }

  public void setArrowKeyNavigation(final boolean arrowKeyNavigation) {
    this.arrowKeyNavigation = arrowKeyNavigation;
  }

  public void setButtonSprite(final Spritesheet buttonSprite) {
    this.buttonSprite = buttonSprite;
  }

  public void setEntrySprite(final Spritesheet entrySprite) {
    this.entrySprite = entrySprite;
  }

  /**
   * Enables or disables mouse-event forwarding for every entry of a column.
   *
   * @param column the column whose entries are affected
   * @param forwardMouseEvents whether entries forward mouse events
   */
  public void setForwardMouseEvents(final int column, final boolean forwardMouseEvents) {
    // FIX: was "column < 0 && column >= this.nbOfColumns", which is always false,
    // so out-of-range columns fell through and caused an NPE via getListEntry(column).
    if (column < 0 || column >= this.nbOfColumns) {
      return;
    }
    for (ImageComponent comp : this.getListEntry(column)) {
      comp.setForwardMouseEvents(forwardMouseEvents);
    }
  }

  public void setHorizontalLowerBound(final int lowerBound) {
    this.horizontalLowerBound = lowerBound;
  }

  /**
   * Selects the given content cell, scrolling the visible window by one step if the
   * selection would fall outside of it, and notifies all change consumers.
   *
   * @param column content column to select
   * @param row content row to select
   */
  public void setSelection(final int column, final int row) {
    if (column < 0 || column >= this.nbOfColumns || row < 0 || row >= this.getContent()[column].length) {
      return;
    }
    this.selectionColumn = column;
    this.selectionRow = row;

    if (this.selectionRow >= this.getVerticalLowerBound() + this.getNumberOfShownRows()) {
      this.setVerticalLowerBound(this.getVerticalLowerBound() + 1);
    } else if (this.selectionRow < this.getVerticalLowerBound() && this.getVerticalLowerBound() > 0) {
      this.setVerticalLowerBound(this.getVerticalLowerBound() - 1);
    }
    if (this.selectionColumn >= this.getHorizontalLowerBound() + this.getNumberOfShownColumns()) {
      this.setHorizontalLowerBound(this.getHorizontalLowerBound() + 1);
    } else if (this.selectionColumn < this.getHorizontalLowerBound() && this.getHorizontalLowerBound() > 0) {
      this.setHorizontalLowerBound(this.getHorizontalLowerBound() - 1);
    }

    this.getChangeConsumer().forEach(consumer -> consumer.accept(this.selectionRow));
    this.refresh();
  }

  /**
   * If set to true, selecting an element will show a selection of the entire
   * column that element is on, without taking account of its row.
   * <br><br>
   * Set to <b>false</b> as default.
   *
   * @param selectEntireColumn a boolean
   */
  public void setSelectEntireColumn(boolean selectEntireColumn) {
    this.selectEntireColumn = selectEntireColumn;
  }

  /**
   * If set to true, selecting an element will show a selection of the entire
   * row that element is on, without taking account of its column.
   * <br><br>
   * Set to <b>false</b> as default.
   *
   * @param selectEntireRow a boolean
   */
  public void setSelectEntireRow(boolean selectEntireRow) {
    this.selectEntireRow = selectEntireRow;
  }

  public void setVerticalLowerBound(final int lowerBound) {
    this.verticalLowerBound = lowerBound;
  }

  /**
   * Creates the visible grid of {@link ImageComponent} entries, wires click
   * handlers that translate component positions into content selections, and
   * couples the sliders to the scroll bounds.
   */
  private void initContentList() {
    final double columnWidth = this.getWidth() / this.getNumberOfShownColumns();
    final double rowHeight = this.getHeight() / this.getNumberOfShownRows();
    for (int column = 0; column < this.getNumberOfShownColumns(); column++) {
      this.listEntries.add(new CopyOnWriteArrayList<ImageComponent>());
      for (int row = 0; row < this.getNumberOfShownRows(); row++) {
        if (this.getContent()[column].length <= row) {
          continue;
        }
        ImageComponent entryComponent;
        if (this.getContent()[column][row] == null) {
          entryComponent = new ImageComponent(this.getX() + (columnWidth * column), this.getY() + (rowHeight * row), columnWidth, rowHeight, this.entrySprite, "", null);
        } else {
          entryComponent = new ImageComponent(this.getX() + (columnWidth * column), this.getY() + (rowHeight * row), columnWidth, rowHeight, this.entrySprite, this.getContent()[column][row].toString(), null);
        }
        if (this.isSliderInside()) {
          // Shrink entries so that inside sliders do not cover content.
          // NOTE(review): assumes sliders exist when sliderInside is true; if
          // initSliders() skipped creation (content fits), this NPEs — confirm.
          entryComponent.setX(this.getX() + ((columnWidth - (this.getVerticalSlider().getWidth() / this.getNumberOfShownColumns())) * column));
          entryComponent.setY(this.getY() + ((rowHeight - (this.getHorizontalSlider().getHeight() / this.getNumberOfShownRows())) * row));
          entryComponent.setWidth(entryComponent.getWidth() - (this.getVerticalSlider().getWidth() / this.getNumberOfShownColumns()));
          entryComponent.setHeight(entryComponent.getHeight() - (this.getHorizontalSlider().getHeight() / this.getNumberOfShownRows()));
        }
        entryComponent.setTextAlign(Align.LEFT);
        this.getListEntry(column).add(entryComponent);
      }
      this.getComponents().addAll(this.getListEntry(column));
      final int col = column;
      for (final ImageComponent comp : this.getListEntry(col)) {
        comp.onClicked(e -> {
          this.setSelection(this.getHorizontalLowerBound() + col % this.getNumberOfShownColumns(), this.getVerticalLowerBound() + this.getListEntry(col).indexOf(comp) % this.getNumberOfShownRows());
          this.refresh();
        });
      }
    }

    // Keep slider knobs in sync when the selection moves the window.
    this.onChange(s -> {
      if (this.getVerticalSlider() != null) {
        this.getVerticalSlider().setCurrentValue(this.getVerticalLowerBound());
        this.getVerticalSlider().getSliderComponent().setLocation(this.getVerticalSlider().getRelativeSliderPosition());
      }
      if (this.getHorizontalSlider() != null) {
        this.getHorizontalSlider().setCurrentValue(this.getHorizontalLowerBound());
        this.getHorizontalSlider().getSliderComponent().setLocation(this.getHorizontalSlider().getRelativeSliderPosition());
      }
    });
    if (this.getVerticalSlider() != null) {
      this.getVerticalSlider().onChange(sliderValue -> {
        this.setVerticalLowerBound(sliderValue.intValue());
        this.getVerticalSlider().getSliderComponent().setLocation(this.getVerticalSlider().getRelativeSliderPosition());
        this.refresh();
      });
    }
    if (this.getHorizontalSlider() != null) {
      this.getHorizontalSlider().onChange(sliderValue -> {
        this.setHorizontalLowerBound(sliderValue.intValue());
        this.getHorizontalSlider().getSliderComponent().setLocation(this.getHorizontalSlider().getRelativeSliderPosition());
        this.refresh();
      });
    }
  }

  /**
   * Creates the vertical and horizontal sliders when the content has more rows
   * than can be shown. Sliders are placed inside or outside the bounds
   * depending on {@link #isSliderInside()}.
   */
  private void initSliders() {
    final int maxNbOfRows = this.getMaxRows() - this.getNumberOfShownRows();
    // NOTE(review): both sliders are gated on surplus *rows* only; a field with
    // surplus columns but no surplus rows gets no horizontal slider — confirm intended.
    if (maxNbOfRows > 0) {
      if (this.isSliderInside()) {
        this.verticalSlider = new VerticalSlider(this.getX() + this.getWidth() - this.sliderSize, this.getY(), this.sliderSize, this.getHeight() - this.sliderSize, 0, this.getMaxRows() - this.getNumberOfShownRows(), 1);
        this.horizontalSlider = new HorizontalSlider(this.getX(), this.getY() + this.getHeight() - this.sliderSize, this.getWidth() - this.sliderSize, this.sliderSize, 0, this.nbOfColumns - this.getNumberOfShownColumns(), 1);
      } else {
        this.verticalSlider = new VerticalSlider(this.getX() + this.getWidth(), this.getY(), this.sliderSize, this.getHeight(), 0, this.getMaxRows() - this.getNumberOfShownRows(), 1);
        this.horizontalSlider = new HorizontalSlider(this.getX(), this.getY() + this.getHeight(), this.getWidth(), this.sliderSize, 0, this.nbOfColumns - this.getNumberOfShownColumns(), 1);
      }
      this.getVerticalSlider().setCurrentValue(this.getVerticalLowerBound());
      this.getHorizontalSlider().setCurrentValue(this.getHorizontalLowerBound());
      this.getComponents().add(this.getVerticalSlider());
      this.getComponents().add(this.getHorizontalSlider());
    }
  }

  /**
   * See {@link #setSelectEntireColumn(boolean)}
   *
   * @return true if selection is set to select the entire column; false otherwise
   */
  public boolean isEntireColumnSelected() {
    return this.selectEntireColumn;
  }

  /**
   * See {@link #setSelectEntireRow(boolean)}
   *
   * @return true if selection is set to select the entire row; false otherwise
   */
  public boolean isEntireRowSelected() {
    return this.selectEntireRow;
  }

  /**
   * Verify if sliders are set to be inside the ListField.
   *
   * @return true if slider is set to be inside the ListField; false otherwise
   */
  public boolean isSliderInside() {
    return this.sliderInside;
  }

  /**
   * Registers keyboard (arrow-key) navigation and mouse-wheel scrolling.
   * Arrow keys only act when {@link #isArrowKeyNavigation()} is enabled.
   */
  private void prepareInput() {
    Input.keyboard().onKeyTyped(KeyEvent.VK_UP, e -> {
      if (this.isSuspended() || !this.isVisible() || !this.isArrowKeyNavigation()) {
        return;
      }
      this.setSelection(this.getHorizontalLowerBound(), this.selectionRow - 1);
    });

    Input.keyboard().onKeyTyped(KeyEvent.VK_DOWN, e -> {
      if (this.isSuspended() || !this.isVisible() || !this.isArrowKeyNavigation()) {
        return;
      }
      this.setSelection(this.getHorizontalLowerBound(), this.selectionRow + 1);
    });

    Input.keyboard().onKeyTyped(KeyEvent.VK_LEFT, e -> {
      if (this.isSuspended() || !this.isVisible() || !this.isArrowKeyNavigation()) {
        return;
      }
      this.setSelection(this.getHorizontalLowerBound() - 1, this.selectionRow);
    });

    Input.keyboard().onKeyTyped(KeyEvent.VK_RIGHT, e -> {
      if (this.isSuspended() || !this.isVisible() || !this.isArrowKeyNavigation()) {
        return;
      }
      this.setSelection(this.getHorizontalLowerBound() + 1, this.selectionRow);
    });

    this.onMouseWheelScrolled(e -> {
      if (this.isSuspended() || !this.isVisible()) {
        return;
      }
      if (this.isHovered()) {
        if (e.getEvent().getWheelRotation() < 0) {
          this.setSelection(this.getHorizontalLowerBound(), this.selectionRow - 1);
        } else {
          this.setSelection(this.getHorizontalLowerBound(), this.selectionRow + 1);
        }
        return;
      }
    });
  }
}
package de.hs_mannheim.IB.SS15.OOT;

import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTable;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;

//import sun.nio.ch.SelChImpl;
import de.hs_mannheim.IB.SS15.OOT.Participants.Desire;
import de.hs_mannheim.IB.SS15.OOT.Participants.Examinee;

/**
 * Window that manages the students (Examinees) of the currently selected
 * subject: a subject list on the left, the subject's students on the right,
 * and buttons to add/remove students and to attach desires.
 */
public class StudentsGUI extends JFrame implements ActionListener {

  private static final long serialVersionUID = 1L;

  private GUI gui;

  private JPanel south;
  private JButton btnAddStudent;
  private JButton btnRemoveStudent;
  private JButton btnAddDesire;

  /** Subject whose students are currently shown; updated by the subject table's selection. */
  public static Subject currentSubject;

  private SelectSubjectTableModel selectSubjectTableModel;
  private JTable selectSubjectTable;

  private MainTableModel mainTableModel;
  private JTable mainJTable;

  public StudentsGUI(GUI gui) {
    this.gui = gui;
    setTitle("Studenten");

    // Default to the first subject, if any exist.
    if (gui.getBackend().getSubjects().size() > 0) {
      currentSubject = gui.getBackend().getSubjects().get(0);
    }

    createLayout();
    pack();
    setSize(600, 400);
    setLocationRelativeTo(gui);
    setVisible(true);
  }

  @Override
  public void actionPerformed(ActionEvent e) {
    if (e.getSource() == btnAddStudent) {
      addStudentDialog();
    } else if (e.getSource() == btnRemoveStudent) {
      removeStudentDialog();
    } else if (e.getSource() == btnAddDesire) {
      addDesireToExaminee();
    }
  }

  /**
   * Prompts for a student name. If a student with that name already exists,
   * the current subject is added to them; otherwise a new student is created.
   */
  private void addStudentDialog() {
    // FIX(mojibake): restored umlauts in the dialog titles ("hinzufügen").
    String name = JOptionPane.showInputDialog(this, "Name des Studenten:", "Student hinzufügen", JOptionPane.PLAIN_MESSAGE);
    if (name == null) {
      return;
    }
    try {
      // FIX: previously a new student was only created when the examinee list was
      // empty (the create block sat in the "else" of the size check), so with at
      // least one existing, non-matching student nothing ever happened. Also the
      // match compared an Examinee directly to a String; compare names instead.
      boolean foundStudent = false;
      for (Examinee existing : gui.getBackend().getExaminee()) {
        if (existing.getName().equals(name)) {
          foundStudent = true;
          existing.addSubject(currentSubject);
        }
      }
      if (!foundStudent) {
        ArrayList<Subject> tempSub = new ArrayList<Subject>();
        tempSub.add(currentSubject);
        gui.getBackend().createExaminee(name, tempSub, new ArrayList<Desire>());
      }
      mainTableModel.updateData(); // update jTable
    } catch (IllegalArgumentException e) {
      JOptionPane.showMessageDialog(this, e.getMessage(), "Student hinzufügen", JOptionPane.ERROR_MESSAGE);
    }
  }

  /**
   * Shows a dropdown of all students and removes the selected one.
   */
  private void removeStudentDialog() {
    ArrayList<Examinee> examinee = gui.getBackend().getExaminee();
    if (examinee.size() > 0) {
      Examinee selectedExaminee = (Examinee) JOptionPane.showInputDialog(this, "Name des Studenten:", "Studenten entfernen", JOptionPane.QUESTION_MESSAGE, null, examinee.toArray(), examinee.get(0));
      if (selectedExaminee != null) {
        gui.getBackend().removeExaminee(selectedExaminee);
      }
    } else {
      JOptionPane.showMessageDialog(this, "Es sind noch keine Studenten vorhanden.", "Studenten entfernen", JOptionPane.ERROR_MESSAGE);
    }
  }

  /** Opens the desire dialog for the student selected in the main table. */
  private void addDesireToExaminee() {
    ArrayList<Examinee> examinee = gui.getBackend().getExaminee();
    if (examinee.size() > 0 && mainJTable.getSelectedRow() >= 0) {
      new DesireGUI(this, examinee.get(mainJTable.getSelectedRow()));
      mainTableModel.updateData(); // update jTable
    }
  }

  /** Builds the split-pane layout (subject table | student table) and the button row. */
  private void createLayout() {
    getContentPane().setLayout(new BorderLayout());

    // CENTER: main (student) table must exist before the subject table's
    // selection listener can refresh it.
    createMainTable();
    createSubjectTable();
    JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, new JScrollPane(selectSubjectTable), new JScrollPane(mainJTable));
    splitPane.setDividerLocation(150);
    getContentPane().add(splitPane, BorderLayout.CENTER);

    // SOUTH: button row
    createSouthButtons();
    getContentPane().add(south, BorderLayout.SOUTH);
  }

  /**
   * Creates the subject selection table (left side).
   * FIX: this method and createMainTable() had swapped names/bodies.
   */
  private void createSubjectTable() {
    selectSubjectTableModel = new SelectSubjectTableModel(gui);
    selectSubjectTable = new JTable(selectSubjectTableModel);
    selectSubjectTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        // FIX: guard against getSelectedRow() == -1 (fires while clearing the selection).
        int row = selectSubjectTable.getSelectedRow();
        if (row >= 0) {
          currentSubject = gui.getBackend().getSubjects().get(row);
          mainTableModel.updateData(); // update jTable
        }
      }
    });
  }

  /** Creates the main student table (right side). */
  private void createMainTable() {
    mainTableModel = new MainTableModel(gui);
    mainJTable = new JTable(mainTableModel);
  }

  /** Creates the three action buttons in the south panel. */
  private void createSouthButtons() {
    south = new JPanel();
    south.setLayout(new GridLayout(1, 3));

    btnAddStudent = new JButton("Student hinzufügen");
    btnAddStudent.addActionListener(this);
    south.add(btnAddStudent);

    btnRemoveStudent = new JButton("Student löschen");
    btnRemoveStudent.addActionListener(this);
    south.add(btnRemoveStudent);

    btnAddDesire = new JButton("Wunsch hinzufügen");
    btnAddDesire.addActionListener(this);
    south.add(btnAddDesire);
  }
}

/**
 * Table model listing all subjects (single column); selecting a row switches
 * {@link StudentsGUI#currentSubject}.
 */
class SelectSubjectTableModel extends AbstractTableModel {

  private static final long serialVersionUID = 1L;

  private GUI mainGUI;
  private ArrayList<Subject> subjects;
  private final int COLUMS = 1;

  SelectSubjectTableModel(GUI mainGUI) {
    this.mainGUI = mainGUI;
    subjects = mainGUI.getBackend().getSubjects();
    if (subjects != null && subjects.size() > 0 && subjects.get(0) != null) {
      StudentsGUI.currentSubject = subjects.get(0);
    }
  }

  @Override
  public int getRowCount() {
    return subjects.size();
  }

  @Override
  public int getColumnCount() {
    return COLUMS;
  }

  @Override
  public String getColumnName(int col) {
    return "Fächer";
  }

  @Override
  public Object getValueAt(int rowIndex, int columnIndex) {
    return subjects.get(rowIndex).getName() + " (" + subjects.get(rowIndex).getAbbreviation() + ")";
  }

  @Override
  public boolean isCellEditable(int row, int col) {
    return false;
  }
}

/**
 * Table model listing the students of {@link StudentsGUI#currentSubject}:
 * column 0 is the (editable) name, column 1 the student's desires.
 */
class MainTableModel extends AbstractTableModel {

  private static final long serialVersionUID = 1L;

  private GUI mainGUI;
  private ArrayList<Examinee> examinee;
  private final int COLUMS = 2;

  MainTableModel(GUI mainGUI) {
    this.mainGUI = mainGUI;
    examinee = new ArrayList<Examinee>();
    updateData();
  }

  /** Rebuilds the row list from the backend, keeping only students of the current subject. */
  public void updateData() {
    examinee.clear();
    for (int i = 0; i < mainGUI.getBackend().getExaminee().size(); i++) {
      if (mainGUI.getBackend().getExaminee().get(i) != null && mainGUI.getBackend().getExaminee().get(i).hasSubject(StudentsGUI.currentSubject)) {
        examinee.add(mainGUI.getBackend().getExaminee().get(i));
      }
    }
    fireTableDataChanged(); // Notifies all listeners that all cell values in the table's rows may have changed.
  }

  @Override
  public int getRowCount() {
    return examinee.size();
  }

  @Override
  public int getColumnCount() {
    return COLUMS;
  }

  @Override
  public String getColumnName(int col) {
    if (col == 0) {
      return "Name";
    } else if (col == 1) {
      return "Wünsche";
    }
    return null;
  }

  @Override
  public Object getValueAt(int rowIndex, int columnIndex) {
    if (columnIndex == 0) {
      return examinee.get(rowIndex).getName();
    } else if (columnIndex == 1) {
      return examinee.get(rowIndex).getDesires();
    }
    return null;
  }

  @Override
  public void setValueAt(Object value, int row, int col) {
    if (col == 0) {
      examinee.get(row).setName(value.toString());
    }
  }

  @Override
  public boolean isCellEditable(int row, int col) {
    // Only the name column is editable.
    return col == 0;
  }
}
package org.zstack.compute.vm;

import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.header.vm.VmInstanceState;
import org.zstack.header.vm.VmInstanceVO;
import org.zstack.header.volume.VolumeStatus;
import org.zstack.header.volume.VolumeType;
import org.zstack.header.volume.VolumeVO;
import org.zstack.header.volume.VolumeVO_;

import javax.persistence.Tuple;
import javax.persistence.TypedQuery;

import static org.zstack.utils.CollectionDSL.list;

/**
 * Read-only helpers that compute an account's current resource usage
 * (volumes, VM count, CPU, memory) for quota checks.
 */
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class VmQuotaUtil {
    @Autowired
    private DatabaseFacade dbf;

    /** Aggregated VM usage figures for one account. */
    public class VmQuota {
        public long totalVmNum;
        public long runningVmNum;
        public long runningVmCpuNum;
        public long runningVmMemorySize;
    }

    /**
     * Counts the account's data volumes that are not in {@code Deleted} status.
     *
     * @param accountUuid account owning the volumes
     * @return number of non-deleted data volumes (0 if none)
     */
    @Transactional(readOnly = true)
    public long getUsedDataVolumeCount(String accountUuid) {
        String sql = "select count(vol)" +
                " from VolumeVO vol, AccountResourceRefVO ref " +
                " where vol.type = :vtype" +
                " and ref.resourceUuid = vol.uuid " +
                " and ref.accountUuid = :auuid" +
                " and ref.resourceType = :rtype" +
                " and vol.status != :status ";
        // CONSISTENCY FIX: the single scalar count was read through TypedQuery<Tuple>;
        // use TypedQuery<Long> like the other single-value queries in this class.
        TypedQuery<Long> volq = dbf.getEntityManager().createQuery(sql, Long.class);
        volq.setParameter("auuid", accountUuid);
        volq.setParameter("rtype", VolumeVO.class.getSimpleName());
        volq.setParameter("vtype", VolumeType.Data);
        volq.setParameter("status", VolumeStatus.Deleted);
        Long n = volq.getSingleResult();
        return n == null ? 0 : n;
    }

    /**
     * Sums the sizes of all volumes (any type/status) owned by the account.
     *
     * @param accountUuid account owning the volumes
     * @return total volume size in bytes (0 if the account has no volumes)
     */
    @Transactional(readOnly = true)
    public long getUsedAllVolumeSize(String accountUuid) {
        String sql = "select sum(vol.size)" +
                " from VolumeVO vol, AccountResourceRefVO ref" +
                " where ref.resourceUuid = vol.uuid" +
                " and ref.accountUuid = :auuid" +
                " and ref.resourceType = :rtype";
        TypedQuery<Long> vq = dbf.getEntityManager().createQuery(sql, Long.class);
        vq.setParameter("auuid", accountUuid);
        vq.setParameter("rtype", VolumeVO.class.getSimpleName());
        Long vsize = vq.getSingleResult();
        return vsize == null ? 0 : vsize;
    }

    /**
     * Computes the account's VM usage:
     * <ul>
     *   <li>"running" figures exclude Stopped/Destroying/Destroyed/Created VMs</li>
     *   <li>{@code totalVmNum} excludes only Destroyed VMs</li>
     * </ul>
     *
     * @param accountUuid the account to inspect
     * @return a populated {@link VmQuota}; null aggregates are coerced to 0
     */
    @Transactional(readOnly = true)
    public VmQuota getUsedVmCpuMemory(String accountUuid) {
        VmQuota quota = new VmQuota();

        // Running VM count, CPU and memory in one aggregate query.
        String sql = "select count(vm), sum(vm.cpuNum), sum(vm.memorySize)" +
                " from VmInstanceVO vm, AccountResourceRefVO ref" +
                " where vm.uuid = ref.resourceUuid" +
                " and ref.accountUuid = :auuid" +
                " and ref.resourceType = :rtype" +
                " and vm.state not in (:states)";
        TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
        q.setParameter("auuid", accountUuid);
        q.setParameter("rtype", VmInstanceVO.class.getSimpleName());
        q.setParameter("states", list(VmInstanceState.Stopped, VmInstanceState.Destroying, VmInstanceState.Destroyed, VmInstanceState.Created));
        Tuple t = q.getSingleResult();
        Long vnum = t.get(0, Long.class);
        quota.runningVmNum = vnum == null ? 0 : vnum;
        Long cnum = t.get(1, Long.class);
        quota.runningVmCpuNum = cnum == null ? 0 : cnum;
        Long msize = t.get(2, Long.class);
        quota.runningVmMemorySize = msize == null ? 0 : msize;

        // Total (not yet destroyed) VM count.
        String sql2 = "select count(vm)" +
                " from VmInstanceVO vm, AccountResourceRefVO ref" +
                " where vm.uuid = ref.resourceUuid" +
                " and ref.accountUuid = :auuid" +
                " and ref.resourceType = :rtype" +
                " and vm.state not in (:states)";
        TypedQuery<Long> q2 = dbf.getEntityManager().createQuery(sql2, Long.class);
        q2.setParameter("auuid", accountUuid);
        q2.setParameter("rtype", VmInstanceVO.class.getSimpleName());
        q2.setParameter("states", list(VmInstanceState.Destroyed));
        Long totalVmNum = q2.getSingleResult();
        quota.totalVmNum = totalVmNum == null ? 0 : totalVmNum;
        return quota;
    }

    /**
     * Looks up the size of a VM's root volume.
     *
     * @param vmInstanceUuid the VM instance uuid
     * @return root volume size in bytes, or 0 if no root volume is found
     */
    @Transactional(readOnly = true)
    public long getVmInstanceRootVolumeSize(String vmInstanceUuid) {
        SimpleQuery<VolumeVO> sq = dbf.createQuery(VolumeVO.class);
        sq.select(VolumeVO_.size);
        sq.add(VolumeVO_.type, SimpleQuery.Op.EQ, VolumeType.Root);
        sq.add(VolumeVO_.vmInstanceUuid, SimpleQuery.Op.EQ, vmInstanceUuid);
        Long rootVolumeSize = sq.findValue();
        return rootVolumeSize == null ? 0 : rootVolumeSize;
    }
}
package c5db.replication;

import c5db.interfaces.ReplicatorLog;
import c5db.interfaces.replication.IndexCommitNotice;
import c5db.interfaces.replication.ReplicatorInstanceEvent;
import c5db.replication.rpc.RpcMessage;
import c5db.replication.rpc.RpcRequest;
import c5db.replication.rpc.RpcWireReply;
import c5db.util.CheckedConsumer;
import c5db.util.ExceptionHandlingBatchExecutor;
import c5db.util.JUnitRuleFiberExceptions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListenableFuture;
import io.netty.util.CharsetUtil;
import org.hamcrest.Matcher;
import org.jetlang.channels.MemoryChannel;
import org.jetlang.channels.Request;
import org.jetlang.core.BatchExecutor;
import org.jetlang.core.RunnableExecutor;
import org.jetlang.core.RunnableExecutorImpl;
import org.jetlang.fibers.Fiber;
import org.jetlang.fibers.ThreadFiber;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static c5db.AsyncChannelAsserts.ChannelHistoryMonitor;
import static c5db.CollectionMatchers.isIn;
import static c5db.FutureMatchers.resultsIn;
import static c5db.IndexCommitMatcher.aCommitNotice;
import static c5db.RpcMatchers.ReplyMatcher.aPreElectionReply;
import static c5db.RpcMatchers.ReplyMatcher.anAppendReply;
import static c5db.RpcMatchers.RequestMatcher;
import static c5db.RpcMatchers.RequestMatcher.aPreElectionPoll;
import static c5db.RpcMatchers.RequestMatcher.anAppendRequest;
import static c5db.RpcMatchers.containsQuorumConfiguration;
import static c5db.interfaces.replication.Replicator.State.FOLLOWER;
import static c5db.interfaces.replication.ReplicatorInstanceEvent.EventType.ELECTION_TIMEOUT;
import static c5db.replication.ReplicationMatchers.aQuorumChangeCommittedEvent;
import static c5db.replication.ReplicationMatchers.aReplicatorEvent;
import static c5db.replication.ReplicationMatchers.hasCommittedEntriesUpTo;
import static c5db.replication.ReplicationMatchers.leaderElectedEvent;
import static c5db.replication.ReplicationMatchers.theLeader;
import static c5db.replication.ReplicationMatchers.willCommitConfiguration;
import static c5db.replication.ReplicationMatchers.willCommitEntriesUpTo;
import static c5db.replication.ReplicationMatchers.willRespondToAnAppendRequest;
import static c5db.replication.ReplicationMatchers.willSend;
import static c5db.replication.ReplicationMatchers.wonAnElectionWithTerm;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.core.IsNot.not;

/**
 * A class for tests of the behavior of multiple interacting ReplicatorInstance nodes,
 * driven by an in-RAM simulation ({@code InRamSim}) rather than a real network.
 * Tests observe the simulation through channel history monitors (commits, events,
 * replies, requests) and manipulate peers via the PeerController/LeaderController
 * helpers defined at the bottom of the class.
 */
public class InRamTest {
  private static final int ELECTION_TIMEOUT_MILLIS = 50; // election timeout (milliseconds)
  private static final long OFFSET_STAGGERING_MILLIS = 50; // offset between different peers' clocks

  // Turns exceptions thrown on fibers into JUnit test failures.
  @Rule
  public JUnitRuleFiberExceptions fiberExceptionHandler = new JUnitRuleFiberExceptions();

  private final BatchExecutor batchExecutor = new ExceptionHandlingBatchExecutor(fiberExceptionHandler);
  private final RunnableExecutor runnableExecutor = new RunnableExecutorImpl(batchExecutor);
  // Single fiber on which all channel subscriptions and monitors run.
  private final Fiber fiber = new ThreadFiber(runnableExecutor, "InRamTest-ThreadFiber", true);

  private InRamSim sim;
  private ChannelHistoryMonitor<IndexCommitNotice> commitMonitor;
  private ChannelHistoryMonitor<ReplicatorInstanceEvent> eventMonitor;
  private ChannelHistoryMonitor<RpcMessage> replyMonitor;
  // Index of the last entry logged via LeaderController#log; see lastIndexLogged().
  private long lastIndexLogged;

  private final MemoryChannel<Request<RpcRequest, RpcWireReply>> requestLog = new MemoryChannel<>();
  private final ChannelHistoryMonitor<Request<RpcRequest, RpcWireReply>> requestMonitor =
      new ChannelHistoryMonitor<>(requestLog, fiber);
  // Highest commit index observed per peer id; maintained by updateLastCommit.
  private final Map<Long, Long> lastCommit = new HashMap<>();

  @Before
  public final void setUpSimulationAndFibers() throws Exception {
    sim = new InRamSim(ELECTION_TIMEOUT_MILLIS, OFFSET_STAGGERING_MILLIS, batchExecutor);
    sim.getRpcChannel().subscribe(fiber, requestLog::publish);
    sim.getCommitNotices().subscribe(fiber, this::updateLastCommit);
    sim.getCommitNotices().subscribe(fiber, System.out::println);
    sim.getStateChanges().subscribe(fiber, System.out::println);
    commitMonitor = new ChannelHistoryMonitor<>(sim.getCommitNotices(), fiber);
    eventMonitor = new ChannelHistoryMonitor<>(sim.getStateChanges(), fiber);
    replyMonitor = new ChannelHistoryMonitor<>(sim.getReplyChannel(), fiber);
    fiber.start();
    sim.start(initialPeerSet());
  }

  @After
  public final void disposeResources() {
    sim.dispose();
    fiber.dispose();
  }

  @Test
  public void aLeaderWillBeElectedInATimelyMannerInANewQuorum() throws Exception {
    waitForALeader(term(1));
  }

  @Test
  public void aNewLeaderWillBeElectedIfAnExistingLeaderDies() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    leader().die();
    waitForANewLeader();
  }

  @Test
  public void ifAKilledLeaderIsRestartedItWillBecomeAFollower() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    LeaderController firstLeader = leader();
    firstLeader.die();
    waitForANewLeader();
    assertThat(leader(), is(not(equalTo(firstLeader))));
    firstLeader.restart();
    assertThat(firstLeader, willRespondToAnAppendRequest(currentTerm()));
  }

  @Test
  public void ifAnElectionOccursWhileAPeerIsOfflineThenThePeerWillRecognizeTheNewLeaderWhenThePeerRestarts() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    LeaderController firstLeader = leader();
    PeerController follower = pickFollower().die();
    leader().log(someData())
        .waitForCommit(index(1))
        .die();
    waitForANewLeader();
    assertThat(leader(), not(equalTo(firstLeader)));

    // The second leader logs some entry, then the first node to go offline comes back
    leader().log(someData());
    follower.restart()
        .waitForCommit(index(2));
    leader().log(someData());
    firstLeader.restart();
    allPeers((peer) -> assertThat(peer, willCommitEntriesUpTo(index(3))));
  }

  @Test
  public void aFollowerMaintainsItsCommitIndexWhenItBecomesLeader() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    leader().log(someData());
    allPeers((peer) -> peer.waitForCommit(lastIndexLogged()));

    // Kill the first leader; wait for a second leader to come to power
    leader().die();
    waitForANewLeader();
    leader().log(someData());
    assertThat(leader(), willSend(anAppendRequest().withCommitIndex(equalTo(lastIndexLogged()))));
  }

  @Test
  public void aLeaderSendsDataToAllOtherPeersResultingInAllPeersCommitting() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    leader().log(someData());
    allPeers((peer) -> assertThat(peer, willCommitEntriesUpTo(lastIndexLogged())));
  }

  @Test
  public void aFollowerWillStageANewElectionIfItTimesOutWaitingToHearFromTheLeader() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    final long firstLeaderTerm = currentTerm();
    PeerController follower = pickFollower();
    // Cut the follower off from the leader's appends so it will time out and poll.
    follower.willDropIncomingAppendsUntil(leader(), is(not(theLeader())));
    follower.allowToTimeout();
    waitForALeader(firstLeaderTerm + 1);
    assertThat(follower, anyOf(is(theLeader()), willRespondToAnAppendRequest(currentTerm())));
  }

  @Test
  public void ifAFollowerFallsBehindInReceivingAndLoggingEntriesItIsAbleToCatchUp() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    PeerController follower = pickFollower();
    follower.willDropIncomingAppendsUntil(leader(), hasCommittedEntriesUpTo(index(3)));
    leader()
        .log(someData())
        .log(someData());
    assertThat(follower, willCommitEntriesUpTo(index(3)));
  }

  @Test
  public void aReplicatorReturnsNullIfAskedToChangeQuorumsWhenItIsNotInTheLeaderState() throws Exception {
    final Set<Long> newPeerIds = smallerPeerSetWithOneInCommonWithInitialSet();
    havingElectedALeaderAtOrAfter(term(1));
    assertThat(pickFollower().changeQuorum(newPeerIds), nullValue());
  }

  @Test
  public void aLeaderCanCoordinateAQuorumMembershipChange() throws Exception {
    final Set<Long> newPeerIds = smallerPeerSetWithNoneInCommonWithInitialSet();
    final QuorumConfiguration finalConfig = QuorumConfiguration.of(newPeerIds);
    havingElectedALeaderAtOrAfter(term(1));
    leader().changeQuorum(newPeerIds);
    sim.createAndStartReplicators(newPeerIds);
    waitForANewLeader();
    leader().log(someData());
    peers(newPeerIds).forEach((peer) -> assertThat(peer, willCommitConfiguration(finalConfig)));
    assertThat(newPeerIds, hasItem(equalTo(leader().id)));
  }

  @Test
  public void aSecondQuorumChangeWillOverrideTheFirst() throws Exception {
    final Set<Long> firstPeerSet = smallerPeerSetWithOneInCommonWithInitialSet();
    final Set<Long> secondPeerSet = largerPeerSetWithSomeInCommonWithInitialSet();
    havingElectedALeaderAtOrAfter(term(1));
    leader().changeQuorum(firstPeerSet);
    sim.createAndStartReplicators(firstPeerSet);
    leader().changeQuorum(secondPeerSet);
    sim.createAndStartReplicators(secondPeerSet);
    waitForALeaderWithId(isIn(secondPeerSet));
    leader().log(someData());
    peers(secondPeerSet).forEach((peer) -> assertThat(peer, willCommitConfiguration(
        QuorumConfiguration.of(secondPeerSet))));
  }

  @Test
  public void theFutureReturnedByAQuorumChangeRequestWillReturnTheReceiptOfTheTransitionalConfigurationEntry() throws Exception {
    final Set<Long> newPeerIds = smallerPeerSetWithOneInCommonWithInitialSet();
    final long lastIndexBeforeQuorumChange = 4;
    havingElectedALeaderAtOrAfter(term(1));
    final long electionTerm = currentTerm();
    sim.createAndStartReplicators(newPeerIds);
    leader().logDataUpToIndex(lastIndexBeforeQuorumChange);
    assertThat(leader().changeQuorum(newPeerIds), resultsIn(equalTo(
        new ReplicatorReceipt(electionTerm, lastIndexBeforeQuorumChange + 1))));
  }

  @Test
  public void aQuorumChangeWillGoThroughEvenIfTheLeaderDiesBeforeItCommitsTheTransitionalConfiguration() throws Exception {
    // Leader dies before it can commit the transitional configuration, but as long as the next leader
    // has already received the transitional configuration entry, it can complete the view change.
    final Set<Long> newPeerIds = smallerPeerSetWithNoneInCommonWithInitialSet();
    final QuorumConfiguration transitionalConfig =
        QuorumConfiguration.of(initialPeerSet()).getTransitionalConfiguration(newPeerIds);
    havingElectedALeaderAtOrAfter(term(1));
    final long nextLogIndex = leader().log.getLastIndex() + 1;
    leader().changeQuorum(newPeerIds);
    allPeersExceptLeader((peer) -> assertThat(leader(), willSend(
        anAppendRequest()
            .containingQuorumConfig(transitionalConfig)
            .to(peer.id))));
    ignoringPreviousReplies();
    allPeers((peer) -> peer.waitForAppendReply(greaterThanOrEqualTo(currentTerm())));
    assertThat(leader().hasCommittedEntriesUpTo(nextLogIndex), is(false));

    // As of this point, all peers have replicated the transitional config, but the leader has not committed.
    // It would be impossible to commit because the new peers have not come online, and their votes are
    // necessary to commit the transitional configuration.
    leader().die();
    sim.createAndStartReplicators(newPeerIds);
    waitForANewLeader();
    assertThat(leader().currentConfiguration(), equalTo(transitionalConfig));

    // Necessary to log again because the new leader may not commit an entry from a past term (such as
    // the configuration entry) until it has also committed an entry from its current term.
    leader().log(someData());
    peers(newPeerIds).forEach((peer) -> assertThat(peer, willCommitConfiguration(QuorumConfiguration.of(newPeerIds))));
  }

  @Test
  public void afterAQuorumChangeTheNewNodesWillCatchUpToThePreexistingOnes() throws Exception {
    final Set<Long> newPeerIds = largerPeerSetWithSomeInCommonWithInitialSet();
    final long maximumIndex = 5;
    havingElectedALeaderAtOrAfter(term(1));
    leader()
        .logDataUpToIndex(maximumIndex)
        .waitForCommit(maximumIndex);
    sim.createAndStartReplicators(newPeerIds);
    leader().changeQuorum(newPeerIds);
    peers(newPeerIds).forEach((peer) -> {
      assertThat(peer, willCommitEntriesUpTo(maximumIndex));
      assertThat(peer, willCommitConfiguration(QuorumConfiguration.of(newPeerIds)));
    });
  }

  @Test
  public void aQuorumCanMakeProgressEvenIfAFollowerCanSendRequestsButNotReceiveReplies() throws Exception {
    final long maximumIndex = 5;
    havingElectedALeaderAtOrAfter(term(1));
    pickFollower()
        .willDropAllIncomingTraffic()
        .allowToTimeout();
    waitForAnElectionTimeout();
    leader().logDataUpToIndex(maximumIndex);
    assertThat(leader(), willCommitEntriesUpTo(maximumIndex));
  }

  @Test
  public void aQuorumChangeCanCompleteEvenIfARemovedPeerTimesOutDuringIt() throws Exception {
    final Set<Long> newPeerIds = smallerPeerSetWithNoneInCommonWithInitialSet();
    final QuorumConfiguration transitionalConfig =
        QuorumConfiguration.of(initialPeerSet()).getTransitionalConfiguration(newPeerIds);
    final QuorumConfiguration finalConfig = transitionalConfig.getCompletedConfiguration();
    havingElectedALeaderAtOrAfter(term(1));
    final long firstLeaderTerm = currentTerm();
    final long leaderId = currentLeader();
    dropAllAppendsWithThisConfigurationUntilAPreElectionPollTakesPlace(finalConfig);
    leader().changeQuorum(newPeerIds);
    sim.createAndStartReplicators(newPeerIds);
    allPeersExceptLeader((peer) -> assertThat(leader(), willSend(
        anAppendRequest()
            .containingQuorumConfig(transitionalConfig)
            .to(peer.id))));
    peers(newPeerIds).forEach((peer) -> assertThat(peer, willCommitConfiguration(transitionalConfig)));
    peersBeingRemoved(transitionalConfig).forEach(PeerController::allowToTimeout);
    waitForAnElectionTimeout();
    peersBeingRemoved(transitionalConfig).forEach((peer) -> {
      if (peer.id != leaderId) {
        assertThat(peer, willSend(aPreElectionPoll()));
      }
    });
    waitForALeaderWithId(isIn(newPeerIds));
    leader().log(someData());
    peers(newPeerIds).forEach((peer) -> assertThat(peer, willCommitConfiguration(finalConfig)));
    // Removed peers must not have won any election after the first leader's term.
    peersBeingRemoved(transitionalConfig).forEach((peer) ->
        assertThat(peer, not(wonAnElectionWithTerm(greaterThan(firstLeaderTerm)))));
  }

  @Test
  public void aQuorumCanElectANewLeaderEvenWhileReceivingMessagesFromRemovedPeersWhoHaveTimedOut() throws Exception {
    final Set<Long> newPeerIds = smallerPeerSetWithNoneInCommonWithInitialSet();
    final QuorumConfiguration transitionalConfig =
        QuorumConfiguration.of(initialPeerSet()).getTransitionalConfiguration(newPeerIds);
    final QuorumConfiguration finalConfig = transitionalConfig.getCompletedConfiguration();
    havingElectedALeaderAtOrAfter(term(1));
    long firstLeaderTerm = currentTerm();
    leader().changeQuorum(newPeerIds);
    sim.createAndStartReplicators(newPeerIds);
    peers(newPeerIds).forEach((peer) -> assertThat(peer, willCommitConfiguration(finalConfig)));
    peersBeingRemoved(transitionalConfig).forEach(PeerController::allowToTimeout);
    waitForAnElectionTimeout();
    waitForALeader(term(firstLeaderTerm + 1));
    leader().die();
    waitForANewLeader();
  }

  @Test
  public void aLateBootstrapCallWillBeDisregarded() throws Exception {
    havingElectedALeaderAtOrAfter(term(1));
    leader().logDataUpToIndex(2);
    allPeers((peer) -> assertThat(peer, willCommitEntriesUpTo(lastIndexLogged())));

    // Bootstrap calls to both leader and a non-leader -- both will be no-ops
    pickNonLeader().instance.bootstrapQuorum(smallerPeerSetWithNoneInCommonWithInitialSet());
    leader().instance.bootstrapQuorum(smallerPeerSetWithNoneInCommonWithInitialSet());

    // Verify that quorum is still in a working state
    assertThat(sim.getLog(leader().id).getLastIndex(), is(equalTo(2L)));
    leader().logDataUpToIndex(3);
    allPeers((peer) -> assertThat(peer, willCommitEntriesUpTo(lastIndexLogged())));
  }

  /**
   * Private methods
   */

  // Blocks until a leader is elected during some term >= minimumTerm.
  // Throws TimeoutException if that does not occur within the time limit.
  private void waitForALeader(long minimumTerm) {
    waitForALeaderElectedEventMatching(anyLeader(), greaterThanOrEqualTo(minimumTerm));
  }

  // Blocks until a leader whose id satisfies the matcher is elected, in any term.
  private void waitForALeaderWithId(Matcher<Long> leaderIdMatcher) {
    waitForALeaderElectedEventMatching(leaderIdMatcher, anyTerm());
  }

  private void waitForALeaderElectedEventMatching(Matcher<Long> leaderIdMatcher, Matcher<Long> termMatcher) {
    sim.startAllTimeouts();
    eventMonitor.waitFor(leaderElectedEvent(leaderIdMatcher, termMatcher));
    sim.stopAllTimeouts();

    // Wait for at least one other node to recognize the new leader. This is necessary because
    // some tests want to be able to identify a follower right away.
    pickNonLeader().waitForAppendReply(termMatcher);

    final long leaderId = currentLeader();
    assertThat(leaderCount(), is(equalTo(1)));
    sim.startTimeout(leaderId);
  }

  private void havingElectedALeaderAtOrAfter(long minimumTerm) {
    waitForALeader(minimumTerm);
  }

  private void waitForANewLeader() {
    waitForALeader(currentTerm() + 1);
  }

  // Counts leaders in the current term. Used to verify a sensible state.
  // If the simulation is running correctly, this should only ever return 0 or 1.
  private int leaderCount() {
    final long currentTerm = currentTerm();
    int leaderCount = 0;
    for (ReplicatorInstance replicatorInstance : sim.getReplicators().values()) {
      if (replicatorInstance.isLeader() && replicatorInstance.currentTerm >= currentTerm) {
        leaderCount++;
      }
    }
    return leaderCount;
  }

  private void waitForAnElectionTimeout() {
    eventMonitor.waitFor(aReplicatorEvent(ELECTION_TIMEOUT));
  }

  // Clears reply history so subsequent waits only see fresh replies.
  private void ignoringPreviousReplies() {
    replyMonitor.forgetHistory();
  }

  // A minimal payload to replicate: the UTF-8 bytes of "test".
  private static List<ByteBuffer> someData() {
    return Lists.newArrayList(
        ByteBuffer.wrap("test".getBytes(CharsetUtil.UTF_8)));
  }

  private LeaderController leader() {
    return new LeaderController();
  }

  private long lastIndexLogged() {
    return lastIndexLogged;
  }

  // Syntactic sugar for manipulating leaders
  class LeaderController extends PeerController {
    public LeaderController() {
      super(currentLeader());
    }

    // Logs the buffers through the current leader and records the resulting index.
    public LeaderController log(List<ByteBuffer> buffers) throws Exception {
      lastIndexLogged = currentLeaderInstance().logData(buffers).get().seqNum;
      return this;
    }

    // Repeatedly logs filler data until the log reaches at least the given index.
    public LeaderController logDataUpToIndex(long index) throws Exception {
      while (lastIndexLogged < index) {
        log(someData());
      }
      return this;
    }

    private ReplicatorInstance currentLeaderInstance() {
      return sim.getReplicators().get(currentLeader());
    }
  }

  // Syntactic sugar for manipulating peers
  class PeerController {
    public final long id;
    public final ReplicatorLog log;
    public final ReplicatorInstance instance;

    public PeerController(long id) {
      this.id = id;
      this.log = sim.getLog(id);
      this.instance = sim.getReplicators().get(id);
    }

    @Override
    public String toString() {
      return instance.toString();
    }

    public boolean isCurrentLeader() {
      return instance.isLeader() && instance.currentTerm >= currentTerm();
    }

    public QuorumConfiguration currentConfiguration() {
      return instance.getQuorumConfiguration();
    }

    public ListenableFuture<ReplicatorReceipt> changeQuorum(Collection<Long> newPeerIds) throws Exception {
      return instance.changeQuorum(newPeerIds);
    }

    public PeerController die() {
      sim.killPeer(id);
      return this;
    }

    public PeerController restart() {
      sim.restartPeer(id);
      return this;
    }

    public boolean isOnline() {
      return !sim.getOfflinePeers().contains(id);
    }

    // Lets this peer's election timeout fire again.
    public PeerController allowToTimeout() {
      sim.startTimeout(id);
      return this;
    }

    public PeerController waitForCommit(long commitIndex) {
      commitMonitor.waitFor(aCommitNotice().withIndex(greaterThanOrEqualTo(commitIndex)).issuedFromPeer(id));
      return this;
    }

    public PeerController waitForQuorumCommit(QuorumConfiguration quorumConfiguration) {
      eventMonitor.waitFor(aQuorumChangeCommittedEvent(quorumConfiguration, equalTo(id)));
      return this;
    }

    public PeerController waitForAppendReply(Matcher<Long> termMatcher) {
      replyMonitor.waitFor(anAppendReply().withTerm(termMatcher));
      return this;
    }

    public PeerController waitForRequest(RequestMatcher requestMatcher) {
      requestMonitor.waitFor(requestMatcher.from(id));
      return this;
    }

    public boolean hasCommittedEntriesUpTo(long index) {
      return commitMonitor.hasAny(aCommitNotice().withIndex(greaterThanOrEqualTo(index)).issuedFromPeer(id));
    }

    public boolean hasWonAnElection(Matcher<Long> termMatcher) {
      return eventMonitor.hasAny(leaderElectedEvent(equalTo(id), termMatcher));
    }

    // Drops append messages addressed to this peer until the given matcher accepts the given peer.
    public void willDropIncomingAppendsUntil(PeerController peer, Matcher<PeerController> matcher) {
      sim.dropMessages(
          (message) -> message.to == id && message.isAppendMessage(),
          (message) -> matcher.matches(peer));
    }

    // Drops every message addressed to this peer from any other peer, indefinitely.
    public PeerController willDropAllIncomingTraffic() {
      sim.dropMessages(
          (message) -> (message.to == id) && (message.to != message.from),
          (message) -> false);
      return this;
    }
  }

  private PeerController peer(long peerId) {
    return new PeerController(peerId);
  }

  private Set<PeerController> peers(Collection<Long> peerIds) {
    return peerIds.stream()
        .map(this::peer)
        .collect(Collectors.toSet());
  }

  private <Ex extends Throwable> void allPeers(CheckedConsumer<PeerController, Ex> forEach) throws Ex {
    for (long peerId : sim.getOnlinePeers()) {
      forEach.accept(new PeerController(peerId));
    }
  }

  private <Ex extends Throwable> void allPeersExceptLeader(CheckedConsumer<PeerController, Ex> forEach) throws Ex {
    for (long peerId : sim.getOnlinePeers()) {
      if (peerId == currentLeader()) {
        continue;
      }
      forEach.accept(new PeerController(peerId));
    }
  }

  // Returns the first online peer satisfying the predicate, or null if none does.
  private PeerController anyPeerSuchThat(Predicate<PeerController> predicate) {
    for (long peerId : sim.getOnlinePeers()) {
      if (predicate.test(peer(peerId))) {
        return peer(peerId);
      }
    }
    return null;
  }

  // Peers present in the previous configuration but absent from the next one.
  private Set<PeerController> peersBeingRemoved(QuorumConfiguration configuration) {
    assert configuration.isTransitional;
    return Sets.difference(configuration.prevPeers(), configuration.nextPeers())
        .stream()
        .map(PeerController::new)
        .collect(Collectors.toSet());
  }

  private PeerController pickFollower() {
    PeerController chosenPeer = anyPeerSuchThat((peer) -> peer.instance.myState == FOLLOWER && peer.isOnline());
    assertThat(chosenPeer, not(nullValue()));
    return chosenPeer;
  }

  private PeerController pickNonLeader() {
    PeerController chosenPeer = anyPeerSuchThat((peer) -> not(theLeader()).matches(peer) && peer.isOnline());
    assertThat(chosenPeer, not(nullValue()));
    return chosenPeer;
  }

  private long currentTerm() {
    return eventMonitor.getLatest(leaderElectedEvent(anyLeader(), anyTerm())).leaderElectedTerm;
  }

  private long currentLeader() {
    return eventMonitor.getLatest(leaderElectedEvent(anyLeader(), anyTerm())).newLeader;
  }

  // Identity helpers purely for readability at call sites, e.g. term(1), index(3).
  private static long term(long term) {
    return term;
  }

  private static long index(long index) {
    return index;
  }

  private static Matcher<Long> anyLeader() {
    return any(Long.class);
  }

  private static Matcher<Long> anyTerm() {
    return any(Long.class);
  }

  // Subscriber on the commit-notice channel: tracks the highest index committed per peer.
  private void updateLastCommit(IndexCommitNotice notice) {
    long peerId = notice.nodeId;
    if (notice.lastIndex > lastCommit.getOrDefault(peerId, 0L)) {
      lastCommit.put(peerId, notice.lastIndex);
    }
  }

  private void dropAllAppendsWithThisConfigurationUntilAPreElectionPollTakesPlace(QuorumConfiguration configuration) {
    sim.dropMessages(
        (message) -> message.isAppendMessage()
            && containsQuorumConfiguration(message.getAppendMessage().getEntriesList(), configuration),
        aPreElectionReply()::matches);
  }

  private static Set<Long> initialPeerSet() {
    return Sets.newHashSet(1L, 2L, 3L, 4L, 5L, 6L, 7L);
  }

  private static Set<Long> smallerPeerSetWithOneInCommonWithInitialSet() {
    return Sets.newHashSet(7L, 8L, 9L);
  }

  private static Set<Long> smallerPeerSetWithNoneInCommonWithInitialSet() {
    return Sets.newHashSet(8L, 9L, 10L);
  }

  private static Set<Long> largerPeerSetWithSomeInCommonWithInitialSet() {
    return Sets.newHashSet(4L, 5L, 6L, 7L, 8L, 9L, 10L);
  }
}
package com.github.conserveorm.tools;

import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.CharBuffer;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import com.github.conserveorm.adapter.AdapterBase;
import com.github.conserveorm.cache.ObjectRowMap;
import com.github.conserveorm.connection.ConnectionWrapper;
import com.github.conserveorm.tools.metadata.MapEntry;
import com.github.conserveorm.tools.metadata.ObjectRepresentation;
import com.github.conserveorm.tools.metadata.ObjectStack;

/**
 * Generates and instantiates objects based on property-value pairs.
 *
 * @author Erik Berglund
 *
 */
public class ObjectFactory
{
	/**
	 *
	 * Create an object from a resultset row.
	 *
	 * Primitive-mapped types are cast straight from the value column;
	 * everything else is instantiated via its no-arg constructor (made
	 * accessible if necessary), registered in the cache before its
	 * properties are filled, and then populated via fillObjectValues.
	 *
	 * @param <T>
	 *
	 * @param map
	 *            a hashmap of the values, indexed by name
	 * @return a new object of the appropriate type.
	 * @throws SQLException
	 *             wrapping any reflective or database failure encountered
	 */
	@SuppressWarnings("unchecked")
	public static <T> T createObject(AdapterBase adapter, ObjectRowMap cache, HashMap<String, Object> map, Class<T> resultClass, ConnectionWrapper cw, String tableName, Long dbId) throws SQLException
	{
		try
		{
			if (ObjectTools.isDatabasePrimitive(resultClass))
			{
				if (Number.class.isAssignableFrom(resultClass))
				{
					// safe cast of numbers
					return (T) ObjectTools.cast(resultClass, (Number) map.get(Defaults.VALUE_COL));
				}
				else
				{
					return resultClass.cast(map.get(Defaults.VALUE_COL));
				}
			}
			else
			{
				// create a new object using the default constructor.
				Constructor<T> constructor = resultClass.getDeclaredConstructor();
				boolean wasAccessible = constructor.isAccessible();
				if (!wasAccessible)
				{
					// if the constructor is private, make it accessible
					constructor.setAccessible(true);
				}
				T res = constructor.newInstance();
				if (!wasAccessible)
				{
					// restore the original accessibility after instantiation
					constructor.setAccessible(wasAccessible);
				}
				// add object to cache
				// NOTE(review): caching before the properties are filled lets
				// circular references resolve to this same instance.
				cache.storeObject(tableName, res, dbId);
				fillObjectValues(adapter, cache, res, resultClass, map, cw);
				return res;
			}
		}
		catch (Exception e)
		{
			throw new SQLException(e);
		}
	}

	/**
	 * Populate a freshly created object's properties from the row map.
	 * Iterates over every representation in the object's inheritance stack;
	 * for each property present in the map, either merges map/collection
	 * contents into the object itself (when no mutator exists) or invokes the
	 * mutator with a value converted according to the property's type
	 * (enum by name, Class by classloader, primitives/Clob/Blob directly,
	 * otherwise a referenced object loaded by database id).
	 *
	 * @throws SQLException
	 *             on database access failure while loading referenced objects
	 */
	@SuppressWarnings("unchecked")
	private static <T> void fillObjectValues(AdapterBase adapter, ObjectRowMap cache, T res, Class<T> clazz, HashMap<String, Object> map, ConnectionWrapper cw) throws IllegalArgumentException, IllegalAccessException, InvocationTargetException, ClassNotFoundException, SQLException, InstantiationException, IOException
	{
		// get an object representation stack
		ObjectStack objStack = new ObjectStack(adapter, clazz);
		// iterate over all the representations in the stack
		for (ObjectRepresentation rep : objStack.getAllRepresentations())
		{
			for (int x = 0; x < rep.getPropertyCount(); x++)
			{
				String name = rep.getPropertyName(x);
				Method m = rep.getMutator(x);
				Object o = map.get(name);
				if (o != null)
				{
					if (m == null)
					{
						if (name.equals(Defaults.MAP_PROPERTY_COL))
						{
							// this is a map, so load the property into a
							// separate variable and process it
							Object object = adapter.getPersist().getObject(cw, rep.getReturnType(x), ((Number) o).longValue(), cache);
							// cast the object to an object array
							Object[] array = (Object[]) object;
							Map<Object, Object> resultMap = (Map<Object, Object>) res;
							for (int y = 0; y < array.length; y++)
							{
								MapEntry entry = (MapEntry) array[y];
								resultMap.put(entry.getKey(), entry.getValue());
							}
						}
						else if (name.equals(Defaults.COLLECTION_PROPERTY_COL))
						{
							// this is a collection, so load the property into a
							// separate variable
							Object object = adapter.getPersist().getObject(cw, rep.getReturnType(x), ((Number) o).longValue(), cache);
							// process the contents
							// cast the object to an object array
							Object[] array = (Object[]) object;
							Collection<Object> collection = (Collection<Object>) res;
							for (int y = 0; y < array.length; y++)
							{
								collection.add(array[y]);
							}
						}
						else
						{
							// there is no mutator for this property, and that's
							// ok - it's a derived property.
						}
					}
					else
					{
						boolean wasAccessible = m.isAccessible();
						if (!wasAccessible)
						{
							m.setAccessible(true);
						}
						// this is neither a map or collection content variable,
						// so process it as usual.
						if (rep.getReturnType(x).isEnum())
						{
							// enums are stored by constant name; find the matching constant
							String enumName = (String) o;
							Class<? extends Enum<?>> enClass = (Class<? extends Enum<?>>) rep.getReturnType(x);
							Enum<?>[] enConsts = enClass.getEnumConstants();
							for (int t = 0; t < enConsts.length; t++)
							{
								if (enConsts[t].name().equals(enumName))
								{
									m.invoke(res, enConsts[t]);
									break;
								}
							}
						}
						else if (rep.getReturnType(x).equals(Class.class))
						{
							// classes are stored as strings and loaded by the classloader
							String className = (String) o;
							Class<?> value = ObjectFactory.class.getClassLoader().loadClass(className);
							m.invoke(res, value);
						}
						else if (rep.isPrimitive(x))
						{
							if (o instanceof Number)
							{
								m.invoke(res, ObjectTools.cast((Class<? extends Number>) m.getParameterTypes()[0], (Number) o));
							}
							else if (o instanceof Clob)
							{
								Clob clob = (Clob) o;
								Reader r = clob.getCharacterStream();
								CharBuffer cb = CharBuffer.allocate((int) clob.length());
								// NOTE(review): a single Reader.read(CharBuffer) call is not
								// guaranteed to fill the buffer; a loop until the buffer is full
								// (or EOF) would be safer. Also, r.close() is skipped if read
								// throws - confirm whether a try/finally is warranted here.
								r.read(cb);
								r.close();
								m.invoke(res, cb.array());
							}
							else if (o instanceof Blob)
							{
								Blob b = (Blob) o;
								// Blob bytes are 1-indexed per the JDBC API
								m.invoke(res, b.getBytes(1, (int) b.length()));
							}
							else
							{
								m.invoke(res, o);
							}
						}
						else
						{
							// get the referenced object
							// get the id
							Long dbId = ((Number) o).longValue();
							Object object = adapter.getPersist().getObject(cw, rep.getReturnType(x), dbId, cache);
							if (object != null)
							{
								// save the retrieved
								m.invoke(res, object);
							}
						}
						// set the old accessibility
						m.setAccessible(wasAccessible);
					}
				}
			}
		}
	}
}
package integration.tests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import gr.ntua.vision.monitoring.VismoConfiguration;
import gr.ntua.vision.monitoring.events.MonitoringEvent;
import gr.ntua.vision.monitoring.sources.EventSourceListener;
import gr.ntua.vision.monitoring.sources.VismoEventSource;
import gr.ntua.vision.monitoring.zmq.ZMQFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;

import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.zeromq.ZContext;

/**
 * Integration test: spawns the python vismo_dispatch script as a child process
 * and verifies that the java event source receives the expected number of
 * well-formed events from it.
 */
public class PythonDispatchTest {
    /**
     * Listener that counts received events, rejecting any event that carries a
     * 'tag' field or lacks a 'transaction-throughput' field.
     */
    private static class NoEventsSourceListener implements EventSourceListener {
        private final int noExpectedEvents;
        private int noReceivedEvents = 0;

        /**
         * Constructor.
         *
         * @param noExpectedEvents
         *            how many events this listener should have seen by the end of the test
         */
        public NoEventsSourceListener(final int noExpectedEvents) {
            this.noExpectedEvents = noExpectedEvents;
        }

        // Asserts that exactly the expected number of events were received.
        public void haveExpectedNoEvents() {
            assertEquals(noExpectedEvents, noReceivedEvents);
        }

        /**
         * @see gr.ntua.vision.monitoring.sources.EventSourceListener#receive(gr.ntua.vision.monitoring.events.MonitoringEvent)
         */
        @Override
        public void receive(final MonitoringEvent e) {
            if (e == null)
                return;
            if (e.get("tag") != null) {
                log.error("received unexpected event: {} with field 'tag'", e);
                throw new AssertionError("received unexpected event: " + e + " with field 'tag'");
            }
            if (e.get("transaction-throughput") == null) {
                log.error("received unexpected event: {} without field 'transaction-throughput'", e);
                throw new AssertionError("received unexpected event: " + e + " without field 'transaction-throughput'");
            }
            ++noReceivedEvents;
        }
    }

    static final Logger log = LoggerFactory.getLogger(PythonDispatchTest.class);
    private static final int NO_EVENTS_TO_SEND = 2;
    private static final String PY_DISPATCH = "../vismo-dispatch/src/main/python/vismo_dispatch.py";
    // NOTE(review): hard-coded interpreter path; the test will fail on hosts
    // where python lives elsewhere - confirm whether this should be configurable.
    private static final String PYTHON = "/usr/local/bin/python";
    private static final String VISMO_CONFIG_FILE = "src/test/resources/vismo-config.properties";
    private VismoConfiguration conf;
    private final ZMQFactory factory = new ZMQFactory(new ZContext());
    private NoEventsSourceListener listener;
    private VismoEventSource source;

    /**
     * @throws IOException
     * @throws InterruptedException
     */
    @Before
    public void setUp() throws IOException, InterruptedException {
        requirePythonVersion("2.6");
        conf = new VismoConfiguration(VISMO_CONFIG_FILE);
        source = new VismoEventSource(factory.newBoundPullSocket(conf.getProducersPoint()), factory.newConnectedPushSocket(conf
                .getProducersPoint()));
        listener = new NoEventsSourceListener(NO_EVENTS_TO_SEND);
        source.add(listener);
    }

    /**
     * @throws InterruptedException
     * @throws IOException
     */
    @Test
    public void sourceReceivesEventsFromPyDispatch() throws IOException, InterruptedException {
        source.start();
        runPythonVismoDispatch();
        // give the source a moment to drain events emitted by the child process
        Thread.sleep(1000);
        listener.haveExpectedNoEvents();
    }

    /**
     * Fails the test unless the local python is at least the given version.
     *
     * @param version
     *            minimum required python version, e.g. "2.6"
     * @throws IOException
     * @throws InterruptedException
     */
    private static void requirePythonVersion(final String version) throws IOException, InterruptedException {
        final double requiredVersion = Double.valueOf(version.substring(0, 3));
        final ProcessBuilder builder = new ProcessBuilder(PYTHON, "--version");
        builder.redirectErrorStream(true);
        final Process proc = builder.start();
        final BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
        final OutputStreamWriter writer = new OutputStreamWriter(proc.getOutputStream());
        writer.flush();
        // NOTE(review): readLine() may return null if the process produced no
        // output (e.g. wrong interpreter path) - this would NPE here; verify.
        final String[] fs = reader.readLine().split(" ");
        // parse "major.minor" from output of the form "Python X.Y.Z"
        final double actualVersion = Double.valueOf(fs[1].substring(0, 3));
        System.err.println("python version: " + fs[1]);
        assertTrue("this test should be run against at least python version " + version, actualVersion >= requiredVersion);
        try {
            proc.waitFor();
        } catch (final InterruptedException e) {
            throw e;
        } finally {
            writer.close();
            reader.close();
        }
    }

    /**
     * Runs the vismo_dispatch script, echoing its output, and asserts a zero exit code.
     *
     * @throws IOException
     * @throws InterruptedException
     */
    private static void runPythonVismoDispatch() throws IOException, InterruptedException {
        final ProcessBuilder builder = new ProcessBuilder(PYTHON, PY_DISPATCH, String.valueOf(NO_EVENTS_TO_SEND));
        builder.environment().put("VISMO_CONFIG", VISMO_CONFIG_FILE);
        builder.redirectErrorStream(true);
        final Process proc = builder.start();
        final BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
        final OutputStreamWriter writer = new OutputStreamWriter(proc.getOutputStream());
        writer.flush();
        // relay the child's combined stdout/stderr to our stderr for diagnostics
        for (String s = reader.readLine(); s != null; s = reader.readLine())
            System.err.println(proc + " >> " + s);
        try {
            final int ret = proc.waitFor();
            assertEquals("process didn't exit normally", 0, ret);
        } catch (final InterruptedException e) {
            throw e;
        } finally {
            reader.close();
            writer.close();
        }
    }
}
package com.shinemo.mpush.tools;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

/**
 * Static I/O helpers: quiet close plus DEFLATE compress/uncompress.
 *
 * <p>Compression failures are logged and reported as {@code Constants.EMPTY_BYTES}
 * rather than thrown, so callers must treat an empty result as a possible error.
 */
public final class IOUtils {

    private static final Logger LOGGER = LoggerFactory.getLogger(IOUtils.class);

    /**
     * Closes the given resource, swallowing (but logging) any exception.
     * A {@code null} argument is a no-op.
     *
     * @param closeable resource to close, may be null
     */
    public static void close(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception e) {
                LOGGER.error("close closeable ex", e);
            }
        }
    }

    /**
     * Compresses {@code data} with DEFLATE.
     *
     * <p>Fixes over the previous version: {@code Profiler.release()} is now
     * guaranteed to pair with {@code Profiler.enter()} even if allocation fails,
     * and the stream is closed exactly once via try-with-resources (it used to be
     * closed inside the try block and again in finally).
     *
     * @param data raw bytes to compress
     * @return compressed bytes, or {@code Constants.EMPTY_BYTES} on I/O failure
     */
    public static byte[] compress(byte[] data) {
        Profiler.enter("start compress");
        try {
            // data.length / 4 is only an initial capacity hint; the buffer grows as needed
            ByteArrayOutputStream out = new ByteArrayOutputStream(data.length / 4);
            // try-with-resources: close() flushes and finishes the deflater stream
            try (DeflaterOutputStream zipOut = new DeflaterOutputStream(out)) {
                zipOut.write(data);
                zipOut.finish();
            }
            return out.toByteArray();
        } catch (IOException e) {
            LOGGER.error("compress ex", e);
            return Constants.EMPTY_BYTES;
        } finally {
            Profiler.release();
        }
    }

    /**
     * Inflates DEFLATE-compressed {@code data}.
     *
     * @param data compressed bytes
     * @return decompressed bytes, or {@code Constants.EMPTY_BYTES} on I/O failure
     *         (including corrupt input)
     */
    public static byte[] uncompress(byte[] data) {
        Profiler.enter("start uncompress");
        try (InflaterInputStream zipIn = new InflaterInputStream(new ByteArrayInputStream(data))) {
            // data.length * 4 is only a capacity hint (typical expansion ratio guess)
            ByteArrayOutputStream out = new ByteArrayOutputStream(data.length * 4);
            byte[] buffer = new byte[1024];
            int length;
            while ((length = zipIn.read(buffer)) != -1) {
                out.write(buffer, 0, length);
            }
            return out.toByteArray();
        } catch (IOException e) {
            LOGGER.error("uncompress ex", e);
            return Constants.EMPTY_BYTES;
        } finally {
            Profiler.release();
        }
    }
}
package com.google.mu.util;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.mu.util.Optionals.optional;
import static java.util.Comparator.naturalOrder;

import java.util.Comparator;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;

import org.checkerframework.checker.nullness.qual.Nullable;

import com.google.common.collect.ComparisonChain;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;
import com.google.errorprone.annotations.Immutable;

/**
 * An insertion point in a sequence of elements (normally of indexes), which is either an exact element,
 * or in between two adjacent elements in a discrete domain, or before or after all elements of the
 * discrete domain.
 *
 * <p>If representing an exact point, {@link #exact}, {@link #floor} and {@link #ceiling} all return the element.
 *
 * <p>If it's between two elements, {@link #exact} will return empty, and {@link #floor} and
 * {@link #ceiling} will return the two adjacent elements respectively.
 *
 * <p>If it's below all possible elements, {@link #isBelowAll} will return true and {@link #floor}
 * will throw. The {@link #ceiling} method will return the min element of the domain (if present).
 *
 * <p>If it's above all possible elements, {@link #isAboveAll} will return true and {@link #ceiling}
 * will throw. The {@link #floor} method will return the max element of the domain (if present).
 *
 * <p>Over a discrete domain with N discrete elements, there are {@code 2 * N + 1} distinct insertion
 * points, including all the elements, the points between each two adjacent elements, the point before
 * all elements and the point after all elements.
 *
 * @param <C> the domain type
 * @since 6.4
 */
@Immutable(containerOf = "C")
public final class InsertionPoint<C extends Comparable<C>> implements Comparable<InsertionPoint<C>> {
  @SuppressWarnings("unchecked") // Curiously recursive generics doesn't play nicely with wildcard.
  private static final Comparator<Comparable<?>> NULL_FIRST = (Comparator<Comparable<?>>) Comparator
      .nullsFirst(naturalOrder());

  @SuppressWarnings("unchecked") // Curiously recursive generics doesn't play nicely with wildcard.
  private static final Comparator<Comparable<?>> NULL_LAST = (Comparator<Comparable<?>>) Comparator
      .nullsLast(naturalOrder());

  // Invariant: floor and ceiling are never both null. floor == ceiling (the SAME
  // reference, established only by at()) means this is an exact point — exact()
  // relies on that reference identity.
  private final @Nullable C floor;
  private final @Nullable C ceiling;

  private InsertionPoint(@Nullable C floor, @Nullable C ceiling) {
    this.floor = floor;
    this.ceiling = ceiling;
  }

  /**
   * Returns an insertion point exactly at {@code element} such that {@link #exact},
   * {@link #floor} and {@link #ceiling} all return the same element.
   */
  public static <C extends Comparable<C>> InsertionPoint<C> at(C element) {
    checkNotNull(element);
    return new InsertionPoint<>(element, element);
  }

  /**
   * Returns an insertion point immediately before the given {@code ceiling} and
   * after the previous integer (if {@code ceiling} isn't
   * {@link Integer#MIN_VALUE}).
   */
  public static InsertionPoint<Integer> before(int ceiling) {
    return before(ceiling, DiscreteDomain.integers());
  }

  /**
   * Returns an insertion point immediately before the given {@code ceiling} and
   * after the previous long (if {@code ceiling} isn't
   * {@link Long#MIN_VALUE}).
   */
  public static InsertionPoint<Long> before(long ceiling) {
    return before(ceiling, DiscreteDomain.longs());
  }

  /**
   * Returns an insertion point immediately before the given {@code ceiling} and
   * after the previous element in the given discrete {@code domain} (if a previous
   * element exists).
   */
  public static <C extends Comparable<C>> InsertionPoint<C> before(
      C ceiling, DiscreteDomain<C> domain) {
    // domain.previous() returns null at the domain minimum -> floor stays null (below-all ok)
    return new InsertionPoint<>(domain.previous(ceiling), ceiling);
  }

  /**
   * Returns an insertion point immediately after the given {@code floor} and
   * before the next integer (if {@code floor} isn't {@link Integer#MAX_VALUE}).
   */
  public static InsertionPoint<Integer> after(int floor) {
    return after(floor, DiscreteDomain.integers());
  }

  /**
   * Returns an insertion point immediately after the given {@code floor} and
   * before the next long (if {@code floor} isn't {@link Long#MAX_VALUE}).
   */
  public static InsertionPoint<Long> after(long floor) {
    return after(floor, DiscreteDomain.longs());
  }

  /**
   * Returns an insertion point immediately after the given {@code floor} and
   * before the next element in the given discrete {@code domain} (if a next element
   * exists).
   */
  public static <C extends Comparable<C>> InsertionPoint<C> after(
      C floor, DiscreteDomain<C> domain) {
    // domain.next() returns null at the domain maximum -> ceiling stays null (above-all ok)
    return new InsertionPoint<>(floor, domain.next(floor));
  }

  /**
   * If this represents an exact point (not between two adjacent values), returns
   * the element at the point; else returns empty.
   */
  public Optional<C> exact() {
    // floor == ceiling is safe because at() always uses the same reference
    // for floor and ceiling.
    // floor and ceiling will never both be nulls.
    return optional(floor == ceiling, floor);
  }

  /**
   * Returns the floor element such that this insertion point is immediately
   * {@code >=} the floor.
   *
   * @throws NoSuchElementException if this represents a point below all possible
   *                                values in the discrete domain. Users can use
   *                                {@link #isBelowAll} to guard this condition.
   */
  public C floor() {
    if (floor == null) {
      throw new NoSuchElementException("InsertionPoint " + this + " has no floor");
    }
    return floor;
  }

  /**
   * Returns the ceiling element such that this insertion point is immediately
   * {@code <=} the ceiling.
   *
   * @throws NoSuchElementException if this represents a point above all possible
   *                                values in the discrete domain. Users can use
   *                                {@link #isAboveAll} to guard this condition.
   */
  public C ceiling() {
    if (ceiling == null) {
      throw new NoSuchElementException("InsertionPoint " + this + " has no ceiling");
    }
    return ceiling;
  }

  /**
   * Returns true if this is a point above the max possible element in the domain.
   */
  public boolean isAboveAll() {
    return ceiling == null;
  }

  /**
   * Returns true if this is a point below the min possible element in the domain.
   */
  public boolean isBelowAll() {
    return floor == null;
  }

  @Override
  public int compareTo(InsertionPoint<C> that) {
    // null floor (below-all) sorts first; null ceiling (above-all) sorts last
    return ComparisonChain.start().compare(floor, that.floor, NULL_FIRST).compare(ceiling, that.ceiling, NULL_LAST)
        .result();
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(floor) * 31 + Objects.hashCode(ceiling);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj instanceof InsertionPoint) {
      InsertionPoint<?> that = ((InsertionPoint<?>) obj);
      return Objects.equals(floor, that.floor) && Objects.equals(ceiling, that.ceiling);
    }
    return false;
  }

  /** Returns a human-readable string representation of this insertion point. */
  @Override
  public String toString() {
    if (floor == null) {
      return Range.lessThan(ceiling).toString();
    }
    if (ceiling == null) {
      return Range.greaterThan(floor).toString();
    }
    return exact().map(Object::toString).orElseGet(() -> Range.open(floor, ceiling).toString());
  }
}
package blog.interceptor;

import beans.BeanManager;
import blog.data.EIUserFavourRule;
import blog.service.IUserFavourRuleService;
import blog.service.IUserFavourService;
import config.Configs;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.protocol.HttpContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import orm.DatasourceFactory;
import rest.Interceptor;
import rest.RestHelper;
import rest.WithMatcher;

import java.io.IOException;
import java.sql.Connection;
import java.util.*;
import java.util.regex.Pattern;

/**
 * Awards "favour" points to the logged-in user when the decoded request URL
 * matches a configured favour rule. Rules are reloaded whenever the config flag
 * is unset; compiled regex patterns are cached per rule pattern string.
 *
 * <p>NOTE(review): the rule list and pattern cache are mutated per request with
 * no synchronization — confirm the surrounding framework serializes calls to
 * {@link #handle} or accepts the benign races here.
 */
public class UserFavourInterceptor extends WithMatcher implements Interceptor {

    private static final Logger logger = LogManager.getLogger(UserFavourInterceptor.class);

    public static final String USER_FAVOUR_INTERCEPTOR_CONFIG_KEY = "USER_FAVOUR_INTERCEPTOR_CONFIG_KEY";

    private IUserFavourService userFavourService = BeanManager.getInstance().getService(IUserFavourService.class);
    private IUserFavourRuleService userFavourRuleService = BeanManager.getInstance().getService(IUserFavourRuleService.class);

    private List<EIUserFavourRule> userFavourRules;
    // FIX: initialize eagerly. The original left this field null, so the very
    // first handle() call hit an NPE inside refreshRules() whenever any rule existed.
    private Map<String, Pattern> patternCache = new HashMap<>();

    @Override
    public String name() {
        return "user favour interceptor";
    }

    @Override
    public String urlPattern() {
        return ".*";
    }

    /**
     * Intercepts every request: if a user is logged in and the URL matches one or
     * more favour rules, applies the first rule (lowest score first) whose fill
     * limit the user has not yet reached, inside a SERIALIZABLE transaction.
     */
    @Override
    public void handle(HttpRequest httpRequest, HttpResponse httpResponse, HttpContext httpContext) throws HttpException, IOException {
        Long userId = (Long) httpContext.getAttribute("infinitely-serve-user_id");
        if (null == userId) {
            return; // anonymous request: nothing to award
        }
        // FIX: Boolean.TRUE.equals(...) avoids an unboxing NPE when the config
        // value is absent/null — a null value now simply triggers a reload.
        if (null == this.userFavourRules
                || !Boolean.TRUE.equals(Configs.getConfigs(USER_FAVOUR_INTERCEPTOR_CONFIG_KEY, Boolean.class))) {
            refreshRules();
        }
        String decodedUrl = RestHelper.getDecodedUrl(httpRequest);
        List<EIUserFavourRule> theMostMatchedRules = findMatchedRules(decodedUrl);
        if (theMostMatchedRules.isEmpty()) {
            return;
        }
        // lowest score first, matching the original ordering
        theMostMatchedRules.sort(Comparator.comparingInt(EIUserFavourRule::getUserFavourRuleScore));
        applyFirstApplicableRule(userId, theMostMatchedRules);
    }

    /** Reloads enabled rules and keeps only cached patterns still referenced by a rule. */
    private void refreshRules() {
        this.userFavourRules = this.userFavourRuleService.listNotDisabled();
        HashMap<String, Pattern> newCache = new HashMap<>();
        for (EIUserFavourRule userFavourRule : this.userFavourRules) {
            String userFavourRulePattern = userFavourRule.getUserFavourRulePattern();
            if (this.patternCache.containsKey(userFavourRulePattern)) {
                newCache.put(userFavourRulePattern, this.patternCache.get(userFavourRulePattern));
            }
        }
        this.patternCache = newCache;
        Configs.setConfigs(USER_FAVOUR_INTERCEPTOR_CONFIG_KEY, true);
    }

    /** Returns the rules whose pattern matches the whole decoded URL, compiling/caching patterns lazily. */
    private List<EIUserFavourRule> findMatchedRules(String decodedUrl) {
        List<EIUserFavourRule> matched = new ArrayList<>();
        for (EIUserFavourRule userFavourRule : this.userFavourRules) {
            String userFavourRulePattern = userFavourRule.getUserFavourRulePattern();
            Pattern pattern = this.patternCache.get(userFavourRulePattern);
            if (null == pattern) {
                pattern = Pattern.compile(userFavourRulePattern);
                this.patternCache.put(userFavourRulePattern, pattern);
            }
            if (pattern.matcher(decodedUrl).matches()) {
                matched.add(userFavourRule);
            }
        }
        return matched;
    }

    /**
     * Walks the sorted rules and applies at most one: skips (with rollback) rules
     * whose limit is already filled; rolls back and stops on service failure;
     * commits on success. Mirrors the original control flow exactly.
     */
    private void applyFirstApplicableRule(Long userId, List<EIUserFavourRule> rules) {
        for (EIUserFavourRule eiUserFavourRule : rules) {
            DatasourceFactory.begin(Connection.TRANSACTION_SERIALIZABLE);
            if (!this.userFavourRuleService.isFillRule(userId, eiUserFavourRule.getUserFavourRuleId(),
                    eiUserFavourRule.getUserFavourRuleLimit())) {
                DatasourceFactory.rollback();
                continue; // limit reached for this rule; try the next one
            }
            if (!this.userFavourService.increaseFavour(userId, eiUserFavourRule.getUserFavourRuleScore())) {
                logger.error(String.format("ID%s.", userId));
                DatasourceFactory.rollback();
                return;
            }
            if (!this.userFavourRuleService.fullFillRule(userId, eiUserFavourRule.getUserFavourRuleId())) {
                logger.error(String.format("ID%s.", userId));
                DatasourceFactory.rollback();
            } else {
                DatasourceFactory.commit();
            }
            return; // at most one rule is applied per request
        }
    }
}
package org.yamcs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.LogManager;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yamcs.utils.TimeEncoding;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.error.YAMLException;

/**
 * This class loads yamcs configurations. There are a number of "subsystems",
 * each using a corresponding subsystem.yaml file
 *
 * There are three places where a configuration file is looked up in order:
 *  - in the prefix/file.yaml via the classpath if the prefix is set in the setup method (used in the unittests)
 *  - in the userConfigDirectory .yamcs/etc/file.yaml
 *  - in the file.yaml via the classpath.
 *
 * @author nm
 */
@SuppressWarnings("rawtypes")
public class YConfiguration {
    // parsed top-level YAML mapping of this subsystem's file
    Map<String, Object> root;
    static String userConfigDirectory; //This is used by the users to overwrite
    private final String filename;
    // cache of already-loaded configurations, keyed by subsystem name
    private static Map<String, YConfiguration> configurations=new HashMap<String,YConfiguration>();
    static Logger log=LoggerFactory.getLogger(YConfiguration.class.getName());
    static String prefix=null;

    //keeps track of the configuration path so meaningful error messages can be printed
    //the path is something like filename->key1->subkey2[3]->...
    // NOTE(review): entries are added here on every getMap/getList lookup and never
    // removed (setup() clears `configurations` but not this map) — grows for the
    // lifetime of the JVM; confirm that is acceptable.
    static private IdentityHashMap<Object, String> confPath=new IdentityHashMap<Object, String>();

    /**
     * Loads and parses {@code subsystem.yaml} via {@link #getConfigurationStream}.
     * An empty file is treated as an empty map; a non-map top level is an error.
     */
    @SuppressWarnings("unchecked")
    private YConfiguration(String subsystem) throws IOException, ConfigurationException {
        Yaml yaml=new Yaml();
        filename=subsystem+".yaml";
        try {
            Object o=yaml.load(getConfigurationStream("/"+filename));
            if(o==null) {
                o=new HashMap<String, Object>(); //config file is empty, not an error
            } else if(!(o instanceof Map<?, ?>)) {
                throw new ConfigurationException(filename, "top level structure must be a map and not a "+o);
            }
            root=(Map<String, Object>)o;
            confPath.put(root, filename);
        } catch (YAMLException e) {
            throw new ConfigurationException(filename, e.toString(), e);
        }
    }

    /**
     * If configPrefix is not null, sets up the configuration to search the classpath for files like "configPrefix/xyz.properties"
     *
     * Also sets up the TimeEncoding configuration
     *
     * @param configPrefix
     * @throws ConfigurationException
     */
    public synchronized static void setup(String configPrefix) throws ConfigurationException {
        prefix=configPrefix;
        configurations.clear();//forget any known config (useful in the maven unit tests called in the same VM)
        if(System.getenv("YAMCS_DAEMON")==null) {
            // interactive/user mode: configs and cache live under ~/.yamcs
            userConfigDirectory=System.getProperty("user.home")+File.separatorChar+".yamcs";
            File logDir = new File(userConfigDirectory+File.separatorChar+"log");
            if (!logDir.exists()) {
                if (logDir.mkdirs()) {
                    System.err.println("Created directory: "+logDir);
                } else {
                    System.err.println("Cannot create directory: "+logDir);
                }
            }
            System.getProperties().put("cacheDirectory", userConfigDirectory+File.separatorChar);
        } else {
            // daemon mode: the daemon's home directory is the yamcs installation dir
            String yamcsDirectory=System.getProperty("user.home");
            System.getProperties().put("cacheDirectory", yamcsDirectory+File.separatorChar+"cache"+File.separatorChar);
            userConfigDirectory=yamcsDirectory+File.separatorChar+"etc";
        }
        if(System.getProperty("java.util.logging.config.file")==null) {
            try {
                LogManager.getLogManager().readConfiguration(getConfigurationStream("/logging.properties"));
            } catch (Exception e) {
                //do nothing, the default java builtin logging is used
            }
        }
        TimeEncoding.setUp();
    }

    /**
     * calls setup(null)
     *
     * @throws ConfigurationException
     */
    public synchronized static void setup() throws ConfigurationException {
        setup(null);
    }

    /**
     * Returns the (cached) configuration for the given subsystem, loading it on
     * first use. Subsystem names containing ".." or "/" are rejected to prevent
     * escaping the configuration directory.
     */
    public synchronized static YConfiguration getConfiguration(String subsystem) throws ConfigurationException {
        if(subsystem.contains("..") || subsystem.contains("/")) throw new ConfigurationException("Invalid subsystem '"+subsystem+"'");
        YConfiguration c=configurations.get(subsystem);
        if(c==null) {
            try {
                c=new YConfiguration(subsystem);
            } catch (IOException e){
                throw new ConfigurationException("Cannot load configuration for subsystem "+subsystem+": "+e);
            }
            configurations.put(subsystem, c);
        }
        return c;
    }

    /** Same as {@link #getConfiguration(String)}, optionally dropping the cached copy first. */
    public synchronized static YConfiguration getConfiguration(String subsystem, boolean reload) throws ConfigurationException {
        if(reload) {
            YConfiguration c = configurations.get(subsystem);
            if (c != null) {
                configurations.remove(subsystem);
            }
        }
        return getConfiguration(subsystem);
    }

    /**
     * Opens the named configuration resource, trying in order: classpath under
     * {@code prefix}, the user config directory, then the plain classpath.
     * The caller is responsible for closing the returned stream.
     */
    private static InputStream getConfigurationStream(String name) throws ConfigurationException {
        InputStream is;
        if(prefix!=null) {
            if((is=YConfiguration.class.getResourceAsStream("/"+prefix+name))!=null) {
                log.debug("Reading "+new File(YConfiguration.class.getResource("/"+prefix+name).getFile()).getAbsolutePath());
                return is;
            }
        }
        //see if the users has an own version of the file
        File f=new File(userConfigDirectory+name);
        if(f.exists()) {
            try {
                is=new FileInputStream(f);
                log.debug("Reading "+f.getAbsolutePath());
                return is;
            } catch (FileNotFoundException e) {
                // raced with a deletion; fall through to the classpath lookup
                e.printStackTrace();
            }
        }
        if((is=YConfiguration.class.getResourceAsStream(name))==null) {
            throw(new ConfigurationException("Cannot find resource "+name));
        }
        log.debug("Reading "+new File(YConfiguration.class.getResource(name).getFile()).getAbsolutePath());
        return is;
    }

    public String getGlobalProperty(String key) {
        return System.getProperty(key);
    }

    /** Throws (with the map's recorded path in the message) if key is missing or maps to null. */
    static private void checkKey(Map m, String key) throws ConfigurationException {
        if(!m.containsKey(key)) throw new ConfigurationException(confPath.get(m), "cannot find a mapping for key '"+key+"'");
        else if(m.get(key)==null) throw new ConfigurationException(confPath.get(m), key+" exists but is null");
    }

    public boolean containsKey(String key) {
        return root.containsKey(key);
    }

    public boolean containsKey(String key, String key1) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return m.containsKey(key1);
    }

    /**
     * returns the first entry in the config file if it's a map. Otherwise throws an error
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> getFirstMap() throws ConfigurationException {
        Object o=root.values().iterator().next();
        if(o instanceof Map) {
            return (Map<String, Object>) o;
        } else {
            throw new ConfigurationException("the first entry in the config is of type "+o.getClass()+" and not Map");
        }
    }

    /**
     * returns the first entry(key) in the config file.
     * @return
     */
    public String getFirstEntry() throws ConfigurationException {
        return root.keySet().iterator().next();
    }

    public Set<String> getKeys() {
        return root.keySet();
    }

    /** Returns the class name of o without its package, for error messages. */
    private static String getUnqualfiedClassName(Object o) {
        String name=o.getClass().getName();
        if (name.lastIndexOf('.') > 0) {
            name = name.substring(name.lastIndexOf('.')+1); // Map$Entry
        }
        // The $ can be converted to a .
        name = name.replace('$', '.'); // Map.Entry
        return name;
    }

    /****************************** Map configs*/
    @SuppressWarnings("unchecked")
    static public Map<String, Object> getMap(Map<String, Object> m, String key) throws ConfigurationException {
        checkKey(m, key);
        Object o=m.get(key);
        if(o instanceof Map) {
            Map<String, Object> m1=(Map)o;
            // NOTE(review): this records the path only when an entry for m1 ALREADY
            // exists, overwriting it; getList below uses the opposite (!containsKey)
            // logic. Looks like this was meant to be !containsKey — confirm.
            if(confPath.containsKey(m1)) {
                confPath.put(m1, confPath.get(m)+"->"+key);
            }
            return m1;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+o.getClass().getCanonicalName()+" and not Map");
        }
    }

    public Map<String, Object> getMap(String key) throws ConfigurationException {
        return getMap(root, key);
    }

    public Map<String, Object> getMap(String key, String key1) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return getMap(m, key1);
    }

    /***************************String configs*/
    /**
     * Returns m.get(key) if it exists and is of type string, otherwise throws an exception
     * @param m
     * @param key
     * @return
     * @throws ConfigurationException
     */
    static public String getString(Map m, String key) throws ConfigurationException {
        checkKey(m, key);
        Object o=m.get(key);
        if(o instanceof String) {
            return (String)o;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+getUnqualfiedClassName(o)+" and not String");
        }
    }

    public String getString(String key) throws ConfigurationException {
        return getString(root, key);
    }

    /*
     * The key has to point to a map that contains the subkey that points to a string
     */
    public String getString(String key, String subkey) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return getString(m, subkey);
    }

    public String getString(String key, String key1, String key2) throws ConfigurationException {
        Map<String, Object> m=getMap(key,key1);
        return getString(m, key2);
    }

    @SuppressWarnings("unchecked")
    public <T> List<T> getList(String key) throws ConfigurationException {
        return (List<T>) getList(root, key);
    }

    /*****************List configs*/
    /*
     * The key has to point to a list
     */
    @SuppressWarnings("unchecked")
    static public <T> List<T> getList(Map<String, Object> m, String key) throws ConfigurationException {
        checkKey(m, key);
        Object o=m.get(key);
        if(o instanceof List) {
            List l=(List) o;
            String parentPath=confPath.get(m);
            // record the path of each element (e.g. file->key[2]) for error messages
            for(int i=0; i<l.size();i++) {
                Object o1=l.get(i);
                if(!confPath.containsKey(o1)) {
                    confPath.put(o1, parentPath+"->"+key+"["+i+"]");
                }
            }
            return l;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+getUnqualfiedClassName(o)+" and not List");
        }
    }

    public <T> List<T> getList(String key, String key1, String key2) throws ConfigurationException {
        Map<String, Object> m=getMap(key,key1);
        return getList(m, key2);
    }

    public <T> List<T> getList(String key, String key1) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return getList(m, key1);
    }

    /**********************Boolean configs*/
    /**
     * Returns m.get(key) if it exists and is of type boolean, otherwise throws an exception
     * @param m
     * @param key
     * @return
     * @throws ConfigurationException
     */
    static public boolean getBoolean(Map<String, Object> m, String key, boolean defaultValue) throws ConfigurationException {
        Object o=m.get(key);
        // NOTE(review): a present-but-non-Boolean value silently yields the default
        // here, whereas getInt(m, key, v) throws in the same situation — confirm
        // the inconsistency is intended.
        if((o!=null) && (o instanceof Boolean)) {
            return (Boolean)o;
        } else {
            return defaultValue;
        }
    }

    static public boolean getBoolean(Map<String, Object> m, String key) throws ConfigurationException {
        checkKey(m, key);
        Object o=m.get(key);
        if(o instanceof Boolean) {
            return (Boolean)o;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+getUnqualfiedClassName(o)+" and not Boolean (use true or false without quotes)");
        }
    }

    public boolean getBoolean(String key) throws ConfigurationException {
        return getBoolean(root,key);
    }

    public boolean getBoolean(String key, String key1) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return getBoolean(m, key1);
    }

    public boolean getBoolean(String key, String key1, String key2) throws ConfigurationException {
        Map<String, Object> m=getMap(key,key1);
        return getBoolean(m, key2);
    }

    /********************** int configs */
    static public int getInt(Map<String, Object> m, String key) throws ConfigurationException {
        checkKey(m, key);
        Object o=m.get(key);
        if(o instanceof Integer) {
            return (Integer)o;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+getUnqualfiedClassName(o)+" and not Integer");
        }
    }

    /**
     * return the m.get(key) as an int if it's present or v if it is not.
     *
     * If the key is present but the value is not an integer, a ConfigurationException is thrown.
     * @param m
     * @param key
     * @param v
     * @return
     * @throws ConfigurationException
     */
    static public int getInt(Map<String, Object> m, String key, int v) throws ConfigurationException {
        if(!m.containsKey(key)) return v;
        Object o=m.get(key);
        if(o instanceof Integer) {
            return (Integer)o;
        } else {
            throw new ConfigurationException(confPath.get(m), "mapping for key '"+key+"' is of type "+getUnqualfiedClassName(o)+" and not Integer");
        }
    }

    public int getInt(String key) throws ConfigurationException {
        return getInt(root,key);
    }

    public int getInt(String key, String key1) throws ConfigurationException {
        Map<String, Object> m=getMap(key);
        return getInt(m, key1);
    }

    public boolean isList(String key) {
        return isList(root, key);
    }

    /** Returns true if key maps to a List; throws (via checkKey) if key is missing or null. */
    public boolean isList(Map m, String key) {
        checkKey(m, key);
        Object o = m.get(key);
        return (o instanceof List);
    }
}
package weardrip.weardrip; import android.app.AlertDialog; import android.content.BroadcastReceiver; import android.content.Context; import android.content.ContextWrapper; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.v4.app.FragmentActivity; import android.text.InputType; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import com.fourmob.datetimepicker.date.DatePickerDialog; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.wearable.PutDataMapRequest; import com.google.android.gms.wearable.PutDataRequest; import com.google.android.gms.wearable.Wearable; import com.sleepbot.datetimepicker.time.RadialPickerLayout; import com.sleepbot.datetimepicker.time.TimePickerDialog; import java.io.File; import java.util.Calendar; import de.jonasrottmann.realmbrowser.RealmBrowser; import io.realm.Realm; import io.realm.RealmConfiguration; public class MainActivity extends FragmentActivity implements View.OnClickListener, GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, DatePickerDialog.OnDateSetListener, TimePickerDialog.OnTimeSetListener { public static final String REALM_FILE_NAME = "default.realm"; Button startsensor, stopsensor, stopcollectionservice, startcollectionservice; EditText calibration, doublecalibration, intercept, slope; int year, month, day, hour, minute; private GoogleApiClient googleApiClient; private TextView mTxtTitle; private BroadcastReceiver receiver; private static Realm realm; private static Context context; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); 
setContentView(R.layout.activity_main); startsensor = (Button) findViewById(R.id.startsensor); startsensor.setOnClickListener(this); stopsensor = (Button) findViewById(R.id.stopsensor); stopsensor.setOnClickListener(this); startcollectionservice = (Button) findViewById(R.id.startcollectionservice); startcollectionservice.setOnClickListener(this); stopcollectionservice = (Button) findViewById(R.id.stopcollectionservice); stopcollectionservice.setOnClickListener(this); calibration = (EditText) findViewById(R.id.calibration); calibration.setInputType(InputType.TYPE_CLASS_NUMBER); calibration.setOnClickListener(this); doublecalibration = (EditText) findViewById(R.id.doublecalibration); doublecalibration.setInputType(InputType.TYPE_CLASS_NUMBER); doublecalibration.setOnClickListener(this); slope = (EditText) findViewById(R.id.slope); slope.setInputType(InputType.TYPE_CLASS_NUMBER); slope.setOnClickListener(this); intercept = (EditText) findViewById(R.id.intercept); intercept.setInputType(InputType.TYPE_CLASS_NUMBER); intercept.setOnClickListener(this); googleApiClient = new GoogleApiClient.Builder(this) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .addApi(Wearable.API) .build(); RealmBrowser.getInstance().addRealmModel(BGdata.class); mTxtTitle = (TextView) findViewById(R.id.txtTitle); mTxtTitle.setText("Waiting for Data"); findViewById(R.id.btnOpenFile).setOnClickListener(this); findViewById(R.id.btnOpenModel).setOnClickListener(this); updateTitle(); realm = Realm.getInstance(this); context = this; RealmBrowser.showRealmFilesNotification(this); receiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { closeRealm(); getRealm(); updateTitle(); } }; } @Override public void onClick(View v) { switch (v.getId()) { case R.id.startsensor: sensorstartonClick(); break; case R.id.stopsensor: sensorstoponClick(); break; case R.id.startcollectionservice: startcollectionserviceonClick(); break; case 
R.id.stopcollectionservice: stopcollectionserviceonClick(); break; case R.id.calibration: calibrationonClick(); break; case R.id.doublecalibration: doublecalibrationonClick(); break; case R.id.slope: slopeonClick(); break; case R.id.intercept: interceptonClick(); break; case R.id.btnOpenFile: startRealmFilesActivity(); break; case R.id.btnOpenModel: startRealmModelsActivity(); break; } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { Intent i = new Intent(MainActivity.this, Preferences.class); startActivity(i); return true; } return super.onOptionsItemSelected(item); } private void updateTitle() { RealmConfiguration config = new RealmConfiguration.Builder(this) .name(REALM_FILE_NAME) .build(); Realm realm = Realm.getInstance(config); int size = realm.allObjects(BGdata.class).size(); realm.close(); File writableFolder = MainActivity.this.getFilesDir(); File realmFile = new File(writableFolder, Realm.DEFAULT_REALM_NAME); String byts = String.valueOf(realmFile.length()); String kb = String.valueOf(realmFile.length() / 1024); String mb = String.valueOf(realmFile.length() / 1024 / 1024); mTxtTitle.setText(String.format("Items in database: %d", size) + "\nSize on Phone: " + byts + " bytes" + "\nSize on Phone: " + kb + " KB" + "\nSize on Phone: " + mb + " MB"); mTxtTitle.invalidate(); } private void startRealmFilesActivity() { RealmBrowser.startRealmFilesActivity(this); } private void startRealmModelsActivity() { RealmBrowser.startRealmModelsActivity(this, REALM_FILE_NAME); } public 
static Realm getRealm() {
    // Lazily (re)open the shared Realm handle; refresh() is used to detect a
    // handle that was closed earlier, in which case a new one is opened.
    if (realm == null) {
        realm = Realm.getInstance(context);
    }
    try {
        realm.refresh();
    } catch (Exception e) {
        // Realm was closed earlier
        realm = Realm.getInstance(context);
    }
    return realm;
}

/** Closes the shared Realm handle if one is open. */
public static void closeRealm() {
    if (realm != null) {
        realm.close();
    }
}

/**
 * Shows the single-value calibration dialog; "Send to wear" copies the value
 * into the calibration field and pushes it to the wearable as a DataItem.
 */
public void calibrationonClick() {
    // get prompts.xml view
    LayoutInflater li = LayoutInflater.from(this);
    View promptsView = li.inflate(R.layout.calibration_dialog, null);
    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
    // set prompts.xml to alertdialog builder
    alertDialogBuilder.setView(promptsView);
    final EditText userInput = (EditText) promptsView.findViewById(R.id.editcalibration);
    // set dialog message
    alertDialogBuilder
            .setCancelable(false)
            .setPositiveButton("Send to wear", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    calibration.setText(userInput.getText());
                    // "timestamp" forces the DataItem to change every time so
                    // the wearable's listener always fires.
                    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_calibration").setUrgent();
                    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putString("startcalibration", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putString("calibration", userInput.getText().toString());
                    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
                    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
                }
            })
            .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            });
    // create alert dialog
    AlertDialog alertDialog = alertDialogBuilder.create();
    // show it
    alertDialog.show();
}

/**
 * Shows the two-value calibration dialog; "Send to wear" displays both values
 * locally and pushes them to the wearable as a DataItem.
 */
public void doublecalibrationonClick() {
    // get prompts.xml view
    LayoutInflater li = LayoutInflater.from(this);
    View promptsView = li.inflate(R.layout.doublecalibration_dialog, null);
    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
    // set prompts.xml to alertdialog builder
    alertDialogBuilder.setView(promptsView);
    final EditText userInput1 = (EditText) promptsView.findViewById(R.id.editdoublecalibration1);
    final EditText userInput2 = (EditText) promptsView.findViewById(R.id.editdoublecalibration2);
    // set dialog message
    alertDialogBuilder
            .setCancelable(false)
            .setPositiveButton("Send to wear", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    String val1 = userInput1.getText().toString();
                    String val2 = userInput2.getText().toString();
                    String text = val1 + " " + val2;
                    doublecalibration.setText(String.valueOf(text));
                    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_doublecalibration").setUrgent();
                    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putString("startdoublecalibration", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putString("doublecalibration1", userInput1.getText().toString());
                    putDataMapReq.getDataMap().putString("doublecalibration2", userInput2.getText().toString());
                    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
                    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
                }
            })
            .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            });
    // create alert dialog
    AlertDialog alertDialog = alertDialogBuilder.create();
    // show it
    alertDialog.show();
}

/**
 * Shows the slope input dialog. Unlike the calibration dialogs this one only
 * updates the local field — the wear-sync call is commented out.
 */
public void slopeonClick() {
    // get prompts.xml view
    LayoutInflater li = LayoutInflater.from(this);
    View promptsView = li.inflate(R.layout.slope_dialog, null);
    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
    // set prompts.xml to alertdialog builder
    alertDialogBuilder.setView(promptsView);
    final EditText userInput = (EditText) promptsView.findViewById(R.id.editslope);
    // set dialog message
    alertDialogBuilder
            .setCancelable(false)
            .setPositiveButton("Send to wear", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    slope.setText(userInput.getText());
                    //googleClient.connect();
                }
            })
            .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            });
    // create alert dialog
    AlertDialog alertDialog = alertDialogBuilder.create();
    // show it
    alertDialog.show();
}

/**
 * Shows the intercept input dialog; like slopeonClick() it only updates the
 * local field.
 */
public void interceptonClick() {
    // get prompts.xml view
    LayoutInflater li = LayoutInflater.from(this);
    View promptsView = li.inflate(R.layout.intercept_dialog, null);
    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
    // set prompts.xml to alertdialog builder
    alertDialogBuilder.setView(promptsView);
    final EditText userInput = (EditText) promptsView.findViewById(R.id.editintercept);
    // set dialog message
    alertDialogBuilder
            .setCancelable(false)
            .setPositiveButton("Send to wear", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    intercept.setText(userInput.getText());
                    //googleClient.connect();
                }
            })
            .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.cancel();
                }
            });
    // create alert dialog
    AlertDialog alertDialog = alertDialogBuilder.create();
    // show it
    alertDialog.show();
}

/**
 * Starts the sensor flow: shows a time picker and a date picker whose results
 * arrive in onTimeSet()/onDateSet(), which then trigger sendensordata().
 */
public void sensorstartonClick() {
    final Calendar calendar = Calendar.getInstance();
    final DatePickerDialog datePickerDialog = DatePickerDialog.newInstance((DatePickerDialog.OnDateSetListener) this,
            calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH));
    final TimePickerDialog timePickerDialog = TimePickerDialog.newInstance((TimePickerDialog.OnTimeSetListener) this,
            calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), false, false);
    timePickerDialog.show(getSupportFragmentManager(), "timepicker");
    datePickerDialog.setYearRange(2016, 2028);
    datePickerDialog.show(getSupportFragmentManager(),
"datepicker");
}

/** Tells the wearable to stop the sensor by writing a change-forcing DataItem. */
public void sensorstoponClick() {
    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_stopsensor").setUrgent();
    // "timestamp" makes every request unique so the wearable's listener fires.
    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
    putDataMapReq.getDataMap().putString("StopSensor", Long.toString(System.currentTimeMillis()));
    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
}

/**
 * Date-picker callback: stores the chosen sensor start date.
 * The picker reports a 0-based month, hence the +1.
 */
@Override
public void onDateSet(DatePickerDialog datePickerDialog, int intyear, int intmonth, int intday) {
    year = intyear;
    month = intmonth + 1;
    day = intday;
}

/**
 * Time-picker callback: stores the chosen sensor start time and opens the
 * confirmation dialog that sends everything to the wearable.
 */
@Override
public void onTimeSet(RadialPickerLayout view, int inthour, int intminute) {
    hour = inthour;
    // FIX: was "minute = inthour" — the picked minute was discarded and the
    // hour was sent twice, so the start time pushed to the wearable was wrong.
    minute = intminute;
    sendensordata();
}

/**
 * Shows the picked start date/time and, on confirmation, pushes it to the
 * wearable as a DataItem on /wearable_startsensor.
 */
public void sendensordata() {
    new AlertDialog.Builder(this)
            .setTitle("Sensor Date:")
            .setMessage("Sensor Started at: " + day + "." + month + "." + year + " " + hour + ":" + minute)
            .setPositiveButton("Send to Wear", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_startsensor").setUrgent();
                    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putString("StartSensor", Long.toString(System.currentTimeMillis()));
                    putDataMapReq.getDataMap().putInt("day", day);
                    putDataMapReq.getDataMap().putInt("year", year);
                    putDataMapReq.getDataMap().putInt("month", month);
                    putDataMapReq.getDataMap().putInt("hour", hour);
                    putDataMapReq.getDataMap().putInt("minute", minute);
                    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
                    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
                }
            })
            .setNegativeButton("cancel", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                }
            })
            .show();
}

/** Tells the wearable to start its data-collection service. */
public void startcollectionserviceonClick() {
    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_startcollectionservice").setUrgent();
    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
    putDataMapReq.getDataMap().putString("StartCollectionService", Long.toString(System.currentTimeMillis()));
    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
}

/** Tells the wearable to stop its data-collection service. */
public void stopcollectionserviceonClick() {
    PutDataMapRequest putDataMapReq = PutDataMapRequest.create("/wearable_stopcollectionservice").setUrgent();
    putDataMapReq.getDataMap().putString("timestamp", Long.toString(System.currentTimeMillis()));
    putDataMapReq.getDataMap().putString("StopCollectionService", Long.toString(System.currentTimeMillis()));
    PutDataRequest putDataReq = putDataMapReq.asPutDataRequest();
    Wearable.DataApi.putDataItem(googleApiClient, putDataReq);
}

// Connect to the data layer when the Activity starts
@Override
protected void onStart() {
    super.onStart();
    googleApiClient.connect();
    // Listen for data-change broadcasts from the listener service (receiver
    // is created in onCreate() and unregistered in onStop()).
    IntentFilter filter = new IntentFilter(ListenerService.DATA_STORY_CHANGED);
    registerReceiver(receiver, filter);
}

@Override
protected void onStop() {
    if (googleApiClient != null && googleApiClient.isConnected()) {
        googleApiClient.disconnect();
    }
    closeRealm();
    unregisterReceiver(receiver);
    super.onStop();
}

@Override
public void onConnected(Bundle bundle) {
}

/** Informs the user that the connection to the Wear device was suspended. */
@Override
public void onConnectionSuspended(int cause) {
    new AlertDialog.Builder(this)
            .setTitle("Connection Suspended!")
            .setMessage("Connection to Wear Suspended")
            .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    dialog.cancel();
                }
            })
            .show();
}

/** Informs the user that the connection to the Wear device failed. */
@Override
public void onConnectionFailed(ConnectionResult connectionResult) {
    new AlertDialog.Builder(this)
            .setTitle("Connection Failed!")
            .setMessage("Connection to Wear Failed")
            .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    dialog.cancel();
                }
            })
            .show();
}
}
package com.galvarez.ttw.screens.overworld;

import static com.galvarez.ttw.utils.Colors.markup;
import static java.lang.Math.min;

import java.util.List;
import java.util.Map.Entry;

import com.artemis.Entity;
import com.artemis.World;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Button;
import com.badlogic.gdx.scenes.scene2d.ui.Label.LabelStyle;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.utils.Align;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.utils.IntIntMap;
import com.galvarez.ttw.model.DiplomaticSystem;
import com.galvarez.ttw.model.DiplomaticSystem.Action;
import com.galvarez.ttw.model.InfluenceSystem;
import com.galvarez.ttw.model.components.ArmyCommand;
import com.galvarez.ttw.model.components.Buildings;
import com.galvarez.ttw.model.components.Diplomacy;
import com.galvarez.ttw.model.components.Discoveries;
import com.galvarez.ttw.model.components.InfluenceSource;
import com.galvarez.ttw.model.components.Score;
import com.galvarez.ttw.model.data.Building;
import com.galvarez.ttw.model.data.Empire;
import com.galvarez.ttw.model.map.GameMap;
import com.galvarez.ttw.model.map.Influence;
import com.galvarez.ttw.model.map.MapPosition;
import com.galvarez.ttw.model.map.Terrain;
import com.galvarez.ttw.rendering.NotificationsSystem;
import com.galvarez.ttw.rendering.NotificationsSystem.Notification;
import com.galvarez.ttw.rendering.components.Description;
import com.galvarez.ttw.rendering.components.Name;
import com.galvarez.ttw.rendering.components.Sprite;
import com.galvarez.ttw.rendering.ui.FramedDialog;
import com.galvarez.ttw.rendering.ui.FramedMenu;
import com.galvarez.ttw.screens.overworld.controls.InputManager;

/**
 * Builds and lays out the framed overlay menus of the overworld screen: turn
 * controls, empire summary, tile selection details, buildings list,
 * notifications and modal dialogs.
 */
public class MenuBuilder {

  private static final int MENU_PADDING = 15;

  private final Stage stage;

  private final Skin skin;

  // Fixed menus created once in the constructor...
  private final FramedMenu turnMenu, indicationMenu, empireMenu, notifMenu;

  private final FramedMenu selectionMenu;

  private final FramedMenu buildingsMenu;

  // ...and transient popup menus created/discarded on demand.
  private FramedMenu actionMenu, mapMenu;

  private final World world;

  private final GameMap map;

  private final OverworldScreen screen;

  private final InputManager inputManager;

  private final NotificationsSystem notifications;

  public MenuBuilder(Stage stage, World world, GameMap map, OverworldScreen screen, InputManager inputManager) {
    this.stage = stage;
    this.world = world;
    this.map = map;
    this.screen = screen;
    this.inputManager = inputManager;
    this.notifications = world.getSystem(NotificationsSystem.class);
    skin = new Skin(Gdx.files.internal("uiskin/uiskin.json"));
    turnMenu = new FramedMenu(skin, 256, 256);
    indicationMenu = new FramedMenu(skin, 512, 512);
    empireMenu = new FramedMenu(skin, 256, 256).nbColumns(1);
    selectionMenu = new FramedMenu(skin, 256, 512);
    buildingsMenu = new FramedMenu(skin, 256, 1024);
    notifMenu = new FramedMenu(skin, 400, 512);
  }

  /** Rebuilds the top-left turn menu (end turn, score, map options). */
  public void buildTurnMenu() {
    turnMenu.clear();

    // EndTurn button
    turnMenu.addButton("End turn (year " + screen.getCurrentYear() + ")", null, screen::endTurn, screen.canFinishTurn());

    // Access to score ladder
    Score score = screen.player.getComponent(Score.class);
    turnMenu.addButton(score.totalScore + " (+" + score.lastTurnPoints + ")", screen::scoresMenu);

    // access to map options
    turnMenu.addButton("Map options", () -> displayMapMenu(turnMenu));

    turnMenu.addToStage(stage, MENU_PADDING, stage.getHeight() - MENU_PADDING, false);
  }

  /** Toggles the map-options popup next to its parent menu. */
  private void displayMapMenu(FramedMenu parent) {
    if (mapMenu == null) {
      mapMenu = new FramedMenu(skin, 256, 128, turnMenu);
      mapMenu.clear();
      mapMenu.addCheckBox("Display colored influence?", screen.displayColoredInfluence(),
          screen::displayColoredInfluence);
      mapMenu.addToStage(stage, parent.getX() + parent.getWidth(), parent.getY() + 10, true);
    } else {
      // second click: hide and drop it so the next click recreates it
      mapMenu.clear();
      mapMenu = null;
    }
  }

  /** Shows the screen's indication strings (if any) next to the turn menu. */
  public void buildIndicationMenu() {
    indicationMenu.clear();

    List<String> indications = screen.getIndications();
    if (indications != null && !indications.isEmpty()) {
      for (String i : indications)
        indicationMenu.addLabel(i);
      // fill the horizontal space left between the turn and buildings menus
      indicationMenu.setWidth(stage.getWidth() - (turnMenu.getWidth() + buildingsMenu.getWidth() + MENU_PADDING * 3));
      indicationMenu.addToStage(stage, MENU_PADDING + turnMenu.getX() + turnMenu.getWidth(),
          stage.getHeight() - MENU_PADDING, false);
    }
  }

  /** Rebuilds the player-empire menu (diplomacy, army, discovery, policies). */
  public void buildEmpireMenu() {
    empireMenu.clear();

    empireMenu.addLabel("- " + screen.player.getComponent(Name.class).name + " -");

    // here present a new screen with diplomatic relations
    empireMenu.addButton("Diplomacy", screen::diplomacyMenu);

    // here present a new screen with army preferences
    ArmyCommand command = screen.player.getComponent(ArmyCommand.class);
    empireMenu.addButton("Army (power=" + (command.militaryPower - command.usedPower) + "/" + command.militaryPower
        + ")", screen::armiesMenu);

    // here present a sub-menu to see current discovery and be able to change it
    Discoveries discoveries = screen.player.getComponent(Discoveries.class);
    empireMenu.addButton("Discovery "
        + (discoveries != null && discoveries.next != null ? "(" + discoveries.next.progress + "%)" : "(NONE)"),
        screen::discoveryMenu);

    // here present a new screen to choose policies
    int instability = screen.revoltSystem.getInstability(screen.player);
    if (instability <= 0)
      empireMenu.addButton("Policies (instability " + instability + ")", screen::policiesMenu);
    else {
      // positive instability is highlighted in red via color markup
      LabelStyle style = empireMenu.getSkin().get("colored", LabelStyle.class);
      style.font.setMarkupEnabled(true);
      empireMenu.addButton(style, "[BLACK]Policies (instability [RED]" + instability + "[])", null,
          screen::policiesMenu, true);
    }

    empireMenu.addToStage(stage, MENU_PADDING, turnMenu.getY() - MENU_PADDING, false);
  }

  /**
   * Rebuilds the selection menu for the given tile and (optional) entity on it:
   * terrain, influences, entity description/power, and diplomacy shortcuts when
   * the tile's main influence belongs to another empire.
   */
  public void buildSelectionMenu(final MapPosition tile, Entity e) {
    selectionMenu.clear();

    addTileDescription(tile);
    Influence influence = addInfluences(tile);

    if (e != null) {
      addDescription(e);

      InfluenceSource infSource = e.getComponent(InfluenceSource.class);
      if (infSource != null) {
        int percent = 100 * infSource.powerAdvancement
            / world.getSystem(InfluenceSystem.class).getRequiredPowerAdvancement(infSource);
        selectionMenu.addLabel("Power: " + infSource.power + " (+" + percent + "%)");
      }
    }

    Entity source = influence.getMainInfluenceSource(world);
    if (source != null) {
      if (source != screen.player && screen.player != null)
        addEmpire(screen.player, source);
    }

    selectionMenu.addToStage(stage, MENU_PADDING, empireMenu.getY() - MENU_PADDING, true);
  }

  /**
   * Adds a colored per-empire influence breakdown (percentage, delta, main
   * source marker) for the tile and returns the tile's Influence.
   */
  private Influence addInfluences(MapPosition tile) {
    Influence influence = map.getInfluenceAt(tile);
    int mainSource = influence.getMainInfluenceSource();
    StringBuilder sb = new StringBuilder("[BLACK]Influence: ");
    for (IntIntMap.Entry e : influence) {
      Entity source = world.getEntity(e.key);
      Empire empire = source.getComponent(Empire.class);
      sb.append("\n ").append(markup(empire.color)).append(source.getComponent(Name.class).name).append("[]: ")
          .append(100 * e.value / InfluenceSystem.INITIAL_POWER).append('%');
      int delta = influence.getDelta(source);
      if (delta > 0)
        sb.append(" +").append(100 * delta / InfluenceSystem.INITIAL_POWER).append('%');
      else if (delta < 0)
        sb.append(' ').append(100 * delta / InfluenceSystem.INITIAL_POWER).append('%');
      // ignore == 0
      if (e.key == mainSource)
        sb.append(" (main)");
    }
    selectionMenu.addColoredLabel(sb.toString());
    return influence;
  }

  /** Adds the terrain sprite and coordinates of the tile to the selection menu. */
  private void addTileDescription(MapPosition tile) {
    Terrain terrain = map.getTerrainAt(tile);
    selectionMenu.addLabelSprite(terrain.getDesc() + " (" + tile.x + ", " + tile.y + ")",
        screen.mapRenderer.getTexture(terrain), Color.WHITE);
  }

  /** Adds the entity's description (with its texture or sprite when present). */
  private void addDescription(Entity e) {
    Description desc = e.getComponent(Description.class);
    if (desc != null) {
      if (desc.texture != null) {
        selectionMenu.addLabelSprite(desc.desc, desc.texture, Color.WHITE);
      } else {
        Sprite sprite = e.getComponent(Sprite.class);
        if (sprite != null)
          selectionMenu.addLabelSprite(desc.desc, sprite.region, sprite.color);
        else
          selectionMenu.addLabel(desc.desc);
      }
    }
  }

  /**
   * Adds diplomacy lines for the selected foreign empire: current relation,
   * the player's proposal (clickable, opens the action popup) and theirs.
   */
  private void addEmpire(Entity player, Entity selected) {
    selectionMenu.addLabel("Empire: " + selected.getComponent(Name.class).name);
    Diplomacy playerDiplo = player.getComponent(Diplomacy.class);
    Diplomacy selectedDiplo = selected.getComponent(Diplomacy.class);
    selectionMenu.addLabel(" relations are " + playerDiplo.getRelationWith(selected));
    selectionMenu.addButton(" we want ", playerDiplo.getProposalTo(selected).str,
        () -> displayDiplomaticActionMenu(selectionMenu, playerDiplo, selected), true);
    selectionMenu.addLabel(" they want " + selectedDiplo.getProposalTo(player).str);
  }

  /**
   * Opens a popup listing the possible diplomatic actions toward the target;
   * choosing one records the proposal and reloads the menus.
   */
  private void displayDiplomaticActionMenu(FramedMenu parent, Diplomacy diplo, Entity target) {
    if (actionMenu != null)
      actionMenu.clear();
    actionMenu = new FramedMenu(skin, 256, 128, parent);
    boolean hasActions = false;
    for (Action action : world.getSystem(DiplomaticSystem.class).getPossibleActions(diplo, target)) {
      hasActions = true;
      actionMenu.addButton(action.str, () -> {
        if (action != Action.NO_CHANGE)
          diplo.proposals.put(target, action);
        actionMenu.clear();
        inputManager.reloadMenus();
      });
    }
    if (!hasActions)
      actionMenu.addLabel("No possible actions!");
    actionMenu.addToStage(stage, parent.getX() + parent.getWidth(), parent.getY() + 10, true);
  }

  /** Lists the buildings of the given entity (if it has any) top-right. */
  public void buildBuildingsMenu(Entity e) {
    buildingsMenu.clear();
    if (e != null) {
      Buildings buildings = e.getComponent(Buildings.class);
      if (buildings != null) {
        // NOTE(review): this appends the Name component itself (its toString()),
        // not ".name" as the other menus do — confirm that is intentional.
        buildingsMenu.addLabel("Buildings in " + e.getComponent(Name.class));
        if (buildings.built.isEmpty())
          buildingsMenu.addLabel("- no buildings -");
        else
          for (Entry<String, Building> b : buildings.built.entrySet())
            buildingsMenu.addLabel(" " + b.getValue().getName() + " (" + b.getKey() + ")");
        buildingsMenu.addToStage(stage, stage.getWidth() - 256, stage.getHeight() - MENU_PADDING, false);
      }
    }
  }

  /** Rebuilds the notifications menu; clicking a notification runs its action. */
  public void buildNotificationMenu() {
    List<Notification> notifs = notifications.getNotifications();
    notifMenu.clear();
    if (notifs.isEmpty())
      notifMenu.addLabel("No notifications");
    else
      for (Notification n : notifs) {
        notifMenu.addButtonSprite(n.type, n.msg, () -> {
          inputManager.reloadMenus();
          if (n.action != null)
            n.action.run();
        }, true);
      }
    notifMenu.addToStage(stage, Gdx.graphics.getWidth() - 400, min(512, notifMenu.getTable().getPrefHeight()), false);
  }

  /** Shows a framed modal dialog with the given title, message and buttons. */
  public void buildDialog(String title, int minWidth, int minHeight, String message, Button ... buttons) {
    FramedDialog fd = new FramedDialog(skin, title, message);
    for (Button b : buttons) {
      b.align(Align.center);
      fd.addButton(b);
    }
    fd.addToStage(stage, minWidth, minHeight);
  }

  /** Creates a skinned text button, attaching the listener when non-null. */
  public TextButton getTextButton(String text, ChangeListener listener) {
    TextButton button = new TextButton(text, skin);
    if (listener != null)
      button.addListener(listener);
    return button;
  }
}
package org.domokit.oknet;

import android.content.Context;
import android.util.Log;

import com.squareup.okhttp.Cache;
import com.squareup.okhttp.OkHttpClient;

import org.chromium.mojo.bindings.InterfaceRequest;
import org.chromium.mojo.system.Core;
import org.chromium.mojo.system.DataPipe;
import org.chromium.mojo.system.MessagePipeHandle;
import org.chromium.mojo.system.MojoException;
import org.chromium.mojom.mojo.CookieStore;
import org.chromium.mojom.mojo.NetAddress;
import org.chromium.mojom.mojo.NetworkService;
import org.chromium.mojom.mojo.TcpBoundSocket;
import org.chromium.mojom.mojo.TcpConnectedSocket;
import org.chromium.mojom.mojo.UdpSocket;
import org.chromium.mojom.mojo.UrlLoader;
import org.chromium.mojom.mojo.WebSocket;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * OkHttp implementation of NetworkService.
 *
 * Only URL loading is implemented; every other endpoint closes the request it
 * receives. The OkHttp client and the thread pool are shared by all instances.
 */
public class NetworkServiceImpl implements NetworkService {
    private static final String TAG = "NetworkServiceImpl"; // currently unused
    // Shared across instances; initialized lazily in the constructor.
    // NOTE(review): this lazy init is not synchronized — confirm all instances
    // are created on a single thread.
    private static ExecutorService sThreadPool;
    private static OkHttpClient sClient;
    private Core mCore;

    /**
     * Binds this service implementation to the given message pipe.
     * @param context Android context (currently unused)
     * @param core Mojo core handle, must be non-null
     * @param pipe pipe the NetworkService interface is served on
     */
    public NetworkServiceImpl(Context context, Core core, MessagePipeHandle pipe) {
        assert core != null;
        mCore = core;

        if (sThreadPool == null) sThreadPool = Executors.newCachedThreadPool();
        if (sClient == null) sClient = new OkHttpClient();

        NetworkService.MANAGER.bind(this, pipe);
    }

    @Override
    public void close() {}

    @Override
    public void onConnectionError(MojoException e) {}

    /** Serves a new UrlLoader backed by the shared OkHttp client and pool. */
    @Override
    public void createUrlLoader(InterfaceRequest<UrlLoader> loader) {
        UrlLoader.MANAGER.bind(new UrlLoaderImpl(mCore, sClient, sThreadPool), loader);
    }

    // The remaining endpoints are unimplemented: each request pipe is closed
    // immediately so the caller observes a connection error rather than a hang.

    @Override
    public void getCookieStore(InterfaceRequest<CookieStore> cookieStore) {
        cookieStore.close();
    }

    @Override
    public void createWebSocket(InterfaceRequest<WebSocket> socket) {
        socket.close();
    }

    @Override
    public void createTcpBoundSocket(NetAddress localAddress,
            InterfaceRequest<TcpBoundSocket> boundSocket,
            CreateTcpBoundSocketResponse callback) {
        boundSocket.close();
    }

    @Override
    public void createTcpConnectedSocket(NetAddress remoteAddress,
            DataPipe.ConsumerHandle sendStream,
            DataPipe.ProducerHandle receiveStream,
            InterfaceRequest<TcpConnectedSocket> clientSocket,
            CreateTcpConnectedSocketResponse callback) {
        sendStream.close();
        receiveStream.close();
        clientSocket.close();
    }

    @Override
    public void createUdpSocket(InterfaceRequest<UdpSocket> socket) {
        socket.close();
    }
}
package lu.svv.offline.check;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Scanner;
import java.util.Set;

import lu.svv.offline.check.impl.CheckFactoryImpl;
import lu.svv.offline.oclr.BiScope;
import lu.svv.offline.oclr.EventChainElement;
import lu.svv.offline.oclr.OCLRConstraint;
import lu.svv.offline.oclr.OCLRExpression;
import lu.svv.offline.oclr.OccurrencePattern;
import lu.svv.offline.oclr.OclrPackage;
import lu.svv.offline.oclr.OrderPattern;
import lu.svv.offline.oclr.Pattern;
import lu.svv.offline.oclr.Scope;
import lu.svv.offline.oclr.UniScope;
import lu.svv.offline.trace.Event;
import lu.svv.offline.trace.TimeStamp;
import lu.svv.offline.trace.Trace;
import lu.svv.offline.trace.TraceElement;
import lu.svv.offline.trace.TraceFactory;
import lu.svv.offline.trace.TracePackage;
import lu.svv.offline.trace.impl.TraceFactoryImpl;

import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EPackageRegistryImpl;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
import org.eclipse.ocl.OCL;
import org.eclipse.ocl.OCLInput;
import org.eclipse.ocl.ParserException;
import org.eclipse.ocl.ecore.Constraint;
import org.eclipse.ocl.ecore.EcoreEnvironmentFactory;
import org.eclipse.ocl.expressions.OCLExpression;
import org.eclipse.ocl.helper.OCLHelper;

/**
 * Benchmark driver that checks OCLR properties (p1-p38) against XMI trace
 * instances of various lengths. Each check_* method walks a fixed list of
 * property numbers and trace indexes, loads the property/trace pair into a
 * monitor and runs the check, repeating every combination 5 times.
 */
public class OfflineCheck {

	private Monitor monitor;

	// OCL library with the OCLR helper operations, loaded by the monitor.
	public static final String oclOperationsFile = "../lu.svv.offline/lib/oclr.ocl";

	public static void main(String[] args) {
		// Uncomment the benchmark to run.
		//check_globally();
	}

	// This method is used for checking the OCLR properties p1-p12. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_globally(){
		List<Integer> iList = Arrays.asList(100000, 200000, 300000, 400000, 500000, 600000, 700000, 800000, 900000, 1000000); // indexes of the traces: various trace lengths
		List<Integer> properties = Arrays.asList(1,2,3,4,5,6,7,8,9,10,11,12); // properties p1-p12
		Iterator<Integer> iterProperty = properties.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp = "../lu.svv.offline/instances/p%d_%d.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty.hasNext()){
			i=0;
			int propertyNo = iterProperty.next();
			System.out.println("P"+propertyNo+":");
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					// time each single check and print the seconds, tab-separated
					long startTime = System.currentTimeMillis();
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
					long stopTime = System.currentTimeMillis();
					long elapsedTime = stopTime - startTime;
					System.out.print(elapsedTime/1000.0);
					System.out.print('\t');
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p13-p20. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_before(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces
		List<Integer> properties_before = Arrays.asList(13,14,15,16,17,18,19,20); // properties p13-p20
		Iterator<Integer> iterProperty_before = properties_before.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_before = "../lu.svv.offline/instances/p%d_100000_%d_before.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_before.hasNext()){
			i=0;
			int propertyNo = iterProperty_before.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					long startTime = System.currentTimeMillis();
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_before, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
					long stopTime = System.currentTimeMillis();
					long elapsedTime = stopTime - startTime;
					System.out.print(elapsedTime/1000.0);
					System.out.print('\t');
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p21-p31. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_after(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9); // indexes of the traces
		List<Integer> properties_after = Arrays.asList(21,22,23,24,25,26,27,28,29,30,31); // properties p21-p31
		Iterator<Integer> iterProperty_after = properties_after.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_after = "../lu.svv.offline/instances/p%d_100000_%d_after.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_after.hasNext()){
			i=0;
			int propertyNo = iterProperty_after.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_after, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p32-p35 on the 'between_mult_fixed_length' traces. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_between_multiple_fixed_length(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces
		List<Integer> properties_between = Arrays.asList(32,33,34,35); // properties p32-p35
		Iterator<Integer> iterProperty_between = properties_between.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_mult_fixed_length.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_between.hasNext()){
			i=0;
			int propertyNo = iterProperty_between.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_between, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p32-p35 on the 'between_mult_fixed_number' traces. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_between_multiple_fixed_number(){
		List<Integer> iList = Arrays.asList(1,2,3,4); // indexes of the traces
		List<Integer> properties_between = Arrays.asList(32,33,34,35); // properties p32-p35
		Iterator<Integer> iterProperty_between = properties_between.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_mult_fixed_number.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_between.hasNext()){
			i=0;
			int propertyNo = iterProperty_between.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_between, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p36-p38 on the 'between_one_fixed_length' traces. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_between_one_fixed_length(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces
		List<Integer> properties_between = Arrays.asList(36,37,38); // properties p36-p38
		Iterator<Integer> iterProperty_between = properties_between.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_one_fixed_length.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_between.hasNext()){
			i=0;
			int propertyNo = iterProperty_between.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_between, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for checking the OCLR properties p36-p38 on the 'between_one_various_lengths' traces. Please change the lists if you only want to check subsets of the properties and traces
	public static void check_between_one_various_lengths(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9); // indexes of the traces
		List<Integer> properties_between = Arrays.asList(36,37,38); // properties p36-p38
		Iterator<Integer> iterProperty_between = properties_between.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_one_various_lengths.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_between.hasNext()){
			i=0;
			int propertyNo = iterProperty_between.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_between, propertyNo, iter.next());
					rc.loadMonitor(pPath, tPath).check(); //check the property
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for applying the OCLR properties scopes of p13-p20. Please change the lists if you only want to check subsets of the properties and traces
	public static void apply_before(){
		List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces
		List<Integer> properties_before = Arrays.asList(13,14,15,16,17,18,19,20); // properties p13-p20
		Iterator<Integer> iterProperty_before = properties_before.iterator();
		String pPathTemp = "../lu.svv.offline/instances/p%d.xmi";
		String tPathTemp_before = "../lu.svv.offline/instances/p%d_100000_%d_before.xmi";
		int i = 0;
		OfflineCheck rc = new OfflineCheck();
		while(iterProperty_before.hasNext()){
			i=0;
			int propertyNo = iterProperty_before.next();
			System.out.println("P"+propertyNo);
			while(i < 5){ // check each property 5 times
				Iterator<Integer> iter = iList.iterator();
				while(iter.hasNext()){
					String pPath = String.format(pPathTemp, propertyNo);
					String tPath = String.format(tPathTemp_before, propertyNo, iter.next());
					// only applies the scope instead of running the full check
					rc.loadMonitor(pPath, tPath).evaluate_applyscope(); // apply the scope
				}
				i++;
				System.out.println();
			}
		}
	}

	// This method is used for applying the OCLR properties scopes of p21-p31.
Please change the lists if you only want to check subsets of the properties and traces public static void apply_after(){ List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9); // indexes of the traces List<Integer> properties_after = Arrays.asList(21,22,23,24,25,26,27,28,29,30,31); // properties p21-p31 Iterator<Integer> iterProperty_after = properties_after.iterator(); String pPathTemp = "../lu.svv.offline/instances/p%d.xmi"; String tPathTemp_after = "../lu.svv.offline/instances/p%d_100000_%d_after.xmi"; int i = 0; OfflineCheck rc = new OfflineCheck(); while(iterProperty_after.hasNext()){ i=0; int propertyNo = iterProperty_after.next(); System.out.println("P"+propertyNo); while(i < 5){ // check each property 5 times Iterator<Integer> iter = iList.iterator(); while(iter.hasNext()){ String pPath = String.format(pPathTemp, propertyNo); String tPath = String.format(tPathTemp_after, propertyNo, iter.next()); rc.loadMonitor(pPath, tPath).evaluate_applyscope(); // apply the scope } i++; System.out.println(); } } } // This method is used for applying the OCLR properties scopes of p32-p35 on the 'between_mult_fixed_length' traces. 
Please change the lists if you only want to check subsets of the properties and traces public static void apply_between_multiple_fixed_length(){ List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces List<Integer> properties_between = Arrays.asList(32,33,34,35); // properties p32-p35 Iterator<Integer> iterProperty_between = properties_between.iterator(); String pPathTemp = "../lu.svv.offline/instances/p%d.xmi"; String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_mult_fixed_length.xmi"; int i = 0; OfflineCheck rc = new OfflineCheck(); while(iterProperty_between.hasNext()){ i=0; int propertyNo = iterProperty_between.next(); System.out.println("P"+propertyNo); while(i < 5){ // check each property 5 times Iterator<Integer> iter = iList.iterator(); while(iter.hasNext()){ String pPath = String.format(pPathTemp, propertyNo); String tPath = String.format(tPathTemp_between, propertyNo, iter.next()); rc.loadMonitor(pPath, tPath).evaluate_applyscope(); // apply the scope } i++; System.out.println(); } } } // This method is used for applying the OCLR properties scopes of p32-p35 on the 'between_mult_fixed_number' traces. 
Please change the lists if you only want to check subsets of the properties and traces public static void apply_between_multiple_fixed_number(){ List<Integer> iList = Arrays.asList(1,2,3,4); // indexes of the traces List<Integer> properties_between = Arrays.asList(32,33,34,35); // properties p32-p35 Iterator<Integer> iterProperty_between = properties_between.iterator(); String pPathTemp = "../lu.svv.offline/instances/p%d.xmi"; String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_mult_fixed_number.xmi"; int i = 0; OfflineCheck rc = new OfflineCheck(); while(iterProperty_between.hasNext()){ i=0; int propertyNo = iterProperty_between.next(); System.out.println("P"+propertyNo); while(i < 5){ // check each property 5 times Iterator<Integer> iter = iList.iterator(); while(iter.hasNext()){ String pPath = String.format(pPathTemp, propertyNo); String tPath = String.format(tPathTemp_between, propertyNo, iter.next()); rc.loadMonitor(pPath, tPath).evaluate_applyscope(); // apply the scope } i++; System.out.println(); } } } // This method is used for applying the OCLR properties scopes of p36-p38 on the 'between_one_fixed_length' traces. 
Please change the lists if you only want to check subsets of the properties and traces public static void apply_between_one_fixed_length(){ List<Integer> iList = Arrays.asList(1,2,3,4,5,6,7,8,9,10); // indexes of the traces List<Integer> properties_between = Arrays.asList(36,37,38); // properties p36-p38 Iterator<Integer> iterProperty_between = properties_between.iterator(); String pPathTemp = "../lu.svv.offline/instances/p%d.xmi"; String tPathTemp_between = "../lu.svv.offline/instances/p%d_100000_%d_between_one_fixed_length.xmi"; int i = 0; OfflineCheck rc = new OfflineCheck(); while(iterProperty_between.hasNext()){ i=0; int propertyNo = iterProperty_between.next(); System.out.println("P"+propertyNo); while(i < 5){ // check each property 5 times Iterator<Integer> iter = iList.iterator(); while(iter.hasNext()){ String pPath = String.format(pPathTemp, propertyNo); String tPath = String.format(tPathTemp_between, propertyNo, iter.next()); rc.loadMonitor(pPath, tPath).evaluate_applyscope(); // apply the scope } i++; System.out.println(); } } } // This method is used for applying the OCLR properties scopes of p36-p38 on the 'between_one_various_lengths' traces. 
// Please change the lists if you only want to check subsets of the properties and traces.
/**
 * Applies the scopes of the OCLR properties p36-p38 on the 'between_one_various_lengths' traces.
 * Each property's scope is applied five times against every listed trace index.
 */
public static void apply_between_one_various_lengths(){
    List<Integer> traceIndexes = Arrays.asList(1,2,3,4,5,6,7,8,9); // indexes of the traces
    List<Integer> propertyNumbers = Arrays.asList(36,37,38); // properties p36-p38
    String propertyPathTemplate = "../lu.svv.offline/instances/p%d.xmi";
    String tracePathTemplate = "../lu.svv.offline/instances/p%d_100000_%d_between_one_various_lengths.xmi";
    OfflineCheck checker = new OfflineCheck();
    for (int propertyNo : propertyNumbers) {
        System.out.println("P" + propertyNo);
        for (int run = 0; run < 5; run++) { // check each property 5 times
            for (int traceIndex : traceIndexes) {
                String propertyPath = String.format(propertyPathTemplate, propertyNo);
                String tracePath = String.format(tracePathTemplate, propertyNo, traceIndex);
                checker.loadMonitor(propertyPath, tracePath).evaluate_applyscope(); // apply the scope
            }
            System.out.println();
        }
    }
}

/**
 * Loads an XMI instance of an OCLR property and a CSV trace instance, and
 * wires both into a new {@code Monitor} stored on this checker.
 *
 * @param oclrFilePath  path/URI of the OCLR property XMI file
 * @param traceFilePath path of the CSV trace file
 * @return this checker, so calls can be chained (e.g. {@code .check()})
 */
public OfflineCheck loadMonitorFromCsv(String oclrFilePath, String traceFilePath) {
    ResourceSet resourceSet = new ResourceSetImpl();
    resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put(
            "xmi", new XMIResourceFactoryImpl());
    resourceSet.getPackageRegistry().put(OclrPackage.eNS_URI, OclrPackage.eINSTANCE);
    Resource oclrResource = resourceSet.getResource(URI.createURI(oclrFilePath), true);
    OCLRConstraint constraint = (OCLRConstraint) oclrResource.getContents().get(0);
    Monitor monitor = new CheckFactoryImpl().createMonitor();
    monitor.setConstraint(constraint);
    monitor.setTrace(loadTrace(traceFilePath));
    this.monitor = monitor;
    return this;
}

/**
 * Loads XMI instances of an OCLR property and of a trace, and wires both
 * into a new {@code Monitor} stored on this checker.
 *
 * @param oclrFilePath  path/URI of the OCLR property XMI file
 * @param traceFilePath path/URI of the trace XMI file
 * @return this checker, so calls can be chained
 */
public OfflineCheck loadMonitor(String oclrFilePath, String traceFilePath) {
    ResourceSet resourceSet = new ResourceSetImpl();
    resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put(
            "xmi", new XMIResourceFactoryImpl());
    resourceSet.getPackageRegistry().put(OclrPackage.eNS_URI, OclrPackage.eINSTANCE);
    Resource oclrResource = resourceSet.getResource(URI.createURI(oclrFilePath), true);
    OCLRConstraint constraint = (OCLRConstraint) oclrResource.getContents().get(0);
    resourceSet.getPackageRegistry().put(TracePackage.eNS_URI, TracePackage.eINSTANCE);
    Resource traceResource = resourceSet.getResource(URI.createURI(traceFilePath), true);
    Trace trace = (Trace) traceResource.getContents().get(0);
    Monitor monitor = new CheckFactoryImpl().createMonitor();
    //sanitize(trace, constraint);
    monitor.setConstraint(constraint);
    monitor.setTrace(trace);
    this.monitor = monitor;
    return this;
}

/**
 * Loads a trace from a CSV file. The first line is treated as a header and
 * skipped; each subsequent line is split on non-word characters, the first
 * field being the event name and the second its integer timestamp. Events
 * with the same name share one {@code Event} object.
 *
 * @param traceFilePath path of the CSV trace file
 * @return the populated trace (empty if the file was not found)
 */
public static Trace loadTrace(String traceFilePath) {
    File traceFile = new File(traceFilePath);
    java.util.regex.Pattern csvDelimiter = java.util.regex.Pattern.compile("\\W+");
    TraceFactory traceFactory = TraceFactoryImpl.init();
    Trace trace = traceFactory.createTrace();
    Map<String, Event> events = new TreeMap<String, Event>();
    List<TraceElement> traceElements = new ArrayList<TraceElement>();
    int traceIndex = 0;
    int eventIndex = 0;
    Scanner traceInputStream = null;
    try {
        traceInputStream = new Scanner(traceFile);
        traceInputStream.nextLine(); // skip the CSV header line
        while (traceInputStream.hasNext()) {
            traceIndex++;
            String line = traceInputStream.nextLine();
            String[] values = csvDelimiter.split(line);
            String eventName = values[0];
            int timestampValue = Integer.parseInt(values[1]);
            TraceElement traceElement = traceFactory.createTraceElement();
            traceElement.setIndex(traceIndex);
            if (events.containsKey(eventName)) {
                traceElement.setEvent(events.get(eventName));
            } else {
                // first occurrence of this event name: create and cache it
                eventIndex++;
                Event newEvent = traceFactory.createEvent();
                newEvent.setId(eventIndex);
                newEvent.setName(eventName);
                traceElement.setEvent(newEvent);
                events.put(eventName, newEvent);
            }
            TimeStamp timestamp = traceFactory.createTimeStamp();
            timestamp.setValue(timestampValue);
            traceElement.setTimestamp(timestamp);
            traceElements.add(traceElement);
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } finally {
        // FIX: close the scanner on all paths (it previously leaked when a
        // parse error was thrown inside the loop).
        if (traceInputStream != null) {
            traceInputStream.close();
        }
    }
    trace.getTraceElements().addAll(traceElements);
    return trace;
}

/**
 * Checks the loaded monitor: parses the shared OCL operations file, turns
 * each generated constraint string into an OCL invariant, evaluates each
 * invariant against the monitor, and prints the result plus the elapsed
 * time (ms, tab-separated) for each check.
 */
public void check() {
    // Copied from org.eclipse.ocl.ecore.tests.DocumentationExamples.java
    EPackage.Registry registry = new EPackageRegistryImpl();
    registry.put(CheckPackage.eNS_URI, CheckPackage.eINSTANCE);
    registry.put(OclrPackage.eNS_URI, OclrPackage.eINSTANCE);
    registry.put(TracePackage.eNS_URI, TracePackage.eINSTANCE);
    EcoreEnvironmentFactory environmentFactory = new EcoreEnvironmentFactory(registry);
    OCL<EPackage, EClassifier, ?, ?, ?, ?, ?, ?, ?, Constraint, ?, ?> ocl =
            OCL.newInstance(environmentFactory);
    InputStream in = null;
    try {
        // parse the contents as an OCL document
        in = new FileInputStream(oclOperationsFile);
        in.skip(191); // NOTE(review): skips a fixed-size file prologue — confirm against the operations file layout
        ocl.parse(new OCLInput(in));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (ParserException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // FIX: 'in' is null when the FileInputStream constructor threw, and
        // closing it unguarded raised an NPE that masked the original error.
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    OCLHelper<EClassifier, ?, ?, Constraint> helper = ocl.createOCLHelper();
    helper.setContext(CheckPackage.Literals.MONITOR);
    try {
        Map<String, String> constraintStringMap =
                ConstraintFactory.init().createConstraint(this.monitor);
        Map<String, Constraint> constraintMap = new HashMap<String, Constraint>();
        for (Entry<String, String> pairs : constraintStringMap.entrySet()) {
            constraintMap.put(pairs.getKey(), helper.createInvariant(pairs.getValue()));
        }
        for (Entry<String, Constraint> pairs : constraintMap.entrySet()) {
            // check the property and time the evaluation
            long startTime = System.currentTimeMillis();
            System.out.println(ocl.check(this.monitor, pairs.getValue()));
            long stopTime = System.currentTimeMillis();
            long elapsedTime = stopTime - startTime;
            System.out.print(elapsedTime);
            System.out.print('\t');
        }
    } catch (ParserException e) {
        e.printStackTrace();
    }
}

/**
 * Applies the scope of the loaded monitor's property: parses the shared OCL
 * operations file, turns each generated scope query string into an OCL
 * query, evaluates each query against the monitor, and prints the result
 * plus the elapsed time (ms, tab-separated) for each evaluation.
 */
public void evaluate_applyscope() {
    // Copied from org.eclipse.ocl.ecore.tests.DocumentationExamples.java
    EPackage.Registry registry = new EPackageRegistryImpl();
    registry.put(CheckPackage.eNS_URI, CheckPackage.eINSTANCE);
    registry.put(OclrPackage.eNS_URI, OclrPackage.eINSTANCE);
    registry.put(TracePackage.eNS_URI, TracePackage.eINSTANCE);
    EcoreEnvironmentFactory environmentFactory = new EcoreEnvironmentFactory(registry);
    OCL<EPackage, EClassifier, ?, ?, ?, ?, ?, ?, ?, Constraint, ?, ?> ocl =
            OCL.newInstance(environmentFactory);
    InputStream in = null;
    try {
        in = new FileInputStream(oclOperationsFile);
        in.skip(191); // NOTE(review): same fixed prologue skip as in check() — keep the two in sync
        ocl.parse(new OCLInput(in));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (ParserException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // FIX: guard against 'in' being null (see check()).
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    OCLHelper<EClassifier, ?, ?, Constraint> helper = ocl.createOCLHelper();
    helper.setContext(CheckPackage.Literals.MONITOR);
    try {
        Map<String, String> scopeQueryStringMap =
                ConstraintFactory.init().createScopeQuery(this.monitor);
        Map<String, OCLExpression<EClassifier>> queryMap =
                new HashMap<String, OCLExpression<EClassifier>>();
        for (Entry<String, String> pairs : scopeQueryStringMap.entrySet()) {
            queryMap.put(pairs.getKey(), helper.createQuery(pairs.getValue()));
        }
        for (Entry<String, OCLExpression<EClassifier>> pairs : queryMap.entrySet()) {
            // apply the scope and time the evaluation
            long startTime = System.currentTimeMillis();
            System.out.println(ocl.evaluate(this.monitor, pairs.getValue()));
            long stopTime = System.currentTimeMillis();
            long elapsedTime = stopTime - startTime;
            System.out.print(elapsedTime);
            System.out.print('\t');
        }
    } catch (ParserException e) {
        e.printStackTrace();
    }
}
}
package com.wegas.log.neo4j;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.wegas.core.Helper;
import com.wegas.core.ejb.RequestManager;
import com.wegas.core.exception.internal.NoPlayerException;
import com.wegas.core.persistence.game.DebugGame;
import com.wegas.core.persistence.game.DebugTeam;
import com.wegas.core.persistence.game.Player;
import com.wegas.core.persistence.variable.primitive.NumberInstance;
import com.wegas.mcq.ejb.QuestionDescriptorFacade;
import com.wegas.mcq.persistence.ChoiceDescriptor;
import com.wegas.mcq.persistence.QuestionDescriptor;
import com.wegas.mcq.persistence.Reply;
import org.apache.commons.lang3.StringEscapeUtils;

import javax.annotation.Resource;
import javax.ejb.*;
import javax.enterprise.event.Observes;
import java.util.Date;

/**
 * Singleton EJB that mirrors gameplay events (question replies and number
 * updates) into a Neo4j graph, one labelled node per event, linked to the
 * most recent node with the same player/team/game/type key.
 * Concurrency is bean-managed; internal synchronization is done per player.
 */
@ConcurrencyManagement(ConcurrencyManagementType.BEAN)
@Singleton
public class Neo4jPlayerReply {

    // Shared, thread-safe JSON mapper. Field-name quoting is disabled and
    // unquoted names are allowed because the serialized objects are embedded
    // directly into Cypher query text (see createLinkedToYoungest).
    private static final ObjectMapper objectMapper;

    static {
        objectMapper = new ObjectMapper();
        objectMapper.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, false);
        objectMapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
    }

    // Discriminates the two kinds of logged nodes.
    private enum TYPE {
        QUESTION,
        NUMBER
    }

    @Resource
    private SessionContext sessionContext;

    /**
     * CDI observer: routes a validated question reply to the asynchronous
     * logger. Goes through the EJB business object so container interceptors
     * (notably @Asynchronous) apply to the self-invocation.
     */
    public void onReplyValidate(@Observes QuestionDescriptorFacade.ReplyValidate event) throws JsonProcessingException {
        sessionContext.getBusinessObject(Neo4jPlayerReply.class).addPlayerReply(event.player, event.reply, (ChoiceDescriptor) event.choice.getDescriptor(), (QuestionDescriptor) event.question.getDescriptor());
    }

    /**
     * CDI observer: routes a number-instance update to the asynchronous
     * logger, again via the business object for interceptor support.
     */
    public void onNumberUpdate(@Observes RequestManager.NumberUpdate update) throws NoPlayerException, JsonProcessingException {
        sessionContext.getBusinessObject(Neo4jPlayerReply.class).addNumberUpdate(update.player, update.number);
    }

    // NOTE(review): @Asynchronous on a *private* method — EJB interception
    // normally applies to business (public) methods only; confirm the
    // container actually runs this asynchronously.
    @Asynchronous
    private void addNumberUpdate(Player player, NumberInstance numberInstance) throws NoPlayerException, JsonProcessingException {
        // Skip debug games/teams, games without a log ID, and a down database.
        if (player == null || player.getGame() instanceof DebugGame || player.getTeam() instanceof DebugTeam
                || Helper.isNullOrEmpty(player.getGameModel().getProperties().getLogID())
                || !Neo4jUtils.checkDataBaseIsRunning()) {
            return;
        }
        final String key = nodeKey(player, TYPE.NUMBER);
        // Per-player lock so concurrent updates for the same player serialize.
        synchronized (player) {
            ObjectNode newNode = createJsonNode(player, numberInstance.getDescriptor().getName(), numberInstance.getValue());
            createLinkedToYoungest(key, "gamelink", newNode, player.getGameModel().getName());
        }
    }

    /**
     * Creates or adds a player and its answer data in the graph belonging to a
     * given game.
     *
     * @param player the player data
     * @param reply the player's answer data
     * @param choiceDescriptor the selected choice description
     * @param questionDescriptor the selected question description
     * @throws JsonProcessingException if the node cannot be serialized to JSON
     */
    @Asynchronous
    private synchronized void addPlayerReply(Player player, Reply reply, ChoiceDescriptor choiceDescriptor, QuestionDescriptor questionDescriptor) throws JsonProcessingException {
        // Skip debug games, games without a log ID, and a down database.
        if (player.getGame() instanceof DebugGame
                || Helper.isNullOrEmpty(player.getGameModel().getProperties().getLogID())
                || !Neo4jUtils.checkDataBaseIsRunning()) {
            return;
        }
        String key = nodeKey(player, TYPE.QUESTION);
        synchronized (player) {
            ObjectNode newNode = createJsonNode(player, reply, choiceDescriptor, questionDescriptor);
            createLinkedToYoungest(key, "gamelink", newNode, player.getGameModel().getName());
        }
    }

    /**
     * Constructs a key from several fields of the player's object. The key is
     * a Cypher property-map fragment used to match nodes of the same
     * player/team/game/type.
     *
     * @param player the player data
     * @return the formed key
     */
    private static String nodeKey(Player player, TYPE type) {
        return "{playerId:" + player.getId() + ", teamId:" + player.getTeamId() + ", gameId:" + player.getGameId() + ", type:\"" + type.toString() + "\"}";
    }

    /**
     * Creates a new Question node, with all the necessary properties.
     *
     * @param player the player data
     * @param reply the player's answer data
     * @param choiceDescriptor the selected choice description
     * @param questionDescriptor the selected question description
     * @return a node object
     */
    private static ObjectNode createJsonNode(Player player, Reply reply, ChoiceDescriptor choiceDescriptor, QuestionDescriptor questionDescriptor) {
        ObjectNode jsonObject = objectMapper.createObjectNode();
        jsonObject.put("playerId", player.getId());
        jsonObject.put("type", TYPE.QUESTION.toString());
        jsonObject.put("teamId", player.getTeamId());
        jsonObject.put("gameId", player.getGameId());
        jsonObject.put("name", player.getName());
        // Creation time; also used by createLinkedToYoungest to find the
        // previous ("youngest") node to link from.
        jsonObject.put("starttime", (new Date()).getTime());
        jsonObject.put("choice", choiceDescriptor.getName());
        jsonObject.put("question", questionDescriptor.getName());
        jsonObject.put("result", reply.getResult().getName());
        // Impact script is escaped so it survives embedding in the Cypher text.
        jsonObject.put("impact", StringEscapeUtils.escapeEcmaScript(reply.getResult().getImpact().getContent()));
        jsonObject.put("logID", player.getGameModel().getProperties().getLogID());
        return jsonObject;
    }

    /**
     * Creates a new Number node, with all the necessary properties.
     *
     * @param player the player data
     * @param name the variable name
     * @param value the actual variable value
     * @return a node object
     * @throws JsonProcessingException declared for callers; not thrown here
     */
    private static ObjectNode createJsonNode(Player player, String name, double value) throws JsonProcessingException {
        ObjectNode jsonObject = objectMapper.createObjectNode();
        jsonObject.put("type", TYPE.NUMBER.toString());
        jsonObject.put("playerId", player.getId());
        jsonObject.put("teamId", player.getTeamId());
        jsonObject.put("gameId", player.getGameId());
        jsonObject.put("name", player.getName());
        jsonObject.put("starttime", (new Date()).getTime());
        jsonObject.put("variable", name);
        jsonObject.put("number", value);
        jsonObject.put("logID", player.getGameModel().getProperties().getLogID());
        return jsonObject;
    }

    /**
     * Link a new node to an already existing newest filtered by key.
     * The Cypher: creates the new node, finds the max starttime among existing
     * nodes matching the key, then links that node to the new one.
     *
     * @param key key to filter "youngest" nodes
     * @param relationLabel label to put onto the relation
     * @param target new node to create
     * @param label label to put onto the node
     * @throws JsonProcessingException if the target node cannot be serialized
     */
    private static void createLinkedToYoungest(String key, String relationLabel, ObjectNode target, String label) throws JsonProcessingException {
        String query = "CREATE (p:`" + label + "` " + objectMapper.writeValueAsString(target)
                + ") WITH p AS p Match (n " + key + ") WHERE n <> p WITH max(n.starttime) AS max, p AS p MATCH (n "
                + key + ") WHERE n.starttime = max AND n <> p WITH n AS n, p AS p CREATE (n)-[:`"
                + relationLabel + "`]->(p) return p";
        String result = Neo4jUtils.queryDBString(query);
        checkError(result);
    }

    /**
     * Checks if an error occurred during the execution of a query. The
     * potential error message is recorded in the JSON result of the query. If
     * an error was found this method raises an exception.
     *
     * @param result the result of the query
     */
    private static void checkError(String result) {
        String err = Neo4jUtils.extractErrorData(result);
        if (err == null) {
            return;
        }
        throw new RuntimeException(err);
    }
}
package org.xins.logdoc; import org.xins.util.MandatoryArgumentChecker; /** * Central class for <em>logdoc</em> logging. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ public final class LogCentral extends Object { // Class fields /** * All registered <code>LogController</code> instances. * * @see #registerLog(AbstractLog.LogController) */ private static AbstractLog.LogController[] CONTROLLERS; // Class functions static final void registerLog(AbstractLog.LogController controller) throws IllegalArgumentException { // Check preconditions MandatoryArgumentChecker.check("controller", controller); // Add the controller to the List if (CONTROLLERS == null) { CONTROLLERS = new AbstractLog.LogController[] { controller }; } else { int size = CONTROLLERS.length; AbstractLog.LogController[] a = new AbstractLog.LogController[size + 1]; System.arraycopy(CONTROLLERS, 0, a, 0, size); a[size] = controller; CONTROLLERS = a; } } public static final void setLocale(String newLocale) throws IllegalArgumentException, UnsupportedLocaleException { // Check preconditions MandatoryArgumentChecker.check("newLocale", newLocale); // Make sure the locale is supported by all controllers int size = CONTROLLERS.length; for (int i = 0; i < size; i++) { if (CONTROLLERS[i].isLocaleSupported(newLocale) == false) { throw new UnsupportedLocaleException(newLocale); } } // Change the locale on all controllers for (int i = 0; i < size; i++) { CONTROLLERS[i].setLocale(newLocale); } } // Constructors /** * Constructs a new <code>LogCentral</code> instance. This constructor is * intentionally made <code>private</code>, since no instances should be * constructed of this class. */ private LogCentral() { // empty } // Fields // Methods }
package com.cordovaplugincamerapreview;

import android.Manifest;
import android.content.pm.PackageManager;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.hardware.Camera;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.TypedValue;
import android.view.ViewGroup;
import android.widget.FrameLayout;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONObject;
import org.json.JSONArray;
import org.json.JSONException;

import java.util.List;
import java.util.Arrays;

/**
 * Cordova plugin exposing a live camera preview hosted in a native fragment.
 * JavaScript actions are dispatched through execute(); results are returned
 * via the per-call CallbackContext. Picture results arrive asynchronously
 * through the CameraActivity.CameraPreviewListener callbacks.
 */
public class CameraPreview extends CordovaPlugin implements CameraActivity.CameraPreviewListener {

  private static final String TAG = "CameraPreview";

  // Action names as invoked from the JavaScript side.
  private static final String COLOR_EFFECT_ACTION = "setColorEffect";
  private static final String ZOOM_ACTION = "setZoom";
  private static final String FLASH_MODE_ACTION = "setFlashMode";
  private static final String START_CAMERA_ACTION = "startCamera";
  private static final String STOP_CAMERA_ACTION = "stopCamera";
  private static final String PREVIEW_SIZE_ACTION = "setPreviewSize";
  private static final String SWITCH_CAMERA_ACTION = "switchCamera";
  private static final String TAKE_PICTURE_ACTION = "takePicture";
  private static final String SHOW_CAMERA_ACTION = "showCamera";
  private static final String HIDE_CAMERA_ACTION = "hideCamera";
  private static final String SUPPORTED_PICTURE_SIZES_ACTION = "getSupportedPictureSizes";

  private static final int CAM_REQ_CODE = 0;
  private static final String [] permissions = {
    Manifest.permission.CAMERA
  };

  // Fragment hosting the live preview; null until startCamera succeeds.
  private CameraActivity fragment;
  private CallbackContext takePictureCallbackContext;
  // Saved call state while waiting for the runtime permission result.
  private CallbackContext execCallback;
  private JSONArray execArgs;

  private int containerViewId = 1;

  public CameraPreview(){
    super();
    Log.d(TAG, "Constructing");
  }

  /**
   * Dispatches a JavaScript action to the matching handler. startCamera is
   * deferred (args/callback saved) when the CAMERA permission is missing.
   * Returns false for unknown actions.
   */
  @Override
  public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
    if (START_CAMERA_ACTION.equals(action)) {
      if (cordova.hasPermission(permissions[0])) {
        return startCamera(args.getInt(0), args.getInt(1), args.getInt(2), args.getInt(3), args.getString(4), args.getBoolean(5), args.getBoolean(6), args.getBoolean(7), args.getString(8), callbackContext);
      } else {
        // Permission missing: stash the call and ask the user first.
        this.execCallback = callbackContext;
        this.execArgs = args;
        cordova.requestPermissions(this, CAM_REQ_CODE, permissions);
      }
    } else if (TAKE_PICTURE_ACTION.equals(action)) {
      return takePicture(args.getInt(0), args.getInt(1), args.getInt(2), callbackContext);
    } else if (COLOR_EFFECT_ACTION.equals(action)) {
      return setColorEffect(args.getString(0), callbackContext);
    } else if (ZOOM_ACTION.equals(action)) {
      return setZoom(args.getInt(0), callbackContext);
    } else if (PREVIEW_SIZE_ACTION.equals(action)) {
      return setPreviewSize(args.getInt(0), args.getInt(1), callbackContext);
    } else if (FLASH_MODE_ACTION.equals(action)) {
      return setFlashMode(args.getInt(0), callbackContext);
    } else if (STOP_CAMERA_ACTION.equals(action)){
      return stopCamera(callbackContext);
    } else if (HIDE_CAMERA_ACTION.equals(action)) {
      return hideCamera(callbackContext);
    } else if (SHOW_CAMERA_ACTION.equals(action)) {
      return showCamera(callbackContext);
    } else if (SWITCH_CAMERA_ACTION.equals(action)) {
      return switchCamera(callbackContext);
    } else if (SUPPORTED_PICTURE_SIZES_ACTION.equals(action)) {
      return getSupportedPictureSizes(callbackContext);
    }
    return false;
  }

  /**
   * Permission dialog result: any denial aborts with ILLEGAL_ACCESS_EXCEPTION;
   * otherwise the deferred startCamera call is replayed with the saved args.
   */
  @Override
  public void onRequestPermissionResult(int requestCode, String[] permissions, int[] grantResults) throws JSONException {
    for(int r:grantResults){
      if(r == PackageManager.PERMISSION_DENIED){
        execCallback.sendPluginResult(new PluginResult(PluginResult.Status.ILLEGAL_ACCESS_EXCEPTION));
        return;
      }
    }
    if (requestCode == CAM_REQ_CODE) {
      startCamera(this.execArgs.getInt(0), this.execArgs.getInt(1), this.execArgs.getInt(2), this.execArgs.getInt(3), this.execArgs.getString(4), this.execArgs.getBoolean(5), this.execArgs.getBoolean(6), this.execArgs.getBoolean(7), this.execArgs.getString(8), this.execCallback);
    }
  }

  // Guard: fails the callback with "No preview" when the fragment is absent.
  private boolean hasView(CallbackContext callbackContext) {
    if(fragment == null) {
      callbackContext.error("No preview");
      return false;
    }
    return true;
  }

  // Guard: fails the callback when the fragment or its camera is absent.
  private boolean hasCamera(CallbackContext callbackContext) {
    if(this.hasView(callbackContext) == false){
      return false;
    }
    if(fragment.getCamera() == null) {
      callbackContext.error("No Camera");
      return false;
    }
    return true;
  }

  /**
   * Returns the camera's supported picture sizes as a JSON array of
   * {height, width} objects.
   */
  private boolean getSupportedPictureSizes(CallbackContext callbackContext) {
    if(this.hasCamera(callbackContext) == false){
      return false;
    }
    List<Camera.Size> supportedSizes;
    Camera camera = fragment.getCamera();
    supportedSizes = camera.getParameters().getSupportedPictureSizes();
    if (supportedSizes != null) {
      JSONArray sizes = new JSONArray();
      for (int i=0; i<supportedSizes.size(); i++) {
        Camera.Size size = supportedSizes.get(i);
        int h = size.height;
        int w = size.width;
        JSONObject jsonSize = new JSONObject();
        try {
          jsonSize.put("height", new Integer(h));
          jsonSize.put("width", new Integer(w));
        } catch(JSONException e){
          e.printStackTrace();
        }
        sizes.put(jsonSize);
      }
      callbackContext.success(sizes);
      return true;
    }
    callbackContext.error("Camera Parameters access error");
    return false;
  }

  /**
   * Creates the preview fragment and attaches it to a container view on the
   * UI thread. x/y/width/height are given in dip and converted to pixels.
   * When toBack is true the webview is made transparent and brought in front
   * of the preview; otherwise the container is overlaid with the given alpha.
   * Fails if the camera was already started.
   */
  private boolean startCamera(int x, int y, int width, int height, String defaultCamera, Boolean tapToTakePicture, Boolean dragEnabled, final Boolean toBack, String alpha, CallbackContext callbackContext) {
    Log.d(TAG, "start camera action");
    if (fragment != null) {
      callbackContext.error("Camera already started");
      return false;
    }
    final float opacity = Float.parseFloat(alpha);
    fragment = new CameraActivity();
    fragment.setEventListener(this);
    fragment.defaultCamera = defaultCamera;
    fragment.tapToTakePicture = tapToTakePicture;
    fragment.dragEnabled = dragEnabled;
    DisplayMetrics metrics = cordova.getActivity().getResources().getDisplayMetrics();
    // offset (dip -> px)
    int computedX = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, x, metrics);
    int computedY = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, y, metrics);
    // size (dip -> px)
    int computedWidth = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, width, metrics);
    int computedHeight = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, height, metrics);
    fragment.setRect(computedX, computedY, computedWidth, computedHeight);
    final CallbackContext cb = callbackContext;
    cordova.getActivity().runOnUiThread(new Runnable() {
      @Override
      public void run() {
        //create or update the layout params for the container view
        FrameLayout containerView = (FrameLayout)cordova.getActivity().findViewById(containerViewId);
        if(containerView == null){
          containerView = new FrameLayout(cordova.getActivity().getApplicationContext());
          containerView.setId(containerViewId);
          FrameLayout.LayoutParams containerLayoutParams = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
          cordova.getActivity().addContentView(containerView, containerLayoutParams);
        }
        //display camera bellow the webview
        if(toBack){
          webView.getView().setBackgroundColor(0x00000000);
          ((ViewGroup)webView.getView()).bringToFront();
        }else{
          //set camera back to front
          containerView.setAlpha(opacity);
          containerView.bringToFront();
        }
        //add the fragment to the container
        FragmentManager fragmentManager = cordova.getActivity().getFragmentManager();
        FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
        fragmentTransaction.add(containerView.getId(), fragment);
        fragmentTransaction.commit();
        cb.success("Camera started");
      }
    });
    return true;
  }

  /**
   * Triggers an asynchronous capture; the result arrives via onPictureTaken /
   * onPictureTakenError using the saved callback context.
   */
  private boolean takePicture(int width, int height, int quality, CallbackContext callbackContext) {
    if(this.hasView(callbackContext) == false){
      return false;
    }
    takePictureCallbackContext = callbackContext;
    fragment.takePicture(width, height, quality);
    return true;
  }

  /** Listener callback: delivers the captured picture to JavaScript, keeping the callback alive. */
  public void onPictureTaken(String originalPicture) {
    Log.d(TAG, "returning picture");
    JSONArray data = new JSONArray();
    data.put(originalPicture);
    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, data);
    pluginResult.setKeepCallback(true);
    takePictureCallbackContext.sendPluginResult(pluginResult);
  }

  /** Listener callback: reports a capture failure to JavaScript. */
  public void onPictureTakenError(String message) {
    Log.d(TAG, "CameraPreview onPictureTakenError");
    takePictureCallbackContext.error(message);
  }

  /**
   * Maps an effect name to the corresponding Camera.Parameters constant and
   * applies it. Unknown names leave the parameters unchanged but still
   * report success with the given name.
   */
  private boolean setColorEffect(String effect, CallbackContext callbackContext) {
    if(this.hasCamera(callbackContext) == false){
      return false;
    }
    Camera camera = fragment.getCamera();
    Camera.Parameters params = camera.getParameters();
    if (effect.equals("aqua")) {
      params.setColorEffect(Camera.Parameters.EFFECT_AQUA);
    } else if (effect.equals("blackboard")) {
      params.setColorEffect(Camera.Parameters.EFFECT_BLACKBOARD);
    } else if (effect.equals("mono")) {
      params.setColorEffect(Camera.Parameters.EFFECT_MONO);
    } else if (effect.equals("negative")) {
      params.setColorEffect(Camera.Parameters.EFFECT_NEGATIVE);
    } else if (effect.equals("none")) {
      params.setColorEffect(Camera.Parameters.EFFECT_NONE);
    } else if (effect.equals("posterize")) {
      params.setColorEffect(Camera.Parameters.EFFECT_POSTERIZE);
    } else if (effect.equals("sepia")) {
      params.setColorEffect(Camera.Parameters.EFFECT_SEPIA);
    } else if (effect.equals("solarize")) {
      params.setColorEffect(Camera.Parameters.EFFECT_SOLARIZE);
    } else if (effect.equals("whiteboard")) {
      params.setColorEffect(Camera.Parameters.EFFECT_WHITEBOARD);
    }
    fragment.setCameraParameters(params);
    callbackContext.success(effect);
    return true;
  }

  /** Applies the given zoom level if the camera supports zooming. */
  private boolean setZoom(int zoom, CallbackContext callbackContext) {
    if(this.hasCamera(callbackContext) == false){
      return false;
    }
    Camera camera = fragment.getCamera();
    Camera.Parameters params = camera.getParameters();
    if (camera.getParameters().isZoomSupported()) {
      params.setZoom(zoom);
      fragment.setCameraParameters(params);
      callbackContext.success(zoom);
      return true;
    } else {
      callbackContext.error("Zoom not supported");
      return false;
    }
  }

  /** Applies a new preview size and restarts the preview. */
  private boolean setPreviewSize(int
width, int height, CallbackContext callbackContext) { if(this.hasCamera(callbackContext) == false){ return false; } Camera camera = fragment.getCamera(); Camera.Parameters params = camera.getParameters(); params.setPreviewSize(width, height); fragment.setCameraParameters(params); camera.startPreview(); callbackContext.success(); return true; } private boolean setFlashMode(int mode, CallbackContext callbackContext) { if(this.hasCamera(callbackContext) == false){ return false; } Camera camera = fragment.getCamera(); Camera.Parameters params = camera.getParameters(); switch(mode) { case 0: params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); break; case 1: params.setFlashMode(Camera.Parameters.FLASH_MODE_ON); break; case 2: params.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO); break; case 3: params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH); break; } fragment.setCameraParameters(params); callbackContext.success(mode); return true; } private boolean stopCamera(CallbackContext callbackContext) { if(this.hasView(callbackContext) == false){ return false; } FragmentManager fragmentManager = cordova.getActivity().getFragmentManager(); FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction(); fragmentTransaction.remove(fragment); fragmentTransaction.commit(); fragment = null; callbackContext.success(); return true; } private boolean showCamera(CallbackContext callbackContext) { if(this.hasView(callbackContext) == false){ return false; } FragmentManager fragmentManager = cordova.getActivity().getFragmentManager(); FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction(); fragmentTransaction.show(fragment); fragmentTransaction.commit(); callbackContext.success(); return true; } private boolean hideCamera(CallbackContext callbackContext) { if(this.hasView(callbackContext) == false){ return false; } FragmentManager fragmentManager = cordova.getActivity().getFragmentManager(); FragmentTransaction fragmentTransaction = 
fragmentManager.beginTransaction(); fragmentTransaction.hide(fragment); fragmentTransaction.commit(); callbackContext.success(); return true; } private boolean switchCamera(CallbackContext callbackContext) { if(this.hasView(callbackContext) == false){ return false; } fragment.switchCamera(); callbackContext.success(); return true; } }
package org.wikipedia.history;

import android.app.*;
import android.content.*;
import android.database.*;
import android.net.*;
import android.os.*;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.widget.CursorAdapter;
import android.support.v7.app.*;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.*;
import android.widget.*;
import com.squareup.picasso.*;
import org.wikipedia.*;
import org.wikipedia.page.*;
import org.wikipedia.pageimages.*;

import java.text.*;
import java.util.*;

/**
 * Shows the user's browsing history as a filterable list, grouped by day via
 * inline section headers. Data is loaded asynchronously through a CursorLoader
 * and re-queried whenever the filter text changes.
 */
public class HistoryActivity extends ActionBarActivity implements LoaderManager.LoaderCallbacks<Cursor> {
    private ListView historyEntryList;
    private TextView historyEmptyMessage;
    private HistoryEntryAdapter adapter;
    private EditText entryFilter;

    private WikipediaApp app;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        app = (WikipediaApp)getApplicationContext();

        setContentView(R.layout.activity_history);
        historyEntryList = (ListView) findViewById(R.id.history_entry_list);
        historyEmptyMessage = (TextView) findViewById(R.id.history_empty_message);
        entryFilter = (EditText) findViewById(R.id.history_search_list);

        adapter = new HistoryEntryAdapter(this, null, true);
        historyEntryList.setAdapter(adapter);
        historyEntryList.setEmptyView(historyEmptyMessage);

        entryFilter.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
                // Do nothing
            }

            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {
                // Do nothing
            }

            @Override
            public void afterTextChanged(Editable editable) {
                // Re-run the query with the new filter text; onCreateLoader reads
                // entryFilter directly, so no args bundle is needed.
                getSupportLoaderManager().restartLoader(0, null, HistoryActivity.this);
                if (editable.length() == 0) {
                    historyEmptyMessage.setText(R.string.history_empty_message);
                } else {
                    historyEmptyMessage.setText(getString(R.string.history_search_empty_message, editable.toString()));
                }
            }
        });

        historyEntryList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Open the tapped page; record a fresh entry marked as coming from history.
                HistoryEntry oldEntry = (HistoryEntry) view.getTag();
                HistoryEntry newEntry = new HistoryEntry(oldEntry.getTitle(), HistoryEntry.SOURCE_HISTORY);

                Intent intent = new Intent();
                intent.setClass(HistoryActivity.this, PageActivity.class);
                intent.setAction(PageActivity.ACTION_PAGE_FOR_TITLE);
                intent.putExtra(PageActivity.EXTRA_PAGETITLE, oldEntry.getTitle());
                intent.putExtra(PageActivity.EXTRA_HISTORYENTRY, newEntry);
                startActivity(intent);
            }
        });

        getSupportLoaderManager().initLoader(0, null, this);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        String selection = null;
        String[] selectionArgs = null;
        if (entryFilter.getText().length() != 0) {
            // FIXME: Find ways to not have to hard code column names
            selection = "UPPER(history.title) LIKE UPPER(?)";
            selectionArgs = new String[]{"%" + entryFilter.getText().toString() + "%"};
        }
        return new CursorLoader(
                this,
                Uri.parse(HistoryEntry.PERSISTANCE_HELPER.getBaseContentURI().toString() + "/" + PageImage.PERSISTANCE_HELPER.getTableName()),
                null,
                selection,
                selectionArgs,
                "timestamp DESC");
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoaderLoader, Cursor cursorLoader) {
        adapter.swapCursor(cursorLoader);
        // Clear-history item enablement depends on list count; refresh the menu.
        supportInvalidateOptionsMenu();
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoaderLoader) {
        // Use swapCursor(null), NOT changeCursor(null): changeCursor would close the
        // cursor, but the LoaderManager owns it and closes it itself.
        adapter.swapCursor(null);
    }

    /**
     * Binds history rows to list items: title, source icon, thumbnail, and a
     * date section header shown on the first item of each day.
     */
    private class HistoryEntryAdapter extends CursorAdapter {
        public HistoryEntryAdapter(Context context, Cursor c, boolean autoRequery) {
            super(context, c, autoRequery);
        }

        @Override
        public View newView(Context context, Cursor cursor, ViewGroup viewGroup) {
            return getLayoutInflater().inflate(R.layout.item_history_entry, viewGroup, false);
        }

        private String getDateString(Date date) {
            return DateFormat.getDateInstance().format(date);
        }

        // Icon shown next to the entry, keyed on how the page was reached.
        private int getImageForSource(int source) {
            switch (source) {
                case HistoryEntry.SOURCE_INTERNAL_LINK:
                    return R.drawable.link;
                case HistoryEntry.SOURCE_EXTERNAL_LINK:
                    return R.drawable.external;
                case HistoryEntry.SOURCE_HISTORY:
                    return R.drawable.external;
                case HistoryEntry.SOURCE_SEARCH:
                    return R.drawable.search;
                case HistoryEntry.SOURCE_SAVED_PAGE:
                    return R.drawable.external;
                case HistoryEntry.SOURCE_LANGUAGE_LINK:
                    return R.drawable.link;
                case HistoryEntry.SOURCE_RANDOM:
                    return R.drawable.random;
                case HistoryEntry.SOURCE_MAIN_PAGE:
                    return R.drawable.link;
                default:
                    throw new RuntimeException("Unknown source id encountered");
            }
        }

        @Override
        public void bindView(View view, Context context, Cursor cursor) {
            TextView title = (TextView) view.findViewById(R.id.history_title);
            ImageView source = (ImageView) view.findViewById(R.id.history_source);
            ImageView thumbnail = (ImageView) view.findViewById(R.id.history_thumbnail);
            HistoryEntry entry = HistoryEntry.PERSISTANCE_HELPER.fromCursor(cursor);
            title.setText(entry.getTitle().getDisplayText());
            source.setImageResource(getImageForSource(entry.getSource()));
            view.setTag(entry);
            // NOTE(review): column 5 is assumed to be the thumbnail URL from the joined
            // pageimages table — confirm against the content provider's projection.
            Picasso.with(HistoryActivity.this)
                   .load(cursor.getString(5))
                   .placeholder(R.drawable.ic_pageimage_placeholder)
                   .error(R.drawable.ic_pageimage_placeholder)
                   .into(thumbnail);

            // Check the previous item and see if the dates differ enough.
            // If they do, display the section header.
            // Always display it if this is the first item.
            String curTime, prevTime = "";
            if (cursor.getPosition() != 0) {
                Cursor prevCursor = (Cursor) getItem(cursor.getPosition() - 1);
                HistoryEntry prevEntry = HistoryEntry.PERSISTANCE_HELPER.fromCursor(prevCursor);
                prevTime = getDateString(prevEntry.getTimestamp());
            }
            curTime = getDateString(entry.getTimestamp());
            TextView sectionHeader = (TextView) view.findViewById(R.id.history_section_header_text);
            if (!curTime.equals(prevTime)) {
                sectionHeader.setText(curTime);
                sectionHeader.setVisibility(View.VISIBLE);
            } else {
                sectionHeader.setVisibility(View.GONE);
            }
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.menu_history, menu);
        return true;
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        // "Clear all" only makes sense when there is something to clear.
        menu.findItem(R.id.menu_clear_all_history).setEnabled(historyEntryList.getCount() > 0);
        return super.onPrepareOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                finish();
                return true;
            case R.id.menu_clear_all_history:
                // Confirm before wiping the whole history table.
                AlertDialog.Builder builder = new AlertDialog.Builder(this);
                builder.setTitle(R.string.dialog_title_clear_history)
                       .setMessage(R.string.dialog_message_clear_history);
                builder.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        // Clear history!
                        app.getPersister(HistoryEntry.class).deleteAll();
                    }
                });
                builder.setNegativeButton(R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        // Uh, do nothing?
                    }
                });
                builder.create().show();
                return true;
            default:
                throw new RuntimeException("Unknown menu item clicked!");
        }
    }
}
package org.spigotmc.builder;

import com.google.common.base.Charsets;
import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteStreams;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import com.google.gson.Gson;
import difflib.DiffUtils;
import difflib.Patch;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.net.URL;
import java.net.URLConnection;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import lombok.RequiredArgsConstructor;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.TeeOutputStream;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.ResetCommand;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.revwalk.RevCommit;

/**
 * BuildTools driver: clones/updates the Bukkit, CraftBukkit, Spigot and BuildData
 * repositories, downloads and remaps the vanilla server jar, decompiles and patches
 * the NMS sources, then compiles everything with Maven and copies the final jars
 * into the working directory.
 */
public class Builder
{

    public static final String LOG_FILE = "BuildTools.log.txt";
    public static final boolean IS_WINDOWS = System.getProperty( "os.name" ).startsWith( "Windows" );
    public static final File CWD = new File( "." );
    private static boolean dontUpdate;
    private static boolean skipCompile;
    private static boolean generateSource;
    private static boolean generateDocs;
    private static boolean dev;

    public static void main(String[] args) throws Exception
    {
        // May be null when run from an IDE / unpacked classes
        String buildVersion = Builder.class.getPackage().getImplementationVersion();
        int buildNumber = -1;
        if ( buildVersion != null )
        {
            String[] split = buildVersion.split( "-" );
            if ( split.length == 4 )
            {
                try
                {
                    buildNumber = Integer.parseInt( split[3] );
                } catch ( NumberFormatException ex )
                {
                    // Non-numeric build suffix; keep the -1 "unknown" sentinel.
                }
            }
        }

        System.out.println( "Loading BuildTools version: " + buildVersion + " (#" + buildNumber + ")" );

        OptionParser parser = new OptionParser();
        OptionSpec<Void> disableCertFlag = parser.accepts( "disable-certificate-check" );
        OptionSpec<Void> dontUpdateFlag = parser.accepts( "dont-update" );
        OptionSpec<Void> skipCompileFlag = parser.accepts( "skip-compile" );
        OptionSpec<Void> generateSourceFlag = parser.accepts( "generate-source" );
        OptionSpec<Void> generateDocsFlag = parser.accepts( "generate-docs" );
        OptionSpec<Void> devFlag = parser.accepts( "dev" );
        OptionSpec<String> jenkinsVersion = parser.accepts( "rev" ).withRequiredArg().defaultsTo( "latest" );

        OptionSet options = parser.parse( args );

        if ( options.has( disableCertFlag ) )
        {
            disableHttpsCertificateCheck();
        }
        dontUpdate = options.has( dontUpdateFlag );
        skipCompile = options.has( skipCompileFlag );
        generateSource = options.has( generateSourceFlag );
        generateDocs = options.has( generateDocsFlag );
        dev = options.has( devFlag );

        logOutput();

        // class file version 51.0 == Java 7
        if ( Float.parseFloat( System.getProperty( "java.class.version" ) ) < 51.0 )
        {
            System.err.println( "*** WARNING *** You are not using Java 7 or above. Although this will work, it is highly discouraged due to the security issues present." );
            System.err.println( "*** WARNING *** Use java -version to check your version and update as soon as possible." );
        }

        try
        {
            runProcess( CWD, "bash", "-c", "exit" );
        } catch ( Exception ex )
        {
            System.out.println( "You must run this jar through bash (msysgit)" );
            System.exit( 1 );
        }

        // git refuses to commit without an identity; supply defaults if unset.
        try
        {
            runProcess( CWD, "git", "config", "--global", "user.name" );
        } catch ( Exception ex )
        {
            System.out.println( "Git name not set, setting it to default value." );
            runProcess( CWD, "git", "config", "--global", "user.name", "BuildTools" );
        }
        try
        {
            runProcess( CWD, "git", "config", "--global", "user.email" );
        } catch ( Exception ex )
        {
            System.out.println( "Git email not set, setting it to default value." );
            runProcess( CWD, "git", "config", "--global", "user.email", "unconfigured@null.spigotmc.org" );
        }

        File workDir = new File( "work" );
        workDir.mkdir();

        File bukkit = new File( "Bukkit" );
        if ( !bukkit.exists() )
        {
            clone( "https://hub.spigotmc.org/stash/scm/spigot/bukkit.git", bukkit );
        }

        File craftBukkit = new File( "CraftBukkit" );
        if ( !craftBukkit.exists() )
        {
            clone( "https://hub.spigotmc.org/stash/scm/spigot/craftbukkit.git", craftBukkit );
        }

        File spigot = new File( "Spigot" );
        if ( !spigot.exists() )
        {
            clone( "https://hub.spigotmc.org/stash/scm/spigot/spigot.git", spigot );
        }

        File buildData = new File( "BuildData" );
        if ( !buildData.exists() )
        {
            clone( "https://hub.spigotmc.org/stash/scm/spigot/builddata.git", buildData );
        }

        File maven = new File( "apache-maven-3.2.3" );
        if ( !maven.exists() )
        {
            System.out.println( "Maven does not exist, downloading. Please wait." );

            File mvnTemp = new File( "mvn.zip" );
            mvnTemp.deleteOnExit();

            download( "http://static.spigotmc.org/maven/apache-maven-3.2.3-bin.zip", mvnTemp );
            unzip( mvnTemp, new File( "." ) );
        }

        String mvn = maven.getAbsolutePath() + "/bin/mvn";

        Git bukkitGit = Git.open( bukkit );
        Git craftBukkitGit = Git.open( craftBukkit );
        Git spigotGit = Git.open( spigot );
        Git buildGit = Git.open( buildData );

        BuildInfo buildInfo = new BuildInfo( "Dev Build", "Development", 0, new BuildInfo.Refs( "master", "master", "master", "master" ) );

        if ( !dontUpdate )
        {
            if ( !dev )
            {
                // Resolve the requested revision ("latest" by default) to concrete refs.
                String askedVersion = options.valueOf( jenkinsVersion );
                System.out.println( "Attempting to build version: '" + askedVersion + "' use --rev <version> to override" );

                String verInfo;
                try
                {
                    verInfo = get( "https://hub.spigotmc.org/versions/" + askedVersion + ".json" );
                } catch ( IOException ex )
                {
                    System.err.println( "Could not get version " + askedVersion + " does it exist? Try another version or use 'latest'" );
                    ex.printStackTrace();
                    return;
                }
                System.out.println( "Found version" );
                System.out.println( verInfo );
                buildInfo = new Gson().fromJson( verInfo, BuildInfo.class );

                if ( buildNumber != -1 && buildInfo.getToolsVersion() != -1 && buildNumber < buildInfo.getToolsVersion() )
                {
                    // Restored truncated message/URL — original literal was cut off mid-string.
                    System.err.println( "**** Your BuildTools is out of date and will not build the requested version. Please grab a new copy from http://www.spigotmc.org/go/buildtools-dl" );
                    System.exit( 1 );
                }
            }

            pull( buildGit, buildInfo.getRefs().getBuildData() );
            pull( bukkitGit, buildInfo.getRefs().getBukkit() );
            pull( craftBukkitGit, buildInfo.getRefs().getCraftBukkit() );
            pull( spigotGit, buildInfo.getRefs().getSpigot() );
        }

        VersionInfo versionInfo = new Gson().fromJson(
                Resources.toString( new File( "BuildData/info.json" ).toURI().toURL(), Charsets.UTF_8 ),
                VersionInfo.class
        );
        // Default to 1.8 builds.
        if ( versionInfo == null )
        {
            versionInfo = new VersionInfo( "1.8", "bukkit-1.8.at", "bukkit-1.8-cl.csrg", "bukkit-1.8-members.csrg", "package.srg" );
        }
        System.out.println( "Attempting to build Minecraft with details: " + versionInfo );

        File vanillaJar = new File( workDir, "minecraft_server." + versionInfo.getMinecraftVersion() + ".jar" );
        if ( !vanillaJar.exists() )
        {
            download( String.format( "https://s3.amazonaws.com/Minecraft.Download/versions/%1$s/minecraft_server.%1$s.jar", versionInfo.getMinecraftVersion() ), vanillaJar );
        }

        // Hash of the last commit touching any mapping file — used to cache the remapped jar.
        Iterable<RevCommit> mappings = buildGit.log()
                .addPath( "mappings/" + versionInfo.getAccessTransforms() )
                .addPath( "mappings/" + versionInfo.getClassMappings() )
                .addPath( "mappings/" + versionInfo.getMemberMappings() )
                .addPath( "mappings/" + versionInfo.getPackageMappings() )
                .setMaxCount( 1 ).call();

        Hasher mappingsHash = Hashing.md5().newHasher();
        for ( RevCommit rev : mappings )
        {
            mappingsHash.putString( rev.getName(), Charsets.UTF_8 );
        }
        String mappingsVersion = mappingsHash.hash().toString().substring( 24 ); // Last 8 chars

        File finalMappedJar = new File( workDir, "mapped." + mappingsVersion + ".jar" );
        if ( !finalMappedJar.exists() )
        {
            System.out.println( "Final mapped jar: " + finalMappedJar + " does not exist, creating!" );

            File clMappedJar = new File( finalMappedJar + "-cl" );
            File mMappedJar = new File( finalMappedJar + "-m" );

            // Three-stage remap: class names, then member names, then access
            // transforms + package relocation.
            runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource-2.jar", "map", "-i", vanillaJar.getPath(),
                    "-m", "BuildData/mappings/" + versionInfo.getClassMappings(), "-o", clMappedJar.getPath() );

            runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource-2.jar", "map", "-i", clMappedJar.getPath(),
                    "-m", "BuildData/mappings/" + versionInfo.getMemberMappings(), "-o", mMappedJar.getPath() );

            runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource.jar", "-i", mMappedJar.getPath(),
                    "--access-transformer", "BuildData/mappings/" + versionInfo.getAccessTransforms(),
                    "-m", "BuildData/mappings/" + versionInfo.getPackageMappings(), "-o", finalMappedJar.getPath() );
        }

        runProcess( CWD, "sh", mvn, "install:install-file", "-Dfile=" + finalMappedJar, "-Dpackaging=jar",
                "-DgroupId=org.spigotmc", "-DartifactId=minecraft-server",
                "-Dversion=" + versionInfo.getMinecraftVersion() + "-SNAPSHOT" );

        File decompileDir = new File( workDir, "decompile-" + mappingsVersion );
        if ( !decompileDir.exists() )
        {
            decompileDir.mkdir();

            File clazzDir = new File( decompileDir, "classes" );
            unzip( finalMappedJar, clazzDir, new Predicate<String>()
            {

                @Override
                public boolean apply(String input)
                {
                    return input.startsWith( "net/minecraft/server" );
                }
            } );

            runProcess( CWD, "java", "-jar", "BuildData/bin/fernflower.jar", "-dgs=1", "-hdc=0", "-rbr=0", "-asc=1", "-udv=0",
                    clazzDir.getPath(), decompileDir.getPath() );
        }

        System.out.println( "Applying CraftBukkit Patches" );
        File nmsDir = new File( craftBukkit, "src/main/java/net" );
        if ( nmsDir.exists() )
        {
            System.out.println( "Backing up NMS dir" );
            FileUtils.moveDirectory( nmsDir, new File( workDir, "nms.old." + System.currentTimeMillis() ) );
        }
        File patchDir = new File( craftBukkit, "nms-patches" );
        for ( File file : patchDir.listFiles() )
        {
            String targetFile = "net/minecraft/server/" + file.getName().replaceAll( ".patch", ".java" );

            File clean = new File( decompileDir, targetFile );
            File t = new File( nmsDir.getParentFile(), targetFile );
            t.getParentFile().mkdirs();

            System.out.println( "Patching with " + file.getName() );

            List<String> readFile = Files.readLines( file, Charsets.UTF_8 );

            // Manually append prelude if it is not found in the first few lines.
            boolean preludeFound = false;
            for ( int i = 0; i < Math.min( 3, readFile.size() ); i++ )
            {
                if ( readFile.get( i ).startsWith( "+++" ) )
                {
                    preludeFound = true;
                    break;
                }
            }
            if ( !preludeFound )
            {
                readFile.add( 0, "+++" );
            }

            Patch parsedPatch = DiffUtils.parseUnifiedDiff( readFile );
            List<?> modifiedLines = DiffUtils.patch( Files.readLines( clean, Charsets.UTF_8 ), parsedPatch );

            // Write patched sources as UTF-8 to match the encoding they were read
            // with (previously FileWriter used the platform default charset).
            BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream( t ), Charsets.UTF_8 ) );
            for ( String line : (List<String>) modifiedLines )
            {
                bw.write( line );
                bw.newLine();
            }
            bw.close();
        }
        // Commit the patched sources on a throwaway "patched" branch, then return
        // to the build ref with the patched tree restored from a temp copy.
        File tmpNms = new File( craftBukkit, "tmp-nms" );
        FileUtils.copyDirectory( nmsDir, tmpNms );

        craftBukkitGit.branchDelete().setBranchNames( "patched" ).setForce( true ).call();
        craftBukkitGit.checkout().setCreateBranch( true ).setForce( true ).setName( "patched" ).call();
        craftBukkitGit.add().addFilepattern( "src/main/java/net/" ).call();
        craftBukkitGit.commit().setMessage( "CraftBukkit $ " + new Date() ).call();
        craftBukkitGit.checkout().setName( buildInfo.getRefs().getCraftBukkit() ).call();

        FileUtils.moveDirectory( tmpNms, nmsDir );

        File spigotApi = new File( spigot, "Bukkit" );
        if ( !spigotApi.exists() )
        {
            clone( "file://" + bukkit.getAbsolutePath(), spigotApi );
        }
        File spigotServer = new File( spigot, "CraftBukkit" );
        if ( !spigotServer.exists() )
        {
            clone( "file://" + craftBukkit.getAbsolutePath(), spigotServer );
        }

        // Git spigotApiGit = Git.open( spigotApi );
        // Git spigotServerGit = Git.open( spigotServer );
        if ( !skipCompile )
        {
            System.out.println( "Compiling Bukkit" );
            runProcess( bukkit, "sh", mvn, "clean", "install" );
            if ( generateDocs )
            {
                runProcess( bukkit, "sh", mvn, "javadoc:jar" );
            }
            if ( generateSource )
            {
                runProcess( bukkit, "sh", mvn, "source:jar" );
            }

            System.out.println( "Compiling CraftBukkit" );
            runProcess( craftBukkit, "sh", mvn, "clean", "install" );
        }

        try
        {
            runProcess( spigot, "bash", "applyPatches.sh" );
            System.out.println( "*** Spigot patches applied!" );
            System.out.println( "Compiling Spigot & Spigot-API" );

            if ( !skipCompile )
            {
                runProcess( spigot, "sh", mvn, "clean", "install" );
            }
        } catch ( Exception ex )
        {
            System.err.println( "Error compiling Spigot, are you running this jar via msysgit?" );
            ex.printStackTrace();
            System.exit( 1 );
        }

        for ( int i = 0; i < 35; i++ )
        {
            System.out.println( " " );
        }
        System.out.println( "Success! Everything compiled successfully. Copying final .jar files now." );
        copyJar( "CraftBukkit/target", "craftbukkit", "craftbukkit-" + versionInfo.getMinecraftVersion() + ".jar" );
        copyJar( "Spigot/Spigot-Server/target", "spigot", "spigot-" + versionInfo.getMinecraftVersion() + ".jar" );
    }

    /**
     * Fetches the given URL as a String (UTF-8), with 5s connect/read timeouts.
     *
     * @throws IOException on any network failure
     */
    public static final String get(String url) throws IOException
    {
        URLConnection con = new URL( url ).openConnection();
        con.setConnectTimeout( 5000 );
        con.setReadTimeout( 5000 );

        InputStreamReader r = null;
        try
        {
            // Explicit UTF-8: version metadata is UTF-8 JSON; previously this used
            // the platform default charset.
            r = new InputStreamReader( con.getInputStream(), Charsets.UTF_8 );

            return CharStreams.toString( r );
        } finally
        {
            if ( r != null )
            {
                r.close();
            }
        }
    }

    /**
     * Copies every "{jarPrefix}*.jar" in {@code path} to the working directory
     * under {@code outJarName}.
     */
    public static void copyJar(String path, final String jarPrefix, String outJarName) throws Exception
    {
        File[] files = new File( path ).listFiles( new FilenameFilter()
        {
            @Override
            public boolean accept(File dir, String name)
            {
                return name.startsWith( jarPrefix ) && name.endsWith( ".jar" );
            }
        } );

        for ( File file : files )
        {
            System.out.println( "Copying " + file.getName() + " to " + CWD.getAbsolutePath() );
            Files.copy( file, new File( CWD, outJarName ) );
            System.out.println( "  - Saved as " + outJarName );
        }
    }

    /**
     * Hard-resets the repo to origin/master, fetches, then hard-resets to the
     * requested ref (re-resolving "master" against origin after the fetch).
     */
    public static void pull(Git repo, String ref) throws Exception
    {
        System.out.println( "Pulling updates for " + repo.getRepository().getDirectory() );

        repo.reset().setRef( "origin/master" ).setMode( ResetCommand.ResetType.HARD ).call();
        repo.fetch().call();

        System.out.println( "Successfully fetched updates!" );

        repo.reset().setRef( ref ).setMode( ResetCommand.ResetType.HARD ).call();
        if ( ref.equals( "master" ) )
        {
            repo.reset().setRef( "origin/master" ).setMode( ResetCommand.ResetType.HARD ).call();
        }
        System.out.println( "Checked out: " + ref );
    }

    /**
     * Runs an external command in {@code workDir}, streaming its stdout/stderr to
     * ours, and throws if the exit status is non-zero.
     */
    public static int runProcess(File workDir, String... command) throws Exception
    {
        ProcessBuilder pb = new ProcessBuilder( command );
        pb.directory( workDir );
        pb.environment().put( "JAVA_HOME", System.getProperty( "java.home" ) );
        if ( !pb.environment().containsKey( "MAVEN_OPTS" ) )
        {
            pb.environment().put( "MAVEN_OPTS", "-Xmx1024M" );
        }

        final Process ps = pb.start();

        new Thread( new StreamRedirector( ps.getInputStream(), System.out ) ).start();
        new Thread( new StreamRedirector( ps.getErrorStream(), System.err ) ).start();

        int status = ps.waitFor();

        if ( status != 0 )
        {
            throw new RuntimeException( "Error running command, return status !=0: " + Arrays.toString( command ) );
        }

        return status;
    }

    /** Pumps a child process stream line-by-line into the given PrintStream. */
    @RequiredArgsConstructor
    private static class StreamRedirector implements Runnable
    {

        private final InputStream in;
        private final PrintStream out;

        @Override
        public void run()
        {
            BufferedReader br = new BufferedReader( new InputStreamReader( in ) );
            try
            {
                String line;
                while ( ( line = br.readLine() ) != null )
                {
                    out.println( line );
                }
            } catch ( IOException ex )
            {
                throw Throwables.propagate( ex );
            }
        }
    }

    /** Extracts every entry of the zip into {@code targetFolder}. */
    public static void unzip(File zipFile, File targetFolder) throws IOException
    {
        unzip( zipFile, targetFolder, null );
    }

    /**
     * Extracts zip entries accepted by {@code filter} (null = all) into
     * {@code targetFolder}, creating directories as needed.
     */
    public static void unzip(File zipFile, File targetFolder, Predicate<String> filter) throws IOException
    {
        targetFolder.mkdir();
        ZipFile zip = new ZipFile( zipFile );

        for ( Enumeration<? extends ZipEntry> entries = zip.entries(); entries.hasMoreElements(); )
        {
            ZipEntry entry = entries.nextElement();
            if ( filter != null )
            {
                if ( !filter.apply( entry.getName() ) )
                {
                    continue;
                }
            }
            File outFile = new File( targetFolder, entry.getName() );

            if ( entry.isDirectory() )
            {
                outFile.mkdirs();
                continue;
            }
            if ( outFile.getParentFile() != null )
            {
                outFile.getParentFile().mkdirs();
            }

            InputStream is = zip.getInputStream( entry );
            OutputStream os = new FileOutputStream( outFile );
            try
            {
                ByteStreams.copy( is, os );
            } finally
            {
                is.close();
                os.close();
            }

            System.out.println( "Extracted: " + outFile );
        }
    }

    /** Clones {@code url} into {@code target} and prints the resulting HEAD. */
    public static void clone(String url, File target) throws GitAPIException
    {
        System.out.println( "Starting clone of " + url + " to " + target );

        Git result = Git.cloneRepository().setURI( url ).setDirectory( target ).call();

        try
        {
            System.out.println( "Cloned git repository " + url + " to " + target.getAbsolutePath() + ". Current HEAD: " + commitHash( result ) );
        } finally
        {
            result.close();
        }
    }

    /** Returns the hash of the repository's current HEAD commit. */
    public static String commitHash(Git repo) throws GitAPIException
    {
        return Iterables.getOnlyElement( repo.log().setMaxCount( 1 ).call() ).getName();
    }

    /** Downloads {@code url} into {@code target}, printing the payload's md5. */
    public static File download(String url, File target) throws IOException
    {
        System.out.println( "Starting download of " + url );

        byte[] bytes = Resources.toByteArray( new URL( url ) );

        System.out.println( "Downloaded file: " + target + " with md5: " + Hashing.md5().hashBytes( bytes ).toString() );

        Files.write( bytes, target );

        return target;
    }

    /**
     * SECURITY: globally disables TLS certificate and hostname verification for
     * the whole JVM. Only reachable via the explicit --disable-certificate-check
     * flag; leave it off unless a broken local trust store forces it.
     */
    public static void disableHttpsCertificateCheck()
    {
        // This globally disables certificate checking
        try
        {
            TrustManager[] trustAllCerts = new TrustManager[]
            {
                new X509TrustManager()
                {
                    @Override
                    public java.security.cert.X509Certificate[] getAcceptedIssuers()
                    {
                        return null;
                    }

                    @Override
                    public void checkClientTrusted(X509Certificate[] certs, String authType)
                    {
                    }

                    @Override
                    public void checkServerTrusted(X509Certificate[] certs, String authType)
                    {
                    }
                }
            };

            // Trust SSL certs
            SSLContext sc = SSLContext.getInstance( "SSL" );
            sc.init( null, trustAllCerts, new SecureRandom() );
            HttpsURLConnection.setDefaultSSLSocketFactory( sc.getSocketFactory() );

            // Trust host names
            HostnameVerifier allHostsValid = new HostnameVerifier()
            {
                @Override
                public boolean verify(String hostname, SSLSession session)
                {
                    return true;
                }
            };
            HttpsURLConnection.setDefaultHostnameVerifier( allHostsValid );
        } catch ( NoSuchAlgorithmException ex )
        {
            System.out.println( "Failed to disable https certificate check" );
            ex.printStackTrace( System.err );
        } catch ( KeyManagementException ex )
        {
            System.out.println( "Failed to disable https certificate check" );
            ex.printStackTrace( System.err );
        }
    }

    /**
     * Tees stdout/stderr into BuildTools.log.txt, restoring the real console
     * streams and closing the log from a shutdown hook.
     */
    public static void logOutput()
    {
        try
        {
            final OutputStream logOut = new BufferedOutputStream( new FileOutputStream( LOG_FILE ) );

            Runtime.getRuntime().addShutdownHook( new Thread()
            {
                @Override
                public void run()
                {
                    System.setOut( new PrintStream( new FileOutputStream( FileDescriptor.out ) ) );
                    System.setErr( new PrintStream( new FileOutputStream( FileDescriptor.err ) ) );
                    try
                    {
                        logOut.close();
                    } catch ( IOException ex )
                    {
                        // We're shutting the jvm down anyway.
                    }
                }
            } );

            System.setOut( new PrintStream( new TeeOutputStream( System.out, logOut ) ) );
            System.setErr( new PrintStream( new TeeOutputStream( System.err, logOut ) ) );
        } catch ( FileNotFoundException ex )
        {
            System.err.println( "Failed to create log file: " + LOG_FILE );
        }
    }
}
package mil.nga.giat.mage;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import mil.nga.giat.mage.login.LoginActivity;
import mil.nga.giat.mage.map.CacheOverlay;
import mil.nga.giat.mage.sdk.event.IUserEventListener;
import mil.nga.giat.mage.sdk.fetch.LocationFetchIntentService;
import mil.nga.giat.mage.sdk.fetch.ObservationFetchIntentService;
import mil.nga.giat.mage.sdk.fetch.StaticFeatureServerFetch;
import mil.nga.giat.mage.sdk.fetch.UserFetchIntentService;
import mil.nga.giat.mage.sdk.glide.MageUrlLoader;
import mil.nga.giat.mage.sdk.glide.MageDiskCache;
import mil.nga.giat.mage.sdk.http.client.HttpClientManager;
import mil.nga.giat.mage.sdk.location.LocationService;
import mil.nga.giat.mage.sdk.preferences.PreferenceHelper;
import mil.nga.giat.mage.sdk.push.AttachmentPushAlarmReceiver;
import mil.nga.giat.mage.sdk.push.LocationPushIntentService;
import mil.nga.giat.mage.sdk.push.ObservationPushIntentService;
import mil.nga.giat.mage.sdk.screen.ScreenChangeReceiver;
import mil.nga.giat.mage.sdk.utils.StorageUtility;
import mil.nga.giat.mage.sdk.utils.UserUtility;
import mil.nga.giat.mage.sdk.utils.StorageUtility.StorageType;
import android.app.AlarmManager;
import android.app.Application;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.AsyncTask;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;
import android.util.Log;

import com.bumptech.glide.Glide;
import com.bumptech.glide.resize.ImageManager;

/**
 * Application singleton: wires up the Glide image pipeline, tile-cache overlays,
 * screen-change handling, and — on login/logout — the location service plus the
 * fetch/push background services and the persistent status notification.
 */
public class MAGE extends Application implements IUserEventListener {

    private static final String LOG_NAME = MAGE.class.getName();

    private AlarmManager alarm;
    public static final int MAGE_NOTIFICATION_ID = 1414;

    // Callers register to be told when the set of map cache overlays changes.
    public interface OnCacheOverlayListener {
        public void onCacheOverlay(List<CacheOverlay> cacheOverlays);
    }

    private LocationService locationService;

    // Intents for the periodic fetch/push services (declared here; started elsewhere).
    private Intent locationFetchIntent;
    private Intent observationFetchIntent;
    private Intent userFetchIntent;
    private Intent locationPushIntent;
    private Intent observationPushIntent;
    private Intent attachmentPushIntent;

    private List<CacheOverlay> cacheOverlays = null;
    private Collection<OnCacheOverlayListener> cacheOverlayListeners = new ArrayList<OnCacheOverlayListener>();

    private StaticFeatureServerFetch staticFeatureServerFetch = null;

    @Override
    public void onCreate() {
        alarm = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE);

        // Route URL loads through the custom Mage loader and disk cache.
        Glide.get().register(URL.class, new MageUrlLoader.Factory());
        ImageManager.Builder builder = new ImageManager.Builder(getApplicationContext());
        try {
            builder.setDiskCache(new MageDiskCache(getApplicationContext()));
        } catch (IOException e) {
            Log.e(LOG_NAME, "Unable to create Mage disk cache", e);
        }
        Glide.get().setImageManager(builder);

        refreshTileOverlays();

        // setup the screen unlock stuff
        registerReceiver(ScreenChangeReceiver.getInstance(), new IntentFilter(Intent.ACTION_SCREEN_ON));

        HttpClientManager.getInstance(getApplicationContext()).addListener(this);

        super.onCreate();
    }

    // Called after a successful login: shows the ongoing notification and starts
    // all background location/fetch/push machinery.
    public void onLogin() {
        createNotification();

        // Start location services
        initLocationService();

        // Start fetching and pushing observations and locations
        startFetching();
        startPushing();

        // Pull static layers and features just once
        loadStaticFeatures(false);
    }

    // Fetches static layers/features on a background thread; force=true bypasses
    // any already-fetched state (semantics delegated to StaticFeatureServerFetch).
    public void loadStaticFeatures(final boolean force) {
        Runnable runnable = new Runnable() {
            @Override
            public void run() {
                staticFeatureServerFetch = new StaticFeatureServerFetch(getApplicationContext());
                try {
                    staticFeatureServerFetch.fetch(force);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        new Thread(runnable).start();
    }

    // Tears down background services and the notification; optionally wipes all
    // local data when the corresponding preference is set.
    public void onLogout() {
        destroyFetching();
        destroyPushing();
        destroyLocationService();

        NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.cancel(MAGE_NOTIFICATION_ID);

        if(PreferenceHelper.getInstance(getApplicationContext()).getValue(R.string.deleteAllDataOnLogoutKey, Boolean.class, R.string.deleteAllDataOnLogoutDefaultValue)) {
            LandingActivity.deleteAllData(getApplicationContext());
        }
    }

    // Builds the persistent "logged in / token expired" notification with a
    // Logout action; tapping it opens LoginActivity.
    private void createNotification() {
        // this line is some magic for kitkat
        getLogoutPendingIntent().cancel();
        boolean tokenExpired = UserUtility.getInstance(getApplicationContext()).isTokenExpired();
        NotificationCompat.Builder builder = new NotificationCompat.Builder(this).setSmallIcon(R.drawable.ic_launcher).setContentTitle("MAGE").setContentText(tokenExpired ? "Your token has expired, please tap to login." : "You are logged in. Slide down to logout.").setOngoing(true)
                .setPriority(NotificationCompat.PRIORITY_MAX).addAction(R.drawable.ic_power_off_white, "Logout", getLogoutPendingIntent());
        NotificationCompat.BigTextStyle bigTextStyle = new NotificationCompat.BigTextStyle();
        bigTextStyle.setBigContentTitle("MAGE");
        bigTextStyle.bigText(tokenExpired ? "Your token has expired, please tap to login." : "You are logged in. Tap to open MAGE.");
        builder.setStyle(bigTextStyle);
        // Creates an explicit intent for an Activity in your app
        Intent resultIntent = new Intent(this, LoginActivity.class);

        // The stack builder object will contain an artificial back stack for the
        // started Activity.
        // This ensures that navigating backward from the Activity leads out of
        // your application to the Home screen.
TaskStackBuilder stackBuilder = TaskStackBuilder.create(this); // Adds the back stack for the Intent (but not the Intent itself) stackBuilder.addParentStack(LoginActivity.class); // Adds the Intent that starts the Activity to the top of the stack stackBuilder.addNextIntent(resultIntent); PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT); builder.setContentIntent(resultPendingIntent); NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); notificationManager.notify(MAGE.MAGE_NOTIFICATION_ID, builder.build()); } private PendingIntent getLogoutPendingIntent() { Intent intent = new Intent(getApplicationContext(), LoginActivity.class); intent.putExtra("LOGOUT", true); return PendingIntent.getActivity(getApplicationContext(), 1, intent, PendingIntent.FLAG_UPDATE_CURRENT); } /** * Start Tasks responsible for fetching Observations and Locations from the server. */ private void startFetching() { if (userFetchIntent == null) { userFetchIntent = new Intent(getApplicationContext(), UserFetchIntentService.class); startService(userFetchIntent); } if(locationFetchIntent == null) { locationFetchIntent = new Intent(getApplicationContext(), LocationFetchIntentService.class); startService(locationFetchIntent); } if(observationFetchIntent == null) { observationFetchIntent = new Intent(getApplicationContext(), ObservationFetchIntentService.class); startService(observationFetchIntent); } } /** * Stop Tasks responsible for fetching Observations and Locations from the server. 
*/ private void destroyFetching() { if (staticFeatureServerFetch != null) { staticFeatureServerFetch.destroy(); staticFeatureServerFetch = null; } if (userFetchIntent != null) { stopService(userFetchIntent); userFetchIntent = null; } if(locationFetchIntent != null) { stopService(locationFetchIntent); locationFetchIntent = null; } if(observationFetchIntent != null) { stopService(observationFetchIntent); observationFetchIntent = null; } } /** * Start Tasks responsible for pushing Observations, Attachments and Locations to the server. */ private void startPushing() { if (locationPushIntent == null) { locationPushIntent = new Intent(getApplicationContext(), LocationPushIntentService.class); startService(locationPushIntent); } if (observationPushIntent == null) { observationPushIntent = new Intent(getApplicationContext(), ObservationPushIntentService.class); startService(observationPushIntent); } startAttachmentPushing(); } private void startAttachmentPushing() { if(attachmentPushIntent == null) { attachmentPushIntent = new Intent(getApplicationContext(), AttachmentPushAlarmReceiver.class); final PendingIntent pendingIntent = PendingIntent.getBroadcast(this, AttachmentPushAlarmReceiver.REQUEST_CODE, attachmentPushIntent, PendingIntent.FLAG_UPDATE_CURRENT); long firstMillis = System.currentTimeMillis(); int intervalMillis = 60000; alarm.setInexactRepeating(AlarmManager.RTC_WAKEUP, firstMillis, intervalMillis, pendingIntent); } } /** * Stop Tasks responsible for pushing Observations and Locations to the server. 
*/ private void destroyPushing() { if (locationPushIntent != null) { stopService(locationPushIntent); locationPushIntent = null; } if (observationPushIntent != null) { stopService(observationPushIntent); observationPushIntent = null; } cancelAttachmentPush(); } private void cancelAttachmentPush() { Intent intent = new Intent(getApplicationContext(), AttachmentPushAlarmReceiver.class); final PendingIntent pIntent = PendingIntent.getBroadcast(this, AttachmentPushAlarmReceiver.REQUEST_CODE, intent, PendingIntent.FLAG_UPDATE_CURRENT); alarm.cancel(pIntent); if(attachmentPushIntent != null) { this.stopService(attachmentPushIntent); attachmentPushIntent = null; } } private void initLocationService() { if (locationService == null) { locationService = new LocationService(getApplicationContext()); locationService.init(); } } private void destroyLocationService() { if (locationService != null) { locationService.destroy(); locationService = null; } } public LocationService getLocationService() { return locationService; } public void registerCacheOverlayListener(OnCacheOverlayListener listener) { cacheOverlayListeners.add(listener); if (cacheOverlays != null) listener.onCacheOverlay(cacheOverlays); } public void unregisterCacheOverlayListener(OnCacheOverlayListener listener) { cacheOverlayListeners.remove(listener); } public void refreshTileOverlays() { TileOverlaysTask task = new TileOverlaysTask(); task.execute(); } private void setCacheOverlays(List<CacheOverlay> cacheOverlays) { this.cacheOverlays = cacheOverlays; for (OnCacheOverlayListener listener : cacheOverlayListeners) { listener.onCacheOverlay(cacheOverlays); } } private class TileOverlaysTask extends AsyncTask<Void, Void, List<CacheOverlay>> { @Override protected List<CacheOverlay> doInBackground(Void... 
params) { List<CacheOverlay> overlays = new ArrayList<CacheOverlay>(); Map<StorageType, File> storageLocations = StorageUtility.getAllStorageLocations(); for (File storageLocation : storageLocations.values()) { File root = new File(storageLocation, "MapCache"); if (root.exists() && root.isDirectory() && root.canRead()) { for (File cache : root.listFiles()) { if (cache.isDirectory() && cache.canRead()) { // found a cache overlays.add(new CacheOverlay(cache.getName(), cache)); } } } } return overlays; } @Override protected void onPostExecute(List<CacheOverlay> result) { setCacheOverlays(result); } } @Override public void onError(Throwable error) { // TODO Auto-generated method stub } @Override public void onTokenExpired() { destroyFetching(); destroyPushing(); createNotification(); } }
package org.ocelotds.annotations;

import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Target;
import javax.enterprise.util.Nonbinding;
import javax.interceptor.InterceptorBinding;

/**
 * Interceptor binding specifying that the annotated method's result is published to a topic.
 * <p>
 * The topic name can be given statically via {@link #value()}, or supplied dynamically
 * with a JsTopicName-annotated parameter.
 * <p>
 * {@link #jsonPayload()} indicates the method result is already JSON and must not be
 * re-serialized; only meaningful for methods returning String.
 *
 * @author hhfrancois
 */
@Inherited
@InterceptorBinding
@Retention(RUNTIME)
@Target({METHOD, TYPE})
public @interface JsTopic {

    /** Static topic name; empty when the topic is provided dynamically. Non-binding for interceptor resolution. */
    @Nonbinding
    String value() default "";

    /** When true, the (String) result is treated as ready-made JSON. Non-binding for interceptor resolution. */
    @Nonbinding
    boolean jsonPayload() default false;
}
package ui.issuecolumn;

import backend.interfaces.IModel;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.HBox;
import prefs.Preferences;
import ui.UI;
import ui.components.KeyboardShortcuts;
import ui.issuepanel.IssuePanel;
import util.events.ColumnClickedEvent;
import util.events.ColumnClickedEventHandler;
import util.events.IssueSelectedEventHandler;
import util.events.ModelUpdatedEventHandler;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;

/**
 * Horizontal container managing the ordered set of issue columns (panels):
 * creation, closing, reordering, session save/restore of filters, keyboard
 * navigation between panels, and tracking of the currently selected panel.
 * All children of this HBox are assumed to be {@link Column} instances
 * (temporary placeholder HBoxes appear only transiently inside swapColumns).
 */
public class ColumnControl extends HBox {

    private final UI ui;
    private final Preferences prefs;

    private IModel model;
    // Empty when no panel is selected (e.g. after closing the last panel).
    private Optional<Integer> currentlySelectedColumn = Optional.empty();

    public ColumnControl(UI ui, Preferences prefs) {
        this.ui = ui;
        this.prefs = prefs;

        // Set up the connection to the browser
        new UIBrowserBridge(ui);

        setSpacing(10);
        setPadding(new Insets(0, 10, 0, 10));

        // Push every model update into each issue column; e.hasMetadata tells
        // the column whether metadata accompanied the update.
        ui.registerEvent((ModelUpdatedEventHandler) e -> {
            updateModel(e.model);
            forEach(child -> {
                if (child instanceof IssueColumn) {
                    ((IssueColumn) child).setItems(e.model.getIssues(), e.hasMetadata);
                }
            });
        });

        // Both selecting an issue and clicking a column mark that column as selected.
        ui.registerEvent((IssueSelectedEventHandler) e -> setCurrentlySelectedColumn(Optional.of(e.columnIndex)));
        ui.registerEvent((ColumnClickedEventHandler) e -> setCurrentlySelectedColumn(Optional.of(e.columnIndex)));

        setupKeyEvents();
    }

    /**
     * Called on login.
     */
    public void init() {
        restoreColumns();
    }

    private void updateModel(IModel newModel) {
        model = newModel;
    }

    // Persists current filters then rebuilds all columns from them.
    public void recreateColumns() {
        saveSession();
        restoreColumns();
    }

    /** Saves each issue column's current filter string to preferences. */
    public void saveSession() {
        List<String> sessionFilters = new ArrayList<>();
        getChildren().forEach(child -> {
            if (child instanceof IssueColumn) {
                String filter = ((IssueColumn) child).getCurrentFilterString();
                sessionFilters.add(filter);
            }
        });
        prefs.setLastOpenFilters(sessionFilters);
    }

    /**
     * Replaces all columns with those saved in preferences; with no saved
     * filters, opens a single empty column.
     */
    public void restoreColumns() {
        getChildren().clear();
        List<String> filters = prefs.getLastOpenFilters();
        if (filters.isEmpty()) {
            addColumn();
            return;
        }
        for (String filter : filters) {
            addColumn().filterByString(filter);
        }
    }

    // Applies the callback to every child, cast to Column.
    public void forEach(Consumer<Column> callback) {
        getChildren().forEach(child -> callback.accept((Column) child));
    }

    /**
     * For a quick refresh (without requesting updates)
     */
    public void refresh() {
        forEach(child -> child.refreshItems(true));
    }

    private IssueColumn addColumn() {
        return addColumnAt(getChildren().size());
    }

    /**
     * Inserts a new issue panel at the given index, populates it from the
     * current model (without metadata), renumbers columns, and selects it.
     */
    public IssueColumn addColumnAt(int index) {
        IssueColumn panel = new IssuePanel(ui, model, this, index);
        getChildren().add(index, panel);
        panel.setItems(model.getIssues(), false);
        updateColumnIndices();
        setCurrentlySelectedColumn(Optional.of(index));
        return panel;
    }

    private void setCurrentlySelectedColumn(Optional<Integer> selectedColumn) {
        currentlySelectedColumn = selectedColumn;
        updateCSSforColumns();
    }

    // Clears the "panel-focused" style from every column, then applies it to
    // the selected one (no-op when nothing is selected).
    private void updateCSSforColumns() {
        if (currentlySelectedColumn.isPresent()) {
            for (int index = 0; index < getChildren().size(); index++) {
                getColumn(index).getStyleClass().remove("panel-focused");
            }
            getColumn(currentlySelectedColumn.get()).getStyleClass().add("panel-focused");
        }
    }

    public Column getColumn(int index) {
        return (Column) getChildren().get(index);
    }

    public void closeAllColumns() {
        getChildren().clear();
        // There aren't any children left, so we don't need to update indices
    }

    /** Opens one new column per filter string, applying each filter. */
    public void openColumnsWithFilters(List<String> filters) {
        for (String filter : filters) {
            IssueColumn column = addColumn();
            column.filterByString(filter);
        }
    }

    // Removes the column at index, renumbers the rest, then lets it clean up.
    public void closeColumn(int index) {
        Node child = getChildren().remove(index);
        updateColumnIndices();
        ((Column) child).close();
    }

    // Re-assigns each column its position so indices stay in sync with order.
    private void updateColumnIndices() {
        int i = 0;
        for (Node c : getChildren()) {
            ((Column) c).updateIndex(i++);
        }
    }

    public void createNewPanelAtStart() {
        addColumnAt(0);
    }

    public void createNewPanelAtEnd() {
        addColumn();
    }

    /** Swaps two columns (both position and stored index). */
    public void swapColumns(int columnIndex, int columnIndex2) {
        Column one = getColumn(columnIndex);
        Column two = getColumn(columnIndex2);
        one.updateIndex(columnIndex2);
        two.updateIndex(columnIndex);
        // This method of swapping is used because Collections.swap
        // will assign one child without removing the other, causing
        // a duplicate child exception. HBoxes are constructed because
        // null also causes an exception.
        getChildren().set(columnIndex, new HBox());
        getChildren().set(columnIndex2, new HBox());
        getChildren().set(columnIndex, two);
        getChildren().set(columnIndex2, one);
    }

    public Optional<Integer> getCurrentlySelectedColumn() {
        return currentlySelectedColumn;
    }

    // -1 means no column is currently being dragged.
    private int currentlyDraggedColumnIndex = -1;

    public int getCurrentlyDraggedColumnIndex() {
        return currentlyDraggedColumnIndex;
    }

    public void setCurrentlyDraggedColumnIndex(int i) {
        currentlyDraggedColumnIndex = i;
    }

    /**
     * Closes the selected column, then selects a sensible neighbour: the same
     * index if still valid, otherwise the one before it; clears the selection
     * when no columns remain.
     */
    public void closeCurrentColumn() {
        if (currentlySelectedColumn.isPresent()) {
            int columnIndex = currentlySelectedColumn.get();
            closeColumn(columnIndex);
            if (getChildren().size() == 0) {
                setCurrentlySelectedColumn(Optional.empty());
            } else {
                int newColumnIndex = (columnIndex > getChildren().size() - 1)
                        ? columnIndex - 1
                        : columnIndex;
                setCurrentlySelectedColumn(Optional.of(newColumnIndex));
                getColumn(currentlySelectedColumn.get()).requestFocus();
            }
        }
    }

    public double getPanelWidth() {
        // COLUMN_WIDTH is used instead of
        // ((Column) getChildren().get(0)).getWidth();
        // because when this function is called, columns may not have been sized yet.
        // In any case actual column width is COLUMN_WIDTH at minimum, so we can assume
        // that they are that large.
        return 40 + Column.COLUMN_WIDTH;
    }

    // Handles the left/right panel-navigation shortcuts on key release.
    private void setupKeyEvents() {
        addEventHandler(KeyEvent.KEY_RELEASED, event -> {
            if (event.getCode() == KeyboardShortcuts.RIGHT_PANEL || event.getCode() == KeyboardShortcuts.LEFT_PANEL) {
                handleKeys(event.getCode() == KeyboardShortcuts.RIGHT_PANEL);
                assert currentlySelectedColumn.isPresent() : "handleKeys doesn't set selectedIndex!";
            }
        });
    }

    /**
     * Moves the selection one panel forward or backward, wrapping at either
     * end. Does nothing while the selected column's filter text field has
     * focus (typing must not move panels), or when nothing is selected.
     */
    private void handleKeys(boolean isForwardKey) {
        if (!currentlySelectedColumn.isPresent()) {
            return;
        }
        if (getChildren().size() == 0) {
            return;
        }
        Column selectedColumn = getColumn(currentlySelectedColumn.get());
        if (selectedColumn instanceof IssueColumn){
            if (((IssueColumn) selectedColumn).filterTextField.isFocused()){
                return;
            } else {
                int newIndex = currentlySelectedColumn.get() + (isForwardKey ? 1 : -1);
                if (newIndex < 0) {
                    newIndex = getChildren().size() - 1;
                } else if (newIndex > getChildren().size() - 1) {
                    newIndex = 0;
                }
                setCurrentlySelectedColumn(Optional.of(newIndex));
                selectedColumn = getColumn(currentlySelectedColumn.get());
                selectedColumn.requestFocus();
            }
        }
        ui.triggerEvent(new ColumnClickedEvent(currentlySelectedColumn.get()));
        scrollandShowColumn(currentlySelectedColumn.get(), getChildren().size());
    }

    private void scrollandShowColumn(int selectedColumnIndex, int numOfColumns) {
        ui.getMenuControl().scrollTo(selectedColumnIndex, numOfColumns);
    }

    public int getNumberOfColumns() {
        return getChildren().size();
    }
}
package org.flymine.web; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import org.apache.struts.tiles.actions.TilesAction; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.flymine.metadata.Model; import org.flymine.metadata.ClassDescriptor; import org.flymine.metadata.presentation.DisplayModel; import org.flymine.objectstore.ObjectStoreFactory; import org.flymine.util.TypeUtil; /** * Perform initialisation steps for query editing tile prior to calling * query.jsp. * * @author Mark Woodbridge * @author Richard Smith */ public class QueryController extends TilesAction { /** * @see TilesAction#execute */ public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { HttpSession session = request.getSession(); Model model = ObjectStoreFactory.getObjectStore().getModel(); session.setAttribute("model", new DisplayModel(model)); Map classNames = new HashMap(); Iterator iter = model.getClassDescriptors().iterator(); while (iter.hasNext()) { ClassDescriptor cld = (ClassDescriptor) iter.next(); classNames.put(cld.getName(), TypeUtil.unqualifiedName(cld.getName())); } session.setAttribute("classNames", classNames); return null; } }
package com.thoughtworks.xstream.core.util; public final class FastStack { private Object[] stack; private int pointer; public FastStack(int initialCapacity) { stack = new Object[initialCapacity]; } public void push(Object value) { if (pointer + 1 >= stack.length) { resizeStack(stack.length * 2); } stack[pointer++] = value; } public void popSilently() { pointer } public Object pop() { return stack[--pointer]; } public Object peek() { return pointer == 0 ? null : stack[pointer - 1]; } public int size() { return pointer; } public Object get(int i) { return stack[i]; } private void resizeStack(int newCapacity) { Object[] newStack = new Object[newCapacity]; System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity)); stack = newStack; } public String toString() { StringBuffer result = new StringBuffer("["); for (int i = 0; i < pointer; i++) { if (i > 0) { result.append(", "); } result.append(stack[i]); } result.append(']'); return result.toString(); } }
package org.yamcs.yarch.streamsql;

import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import org.codehaus.janino.SimpleCompiler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yamcs.yarch.ColumnDefinition;
import org.yamcs.yarch.CompiledExpression;
import org.yamcs.yarch.DataType;
import org.yamcs.yarch.DbReaderStream;
import org.yamcs.yarch.TupleDefinition;
import org.yamcs.yarch.streamsql.TupleDefinition;

import org.yamcs.yarch.streamsql.StreamSqlException.ErrCode;

/**
 * Base class for stream-SQL expression tree nodes. Subclasses contribute
 * fragments of Java source (declarations, constructor code, and the value
 * expression) which {@link #compile()} assembles into a CompiledExpression
 * implementation and compiles in-memory with janino.
 */
public abstract class Expression {
    // Result data type; set by subclasses during doBind().
    protected DataType type = null;
    protected Expression[] children;
    // Definition of the input tuples this expression is bound against.
    protected TupleDefinition inputDef;

    // True when this node or any descendant is an aggregate expression.
    protected boolean hasAggregates;
    // True when the expression's value does not depend on the input tuple.
    protected boolean constant = false;

    // Column name under which this expression's value is exposed; defaults to
    // a generated unique-ish name based on class name and identity hash.
    String colName;

    static Logger log = LoggerFactory.getLogger(Expression.class);

    public Expression(Expression[] children) {
        this.children = children;
        hasAggregates = false;
        if (children != null) {
            for (Expression c : children) {
                if (c.isAggregate() || c.hasAggregates) {
                    hasAggregates = true;
                }
            }
        }
        // NOTE(review): "%xd" prints the hash in hex followed by a literal 'd';
        // probably "%x" was intended — confirm before changing, since generated
        // column names embed this string.
        colName = String.format("%s0x%xd", this.getClass().getSimpleName(), this.hashCode());
    }

    // Overridden by aggregate expression subclasses.
    protected boolean isAggregate() {
        return false;
    }

    final public boolean isConstant() {
        return constant;
    }

    /**
     * add a filter to the table if applicable and returns an expression where the condition is removed.
     *
     * @param tableStream
     * @return
     * @throws StreamSqlException
     */
    public Expression addFilter(DbReaderStream tableStream) throws StreamSqlException {
        // by default do nothing
        return this;
    }

    /**
     * Collects all aggregate nodes in this subtree into the given list,
     * descending only into branches known to contain aggregates.
     */
    public void collectAggregates(List<AggregateExpression> list) {
        if (isAggregate()) {
            list.add((AggregateExpression) this);
        } else if (children != null) {
            for (Expression c : children) {
                if (c.hasAggregates || c.isAggregate()) {
                    c.collectAggregates(list);
                }
            }
        }
    }

    // Subclass hook: resolve types/columns against inputDef; runs after children are bound.
    protected abstract void doBind() throws StreamSqlException;

    /** Binds this expression (children first) against the given input tuple definition. */
    public void bind(TupleDefinition inputDef2) throws StreamSqlException {
        this.inputDef = inputDef2;
        if (children != null) {
            for (Expression c : children) {
                c.bind(inputDef);
            }
        }
        doBind();
    }

    public DataType getType() {
        return type;
    }

    // Codegen hook: emit field declarations (children first, then this node's overrides).
    protected void fillCode_Declarations(StringBuilder code) {
        if (children != null) {
            for (Expression c : children) {
                c.fillCode_Declarations(code);
            }
        }
    }

    // Codegen hook: emit constructor statements (children first, then this node's overrides).
    protected void fillCode_Constructor(StringBuilder code) throws StreamSqlException {
        if (children != null) {
            for (Expression c : children) {
                c.fillCode_Constructor(code);
            }
        }
    }

    /**
     * Emits, for every input column, a local variable "col<name>" (with '-'
     * replaced by '_') initialized from the tuple when the column is present,
     * null otherwise. Used by getValue() unless the expression is constant.
     */
    protected void fillCode_AllInputDefVars(StringBuilder code) {
        for (ColumnDefinition cd : inputDef.getColumnDefinitions()) {
            // if (cd.getType().val != DataType._type.PROTOBUF) {
            String javaColIdentifier = "col" + cd.getName().replace("-", "_");
            code.append("\t\t" + cd.getType().javaType() + " " + javaColIdentifier + " = null;\n")
                    .append("\t\tif (tuple.hasColumn(\"" + cd.getName() + "\")) {\n")
                    .append("\t\t\t" + javaColIdentifier + " = (" + cd.getType().javaType() + ")tuple.getColumn(\"" + cd.getName() + "\");\n")
                    .append("\t\t}\n");
            /*
             * } else {
             * throw new UnsupportedOperationException(cd.getName()+ " not supported");
             * }
             */
        }
    }

    // Codegen hook: statements executed inside getValue() before the return.
    protected void fillCode_getValueBody(StringBuilder code) throws StreamSqlException {
    }

    // Codegen hook: the expression returned by getValue(); every subclass must provide it.
    protected abstract void fillCode_getValueReturn(StringBuilder code) throws StreamSqlException;

    // Gives each generated class a unique name (Expression1, Expression2, ...).
    private static AtomicInteger counter = new AtomicInteger();

    /**
     * Assembles the Java source for a CompiledExpression implementation in
     * package org.yamcs.yarch, compiles it in-memory with janino, and
     * instantiates it with a ColumnDefinition named after {@link #getColName()}.
     *
     * @throws StreamSqlException with ErrCode.COMPILE_ERROR when compilation
     *         or instantiation fails (the full generated source is logged).
     */
    public CompiledExpression compile() throws StreamSqlException {
        String className = "Expression" + counter.incrementAndGet();
        StringBuilder source = new StringBuilder();
        source.append("package org.yamcs.yarch;\n")
                .append("public class " + className + " implements CompiledExpression {\n")
                .append("\tColumnDefinition cdef;\n");
        fillCode_Declarations(source);
        source.append("\tpublic " + className + "(ColumnDefinition cdef) {\n")
                .append("\t\tthis.cdef=cdef;\n");
        fillCode_Constructor(source);
        source.append("\t}\n");
        source.append("\tpublic Object getValue(Tuple tuple) {\n");
        // Constant expressions never touch the tuple, so skip the column locals.
        if (!isConstant()) {
            fillCode_AllInputDefVars(source);
        }
        fillCode_getValueBody(source);
        // source.append("Value colid=t.getColumn(\"id\");\n");
        source.append("\n\t\treturn ");
        fillCode_getValueReturn(source);
        source.append(";\n");
        source.append("\t}\n")
                .append("\tpublic ColumnDefinition getDefinition() {\n")
                .append("\t\treturn cdef;\n")
                .append("\t}\n")
                .append("}\n");

        try {
            SimpleCompiler compiler = new SimpleCompiler();
            compiler.cook(new StringReader(source.toString()));
            @SuppressWarnings("unchecked")
            Class<CompiledExpression> cexprClass = (Class<CompiledExpression>) compiler.getClassLoader()
                    .loadClass("org.yamcs.yarch." + className);
            Constructor<CompiledExpression> cexprConstructor = cexprClass.getConstructor(ColumnDefinition.class);
            ColumnDefinition cdef = new ColumnDefinition(colName, type);
            return cexprConstructor.newInstance(cdef);
        } catch (Exception e) {
            log.warn("Got exception when compiling {} ", source.toString(), e);
            throw new StreamSqlException(ErrCode.COMPILE_ERROR, e.toString());
        }
    }

    /**
     * when the expression behaves like a column expression, this is the column name
     *
     * @return
     */
    public String getColName() {
        return colName;
    }
}