code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/*******************************************************************************
 * Copyright (c) 2010 Oak Ridge National Laboratory.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 ******************************************************************************/
package org.csstudio.alarm.beast.ui.clientmodel;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;

import org.csstudio.alarm.beast.Activator;
import org.csstudio.alarm.beast.AlarmTreePath;
import org.csstudio.alarm.beast.Messages;
import org.csstudio.alarm.beast.Preferences;
import org.csstudio.alarm.beast.SeverityLevel;
import org.csstudio.alarm.beast.client.AADataStructure;
import org.csstudio.alarm.beast.client.AlarmConfiguration;
import org.csstudio.alarm.beast.client.AlarmTreeItem;
import org.csstudio.alarm.beast.client.AlarmTreePV;
import org.csstudio.alarm.beast.client.AlarmTreeRoot;
import org.csstudio.alarm.beast.client.GDCDataStructure;
import org.csstudio.apputil.time.BenchmarkTimer;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.osgi.util.NLS;

/** Model of alarm information for client applications.
 *
 *  <p>Obtains alarm configuration (PVs, their hierarchy, guidance, status, ...)
 *  from RDB, then monitors JMS for changes,
 *  sends out acknowledgments or changes to JMS,
 *  signals listeners about updates, ...
 *
 *  <p>NOTE ON SYNCHRONIZATION:
 *  <p>The model can be accessed from the GUI but also from JMS threads
 *  that received updates from the alarm server.
 *  One could lock the 'root' element or the 'model' on all access.
 *  That might be easiest to implement and check, but potentially
 *  slower because concurrent access to different parts of the model
 *  are not possible.
 *
 *  <p>When synchronizing individual pieces of the model (area, pv, ...)
 *  deadlocks are possible if elements are locked in reverse order.
 *  The following order should prevent this:
 *  <ol>
 *  <li>For overall changes, lock the model.
 *  <li>For changes to the alarm tree, lock affected items from the root down.
 *  </ol>
 *
 *  <p>Note that acknowledging an alarm tree entry will
 *  <ol>
 *  <li>Acknowledge all 'child' entries, recursing to individual PVs
 *  <li>Access to 'root' element to send ack' request to JMS
 *  </ol>
 *  To prevent deadlocks, an acknowledge must therefore first lock the root element,
 *  then the affected sub-elements of the alarm tree.
 *
 *  @author Kay Kasemir, Xihui Chen
 */
@SuppressWarnings("nls")
public class AlarmClientModel
{
    /** Instance for the default (preference-configured) alarm tree root,
     *  which is the only instance allowed to change its configuration name.
     *  SYNC: Access needs to synchronize on INSTANCES.
     */
    private static AlarmClientModel default_instance;

    /** Shared instances, one per configuration name.
     *  Weak references so that unreferenced models can be garbage collected.
     *  SYNC: Access needs to synchronize on INSTANCES.
     */
    private static final Set<AlarmClientModel> INSTANCES =
        Collections.newSetFromMap(new WeakHashMap<>());

    /** Reference count for instance */
    private final AtomicInteger references = new AtomicInteger();

    /** Name of this configuration, i.e. root element of the configuration tree */
    private String config_name;

    /** Names of all available configurations, i.e. all root elements */
    private String config_names[] = new String[0];

    /** Have we recently heard from the server? */
    private volatile boolean server_alive = false;

    /** Do we think the server is in maintenance mode? */
    private volatile boolean maintenance_mode = false;

    /** Connection to configuration/state snapshot.
     *  <br><b>SYNC:</b> Access needs to synchronize on <code>this</code>
     */
    private AlarmConfiguration config;

    /** JMS Connection to server: alarm updates, send acknowledgment.
     *  May be <code>null</code> when we (re-)read the configuration.
     *
     *  SYNC: on communicator_lock for access.
     */
    private AlarmClientCommunicator communicator = null;

    /** Lock for <code>communicator</code> */
    final private Object communicator_lock = new Object();

    /** Root of the alarm tree.
     *  <br><b>SYNC:</b> Access needs to synchronize on <code>this</code>
     *  Usually this would be the same as config.getConfigTree(),
     *  but initially and after errors it will be a pseudo-alarm-tree
     *  that shows error messages
     */
    private AlarmTreeRoot config_tree;

    /** Set of items which are currently in alarm
     *  <br><b>SYNC:</b> Access needs to synchronize on <code>this</code>
     */
    private Set<AlarmTreePV> active_alarms = new HashSet<>();

    /** Set of items which are in alarm but acknowledged
     *  <br><b>SYNC:</b> Access needs to synchronize on <code>this</code>
     */
    private Set<AlarmTreePV> acknowledged_alarms = new HashSet<>();

    /** Listeners who registered for notifications */
    final private CopyOnWriteArrayList<AlarmClientModelListener> listeners =
        new CopyOnWriteArrayList<>();

    /** Send events? Suppressed while (re-)reading the configuration to avoid
     *  a flurry of per-item updates; a single notification is sent at the end.
     *  <br><b>SYNC:</b> Access needs to synchronize on <code>this</code>
     */
    private boolean notify_listeners = true;

    /** <code>true</code> unless preferences configure a read-only model */
    final private boolean allow_write = ! Preferences.isReadOnly();

    /** Indicates if the model accepts or denies a change of the configuration name */
    final private boolean allow_config_changes;

    /** Initialize client model.
     *  Schedules a background job that reads the configuration.
     *  @param config_name Name of alarm tree root
     *  @param allow_config_changes May this instance later switch configurations?
     *  @throws Exception on error
     */
    private AlarmClientModel(final String config_name,
            final boolean allow_config_changes) throws Exception
    {
        this.config_name = config_name;
        this.allow_config_changes = allow_config_changes;
        // Initial dummy alarm info until the RDB has been read
        createPseudoAlarmTree(Messages.AlarmClientModel_NotInitialized);
        new ReadConfigJob(this).schedule();
    }

    /** Obtain the shared instance for a configuration name.
     *  <p>
     *  Increments the reference count.
     *  @see #release()
     *  @param config_name Name of alarm tree root, raise an Exception if null
     *  @return Alarm client model instance
     *  @throws Exception on error
     */
    public static AlarmClientModel getInstance(final String config_name) throws Exception
    {
        if (config_name == null)
            throw new Exception("Configuration name can't be null");
        AlarmClientModel instance = null;
        synchronized (INSTANCES)
        {
            for (AlarmClientModel model : INSTANCES)
            {
                if (config_name.equals(model.getConfigurationName()))
                {
                    instance = model;
                    break;
                }
            }
            if (instance == null)
            {
                instance = new AlarmClientModel(config_name, false);
                INSTANCES.add(instance);
            }
        }
        instance.references.incrementAndGet();
        return instance;
    }

    /** Obtain the shared instance for the default alarm tree root.
     *  This instance allows changing the configuration name after
     *  the model was created.
     *  <p>
     *  Increments the reference count.
     *  @see #release()
     *  @return Alarm client model instance
     *  @throws Exception on error
     */
    public static AlarmClientModel getInstance() throws Exception
    {
        synchronized (INSTANCES)
        {
            if (default_instance == null)
            {
                default_instance = new AlarmClientModel(Preferences.getAlarmTreeRoot(), true);
            }
        }
        default_instance.references.incrementAndGet();
        return default_instance;
    }

    /** Must be called to release model when no longer used.
     *  <p>
     *  Based on reference count, model is closed when last
     *  user releases it.
     */
    public void release()
    {
        if (references.decrementAndGet() > 0)
            return;
        internalRelease();
        synchronized (INSTANCES)
        {
            if (this == default_instance)
            {
                default_instance = null;
            }
        }
    }

    /** Close JMS communicator and RDB connection, remove from instance set.
     *  Safe to call more than once.
     */
    private void internalRelease()
    {
        try
        {
            Activator.getLogger().fine("AlarmClientModel closed.");
            synchronized (INSTANCES)
            {
                INSTANCES.remove(this);
            }
            // Don't lock the model while closing the communicator
            // because communicator could right now be in a model
            // update which in turn already locks the model -> deadlock
            synchronized (communicator_lock)
            {
                if (communicator != null)
                {
                    communicator.stop();
                    communicator = null;
                }
            }
            synchronized (this)
            {
                if (config != null)
                {
                    config.close();
                    config = null;
                }
            }
        }
        catch (Exception ex)
        {
            Activator.getLogger().log(Level.WARNING, "Model release failed", ex);
        }
    }

    /** Last-resort cleanup for instances whose reference count never
     *  reached zero. NOTE(review): finalize() is deprecated in newer JDKs;
     *  callers should rely on {@link #release()}.
     */
    @Override
    protected void finalize() throws Throwable
    {
        internalRelease();
        super.finalize();
    }

    /** List all configuration 'root' element names, i.e. names
     *  of all possible configurations, including the current one.
     *  @return Array of 'root' elements.
     *          May be empty but non-<code>null</code> while model is still reading from RDB.
     */
    public synchronized String[] getConfigurationNames()
    {
        return config_names;
    }

    /** @return Name of this configuration, i.e. name of its root element */
    public synchronized String getConfigurationName()
    {
        return config_name;
    }

    /** Load a new alarm configuration.
     *  Ignored if the 'new' configuration name matches the current name.
     *
     *  @param new_root_name Name of configuration to load
     *  @param listener Listener that's notified when done
     *  @return <code>true</code> if configuration will be changed, <code>false</code>
     *          if requested configuration is already the current one
     *  @throws UnsupportedOperationException when this model was created for a
     *          fixed configuration name
     */
    public boolean setConfigurationName(final String new_root_name,
            final AlarmClientModelConfigListener listener)
    {
        if (!allow_config_changes)
        {
            throw new UnsupportedOperationException("Configuration name of this model cannot be changed.");
        }
        // TODO If loading, ignore change
        synchronized (this)
        {
            if (new_root_name.equals(config_name))
                return false;
            // Update config. name
            config_name = new_root_name;
        }
        // Update GUI with 'empty' model
        createPseudoAlarmTree(Messages.AlarmClientModel_NotInitialized);
        fireNewConfig();
        // Clear JMS communicator because it uses topics of the old config. name
        synchronized (communicator_lock)
        {
            if (communicator != null)
            {
                // Close old communicator
                communicator.stop();
                communicator = null;
            }
        }
        server_alive = false;
        // Load new configuration:
        // Create new JMS communicator, read from RDB, fire events, ...
        new ReadConfigJob(this, listener).schedule();
        return true;
    }

    /** @return <code>true</code> if model allows write access
     *          (acknowledge, update config)
     */
    public boolean isWriteAllowed()
    {
        return allow_write;
    }

    /** @param listener Listener to add */
    public void addListener(final AlarmClientModelListener listener)
    {
        listeners.add(listener);
    }

    /** @param listener Listener to remove */
    public void removeListener(final AlarmClientModelListener listener)
    {
        listeners.remove(listener);
    }

    /** Read alarm configuration.
     *  May be invoked from ReadConfigJob.
     *  @param monitor Progress monitor (has not been called)
     */
    void readConfiguration(final IProgressMonitor monitor)
    {
        final BenchmarkTimer timer = new BenchmarkTimer();
        monitor.beginTask(Messages.AlarmClientModel_ReadingConfiguration, IProgressMonitor.UNKNOWN);

        // Check if we need to create a NEW communicator,
        // or configure existing communicator to queue updates.
        final AlarmClientCommunicator comm;
        synchronized (communicator_lock)
        {
            if (communicator == null)
            {
                try
                {
                    communicator = new AlarmClientCommunicator(this);
                    communicator.start();
                }
                catch (Exception ex)
                {
                    Activator.getLogger().log(Level.SEVERE, "Cannot start AlarmClientCommunicator", ex);
                    return;
                }
            }
            // Queue received events until we read the whole configuration
            communicator.setQueueMode(true);
            comm = communicator;
        }

        // Clear old data.
        // If updates arrived now from JMS, we'd get 'unknown PV ...' warnings,
        // but since the JMS communicator is in 'queue' mode, that should not happen.
        synchronized (this)
        {
            // Prevent a flurry of events while items with alarms are added
            notify_listeners = false;
            if (config != null)
                config.close();
            active_alarms.clear();
            acknowledged_alarms.clear();
            config = null;
            // Note config_tree stays as it was...
        }

        // Connect to RDB
        final AlarmConfiguration new_config;
        try
        {
            // TODO Rearrange AlarmClientModel, AlarmConfiguration
            // This is currently odd:
            // Reading a new configuration (tree), but while doing that
            // we already update the active_alarms & acknowledged_alarms
            // of this model.
            // Better have a separate
            // 1) AlarmConfiguration with config tree AND active_alarms, acknowledged_alarms
            // 2) RDB reader/writer
            // 3) AlarmClientModel that uses 1 & 2
            // That way, the 'notify_listeners' can go away:
            // Reading a new config does not affect an existing config.
            new_config = new AlarmConfiguration(Preferences.getRDB_Url(), Preferences.getRDB_User(),
                    Preferences.getRDB_Password(), Preferences.getRDB_Schema())
            {
                // When reading config, create the root element
                // that links to the model instead of the default AlarmTreeRoot
                @Override
                protected AlarmTreeRoot createAlarmTreeRoot(int id, String root_name)
                {
                    return new AlarmClientModelRoot(id, root_name, AlarmClientModel.this);
                }
            };
            // Read names of available configurations
            final String new_root_names[] = new_config.listConfigurations();
            synchronized (this)
            {
                config_names = new_root_names;
            }
        }
        catch (Exception ex)
        {
            Activator.getLogger().log(Level.SEVERE, "Cannot connect to RDB", ex);
            createPseudoAlarmTree("Alarm RDB Error: " + ex.getMessage());
            synchronized (this)
            {
                notify_listeners = true;
            }
            fireNewConfig();
            monitor.done();
            return;
        }

        // Presumably connected to JMS and RDB,
        // but assert that we are really connected to JMS:
        // While we read the RDB, new alarms could arrive.
        // To avoid missing them, assert that we are connected to JMS.
        // Used to block for JMS connection before connecting to RDB,
        // but this way both types of errors are more obvious:
        // RDB error -> exception right away
        // JMS problem -> will usually still check RDB, so we know that's OK, then hang in here
        int wait = 0;
        while (!comm.isConnected())
        {
            monitor.subTask(NLS.bind(Messages.AlarmClientModel_WaitingForJMSFmt, ++wait));
            try
            {
                Thread.sleep(1000);
            }
            catch (InterruptedException e)
            {
                // Restore the interrupt status and abort like a cancellation:
                // sleeping again with the interrupt flag set would spin.
                Thread.currentThread().interrupt();
                synchronized (this)
                {
                    notify_listeners = true;
                }
                fireNewConfig();
                monitor.done();
                return;
            }
            if (monitor.isCanceled())
            {
                synchronized (this)
                {
                    notify_listeners = true;
                }
                fireNewConfig();
                monitor.done();
                return;
            }
        }

        // Read RDB
        monitor.subTask(Messages.AlarmClientModel_ReadingRDB);
        try
        {
            new_config.readConfiguration(getConfigurationName(), false, monitor);
            // Update model with newly received data
            synchronized (this)
            {
                config = new_config;
                config_tree = config.getAlarmTree();
                // active_alarms & acknowledged_alarms already populated
                // because fireNewAlarmState() was called while building
                // the alarm tree
            }
        }
        catch (Exception ex)
        {
            Activator.getLogger().log(Level.SEVERE, "Cannot read alarm configuration", ex);
            createPseudoAlarmTree("Alarm RDB Error: " + ex.getMessage());
        }

        // Info about performance
        timer.stop();
        if (Activator.getLogger().isLoggable(Level.INFO))
        {
            final int count;
            final int pv_count;
            synchronized (this)
            {
                count = config_tree.getElementCount();
                pv_count = config_tree.getLeafCount();
            }
            Activator.getLogger().info(String.format(
                "Read %d alarm tree items, %d PVs in %.2f seconds: %.1f items/sec, %.1f PVs/sec",
                count, pv_count, timer.getSeconds(), count / timer.getSeconds(),
                pv_count/timer.getSeconds()));
        }

        // After we received configuration, handle updates that might
        // have accumulated.
        comm.setQueueMode(false);
        // Re-enable events, send a single notification.
        synchronized (this)
        {
            notify_listeners = true;
        }
        if (monitor.isCanceled())
        {
            createPseudoAlarmTree("Cancelled");
            synchronized (this)
            {
                if (config != null)
                    config.close();
                config = null;
                active_alarms.clear();
                acknowledged_alarms.clear();
            }
        }
        fireNewConfig();
        monitor.done();
    }

    /** @return Name of JMS server or some text that indicates
     *          disconnected state. For information, not to determine
     *          exact connection state.
     */
    public String getJMSServerInfo()
    {
        synchronized (communicator_lock)
        {
            if (communicator != null)
                return communicator.toString();
        }
        return "No Communicator";
    }

    /** Invoked by AlarmClientCommunicator whenever an 'IDLE'
     *  message was received from the server
     *  @param maintenance_mode Is the server in maintenance mode?
     */
    public void updateServerState(boolean maintenance_mode)
    {
        // Tell GUI that there is a server.
        if (! server_alive)
        {
            server_alive = true;
            fireNewAlarmState(null, true);
        }
        // Change in maintenance mode?
        if (this.maintenance_mode != maintenance_mode)
        {
            this.maintenance_mode = maintenance_mode;
            fireModeUpdate();
        }
    }

    /** @return <code>true</code> if we received updates from server,
     *          <code>false</code> after server communication timeout
     */
    public boolean isServerAlive()
    {
        return server_alive;
    }

    /** @return <code>true</code> if we assume server is in maintenance mode,
     *          <code>false</code> for 'normal' mode.
     */
    public boolean inMaintenanceMode()
    {
        return maintenance_mode;
    }

    /** Send request to enable/disable maintenance mode to alarm server
     *  @param maintenance <code>true</code> to enable
     */
    public void requestMaintenanceMode(final boolean maintenance)
    {
        synchronized (communicator_lock)
        {
            if (allow_write && communicator != null)
                communicator.requestMaintenanceMode(maintenance);
        }
    }

    /** @return root of the alarm tree configuration */
    public synchronized AlarmTreeRoot getConfigTree()
    {
        return config_tree;
    }

    /** Get the currently active alarms.
     *  @return Array of active alarms. May be empty, but not <code>null</code>.
     */
    public synchronized AlarmTreePV[] getActiveAlarms()
    {
        final AlarmTreePV array[] = new AlarmTreePV[active_alarms.size()];
        return active_alarms.toArray(array);
    }

    /** Get the acknowledged alarms: Still in alarm, but ack'ed.
     *  @return Array of acknowledged alarms. May be empty, but not <code>null</code>.
     */
    public synchronized AlarmTreePV[] getAcknowledgedAlarms()
    {
        final AlarmTreePV array[] = new AlarmTreePV[acknowledged_alarms.size()];
        return acknowledged_alarms.toArray(array);
    }

    /** Add a component to the model and RDB
     *  @param root_or_component Root or Component under which to add the component
     *  @param name Name of the new component
     *  @throws Exception on error
     */
    public void addComponent(final AlarmTreeItem root_or_component,
            final String name) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            config.addComponent(root_or_component, name);
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(AlarmTreePath.makePath(root_or_component.getPathName(), name));
        }
    }

    /** Add a PV to the model and config storage (RDB)
     *  @param component Component under which to add the PV
     *  @param name Name of the new PV
     *  @throws Exception on error
     */
    public void addPV(final AlarmTreeItem component, final String name) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            config.addPV(component, name);
        }
        // Notify via JMS, then add to local model in response to notification.
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(null);
        }
    }

    /** Change an item's configuration in RDB.
     *  @param item Item to configure (which already exists, won't be created)
     *  @param guidance Guidance strings
     *  @param displays Related displays
     *  @param commands Commands
     *  @param auto_actions Automated Actions
     *  @throws Exception on error
     */
    public void configureItem(final AlarmTreeItem item,
            final GDCDataStructure guidance[], final GDCDataStructure displays[],
            final GDCDataStructure commands[], final AADataStructure auto_actions[])
            throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            try
            {
                config.configureItem(item, guidance, displays, commands, auto_actions);
            }
            finally
            {
                config.closeStatements();
            }
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(item.getPathName());
        }
    }

    /** Change a PV's configuration in RDB.
     *  @param pv PV
     *  @param description Description
     *  @param enabled Are alarms enabled?
     *  @param annunciate Annunciate or not?
     *  @param latch Latch highest alarms?
     *  @param delay Alarm delay [seconds]
     *  @param count Count of severity != OK within delay to detect as alarm
     *  @param filter Filter expression for enablement
     *  @param guidance Guidance strings
     *  @param displays Related displays
     *  @param commands Commands
     *  @param auto_actions Automated actions
     *  @throws Exception on error
     */
    public void configurePV(final AlarmTreePV pv, final String description,
        final boolean enabled, final boolean annunciate, final boolean latch,
        final int delay, final int count, final String filter,
        final GDCDataStructure guidance[], final GDCDataStructure displays[],
        final GDCDataStructure commands[], final AADataStructure auto_actions[]) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            try
            {
                config.configurePV(pv, description, enabled, annunciate, latch,
                        delay, count, filter, guidance, displays, commands, auto_actions);
            }
            finally
            {
                config.closeStatements();
            }
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(pv.getPathName());
        }
    }

    /** Change a PV's enable/disable state in RDB
     *  and send JMS config update.
     *  Receiving that update will then adjust PV in this model.
     *  @param pv PV
     *  @param enabled Are alarms enabled?
     *  @throws Exception on error
     */
    public void enable(final AlarmTreePV pv, final boolean enabled) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            try
            {
                config.updatePVEnablement(pv, enabled);
            }
            finally
            {
                config.closeStatements();
            }
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(pv.getPathName());
        }
    }

    /** Change item's name
     *  @param item Item to change
     *  @param new_name New name for the item
     *  @throws Exception on error
     */
    public void rename(final AlarmTreeItem item, final String new_name) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            config.rename(item, new_name);
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(null);
        }
    }

    /** Change item's location in alarm configuration hierarchy.
     *  This does not actually change the item's position in the in-memory
     *  model. It does change the RDB configuration and trigger an update,
     *  which the client should then receive and consequently re-load the
     *  whole model.
     *  @param item Item to move
     *  @param new_path New path for the item
     *  @throws Exception on error
     */
    public void move(final AlarmTreeItem item, final String new_path) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            config.move(item, new_path);
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(null);
        }
    }

    /** Create new PV by copying existing PV
     *  @param pv Existing PV
     *  @param new_path_and_pv Complete path, including PV name, of PV-to-create
     *  @throws Exception on error in new path, duplicate PV name, error while
     *          adding new PV
     */
    public void duplicatePV(final AlarmTreePV pv, final String new_path_and_pv) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            // Determine path and name of new PV
            final String new_pieces[] = AlarmTreePath.splitPath(new_path_and_pv);
            final int new_pieces_len = new_pieces.length;
            // Need at least "area/pv"
            if (new_pieces_len < 2)
                throw new Exception("New path too short");
            final String new_path = AlarmTreePath.makePath(new_pieces, new_pieces_len - 1);
            final String new_name = new_pieces[new_pieces_len - 1];
            if (new_name.equals(pv.getName()))
                throw new Exception("New PV name must differ from existing PV name");
            // Locate parent item
            final AlarmTreeItem new_parent = config.getAlarmTree().getItemByPath(new_path);
            if (new_parent == null)
                throw new Exception("Cannot locate parent entry: " + new_path);
            if (new_parent instanceof AlarmTreePV)
                throw new Exception("Parent entry has wrong type: " + new_path);
            // Add new PV
            final AlarmTreePV new_pv = config.addPV(new_parent, new_name);
            // Update configuration of new PV to match duplicated PV
            config.configurePV(new_pv, pv.getDescription(), pv.isEnabled(),
                    pv.isAnnunciating(), pv.isLatching(), pv.getDelay(),
                    pv.getCount(), pv.getFilter(), pv.getGuidance(),
                    pv.getDisplays(), pv.getCommands(), pv.getAutomatedActions());
        }
        // This will trigger an update the configuration of new_pv
        // in this model as well as other alarm system listeners
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(null);
        }
    }

    /** Remove item and all sub-items from alarm tree.
     *  @param item Item to remove
     *  @throws Exception on error
     */
    public void remove(final AlarmTreeItem item) throws Exception
    {
        if (! allow_write)
            return;
        synchronized (this)
        {
            if (config == null)
                return;
            config.remove(item);
        }
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.sendConfigUpdate(null);
        }
    }

    /** Update the configuration of a model item
     *  @param path Path name of the added/removed/changed item or null
     *  @throws Exception on error
     */
    public void readConfig(final String path) throws Exception
    {
        final AlarmTreeItem item = getConfigTree().getItemByPath(path);
        // instanceof is false for null, so this also covers an unknown path
        if (!(item instanceof AlarmTreePV))
        {
            // Not a known PV? Update the whole config.
            new ReadConfigJob(this).schedule();
            return;
        }
        // Update a known PV
        final AlarmTreePV pv = (AlarmTreePV) item;
        synchronized (this)
        {
            if (config == null)
                return;
            try
            {
                config.readPVConfig(pv);
            }
            finally
            {
                config.closeStatements();
            }
            // This could change the alarm tree after a PV was disabled or enabled.
            final AlarmTreeItem parent = pv.getParent();
            if (parent != null)
                parent.maximizeSeverity();
        }
        // Note that this may actually be a new PV that this instance
        // of the client GUI has just added.
        // We know the PV in the config tree, but it's not visible
        // in the GUI, yet, because we just added it.
        // Previously, there was code here that tried to optimize display
        // updates by suppressing the display update if the PV
        // has not changed alarm state
        // -> Always update PVs with changed configuration
        // Update alarm display
        fireNewAlarmState(pv, true);
    }

    /** Update the enablement of a PV in model.
     *  <p>
     *  Called by AlarmUpdateCommunicator, i.e. from JMS thread.
     *
     *  @param name PV name
     *  @param enabled Enabled?
     */
    public void updateEnablement(final String name, final boolean enabled)
    {
        final AlarmTreePV pv;
        // Lock model because complete tree is searched for PV,
        // then severity maximized up to root.
        synchronized (this)
        {
            pv = findPV(name);
            if (pv == null)
            {
                Activator.getLogger().log(Level.WARNING,
                    "Received enablement ({0}) for unknown PV {1}",
                    new Object[] { Boolean.toString(enabled), name });
                return;
            }
            pv.setEnabled(enabled);
            // This could change the alarm tree after a PV was disabled or enabled.
            final AlarmTreeItem parent = pv.getParent();
            if (parent != null)
                parent.maximizeSeverity();
        }
        // Update alarm display
        fireNewAlarmState(pv, true);
    }

    /** Update the state of a PV in model.
     *  <p>
     *  Called by AlarmUpdateCommunicator, i.e. from JMS thread.
     *
     *  @param info Alarm update info
     */
    void updatePV(final AlarmUpdateInfo info)
    {
        // Update should contain PV name
        String name = info.getNameOrPath();
        if (AlarmTreePath.isPath(name))
            name = AlarmTreePath.getName(name);
        server_alive = true;
        final AlarmTreePV pv = findPV(name);
        if (pv != null)
        {
            pv.setAlarmState(info.getCurrentSeverity(), info.getCurrentMessage(),
                    info.getSeverity(), info.getMessage(),
                    info.getValue(), info.getTimestamp());
            return;
        }
        // Can this result in out-of-memory?!
        // First glance: No, since we just log & return.
        // Is there a memory leak in the logger?
        // The update comes from JMS, and the logger may also
        // send info to JMS. Is that a problem?
        Activator.getLogger().log(Level.WARNING, "Received update for unknown PV {0}", name);
    }

    /** Locate PV by name
     *  @param name Name of PV to locate. May be <code>null</code>.
     *  @return PV or <code>null</code> when not found
     */
    public synchronized AlarmTreePV findPV(final String name)
    {
        if (config == null)
            return null;
        return config.findPV(name);
    }

    /** Ask alarm server to acknowledge alarm.
     *  @param pv PV to acknowledge
     *  @param acknowledge Acknowledge, or un-acknowledge?
     */
    public void acknowledge(final AlarmTreePV pv, final boolean acknowledge)
    {
        synchronized (communicator_lock)
        {
            if (allow_write && communicator != null)
                communicator.requestAcknowledgement(pv, acknowledge);
        }
    }

    /** Create a pseudo alarm tree for the purpose of displaying a message.
     *  Replaces <code>config_tree</code> and clears the alarm sets.
     *  @param info Info that will show as dummy alarm tree item
     */
    private synchronized void createPseudoAlarmTree(final String info)
    {
        config_tree = new AlarmTreeRoot("Pseudo", -1);
        new AlarmTreeItem(config_tree, info, 0);
        active_alarms.clear();
        acknowledged_alarms.clear();
    }

    /** Send debug trigger to alarm server */
    public void triggerDebug()
    {
        synchronized (communicator_lock)
        {
            if (communicator != null)
                communicator.triggerDebugAction();
        }
    }

    /** Inform listeners about server timeout */
    void fireServerTimeout()
    {
        server_alive = false;
        for (AlarmClientModelListener listener : listeners)
        {
            try
            {
                listener.serverTimeout(this);
            }
            catch (Throwable ex)
            {
                Activator.getLogger().log(Level.WARNING, "Server timeout notification error", ex);
            }
        }
    }

    /** Inform listeners that server is OK and in which mode */
    private void fireModeUpdate()
    {
        for (AlarmClientModelListener listener : listeners)
        {
            try
            {
                listener.serverModeUpdate(this, maintenance_mode);
            }
            catch (Throwable ex)
            {
                Activator.getLogger().log(Level.WARNING, "Model update notification error", ex);
            }
        }
    }

    /** Inform listeners about overall change to alarm tree configuration:
     *  Items added, removed.
     */
    void fireNewConfig()
    {
        for (AlarmClientModelListener listener : listeners)
        {
            try
            {
                listener.newAlarmConfiguration(this);
            }
            catch (Throwable ex)
            {
                Activator.getLogger().log(Level.WARNING, "Model config notification error", ex);
            }
        }
    }

    /** Inform listeners about change in alarm state.
     *  <p>
     *  Typically, this is invoked with the PV that changed state.
     *  May be called with a <code>null</code> PV
     *  to indicate that messages were received after a server timeout.
     *  @param pv PV that might have changed the alarm state or <code>null</code>
     *  @param parent_changed true if a parent item was updated as well
     */
    void fireNewAlarmState(final AlarmTreePV pv, final boolean parent_changed)
    {
        if (pv != null)
        {
            synchronized (this)
            {
                final SeverityLevel severity = pv.getSeverity();
                if (severity.ordinal() > 0)
                {
                    if (severity.isActive())
                    {
                        active_alarms.add(pv);
                        acknowledged_alarms.remove(pv);
                    }
                    else
                    {
                        acknowledged_alarms.add(pv);
                        active_alarms.remove(pv);
                    }
                }
                else
                {
                    active_alarms.remove(pv);
                    acknowledged_alarms.remove(pv);
                }
                // Suppress per-item events while configuration is being read
                if (!notify_listeners)
                    return;
            }
        }
        for (AlarmClientModelListener listener : listeners)
        {
            try
            {
                listener.newAlarmState(this, pv, parent_changed);
            }
            catch (Throwable ex)
            {
                Activator.getLogger().log(Level.WARNING, "Alarm update notification error", ex);
            }
        }
    }

    /** @return Debug string */
    @Override
    public String toString()
    {
        return "AlarmClientModel: " + config_tree;
    }

    /** Dump debug information */
    public synchronized void dump()
    {
        System.out.println("== AlarmClientModel ==");
        config_tree.dump(System.out);
        System.out.println("= Active alarms =");
        for (AlarmTreePV pv : active_alarms)
            System.out.println(pv.toString());
        System.out.println("= Acknowledged alarms =");
        for (AlarmTreePV pv : acknowledged_alarms)
            System.out.println(pv.toString());
    }
}
ESSICS/cs-studio
applications/alarm/alarm-plugins/org.csstudio.alarm.beast/src/org/csstudio/alarm/beast/ui/clientmodel/AlarmClientModel.java
Java
epl-1.0
40,140
/*******************************************************************************
 * Copyright (c) 2006, 2018 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package com.ibm.ws.ejbcontainer.injection.ann.ejb;

import javax.ejb.CreateException;
import javax.ejb.EJBLocalHome;

/**
 * Compatibility EJBLocalHome interface for CompCatBean.
 * Provides the EJB 2.x-style local home view (create method) so that
 * clients may obtain the session bean's local component interface.
 **/
public interface CatEJBLocalHome extends EJBLocalHome
{
    /**
     * Creates a session bean instance and returns its local component interface.
     *
     * @return CatEJBLocal The SessionBean EJB object.
     * @exception javax.ejb.CreateException SessionBean EJB object was not created.
     */
    public CatEJBLocal create() throws CreateException;
}
OpenLiberty/open-liberty
dev/com.ibm.ws.ejbcontainer.injection_fat/test-applications/EJB3INJSABean.jar/src/com/ibm/ws/ejbcontainer/injection/ann/ejb/CatEJBLocalHome.java
Java
epl-1.0
995
/** * Copyright (c) 2010-2020 Contributors to the openHAB project * * See the NOTICE file(s) distributed with this work for additional * information. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 */ package org.openhab.binding.zwave.internal.protocol.commandclass; import java.util.Arrays; import java.util.List; import org.openhab.binding.zwave.internal.protocol.SerialMessage; import org.openhab.binding.zwave.internal.protocol.ZWaveNode; import org.openhab.binding.zwave.internal.protocol.initialization.ZWaveNodeStageAdvancer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Used only by {@link ZWaveSecurityCommandClassWithInitialization} during device inclusion. * * During device inclusion, the security registration process between us and the node * has multiple stages in order to share our network key with the device. This class * is used to track our current state in that process and determine next steps. * * The specific commands to be exchanged are: * {@value #INIT_COMMAND_ORDER_LIST} * * @author Dave Badia * @since TODO */ class ZWaveSecureInclusionStateTracker { private static final Logger logger = LoggerFactory.getLogger(ZWaveSecureInclusionStateTracker.class); /** * During node inclusion <b>only</b>, this is the order in which commands should be sent and received. 
* Commands absent from this list (for example {@link #SECURITY_MESSAGE_ENCAP}) can be sent/received at any time */ private final List<Byte> INIT_COMMAND_ORDER_LIST = Arrays.asList(new Byte[]{ ZWaveSecurityCommandClass.SECURITY_SCHEME_GET, ZWaveSecurityCommandClass.SECURITY_SCHEME_REPORT, ZWaveSecurityCommandClass.SECURITY_NETWORK_KEY_SET, ZWaveSecurityCommandClass.SECURITY_NETWORK_KEY_VERIFY, ZWaveSecurityCommandClass.SECURITY_COMMANDS_SUPPORTED_GET, ZWaveSecurityCommandClass.SECURITY_COMMANDS_SUPPORTED_REPORT, }); private static final boolean HALT_ON_IMPROPER_ORDER = true; private byte currentStep = INIT_COMMAND_ORDER_LIST.get(0); /** * After we send a non-nonce security message, we can wait up to 10 seconds * for a reply. Then we must exit the inclusion process */ private static final int WAIT_TIME_MILLIS = 10000; /** * The next {@link SerialMessage} that will be given to {@link ZWaveNodeStageAdvancer} * when it calls {@link ZWaveSecurityCommandClass#initialize(boolean)} */ private SerialMessage nextRequestMessage = null; /** * Lock object that will be used for synchronization */ private final Object nextMessageLock = new Object(); private String errorState = null; private long waitForReplyTimeout = 0; private final ZWaveNode node; ZWaveSecureInclusionStateTracker(ZWaveNode node) { this.node = node; } /** * Since these operations are security sensitive we must ensure they are * executing in the proper sequence * @param newStep the state we are about to enter * @return true if the new command was in an acceptable order, false * if it was not. if false is returned, the response should <b>not</b> * be sent. 
*/ synchronized boolean verifyAndAdvanceState(Byte newStep) { logger.debug("NODE {}: ZWaveSecurityCommandClass in verifyAndAdvanceState with newstep={}, currentstep={}", node.getNodeId(), ZWaveSecurityCommandClass.commandToString(newStep), ZWaveSecurityCommandClass.commandToString(currentStep)); if(!INIT_COMMAND_ORDER_LIST.contains(newStep)) { // Commands absent from EXPECTED_COMMAND_ORDER_LIST are always ok return true; } // Going back to the first step (zero index) is always OK // TODO: DB is it really? if(INIT_COMMAND_ORDER_LIST.indexOf(newStep) > 0) { // We have to verify where we are at int currentIndex = INIT_COMMAND_ORDER_LIST.indexOf(currentStep); int newIndex = INIT_COMMAND_ORDER_LIST.indexOf(newStep); // Accept one message back or the same message(device resending last reply) in addition to the normal one message ahead if(newIndex != currentIndex && newIndex - currentIndex > 1) { if(HALT_ON_IMPROPER_ORDER) { setErrorState(String.format("NODE %s: Commands received out of order, aborting current=%s, new=%s", node.getNodeId(), ZWaveSecurityCommandClass.commandToString(currentStep), ZWaveSecurityCommandClass.commandToString(newStep))); return false; } else { logger.warn("NODE {}: Commands received out of order (warning only, continuing) current={}, new={}", node.getNodeId(), ZWaveSecurityCommandClass.commandToString(currentStep), ZWaveSecurityCommandClass.commandToString(newStep)); // fall through below } } } currentStep = newStep; return true; } public void setErrorState(String errorState) { this.errorState = errorState; } public void resetWaitForReplyTimeout() { waitForReplyTimeout = System.currentTimeMillis() + WAIT_TIME_MILLIS; } void setNextRequest(SerialMessage message) { logger.debug("NODE {}: in InclusionStateTracker.setNextRequest() (current={}) with {}", node.getNodeId(), (nextRequestMessage != null), message); if(nextRequestMessage != null) { logger.warn("NODE {}: in InclusionStateTracker.setNextRequest() overriding old message which was never sent 
of {}", node.getNodeId(), message); } verifyAndAdvanceState((byte) (message.getMessagePayloadByte(3) & 0xff)); synchronized(nextMessageLock) { nextRequestMessage = message; nextMessageLock.notify(); } } /** * Gets the next message to be sent during the inclusion flow. * Each message can only get retrieved once * @return the next message or null if there was none */ SerialMessage getNextRequest() { synchronized(nextMessageLock) { logger.debug("NODE {}: in InclusionStateTracker.getNextRequest() time left for reply: {}ms, returning {}", node.getNodeId(), (System.currentTimeMillis() - waitForReplyTimeout), nextRequestMessage); if(System.currentTimeMillis() > waitForReplyTimeout) { // waited too long for a reply, secure inclusion failed setErrorState(WAIT_TIME_MILLIS+"ms passed since last request was sent, secure inclusion failed."); return null; } if(nextRequestMessage != null) { SerialMessage message = nextRequestMessage; resetWaitForReplyTimeout(); nextRequestMessage = null; return message; } return null; } } public byte getCurrentStep() { return currentStep; } public String getErrorState() { return errorState; } }
openhab/openhab
bundles/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/protocol/commandclass/ZWaveSecureInclusionStateTracker.java
Java
epl-1.0
7,375
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Launches a single worker thread ({@code Hilo} number 0) and blocks until it
 * finishes.
 */
public class Main {

    /**
     * Entry point: starts the worker thread and waits for its completion.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Thread t1 = new Hilo(0);
        t1.start();
        try {
            t1.join();
        } catch (InterruptedException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            // Fixed: restore the interrupt status so callers/JVM shutdown hooks
            // can still observe the interruption
            Thread.currentThread().interrupt();
        }
    }
}
davidhmhernandez/ServiciosyProcesos
Concurrencia1Act7/src/Main.java
Java
epl-1.0
384
package eu.modelwriter.specification.editor.scanner;

import org.eclipse.jface.text.TextAttribute;
import org.eclipse.jface.text.rules.IToken;
import org.eclipse.jface.text.rules.RuleBasedScanner;
import org.eclipse.jface.text.rules.Token;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.widgets.Display;

import eu.modelwriter.specification.editor.RGBStorage;

/**
 * Rule-based scanner whose default token renders text with the
 * {@link RGBStorage#LOAD_RGB} colour.
 */
public class LoadScanner extends RuleBasedScanner {

  public LoadScanner() {
    // NOTE(review): this SWT Color is allocated but never disposed; colours are
    // OS resources — confirm lifecycle ownership with the editor that creates
    // this scanner.
    final Color loadColor = new Color(Display.getCurrent(), RGBStorage.LOAD_RGB);
    final IToken loadToken = new Token(new TextAttribute(loadColor));
    setDefaultReturnToken(loadToken);
  }
}
ModelWriter/Tarski
Source/eu.modelwriter.specification.editor/src/eu/modelwriter/specification/editor/scanner/LoadScanner.java
Java
epl-1.0
619
/**
 * Copyright (c) 2010-2020 Contributors to the openHAB project
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
package org.openhab.binding.tinkerforge.internal.model.impl;

import org.eclipse.emf.ecore.EClass;
import org.openhab.binding.tinkerforge.internal.model.Electrode;
import org.openhab.binding.tinkerforge.internal.model.ModelPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Electrode</b></em>'.
 *
 * NOTE: this class is EMF codegen output — methods tagged {@code @generated}
 * will be overwritten on regeneration and must not be hand-edited.
 *
 * @author Theo Weiss
 * @since 1.5.0
 *        <!-- end-user-doc -->
 *        <p>
 *        The following features are implemented:
 *        </p>
 *        <ul>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.impl.ElectrodeImpl#getDeviceType <em>Device
 *        Type</em>}</li>
 *        </ul>
 *
 * @generated
 */
public class ElectrodeImpl extends MultiTouchDeviceImpl implements Electrode {
    /**
     * The default value of the '{@link #getDeviceType() <em>Device Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @see #getDeviceType()
     * @generated
     * @ordered
     */
    protected static final String DEVICE_TYPE_EDEFAULT = "electrode";

    /**
     * The cached value of the '{@link #getDeviceType() <em>Device Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @see #getDeviceType()
     * @generated
     * @ordered
     */
    protected String deviceType = DEVICE_TYPE_EDEFAULT;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    protected ElectrodeImpl() {
        super();
    }

    /**
     * Returns the static EMF metaclass for this model object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ModelPackage.Literals.ELECTRODE;
    }

    /**
     * Returns the device-type discriminator (always "electrode" unless changed reflectively).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public String getDeviceType() {
        return deviceType;
    }

    /**
     * Reflective EMF accessor: resolves feature IDs to attribute values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ModelPackage.ELECTRODE__DEVICE_TYPE:
                return getDeviceType();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective EMF check: true when the feature differs from its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ModelPackage.ELECTRODE__DEVICE_TYPE:
                return DEVICE_TYPE_EDEFAULT == null ? deviceType != null : !DEVICE_TYPE_EDEFAULT.equals(deviceType);
        }
        return super.eIsSet(featureID);
    }

    /**
     * Appends the deviceType attribute to the superclass's string form
     * (skipped for unresolved proxies).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy())
            return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (deviceType: ");
        result.append(deviceType);
        result.append(')');
        return result.toString();
    }
} // ElectrodeImpl
openhab/openhab
bundles/binding/org.openhab.binding.tinkerforge/src/main/java/org/openhab/binding/tinkerforge/internal/model/impl/ElectrodeImpl.java
Java
epl-1.0
3,535
/******************************************************************************* * Copyright (c) 2016 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package com.ibm.ws.logstash.collector.v10; import org.osgi.framework.Version; import org.osgi.service.component.annotations.Component; import org.osgi.service.component.annotations.ConfigurationPolicy; import com.ibm.ws.logstash.collector.LogstashRuntimeVersion; @Component(name = LogstashCollector10.COMPONENT_NAME, service = { LogstashRuntimeVersion.class }, configurationPolicy = ConfigurationPolicy.IGNORE, property = { "service.vendor=IBM" }) public class LogstashCollector10 implements LogstashRuntimeVersion { public static final String COMPONENT_NAME = "com.ibm.ws.logstash.collector.v10.LogstashCollector10"; @Override public Version getVersion() { return VERSION_1_0; } }
OpenLiberty/open-liberty
dev/com.ibm.ws.logstash.collector.1.0/src/com/ibm/ws/logstash/collector/v10/LogstashCollector10.java
Java
epl-1.0
1,247
package com.coverity.ws.v9;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for updateStreamDefects complex type.
 *
 * NOTE: JAXB-generated binding class for the Coverity v9 web-service schema;
 * regeneration will overwrite hand edits.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="updateStreamDefects">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="streamDefectIds" type="{http://ws.coverity.com/v9}streamDefectIdDataObj" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element name="defectStateSpec" type="{http://ws.coverity.com/v9}defectStateSpecDataObj" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "updateStreamDefects", propOrder = {
    "streamDefectIds",
    "defectStateSpec"
})
public class UpdateStreamDefects {

    // Lazily initialized by getStreamDefectIds(); may be null until first access.
    protected List<StreamDefectIdDataObj> streamDefectIds;
    protected DefectStateSpecDataObj defectStateSpec;

    /**
     * Gets the value of the streamDefectIds property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the streamDefectIds property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getStreamDefectIds().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link StreamDefectIdDataObj }
     */
    public List<StreamDefectIdDataObj> getStreamDefectIds() {
        if (streamDefectIds == null) {
            streamDefectIds = new ArrayList<StreamDefectIdDataObj>();
        }
        return this.streamDefectIds;
    }

    /**
     * Gets the value of the defectStateSpec property.
     *
     * @return
     *     possible object is
     *     {@link DefectStateSpecDataObj }
     */
    public DefectStateSpecDataObj getDefectStateSpec() {
        return defectStateSpec;
    }

    /**
     * Sets the value of the defectStateSpec property.
     *
     * @param value
     *     allowed object is
     *     {@link DefectStateSpecDataObj }
     */
    public void setDefectStateSpec(DefectStateSpecDataObj value) {
        this.defectStateSpec = value;
    }
}
christ66/coverity-plugin
src/main/java/com/coverity/ws/v9/UpdateStreamDefects.java
Java
epl-1.0
2,691
/*******************************************************************************
 * Copyright (c) 2021 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package com.ibm.ws.jpa.fvt.entity.testlogic.enums;

import com.ibm.ws.testtooling.testlogic.JPAEntityClassEnum;

/**
 * Maps each datatype-support test entity (annotation- and XML-configured
 * variants) to its fully qualified class name and JPA entity name.
 */
public enum DatatypeSupportEntityEnum implements JPAEntityClassEnum {
    DatatypeSupportPropertyTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.annotation.DatatypeSupportPropertyTestEntity";
        }

        @Override
        public String getEntityName() {
            return "DatatypeSupportPropertyTestEntity";
        }
    },
    DatatypeSupportTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.annotation.DatatypeSupportTestEntity";
        }

        @Override
        public String getEntityName() {
            return "DatatypeSupportTestEntity";
        }
    },
    SerializableDatatypeSupportPropertyTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.annotation.SerializableDatatypeSupportPropertyTestEntity";
        }

        @Override
        public String getEntityName() {
            return "SerializableDatatypeSupportPropertyTestEntity";
        }
    },
    SerializableDatatypeSupportTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.annotation.SerializableDatatypeSupportTestEntity";
        }

        @Override
        public String getEntityName() {
            return "SerializableDatatypeSupportTestEntity";
        }
    },
    XMLDatatypeSupportPropertyTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.xml.XMLDatatypeSupportPropertyTestEntity";
        }

        @Override
        public String getEntityName() {
            return "XMLDatatypeSupportPropertyTestEntity";
        }
    },
    XMLDatatypeSupportTestEntity {
        // Fixed copy-paste defect: this constant previously returned the class and
        // entity name of XMLDatatypeSupportPropertyTestEntity (compare the correctly
        // paired SerializableXMLDatatypeSupportTestEntity below).
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.xml.XMLDatatypeSupportTestEntity";
        }

        @Override
        public String getEntityName() {
            return "XMLDatatypeSupportTestEntity";
        }
    },
    SerializableXMLDatatypeSupportPropertyTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.xml.SerializableXMLDatatypeSupportPropertyTestEntity";
        }

        @Override
        public String getEntityName() {
            return "SerializableXMLDatatypeSupportPropertyTestEntity";
        }
    },
    SerializableXMLDatatypeSupportTestEntity {
        @Override
        public String getEntityClassName() {
            return "com.ibm.ws.jpa.fvt.entity.entities.datatype.xml.SerializableXMLDatatypeSupportTestEntity";
        }

        @Override
        public String getEntityName() {
            return "SerializableXMLDatatypeSupportTestEntity";
        }
    };

    @Override
    public abstract String getEntityClassName();

    @Override
    public abstract String getEntityName();

    /**
     * Resolves an enum constant from its exact constant name.
     *
     * @param entityName enum constant name
     * @return the matching constant
     * @throws IllegalArgumentException if no constant matches
     */
    public static DatatypeSupportEntityEnum resolveEntityByName(String entityName) {
        return DatatypeSupportEntityEnum.valueOf(entityName);
    }
}
OpenLiberty/open-liberty
dev/com.ibm.ws.jpa.tests.spec10.entity_fat.common/test-applications/entity/src/com/ibm/ws/jpa/fvt/entity/testlogic/enums/DatatypeSupportEntityEnum.java
Java
epl-1.0
3,918
/*******************************************************************************
 * Copyright (C) 2011, Jens Baumgart <jens.baumgart@sap.com>
 * Copyright (C) 2011, Stefan Lay <stefan.lay@sap.com>
 * Copyright (C) 2015, Thomas Wolf <thomas.wolf@paranor.ch>
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *******************************************************************************/
package org.eclipse.egit.ui.internal.history;

import java.io.IOException;
import java.util.Collection;
import java.util.List;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.egit.ui.Activator;
import org.eclipse.egit.ui.JobFamilies;
import org.eclipse.egit.ui.internal.UIText;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revplot.PlotCommit;

/**
 * Background job that builds the formatted commit info text shown in the
 * history view. The request is captured at construction; the result is
 * retrieved via {@link #getFormatResult()} once the job completes.
 */
class FormatJob extends Job {

	@Override
	public boolean belongsTo(Object family) {
		if (JobFamilies.FORMAT_COMMIT_INFO.equals(family))
			return true;
		return super.belongsTo(family);
	}

	// Fixed: lock object and request are now final (a mutable guard object can be
	// silently reassigned, defeating the synchronization).
	/** Guards {@link #formatRequest} and {@link #formatResult}. */
	private final Object lock = new Object();

	private final FormatRequest formatRequest;

	private FormatResult formatResult;

	FormatJob(FormatRequest formatRequest) {
		super(UIText.FormatJob_buildingCommitInfo);
		this.formatRequest = formatRequest;
	}

	/** @return the formatted result, or {@code null} if the job has not finished */
	FormatResult getFormatResult() {
		synchronized (lock) {
			return formatResult;
		}
	}

	@Override
	protected IStatus run(IProgressMonitor monitor) {
		if (monitor.isCanceled()) {
			return Status.CANCEL_STATUS;
		}
		FormatResult commitInfo;
		CommitInfoBuilder builder;
		try {
			synchronized (lock) {
				SWTCommit commit = (SWTCommit) formatRequest.getCommit();
				commit.parseBody();
				builder = new CommitInfoBuilder(formatRequest.getRepository(),
						commit, formatRequest.isFill(),
						formatRequest.getAllRefs());
			}
			// format() may be slow; run it outside the lock so getFormatResult()
			// callers are not blocked
			commitInfo = builder.format(monitor);
		} catch (IOException e) {
			return Activator.createErrorStatus(e.getMessage(), e);
		}
		if (monitor.isCanceled()) {
			return Status.CANCEL_STATUS;
		}
		synchronized (lock) {
			formatResult = commitInfo;
		}
		return Status.OK_STATUS;
	}

	/** Input parameters for a formatting run. */
	static class FormatRequest {

		public Collection<Ref> getAllRefs() {
			return allRefs;
		}

		public void setAllRefs(Collection<Ref> allRefs) {
			this.allRefs = allRefs;
		}

		// Fixed: fields without setters are now final.
		private final Repository repository;

		private final PlotCommit<?> commit;

		private final boolean fill;

		private Collection<Ref> allRefs;

		FormatRequest(Repository repository, PlotCommit<?> commit, boolean fill,
				Collection<Ref> allRefs) {
			this.repository = repository;
			this.commit = commit;
			this.fill = fill;
			this.allRefs = allRefs;
		}

		public Repository getRepository() {
			return repository;
		}

		public PlotCommit<?> getCommit() {
			return commit;
		}

		public boolean isFill() {
			return fill;
		}
	}

	/** Immutable output of a formatting run. */
	static class FormatResult {
		private final String commitInfo;

		private final List<GitCommitReference> knownLinks;

		private final int headerEnd;

		private final int footerStart;

		// Fixed: parameter typo "commmitInfo" -> "commitInfo"
		FormatResult(String commitInfo, List<GitCommitReference> links,
				int headerEnd, int footerStart) {
			this.commitInfo = commitInfo;
			this.knownLinks = links;
			this.headerEnd = headerEnd;
			this.footerStart = footerStart;
		}

		public String getCommitInfo() {
			return commitInfo;
		}

		public List<GitCommitReference> getKnownLinks() {
			return knownLinks;
		}

		public int getHeaderEnd() {
			return headerEnd;
		}

		public int getFooterStart() {
			return footerStart;
		}
	}
}
SmithAndr/egit
org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/history/FormatJob.java
Java
epl-1.0
3,809
/*
 * Copyright (c) 2013 Cisco Systems, Inc. and others.  All rights reserved.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
 * and is available at http://www.eclipse.org/legal/epl-v10.html
 */
package org.opendaylight.controller.config.yangjmxgenerator.plugin;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.maven.project.MavenProject;
import org.opendaylight.controller.config.spi.ModuleFactory;
import org.opendaylight.controller.config.yangjmxgenerator.ModuleMXBeanEntry;
import org.opendaylight.controller.config.yangjmxgenerator.PackageTranslator;
import org.opendaylight.controller.config.yangjmxgenerator.ServiceInterfaceEntry;
import org.opendaylight.controller.config.yangjmxgenerator.TypeProviderWrapper;
import org.opendaylight.yangtools.sal.binding.yang.types.TypeProviderImpl;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.model.api.IdentitySchemaNode;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang2sources.spi.BasicCodeGenerator;
import org.opendaylight.yangtools.yang2sources.spi.MavenProjectAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class interfaces with yang-maven-plugin. Gets parsed yang modules in
 * {@link SchemaContext}, and parameters form the plugin configuration, and
 * writes service interfaces and/or modules.
 */
public class JMXGenerator implements BasicCodeGenerator, MavenProjectAware {
    /** Pair of (YANG namespace, Java package) parsed from plugin configuration. */
    private static final class NamespaceMapping {
        private final String namespace, packageName;

        public NamespaceMapping(final String namespace, final String packagename) {
            this.namespace = namespace;
            this.packageName = packagename;
        }
    }

    @VisibleForTesting
    static final String NAMESPACE_TO_PACKAGE_DIVIDER = "==";
    @VisibleForTesting
    static final String NAMESPACE_TO_PACKAGE_PREFIX = "namespaceToPackage";
    @VisibleForTesting
    static final String MODULE_FACTORY_FILE_BOOLEAN = "moduleFactoryFile";

    private static final Logger LOG = LoggerFactory.getLogger(JMXGenerator.class);
    private static final Pattern NAMESPACE_MAPPING_PATTERN =
            Pattern.compile("(.+)" + NAMESPACE_TO_PACKAGE_DIVIDER + "(.+)");

    private PackageTranslator packageTranslator;
    private final CodeWriter codeWriter;
    private Map<String, String> namespaceToPackageMapping;
    private File resourceBaseDir;
    private File projectBaseDir;
    private boolean generateModuleFactoryFile = true;

    public JMXGenerator() {
        this(new CodeWriter());
    }

    public JMXGenerator(final CodeWriter codeWriter) {
        this.codeWriter = codeWriter;
    }

    /**
     * Generates service-interface and module sources for the given schema
     * context, plus (optionally) a META-INF/services ModuleFactory file.
     *
     * @param context parsed YANG schema context (must not be null)
     * @param outputBaseDir target directory for generated sources (created if absent)
     * @param yangModulesInCurrentMavenModule modules owned by the current Maven module
     * @return the set of files written
     */
    @Override
    public Collection<File> generateSources(final SchemaContext context,
                                            final File outputBaseDir,
                                            final Set<Module> yangModulesInCurrentMavenModule) {

        Preconditions.checkArgument(context != null, "Null context received");
        Preconditions.checkArgument(outputBaseDir != null,
                "Null outputBaseDir received");

        Preconditions
                .checkArgument(namespaceToPackageMapping != null && !namespaceToPackageMapping.isEmpty(),
                        "No namespace to package mapping provided in additionalConfiguration");

        packageTranslator = new PackageTranslator(namespaceToPackageMapping);

        if (!outputBaseDir.exists()) {
            outputBaseDir.mkdirs();
        }

        GeneratedFilesTracker generatedFiles = new GeneratedFilesTracker();
        // create SIE structure qNamesToSIEs
        Map<QName, ServiceInterfaceEntry> qNamesToSIEs = new HashMap<>();

        Map<IdentitySchemaNode, ServiceInterfaceEntry> knownSEITracker = new HashMap<>();
        for (Module module : context.getModules()) {
            String packageName = packageTranslator.getPackageName(module);
            Map<QName, ServiceInterfaceEntry> namesToSIEntries = ServiceInterfaceEntry
                    .create(module, packageName, knownSEITracker);

            for (Entry<QName, ServiceInterfaceEntry> sieEntry : namesToSIEntries
                    .entrySet()) {
                // merge value into qNamesToSIEs
                // fixed idiom: '== false' -> '!'
                if (!qNamesToSIEs.containsKey(sieEntry.getKey())) {
                    qNamesToSIEs.put(sieEntry.getKey(), sieEntry.getValue());
                } else {
                    throw new IllegalStateException(
                            "Cannot add two SIE with same qname "
                                    + sieEntry.getValue());
                }
            }
            if (yangModulesInCurrentMavenModule.contains(module)) {
                // write this sie to disk
                for (ServiceInterfaceEntry sie : namesToSIEntries.values()) {
                    try {
                        generatedFiles.addFile(codeWriter.writeSie(sie,
                                outputBaseDir));
                    } catch (Exception e) {
                        throw new RuntimeException(
                                "Error occurred during SIE source generate phase",
                                e);
                    }
                }
            }
        }

        File mainBaseDir = concatFolders(projectBaseDir, "src", "main", "java");

        Preconditions.checkNotNull(resourceBaseDir,
                "resource base dir attribute was null");

        StringBuilder fullyQualifiedNamesOfFactories = new StringBuilder();
        // create MBEs
        for (Module module : yangModulesInCurrentMavenModule) {
            String packageName = packageTranslator.getPackageName(module);
            Map<String /* MB identity local name */, ModuleMXBeanEntry> namesToMBEs = ModuleMXBeanEntry
                    .create(module, qNamesToSIEs, context, new TypeProviderWrapper(new TypeProviderImpl(context)),
                            packageName);

            for (Entry<String, ModuleMXBeanEntry> mbeEntry : namesToMBEs
                    .entrySet()) {
                ModuleMXBeanEntry mbe = mbeEntry.getValue();
                try {
                    List<File> files1 = codeWriter.writeMbe(mbe, outputBaseDir,
                            mainBaseDir);
                    generatedFiles.addFile(files1);
                } catch (Exception e) {
                    throw new RuntimeException(
                            "Error occurred during MBE source generate phase",
                            e);
                }
                fullyQualifiedNamesOfFactories.append(mbe
                        .getFullyQualifiedName(mbe.getStubFactoryName()));
                fullyQualifiedNamesOfFactories.append("\n");
            }
        }
        // create ModuleFactory file if needed
        if (fullyQualifiedNamesOfFactories.length() > 0
                && generateModuleFactoryFile) {
            File serviceLoaderFile = JMXGenerator.concatFolders(
                    resourceBaseDir, "META-INF", "services",
                    ModuleFactory.class.getName());
            // if this file does not exist, create empty file
            serviceLoaderFile.getParentFile().mkdirs();
            try {
                serviceLoaderFile.createNewFile();
                FileUtils.write(serviceLoaderFile,
                        fullyQualifiedNamesOfFactories.toString());
            } catch (IOException e) {
                String message = "Cannot write to " + serviceLoaderFile;
                // fixed: log the causing exception, not only the message
                LOG.error(message, e);
                throw new RuntimeException(message, e);
            }
        }
        return generatedFiles.getFiles();
    }

    /** Joins folder names under a base directory with the platform separator. */
    @VisibleForTesting
    static File concatFolders(final File projectBaseDir, final String... folderNames) {
        StringBuilder b = new StringBuilder();
        for (String folder : folderNames) {
            b.append(folder);
            b.append(File.separator);
        }

        return new File(projectBaseDir, b.toString());
    }

    @Override
    public void setAdditionalConfig(final Map<String, String> additionalCfg) {
        LOG.debug("{}: Additional configuration received: {}",
                getClass().getCanonicalName(), additionalCfg);
        this.namespaceToPackageMapping = extractNamespaceMapping(additionalCfg);
        this.generateModuleFactoryFile = extractModuleFactoryBoolean(additionalCfg);
    }

    // Defaults to true; only an explicit "false" disables the ModuleFactory file.
    private boolean extractModuleFactoryBoolean(
            final Map<String, String> additionalCfg) {
        String bool = additionalCfg.get(MODULE_FACTORY_FILE_BOOLEAN);
        return !"false".equals(bool);
    }

    private static Map<String, String> extractNamespaceMapping(
            final Map<String, String> additionalCfg) {
        Map<String, String> namespaceToPackage = Maps.newHashMap();
        // fixed idiom: iterate entrySet() instead of keySet() + get()
        for (Entry<String, String> entry : additionalCfg.entrySet()) {
            if (entry.getKey().startsWith(NAMESPACE_TO_PACKAGE_PREFIX)) {
                NamespaceMapping mappingResolved = extractNamespaceMapping(entry.getValue());
                namespaceToPackage.put(mappingResolved.namespace,
                        mappingResolved.packageName);
            }
        }
        return namespaceToPackage;
    }

    private static NamespaceMapping extractNamespaceMapping(final String mapping) {
        Matcher matcher = NAMESPACE_MAPPING_PATTERN.matcher(mapping);
        Preconditions.checkArgument(matcher.matches(),
                "Namespace to package mapping:%s is in invalid format, requested format is: %s",
                mapping, NAMESPACE_MAPPING_PATTERN);
        return new NamespaceMapping(matcher.group(1), matcher.group(2));
    }

    @Override
    public void setResourceBaseDir(final File resourceDir) {
        this.resourceBaseDir = resourceDir;
    }

    @Override
    public void setMavenProject(final MavenProject project) {
        this.projectBaseDir = project.getBasedir();
        LOG.debug("{}: project base dir: {}", getClass().getCanonicalName(), projectBaseDir);
    }

    /** Tracks generated files and rolls them back on a name collision. */
    @VisibleForTesting
    static class GeneratedFilesTracker {
        private final Set<File> files = Sets.newHashSet();

        void addFile(final File file) {
            if (files.contains(file)) {
                List<File> undeletedFiles = Lists.newArrayList();
                for (File presentFile : files) {
                    if (!presentFile.delete()) {
                        undeletedFiles.add(presentFile);
                    }
                }
                if (!undeletedFiles.isEmpty()) {
                    LOG.error(
                            "Illegal state occurred: Unable to delete already generated files, undeleted files: {}",
                            undeletedFiles);
                }
                throw new IllegalStateException(
                        "Name conflict in generated files, file" + file
                                + " present twice");
            }
            files.add(file);
        }

        void addFile(final Collection<File> files) {
            for (File file : files) {
                addFile(file);
            }
        }

        public Set<File> getFiles() {
            return files;
        }
    }
}
my76128/controller
opendaylight/config/yang-jmx-generator-plugin/src/main/java/org/opendaylight/controller/config/yangjmxgenerator/plugin/JMXGenerator.java
Java
epl-1.0
11,935
/** * Copyright (c) 2015 Bosch Software Innovations GmbH and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.hawkbit.mgmt.rest.resource; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.RandomStringUtils; import org.eclipse.hawkbit.mgmt.rest.api.MgmtRestConstants; import org.eclipse.hawkbit.repository.model.NamedEntity; import org.eclipse.hawkbit.repository.model.SoftwareModuleType; import org.eclipse.hawkbit.repository.test.util.WithUser; import org.eclipse.hawkbit.rest.util.JsonBuilder; import org.eclipse.hawkbit.rest.util.MockMvcResultPrinter; import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import com.jayway.jsonpath.JsonPath; import io.qameta.allure.Description; import io.qameta.allure.Feature; import io.qameta.allure.Story; 
/** * Test for {@link MgmtSoftwareModuleTypeResource}. * */ @Feature("Component Tests - Management API") @Story("Software Module Type Resource") public class MgmtSoftwareModuleTypeResourceTest extends AbstractManagementApiIntegrationTest { @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes GET requests.") public void getSoftwareModuleTypes() throws Exception { final SoftwareModuleType testType = createTestType(); mvc.perform(get("/rest/v1/softwaremoduletypes").accept(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE)) .andExpect(jsonPath("$.content.[?(@.key=='" + osType.getKey() + "')].name", contains(osType.getName()))) .andExpect(jsonPath("$.content.[?(@.key=='" + osType.getKey() + "')].description", contains(osType.getDescription()))) .andExpect(jsonPath("$.content.[?(@.key=='" + osType.getKey() + "')].colour").doesNotExist()) .andExpect(jsonPath("$.content.[?(@.key=='" + osType.getKey() + "')].maxAssignments", contains(1))) .andExpect(jsonPath("$.content.[?(@.key=='" + osType.getKey() + "')].key", contains("os"))) .andExpect(jsonPath("$.content.[?(@.key=='" + runtimeType.getKey() + "')].name", contains(runtimeType.getName()))) .andExpect(jsonPath("$.content.[?(@.key=='" + runtimeType.getKey() + "')].description", contains(runtimeType.getDescription()))) .andExpect(jsonPath("$.content.[?(@.key=='" + runtimeType.getKey() + "')].maxAssignments", contains(1))) .andExpect(jsonPath("$.content.[?(@.key=='" + runtimeType.getKey() + "')].key", contains("runtime"))) .andExpect( jsonPath("$.content.[?(@.key=='" + appType.getKey() + "')].name", contains(appType.getName()))) .andExpect(jsonPath("$.content.[?(@.key=='" + appType.getKey() + "')].description", contains(appType.getDescription()))) .andExpect(jsonPath("$.content.[?(@.key=='" + appType.getKey() + 
"')].colour").doesNotExist()) .andExpect(jsonPath("$.content.[?(@.key=='" + appType.getKey() + "')].maxAssignments", contains(Integer.MAX_VALUE))) .andExpect(jsonPath("$.content.[?(@.key=='" + appType.getKey() + "')].key", contains("application"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].id", contains(testType.getId().intValue()))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].name", contains("TestName123"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].description", contains("Desc1234"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].colour", contains("colour"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].createdBy", contains("uploadTester"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].createdAt", contains(testType.getCreatedAt()))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].lastModifiedBy", contains("uploadTester"))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].lastModifiedAt", contains(testType.getLastModifiedAt()))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].maxAssignments", contains(5))) .andExpect(jsonPath("$.content.[?(@.key=='test123')].key", contains("test123"))) .andExpect(jsonPath("$.total", equalTo(4))); } private SoftwareModuleType createTestType() { SoftwareModuleType testType = softwareModuleTypeManagement.create(entityFactory.softwareModuleType().create() .key("test123").name("TestName123").description("Desc123").colour("colour").maxAssignments(5)); testType = softwareModuleTypeManagement .update(entityFactory.softwareModuleType().update(testType.getId()).description("Desc1234")); return testType; } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes GET requests with sorting by MAXASSIGNMENTS field.") public void getSoftwareModuleTypesSortedByMaxAssignments() throws Exception { final SoftwareModuleType testType = createTestType(); // descending 
mvc.perform(get("/rest/v1/softwaremoduletypes").accept(MediaType.APPLICATION_JSON) .param(MgmtRestConstants.REQUEST_PARAMETER_SORTING, "MAXASSIGNMENTS:DESC")) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE)) .andExpect(jsonPath("$.content.[1].id", equalTo(testType.getId().intValue()))) .andExpect(jsonPath("$.content.[1].name", equalTo("TestName123"))) .andExpect(jsonPath("$.content.[1].description", equalTo("Desc1234"))) .andExpect(jsonPath("$.content.[1].colour", equalTo("colour"))) .andExpect(jsonPath("$.content.[1].createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.content.[1].createdAt", equalTo(testType.getCreatedAt()))) .andExpect(jsonPath("$.content.[1].lastModifiedBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.content.[1].lastModifiedAt", equalTo(testType.getLastModifiedAt()))) .andExpect(jsonPath("$.content.[1].maxAssignments", equalTo(5))) .andExpect(jsonPath("$.content.[1].key", equalTo("test123"))) .andExpect(jsonPath("$.total", equalTo(4))); // ascending mvc.perform(get("/rest/v1/softwaremoduletypes").accept(MediaType.APPLICATION_JSON) .param(MgmtRestConstants.REQUEST_PARAMETER_SORTING, "MAXASSIGNMENTS:ASC")) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE)) .andExpect(jsonPath("$.content.[2].id", equalTo(testType.getId().intValue()))) .andExpect(jsonPath("$.content.[2].name", equalTo("TestName123"))) .andExpect(jsonPath("$.content.[2].description", equalTo("Desc1234"))) .andExpect(jsonPath("$.content.[2].createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.content.[2].createdAt", equalTo(testType.getCreatedAt()))) .andExpect(jsonPath("$.content.[2].lastModifiedBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.content.[2].lastModifiedAt", equalTo(testType.getLastModifiedAt()))) .andExpect(jsonPath("$.content.[2].maxAssignments", equalTo(5))) 
.andExpect(jsonPath("$.content.[2].key", equalTo("test123"))) .andExpect(jsonPath("$.total", equalTo(4))); } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes POST requests when max assignment is smaller than 1") public void createSoftwareModuleTypesInvalidAssignmentBadRequest() throws Exception { final List<SoftwareModuleType> types = new ArrayList<>(); types.add(entityFactory.softwareModuleType().create().key("test-1").name("TestName-1").maxAssignments(-1) .build()); mvc.perform(post("/rest/v1/softwaremoduletypes/").content(JsonBuilder.softwareModuleTypes(types)) .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest()); types.clear(); types.add(entityFactory.softwareModuleType().create().key("test0").name("TestName0").maxAssignments(0).build()); mvc.perform(post("/rest/v1/softwaremoduletypes/").content(JsonBuilder.softwareModuleTypes(types)) .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest()); } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes POST requests.") public void createSoftwareModuleTypes() throws Exception { final List<SoftwareModuleType> types = Arrays.asList( entityFactory.softwareModuleType().create().key("test1").name("TestName1").description("Desc1") .colour("col1‚").maxAssignments(1).build(), entityFactory.softwareModuleType().create().key("test2").name("TestName2").description("Desc2") .colour("col2‚").maxAssignments(2).build(), entityFactory.softwareModuleType().create().key("test3").name("TestName3").description("Desc3") .colour("col3‚").maxAssignments(3).build()); final MvcResult mvcResult = mvc 
.perform(post("/rest/v1/softwaremoduletypes/").content(JsonBuilder.softwareModuleTypes(types)) .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE)) .andExpect(jsonPath("[0].name", equalTo("TestName1"))).andExpect(jsonPath("[0].key", equalTo("test1"))) .andExpect(jsonPath("[0].description", equalTo("Desc1"))) .andExpect(jsonPath("[0].createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("[0].maxAssignments", equalTo(1))) .andExpect(jsonPath("[1].name", equalTo("TestName2"))).andExpect(jsonPath("[1].key", equalTo("test2"))) .andExpect(jsonPath("[1].description", equalTo("Desc2"))) .andExpect(jsonPath("[1].createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("[1].maxAssignments", equalTo(2))) .andExpect(jsonPath("[2].name", equalTo("TestName3"))).andExpect(jsonPath("[2].key", equalTo("test3"))) .andExpect(jsonPath("[2].description", equalTo("Desc3"))) .andExpect(jsonPath("[2].createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("[2].createdAt", not(equalTo(0)))) .andExpect(jsonPath("[2].maxAssignments", equalTo(3))).andReturn(); final SoftwareModuleType created1 = softwareModuleTypeManagement.getByKey("test1").get(); final SoftwareModuleType created2 = softwareModuleTypeManagement.getByKey("test2").get(); final SoftwareModuleType created3 = softwareModuleTypeManagement.getByKey("test3").get(); assertThat( JsonPath.compile("[0]_links.self.href").read(mvcResult.getResponse().getContentAsString()).toString()) .isEqualTo("http://localhost/rest/v1/softwaremoduletypes/" + created1.getId()); assertThat( JsonPath.compile("[1]_links.self.href").read(mvcResult.getResponse().getContentAsString()).toString()) .isEqualTo("http://localhost/rest/v1/softwaremoduletypes/" + created2.getId()); assertThat( JsonPath.compile("[2]_links.self.href").read(mvcResult.getResponse().getContentAsString()).toString()) 
.isEqualTo("http://localhost/rest/v1/softwaremoduletypes/" + created3.getId()); assertThat(softwareModuleTypeManagement.count()).isEqualTo(6); } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes/{ID} GET requests.") public void getSoftwareModuleType() throws Exception { final SoftwareModuleType testType = createTestType(); mvc.perform(get("/rest/v1/softwaremoduletypes/{smtId}", testType.getId()).accept(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE)) .andExpect(jsonPath("$.name", equalTo("TestName123"))) .andExpect(jsonPath("$.description", equalTo("Desc1234"))) .andExpect(jsonPath("$.colour", equalTo("colour"))) .andExpect(jsonPath("$.maxAssignments", equalTo(5))) .andExpect(jsonPath("$.createdBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.createdAt", equalTo(testType.getCreatedAt()))) .andExpect(jsonPath("$.lastModifiedBy", equalTo("uploadTester"))) .andExpect(jsonPath("$.lastModifiedAt", equalTo(testType.getLastModifiedAt()))) .andExpect(jsonPath("$.deleted", equalTo(testType.isDeleted()))); } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes/{ID} DELETE requests (hard delete scenario).") public void deleteSoftwareModuleTypeUnused() throws Exception { final SoftwareModuleType testType = createTestType(); assertThat(softwareModuleTypeManagement.count()).isEqualTo(4); mvc.perform(delete("/rest/v1/softwaremoduletypes/{smId}", testType.getId())).andDo(MockMvcResultPrinter.print()) .andExpect(status().isOk()); assertThat(softwareModuleTypeManagement.count()).isEqualTo(3); } @Test @Description("Ensures that module type deletion request to API on an entity that does not exist results in NOT_FOUND.") public void deleteSoftwareModuleTypeThatDoesNotExistLeadsToNotFound() 
throws Exception { mvc.perform(delete("/rest/v1/softwaremoduletypes/1234")).andDo(MockMvcResultPrinter.print()) .andExpect(status().isNotFound()); } @Test @WithUser(principal = "uploadTester", allSpPermissions = true) @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes/{ID} DELETE requests (soft delete scenario).") public void deleteSoftwareModuleTypeUsed() throws Exception { final SoftwareModuleType testType = createTestType(); softwareModuleManagement .create(entityFactory.softwareModule().create().type(testType).name("name").version("version")); assertThat(softwareModuleTypeManagement.count()).isEqualTo(4); mvc.perform(get("/rest/v1/softwaremoduletypes/{smtId}", testType.getId())).andDo(MockMvcResultPrinter.print()) .andExpect(status().isOk()).andExpect(jsonPath("$.deleted", equalTo(false))); mvc.perform(delete("/rest/v1/softwaremoduletypes/{smtId}", testType.getId())) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()); mvc.perform(get("/rest/v1/softwaremoduletypes/{smtId}", testType.getId())).andDo(MockMvcResultPrinter.print()) .andExpect(status().isOk()).andExpect(jsonPath("$.deleted", equalTo(true))); assertThat(softwareModuleTypeManagement.count()).isEqualTo(3); } @Test @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes/{ID} PUT requests.") public void updateSoftwareModuleTypeColourDescriptionAndNameUntouched() throws Exception { final SoftwareModuleType testType = createTestType(); final String body = new JSONObject().put("id", testType.getId()).put("description", "foobardesc") .put("colour", "updatedColour").put("name", "nameShouldNotBeChanged").toString(); mvc.perform(put("/rest/v1/softwaremoduletypes/{smId}", testType.getId()).content(body) .contentType(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(jsonPath("$.id", equalTo(testType.getId().intValue()))) .andExpect(jsonPath("$.description", equalTo("foobardesc"))) 
.andExpect(jsonPath("$.colour", equalTo("updatedColour"))) .andExpect(jsonPath("$.name", equalTo("TestName123"))).andReturn(); } @Test @Description("Tests the update of the deletion flag. It is verfied that the software module type can't be marked as deleted through update operation.") public void updateSoftwareModuleTypeDeletedFlag() throws Exception { SoftwareModuleType testType = createTestType(); final String body = new JSONObject().put("id", testType.getId()).put("deleted", true).toString(); mvc.perform(put("/rest/v1/softwaremoduletypes/{smtId}", testType.getId()).content(body) .contentType(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(jsonPath("$.id", equalTo(testType.getId().intValue()))) .andExpect(jsonPath("$.lastModifiedAt", equalTo(testType.getLastModifiedAt()))) .andExpect(jsonPath("$.deleted", equalTo(false))); testType = softwareModuleTypeManagement.get(testType.getId()).get(); assertThat(testType.getLastModifiedAt()).isEqualTo(testType.getLastModifiedAt()); assertThat(testType.isDeleted()).isEqualTo(false); } @Test @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes GET requests with paging.") public void getSoftwareModuleTypesWithoutAddtionalRequestParameters() throws Exception { final int types = 3; mvc.perform(get(MgmtRestConstants.SOFTWAREMODULETYPE_V1_REQUEST_MAPPING)).andDo(MockMvcResultPrinter.print()) .andExpect(status().isOk()) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_TOTAL, equalTo(types))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_SIZE, equalTo(types))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_CONTENT, hasSize(types))); } @Test @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes GET requests with paging.") public void getSoftwareModuleTypesWithPagingLimitRequestParameter() throws Exception { final int types = 3; final int limitSize = 1; 
mvc.perform(get(MgmtRestConstants.SOFTWAREMODULETYPE_V1_REQUEST_MAPPING) .param(MgmtRestConstants.REQUEST_PARAMETER_PAGING_LIMIT, String.valueOf(limitSize))) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_TOTAL, equalTo(types))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_SIZE, equalTo(limitSize))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_CONTENT, hasSize(limitSize))); } @Test @Description("Checks the correct behaviour of /rest/v1/softwaremoduletypes GET requests with paging.") public void getSoftwareModuleTypesWithPagingLimitAndOffsetRequestParameter() throws Exception { final int types = 3; final int offsetParam = 2; final int expectedSize = types - offsetParam; mvc.perform(get(MgmtRestConstants.SOFTWAREMODULETYPE_V1_REQUEST_MAPPING) .param(MgmtRestConstants.REQUEST_PARAMETER_PAGING_OFFSET, String.valueOf(offsetParam)) .param(MgmtRestConstants.REQUEST_PARAMETER_PAGING_LIMIT, String.valueOf(types))) .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk()) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_TOTAL, equalTo(types))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_SIZE, equalTo(expectedSize))) .andExpect(jsonPath(MgmtTargetResourceTest.JSON_PATH_PAGED_LIST_CONTENT, hasSize(expectedSize))); } @Test @Description("Ensures that the server is behaving as expected on invalid requests (wrong media type, wrong ID etc.).") public void invalidRequestsOnSoftwaremoduleTypesResource() throws Exception { final SoftwareModuleType testType = createTestType(); final List<SoftwareModuleType> types = Arrays.asList(testType); // SM does not exist mvc.perform(get("/rest/v1/softwaremoduletypes/12345678")).andDo(MockMvcResultPrinter.print()) .andExpect(status().isNotFound()); mvc.perform(delete("/rest/v1/softwaremoduletypes/12345678")).andDo(MockMvcResultPrinter.print()) .andExpect(status().isNotFound()); // bad 
request - no content mvc.perform(post("/rest/v1/softwaremoduletypes").contentType(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest()); // bad request - bad content mvc.perform(post("/rest/v1/softwaremoduletypes").content("sdfjsdlkjfskdjf".getBytes()) .contentType(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print()) .andExpect(status().isBadRequest()); mvc.perform(post("/rest/v1/softwaremoduletypes").content( "[{\"description\":\"Desc123\",\"id\":9223372036854775807,\"key\":\"test123\",\"maxAssignments\":5}]") .contentType(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print()) .andExpect(status().isBadRequest()); final SoftwareModuleType toLongName = entityFactory.softwareModuleType().create().key("test123") .name(RandomStringUtils.randomAlphanumeric(NamedEntity.NAME_MAX_SIZE + 1)).build(); mvc.perform( post("/rest/v1/softwaremoduletypes").content(JsonBuilder.softwareModuleTypes(Arrays.asList(toLongName))) .contentType(MediaType.APPLICATION_JSON)) .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest()); // unsupported media type mvc.perform(post("/rest/v1/softwaremoduletypes").content(JsonBuilder.softwareModuleTypes(types)) .contentType(MediaType.APPLICATION_OCTET_STREAM)).andDo(MockMvcResultPrinter.print()) .andExpect(status().isUnsupportedMediaType()); // not allowed methods mvc.perform(put("/rest/v1/softwaremoduletypes")).andDo(MockMvcResultPrinter.print()) .andExpect(status().isMethodNotAllowed()); mvc.perform(delete("/rest/v1/softwaremoduletypes")).andDo(MockMvcResultPrinter.print()) .andExpect(status().isMethodNotAllowed()); } @Test @Description("Search erquest of software module types.") public void searchSoftwareModuleTypeRsql() throws Exception { softwareModuleTypeManagement.create(entityFactory.softwareModuleType().create().key("test123") .name("TestName123").description("Desc123").maxAssignments(5)); 
softwareModuleTypeManagement.create(entityFactory.softwareModuleType().create().key("test1234") .name("TestName1234").description("Desc1234").maxAssignments(5)); final String rsqlFindLikeDs1OrDs2 = "name==TestName123,name==TestName1234"; mvc.perform(get("/rest/v1/softwaremoduletypes?q=" + rsqlFindLikeDs1OrDs2)).andDo(MockMvcResultPrinter.print()) .andExpect(status().isOk()).andExpect(jsonPath("size", equalTo(2))) .andExpect(jsonPath("total", equalTo(2))).andExpect(jsonPath("content[0].name", equalTo("TestName123"))) .andExpect(jsonPath("content[1].name", equalTo("TestName1234"))); } private void createSoftwareModulesAlphabetical(final int amount) { char character = 'a'; for (int index = 0; index < amount; index++) { final String str = String.valueOf(character); softwareModuleManagement.create(entityFactory.softwareModule().create().type(osType).name(str) .description(str).vendor(str).version(str)); character++; } } }
eclipse/hawkbit
hawkbit-rest/hawkbit-mgmt-resource/src/test/java/org/eclipse/hawkbit/mgmt/rest/resource/MgmtSoftwareModuleTypeResourceTest.java
Java
epl-1.0
26,380
package de.cooperateproject.modeling.textual.common.naming;

import java.util.List;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.uml2.uml.NamedElement;
import org.eclipse.xtext.nodemodel.INode;
import org.eclipse.xtext.nodemodel.util.NodeModelUtils;

import de.cooperateproject.modeling.textual.common.metamodel.textualCommons.PackageImport;
import de.cooperateproject.modeling.textual.common.metamodel.textualCommons.TextualCommonsPackage;
import de.cooperateproject.modeling.textual.common.metamodel.textualCommons.util.TextualCommonsSwitch;

/**
 * Switch that computes a display name for textual-commons model elements.
 *
 * <p>Resolution order: an element's own {@code name} attribute first, then the text
 * the user actually typed for the UML reference (taken from the Xtext node model),
 * and finally the name of the resolved referenced UML element. Returns {@code null}
 * when no name can be determined.
 */
public class NameSwitch extends TextualCommonsSwitch<String> {

    @Override
    public String casePackageImport(PackageImport object) {
        // A package import carries no own name; it is named after the UML element it references.
        return determineNameOfReferencedNamedElement(object,
                TextualCommonsPackage.Literals.UML_REFERENCING_ELEMENT__REFERENCED_ELEMENT);
    }

    @Override
    public String caseNamedElement(
            de.cooperateproject.modeling.textual.common.metamodel.textualCommons.NamedElement object) {
        // Prefer the element's own name attribute; eGet(..., false) avoids proxy resolution.
        Object result = object.eGet(TextualCommonsPackage.Literals.NAMED_ELEMENT__NAME, false);
        if (result != null) {
            return (String) result;
        }
        // No own name set — fall back to the referenced UML element's name.
        return determineNameOfReferencedNamedElement(object,
                TextualCommonsPackage.Literals.UML_REFERENCING_ELEMENT__REFERENCED_ELEMENT);
    }

    /**
     * Determines a name for {@code object} based on the UML reference {@code umlReference}.
     *
     * @param object the model element to name
     * @param umlReference the reference pointing at the UML element
     * @return the concrete syntax text if present, otherwise the referenced element's
     *         name, otherwise {@code null}
     */
    private String determineNameOfReferencedNamedElement(EObject object, EReference umlReference) {
        // Prefer the concrete text from the parse tree (what the user actually wrote).
        List<INode> nodes = NodeModelUtils.findNodesForFeature(object, umlReference);
        if (!nodes.isEmpty()) {
            return normalizeNodeName(NodeModelUtils.getTokenText(nodes.get(0)));
        }
        // No node model available — fall back to the resolved reference target.
        Object referencedElement = object.eGet(umlReference, false);
        if (referencedElement instanceof NamedElement) {
            return ((NamedElement) referencedElement).getName();
        }
        return null;
    }

    /**
     * Strips surrounding double quotes from quoted identifiers ({@code "foo"} becomes
     * {@code foo}); other values are returned unchanged.
     */
    private static String normalizeNodeName(String nodeName) {
        if (nodeName != null && nodeName.matches("\\\".*\\\"")) {
            // Idiomatic fix: substring instead of subSequence(...).toString().
            return nodeName.substring(1, nodeName.length() - 1);
        }
        return nodeName;
    }
}
Cooperate-Project/Cooperate
bundles/de.cooperateproject.modeling.textual.common/src/de/cooperateproject/modeling/textual/common/naming/NameSwitch.java
Java
epl-1.0
2,176
/**
 * Copyright (c) 2005-2013 by Appcelerator, Inc. All Rights Reserved.
 * Licensed under the terms of the Eclipse Public License (EPL).
 * Please see the license.txt included with this distribution for details.
 * Any modifications to this file must keep this entire header intact.
 */
/*
 * Author: atotic
 * Created: Aug 16, 2003
 */
package org.python.pydev.debug.ui.launching;

import java.io.IOException;

import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.debug.core.model.ILaunchConfigurationDelegate;
import org.eclipse.debug.core.model.LaunchConfigurationDelegate;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.swt.widgets.Display;
import org.python.pydev.core.MisconfigurationException;
import org.python.pydev.core.log.Log;
import org.python.pydev.debug.core.Constants;
import org.python.pydev.debug.core.PydevDebugPlugin;
import org.python.pydev.shared_core.SharedCorePlugin;
import org.python.pydev.shared_ui.EditorUtils;

/**
 *
 * Launcher for the python scripts.
 *
 * <p>The code is pretty much copied from ExternalTools' ProgramLaunchDelegate.
 * <p>I would have subclassed, but ProgramLaunchDelegate hides important internals
 *
 * Based on org.eclipse.ui.externaltools.internal.program.launchConfigurations.ProgramLaunchDelegate
 *
 * Build order based on org.eclipse.jdt.launching.AbstractJavaLaunchConfigurationDelegate
 */
public abstract class AbstractLaunchConfigurationDelegate extends LaunchConfigurationDelegate implements
        ILaunchConfigurationDelegate {

    /** Projects to build before launching; computed in {@link #preLaunchCheck}. */
    private IProject[] fOrderedProjects;

    /**
     * We need to reimplement this method (otherwise, all the projects in the workspace will be rebuilt, and not only
     * the ones referenced in the configuration).
     */
    @Override
    protected IProject[] getBuildOrder(ILaunchConfiguration configuration, String mode) throws CoreException {
        return fOrderedProjects;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.eclipse.debug.core.model.ILaunchConfigurationDelegate2#preLaunchCheck(org.eclipse.debug.core.ILaunchConfiguration,
     * java.lang.String, org.eclipse.core.runtime.IProgressMonitor)
     */
    @Override
    public boolean preLaunchCheck(ILaunchConfiguration configuration, String mode, IProgressMonitor monitor)
            throws CoreException {
        // build project list: only the project referenced by the configuration
        // (and its referenced projects) is rebuilt, not the whole workspace
        fOrderedProjects = null;
        String projName = configuration.getAttribute(Constants.ATTR_PROJECT, "");
        if (projName.length() > 0) {
            IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(projName);
            if (project != null) {
                fOrderedProjects = computeReferencedBuildOrder(new IProject[] { project });
            }
        }
        // do generic launch checks
        return super.preLaunchCheck(configuration, mode, monitor);
    }

    /**
     * Launches the python process.
     *
     * Modelled after Ant & Java runners
     * see WorkbenchLaunchConfigurationDelegate::launch
     */
    @Override
    public void launch(ILaunchConfiguration conf, String mode, ILaunch launch, IProgressMonitor monitor)
            throws CoreException {

        if (monitor == null) {
            monitor = new NullProgressMonitor();
        }
        monitor.beginTask("Preparing configuration", 3);
        try {
            PythonRunnerConfig runConfig = new PythonRunnerConfig(conf, mode, getRunnerConfigRun(conf, mode, launch));

            monitor.worked(1);
            try {
                PythonRunner.run(runConfig, launch, monitor);
            } catch (IOException e) {
                Log.log(e);
                finishLaunchWithError(launch);
                throw new CoreException(PydevDebugPlugin.makeStatus(IStatus.ERROR,
                        "Unexpected IO Exception in Pydev debugger", null));
            }
        } catch (final InvalidRunException e) {
            handleError(launch, e);
        } catch (final MisconfigurationException e) {
            handleError(launch, e);
        } finally {
            // FIX: complete the IProgressMonitor protocol — beginTask() was never
            // balanced by done(), leaving the progress UI in an inconsistent state.
            monitor.done();
        }
    }

    /** Reports the error to the user asynchronously and tears down the launch. */
    private void handleError(ILaunch launch, final Exception e) {
        Display.getDefault().asyncExec(new Runnable() {

            @Override
            public void run() {
                ErrorDialog.openError(EditorUtils.getShell(), "Invalid launch configuration",
                        "Unable to make launch because launch configuration is not valid",
                        SharedCorePlugin.makeStatus(IStatus.ERROR, e.getMessage(), e));
            }
        });
        finishLaunchWithError(launch);
    }

    /** Best-effort cleanup: terminates the launch and removes it from the manager. */
    private void finishLaunchWithError(ILaunch launch) {
        try {
            launch.terminate();

            ILaunchManager launchManager = DebugPlugin.getDefault().getLaunchManager();
            launchManager.removeLaunch(launch);
        } catch (Throwable x) {
            // intentionally broad: cleanup failures must not mask the original error
            Log.log(x);
        }
    }

    /**
     * @return the mode we should use to run it...
     *
     * @see PythonRunnerConfig#RUN_REGULAR
     * @see PythonRunnerConfig#RUN_COVERAGE
     * @see PythonRunnerConfig#RUN_UNITTEST
     * @see PythonRunnerConfig#RUN_JYTHON_UNITTEST
     * @see PythonRunnerConfig#RUN_JYTHON
     * @see PythonRunnerConfig#RUN_IRONPYTHON
     * @see PythonRunnerConfig#RUN_IRONPYTHON_UNITTEST
     */
    protected abstract String getRunnerConfigRun(ILaunchConfiguration conf, String mode, ILaunch launch);
}
akurtakov/Pydev
plugins/org.python.pydev.debug/src/org/python/pydev/debug/ui/launching/AbstractLaunchConfigurationDelegate.java
Java
epl-1.0
5,817
/******************************************************************************* * Copyright (c) 2011, 2013 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package com.ibm.ws.resource.internal; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.sql.Connection; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Assert; import org.junit.Test; import com.ibm.ws.javaee.dd.common.ResourceRef; import com.ibm.ws.resource.ResourceRefConfig; import com.ibm.ws.resource.ResourceRefInfo.Property; public class ResourceRefConfigTest { private static ResourceRefConfigImpl serializeAndDeserialize(ResourceRefConfigImpl rrc) throws IOException, ClassNotFoundException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos); oos.writeObject(rrc); oos.close(); ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); ObjectInputStream ois = new ObjectInputStream(bais); return (ResourceRefConfigImpl) ois.readObject(); } private static ResourceRefConfigImpl[] serializeAndDeserializePair(ResourceRefConfigImpl rrc) throws IOException, ClassNotFoundException { return new ResourceRefConfigImpl[] { rrc, serializeAndDeserialize(rrc) }; } @Test public void testCtor() throws Exception { for (ResourceRefConfigImpl rrc : serializeAndDeserializePair(new ResourceRefConfigImpl(null, null))) { Assert.assertEquals(null, rrc.getName()); Assert.assertEquals(null, rrc.getType()); } for 
(ResourceRefConfigImpl rrc : serializeAndDeserializePair(new ResourceRefConfigImpl("name", "type"))) { Assert.assertEquals("name", rrc.getName()); Assert.assertEquals("type", rrc.getType()); } } @Test public void testToString() { // Ensure it doesn't throw. new ResourceRefConfigImpl(null, null).toString(); } @Test public void testDescription() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertNull(rrcCopy.getDescription()); } rrc.setDescription("desc"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals("desc", rrcCopy.getDescription()); } } @Test public void testType() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); rrc.setType("type"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals("type", rrcCopy.getType()); } } @Test public void testAuth() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(ResourceRef.AUTH_APPLICATION, rrcCopy.getAuth()); } rrc.setResAuthType(ResourceRef.AUTH_CONTAINER); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(ResourceRef.AUTH_CONTAINER, rrcCopy.getAuth()); } } @Test public void testSharingScope() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(ResourceRef.SHARING_SCOPE_SHAREABLE, rrcCopy.getSharingScope()); } rrc.setSharingScope(ResourceRef.SHARING_SCOPE_UNSHAREABLE); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(ResourceRef.SHARING_SCOPE_UNSHAREABLE, rrcCopy.getSharingScope()); } } @Test public void testBindigName() throws Exception { 
ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertNull(rrcCopy.getJNDIName()); } rrc.setJNDIName("bind"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals("bind", rrcCopy.getJNDIName()); } } @Test public void testLoginConfigurationName() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertNull(rrcCopy.getLoginConfigurationName()); } rrc.setLoginConfigurationName("lcn"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals("lcn", rrcCopy.getLoginConfigurationName()); } } @Test public void testLoginProperties() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(Collections.emptyList(), rrcCopy.getLoginPropertyList()); } rrc.addLoginProperty("n1", "v1"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(1, rrcCopy.getLoginPropertyList().size()); Assert.assertEquals("n1", rrcCopy.getLoginPropertyList().get(0).getName()); Assert.assertEquals("v1", rrcCopy.getLoginPropertyList().get(0).getValue()); } rrc.addLoginProperty("n2", "v2"); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(2, rrcCopy.getLoginPropertyList().size()); Assert.assertEquals("n2", rrcCopy.getLoginPropertyList().get(1).getName()); Assert.assertEquals("v2", rrcCopy.getLoginPropertyList().get(1).getValue()); } rrc.clearLoginProperties(); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(Collections.emptyList(), rrcCopy.getLoginPropertyList()); } } @Test public void testIsolationLevel() throws Exception { ResourceRefConfigImpl rrc = new 
ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(Connection.TRANSACTION_NONE, rrcCopy.getIsolationLevel()); } for (int isoLevel : new int[] { Connection.TRANSACTION_READ_COMMITTED, Connection.TRANSACTION_READ_UNCOMMITTED, Connection.TRANSACTION_REPEATABLE_READ, Connection.TRANSACTION_SERIALIZABLE }) { rrc.setIsolationLevel(isoLevel); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(isoLevel, rrcCopy.getIsolationLevel()); } } } @Test public void testCommitPriority() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(0, rrcCopy.getCommitPriority()); } rrc.setCommitPriority(1); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(1, rrcCopy.getCommitPriority()); } } @Test public void testBranchCoupling() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl(null, null); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(ResourceRefConfig.BRANCH_COUPLING_UNSET, rrcCopy.getBranchCoupling()); } for (int bc : new int[] { ResourceRefConfig.BRANCH_COUPLING_LOOSE, ResourceRefConfig.BRANCH_COUPLING_TIGHT }) { rrc.setBranchCoupling(bc); for (ResourceRefConfigImpl rrcCopy : serializeAndDeserializePair(rrc)) { Assert.assertEquals(bc, rrcCopy.getBranchCoupling()); } } } @Test public void testBranchCouplingMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_TIGHT); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts 
Assert.assertEquals(0, conflicts.size()); mrrcs[1].setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_LOOSE); conflicts = merge(rrc, mrrcs); // assert conflict Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("branch-coupling", conflict.getAttributeName()); Assert.assertEquals("TIGHT", conflict.getValue1()); Assert.assertEquals("LOOSE", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals(ResourceRefConfig.BRANCH_COUPLING_TIGHT, rrc.getBranchCoupling()); } @Test public void testCommitPriorityMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setCommitPriority(1); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); mrrcs[1].setCommitPriority(2); conflicts = merge(rrc, mrrcs); // assert conflict Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("commit-priority", conflict.getAttributeName()); Assert.assertEquals("1", conflict.getValue1()); Assert.assertEquals("2", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals(1, rrc.getCommitPriority()); } @Test public void testIsolationLevelMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setIsolationLevel(Connection.TRANSACTION_SERIALIZABLE); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); 
mrrcs[1].setIsolationLevel(Connection.TRANSACTION_NONE); conflicts = merge(rrc, mrrcs); // assert conflicts Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("isolation-level", conflict.getAttributeName()); Assert.assertEquals("TRANSACTION_SERIALIZABLE", conflict.getValue1()); Assert.assertEquals("TRANSACTION_NONE", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals(Connection.TRANSACTION_SERIALIZABLE, rrc.getIsolationLevel()); } @Test public void testBindingNameMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setJNDIName("jndiName0"); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); mrrcs[1].setJNDIName("jndiName1"); conflicts = merge(rrc, mrrcs); // assert conflicts Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("binding-name", conflict.getAttributeName()); Assert.assertEquals("jndiName0", conflict.getValue1()); Assert.assertEquals("jndiName1", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals("jndiName0", rrc.getJNDIName()); } @Test public void testLoginConfigurationNameMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setLoginConfigurationName("loginCfg0"); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); 
mrrcs[1].setLoginConfigurationName("loginCfg1"); // assert conflicts conflicts = merge(rrc, mrrcs); Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("custom-login-configuration", conflict.getAttributeName()); Assert.assertEquals("loginCfg0", conflict.getValue1()); Assert.assertEquals("loginCfg1", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals("loginCfg0", rrc.getLoginConfigurationName()); } @Test public void testAuthenticationAliasNameMerge() throws Exception { final String AUTHENTICATION_ALIAS_LOGIN_NAME = "DefaultPrincipalMapping"; ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].addLoginProperty(AUTHENTICATION_ALIAS_LOGIN_NAME, "bob"); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); mrrcs[1].addLoginProperty(AUTHENTICATION_ALIAS_LOGIN_NAME, "joe"); // assert conflicts conflicts = merge(rrc, mrrcs); Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("authentication-alias", conflict.getAttributeName()); Assert.assertEquals("bob", conflict.getValue1()); Assert.assertEquals("joe", conflict.getValue2()); // assert resulting config (first value should be used) Property prop = rrc.getLoginPropertyList().get(0); Assert.assertNotNull(prop); Assert.assertEquals(AUTHENTICATION_ALIAS_LOGIN_NAME, prop.getName()); Assert.assertEquals("bob", prop.getValue()); } @Test public void testLoginPropertyMerge() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[2]; mrrcs[0] = new ResourceRefConfigImpl("ref2", 
"type"); mrrcs[0].addLoginProperty("prop1", "bob"); mrrcs[0].addLoginProperty("prop2", "bob2"); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); mrrcs[1].addLoginProperty("prop1", "joe"); // assert conflict conflicts = merge(rrc, mrrcs); Assert.assertEquals(1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("custom-login-configuration prop1", conflict.getAttributeName()); Assert.assertEquals("bob", conflict.getValue1()); Assert.assertEquals("joe", conflict.getValue2()); // assert resulting config (first value should be used) Property prop = rrc.getLoginPropertyList().get(0); Assert.assertNotNull(prop); Assert.assertEquals("prop1", prop.getName()); Assert.assertEquals("bob", prop.getValue()); } @Test public void testMultiMergeConflict() throws Exception { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("baseref", "type"); ResourceRefConfigImpl[] mrrcs = new ResourceRefConfigImpl[3]; mrrcs[0] = new ResourceRefConfigImpl("ref2", "type"); mrrcs[0].setLoginConfigurationName("loginCfg0"); mrrcs[1] = serializeAndDeserialize(mrrcs[0]); mrrcs[2] = serializeAndDeserialize(mrrcs[0]); List<ResourceRefConfig.MergeConflict> conflicts = merge(rrc, mrrcs); // assert no conflicts Assert.assertEquals(0, conflicts.size()); mrrcs[1].setLoginConfigurationName("loginCfg1"); mrrcs[2].setLoginConfigurationName("loginCfg2"); // assert conflicts conflicts = merge(rrc, mrrcs); Assert.assertEquals(2, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals("custom-login-configuration", conflict.getAttributeName()); Assert.assertEquals("loginCfg0", conflict.getValue1()); Assert.assertEquals("loginCfg1", conflict.getValue2()); conflict = conflicts.get(1); Assert.assertEquals("custom-login-configuration", conflict.getAttributeName()); Assert.assertEquals("loginCfg0", 
conflict.getValue1()); Assert.assertEquals("loginCfg2", conflict.getValue2()); // assert resulting config (first value should be used) Assert.assertEquals("loginCfg0", rrc.getLoginConfigurationName()); } private List<ResourceRefConfig.MergeConflict> merge(ResourceRefConfig rrc, ResourceRefConfig[] rrcs) { List<ResourceRefConfig.MergeConflict> conflicts = new ArrayList<ResourceRefConfig.MergeConflict>(); rrc.mergeBindingsAndExtensions(rrcs, conflicts); return conflicts; } @Test public void testCompareDefaults() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); List<ResourceRefConfig.MergeConflict> conflicts = rrc1.compareBindingsAndExtensions(rrc2); Assert.assertTrue(conflicts.toString(), conflicts.isEmpty()); } @Test public void testCompareSetDefaults() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setLoginConfigurationName(null); rrc2.setIsolationLevel(Connection.TRANSACTION_NONE); rrc2.setCommitPriority(0); List<ResourceRefConfig.MergeConflict> conflicts = rrc1.compareBindingsAndExtensions(rrc2); Assert.assertTrue(conflicts.toString(), conflicts.isEmpty()); } @Test public void testCompareEqual() throws Exception { List<ResourceRefConfigImpl> rrcs = new ArrayList<ResourceRefConfigImpl>(); for (int i = 0; i < 2; i++) { ResourceRefConfigImpl rrc = new ResourceRefConfigImpl("name", "type"); rrc.setLoginConfigurationName("lcn"); rrc.addLoginProperty("name1", "value1"); rrc.addLoginProperty("name2", "value2"); rrc.setIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ); rrc.setCommitPriority(1); rrc.setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_LOOSE); rrcs.add(rrc); } List<ResourceRefConfig.MergeConflict> conflicts = rrcs.get(0).compareBindingsAndExtensions(rrcs.get(1)); Assert.assertTrue(conflicts.toString(), 
conflicts.isEmpty()); } private static void assertCompareConflict(ResourceRefConfig rrc1, ResourceRefConfig rrc2, String attributeName, Object value1, Object value2) { List<ResourceRefConfig.MergeConflict> conflicts = rrc1.compareBindingsAndExtensions(rrc2); Assert.assertEquals(conflicts.toString(), 1, conflicts.size()); ResourceRefConfig.MergeConflict conflict = conflicts.get(0); Assert.assertEquals(conflict.toString(), attributeName, conflict.getAttributeName()); Assert.assertEquals(conflict.toString(), 0, conflict.getIndex1()); Assert.assertEquals(conflict.toString(), value1, conflict.getValue1()); Assert.assertEquals(conflict.toString(), 1, conflict.getIndex2()); Assert.assertEquals(conflict.toString(), value2, conflict.getValue2()); } @Test public void testCompareLoginConfigurationNameConflict() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setLoginConfigurationName("lcn"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration", "null", "lcn"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setLoginConfigurationName("lcn"); rrc2 = new ResourceRefConfigImpl("name", "type"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration", "lcn", "null"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setLoginConfigurationName("lcn1"); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setLoginConfigurationName("lcn2"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration", "lcn1", "lcn2"); } @Test public void testCompareLoginPropertyConflict() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.addLoginProperty("name", "value"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration name", "null", "value"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.addLoginProperty("name", 
"value"); rrc2 = new ResourceRefConfigImpl("name", "type"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration name", "value", "null"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.addLoginProperty("name", "value1"); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.addLoginProperty("name", "value2"); assertCompareConflict(rrc1, rrc2, "custom-login-configuration name", "value1", "value2"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.addLoginProperty("name1", "value"); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.addLoginProperty("name2", "value"); List<ResourceRefConfig.MergeConflict> conflicts = rrc1.compareBindingsAndExtensions(rrc2); Assert.assertEquals(conflicts.toString(), 2, conflicts.size()); for (int i = 0; i < 2; i++) { ResourceRefConfig.MergeConflict conflict = conflicts.get(i); Assert.assertEquals(conflict.toString(), "custom-login-configuration name" + (i + 1), conflict.getAttributeName()); Assert.assertEquals(conflict.toString(), 0, conflict.getIndex1()); Assert.assertEquals(conflict.toString(), i == 0 ? "value" : "null", conflict.getValue1()); Assert.assertEquals(conflict.toString(), 1, conflict.getIndex2()); Assert.assertEquals(conflict.toString(), i == 0 ? 
"null" : "value", conflict.getValue2()); } } @Test public void testCompareIsolationLevelConflict() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ); assertCompareConflict(rrc1, rrc2, "isolation-level", "TRANSACTION_NONE", "TRANSACTION_REPEATABLE_READ"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ); rrc2 = new ResourceRefConfigImpl("name", "type"); assertCompareConflict(rrc1, rrc2, "isolation-level", "TRANSACTION_REPEATABLE_READ", "TRANSACTION_NONE"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setIsolationLevel(Connection.TRANSACTION_SERIALIZABLE); assertCompareConflict(rrc1, rrc2, "isolation-level", "TRANSACTION_REPEATABLE_READ", "TRANSACTION_SERIALIZABLE"); } @Test public void testCompareCommitPriorityConflict() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setCommitPriority(1); assertCompareConflict(rrc1, rrc2, "commit-priority", "0", "1"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setCommitPriority(1); rrc2 = new ResourceRefConfigImpl("name", "type"); assertCompareConflict(rrc1, rrc2, "commit-priority", "1", "0"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setCommitPriority(1); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setCommitPriority(2); assertCompareConflict(rrc1, rrc2, "commit-priority", "1", "2"); } @Test public void testCompareBranchCouplingConflict() throws Exception { ResourceRefConfigImpl rrc1 = new ResourceRefConfigImpl("name", "type"); ResourceRefConfigImpl rrc2 = new ResourceRefConfigImpl("name", "type"); 
rrc2.setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_LOOSE); assertCompareConflict(rrc1, rrc2, "branch-coupling", "null", "LOOSE"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_LOOSE); rrc2 = new ResourceRefConfigImpl("name", "type"); assertCompareConflict(rrc1, rrc2, "branch-coupling", "LOOSE", "null"); rrc1 = new ResourceRefConfigImpl("name", "type"); rrc1.setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_LOOSE); rrc2 = new ResourceRefConfigImpl("name", "type"); rrc2.setBranchCoupling(ResourceRefConfig.BRANCH_COUPLING_TIGHT); assertCompareConflict(rrc1, rrc2, "branch-coupling", "LOOSE", "TIGHT"); } }
kgibm/open-liberty
dev/com.ibm.ws.resource/test/com/ibm/ws/resource/internal/ResourceRefConfigTest.java
Java
epl-1.0
27,596
/*
 * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
 * and is available at http://www.eclipse.org/legal/epl-v10.html
 */

/* This must be included before anything else */
#if HAVE_CONFIG_H
# include <config.h>
#endif

#include <yajr/rpc/methods.hpp>

namespace yajr {
namespace rpc {

/*
 * Handler for an inbound JSON-RPC *error* response whose request was the
 * "endpoint_unresolve" method (selected via the method-pointer template
 * argument on InbErr).
 *
 * The implementation only emits a LOG(ERROR) record with no message payload
 * and takes no recovery action.
 * NOTE(review): presumably an error reply to endpoint_unresolve is purely
 * diagnostic and needs no state change on this side — confirm against the
 * InbErr base class and the opflex protocol spec.
 */
template<>
void InbErr<&yajr::rpc::method::endpoint_unresolve>::process() const {

    LOG(ERROR);  // log-and-ignore: no fields extracted from the error payload

}

} /* namespace rpc */
} /* namespace yajr */
opendaylight/opflex
libopflex/comms/test/handlers/error_response/endpoint_unresolve.cpp
C++
epl-1.0
586
/*******************************************************************************
 * Copyright (c) 2015 UT-Battelle, LLC.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *   Robert Smith
 *******************************************************************************/
package org.eclipse.eavp.viz.modeling.factory;

import org.eclipse.eavp.viz.modeling.base.IMesh;

/**
 * An interface for classes which serve {@code IControllerProvider}s. An
 * {@code IControllerProviderFactory} takes as input an {@link IMesh} and
 * returns an {@code IControllerProvider} which is capable of processing that
 * mesh. An implementation should be specific as to what kind of view and/or
 * controller its providers create, as the same {@code IMesh} may be valid for
 * use with multiple separate implementations of {@code IView} and
 * {@code IController}.
 *
 * @author Robert Smith
 *
 */
public interface IControllerProviderFactory {

	/**
	 * Creates a controller and associated view for the given model.
	 *
	 * @param model
	 *            the model for which a controller will be created
	 * @return the new controller provider, which contains the input model and
	 *         the new view
	 */
	public IControllerProvider createProvider(IMesh model);
}
jarrah42/eavp
org.eclipse.eavp.viz.modeling/src/org/eclipse/eavp/viz/modeling/factory/IControllerProviderFactory.java
Java
epl-1.0
1,456
/**
 * Copyright (c) 2015 Bosch Software Innovations GmbH and others.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 */
package org.eclipse.hawkbit.ddi.rest.resource;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.startsWith;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.hawkbit.ddi.rest.api.DdiRestConstants;
import org.eclipse.hawkbit.repository.model.Action;
import org.eclipse.hawkbit.repository.model.Action.Status;
import org.eclipse.hawkbit.repository.model.DistributionSet;
import org.eclipse.hawkbit.repository.model.Target;
import org.eclipse.hawkbit.repository.test.util.TestdataFactory;
import org.eclipse.hawkbit.rest.util.JsonBuilder;
import org.eclipse.hawkbit.rest.util.MockMvcResultPrinter;
import org.junit.jupiter.api.Test;
import org.springframework.hateoas.MediaTypes;
import org.springframework.http.MediaType;
import org.springframework.integration.json.JsonPathUtils;

import io.qameta.allure.Description;
import io.qameta.allure.Feature;
import io.qameta.allure.Story;

/**
 * Tests the DDI cancel action resource, i.e. the controller-facing REST
 * endpoints used to deliver cancel commands to a device and to receive the
 * device's feedback on them.
 */
@Feature("Component Tests - Direct Device Integration API")
@Story("Cancel Action Resource")
class DdiCancelActionTest extends AbstractDDiApiIntegrationTest {

    @Test
    @Description("Tests that the cancel action resource can be used with CBOR.")
    void cancelActionCbor() throws Exception {
        final DistributionSet ds = testdataFactory.createDistributionSet("");
        testdataFactory.createTarget();
        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));
        final Action cancelAction = deploymentManagement.cancelAction(actionId);

        // check that we can get the cancel action as CBOR
        final byte[] result = mvc
                .perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                        + cancelAction.getId(), tenantAware.getCurrentTenant())
                                .accept(DdiRestConstants.MEDIA_TYPE_CBOR))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(DdiRestConstants.MEDIA_TYPE_CBOR))
                .andReturn().getResponse().getContentAsByteArray();
        // the CBOR payload must carry the cancel action id and reference the
        // original (stopped) action id
        assertThat(JsonPathUtils.<String> evaluate(cborToJson(result), "$.id"))
                .isEqualTo(String.valueOf(cancelAction.getId()));
        assertThat(JsonPathUtils.<String> evaluate(cborToJson(result), "$.cancelAction.stopId"))
                .isEqualTo(String.valueOf(actionId));

        // and submit feedback as CBOR
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(jsonToCbor(
                                JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "proceeding")))
                        .contentType(DdiRestConstants.MEDIA_TYPE_CBOR).accept(DdiRestConstants.MEDIA_TYPE_CBOR))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
    }

    @Test
    @Description("Test of the controller can continue a started update even after a cancel command if it so desires.")
    void rootRsCancelActionButContinueAnyway() throws Exception {
        // prepare test data
        final DistributionSet ds = testdataFactory.createDistributionSet("");
        final Target savedTarget = testdataFactory.createTarget();
        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), savedTarget.getControllerId()));
        final Action cancelAction = deploymentManagement.cancelAction(actionId);

        // controller rejects cancellation
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "rejected"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());

        final long current = System.currentTimeMillis();

        // get update action anyway
        mvc.perform(
                get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/deploymentBase/" + actionId,
                        tenantAware.getCurrentTenant()).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.id", equalTo(String.valueOf(actionId))))
                .andExpect(jsonPath("$.deployment.download", equalTo("forced")))
                .andExpect(jsonPath("$.deployment.update", equalTo("forced")))
                .andExpect(jsonPath("$.deployment.chunks[?(@.part=='jvm')].version",
                        contains(ds.findFirstModuleByType(runtimeType).get().getVersion())))
                .andExpect(jsonPath("$.deployment.chunks[?(@.part=='os')].version",
                        contains(ds.findFirstModuleByType(osType).get().getVersion())))
                .andExpect(jsonPath("$.deployment.chunks[?(@.part=='bApp')].version",
                        contains(ds.findFirstModuleByType(appType).get().getVersion())));

        // and finish it
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/deploymentBase/"
                + actionId + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.deploymentActionFeedback(actionId.toString(), "closed", "success"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());

        // check database after test
        assertThat(deploymentManagement.getAssignedDistributionSet(TestdataFactory.DEFAULT_CONTROLLER_ID).get())
                .isEqualTo(ds);
        assertThat(deploymentManagement.getInstalledDistributionSet(TestdataFactory.DEFAULT_CONTROLLER_ID).get())
                .isEqualTo(ds);
        assertThat(
                targetManagement.getByControllerID(TestdataFactory.DEFAULT_CONTROLLER_ID).get().getInstallationDate())
                        .isGreaterThanOrEqualTo(current);
    }

    @Test
    @Description("Test for cancel operation of an update action.")
    void rootRsCancelAction() throws Exception {
        final DistributionSet ds = testdataFactory.createDistributionSet("");
        final Target savedTarget = testdataFactory.createTarget();
        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), savedTarget.getControllerId()));

        // first poll: device sees the deployment link
        final long timeBeforeFirstPoll = System.currentTimeMillis();
        mvc.perform(get("/{tenant}/controller/v1/{controller}", tenantAware.getCurrentTenant(),
                TestdataFactory.DEFAULT_CONTROLLER_ID).accept(MediaTypes.HAL_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.HAL_JSON))
                .andExpect(jsonPath("$.config.polling.sleep", equalTo("00:01:00")))
                .andExpect(jsonPath("$._links.deploymentBase.href",
                        startsWith("http://localhost/" + tenantAware.getCurrentTenant() + "/controller/v1/"
                                + TestdataFactory.DEFAULT_CONTROLLER_ID + "/deploymentBase/" + actionId)));
        final long timeAfterFirstPoll = System.currentTimeMillis() + 1;

        assertThat(targetManagement.getByControllerID(TestdataFactory.DEFAULT_CONTROLLER_ID).get().getLastTargetQuery())
                .isBetween(timeBeforeFirstPoll, timeAfterFirstPoll);

        // Retrieved is reported
        List<Action> activeActionsByTarget = deploymentManagement
                .findActiveActionsByTarget(PAGE, savedTarget.getControllerId()).getContent();
        assertThat(activeActionsByTarget).hasSize(1);
        assertThat(activeActionsByTarget.get(0).getStatus()).isEqualTo(Status.RUNNING);

        final Action cancelAction = deploymentManagement.cancelAction(actionId);

        activeActionsByTarget = deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())
                .getContent();
        // the canceled action should still be active!
        assertThat(cancelAction.isActive()).isTrue();
        assertThat(activeActionsByTarget).hasSize(1);
        assertThat(activeActionsByTarget.get(0).getStatus()).isEqualTo(Status.CANCELING);

        // second poll: deployment link is replaced by the cancel link
        final long timeBefore2ndPoll = System.currentTimeMillis();
        mvc.perform(get("/{tenant}/controller/v1/{controller}", tenantAware.getCurrentTenant(),
                TestdataFactory.DEFAULT_CONTROLLER_ID)).andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.HAL_JSON))
                .andExpect(jsonPath("$.config.polling.sleep", equalTo("00:01:00")))
                .andExpect(jsonPath("$._links.cancelAction.href",
                        equalTo("http://localhost/" + tenantAware.getCurrentTenant() + "/controller/v1/"
                                + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/" + cancelAction.getId())));
        final long timeAfter2ndPoll = System.currentTimeMillis() + 1;

        assertThat(targetManagement.getByControllerID(TestdataFactory.DEFAULT_CONTROLLER_ID).get().getLastTargetQuery())
                .isBetween(timeBefore2ndPoll, timeAfter2ndPoll);

        mvc.perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId(), tenantAware.getCurrentTenant()).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.id", equalTo(String.valueOf(cancelAction.getId()))))
                .andExpect(jsonPath("$.cancelAction.stopId", equalTo(String.valueOf(actionId))));
        assertThat(targetManagement.getByControllerID(TestdataFactory.DEFAULT_CONTROLLER_ID).get().getLastTargetQuery())
                .isLessThanOrEqualTo(System.currentTimeMillis());

        // controller confirmed cancelled action, should not be active anymore
        // NOTE(review): the feedback body references the original actionId
        // while the sibling tests use cancelAction.getId() — verify which id
        // the server actually evaluates here
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(actionId.toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        activeActionsByTarget = deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())
                .getContent();
        assertThat(activeActionsByTarget).isEmpty();
        final Action canceledAction = deploymentManagement.findAction(cancelAction.getId()).get();
        assertThat(canceledAction.isActive()).isFalse();
        assertThat(canceledAction.getStatus()).isEqualTo(Status.CANCELED);
    }

    @Test
    @Description("Tests various bad requests and if the server handles them as expected.")
    void badCancelAction() throws Exception {
        // not allowed methods
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/1",
                tenantAware.getCurrentTenant())).andDo(MockMvcResultPrinter.print())
                .andExpect(status().isMethodNotAllowed());

        mvc.perform(put("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/1",
                tenantAware.getCurrentTenant())).andDo(MockMvcResultPrinter.print())
                .andExpect(status().isMethodNotAllowed());

        mvc.perform(delete("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/1",
                tenantAware.getCurrentTenant())).andDo(MockMvcResultPrinter.print())
                .andExpect(status().isMethodNotAllowed());

        // non existing target
        mvc.perform(get("/{tenant}/controller/v1/34534543/cancelAction/1", tenantAware.getCurrentTenant())
                .accept(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print())
                .andExpect(status().isNotFound());

        createCancelAction("34534543");

        // wrong media type
        mvc.perform(get("/{tenant}/controller/v1/34534543/cancelAction/1", tenantAware.getCurrentTenant())
                .accept(MediaType.APPLICATION_ATOM_XML)).andDo(MockMvcResultPrinter.print())
                .andExpect(status().isNotAcceptable());
    }

    /**
     * Creates a target with the given id, assigns a freshly created
     * distribution set to it and immediately cancels the resulting action.
     *
     * @param targetid
     *            id used for both the created target and the distribution set
     *            name suffix
     * @return the cancel {@link Action} created for the assignment
     */
    private Action createCancelAction(final String targetid) {
        final DistributionSet ds = testdataFactory.createDistributionSet(targetid);
        final Target savedTarget = testdataFactory.createTarget(targetid);
        final List<Target> toAssign = new ArrayList<>();
        toAssign.add(savedTarget);
        final Long actionId = getFirstAssignedActionId(assignDistributionSet(ds, toAssign));

        return deploymentManagement.cancelAction(actionId);
    }

    @Test
    @Description("Tests the feedback channel of the cancel operation.")
    void rootRsCancelActionFeedback() throws Exception {
        final DistributionSet ds = testdataFactory.createDistributionSet("");
        final Target savedTarget = testdataFactory.createTarget();
        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));

        // cancel action manually
        final Action cancelAction = deploymentManagement.cancelAction(actionId);
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(2);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);

        // feedback "proceeding" -> action stays active, one more status entry
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "proceeding"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(3);

        // feedback "resumed" -> action stays active
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "resumed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(4);

        // feedback "scheduled" -> action stays active
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "scheduled"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(5);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);

        // cancellation canceled -> should remove the action from active
        // NOTE(review): the assertion below still expects one active action —
        // confirm whether the comment or the expectation reflects the intent
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "canceled"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(6);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);

        // cancellation rejected -> action still active until controller closes
        // it with finished or error
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "rejected"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(7);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);

        // update closed -> should remove the action from active
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/deploymentBase/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(8);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).isEmpty();
    }

    @Test
    @Description("Tests the feedback channel for multiple open cancel operations on the same target.")
    void multipleCancelActionFeedback() throws Exception {
        final DistributionSet ds = testdataFactory.createDistributionSet("", true);
        final DistributionSet ds2 = testdataFactory.createDistributionSet("2", true);
        final DistributionSet ds3 = testdataFactory.createDistributionSet("3", true);
        final Target savedTarget = testdataFactory.createTarget();

        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));
        final Long actionId2 = getFirstAssignedActionId(
                assignDistributionSet(ds2.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));
        final Long actionId3 = getFirstAssignedActionId(
                assignDistributionSet(ds3.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));

        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(3);

        // 3 update actions, 0 cancel actions
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(3);
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(3);
        final Action cancelAction = deploymentManagement.cancelAction(actionId);
        final Action cancelAction2 = deploymentManagement.cancelAction(actionId2);

        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(3);
        assertThat(deploymentManagement.countActionsByTarget(savedTarget.getControllerId())).isEqualTo(3);

        mvc.perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId(), tenantAware.getCurrentTenant()).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
                .andExpect(jsonPath("$.id", equalTo(String.valueOf(cancelAction.getId()))))
                .andExpect(jsonPath("$.cancelAction.stopId", equalTo(String.valueOf(actionId))));
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(6);

        // the poll resource advertises the first open cancel action
        mvc.perform(get("/{tenant}/controller/v1/{controllerId}", tenantAware.getCurrentTenant(),
                TestdataFactory.DEFAULT_CONTROLLER_ID)).andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.HAL_JSON))
                .andExpect(jsonPath("$.config.polling.sleep", equalTo("00:01:00")))
                .andExpect(jsonPath("$._links.cancelAction.href",
                        equalTo("http://localhost/" + tenantAware.getCurrentTenant() + "/controller/v1/"
                                + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/" + cancelAction.getId())));

        // now lets return feedback for the first cancellation
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(7);

        // 1 update actions, 1 cancel actions
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(2);
        assertThat(deploymentManagement.countActionsByTarget(savedTarget.getControllerId())).isEqualTo(3);

        mvc.perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction2.getId(), tenantAware.getCurrentTenant()).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.id", equalTo(String.valueOf(cancelAction2.getId()))))
                .andExpect(jsonPath("$.cancelAction.stopId", equalTo(String.valueOf(actionId2))));
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(8);

        // the poll resource now advertises the second open cancel action
        mvc.perform(get("/{tenant}/controller/v1/{controller}", tenantAware.getCurrentTenant(),
                TestdataFactory.DEFAULT_CONTROLLER_ID)).andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaTypes.HAL_JSON))
                .andExpect(jsonPath("$.config.polling.sleep", equalTo("00:01:00")))
                .andExpect(jsonPath("$._links.cancelAction.href",
                        equalTo("http://localhost/" + tenantAware.getCurrentTenant() + "/controller/v1/"
                                + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/" + cancelAction2.getId())));

        // now lets return feedback for the second cancellation
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction2.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction2.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(9);

        assertThat(deploymentManagement.getAssignedDistributionSet(TestdataFactory.DEFAULT_CONTROLLER_ID).get())
                .isEqualTo(ds3);
        mvc.perform(
                get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/deploymentBase/" + actionId3,
                        tenantAware.getCurrentTenant()))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(10);

        // 1 update actions, 0 cancel actions
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);

        final Action cancelAction3 = deploymentManagement.cancelAction(actionId3);

        // action is in cancelling state
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).hasSize(1);
        assertThat(deploymentManagement.countActionsByTarget(savedTarget.getControllerId())).isEqualTo(3);
        assertThat(deploymentManagement.getAssignedDistributionSet(TestdataFactory.DEFAULT_CONTROLLER_ID).get())
                .isEqualTo(ds3);

        mvc.perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction3.getId(), tenantAware.getCurrentTenant()).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON))
                .andExpect(jsonPath("$.id", equalTo(String.valueOf(cancelAction3.getId()))))
                .andExpect(jsonPath("$.cancelAction.stopId", equalTo(String.valueOf(actionId3))));
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(12);

        // now lets return feedback for the third cancellation
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction3.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction3.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
        assertThat(deploymentManagement.countActionStatusAll()).isEqualTo(13);

        // final status
        assertThat(deploymentManagement.findActiveActionsByTarget(PAGE, savedTarget.getControllerId())).isEmpty();
        assertThat(deploymentManagement.countActionsByTarget(savedTarget.getControllerId())).isEqualTo(3);
    }

    @Test
    @Description("Tests the feedback channel closing for too many feedbacks, i.e. denial of service prevention.")
    void tooMuchCancelActionFeedback() throws Exception {
        testdataFactory.createTarget();
        final DistributionSet ds = testdataFactory.createDistributionSet("");
        final Long actionId = getFirstAssignedActionId(
                assignDistributionSet(ds.getId(), TestdataFactory.DEFAULT_CONTROLLER_ID));

        final Action cancelAction = deploymentManagement.cancelAction(actionId);
        final String feedback = JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "proceeding");

        // assignDistributionSet creates an ActionStatus and cancel action
        // stores an action status, so only 97 action status left
        for (int i = 0; i < 98; i++) {
            mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                    + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant()).content(feedback)
                            .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                    .andExpect(status().isOk());
        }

        // quota exhausted -> the next feedback must be rejected
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant()).content(feedback)
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andExpect(status().isForbidden());
    }

    @Test
    @Description("test the correct rejection of various invalid feedback requests")
    void badCancelActionFeedback() throws Exception {
        final Action cancelAction = createCancelAction(TestdataFactory.DEFAULT_CONTROLLER_ID);
        createCancelAction("4715");

        // not allowed methods
        mvc.perform(put("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isMethodNotAllowed());

        mvc.perform(delete("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant()))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isMethodNotAllowed());

        mvc.perform(get("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant()))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isMethodNotAllowed());

        // bad content type
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_ATOM_XML).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isUnsupportedMediaType());

        // bad body
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "546456456"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest());

        // non existing target
        mvc.perform(post("/{tenant}/controller/v1/12345/cancelAction/" + cancelAction.getId() + "/feedback",
                tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isNotFound());

        // invalid action
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback("sdfsdfsdfs", "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isBadRequest());

        // finally, get it right :)
        mvc.perform(post("/{tenant}/controller/v1/" + TestdataFactory.DEFAULT_CONTROLLER_ID + "/cancelAction/"
                + cancelAction.getId() + "/feedback", tenantAware.getCurrentTenant())
                        .content(JsonBuilder.cancelActionFeedback(cancelAction.getId().toString(), "closed"))
                        .contentType(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON))
                .andDo(MockMvcResultPrinter.print()).andExpect(status().isOk());
    }
}
eclipse/hawkbit
hawkbit-rest/hawkbit-ddi-resource/src/test/java/org/eclipse/hawkbit/ddi/rest/resource/DdiCancelActionTest.java
Java
epl-1.0
34,040
/******************************************************************************* * Copyright (c) 2015 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ define(["intern!tdd","intern/chai!assert","dojo/Deferred","js/toolbox/toolbox"], function(tdd,assert,Deferred,toolbox) { var server, testBookmark, testFeatureTool; with(assert) { /** * Defines the 'toolbox' module test suite. */ tdd.suite("Toolbox Tests", function() { tdd.before(function() { testBookmark = { id: "myTool", type:"bookmark", name:"myTool", url:"ibm.com", icon:"default.png" }; testToolEntry = { id: "myFeature-1.0", type:"featureTool", }; }); tdd.beforeEach(function() { // Mock the admin center server since it is not available in a unittest server = sinon.fakeServer.create(); }); tdd.afterEach(function() { server.restore(); }); tdd.test("ToolEntry - create from ToolEntry", function() { var tool = new Toolbox.ToolEntry(testToolEntry); console.log("DEBUG", tool); assert.equal(tool.id, testToolEntry.id, "ToolEntry was not constructed with correct value for 'id'"); assert.equal(tool.type, testToolEntry.type, "ToolEntry was not constructed with correct value for 'type'"); assert.isUndefined(tool.name, "ToolEntry should not have a name"); }); tdd.test("ToolEntry - create from Bookmark", function() { var tool = new Toolbox.ToolEntry(testBookmark); console.log("DEBUG", tool); assert.equal(tool.id, testBookmark.id, "ToolEntry was not constructed with correct value for 'id'"); assert.equal(tool.type, testBookmark.type, "ToolEntry was not constructed with correct value for 'type'"); assert.isUndefined(tool.name, "ToolEntry should not have a name even 
though it was created from an Object that had a name"); }); tdd.test("Bookmark - create", function() { var tool = new Toolbox.Bookmark(testBookmark); assert.isUndefined(tool.id, "Tool was constructed with an 'id'"); assert.isUndefined(tool.type, "Tool was constructed with an 'type'"); assert.equal(tool.name, testBookmark.name, "Tool was not constructed with correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Tool was not constructed with correct value for 'url'"); assert.equal(tool.icon, testBookmark.icon, "Tool was not constructed with correct value for 'icon'"); }); tdd.test("Toolbox - construct", function() { var tb = new Toolbox(); assert.isTrue(tb instanceof Toolbox, "Unable to construct Toolbox"); }); tdd.test("Toolbox - get instance", function() { var tb = toolbox.getToolbox(); assert.isTrue(tb instanceof Toolbox, "Unable to get instance of Toolbox"); }); tdd.test("Toolbox - get instance is same instance", function() { var tbNew = new Toolbox(); var tbInst = toolbox.getToolbox(); assert.isFalse(tbInst === tbNew, "The 'singleton' instance of Toolbox should not match a 'new' instance of Toolbox"); assert.isTrue(tbInst === toolbox.getToolbox(), "The 'singleton' instance of Toolbox should match the previous return for the singleton"); }); tdd.test("toolbox.getToolEntries - returns Deferred", function() { assert.isTrue(toolbox.getToolbox().getToolEntries() instanceof Deferred, "Toolbox.getToolEntries should return a Deferred"); }); tdd.test("toolbox.getToolEntries - resolves with Array (no Tools)", function() { var dfd = this.async(1000); toolbox.getToolbox().getToolEntries().then(dfd.callback(function(tools) { assert.isTrue(tools instanceof Array, "Toolbox.getToolEntries should resolve with an Array"); }), function(err) { dfd.reject(err); }); server.respondWith("GET", "/ibm/api/adminCenter/v1/toolbox/toolEntries", [200, { "Content-Type": "application/json" },'{"toolEntries":[]}']); server.respond(); return dfd; }); 
tdd.test("toolbox.getToolEntries - resolves with Array of Tool objects", function() { var dfd = this.async(1000); toolbox.getToolbox().getToolEntries().then(dfd.callback(function(tools) { assert.isTrue(tools instanceof Array, "Toolbox.getToolEntries should resolve with an Array"); assert.equal(tools.length, 1, "Expected exactly 1 tool back from the mock response"); var tool = tools[0]; assert.equal(tool.id, testToolEntry.id, "Tool was not constructed with correct value for 'id'"); assert.equal(tool.type, testToolEntry.type, "Tool was not constructed with correct value for 'type'"); }), function(err) { dfd.reject(err); }); server.respondWith("GET", "/ibm/api/adminCenter/v1/toolbox/toolEntries", [200, { "Content-Type": "application/json" },'['+JSON.stringify(testToolEntry)+']']); server.respond(); return dfd; }); tdd.test("toolbox.getTool - returns Deferred", function() { assert.isTrue(toolbox.getToolbox().getTool('myTool') instanceof Deferred, "Toolbox.getTool should return a Deferred"); }); tdd.test("toolbox.getTool - returns a Tool", function() { var dfd = this.async(1000); toolbox.getToolbox().getTool('myTool').then(dfd.callback(function(tool) { assert.equal(tool.id, testBookmark.id, "Returned tool did not have correct value for 'id'"); assert.equal(tool.type, testBookmark.type, "Returned tool did not have correct value for 'type'"); assert.equal(tool.name, testBookmark.name, "Returned tool did not have correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Returned tool did not have correct value for 'url'"); assert.equal(tool.icon, testBookmark.icon, "Returned tool did not have correct value for 'icon'"); }), function(err) { dfd.reject(err); }); server.respondWith("GET", "/ibm/api/adminCenter/v1/toolbox/toolEntries/myTool", [200, { "Content-Type": "application/json" }, JSON.stringify(testBookmark)]); server.respond(); return dfd; }); tdd.test("Toolbox.getTool - filtered Tool", function() { var dfd = this.async(1000); 
toolbox.getToolbox().getTool('myTool', 'name,url').then(dfd.callback(function(tool) { assert.equal(tool.id, null, "Returned tool was filtered and should not have an 'id'"); assert.equal(tool.type, null, "Returned tool was filtered and should not have a 'type'"); assert.equal(tool.name, testBookmark.name, "Returned tool did not have correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Returned tool did not have correct value for 'url'"); assert.equal(tool.icon, null, "Returned tool was filtered and should not have an 'icon'"); }), function(err) { dfd.reject(err); }); server.respondWith("GET", "/ibm/api/adminCenter/v1/toolbox/toolEntries/myTool?fields=name,url", [200, { "Content-Type": "application/json" }, '{"name":"'+testBookmark.name+'","url":"'+testBookmark.url+'"}']); server.respond(); return dfd; }); tdd.test("Toolbox.getTool - no provided Tool ID", function() { try { toolbox.getToolbox().getTool(); assert.isTrue(false, "Toolbox.getTool should throw an error when no tool ID is provided"); } catch(err) { // Pass } }); tdd.test("Toolbox.addToolEntry - returns Deferred", function() { assert.isTrue(toolbox.getToolbox().addToolEntry(testBookmark) instanceof Deferred, "Toolbox.addToolEntry should return a Deferred"); }); tdd.test("Toolbox.addToolEntry - returns the created ToolEntry", function() { var dfd = this.async(1000); toolbox.getToolbox().addToolEntry(testBookmark).then(dfd.callback(function(tool) { assert.equal(tool.id, testBookmark.id, "Returned tool did not have correct value for 'id'"); assert.equal(tool.type, testBookmark.type, "Returned tool did not have correct value for 'type'"); assert.equal(tool.name, testBookmark.name, "Returned tool did not have correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Returned tool did not have correct value for 'url'"); assert.equal(tool.icon, testBookmark.icon, "Returned tool did not have correct value for 'icon'"); }), function(err) { dfd.reject(err); }); server.respondWith("POST", 
"/ibm/api/adminCenter/v1/toolbox/toolEntries", [201, { "Content-Type": "application/json" }, JSON.stringify(testBookmark)]); server.respond(); return dfd; }); tdd.test("Toolbox.addToolEntry - no provided ToolEntry props", function() { try { toolbox.getToolbox().addToolEntry(); assert.isTrue(false, "Toolbox.addToolEntry should throw an error when no tool ID is provided"); } catch(err) { // Pass } }); tdd.test("Toolbox.addBookmark - returns Deferred", function() { assert.isTrue(toolbox.getToolbox().addBookmark(testBookmark) instanceof Deferred, "Toolbox.addBookmark should return a Deferred"); }); tdd.test("Toolbox.addBookmark - returns the created Bookmark", function() { var dfd = this.async(1000); toolbox.getToolbox().addBookmark(testBookmark).then(dfd.callback(function(tool) { assert.equal(tool.id, testBookmark.id, "Returned tool did not have correct value for 'id'"); assert.equal(tool.type, testBookmark.type, "Returned tool did not have correct value for 'type'"); assert.equal(tool.name, testBookmark.name, "Returned tool did not have correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Returned tool did not have correct value for 'url'"); assert.equal(tool.icon, testBookmark.icon, "Returned tool did not have correct value for 'icon'"); }), function(err) { dfd.reject(err); }); server.respondWith("POST", "/ibm/api/adminCenter/v1/toolbox/bookmarks", [201, { "Content-Type": "application/json" }, JSON.stringify(testBookmark)]); server.respond(); return dfd; }); tdd.test("Toolbox.addBookmark - no provided Bookmark props", function() { try { toolbox.getToolbox().addBookmark(); assert.isTrue(false, "Toolbox.addBookmark should throw an error when no tool ID is provided"); } catch(err) { // Pass } }); tdd.test("Toolbox.deleteTool - returns Deferred", function() { assert.isTrue(toolbox.getToolbox().deleteTool('myTool') instanceof Deferred, "Toolbox.deleteTool should return a Deferred"); }); tdd.test("Toolbox.deleteTool - returns the deleted entry's JSON", 
function() { var dfd = this.async(1000); toolbox.getToolbox().deleteTool(testBookmark.id).then(dfd.callback(function(tool) { assert.equal(tool.id, testBookmark.id, "Returned tool did not have correct value for 'id'"); assert.equal(tool.type, testBookmark.type, "Returned tool did not have correct value for 'type'"); assert.equal(tool.name, testBookmark.name, "Returned tool did not have correct value for 'name'"); assert.equal(tool.url, testBookmark.url, "Returned tool did not have correct value for 'url'"); assert.equal(tool.icon, testBookmark.icon, "Returned tool did not have correct value for 'icon'"); }), function(err) { dfd.reject(err); }); server.respondWith("DELETE", "/ibm/api/adminCenter/v1/toolbox/toolEntries/myTool", [200, { "Content-Type": "application/json" }, JSON.stringify(testBookmark)]); server.respond(); return dfd; }); tdd.test("Toolbox.deleteTool - no provided Tool ID", function() { try { toolbox.getToolbox().deleteTool(); assert.isTrue(false, "Toolbox.deleteTool should throw an error when no tool ID is provided"); } catch(err) { // Pass } }); }); } });
OpenLiberty/open-liberty
dev/com.ibm.ws.ui/resources/WEB-CONTENT/unittest/toolbox/toolboxTests.js
JavaScript
epl-1.0
14,570
/*******************************************************************************
 * Copyright (c) 2017, 2021 Red Hat Inc and others
 *
 * This program and the accompanying materials are made
 * available under the terms of the Eclipse Public License 2.0
 * which is available at https://www.eclipse.org/legal/epl-2.0/
 *
 * SPDX-License-Identifier: EPL-2.0
 *
 * Contributors:
 *     Red Hat Inc - initial API and implementation
 *******************************************************************************/
package org.eclipse.kapua.kura.simulator;

import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.eclipse.kapua.kura.simulator.app.Application;
import org.eclipse.kapua.kura.simulator.app.ApplicationController;
import org.eclipse.kapua.kura.simulator.birth.BirthCertificateModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A default Kura simulator.
 * <p>
 * Wires an {@link ApplicationController} and a {@link BirthCertificateModule}
 * to the given {@link Transport} and then connects it. On every
 * connect/disconnect the registered modules are notified; a module that
 * throws is logged and skipped so the remaining modules still run.
 */
public class Simulator implements AutoCloseable {

    private static final Logger logger = LoggerFactory.getLogger(Simulator.class);

    protected final Transport transport;
    protected List<Module> modules = new LinkedList<>();

    /**
     * Create and start a new simulator.
     *
     * @param configuration the gateway configuration, used to build the birth certificate
     * @param transport the transport to attach to; this constructor calls {@code connect()} on it
     * @param applications the simulated applications handed to the application controller
     */
    public Simulator(final GatewayConfiguration configuration, final Transport transport, final Set<Application> applications) {
        this.transport = transport;

        // register the lifecycle callbacks before connecting so no state change is missed
        this.transport.whenConnected(this::connected);
        this.transport.whenDisconnected(this::disconnected);

        // the application controller is itself a module and must be notified of lifecycle events
        final ApplicationController controller = new ApplicationController(transport, applications);
        this.modules.add(controller);

        // announces the gateway and its application IDs whenever the connection comes up
        this.modules.add(new BirthCertificateModule(configuration, controller::getApplicationIds));

        // everything is wired up - bring the transport online
        this.transport.connect();
    }

    @Override
    public void close() {
        // intentionally empty: the transport is owned by the caller and is not closed here
    }

    protected void connected() {
        logger.info("Connected ... sending birth certificate ...");
        notifyModules(true);
    }

    protected void disconnected() {
        notifyModules(false);
    }

    /**
     * Forward a lifecycle event to every module, isolating failures.
     *
     * @param nowConnected {@code true} to deliver the connected event,
     *            {@code false} for the disconnected event
     */
    private void notifyModules(final boolean nowConnected) {
        for (final Module m : this.modules) {
            try {
                if (nowConnected) {
                    m.connected(this.transport);
                } else {
                    m.disconnected(this.transport);
                }
            } catch (final Exception e) {
                // a broken module must not prevent the others from being called
                logger.warn("Failed to call module: {}", m, e);
            }
        }
    }
}
stzilli/kapua
simulator-kura/src/main/java/org/eclipse/kapua/kura/simulator/Simulator.java
Java
epl-1.0
2,615
/* -*- Mode: C++ -*- */ #include "test.h" #include "random.h" #include "sizes.h" template <typename Constants> class Regtest { public: typedef typename Constants::Sizes Sizes; struct Options { size_t encode_srcwin_maxsz; }; #include "segment.h" #include "modify.h" #include "file.h" #include "cmp.h" #include "delta.h" void InMemoryEncodeDecode(const FileSpec &source_file, const FileSpec &target_file, Block *coded_data, const Options &options = Options()) { xd3_stream encode_stream; xd3_config encode_config; xd3_source encode_source; xd3_stream decode_stream; xd3_config decode_config; xd3_source decode_source; xoff_t verified_bytes = 0; xoff_t encoded_bytes = 0; if (coded_data) { coded_data->Reset(); } memset(&encode_stream, 0, sizeof (encode_stream)); memset(&encode_source, 0, sizeof (encode_source)); memset(&decode_stream, 0, sizeof (decode_stream)); memset(&decode_source, 0, sizeof (decode_source)); xd3_init_config(&encode_config, XD3_ADLER32); xd3_init_config(&decode_config, XD3_ADLER32); encode_config.winsize = Constants::WINDOW_SIZE; encode_config.srcwin_maxsz = options.encode_srcwin_maxsz; // TODO! 
the smatcher setup isn't working, // if (options.large_cksum_step) { // encode_config.smatch_cfg = XD3_SMATCH_SOFT; // encode_config.smatcher_soft.large_step = options.large_cksum_step; // } // if (options.large_cksum_size) { // encode_config.smatch_cfg = XD3_SMATCH_SOFT; // encode_config.smatcher_soft.large_look = options.large_cksum_size; // } CHECK_EQ(0, xd3_config_stream (&encode_stream, &encode_config)); CHECK_EQ(0, xd3_config_stream (&decode_stream, &decode_config)); encode_source.blksize = Constants::BLOCK_SIZE; decode_source.blksize = Constants::BLOCK_SIZE; xd3_set_source (&encode_stream, &encode_source); xd3_set_source (&decode_stream, &decode_source); BlockIterator source_iterator(source_file, Constants::BLOCK_SIZE); BlockIterator target_iterator(target_file, Constants::READ_SIZE); Block encode_source_block, decode_source_block; Block decoded_block, target_block; bool encoding = true; bool done = false; bool done_after_input = false; IF_DEBUG1 (DP(RINT "source %"Q"u[%"Q"u] target %"Q"u[%lu] winsize %lu\n", source_file.Size(), Constants::BLOCK_SIZE, target_file.Size(), Constants::READ_SIZE, Constants::WINDOW_SIZE)); while (!done) { target_iterator.Get(&target_block); xoff_t blks = target_iterator.Blocks(); IF_DEBUG2(DP(RINT "target in %s: %llu..%llu %"Q"u(%"Q"u) verified %"Q"u\n", encoding ? "encoding" : "decoding", target_iterator.Offset(), target_iterator.Offset() + target_block.Size(), target_iterator.Blkno(), blks, verified_bytes)); if (blks == 0 || target_iterator.Blkno() == (blks - 1)) { xd3_set_flags(&encode_stream, XD3_FLUSH | encode_stream.flags); } xd3_avail_input(&encode_stream, target_block.Data(), target_block.Size()); encoded_bytes += target_block.Size(); process: int ret; const char *msg; if (encoding) { ret = xd3_encode_input(&encode_stream); msg = encode_stream.msg; } else { ret = xd3_decode_input(&decode_stream); msg = decode_stream.msg; } IF_DEBUG1(DP(RINT "%s = %s %s\n", encoding ? "E " : " D", xd3_strerror(ret), msg == NULL ? 
"" : msg)); switch (ret) { case XD3_OUTPUT: if (encoding) { if (coded_data != NULL) { // Optional encoded-output to the caller coded_data->Append(encode_stream.next_out, encode_stream.avail_out); } // Feed this data to the decoder. xd3_avail_input(&decode_stream, encode_stream.next_out, encode_stream.avail_out); xd3_consume_output(&encode_stream); encoding = false; } else { decoded_block.Append(decode_stream.next_out, decode_stream.avail_out); xd3_consume_output(&decode_stream); } goto process; case XD3_GETSRCBLK: { xd3_source *src = (encoding ? &encode_source : &decode_source); Block *block = (encoding ? &encode_source_block : &decode_source_block); if (encoding) { IF_DEBUG1(DP(RINT "[srcblock] %"Q"u last srcpos %"Q"u " "encodepos %"Q"u\n", encode_source.getblkno, encode_stream.match_last_srcpos, encode_stream.input_position + encode_stream.total_in)); } source_iterator.SetBlock(src->getblkno); source_iterator.Get(block); src->curblkno = src->getblkno; src->onblk = block->Size(); src->curblk = block->Data(); goto process; } case XD3_INPUT: if (!encoding) { encoding = true; goto process; } else { if (done_after_input) { done = true; continue; } if (target_block.Size() < target_iterator.BlockSize()) { encoding = false; } else { target_iterator.Next(); } continue; } case XD3_WINFINISH: if (encoding) { if (encode_stream.flags & XD3_FLUSH) { done_after_input = true; } encoding = false; } else { CHECK_EQ(0, CmpDifferentBlockBytesAtOffset(decoded_block, target_file, verified_bytes)); verified_bytes += decoded_block.Size(); decoded_block.Reset(); encoding = true; } goto process; case XD3_WINSTART: case XD3_GOTHEADER: goto process; default: CHECK_EQ(0, ret); CHECK_EQ(-1, ret); } } CHECK_EQ(target_file.Size(), encoded_bytes); CHECK_EQ(target_file.Size(), verified_bytes); CHECK_EQ(0, xd3_close_stream(&decode_stream)); CHECK_EQ(0, xd3_close_stream(&encode_stream)); xd3_free_stream(&encode_stream); xd3_free_stream(&decode_stream); } 
////////////////////////////////////////////////////////////////////// void TestRandomNumbers() { MTRandom rand; int rounds = 1<<20; uint64_t usum = 0; uint64_t esum = 0; for (int i = 0; i < rounds; i++) { usum += rand.Rand32(); esum += rand.ExpRand32(1024); } double allowed_error = 0.01; uint32_t umean = usum / rounds; uint32_t emean = esum / rounds; uint32_t uexpect = UINT32_MAX / 2; uint32_t eexpect = 1024; if (umean < uexpect * (1.0 - allowed_error) || umean > uexpect * (1.0 + allowed_error)) { cerr << "uniform mean error: " << umean << " != " << uexpect << endl; abort(); } if (emean < eexpect * (1.0 - allowed_error) || emean > eexpect * (1.0 + allowed_error)) { cerr << "exponential mean error: " << emean << " != " << eexpect << endl; abort(); } } void TestRandomFile() { MTRandom rand1; FileSpec spec1(&rand1); BlockIterator bi(spec1); spec1.GenerateFixedSize(0); CHECK_EQ(0, spec1.Size()); CHECK_EQ(0, spec1.Segments()); CHECK_EQ(0, spec1.Blocks()); bi.SetBlock(0); CHECK_EQ(0, bi.BytesOnBlock()); spec1.GenerateFixedSize(1); CHECK_EQ(1, spec1.Size()); CHECK_EQ(1, spec1.Segments()); CHECK_EQ(1, spec1.Blocks()); bi.SetBlock(0); CHECK_EQ(1, bi.BytesOnBlock()); spec1.GenerateFixedSize(Constants::BLOCK_SIZE); CHECK_EQ(Constants::BLOCK_SIZE, spec1.Size()); CHECK_EQ(1, spec1.Segments()); CHECK_EQ(1, spec1.Blocks()); bi.SetBlock(0); CHECK_EQ(Constants::BLOCK_SIZE, bi.BytesOnBlock()); bi.SetBlock(1); CHECK_EQ(0, bi.BytesOnBlock()); spec1.GenerateFixedSize(Constants::BLOCK_SIZE + 1); CHECK_EQ(Constants::BLOCK_SIZE + 1, spec1.Size()); CHECK_EQ(2, spec1.Segments()); CHECK_EQ(2, spec1.Blocks()); bi.SetBlock(0); CHECK_EQ(Constants::BLOCK_SIZE, bi.BytesOnBlock()); bi.SetBlock(1); CHECK_EQ(1, bi.BytesOnBlock()); spec1.GenerateFixedSize(Constants::BLOCK_SIZE * 2); CHECK_EQ(Constants::BLOCK_SIZE * 2, spec1.Size()); CHECK_EQ(2, spec1.Segments()); CHECK_EQ(2, spec1.Blocks()); bi.SetBlock(0); CHECK_EQ(Constants::BLOCK_SIZE, bi.BytesOnBlock()); bi.SetBlock(1); 
CHECK_EQ(Constants::BLOCK_SIZE, bi.BytesOnBlock()); } void TestFirstByte() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(0); spec1.GenerateFixedSize(1); CHECK_EQ(0, CmpDifferentBytes(spec0, spec0)); CHECK_EQ(0, CmpDifferentBytes(spec1, spec1)); CHECK_EQ(1, CmpDifferentBytes(spec0, spec1)); CHECK_EQ(1, CmpDifferentBytes(spec1, spec0)); spec0.GenerateFixedSize(1); spec0.ModifyTo(Modify1stByte(), &spec1); CHECK_EQ(1, CmpDifferentBytes(spec0, spec1)); spec0.GenerateFixedSize(Constants::BLOCK_SIZE + 1); spec0.ModifyTo(Modify1stByte(), &spec1); CHECK_EQ(1, CmpDifferentBytes(spec0, spec1)); SizeIterator<size_t, Sizes> si(&rand, Constants::TEST_ROUNDS); for (; !si.Done(); si.Next()) { size_t size = si.Get(); if (size == 0) { continue; } spec0.GenerateFixedSize(size); spec0.ModifyTo(Modify1stByte(), &spec1); InMemoryEncodeDecode(spec0, spec1, NULL); } } void TestModifyMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 3); struct { size_t size; size_t addr; } test_cases[] = { { Constants::BLOCK_SIZE, 0 }, { Constants::BLOCK_SIZE / 2, 1 }, { Constants::BLOCK_SIZE, 1 }, { Constants::BLOCK_SIZE * 2, 1 }, }; for (size_t i = 0; i < SIZEOF_ARRAY(test_cases); i++) { ChangeList cl1; cl1.push_back(Change(Change::MODIFY, test_cases[i].size, test_cases[i].addr)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size(), spec1.Size()); size_t diff = CmpDifferentBytes(spec0, spec1); CHECK_LE(diff, test_cases[i].size); // There is a 1/256 probability of the changed byte matching the // original value. The following allows double the probability to // pass. CHECK_GE(diff, test_cases[i].size - (2 * test_cases[i].size / 256)); InMemoryEncodeDecode(spec0, spec1, NULL); } } void TestAddMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 2); // TODO: fix this test (for all block sizes)! 
it's broken because // the same byte could be added? struct { size_t size; size_t addr; size_t expected_adds; } test_cases[] = { { 1, 0, 2 /* 1st byte, last byte (short block) */ }, { 1, 1, 3 /* 1st 2 bytes, last byte */ }, { 1, Constants::BLOCK_SIZE - 1, 2 /* changed, last */ }, { 1, Constants::BLOCK_SIZE, 2 /* changed, last */ }, { 1, Constants::BLOCK_SIZE + 1, 3 /* changed + 1st of 2nd block, last */ }, { 1, 2 * Constants::BLOCK_SIZE, 1 /* last byte */ }, }; for (size_t i = 0; i < SIZEOF_ARRAY(test_cases); i++) { ChangeList cl1; cl1.push_back(Change(Change::ADD, test_cases[i].size, test_cases[i].addr)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size() + test_cases[i].size, spec1.Size()); Block coded; InMemoryEncodeDecode(spec0, spec1, &coded); Delta delta(coded); CHECK_EQ(test_cases[i].expected_adds, delta.AddedBytes()); } } void TestDeleteMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 4); struct { size_t size; size_t addr; } test_cases[] = { // Note: an entry { Constants::BLOCK_SIZE, 0 }, // does not work because the xd3_srcwin_move_point logic won't // find a copy if it occurs >= double its size into the file. 
{ Constants::BLOCK_SIZE / 2, 0 }, { Constants::BLOCK_SIZE / 2, Constants::BLOCK_SIZE / 2 }, { Constants::BLOCK_SIZE, Constants::BLOCK_SIZE / 2 }, { Constants::BLOCK_SIZE * 2, Constants::BLOCK_SIZE * 3 / 2 }, { Constants::BLOCK_SIZE, Constants::BLOCK_SIZE * 2 }, }; for (size_t i = 0; i < SIZEOF_ARRAY(test_cases); i++) { ChangeList cl1; cl1.push_back(Change(Change::DELETE, test_cases[i].size, test_cases[i].addr)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size() - test_cases[i].size, spec1.Size()); Block coded; InMemoryEncodeDecode(spec0, spec1, &coded); Delta delta(coded); CHECK_EQ(0, delta.AddedBytes()); } } void TestCopyMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 3); struct { size_t size; size_t from; size_t to; } test_cases[] = { // Copy is difficult to write tests for because where Xdelta finds // copies, it does not enter checksums. So these tests copy data from // later to earlier so that checksumming will start. { Constants::BLOCK_SIZE / 2, Constants::BLOCK_SIZE / 2, 0 }, { Constants::BLOCK_SIZE, 2 * Constants::BLOCK_SIZE, Constants::BLOCK_SIZE, }, }; for (size_t i = 0; i < SIZEOF_ARRAY(test_cases); i++) { ChangeList cl1; cl1.push_back(Change(Change::COPY, test_cases[i].size, test_cases[i].from, test_cases[i].to)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size() + test_cases[i].size, spec1.Size()); Block coded; InMemoryEncodeDecode(spec0, spec1, &coded); Delta delta(coded); CHECK_EQ(0, delta.AddedBytes()); } } void TestMoveMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 3); struct { size_t size; size_t from; size_t to; } test_cases[] = { // This is easier to test than Copy but has the same trouble as Delete. 
{ Constants::BLOCK_SIZE / 2, Constants::BLOCK_SIZE / 2, 0 }, { Constants::BLOCK_SIZE / 2, 0, Constants::BLOCK_SIZE / 2 }, { Constants::BLOCK_SIZE, Constants::BLOCK_SIZE, 2 * Constants::BLOCK_SIZE }, { Constants::BLOCK_SIZE, 2 * Constants::BLOCK_SIZE, Constants::BLOCK_SIZE }, { Constants::BLOCK_SIZE * 3 / 2, Constants::BLOCK_SIZE, Constants::BLOCK_SIZE * 3 / 2 }, // This is a no-op { Constants::BLOCK_SIZE, Constants::BLOCK_SIZE * 2, 3 * Constants::BLOCK_SIZE }, }; for (size_t i = 0; i < SIZEOF_ARRAY(test_cases); i++) { ChangeList cl1; cl1.push_back(Change(Change::MOVE, test_cases[i].size, test_cases[i].from, test_cases[i].to)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size(), spec1.Size()); Block coded; InMemoryEncodeDecode(spec0, spec1, &coded); Delta delta(coded); CHECK_EQ(0, delta.AddedBytes()); } } void TestOverwriteMutator() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE); ChangeList cl1; cl1.push_back(Change(Change::OVERWRITE, 10, 0, 20)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size(), spec1.Size()); Block b0, b1; BlockIterator(spec0).Get(&b0); BlockIterator(spec1).Get(&b1); CHECK(memcmp(b0.Data(), b1.Data() + 20, 10) == 0); CHECK(memcmp(b0.Data(), b1.Data(), 20) == 0); CHECK(memcmp(b0.Data() + 30, b1.Data() + 30, Constants::BLOCK_SIZE - 30) == 0); cl1.clear(); cl1.push_back(Change(Change::OVERWRITE, 10, 20, (xoff_t)0)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); CHECK_EQ(spec0.Size(), spec1.Size()); BlockIterator(spec0).Get(&b0); BlockIterator(spec1).Get(&b1); CHECK(memcmp(b0.Data() + 20, b1.Data(), 10) == 0); CHECK(memcmp(b0.Data() + 10, b1.Data() + 10, Constants::BLOCK_SIZE - 10) == 0); } // Note: this test is written to expose a problem, but the problem was // only exposed with BLOCK_SIZE = 128. 
void TestNonBlockingProgress() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); FileSpec spec2(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 3); // This is a lazy target match Change ct(Change::OVERWRITE, 22, Constants::BLOCK_SIZE + 50, Constants::BLOCK_SIZE + 20); // This is a source match just after the block boundary, shorter // than the lazy target match. Change cs1(Change::OVERWRITE, 16, Constants::BLOCK_SIZE + 51, Constants::BLOCK_SIZE - 1); // This overwrites the original source bytes. Change cs2(Change::MODIFY, 108, Constants::BLOCK_SIZE + 20); // This changes the first blocks Change c1st(Change::MODIFY, Constants::BLOCK_SIZE - 2, 0); ChangeList csl; csl.push_back(cs1); csl.push_back(cs2); csl.push_back(c1st); spec0.ModifyTo(ChangeListMutator(csl), &spec1); ChangeList ctl; ctl.push_back(ct); ctl.push_back(c1st); spec0.ModifyTo(ChangeListMutator(ctl), &spec2); InMemoryEncodeDecode(spec1, spec2, NULL); } void TestEmptyInMemory() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); Block block; spec0.GenerateFixedSize(0); spec1.GenerateFixedSize(0); InMemoryEncodeDecode(spec0, spec1, &block); Delta delta(block); CHECK_LT(0, block.Size()); CHECK_EQ(1, delta.Windows()); } void TestBlockInMemory() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); Block block; spec0.GenerateFixedSize(Constants::BLOCK_SIZE); spec1.GenerateFixedSize(Constants::BLOCK_SIZE); InMemoryEncodeDecode(spec0, spec1, &block); Delta delta(block); CHECK_EQ(spec1.Blocks(Constants::WINDOW_SIZE), delta.Windows()); } void TestFifoCopyDiscipline() { MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); spec0.GenerateFixedSize(Constants::BLOCK_SIZE * 4); // Create a half-block copy, 2.5 blocks apart. With 64-byte blocks, // the file in spec0 copies @ 384 from spec1 @ 64. 
ChangeList cl1; cl1.push_back(Change(Change::MODIFY, Constants::BLOCK_SIZE / 2, 0)); cl1.push_back(Change(Change::OVERWRITE, Constants::BLOCK_SIZE / 2, Constants::BLOCK_SIZE * 3, Constants::BLOCK_SIZE / 2)); cl1.push_back(Change(Change::MODIFY, Constants::BLOCK_SIZE * 3, Constants::BLOCK_SIZE)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); Options options1; options1.encode_srcwin_maxsz = Constants::BLOCK_SIZE * 4; Block block1; InMemoryEncodeDecode(spec1, spec0, &block1, options1); Delta delta1(block1); CHECK_EQ(4 * Constants::BLOCK_SIZE - Constants::BLOCK_SIZE / 2, delta1.AddedBytes()); Options options2; options2.encode_srcwin_maxsz = Constants::BLOCK_SIZE * 3; Block block2; InMemoryEncodeDecode(spec1, spec0, &block2, options2); Delta delta2(block2); CHECK_EQ(4 * Constants::BLOCK_SIZE, delta2.AddedBytes()); } void FourWayMergeTest(const FileSpec &spec0, const FileSpec &spec1, const FileSpec &spec2, const FileSpec &spec3) { Block delta01, delta12, delta23; InMemoryEncodeDecode(spec0, spec1, &delta01); InMemoryEncodeDecode(spec1, spec2, &delta12); InMemoryEncodeDecode(spec2, spec3, &delta23); TmpFile f0, f1, f2, f3, d01, d12, d23; spec0.WriteTmpFile(&f0); spec1.WriteTmpFile(&f1); spec2.WriteTmpFile(&f2); spec2.WriteTmpFile(&f3); delta01.WriteTmpFile(&d01); delta12.WriteTmpFile(&d12); delta23.WriteTmpFile(&d23); // Merge 2 ExtFile out; vector<const char*> mcmd; mcmd.push_back("xdelta3"); mcmd.push_back("merge"); mcmd.push_back("-m"); mcmd.push_back(d01.Name()); mcmd.push_back(d12.Name()); mcmd.push_back(out.Name()); mcmd.push_back(NULL); //DP(RINT "Running one merge: %s\n", CommandToString(mcmd).c_str()); CHECK_EQ(0, xd3_main_cmdline(mcmd.size() - 1, const_cast<char**>(&mcmd[0]))); ExtFile recon; vector<const char*> tcmd; tcmd.push_back("xdelta3"); tcmd.push_back("-d"); tcmd.push_back("-s"); tcmd.push_back(f0.Name()); tcmd.push_back(out.Name()); tcmd.push_back(recon.Name()); tcmd.push_back(NULL); //DP(RINT "Running one recon! 
%s\n", CommandToString(tcmd).c_str()); CHECK_EQ(0, xd3_main_cmdline(tcmd.size() - 1, const_cast<char**>(&tcmd[0]))); //DP(RINT "Should equal! %s\n", f2.Name()); CHECK(recon.EqualsSpec(spec2)); /* TODO: we've only done 3-way merges, try 4-way. */ } void TestMergeCommand1() { /* Repeat random-input testing for a number of iterations. * Test 2, 3, and 4-file scenarios (i.e., 1, 2, and 3-delta merges). */ MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); FileSpec spec2(&rand); FileSpec spec3(&rand); SizeIterator<size_t, Sizes> si0(&rand, 10); for (; !si0.Done(); si0.Next()) { size_t size0 = si0.Get(); SizeIterator<size_t, Sizes> si1(&rand, 10); for (; !si1.Done(); si1.Next()) { size_t change1 = si1.Get(); if (change1 == 0) { continue; } DP(RINT "S0 = %lu\n", size0); DP(RINT "C1 = %lu\n", change1); size_t add1_pos = size0 ? rand.Rand32() % size0 : 0; size_t del2_pos = size0 ? rand.Rand32() % size0 : 0; spec0.GenerateFixedSize(size0); ChangeList cl1, cl2, cl3; size_t change3 = change1; size_t change3_pos; if (change3 >= size0) { change3 = size0; change3_pos = 0; } else { change3_pos = rand.Rand32() % (size0 - change3); } cl1.push_back(Change(Change::ADD, change1, add1_pos)); cl2.push_back(Change(Change::DELETE, change1, del2_pos)); cl3.push_back(Change(Change::MODIFY, change3, change3_pos)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); spec1.ModifyTo(ChangeListMutator(cl2), &spec2); spec2.ModifyTo(ChangeListMutator(cl3), &spec3); FourWayMergeTest(spec0, spec1, spec2, spec3); FourWayMergeTest(spec3, spec2, spec1, spec0); } } } void TestMergeCommand2() { /* Same as above, different mutation pattern. 
*/ /* TODO: run this with large sizes too */ /* TODO: run this with small sizes too */ MTRandom rand; FileSpec spec0(&rand); FileSpec spec1(&rand); FileSpec spec2(&rand); FileSpec spec3(&rand); SizeIterator<size_t, Sizes> si0(&rand, 10); for (; !si0.Done(); si0.Next()) { size_t size0 = si0.Get(); SizeIterator<size_t, Sizes> si1(&rand, 10); for (; !si1.Done(); si1.Next()) { size_t size1 = si1.Get(); SizeIterator<size_t, Sizes> si2(&rand, 10); for (; !si2.Done(); si2.Next()) { size_t size2 = si2.Get(); SizeIterator<size_t, Sizes> si3(&rand, 10); for (; !si3.Done(); si3.Next()) { size_t size3 = si3.Get(); // We're only interested in three sizes, strictly decreasing. */ if (size3 >= size2 || size2 >= size1 || size1 >= size0) { continue; } DP(RINT "S0 = %lu\n", size0); DP(RINT "S1 = %lu\n", size1); DP(RINT "S2 = %lu\n", size2); DP(RINT "S3 = %lu\n", size3); spec0.GenerateFixedSize(size0); ChangeList cl1, cl2, cl3; cl1.push_back(Change(Change::DELETE, size0 - size1, 0)); cl2.push_back(Change(Change::DELETE, size0 - size2, 0)); cl3.push_back(Change(Change::DELETE, size0 - size3, 0)); spec0.ModifyTo(ChangeListMutator(cl1), &spec1); spec0.ModifyTo(ChangeListMutator(cl2), &spec2); spec0.ModifyTo(ChangeListMutator(cl3), &spec3); FourWayMergeTest(spec0, spec1, spec2, spec3); FourWayMergeTest(spec3, spec2, spec1, spec0); } } } } } }; // class Regtest<Constants> #define TEST(x) cerr << #x << "..." << endl; regtest.x() // These tests are primarily tests of the testing framework itself. template <class T> void UnitTest() { Regtest<T> regtest; TEST(TestRandomNumbers); TEST(TestRandomFile); TEST(TestFirstByte); TEST(TestModifyMutator); TEST(TestAddMutator); TEST(TestDeleteMutator); TEST(TestCopyMutator); TEST(TestMoveMutator); TEST(TestOverwriteMutator); } // These are Xdelta tests. 
// Runs the Xdelta encode/decode and merge regression tests for the block-size
// configuration T (one of the constant sets instantiated in main() below).
template <class T>
void MainTest() {
  // Announce which block-size configuration this instantiation exercises.
  cerr << "Blocksize: " << T::BLOCK_SIZE << endl;
  Regtest<T> regtest;
  // TEST(x) prints the test name and invokes regtest.x() (macro defined above,
  // removed again right after this function).
  TEST(TestEmptyInMemory);
  TEST(TestBlockInMemory);
  TEST(TestNonBlockingProgress);
  TEST(TestFifoCopyDiscipline);
  TEST(TestMergeCommand1);
  TEST(TestMergeCommand2);
}

#undef TEST

// Run the unittests, followed by xdelta tests for various constants.
// Command-line arguments are accepted but unused.
int main(int argc, char **argv) {
  // UnitTest validates the testing framework itself; run it once on the
  // smallest configuration before the real Xdelta tests.
  UnitTest<SmallBlock>();
  MainTest<SmallBlock>();
  MainTest<MixedBlock>();
  MainTest<PrimeBlock>();
  MainTest<OversizeBlock>();
  MainTest<LargeBlock>();
  return 0;
}
ohio813/MultiPatch
xdelta/testing/regtest.cc
C++
gpl-2.0
23,901
<?php

use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;

/**
 * Creates the module_section table, which groups sections under a module.
 */
class CreateModuleSectionTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the table first and adds the foreign key to module in a
     * second pass, preserving the original two-step schema build.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('module_section', function (Blueprint $table) {
            $table->increments('id');
            // Section names are unique across all modules.
            $table->string('name')->unique();
            $table->string('description')->nullable();
            // Owning module; must be unsigned to match the module id column.
            $table->integer('module_id')->unsigned();
        });

        Schema::table('module_section', function (Blueprint $table) {
            // Delete sections automatically when their module is removed.
            $table->foreign('module_id')
                ->references('id')->on('module')
                ->onDelete('cascade');
        });
    }

    /**
     * Reverse the migrations.
     *
     * Uses dropIfExists() instead of drop() so rolling back against a
     * database where the table is already gone does not raise an error.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('module_section');
    }
}
ImbalanceGaming/imbalance-gaming-laravel
database/migrations/2016_03_14_011932_create_module_section_table.php
PHP
gpl-2.0
887
using System.ComponentModel; using System.ComponentModel.Design; using System.Drawing.Design; using Microsoft.VisualStudio.Shell; using NLog; using System; using System.Linq.Expressions; namespace CHeaderGenerator { public class CSourceFileOptions : DialogPage, INotifyPropertyChanged { private bool showIncludeGuard = true; private string headerComment = @"/* Header file generated by C Header Generator. Executed by {Name} on {Date}. */"; private bool includeStaticFunctions = false; private bool includeExternFunctions = false; private string logLayout = "[${longdate}] ${level}: ${logger} - ${message}"; private LogLevel logLevel = LogLevel.Info; private bool autoSaveFiles = true; public event PropertyChangedEventHandler PropertyChanged; [Description("Generate an include guard to surround the file")] [DisplayName("Show Include Guard")] [Category("General")] public bool ShowIncludeGuard { get { return this.showIncludeGuard; } set { if (this.showIncludeGuard != value) { this.showIncludeGuard = value; this.OnPropertyChanged(() => this.ShowIncludeGuard); } } } [Description("The format of the header comment to display at the beginning of the header file")] [DisplayName("Header Comment")] [Category("General")] [Editor(typeof(MultilineStringEditor), typeof(UITypeEditor))] public string HeaderComment { get { return this.headerComment; } set { if (this.headerComment != value) { this.headerComment = value; this.OnPropertyChanged(() => this.HeaderComment); } } } [Description("Include static declarations in header file")] [DisplayName("Include Static Declarations")] [Category("General")] public bool IncludeStaticFunctions { get { return this.includeStaticFunctions; } set { if (this.includeStaticFunctions != value) { this.includeStaticFunctions = value; this.OnPropertyChanged(() => this.IncludeStaticFunctions); } } } [Description("Include extern declarations in header file")] [DisplayName("Include Extern Declarations")] [Category("General")] public bool IncludeExternFunctions { get 
{ return this.includeExternFunctions; } set { if (this.includeExternFunctions != value) { this.includeExternFunctions = value; this.OnPropertyChanged(() => this.IncludeExternFunctions); } } } [Description("Layout for log messages to the output window")] [DisplayName("Log Message Layout")] [Category("Logging")] public string LogLayout { get { return this.logLayout; } set { if (this.logLayout != value) { this.logLayout = value; this.OnPropertyChanged(() => this.LogLayout); } } } [Description("Minimum level for log messages")] [DisplayName("Log Level")] [Category("Logging")] public LogLevel LogLevel { get { return this.logLevel; } set { if (this.logLevel != value) { this.logLevel = value; this.OnPropertyChanged(() => this.LogLevel); } } } [Description("Automatically save files that have been modified when generating header files")] [DisplayName("Automatically Save Files")] [Category("General")] public bool AutoSaveFiles { get { return this.autoSaveFiles; } set { if (this.autoSaveFiles != value) { this.autoSaveFiles = value; this.OnPropertyChanged(() => this.AutoSaveFiles); } } } protected virtual void OnPropertyChanged<T>(Expression<Func<T>> propFunc) { var body = propFunc.Body as MemberExpression; if (body != null) this.OnPropertyChanged(body.Member.Name); } protected virtual void OnPropertyChanged(string propertyName) { if (this.PropertyChanged != null) { this.PropertyChanged(this, new PropertyChangedEventArgs(propertyName)); } } } }
cmc13/CHeaderGenerator
CHeaderGenerator/CSourceFileOptions.cs
C#
gpl-2.0
4,992
<?php /** * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; under version 2 * of the License (non-upgradable). * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * Copyright (c) 2013 (original work) Open Assessment Technologies SA (under the project TAO-PRODUCT); * * @author Jérôme Bogaerts, <jerome@taotesting.com> * @license GPLv2 * @package */ namespace qtism\data\expressions; /** * From IMS QTI: * * null is a simple expression that returns the NULL value - the null value is * treated as if it is of any desired baseType. * * @author Jérôme Bogaerts <jerome@taotesting.com> * */ class NullValue extends Expression { public function getQtiClassName() { return 'null'; } }
dhx/tao-comp
vendor/qtism/qtism/qtism/data/expressions/NullValue.php
PHP
gpl-2.0
1,260
/* * Copyright (C) 2010-2012 Project SkyFire <http://www.projectskyfire.org/> * Copyright (C) 2008-2012 TrinityCore <http://www.trinitycore.org/> * Copyright (C) 2005-2011 MaNGOS <http://getmangos.com/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ /* Script Data Start SFName: Boss epoch SFAuthor: Tartalo SF%Complete: 80 SFComment: TODO: Intro, consecutive attacks to a random target durin time wrap, adjust timers SFCategory: Script Data End */ #include "ScriptPCH.h" #include "culling_of_stratholme.h" enum Spells { SPELL_CURSE_OF_EXERTION = 52772, SPELL_TIME_WARP = 52766, //Time slows down, reducing attack, casting and movement speed by 70% for 6 sec. SPELL_TIME_STOP = 58848, //Stops time in a 50 yard sphere for 2 sec. SPELL_WOUNDING_STRIKE = 52771, //Used only on the tank H_SPELL_WOUNDING_STRIKE = 58830 }; enum Yells { SAY_INTRO = -1595000, //"Prince Arthas Menethil, on this day, a powerful darkness has taken hold of your soul. The death you are destined to visit upon others will this day be your own." SAY_AGGRO = -1595001, //"We'll see about that, young prince." SAY_TIME_WARP_1 = -1595002, //"Tick tock, tick tock..." SAY_TIME_WARP_2 = -1595003, //"Not quick enough!" SAY_TIME_WARP_3 = -1595004, //"Let's get this over with. " SAY_SLAY_1 = -1595005, //"There is no future for you." SAY_SLAY_2 = -1595006, //"This is the hour of our greatest triumph!" 
SAY_SLAY_3 = -1595007, //"You were destined to fail. " SAY_DEATH = -1595008 //"*gurgles*" }; class boss_epoch : public CreatureScript { public: boss_epoch() : CreatureScript("boss_epoch") { } CreatureAI* GetAI(Creature* creature) const { return new boss_epochAI (creature); } struct boss_epochAI : public ScriptedAI { boss_epochAI(Creature* creature) : ScriptedAI(creature) { instance = creature->GetInstanceScript(); } uint8 Step; uint32 StepTimer; uint32 WoundingStrikeTimer; uint32 TimeWarpTimer; uint32 TimeStopTimer; uint32 CurseOfExertionTimer; InstanceScript* instance; void Reset() { Step = 1; StepTimer = 26000; CurseOfExertionTimer = 9300; TimeWarpTimer = 25300; TimeStopTimer = 21300; WoundingStrikeTimer = 5300; if (instance) instance->SetData(DATA_EPOCH_EVENT, NOT_STARTED); } void EnterCombat(Unit* /*who*/) { DoScriptText(SAY_AGGRO, me); if (instance) instance->SetData(DATA_EPOCH_EVENT, IN_PROGRESS); } void UpdateAI(const uint32 diff) { //Return since we have no target if (!UpdateVictim()) return; if (CurseOfExertionTimer < diff) { if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 100, true)) DoCast(target, SPELL_CURSE_OF_EXERTION); CurseOfExertionTimer = 9300; } else CurseOfExertionTimer -= diff; if (WoundingStrikeTimer < diff) { DoCastVictim(SPELL_WOUNDING_STRIKE); WoundingStrikeTimer = 5300; } else WoundingStrikeTimer -= diff; if (TimeStopTimer < diff) { DoCastAOE(SPELL_TIME_STOP); TimeStopTimer = 21300; } else TimeStopTimer -= diff; if (TimeWarpTimer < diff) { DoScriptText(RAND(SAY_TIME_WARP_1, SAY_TIME_WARP_2, SAY_TIME_WARP_3), me); DoCastAOE(SPELL_TIME_WARP); TimeWarpTimer = 25300; } else TimeWarpTimer -= diff; DoMeleeAttackIfReady(); } void JustDied(Unit* /*killer*/) { DoScriptText(SAY_DEATH, me); if (instance) instance->SetData(DATA_EPOCH_EVENT, DONE); } void KilledUnit(Unit* victim) { if (victim == me) return; DoScriptText(RAND(SAY_SLAY_1, SAY_SLAY_2, SAY_SLAY_3), me); } }; }; void AddSC_boss_epoch() { new boss_epoch(); }
SkyFireArchives/SkyFireEMU_433
src/server/scripts/Kalimdor/CavernsOfTime/CullingOfStratholme/boss_epoch.cpp
C++
gpl-2.0
5,315
<?php namespace Google\AdsApi\AdWords\v201702\mcm; /** * This file was generated from WSDL. DO NOT EDIT. */ class CurrencyCodeErrorReason { const UNSUPPORTED_CURRENCY_CODE = 'UNSUPPORTED_CURRENCY_CODE'; }
renshuki/dfp-manager
vendor/googleads/googleads-php-lib/src/Google/AdsApi/AdWords/v201702/mcm/CurrencyCodeErrorReason.php
PHP
gpl-2.0
216
/* * Copyright (C) 2009 Peter Grasch <peter.grasch@bedahr.org> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * or (at your option) any later version, as published by the Free * Software Foundation * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details * * You should have received a copy of the GNU General Public * License along with this program; if not, write to the * Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ #include "dialogcommandmanager.h" #include "dialogconfiguration.h" #include "dialogcommand.h" #include "createdialogcommandwidget.h" #include <eventsimulation/eventhandler.h> #include <simonactions/actionmanager.h> #include <KLocalizedString> #include <KAction> K_PLUGIN_FACTORY( DialogCommandPluginFactory, registerPlugin< DialogCommandManager >(); ) K_EXPORT_PLUGIN( DialogCommandPluginFactory("simondialogcommand") ) DialogCommandManager::DialogCommandManager(QObject* parent, const QVariantList& args) : CommandManager((Scenario*) parent, args), GreedyReceiver(this), dialogWidget(new QWidget(0, Qt::Dialog|Qt::WindowStaysOnTopHint)), activateAction(new KAction(this)) { setFont(ActionManager::getInstance()->pluginBaseFont()); dialogWidget->setWindowIcon(KIcon("im-user")); ui.setupUi(dialogWidget); dialogWidget->hide(); activateAction->setText(i18n("Activate Dialog")); activateAction->setIcon(KIcon("input-dialog")); connect(activateAction, SIGNAL(triggered(bool)), this, SLOT(activate())); guiActions<<activateAction; } bool DialogCommandManager::shouldAcceptCommand(Command *command) { return (dynamic_cast<DialogCommand*>(command) != 0); } void DialogCommandManager::setFont(const QFont& font) { dialogWidget->setFont(font); } void 
DialogCommandManager::activate() { dialogWidget->show(); startGreedy(); } void DialogCommandManager::deregister() { stopGreedy(); } const QString DialogCommandManager::iconSrc() const { return "im-user"; } const QString DialogCommandManager::name() const { return i18n("Dialog"); } bool DialogCommandManager::greedyTrigger(const QString& inputText) { return trigger(inputText); } DialogConfiguration* DialogCommandManager::getDialogConfiguration() { return static_cast<DialogConfiguration*>(getConfigurationPage()); } bool DialogCommandManager::deSerializeConfig(const QDomElement& elem) { //Connect to Slots connect(ui.pbOk, SIGNAL(clicked()), dialogWidget, SLOT(hide())); connect(ui.pbOk, SIGNAL(clicked()), this, SLOT(deregister())); if (!config) config->deleteLater(); config = new DialogConfiguration(this, parentScenario); config->deSerialize(elem); bool succ = true; succ &= installInterfaceCommand(this, "activate", i18n("Dialog"), iconSrc(), i18n("Starts dialog"), true /* announce */, true /* show icon */, SimonCommand::DefaultState /* consider this command when in this state */, SimonCommand::GreedyState, /* if executed switch to this state */ QString() /* take default visible id from action name */, "startDialog" /* id */); succ &= installInterfaceCommand(ui.pbOk, "click", i18nc("Close the dialog", "OK"), "dialog-ok", i18n("Hides the dialog"), false, true, SimonCommand::GreedyState, SimonCommand::DefaultState); return succ; } CreateCommandWidget* DialogCommandManager::getCreateCommandWidget(QWidget *parent) { return new CreateDialogCommandWidget(this, parent); } DialogCommandManager::~DialogCommandManager() { dialogWidget->deleteLater(); activateAction->deleteLater(); }
photom/simon
plugins/Commands/Template/dialogcommandmanager.cpp
C++
gpl-2.0
3,878
<?php include_partial('stats_header', array('form' => $form, 'title' => $title)); ?> <?php if ( $sf_user->hasCredential('stats-control') ): ?> <?php include_partial('global/chart_jqplot', array( 'id' => 'control', 'data' => url_for('statistics/controlJson?type=' . $type), 'width' => '1000', 'label' => __('Ticket controls') )) ?> <?php endif ?> </div> <?php use_javascript('/js/jqplot/plugins/jqplot.barRenderer.js') ?> <?php use_javascript('/js/jqplot/plugins/jqplot.cursor.js') ?> <?php use_javascript('/js/jqplot/plugins/jqplot.canvasAxisTickRenderer.js') ?> <?php use_javascript('/js/jqplot/plugins/jqplot.canvasTextRenderer.js') ?> <?php use_javascript('statistics-jqplot') ?> <?php use_javascript('statistics-control') ?>
Fabrice-li/e-venement
apps/statistics/modules/statistics/templates/controlSuccess.php
PHP
gpl-2.0
785
<?php namespace Drupal\jsonapi\Normalizer\Value; /** * Helps normalize config entity "fields" in compliance with the JSON API spec. * * @internal */ class ConfigFieldItemNormalizerValue extends FieldItemNormalizerValue { /** * {@inheritdoc} * * @var mixed */ protected $raw; /** * Instantiate a ConfigFieldItemNormalizerValue object. * * @param mixed $values * The normalized result. */ public function __construct($values) { $this->raw = $values; } /** * {@inheritdoc} */ public function rasterizeValue() { return $this->rasterizeValueRecursive($this->raw); } }
enslyon/ensl
modules/jsonapi/src/Normalizer/Value/ConfigFieldItemNormalizerValue.php
PHP
gpl-2.0
632
package org.telegram.messenger; import android.text.TextUtils; import android.util.LongSparseArray; import android.util.SparseBooleanArray; import org.telegram.tgnet.TLRPC; import java.util.ArrayList; public class ForwardingMessagesParams { public LongSparseArray<MessageObject.GroupedMessages> groupedMessagesMap = new LongSparseArray<>(); public ArrayList<MessageObject> messages; public ArrayList<MessageObject> previewMessages = new ArrayList<>(); public SparseBooleanArray selectedIds = new SparseBooleanArray(); public boolean hideForwardSendersName; public boolean hideCaption; public boolean hasCaption; public boolean hasSenders; public boolean isSecret; public boolean willSeeSenders; public boolean multiplyUsers; public boolean hasSpoilers; public ArrayList<TLRPC.TL_pollAnswerVoters> pollChoosenAnswers = new ArrayList<>(); public ForwardingMessagesParams(ArrayList<MessageObject> messages, long newDialogId) { this.messages = messages; hasCaption = false; hasSenders = false; isSecret = DialogObject.isEncryptedDialog(newDialogId); hasSpoilers = false; ArrayList<String> hiddenSendersName = new ArrayList<>(); for (int i = 0; i < messages.size(); i++) { MessageObject messageObject = messages.get(i); if (!TextUtils.isEmpty(messageObject.caption)) { hasCaption = true; } selectedIds.put(messageObject.getId(), true); TLRPC.Message message = new TLRPC.TL_message(); message.id = messageObject.messageOwner.id; message.grouped_id = messageObject.messageOwner.grouped_id; message.peer_id = messageObject.messageOwner.peer_id; message.from_id = messageObject.messageOwner.from_id; message.message = messageObject.messageOwner.message; message.media = messageObject.messageOwner.media; message.action = messageObject.messageOwner.action; message.edit_date = 0; if (messageObject.messageOwner.entities != null) { message.entities.addAll(messageObject.messageOwner.entities); if (!hasSpoilers) { for (TLRPC.MessageEntity e : message.entities) { if (e instanceof TLRPC.TL_messageEntitySpoiler) 
{ hasSpoilers = true; break; } } } } message.out = true; message.unread = false; message.via_bot_id = messageObject.messageOwner.via_bot_id; message.reply_markup = messageObject.messageOwner.reply_markup; message.post = messageObject.messageOwner.post; message.legacy = messageObject.messageOwner.legacy; message.restriction_reason = messageObject.messageOwner.restriction_reason; TLRPC.MessageFwdHeader header = null; long clientUserId = UserConfig.getInstance(messageObject.currentAccount).clientUserId; if (!isSecret) { if (messageObject.messageOwner.fwd_from != null) { header = messageObject.messageOwner.fwd_from; if (!messageObject.isDice()) { hasSenders = true; } else { willSeeSenders = true; } if (header.from_id == null && !hiddenSendersName.contains(header.from_name)) { hiddenSendersName.add(header.from_name); } } else if (messageObject.messageOwner.from_id.user_id == 0 || messageObject.messageOwner.dialog_id != clientUserId || messageObject.messageOwner.from_id.user_id != clientUserId) { header = new TLRPC.TL_messageFwdHeader(); header.from_id = messageObject.messageOwner.from_id; if (!messageObject.isDice()) { hasSenders = true; } else { willSeeSenders = true; } } } if (header != null) { message.fwd_from = header; message.flags |= TLRPC.MESSAGE_FLAG_FWD; } message.dialog_id = newDialogId; MessageObject previewMessage = new MessageObject(messageObject.currentAccount, message, true, false) { @Override public boolean needDrawForwarded() { if (hideForwardSendersName) { return false; } return super.needDrawForwarded(); } }; previewMessage.preview = true; if (previewMessage.getGroupId() != 0) { MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(previewMessage.getGroupId(), null); if (groupedMessages == null) { groupedMessages = new MessageObject.GroupedMessages(); groupedMessagesMap.put(previewMessage.getGroupId(), groupedMessages); } groupedMessages.messages.add(previewMessage); } previewMessages.add(0, previewMessage); if 
(messageObject.isPoll()) { TLRPC.TL_messageMediaPoll mediaPoll = (TLRPC.TL_messageMediaPoll) messageObject.messageOwner.media; PreviewMediaPoll newMediaPoll = new PreviewMediaPoll(); newMediaPoll.poll = mediaPoll.poll; newMediaPoll.provider = mediaPoll.provider; newMediaPoll.results = new TLRPC.TL_pollResults(); newMediaPoll.totalVotersCached = newMediaPoll.results.total_voters = mediaPoll.results.total_voters; previewMessage.messageOwner.media = newMediaPoll; if (messageObject.canUnvote()) { for (int a = 0, N = mediaPoll.results.results.size(); a < N; a++) { TLRPC.TL_pollAnswerVoters answer = mediaPoll.results.results.get(a); if (answer.chosen) { TLRPC.TL_pollAnswerVoters newAnswer = new TLRPC.TL_pollAnswerVoters(); newAnswer.chosen = answer.chosen; newAnswer.correct = answer.correct; newAnswer.flags = answer.flags; newAnswer.option = answer.option; newAnswer.voters = answer.voters; pollChoosenAnswers.add(newAnswer); newMediaPoll.results.results.add(newAnswer); } else { newMediaPoll.results.results.add(answer); } } } } } ArrayList<Long> uids = new ArrayList<>(); for (int a = 0; a < messages.size(); a++) { MessageObject object = messages.get(a); long uid; if (object.isFromUser()) { uid = object.messageOwner.from_id.user_id; } else { TLRPC.Chat chat = MessagesController.getInstance(object.currentAccount).getChat(object.messageOwner.peer_id.channel_id); if (ChatObject.isChannel(chat) && chat.megagroup && object.isForwardedChannelPost()) { uid = -object.messageOwner.fwd_from.from_id.channel_id; } else { uid = -object.messageOwner.peer_id.channel_id; } } if (!uids.contains(uid)) { uids.add(uid); } } if (uids.size() + hiddenSendersName.size() > 1) { multiplyUsers = true; } for (int i = 0; i < groupedMessagesMap.size(); i++) { groupedMessagesMap.valueAt(i).calculate(); } } public void getSelectedMessages(ArrayList<MessageObject> messagesToForward) { messagesToForward.clear(); for (int i = 0; i < messages.size(); i++) { MessageObject messageObject = messages.get(i); int 
id = messageObject.getId(); if (selectedIds.get(id, false)) { messagesToForward.add(messageObject); } } } public class PreviewMediaPoll extends TLRPC.TL_messageMediaPoll { public int totalVotersCached; } }
Telegram-FOSS-Team/Telegram-FOSS
TMessagesProj/src/main/java/org/telegram/messenger/ForwardingMessagesParams.java
Java
gpl-2.0
8,580
/************************************************************************ COPYRIGHT (C) STMicroelectronics 2007 Source file name : collator_pes_video_divx_.cpp Author : Chris Implementation of the pes collator class for player 2. Date Modification Name ---- ------------ -------- 11-Jul-07 Created Chris ************************************************************************/ //////////////////////////////////////////////////////////////////////////// /// \class Collator_PesVideoDivx_c /// /// Implements initialisation of collator video class for Divx /// // ///////////////////////////////////////////////////////////////////// // // Include any component headers // #include "collator_pes_video_divx.h" // ///////////////////////////////////////////////////////////////////////// // // Locally defined constants // // #define ZERO_START_CODE_HEADER_SIZE 7 // Allow us to see 00 00 01 00 00 01 <other code> // ///////////////////////////////////////////////////////////////////////// // // Locally defined structures // //////////////////////////////////////////////////////////////////////////// /// /// Initialize the class by resetting it. /// /// During a constructor calls to virtual methods resolve to the current class (because /// the vtable is still for the class being constructed). This means we need to call /// ::Reset again because the calls made by the sub-constructors will not have called /// our reset method. 
/// Collator_PesVideoDivx_c::Collator_PesVideoDivx_c( void ) { if( InitializationStatus != CollatorNoError ) return; Collator_PesVideoDivx_c::Reset(); } //////////////////////////////////////////////////////////////////////////// /// /// Resets and configures according to the requirements of this stream content /// /// \return void /// CollatorStatus_t Collator_PesVideoDivx_c::Reset( void ) { CollatorStatus_t Status; // COLLATOR_DEBUG(">><<\n"); Status = Collator_PesVideo_c::Reset(); if( Status != CollatorNoError ) return Status; Configuration.GenerateStartCodeList = true; Configuration.MaxStartCodes = 16; Configuration.StreamIdentifierMask = 0xff; // Video Configuration.StreamIdentifierCode = PES_START_CODE_VIDEO; Configuration.BlockTerminateMask = 0xff; // Picture Configuration.BlockTerminateCode = 0xb6; Configuration.IgnoreCodesRangeStart = 0x01; Configuration.IgnoreCodesRangeEnd = 0x1F; Configuration.InsertFrameTerminateCode = false; Configuration.TerminalCode = 0x00; Configuration.ExtendedHeaderLength = 0; IgnoreCodes = false; Version = 5; Configuration.DeferredTerminateFlag = true; Configuration.StreamTerminateFlushesFrame = false; return CollatorNoError; } CollatorStatus_t Collator_PesVideoDivx_c::Input( PlayerInputDescriptor_t *Input, unsigned int DataLength, void *Data ) { unsigned int i; CollatorStatus_t Status; unsigned int HeaderSize; unsigned int Transfer; unsigned int Skip; unsigned int SpanningWord; unsigned int StartingWord; unsigned int SpanningCount; unsigned int CodeOffset; unsigned char Code; // AssertComponentState( "Collator_PesVideoDivx_c::Input", ComponentRunning ); ActOnInputDescriptor( Input ); // // Initialize scan state // StartCodeCount = 0; RemainingData = (unsigned char *)Data; RemainingLength = DataLength; while( RemainingLength != 0 ) { // // Are we building a pes header // if( GotPartialPesHeader ) { HeaderSize = PES_INITIAL_HEADER_SIZE; if( RemainingLength >= (PES_INITIAL_HEADER_SIZE - GotPartialPesHeaderBytes) ) HeaderSize = 
GotPartialPesHeaderBytes >= PES_INITIAL_HEADER_SIZE ? PES_HEADER_SIZE(StoredPesHeader) : PES_HEADER_SIZE(RemainingData-GotPartialPesHeaderBytes); Transfer = min( RemainingLength, (HeaderSize - GotPartialPesHeaderBytes) ); memcpy( StoredPesHeader+GotPartialPesHeaderBytes, RemainingData, Transfer ); GotPartialPesHeaderBytes += Transfer; RemainingData += Transfer; RemainingLength -= Transfer; if( GotPartialPesHeaderBytes == PES_HEADER_SIZE(StoredPesHeader) ) { // // Since we are going to process the partial header, we will not have it in future // GotPartialPesHeader = false; // Status = ReadPesHeader(); if( Status != CollatorNoError ) return Status; if( SeekingPesHeader ) { AccumulatedDataSize = 0; // Dump any collected data SeekingPesHeader = false; } } if( RemainingLength == 0 ) return CollatorNoError; } // // Are we building a padding header // if( GotPartialPaddingHeader ) { report(severity_error,"Partial Packing\n"); HeaderSize = PES_PADDING_INITIAL_HEADER_SIZE; Transfer = min( RemainingLength, (HeaderSize - GotPartialPaddingHeaderBytes) ); memcpy( StoredPaddingHeader+GotPartialPaddingHeaderBytes, RemainingData, Transfer ); GotPartialPaddingHeaderBytes += Transfer; RemainingData += Transfer; RemainingLength -= Transfer; if( GotPartialPaddingHeaderBytes == PES_PADDING_INITIAL_HEADER_SIZE ) { Skipping = PES_PADDING_SKIP(StoredPaddingHeader); report(severity_error,"Skipping\n"); GotPartialPaddingHeader = false; } if( RemainingLength == 0 ) return CollatorNoError; } // // Are we skipping padding // if( Skipping != 0 ) { Skip = min( Skipping, RemainingLength ); RemainingData += Skip; RemainingLength -= Skip; Skipping -= Skip; if( RemainingLength == 0 ) return CollatorNoError; } // // Check for spanning header // SpanningWord = 0xffffffff << (8 * min(AccumulatedDataSize,3)); SpanningWord |= BufferBase[AccumulatedDataSize-3] << 16; SpanningWord |= BufferBase[AccumulatedDataSize-2] << 8; SpanningWord |= BufferBase[AccumulatedDataSize-1]; StartingWord = 0x00ffffff >> (8 
* min((RemainingLength-1),3)); StartingWord |= RemainingData[0] << 24; StartingWord |= RemainingData[1] << 16; StartingWord |= RemainingData[2] << 8; // // Check for a start code spanning, or in the first word // record the nature of the span in a counter indicating how many // bytes of the code are in the remaining data. // NOTE the 00 at the bottom indicates we have a byte for the code, // not what it is. // SpanningCount = 0; if( (SpanningWord << 8) == 0x00000100 ) { SpanningCount = 1; } else if( ((SpanningWord << 16) | ((StartingWord >> 16) & 0xff00)) == 0x00000100 ) { SpanningCount = 2; } else if( ((SpanningWord << 24) | ((StartingWord >> 8) & 0xffff00)) == 0x00000100 ) { SpanningCount = 3; } else if( StartingWord == 0x00000100 ) { SpanningCount = 4; UseSpanningTime = false; } // // Check that if we have a spanning code, that the code is not to be ignored // if( (SpanningCount != 0) && inrange(RemainingData[SpanningCount-1], Configuration.IgnoreCodesRangeStart, Configuration.IgnoreCodesRangeEnd) ) { SpanningCount = 0; } // // Handle a spanning start code // if( SpanningCount != 0 ) { // // Copy over the spanning bytes // for( i=0; i<SpanningCount; i++ ) BufferBase[AccumulatedDataSize + i] = RemainingData[i]; AccumulatedDataSize += SpanningCount; RemainingData += SpanningCount; RemainingLength -= SpanningCount; Code = BufferBase[AccumulatedDataSize-1]; // report(severity_info,"Start Code %x\n",Code); if( Code == 0x31 ) { Version = *RemainingData; //report(severity_error,"Version Number %d\n",Version); IgnoreCodes = false; } if( Code == 0x00 ) { GotPartialZeroHeader = true; AccumulatedDataSize -= 4; // Wind to before it GotPartialZeroHeaderBytes = 4; StoredZeroHeader = BufferBase + AccumulatedDataSize; continue; } // // Is it a pes header, and is it a pes stream we are interested in // else if( IS_PES_START_CODE_VIDEO(Code) ) { AccumulatedDataSize -= 4; // Wind to before it if( (Code & Configuration.StreamIdentifierMask) == Configuration.StreamIdentifierCode ) { 
GotPartialPesHeader = true; GotPartialPesHeaderBytes = 4; StoredPesHeader = BufferBase + AccumulatedDataSize; } else { SeekingPesHeader = true; } continue; } // // Or is it a padding block // else if( Code == PES_PADDING_START_CODE ) { GotPartialPaddingHeader = true; AccumulatedDataSize -= 4; // Wind to before it GotPartialPaddingHeaderBytes = 4; StoredPaddingHeader = BufferBase + AccumulatedDataSize; continue; } // // Or if we are seeking a pes header, dump what we have and try again // else if( SeekingPesHeader ) { AccumulatedDataSize = 0; // Wind to before it continue; } // // Or is it a block terminate code // else if( TerminationFlagIsSet) { AccumulatedDataSize -=4; Status = FrameFlush(); if( Status != CollatorNoError ) return Status; BufferBase[0] = 0x00; BufferBase[1] = 0x00; BufferBase[2] = 0x01; BufferBase[3] = Code; AccumulatedDataSize = 4; SeekingPesHeader = false; TerminationFlagIsSet = false; if ( Configuration.DeferredTerminateFlag && ((Code & Configuration.BlockTerminateMask) == Configuration.BlockTerminateCode)) { TerminationFlagIsSet = true; } } else if ( Configuration.DeferredTerminateFlag && ((Code & Configuration.BlockTerminateMask) == Configuration.BlockTerminateCode)) { IgnoreCodes = true; TerminationFlagIsSet = true; } // // Otherwise (and if its a block terminate) accumulate the start code // Status = AccumulateStartCode( PackStartCode(AccumulatedDataSize-4,Code) ); if( Status != CollatorNoError ) { DiscardAccumulatedData(); return Status; } } // // If we had no spanning code, but we had a spanning PTS, and we // had no normal PTS for this frame, the copy the spanning time // to the normal time. 
// else if( !PlaybackTimeValid ) { PlaybackTimeValid = SpanningPlaybackTimeValid; PlaybackTime = SpanningPlaybackTime; DecodeTimeValid = SpanningDecodeTimeValid; DecodeTime = SpanningDecodeTime; UseSpanningTime = false; SpanningPlaybackTimeValid = false; SpanningDecodeTimeValid = false; } // // Now enter the loop processing start codes // while( true ) { Status = FindNextStartCode( &CodeOffset ); if( Status != CollatorNoError) { // // Terminal code after start code processing copy remaining data into buffer // Status = AccumulateData( RemainingLength, RemainingData ); if( Status != CollatorNoError ) DiscardAccumulatedData(); RemainingLength = 0; return Status; } // // Got one accumulate upto and including it // Status = AccumulateData( CodeOffset+4, RemainingData ); if( Status != CollatorNoError ) { DiscardAccumulatedData(); return Status; } Code = RemainingData[CodeOffset+3]; RemainingLength -= CodeOffset+4; RemainingData += CodeOffset+4; // report(severity_info,"Start Code %x (ignore %d)\n",Code,IgnoreCodes); // second case is for when we have 2 B6's in a group which can cause issues. 
Test with BatmanBegins if ((IgnoreCodes == false) || ((Code & Configuration.BlockTerminateMask) == Configuration.BlockTerminateCode) ) { // // Is it a pes header, and is it a pes stream we are interested in // if( Code == 0x00 ) { GotPartialZeroHeader = true; AccumulatedDataSize -= 4; // Wind to before it GotPartialZeroHeaderBytes = 4; StoredZeroHeader = BufferBase + AccumulatedDataSize; continue; } else if( IS_PES_START_CODE_VIDEO(Code) ) { AccumulatedDataSize -= 4; // Wind to before it if( (Code & Configuration.StreamIdentifierMask) == Configuration.StreamIdentifierCode ) { GotPartialPesHeader = true; GotPartialPesHeaderBytes = 4; StoredPesHeader = BufferBase + AccumulatedDataSize; } else { SeekingPesHeader = true; } break; } // // Or is it a padding block // else if( Code == PES_PADDING_START_CODE ) { GotPartialPaddingHeader = true; AccumulatedDataSize -= 4; // Wind to before it GotPartialPaddingHeaderBytes = 4; StoredPaddingHeader = BufferBase + AccumulatedDataSize; break; } // // Or if we are seeking a pes header, dump what we have and try again // else if( SeekingPesHeader ) { AccumulatedDataSize = 0; // Wind to before it break; } // // Or is it a block terminate code // else if( TerminationFlagIsSet) { AccumulatedDataSize -=4; Status = FrameFlush(); if( Status != CollatorNoError ) return Status; BufferBase[0] = 0x00; BufferBase[1] = 0x00; BufferBase[2] = 0x01; BufferBase[3] = Code; AccumulatedDataSize = 4; SeekingPesHeader = false; if ( Configuration.DeferredTerminateFlag && ((Code & Configuration.BlockTerminateMask) == Configuration.BlockTerminateCode)) { TerminationFlagIsSet = true; } else TerminationFlagIsSet = false; } else if ( (Code & Configuration.BlockTerminateMask) == Configuration.BlockTerminateCode) { TerminationFlagIsSet = true; IgnoreCodes = true; } // // Otherwise (and if its a block terminate) accumulate the start code // Status = AccumulateStartCode( PackStartCode(AccumulatedDataSize-4,Code) ); if( Status != CollatorNoError ) { 
DiscardAccumulatedData(); return Status; } else if (Version != 3) // work around for issues with fake start codes in 311 streams { IgnoreCodes = false; } } else { // report (severity_error,"Ignoring Code %x\n",Code); continue; } } } return CollatorNoError; }
project-magpie/tdt-driver
player2_179/player/collator/collator_pes_video_divx.cpp
C++
gpl-2.0
15,639
/************************************************************** TigrStringMap.H bmajoros@tigr.org 1/1/2003 TIGR++ : C++ class template library for bioinformatics Copyright (c) 2003, The Institute for Genomic Research (TIGR), Rockville, Maryland, U.S.A. All rights reserved. ***************************************************************/ #ifndef INCL_TigrStringMap_H #define INCL_TigrStringMap_H #include <list> #include "TigrString.H" using namespace std; /***************************************************************** Some primes to use for table sizes: tens: 13 19 23 31 41 53 61 71 83 89 hundreds: 101 199 293 401 499 601 701 797 887 thousands: 997 1097 1201 1301 1399 1499 1601 1699 1801 1901 1999 2099 2179 2297 2399 2477 2593 2699 2801 2897 3001 3089 3191 3301 3391 3499 3593 3701 3797 3889 4001 4099 4201 4297 4397 4493 4597 4691 4801 4889 4999 5101 5197 5297 5399 5501 5591 5701 5801 5897 5987 6101 6199 6301 6397 6491 6599 6701 6793 6899 7001 7079 7193 7297 7393 7499 7591 7699 7793 7901 7993 8101 8191 8297 8389 8501 8599 8699 8783 8893 9001 9091 9199 9293 9397 9497 9601 9697 9791 9901 ten-thousands: 9973 10993 11987 12983 13999 14983 15991 16993 17989 18979 19997 20983 21997 22993 23993 24989 25999 26993 27997 28979 29989 30983 31991 32999 33997 34981 35999 36997 37997 38993 39989 40993 41999 42989 43997 44987 45989 46997 47981 48991 49999 50993 51991 52999 53993 54983 55997 56999 57991 58997 59999 60961 61991 62989 63997 64997 65993 66977 67993 68993 69997 70999 71999 72997 73999 74959 75997 76991 77999 78989 79999 80989 81973 82997 83987 84991 85999 86993 87991 88997 89989 90997 91997 92993 93997 94999 95989 96997 97987 98999 hundred-thousands: 99961 199999 299993 399989 499979 599999 700001 799999 900001 millions: 999959 1999957 *****************************************************************/ /***************************************************************** */ template<class T> struct StringMapElem { int len; char *first; T second; StringMapElem(const char 
*,int len,const T &); StringMapElem(const char *,int len); StringMapElem(const StringMapElem<T> &); virtual ~StringMapElem(); }; /***************************************************************** */ template<class T> class StringMapIterator { public: typedef list<StringMapElem<T>*> Bucket; typedef StringMapElem<T> ElemType; StringMapIterator(int index,int tableSize,Bucket *array); inline bool operator!=(const StringMapIterator<T> &); bool operator==(const StringMapIterator<T> &); StringMapIterator<T> &operator++(int); StringMapIterator<T> &operator++(); StringMapElem<T> &operator*(); private: int index, tableSize; typename Bucket::iterator cur, end; Bucket *array; void findNonemptyBucket(); }; /***************************************************************** */ template<class T> class TigrStringMap { public: typedef StringMapElem<T> ElemType; typedef StringMapIterator<T> iterator; typedef StringMapIterator<T> const_iterator; TigrStringMap(int tableSize); TigrStringMap(const TigrStringMap<T> &); virtual ~TigrStringMap(); T &lookup(const char *,int index,int len); T &lookup(const char *,int len); bool isDefined(const char *,int index,int len); bool isDefined(const char *,int len); int size(); iterator begin(); iterator end(); iterator begin() const; iterator end() const; void clear(); void remove(const char *,int index,int len); void remove(const char *,int len); TigrStringMap<T> &operator=(const TigrStringMap &); private: typedef list<StringMapElem<T>*> Bucket; int tableSize, numElements; Bucket *array; unsigned hash(const char *,int len,int tableSize); }; /***************************************************************** */ template<class T> StringMapElem<T>::StringMapElem(const StringMapElem<T> &other) : len(other.len), second(other.second) { first=new char[len+1]; memcpy(first,other.first,len+1); } template<class T> StringMapElem<T>::StringMapElem(const char *p,int len) : len(len) { first=new char[len+1]; first[len]='\0'; strncpy(first,p,len); } template<class 
T> StringMapElem<T>::StringMapElem(const char *p,int len,const T &t) : second(t), len(len) { first=new char[len+1]; first[len]='\0'; strncpy(first,p,len); } template<class T> StringMapElem<T>::~StringMapElem() { delete [] first; } template<class T> StringMapElem<T> &StringMapIterator<T>::operator*() { return **cur; } template<class T> StringMapIterator<T> &StringMapIterator<T>::operator++() { if(index<tableSize) { ++cur; if(cur==end) { ++index; findNonemptyBucket(); } } return *this; } template<class T> StringMapIterator<T> &StringMapIterator<T>::operator++(int) { if(index<tableSize) { ++cur; if(cur==end) { ++index; findNonemptyBucket(); } } return *this; } template<class T> StringMapIterator<T>::StringMapIterator(int index,int tableSize,Bucket *array) : array(array), tableSize(tableSize), index(index) { findNonemptyBucket(); } template<class T> bool StringMapIterator<T>::operator!=(const StringMapIterator<T> &i) { return !(*this==i); } template<class T> bool StringMapIterator<T>::operator==(const StringMapIterator<T> &other) { if(other.index!=index) return false; if(index>=tableSize) return true; return cur==other.cur; } template<class T> void StringMapIterator<T>::findNonemptyBucket() { for(; index<tableSize ; ++index) { Bucket &bucket=array[index]; if(!bucket.empty()) { cur=bucket.begin(); end=bucket.end(); return; } } } template<class T> T &TigrStringMap<T>::lookup(const char *p,int len) { return lookup(p,0,len); } template<class T> T &TigrStringMap<T>::lookup(const char *pOrigin,int index,int len) { const char *p=pOrigin+index; unsigned hashValue=hash(p,len,tableSize); Bucket &bucket=array[hashValue]; typename Bucket::iterator end=bucket.end(); typename Bucket::iterator cur=bucket.begin(); for(; cur!=end ; ++cur) { StringMapElem<T> *elem=*cur; if(len!=elem->len) continue; if(!strncmp(p,elem->first,len)) return elem->second; } StringMapElem<T> *newElem=new StringMapElem<T>(p,len); bucket.push_back(newElem); ++numElements; return newElem->second; } 
template<class T> TigrStringMap<T>::TigrStringMap(const TigrStringMap<T> &other) : tableSize(other.tableSize), numElements(other.numElements), array(new Bucket[other.tableSize]) { for(int i=0 ; i<tableSize ; ++i) { Bucket &thisBucket=array[i], &thatBucket=other.array[i]; typename Bucket::iterator cur=thatBucket.begin(), end=thatBucket.end(); for(; cur!=end ; ++cur) thisBucket.push_back(new StringMapElem<T>(**cur)); } } template<class T> TigrStringMap<T> &TigrStringMap<T>::operator=(const TigrStringMap &other) { tableSize=other.tableSize; numElements=other.numElements; array=new Bucket[other.tableSize]; for(int i=0 ; i<tableSize ; ++i) { Bucket &thisBucket=array[i]; const Bucket &thatBucket=other.array[i]; typename Bucket::const_iterator cur=thatBucket.begin(), end=thatBucket.end(); for(; cur!=end ; ++cur) thisBucket.push_back(new StringMapElem<T>(**cur)); } return *this; } template<class T> TigrStringMap<T>::TigrStringMap(int tableSize) : tableSize(tableSize), numElements(0) { array=new Bucket[tableSize]; } template<class T> typename TigrStringMap<T>::iterator TigrStringMap<T>::begin() { return iterator(0,tableSize,array); } template<class T> typename TigrStringMap<T>::iterator TigrStringMap<T>::end() { return iterator(tableSize,tableSize,array); } template<class T> typename TigrStringMap<T>::iterator TigrStringMap<T>::begin() const { return iterator(0,tableSize,const_cast<Bucket*>(array)); } template<class T> typename TigrStringMap<T>::iterator TigrStringMap<T>::end() const { return iterator(tableSize,tableSize,const_cast<Bucket*>(array)); } template<class T> TigrStringMap<T>::~TigrStringMap() { //cout<<"~TigrStringMap"<<endl; for(int i=0 ; i<tableSize ; ++i) { Bucket &bucket=array[i]; typename Bucket::iterator cur=bucket.begin(), end=bucket.end(); for(; cur!=end ; ++cur) delete *cur; } delete [] array; } template<class T> bool TigrStringMap<T>::isDefined(const char *p,int len) { return isDefined(p,0,len); } template<class T> bool TigrStringMap<T>::isDefined(const 
char *pOrigin,int index,int len) { const char *p=pOrigin+index; unsigned hashValue=hash(p,len,tableSize); Bucket &bucket=array[hashValue]; typename Bucket::iterator cur=bucket.begin(), end=bucket.end(); for(; cur!=end ; ++cur) { StringMapElem<T> *elem=*cur; if(len!=elem->len) continue; if(!strncmp(p,elem->first,len)) return true; } return false; } template<class T> int TigrStringMap<T>::size() { return numElements; } template<class T> unsigned TigrStringMap<T>::hash(const char *s,int length,int tableSize) { int h=0; const char *p=s, *end=s+length; for(; p!=end ; ++p) { h=(h<<4)+*p; int g=h & 0xf000; if(g) h=h^(g>>8); } return labs(h) % tableSize; } template<class T> void TigrStringMap<T>::clear() { for(int i=0 ; i<tableSize ; ++i) { Bucket &bucket=array[i]; typename Bucket::iterator cur=bucket.begin(), end=bucket.end(); for(; cur!=end ; ++cur) delete *cur; bucket.clear(); } numElements=0; } template<class T> void TigrStringMap<T>::remove(const char *p,int len) { remove(p,0,len); } template<class T> void TigrStringMap<T>::remove(const char *pOrigin,int index,int len) { const char *p=pOrigin+index; unsigned hashValue=hash(p,len,tableSize); Bucket &bucket=array[hashValue]; typename Bucket::iterator cur=bucket.begin(), end=bucket.end(); for(; cur!=end ; ++cur) { StringMapElem<T> *elem=*cur; if(len!=elem->len) continue; if(!strncmp(p,elem->first,len)) { bucket.erase(cur); break; } } --numElements; } #endif
bmajoros/UnVeil
tigr++/TigrStringMap.H
C++
gpl-2.0
10,179
<?php /* * Copyright 2014 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ class Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4FindFullHashesRequest extends Google_Collection { protected $collection_key = 'clientStates'; protected $apiClientType = 'Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo'; protected $apiClientDataType = ''; protected $clientType = 'Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo'; protected $clientDataType = ''; public $clientStates; protected $threatInfoType = 'Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ThreatInfo'; protected $threatInfoDataType = ''; /** * @param Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo */ public function setApiClient(Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo $apiClient) { $this->apiClient = $apiClient; } /** * @return Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo */ public function getApiClient() { return $this->apiClient; } /** * @param Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo */ public function setClient(Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo $client) { $this->client = $client; } /** * @return Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ClientInfo */ public function getClient() { return $this->client; } public function setClientStates($clientStates) { $this->clientStates = $clientStates; } public function getClientStates() { 
return $this->clientStates; } /** * @param Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ThreatInfo */ public function setThreatInfo(Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ThreatInfo $threatInfo) { $this->threatInfo = $threatInfo; } /** * @return Google_Service_Safebrowsing_GoogleSecuritySafebrowsingV4ThreatInfo */ public function getThreatInfo() { return $this->threatInfo; } }
palasthotel/grid-wordpress-box-social
vendor/google/apiclient-services/src/Google/Service/Safebrowsing/GoogleSecuritySafebrowsingV4FindFullHashesRequest.php
PHP
gpl-2.0
2,567
# -*- coding: utf-8 -*- # # This file is part of Zenodo. # Copyright (C) 2016 CERN. # # Zenodo is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Zenodo is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Zenodo; if not, write to the # Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """Zenodo Marcxml mapping test.""" from __future__ import absolute_import, print_function from datetime import datetime from invenio_pidstore.models import PersistentIdentifier from invenio_records import Record from zenodo.modules.records.serializers import marcxml_v1 def test_full_record(app, db, full_record): """Test MARC21 serialization of full record.""" # Add embargo date and OAI-PMH set information. full_record['embargo_date'] = '0900-12-31' full_record['_oai'] = { "id": "oai:zenodo.org:1", "sets": ["user-zenodo", "user-ecfunded"] } # Create record and PID. 
record = Record.create(full_record) pid = PersistentIdentifier.create( pid_type='recid', pid_value='12345', object_type='rec', object_uuid=record.id, ) assert record.validate() is None expected = { u'control_number': u'12345', u'date_and_time_of_latest_transaction': ( record.model.updated.strftime("%Y%m%d%H%M%S.0")), u'resource_type': { u'subtype': u'book', u'type': u'publication' }, u'title_statement': { u'title': u'Test title' }, u'publication_distribution_imprint': [ {u'date_of_publication_distribution': u'2014-02-27'}, ], u'main_entry_personal_name': { u'affiliation': u'CERN', u'personal_name': u'Doe, John', u'authority_record_control_number_or_standard_number': [ u'(gnd)170118215', u'(orcid)0000-0002-1694-233X' ] }, u'added_entry_personal_name': [ { u'affiliation': u'CERN', u'personal_name': u'Doe, Jane', u'authority_record_control_number_or_standard_number': [ u'(orcid)0000-0002-1825-0097' ] }, { u'affiliation': u'CERN', u'personal_name': u'Smith, John', }, { u'affiliation': u'CERN', u'personal_name': u'Nowak, Jack', u'authority_record_control_number_or_standard_number': [ u'(gnd)170118215' ] }, { u'affiliation': u'CERN', u'relator_code': [u'oth'], u'personal_name': u'Smith, Other', u'authority_record_control_number_or_standard_number': [ u'(orcid)0000-0002-1825-0097' ] }, { u'personal_name': u'Hansen, Viggo', u'relator_code': [u'oth'], }, { u'affiliation': u'CERN', u'relator_code': [u'dtm'], u'personal_name': u'Kowalski, Manager' }, { u'relator_code': [u'ths'], u'personal_name': u'Smith, Professor' }, ], u'summary': { u'summary': u'Test Description' }, u'index_term_uncontrolled': [ {u'uncontrolled_term': u'kw1'}, {u'uncontrolled_term': u'kw2'}, {u'uncontrolled_term': u'kw3'}, ], u'subject_added_entry_topical_term': [ { u'topical_term_or_geographic_name_entry_element': u'cc-by', u'source_of_heading_or_term': u'opendefinition.org', u'level_of_subject': u'Primary', u'thesaurus': u'Source specified in subfield $2', }, { 
u'topical_term_or_geographic_name_entry_element': u'Astronomy', u'authority_record_control_number_or_standard_number': ( u'(url)http://id.loc.gov/authorities/subjects/sh85009003'), u'level_of_subject': u'Primary', }, ], u'general_note': { u'general_note': u'notes' }, u'information_relating_to_copyright_status': { u'copyright_status': u'open' }, u'terms_governing_use_and_reproduction_note': { u'uniform_resource_identifier': u'https://creativecommons.org/licenses/by/4.0/', u'terms_governing_use_and_reproduction': u'Creative Commons Attribution 4.0' }, u'communities': [ u'zenodo', ], u'funding_information_note': [ {u'grant_number': u'1234', u'text_of_note': u'Grant Title'}, {u'grant_number': u'4321', u'text_of_note': u'Title Grant'} ], u'host_item_entry': [ { u'main_entry_heading': u'10.1234/foo.bar', u'note': u'doi', u'relationship_information': u'cites', }, { 'main_entry_heading': u'1234.4325', 'note': u'arxiv', 'relationship_information': u'isIdenticalTo' }, { u'main_entry_heading': u'1234.4321', u'note': u'arxiv', u'relationship_information': u'cites', }, { 'main_entry_heading': u'1234.4328', 'note': u'arxiv', 'relationship_information': u'references' }, { 'main_entry_heading': u'10.1234/zenodo.4321', 'note': u'doi', 'relationship_information': u'isPartOf' }, { 'main_entry_heading': u'10.1234/zenodo.1234', 'note': u'doi', 'relationship_information': u'hasPart' }, { u'main_entry_heading': u'Staszkowka', u'edition': u'Jol', u'title': u'Bum', u'related_parts': u'1-2', u'international_standard_book_number': u'978-0201633610', }, ], u'other_standard_identifier': [ { u'standard_number_or_code': u'10.1234/foo.bar', u'source_of_number_or_code': u'doi', }, { u'standard_number_or_code': ( u'urn:lsid:ubio.org:namebank:11815'), u'source_of_number_or_code': u'lsid', u'qualifying_information': u'alternateidentifier', }, { u'standard_number_or_code': u'2011ApJS..192...18K', u'source_of_number_or_code': u'ads', u'qualifying_information': u'alternateidentifier', }, { 
u'standard_number_or_code': u'0317-8471', u'source_of_number_or_code': u'issn', u'qualifying_information': u'alternateidentifier', }, { u'standard_number_or_code': u'10.1234/alternate.doi', u'source_of_number_or_code': u'doi', u'qualifying_information': u'alternateidentifier', } ], u'references': [ { u'raw_reference': u'Doe, John et al (2012). Some title. ' 'Zenodo. 10.5281/zenodo.12' }, { u'raw_reference': u'Smith, Jane et al (2012). Some title. ' 'Zenodo. 10.5281/zenodo.34' } ], u'added_entry_meeting_name': [{ u'date_of_meeting': u'23-25 June, 2014', u'meeting_name_or_jurisdiction_name_as_entry_element': u'The 13th Biennial HITRAN Conference', u'number_of_part_section_meeting': u'VI', u'miscellaneous_information': u'HITRAN13', u'name_of_part_section_of_a_work': u'1', u'location_of_meeting': u'Harvard-Smithsonian Center for Astrophysics' }], u'conference_url': 'http://hitran.org/conferences/hitran-13-2014/', u'dissertation_note': { u'name_of_granting_institution': u'I guess important', }, u'journal': { 'issue': '2', 'pages': '20', 'volume': '20', 'title': 'Bam', 'year': '2014', }, u'embargo_date': '0900-12-31', u'language_code': { 'language_code_of_text_sound_track_or_separate_title': 'eng', }, u'_oai': { u'sets': [u'user-zenodo', u'user-ecfunded'], u'id': u'oai:zenodo.org:1' }, u'_files': [ { 'uri': 'https://zenodo.org/record/12345/files/test', 'checksum': 'md5:11111111111111111111111111111111', 'type': 'txt', 'size': 4, }, ], 'leader': { 'base_address_of_data': '00000', 'bibliographic_level': 'monograph_item', 'character_coding_scheme': 'marc-8', 'descriptive_cataloging_form': 'unknown', 'encoding_level': 'unknown', 'indicator_count': 2, 'length_of_the_implementation_defined_portion': 0, 'length_of_the_length_of_field_portion': 4, 'length_of_the_starting_character_position_portion': 5, 'multipart_resource_record_level': 'not_specified_or_not_applicable', 'record_length': '00000', 'record_status': 'new', 'subfield_code_count': 2, 'type_of_control': 
'no_specified_type', 'type_of_record': 'language_material', 'undefined': 0, }, } # Dump MARC21 JSON structure and compare against expected JSON. preprocessed_record = marcxml_v1.preprocess_record(record=record, pid=pid) data = marcxml_v1.schema_class().dump(preprocessed_record).data assert expected == data # Assert that we can output MARCXML. assert marcxml_v1.serialize(record=record, pid=pid) def test_minimal_record(app, db, minimal_record): """Test minimal record.""" # Create record and pid. record = Record.create(minimal_record) record.model.updated = datetime.utcnow() pid = PersistentIdentifier.create( pid_type='recid', pid_value='123', object_type='rec', object_uuid=record.id) assert record.validate() is None expected = { u'date_and_time_of_latest_transaction': ( record.model.updated.strftime("%Y%m%d%H%M%S.0")), u'publication_distribution_imprint': [{ 'date_of_publication_distribution': record['publication_date'] }], u'control_number': '123', u'other_standard_identifier': [ { 'source_of_number_or_code': u'doi', 'standard_number_or_code': u'10.5072/zenodo.123' } ], u'information_relating_to_copyright_status': { 'copyright_status': 'open' }, u'summary': { 'summary': 'My description' }, u'main_entry_personal_name': { 'personal_name': 'Test' }, u'resource_type': { 'type': 'software' }, u'title_statement': { 'title': 'Test' }, u'leader': { 'base_address_of_data': '00000', 'bibliographic_level': 'monograph_item', 'character_coding_scheme': 'marc-8', 'descriptive_cataloging_form': 'unknown', 'encoding_level': 'unknown', 'indicator_count': 2, 'length_of_the_implementation_defined_portion': 0, 'length_of_the_length_of_field_portion': 4, 'length_of_the_starting_character_position_portion': 5, 'multipart_resource_record_level': 'not_specified_or_not_applicable', 'record_length': '00000', 'record_status': 'new', 'subfield_code_count': 2, 'type_of_control': 'no_specified_type', 'type_of_record': 'computer_file', 'undefined': 0, }, } data = 
marcxml_v1.schema_class().dump(marcxml_v1.preprocess_record( pid=pid, record=record)).data assert expected == data marcxml_v1.serialize(pid=pid, record=record) def assert_array(a1, a2): """Check array.""" for i in range(0, len(a1)): if isinstance(a1[i], dict): assert_dict(a1[i], a2[i]) elif isinstance(a1[i], list) or isinstance(a1[i], tuple): assert_array(a1[i], a2[i]) else: assert a1[i] in a2 assert len(a1) == len(a2) def assert_dict(a1, a2): """Check dict.""" for (k, v) in a1.items(): assert k in a2 if isinstance(v, dict): assert_dict(v, a2[k]) elif isinstance(v, list) or isinstance(v, tuple): assert_array(v, a2[k]) else: assert a2[k] == v assert len(a2) == len(a1)
slint/zenodo
tests/unit/records/test_schemas_marcxml.py
Python
gpl-2.0
13,950
<?php /** * Contact Form 7 Plugin * @since 0.1 * @version 1.0 */ if ( defined( 'myCRED_VERSION' ) ) { /** * Register Hook * @since 0.1 * @version 1.0 */ add_filter( 'mycred_setup_hooks', 'contact_form_seven_myCRED_Hook' ); function contact_form_seven_myCRED_Hook( $installed ) { $installed['contact_form7'] = array( 'title' => __( 'Contact Form 7 Form Submissions', 'mycred' ), 'description' => __( 'Awards %_plural% for successful form submissions (by logged in users).', 'mycred' ), 'callback' => array( 'myCRED_Contact_Form7' ) ); return $installed; } /** * Contact Form 7 Hook * @since 0.1 * @version 1.0 */ if ( !class_exists( 'myCRED_Contact_Form7' ) && class_exists( 'myCRED_Hook' ) ) { class myCRED_Contact_Form7 extends myCRED_Hook { /** * Construct */ function __construct( $hook_prefs ) { parent::__construct( array( 'id' => 'contact_form7', 'defaults' => '' ), $hook_prefs ); } /** * Run * @since 0.1 * @version 1.0 */ public function run() { add_action( 'wpcf7_mail_sent', array( $this, 'form_submission' ) ); } /** * Get Forms * Queries all Contact Form 7 forms. 
* @uses WP_Query() * @since 0.1 * @version 1.1 */ public function get_forms() { $forms = new WP_Query( array( 'post_type' => 'wpcf7_contact_form', 'post_status' => 'any', 'posts_per_page' => '-1', 'orderby' => 'ID', 'order' => 'ASC' ) ); $result = array(); if ( $forms->have_posts() ) { while ( $forms->have_posts() ) : $forms->the_post(); $result[get_the_ID()] = get_the_title(); endwhile; } wp_reset_postdata(); return $result; } /** * Successful Form Submission * @since 0.1 * @version 1.0 */ public function form_submission( $cf7_form ) { // Login is required if ( !is_user_logged_in() ) return; $form_id = $cf7_form->id; if ( isset( $this->prefs[$form_id] ) && $this->prefs[$form_id]['creds'] != 0 ) { $this->core->add_creds( 'contact_form_submission', get_current_user_id(), $this->prefs[$form_id]['creds'], $this->prefs[$form_id]['log'], $form_id, array( 'ref_type' => 'post' ) ); } } /** * Preferences for Commenting Hook * @since 0.1 * @version 1.0.1 */ public function preferences() { $prefs = $this->prefs; $forms = $this->get_forms(); // No forms found if ( empty( $forms ) ) { echo '<p>' . __( 'No forms found.', 'mycred' ) . 
'</p>'; return; } // Loop though prefs to make sure we always have a default settings (happens when a new form has been created) foreach ( $forms as $form_id => $form_title ) { if ( !isset( $prefs[$form_id] ) ) { $prefs[$form_id] = array( 'creds' => 1, 'log' => '' ); } } // Set pref if empty if ( empty( $prefs ) ) $this->prefs = $prefs; // Loop for settings foreach ( $forms as $form_id => $form_title ) { ?> <!-- Creds for --> <label for="<?php echo $this->field_id( array( $form_id, 'creds' ) ); ?>" class="subheader"><?php echo $form_title; ?></label> <ol> <li> <div class="h2"><input type="text" name="<?php echo $this->field_name( array( $form_id, 'creds' ) ); ?>" id="<?php echo $this->field_id( array( $form_id, 'creds' ) ); ?>" value="<?php echo $this->core->number( $prefs[$form_id]['creds'] ); ?>" size="8" /></div> </li> <li class="empty">&nbsp;</li> <li> <label for="<?php echo $this->field_id( array( $form_id, 'log' ) ); ?>"><?php _e( 'Log template', 'mycred' ); ?></label> <div class="h2"><input type="text" name="<?php echo $this->field_name( array( $form_id, 'log' ) ); ?>" id="<?php echo $this->field_id( array( $form_id, 'log' ) ); ?>" value="<?php echo $prefs[$form_id]['log']; ?>" class="long" /></div> <span class="description"><?php _e( 'Available template tags: General, Post', 'mycred' ); ?></span> </li> </ol> <?php } unset( $this ); } } } } ?>
crowdcreative/cidade.vc
wp-content/plugins/mycred/plugins/mycred-hook-contact-form7.php
PHP
gpl-2.0
4,148
<?php # MantisBT - a php based bugtracking system # MantisBT is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # MantisBT is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with MantisBT. If not, see <http://www.gnu.org/licenses/>. /** * @package CoreAPI * @subpackage ProjaxAPI * @copyright Copyright (C) 2000 - 2002 Kenzaburo Ito - kenito@300baud.org * @copyright Copyright (C) 2002 - 2013 MantisBT Team - mantisbt-dev@lists.sourceforge.net * @link http://www.mantisbt.org */ /** * requires projax.php */ require_once( 'projax' . DIRECTORY_SEPARATOR . 'projax.php' ); # enables the projax library for this page. $g_enable_projax = true; $g_projax = new Projax(); # Outputs an auto-complete field to the HTML form. The supported attribute keys in the attributes array are: # class, size, maxlength, value, and tabindex. function projax_autocomplete( $p_entrypoint, $p_field_name, $p_attributes_array = null ) { global $g_projax; static $s_projax_style_done = false; if ( ON == config_get( 'use_javascript' ) ) { echo $g_projax->text_field_with_auto_complete( $p_field_name, $p_attributes_array, $s_projax_style_done ? array( 'url' => 'xmlhttprequest.php?entrypoint=' . $p_entrypoint, 'skip_style' => '1' ) : array( 'url' => 'xmlhttprequest.php?entrypoint=' . $p_entrypoint ) ); $s_projax_style_done = true; } else { $t_tabindex = isset( $p_attributes_array['tabindex'] ) ? ( ' tabindex="' . $p_attributes_array['tabindex'] . '"' ) : ''; $t_maxlength = isset( $p_attributes_array['maxlength'] ) ?( ' maxlength="' . $p_attributes_array['maxlength'] . 
'"' ) : ''; echo '<input id="'.$p_field_name.'" name="'.$p_field_name.'"'. $t_tabindex . $t_maxlength . ' size="'.(isset($p_attributes_array['size'])?$p_attributes_array['size']:30).'" type="text" value="'.(isset($p_attributes_array['value'])?$p_attributes_array['value']:'').'" '.(isset($p_attributes_array['class'])?'class = "'.$p_attributes_array['class'].'" ':'').'/>'; } } # Filters the provided array of strings and only returns the ones that start with $p_prefix. # The comparison is not case sensitive. # Returns the array of the filtered strings, or an empty array. If the input array has non-unique # entries, then the output one may contain duplicates. function projax_array_filter_by_prefix( $p_array, $p_prefix ) { $t_matches = array(); foreach( $p_array as $t_entry ) { if( utf8_strtolower( utf8_substr( $t_entry, 0, utf8_strlen( $p_prefix ) ) ) == utf8_strtolower( $p_prefix ) ) { $t_matches[] = $t_entry; } } return $t_matches; } # Serializes the provided array of strings into the format expected by the auto-complete library. function projax_array_serialize_for_autocomplete( $p_array ) { $t_matches = '<ul>'; foreach( $p_array as $t_entry ) { $t_matches .= "<li>$t_entry</li>"; } $t_matches .= '</ul>'; return $t_matches; }
dauclem/Online-Bug-Tracker
private/bt/mantisbt/mantisbt-1.2.15/core/projax_api.php
PHP
gpl-2.0
3,293
package tp.pr2.testprofesor;

import static org.junit.Assert.*;

import org.junit.Before;
import org.junit.Test;

import tp.pr2.Player;

/**
 * Unit tests for {@link Player}: default state, inventory add/remove/lookup,
 * point and health accounting, death detection, and inventory display.
 *
 * <p>Uses {@code MockItem} (a test double declared elsewhere in this package)
 * so the tests do not depend on a concrete item implementation.
 */
public class PlayerTest {

	// Item double used across tests, built from itemId/itemDescription in setUp().
	private MockItem itemTest;
	private String itemId;
	private String itemDescription;
	// Fresh Player instance per test (re-created in setUp()).
	private Player playerTest;

	/** Creates a fresh player and a mock item before every test. */
	@Before
	public void setUp() throws Exception {
		itemId = "testId";
		itemDescription = "no description";
		itemTest = new MockItem(itemId, itemDescription);
		playerTest = new Player();
	}

	/** A new player starts with 100 health points and 0 score. */
	@Test
	public void testDefaultValues() {
		assertTrue("ERROR: Player starts with 100 health points", 100 == playerTest.getHealth());
		assertTrue("ERROR: Player starts with a score of 0 points", 0 == playerTest.getPoints());
	}

	/** Looking up an item in an empty inventory must return null. */
	@Test
	public void testGetItem() {
		assertNull("ERROR: When the inventory does not contain an item, getItem returns null", playerTest.getItem(itemId));
	}

	/**
	 * addItem accepts a new item once; re-adding the same item (even with an
	 * id differing only in case) must be rejected.
	 */
	@Test
	public void testAddItem() {
		assertTrue("ERROR: addItem adds a new an valid item but it returns false", playerTest.addItem(itemTest));
		assertFalse("ERROR: addItem tries to add the same item again but it returns true", playerTest.addItem(itemTest));
		// Ids are compared case-insensitively, so an upper-cased id is a duplicate.
		assertFalse("ERROR: addItem tries to add the same item again (the id is non case-sensitive) but it returns true", playerTest.addItem(new MockItem(itemId.toUpperCase(), itemDescription)));
	}

	/**
	 * removeItem fails on an empty inventory, succeeds after an add, and a
	 * removed item can be added again.
	 */
	@Test
	public void testRemoveItem() {
		assertFalse("ERROR: removeItem tries to remove an item from an empty inventory but it returns true", playerTest.removeItem(itemId));

		if (playerTest.addItem(itemTest)) {
			assertTrue("ERROR: removeItem tries to remove an item previously added but it returns false", playerTest.removeItem(itemId));
			assertTrue("ERROR: removeItem does not work properly. Test tries to add an item" + "previously removed but it returns false", playerTest.addItem(itemTest));
		} else
			// Guard: this test is meaningless if addItem itself is broken.
			fail("ERROR: addItem is not working properly. Check addItem method before executing this test again");
	}

	/** addPoints must handle both positive and negative increments. */
	@Test
	public void testAddPoints() {
		int points = playerTest.getPoints();
		int inc = 5;
		playerTest.addPoints(inc);
		assertEquals("ERROR: addPoints is not adding correctly positive points", points + inc, playerTest.getPoints());

		points = playerTest.getPoints();
		inc = -5;
		playerTest.addPoints(inc);
		assertEquals("ERROR: addPoints is not adding correctly negative points", points + inc, playerTest.getPoints());
	}

	/** addHealth must handle both positive and negative increments. */
	@Test
	public void testAddHealth() {
		int health = playerTest.getHealth();
		int inc = 5;
		playerTest.addHealth(inc);
		assertEquals("ERROR: addHealth is not adding correctly positive health", health + inc, playerTest.getHealth());

		health = playerTest.getHealth();
		inc = -5;
		playerTest.addHealth(inc);
		assertEquals("ERROR: addHealth is not adding correctly negative health", health + inc, playerTest.getHealth());
	}

	/**
	 * dead() is false while health is positive, and true once health is
	 * reduced to exactly zero or below zero.
	 */
	@Test
	public void testDead() {
		assertFalse("ERROR: Player health is " + playerTest.getHealth() + " but dead method returns true", playerTest.dead());

		// Remove exactly the player's health: should be dead at zero.
		int health = playerTest.getHealth();
		playerTest.addHealth(-health);
		assertTrue("ERROR: Player should be dead after removing exactly her health but dead method returns false", playerTest.dead());

		// Remove more than the player's health: should also be dead.
		playerTest = new Player();
		health = playerTest.getHealth();
		health += 10;
		playerTest.addHealth(-health);
		assertTrue("ERROR: Player should be dead after removing more than her health but dead method returns false", playerTest.dead());
	}

	/**
	 * showItems reports an empty inventory ("You are poor") and lists item
	 * descriptions once items are added.
	 */
	@Test
	public void testShowItems() {
		assertTrue("ERROR: Player inventory is empty but showItems does not show the correct message", playerTest.showItems().contains("You are poor"));
		playerTest.addItem(itemTest);
		assertTrue("ERROR: Player inventory contains an items but showItems does not show the item description", playerTest.showItems().contains(itemTest.toString()));
	}

//	@Test
//	public void testLooseLive() {
//		int health = playerTest.getHealth();
//		playerTest.looseLive();
//		assertEquals("ERROR: looseLive must remove 5 health points", health-5, playerTest.getHealth());
//	}
}
cartiago92/Practicas-FDI
Segundo Curso/Tecnologia de la Programación/Pr2/testProfesor/tp/pr2/testprofesor/PlayerTest.java
Java
gpl-2.0
3,992
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { environment } from '../../environments/environment';
import { AuthService } from '../../services/auth.service';
import { AuthInfo } from '../../types/api-output';

/**
 * Base skeleton for student pages.
 *
 * Resolves the logged-in user's identity and roles on init; if no user is
 * logged in, redirects the browser to the backend's student login URL.
 */
@Component({
  selector: 'tm-student-page',
  templateUrl: './student-page.component.html',
})
export class StudentPageComponent implements OnInit {

  // Display id of the logged-in user; suffixed with ' (M)' when masquerading.
  user: string = '';
  institute?: string = '';
  // Role flags resolved from the auth response; all false until fetched.
  isInstructor: boolean = false;
  isStudent: boolean = false;
  isAdmin: boolean = false;
  isMaintainer: boolean = false;

  // Navigation entries rendered by the page template.
  navItems: any[] = [
    {
      url: '/web/student',
      display: 'Home',
    },
    {
      url: '/web/student/profile',
      display: 'Profile',
    },
    {
      url: '/web/student/help',
      display: 'Help',
    },
  ];
  // True while the auth request is in flight (template can show a spinner).
  isFetchingAuthDetails: boolean = false;

  private backendUrl: string = environment.backendUrl;

  constructor(private route: ActivatedRoute, private authService: AuthService) {}

  ngOnInit(): void {
    this.isFetchingAuthDetails = true;
    // Re-resolve auth whenever the query params change (e.g. ?user= masquerade).
    this.route.queryParams.subscribe((queryParams: any) => {
      this.authService.getAuthUser(queryParams.user).subscribe((res: AuthInfo) => {
        if (res.user) {
          this.user = res.user.id;
          if (res.masquerade) {
            // Mark the displayed id so a masquerading admin can tell at a glance.
            this.user += ' (M)';
          }
          this.institute = res.institute;
          this.isInstructor = res.user.isInstructor;
          this.isStudent = res.user.isStudent;
          this.isAdmin = res.user.isAdmin;
          this.isMaintainer = res.user.isMaintainer;
        } else {
          // Not logged in: hand off to the backend's student login flow.
          window.location.href = `${this.backendUrl}${res.studentLoginUrl}`;
        }
        this.isFetchingAuthDetails = false;
      }, () => {
        // Auth lookup failed: clear all role flags rather than leaving stale state.
        this.isInstructor = false;
        this.isStudent = false;
        this.isAdmin = false;
        this.isMaintainer = false;
        this.isFetchingAuthDetails = false;
      });
    });
  }
}
TEAMMATES/teammates
src/web/app/pages-student/student-page.component.ts
TypeScript
gpl-2.0
1,999
/* ScummVM - Graphic Adventure Engine
 *
 * ScummVM is the legal property of its developers, whose names
 * are too numerous to list here. Please refer to the COPYRIGHT
 * file distributed with this source distribution.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 */

#include "asylum/system/speech.h"

#include "asylum/resources/actor.h"
#include "asylum/resources/worldstats.h"

#include "asylum/system/text.h"

#include "asylum/views/scene.h"

#include "asylum/asylum.h"
#include "asylum/staticres.h"

namespace Asylum {

// Speech playback: pairs a sound resource with its subtitle text resource
// and drives subtitle rendering while the sound plays.
Speech::Speech(AsylumEngine *engine): _vm(engine), _textData(0), _textDataPos(0) {
	_tick            = _vm->getTick();
	_soundResourceId = kResourceNone;
	_textResourceId  = kResourceNone;
}

Speech::~Speech() {
	// Text resource data is disposed as part of the resource manager
	_textData = 0;
	_textDataPos = 0;

	// Zero-out passed pointers
	_vm = NULL;
}

// Starts playing the given sound/text resource pair. Any instance of the
// same sound already playing is stopped first. Returns the sound resource id.
ResourceId Speech::play(ResourceId soundResourceId, ResourceId textResourceId) {
	if (soundResourceId)
		if (getSound()->isPlaying(soundResourceId))
			getSound()->stopAll(soundResourceId);

	_soundResourceId = soundResourceId;
	_textResourceId = textResourceId;

	prepareSpeech();

	return soundResourceId;
}

// Plays a speech chosen by index from the per-actor-type speech tables
// (speechIndex / speechIndexRandom, declared in staticres).
ResourceId Speech::playIndexed(int32 index) {
	int processedIndex;

	if (getWorld()->actorType || index != -1) {
		// Base index plus a random offset, both looked up per actor type
		// (tables are laid out in groups of 5 entries per actor type).
		processedIndex = (int)speechIndex[index + 5 * getWorld()->actorType] + (int)rnd(speechIndexRandom[index + 5 * getWorld()->actorType]);
	} else {
		// actorType == 0 and index == -1: pick one of a few stock lines.
		// NOTE(review): case 3 assigns the incoming index (-1 here) -- presumably
		// matching original-engine behavior; confirm against disassembly.
		switch(_vm->getRandom(3)) {
		default:
		case 0:
			processedIndex = 23;
			break;

		case 1:
			processedIndex = 400;
			break;

		case 2:
			processedIndex = 401;
			break;

		case 3:
			processedIndex = index;
			break;
		}

		// NOTE(review): indices >= 259 are shifted down by 9 -- presumably a gap
		// in the resource numbering; same adjustment appears in playPlayer().
		if (processedIndex >= 259)
			processedIndex -=9;
	}

	// Map the processed index to the sound/text resource pair for this actor.
	switch (getWorld()->actorType) {
	default:
		break;

	case kActorMax:
		return play(MAKE_RESOURCE(kResourcePackSpeech, processedIndex), MAKE_RESOURCE(kResourcePackText, processedIndex + 83));

	case kActorSarah:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, processedIndex + 1927), MAKE_RESOURCE(kResourcePackText, processedIndex + 586));

	case kActorCyclops:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, processedIndex + 2084), MAKE_RESOURCE(kResourcePackText, processedIndex + 743));

	case kActorAztec:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, processedIndex + 2234), MAKE_RESOURCE(kResourcePackText, processedIndex + 893));
	}

	return kResourceNone;
}

// Plays a scene speech line. Each type selects a fixed base offset into the
// shared-sound (or speech) pack and the matching text pack; unknown types
// play nothing (kResourceNone/kResourceNone).
ResourceId Speech::playScene(int32 type, int32 index) {
	switch (type) {
	default:
		play(kResourceNone, kResourceNone);
		break;

	case 0:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2363), MAKE_RESOURCE(kResourcePackText, index + 1022));

	case 1:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2366), MAKE_RESOURCE(kResourcePackText, index + 1025));

	case 2:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2371), MAKE_RESOURCE(kResourcePackText, index + 1030));

	case 3:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2398), MAKE_RESOURCE(kResourcePackText, index + 1057));

	case 4:
		return play(MAKE_RESOURCE(kResourcePackSpeech, index + 503), MAKE_RESOURCE(kResourcePackText, index + 1060));

	case 5:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2401), MAKE_RESOURCE(kResourcePackText, index + 1068));

	case 6:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2409), MAKE_RESOURCE(kResourcePackText, index + 1076));

	case 7:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2415), MAKE_RESOURCE(kResourcePackText, index + 1082));

	case 8:
		return play(MAKE_RESOURCE(kResourcePackSpeech, index + 511), MAKE_RESOURCE(kResourcePackText, index + 1084));

	case 9:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2417), MAKE_RESOURCE(kResourcePackText, index + 1088));

	case 10:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2417), MAKE_RESOURCE(kResourcePackText, index + 1093));

	case 11:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2424), MAKE_RESOURCE(kResourcePackText, index + 1100));

	case 12:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2424), MAKE_RESOURCE(kResourcePackText, index + 1102));

	case 13:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2430), MAKE_RESOURCE(kResourcePackText, index + 1108));

	case 14:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2432), MAKE_RESOURCE(kResourcePackText, index + 1110));

	case 15:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2434), MAKE_RESOURCE(kResourcePackText, index + 1112));

	case 16:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2435), MAKE_RESOURCE(kResourcePackText, index + 1113));

	case 17:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2436), MAKE_RESOURCE(kResourcePackText, index + 1114));

	case 18:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2438), MAKE_RESOURCE(kResourcePackText, index + 1116));

	case 19:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2439), MAKE_RESOURCE(kResourcePackText, index + 1117));
	}

	return kResourceNone;
}

// Plays a player-voice line for the current actor type. Offsets mirror the
// per-actor-type mapping used in playIndexed().
ResourceId Speech::playPlayer(int32 index) {
	switch (getWorld()->actorType) {
	default:
		break;

	case kActorMax: {
		int32 soundResourceIndex = index;
		int32 textResourceIndex = index;

		// Same >= 259 index gap adjustment as in playIndexed().
		if (index >= 259) {
			soundResourceIndex -= 9;
			textResourceIndex -= 9;
		}

		ResourceId soundResourceId = MAKE_RESOURCE(kResourcePackSpeech, soundResourceIndex);
		return play(soundResourceId, MAKE_RESOURCE(kResourcePackText, textResourceIndex + 83));
		}

	case kActorSarah:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 1927), MAKE_RESOURCE(kResourcePackText, index + 586));

	case kActorCyclops:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2084), MAKE_RESOURCE(kResourcePackText, index + 743));

	case kActorAztec:
		return play(MAKE_RESOURCE(kResourcePackSharedSound, index + 2234), MAKE_RESOURCE(kResourcePackText, index + 893));
	}

	return kResourceNone;
}

// Clears the current sound/text resource pair (stops any new speech setup).
void Speech::resetResourceIds() {
	_soundResourceId = kResourceNone;
	_textResourceId = kResourceNone;
}

// Clears the subtitle text pointers (the underlying data is owned by the
// resource manager and is not freed here).
void Speech::resetTextData() {
	_textData = NULL;
	_textDataPos = NULL;
}

//////////////////////////////////////////////////////////////////////////
// Private methods
//////////////////////////////////////////////////////////////////////////

// Kicks off sound playback (via process()) when needed, and draws subtitles
// near the top or bottom of the screen depending on the actor's position.
void Speech::prepareSpeech() {
	int32 startTick = _vm->getTick();

	if (_soundResourceId) {
		// (Re)process when the sound is not playing yet, or the scheduled tick passed.
		if (!getSound()->isPlaying(_soundResourceId) || (_tick && startTick >= _tick))
			process();

		if (Config.showEncounterSubtitles) {
			Common::Point point;
			Actor *actor = getScene()->getActor();
			actor->adjustCoordinates(&point);

			// Draw subtitles opposite the actor: top of screen if the actor is
			// in the lower half, bottom otherwise.
			int16 posY = (point.y >= 240) ? 40 : 320;

			getText()->draw(_textDataPos, getWorld()->font3, posY);
			getText()->draw(_textData, getWorld()->font1, posY);
		}
	}
}

// Parses the text resource for the current line and starts the sound.
// The text's leading byte selects the font/pointer layout:
//   - byte value 1 at end-2: no displayable text, clear everything
//   - '{' : text payload starts 3 bytes in, rendered with font1
//   - otherwise (optionally prefixed with '/'): rendered with font3
void Speech::process() {
	_tick = 0;

	char *txt = getText()->get(_textResourceId);

	if (*(txt + strlen((const char *)txt) - 2) == 1) {
		_textResourceId = kResourceNone;
		_textData = 0;
		_textDataPos = 0;
	} else if (*txt == '{') {
		_textData = txt + 3;
		_textDataPos = 0;

		getText()->loadFont(getWorld()->font1);
		getSound()->playSound(_soundResourceId, false, Config.voiceVolume, 0);
	} else {
		_textData = 0;
		_textDataPos = txt;

		// A leading '/' marks a 2-byte prefix to skip before the text proper.
		if (*txt == '/') {
			_textDataPos = txt + 2;
		}

		getText()->loadFont(getWorld()->font3);
		getSound()->playSound(_soundResourceId, false, Config.voiceVolume, 0);
	}
}

} // end of namespace Asylum
alexbevi/scummvm
engines/asylum/system/speech.cpp
C++
gpl-2.0
8,226
// Copyright 2016 Dolphin Emulator Project // Licensed under GPLv2+ // Refer to the license.txt file included. #include "Core/IOS/USB/Bluetooth/BTBase.h" #include <memory> #include <string> #include <vector> #include "Common/CommonPaths.h" #include "Common/CommonTypes.h" #include "Common/File.h" #include "Common/FileUtil.h" #include "Common/Logging/Log.h" #include "Common/SysConf.h" namespace IOS { namespace HLE { void BackUpBTInfoSection(const SysConf* sysconf) { const std::string filename = File::GetUserPath(D_CONFIG_IDX) + DIR_SEP WII_BTDINF_BACKUP; if (File::Exists(filename)) return; File::IOFile backup(filename, "wb"); const SysConf::Entry* btdinf = sysconf->GetEntry("BT.DINF"); if (!btdinf) return; const std::vector<u8>& section = btdinf->bytes; if (!backup.WriteBytes(section.data(), section.size())) ERROR_LOG(IOS_WIIMOTE, "Failed to back up BT.DINF section"); } void RestoreBTInfoSection(SysConf* sysconf) { const std::string filename = File::GetUserPath(D_CONFIG_IDX) + DIR_SEP WII_BTDINF_BACKUP; File::IOFile backup(filename, "rb"); if (!backup) return; auto& section = sysconf->GetOrAddEntry("BT.DINF", SysConf::Entry::Type::BigArray)->bytes; if (!backup.ReadBytes(section.data(), section.size())) { ERROR_LOG(IOS_WIIMOTE, "Failed to read backed up BT.DINF section"); return; } File::Delete(filename); } } // namespace HLE } // namespace IOS
linkmauve/dolphin
Source/Core/Core/IOS/USB/Bluetooth/BTBase.cpp
C++
gpl-2.0
1,428
# Migration: adds the profile-related string columns +username+ and +image+
# to the +users+ table.
class AddMoreColumnsToUsers < ActiveRecord::Migration
  def change
    # Both new columns are plain strings; add them in a fixed order.
    %i(username image).each do |column_name|
      add_column :users, column_name, :string
    end
  end
end
kirkokada/hackathon
db/migrate/20150524135324_add_more_columns_to_users.rb
Ruby
gpl-2.0
158
///////////////////////////////////////////////////////////////////////////////
// For information as to what this class does, see the Javadoc, below.       //
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,       //
// 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines, Joseph //
// Ramsey, and Clark Glymour.                                                //
//                                                                           //
// This program is free software; you can redistribute it and/or modify      //
// it under the terms of the GNU General Public License as published by      //
// the Free Software Foundation; either version 2 of the License, or         //
// (at your option) any later version.                                       //
//                                                                           //
// This program is distributed in the hope that it will be useful,           //
// but WITHOUT ANY WARRANTY; without even the implied warranty of            //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the             //
// GNU General Public License for more details.                              //
//                                                                           //
// You should have received a copy of the GNU General Public License         //
// along with this program; if not, write to the Free Software               //
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA //
///////////////////////////////////////////////////////////////////////////////

package edu.cmu.tetrad.search;

import edu.cmu.tetrad.data.DataSet;
import edu.cmu.tetrad.data.IKnowledge;
import edu.cmu.tetrad.data.Knowledge2;
import edu.cmu.tetrad.graph.*;
import edu.cmu.tetrad.util.ChoiceGenerator;
import edu.cmu.tetrad.util.TetradLogger;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Kernelized PC algorithm. This is the same as the PC class, except the nonparametric
 * kernel-based HSIC test is used for independence testing and the parameters for this
 * test can be set directly when Kpc is initialized.
 *
 * @author Robert Tillman.
 */
public class Kpc implements GraphSearch {

    /**
     * The independence test used for the PC search.
     */
    private IndTestHsic independenceTest;

    /**
     * Forbidden and required edges for the search.
     */
    private IKnowledge knowledge = new Knowledge2();

    /**
     * Sepset information accumulated in the search.
     */
    private SepsetMap sepset;

    /**
     * The maximum number of nodes conditioned on in the search. The default is 1000.
     */
    private int depth = 1000;

    /**
     * The graph that's constructed during the search.
     */
    private Graph graph;

    /**
     * Elapsed time of the most recent search.
     */
    private long elapsedTime;

    /**
     * True if cycles are to be aggressively prevented. May be expensive for large graphs
     * (but also useful for large graphs).
     */
    private boolean aggressivelyPreventCycles = false;

    /**
     * The logger to use.
     */
    private final TetradLogger logger = TetradLogger.getInstance();

    /**
     * In an enumeration of triple types, these are the collider triples.
     */
    private Set<Triple> unshieldedColliders;

    /**
     * In an enumeration of triple types, these are the noncollider triples.
     */
    private Set<Triple> unshieldedNoncolliders;

    /**
     * The number of independence tests in the last search.
     */
    private int numIndependenceTests;

    /**
     * The true graph, for purposes of comparison. Temporary.
     */
    private Graph trueGraph;

    /**
     * The number of false dependence judgements from FAS, judging from the true graph, if set. Temporary.
     */
    private int numFalseDependenceJudgements;

    /**
     * The number of dependence judgements from FAS. Temporary.
     */
    private int numDependenceJudgements;

    /**
     * The threshold for rejecting the null
     */
    private double alpha;

    /**
     * Use incomplete Cholesky factorization for Gram matrices
     */
    private double useIncompleteCholesky = 1e-18;

    /**
     * The regularizer for singular matrices
     */
    private double regularizer = .0001;

    /**
     * The number of bootstrap samples to generate during independence testing
     */
    private int perms = 100;

    private boolean verbose = false;

    //=============================CONSTRUCTORS==========================//

    /**
     * Constructs a new PC search using for the given dataset.
     *
     * @param dataset The oracle for conditional independence facts. This does not make a
     *                copy of the independence test, for fear of duplicating the data set!
     */
    public Kpc(DataSet dataset, double alpha) {
        if (dataset == null) {
            throw new NullPointerException();
        }

        this.alpha = alpha;
        this.independenceTest = new IndTestHsic(dataset, alpha);
    }

    //==============================PUBLIC METHODS========================//

    /**
     * @return true iff edges will not be added if they would create cycles.
     */
    public boolean isAggressivelyPreventCycles() {
        return this.aggressivelyPreventCycles;
    }

    /**
     * @param aggressivelyPreventCycles Set to true just in case edges will not be added if they would create cycles.
     */
    public void setAggressivelyPreventCycles(boolean aggressivelyPreventCycles) {
        this.aggressivelyPreventCycles = aggressivelyPreventCycles;
    }

    /**
     * @return the independence test being used in the search.
     */
    public IndependenceTest getIndependenceTest() {
        return independenceTest;
    }

    /**
     * @return the knowledge specification used in the search. Non-null.
     */
    public IKnowledge getKnowledge() {
        return knowledge;
    }

    /**
     * Sets the knowledge specification to be used in the search. May not be null.
     */
    public void setKnowledge(IKnowledge knowledge) {
        if (knowledge == null) {
            throw new NullPointerException();
        }

        this.knowledge = knowledge;
    }

    /**
     * @return the sepset map from the most recent search. Non-null after the first call to <code>search()</code>.
     */
    public SepsetMap getSepset() {
        return sepset;
    }

    /**
     * @return the getModel depth of search--that is, the maximum number of conditioning nodes for any conditional
     * independence checked.
     */
    public int getDepth() {
        return depth;
    }

    /**
     * Sets the depth of the search--that is, the maximum number of conditioning nodes for any conditional independence
     * checked.
     *
     * @param depth The depth of the search. The default is 1000. A value of -1 may be used to indicate that the depth
     *              should be high (1000). A value of Integer.MAX_VALUE may not be used, due to a bug on multi-core
     *              machines.
     */
    public void setDepth(int depth) {
        if (depth < -1) {
            throw new IllegalArgumentException("Depth must be -1 or >= 0.");
        }

        if (depth > 1000) {
            throw new IllegalArgumentException("Depth must be <= 1000.");
        }

        this.depth = depth;
    }

    /**
     * Runs PC starting with a complete graph over all nodes of the given conditional independence test, using the given
     * independence test and knowledge and returns the resultant graph. The returned graph will be a pattern if the
     * independence information is consistent with the hypothesis that there are no latent common causes. It may,
     * however, contain cycles or bidirected edges if this assumption is not born out, either due to the actual presence
     * of latent common causes, or due to statistical errors in conditional independence judgments.
     */
    public Graph search() {
        return search(independenceTest.getVariables());
    }

    /**
     * Runs PC starting with a complete graph over the given list of nodes, using the given independence test and
     * knowledge and returns the resultant graph. The returned graph will be a pattern if the independence information
     * is consistent with the hypothesis that there are no latent common causes. It may, however, contain cycles or
     * bidirected edges if this assumption is not born out, either due to the actual presence of latent common causes,
     * or due to statistical errors in conditional independence judgments.
     * <p>
     * All of the given nodes must be in the domain of the given conditional independence test.
     */
    public Graph search(List<Node> nodes) {
        this.logger.log("info", "Starting kPC algorithm");
        this.logger.log("info", "Independence test = " + getIndependenceTest() + ".");

//        this.logger.log("info", "Variables " + independenceTest.getVariables());

        long startTime = System.currentTimeMillis();

        if (getIndependenceTest() == null) {
            throw new NullPointerException();
        }

        // Typed as List<Node> (was a raw List): getVariables() yields nodes, and
        // the raw type suppressed the compiler's generics checking here.
        List<Node> allNodes = getIndependenceTest().getVariables();
        if (!allNodes.containsAll(nodes)) {
            throw new IllegalArgumentException("All of the given nodes must " +
                    "be in the domain of the independence test provided.");
        }

        // Start from the complete undirected graph, then prune with FAS.
        graph = new EdgeListGraph(nodes);
        graph.fullyConnect(Endpoint.TAIL);

        Fas fas = new Fas(graph, getIndependenceTest());
        fas.setKnowledge(getKnowledge());
        fas.setDepth(getDepth());
        fas.setTrueGraph(trueGraph);
        graph = fas.search();

        this.sepset = fas.getSepsets();
        this.numIndependenceTests = fas.getNumIndependenceTests();
        this.numFalseDependenceJudgements = fas.getNumFalseDependenceJudgments();
        this.numDependenceJudgements = fas.getNumDependenceJudgments();

        enumerateTriples();

        // Orient edges: background knowledge, then colliders, then Meek rules.
        SearchGraphUtils.pcOrientbk(knowledge, graph, nodes);
        SearchGraphUtils.orientCollidersUsingSepsets(sepset, knowledge, graph, verbose);

        MeekRules rules = new MeekRules();
        rules.setAggressivelyPreventCycles(this.aggressivelyPreventCycles);
        rules.setKnowledge(knowledge);
        rules.orientImplied(graph);

        this.logger.log("graph", "\nReturning this graph: " + graph);

        this.elapsedTime = System.currentTimeMillis() - startTime;

        this.logger.log("info", "Elapsed time = " + (elapsedTime) / 1000. + " s");
        this.logger.log("info", "Finishing PC Algorithm.");
        this.logger.flush();

        return graph;
    }

    /**
     * @return the elapsed time of the search, in milliseconds.
     */
    public long getElapsedTime() {
        return elapsedTime;
    }

    /**
     * @return the set of unshielded colliders in the graph returned by <code>search()</code>. Non-null after
     * <code>search</code> is called.
     */
    public Set<Triple> getUnshieldedColliders() {
        return unshieldedColliders;
    }

    /**
     * @return the set of unshielded noncolliders in the graph returned by <code>search()</code>. Non-null after
     * <code>search</code> is called.
     */
    public Set<Triple> getUnshieldedNoncolliders() {
        return unshieldedNoncolliders;
    }

    //===============================ADDED FOR KPC=========================//

    /**
     * Sets the significance level at which independence judgments should be made.
     */
    public void setAlpha(double alpha) {
        if (alpha < 0.0 || alpha > 1.0) {
            throw new IllegalArgumentException("Significance out of range.");
        }

        this.alpha = alpha;
        independenceTest.setAlpha(alpha);
    }

    /**
     * Sets the precision for the Incomplete Cholesky factorization method for approximating Gram matrices. A value <= 0
     * indicates that the Incomplete Cholesky method should not be used and instead use the exact matrices.
     */
    public void setIncompleteCholesky(double precision) {
        this.useIncompleteCholesky = precision;
        independenceTest.setIncompleteCholesky(precision);
    }

    /**
     * Set the number of bootstrap samples to use
     */
    public void setPerms(int perms) {
        this.perms = perms;
        independenceTest.setPerms(perms);
    }

    /**
     * Sets the regularizer
     */
    public void setRegularizer(double regularizer) {
        this.regularizer = regularizer;
        independenceTest.setRegularizer(regularizer);
    }

    /**
     * Gets the getModel significance level.
     */
    public double getAlpha() {
        return this.alpha;
    }

    /**
     * Gets the getModel precision for the Incomplete Cholesky
     */
    public double getPrecision() {
        return this.useIncompleteCholesky;
    }

    /**
     * Gets the getModel number of bootstrap samples used
     */
    public int getPerms() {
        return this.perms;
    }

    /**
     * Gets the getModel regularizer
     */
    public double getRegularizer() {
        return this.regularizer;
    }

    //===============================PRIVATE METHODS=======================//

    /**
     * Classifies every unshielded triple x - y - z in the current graph as a collider
     * or noncollider, based on whether y appears in the recorded sepset of (x, z).
     */
    private void enumerateTriples() {
        this.unshieldedColliders = new HashSet<Triple>();
        this.unshieldedNoncolliders = new HashSet<Triple>();

        for (Node y : graph.getNodes()) {
            List<Node> adj = graph.getAdjacentNodes(y);

            if (adj.size() < 2) {
                continue;
            }

            ChoiceGenerator gen = new ChoiceGenerator(adj.size(), 2);
            int[] choice;

            while ((choice = gen.next()) != null) {
                Node x = adj.get(choice[0]);
                Node z = adj.get(choice[1]);

                List<Node> nodes = sepset.get(x, z);

                // Note that checking adj(x, z) does not suffice when knowledge
                // has been specified.
                if (nodes == null) {
                    continue;
                }

                if (nodes.contains(y)) {
                    getUnshieldedNoncolliders().add(new Triple(x, y, z));
                } else {
                    getUnshieldedColliders().add(new Triple(x, y, z));
                }
            }
        }
    }

    public int getNumIndependenceTests() {
        return numIndependenceTests;
    }

    public void setTrueGraph(Graph trueGraph) {
        this.trueGraph = trueGraph;
    }

    public int getNumFalseDependenceJudgements() {
        return numFalseDependenceJudgements;
    }

    public int getNumDependenceJudgements() {
        return numDependenceJudgements;
    }

    public void setVerbose(boolean verbose) {
        this.verbose = verbose;
    }
}
ajsedgewick/tetrad
tetrad-lib/src/main/java/edu/cmu/tetrad/search/Kpc.java
Java
gpl-2.0
14,785
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated Thu Dec 1 09:58:36 2011 by generateDS.py version 2.7a. # import sys import getopt import re as re_ etree_ = None Verbose_import_ = False (XMLParser_import_none, XMLParser_import_lxml, XMLParser_import_elementtree ) = range(3) XMLParser_import_library = None try: # lxml from lxml import etree as etree_ XMLParser_import_library = XMLParser_import_lxml if Verbose_import_: print("running with lxml.etree") except ImportError: try: # cElementTree from Python 2.5+ # pylint: disable=E0602, E0611 import xml.etree.cElementTree as etree_ XMLParser_import_library = XMLParser_import_elementtree if Verbose_import_: print("running with cElementTree on Python 2.5+") except ImportError: try: # ElementTree from Python 2.5+ # pylint: disable=E0602,E0611 import xml.etree.ElementTree as etree_ XMLParser_import_library = XMLParser_import_elementtree if Verbose_import_: print("running with ElementTree on Python 2.5+") except ImportError: try: # normal cElementTree install import cElementTree as etree_ XMLParser_import_library = XMLParser_import_elementtree if Verbose_import_: print("running with cElementTree") except ImportError: try: # normal ElementTree install import elementtree.ElementTree as etree_ XMLParser_import_library = XMLParser_import_elementtree if Verbose_import_: print("running with ElementTree") except ImportError: raise ImportError("Failed to import ElementTree from any known place") def parsexml_(*args, **kwargs): if (XMLParser_import_library == XMLParser_import_lxml and 'parser' not in kwargs): # Use the lxml ElementTree compatible parser so that, e.g., # we ignore comments. kwargs['parser'] = etree_.ETCompatXMLParser() doc = etree_.parse(*args, **kwargs) return doc # # User methods # # Calls to the methods in these classes are generated by generateDS.py. # You can replace these methods by re-implementing the following class # in a module named generatedssuper.py. 
try: from generatedssuper import GeneratedsSuper except ImportError, exp: class GeneratedsSuper(object): def gds_format_string(self, input_data, input_name=''): return input_data def gds_validate_string(self, input_data, node, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): return '%d' % input_data def gds_validate_integer(self, input_data, node, input_name=''): return input_data def gds_format_integer_list(self, input_data, input_name=''): return '%s' % input_data def gds_validate_integer_list(self, input_data, node, input_name=''): values = input_data.split() for value in values: try: fvalue = float(value) except (TypeError, ValueError), exp: raise_parse_error(node, 'Requires sequence of integers') return input_data def gds_format_float(self, input_data, input_name=''): return '%f' % input_data def gds_validate_float(self, input_data, node, input_name=''): return input_data def gds_format_float_list(self, input_data, input_name=''): return '%s' % input_data def gds_validate_float_list(self, input_data, node, input_name=''): values = input_data.split() for value in values: try: fvalue = float(value) except (TypeError, ValueError), exp: raise_parse_error(node, 'Requires sequence of floats') return input_data def gds_format_double(self, input_data, input_name=''): return '%e' % input_data def gds_validate_double(self, input_data, node, input_name=''): return input_data def gds_format_double_list(self, input_data, input_name=''): return '%s' % input_data def gds_validate_double_list(self, input_data, node, input_name=''): values = input_data.split() for value in values: try: fvalue = float(value) except (TypeError, ValueError), exp: raise_parse_error(node, 'Requires sequence of doubles') return input_data def gds_format_boolean(self, input_data, input_name=''): return '%s' % input_data def gds_validate_boolean(self, input_data, node, input_name=''): return input_data def gds_format_boolean_list(self, input_data, 
input_name=''): return '%s' % input_data def gds_validate_boolean_list(self, input_data, node, input_name=''): values = input_data.split() for value in values: if value not in ('true', '1', 'false', '0', ): raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")') return input_data def gds_str_lower(self, instring): return instring.lower() def get_path_(self, node): path_list = [] self.get_path_list_(node, path_list) path_list.reverse() path = '/'.join(path_list) return path Tag_strip_pattern_ = re_.compile(r'\{.*\}') def get_path_list_(self, node, path_list): if node is None: return tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) if tag: path_list.append(tag) self.get_path_list_(node.getparent(), path_list) def get_class_obj_(self, node, default_class=None): class_obj1 = default_class if 'xsi' in node.nsmap: classname = node.get('{%s}type' % node.nsmap['xsi']) if classname is not None: names = classname.split(':') if len(names) == 2: classname = names[1] class_obj2 = globals().get(classname) if class_obj2 is not None: class_obj1 = class_obj2 return class_obj1 def gds_build_any(self, node, type_name=None): return None # # If you have installed IPython you can uncomment and use the following. # IPython is available from http://ipython.scipy.org/. # ## from IPython.Shell import IPShellEmbed ## args = '' # ipshell = IPShellEmbed(args, ## banner = 'Dropping into IPython', # exit_msg = 'Leaving Interpreter, back to program.') # Then use the following line where and when you want to drop into the # IPython shell: # ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') # # Globals # ExternalEncoding = 'ascii' Tag_pattern_ = re_.compile(r'({.*})?(.*)') String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') # # Support/utility functions. 
#


def showIndent(outfile, level):
    # Emit indentation for the given nesting level.
    # NOTE(review): collapsed source hides the literal's width; generateDS
    # conventionally writes four spaces per level -- confirm against VCS.
    for idx in range(level):
        outfile.write('    ')


def quote_xml(inStr):
    # Escape &, < and > so inStr is safe as XML element content.
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1


def quote_attrib(inStr):
    # Escape and quote inStr for use as an XML attribute value, choosing
    # single or double quotes so the content need not be further escaped.
    s1 = (isinstance(inStr, basestring) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            # Contains both quote kinds: double-quote and escape the '"'.
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1


def quote_python(inStr):
    # Quote inStr as a Python string literal (used by exportLiteral output),
    # picking ', ", ''' or """ depending on embedded quotes/newlines.
    s1 = inStr
    if s1.find("'") == -1:
        if s1.find('\n') == -1:
            return "'%s'" % s1
        else:
            return "'''%s'''" % s1
    else:
        if s1.find('"') != -1:
            s1 = s1.replace('"', '\\"')
        if s1.find('\n') == -1:
            return '"%s"' % s1
        else:
            return '"""%s"""' % s1


def get_all_text_(node):
    # Collect node.text plus the tail text of every direct child.
    if node.text is not None:
        text = node.text
    else:
        text = ''
    for child in node:
        if child.tail is not None:
            text += child.tail
    return text


def find_attr_value_(attr_name, node):
    # Look up an attribute, resolving a "prefix:name" attr_name through the
    # node's namespace map (lxml nsmap) to Clark notation "{uri}name".
    attrs = node.attrib
    attr_parts = attr_name.split(':')
    value = None
    if len(attr_parts) == 1:
        value = attrs.get(attr_name)
    elif len(attr_parts) == 2:
        prefix, name = attr_parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            value = attrs.get('{%s}%s' % (namespace, name, ))
    return value


class GDSParseError(Exception):
    """Raised by raise_parse_error() for any document validation failure."""
    pass


def raise_parse_error(node, msg):
    # Enrich msg with the offending element (and source line when lxml,
    # which exposes Element.sourceline, is the active parser library).
    if XMLParser_import_library == XMLParser_import_lxml:
        msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    else:
        msg = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(msg)


class MixedContainer:
    """Holds one piece of mixed content: raw text, a simple typed value,
    or a nested complex object, tagged with category/content_type codes."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is unused; this is a
        # quirk of the code generator, kept for interface compatibility.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace):
        # Serialize this item as XML according to its category.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:  # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace, name)
    def exportSimple(self, outfile, level, name):
        # Write a simple typed value as <name>value</name>, formatting the
        # value with the %-conversion appropriate to its content_type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
    def exportLiteral(self, outfile, level, name):
        # Emit Python source that would reconstruct this container.
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' %
                (self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' %
                (self.category, self.content_type, self.name, self.value))
        else:  # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s",\n' %
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')


class MemberSpec_(object):
    """Describes one member of a generated class: its name, schema data
    type (possibly a restriction chain, stored as a list), and whether it
    is a container (list-valued)."""
    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        return self.data_type
    def get_data_type(self):
        # A list means a restriction chain; the last entry is the most
        # derived type.  Fall back to xs:string for an empty chain.
        if isinstance(self.data_type, list):
            if len(self.data_type) > 0:
                return self.data_type[-1]
            else:
                return 'xs:string'
        else:
            return self.data_type
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container


def _cast(typ, value):
    # Apply the conversion typ to value; pass through when either is None.
    if typ is None or value is None:
        return value
    return typ(value)

#
# Data representation classes.
#


class testsuites(GeneratedsSuper):
    """Contains an aggregation of testsuite results."""
    subclass = None
    superclass = None
    def __init__(self, testsuite=None):
        if testsuite is None:
            self.testsuite = []
        else:
            self.testsuite = testsuite
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is set, else this class.
        if testsuites.subclass:  # pylint: disable=E1102
            return testsuites.subclass(*args_, **kwargs_)
        else:
            return testsuites(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_testsuite(self):
        return self.testsuite
    def set_testsuite(self, testsuite):
        self.testsuite = testsuite
    def add_testsuite(self, value):
        self.testsuite.append(value)
    def insert_testsuite(self, index, value):
        self.testsuite[index] = value
    def export(self, outfile, level, namespace_='', name_='testsuites', namespacedef_=''):
        # Write this element (and children) as XML to outfile.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='testsuites')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='testsuites'):
        # No attributes defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='testsuites', fromsubclass_=False):
        for testsuite_ in self.testsuite:
            testsuite_.export(outfile, level, namespace_, name_='testsuite')
    def hasContent_(self):
        if (
            self.testsuite
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='testsuites'):
        # Emit Python source that would reconstruct this object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('testsuite=[\n')
        level += 1
        for testsuite_ in self.testsuite:
            showIndent(outfile, level)
            outfile.write('model_.testsuiteType(\n')
            testsuite_.exportLiteral(outfile, level, name_='testsuiteType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an element tree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'testsuite':
            obj_ = testsuiteType.factory()
            obj_.build(child_)
            self.testsuite.append(obj_)
# end class testsuites


class testsuite(GeneratedsSuper):
    """Contains the results of executing a testsuite.

    Schema attribute notes (from the XSD annotations):
      name      - full class name of the test for non-aggregated testsuite
                  documents; class name without the package for aggregated
                  testsuites documents.
      timestamp - when the test was executed; timezone may not be specified.
      hostname  - host on which the tests were executed ('localhost' if the
                  hostname cannot be determined).
      tests     - total number of tests in the suite.
      failures  - tests that failed explicitly (e.g. via an assertEquals).
      errors    - tests with an unanticipated problem (e.g. an unchecked
                  throwable, or a problem in the test implementation).
      time      - time taken (in seconds) to execute the tests in the suite.
    """
    subclass = None
    superclass = None
    def __init__(self, tests=None, errors=None, name=None, timestamp=None,
                 hostname=None, time=None, failures=None, properties=None,
                 testcase=None, system_out=None, system_err=None,
                 extensiontype_=None):
        self.tests = _cast(int, tests)
        self.errors = _cast(int, errors)
        self.name = _cast(None, name)
        self.timestamp = _cast(None, timestamp)
        self.hostname = _cast(None, hostname)
        self.time = _cast(float, time)
        self.failures = _cast(int, failures)
        self.properties = properties
        if testcase is None:
            self.testcase = []
        else:
            self.testcase = testcase
        self.system_out = system_out
        self.system_err = system_err
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        if testsuite.subclass:  # pylint: disable=E1102
            return testsuite.subclass(*args_, **kwargs_)
        else:
            return testsuite(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_properties(self):
        return self.properties
    def set_properties(self, properties):
        self.properties = properties
    def get_testcase(self):
        return self.testcase
    def set_testcase(self, testcase):
        self.testcase = testcase
    def add_testcase(self, value):
        self.testcase.append(value)
    def insert_testcase(self, index, value):
        self.testcase[index] = value
    def get_system_out(self):
        return self.system_out
    def set_system_out(self, system_out):
        self.system_out = system_out
    def get_system_err(self):
        return self.system_err
    def set_system_err(self, system_err):
        self.system_err = system_err
    def get_tests(self):
        return self.tests
    def set_tests(self, tests):
        self.tests = tests
    def get_errors(self):
        return self.errors
    def set_errors(self, errors):
        self.errors = errors
    def get_name(self):
        return self.name
    def set_name(self, name):
        self.name = name
    def get_timestamp(self):
        return self.timestamp
    def set_timestamp(self, timestamp):
        self.timestamp = timestamp
    def validate_ISO8601_DATETIME_PATTERN(self, value):
        # Validate type ISO8601_DATETIME_PATTERN, a restriction on
        # xs:dateTime.  (Generated as a no-op stub.)
        pass
    def get_hostname(self):
        return self.hostname
    def set_hostname(self, hostname):
        self.hostname = hostname
    def get_time(self):
        return self.time
    def set_time(self, time):
        self.time = time
    def get_failures(self):
        return self.failures
    def set_failures(self, failures):
        self.failures = failures
    def get_extensiontype_(self):
        return self.extensiontype_
    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='testsuite', namespacedef_=''):
        # Write this element (and children) as XML to outfile.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='testsuite')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='testsuite'):
        # Each attribute is written at most once (tracked in
        # already_processed, which subclasses share via super calls).
        if self.tests is not None and 'tests' not in already_processed:
            already_processed.append('tests')
            outfile.write(' tests="%s"' % self.gds_format_integer(self.tests, input_name='tests'))
        if self.errors is not None and 'errors' not in already_processed:
            already_processed.append('errors')
            outfile.write(' errors="%s"' % self.gds_format_integer(self.errors, input_name='errors'))
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
        if self.timestamp is not None and 'timestamp' not in already_processed:
            already_processed.append('timestamp')
            outfile.write(' timestamp=%s' % (quote_attrib(self.timestamp), ))
        if self.hostname is not None and 'hostname' not in already_processed:
            already_processed.append('hostname')
            outfile.write(' hostname=%s' % (self.gds_format_string(quote_attrib(self.hostname).encode(ExternalEncoding), input_name='hostname'), ))
        if self.time is not None and 'time' not in already_processed:
            already_processed.append('time')
            outfile.write(' time="%s"' % self.gds_format_float(self.time, input_name='time'))
        if self.failures is not None and 'failures' not in already_processed:
            already_processed.append('failures')
            outfile.write(' failures="%s"' % self.gds_format_integer(self.failures, input_name='failures'))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='testsuite', fromsubclass_=False):
        if self.properties is not None:
            self.properties.export(outfile, level, namespace_, name_='properties', )
        for testcase_ in self.testcase:
            testcase_.export(outfile, level, namespace_, name_='testcase')
        if self.system_out is not None:
            showIndent(outfile, level)
            outfile.write('<%ssystem-out>%s</%ssystem-out>\n' % (namespace_, self.gds_format_string(quote_xml(self.system_out).encode(ExternalEncoding), input_name='system-out'), namespace_))
        if self.system_err is not None:
            showIndent(outfile, level)
            outfile.write('<%ssystem-err>%s</%ssystem-err>\n' % (namespace_, self.gds_format_string(quote_xml(self.system_err).encode(ExternalEncoding), input_name='system-err'), namespace_))
    def hasContent_(self):
        if (
            self.properties is not None or
            self.testcase or
            self.system_out is not None or
            self.system_err is not None
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='testsuite'):
        # Emit Python source that would reconstruct this object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.tests is not None and 'tests' not in already_processed:
            already_processed.append('tests')
            showIndent(outfile, level)
            outfile.write('tests = %d,\n' % (self.tests,))
        if self.errors is not None and 'errors' not in already_processed:
            already_processed.append('errors')
            showIndent(outfile, level)
            outfile.write('errors = %d,\n' % (self.errors,))
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            showIndent(outfile, level)
            outfile.write('name = "%s",\n' % (self.name,))
        if self.timestamp is not None and 'timestamp' not in already_processed:
            already_processed.append('timestamp')
            showIndent(outfile, level)
            outfile.write('timestamp = "%s",\n' % (self.timestamp,))
        if self.hostname is not None and 'hostname' not in already_processed:
            already_processed.append('hostname')
            showIndent(outfile, level)
            outfile.write('hostname = "%s",\n' % (self.hostname,))
        if self.time is not None and 'time' not in already_processed:
            already_processed.append('time')
            showIndent(outfile, level)
            outfile.write('time = %f,\n' % (self.time,))
        if self.failures is not None and 'failures' not in already_processed:
            already_processed.append('failures')
            showIndent(outfile, level)
            outfile.write('failures = %d,\n' % (self.failures,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.properties is not None:
            showIndent(outfile, level)
            outfile.write('properties=model_.propertiesType(\n')
            self.properties.exportLiteral(outfile, level, name_='properties')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('testcase=[\n')
        level += 1
        for testcase_ in self.testcase:
            showIndent(outfile, level)
            outfile.write('model_.testcaseType(\n')
            testcase_.exportLiteral(outfile, level, name_='testcaseType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.system_out is not None:
            showIndent(outfile, level)
            outfile.write('system_out=%s,\n' % quote_python(self.system_out).encode(ExternalEncoding))
        if self.system_err is not None:
            showIndent(outfile, level)
            outfile.write('system_err=%s,\n' % quote_python(self.system_err).encode(ExternalEncoding))
    def build(self, node):
        # Populate this object from an element tree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('tests', node)
        if value is not None and 'tests' not in already_processed:
            already_processed.append('tests')
            try:
                self.tests = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('errors', node)
        if value is not None and 'errors' not in already_processed:
            already_processed.append('errors')
            try:
                self.errors = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.append('name')
            self.name = value
            # Collapse whitespace per the schema's token/Name semantics.
            self.name = ' '.join(self.name.split())
        value = find_attr_value_('timestamp', node)
        if value is not None and 'timestamp' not in already_processed:
            already_processed.append('timestamp')
            self.timestamp = value
            self.validate_ISO8601_DATETIME_PATTERN(self.timestamp)    # validate type ISO8601_DATETIME_PATTERN
        value = find_attr_value_('hostname', node)
        if value is not None and 'hostname' not in already_processed:
            already_processed.append('hostname')
            self.hostname = value
            self.hostname = ' '.join(self.hostname.split())
        value = find_attr_value_('time', node)
        if value is not None and 'time' not in already_processed:
            already_processed.append('time')
            try:
                self.time = float(value)
            # NOTE(review): unlike the integer attributes above, a bad
            # 'time' raises a bare ValueError rather than a GDSParseError
            # (generator quirk, kept as-is).
            except ValueError, exp:
                raise ValueError('Bad float/double attribute (time): %s' % exp)
        value = find_attr_value_('failures', node)
        if value is not None and 'failures' not in already_processed:
            already_processed.append('failures')
            try:
                self.failures = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'properties':
            obj_ = propertiesType.factory()
            obj_.build(child_)
            self.set_properties(obj_)
        elif nodeName_ == 'testcase':
            obj_ = testcaseType.factory()
            obj_.build(child_)
            self.testcase.append(obj_)
        elif nodeName_ == 'system-out':
            system_out_ = child_.text
            system_out_ = self.gds_validate_string(system_out_, node, 'system_out')
            self.system_out = system_out_
        elif nodeName_ == 'system-err':
            system_err_ = child_.text
            system_err_ = self.gds_validate_string(system_err_, node, 'system_err')
            self.system_err = system_err_
# end class testsuite


class system_out(GeneratedsSuper):
    """Data that was written to standard out while the test was executed."""
    subclass = None
    superclass = None
    def __init__(self):
        pass
    def factory(*args_, **kwargs_):
        if system_out.subclass:  # pylint: disable=E1102
            return system_out.subclass(*args_, **kwargs_)
        else:
            return system_out(*args_, **kwargs_)
    factory = staticmethod(factory)
    def export(self, outfile, level, namespace_='', name_='system-out', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='system-out')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='system-out'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='system-out', fromsubclass_=False):
        pass
    def hasContent_(self):
        # `()` is an empty tuple, hence always falsy: this element type
        # defines no content of its own.
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='system-out'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class system_out


class system_err(GeneratedsSuper):
    """Data that was written to standard error while the test was executed."""
    subclass = None
    superclass = None
    def __init__(self):
        pass
    def factory(*args_, **kwargs_):
        if system_err.subclass:  # pylint: disable=E1102
            return system_err.subclass(*args_, **kwargs_)
        else:
            return system_err(*args_, **kwargs_)
    factory = staticmethod(factory)
    def export(self, outfile, level, namespace_='', name_='system-err', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='system-err')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='system-err'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='system-err', fromsubclass_=False):
        pass
    def hasContent_(self):
        # Empty tuple condition: always False (no content defined).
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='system-err'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class system_err


class testsuiteType(testsuite):
    """A testsuite as it appears inside an aggregated <testsuites> document.

    Extra attributes (from the XSD annotations):
      name    - derived from testsuite/@name in the non-aggregated documents.
      id      - starts at '0' for the first testsuite and is incremented by 1
                for each following testsuite.
    """
    subclass = None
    superclass = testsuite
    def __init__(self, tests=None, errors=None, name=None, timestamp=None,
                 hostname=None, time=None, failures=None, properties=None,
                 testcase=None, system_out=None, system_err=None, id=None,
                 package=None):
        super(testsuiteType, self).__init__(tests, errors, name, timestamp, hostname, time, failures, properties, testcase, system_out, system_err, )
        self.id = _cast(int, id)
        self.package = _cast(None, package)
        pass
    def factory(*args_, **kwargs_):
        if testsuiteType.subclass:
            return testsuiteType.subclass(*args_, **kwargs_)
        else:
            return testsuiteType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_id(self):
        return self.id
    def set_id(self, id):
        self.id = id
    def get_package(self):
        return self.package
    def set_package(self, package):
        self.package = package
    def export(self, outfile, level, namespace_='', name_='testsuiteType', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='testsuiteType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='testsuiteType'):
        # Superclass writes the shared attributes first; already_processed
        # prevents duplicates.
        super(testsuiteType, self).exportAttributes(outfile, level, already_processed, namespace_, name_='testsuiteType')
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id'))
        if self.package is not None and 'package' not in already_processed:
            already_processed.append('package')
            outfile.write(' package=%s' % (self.gds_format_string(quote_attrib(self.package).encode(ExternalEncoding), input_name='package'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='testsuiteType', fromsubclass_=False):
        super(testsuiteType, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(testsuiteType, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='testsuiteType'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = %d,\n' % (self.id,))
        if self.package is not None and 'package' not in already_processed:
            already_processed.append('package')
            showIndent(outfile, level)
            outfile.write('package = "%s",\n' % (self.package,))
        super(testsuiteType, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(testsuiteType, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            try:
                self.id = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('package', node)
        if value is not None and 'package' not in already_processed:
            already_processed.append('package')
            self.package = value
            self.package = ' '.join(self.package.split())
        super(testsuiteType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(testsuiteType, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class testsuiteType


class propertiesType(GeneratedsSuper):
    """Container for the <property> elements recorded for a testsuite."""
    subclass = None
    superclass = None
    def __init__(self, property=None):
        if property is None:
            self.property = []
        else:
            self.property = property
    def factory(*args_, **kwargs_):
        if propertiesType.subclass:  # pylint: disable=E1102
            return propertiesType.subclass(*args_, **kwargs_)
        else:
            return propertiesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_property(self):
        return self.property
    def set_property(self, property):
        self.property = property
    def add_property(self, value):
        self.property.append(value)
    def insert_property(self, index, value):
        self.property[index] = value
    def export(self, outfile, level, namespace_='', name_='propertiesType', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='propertiesType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='propertiesType'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='propertiesType', fromsubclass_=False):
        for property_ in self.property:
            property_.export(outfile, level, namespace_, name_='property')
    def hasContent_(self):
        if (
            self.property
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='propertiesType'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('property=[\n')
        level += 1
        for property_ in self.property:
            showIndent(outfile, level)
            outfile.write('model_.propertyType(\n')
            property_.exportLiteral(outfile, level, name_='propertyType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'property':
            obj_ = propertyType.factory()
            obj_.build(child_)
            self.property.append(obj_)
# end class propertiesType


class propertyType(GeneratedsSuper):
    """A single name/value property captured at test-execution time."""
    subclass = None
    superclass = None
    def __init__(self, name=None, value=None):
        self.name = _cast(None, name)
        self.value = _cast(None, value)
        pass
    def factory(*args_, **kwargs_):
        if propertyType.subclass:  # pylint: disable=E1102
            return propertyType.subclass(*args_, **kwargs_)
        else:
            return propertyType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_name(self):
        return self.name
    def set_name(self, name):
        self.name = name
    def get_value(self):
        return self.value
    def set_value(self, value):
        self.value = value
    def export(self, outfile, level, namespace_='', name_='propertyType', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='propertyType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='propertyType'):
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            outfile.write(' value=%s' % (self.gds_format_string(quote_attrib(self.value).encode(ExternalEncoding), input_name='value'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='propertyType', fromsubclass_=False):
        pass
    def hasContent_(self):
        # Empty tuple condition: always False (attribute-only element).
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='propertyType'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            showIndent(outfile, level)
            outfile.write('name = "%s",\n' % (self.name,))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            showIndent(outfile, level)
            outfile.write('value = "%s",\n' % (self.value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.append('name')
            self.name = value
            # Collapse whitespace per the schema's token semantics.
            self.name = ' '.join(self.name.split())
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.append('value')
            self.value = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class propertyType


class testcaseType(GeneratedsSuper):
    """A single executed test method.

    Attributes (from the XSD annotations):
      name      - name of the test method.
      classname - full class name for the class the test method is in.
      time      - time taken (in seconds) to execute the test.
    Children: at most one <error> and one <failure> element (errorType and
    failureType are defined elsewhere in this module).
    """
    subclass = None
    superclass = None
    def __init__(self, classname=None, name=None, time=None, error=None, failure=None):
        self.classname = _cast(None, classname)
        self.name = _cast(None, name)
        self.time = _cast(float, time)
        self.error = error
        self.failure = failure
    def factory(*args_, **kwargs_):
        if testcaseType.subclass:  # pylint: disable=E1102
            return testcaseType.subclass(*args_, **kwargs_)
        else:
            return testcaseType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_error(self):
        return self.error
    def set_error(self, error):
        self.error = error
    def get_failure(self):
        return self.failure
    def set_failure(self, failure):
        self.failure = failure
    def get_classname(self):
        return self.classname
    def set_classname(self, classname):
        self.classname = classname
    def get_name(self):
        return self.name
    def set_name(self, name):
        self.name = name
    def get_time(self):
        return self.time
    def set_time(self, time):
        self.time = time
    def export(self, outfile, level, namespace_='', name_='testcaseType', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='testcaseType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='testcaseType'):
        if self.classname is not None and 'classname' not in already_processed:
            already_processed.append('classname')
            outfile.write(' classname=%s' % (self.gds_format_string(quote_attrib(self.classname).encode(ExternalEncoding), input_name='classname'), ))
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
        if self.time is not None and 'time' not in already_processed:
            already_processed.append('time')
            outfile.write(' time="%s"' % self.gds_format_float(self.time, input_name='time'))
    def exportChildren(self, outfile, level, namespace_='', name_='testcaseType', fromsubclass_=False):
        if self.error is not None:
            self.error.export(outfile, level, namespace_, name_='error')
        if self.failure is not None:
            self.failure.export(outfile, level, namespace_, name_='failure')
    def hasContent_(self):
        if (
            self.error is not None or
            self.failure is not None
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='testcaseType'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.classname is not None and 'classname' not in already_processed:
            already_processed.append('classname')
            showIndent(outfile, level)
            outfile.write('classname = "%s",\n' % (self.classname,))
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            showIndent(outfile, level)
            outfile.write('name = "%s",\n' % (self.name,))
        if self.time is not None and 'time' not in already_processed:
            already_processed.append('time')
            showIndent(outfile, level)
            outfile.write('time = %f,\n' % (self.time,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.error is not None:
            showIndent(outfile, level)
            outfile.write('error=model_.errorType(\n')
            self.error.exportLiteral(outfile, level, name_='error')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.failure is not None:
            showIndent(outfile, level)
            outfile.write('failure=model_.failureType(\n')
            self.failure.exportLiteral(outfile, level, name_='failure')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('classname', node)
        if value is not None and 'classname' not in already_processed:
            already_processed.append('classname')
            self.classname = value
            self.classname = ' '.join(self.classname.split())
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.append('name')
            self.name = value
            self.name = ' '.join(self.name.split())
        value = find_attr_value_('time', node)
        if value is not None and 'time' not in already_processed:
            already_processed.append('time')
            try:
                self.time = float(value)
            # NOTE(review): raises bare ValueError, not GDSParseError, for a
            # bad 'time' value (generator quirk, kept as-is).
            except ValueError, exp:
                raise ValueError('Bad float/double attribute (time): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'error':
            obj_ = errorType.factory()
            obj_.build(child_)
            self.set_error(obj_)
        elif nodeName_ == 'failure':
            obj_ = failureType.factory()
            obj_.build(child_)
            self.set_failure(obj_)
# end class testcaseType
testcaseType class errorType(GeneratedsSuper): """The error message. e.g., if a java exception is thrown, the return value of getMessage()The type of error that occurred. e.g., if a java execption is thrown the full class name of the exception.""" subclass = None superclass = None def __init__(self, message=None, type_=None, valueOf_=None): self.message = _cast(None, message) self.type_ = _cast(None, type_) self.valueOf_ = valueOf_ def factory(*args_, **kwargs_): if errorType.subclass: # pylint: disable=E1102 return errorType.subclass(*args_, **kwargs_) else: return errorType(*args_, **kwargs_) factory = staticmethod(factory) def get_message(self): return self.message def set_message(self, message): self.message = message def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_valueOf_(self): return self.valueOf_ def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ def export(self, outfile, level, namespace_='', name_='errorType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = [] self.exportAttributes(outfile, level, already_processed, namespace_, name_='errorType') if self.hasContent_(): outfile.write('>') outfile.write(str(self.valueOf_).encode(ExternalEncoding)) self.exportChildren(outfile, level + 1, namespace_, name_) outfile.write('</%s%s>\n' % (namespace_, name_)) else: outfile.write('/>\n') def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='errorType'): if self.message is not None and 'message' not in already_processed: already_processed.append('message') outfile.write(' message=%s' % (self.gds_format_string(quote_attrib(self.message).encode(ExternalEncoding), input_name='message'), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.append('type_') outfile.write(' type=%s' % 
(self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), )) def exportChildren(self, outfile, level, namespace_='', name_='errorType', fromsubclass_=False): pass def hasContent_(self): if ( self.valueOf_ ): return True else: return False def exportLiteral(self, outfile, level, name_='errorType'): level += 1 self.exportLiteralAttributes(outfile, level, [], name_) if self.hasContent_(): self.exportLiteralChildren(outfile, level, name_) showIndent(outfile, level) outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,)) def exportLiteralAttributes(self, outfile, level, already_processed, name_): if self.message is not None and 'message' not in already_processed: already_processed.append('message') showIndent(outfile, level) outfile.write('message = "%s",\n' % (self.message,)) if self.type_ is not None and 'type_' not in already_processed: already_processed.append('type_') showIndent(outfile, level) outfile.write('type_ = "%s",\n' % (self.type_,)) def exportLiteralChildren(self, outfile, level, name_): pass def build(self, node): self.buildAttributes(node, node.attrib, []) self.valueOf_ = get_all_text_(node) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('message', node) if value is not None and 'message' not in already_processed: already_processed.append('message') self.message = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.append('type') self.type_ = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class errorType class failureType(GeneratedsSuper): """The message specified in the assertThe type of the assert.""" subclass = None superclass = None def __init__(self, message=None, type_=None, valueOf_=None): self.message = _cast(None, message) self.type_ = 
_cast(None, type_) self.valueOf_ = valueOf_ def factory(*args_, **kwargs_): if failureType.subclass: # pylint: disable=E1102 return failureType.subclass(*args_, **kwargs_) else: return failureType(*args_, **kwargs_) factory = staticmethod(factory) def get_message(self): return self.message def set_message(self, message): self.message = message def get_type(self): return self.type_ def set_type(self, type_): self.type_ = type_ def get_valueOf_(self): return self.valueOf_ def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_ def export(self, outfile, level, namespace_='', name_='failureType', namespacedef_=''): showIndent(outfile, level) outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) already_processed = [] self.exportAttributes(outfile, level, already_processed, namespace_, name_='failureType') if self.hasContent_(): outfile.write('>') outfile.write(str(self.valueOf_).encode(ExternalEncoding)) self.exportChildren(outfile, level + 1, namespace_, name_) outfile.write('</%s%s>\n' % (namespace_, name_)) else: outfile.write('/>\n') def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='failureType'): if self.message is not None and 'message' not in already_processed: already_processed.append('message') outfile.write(' message=%s' % (self.gds_format_string(quote_attrib(self.message).encode(ExternalEncoding), input_name='message'), )) if self.type_ is not None and 'type_' not in already_processed: already_processed.append('type_') outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), )) def exportChildren(self, outfile, level, namespace_='', name_='failureType', fromsubclass_=False): pass def hasContent_(self): if ( self.valueOf_ ): return True else: return False def exportLiteral(self, outfile, level, name_='failureType'): level += 1 self.exportLiteralAttributes(outfile, level, [], name_) if self.hasContent_(): 
self.exportLiteralChildren(outfile, level, name_) showIndent(outfile, level) outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,)) def exportLiteralAttributes(self, outfile, level, already_processed, name_): if self.message is not None and 'message' not in already_processed: already_processed.append('message') showIndent(outfile, level) outfile.write('message = "%s",\n' % (self.message,)) if self.type_ is not None and 'type_' not in already_processed: already_processed.append('type_') showIndent(outfile, level) outfile.write('type_ = "%s",\n' % (self.type_,)) def exportLiteralChildren(self, outfile, level, name_): pass def build(self, node): self.buildAttributes(node, node.attrib, []) self.valueOf_ = get_all_text_(node) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_) def buildAttributes(self, node, attrs, already_processed): value = find_attr_value_('message', node) if value is not None and 'message' not in already_processed: already_processed.append('message') self.message = value value = find_attr_value_('type', node) if value is not None and 'type' not in already_processed: already_processed.append('type') self.type_ = value def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): pass # end class failureType USAGE_TEXT = """ Usage: python <Parser>.py [ -s ] <in_xml_file> """ def usage(): print USAGE_TEXT sys.exit(1) def get_root_tag(node): tag = Tag_pattern_.match(node.tag).groups()[-1] rootClass = globals().get(tag) return tag, rootClass def parse(inFileName): doc = parsexml_(inFileName) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'testsuite' rootClass = testsuite rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. 
doc = None sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_='') return rootObj def parseString(inString): from StringIO import StringIO doc = parsexml_(StringIO(inString)) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'testsuite' rootClass = testsuite rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export(sys.stdout, 0, name_="testsuite", namespacedef_='') return rootObj def parseLiteral(inFileName): doc = parsexml_(inFileName) rootNode = doc.getroot() rootTag, rootClass = get_root_tag(rootNode) if rootClass is None: rootTag = 'testsuite' rootClass = testsuite rootObj = rootClass.factory() rootObj.build(rootNode) # Enable Python to collect the space used by the DOM. doc = None sys.stdout.write('#from JUnit_api import *\n\n') sys.stdout.write('import JUnit_api as model_\n\n') sys.stdout.write('rootObj = model_.rootTag(\n') rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) sys.stdout.write(')\n') return rootObj def main(): args = sys.argv[1:] if len(args) == 1: parse(args[0]) else: usage() if __name__ == '__main__': #import pdb; pdb.set_trace() main() __all__ = [ "errorType", "failureType", "propertiesType", "propertyType", "system_err", "system_out", "testcaseType", "testsuite", "testsuiteType", "testsuites" ]
joyxu/autotest
client/tools/JUnit_api.py
Python
gpl-2.0
65,100
#include <QFile> #include <QMessageBox> #include "pkg_bind.h" #include "config.h" PkgBind *PkgBind::instance = NULL; /* Constructor */ PkgBind::PkgBind() { loadCommandList(); }; /* Get the unique instance * or create it if there isn't any */ PkgBind *PkgBind::getInstance() { if(instance==NULL) instance = new PkgBind(); return instance; } /* Load the command list from a file */ void PkgBind::loadCommandList() { QString path = PKG_CMD_PATH; QFile file(path); if(file.open(QIODevice::ReadOnly)) { char buffer[1024]; int len; while((len = file.readLine(buffer, sizeof(buffer))) > -1) { if(buffer[len - 1] == '\n') buffer[len - 1] = '\0'; commands << QString(buffer); } printf("[PkgBind::loadCommandList] '%s' loaded\n", path.toLocal8Bit().constData()); }else{ printf("[PkgBind::loadCommandList] '%s' can not be loaded\n", path.toLocal8Bit().constData()); } } /* Check if a symbol is defined * as a funciont included in some package */ bool PkgBind::checkSymbol(const QString &s) { return commands.contains(s); } /* Invoke the package manager * for install the package with the command * "cmd" */ void PkgBind::invokePackageManager(const QString &s) { QMessageBox *msgBox = new QMessageBox(QMessageBox::Question, "Package Manager", "There is a package that provides the command '" + s + "'\n" "Do you want to try to install it now?", QMessageBox::Yes | QMessageBox::No); invokeCmd = QString("qtoctave_pkg -s ") + s + "&"; connect(msgBox, SIGNAL(finished(int)), this, SLOT(invokeResponse(int))); msgBox->show(); } /* The dialog response */ void PkgBind::invokeResponse(int result) { if(result == QMessageBox::Yes) system(invokeCmd.toLocal8Bit().constData()); }
luisivan/QtOctave
qtoctave/src/pkg_bind.cpp
C++
gpl-2.0
1,792
<?php die("Access Denied"); ?>#x#a:2:{s:6:"output";s:0:"";s:6:"result";s:4:"9243";}
jolay/pesatec
cache/mod_vvisit_counter/860aea6b5aac75573e8d7d8ebc839c97-cache-mod_vvisit_counter-12c8414db0decbe26a3df6aa66213421.php
PHP
gpl-2.0
83
#include "SampleCode.h" #include "SkView.h" #include "SkBlurMaskFilter.h" #include "SkCanvas.h" #include "SkGradientShader.h" #include "SkGraphics.h" #include "SkImageDecoder.h" #include "SkPath.h" #include "SkRandom.h" #include "SkRegion.h" #include "SkShader.h" #include "SkUtils.h" #include "SkXfermode.h" #include "SkColorPriv.h" #include "SkColorFilter.h" #include "SkTime.h" #include "SkTypeface.h" #include "SkTextBox.h" #include "SkOSFile.h" #include "SkStream.h" #include "SkSVGParser.h" class SVGView : public SkView { public: SVGView() { SkXMLParserError err; SkFILEStream stream("/testsvg2.svg"); SkSVGParser parser(&err); if (parser.parse(stream)) { const char* text = parser.getFinal(); SkFILEWStream output("/testanim.txt"); output.write(text, strlen(text)); } } protected: // overrides from SkEventSink virtual bool onQuery(SkEvent* evt) { if (SampleCode::TitleQ(*evt)) { SkString str("SVG"); SampleCode::TitleR(evt, str.c_str()); return true; } return this->INHERITED::onQuery(evt); } void drawBG(SkCanvas* canvas) { canvas->drawColor(SK_ColorWHITE); } virtual void onDraw(SkCanvas* canvas) { this->drawBG(canvas); } virtual SkView::Click* onFindClickHandler(SkScalar x, SkScalar y) { return new Click(this); } virtual bool onClick(Click* click) { int y = click->fICurr.fY; if (y < 0) { y = 0; } else if (y > 255) { y = 255; } fByte = y; this->inval(NULL); return true; } private: int fByte; typedef SkView INHERITED; }; ////////////////////////////////////////////////////////////////////////////// static SkView* MyFactory() { return new SVGView; } static SkViewRegister reg(MyFactory);
highfellow/inkscape-remove-duplicates
app/main.cpp
C++
gpl-2.0
2,030
// We do this in this rather complicated manner since Joomla groups // external javascript includes and script declarations seperately // and this call has to be made right after loading jQuery. jQuery.noConflict();
lau-ibarra/ETISIGChacoJoomla
tmp/install_515c4270221dc/plg_system_jquery/site/jquery/no_conflict.js
JavaScript
gpl-2.0
218
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magentocommerce.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Catalog * @copyright Copyright (c) 2008 Irubin Consulting Inc. DBA Varien (http://www.varien.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Catalog entity abstract model * * @category Mage * @package Mage_Catalog * @author Magento Core Team <core@magentocommerce.com> */ abstract class Mage_Catalog_Model_Resource_Eav_Mysql4_Abstract extends Mage_Eav_Model_Entity_Abstract { /** * Redeclare attribute model * * @return string */ protected function _getDefaultAttributeModel() { return 'catalog/resource_eav_attribute'; } public function getDefaultStoreId() { return Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID; } /** * Check whether the attribute is Applicable to the object * * @param Varien_Object $object * @param Mage_Catalog_Model_Resource_Eav_Attribute $attribute * @return boolean */ protected function _isApplicableAttribute ($object, $attribute) { $applyTo = $attribute->getApplyTo(); return count($applyTo) == 0 || in_array($object->getTypeId(), $applyTo); } /** * Retrieve select object for loading entity attributes values * * Join attribute store value * * @param Varien_Object $object * @param mixed $rowId * @return Zend_Db_Select */ protected function 
_getLoadAttributesSelect($object, $table) { /** * This condition is applicable for all cases when we was work in not single * store mode, customize some value per specific store view and than back * to single store mode. We should load correct values */ if (Mage::app()->isSingleStoreMode()) { $storeId = Mage::app()->getStore(true)->getId(); } else { $storeId = $object->getStoreId(); } $select = $this->_read->select() ->from(array('default' => $table)); if ($setId = $object->getAttributeSetId()) { $select->join( array('set_table' => $this->getTable('eav/entity_attribute')), 'default.attribute_id=set_table.attribute_id AND ' . 'set_table.attribute_set_id=' . intval($setId), array() ); } $joinCondition = 'main.attribute_id=default.attribute_id AND ' . $this->_read->quoteInto('main.store_id=? AND ', intval($storeId)) . $this->_read->quoteInto('main.'.$this->getEntityIdField() . '=?', $object->getId()); $select->joinLeft( array('main' => $table), $joinCondition, array( 'store_value_id' => 'value_id', 'store_value' => 'value' )) ->where('default.'.$this->getEntityIdField() . 
'=?', $object->getId()) ->where('default.store_id=?', $this->getDefaultStoreId()); return $select; } /** * Initialize attribute value for object * * @param Varien_Object $object * @param array $valueRow * @return Mage_Eav_Model_Entity_Abstract */ protected function _setAttribteValue($object, $valueRow) { parent::_setAttribteValue($object, $valueRow); if ($attribute = $this->getAttribute($valueRow['attribute_id'])) { $attributeCode = $attribute->getAttributeCode(); if (isset($valueRow['store_value'])) { $object->setAttributeDefaultValue($attributeCode, $valueRow['value']); $object->setData($attributeCode, $valueRow['store_value']); $attribute->getBackend()->setValueId($valueRow['store_value_id']); } } return $this; } /** * Insert entity attribute value * * Insert attribute value we do only for default store * * @param Varien_Object $object * @param Mage_Eav_Model_Entity_Attribute_Abstract $attribute * @param mixed $value * @return Mage_Eav_Model_Entity_Abstract */ protected function _insertAttribute($object, $attribute, $value) { $entityIdField = $attribute->getBackend()->getEntityIdField(); $row = array( $entityIdField => $object->getId(), 'entity_type_id'=> $object->getEntityTypeId(), 'attribute_id' => $attribute->getId(), 'value' => $this->_prepareValueForSave($value, $attribute), 'store_id' => $this->getDefaultStoreId() ); $fields = array(); $values = array(); foreach ($row as $k => $v) { $fields[] = $this->_getWriteAdapter()->quoteIdentifier('?', $k); $values[] = $this->_getWriteAdapter()->quoteInto('?', $v); } $sql = sprintf('INSERT IGNORE INTO %s (%s) VALUES(%s)', $this->_getWriteAdapter()->quoteIdentifier($attribute->getBackend()->getTable()), join(',', array_keys($row)), join(',', $values)); $this->_getWriteAdapter()->query($sql); if (!$lastId = $this->_getWriteAdapter()->lastInsertId()) { $select = $this->_getReadAdapter()->select() ->from($attribute->getBackend()->getTable(), 'value_id') ->where($entityIdField . 
'=?', $row[$entityIdField]) ->where('entity_type_id=?', $row['entity_type_id']) ->where('attribute_id=?', $row['attribute_id']) ->where('store_id=?', $row['store_id']); $lastId = $select->query()->fetchColumn(); } if ($object->getStoreId() != $this->getDefaultStoreId()) { $this->_updateAttribute($object, $attribute, $lastId, $value); } return $this; } /** * Update entity attribute value * * @param Varien_Object $object * @param Mage_Eav_Model_Entity_Attribute_Abstract $attribute * @param mixed $valueId * @param mixed $value * @return Mage_Eav_Model_Entity_Abstract */ protected function _updateAttribute($object, $attribute, $valueId, $value) { /** * If we work in single store mode all values should be saved just * for default store id * In this case we clear all not default values */ if (Mage::app()->isSingleStoreMode()) { $this->_getWriteAdapter()->delete( $attribute->getBackend()->getTable(), $this->_getWriteAdapter()->quoteInto('attribute_id=?', $attribute->getId()) . $this->_getWriteAdapter()->quoteInto(' AND entity_id=?', $object->getId()) . 
$this->_getWriteAdapter()->quoteInto(' AND store_id!=?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID) ); } /** * Update attribute value for store */ if ($attribute->isScopeStore()) { $this->_updateAttributeForStore($object, $attribute, $value, $object->getStoreId()); } /** * Update attribute value for website */ elseif ($attribute->isScopeWebsite()) { if ($object->getStoreId() == 0) { $this->_updateAttributeForStore($object, $attribute, $value, $object->getStoreId()); } else { if (is_array($object->getWebsiteStoreIds())) { foreach ($object->getWebsiteStoreIds() as $storeId) { $this->_updateAttributeForStore($object, $attribute, $value, $storeId); } } } } else { $this->_getWriteAdapter()->update($attribute->getBackend()->getTable(), array('value' => $this->_prepareValueForSave($value, $attribute)), 'value_id='.(int)$valueId ); } return $this; } /** * Update attribute value for specific store * * @param Mage_Catalog_Model_Abstract $object * @param object $attribute * @param mixed $value * @param int $storeId * @return Mage_Catalog_Model_Resource_Eav_Mysql4_Abstract */ protected function _updateAttributeForStore($object, $attribute, $value, $storeId) { $entityIdField = $attribute->getBackend()->getEntityIdField(); $select = $this->_getWriteAdapter()->select() ->from($attribute->getBackend()->getTable(), 'value_id') ->where('entity_type_id=?', $object->getEntityTypeId()) ->where("$entityIdField=?",$object->getId()) ->where('store_id=?', $storeId) ->where('attribute_id=?', $attribute->getId()); /** * When value for store exist */ if ($valueId = $this->_getWriteAdapter()->fetchOne($select)) { $this->_getWriteAdapter()->update($attribute->getBackend()->getTable(), array('value' => $this->_prepareValueForSave($value, $attribute)), 'value_id='.$valueId ); } else { $this->_getWriteAdapter()->insert($attribute->getBackend()->getTable(), array( $entityIdField => $object->getId(), 'entity_type_id'=> $object->getEntityTypeId(), 'attribute_id' => $attribute->getId(), 'value' => 
$this->_prepareValueForSave($value, $attribute), 'store_id' => $storeId )); } return $this; } /** * Delete entity attribute values * * @param Varien_Object $object * @param string $table * @param array $info * @return Varien_Object */ protected function _deleteAttributes($object, $table, $info) { $entityIdField = $this->getEntityIdField(); $globalValues = array(); $websiteAttributes = array(); $storeAttributes = array(); /** * Separate attributes by scope */ foreach ($info as $itemData) { $attribute = $this->getAttribute($itemData['attribute_id']); if ($attribute->isScopeStore()) { $storeAttributes[] = $itemData['attribute_id']; } elseif ($attribute->isScopeWebsite()) { $websiteAttributes[] = $itemData['attribute_id']; } else { $globalValues[] = $itemData['value_id']; } } /** * Delete global scope attributes */ if (!empty($globalValues)) { $condition = $this->_getWriteAdapter()->quoteInto('value_id IN (?)', $globalValues); $this->_getWriteAdapter()->delete($table, $condition); } $condition = $this->_getWriteAdapter()->quoteInto("$entityIdField=?", $object->getId()) . $this->_getWriteAdapter()->quoteInto(' AND entity_type_id=?', $object->getEntityTypeId()); /** * Delete website scope attributes */ if (!empty($websiteAttributes)) { $storeIds = $object->getWebsiteStoreIds(); if (!empty($storeIds)) { $delCondition = $condition . $this->_getWriteAdapter()->quoteInto(' AND attribute_id IN(?)', $websiteAttributes) . $this->_getWriteAdapter()->quoteInto(' AND store_id IN(?)', $storeIds); $this->_getWriteAdapter()->delete($table, $delCondition); } } /** * Delete store scope attributes */ if (!empty($storeAttributes)) { $delCondition = $condition . $this->_getWriteAdapter()->quoteInto(' AND attribute_id IN(?)', $storeAttributes) . 
$this->_getWriteAdapter()->quoteInto(' AND store_id =?', $object->getStoreId()); $this->_getWriteAdapter()->delete($table, $delCondition);; } return $this; } protected function _getOrigObject($object) { $className = get_class($object); $origObject = new $className(); $origObject->setData(array()); $origObject->setStoreId($object->getStoreId()); $this->load($origObject, $object->getData($this->getEntityIdField())); return $origObject; } protected function _collectOrigData($object) { $this->loadAllAttributes($object); if ($this->getUseDataSharing()) { $storeId = $object->getStoreId(); } else { $storeId = $this->getStoreId(); } $allStores = Mage::getConfig()->getStoresConfigByPath('system/store/id', array(), 'code'); //echo "<pre>".print_r($allStores ,1)."</pre>"; exit; $data = array(); foreach ($this->getAttributesByTable() as $table=>$attributes) { $entityIdField = current($attributes)->getBackend()->getEntityIdField(); $select = $this->_read->select() ->from($table) ->where($this->getEntityIdField()."=?", $object->getId()); $where = $this->_read->quoteInto("store_id=?", $storeId); $globalAttributeIds = array(); foreach ($attributes as $attrCode=>$attr) { if ($attr->getIsGlobal()) { $globalAttributeIds[] = $attr->getId(); } } if (!empty($globalAttributeIds)) { $where .= ' or '.$this->_read->quoteInto('attribute_id in (?)', $globalAttributeIds); } $select->where($where); $values = $this->_read->fetchAll($select); if (empty($values)) { continue; } foreach ($values as $row) { $data[$this->getAttribute($row['attribute_id'])->getName()][$row['store_id']] = $row; } foreach ($attributes as $attrCode=>$attr) { } } return $data; } }
tonio-44/tikflak
shop/app/code/core/Mage/Catalog/Model/Resource/Eav/Mysql4/Abstract.php
PHP
gpl-2.0
14,799
/** * Copyright (c) 2009 Pyxis Technologies inc. * * This is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA, * or see the FSF site: http://www.fsf.org. */ package com.greenpepper.interpreter.flow.scenario; import com.greenpepper.converter.*; public class ExpectationTypeConverter extends AbstractTypeConverter { @SuppressWarnings("unchecked") public boolean canConvertTo(Class type) { return Expectation.class.isAssignableFrom( type ); } protected Object doConvert(String value) { return new Expectation( value ); } }
hwellmann/greenpepper-core
src/main/java/com/greenpepper/interpreter/flow/scenario/ExpectationTypeConverter.java
Java
gpl-2.0
1,216
/*********************************************************************************** * * * Voreen - The Volume Rendering Engine * * * * Copyright (C) 2005-2013 University of Muenster, Germany. * * Visualization and Computer Graphics Group <http://viscg.uni-muenster.de> * * For a list of authors please refer to the file "CREDITS.txt". * * * * This file is part of the Voreen software package. Voreen is free software: * * you can redistribute it and/or modify it under the terms of the GNU General * * Public License version 2 as published by the Free Software Foundation. * * * * Voreen is distributed in the hope that it will be useful, but WITHOUT ANY * * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * * A PARTICULAR PURPOSE. See the GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License in the file * * "LICENSE.txt" along with this file. If not, see <http://www.gnu.org/licenses/>. * * * * For non-commercial academic use see the license exception specified in the file * * "LICENSE-academic.txt". To get information about commercial licensing please * * contact the authors. 
* * * ***********************************************************************************/ #include "shiftscaleimagefilter.h" #include "voreen/core/datastructures/volume/volumeram.h" #include "voreen/core/datastructures/volume/volume.h" #include "voreen/core/datastructures/volume/volumeatomic.h" #include "voreen/core/ports/conditions/portconditionvolumetype.h" #include "modules/itk/utils/itkwrapper.h" #include "voreen/core/datastructures/volume/operators/volumeoperatorconvert.h" #include "itkImage.h" #include "itkShiftScaleImageFilter.h" #include <iostream> namespace voreen { const std::string ShiftScaleImageFilterITK::loggerCat_("voreen.ShiftScaleImageFilterITK"); ShiftScaleImageFilterITK::ShiftScaleImageFilterITK() : ITKProcessor(), inport1_(Port::INPORT, "InputImage"), outport1_(Port::OUTPORT, "OutputImage"), enableProcessing_("enabled", "Enable", false), scale_("scale", "Scale", 1.0f, 0.0f, 5000.0f), shift_("shift", "Shift", 1.0f, 0.0f, 5000.0f) { addPort(inport1_); PortConditionLogicalOr* orCondition1 = new PortConditionLogicalOr(); orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt8()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt8()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt16()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt16()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeUInt32()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeInt32()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeFloat()); orCondition1->addLinkedCondition(new PortConditionVolumeTypeDouble()); inport1_.addCondition(orCondition1); addPort(outport1_); addProperty(enableProcessing_); addProperty(scale_); addProperty(shift_); } Processor* ShiftScaleImageFilterITK::create() const { return new ShiftScaleImageFilterITK(); } template<class T> void ShiftScaleImageFilterITK::shiftScaleImageFilterITK() { if (!enableProcessing_.get()) { outport1_.setData(inport1_.getData(), false); 
return; } typedef itk::Image<T, 3> InputImageType1; typedef itk::Image<T, 3> OutputImageType1; typename InputImageType1::Pointer p1 = voreenToITK<T>(inport1_.getData()); //Filter define typedef itk::ShiftScaleImageFilter<InputImageType1, OutputImageType1> FilterType; typename FilterType::Pointer filter = FilterType::New(); filter->SetInput(p1); filter->SetScale(scale_.get()); filter->SetShift(shift_.get()); observe(filter.GetPointer()); try { filter->Update(); } catch (itk::ExceptionObject &e) { LERROR(e); } Volume* outputVolume1 = 0; outputVolume1 = ITKToVoreenCopy<T>(filter->GetOutput()); if (outputVolume1) { transferRWM(inport1_.getData(), outputVolume1); transferTransformation(inport1_.getData(), outputVolume1); outport1_.setData(outputVolume1); } else outport1_.setData(0); } void ShiftScaleImageFilterITK::process() { const VolumeBase* inputHandle1 = inport1_.getData(); const VolumeRAM* inputVolume1 = inputHandle1->getRepresentation<VolumeRAM>(); if (dynamic_cast<const VolumeRAM_UInt8*>(inputVolume1)) { shiftScaleImageFilterITK<uint8_t>(); } else if (dynamic_cast<const VolumeRAM_Int8*>(inputVolume1)) { shiftScaleImageFilterITK<int8_t>(); } else if (dynamic_cast<const VolumeRAM_UInt16*>(inputVolume1)) { shiftScaleImageFilterITK<uint16_t>(); } else if (dynamic_cast<const VolumeRAM_Int16*>(inputVolume1)) { shiftScaleImageFilterITK<int16_t>(); } else if (dynamic_cast<const VolumeRAM_UInt32*>(inputVolume1)) { shiftScaleImageFilterITK<uint32_t>(); } else if (dynamic_cast<const VolumeRAM_Int32*>(inputVolume1)) { shiftScaleImageFilterITK<int32_t>(); } else if (dynamic_cast<const VolumeRAM_Float*>(inputVolume1)) { shiftScaleImageFilterITK<float>(); } else if (dynamic_cast<const VolumeRAM_Double*>(inputVolume1)) { shiftScaleImageFilterITK<double>(); } else { LERROR("Inputformat of Volume 1 is not supported!"); } } } // namespace
lathen/voreen
modules/itk_generated/processors/itk_ImageIntensity/shiftscaleimagefilter.cpp
C++
gpl-2.0
6,242
<?php
/**
 * OpenSocialWebsite
 *
 * @package OpenSocialWebsite
 * @author Open Social Website Core Team <info@opensocialwebsite.com>
 * @copyright 2014 iNFORMATIKON TECHNOLOGIES
 * @license General Public Licence http://www.opensocialwebsite.com/licence
 * @link http://www.opensocialwebsite.com/licence
 */
define('__OSSN_SEARCH__', ossn_route()->com.'OssnSearch/');
require_once(__OSSN_SEARCH__.'classes/OssnSearch.php');

/**
 * Component init: registers the /search page handler, the left-menu hook
 * and the component stylesheet.
 */
function ossn_search(){
		ossn_register_page('search', 'ossn_search_page');
		ossn_add_hook('search', "left", 'search_menu_handler');
		ossn_extend_view('css/ossn.default', 'components/OssnSearch/css/search');
}

/**
 * Hook handler: appends the search menu to the left menu collection.
 */
function search_menu_handler($hook, $type, $return){
		$return[] = ossn_view_menu('search');
		return $return;
}

/**
 * Page handler for /search.
 *
 * Dispatches the query string (?q=...&type=...) to the hook registered for
 * the requested result type (defaults to 'users') and renders the page.
 */
function ossn_search_page($pages){
		// FIX: $pages[0] was read without a guard; avoid an undefined-index
		// notice when the page is requested with no sub-path.
		$page = (isset($pages[0]) && !empty($pages[0])) ? $pages[0] : 'search';
		ossn_trigger_callback('page', 'load:search');
		switch($page){
				case 'search':
						$query = input('q');
						$type = input('type');
						if(empty($type)){
								$params['type'] = 'users';
						} else {
								$params['type'] = $type;
						}
						$type = $params['type'];
						// FIX: $contents was used undefined when no hook was
						// registered for the requested type; always give the
						// view a 'contents' entry.
						$contents = array('contents' => '');
						if(ossn_is_hook('search', "type:{$type}")){
								$contents['contents'] = ossn_call_hook('search', "type:{$type}", array('q' => $query));
						}
						$contents = array(
								'content' => ossn_view('components/OssnSearch/pages/search', $contents),
						);
						$content = ossn_set_page_layout('search', $contents);
						// FIX: $title was passed to ossn_view_page() without ever
						// being defined; keep the rendered output identical by
						// using an explicit empty title.
						$title = '';
						echo ossn_view_page($title, $content);
						break;
				default:
						ossn_error_page();
						break;
		}
}
ossn_register_callback('ossn', 'init', 'ossn_search');
lianglee/opensource-socialnetwork
components/OssnSearch/ossn_com.php
PHP
gpl-2.0
1,627
/*
 * Copyright (C) 2008-2012 TrinityCore <http://www.trinitycore.org/>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/* ScriptData
Name: gm_commandscript
%Complete: 100
Comment: All gm related commands
Category: commandscripts
EndScriptData */

#include "ScriptMgr.h"
#include "ObjectMgr.h"
#include "Chat.h"
#include "AccountMgr.h"

class gm_commandscript : public CommandScript
{
public:
    gm_commandscript() : CommandScript("gm_commandscript") { }

    ChatCommand* GetCommands() const
    {
        static ChatCommand gmCommandTable[] =
        {
            { "chat",    SEC_MODERATOR,     false, &HandleGMChatCommand,       "", NULL },
            { "fly",     SEC_ADMINISTRATOR, false, &HandleGMFlyCommand,        "", NULL },
            { "ingame",  SEC_PLAYER,        true,  &HandleGMListIngameCommand, "", NULL },
            { "list",    SEC_ADMINISTRATOR, true,  &HandleGMListFullCommand,   "", NULL },
            { "visible", SEC_MODERATOR,     false, &HandleGMVisibleCommand,    "", NULL },
            { "",        SEC_MODERATOR,     false, &HandleGMCommand,           "", NULL },
            { NULL,      0,                 false, NULL,                       "", NULL }
        };
        static ChatCommand commandTable[] =
        {
            { "gm",   SEC_MODERATOR, false, NULL, "", gmCommandTable },
            { NULL,   0,             false, NULL, "", NULL }
        };
        return commandTable;
    }

    // Prints one table row (name + GM level) for the two list commands.
    // In-game sessions get a plain line; console output is padded so the name
    // sits centered in a 16-character column.
    // FIX: the padding math previously computed (16 - strlen(name)) / 2 with
    // unsigned arithmetic, which underflows for names of 16+ characters and
    // produced a huge field width; names that long now simply get no padding.
    static void PrintGmListEntry(ChatHandler* handler, char const* name, uint8 security)
    {
        if (handler->GetSession())
        {
            handler->PSendSysMessage("| %s GMLevel %u", name, security);
            return;
        }
        size_t nameLen = strlen(name);
        uint8 max = (nameLen < 16) ? uint8((16 - nameLen) / 2) : 0;
        uint8 max2 = max;
        // Drop one space on the right for even name lengths so the row stays
        // inside the 16-character column (guard max > 0 to avoid underflow).
        if (max && (max + max2 + nameLen == 16))
            max2 = max - 1;
        handler->PSendSysMessage("|%*s%s%*s| %u |", max, " ", name, max2, " ", security);
    }

    // Enables or disables hiding of the staff badge
    static bool HandleGMChatCommand(ChatHandler* handler, char const* args)
    {
        if (!*args)
        {
            // No argument: report current state.
            WorldSession* session = handler->GetSession();
            if (!AccountMgr::IsPlayerAccount(session->GetSecurity()) && session->GetPlayer()->isGMChat())
                session->SendNotification(LANG_GM_CHAT_ON);
            else
                session->SendNotification(LANG_GM_CHAT_OFF);
            return true;
        }

        std::string param = (char*)args;

        if (param == "on")
        {
            handler->GetSession()->GetPlayer()->SetGMChat(true);
            handler->GetSession()->SendNotification(LANG_GM_CHAT_ON);
            return true;
        }

        if (param == "off")
        {
            handler->GetSession()->GetPlayer()->SetGMChat(false);
            handler->GetSession()->SendNotification(LANG_GM_CHAT_OFF);
            return true;
        }

        handler->SendSysMessage(LANG_USE_BOL);
        handler->SetSentErrorMessage(true);
        return false;
    }

    // Toggles fly mode ("on"/"off") for the selected player, or self when
    // nothing is selected.
    static bool HandleGMFlyCommand(ChatHandler* handler, char const* args)
    {
        if (!*args)
            return false;

        Player* target = handler->getSelectedPlayer();
        if (!target)
            target = handler->GetSession()->GetPlayer();

        WorldPacket data(12);
        if (strncmp(args, "on", 3) == 0)
            data.SetOpcode(SMSG_MOVE_SET_CAN_FLY);
        else if (strncmp(args, "off", 4) == 0)
            data.SetOpcode(SMSG_MOVE_UNSET_CAN_FLY);
        else
        {
            handler->SendSysMessage(LANG_USE_BOL);
            // FIX: mark the error like every sibling handler does so the
            // caller reports the failure consistently.
            handler->SetSentErrorMessage(true);
            return false;
        }
        data.append(target->GetPackGUID());
        data << uint32(0);                                      // unknown
        target->SendMessageToSet(&data, true);
        handler->PSendSysMessage(LANG_COMMAND_FLYMODE_STATUS, handler->GetNameLink(target).c_str(), args);
        return true;
    }

    // Lists GMs currently online (respecting visibility rules).
    static bool HandleGMListIngameCommand(ChatHandler* handler, char const* /*args*/)
    {
        bool first = true;
        bool footer = false;

        TRINITY_READ_GUARD(HashMapHolder<Player>::LockType, *HashMapHolder<Player>::GetLock());
        HashMapHolder<Player>::MapType const& m = sObjectAccessor->GetPlayers();
        for (HashMapHolder<Player>::MapType::const_iterator itr = m.begin(); itr != m.end(); ++itr)
        {
            AccountTypes itrSec = itr->second->GetSession()->GetSecurity();
            if ((itr->second->isGameMaster() ||
                (!AccountMgr::IsPlayerAccount(itrSec) &&
                 itrSec <= AccountTypes(sWorld->getIntConfig(CONFIG_GM_LEVEL_IN_GM_LIST)))) &&
                (!handler->GetSession() || itr->second->IsVisibleGloballyFor(handler->GetSession()->GetPlayer())))
            {
                if (first)
                {
                    first = false;
                    footer = true;
                    handler->SendSysMessage(LANG_GMS_ON_SRV);
                    handler->SendSysMessage("========================");
                }
                PrintGmListEntry(handler, itr->second->GetName(), uint8(itrSec));
            }
        }
        if (footer)
            handler->SendSysMessage("========================");
        if (first)
            handler->SendSysMessage(LANG_GMS_NOT_LOGGED);
        return true;
    }

    /// Display the list of GMs
    static bool HandleGMListFullCommand(ChatHandler* handler, char const* /*args*/)
    {
        ///- Get the accounts with GM Level >0
        QueryResult result = LoginDatabase.PQuery("SELECT a.username, aa.gmlevel FROM account a, account_access aa WHERE a.id=aa.id AND aa.gmlevel >= %u", SEC_MODERATOR);
        if (result)
        {
            handler->SendSysMessage(LANG_GMLIST);
            handler->SendSysMessage("========================");
            ///- Cycle through them. Display username and GM level
            do
            {
                Field* fields = result->Fetch();
                PrintGmListEntry(handler, fields[0].GetCString(), fields[1].GetUInt8());
            }
            while (result->NextRow());
            handler->SendSysMessage("========================");
        }
        else
            handler->PSendSysMessage(LANG_GMLIST_EMPTY);
        return true;
    }

    //Enable\Disable Invisible mode
    static bool HandleGMVisibleCommand(ChatHandler* handler, char const* args)
    {
        if (!*args)
        {
            // No argument: report current visibility state.
            handler->PSendSysMessage(LANG_YOU_ARE, handler->GetSession()->GetPlayer()->isGMVisible() ?
                handler->GetTrinityString(LANG_VISIBLE) : handler->GetTrinityString(LANG_INVISIBLE));
            return true;
        }

        std::string param = (char*)args;

        if (param == "on")
        {
            handler->GetSession()->GetPlayer()->SetGMVisible(true);
            handler->GetSession()->SendNotification(LANG_INVISIBLE_VISIBLE);
            return true;
        }

        if (param == "off")
        {
            // Notify first, then vanish, so the sender still sees the message.
            handler->GetSession()->SendNotification(LANG_INVISIBLE_INVISIBLE);
            handler->GetSession()->GetPlayer()->SetGMVisible(false);
            return true;
        }

        handler->SendSysMessage(LANG_USE_BOL);
        handler->SetSentErrorMessage(true);
        return false;
    }

    //Enable\Disable GM Mode
    static bool HandleGMCommand(ChatHandler* handler, char const* args)
    {
        if (!*args)
        {
            // No argument: report current GM-mode state.
            if (handler->GetSession()->GetPlayer()->isGameMaster())
                handler->GetSession()->SendNotification(LANG_GM_ON);
            else
                handler->GetSession()->SendNotification(LANG_GM_OFF);
            return true;
        }

        std::string param = (char*)args;

        if (param == "on")
        {
            handler->GetSession()->GetPlayer()->SetGameMaster(true);
            handler->GetSession()->SendNotification(LANG_GM_ON);
            handler->GetSession()->GetPlayer()->UpdateTriggerVisibility();
#ifdef _DEBUG_VMAPS
            VMAP::IVMapManager* vMapManager = VMAP::VMapFactory::createOrGetVMapManager();
            vMapManager->processCommand("stoplog");
#endif
            return true;
        }

        if (param == "off")
        {
            handler->GetSession()->GetPlayer()->SetGameMaster(false);
            handler->GetSession()->SendNotification(LANG_GM_OFF);
            handler->GetSession()->GetPlayer()->UpdateTriggerVisibility();
#ifdef _DEBUG_VMAPS
            VMAP::IVMapManager* vMapManager = VMAP::VMapFactory::createOrGetVMapManager();
            vMapManager->processCommand("startlog");
#endif
            return true;
        }

        handler->SendSysMessage(LANG_USE_BOL);
        handler->SetSentErrorMessage(true);
        return false;
    }
};

void AddSC_gm_commandscript()
{
    new gm_commandscript();
}
rebirth-core/Rebirth---old
src/server/scripts/Commands/cs_gm.cpp
C++
gpl-2.0
9,929
/* Copyright (C) 2014 InfiniDB, Inc.

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License
   as published by the Free Software Foundation; version 2 of
   the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA. */

/*****************************************************************************
 * $Id: we_getfilesizes.cpp 4450 2013-01-21 14:13:24Z rdempsey $
 *
 ****************************************************************************/

// Computes per-column segment-file sizes for a table, fanning the work out to
// a thread pool (one ColumnThread per column/dictionary OID) and serializing
// the results back into the caller's ByteStream.

#include "we_getfilesizes.h"
#include <iostream>
#include <stdexcept>
using namespace std;

#include "calpontsystemcatalog.h"
using namespace execplan;

#include "threadpool.h"
using namespace threadpool;

#include "bytestream.h"
using namespace messageqcpp;

#include "we_fileop.h"
#include "idbcompress.h"
using namespace compress;
#include "IDBFileSystem.h"
#include "IDBPolicy.h"
using namespace idbdatafile;

namespace WriteEngine
{

// One entry per segment file found for a column OID; serialized back to the
// caller in processTable().
struct FileInfo
{
    uint32_t       partition;       /** @brief Partition for a file*/
    uint16_t       segment;         /** @brief Segment for a file */
    uint16_t       dbRoot;          /** @brief DbRoot for a file */
    std::string    segFileName;     /** @brief seg file path */
    double         fileSize;        /** @brief seg file size in giga bytes */

    // Appends the fields to bs in declaration order; fileSize's double bits
    // are reinterpreted as uint64_t because ByteStream has no double insert.
    void serialize(messageqcpp::ByteStream& bs)
    {
        bs << partition;
        bs << segment;
        bs << dbRoot;
        bs << segFileName;
        bs << (*(uint64_t*)(&fileSize));
    }
};

typedef std::vector<FileInfo> Files;
typedef std::map<uint32_t, Files> columnMap;          // column OID -> its files
typedef std::map<int, columnMap*> allColumnMap;       // request key -> per-column results

// Shared between processTable() and the worker threads; guarded by
// columnMapLock. NOTE(review): the columnMap* stored here is erased in
// processTable() without being deleted — looks like a leak; confirm ownership.
allColumnMap wholeMap;
boost::mutex columnMapLock;
ActiveThreadCounter *activeThreadCounter;

// Reads up to bytesReq bytes from pFile into buffer, looping over short
// reads; returns the number of bytes actually read (may be less on EOF).
size_t readFillBuffer(
    idbdatafile::IDBDataFile* pFile,
    char*   buffer,
    size_t  bytesReq)
{
    char*   pBuf = buffer;
    ssize_t nBytes;
    size_t  bytesToRead = bytesReq;
    size_t  totalBytesRead = 0;

    while (1)
    {
        nBytes = pFile->read(pBuf, bytesToRead);
        if (nBytes > 0)
            totalBytesRead += nBytes;
        else
            break;                          // EOF or error: stop with what we have
        if ((size_t)nBytes == bytesToRead)
            break;                          // got everything requested

        pBuf        += nBytes;
        bytesToRead  = bytesToRead - (size_t)nBytes;
    }

    return totalBytesRead;
}

// Returns the number of compressed data bytes in a column segment file, taken
// from the last entry of the compression chunk-pointer list in the headers.
// Throws std::runtime_error on open/read/parse failure.
// NOTE(review): pFile is not deleted on the throw paths below (only on
// success) — apparent resource leak; and if chunkPtrs comes back empty,
// chunkPtrs[k-1] with k == 0 is undefined behavior. Both worth confirming.
off64_t getCompressedDataSize(string& fileName)
{
    off64_t dataSize = 0;
    IDBDataFile* pFile = 0;
    size_t nBytes;

    // Some IDBPolicy functions can throw exceptions, caller will catch it
    IDBPolicy::configIDBPolicy();

    bool bHdfsFile = IDBPolicy::useHdfs();
    if (bHdfsFile)
        pFile = IDBDataFile::open(IDBDataFile::HDFS, fileName.c_str(), "r", 0);
    else
        pFile = IDBDataFile::open(IDBDataFile::BUFFERED, fileName.c_str(), "r", 0);

    if (!pFile)
    {
        std::ostringstream oss;
        oss << "Cannot open file " << fileName << " for read.";
        throw std::runtime_error(oss.str());
    }

    IDBCompressInterface decompressor;

    //--------------------------------------------------------------------------
    // Read headers and extract compression pointers
    //--------------------------------------------------------------------------
    char hdr1[IDBCompressInterface::HDR_BUF_LEN];
    nBytes = readFillBuffer( pFile,hdr1,IDBCompressInterface::HDR_BUF_LEN);
    if ( nBytes != IDBCompressInterface::HDR_BUF_LEN )
    {
        std::ostringstream oss;
        oss << "Error reading first header from file " << fileName;
        throw std::runtime_error(oss.str());
    }

    // Second header holds the chunk-pointer section; its size is the total
    // header size minus the fixed first-header length.
    int64_t ptrSecSize = decompressor.getHdrSize(hdr1) - IDBCompressInterface::HDR_BUF_LEN;
    char* hdr2 = new char[ptrSecSize];
    nBytes = readFillBuffer( pFile,hdr2,ptrSecSize);
    if ( (int64_t)nBytes != ptrSecSize )
    {
        std::ostringstream oss;
        oss << "Error reading second header from file " << fileName;
        throw std::runtime_error(oss.str());
    }

    CompChunkPtrList chunkPtrs;
    int rc = decompressor.getPtrList(hdr2, ptrSecSize, chunkPtrs);
    delete[] hdr2;
    if (rc != 0)
    {
        std::ostringstream oss;
        oss << "Error decompressing second header from file " << fileName;
        throw std::runtime_error(oss.str());
    }

    unsigned k = chunkPtrs.size();
    // last header's offset + length will be the data bytes
    dataSize = chunkPtrs[k-1].first + chunkPtrs[k-1].second;

    delete pFile;
    return dataSize;
}

// Thread-pool functor: collects the segment files (and their sizes) for a
// single column or dictionary OID across all DB roots, then stores the result
// into wholeMap[fKey] under columnMapLock.
struct ColumnThread
{
    ColumnThread(uint32_t oid, int32_t compressionType, bool reportRealUse, int key) :
        fOid(oid), fCompressionType(compressionType), fReportRealUse(reportRealUse), fKey(key)
    {}
    void operator()()
    {
        Config config;
        config.initConfigCache();
        std::vector<uint16_t> rootList;
        config.getRootIdList( rootList );
        FileOp fileOp;
        Files aFiles;

        // This function relies on IDBPolicy being initialized by
        // IDBPolicy::init().  This is done when WriteEngineServer main() calls
        // IDBPolicy::configIDBPolicy();
        IDBDataFile::Types fileType;
        bool bUsingHdfs = IDBPolicy::useHdfs();
        if (bUsingHdfs)
            fileType = IDBDataFile::HDFS;
        else
            fileType = IDBDataFile::UNBUFFERED;
        IDBFileSystem& fs = IDBFileSystem::getFs( fileType );

        for (uint32_t i=0; i < rootList.size(); i++)
        {
            std::vector<struct BRM::EMEntry> entries;
            (void)BRMWrapper::getInstance()->getExtents_dbroot(fOid, entries, rootList[i]);
            std::vector<struct BRM::EMEntry>::const_iterator iter = entries.begin();
            while ( iter != entries.end() ) //organize extents into files
            {
                // Each pass handles the file identified by entries[0]'s
                // partition/segment; the trimming loop below then removes all
                // extents of that file, so entries[0] is always the head of
                // the next unprocessed group.
                //Find the size of this file
                //string fileName;
                char fileName[200];
                (void)fileOp.getFileName( fOid, fileName, rootList[i],
                                          entries[0].partitionNum, entries[0].segmentNum);
                string aFile(fileName); //convert between char* and string
                off64_t fileSize = 0;
                if (fReportRealUse && (fCompressionType > 0))
                {
                    // Real usage: bytes recorded in the compression headers.
                    try {
                        fileSize = getCompressedDataSize(aFile);
                    }
                    catch (std::exception& ex)
                    {
                        // Best-effort: log and fall through with fileSize == 0
                        // (the file is then skipped below).
                        cerr << ex.what();
                    }
                }
                else
                    fileSize = fs.size( fileName );
                if (fileSize > 0) // File exists, add to list
                {
                    FileInfo aFileInfo;
                    aFileInfo.partition = entries[0].partitionNum;
                    aFileInfo.segment = entries[0].segmentNum;
                    aFileInfo.dbRoot = rootList[i];
                    aFileInfo.segFileName = aFile;
                    aFileInfo.fileSize = (double)fileSize / (1024 * 1024 * 1024);
                    aFiles.push_back(aFileInfo);
                    //cout.precision(15);
                    //cout << "The file " << aFileInfo.segFileName << " has size " << fixed << aFileInfo.fileSize << "GB" << endl;
                }
                //erase the entries from this dbroot.
                std::vector<struct BRM::EMEntry> entriesTrimed;
                for (uint32_t m=0; m<entries.size(); m++)
                {
                    if ((entries[0].partitionNum != entries[m].partitionNum) ||
                        (entries[0].segmentNum != entries[m].segmentNum))
                        entriesTrimed.push_back(entries[m]);
                }
                entriesTrimed.swap(entries);
                iter = entries.begin();
            }
        }
        boost::mutex::scoped_lock lk(columnMapLock);
        //cout << "Current size of columnsMap is " << columnsMap.size() << endl;
        allColumnMap::iterator colMapiter = wholeMap.find(fKey);
        if (colMapiter != wholeMap.end())
        {
            (colMapiter->second)->insert(make_pair(fOid,aFiles));
            // NOTE(review): decr() is only reached when fKey is present in
            // wholeMap; if it ever is not, processTable()'s wait loop below
            // would never terminate. Confirm the key is always inserted first.
            activeThreadCounter->decr();
            //cout << "Added to columnsMap aFiles with size " << aFiles.size() << " for oid " << fOid << endl;
        }
    }
    uint32_t fOid;             // column or dictionary OID being measured
    int32_t fCompressionType;  // > 0 means the segment files are compressed
    bool fReportRealUse;       // true: report header-recorded data bytes, not file size
    int fKey;                  // request key into wholeMap
};

//------------------------------------------------------------------------------
// Process a table size based on input from the
// bytestream object.
//------------------------------------------------------------------------------
// Reads schema/table/reportRealUse from bs, spawns one ColumnThread per
// column and dictionary OID, waits for all of them (polling
// activeThreadCounter), then rewrites bs with the serialized results.
// Returns 0 on success, 1 on error (errMsg filled in).
int WE_GetFileSizes::processTable(
    messageqcpp::ByteStream& bs,
    std::string& errMsg, int key)
{
    uint8_t rc = 0;
    errMsg.clear();

    try
    {
        std::string aTableName;
        std::string schemaName;
        bool reportRealUse = false;
        ByteStream::byte tmp8;

        bs >> schemaName;
        //cout << "schema: "<< schemaName << endl;
        bs >> aTableName;
        //cout << "tableName: " << aTableName << endl;
        bs >> tmp8;
        reportRealUse = (tmp8 != 0);

        //get column oids
        boost::shared_ptr<CalpontSystemCatalog> systemCatalogPtr =
            CalpontSystemCatalog::makeCalpontSystemCatalog(0);
        CalpontSystemCatalog::TableName tableName;
        tableName.schema = schemaName;
        tableName.table = aTableName;
        CalpontSystemCatalog::RIDList columnList = systemCatalogPtr->columnRIDs(tableName);
        CalpontSystemCatalog::ColType colType;
        CalpontSystemCatalog::DictOIDList dictOidList = systemCatalogPtr->dictOIDs(tableName);
        int serverThreads = 20;
        int serverQueueSize = serverThreads * 100;
        threadpool::ThreadPool tp(serverThreads,serverQueueSize);
        int totalSize = columnList.size() + dictOidList.size();
        activeThreadCounter = new ActiveThreadCounter(totalSize);

        // Register this request's result map before any worker can finish.
        columnMap *columnsMap = new columnMap();
        {
            boost::mutex::scoped_lock lk(columnMapLock);
            wholeMap[key] = columnsMap;
        }

        for (uint32_t i=0; i < columnList.size(); i++)
        {
            colType = systemCatalogPtr->colType(columnList[i].objnum);
            tp.invoke(ColumnThread(columnList[i].objnum, colType.compressionType, reportRealUse, key));
            // Dictionary store files are measured under the same request key.
            if (colType.ddn.dictOID > 0)
                tp.invoke(ColumnThread(colType.ddn.dictOID, colType.compressionType, reportRealUse, key));
        }
        /*  for (uint32_t i=0; i < dictOidList.size(); i++)
        {
            tp.invoke(ColumnThread(dictOidList[i].dictOID));
        } */
        //check whether all threads finish
        int sleepTime = 100; // sleep 100 milliseconds between checks
        struct timespec rm_ts;
        rm_ts.tv_sec = sleepTime/1000;
        rm_ts.tv_nsec = sleepTime%1000 *1000000;
        // Start non-zero so the loop body runs at least once before the first
        // real counter read.
        uint32_t currentActiveThreads = 10;
        while (currentActiveThreads > 0)
        {
#ifdef _MSC_VER
            Sleep(sleepTime);
#else
            struct timespec abs_ts;
            do
            {
                abs_ts.tv_sec = rm_ts.tv_sec;
                abs_ts.tv_nsec = rm_ts.tv_nsec;
            } while(nanosleep(&abs_ts,&rm_ts) < 0);
#endif
            currentActiveThreads = activeThreadCounter->cur();
        }
    }
    catch(std::exception& ex)
    {
        //cout << "WE_GetFileSizes got exception-" << ex.what() << std::endl;
        errMsg = ex.what();
        rc = 1;
    }

    //Build the message to send to the caller
    bs.reset();
    boost::mutex::scoped_lock lk(columnMapLock);
    allColumnMap::iterator colMapiter = wholeMap.find(key);
    if (colMapiter != wholeMap.end())
    {
        columnMap::iterator iter = colMapiter->second->begin();
        uint64_t size;
        Files::iterator it;
        while ( iter != colMapiter->second->end())
        {
            // Per column: OID, file count, then each FileInfo record.
            bs << iter->first;
            //cout << "processTable::coloid = " << iter->first << endl;
            size = iter->second.size();
            bs << size;
            for (it = iter->second.begin(); it != iter->second.end(); it++)
                it->serialize(bs);
            //cout << "length now is " << bs.length() << endl;
            iter++;
        }
        // NOTE(review): erase() drops the columnMap* without deleting it —
        // confirm whether this allocation is reclaimed anywhere else.
        wholeMap.erase(colMapiter);
    }
    return rc;
}

}
infinidb/infinidb
writeengine/server/we_getfilesizes.cpp
C++
gpl-2.0
11,001
package kawigi.util;

import java.util.Comparator;

/**
 * Static helpers for working with {@link CharSequence} and
 * {@link StringBuilder} values: whitespace trimming, quote/brace mark
 * handling, in-place replacement and searching.
 */
public final class StringsUtil
{
    /**
     * Line separator in current Operating System.
     */
    public static final String CRLF = System.getProperty("line.separator");
    /**
     * Regular expression for line separator matching.
     */
    public static final String sCRLFregex = "\r?\n";

    private static StringsComparator compar = new StringsComparator();

    // Utility class: not instantiable.
    private StringsUtil()
    {}

    /**
     * Returns the index of the first non-whitespace character at or after
     * {@code startInd}, or {@code val.length()} if there is none.
     */
    public static int getFirstNonSpaceInd(CharSequence val, int startInd)
    {
        int res = startInd;
        while (val.length() > res) {
            char c = val.charAt(res);
            if (!Character.isSpaceChar(c) && !Character.isWhitespace(c))
                break;
            ++res;
        }
        return res;
    }

    /** Same as above, scanning from the beginning of the sequence. */
    public static int getFirstNonSpaceInd(CharSequence val)
    {
        return getFirstNonSpaceInd(val, 0);
    }

    /**
     * Returns the index of the last non-whitespace character at or before
     * {@code startInd}, or {@code -1} if there is none.
     */
    public static int getLastNonSpaceInd(CharSequence val, int startInd)
    {
        int res = startInd;
        while (0 <= res) {
            char c = val.charAt(res);
            if (!Character.isSpaceChar(c) && !Character.isWhitespace(c))
                break;
            --res;
        }
        return res;
    }

    /** Same as above, scanning from the end of the sequence. */
    public static int getLastNonSpaceInd(CharSequence val)
    {
        return getLastNonSpaceInd(val, val.length() - 1);
    }

    /** Deletes the run of whitespace starting at {@code startInd}. */
    public static void removeAllNextSpace(StringBuilder val, int startInd)
    {
        int endInd = getFirstNonSpaceInd(val, startInd);
        val.delete(startInd, endInd);
    }

    /** Deletes the run of whitespace ending at {@code endInd} (inclusive). */
    public static void removeAllPrevSpace(StringBuilder val, int endInd)
    {
        int startInd = getLastNonSpaceInd(val, endInd);
        val.delete(startInd + 1, endInd + 1);
    }

    /** In-place trim of leading and trailing whitespace. */
    public static void trim(StringBuilder val)
    {
        removeAllNextSpace(val, 0);
        removeAllPrevSpace(val, val.length() - 1);
    }

    /** Wraps the value in curly braces: {@code x -> {x}}. */
    public static void addArrayMarks(StringBuilder val)
    {
        val.insert(0, '{').append('}');
    }

    /**
     * Trims the value and strips one surrounding pair of curly braces if
     * present (a trailing brace is only removed when a leading one was).
     */
    public static void removeArrayMarks(StringBuilder val)
    {
        trim(val);
        if (0 < val.length()) {
            if ('{' == val.charAt(0)) {
                val.deleteCharAt(0);
                // FIX: for the single-character input "{" the old code called
                // charAt(-1) on the now-empty builder and threw
                // StringIndexOutOfBoundsException; guard the length first.
                if (0 < val.length() && '}' == val.charAt(val.length() - 1))
                    val.setLength(val.length() - 1);
            }
        }
    }

    /** Wraps the value in double quotes: {@code x -> "x"}. */
    public static void addStringMarks(StringBuilder val)
    {
        val.insert(0, '"').append('"');
    }

    /**
     * Trims the value and strips one surrounding pair of double quotes if
     * present (a trailing quote is only removed when a leading one was).
     */
    public static void removeStringMarks(StringBuilder val)
    {
        trim(val);
        if (0 < val.length()) {
            if ('"' == val.charAt(0)) {
                val.deleteCharAt(0);
                // FIX: same single-character guard as removeArrayMarks —
                // input consisting of one quote used to throw.
                if (0 < val.length() && '"' == val.charAt(val.length() - 1))
                    val.setLength(val.length() - 1);
            }
        }
    }

    /** Returns the shared {@link CharSequence} comparator instance. */
    public static Comparator<CharSequence> getComparator()
    {
        return compar;
    }

    /**
     * Compares two strings in CharSequences on equality. CharSequences can't
     * do this check themselves, so I was bound to write this method.
     *
     * @param val1      First value to compare
     * @param val2      Second value to compare
     * @return          <code>true</code> if two values given are equal,
     *                  <code>false</code> otherwise
     */
    public static boolean isEqual(CharSequence val1, CharSequence val2)
    {
        return 0 == compar.compare(val1, val2);
    }

    /**
     * Converts string value to lowercase. Converting made inplace.
     *
     * @param val       Value to be converted and place to make result
     */
    public static void toLower(StringBuilder val)
    {
        for (int i = 0; val.length() > i; ++i) {
            val.setCharAt(i, Character.toLowerCase(val.charAt(i)));
        }
    }

    /**
     * Replaces the value in StringBuilder with another CharSeqence.
     *
     * @param val       StringBuilder for the result to be placed
     * @param seq       Value that must be placed into <code>val</code>
     */
    public static void reset(StringBuilder val, CharSequence seq)
    {
        val.setLength(0);
        val.append(seq);
    }

    /**
     * Replaces the part of string with another string without unnecessary
     * deletions and insertions. Value is changed inplace.
     *
     * @param val       Value to be changed
     * @param start     Start index of replacement object
     * @param end       End index (one char behind end) of replacement object
     * @param seq       Sequence to be inserted instead
     */
    public static void replace(StringBuilder val, int start, int end, CharSequence seq)
    {
        int valPos = start, seqPos = 0;
        // First we char-by-char replace what we can
        for (; valPos != end && seqPos != seq.length(); ++valPos, ++seqPos)
            val.setCharAt(valPos, seq.charAt(seqPos));
        // Then we do deletion and insertion of the rest
        if (valPos < end)
            val.delete(valPos, end);
        else if (seqPos < seq.length())
            val.insert(end, seq, seqPos, seq.length());
    }

    /**
     * Finds first occurence of character in CharSequence starting from index start.
     *
     * @param val       Sequence to search for character
     * @param c         Character to search for
     * @param start     Starting index in sequence to search
     * @return          Index in sequence where character is found or -1
     *                  if it wasn't found
     */
    public static int indexOf(CharSequence val, char c, int start)
    {
        int res = -1;
        for (int i = start; val.length() > i; ++i) {
            if (val.charAt(i) == c) {
                res = i;
                break;
            }
        }
        return res;
    }

    /**
     * Finds first occurence of character in CharSequence starting from the beginning.
     *
     * @param val       Sequence to search for character
     * @param c         Character to search for
     * @return          Index in sequence where character is found or -1
     *                  if it wasn't found
     */
    public static int indexOf(CharSequence val, char c)
    {
        return indexOf(val, c, 0);
    }

    /**
     * Finds last occurence of character in CharSequence starting from index start.
     *
     * @param val       Sequence to search for character
     * @param c         Character to search for
     * @param start     Starting index in sequence to search
     * @return          Index in sequence where character is found or -1
     *                  if it wasn't found
     */
    public static int lastIndexOf(CharSequence val, char c, int start)
    {
        int res = -1;
        int i = start;
        if (val.length() <= i)
            i = val.length() - 1;
        for (; 0 <= i; --i) {
            if (val.charAt(i) == c) {
                res = i;
                break;
            }
        }
        return res;
    }

    /**
     * Finds last occurence of character in CharSequence starting from the beginning.
     *
     * @param val       Sequence to search for character
     * @param c         Character to search for
     * @return          Index in sequence where character is found or -1
     *                  if it wasn't found
     */
    public static int lastIndexOf(CharSequence val, char c)
    {
        return lastIndexOf(val, c, val.length() - 1);
    }

    /**
     * Checks if some string ('needle') stands at particular point ('startInd') in
     * another string ('hay').
     *
     * @param hay       String to look for 'needle' in
     * @param needle    String to check for appearance in 'hay'
     * @param startInd  Index in 'hay' at which 'needle' should stand
     * @return          <code>true</code> if 'needle' stands in 'hay' at position
     *                  'startInd'. <code>false</code> otherwise.
     */
    public static boolean isStringAt(CharSequence hay, CharSequence needle, int startInd)
    {
        boolean res = false;
        if (hay.length() >= startInd + needle.length()) {
            res = true;
            for (int i = 0, j = startInd; needle.length() > i; ++i, ++j) {
                if (hay.charAt(j) != needle.charAt(i)) {
                    res = false;
                    break;
                }
            }
        }
        return res;
    }

    /**
     * Appends some excerpt from character sequence to StringBuilder trimming all
     * white space from excerpt beforehand. An all-whitespace excerpt appends
     * nothing.
     *
     * @param val       Value to be modified
     * @param fromVal   Sequence to make excerpt from
     * @param start     Starting index of excerpt from sequence
     * @param end       Ending index of excerpt from sequence (one character after end)
     */
    public static void appendTrimmed(StringBuilder val, CharSequence fromVal, int start, int end)
    {
        int first = getFirstNonSpaceInd(fromVal, start);
        int last = getLastNonSpaceInd(fromVal, end - 1);
        // FIX: for an all-whitespace excerpt the first-non-space scan runs
        // past 'end', producing start > end for append() and an
        // IndexOutOfBoundsException; skip the append in that case.
        if (first <= last)
            val.append(fromVal, first, last + 1);
    }
}
vexorian/kawigi-edit
kawigi/util/StringsUtil.java
Java
gpl-2.0
8,044
<?PHP
// Host name of the site being checked; used to look up this site's row.
// NOTE: SERVER_NAME can derive from the client-supplied Host header, so it
// must be treated as untrusted input.
$server = $_SERVER['SERVER_NAME'];

/*-----------------INFORMATION & LICENSING-----------------
 * AUTHOR: Christopher Sparrowgrove
 * WEBSITE: https://github.com/nam2long/WMACS
 * DATE: January 13, 2014
 * NAME: WMACS (Website Maintenance/Availability Check System)(Ver 3.0.0.008)
 * DESCRIPTION: This script check the health status of a webapp via a database or script.
 * LICENSE: GNU GENERAL PUBLIC LICENSE (Ver 2.0)
 */

//DEFAULT CONFIGURATIONS
//PAGES
$hpage = "home.php"; //Default Page/Script
$dpage = "down.php"; //Down Page/Script

//OVERRIDE
$wmacs = false; //WMACS Database Support Override: Set below switch if disabled
$override = "1"; // Check ERROR PAGE CONFIG for status numbers

//MYSQL CONFIG
$mysql_host = "localhost"; //Database Host
$mysql_user = "root"; //Database Username
$mysql_pass = ""; //Database Password
$mysql_database = "cms"; //Database
$mysql_table = "wmacs"; //Database Table

// DO NOT MODIFY BEYOND THIS POINT \\
// UNLESS YOU KNOW WHAT YOU ARE DOIN \\
if ($wmacs == true) //if override is true
{
    $switch = $override; //set switch from below override option
}
else
{
    $dbconnect = mysqli_connect($mysql_host, $mysql_user, $mysql_pass, $mysql_database)
        or die ("Sorry, There was an issue connecting to database");

    // FIX: the query previously interpolated $server (untrusted Host header)
    // directly into the SQL string — an injection vector. Use a prepared
    // statement for the value; the table name comes from trusted local config
    // and now actually honours $mysql_table instead of a hard-coded name.
    // (get_result() requires the mysqlnd driver, the default on modern PHP.)
    $stmt = $dbconnect->prepare('SELECT srv,switch FROM '.$mysql_table.' WHERE srv = ?')
        or die ("Sorry, There was an issue when querying the database");
    $stmt->bind_param('s', $server);
    $stmt->execute()
        or die ("Sorry, There was an issue when querying the database");
    $result = $stmt->get_result();
    $count = $result->num_rows;

    // FIX: fetch_object() returns NULL when no row matches this host; the old
    // code dereferenced it unconditionally. Fall back to the override switch
    // so $switch is always defined for the code that consumes it.
    if ($row = $result->fetch_object())
    {
        $srv = $row->srv;       //Grab & Assign Srv URI
        $switch = $row->switch; //Grab & Assign SRV Switch
    }
    else
    {
        $switch = $override;
    }
    $stmt->close();
}
?>
nam2long/WMACS---DP-PHP
config/WMACS.Config.php
PHP
gpl-2.0
1,879
package org.oztrack.validator;

import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;

import org.apache.commons.lang3.StringUtils;
import org.oztrack.app.OzTrackApplication;
import org.oztrack.data.access.UserDao;
import org.oztrack.data.model.User;
import org.springframework.validation.Errors;
import org.springframework.validation.ValidationUtils;
import org.springframework.validation.Validator;

/**
 * Spring {@link Validator} for the user create/edit form.
 *
 * <p>Checks mandatory fields (username, first/last name, email), validates the
 * email address syntactically, and enforces uniqueness of username, email and
 * (when AAF login is enabled) the AAF ID against the database.
 */
public class UserFormValidator implements Validator {
    // DAO used for the uniqueness lookups below.
    private UserDao userDao;

    public UserFormValidator(UserDao userDao) {
        this.userDao = userDao;
    }

    /** Accepts {@link User} and its subclasses. */
    @Override
    public boolean supports(@SuppressWarnings("rawtypes") Class clazz) {
        return User.class.isAssignableFrom(clazz);
    }

    /**
     * Validates the given {@link User}, registering field errors on
     * {@code errors}. Uniqueness checks compare fetched entities to the edited
     * user by reference ({@code !=}) so that editing an existing user does not
     * collide with itself.
     * NOTE(review): reference comparison assumes both objects come from the
     * same persistence context — confirm the DAO returns the managed instance.
     */
    @Override
    public void validate(Object obj, Errors errors) {
        User loginUser = (User) obj;
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "username", "error.empty.field", "Please enter username");
        // Username must not belong to a different account.
        User existingUserByUsername = userDao.getByUsername(loginUser.getUsername());
        if ((existingUserByUsername != null) && (existingUserByUsername != loginUser)) {
            errors.rejectValue("username", "unavailable.user", "This username is unavailable. Please try another.");
        }
        // Email must not belong to a different account.
        User existingUserByEmail = userDao.getByEmail(loginUser.getEmail());
        if ((existingUserByEmail != null) && (existingUserByEmail != loginUser)) {
            errors.rejectValue("email", "unavailable.email", "This email address is already associated with another account.");
        }
        // AAF ID uniqueness is only relevant when AAF federation is enabled
        // and the user actually supplied an ID.
        if (OzTrackApplication.getApplicationContext().isAafEnabled() && StringUtils.isNotBlank(loginUser.getAafId())) {
            User existingUserByAafId = userDao.getByAafId(loginUser.getAafId());
            if ((existingUserByAafId != null) && (existingUserByAafId != loginUser)) {
                errors.rejectValue("aafId", "aafId.user", "This AAF ID is already associated with another account.");
            }
        }
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "firstName", "error.empty.field", "Please enter first name");
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "lastName", "error.empty.field", "Please enter last name");
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "email", "error.empty.field", "Please enter email");
        // Only attempt syntactic validation when the email field passed the
        // emptiness check above (avoids a misleading second error).
        if (!errors.hasFieldErrors("email")) {
            try {
                InternetAddress emailAddr = new InternetAddress(loginUser.getEmail());
                emailAddr.validate();
            }
            catch (AddressException ex) {
                errors.rejectValue("email", "invalid.email", "Email error: " + ex.getMessage());
            }
        }
    }
}
uq-eresearch/oztrack
src/main/java/org/oztrack/validator/UserFormValidator.java
Java
gpl-2.0
2,724
<?php
namespace TYPO3\CMS\Extbase\Utility;

/*
 * This file is part of the TYPO3 CMS project.
 *
 * It is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License, either version 2
 * of the License, or any later version.
 *
 * For the full copyright and license information, please read the
 * LICENSE.txt file that was distributed with this source code.
 *
 * The TYPO3 project - inspiring people to share!
 */

use TYPO3\CMS\Core\Localization\Locales;
use TYPO3\CMS\Core\Localization\LocalizationFactory;
use TYPO3\CMS\Core\Utility\GeneralUtility;
use TYPO3\CMS\Extbase\Configuration\ConfigurationManagerInterface;
use TYPO3\CMS\Extbase\Object\ObjectManager;

/**
 * Localization helper which should be used to fetch localized labels.
 *
 * All state is static: parsed label files are cached per extension in
 * self::$LOCAL_LANG for the lifetime of the request.
 *
 * @api
 */
class LocalizationUtility
{
    /**
     * Path (relative to the extension directory) where locallang files live.
     *
     * @var string
     */
    protected static $locallangPath = 'Resources/Private/Language/';

    /**
     * Local Language content
     *
     * @var array
     */
    protected static $LOCAL_LANG = [];

    /**
     * Contains those LL keys, which have been set to (empty) in TypoScript.
     * This is necessary, as we cannot distinguish between a nonexisting
     * translation and a label that has been cleared by TS.
     * In both cases ['key'][0]['target'] is "".
     *
     * @var array
     */
    protected static $LOCAL_LANG_UNSET = [];

    /**
     * Key of the language to use
     *
     * @var string
     */
    protected static $languageKey = 'default';

    /**
     * Pointer to alternative fall-back language to use
     *
     * @var array
     */
    protected static $alternativeLanguageKeys = [];

    /**
     * Lazily created via getConfigurationManager(); used to read the
     * TypoScript _LOCAL_LANG overrides.
     *
     * @var \TYPO3\CMS\Extbase\Configuration\ConfigurationManagerInterface
     */
    protected static $configurationManager = null;

    /**
     * Returns the localized label of the LOCAL_LANG key, $key.
     *
     * Lookup order: current language key, then the alternative (fallback)
     * language keys in reverse order, then the 'default' language.
     *
     * @param string $key The key from the LOCAL_LANG array for which to return the value.
     * @param string $extensionName The name of the extension
     * @param array $arguments the arguments of the extension, being passed over to vsprintf
     * @return string|NULL The value from LOCAL_LANG or NULL if no translation was found.
     * @api
     * @todo : If vsprintf gets a malformed string, it returns FALSE! Should we throw an exception there?
     */
    public static function translate($key, $extensionName, $arguments = null)
    {
        $value = null;
        if (GeneralUtility::isFirstPartOfStr($key, 'LLL:')) {
            // Fully-qualified "LLL:path:key" reference: bypass the extension cache.
            $value = self::translateFileReference($key);
        } else {
            self::initializeLocalization($extensionName);
            // The "from" charset of csConv() is only set for strings from TypoScript via _LOCAL_LANG
            // isset() on LOCAL_LANG_UNSET distinguishes "cleared by TypoScript"
            // from "no translation at all" (both have an empty 'target').
            if (!empty(self::$LOCAL_LANG[$extensionName][self::$languageKey][$key][0]['target'])
                || isset(self::$LOCAL_LANG_UNSET[$extensionName][self::$languageKey][$key])
            ) {
                // Local language translation for key exists
                $value = self::$LOCAL_LANG[$extensionName][self::$languageKey][$key][0]['target'];
            } elseif (!empty(self::$alternativeLanguageKeys)) {
                $languages = array_reverse(self::$alternativeLanguageKeys);
                foreach ($languages as $language) {
                    if (!empty(self::$LOCAL_LANG[$extensionName][$language][$key][0]['target'])
                        || isset(self::$LOCAL_LANG_UNSET[$extensionName][$language][$key])
                    ) {
                        // Alternative language translation for key exists
                        $value = self::$LOCAL_LANG[$extensionName][$language][$key][0]['target'];
                        break;
                    }
                }
            }
            if ($value === null && (!empty(self::$LOCAL_LANG[$extensionName]['default'][$key][0]['target'])
                || isset(self::$LOCAL_LANG_UNSET[$extensionName]['default'][$key]))
            ) {
                // Default language translation for key exists
                // No charset conversion because default is English and thereby ASCII
                $value = self::$LOCAL_LANG[$extensionName]['default'][$key][0]['target'];
            }
        }
        if (is_array($arguments) && $value !== null) {
            return vsprintf($value, $arguments);
        } else {
            return $value;
        }
    }

    /**
     * Returns the localized label of the LOCAL_LANG key, $key.
     *
     * @param string $key The language key including the path to a custom locallang file ("LLL:path:key").
     * @return string The value from LOCAL_LANG or NULL if no translation was found.
     * @see language::sL()
     * @see \TYPO3\CMS\Frontend\Controller\TypoScriptFrontendController::sL()
     */
    protected static function translateFileReference($key)
    {
        if (TYPO3_MODE === 'FE') {
            $value = self::getTypoScriptFrontendController()->sL($key);
            return $value !== false ? $value : null;
        } elseif (is_object($GLOBALS['LANG'])) {
            $value = self::getLanguageService()->sL($key);
            return $value !== '' ? $value : null;
        } else {
            // No language service available (e.g. CLI bootstrap): return the
            // raw key as a last resort.
            return $key;
        }
    }

    /**
     * Loads local-language values by looking for a "locallang.xlf" (or "locallang.xml") file in the plugin resources directory and if found includes it.
     * Also locallang values set in the TypoScript property "_LOCAL_LANG" are merged onto the values found in the "locallang.xlf" file.
     *
     * Results are memoized per extension in self::$LOCAL_LANG.
     *
     * @param string $extensionName
     */
    protected static function initializeLocalization($extensionName)
    {
        if (isset(self::$LOCAL_LANG[$extensionName])) {
            return;
        }
        $locallangPathAndFilename = 'EXT:' . GeneralUtility::camelCaseToLowerCaseUnderscored($extensionName) . '/' . self::$locallangPath . 'locallang.xlf';
        self::setLanguageKeys();
        /** @var $languageFactory LocalizationFactory */
        $languageFactory = GeneralUtility::makeInstance(LocalizationFactory::class);
        self::$LOCAL_LANG[$extensionName] = $languageFactory->getParsedData($locallangPathAndFilename, self::$languageKey, 'utf-8');
        // Parse each fallback language separately and merge it into the cache.
        foreach (self::$alternativeLanguageKeys as $language) {
            $tempLL = $languageFactory->getParsedData($locallangPathAndFilename, $language, 'utf-8');
            if (self::$languageKey !== 'default' && isset($tempLL[$language])) {
                self::$LOCAL_LANG[$extensionName][$language] = $tempLL[$language];
            }
        }
        self::loadTypoScriptLabels($extensionName);
    }

    /**
     * Sets the currently active language/language_alt keys.
     * Default values are "default" for language key and "" for language_alt key.
     *
     * Frontend: read from TypoScript config.language / config.language_alt
     * (falling back to locale dependencies). Backend: read from the BE user's
     * settings or the global language service.
     */
    protected static function setLanguageKeys()
    {
        self::$languageKey = 'default';
        self::$alternativeLanguageKeys = [];
        if (TYPO3_MODE === 'FE') {
            if (isset(self::getTypoScriptFrontendController()->config['config']['language'])) {
                self::$languageKey = self::getTypoScriptFrontendController()->config['config']['language'];
                if (isset(self::getTypoScriptFrontendController()->config['config']['language_alt'])) {
                    self::$alternativeLanguageKeys[] = self::getTypoScriptFrontendController()->config['config']['language_alt'];
                } else {
                    /** @var $locales \TYPO3\CMS\Core\Localization\Locales */
                    $locales = GeneralUtility::makeInstance(Locales::class);
                    if (in_array(self::$languageKey, $locales->getLocales())) {
                        foreach ($locales->getLocaleDependencies(self::$languageKey) as $language) {
                            self::$alternativeLanguageKeys[] = $language;
                        }
                    }
                }
            }
        } elseif (!empty($GLOBALS['BE_USER']->uc['lang'])) {
            self::$languageKey = $GLOBALS['BE_USER']->uc['lang'];
        } elseif (!empty(self::getLanguageService()->lang)) {
            self::$languageKey = self::getLanguageService()->lang;
        }
    }

    /**
     * Overwrites labels that are set via TypoScript.
     * TS locallang labels have to be configured like:
     * plugin.tx_myextension._LOCAL_LANG.languageKey.key = value
     *
     * Labels cleared to "" in TypoScript are recorded in LOCAL_LANG_UNSET so
     * translate() can tell them apart from missing translations.
     *
     * @param string $extensionName
     */
    protected static function loadTypoScriptLabels($extensionName)
    {
        $configurationManager = static::getConfigurationManager();
        $frameworkConfiguration = $configurationManager->getConfiguration(ConfigurationManagerInterface::CONFIGURATION_TYPE_FRAMEWORK, $extensionName);
        if (!is_array($frameworkConfiguration['_LOCAL_LANG'])) {
            return;
        }
        self::$LOCAL_LANG_UNSET[$extensionName] = [];
        foreach ($frameworkConfiguration['_LOCAL_LANG'] as $languageKey => $labels) {
            // Only override languages that were actually loaded from the file.
            if (!(is_array($labels) && isset(self::$LOCAL_LANG[$extensionName][$languageKey]))) {
                continue;
            }
            foreach ($labels as $labelKey => $labelValue) {
                if (is_string($labelValue)) {
                    self::$LOCAL_LANG[$extensionName][$languageKey][$labelKey][0]['target'] = $labelValue;
                    if ($labelValue === '') {
                        self::$LOCAL_LANG_UNSET[$extensionName][$languageKey][$labelKey] = '';
                    }
                } elseif (is_array($labelValue)) {
                    // Nested TypoScript arrays become dot-separated keys.
                    $labelValue = self::flattenTypoScriptLabelArray($labelValue, $labelKey);
                    foreach ($labelValue as $key => $value) {
                        self::$LOCAL_LANG[$extensionName][$languageKey][$key][0]['target'] = $value;
                        if ($value === '') {
                            self::$LOCAL_LANG_UNSET[$extensionName][$languageKey][$key] = '';
                        }
                    }
                }
            }
        }
    }

    /**
     * Flatten TypoScript label array; converting a hierarchical array into a flat
     * array with the keys separated by dots.
     *
     * Example Input:  array('k1' => array('subkey1' => 'val1'))
     * Example Output: array('k1.subkey1' => 'val1')
     *
     * @param array $labelValues Hierarchical array of labels
     * @param string $parentKey the name of the parent key in the recursion; is only needed for recursion.
     * @return array flattened array of labels.
     */
    protected static function flattenTypoScriptLabelArray(array $labelValues, $parentKey = '')
    {
        $result = [];
        foreach ($labelValues as $key => $labelValue) {
            if (!empty($parentKey)) {
                $key = $parentKey . '.' . $key;
            }
            if (is_array($labelValue)) {
                $labelValue = self::flattenTypoScriptLabelArray($labelValue, $key);
                $result = array_merge($result, $labelValue);
            } else {
                $result[$key] = $labelValue;
            }
        }
        return $result;
    }

    /**
     * Returns instance of the configuration manager
     *
     * @return \TYPO3\CMS\Extbase\Configuration\ConfigurationManagerInterface
     */
    protected static function getConfigurationManager()
    {
        if (!is_null(static::$configurationManager)) {
            return static::$configurationManager;
        }
        $objectManager = GeneralUtility::makeInstance(ObjectManager::class);
        $configurationManager = $objectManager->get(ConfigurationManagerInterface::class);
        static::$configurationManager = $configurationManager;
        return $configurationManager;
    }

    /**
     * @return \TYPO3\CMS\Frontend\Controller\TypoScriptFrontendController
     */
    protected static function getTypoScriptFrontendController()
    {
        return $GLOBALS['TSFE'];
    }

    /**
     * @return \TYPO3\CMS\Lang\LanguageService
     */
    protected static function getLanguageService()
    {
        return $GLOBALS['LANG'];
    }
}
ksjogo/TYPO3.CMS
typo3/sysext/extbase/Classes/Utility/LocalizationUtility.php
PHP
gpl-2.0
12,059
<?php

/**
 * CRUD controller for the employee/subject assignment join records.
 *
 * Besides the standard Yii CRUD actions it provides AJAX helpers for the
 * assignment UI (actionSubject/actionEmployee/actionCurrent) and
 * assign/remove actions operating on (employee_id, subject_id) pairs.
 */
class EmployeesSubjectsController extends RController
{
    /**
     * @var string the default layout for the views. Defaults to '//layouts/column2', meaning
     * using two-column layout. See 'protected/views/layouts/column2.php'.
     */
    public $layout='//layouts/column2';

    /**
     * @return array action filters
     */
    public function filters()
    {
        return array(
            'rights', // perform access control for CRUD operations
        );
    }

    /**
     * Specifies the access control rules.
     * This method is used by the 'accessControl' filter.
     * @return array access control rules
     */
    public function accessRules()
    {
        return array(
            array('allow',  // allow all users to perform 'index' and 'view' actions
                'actions'=>array('index','view','Assign','Deleterow'),
                'users'=>array('*'),
            ),
            array('allow', // allow authenticated user to perform 'create' and 'update' actions
                'actions'=>array('create','update','subject','current','remove','employee'),
                'users'=>array('@'),
            ),
            array('allow', // allow admin user to perform 'admin' and 'delete' actions
                'actions'=>array('admin','delete'),
                'users'=>array('admin'),
            ),
            array('deny',  // deny all users
                'users'=>array('*'),
            ),
        );
    }

    /**
     * Displays a particular model.
     * @param integer $id the ID of the model to be displayed
     */
    public function actionView($id)
    {
        $this->render('view',array(
            'model'=>$this->loadModel($id),
        ));
    }

    /**
     * Creates a new model.
     * If creation is successful, the browser will be redirected to the 'view' page.
     */
    public function actionCreate()
    {
        $model=new EmployeesSubjects;

        // Uncomment the following line if AJAX validation is needed
        // $this->performAjaxValidation($model);

        if(isset($_POST['EmployeesSubjects']))
        {
            $model->attributes=$_POST['EmployeesSubjects'];
            if($model->save())
                $this->redirect(array('view','id'=>$model->id));
        }

        $this->render('create',array(
            'model'=>$model,
        ));
    }

    /**
     * Updates a particular model.
     * If update is successful, the browser will be redirected to the 'view' page.
     * @param integer $id the ID of the model to be updated
     */
    public function actionUpdate($id)
    {
        $model=$this->loadModel($id);

        // Uncomment the following line if AJAX validation is needed
        // $this->performAjaxValidation($model);

        if(isset($_POST['EmployeesSubjects']))
        {
            $model->attributes=$_POST['EmployeesSubjects'];
            if($model->save())
                $this->redirect(array('view','id'=>$model->id));
        }

        $this->render('update',array(
            'model'=>$model,
        ));
    }

    /**
     * Deletes a single assignment row by primary key and returns to the
     * assignment screen, preserving the course/subject/department selection.
     * @throws CHttpException 404 when no row exists for the given id
     */
    public function actionDeleterow()
    {
        $postRecord = EmployeesSubjects::model()->findByPk($_REQUEST['id']);
        // Guard against a stale/invalid id: findByPk() returns null when no
        // row matches, and calling delete() on null would be a fatal error.
        if($postRecord===null)
            throw new CHttpException(404,'The requested page does not exist.');
        $postRecord->delete();
        $this->redirect(array('create','cou'=>$_REQUEST['cou'],'sub'=>$_REQUEST['sub'],'dept'=>$_REQUEST['dept']));
    }

    /**
     * Deletes a particular model.
     * If deletion is successful, the browser will be redirected to the 'admin' page.
     * @param integer $id the ID of the model to be deleted
     */
    public function actionDelete($id)
    {
        if(Yii::app()->request->isPostRequest)
        {
            // we only allow deletion via POST request
            $this->loadModel($id)->delete();

            // if AJAX request (triggered by deletion via admin grid view), we should not redirect the browser
            if(!isset($_GET['ajax']))
                $this->redirect(isset($_POST['returnUrl']) ? $_POST['returnUrl'] : array('admin'));
        }
        else
            throw new CHttpException(400,'Invalid request. Please do not repeat this request again.');
    }

    /**
     * Lists all models.
     */
    public function actionIndex()
    {
        $dataProvider=new CActiveDataProvider('EmployeesSubjects');
        $this->render('index',array(
            'dataProvider'=>$dataProvider,
        ));
    }

    /**
     * Creates a new (employee, subject) assignment from request parameters and
     * returns to the assignment screen with the current selection preserved.
     */
    public function actionAssign()
    {
        $model = new EmployeesSubjects;
        $model->employee_id = $_REQUEST['emp_id'];
        $model->subject_id = $_REQUEST['sub'];
        $model->save();
        $this->redirect(array('create','cou'=>$_REQUEST['cou'],'sub'=>$_REQUEST['sub'],'dept'=>$_REQUEST['dept']));
    }

    /**
     * Manages all models.
     */
    public function actionAdmin()
    {
        $model=new EmployeesSubjects('search');
        $model->unsetAttributes();  // clear any default values
        if(isset($_GET['EmployeesSubjects']))
            $model->attributes=$_GET['EmployeesSubjects'];

        $this->render('admin',array(
            'model'=>$model,
        ));
    }

    /**
     * Returns the data model based on the primary key given in the GET variable.
     * If the data model is not found, an HTTP exception will be raised.
     * @param integer the ID of the model to be loaded
     */
    public function loadModel($id)
    {
        $model=EmployeesSubjects::model()->findByPk($id);
        if($model===null)
            throw new CHttpException(404,'The requested page does not exist.');
        return $model;
    }

    /**
     * Performs the AJAX validation.
     * @param CModel the model to be validated
     */
    protected function performAjaxValidation($model)
    {
        if(isset($_POST['ajax']) && $_POST['ajax']==='employees-subjects-form')
        {
            echo CActiveForm::validate($model);
            Yii::app()->end();
        }
    }

    /**
     * AJAX helper: prints <option> tags for all subjects belonging to the
     * course id posted in $_POST['id'] (used for dependent dropdowns).
     */
    public function actionSubject()
    {
        $data=Subjects::model()->findAll(array('join' => 'JOIN batches ON batch_id = batches.id','condition'=>'batches.course_id=:id',
            'params'=>array(':id'=>(int) $_POST['id'])));
        $data=CHtml::listData($data,'id','name');
        foreach($data as $value=>$name)
        {
            echo CHtml::tag('option', array('value'=>$value),CHtml::encode($name),true);
        }
    }

    /**
     * AJAX helper: prints <option> tags for all employees in the department
     * id posted in $_POST['did'].
     */
    public function actionEmployee()
    {
        $data=Employees::model()->findAll(array('order'=>'first_name DESC','condition'=>'employee_department_id=:id',
            'params'=>array(':id'=>(int) $_POST['did'])));
        $data=CHtml::listData($data,'id','first_name');
        foreach($data as $value=>$name)
        {
            echo CHtml::tag('option', array('value'=>$value),CHtml::encode($name),true);
        }
    }

    /**
     * AJAX helper: renders the assignment partial for the posted subject, or
     * echoes 'remove' when no subject was posted (consumed by client JS).
     */
    public function actionCurrent()
    {
        if(isset($_POST['EmployeesSubjects']['subject_id']))
        {
            $this->renderPartial('assign',array('subject_id' =>$_POST['EmployeesSubjects']['subject_id']));
        }
        else
        {
            echo 'remove';
        }
    }

    /**
     * Removes the assignment identified by (subject_id, employee_id) request
     * parameters, then redirects back to the create screen.
     * @throws CHttpException 404 when no matching assignment exists
     */
    public function actionRemove()
    {
        $record = EmployeesSubjects::model()->findByAttributes(array('subject_id'=>$_REQUEST['subject_id'],'employee_id'=>$_REQUEST['employee_id']));
        // findByAttributes() returns null when no matching row exists; calling
        // delete() on null would be a fatal error.
        if($record===null)
            throw new CHttpException(404,'The requested page does not exist.');
        $record->delete();
        $this->redirect(Yii::app()->createUrl('EmployeesSubjects/create'));
    }
}
napoleon789/qlkh
osv/protected/modules/employees/controllers/EmployeesSubjectsController.php
PHP
gpl-2.0
6,334
<article <?php hybrid_attr( 'post' ); // Template for standard posts: full view when singular, excerpt card otherwise. ?>>

	<?php if ( is_singular( get_post_type() ) ) : // If viewing a single post. ?>

		<header class="entry-header">
			<h1 <?php hybrid_attr( 'entry-title' ); ?>><?php single_post_title(); ?></h1>
			<?php hybrid_post_terms( array( 'taxonomy' => 'category' ) ); ?>
			<div class="entry-byline">
			</div><!-- .entry-byline -->
		</header><!-- .entry-header -->

		<div <?php hybrid_attr( 'entry-content' ); // Full post body plus paginated-post links. ?>>
			<?php the_content(); ?>
			<?php wp_link_pages(); ?>
		</div><!-- .entry-content -->

		<footer class="entry-footer">
			<time <?php hybrid_attr( 'entry-published' ); ?>><?php echo get_the_date(); ?></time>
			<?php esc_html_e('by', 'rakiya' ); ?>
			<span <?php hybrid_attr( 'entry-author' ); ?>><?php the_author_posts_link(); ?></span>
			<?php edit_post_link(); ?>
			<?php hybrid_post_terms( array( 'taxonomy' => 'post_tag', 'text' => esc_html__( 'Tagged: %s', 'rakiya' ), 'before' => '<br />' ) ); ?>
		</footer><!-- .entry-footer -->

	<?php else : // If not viewing a single post. ?>

		<?php get_the_image(); // Featured-image thumbnail (Get The Image plugin/Hybrid helper). ?>

		<header class="entry-header">
			<?php the_title( '<h2 ' . hybrid_get_attr( 'entry-title' ) . '><a href="' . get_permalink() . '" rel="bookmark" itemprop="url">', '</a></h2>' ); ?>
			<?php hybrid_post_terms( array( 'taxonomy' => 'category' ) ); ?>
		</header><!-- .entry-header -->

		<div <?php hybrid_attr( 'entry-summary' ); // Excerpt only in archive/list context. ?>>
			<?php the_excerpt(); ?>
		</div><!-- .entry-summary -->

		<footer class="entry-footer">
			<time <?php hybrid_attr( 'entry-published' ); ?>><?php echo get_the_date(); ?></time>
			<?php esc_html_e( 'by', 'rakiya' ); ?>
			<span <?php hybrid_attr( 'entry-author' ); ?>><?php the_author_posts_link(); ?></span>
		</footer>

	<?php endif; // End single post check. ?>

</article><!-- .entry -->
mlloewen/chinhama
wp-content/themes/rakiya/content/content.php
PHP
gpl-2.0
1,824
/** * OpenKM, Open Document Management System (http://www.openkm.com) * Copyright (c) 2006-2017 Paco Avila & Josep Llort * <p> * No bytes were intentionally harmed during the development of this application. * <p> * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * <p> * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * <p> * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package com.openkm.api; import com.openkm.bean.AppVersion; import com.openkm.bean.ExtendedAttributes; import com.openkm.bean.Folder; import com.openkm.core.*; import com.openkm.module.ModuleManager; import com.openkm.module.RepositoryModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OKMRepository implements RepositoryModule { private static Logger log = LoggerFactory.getLogger(OKMRepository.class); private static OKMRepository instance = new OKMRepository(); private OKMRepository() { } public static OKMRepository getInstance() { return instance; } @Override public Folder getRootFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getRootFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder rootFolder = rm.getRootFolder(token); log.debug("getRootFolder: {}", rootFolder); return rootFolder; } @Override public Folder getTrashFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, 
DatabaseException { log.debug("getTrashFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder trashFolder = rm.getTrashFolder(token); log.debug("getTrashFolder: {}", trashFolder); return trashFolder; } @Override public Folder getTrashFolderBase(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getTrashFolderBase({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder trashFolder = rm.getTrashFolderBase(token); log.debug("getTrashFolderBase: {}", trashFolder); return trashFolder; } @Override public Folder getTemplatesFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getTemplatesFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder templatesFolder = rm.getTemplatesFolder(token); log.debug("getTemplatesFolder: {}", templatesFolder); return templatesFolder; } @Override public Folder getPersonalFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getPersonalFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder personalFolder = rm.getPersonalFolder(token); log.debug("getPersonalFolder: {}", personalFolder); return personalFolder; } @Override public Folder getPersonalFolderBase(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getPersonalFolderBase({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder personalFolder = rm.getPersonalFolderBase(token); log.debug("getPersonalFolderBase: {}", personalFolder); return personalFolder; } @Override public Folder getMailFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getMailFolder({})", token); RepositoryModule rm = 
ModuleManager.getRepositoryModule(); Folder mailFolder = rm.getMailFolder(token); log.debug("getMailFolder: {}", mailFolder); return mailFolder; } @Override public Folder getMailFolderBase(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getMailFolderBase({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder mailFolder = rm.getMailFolderBase(token); log.debug("getMailFolderBase: {}", mailFolder); return mailFolder; } @Override public Folder getThesaurusFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getThesaurusFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder thesaurusFolder = rm.getThesaurusFolder(token); log.debug("getThesaurusFolder: {}", thesaurusFolder); return thesaurusFolder; } @Override public Folder getCategoriesFolder(String token) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getCategoriesFolder({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); Folder categoriesFolder = rm.getCategoriesFolder(token); log.debug("getCategoriesFolder: {}", categoriesFolder); return categoriesFolder; } @Override public void purgeTrash(String token) throws PathNotFoundException, AccessDeniedException, LockException, RepositoryException, DatabaseException { log.debug("purgeTrash({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); rm.purgeTrash(token); log.debug("purgeTrash: void"); } @Override public String getUpdateMessage(String token) throws RepositoryException { log.debug("getUpdateMessage({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); String updateMessage = rm.getUpdateMessage(token); log.debug("getUpdateMessage: {}", updateMessage); return updateMessage; } @Override public String getRepositoryUuid(String token) throws 
RepositoryException { log.debug("getRepositoryUuid({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); String uuid = rm.getRepositoryUuid(token); log.debug("getRepositoryUuid: {}", uuid); return uuid; } @Override public boolean hasNode(String token, String path) throws AccessDeniedException, RepositoryException, DatabaseException { log.debug("hasNode({})", token, path); RepositoryModule rm = ModuleManager.getRepositoryModule(); boolean ret = rm.hasNode(token, path); log.debug("hasNode: {}", ret); return ret; } @Override public String getNodePath(String token, String uuid) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getNodePath({}, {})", token, uuid); RepositoryModule rm = ModuleManager.getRepositoryModule(); String ret = rm.getNodePath(token, uuid); log.debug("getNodePath: {}", ret); return ret; } @Override public String getNodeUuid(String token, String path) throws AccessDeniedException, PathNotFoundException, RepositoryException, DatabaseException { log.debug("getNodeUuid({}, {})", token, path); RepositoryModule rm = ModuleManager.getRepositoryModule(); String ret = rm.getNodeUuid(token, path); log.debug("getNodeUuid: {}", ret); return ret; } public AppVersion getAppVersion(String token) throws AccessDeniedException, RepositoryException, DatabaseException { log.debug("getAppVersion({})", token); RepositoryModule rm = ModuleManager.getRepositoryModule(); AppVersion ret = rm.getAppVersion(token); log.debug("getAppVersion: {}", ret); return ret; } @Override public void copyAttributes(String token, String srcId, String dstId, ExtendedAttributes extAttr) throws AccessDeniedException, PathNotFoundException, DatabaseException { log.debug("copyAttributes({}, {}, {}, {})", new Object[]{token, srcId, dstId, extAttr}); RepositoryModule rm = ModuleManager.getRepositoryModule(); rm.copyAttributes(token, srcId, dstId, extAttr); log.debug("copyAttributes: void"); } }
Beau-M/document-management-system
src/main/java/com/openkm/api/OKMRepository.java
Java
gpl-2.0
8,219
// Toolbar handler for the "add tickets" button: requires exactly one list row
// to be selected before submitting the admin form with the 'addtickets' task.
akeeba.jQuery(document).ready(function ($) {
	akeeba.jQuery('#addTickets').click(function () {
		var selectedRows = document.adminForm.boxchecked.value;

		// Nothing selected: show the localised "choose one" message and abort.
		if (selectedRows == 0) {
			alert(akeeba.jQuery('#chooseone').val());

			return false;
		}

		// More than one selected: show the "choose only one" message and abort.
		if (selectedRows > 1) {
			alert(akeeba.jQuery('#chooseonlyone').val());

			return false;
		}

		// Exactly one row selected: fire the Joomla task submission.
		Joomla.submitbutton('addtickets');
	});
});
SirPiter/folk
www/media/com_ats/js/adm_buckets_choose.js
JavaScript
gpl-2.0
382
# -*- coding: utf-8 -*-

"""
***************************************************************************
    PostGISExecuteSQL.py
    ---------------------
    Date                 : October 2012
    Copyright            : (C) 2012 by Victor Olaya and Carterix Geomatics
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

from sextante.core.GeoAlgorithm import GeoAlgorithm

__author__ = 'Victor Olaya, Carterix Geomatics'
__date__ = 'October 2012'
__copyright__ = '(C) 2012, Victor Olaya, Carterix Geomatics'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

import os
from qgis.core import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from sextante.parameters.ParameterString import ParameterString
from sextante.admintools import postgis_utils
from sextante.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException


class PostGISExecuteSQL(GeoAlgorithm):
    """SEXTANTE geoalgorithm that runs an arbitrary SQL statement against a
    PostGIS database chosen by its QGIS connection name.

    Note: this is Python 2 / QGIS 1.x-era code (old-style ``except X, e``
    syntax and the QVariant ``.toString()`` API).
    """

    # Parameter names (keys into the algorithm's parameter set).
    DATABASE = "DATABASE"
    SQL = "SQL"

    def getIcon(self):
        # Icon shipped alongside the plugin sources.
        return QIcon(os.path.dirname(__file__) + "/../images/postgis.png")

    def processAlgorithm(self, progress):
        # Resolve the named connection from QGIS' stored PostgreSQL settings
        # (QSettings under /PostgreSQL/connections/<name>).
        connection = self.getParameterValue(self.DATABASE)
        settings = QSettings()
        mySettings = "/PostgreSQL/connections/"+ connection
        try:
            database = settings.value(mySettings+"/database").toString()
            username = settings.value(mySettings+"/username").toString()
            host = settings.value(mySettings+"/host").toString()
            port = int(settings.value(mySettings+"/port").toString())
            password = settings.value(mySettings+"/password").toString()
        except Exception, e:
            # Any missing key means the connection name does not exist.
            raise GeoAlgorithmExecutionException("Wrong database connection name: " + connection)
        try:
            self.db = postgis_utils.GeoDB(host=host, port=port, dbname=database, user=username, passwd=password)
        except postgis_utils.DbError, e:
            raise GeoAlgorithmExecutionException("Couldn't connect to database:\n"+e.message)
        # Collapse the (possibly multi-line) user query onto one line. The SQL
        # is executed verbatim by design: this tool exists to run user SQL.
        sql = self.getParameterValue(self.SQL).replace("\n", " ")
        try:
            self.db._exec_sql_and_commit(str(sql))
        except postgis_utils.DbError, e:
            raise GeoAlgorithmExecutionException("Error executing SQL:\n"+e.message)

    def defineCharacteristics(self):
        # Registers the algorithm's name, group and its two string parameters
        # (the SQL parameter is multi-line, hence the trailing True).
        self.name = "PostGIS execute SQL"
        self.group = "PostGIS management tools"
        self.addParameter(ParameterString(self.DATABASE, "Database"))
        self.addParameter(ParameterString(self.SQL, "SQL query", "", True))
bstroebl/QGIS
python/plugins/sextante/admintools/PostGISExecuteSQL.py
Python
gpl-2.0
3,170
<?php
/*
 * Theme Note
 * -------------------
 * - Changed the location of cross-sells section (woocommerce_cart_collaterals action) and adjusted the layout a bit
 * - Added if-else for "wc_print_notices()", "wp_nonce_field( 'woocommerce-cart' );"
 * - Note: This file is based on WooC 2.1.9
 *
 */
/**
 * Cart Page
 *
 * @author  WooThemes
 * @package WooCommerce/Templates
 * @version 2.1.0
 */

if ( ! defined( 'ABSPATH' ) ) exit; // Exit if accessed directly

global $woocommerce;

// The notices API was renamed in WooCommerce 2.1; support both old and new versions.
if ( version_compare( WOOCOMMERCE_VERSION, '2.1' ) >= 0 ) {
	wc_print_notices();
} else {
	$woocommerce->show_messages();
}

do_action( 'woocommerce_before_cart' ); ?>

<form action="<?php echo esc_url( WC()->cart->get_cart_url() ); ?>" method="post">

<?php do_action( 'woocommerce_before_cart_table' ); ?>

<table class="shop_table cart" cellspacing="0">
	<thead>
		<tr>
			<th class="product-remove">&nbsp;</th>
			<th class="product-thumbnail">&nbsp;</th>
			<th class="product-name"><?php _e( 'Product', 'woocommerce' ); ?></th>
			<th class="product-price"><?php _e( 'Price', 'woocommerce' ); ?></th>
			<th class="product-quantity"><?php _e( 'Quantity', 'woocommerce' ); ?></th>
			<th class="product-subtotal"><?php _e( 'Total', 'woocommerce' ); ?></th>
		</tr>
	</thead>
	<tbody>
		<?php do_action( 'woocommerce_before_cart_contents' ); ?>

		<?php
		// One row per cart line item; every cell is filterable so plugins can override it.
		foreach ( WC()->cart->get_cart() as $cart_item_key => $cart_item ) {
			$_product   = apply_filters( 'woocommerce_cart_item_product', $cart_item['data'], $cart_item, $cart_item_key );
			$product_id = apply_filters( 'woocommerce_cart_item_product_id', $cart_item['product_id'], $cart_item, $cart_item_key );

			// Skip items whose product no longer exists, with zero quantity, or hidden by a filter.
			if ( $_product && $_product->exists() && $cart_item['quantity'] > 0 && apply_filters( 'woocommerce_cart_item_visible', true, $cart_item, $cart_item_key ) ) {
				?>
				<tr class="<?php echo esc_attr( apply_filters( 'woocommerce_cart_item_class', 'cart_item', $cart_item, $cart_item_key ) ); ?>">

					<td class="product-remove">
						<?php
							echo apply_filters( 'woocommerce_cart_item_remove_link', sprintf( '<a href="%s" class="remove" title="%s">&times;</a>', esc_url( WC()->cart->get_remove_url( $cart_item_key ) ), __( 'Remove this item', 'woocommerce' ) ), $cart_item_key );
						?>
					</td>

					<td class="product-thumbnail">
						<?php
							$thumbnail = apply_filters( 'woocommerce_cart_item_thumbnail', $_product->get_image(), $cart_item, $cart_item_key );

							// Hidden products are not linked to their (non-visible) product page.
							if ( ! $_product->is_visible() )
								echo $thumbnail;
							else
								printf( '<a href="%s">%s</a>', $_product->get_permalink(), $thumbnail );
						?>
					</td>

					<td class="product-name">
						<?php
							if ( ! $_product->is_visible() )
								echo apply_filters( 'woocommerce_cart_item_name', $_product->get_title(), $cart_item, $cart_item_key );
							else
								echo apply_filters( 'woocommerce_cart_item_name', sprintf( '<a href="%s">%s</a>', $_product->get_permalink(), $_product->get_title() ), $cart_item, $cart_item_key );

							// Meta data
							echo WC()->cart->get_item_data( $cart_item );

							// Backorder notification
							if ( $_product->backorders_require_notification() && $_product->is_on_backorder( $cart_item['quantity'] ) )
								echo '<p class="backorder_notification">' . __( 'Available on backorder', 'woocommerce' ) . '</p>';
						?>
					</td>

					<td class="product-price">
						<?php
							echo apply_filters( 'woocommerce_cart_item_price', WC()->cart->get_product_price( $_product ), $cart_item, $cart_item_key );
						?>
					</td>

					<td class="product-quantity">
						<?php
							// "Sold individually" products are pinned to quantity 1 via a hidden field.
							if ( $_product->is_sold_individually() ) {
								$product_quantity = sprintf( '1 <input type="hidden" name="cart[%s][qty]" value="1" />', $cart_item_key );
							} else {
								$product_quantity = woocommerce_quantity_input( array(
									'input_name'  => "cart[{$cart_item_key}][qty]",
									'input_value' => $cart_item['quantity'],
									'max_value'   => $_product->backorders_allowed() ? '' : $_product->get_stock_quantity(),
									'min_value'   => '0'
								), $_product, false );
							}

							echo apply_filters( 'woocommerce_cart_item_quantity', $product_quantity, $cart_item_key );
						?>
					</td>

					<td class="product-subtotal">
						<?php
							echo apply_filters( 'woocommerce_cart_item_subtotal', WC()->cart->get_product_subtotal( $_product, $cart_item['quantity'] ), $cart_item, $cart_item_key );
						?>
					</td>
				</tr>
				<?php
			}
		}

		do_action( 'woocommerce_cart_contents' );
		?>
		<tr>
			<td colspan="6" class="actions">

				<?php if ( WC()->cart->coupons_enabled() ) { ?>
					<div class="coupon">
						<label for="coupon_code"><?php _e( 'Coupon', 'woocommerce' ); ?>:</label> <input type="text" name="coupon_code" class="input-text" id="coupon_code" value="" placeholder="<?php _e( 'Coupon code', 'woocommerce' ); ?>" /> <input type="submit" class="button" name="apply_coupon" value="<?php _e( 'Apply Coupon', 'woocommerce' ); ?>" />
						<?php do_action('woocommerce_cart_coupon'); ?>
					</div>
				<?php } ?>

				<input type="submit" class="button" name="update_cart" value="<?php _e( 'Update Cart', 'woocommerce' ); ?>" /> <input type="submit" class="checkout-button button alt wc-forward" name="proceed" value="<?php _e( 'Proceed to Checkout', 'woocommerce' ); ?>" />

				<?php do_action( 'woocommerce_proceed_to_checkout' ); ?>

				<?php
				// Nonce helper moved in WooCommerce 2.1; keep both branches for backwards compatibility.
				if ( version_compare( WOOCOMMERCE_VERSION, '2.1' ) >= 0 ) {
					wp_nonce_field( 'woocommerce-cart' );
				} else {
					$woocommerce->nonce_field('cart');
				}
				?>
			</td>
		</tr>

		<?php do_action( 'woocommerce_after_cart_contents' ); ?>
	</tbody>
</table>

<?php do_action( 'woocommerce_after_cart_table' ); ?>

</form>

<?php // Theme customization: totals and shipping calculator side-by-side (see Theme Note above). ?>
<div class="row">
	<div class="uxb-col large-6 columns">
		<?php woocommerce_cart_totals(); ?>
	</div>
	<div class="uxb-col large-6 columns">
		<?php woocommerce_shipping_calculator(); ?>
	</div>
</div>

<?php do_action( 'woocommerce_cart_collaterals' ); ?>

<?php do_action( 'woocommerce_after_cart' ); ?>
gitprj/samantha-ohlsen-photography
wp-content/themes/fineliner/woocommerce/cart/cart.php
PHP
gpl-2.0
6,140
package com.numhero.client.model.datacargo.auth;

import com.numhero.shared.datacargo.CommandResponse;

import java.util.Date;

/**
 * Response payload for an authentication command.
 *
 * <p>Carries the server-issued session identifier and the instant at which
 * that session expires. Because {@link Date} is mutable, the accessors
 * exchange defensive copies so external code cannot silently alter this
 * object's state (and vice versa).
 */
public class AuthResponse implements CommandResponse {

    /** Server-assigned session identifier; {@code null} until set. */
    private String sessionID;
    /** Session expiry instant; {@code null} until set. */
    private Date expirationDate;

    /** @return the session identifier, or {@code null} if none has been set */
    public String getSessionID() {
        return sessionID;
    }

    /** @param sessionID the session identifier issued by the server */
    public void setSessionID(String sessionID) {
        this.sessionID = sessionID;
    }

    /** @return a defensive copy of the expiration date, or {@code null} if unset */
    public Date getExpirationDate() {
        return expirationDate == null ? null : new Date(expirationDate.getTime());
    }

    /** @param expirationDate the session expiry instant; stored as a defensive copy */
    public void setExpirationDate(Date expirationDate) {
        this.expirationDate = expirationDate == null ? null : new Date(expirationDate.getTime());
    }
}
midaboghetich/netnumero
src/com/numhero/client/model/datacargo/auth/AuthResponse.java
Java
gpl-2.0
594
'use strict';

// Angular modules this application depends on.
var appDependencies = [
  'ngCookies',
  'ngResource',
  'ngSanitize',
  'ui.router',
  'ui.bootstrap'
];

/**
 * Routing/location configuration: unknown URLs fall back to '/',
 * and HTML5 history-API mode is enabled.
 *
 * Parameter names must match the provider names exactly, because
 * AngularJS infers injection from them.
 */
function configureApp($stateProvider, $urlRouterProvider, $locationProvider) {
  $urlRouterProvider.otherwise('/');

  $locationProvider.html5Mode(true);
}

angular
  .module('syliusApp', appDependencies)
  .config(configureApp);
nike-17/sylius.ru
client/app/app.js
JavaScript
gpl-2.0
295
<?php
/**
 * Stream handler that renders a database table as XML.
 *
 * Reads a table name from the stream path, runs a SELECT against the global
 * DB pool, and converts the result set to XML via PEAR's XML_db2xml.
 */
class bx_streams_db2xml extends bx_streams_buffer {

    /**
     * Builds the XML representation for the table encoded in $path.
     *
     * @param string $path stream path; the table name is the first path
     *                     segment between slashes (e.g. "/mytable/").
     * @return string XML document fragment, "<nothingFound/>" when the query
     *                fails or returns no rows, or '' when no table was found.
     */
    function contentOnRead($path) {
        $db2xml = new XML_db2xml(NULL, NULL, 'Extended');
        $xml = '';
        // 'xml_seperator' is the option key expected by XML_db2xml (sic).
        $options = array( 'formatOptions' => array ( 'xml_seperator' => '', 'element_id' => 'id' ) );
        $db2xml->Format->SetOptions($options);

        // Extract the table name between the first pair of slashes.
        // NOTE(review): when the pattern does not match, $table stays
        // undefined and the !empty() check below relies on that (raises an
        // E_NOTICE under strict error reporting).
        if(preg_match('/\/(.*)[\/]/', $path, $matches)) {
            $table = $matches[1];
        }

        // Optional WHERE clause supplied as a stream parameter.
        $where = $this->getParameter('where');

        if(!empty($table)) {
            // NOTE(review): $table and $where are interpolated into the SQL
            // unescaped — safe only if the path/parameters come from trusted
            // configuration, not from end users. Verify the callers.
            $query = "select * from $table";
            if(!empty($where)) {
                $query .= " where $where";
            }
            $res = $GLOBALS['POOL']->db->query($query);
            if (PEAR::isError($res) || $res->numRows() == 0) {
                $xml = "<nothingFound/>";
            } else {
                $xml = $db2xml->getXML($res);
            }
        }
        return $xml;
    }

    /**
     * Writing to this stream is a no-op (read-only stream).
     *
     * @param string $content ignored
     */
    function contentOnWrite($content) {
    }
}
chregu/fluxcms
inc/bx/streams/db2xml.php
PHP
gpl-2.0
1,061
package to.rtc.rtc2jira.exporter.jira.entities;

import java.util.Date;

import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import org.codehaus.jackson.map.annotate.JsonView;

/**
 * JIRA REST entity describing a project version.
 *
 * <p>Plain bean serialized by Jackson; {@code @JsonView} annotations control
 * which properties appear in which REST payloads (see {@code IssueView}).
 * The REST resource lives under {@code /version} (see {@link #getPath()}).
 */
public class Version extends NamedEntity {

  // Package-private fields, accessed through the getters/setters below.
  String description;
  boolean archived = false;   // new versions are active by default
  boolean released = true;    // new versions are marked released by default
  Date releaseDate;
  String project;             // project key; serialized only in the Create view
  Integer projectId;

  /** Key is only serialized in the Filtered view (value comes from NamedEntity). */
  @JsonView(IssueView.Filtered.class)
  @Override
  public String getKey() {
    return super.getKey();
  }

  /** @return the REST resource path for version entities */
  @Override
  public String getPath() {
    return "/version";
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public boolean isArchived() {
    return archived;
  }

  public void setArchived(boolean archived) {
    this.archived = archived;
  }

  public boolean isReleased() {
    return released;
  }

  public void setReleased(boolean released) {
    this.released = released;
  }

  /** Release date is (un)marshalled via JiraDateStringAdapter for JAXB/XML use. */
  @XmlJavaTypeAdapter(JiraDateStringAdapter.class)
  public Date getReleaseDate() {
    return releaseDate;
  }

  public void setReleaseDate(Date releaseDate) {
    this.releaseDate = releaseDate;
  }

  /** Project key; serialized only when creating the version (Create view). */
  @JsonView(IssueView.Create.class)
  public String getProject() {
    return project;
  }

  public void setProject(String project) {
    this.project = project;
  }

  public Integer getProjectId() {
    return projectId;
  }

  public void setProjectId(Integer projectId) {
    this.projectId = projectId;
  }
}
ohumbel/rtc2jira
src/main/java/to/rtc/rtc2jira/exporter/jira/entities/Version.java
Java
gpl-2.0
1,511
/**
 * Copyright (C) 2003 Frerich Raabe <raabe@kde.org>
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation; either version 2 of the License, or (at your option) any later
 * version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "tagguesserconfigdlg.h"
#include "tagguesser.h"

#include <kicon.h>
#include <klocale.h>
#include <kpushbutton.h>
#include <klineedit.h>
#include <kapplication.h>

#include <QKeyEvent>
#include <QStringListModel>

// Configuration dialog for the tag-guesser filename schemes.
// The scheme list is kept in a QStringListModel whose rows can be added,
// edited, removed and reordered; accept() writes the list back to TagGuesser.
TagGuesserConfigDlg::TagGuesserConfigDlg(QWidget *parent, const char *name)
  : KDialog(parent)
{
    setObjectName( QLatin1String( name ) );
    setModal(true);
    setCaption(i18n("Tag Guesser Configuration"));
    setButtons(Ok | Cancel);
    setDefaultButton(Ok);
    showButtonSeparator(true);

    m_child = new TagGuesserConfigDlgWidget(this);
    setMainWidget(m_child);

    m_child->bMoveUp->setIcon(KIcon( QLatin1String( "arrow-up" )));
    m_child->bMoveDown->setIcon(KIcon( QLatin1String( "arrow-down" )));

    // Model holding the scheme strings; initialized from the saved schemes.
    m_tagSchemeModel = new QStringListModel(m_child->lvSchemes);
    m_child->lvSchemes->setModel(m_tagSchemeModel);
    m_child->lvSchemes->setHeaderHidden(true);
    m_tagSchemeModel->setStringList(TagGuesser::schemeStrings());

    connect(m_child->lvSchemes, SIGNAL(clicked(QModelIndex)),
            this, SLOT(slotCurrentChanged(QModelIndex)));
    connect(m_child->bMoveUp, SIGNAL(clicked()), this, SLOT(slotMoveUpClicked()));
    connect(m_child->bMoveDown, SIGNAL(clicked()), this, SLOT(slotMoveDownClicked()));
    connect(m_child->bAdd, SIGNAL(clicked()), this, SLOT(slotAddClicked()));
    connect(m_child->bModify, SIGNAL(clicked()), this, SLOT(slotModifyClicked()));
    connect(m_child->bRemove, SIGNAL(clicked()), this, SLOT(slotRemoveClicked()));

    resize( 400, 300 );
}

// Updates button enabled-state after the selection changed:
// Remove needs a non-empty list; Up/Down are disabled at the list edges.
void TagGuesserConfigDlg::slotCurrentChanged(QModelIndex item)
{
    m_child->bRemove->setEnabled(m_tagSchemeModel->rowCount() != 0);

    // Ensure up/down buttons are appropriately enabled.
    if (!m_tagSchemeModel->rowCount() || item == m_tagSchemeModel->index(0, 0, QModelIndex()))
        m_child->bMoveUp->setEnabled(false);
    else
        m_child->bMoveUp->setEnabled(true);
    if (!m_tagSchemeModel->rowCount() || item == m_tagSchemeModel->index(m_tagSchemeModel->rowCount(QModelIndex())-1, 0, QModelIndex()))
        m_child->bMoveDown->setEnabled(false);
    else
        m_child->bMoveDown->setEnabled(true);
}

// Moves the selected scheme one row up by inserting an empty row above the
// predecessor, copying the current row's text into it, then deleting the
// (shifted) original row. Row arithmetic accounts for the insertion shift.
void TagGuesserConfigDlg::slotMoveUpClicked()
{
    QModelIndex currentItem = m_child->lvSchemes->currentIndex();
    int row = currentItem.row();
    m_tagSchemeModel->insertRow(row - 1); // Insert in front of item above
    row++; // Now we're one row down

    QModelIndex newItem = m_tagSchemeModel->index(row - 2, 0);

    // Copy over, then delete old item
    currentItem = m_tagSchemeModel->index(row, 0);
    m_tagSchemeModel->setData(newItem, m_tagSchemeModel->data(currentItem, Qt::DisplayRole), Qt::DisplayRole);
    m_tagSchemeModel->removeRow(row);

    m_child->lvSchemes->setCurrentIndex(newItem);
    slotCurrentChanged(newItem);
}

// Moves the selected scheme one row down; mirror image of slotMoveUpClicked().
void TagGuesserConfigDlg::slotMoveDownClicked()
{
    QModelIndex currentItem = m_child->lvSchemes->currentIndex();
    int row = currentItem.row();
    m_tagSchemeModel->insertRow(row + 2); // Insert in front of 2 items below

    QModelIndex newItem = m_tagSchemeModel->index(row + 2, 0);

    // Copy over, then delete old item
    currentItem = m_tagSchemeModel->index(row, 0);
    m_tagSchemeModel->setData(newItem, m_tagSchemeModel->data(currentItem, Qt::DisplayRole), Qt::DisplayRole);
    m_tagSchemeModel->removeRow(row);

    newItem = m_tagSchemeModel->index(row + 1, 0);
    m_child->lvSchemes->setCurrentIndex(newItem);
    slotCurrentChanged(newItem);
}

// Inserts a fresh scheme at the top of the list and opens it for inline editing.
void TagGuesserConfigDlg::slotAddClicked()
{
    m_tagSchemeModel->insertRow(0, QModelIndex());
    m_child->lvSchemes->setCurrentIndex(m_tagSchemeModel->index(0, 0, QModelIndex()));
    m_child->lvSchemes->edit(m_child->lvSchemes->currentIndex());
    slotCurrentChanged(m_child->lvSchemes->currentIndex());
}

// Opens the currently selected scheme for inline editing.
void TagGuesserConfigDlg::slotModifyClicked()
{
    m_child->lvSchemes->edit(m_child->lvSchemes->currentIndex());
}

// Removes the currently selected scheme and refreshes the button states.
void TagGuesserConfigDlg::slotRemoveClicked()
{
    m_tagSchemeModel->removeRow(m_child->lvSchemes->currentIndex().row(), QModelIndex());
    slotCurrentChanged(m_child->lvSchemes->currentIndex());
}

// OK pressed: persist the (possibly edited) scheme list, then close.
void TagGuesserConfigDlg::accept()
{
    TagGuesser::setSchemeStrings(m_tagSchemeModel->stringList());
    KDialog::accept();
}

// vim: set et sw=4 tw=0 sta:
shubhamchaudhary/juk
tagguesserconfigdlg.cpp
C++
gpl-2.0
4,971
/* * @(#)BufferSize.java 1.5 00/05/04 SMI * * Author: Tom Corson * * Copyright 2003 Sun Microsystems, Inc. All rights reserved. * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * Sun grants you ("Licensee") a non-exclusive, royalty free, license * to use, modify and redistribute this software in source and binary * code form, provided that i) this copyright notice and license appear * on all copies of the software; and ii) Licensee does not utilize the * software in a manner which is disparaging to Sun. * * This software is provided "AS IS," without a warranty of any kind. * ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, * INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND * ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY * LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE * SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS * BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, * INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, * HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING * OUT OF THE USE OF OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN * ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * * This software is not designed or intended for use in on-line control * of aircraft, air traffic, aircraft navigation or aircraft * communications; or in the design, construction, operation or * maintenance of any nuclear facility. Licensee represents and * warrants that it will not use or redistribute the Software for such * purposes. 
*/ import java.awt.Panel; import java.awt.Label; import java.awt.TextField; import java.awt.BorderLayout; import java.awt.event.MouseListener; import java.awt.event.MouseEvent; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; import javax.comm.ParallelPort; /** * Class declaration * * * @author * @version 1.5, 05/04/00 */ public class BufferSize extends Panel implements MouseListener, ActionListener { private int value, defaultValue; private Label label; private TextField data; private ParallelPort port = null; private boolean inputBuffer; /** * Constructor declaration * * * @param size * @param port * @param inputBuffer * * @see */ public BufferSize(int size, ParallelPort port, boolean inputBuffer) { super(); this.setPort(port); this.inputBuffer = inputBuffer; this.setLayout(new BorderLayout()); this.label = new Label("Buffer Size"); this.label.addMouseListener(this); this.add("West", this.label); this.data = new TextField(new Integer(defaultValue).toString(), size); this.data.addActionListener(this); this.add("East", this.data); this.showValue(); this.defaultValue = this.value; } /** * Method declaration * * * @param port * * @see */ public void setPort(ParallelPort port) { this.port = port; } /** * Method declaration * * * @return * * @see */ public int getValue() { if (this.port != null) { /* * Get the buffer size. */ if (inputBuffer) { this.value = port.getInputBufferSize(); } else { this.value = port.getOutputBufferSize(); } return this.value; } else { return (0); } } /** * Method declaration * * * @see */ public void showValue() { this.data.setText(new Integer(this.getValue()).toString()); } /** * Method declaration * * * @param val * * @see */ public void setValue(int val) { if (this.port != null) { /* * Set the new buffer size. 
*/ if (inputBuffer) { port.setInputBufferSize(val); } else { port.setOutputBufferSize(val); } } this.showValue(); } /** * Method declaration * * * @param val * * @see */ public void setDefaultValue(int val) { this.defaultValue = val; } /** * Method declaration * * * @param e * * @see */ public void actionPerformed(ActionEvent e) { String s = e.getActionCommand(); try { Integer newValue = new Integer(s); this.setValue(newValue.intValue()); } catch (NumberFormatException ex) { System.out.println("Bad value = " + e.getActionCommand()); this.showValue(); } } /** * Method declaration * * * @param e * * @see */ public void mouseClicked(MouseEvent e) {} /** * Method declaration * * * @param e * * @see */ public void mouseEntered(MouseEvent e) {} /** * Method declaration * * * @param e * * @see */ public void mouseExited(MouseEvent e) {} /** * Method declaration * * * @param e * * @see */ public void mousePressed(MouseEvent e) { this.setValue(this.defaultValue); } /** * Method declaration * * * @param e * * @see */ public void mouseReleased(MouseEvent e) {} }
tstuckey/WirelessCautionLights
comm_api/commapi_linux/examples/ParallelBlackBox/BufferSize.java
Java
gpl-2.0
5,316
# -*- coding: utf-8 -*-
from django.db import models


class Profiles(models.Model):
    """Legacy Bugzilla ``profiles`` table: one row per user account."""

    userid = models.AutoField(primary_key=True)
    login_name = models.CharField(max_length=255, unique=True)
    cryptpassword = models.CharField(max_length=128, blank=True)
    realname = models.CharField(max_length=255)
    disabledtext = models.TextField()
    disable_mail = models.IntegerField(default=0)
    mybugslink = models.IntegerField()
    # NOTE(review): blank=True without null=True on an IntegerField normally
    # still requires a value at the DB level -- confirm the legacy schema
    # allows NULL here.
    extern_id = models.IntegerField(blank=True)

    class Meta:
        db_table = "profiles"

    def get_groups(self):
        """Return the list of Groups this user belongs to.

        Resolved through the ``user_group_map`` join table; ``select_related``
        fetches the related Groups rows in the same query.
        """
        q = UserGroupMap.objects.filter(user__userid=self.userid)
        q = q.select_related()
        groups = [assoc.group for assoc in q.all()]
        return groups


class Groups(models.Model):
    """Legacy Bugzilla ``groups`` table."""

    name = models.CharField(unique=True, max_length=255)
    description = models.TextField()
    isbuggroup = models.IntegerField()
    userregexp = models.TextField()
    isactive = models.IntegerField()

    class Meta:
        db_table = "groups"


class UserGroupMap(models.Model):
    """Join table mapping Profiles to Groups (legacy ``user_group_map``)."""

    user = models.ForeignKey(Profiles, on_delete=models.CASCADE)  # user_id
    # (actually has two primary keys)
    group = models.ForeignKey(Groups, on_delete=models.CASCADE)  # group_id
    isbless = models.IntegerField(default=0)
    grant_type = models.IntegerField(default=0)

    class Meta:
        db_table = "user_group_map"
        # The legacy table's composite primary key is modelled as a unique pair.
        unique_together = ("user", "group")


#
# Extra information for users
#
class UserProfile(models.Model):
    """Per-user contact/profile data attached to Django's auth.User."""

    user = models.OneToOneField(
        "auth.User", unique=True, related_name="profile", on_delete=models.CASCADE
    )
    phone_number = models.CharField(blank=True, default="", max_length=128)
    url = models.URLField(blank=True, default="")
    im = models.CharField(blank=True, default="", max_length=128)
    im_type_id = models.IntegerField(blank=True, default=1, null=True)
    address = models.TextField(blank=True, default="")
    notes = models.TextField(blank=True, default="")

    class Meta:
        db_table = "tcms_user_profiles"

    def get_im(self):
        """Return the IM handle formatted as "[<type>] <handle>".

        Returns None when no IM handle is set, and also (implicitly) when
        ``im_type_id`` matches none of the configured choices.
        """
        # Imported here, presumably to avoid a circular import with .forms --
        # verify before moving to module level.
        from .forms import IM_CHOICES

        if not self.im:
            return None

        for c in IM_CHOICES:
            if self.im_type_id == c[0]:
                return "[{}] {}".format(c[1], self.im)

    @classmethod
    def get_user_profile(cls, user):
        """Fetch the UserProfile row for ``user`` (raises DoesNotExist if absent)."""
        return cls.objects.get(user=user)
Nitrate/Nitrate
src/tcms/profiles/models.py
Python
gpl-2.0
2,379
/* * MekWars - Copyright (C) 2004 * * Derived from MegaMekNET (http://www.sourceforge.net/projects/megameknet) * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * for more details. */ package server.campaign.commands; import java.util.Enumeration; import java.util.StringTokenizer; import java.util.Vector; import server.campaign.CampaignMain; import server.campaign.SPlayer; import server.campaign.votes.Vote; public class RemoveVoteCommand implements Command { int accessLevel = 0; String syntax = ""; public int getExecutionLevel(){return accessLevel;} public void setExecutionLevel(int i) {accessLevel = i;} public String getSyntax() { return syntax;} public void process(StringTokenizer command,String Username) { if (accessLevel != 0) { int userLevel = CampaignMain.cm.getServer().getUserLevel(Username); if(userLevel < getExecutionLevel()) { CampaignMain.cm.toUser("AM:Insufficient access level for command. Level: " + userLevel + ". Required: " + accessLevel + ".",Username,true); return; } } SPlayer castingPlayer = CampaignMain.cm.getPlayer(Username); String recipientName = "";//blank string try { recipientName = new String(command.nextToken()).toString(); }//end try catch (NumberFormatException ex) { CampaignMain.cm.toUser("AM:RemoveVote command failed. Check your input. 
It should be something like this: /c removevote#name",Username,true); return; }//end catch //break out if a player is trying to vote for himself if (Username.equals(recipientName)) { CampaignMain.cm.toUser("AM:You may not vote for youself.",Username,true); return; } //break out if voting isnt enabled on the server boolean canVote = new Boolean(CampaignMain.cm.getConfig("VotingEnabled")).booleanValue(); if (!canVote) { CampaignMain.cm.toUser("AM:Voting is disabled on this server.",Username,true); return; } //get all votes cast by the player issuing the command Vector<Vote> castersVotes = CampaignMain.cm.getVoteManager().getAllVotesBy(castingPlayer); //break out if the player has no outstanding votes to remove if (castersVotes.isEmpty()) { CampaignMain.cm.toUser("AM:You have not cast any votes. Removal is impossible.",Username,true); return; } //get the SPlayer who is receiving for the next couple of checks SPlayer recipientPlayer = CampaignMain.cm.getPlayer(recipientName); //break out if the recieving player isnt known if (recipientPlayer == null) { CampaignMain.cm.toUser("AM:You can't remove a vote for a player who doesn't exist.",Username,true); return; } //break out if player has no votes cast for recipient Enumeration<Vote> e = castersVotes.elements(); boolean hasVoteForRecipient = false; Vote v = null; while (e.hasMoreElements() && !hasVoteForRecipient) { v = e.nextElement(); if (v.getRecipient().equals(recipientName)) { hasVoteForRecipient = true; } }//end while(more elements) /* * The last vote drawn from the enumeration has the proper recipient, if * hasVoteForRecipient is true, because the loop ends before a replacement * element is drawn. If true, attempt to remove the vote. If false, break the * bad news to the player. 
*/ if (!hasVoteForRecipient) { CampaignMain.cm.toUser("AM:You have not voted for this player.",Username,true); return; } //else if boolean voteRemoved = CampaignMain.cm.getVoteManager().removeVote(v); if (!voteRemoved) { CampaignMain.cm.toUser("AM:There was an error removing the vote. Please contact your " + "server admin or file a bug report.", Username, true); return; } //else CampaignMain.cm.toUser("AM:Your vote for " + recipientName + " has been removed.",Username,true); } }
mekwars-legends/mekwars-upstream
src/server/campaign/commands/RemoveVoteCommand.java
Java
gpl-2.0
4,319
/*************************************************************************** qgsspatialitesourceselect.cpp Dialog to select SpatiaLite layer(s) and add it to the map canvas ------------------- begin : Dec 2008 copyright : (C) 2008 by Sandro Furieri email : a.furieri@lqt.it ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "qgsspatialitesourceselect.h" #include "qgsspatialiteconnection.h" #include "qgslogger.h" #include "qgsapplication.h" #include "qgscontexthelp.h" #include "qgsquerybuilder.h" #include "qgsdatasourceuri.h" #include "qgsvectorlayer.h" #include <QInputDialog> #include <QMessageBox> #include <QSettings> #include <QTextStream> #include <QTableWidgetItem> #include <QHeaderView> #include <QStringList> #include <QPushButton> #ifdef _MSC_VER #define strcasecmp(a,b) stricmp(a,b) #endif QgsSpatiaLiteSourceSelect::QgsSpatiaLiteSourceSelect( QWidget * parent, Qt::WindowFlags fl, bool embedded ): QDialog( parent, fl ) { setupUi( this ); QSettings settings; restoreGeometry( settings.value( "/Windows/SpatiaLiteSourceSelect/geometry" ).toByteArray() ); mHoldDialogOpen->setChecked( settings.value( "/Windows/SpatiaLiteSourceSelect/HoldDialogOpen", false ).toBool() ); setWindowTitle( tr( "Add SpatiaLite Table(s)" ) ); connectionsGroupBox->setTitle( tr( "Databases" ) ); btnEdit->hide(); // hide the edit button btnSave->hide(); btnLoad->hide(); mStatsButton = new QPushButton( tr( "&Update Statistics" ) ); connect( mStatsButton, SIGNAL( clicked() ), this, SLOT( updateStatistics() ) ); mStatsButton->setEnabled( false ); 
mAddButton = new QPushButton( tr( "&Add" ) ); connect( mAddButton, SIGNAL( clicked() ), this, SLOT( addClicked() ) ); mAddButton->setEnabled( false ); mBuildQueryButton = new QPushButton( tr( "&Set Filter" ) ); connect( mBuildQueryButton, SIGNAL( clicked() ), this, SLOT( buildQuery() ) ); mBuildQueryButton->setEnabled( false ); if ( embedded ) { buttonBox->button( QDialogButtonBox::Close )->hide(); } else { buttonBox->addButton( mAddButton, QDialogButtonBox::ActionRole ); buttonBox->addButton( mBuildQueryButton, QDialogButtonBox::ActionRole ); buttonBox->addButton( mStatsButton, QDialogButtonBox::ActionRole ); } populateConnectionList(); mSearchModeComboBox->addItem( tr( "Wildcard" ) ); mSearchModeComboBox->addItem( tr( "RegExp" ) ); mSearchColumnComboBox->addItem( tr( "All" ) ); mSearchColumnComboBox->addItem( tr( "Table" ) ); mSearchColumnComboBox->addItem( tr( "Type" ) ); mSearchColumnComboBox->addItem( tr( "Geometry column" ) ); mSearchColumnComboBox->addItem( tr( "Sql" ) ); mProxyModel.setParent( this ); mProxyModel.setFilterKeyColumn( -1 ); mProxyModel.setFilterCaseSensitivity( Qt::CaseInsensitive ); mProxyModel.setDynamicSortFilter( true ); mProxyModel.setSourceModel( &mTableModel ); mTablesTreeView->setModel( &mProxyModel ); mTablesTreeView->setSortingEnabled( true ); connect( mTablesTreeView->selectionModel(), SIGNAL( selectionChanged( const QItemSelection&, const QItemSelection& ) ), this, SLOT( treeWidgetSelectionChanged( const QItemSelection&, const QItemSelection& ) ) ); //for Qt < 4.3.2, passing -1 to include all model columns //in search does not seem to work mSearchColumnComboBox->setCurrentIndex( 1 ); //hide the search options by default //they will be shown when the user ticks //the search options group box mSearchLabel->setVisible( false ); mSearchColumnComboBox->setVisible( false ); mSearchColumnsLabel->setVisible( false ); mSearchModeComboBox->setVisible( false ); mSearchModeLabel->setVisible( false ); mSearchTableEdit->setVisible( false ); 
cbxAllowGeometrylessTables->setDisabled( true ); } QgsSpatiaLiteSourceSelect::~QgsSpatiaLiteSourceSelect() { QSettings settings; settings.setValue( "/Windows/SpatiaLiteSourceSelect/geometry", saveGeometry() ); settings.setValue( "/Windows/SpatiaLiteSourceSelect/HoldDialogOpen", mHoldDialogOpen->isChecked() ); } // Slot for performing action when the Add button is clicked void QgsSpatiaLiteSourceSelect::addClicked() { addTables(); } /** End Autoconnected SLOTS **/ // Remember which database is selected void QgsSpatiaLiteSourceSelect::on_cmbConnections_activated( int ) { dbChanged(); } void QgsSpatiaLiteSourceSelect::buildQuery() { setSql( mTablesTreeView->currentIndex() ); } void QgsSpatiaLiteSourceSelect::updateStatistics() { QString subKey = cmbConnections->currentText(); int idx = subKey.indexOf( '@' ); if ( idx > 0 ) subKey.truncate( idx ); QString msg = tr( "Are you sure you want to update the internal statistics for DB: %1?\n\n" "This could take a long time (depending on the DB size),\n" "but implies better performance thereafter." 
).arg( subKey ); QMessageBox::StandardButton result = QMessageBox::information( this, tr( "Confirm Update Statistics" ), msg, QMessageBox::Ok | QMessageBox::Cancel ); if ( result != QMessageBox::Ok ) return; // trying to connect to SpatiaLite DB QgsSpatiaLiteConnection conn( subKey ); if ( conn.updateStatistics() ) { QMessageBox::information( this, tr( "Update Statistics" ), tr( "Internal statistics successfully updated for: %1" ).arg( subKey ) ); } else { QMessageBox::critical( this, tr( "Update Statistics" ), tr( "Error while updating internal statistics for: %1" ).arg( subKey ) ); } } void QgsSpatiaLiteSourceSelect::on_cbxAllowGeometrylessTables_stateChanged( int ) { on_btnConnect_clicked(); } void QgsSpatiaLiteSourceSelect::on_mTablesTreeView_clicked( const QModelIndex &index ) { mBuildQueryButton->setEnabled( index.parent().isValid() ); } void QgsSpatiaLiteSourceSelect::on_mTablesTreeView_doubleClicked( const QModelIndex &index ) { setSql( index ); } void QgsSpatiaLiteSourceSelect::on_mSearchGroupBox_toggled( bool checked ) { if ( mSearchTableEdit->text().isEmpty() ) return; on_mSearchTableEdit_textChanged( checked ? 
mSearchTableEdit->text() : "" ); } void QgsSpatiaLiteSourceSelect::on_mSearchTableEdit_textChanged( const QString & text ) { if ( mSearchModeComboBox->currentText() == tr( "Wildcard" ) ) { mProxyModel._setFilterWildcard( text ); } else if ( mSearchModeComboBox->currentText() == tr( "RegExp" ) ) { mProxyModel._setFilterRegExp( text ); } } void QgsSpatiaLiteSourceSelect::on_mSearchColumnComboBox_currentIndexChanged( const QString & text ) { if ( text == tr( "All" ) ) { mProxyModel.setFilterKeyColumn( -1 ); } else if ( text == tr( "Table" ) ) { mProxyModel.setFilterKeyColumn( 0 ); } else if ( text == tr( "Type" ) ) { mProxyModel.setFilterKeyColumn( 1 ); } else if ( text == tr( "Geometry column" ) ) { mProxyModel.setFilterKeyColumn( 2 ); } else if ( text == tr( "Sql" ) ) { mProxyModel.setFilterKeyColumn( 3 ); } } void QgsSpatiaLiteSourceSelect::on_mSearchModeComboBox_currentIndexChanged( const QString & text ) { Q_UNUSED( text ); on_mSearchTableEdit_textChanged( mSearchTableEdit->text() ); } void QgsSpatiaLiteSourceSelect::setLayerType( const QString& table, const QString& column, const QString& type ) { mTableModel.setGeometryTypesForTable( table, column, type ); mTablesTreeView->sortByColumn( 0, Qt::AscendingOrder ); } void QgsSpatiaLiteSourceSelect::populateConnectionList() { cmbConnections->clear(); Q_FOREACH ( const QString& name, QgsSpatiaLiteConnection::connectionList() ) { // retrieving the SQLite DB name and full path QString text = name + tr( "@" ) + QgsSpatiaLiteConnection::connectionPath( name ); cmbConnections->addItem( text ); } setConnectionListPosition(); btnConnect->setDisabled( cmbConnections->count() == 0 ); btnDelete->setDisabled( cmbConnections->count() == 0 ); cmbConnections->setDisabled( cmbConnections->count() == 0 ); } void QgsSpatiaLiteSourceSelect::on_btnNew_clicked() { if ( ! 
newConnection( this ) ) return; populateConnectionList(); emit connectionsChanged(); } bool QgsSpatiaLiteSourceSelect::newConnection( QWidget* parent ) { // Retrieve last used project dir from persistent settings QSettings settings; QString lastUsedDir = settings.value( "/UI/lastSpatiaLiteDir", QDir::homePath() ).toString(); QString myFile = QFileDialog::getOpenFileName( parent, tr( "Choose a SpatiaLite/SQLite DB to open" ), lastUsedDir, tr( "SpatiaLite DB" ) + " (*.sqlite *.db *.sqlite3 *.db3 *.s3db);;" + tr( "All files" ) + " (*)" ); if ( myFile.isEmpty() ) return false; QFileInfo myFI( myFile ); QString myPath = myFI.path(); QString myName = myFI.fileName(); QString savedName = myFI.fileName(); QString baseKey = "/SpatiaLite/connections/"; // TODO: keep the test //handle = openSpatiaLiteDb( myFI.canonicalFilePath() ); //if ( !handle ) // return false; // OK, this one is a valid SpatiaLite DB //closeSpatiaLiteDb( handle ); // if there is already a connection with this name, ask for a new name while ( ! 
settings.value( baseKey + savedName + "/sqlitepath", "" ).toString().isEmpty() ) { bool ok; savedName = QInputDialog::getText( nullptr , tr( "Cannot add connection '%1'" ).arg( myName ) , tr( "A connection with the same name already exists,\nplease provide a new name:" ), QLineEdit::Normal, "", &ok ); if ( !ok || savedName.isEmpty() ) { return false; } } // Persist last used SpatiaLite dir settings.setValue( "/UI/lastSpatiaLiteDir", myPath ); // inserting this SQLite DB path settings.setValue( baseKey + "selected", savedName ); settings.setValue( baseKey + savedName + "/sqlitepath", myFI.canonicalFilePath() ); return true; } QString QgsSpatiaLiteSourceSelect::layerURI( const QModelIndex &index ) { QString tableName = mTableModel.itemFromIndex( index.sibling( index.row(), 0 ) )->text(); QString geomColumnName = mTableModel.itemFromIndex( index.sibling( index.row(), 2 ) )->text(); QString sql = mTableModel.itemFromIndex( index.sibling( index.row(), 3 ) )->text(); if ( geomColumnName.contains( " AS " ) ) { int a = geomColumnName.indexOf( " AS " ); QString typeName = geomColumnName.mid( a + 4 ); //only the type name geomColumnName = geomColumnName.left( a ); //only the geom column name QString geomFilter; if ( typeName == "POINT" ) { geomFilter = QString( "geometrytype(\"%1\") IN ('POINT','MULTIPOINT')" ).arg( geomColumnName ); } else if ( typeName == "LINESTRING" ) { geomFilter = QString( "geometrytype(\"%1\") IN ('LINESTRING','MULTILINESTRING')" ).arg( geomColumnName ); } else if ( typeName == "POLYGON" ) { geomFilter = QString( "geometrytype(\"%1\") IN ('POLYGON','MULTIPOLYGON')" ).arg( geomColumnName ); } if ( !geomFilter.isEmpty() && !sql.contains( geomFilter ) ) { if ( !sql.isEmpty() ) { sql += " AND "; } sql += geomFilter; } } QgsDataSourceURI uri( connectionInfo() ); uri.setDataSource( "", tableName, geomColumnName, sql, "" ); return uri.uri(); } // Slot for deleting an existing connection void QgsSpatiaLiteSourceSelect::on_btnDelete_clicked() { QString subKey 
= cmbConnections->currentText(); int idx = subKey.indexOf( '@' ); if ( idx > 0 ) subKey.truncate( idx ); QString msg = tr( "Are you sure you want to remove the %1 connection and all associated settings?" ).arg( subKey ); QMessageBox::StandardButton result = QMessageBox::information( this, tr( "Confirm Delete" ), msg, QMessageBox::Ok | QMessageBox::Cancel ); if ( result != QMessageBox::Ok ) return; QgsSpatiaLiteConnection::deleteConnection( subKey ); populateConnectionList(); emit connectionsChanged(); } void QgsSpatiaLiteSourceSelect::addTables() { m_selectedTables.clear(); typedef QMap < int, bool >schemaInfo; QMap < QString, schemaInfo > dbInfo; QItemSelection selection = mTablesTreeView->selectionModel()->selection(); QModelIndexList selectedIndices = selection.indexes(); QStandardItem *currentItem = nullptr; QModelIndexList::const_iterator selected_it = selectedIndices.constBegin(); for ( ; selected_it != selectedIndices.constEnd(); ++selected_it ) { if ( !selected_it->parent().isValid() ) { //top level items only contain the schema names continue; } currentItem = mTableModel.itemFromIndex( mProxyModel.mapToSource( *selected_it ) ); if ( !currentItem ) { continue; } QString currentSchemaName = currentItem->parent()->text(); int currentRow = currentItem->row(); if ( !dbInfo[currentSchemaName].contains( currentRow ) ) { dbInfo[currentSchemaName][currentRow] = true; m_selectedTables << layerURI( mProxyModel.mapToSource( *selected_it ) ); } } if ( m_selectedTables.empty() ) { QMessageBox::information( this, tr( "Select Table" ), tr( "You must select a table in order to add a Layer." 
) ); } else { emit addDatabaseLayers( m_selectedTables, "spatialite" ); if ( !mHoldDialogOpen->isChecked() ) { accept(); } } } void QgsSpatiaLiteSourceSelect::on_btnConnect_clicked() { cbxAllowGeometrylessTables->setEnabled( false ); QString subKey = cmbConnections->currentText(); int idx = subKey.indexOf( '@' ); if ( idx > 0 ) subKey.truncate( idx ); // trying to connect to SpatiaLite DB QgsSpatiaLiteConnection conn( subKey ); mSqlitePath = conn.path(); QApplication::setOverrideCursor( Qt::WaitCursor ); QgsSpatiaLiteConnection::Error err; err = conn.fetchTables( cbxAllowGeometrylessTables->isChecked() ); QApplication::restoreOverrideCursor(); if ( err != QgsSpatiaLiteConnection::NoError ) { QString errCause = conn.errorMessage(); switch ( err ) { case QgsSpatiaLiteConnection::NotExists: QMessageBox::critical( this, tr( "SpatiaLite DB Open Error" ), tr( "Database does not exist: %1" ).arg( mSqlitePath ) ); break; case QgsSpatiaLiteConnection::FailedToOpen: QMessageBox::critical( this, tr( "SpatiaLite DB Open Error" ), tr( "Failure while connecting to: %1\n\n%2" ).arg( mSqlitePath, errCause ) ); break; case QgsSpatiaLiteConnection::FailedToGetTables: QMessageBox::critical( this, tr( "SpatiaLite getTableInfo Error" ), tr( "Failure exploring tables from: %1\n\n%2" ).arg( mSqlitePath, errCause ) ); break; default: QMessageBox::critical( this, tr( "SpatiaLite Error" ), tr( "Unexpected error when working with: %1\n\n%2" ).arg( mSqlitePath, errCause ) ); } mSqlitePath = QString(); return; } QModelIndex rootItemIndex = mTableModel.indexFromItem( mTableModel.invisibleRootItem() ); mTableModel.removeRows( 0, mTableModel.rowCount( rootItemIndex ), rootItemIndex ); // populate the table list // get the list of suitable tables and columns and populate the UI mTableModel.setSqliteDb( subKey ); QList<QgsSpatiaLiteConnection::TableEntry> tables = conn.tables(); Q_FOREACH ( const QgsSpatiaLiteConnection::TableEntry& table, tables ) { mTableModel.addTableEntry( table.type, 
table.tableName, table.column, "" ); } if ( cmbConnections->count() > 0 ) { mStatsButton->setEnabled( true ); } mTablesTreeView->sortByColumn( 0, Qt::AscendingOrder ); //expand all the toplevel items int numTopLevelItems = mTableModel.invisibleRootItem()->rowCount(); for ( int i = 0; i < numTopLevelItems; ++i ) { mTablesTreeView->expand( mProxyModel.mapFromSource( mTableModel.indexFromItem( mTableModel.invisibleRootItem()->child( i ) ) ) ); } mTablesTreeView->resizeColumnToContents( 0 ); mTablesTreeView->resizeColumnToContents( 1 ); cbxAllowGeometrylessTables->setEnabled( true ); } QStringList QgsSpatiaLiteSourceSelect::selectedTables() { return m_selectedTables; } QString QgsSpatiaLiteSourceSelect::connectionInfo() { return QString( "dbname='%1'" ).arg( QString( mSqlitePath ).replace( '\'', "\\'" ) ); } void QgsSpatiaLiteSourceSelect::setSql( const QModelIndex &index ) { QModelIndex idx = mProxyModel.mapToSource( index ); QString tableName = mTableModel.itemFromIndex( idx.sibling( idx.row(), 0 ) )->text(); QgsVectorLayer *vlayer = new QgsVectorLayer( layerURI( idx ), tableName, "spatialite" ); if ( !vlayer->isValid() ) { delete vlayer; return; } // create a query builder object QgsQueryBuilder *gb = new QgsQueryBuilder( vlayer, this ); if ( gb->exec() ) { mTableModel.setSql( mProxyModel.mapToSource( index ), gb->sql() ); } delete gb; delete vlayer; } QString QgsSpatiaLiteSourceSelect::fullDescription( const QString& table, const QString& column, const QString& type ) { QString full_desc = ""; full_desc += table + "\" (" + column + ") " + type; return full_desc; } void QgsSpatiaLiteSourceSelect::dbChanged() { // Remember which database was selected. 
QSettings settings; settings.setValue( "/SpatiaLite/connections/selected", cmbConnections->currentText() ); } void QgsSpatiaLiteSourceSelect::setConnectionListPosition() { QSettings settings; // If possible, set the item currently displayed database QString toSelect = settings.value( "/SpatiaLite/connections/selected" ).toString(); toSelect += '@' + settings.value( "/SpatiaLite/connections/" + toSelect + "/sqlitepath" ).toString(); cmbConnections->setCurrentIndex( cmbConnections->findText( toSelect ) ); if ( cmbConnections->currentIndex() < 0 ) { if ( toSelect.isNull() ) cmbConnections->setCurrentIndex( 0 ); else cmbConnections->setCurrentIndex( cmbConnections->count() - 1 ); } } void QgsSpatiaLiteSourceSelect::setSearchExpression( const QString & regexp ) { Q_UNUSED( regexp ); } void QgsSpatiaLiteSourceSelect::treeWidgetSelectionChanged( const QItemSelection &selected, const QItemSelection &deselected ) { Q_UNUSED( deselected ) mAddButton->setEnabled( !selected.isEmpty() ); }
AsgerPetersen/QGIS
src/providers/spatialite/qgsspatialitesourceselect.cpp
C++
gpl-2.0
19,027
@extends('emails.layout') @section('header') {{HTML::linkRoute('admin-index', 'Admin Dashboard')}} @stop @section('title') Request Baru @stop @section('description') Halo admin {{{Config::get('setting.site_name')}}}, <br> Request permintaan informasi dari {{{$name}}}:<br> <br> Untuk melihat detail request silahkan klik link berikut:<br> <h3>{{HTML::link($link, 'Detail request')}}</h3> <br><br> Dikirim oleh<br> Sistem {{{Config::get('setting.site_name')}}} @stop
airputih/kip
app/views/emails/request.blade.php
PHP
gpl-2.0
475
/* * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 4923484 8146293 * @summary test ASN.1 encoding generation/parsing for the OAEPParameters * implementation in SunJCE provider. 
* @author Valerie Peng */ import java.math.BigInteger; import java.util.*; import java.security.*; import java.security.spec.MGF1ParameterSpec; import javax.crypto.*; import javax.crypto.spec.OAEPParameterSpec; import javax.crypto.spec.PSource; public class TestOAEPParameterSpec { private static Provider cp; private static boolean runTest(String mdName, MGF1ParameterSpec mgfSpec, byte[] p) throws Exception { OAEPParameterSpec spec = new OAEPParameterSpec(mdName, "MGF1", mgfSpec, new PSource.PSpecified(p)); cp = Security.getProvider("SunJCE"); System.out.println("Testing provider " + cp.getName() + "..."); AlgorithmParameters ap = AlgorithmParameters.getInstance("OAEP", cp); ap.init(spec); byte[] encoding = ap.getEncoded(); AlgorithmParameters ap2 = AlgorithmParameters.getInstance("OAEP", cp); ap2.init(encoding); OAEPParameterSpec spec2 = (OAEPParameterSpec) ap2.getParameterSpec (OAEPParameterSpec.class); return compareSpec(spec, spec2); } private static boolean compareMD(OAEPParameterSpec s1, OAEPParameterSpec s2) { boolean result = false; String alg1 = s1.getDigestAlgorithm().toUpperCase().trim(); String alg2 = s2.getDigestAlgorithm().toUpperCase().trim(); alg1 = alg1.replaceAll("\\-", ""); alg2 = alg2.replaceAll("\\-", ""); if (alg1.equals("SHA") || alg1.equals("SHA1")) { result = (alg2.equals("SHA") || alg2.equals("SHA1")); } else { result = (alg1.equals(alg2)); } return result; } private static boolean compareMGF(OAEPParameterSpec s1, OAEPParameterSpec s2) { String alg1 = s1.getMGFAlgorithm(); String alg2 = s2.getMGFAlgorithm(); if (alg1.equals(alg2)) { MGF1ParameterSpec mp1 = (MGF1ParameterSpec)s1.getMGFParameters(); MGF1ParameterSpec mp2 = (MGF1ParameterSpec)s2.getMGFParameters(); alg1 = mp1.getDigestAlgorithm(); alg2 = mp2.getDigestAlgorithm(); if (alg1.equals(alg2)) { return true; } else { System.out.println("MGF's MD algos: " + alg1 + " vs " + alg2); return false; } } else { System.out.println("MGF algos: " + alg1 + " vs " + alg2); return false; } } 
private static boolean comparePSource(OAEPParameterSpec s1, OAEPParameterSpec s2) { PSource src1 = s1.getPSource(); PSource src2 = s2.getPSource(); String alg1 = src1.getAlgorithm(); String alg2 = src2.getAlgorithm(); if (alg1.equals(alg2)) { // assumes they are PSource.PSpecified return Arrays.equals(((PSource.PSpecified) src1).getValue(), ((PSource.PSpecified) src2).getValue()); } else { System.out.println("PSource algos: " + alg1 + " vs " + alg2); return false; } } private static boolean compareSpec(OAEPParameterSpec s1, OAEPParameterSpec s2) { return (compareMD(s1, s2) && compareMGF(s1, s2) && comparePSource(s1, s2)); } public static void main(String[] argv) throws Exception { boolean status = true; byte[] p = { (byte) 0x01, (byte) 0x02, (byte) 0x03, (byte) 0x04 }; status &= runTest("SHA-224", MGF1ParameterSpec.SHA224, p); status &= runTest("SHA-256", MGF1ParameterSpec.SHA256, p); status &= runTest("SHA-384", MGF1ParameterSpec.SHA384, p); status &= runTest("SHA-512", MGF1ParameterSpec.SHA512, p); status &= runTest("SHA-512/224", MGF1ParameterSpec.SHA512_224, p); status &= runTest("SHA-512/256", MGF1ParameterSpec.SHA512_256, p); status &= runTest("SHA", MGF1ParameterSpec.SHA1, new byte[0]); status &= runTest("SHA-1", MGF1ParameterSpec.SHA1, new byte[0]); status &= runTest("SHA1", MGF1ParameterSpec.SHA1, new byte[0]); if (status) { System.out.println("Test Passed"); } else { throw new Exception("One or More Test Failed"); } } }
JetBrains/jdk8u_jdk
test/com/sun/crypto/provider/Cipher/RSA/TestOAEPParameterSpec.java
Java
gpl-2.0
5,439
<?php /** * Browse action support trait * * PHP version 7 * * Copyright (C) The National Library of Finland 2018. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category VuFind * @package AJAX * @author Ere Maijala <ere.maijala@helsinki.fi> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org/wiki/development Wiki */ namespace Finna\AjaxHandler; /** * Browse action support trait * * @category VuFind * @package AJAX * @author Ere Maijala <ere.maijala@helsinki.fi> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org/wiki/development Wiki */ trait BrowseActionTrait { /** * Return browse action from the request. * * @param \Zend\Http\Request $request Request * * @return null|string Browse action or null if request is not a browse action */ protected function getBrowseAction($request) { $referer = $request->getServer()->get('HTTP_REFERER'); $match = null; $regex = '/^http[s]?:.*\/Browse\/(Database|Journal)[\/.*]?/'; if (preg_match($regex, $referer, $match)) { return $match[1]; } return null; } }
samuli/NDL-VuFind2
module/Finna/src/Finna/AjaxHandler/BrowseActionTrait.php
PHP
gpl-2.0
1,874
# If you start collecting a wave and then regret it, you can use this # to roll back the data collection. I would recommend duplicating the database # first and letting this program loose on a copy, as you won't be able to # get back any of the data you don't explicitly tell it to keep. import sqlite3 import itertools import add_data as ad def rollback(db_path,waves_to_keep=[],waves_to_lose=[]): '''waves_to_keep and waves_to_lose should be lists of names of wave tables in the database currently being cleaned''' conn=sqlite3.connect(db_path) curs=conn.cursor() ''' for wave in waves_to_lose: curs.execute('DROP TABLE {}'.format(wave)) users_to_keep=[] for wave in waves_to_keep: curs.execute('SELECT id FROM {}'.format(wave)) users_to_keep.extend(curs.fetchall()) curs.execute('ALTER TABLE users RENAME TO old_users') ad.create_table(curs,'users') curs.execute('ALTER TABLE x_follows_y RENAME TO old_x_follows_y') ad.create_table(curs,'x_follows_y') follow_data=set([]) for n, user in enumerate(users_to_keep): curs.execute('SELECT follower,followed FROM old_x_follows_y ' 'WHERE follower=?',user) follow_data.update(curs.fetchall()) curs.execute('SELECT follower,followed FROM old_x_follows_y ' 'WHERE followed=?',user) follow_data.update(curs.fetchall()) if n % 250 == 0: print "{} users' follow data read.".format(n) curs.executemany('INSERT INTO x_follows_y VALUES (?,?)', follow_data) conn.commit() print 'Cleaned x_follows_y table filled.' 
''' curs.execute('SELECT follower,followed FROM old_x_follows_y') follow_data=curs.fetchall() print 'Got follow data: {} follows'.format(len(follow_data)) users_to_keep = set(itertools.chain.from_iterable(follow_data)) print 'Got users from follow data: {} of them'.format(len(users_to_keep)) print list(users_to_keep)[:10] n=0 curs.execute('SELECT * FROM old_users') for i,user_data in enumerate(curs.fetchall()): if user_data[0] in users_to_keep: curs.execute('INSERT INTO users VALUES (' '?,?,?,?,?,?,?,?,?,?,' '?,?,?,?,?,?,?,?,?,?)',user_data) n+=1 if i % 1000 == 0: print '{}th user details checked.'.format(i) if n % 1000 == 0: print '{}th user\'s details copied.'.format(n) print 'Gone through them all now' conn.commit() print 'Cleaned users table filled.'
ValuingElectronicMusic/network-analysis
remove_waves.py
Python
gpl-2.0
2,612
<?php /* BufeteAplicacionBundle:Documento:index.html.twig */ class __TwigTemplate_6cacef96adfcc32d4e0dd4163ee51b1694ed3e96e083117399aab131e4fd4a6e extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); // line 1 try { $this->parent = $this->env->loadTemplate("BufeteAplicacionBundle::Default/admin.html.twig"); } catch (Twig_Error_Loader $e) { $e->setTemplateFile($this->getTemplateName()); $e->setTemplateLine(1); throw $e; } $this->blocks = array( 'page' => array($this, 'block_page'), ); } protected function doGetParent(array $context) { return "BufeteAplicacionBundle::Default/admin.html.twig"; } protected function doDisplay(array $context, array $blocks = array()) { $this->parent->display($context, array_merge($this->blocks, $blocks)); } // line 2 public function block_page($context, array $blocks = array()) { // line 3 echo " <div id=\"micaso\" class=\"fullsize\" style=\" background-color: #F0F0D8;height: 540px;\"> <h1>Documento list</h1> <table class=\"records_list\"> <thead> <tr> <th>Id</th> <th>Nombre</th> <th>Descripcion</th> <th>Documento</th> <th>Actions</th> </tr> </thead> <tbody> "; // line 17 $context['_parent'] = (array) $context; $context['_seq'] = twig_ensure_traversable((isset($context["entities"]) ? 
$context["entities"] : null)); foreach ($context['_seq'] as $context["_key"] => $context["entity"]) { // line 18 echo " <tr> <td><a href=\""; // line 19 echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("documento_show", array("id" => $this->getAttribute($context["entity"], "id", array()))), "html", null, true); echo "\">"; echo twig_escape_filter($this->env, $this->getAttribute($context["entity"], "id", array()), "html", null, true); echo "</a></td> <td>"; // line 20 echo twig_escape_filter($this->env, $this->getAttribute($context["entity"], "nombre", array()), "html", null, true); echo "</td> <td>"; // line 21 echo twig_escape_filter($this->env, $this->getAttribute($context["entity"], "descripcion", array()), "html", null, true); echo "</td> <td>"; // line 22 echo twig_escape_filter($this->env, $this->getAttribute($context["entity"], "documento", array()), "html", null, true); echo "</td> <td> <ul> <li> <a href=\""; // line 26 echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("documento_show", array("id" => $this->getAttribute($context["entity"], "id", array()))), "html", null, true); echo "\">show</a> </li> <li> <a href=\""; // line 29 echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("documento_edit", array("id" => $this->getAttribute($context["entity"], "id", array()))), "html", null, true); echo "\">edit</a> </li> </ul> </td> </tr> "; } $_parent = $context['_parent']; unset($context['_seq'], $context['_iterated'], $context['_key'], $context['entity'], $context['_parent'], $context['loop']); $context = array_intersect_key($context, $_parent) + $_parent; // line 35 echo " </tbody> </table> <ul> <li> <a href=\""; // line 40 echo $this->env->getExtension('routing')->getPath("documento_new"); echo "\"> Create a new entry </a> </li> </ul> </div> "; } public function getTemplateName() { return "BufeteAplicacionBundle:Documento:index.html.twig"; } public function 
isTraitable() { return false; } public function getDebugInfo() { return array ( 108 => 40, 101 => 35, 89 => 29, 83 => 26, 76 => 22, 72 => 21, 68 => 20, 62 => 19, 59 => 18, 55 => 17, 39 => 3, 36 => 2, 11 => 1,); } }
AndresFelipe27/ProyectoIngenieriaSoftware3
SW3/app/cache/prod/twig/6c/ac/ef96adfcc32d4e0dd4163ee51b1694ed3e96e083117399aab131e4fd4a6e.php
PHP
gpl-2.0
4,652
// Copyright 2015 Dolphin Emulator Project // Licensed under GPLv2+ // Refer to the license.txt file included. #include "DolphinQt2/Resources.h" #include "DolphinQt2/GameList/GameListModel.h" #include "DolphinQt2/GameList/GameListProxyModel.h" static constexpr QSize NORMAL_BANNER_SIZE(96, 32); static constexpr QSize LARGE_BANNER_SIZE(144, 48); // Convert an integer size to a friendly string representation. static QString FormatSize(qint64 size) { QStringList units{ QStringLiteral("KB"), QStringLiteral("MB"), QStringLiteral("GB"), QStringLiteral("TB") }; QStringListIterator i(units); QString unit = QStringLiteral("B"); double num = (double) size; while (num > 1024.0 && i.hasNext()) { unit = i.next(); num /= 1024.0; } return QStringLiteral("%1 %2").arg(QString::number(num, 'f', 1)).arg(unit); } GameListProxyModel::GameListProxyModel(QObject* parent) : QSortFilterProxyModel(parent) { } QVariant GameListProxyModel::data(const QModelIndex& i, int role) const { QModelIndex source_index = mapToSource(i); QVariant source_data = sourceModel()->data(source_index, Qt::DisplayRole); if (role == Qt::DisplayRole) { switch (i.column()) { // Sort by the integer but display the formatted string. case GameListModel::COL_SIZE: return FormatSize(source_data.toULongLong()); // These fall through to the underlying model. case GameListModel::COL_ID: case GameListModel::COL_TITLE: case GameListModel::COL_DESCRIPTION: case GameListModel::COL_MAKER: return source_data; // Show the title in the display role of the icon view. case GameListModel::COL_LARGE_ICON: return data(index(i.row(), GameListModel::COL_TITLE), Qt::DisplayRole); } } else if (role == Qt::DecorationRole) { switch (i.column()) { // Show icons in the decoration roles. This lets us sort by the // underlying ints, but display just the icons without doing any // fixed-width hacks. 
case GameListModel::COL_PLATFORM: return Resources::GetPlatform(source_data.toInt()); case GameListModel::COL_BANNER: return source_data.value<QPixmap>().scaled( NORMAL_BANNER_SIZE, Qt::KeepAspectRatio, Qt::SmoothTransformation); case GameListModel::COL_COUNTRY: return Resources::GetCountry(source_data.toInt()); case GameListModel::COL_RATING: return Resources::GetRating(source_data.toInt()); // Show a scaled icon in the decoration role of the icon view. case GameListModel::COL_LARGE_ICON: return data(index(i.row(), GameListModel::COL_BANNER), Qt::DecorationRole) .value<QPixmap>().scaled( LARGE_BANNER_SIZE, Qt::KeepAspectRatio, Qt::SmoothTransformation); } } return QVariant(); }
asuradaimao/dolphin
Source/Core/DolphinQt2/GameList/GameListProxyModel.cpp
C++
gpl-2.0
2,678
<?php /** * @file * Bartik's theme implementation to display a single Drupal page. * * The doctype, html, head and body tags are not in this template. Instead they * can be found in the html.tpl.php template normally located in the * modules/system folder. * * Available variables: * * General utility variables: * - $base_path: The base URL path of the Drupal installation. At the very * least, this will always default to /. * - $directory: The directory the template is located in, e.g. modules/system * or themes/bartik. * - $is_front: TRUE if the current page is the front page. * - $logged_in: TRUE if the user is registered and signed in. * - $is_admin: TRUE if the user has permission to access administration pages. * * Site identity: * - $front_page: The URL of the front page. Use this instead of $base_path, * when linking to the front page. This includes the language domain or * prefix. * - $logo: The path to the logo image, as defined in theme configuration. * - $site_name: The name of the site, empty when display has been disabled * in theme settings. * - $site_slogan: The slogan of the site, empty when display has been disabled * in theme settings. * - $hide_site_name: TRUE if the site name has been toggled off on the theme * settings page. If hidden, the "element-invisible" class is added to make * the site name visually hidden, but still accessible. * - $hide_site_slogan: TRUE if the site slogan has been toggled off on the * theme settings page. If hidden, the "element-invisible" class is added to * make the site slogan visually hidden, but still accessible. * * Navigation: * - $main_menu (array): An array containing the Main menu links for the * site, if they have been configured. * - $secondary_menu (array): An array containing the Secondary menu links for * the site, if they have been configured. * - $breadcrumb: The breadcrumb trail for the current page. 
* * Page content (in order of occurrence in the default page.tpl.php): * - $title_prefix (array): An array containing additional output populated by * modules, intended to be displayed in front of the main title tag that * appears in the template. * - $title: The page title, for use in the actual HTML content. * - $title_suffix (array): An array containing additional output populated by * modules, intended to be displayed after the main title tag that appears in * the template. * - $messages: HTML for status and error messages. Should be displayed * prominently. * - $tabs (array): Tabs linking to any sub-pages beneath the current page * (e.g., the view and edit tabs when displaying a node). * - $action_links (array): Actions local to the page, such as 'Add menu' on the * menu administration interface. * - $feed_icons: A string of all feed icons for the current page. * - $node: The node object, if there is an automatically-loaded node * associated with the page, and the node ID is the second argument * in the page's path (e.g. node/12345 and node/12345/revisions, but not * comment/reply/12345). * * Regions: * - $page['search']: Items for the header region. * - $page['top']: Items for the featured region. * - $page['left']: Items for the highlighted content region. * - $page['center']: Dynamic help text, mostly for admin pages. * - $page['right']: The main content of the current page. * - $page['footer']: Items for the footer region. * * @see template_preprocess() * @see template_preprocess_page() * @see template_process() * @see bartik_process_page() */ ?> <script type="text/javascript"> var uvOptions = {}; (function() { var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true; uv.src = ('https:' == document.location.protocol ? 
'https://' : 'http://') + 'widget.uservoice.com/aKrd0ayIyn6BsSmA8yegCQ.js'; var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s); })(); </script> <div id="header"> <div id="header-content"> <div id="header-left"> <a href="<?php print $front_page; ?>"><img src="<?php print $logo; ?>" alt="The Open Curriculum Project" /></a><br/> <span id="one-liner"> <?php if ($site_slogan): ?> <?php print $site_slogan; ?> <?php endif; ?> </span> </div> <div id="header-right"> <?php /*if ($secondary_menu): ?> <?php print theme('links__system_secondary_menu', array( 'links' => $secondary_menu, 'attributes' => array( 'id' => 'secondary-menu-links', 'class' => array('links', 'inline', 'clearfix'), ), 'heading' => array( 'text' => t('Secondary menu'), 'level' => 'h2', 'class' => array('element-invisible'), ), )); ?> <!-- /#secondary-menu --> <?php endif;*/ ?> <?php include('login.inc') ?> <?php print render($page['search']); ?> <?php include('gradeblock.inc') ?> </div> </div> </div> <div id="main"> <?php if ($main_menu): ?> <div id="main-menu" class="navigation"> <?php print theme('links__system_main_menu', array( 'links' => $main_menu, 'attributes' => array( 'id' => 'main-menu-links', 'class' => array('links', 'clearfix'), ), 'heading' => array( 'text' => t('Main menu'), 'level' => 'h2', 'class' => array('element-invisible'), ), )); ?> </div> <!-- /#main-menu --> <?php endif; ?> <div id="left-panel"> <?php include('left_menu.inc'); ?> </div> <div id="right-panel"> <?php print render($title_prefix); ?> <?php if ($title): ?> <h1 class="title" id="page-title"> <?php print $title; ?> </h1> <?php endif; ?> <?php print render($title_suffix); ?> <?php if ($breadcrumb): ?> <div id="breadcrumb"><?php print $breadcrumb; ?></div> <?php endif; ?> <?php if ($tabs): ?><p/> <div class="tabs"> <?php print render($tabs); ?> </div> <?php endif; ?> <?php print render($page['help']); ?> <?php if ($action_links): ?> <ul class="action-links"> <?php print 
render($action_links); ?> </ul> <?php endif; ?><p/> <?php if ($messages): ?> <div id="messages"><div class="section clearfix"> <?php print $messages; ?> </div></div> <!-- /.section, /#messages --> <?php endif; ?> <?php print render($page['content']); ?> </div> </div> <?php include('footer.inc') ?> <?php if ($page['highlighted']): ?><div id="highlighted"><?php print render($page['highlighted']); ?></div><?php endif; ?> <!-- Content was HERE-->
varunarora/OpenCurriculum
sites/all/themes/opencurriculum/templates/page.tpl.php
PHP
gpl-2.0
6,764
/* * Copyright (C) 2013-2015 Tim Mayberry <mojofunk@gmail.com> * Copyright (C) 2014-2016 Paul Davis <paul@linuxaudiosystems.com> * Copyright (C) 2014-2019 Robin Gareus <robin@gareus.org> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ #include <iostream> #include <cstdlib> #include <string> #ifdef PLATFORM_WINDOWS #include <fcntl.h> #endif #include <giomm.h> #include <glibmm/thread.h> #include "pbd/pbd.h" #include "pbd/debug.h" #include "pbd/error.h" #include "pbd/id.h" #include "pbd/enumwriter.h" #include "pbd/fpu.h" #include "pbd/xml++.h" #ifdef PLATFORM_WINDOWS #include <winsock2.h> #include "pbd/windows_timer_utils.h" #include "pbd/windows_mmcss.h" #endif #include "pbd/i18n.h" extern void setup_libpbd_enums (); namespace { static bool libpbd_initialized = false; #ifdef PLATFORM_WINDOWS static void test_timers_from_env () { bool set; std::string options; options = Glib::getenv ("PBD_TEST_TIMERS", set); if (set) { if (!PBD::QPC::check_timer_valid ()) { PBD::error << X_("Windows QPC Timer source not usable") << endmsg; } else { PBD::info << X_("Windows QPC Timer source usable") << endmsg; } } } #endif } // namespace bool PBD::init () { if (libpbd_initialized) { return true; } #ifdef PLATFORM_WINDOWS // Essential!! Make sure that any files used by Ardour // will be created or opened in BINARY mode! 
_fmode = O_BINARY; WSADATA wsaData; /* Initialize windows socket DLL for PBD::CrossThreadChannel */ if (WSAStartup(MAKEWORD(1,1),&wsaData) != 0) { error << X_("Windows socket initialization failed with error: ") << WSAGetLastError() << endmsg; return false; } QPC::initialize(); test_timers_from_env (); if (!PBD::MMCSS::initialize()) { PBD::info << X_("Unable to initialize MMCSS") << endmsg; } else { PBD::info << X_("MMCSS Initialized") << endmsg; } #endif if (!Glib::thread_supported()) { Glib::thread_init(); } Gio::init (); PBD::ID::init (); setup_libpbd_enums (); libpbd_initialized = true; return true; } void PBD::cleanup () { #ifdef PLATFORM_WINDOWS PBD::MMCSS::deinitialize (); WSACleanup(); #endif EnumWriter::destroy (); FPU::destroy (); }
napcode/ardour
libs/pbd/pbd.cc
C++
gpl-2.0
2,849
#!/usr/bin/python3 # -*- coding: utf-8 -*-, import sys import os.path import unittest from io import StringIO from suse_git import header class TestHeaderChecker(unittest.TestCase): def test_empty(self): try: self.header = header.Checker("") except header.HeaderException as e: self.assertEqual(4, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('patch-mainline')) self.assertTrue(e.tag_is_missing('from')) self.assertTrue(e.tag_is_missing('subject')) self.assertTrue(e.tag_is_missing('references')) self.assertEqual(4, e.errors()) def test_subject_dupe(self): text = """ From: develoepr@site.com Subject: some patch Subject: some patch Patch-mainline: v4.2-rc2 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.DuplicateTagError)) self.assertEqual(1, e.errors()) def test_patch_mainline_dupe(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Patch-mainline: v4.2-rc2 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.DuplicateTagError)) self.assertEqual(1, e.errors()) def test_patch_mainline_empty(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.EmptyTagError)) self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('patch-mainline')) self.assertEqual(2, e.errors()) def test_patch_mainline_version_no_ack_or_sob(self): text = """ From: developer@site.com Subject: some patch 
Patch-mainline: v4.2-rc1 References: bsc#12345 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa """ try: self.header = header.Checker(text) except header.HeaderException as e: self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('acked-by')) self.assertTrue(e.tag_is_missing('signed-off-by')) self.assertEqual(1, e.errors()) def test_patch_mainline_version_correct_multi_ack(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@external.com Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_multi_ack_ext_last(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com Acked-by: developer@external.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_mixed_ack_sob(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Signed-off-by: developer@external.com Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_ack(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_from(self): text = """ From: developer@suse.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 """ self.header = header.Checker(text) def test_patch_mainline_version_correct_review(self): text = """ From: developer@external.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Reviewed-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_sob(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Signed-off-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_multi_sob(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Signed-off-by: developer2@external.com Signed-off-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_version_correct_multi_sob_ext_last(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Signed-off-by: developer@suse.com Signed-off-by: developer2@external.com """ self.header = header.Checker(text) def test_patch_mainline_na(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: n/a References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_submitted_correct_ml(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted, 19 July 2015 - linux-btrfs References: bsc#12345 Acked-by: developer@suse.com """ errors = self.header = header.Checker(text) def test_patch_mainline_submitted_correct_url(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted, https://lkml.org/archive/link-to-post References: bsc#12345 Acked-by: developer@suse.com """ errors = self.header = header.Checker(text) def 
test_patch_mainline_submitted_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_submitted_detail_git_commit(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted, https://lkml.org/archive/link-to-post Git-repo: git://host/valid/path/to/repo References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.ExcludedTagError)) self.assertEqual(1, e.errors()) # Required/Excluded conflict between Patch-mainline (Submitted) # and Git-commit def test_patch_mainline_submitted_detail_git_commit(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted, https://lkml.org/archive/link-to-post Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.MissingTagError)) self.assertEqual(1, e.errors(header.ExcludedTagError)) self.assertEqual(2, e.errors()) def test_patch_mainline_submitted_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Submitted References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_never_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Never References: bsc#12345 Acked-by: 
developer@suse.com """ try: self.header = header.Checker(text) except header.HeaderException as e: self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_yes_with_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Yes, v4.1-rc1 References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_yes_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Yes References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_not_yet_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Not yet References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_never_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Never, SLES-specific feature References: FATE#123456 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_no_detail(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: No, handled differently upstream References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_patch_mainline_not_yet_detail(self): text = """ From: 
developer@site.com Subject: some patch Patch-mainline: Not yet, rare reason References: bsc#12345 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_git_commit_standalone(self): text = """ From: developer@site.com Subject: some patch Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ try: self.header = header.Checker(text) except header.HeaderException as e: # Both policy and Git-commit require Patch-mainline self.assertEqual(2, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('patch-mainline')) self.assertEqual(2, e.errors()) def test_patch_mainline_queued_correct(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Queued Git-repo: git://path/to/git/repo Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_mainline_queued_standalone(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Queued References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(2, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('git-commit')) self.assertTrue(e.tag_is_missing('git-repo')) self.assertEqual(2, e.errors()) def test_patch_mainline_queued_with_git_repo(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: Queued Git-repo: git://path/to/git/repo References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception # Required by both Patch-mainline (Queued) and # Git-repo self.assertEqual(2, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('git-commit')) self.assertEqual(2, e.errors()) def test_patch_mainline_queued_with_git_commit(self): text = """ From: 
developer@site.com Subject: some patch Patch-mainline: Queued Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('git-repo')) self.assertEqual(1, e.errors()) def test_patch_mainline_invalid(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: n/a References: bsc#12345 Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.FormatError)) self.assertEqual(1, e.errors()) def test_diff_like_description(self): text = """ From: developer@external.com Subject: blablah Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 This is a thing. I ran across it: *** Testing resulted in failure Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_diff_like_description2(self): text = """ From: developer@external.com Subject: blablah Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 This is a thing. 
I ran across it: --- Testing resulted in failure Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_references_empty(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.EmptyTagError)) self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('references')) self.assertEqual(2, e.errors()) def test_patch_references_missing(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('references')) self.assertEqual(1, e.errors()) def test_patch_references_multi(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 References: bsc#12354 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_references_multi2(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 bsc#12354 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_references_multi3(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345, bsc#12354 Acked-by: developer@suse.com """ self.header = header.Checker(text) def test_patch_references_multi3(self): text = """ From: developer@site.com Subject: some 
patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345, bsc#12354 References: fix for blahblah Acked-by: developer@suse.com """ self.header = header.Checker(text) @unittest.skip("Enable this check when we want to require a real " "References tag") def test_patch_references_only_freeform(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: fix for blahblah Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text) e = cm.exception self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('references')) self.assertEqual(1, e.errors()) def test_patch_references_empty_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text, True) e = cm.exception self.assertEqual(1, e.errors(header.EmptyTagError)) self.assertEqual(1, e.errors()) def test_patch_references_missing_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa Acked-by: developer@suse.com """ self.header = header.Checker(text, True) def test_patch_references_multi_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 References: bsc#12354 Acked-by: developer@suse.com """ self.header = header.Checker(text, True) def test_patch_references_multi2_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345 bsc#12354 Acked-by: developer@suse.com 
""" self.header = header.Checker(text, True) def test_patch_references_multi3_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345, bsc#12354 Acked-by: developer@suse.com """ self.header = header.Checker(text, True) def test_patch_references_multi3_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: bsc#12345, bsc#12354 References: fix for blahblah Acked-by: developer@suse.com """ self.header = header.Checker(text, True) @unittest.skip("Enable this check when we want to require a real " "References tag") def test_patch_references_only_freeform_update(self): text = """ From: developer@site.com Subject: some patch Patch-mainline: v4.2-rc1 Git-commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa References: fix for blahblah Acked-by: developer@suse.com """ with self.assertRaises(header.HeaderException) as cm: self.header = header.Checker(text, True) e = cm.exception self.assertEqual(1, e.errors(header.MissingTagError)) self.assertTrue(e.tag_is_missing('references')) self.assertEqual(1, e.errors()) def test_no_patch_mainline_for_kabi(self): text = """ From: developer@site.com Subject: some patch References: FATE#123456 Acked-by: developer@suse.com """ self.header = header.Checker(text, False, "patches.kabi/FATE123456_fix_kabi.patch")
kdave/kernel-source
scripts/python/tests/test_header.py
Python
gpl-2.0
20,987
/* Copyright (C) 2005 Versant Inc. http://www.db4o.com */ using System; using System.Reflection; namespace Sharpen.Lang { public class IdentityHashCodeProvider { #if !CF public static int IdentityHashCode(object obj) { return System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(obj); } #else public static int IdentityHashCode(object obj) { if (obj == null) return 0; return (int) _hashMethod.Invoke(null, new object[] { obj }); } private static MethodInfo _hashMethod = GetIdentityHashCodeMethod(); private static MethodInfo GetIdentityHashCodeMethod() { Assembly assembly = typeof(object).Assembly; try { Type t = assembly.GetType("System.PInvoke.EE"); return t.GetMethod( "Object_GetHashCode", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static); } catch (Exception e) { } // We may be running the CF app on .NET Framework 1.1 // for profiling, let's give that a chance try { Type t = assembly.GetType( "System.Runtime.CompilerServices.RuntimeHelpers"); return t.GetMethod( "GetHashCode", BindingFlags.Public | BindingFlags.Static); } catch { } return null; } #endif } }
meebey/smuxi-head-mirror
lib/db4o-net/Db4objects.Db4o/native/Sharpen/Lang/IdentityHashCodeProvider.cs
C#
gpl-2.0
1,240
/***************************************************************************** Copyright (c) 1996, 2016, Oracle and/or its affiliates. All Rights Reserved. Copyright (c) 2008, Google Inc. Copyright (c) 2017, MariaDB Corporation. Portions of this file contain modifications contributed and copyrighted by Google, Inc. Those modifications are gratefully acknowledged and are described briefly in the InnoDB documentation. The contributions by Google are incorporated with their permission, and subject to the conditions contained in the file COPYING.Google. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA *****************************************************************************/ /********************************************************************//** @file btr/btr0sea.cc The index tree adaptive search Created 2/17/1996 Heikki Tuuri *************************************************************************/ #include "btr0sea.h" #ifdef BTR_CUR_HASH_ADAPT #include "buf0buf.h" #include "page0page.h" #include "page0cur.h" #include "btr0cur.h" #include "btr0pcur.h" #include "btr0btr.h" #include "ha0ha.h" #include "srv0mon.h" #include "sync0sync.h" /** Is search system enabled. Search system is protected by array of latches. */ char btr_search_enabled = true; /** Number of adaptive hash index partition. 
*/ ulong btr_ahi_parts = 8; #ifdef UNIV_SEARCH_PERF_STAT /** Number of successful adaptive hash index lookups */ ulint btr_search_n_succ = 0; /** Number of failed adaptive hash index lookups */ ulint btr_search_n_hash_fail = 0; #endif /* UNIV_SEARCH_PERF_STAT */ /** padding to prevent other memory update hotspots from residing on the same memory cache line as btr_search_latches */ UNIV_INTERN byte btr_sea_pad1[CACHE_LINE_SIZE]; /** The latches protecting the adaptive search system: this latches protects the (1) positions of records on those pages where a hash index has been built. NOTE: It does not protect values of non-ordering fields within a record from being updated in-place! We can use fact (1) to perform unique searches to indexes. We will allocate the latches from dynamic memory to get it to the same DRAM page as other hotspot semaphores */ rw_lock_t** btr_search_latches; /** padding to prevent other memory update hotspots from residing on the same memory cache line */ UNIV_INTERN byte btr_sea_pad2[CACHE_LINE_SIZE]; /** The adaptive hash index */ btr_search_sys_t* btr_search_sys; /** If the number of records on the page divided by this parameter would have been successfully accessed using a hash index, the index is then built on the page, assuming the global limit has been reached */ #define BTR_SEARCH_PAGE_BUILD_LIMIT 16 /** The global limit for consecutive potentially successful hash searches, before hash index building is started */ #define BTR_SEARCH_BUILD_LIMIT 100 /** Determine the number of accessed key fields. @param[in] n_fields number of complete fields @param[in] n_bytes number of bytes in an incomplete last field @return number of complete or incomplete fields */ inline MY_ATTRIBUTE((warn_unused_result)) ulint btr_search_get_n_fields( ulint n_fields, ulint n_bytes) { return(n_fields + (n_bytes > 0 ? 1 : 0)); } /** Determine the number of accessed key fields. 
@param[in] cursor b-tree cursor @return number of complete or incomplete fields */ inline MY_ATTRIBUTE((warn_unused_result)) ulint btr_search_get_n_fields( const btr_cur_t* cursor) { return(btr_search_get_n_fields(cursor->n_fields, cursor->n_bytes)); } /********************************************************************//** Builds a hash index on a page with the given parameters. If the page already has a hash index with different parameters, the old hash index is removed. If index is non-NULL, this function checks if n_fields and n_bytes are sensible values, and does not build a hash index if not. */ static void btr_search_build_page_hash_index( /*=============================*/ dict_index_t* index, /*!< in: index for which to build, or NULL if not known */ buf_block_t* block, /*!< in: index page, s- or x-latched */ ulint n_fields,/*!< in: hash this many full fields */ ulint n_bytes,/*!< in: hash this many bytes from the next field */ ibool left_side);/*!< in: hash for searches from left side? */ /** This function should be called before reserving any btr search mutex, if the intended operation might add nodes to the search system hash table. Because of the latching order, once we have reserved the btr search system latch, we cannot allocate a free frame from the buffer pool. Checks that there is a free buffer frame allocated for hash table heap in the btr search system. If not, allocates a free frames for the heap. This check makes it probable that, when have reserved the btr search system latch and we need to allocate a new node to the hash table, it will succeed. However, the check will not guarantee success. 
@param[in] index index handler */ static void btr_search_check_free_space_in_heap(dict_index_t* index) { hash_table_t* table; mem_heap_t* heap; ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_S)); ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_X)); table = btr_get_search_table(index); heap = table->heap; /* Note that we peek the value of heap->free_block without reserving the latch: this is ok, because we will not guarantee that there will be enough free space in the hash table. */ if (heap->free_block == NULL) { buf_block_t* block = buf_block_alloc(NULL); btr_search_x_lock(index); if (btr_search_enabled && heap->free_block == NULL) { heap->free_block = block; } else { buf_block_free(block); } btr_search_x_unlock(index); } } /** Creates and initializes the adaptive search system at a database start. @param[in] hash_size hash table size. */ void btr_search_sys_create(ulint hash_size) { /* Search System is divided into n parts. Each part controls access to distinct set of hash buckets from hash table through its own latch. */ /* Step-1: Allocate latches (1 per part). */ btr_search_latches = reinterpret_cast<rw_lock_t**>( ut_malloc(sizeof(rw_lock_t*) * btr_ahi_parts, mem_key_ahi)); for (ulint i = 0; i < btr_ahi_parts; ++i) { btr_search_latches[i] = reinterpret_cast<rw_lock_t*>( ut_malloc(sizeof(rw_lock_t), mem_key_ahi)); rw_lock_create(btr_search_latch_key, btr_search_latches[i], SYNC_SEARCH_SYS); } /* Step-2: Allocate hash tablees. 
*/ btr_search_sys = reinterpret_cast<btr_search_sys_t*>( ut_malloc(sizeof(btr_search_sys_t), mem_key_ahi)); btr_search_sys->hash_tables = reinterpret_cast<hash_table_t**>( ut_malloc(sizeof(hash_table_t*) * btr_ahi_parts, mem_key_ahi)); for (ulint i = 0; i < btr_ahi_parts; ++i) { btr_search_sys->hash_tables[i] = ib_create((hash_size / btr_ahi_parts), LATCH_ID_HASH_TABLE_MUTEX, 0, MEM_HEAP_FOR_BTR_SEARCH); #if defined UNIV_AHI_DEBUG || defined UNIV_DEBUG btr_search_sys->hash_tables[i]->adaptive = TRUE; #endif /* UNIV_AHI_DEBUG || UNIV_DEBUG */ } } /** Resize hash index hash table. @param[in] hash_size hash index hash table size */ void btr_search_sys_resize(ulint hash_size) { /* Step-1: Lock all search latches in exclusive mode. */ btr_search_x_lock_all(); if (btr_search_enabled) { btr_search_x_unlock_all(); ib::error() << "btr_search_sys_resize failed because" " hash index hash table is not empty."; ut_ad(0); return; } /* Step-2: Recreate hash tables with new size. */ for (ulint i = 0; i < btr_ahi_parts; ++i) { mem_heap_free(btr_search_sys->hash_tables[i]->heap); hash_table_free(btr_search_sys->hash_tables[i]); btr_search_sys->hash_tables[i] = ib_create((hash_size / btr_ahi_parts), LATCH_ID_HASH_TABLE_MUTEX, 0, MEM_HEAP_FOR_BTR_SEARCH); #if defined UNIV_AHI_DEBUG || defined UNIV_DEBUG btr_search_sys->hash_tables[i]->adaptive = TRUE; #endif /* UNIV_AHI_DEBUG || UNIV_DEBUG */ } /* Step-3: Unlock all search latches from exclusive mode. */ btr_search_x_unlock_all(); } /** Frees the adaptive search system at a database shutdown. */ void btr_search_sys_free() { ut_ad(btr_search_sys != NULL && btr_search_latches != NULL); /* Step-1: Release the hash tables. */ for (ulint i = 0; i < btr_ahi_parts; ++i) { mem_heap_free(btr_search_sys->hash_tables[i]->heap); hash_table_free(btr_search_sys->hash_tables[i]); } ut_free(btr_search_sys->hash_tables); ut_free(btr_search_sys); btr_search_sys = NULL; /* Step-2: Release all allocates latches. 
*/ for (ulint i = 0; i < btr_ahi_parts; ++i) { rw_lock_free(btr_search_latches[i]); ut_free(btr_search_latches[i]); } ut_free(btr_search_latches); btr_search_latches = NULL; } /** Set index->ref_count = 0 on all indexes of a table. @param[in,out] table table handler */ static void btr_search_disable_ref_count( dict_table_t* table) { dict_index_t* index; ut_ad(mutex_own(&dict_sys->mutex)); for (index = dict_table_get_first_index(table); index != NULL; index = dict_table_get_next_index(index)) { ut_ad(rw_lock_own(btr_get_search_latch(index), RW_LOCK_X)); index->search_info->ref_count = 0; } } /** Disable the adaptive hash search system and empty the index. @param[in] need_mutex need to acquire dict_sys->mutex */ void btr_search_disable( bool need_mutex) { dict_table_t* table; if (need_mutex) { mutex_enter(&dict_sys->mutex); } ut_ad(mutex_own(&dict_sys->mutex)); btr_search_x_lock_all(); if (!btr_search_enabled) { if (need_mutex) { mutex_exit(&dict_sys->mutex); } btr_search_x_unlock_all(); return; } btr_search_enabled = false; /* Clear the index->search_info->ref_count of every index in the data dictionary cache. */ for (table = UT_LIST_GET_FIRST(dict_sys->table_LRU); table; table = UT_LIST_GET_NEXT(table_LRU, table)) { btr_search_disable_ref_count(table); } for (table = UT_LIST_GET_FIRST(dict_sys->table_non_LRU); table; table = UT_LIST_GET_NEXT(table_LRU, table)) { btr_search_disable_ref_count(table); } if (need_mutex) { mutex_exit(&dict_sys->mutex); } /* Set all block->index = NULL. */ buf_pool_clear_hash_index(); /* Clear the adaptive hash index. */ for (ulint i = 0; i < btr_ahi_parts; ++i) { hash_table_clear(btr_search_sys->hash_tables[i]); mem_heap_empty(btr_search_sys->hash_tables[i]->heap); } btr_search_x_unlock_all(); } /** Enable the adaptive hash search system. 
*/ void btr_search_enable() { buf_pool_mutex_enter_all(); if (srv_buf_pool_old_size != srv_buf_pool_size) { buf_pool_mutex_exit_all(); return; } buf_pool_mutex_exit_all(); btr_search_x_lock_all(); btr_search_enabled = true; btr_search_x_unlock_all(); } /** Returns the value of ref_count. The value is protected by latch. @param[in] info search info @param[in] index index identifier @return ref_count value. */ ulint btr_search_info_get_ref_count( btr_search_t* info, dict_index_t* index) { ulint ret = 0; if (!btr_search_enabled) { return(ret); } ut_ad(info); ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_S)); ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_X)); btr_search_s_lock(index); ret = info->ref_count; btr_search_s_unlock(index); return(ret); } /** Updates the search info of an index about hash successes. NOTE that info is NOT protected by any semaphore, to save CPU time! Do not assume its fields are consistent. @param[in,out] info search info @param[in] cursor cursor which was just positioned */ static void btr_search_info_update_hash( btr_search_t* info, btr_cur_t* cursor) { dict_index_t* index = cursor->index; ulint n_unique; int cmp; ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_S)); ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_X)); if (dict_index_is_ibuf(index)) { /* So many deletes are performed on an insert buffer tree that we do not consider a hash index useful on it: */ return; } n_unique = dict_index_get_n_unique_in_tree(index); if (info->n_hash_potential == 0) { goto set_new_recomm; } /* Test if the search would have succeeded using the recommended hash prefix */ if (info->n_fields >= n_unique && cursor->up_match >= n_unique) { increment_potential: info->n_hash_potential++; return; } cmp = ut_pair_cmp(info->n_fields, info->n_bytes, cursor->low_match, cursor->low_bytes); if (info->left_side ? 
cmp <= 0 : cmp > 0) { goto set_new_recomm; } cmp = ut_pair_cmp(info->n_fields, info->n_bytes, cursor->up_match, cursor->up_bytes); if (info->left_side ? cmp <= 0 : cmp > 0) { goto increment_potential; } set_new_recomm: /* We have to set a new recommendation; skip the hash analysis for a while to avoid unnecessary CPU time usage when there is no chance for success */ info->hash_analysis = 0; cmp = ut_pair_cmp(cursor->up_match, cursor->up_bytes, cursor->low_match, cursor->low_bytes); if (cmp == 0) { info->n_hash_potential = 0; /* For extra safety, we set some sensible values here */ info->n_fields = 1; info->n_bytes = 0; info->left_side = TRUE; } else if (cmp > 0) { info->n_hash_potential = 1; if (cursor->up_match >= n_unique) { info->n_fields = n_unique; info->n_bytes = 0; } else if (cursor->low_match < cursor->up_match) { info->n_fields = cursor->low_match + 1; info->n_bytes = 0; } else { info->n_fields = cursor->low_match; info->n_bytes = cursor->low_bytes + 1; } info->left_side = TRUE; } else { info->n_hash_potential = 1; if (cursor->low_match >= n_unique) { info->n_fields = n_unique; info->n_bytes = 0; } else if (cursor->low_match > cursor->up_match) { info->n_fields = cursor->up_match + 1; info->n_bytes = 0; } else { info->n_fields = cursor->up_match; info->n_bytes = cursor->up_bytes + 1; } info->left_side = FALSE; } } /** Update the block search info on hash successes. NOTE that info and block->n_hash_helps, n_fields, n_bytes, left_side are NOT protected by any semaphore, to save CPU time! Do not assume the fields are consistent. 
@return TRUE if building a (new) hash index on the block is recommended @param[in,out] info search info @param[in,out] block buffer block @param[in] cursor cursor */ static ibool btr_search_update_block_hash_info( btr_search_t* info, buf_block_t* block, const btr_cur_t* cursor) { ut_ad(!rw_lock_own(btr_get_search_latch(cursor->index), RW_LOCK_S)); ut_ad(!rw_lock_own(btr_get_search_latch(cursor->index), RW_LOCK_X)); ut_ad(rw_lock_own(&block->lock, RW_LOCK_S) || rw_lock_own(&block->lock, RW_LOCK_X)); info->last_hash_succ = FALSE; ut_a(buf_block_state_valid(block)); ut_ad(info->magic_n == BTR_SEARCH_MAGIC_N); if ((block->n_hash_helps > 0) && (info->n_hash_potential > 0) && (block->n_fields == info->n_fields) && (block->n_bytes == info->n_bytes) && (block->left_side == info->left_side)) { if ((block->index) && (block->curr_n_fields == info->n_fields) && (block->curr_n_bytes == info->n_bytes) && (block->curr_left_side == info->left_side)) { /* The search would presumably have succeeded using the hash index */ info->last_hash_succ = TRUE; } block->n_hash_helps++; } else { block->n_hash_helps = 1; block->n_fields = info->n_fields; block->n_bytes = info->n_bytes; block->left_side = info->left_side; } if ((block->n_hash_helps > page_get_n_recs(block->frame) / BTR_SEARCH_PAGE_BUILD_LIMIT) && (info->n_hash_potential >= BTR_SEARCH_BUILD_LIMIT)) { if ((!block->index) || (block->n_hash_helps > 2 * page_get_n_recs(block->frame)) || (block->n_fields != block->curr_n_fields) || (block->n_bytes != block->curr_n_bytes) || (block->left_side != block->curr_left_side)) { /* Build a new hash index on the page */ return(TRUE); } } return(FALSE); } /** Updates a hash node reference when it has been unsuccessfully used in a search which could have succeeded with the used hash parameters. This can happen because when building a hash index for a page, we do not check what happens at page boundaries, and therefore there can be misleading hash nodes. 
Also, collisions in the fold value can lead to misleading references. This function lazily fixes these imperfections in the hash index. @param[in] info search info @param[in] block buffer block where cursor positioned @param[in] cursor cursor */ static void btr_search_update_hash_ref( const btr_search_t* info, buf_block_t* block, const btr_cur_t* cursor) { dict_index_t* index; ulint fold; rec_t* rec; ut_ad(cursor->flag == BTR_CUR_HASH_FAIL); ut_ad(rw_lock_own(btr_get_search_latch(cursor->index), RW_LOCK_X)); ut_ad(rw_lock_own(&(block->lock), RW_LOCK_S) || rw_lock_own(&(block->lock), RW_LOCK_X)); ut_ad(page_align(btr_cur_get_rec(cursor)) == buf_block_get_frame(block)); assert_block_ahi_valid(block); index = block->index; if (!index) { return; } ut_ad(block->page.id.space() == index->space); ut_a(index == cursor->index); ut_a(!dict_index_is_ibuf(index)); if ((info->n_hash_potential > 0) && (block->curr_n_fields == info->n_fields) && (block->curr_n_bytes == info->n_bytes) && (block->curr_left_side == info->left_side)) { mem_heap_t* heap = NULL; ulint offsets_[REC_OFFS_NORMAL_SIZE]; rec_offs_init(offsets_); rec = btr_cur_get_rec(cursor); if (!page_rec_is_user_rec(rec)) { return; } fold = rec_fold(rec, rec_get_offsets(rec, index, offsets_, ULINT_UNDEFINED, &heap), block->curr_n_fields, block->curr_n_bytes, index->id); if (UNIV_LIKELY_NULL(heap)) { mem_heap_free(heap); } ut_ad(rw_lock_own(btr_get_search_latch(index), RW_LOCK_X)); ha_insert_for_fold(btr_get_search_table(index), fold, block, rec); MONITOR_INC(MONITOR_ADAPTIVE_HASH_ROW_ADDED); } } /** Updates the search info. 
@param[in,out] info search info @param[in] cursor cursor which was just positioned */ void btr_search_info_update_slow( btr_search_t* info, btr_cur_t* cursor) { buf_block_t* block; ibool build_index; ut_ad(!rw_lock_own(btr_get_search_latch(cursor->index), RW_LOCK_S)); ut_ad(!rw_lock_own(btr_get_search_latch(cursor->index), RW_LOCK_X)); block = btr_cur_get_block(cursor); /* NOTE that the following two function calls do NOT protect info or block->n_fields etc. with any semaphore, to save CPU time! We cannot assume the fields are consistent when we return from those functions! */ btr_search_info_update_hash(info, cursor); build_index = btr_search_update_block_hash_info(info, block, cursor); if (build_index || (cursor->flag == BTR_CUR_HASH_FAIL)) { btr_search_check_free_space_in_heap(cursor->index); } if (cursor->flag == BTR_CUR_HASH_FAIL) { /* Update the hash node reference, if appropriate */ #ifdef UNIV_SEARCH_PERF_STAT btr_search_n_hash_fail++; #endif /* UNIV_SEARCH_PERF_STAT */ btr_search_x_lock(cursor->index); btr_search_update_hash_ref(info, block, cursor); btr_search_x_unlock(cursor->index); } if (build_index) { /* Note that since we did not protect block->n_fields etc. with any semaphore, the values can be inconsistent. We have to check inside the function call that they make sense. */ btr_search_build_page_hash_index(cursor->index, block, block->n_fields, block->n_bytes, block->left_side); } } /** Checks if a guessed position for a tree cursor is right. Note that if mode is PAGE_CUR_LE, which is used in inserts, and the function returns TRUE, then cursor->up_match and cursor->low_match both have sensible values. 
@param[in,out]	cursor	guess cursor position
@param[in]	can_only_compare_to_cursor_rec
			if we do not have a latch on the page of cursor,
			but a latch corresponding search system, then
			ONLY the columns of the record UNDER the cursor
			are protected, not the next or previous record
			in the chain: we cannot look at the next or
			previous record to check our guess!
@param[in]	tuple	data tuple
@param[in]	mode	PAGE_CUR_L, PAGE_CUR_LE, PAGE_CUR_G, PAGE_CUR_GE
@param[in]	mtr	mini transaction
@return TRUE if success */
static
ibool
btr_search_check_guess(
	btr_cur_t*	cursor,
	ibool		can_only_compare_to_cursor_rec,
	const dtuple_t*	tuple,
	ulint		mode,
	mtr_t*		mtr)
{
	rec_t*		rec;
	ulint		n_unique;
	ulint		match;
	int		cmp;
	mem_heap_t*	heap		= NULL;
	ulint		offsets_[REC_OFFS_NORMAL_SIZE];
	ulint*		offsets		= offsets_;
	ibool		success		= FALSE;
	rec_offs_init(offsets_);

	n_unique = dict_index_get_n_unique_in_tree(cursor->index);

	rec = btr_cur_get_rec(cursor);

	ut_ad(page_rec_is_user_rec(rec));

	match = 0;

	/* First, compare the tuple against the record under the cursor;
	this is always safe regardless of which latch we hold. */
	offsets = rec_get_offsets(rec, cursor->index, offsets,
				  n_unique, &heap);
	cmp = cmp_dtuple_rec_with_match(tuple, rec, offsets, &match);

	if (mode == PAGE_CUR_GE) {
		if (cmp > 0) {
			goto exit_func;
		}

		cursor->up_match = match;

		if (match >= n_unique) {
			success = TRUE;
			goto exit_func;
		}
	} else if (mode == PAGE_CUR_LE) {
		if (cmp < 0) {
			goto exit_func;
		}

		cursor->low_match = match;

	} else if (mode == PAGE_CUR_G) {
		if (cmp >= 0) {
			goto exit_func;
		}
	} else if (mode == PAGE_CUR_L) {
		if (cmp <= 0) {
			goto exit_func;
		}
	}

	if (can_only_compare_to_cursor_rec) {
		/* Since we could not determine if our guess is right just by
		looking at the record under the cursor, return FALSE */
		goto exit_func;
	}

	match = 0;

	/* Confirm the guess by also checking the neighboring record:
	the previous one for the G/GE modes, the next one for L/LE. */
	if ((mode == PAGE_CUR_G) || (mode == PAGE_CUR_GE)) {
		rec_t*	prev_rec;

		ut_ad(!page_rec_is_infimum(rec));

		prev_rec = page_rec_get_prev(rec);

		if (page_rec_is_infimum(prev_rec)) {
			/* The cursor record is the first user record: the
			guess holds only if there is no previous page. */
			success = btr_page_get_prev(page_align(prev_rec), mtr)
				== FIL_NULL;

			goto exit_func;
		}

		offsets = rec_get_offsets(prev_rec, cursor->index, offsets,
					  n_unique, &heap);
		cmp = cmp_dtuple_rec_with_match(
			tuple, prev_rec, offsets, &match);
		if (mode == PAGE_CUR_GE) {
			success = cmp > 0;
		} else {
			success = cmp >= 0;
		}

		goto exit_func;
	} else {
		rec_t*	next_rec;

		ut_ad(!page_rec_is_supremum(rec));

		next_rec = page_rec_get_next(rec);

		if (page_rec_is_supremum(next_rec)) {
			/* The cursor record is the last user record: the
			guess holds only if there is no next page. */
			if (btr_page_get_next(page_align(next_rec), mtr)
			    == FIL_NULL) {

				cursor->up_match = 0;
				success = TRUE;
			}

			goto exit_func;
		}

		offsets = rec_get_offsets(next_rec, cursor->index, offsets,
					  n_unique, &heap);
		cmp = cmp_dtuple_rec_with_match(
			tuple, next_rec, offsets, &match);
		if (mode == PAGE_CUR_LE) {
			success = cmp < 0;
			cursor->up_match = match;
		} else {
			success = cmp <= 0;
		}
	}
exit_func:
	if (UNIV_LIKELY_NULL(heap)) {
		mem_heap_free(heap);
	}
	return(success);
}

/** Mark a hash-guess failure in the cursor and in the (unprotected)
search statistics.
@param[in,out]	info	search info
@param[in,out]	cursor	cursor; flag is set to BTR_CUR_HASH_FAIL */
static
void
btr_search_failure(btr_search_t* info, btr_cur_t* cursor)
{
	cursor->flag = BTR_CUR_HASH_FAIL;

#ifdef UNIV_SEARCH_PERF_STAT
	++info->n_hash_fail;

	if (info->n_hash_succ > 0) {
		--info->n_hash_succ;
	}
#endif /* UNIV_SEARCH_PERF_STAT */

	info->last_hash_succ = FALSE;
}

/** Tries to guess the right search position based on the hash search info
of the index. Note that if mode is PAGE_CUR_LE, which is used in inserts,
and the function returns TRUE, then cursor->up_match and cursor->low_match
both have sensible values.
@param[in,out]	index		index
@param[in,out]	info		index search info
@param[in]	tuple		logical record
@param[in]	mode		PAGE_CUR_L, ....
@param[in]	latch_mode	BTR_SEARCH_LEAF, ...;
				NOTE that only if has_search_latch is 0, we
				will have a latch set on the cursor page,
				otherwise we assume the caller uses his
				search latch to protect the record!
@param[out]	cursor		tree cursor
@param[in]	has_search_latch
				latch mode the caller currently has on
				search system: RW_S/X_LATCH or 0
@param[in]	mtr		mini transaction
@return TRUE if succeeded */
ibool
btr_search_guess_on_hash(
	dict_index_t*	index,
	btr_search_t*	info,
	const dtuple_t*	tuple,
	ulint		mode,
	ulint		latch_mode,
	btr_cur_t*	cursor,
	ulint		has_search_latch,
	mtr_t*		mtr)
{
	const rec_t*	rec;
	ulint		fold;
	index_id_t	index_id;
#ifdef notdefined
	btr_cur_t	cursor2;
	btr_pcur_t	pcur;
#endif

	if (!btr_search_enabled) {
		return(FALSE);
	}

	ut_ad(index && info && tuple && cursor && mtr);
	ut_ad(!dict_index_is_ibuf(index));
	ut_ad((latch_mode == BTR_SEARCH_LEAF)
	      || (latch_mode == BTR_MODIFY_LEAF));

	/* Not supported for spatial index */
	ut_ad(!dict_index_is_spatial(index));

	/* Note that, for efficiency, the struct info may not be protected by
	any latch here! */

	if (info->n_hash_potential == 0) {
		return(FALSE);
	}

	cursor->n_fields = info->n_fields;
	cursor->n_bytes = info->n_bytes;

	if (dtuple_get_n_fields(tuple) < btr_search_get_n_fields(cursor)) {
		/* The tuple is too short to compute the recommended
		hash prefix. */
		return(FALSE);
	}

	index_id = index->id;

#ifdef UNIV_SEARCH_PERF_STAT
	info->n_hash_succ++;
#endif
	fold = dtuple_fold(tuple, cursor->n_fields, cursor->n_bytes, index_id);

	cursor->fold = fold;
	cursor->flag = BTR_CUR_HASH;

	if (!has_search_latch) {
		btr_search_s_lock(index);

		/* Re-check after acquiring the latch: the AHI may have
		been disabled concurrently. */
		if (!btr_search_enabled) {
			btr_search_s_unlock(index);

			btr_search_failure(info, cursor);

			return(FALSE);
		}
	}

	ut_ad(rw_lock_get_writer(btr_get_search_latch(index)) != RW_LOCK_X);
	ut_ad(rw_lock_get_reader_count(btr_get_search_latch(index)) > 0);

	rec = (rec_t*) ha_search_and_get_data(
		btr_get_search_table(index), fold);

	if (rec == NULL) {

		if (!has_search_latch) {
			btr_search_s_unlock(index);
		}

		btr_search_failure(info, cursor);

		return(FALSE);
	}

	buf_block_t*	block = buf_block_from_ahi(rec);

	if (!has_search_latch) {

		if (!buf_page_get_known_nowait(
			latch_mode, block, BUF_MAKE_YOUNG,
			__FILE__, __LINE__, mtr)) {

			/* NOTE(review): this inner has_search_latch check is
			redundant — we are already inside the
			!has_search_latch branch. Harmless, but the unlock
			could be unconditional here. */
			if (!has_search_latch) {
				btr_search_s_unlock(index);
			}

			btr_search_failure(info, cursor);

			return(FALSE);
		}

		btr_search_s_unlock(index);

		buf_block_dbg_add_level(block, SYNC_TREE_NODE_FROM_HASH);
	}

	if (buf_block_get_state(block) != BUF_BLOCK_FILE_PAGE) {

		ut_ad(buf_block_get_state(block) == BUF_BLOCK_REMOVE_HASH);

		if (!has_search_latch) {

			btr_leaf_page_release(block, latch_mode, mtr);
		}

		btr_search_failure(info, cursor);

		return(FALSE);
	}

	ut_ad(page_rec_is_user_rec(rec));

	btr_cur_position(index, (rec_t*) rec, block, cursor);

	/* Check the validity of the guess within the page */

	/* If we only have the latch on search system, not on the
	page, it only protects the columns of the record the cursor
	is positioned on. We cannot look at the next of the previous
	record to determine if our guess for the cursor position is
	right. */
	if (index_id != btr_page_get_index_id(block->frame)
	    || !btr_search_check_guess(cursor,
				       has_search_latch,
				       tuple, mode, mtr)) {

		if (!has_search_latch) {
			btr_leaf_page_release(block, latch_mode, mtr);
		}

		btr_search_failure(info, cursor);

		return(FALSE);
	}

	if (info->n_hash_potential < BTR_SEARCH_BUILD_LIMIT + 5) {

		info->n_hash_potential++;
	}

#ifdef notdefined
	/* These lines of code can be used in a debug version to check
	the correctness of the searched cursor position: */

	info->last_hash_succ = FALSE;

	/* Currently, does not work if the following fails: */
	ut_ad(!has_search_latch);

	btr_leaf_page_release(block, latch_mode, mtr);

	btr_cur_search_to_nth_level(
		index, 0, tuple, mode, latch_mode, &cursor2, 0, mtr);

	if (mode == PAGE_CUR_GE
	    && page_rec_is_supremum(btr_cur_get_rec(&cursor2))) {

		/* If mode is PAGE_CUR_GE, then the binary search in the
		index tree may actually take us to the supremum of the
		previous page */

		info->last_hash_succ = FALSE;

		btr_pcur_open_on_user_rec(
			index, tuple, mode, latch_mode, &pcur, mtr);

		ut_ad(btr_pcur_get_rec(&pcur) == btr_cur_get_rec(cursor));
	} else {
		ut_ad(btr_cur_get_rec(&cursor2) == btr_cur_get_rec(cursor));
	}

	/* NOTE that it is theoretically possible that the above assertions
	fail if the page of the cursor gets removed from the buffer pool
	meanwhile! Thus it might not be a bug. */
#endif
	info->last_hash_succ = TRUE;

#ifdef UNIV_SEARCH_PERF_STAT
	btr_search_n_succ++;
#endif
	if (!has_search_latch && buf_page_peek_if_too_old(&block->page)) {

		buf_page_make_young(&block->page);
	}

	/* Increment the page get statistics though we did not really
	fix the page: for user info only */

	{
		buf_pool_t*	buf_pool = buf_pool_from_bpage(&block->page);

		++buf_pool->stat.n_page_gets;
	}

	return(TRUE);
}

/** Drop any adaptive hash index entries that point to an index page.
@param[in,out]	block	block containing index page, s- or x-latched, or an
			index page for which we know that
			block->buf_fix_count == 0 or it is an index page which
			has already been removed from the buf_pool->page_hash
			i.e.: it is in state BUF_BLOCK_REMOVE_HASH */
void
btr_search_drop_page_hash_index(buf_block_t* block)
{
	ulint			n_fields;
	ulint			n_bytes;
	const page_t*		page;
	const rec_t*		rec;
	ulint			fold;
	ulint			prev_fold;
	ulint			n_cached;
	ulint			n_recs;
	ulint*			folds;
	ulint			i;
	mem_heap_t*		heap;
	const dict_index_t*	index;
	ulint*			offsets;
	rw_lock_t*		latch;
	btr_search_t*		info;

retry:
	/* Do a dirty check on block->index, return if the block is
	not in the adaptive hash index. */
	index = block->index;
	/* This debug check uses a dirty read that could theoretically cause
	false positives while buf_pool_clear_hash_index() is executing. */
	assert_block_ahi_valid(block);

	if (index == NULL) {
		return;
	}

	ut_ad(block->page.buf_fix_count == 0
	      || buf_block_get_state(block) == BUF_BLOCK_REMOVE_HASH
	      || rw_lock_own(&block->lock, RW_LOCK_S)
	      || rw_lock_own(&block->lock, RW_LOCK_X));

	/* We must not dereference index here, because it could be freed
	if (index->table->n_ref_count == 0 && !mutex_own(&dict_sys->mutex)).
	Determine the ahi_slot based on the block contents. */

	const index_id_t	index_id
		= btr_page_get_index_id(block->frame);
	const ulint		ahi_slot
		= ut_fold_ulint_pair(static_cast<ulint>(index_id),
				     static_cast<ulint>(
					     block->page.id.space()))
		% btr_ahi_parts;
	latch = btr_search_latches[ahi_slot];

	ut_ad(!btr_search_own_any(RW_LOCK_S));
	ut_ad(!btr_search_own_any(RW_LOCK_X));

	rw_lock_s_lock(latch);
	assert_block_ahi_valid(block);

	if (block->index == NULL) {
		/* Someone already dropped the hash index between our
		dirty read above and taking the latch. */
		rw_lock_s_unlock(latch);
		return;
	}

	/* The index associated with a block must remain the
	same, because we are holding block->lock or the block is
	not accessible by other threads (BUF_BLOCK_REMOVE_HASH),
	or the index is not accessible to other threads
	(buf_fix_count == 0 when DROP TABLE or similar is executing
	buf_LRU_drop_page_hash_for_tablespace()). */
	ut_a(index == block->index);
#ifdef MYSQL_INDEX_DISABLE_AHI
	ut_ad(!index->disable_ahi);
#endif
	ut_ad(btr_search_enabled);

	ut_ad(block->page.id.space() == index->space);
	ut_a(index_id == index->id);
	ut_a(!dict_index_is_ibuf(index));
#ifdef UNIV_DEBUG
	switch (dict_index_get_online_status(index)) {
	case ONLINE_INDEX_CREATION:
		/* The index is being created (bulk loaded). */
	case ONLINE_INDEX_COMPLETE:
		/* The index has been published. */
	case ONLINE_INDEX_ABORTED:
		/* Either the index creation was aborted due to an
		error observed by InnoDB (in which case there should
		not be any adaptive hash index entries), or it was
		completed and then flagged aborted in
		rollback_inplace_alter_table(). */
		break;
	case ONLINE_INDEX_ABORTED_DROPPED:
		/* The index should have been dropped from the tablespace
		already, and the adaptive hash index entries should have
		been dropped as well. */
		ut_error;
	}
#endif /* UNIV_DEBUG */

	n_fields = block->curr_n_fields;
	n_bytes = block->curr_n_bytes;

	/* NOTE: The AHI fields of block must not be accessed after
	releasing search latch, as the index page might only be
	s-latched! */

	rw_lock_s_unlock(latch);

	ut_a(n_fields > 0 || n_bytes > 0);

	page = block->frame;
	n_recs = page_get_n_recs(page);

	/* Calculate and cache fold values into an array for fast deletion
	from the hash index */

	folds = (ulint*) ut_malloc_nokey(n_recs * sizeof(ulint));

	n_cached = 0;

	rec = page_get_infimum_rec(page);
	rec = page_rec_get_next_low(rec, page_is_comp(page));

	prev_fold = 0;

	heap = NULL;
	offsets = NULL;

	while (!page_rec_is_supremum(rec)) {
		offsets = rec_get_offsets(
			rec, index, offsets,
			btr_search_get_n_fields(n_fields, n_bytes),
			&heap);
		fold = rec_fold(rec, offsets, n_fields, n_bytes, index_id);

		/* Equal consecutive folds need only one removal entry. */
		if (fold == prev_fold && prev_fold != 0) {

			goto next_rec;
		}

		/* Remove all hash nodes pointing to this page from the
		hash chain */

		folds[n_cached] = fold;
		n_cached++;
next_rec:
		rec = page_rec_get_next_low(rec, page_rec_is_comp(rec));
		prev_fold = fold;
	}

	if (UNIV_LIKELY_NULL(heap)) {
		mem_heap_free(heap);
	}

	rw_lock_x_lock(latch);

	if (UNIV_UNLIKELY(!block->index)) {
		/* Someone else has meanwhile dropped the hash index */

		goto cleanup;
	}

	ut_a(block->index == index);

	if (block->curr_n_fields != n_fields
	    || block->curr_n_bytes != n_bytes) {

		/* Someone else has meanwhile built a new hash index on the
		page, with different parameters */

		rw_lock_x_unlock(latch);

		ut_free(folds);
		goto retry;
	}

	for (i = 0; i < n_cached; i++) {

		ha_remove_all_nodes_to_page(
			btr_search_sys->hash_tables[ahi_slot],
			folds[i], page);
	}

	info = btr_search_get_info(block->index);
	ut_a(info->ref_count > 0);
	info->ref_count--;

	block->index = NULL;

	MONITOR_INC(MONITOR_ADAPTIVE_HASH_PAGE_REMOVED);
	MONITOR_INC_VALUE(MONITOR_ADAPTIVE_HASH_ROW_REMOVED, n_cached);

cleanup:
	assert_block_ahi_valid(block);
	rw_lock_x_unlock(latch);

	ut_free(folds);
}

/** Drop any adaptive hash index entries that may point to an index
page that may be in the buffer pool, when a page is evicted from the
buffer pool or freed in a file segment.
@param[in] page_id page id @param[in] page_size page size */ void btr_search_drop_page_hash_when_freed( const page_id_t& page_id, const page_size_t& page_size) { buf_block_t* block; mtr_t mtr; dberr_t err = DB_SUCCESS; ut_d(export_vars.innodb_ahi_drop_lookups++); mtr_start(&mtr); /* If the caller has a latch on the page, then the caller must have a x-latch on the page and it must have already dropped the hash index for the page. Because of the x-latch that we are possibly holding, we cannot s-latch the page, but must (recursively) x-latch it, even though we are only reading. */ block = buf_page_get_gen(page_id, page_size, RW_X_LATCH, NULL, BUF_PEEK_IF_IN_POOL, __FILE__, __LINE__, &mtr, &err); if (block) { /* If AHI is still valid, page can't be in free state. AHI is dropped when page is freed. */ ut_ad(!block->page.file_page_was_freed); buf_block_dbg_add_level(block, SYNC_TREE_NODE_FROM_HASH); dict_index_t* index = block->index; if (index != NULL) { /* In all our callers, the table handle should be open, or we should be in the process of dropping the table (preventing eviction). */ ut_ad(index->table->n_ref_count > 0 || mutex_own(&dict_sys->mutex)); btr_search_drop_page_hash_index(block); } } mtr_commit(&mtr); } /** Build a hash index on a page with the given parameters. If the page already has a hash index with different parameters, the old hash index is removed. If index is non-NULL, this function checks if n_fields and n_bytes are sensible, and does not build a hash index if not. @param[in,out] index index for which to build. @param[in,out] block index page, s-/x- latched. 
@param[in]	n_fields	hash this many full fields
@param[in]	n_bytes		hash this many bytes of the next field
@param[in]	left_side	hash for searches from left side */
static
void
btr_search_build_page_hash_index(
	dict_index_t*	index,
	buf_block_t*	block,
	ulint		n_fields,
	ulint		n_bytes,
	ibool		left_side)
{
	hash_table_t*	table;
	page_t*		page;
	rec_t*		rec;
	rec_t*		next_rec;
	ulint		fold;
	ulint		next_fold;
	ulint		n_cached;
	ulint		n_recs;
	ulint*		folds;
	rec_t**		recs;
	ulint		i;
	mem_heap_t*	heap		= NULL;
	ulint		offsets_[REC_OFFS_NORMAL_SIZE];
	ulint*		offsets		= offsets_;

#ifdef MYSQL_INDEX_DISABLE_AHI
	if (index->disable_ahi) return;
#endif
	if (!btr_search_enabled) {
		return;
	}

	rec_offs_init(offsets_);
	ut_ad(index);
	ut_ad(block->page.id.space() == index->space);
	ut_a(!dict_index_is_ibuf(index));

	ut_ad(!rw_lock_own(btr_get_search_latch(index), RW_LOCK_X));
	ut_ad(rw_lock_own(&(block->lock), RW_LOCK_S)
	      || rw_lock_own(&(block->lock), RW_LOCK_X));

	btr_search_s_lock(index);

	table = btr_get_search_table(index);
	page = buf_block_get_frame(block);

	/* If the page is already hashed with different parameters, drop
	the old entries first. */
	if (block->index && ((block->curr_n_fields != n_fields)
			     || (block->curr_n_bytes != n_bytes)
			     || (block->curr_left_side != left_side))) {

		btr_search_s_unlock(index);

		btr_search_drop_page_hash_index(block);
	} else {
		btr_search_s_unlock(index);
	}

	/* Check that the values for hash index build are sensible */

	if (n_fields == 0 && n_bytes == 0) {

		return;
	}

	if (dict_index_get_n_unique_in_tree(index)
	    < btr_search_get_n_fields(n_fields, n_bytes)) {
		return;
	}

	n_recs = page_get_n_recs(page);

	if (n_recs == 0) {

		return;
	}

	/* Calculate and cache fold values and corresponding records into
	an array for fast insertion to the hash index */

	folds = (ulint*) ut_malloc_nokey(n_recs * sizeof(ulint));
	recs = (rec_t**) ut_malloc_nokey(n_recs * sizeof(rec_t*));

	n_cached = 0;

	ut_a(index->id == btr_page_get_index_id(page));

	rec = page_rec_get_next(page_get_infimum_rec(page));

	offsets = rec_get_offsets(
		rec, index, offsets,
		btr_search_get_n_fields(n_fields, n_bytes),
		&heap);
	ut_ad(page_rec_is_supremum(rec)
	      || n_fields + (n_bytes > 0) == rec_offs_n_fields(offsets));

	fold = rec_fold(rec, offsets, n_fields, n_bytes, index->id);

	if (left_side) {

		folds[n_cached] = fold;
		recs[n_cached] = rec;
		n_cached++;
	}

	/* Walk the page; for each run of equal folds, hash the first
	record if left_side, else the last one. */
	for (;;) {
		next_rec = page_rec_get_next(rec);

		if (page_rec_is_supremum(next_rec)) {

			if (!left_side) {

				folds[n_cached] = fold;
				recs[n_cached] = rec;
				n_cached++;
			}

			break;
		}

		offsets = rec_get_offsets(
			next_rec, index, offsets,
			btr_search_get_n_fields(n_fields, n_bytes), &heap);
		next_fold = rec_fold(next_rec, offsets, n_fields,
				     n_bytes, index->id);

		if (fold != next_fold) {
			/* Insert an entry into the hash index */

			if (left_side) {

				folds[n_cached] = next_fold;
				recs[n_cached] = next_rec;
				n_cached++;
			} else {
				folds[n_cached] = fold;
				recs[n_cached] = rec;
				n_cached++;
			}
		}

		rec = next_rec;
		fold = next_fold;
	}

	btr_search_check_free_space_in_heap(index);

	btr_search_x_lock(index);

	if (!btr_search_enabled) {
		goto exit_func;
	}

	/* Re-check under the exclusive latch: the parameters may have
	been changed concurrently while we scanned the page. */
	if (block->index && ((block->curr_n_fields != n_fields)
			     || (block->curr_n_bytes != n_bytes)
			     || (block->curr_left_side != left_side))) {
		goto exit_func;
	}

	/* This counter is decremented every time we drop page
	hash index entries and is incremented here. Since we can
	rebuild hash index for a page that is already hashed, we
	have to take care not to increment the counter in that
	case. */
	if (!block->index) {
		assert_block_ahi_empty(block);
		index->search_info->ref_count++;
	}

	block->n_hash_helps = 0;

	block->curr_n_fields = unsigned(n_fields);
	block->curr_n_bytes = unsigned(n_bytes);
	block->curr_left_side = unsigned(left_side);
	block->index = index;

	for (i = 0; i < n_cached; i++) {

		ha_insert_for_fold(table, folds[i], block, recs[i]);
	}

	MONITOR_INC(MONITOR_ADAPTIVE_HASH_PAGE_ADDED);
	MONITOR_INC_VALUE(MONITOR_ADAPTIVE_HASH_ROW_ADDED, n_cached);
exit_func:
	assert_block_ahi_valid(block);
	btr_search_x_unlock(index);

	ut_free(folds);
	ut_free(recs);
	if (UNIV_LIKELY_NULL(heap)) {
		mem_heap_free(heap);
	}
}

/** Moves or deletes hash entries for moved records. If new_page is already
hashed, then the hash index for page, if any, is dropped. If new_page is not
hashed, and page is hashed, then a new hash index is built to new_page with
the same parameters as page (this often happens when a page is split).
@param[in,out]	new_block	records are copied to this page.
@param[in,out]	block		index page from which record are copied, and
				the copied records will be deleted from this
				page.
@param[in,out] index record descriptor */ void btr_search_move_or_delete_hash_entries( buf_block_t* new_block, buf_block_t* block, dict_index_t* index) { #ifdef MYSQL_INDEX_DISABLE_AHI if (index->disable_ahi) return; #endif if (!btr_search_enabled) { return; } ut_ad(rw_lock_own(&(block->lock), RW_LOCK_X)); ut_ad(rw_lock_own(&(new_block->lock), RW_LOCK_X)); btr_search_s_lock(index); ut_a(!new_block->index || new_block->index == index); ut_a(!block->index || block->index == index); ut_a(!(new_block->index || block->index) || !dict_index_is_ibuf(index)); assert_block_ahi_valid(block); assert_block_ahi_valid(new_block); if (new_block->index) { btr_search_s_unlock(index); btr_search_drop_page_hash_index(block); return; } if (block->index) { ulint n_fields = block->curr_n_fields; ulint n_bytes = block->curr_n_bytes; ibool left_side = block->curr_left_side; new_block->n_fields = block->curr_n_fields; new_block->n_bytes = block->curr_n_bytes; new_block->left_side = left_side; btr_search_s_unlock(index); ut_a(n_fields > 0 || n_bytes > 0); btr_search_build_page_hash_index( index, new_block, n_fields, n_bytes, left_side); ut_ad(n_fields == block->curr_n_fields); ut_ad(n_bytes == block->curr_n_bytes); ut_ad(left_side == block->curr_left_side); return; } btr_search_s_unlock(index); } /** Updates the page hash index when a single record is deleted from a page. 
@param[in] cursor cursor which was positioned on the record to delete using btr_cur_search_, the record is not yet deleted.*/ void btr_search_update_hash_on_delete(btr_cur_t* cursor) { hash_table_t* table; buf_block_t* block; const rec_t* rec; ulint fold; dict_index_t* index; ulint offsets_[REC_OFFS_NORMAL_SIZE]; mem_heap_t* heap = NULL; rec_offs_init(offsets_); #ifdef MYSQL_INDEX_DISABLE_AHI if (cursor->index->disable_ahi) return; #endif if (!btr_search_enabled) { return; } block = btr_cur_get_block(cursor); ut_ad(rw_lock_own(&(block->lock), RW_LOCK_X)); assert_block_ahi_valid(block); index = block->index; if (!index) { return; } ut_ad(block->page.id.space() == index->space); ut_a(index == cursor->index); ut_a(block->curr_n_fields > 0 || block->curr_n_bytes > 0); ut_a(!dict_index_is_ibuf(index)); table = btr_get_search_table(index); rec = btr_cur_get_rec(cursor); fold = rec_fold(rec, rec_get_offsets(rec, index, offsets_, ULINT_UNDEFINED, &heap), block->curr_n_fields, block->curr_n_bytes, index->id); if (UNIV_LIKELY_NULL(heap)) { mem_heap_free(heap); } btr_search_x_lock(index); assert_block_ahi_valid(block); if (block->index) { ut_a(block->index == index); if (ha_search_and_delete_if_found(table, fold, rec)) { MONITOR_INC(MONITOR_ADAPTIVE_HASH_ROW_REMOVED); } else { MONITOR_INC( MONITOR_ADAPTIVE_HASH_ROW_REMOVE_NOT_FOUND); } assert_block_ahi_valid(block); } btr_search_x_unlock(index); } /** Updates the page hash index when a single record is inserted on a page. @param[in] cursor cursor which was positioned to the place to insert using btr_cur_search_, and the new record has been inserted next to the cursor. 
*/ void btr_search_update_hash_node_on_insert(btr_cur_t* cursor) { hash_table_t* table; buf_block_t* block; dict_index_t* index; rec_t* rec; #ifdef MYSQL_INDEX_DISABLE_AHI if (cursor->index->disable_ahi) return; #endif if (!btr_search_enabled) { return; } rec = btr_cur_get_rec(cursor); block = btr_cur_get_block(cursor); ut_ad(rw_lock_own(&(block->lock), RW_LOCK_X)); index = block->index; if (!index) { return; } ut_a(cursor->index == index); ut_a(!dict_index_is_ibuf(index)); btr_search_x_lock(index); if (!block->index) { goto func_exit; } ut_a(block->index == index); if ((cursor->flag == BTR_CUR_HASH) && (cursor->n_fields == block->curr_n_fields) && (cursor->n_bytes == block->curr_n_bytes) && !block->curr_left_side) { table = btr_get_search_table(index); if (ha_search_and_update_if_found( table, cursor->fold, rec, block, page_rec_get_next(rec))) { MONITOR_INC(MONITOR_ADAPTIVE_HASH_ROW_UPDATED); } func_exit: assert_block_ahi_valid(block); btr_search_x_unlock(index); } else { btr_search_x_unlock(index); btr_search_update_hash_on_insert(cursor); } } /** Updates the page hash index when a single record is inserted on a page. @param[in,out] cursor cursor which was positioned to the place to insert using btr_cur_search_..., and the new record has been inserted next to the cursor */ void btr_search_update_hash_on_insert(btr_cur_t* cursor) { hash_table_t* table; buf_block_t* block; dict_index_t* index; const rec_t* rec; const rec_t* ins_rec; const rec_t* next_rec; ulint fold; ulint ins_fold; ulint next_fold = 0; /* remove warning (??? bug ???) 
*/ ulint n_fields; ulint n_bytes; ibool left_side; ibool locked = FALSE; mem_heap_t* heap = NULL; ulint offsets_[REC_OFFS_NORMAL_SIZE]; ulint* offsets = offsets_; rec_offs_init(offsets_); #ifdef MYSQL_INDEX_DISABLE_AHI if (cursor->index->disable_ahi) return; #endif if (!btr_search_enabled) { return; } block = btr_cur_get_block(cursor); ut_ad(rw_lock_own(&(block->lock), RW_LOCK_X)); assert_block_ahi_valid(block); index = block->index; if (!index) { return; } ut_ad(block->page.id.space() == index->space); btr_search_check_free_space_in_heap(index); table = btr_get_search_table(index); rec = btr_cur_get_rec(cursor); #ifdef MYSQL_INDEX_DISABLE_AHI ut_a(!index->disable_ahi); #endif ut_a(index == cursor->index); ut_a(!dict_index_is_ibuf(index)); n_fields = block->curr_n_fields; n_bytes = block->curr_n_bytes; left_side = block->curr_left_side; ins_rec = page_rec_get_next_const(rec); next_rec = page_rec_get_next_const(ins_rec); offsets = rec_get_offsets(ins_rec, index, offsets, ULINT_UNDEFINED, &heap); ins_fold = rec_fold(ins_rec, offsets, n_fields, n_bytes, index->id); if (!page_rec_is_supremum(next_rec)) { offsets = rec_get_offsets( next_rec, index, offsets, btr_search_get_n_fields(n_fields, n_bytes), &heap); next_fold = rec_fold(next_rec, offsets, n_fields, n_bytes, index->id); } if (!page_rec_is_infimum(rec)) { offsets = rec_get_offsets( rec, index, offsets, btr_search_get_n_fields(n_fields, n_bytes), &heap); fold = rec_fold(rec, offsets, n_fields, n_bytes, index->id); } else { if (left_side) { btr_search_x_lock(index); locked = TRUE; if (!btr_search_enabled) { goto function_exit; } ha_insert_for_fold(table, ins_fold, block, ins_rec); } goto check_next_rec; } if (fold != ins_fold) { if (!locked) { btr_search_x_lock(index); locked = TRUE; if (!btr_search_enabled) { goto function_exit; } } if (!left_side) { ha_insert_for_fold(table, fold, block, rec); } else { ha_insert_for_fold(table, ins_fold, block, ins_rec); } } check_next_rec: if (page_rec_is_supremum(next_rec)) { 
if (!left_side) { if (!locked) { btr_search_x_lock(index); locked = TRUE; if (!btr_search_enabled) { goto function_exit; } } ha_insert_for_fold(table, ins_fold, block, ins_rec); } goto function_exit; } if (ins_fold != next_fold) { if (!locked) { btr_search_x_lock(index); locked = TRUE; if (!btr_search_enabled) { goto function_exit; } } if (!left_side) { ha_insert_for_fold(table, ins_fold, block, ins_rec); } else { ha_insert_for_fold(table, next_fold, block, next_rec); } } function_exit: if (UNIV_LIKELY_NULL(heap)) { mem_heap_free(heap); } if (locked) { btr_search_x_unlock(index); } } #if defined UNIV_AHI_DEBUG || defined UNIV_DEBUG /** Validates the search system for given hash table. @param[in] hash_table_id hash table to validate @return TRUE if ok */ static ibool btr_search_hash_table_validate(ulint hash_table_id) { ha_node_t* node; ibool ok = TRUE; ulint i; ulint cell_count; mem_heap_t* heap = NULL; ulint offsets_[REC_OFFS_NORMAL_SIZE]; ulint* offsets = offsets_; if (!btr_search_enabled) { return(TRUE); } /* How many cells to check before temporarily releasing search latches. */ ulint chunk_size = 10000; rec_offs_init(offsets_); btr_search_x_lock_all(); buf_pool_mutex_enter_all(); cell_count = hash_get_n_cells( btr_search_sys->hash_tables[hash_table_id]); for (i = 0; i < cell_count; i++) { /* We release search latches every once in a while to give other queries a chance to run. 
*/ if ((i != 0) && ((i % chunk_size) == 0)) { buf_pool_mutex_exit_all(); btr_search_x_unlock_all(); os_thread_yield(); btr_search_x_lock_all(); buf_pool_mutex_enter_all(); ulint curr_cell_count = hash_get_n_cells( btr_search_sys->hash_tables[hash_table_id]); if (cell_count != curr_cell_count) { cell_count = curr_cell_count; if (i >= cell_count) { break; } } } node = (ha_node_t*) hash_get_nth_cell( btr_search_sys->hash_tables[hash_table_id], i)->node; for (; node != NULL; node = node->next) { const buf_block_t* block = buf_block_from_ahi((byte*) node->data); const buf_block_t* hash_block; buf_pool_t* buf_pool; index_id_t page_index_id; buf_pool = buf_pool_from_bpage((buf_page_t*) block); if (UNIV_LIKELY(buf_block_get_state(block) == BUF_BLOCK_FILE_PAGE)) { /* The space and offset are only valid for file blocks. It is possible that the block is being freed (BUF_BLOCK_REMOVE_HASH, see the assertion and the comment below) */ hash_block = buf_block_hash_get( buf_pool, block->page.id); } else { hash_block = NULL; } if (hash_block) { ut_a(hash_block == block); } else { /* When a block is being freed, buf_LRU_search_and_free_block() first removes the block from buf_pool->page_hash by calling buf_LRU_block_remove_hashed_page(). After that, it invokes btr_search_drop_page_hash_index() to remove the block from btr_search_sys->hash_tables[i]. 
*/ ut_a(buf_block_get_state(block) == BUF_BLOCK_REMOVE_HASH); } ut_a(!dict_index_is_ibuf(block->index)); ut_ad(block->page.id.space() == block->index->space); page_index_id = btr_page_get_index_id(block->frame); offsets = rec_get_offsets( node->data, block->index, offsets, btr_search_get_n_fields(block->curr_n_fields, block->curr_n_bytes), &heap); const ulint fold = rec_fold( node->data, offsets, block->curr_n_fields, block->curr_n_bytes, page_index_id); if (node->fold != fold) { const page_t* page = block->frame; ok = FALSE; ib::error() << "Error in an adaptive hash" << " index pointer to page " << page_id_t(page_get_space_id(page), page_get_page_no(page)) << ", ptr mem address " << reinterpret_cast<const void*>( node->data) << ", index id " << page_index_id << ", node fold " << node->fold << ", rec fold " << fold; fputs("InnoDB: Record ", stderr); rec_print_new(stderr, node->data, offsets); fprintf(stderr, "\nInnoDB: on that page." " Page mem address %p, is hashed %p," " n fields %lu\n" "InnoDB: side %lu\n", (void*) page, (void*) block->index, (ulong) block->curr_n_fields, (ulong) block->curr_left_side); ut_ad(0); } } } for (i = 0; i < cell_count; i += chunk_size) { /* We release search latches every once in a while to give other queries a chance to run. */ if (i != 0) { buf_pool_mutex_exit_all(); btr_search_x_unlock_all(); os_thread_yield(); btr_search_x_lock_all(); buf_pool_mutex_enter_all(); ulint curr_cell_count = hash_get_n_cells( btr_search_sys->hash_tables[hash_table_id]); if (cell_count != curr_cell_count) { cell_count = curr_cell_count; if (i >= cell_count) { break; } } } ulint end_index = ut_min(i + chunk_size - 1, cell_count - 1); if (!ha_validate(btr_search_sys->hash_tables[hash_table_id], i, end_index)) { ok = FALSE; } } buf_pool_mutex_exit_all(); btr_search_x_unlock_all(); if (UNIV_LIKELY_NULL(heap)) { mem_heap_free(heap); } return(ok); } /** Validate the search system. @return true if ok. 
*/ bool btr_search_validate() { for (ulint i = 0; i < btr_ahi_parts; ++i) { if (!btr_search_hash_table_validate(i)) { return(false); } } return(true); } #endif /* defined UNIV_AHI_DEBUG || defined UNIV_DEBUG */ #endif /* BTR_CUR_HASH_ADAPT */
chidelmun/server
storage/innobase/btr/btr0sea.cc
C++
gpl-2.0
53,277
/*FreeMind - A Program for creating and viewing Mindmaps *Copyright (C) 2000-2006 Joerg Mueller, Daniel Polansky, Christian Foltin, Dimitri Polivaev and others. * *See COPYING for Details * *This program is free software; you can redistribute it and/or *modify it under the terms of the GNU General Public License *as published by the Free Software Foundation; either version 2 *of the License, or (at your option) any later version. * *This program is distributed in the hope that it will be useful, *but WITHOUT ANY WARRANTY; without even the implied warranty of *MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *GNU General Public License for more details. * *You should have received a copy of the GNU General Public License *along with this program; if not, write to the Free Software *Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ /* * Created on 08.05.2005 * */ package freemind.common; /** * Utility Class for displaying local object names in GUI components. * * @author Dimitri Polivaev * 18.01.2007 */ public class NamedObject{ private String name; private Object object; private NamedObject(){ } public NamedObject(Object object, String name) { this.object = object; this.name = name; } static public NamedObject literal(String literal){ NamedObject result = new NamedObject(); result.object = literal; result.name = literal; return result; } public boolean equals(Object o){ if (o instanceof NamedObject){ NamedObject ts = (NamedObject)o; return object.equals(ts.object); } return object.equals(o); } public String toString(){ return name; } public Object getObject(){ return object; } }
TheProjecter/sharedmind
freemind/common/NamedObject.java
Java
gpl-2.0
1,843
<?php /* * This file is part of Respect/Validation. * * (c) Alexandre Gomes Gaigalas <alexandre@gaigalas.net> * * For the full copyright and license information, please view the "LICENSE.md" * file that was distributed with this source code. */ namespace Respect\Validation\Rules; use Respect\Validation\TestCase; /** * @group rule * @covers Respect\Validation\Rules\NoneOf * @covers Respect\Validation\Exceptions\NoneOfException */ class NoneOfTest extends TestCase { public function testValid() { $valid1 = new Callback(function () { return false; }); $valid2 = new Callback(function () { return false; }); $valid3 = new Callback(function () { return false; }); $o = new NoneOf($valid1, $valid2, $valid3); $this->assertTrue($o->validate('any')); $this->assertTrue($o->assert('any')); $this->assertTrue($o->check('any')); } /** * @expectedException Respect\Validation\Exceptions\NoneOfException */ public function testInvalid() { $valid1 = new Callback(function () { return false; }); $valid2 = new Callback(function () { return false; }); $valid3 = new Callback(function () { return true; }); $o = new NoneOf($valid1, $valid2, $valid3); $this->assertFalse($o->validate('any')); $this->assertFalse($o->assert('any')); } }
PaymentHighway/woocommerce-gateway-paymenthighway
includes/vendor/respect/validation/tests/unit/Rules/NoneOfTest.php
PHP
gpl-2.0
1,502
<?php namespace Pantheon\Terminus\UnitTests\Models; use Pantheon\Terminus\Collections\Workflows; use Pantheon\Terminus\Models\Environment; use Pantheon\Terminus\Models\Domain; use Pantheon\Terminus\Models\Workflow; /** * Class DomainTest * Testing class for Pantheon\Terminus\Models\Domain * @package Pantheon\Terminus\UnitTests\Models */ class DomainTest extends ModelTestCase { public function setUp() { parent::setUp(); $this->domain = $this->_createDomain(['id' => 'dev.example.com']); } protected function _createDomain($attr) { $this->workflow = $this->getMockBuilder(Workflow::class) ->disableOriginalConstructor() ->getMock(); $this->workflows = $this->getMockBuilder(Workflows::class) ->disableOriginalConstructor() ->getMock(); $this->environment = $this->getMockBuilder(Environment::class) ->disableOriginalConstructor() ->getMock(); $this->environment->method('getWorkflows')->willReturn($this->workflows); $this->environment->site = (object)['id' => 'abc']; $this->environment->id = 'dev'; $domain = new Domain((object)$attr, ['collection' => (object)['environment' => $this->environment]]); $domain->setRequest($this->request); return $domain; } public function testDelete() { $this->request->expects($this->once()) ->method('request') ->with('sites/abc/environments/dev/hostnames/dev.example.com', ['method' => 'delete']); $this->domain->delete(); } public function testSerialize() { $data = [ 'dns_zone_name' => 'pantheonsite.io', 'environment' => 'live', 'site_id' => '1111-1111-1111-1111-1111', 'type' => 'platform', 'id' => 'live-mysite.pantheonsite.io', 'key' => 'live-mysite.pantheonsite.io', 'deletable' => false, ]; $domain = $this->_createDomain($data); $expected = [ 'domain' => 'live-mysite.pantheonsite.io', 'dns_zone_name' => 'pantheonsite.io', 'environment' => 'live', 'site_id' => '1111-1111-1111-1111-1111', 'key' => 'live-mysite.pantheonsite.io', 'deletable' => false, ]; $actual = $domain->serialize(); $this->assertEquals($expected, $actual); } }
rloos289/PDXwing
vendor/pantheon-systems/terminus/tests/unit_tests/Models/DomainTest.php
PHP
gpl-2.0
2,462
/* Special test file for Semantic Analyzer and complex C++ inheritance. */ //#include <iostream> #include "testsubclass.hh" void animal::moose::setFeet(int numfeet) //^1^ { if (numfeet > 4) { std::cerr << "Why would a moose have more than 4 feet?" << std::endl; return; } fFeet = numfeet; } int animal::moose::getFeet() //^2^ { return fFeet; } void animal::moose::doNothing() //^3^ { animal::moose foo(); fFeet = 3; } void deer::moose::setAntlers(bool have_antlers) //^4^ { fAntlers = have_antlers; } bool deer::moose::getAntlers() //^5^ // %1% ( ( "testsubclass.cpp" "testsubclass.hh" ) ( "deer::moose::getAntlers" "deer::moose::doSomething" "moose" ) ) { return fAntlers; } bool i_dont_have_symrefs() // %2% ( ("testsubclass.cpp" ) ("i_dont_have_symrefs")) { } void deer::moose::doSomething() //^6^ { // All these functions should be identified by semantic analyzer. getAntlers(); setAntlers(true); getFeet(); setFeet(true); doNothing(); fSomeField = true; fIsValid = true; } void deer::alces::setLatin(bool l) { fLatin = l; } bool deer::alces::getLatin() { return fLatin; } void deer::alces::doLatinStuff(moose moosein) { // All these functions should be identified by semantic analyzer. getFeet(); setFeet(true); getLatin(); setLatin(true); doNothing(); deer::moose foo(); } moose deer::alces::createMoose() { moose MooseVariableName; bool tmp; int itmp; bool fool; int fast; MooseVariableName = createMoose(); doLatinStuff(MooseVariableName); tmp = this.f// -1- // #1# ( "fAlcesBool" "fIsValid" "fLatin" ) ; itmp = this.f// -2- // #2# ( "fAlcesInt" "fGreek" "fIsProtectedInt" ) ; tmp = f// -3- // #3# ( "fAlcesBool" "fIsValid" "fLatin" "fool" ) ; itmp = f// -4- // #4# ( "fAlcesInt" "fGreek" "fIsProtectedInt" "fast" ) ; MooseVariableName = m// -5- // #5# ( "moose" ) return MooseVariableName; } /** Test Scope Changes * * This function is rigged to make sure the scope changes to account * for different locations in local variable parsing. 
*/ int someFunction(int mPickle) { moose mMoose = deer::alces::createMoose(); if (mPickle == 1) { int mOption1 = 2; m// -5- // #5# ( "mMoose" "mOption1" "mPickle" ) ; } else { int mOption2 = 2; m// -6- // #6# ( "mMoose" "mOption2" "mPickle" ) ; } } // Thanks Ming-Wei Chang for this next example. namespace pub_priv { class A{ private: void private_a(){} public: void public_a(); }; void A::public_a() { A other_a; other_a.p// -7- // #7# ( "private_a" "public_a" ) ; } int some_regular_function(){ A a; a.p// -8- // #8# ( "public_a" ) ; return 0; } } /** Test Scope w/in a function (non-method) with classes using * different levels of inheritance. */ int otherFunction() { sneaky::antelope Antelope(1); sneaky::jackalope Jackalope(1); sneaky::bugalope Bugalope(1); Antelope.// -9- // #9# ( "fAntyPublic" "fQuadPublic" "testAccess") ; Jackalope.// -10- // #10# ( "fBunnyPublic" "testAccess") ; Bugalope.// -11- // #11# ( "fBugPublic" "testAccess") ; } /** Test methods within each class for types of access to the baseclass. */ bool sneaky::antelope::testAccess() //^7^ { this.// -12- // #12# ( "fAntyPrivate" "fAntyProtected" "fAntyPublic" "fQuadProtected" "fQuadPublic" "testAccess" ) ; } bool sneaky::jackalope::testAccess() //^8^ { this.// -13- // #13# ( "fBunnyPrivate" "fBunnyProtected" "fBunnyPublic" "fQuadProtected" "fQuadPublic" "testAccess" ) ; } bool sneaky::bugalope::testAccess() //^9^ { this.// -14- // #14# ( "fBugPrivate" "fBugProtected" "fBugPublic" "fQuadPublic" "testAccess" ) ; }
saintkepha/hackenv-emacs
packages/cedet-1.0pre6/semantic/tests/testsubclass.cpp
C++
gpl-2.0
3,820
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('QuickBooking', '0010_auto_20150704_1942'), ] operations = [ migrations.AlterField( model_name='seat', name='seat_type', field=models.CharField(max_length=10, primary_key=True), ), ]
noorelden/QuickBooking
QuickBooking/migrations/0011_auto_20150704_2001.py
Python
gpl-2.0
426
/* * libjingle * Copyright 2004--2011 Google Inc. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "talk/app/webrtc/peerconnectionfactory.h" #include "talk/app/webrtc/audiotrack.h" #include "talk/app/webrtc/localaudiosource.h" #include "talk/app/webrtc/mediastream.h" #include "talk/app/webrtc/mediastreamproxy.h" #include "talk/app/webrtc/mediastreamtrackproxy.h" #include "talk/app/webrtc/peerconnection.h" #include "talk/app/webrtc/peerconnectionfactoryproxy.h" #include "talk/app/webrtc/peerconnectionproxy.h" #include "talk/app/webrtc/portallocatorfactory.h" #include "talk/app/webrtc/videosource.h" #include "talk/app/webrtc/videosourceproxy.h" #include "talk/app/webrtc/videotrack.h" #include "talk/media/webrtc/webrtcmediaengine.h" #include "talk/media/webrtc/webrtcvideodecoderfactory.h" #include "talk/media/webrtc/webrtcvideoencoderfactory.h" #include "webrtc/base/bind.h" #include "webrtc/modules/audio_device/include/audio_device.h" namespace webrtc { namespace { // Passes down the calls to |store_|. See usage in CreatePeerConnection. class DtlsIdentityStoreWrapper : public DtlsIdentityStoreInterface { public: DtlsIdentityStoreWrapper( const rtc::scoped_refptr<RefCountedDtlsIdentityStore>& store) : store_(store) { RTC_DCHECK(store_); } void RequestIdentity( rtc::KeyType key_type, const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>& observer) override { store_->RequestIdentity(key_type, observer); } private: rtc::scoped_refptr<RefCountedDtlsIdentityStore> store_; }; } // anonymous namespace rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory() { rtc::scoped_refptr<PeerConnectionFactory> pc_factory( new rtc::RefCountedObject<PeerConnectionFactory>()); // Call Initialize synchronously but make sure its executed on // |signaling_thread|. 
MethodCall0<PeerConnectionFactory, bool> call( pc_factory.get(), &PeerConnectionFactory::Initialize); bool result = call.Marshal(pc_factory->signaling_thread()); if (!result) { return NULL; } return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(), pc_factory); } rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory( rtc::Thread* worker_thread, rtc::Thread* signaling_thread, AudioDeviceModule* default_adm, cricket::WebRtcVideoEncoderFactory* encoder_factory, cricket::WebRtcVideoDecoderFactory* decoder_factory) { rtc::scoped_refptr<PeerConnectionFactory> pc_factory( new rtc::RefCountedObject<PeerConnectionFactory>(worker_thread, signaling_thread, default_adm, encoder_factory, decoder_factory)); // Call Initialize synchronously but make sure its executed on // |signaling_thread|. MethodCall0<PeerConnectionFactory, bool> call( pc_factory.get(), &PeerConnectionFactory::Initialize); bool result = call.Marshal(signaling_thread); if (!result) { return NULL; } return PeerConnectionFactoryProxy::Create(signaling_thread, pc_factory); } PeerConnectionFactory::PeerConnectionFactory() : owns_ptrs_(true), wraps_current_thread_(false), signaling_thread_(rtc::ThreadManager::Instance()->CurrentThread()), worker_thread_(new rtc::Thread) { if (!signaling_thread_) { signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread(); wraps_current_thread_ = true; } worker_thread_->Start(); } PeerConnectionFactory::PeerConnectionFactory( rtc::Thread* worker_thread, rtc::Thread* signaling_thread, AudioDeviceModule* default_adm, cricket::WebRtcVideoEncoderFactory* video_encoder_factory, cricket::WebRtcVideoDecoderFactory* video_decoder_factory) : owns_ptrs_(false), wraps_current_thread_(false), signaling_thread_(signaling_thread), worker_thread_(worker_thread), default_adm_(default_adm), video_encoder_factory_(video_encoder_factory), video_decoder_factory_(video_decoder_factory) { ASSERT(worker_thread != NULL); ASSERT(signaling_thread != 
NULL); // TODO: Currently there is no way creating an external adm in // libjingle source tree. So we can 't currently assert if this is NULL. // ASSERT(default_adm != NULL); } PeerConnectionFactory::~PeerConnectionFactory() { RTC_DCHECK(signaling_thread_->IsCurrent()); channel_manager_.reset(nullptr); default_allocator_factory_ = nullptr; // Make sure |worker_thread_| and |signaling_thread_| outlive // |dtls_identity_store_|. dtls_identity_store_ = nullptr; if (owns_ptrs_) { if (wraps_current_thread_) rtc::ThreadManager::Instance()->UnwrapCurrentThread(); delete worker_thread_; } } bool PeerConnectionFactory::Initialize() { RTC_DCHECK(signaling_thread_->IsCurrent()); rtc::InitRandom(rtc::Time()); default_allocator_factory_ = PortAllocatorFactory::Create(worker_thread_); if (!default_allocator_factory_) return false; // TODO: Need to make sure only one VoE is created inside // WebRtcMediaEngine. cricket::MediaEngineInterface* media_engine = worker_thread_->Invoke<cricket::MediaEngineInterface*>(rtc::Bind( &PeerConnectionFactory::CreateMediaEngine_w, this)); channel_manager_.reset( new cricket::ChannelManager(media_engine, worker_thread_)); channel_manager_->SetVideoRtxEnabled(true); if (!channel_manager_->Init()) { return false; } dtls_identity_store_ = new RefCountedDtlsIdentityStore( signaling_thread_, worker_thread_); return true; } rtc::scoped_refptr<AudioSourceInterface> PeerConnectionFactory::CreateAudioSource( const MediaConstraintsInterface* constraints) { RTC_DCHECK(signaling_thread_->IsCurrent()); rtc::scoped_refptr<LocalAudioSource> source( LocalAudioSource::Create(options_, constraints)); return source; } rtc::scoped_refptr<VideoSourceInterface> PeerConnectionFactory::CreateVideoSource( cricket::VideoCapturer* capturer, const MediaConstraintsInterface* constraints) { RTC_DCHECK(signaling_thread_->IsCurrent()); rtc::scoped_refptr<VideoSource> source( VideoSource::Create(channel_manager_.get(), capturer, constraints)); return 
VideoSourceProxy::Create(signaling_thread_, source); } bool PeerConnectionFactory::StartAecDump(rtc::PlatformFile file) { RTC_DCHECK(signaling_thread_->IsCurrent()); return channel_manager_->StartAecDump(file); } void PeerConnectionFactory::StopAecDump() { RTC_DCHECK(signaling_thread_->IsCurrent()); channel_manager_->StopAecDump(); } bool PeerConnectionFactory::StartRtcEventLog(rtc::PlatformFile file) { RTC_DCHECK(signaling_thread_->IsCurrent()); return channel_manager_->StartRtcEventLog(file); } void PeerConnectionFactory::StopRtcEventLog() { RTC_DCHECK(signaling_thread_->IsCurrent()); channel_manager_->StopRtcEventLog(); } rtc::scoped_refptr<PeerConnectionInterface> PeerConnectionFactory::CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, const MediaConstraintsInterface* constraints, PortAllocatorFactoryInterface* allocator_factory, rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store, PeerConnectionObserver* observer) { RTC_DCHECK(signaling_thread_->IsCurrent()); RTC_DCHECK(allocator_factory || default_allocator_factory_); if (!dtls_identity_store.get()) { // Because |pc|->Initialize takes ownership of the store we need a new // wrapper object that can be deleted without deleting the underlying // |dtls_identity_store_|, protecting it from being deleted multiple times. dtls_identity_store.reset( new DtlsIdentityStoreWrapper(dtls_identity_store_)); } PortAllocatorFactoryInterface* chosen_allocator_factory = allocator_factory ? 
allocator_factory : default_allocator_factory_.get(); chosen_allocator_factory->SetNetworkIgnoreMask(options_.network_ignore_mask); rtc::scoped_refptr<PeerConnection> pc( new rtc::RefCountedObject<PeerConnection>(this)); if (!pc->Initialize( configuration, constraints, chosen_allocator_factory, dtls_identity_store.Pass(), observer)) { return NULL; } return PeerConnectionProxy::Create(signaling_thread(), pc); } rtc::scoped_refptr<MediaStreamInterface> PeerConnectionFactory::CreateLocalMediaStream(const std::string& label) { RTC_DCHECK(signaling_thread_->IsCurrent()); return MediaStreamProxy::Create(signaling_thread_, MediaStream::Create(label)); } rtc::scoped_refptr<VideoTrackInterface> PeerConnectionFactory::CreateVideoTrack( const std::string& id, VideoSourceInterface* source) { RTC_DCHECK(signaling_thread_->IsCurrent()); rtc::scoped_refptr<VideoTrackInterface> track( VideoTrack::Create(id, source)); return VideoTrackProxy::Create(signaling_thread_, track); } rtc::scoped_refptr<AudioTrackInterface> PeerConnectionFactory::CreateAudioTrack(const std::string& id, AudioSourceInterface* source) { RTC_DCHECK(signaling_thread_->IsCurrent()); rtc::scoped_refptr<AudioTrackInterface> track( AudioTrack::Create(id, source)); return AudioTrackProxy::Create(signaling_thread_, track); } webrtc::MediaControllerInterface* PeerConnectionFactory::CreateMediaController() const { RTC_DCHECK(signaling_thread_->IsCurrent()); return MediaControllerInterface::Create(worker_thread_, channel_manager_.get()); } rtc::Thread* PeerConnectionFactory::signaling_thread() { // This method can be called on a different thread when the factory is // created in CreatePeerConnectionFactory(). 
return signaling_thread_; } rtc::Thread* PeerConnectionFactory::worker_thread() { RTC_DCHECK(signaling_thread_->IsCurrent()); return worker_thread_; } cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() { ASSERT(worker_thread_ == rtc::Thread::Current()); return cricket::WebRtcMediaEngineFactory::Create( default_adm_.get(), video_encoder_factory_.get(), video_decoder_factory_.get()); } } // namespace webrtc
raj-bhatia/grooveip-ios-public
submodules/mswebrtc/webrtc/talk/app/webrtc/peerconnectionfactory.cc
C++
gpl-2.0
11,762
using UnityEngine; using System.Collections; public class RUISOculusFollow : MonoBehaviour { RUISCoordinateSystem coordinateSystem; void Start() { coordinateSystem = MonoBehaviour.FindObjectOfType(typeof(RUISCoordinateSystem)) as RUISCoordinateSystem; } void Update () { if(RUISOVRManager.ovrHmd != null) { Vector3 tempSample = Vector3.zero; Ovr.Posef headpose = RUISOVRManager.ovrHmd.GetTrackingState().HeadPose.ThePose; float px = headpose.Position.x; float py = headpose.Position.y; float pz = -headpose.Position.z; // This needs to be negated TODO: might change with future OVR version tempSample = new Vector3(px, py, pz); tempSample = coordinateSystem.ConvertRawOculusDK2Location(tempSample); Vector3 convertedLocation = coordinateSystem.ConvertLocation(tempSample, RUISDevice.Oculus_DK2); this.transform.localPosition = convertedLocation; if(OVRManager.capiHmd != null) { try { this.transform.localRotation = OVRManager.capiHmd.GetTrackingState().HeadPose.ThePose.Orientation.ToQuaternion(); } catch(System.Exception e) { Debug.LogError(e.Message); } } } } }
znjRoLS/RUISHarryPotter
HarryPoter/RUISunity/Assets/RUIS/Scripts/Input/Calibration/RUISOculusFollow.cs
C#
gpl-2.0
1,178
var lvl1 = (function () { var xPartition = 320; var preload = function () { // tilemap this.xPartition = xPartition; game.load.tilemap('map', 'assets/map.json', null, Phaser.Tilemap.TILED_JSON); game.load.image('floor', 'assets/floor.png'); game.load.image('tileset', 'assets/tileset.png'); game.load.image('wall', 'assets/wall.png'); // furniture game.load.spritesheet('door', 'assets/door.png', 48, 80); game.load.spritesheet('phone', 'assets/phone.png', 32, 48); game.load.spritesheet('atm', 'assets/atm.png', 48, 80); // ppj game.load.spritesheet('cracker', 'assets/cracker.png', 48, 96); game.load.spritesheet('sysadmin', 'assets/sysadmin.png', 48, 96); game.load.spritesheet('secre0', 'assets/ingenuous.png', 48, 96); game.load.spritesheet('secre1', 'assets/ingenuous2.png', 48, 96); game.load.image('pear', 'assets/pear.png'); // removing blury images game.stage.smoothed = false; }; var create = function () { // Background color. game.stage.backgroundColor = '#eee'; // Physics. game.physics.startSystem(Phaser.Physics.ARCADE); // Sprites creation this.tilemap = map(this, xPartition); this.cracker = cracker(this); this.cursor = cursor(); // this is a horrible patch: do not remove it, unless // you wanna fix cracker's overlapDoor conflict this.cracker.cursor = this.cursor; // creating doors this.doors = this.tilemap.parseDoors(); // creating phones this.phones = parsePhones(this, this.tilemap, this.tilemap.phone); // creating sysadmins this.sysadmins = parseSysadmins(this, this.tilemap, this.tilemap.sysadmin, this.phones); // creating atms this.atms = parseAtms(this, this.tilemap, this.tilemap.atm); // scoreboard this.scoreboard = scoreboard(this.phones); // creating secres this.secres = parseSecres(this, this.tilemap, this.tilemap.secre, this.phones, this.scoreboard); this.spawner = new spawner(this); // bringing to top things (below this line) this.cracker.bringToTop(); this.sysadmins.forEach( function (sysadmin) { sysadmin.bringToTop(); }); this.secres.forEach( function 
(secre) { secre.bringToTop(); }); }; var update = function () { this.spawner.update(); // sysadmin fixes the atm's game.physics.arcade.overlap(this.atms, this.sysadmins, function (atm, sysadmin) { atm.animations.play('ok'); }); this.secres.lookPhone(); }; // check the cracker.js file! the overlapDoor function ;) return { create : create, preload : preload, update : update }; })(); var game = new Phaser.Game(800, 640, Phaser.AUTO, 'game'); game.state.add('lvl1', lvl1); game.state.start('lvl1'); // Global variables window.firstAtmCracked = false; window.firstPhishing = false;
mehhhh/TheLammerGame
game.js
JavaScript
gpl-2.0
3,254
/*---------------------------------------------------------------------------*\ ## #### ###### | ## ## ## | Copyright: ICE Stroemungsfoschungs GmbH ## ## #### | ## ## ## | http://www.ice-sf.at ## #### ###### | ------------------------------------------------------------------------------- ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | \\ / A nd | Copyright (C) 1991-2008 OpenCFD Ltd. \\/ M anipulation | ------------------------------------------------------------------------------- License This file is based on OpenFOAM. OpenFOAM is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. OpenFOAM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenFOAM; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Class Description Define types for searchableSurfaces that changed SourceFiles Contributors/Copyright: 2014 Bernhard F.W. Gschaider <bgschaid@ice-sf.at> SWAK Revision: $Id$ \*---------------------------------------------------------------------------*/ #ifndef SwakSurfaceTypesMacroHeader_H #define SwakSurfaceTypesMacroHeader_H #include "swak.H" #ifdef FOAM_VOLUMETYPE_IS_TYPE #define INSIDE volumeType::INSIDE #define OUTSIDE volumeType::OUTSIDE #define UNKNOWN volumeType::UNKNOWN #define MIXED volumeType::MIXED #endif #if FOAM_VERSION4SWAK > VERSION_NR2(2,2) && !defined(FOAM_DEV) #define FOAM_SEARCHABLE_SURF_NEEDS_BOUNDING_SPHERES #endif #endif // ************************************************************************* //
aliozel/swak4Foam
Libraries/simpleSearchableSurfaces/include/swakSurfaceTypes.H
C++
gpl-2.0
2,158
<?php /* * This file is part of EC-CUBE * * Copyright(c) 2000-2011 LOCKON CO.,LTD. All Rights Reserved. * * http://www.lockon.co.jp/ * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // {{{ requires require_once CLASS_EX_REALDIR . 'page_extends/LC_Page_Ex.php'; /** * 会員登録完了のページクラス. * * @package Page * @author LOCKON CO.,LTD. * @version $Id:LC_Page_Regist_Complete.php 15532 2007-08-31 14:39:46Z nanasess $ */ class LC_Page_Regist_Complete extends LC_Page_Ex { // }}} // {{{ functions /** * Page を初期化する. * * @return void */ function init() { parent::init(); $this->tpl_title = '会員登録(完了ページ)'; $this->tpl_conv_page = AFF_ENTRY_COMPLETE; } /** * Page のプロセス. * * @return void */ function process() { parent::process(); $this->action(); $this->sendResponse(); } /** * Page のAction. * * @return void */ function action() { } /** * デストラクタ. * * @return void */ function destroy() { parent::destroy(); } } ?>
sin3fu3/inkjet-conveni
data/class/pages/regist/LC_Page_Regist_Complete.php
PHP
gpl-2.0
1,863
<?php class Auth extends Eloquent{ protected $table = 'auth'; }
mhndev/shopping
app/models/Auth.php
PHP
gpl-2.0
70
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2009-2012 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <license@opennms.org> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.netmgt.provision.adapters.link; import org.opennms.core.utils.BeanUtils; import org.opennms.netmgt.provision.adapters.link.config.dao.DefaultLinkAdapterConfigurationDao; import org.opennms.netmgt.provision.adapters.link.config.linkadapter.LinkPattern; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * <p>DefaultLinkMatchResolverImpl class.</p> * * @author ranger * @version $Id: $ */ public class DefaultLinkMatchResolverImpl implements LinkMatchResolver, InitializingBean { @Autowired private DefaultLinkAdapterConfigurationDao m_configDao; @Override public void afterPropertiesSet() throws Exception { BeanUtils.assertAutowiring(this); } /** {@inheritDoc} */ @Override public String getAssociatedEndPoint(String endPoint) { if (m_configDao != null) { for 
(LinkPattern p : m_configDao.getPatterns()) { String endPointResolvedTemplate = p.resolveTemplate(endPoint); if (endPointResolvedTemplate != null) { return endPointResolvedTemplate; } } } return null; } }
rfdrake/opennms
integrations/opennms-link-provisioning-adapter/src/main/java/org/opennms/netmgt/provision/adapters/link/DefaultLinkMatchResolverImpl.java
Java
gpl-2.0
2,417
/* QueryJ Copyright (C) 2002-today Jose San Leandro Armendariz chous@acm-sl.org This library is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Thanks to ACM S.L. for distributing this library under the GPL license. Contact info: jose.sanleandro@acm-sl.com ****************************************************************************** * * Filename: DataAccessManagerTemplateGenerator.java * * Author: Jose San Leandro Armendariz * * Description: Is able to generate DataAccessManager implementations * according to database metadata. * */ package org.acmsl.queryj.templates.dao; /* * Importing some project-specific classes. */ import org.acmsl.queryj.api.AbstractTemplateGenerator; import org.acmsl.queryj.api.PerRepositoryTemplateGenerator; /* * Importing checkthread.org annotations. */ import org.checkthread.annotations.ThreadSafe; /** * Is able to generate DataAccessManager implementations according * to database metadata. * @author <a href="mailto:chous@acm-sl.org">Jose San Leandro Armendariz</a> */ @ThreadSafe public class DataAccessManagerTemplateGenerator extends AbstractTemplateGenerator<DataAccessManagerTemplate> implements PerRepositoryTemplateGenerator<DataAccessManagerTemplate> { /** * Creates a new {@link DataAccessContextLocalTemplateGenerator} with given settings. * @param caching whether to enable caching. * @param threadCount the number of threads to use. 
*/ public DataAccessManagerTemplateGenerator(final boolean caching, final int threadCount) { super(caching, threadCount); } }
rydnr/queryj-rt
queryj-templates-deprecated/src/main/java/org/acmsl/queryj/templates/dao/DataAccessManagerTemplateGenerator.java
Java
gpl-2.0
2,309
// K-3D // Copyright (c) 1995-2009, Timothy M. Shead // // Contact: tshead@k-3d.com // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public // License as published by the Free Software Foundation; either // version 2 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // // You should have received a copy of the GNU General Public // License along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #include <k3dsdk/log_control.h> #include <k3dsdk/xml.h> #include <k3dsdk/xpath.h> using namespace k3d::xml; #include <iostream> #include <stdexcept> #include <sstream> #define test_expression(expression) \ { \ if(!(expression)) \ { \ std::ostringstream buffer; \ buffer << "Expression failed at line " << __LINE__ << ": " << #expression; \ throw std::runtime_error(buffer.str()); \ } \ } int main(int argc, char* argv[]) { k3d::log_color_level(true); k3d::log_show_level(true); k3d::log_minimum_level(k3d::K3D_LOG_LEVEL_DEBUG); try { element document("k3d", element("nodes", element("node", attribute("class", "foo") ), element("node", attribute("factory", "bar"), element("properties", element("property", attribute("user_property", ""), attribute("type", "double") ) ) ) ), element("dependencies" ) ); xpath::result_set results; results = xpath::match(document, ""); test_expression(results.size() == 0); results = xpath::match(document, "/"); test_expression(results.size() == 0); results = xpath::match(document, "/foo"); test_expression(results.size() == 0); results = xpath::match(document, "/k3d"); test_expression(results.size() == 1); test_expression(results[0]->name == "k3d"); results = 
xpath::match(document, "/k3d/*"); test_expression(results.size() == 2); test_expression(results[0]->name == "nodes"); test_expression(results[1]->name == "dependencies"); results = xpath::match(document, "/k3d/nodes"); test_expression(results.size() == 1); test_expression(results[0]->name == "nodes"); results = xpath::match(document, "/k3d/nodes/node"); test_expression(results.size() == 2); test_expression(results[0]->name == "node"); test_expression(results[1]->name == "node"); results = xpath::match(document, "/k3d/nodes/node[@class]"); test_expression(results.size() == 1); test_expression(find_attribute(*results[0], "class")); test_expression(!find_attribute(*results[0], "factory")); results = xpath::match(document, "/k3d/nodes/node/properties/property[@user_property][@type='double']"); test_expression(results.size() == 1); results = xpath::match(document, "nodes"); test_expression(results.size() == 1); test_expression(results[0]->name == "nodes"); results = xpath::match(document, "nodes/node"); test_expression(results.size() == 2); test_expression(results[0]->name == "node"); test_expression(results[1]->name == "node"); } catch(std::exception& e) { std::cerr << e.what() << std::endl; return 1; } return 0; }
barche/k3d
tests/sdk/xml_xpath.cpp
C++
gpl-2.0
3,438
/* * RapidMiner * * Copyright (C) 2001-2008 by Rapid-I and the contributors * * Complete list of developers available at our web site: * * http://rapid-i.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see http://www.gnu.org/licenses/. */ package com.rapidminer.operator.performance; import com.rapidminer.example.Attribute; import com.rapidminer.example.Example; import com.rapidminer.example.ExampleSet; import com.rapidminer.operator.OperatorException; import com.rapidminer.tools.math.Averagable; /** * Returns the average value of the prediction. This criterion can be used to * detect whether a learning scheme predicts nonsense, e.g. always make the same * error. This criterion is not suitable for evaluating the performance and * should never be used as main criterion. The {@link #getFitness()} method * always returns 0. 
* * @author Ingo Mierswa * @version $Id: PredictionAverage.java,v 2.18 2006/03/21 15:35:51 ingomierswa * Exp $ */ public class PredictionAverage extends MeasuredPerformance { private static final long serialVersionUID = -5316112625406102611L; private double sum; private double squaredSum; private double count; private Attribute labelAttribute; private Attribute weightAttribute; public PredictionAverage() { } public PredictionAverage(PredictionAverage pa) { super(pa); this.sum = pa.sum; this.squaredSum = pa.squaredSum; this.count = pa.count; this.labelAttribute = (Attribute)pa.labelAttribute.clone(); if (pa.weightAttribute != null) this.weightAttribute = (Attribute)pa.weightAttribute.clone(); } public double getExampleCount() { return count; } public void countExample(Example example) { double weight = 1.0d; if (weightAttribute != null) weight = example.getValue(weightAttribute); count += weight; double v = example.getLabel(); if (!Double.isNaN(v)) { sum += v * weight; squaredSum += v * v * weight * weight; } } public double getMikroAverage() { return sum / count; } public double getMikroVariance() { double avg = getMikroAverage(); return (squaredSum / count) - avg * avg; } public void startCounting(ExampleSet set, boolean useExampleWeights) throws OperatorException { super.startCounting(set, useExampleWeights); count = 0; sum = 0.0; this.labelAttribute = set.getAttributes().getLabel(); if (useExampleWeights) this.weightAttribute = set.getAttributes().getWeight(); } public String getName() { return "prediction_average"; } /** Returns 0. */ public double getFitness() { return 0.0; } public void buildSingleAverage(Averagable performance) { PredictionAverage other = (PredictionAverage) performance; this.sum += other.sum; this.squaredSum += other.squaredSum; this.count += other.count; } public String getDescription() { return "This is not a real performance measure, but merely the average of the predicted labels."; } }
ntj/ComplexRapidMiner
src/com/rapidminer/operator/performance/PredictionAverage.java
Java
gpl-2.0
3,662
<?php /** * @package Joomla.Site * @subpackage Layout * * @copyright Copyright (C) 2005 - 2014 Open Source Matters, Inc. All rights reserved. * @license GNU General Public License version 2 or later; see LICENSE.txt */ defined('_JEXEC') or die; // Create a shortcut for params. $params = $displayData->params; $canEdit = $displayData->params->get('access-edit'); JHtml::addIncludePath(JPATH_COMPONENT.'/helpers/html'); ?> <?php if ($params->get('show_title') || $displayData->state == 0 || ($params->get('show_author') && !empty($displayData->author ))) : ?> <div class="page-header"> <?php if ($params->get('show_title')) : ?> <h2 > <?php if ($params->get('link_titles') && $params->get('access-view')) : ?> <a href="<?php echo JRoute::_(ContentHelperRoute::getArticleRoute($displayData->slug, $displayData->catid)); ?>" > <?php echo $this->escape($displayData->title); ?></a> <?php else : ?> <?php echo $this->escape($displayData->title); ?> <?php endif; ?> </h2> <?php endif; ?> <?php if ($displayData->state == 0) : ?> <span class="label label-warning"><?php echo JText::_('JUNPUBLISHED'); ?></span> <?php endif; ?> <?php if (strtotime($displayData->publish_up) > strtotime(JFactory::getDate())) : ?> <span class="label label-warning"><?php echo JText::_('JNOTPUBLISHEDYET'); ?></span> <?php endif; ?> <?php if ((strtotime($displayData->publish_down) < strtotime(JFactory::getDate())) && $displayData->publish_down != '0000-00-00 00:00:00') : ?> <span class="label label-warning"><?php echo JText::_('JEXPIRED'); ?></span> <?php endif; ?> </div> <?php endif; ?>
site4com/j-gov-3-it
layouts/joomla/content/blog_style_default_item_title.php
PHP
gpl-2.0
1,670
// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See LICENSE in the project root for license information. // using HUX.Buttons; using System; using System.Collections.Generic; using System.Reflection; using UnityEditor; using UnityEngine; namespace HUX { public class HUXEditorUtils { public readonly static Color DefaultColor = new Color(1f, 1f, 1f); public readonly static Color DisabledColor = new Color(0.6f, 0.6f, 0.6f); public readonly static Color BorderedColor = new Color(0.8f, 0.8f, 0.8f); public readonly static Color WarningColor = new Color(1f, 0.85f, 0.6f); public readonly static Color ErrorColor = new Color(1f, 0.55f, 0.5f); public readonly static Color SuccessColor = new Color(0.8f, 1f, 0.75f); public readonly static Color ObjectColor = new Color(0.85f, 0.9f, 1f); public readonly static Color HelpBoxColor = new Color(0.22f, 0.23f, 0.24f, 0.45f); public readonly static Color SectionColor = new Color(0.42f, 0.43f, 0.47f, 0.25f); public readonly static Color DarkColor = new Color(0.1f, 0.1f, 0.1f); public readonly static Color ObjectColorEmpty = new Color(0.75f, 0.8f, 0.9f); /// <summary> /// Draws a field for scriptable object profiles /// If base class T is abstract, includes a button for creating a profile of each type that inherits from base class T /// Otherwise just includes button for creating a profile of type T /// </summary> /// <typeparam name="T"></typeparam> /// <param name="profile"></param> /// <returns></returns> public static T DrawProfileField<T>(T profile) where T : ButtonProfile { Color prevColor = GUI.color; GUI.color = Color.Lerp(Color.white, Color.gray, 0.5f); EditorGUILayout.BeginVertical(EditorStyles.helpBox); GUI.color = Color.Lerp(Color.white, Color.gray, 0.25f); EditorGUILayout.LabelField("Select a " + typeof(T).Name + " or create a new profile", EditorStyles.miniBoldLabel); T newProfile = profile; EditorGUILayout.BeginHorizontal(); newProfile = 
(T)EditorGUILayout.ObjectField(profile, typeof(T), false); // is this an abstract class? if (typeof(T).IsAbstract) { EditorGUILayout.BeginVertical(); List<Type> types = GetDerivedTypes(typeof(T), Assembly.GetAssembly(typeof(T))); foreach (Type profileType in types) { if (GUILayout.Button("Create " + profileType.Name)) { profile = CreateProfile<T>(profileType); } } EditorGUILayout.EndVertical(); } else { if (GUILayout.Button("Create Profile")) { profile = CreateProfile<T>(); } } EditorGUILayout.EndHorizontal(); EditorGUILayout.EndVertical(); if (profile == null) { ErrorMessage("You must choose a button profile.", null); } GUI.color = prevColor; return newProfile; } public static T CreateProfile<T>(Type profileType) where T : ButtonProfile { T asset = (T)ScriptableObject.CreateInstance(profileType); if (asset != null) { AssetDatabase.CreateAsset(asset, "Assets/New" + profileType.Name + ".asset"); AssetDatabase.SaveAssets(); } else { Debug.LogError("Couldn't create profile of type " + profileType.Name); } return asset; } public static T CreateProfile<T>() where T : ButtonProfile { T asset = ScriptableObject.CreateInstance<T>(); AssetDatabase.CreateAsset(asset, "Assets/New" + typeof(T).Name + ".asset"); AssetDatabase.SaveAssets(); return asset; } public static void DrawFilterTagField(SerializedObject serializedObject, string propertyName) { SerializedProperty p = serializedObject.FindProperty(propertyName); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(p); if (EditorGUI.EndChangeCheck()) serializedObject.ApplyModifiedProperties(); } public static void DrawProfileInspector(ButtonProfile profile, Component targetComponent) { ProfileInspector profileEditor = (ProfileInspector)Editor.CreateEditor(profile); profileEditor.targetComponent = targetComponent; profileEditor.OnInspectorGUI(); } public static T DropDownComponentField<T>(string label, T obj, Transform transform, bool showComponentName = false) where T : UnityEngine.Component { T[] optionObjects = 
transform.GetComponentsInChildren<T>(true); int selectedIndex = 0; string[] options = new string[optionObjects.Length + 1]; options[0] = "(None)"; for (int i = 0; i < optionObjects.Length; i++) { if (showComponentName) { options[i + 1] = optionObjects[i].GetType().Name + " (" + optionObjects[i].name + ")"; } else { options[i + 1] = optionObjects[i].name; } if (obj == optionObjects[i]) { selectedIndex = i + 1; } } EditorGUILayout.BeginHorizontal(); int newIndex = EditorGUILayout.Popup(label, selectedIndex, options); if (newIndex == 0) { // Zero means '(None)' obj = null; } else { obj = optionObjects[newIndex - 1]; } //draw the object field so people can click it obj = (T)EditorGUILayout.ObjectField(obj, typeof(T), true); EditorGUILayout.EndHorizontal(); return obj; } /// <summary> /// Draws enum values as a set of toggle fields /// </summary> /// <typeparam name="T"></typeparam> /// <param name="label"></param> /// <param name="enumObj"></param> /// <returns></returns> public static int EnumCheckboxField<T>(string label, T enumObj) where T : struct, IConvertible { if (!typeof(T).IsEnum) { throw new ArgumentException("T must be an enum."); } return EnumCheckboxField<T>(label, enumObj, string.Empty, (T)Activator.CreateInstance(typeof(T))); } public static T SceneObjectField<T>(string label, T sceneObject) where T : Component { EditorGUILayout.BeginHorizontal(); if (string.IsNullOrEmpty(label)) { sceneObject = (T)EditorGUILayout.ObjectField(sceneObject, typeof(T), true); } else { sceneObject = (T)EditorGUILayout.ObjectField(label, sceneObject, typeof(T), true); } if (sceneObject != null && sceneObject.gameObject.scene.name == null) { // Don't allow objects that aren't in the scene! 
sceneObject = null; } T[] objectsInScene = GameObject.FindObjectsOfType<T>(); int selectedIndex = 0; string[] displayedOptions = new string[objectsInScene.Length + 1]; displayedOptions[0] = "(None)"; for (int i = 0; i < objectsInScene.Length; i++) { displayedOptions[i + 1] = objectsInScene[i].name; if (objectsInScene[i] == sceneObject) { selectedIndex = i + 1; } } selectedIndex = EditorGUILayout.Popup(selectedIndex, displayedOptions); if (selectedIndex == 0) { sceneObject = null; } else { sceneObject = objectsInScene[selectedIndex - 1]; } EditorGUILayout.EndHorizontal(); return sceneObject; } /// <summary> /// Draws enum values as a set of toggle fields /// Also draws a button the user can click to set to a 'default' value /// </summary> /// <typeparam name="T"></typeparam> /// <param name="label"></param> /// <param name="enumObj"></param> /// <param name="defaultName"></param> /// <param name="defaultVal"></param> /// <returns></returns> public static int EnumCheckboxField<T>(string label, T enumObj, string defaultName, T defaultVal, bool ignoreNone = true, bool ignoreAll = true) where T : struct, IConvertible { if (!typeof(T).IsEnum) { throw new ArgumentException("T must be an enum."); } // Convert enum value to an int64 so we can treat it as a flag set int enumFlags = Convert.ToInt32(enumObj); EditorGUILayout.BeginVertical(EditorStyles.helpBox); if (!string.IsNullOrEmpty(label)) { EditorGUILayout.LabelField(label, EditorStyles.miniLabel); DrawDivider(); } System.Array enumVals = Enum.GetValues(typeof(T)); int lastvalue = Convert.ToInt32((T)enumVals.GetValue(enumVals.GetLength(0) - 1)); foreach (T enumVal in enumVals) { int flagVal = Convert.ToInt32(enumVal); if (ignoreNone && flagVal == 0 && enumVal.ToString().ToLower() == "none") { continue; } if (ignoreAll && flagVal == lastvalue && enumVal.ToString().ToLower() == "all") { continue; } bool selected = (flagVal & enumFlags) != 0; selected = EditorGUILayout.Toggle(enumVal.ToString(), selected); // If it's 
selected add it to the enumObj, otherwise remove it if (selected) { enumFlags |= flagVal; } else { enumFlags &= ~flagVal; } } if (!string.IsNullOrEmpty(defaultName)) { if (GUILayout.Button(defaultName, EditorStyles.miniButton)) { enumFlags = Convert.ToInt32(defaultVal); } } EditorGUILayout.EndVertical(); return enumFlags; } public static int EnumCheckboxField<T>(string label, T enumObj, string defaultName, T defaultVal, T valOnZero, bool ignoreNone = true, bool ignoreAll = true) where T : struct, IConvertible { if (!typeof(T).IsEnum) { throw new ArgumentException("T must be an enum."); } // Convert enum value to an int64 so we can treat it as a flag set int enumFlags = Convert.ToInt32(enumObj); EditorGUILayout.BeginVertical(EditorStyles.helpBox); EditorGUILayout.LabelField(label, EditorStyles.miniLabel); DrawDivider(); System.Array enumVals = Enum.GetValues(typeof(T)); int lastvalue = Convert.ToInt32((T)enumVals.GetValue(enumVals.GetLength(0) - 1)); foreach (T enumVal in enumVals) { int flagVal = Convert.ToInt32(enumVal); if (ignoreNone && flagVal == 0 && enumVal.ToString().ToLower() == "none") { continue; } if (ignoreAll && flagVal == lastvalue && enumVal.ToString().ToLower() == "all") { continue; } bool selected = (flagVal & enumFlags) != 0; selected = EditorGUILayout.Toggle(enumVal.ToString(), selected); // If it's selected add it to the enumObj, otherwise remove it if (selected) { enumFlags |= flagVal; } else { enumFlags &= ~flagVal; } } if (!string.IsNullOrEmpty(defaultName)) { if (GUILayout.Button(defaultName, EditorStyles.miniButton)) { enumFlags = Convert.ToInt32(defaultVal); } } EditorGUILayout.EndVertical(); if (enumFlags == 0) { enumFlags = Convert.ToInt32(valOnZero); } return enumFlags; } public static string MaterialPropertyName(string property, Material mat, ShaderUtil.ShaderPropertyType type, bool allowNone = true, string defaultProperty = "_Color", string labelName = null) { Color tColor = GUI.color; // Create a list of available color and value 
// ...properties  (tail of a comment begun in the preceding chunk; what follows is
// the remainder of the shader-property picker helper, whose signature — taking
// `property`, `mat`, `type`, `allowNone`, `defaultProperty`, `labelName` — also
// precedes this chunk)
List<string> props = new List<string>();
int selectedPropIndex = 0;
if (allowNone)
{
    // Sentinel entry at index 0 so the user can explicitly select "no property".
    props.Add("(None)");
}
if (mat != null)
{
    // Gather every property of the requested shader-property type, remembering
    // the popup index of the currently selected property name.
    int propertyCount = ShaderUtil.GetPropertyCount(mat.shader);
    string propName = string.Empty;
    for (int i = 0; i < propertyCount; i++)
    {
        if (ShaderUtil.GetPropertyType(mat.shader, i) == type)
        {
            propName = ShaderUtil.GetPropertyName(mat.shader, i);
            if (propName == property)
            {
                // We've found our current property
                selectedPropIndex = props.Count;
            }
            props.Add(propName);
        }
    }
    // Grey the control while nothing is selected yet.
    GUI.color = string.IsNullOrEmpty(property) ? HUXEditorUtils.DisabledColor : HUXEditorUtils.DefaultColor;
    if (string.IsNullOrEmpty (labelName))
    {
        labelName = type.ToString();
    }
    int newPropIndex = EditorGUILayout.Popup(labelName, selectedPropIndex, props.ToArray());
    if (allowNone)
    {
        // Index 0 is the "(None)" sentinel — map it back to an empty string.
        property = (newPropIndex > 0 ? props[newPropIndex] : string.Empty);
    }
    else
    {
        if (props.Count > 0)
        {
            property = props[newPropIndex];
        }
        else
        {
            // Shader exposes no properties of this type — fall back to the default.
            property = defaultProperty;
        }
    }
    GUI.color = HUXEditorUtils.DefaultColor;
    return property;
}
else
{
    WarningMessage("Can't get material " + type.ToString() + " properties because material is null.");
    GUI.color = HUXEditorUtils.DefaultColor;
    return string.Empty;
}
}

/// <summary>
/// Draws a large (18 pt) bold header label.
/// </summary>
/// <param name="header">Text to display.</param>
public static void Header (string header)
{
    GUIStyle headerStyle = new GUIStyle(EditorStyles.boldLabel);
    headerStyle.fontSize = 18;
    EditorGUILayout.LabelField(header, headerStyle, GUILayout.MinHeight(24));
}

/// <summary>
/// Draws a warning-colored section box containing a word-wrapped message and,
/// optionally, a button that invokes <paramref name="buttonAction"/>.
/// GUI.color is restored to its previous value before returning.
/// </summary>
/// <param name="warning">Warning text to display.</param>
/// <param name="buttonMessage">Optional button caption; button is only drawn when both this and <paramref name="buttonAction"/> are supplied.</param>
/// <param name="buttonAction">Optional action invoked when the button is clicked.</param>
public static void WarningMessage(string warning, string buttonMessage = null, Action buttonAction = null)
{
    Color tColor = GUI.color;
    HUXEditorUtils.BeginSectionBox("Warning", HUXEditorUtils.WarningColor);
    EditorGUILayout.LabelField(warning, EditorStyles.wordWrappedLabel);
    if (!string.IsNullOrEmpty(buttonMessage) && buttonAction != null)
    {
        if (GUILayout.Button(buttonMessage))
        {
            buttonAction.Invoke();
        }
    }
    HUXEditorUtils.EndSectionBox();
    GUI.color = tColor;
}

/// <summary>
/// Draws an error-colored section box with a word-wrapped message and an
/// optional fix button (caption defaults to "Fix now") that invokes
/// <paramref name="action"/>. GUI.color is restored before returning.
/// </summary>
/// <param name="error">Error text to display.</param>
/// <param name="action">Optional fix action; the button is only drawn when non-null.</param>
/// <param name="fixMessage">Optional caption overriding the default "Fix now".</param>
public static void ErrorMessage(string error, Action action = null, string fixMessage = null)
{
    Color tColor = GUI.color;
    HUXEditorUtils.BeginSectionBox("Error", HUXEditorUtils.ErrorColor);
    EditorGUILayout.LabelField(error, EditorStyles.wordWrappedLabel);
    if (action != null && GUILayout.Button((fixMessage != null) ? fixMessage : "Fix now"))
    {
        action.Invoke();
    }
    HUXEditorUtils.EndSectionBox();
    GUI.color = tColor;
}

/// <summary>
/// Opens a warning-colored vertical box for editing a shared button profile.
/// Must be balanced by a call to <see cref="EndProfileBox"/>.
/// </summary>
public static void BeginProfileBox()
{
    GUI.color = HUXEditorUtils.WarningColor;
    EditorGUILayout.BeginVertical(EditorStyles.helpBox);
    DrawSubtleMiniLabel("Profile" + ":");
    DrawSubtleMiniLabel("(Warning: this section edits the button profile. These changes will affect all buttons that use this profile.)");
}

/// <summary>Closes a box opened with <see cref="BeginProfileBox"/>.</summary>
public static void EndProfileBox()
{
    EndSectionBox();
}

/// <summary>
/// Opens a default-colored vertical section box with a subtle mini-label.
/// Must be balanced by a call to <see cref="EndSectionBox"/>.
/// </summary>
/// <param name="label">Section label (drawn with a trailing colon).</param>
public static void BeginSectionBox(string label)
{
    GUI.color = DefaultColor;
    /*GUIStyle boxStyle = new GUIStyle(EditorStyles.helpBox);
    boxStyle.normal.background = SectionBackground;*/
    EditorGUILayout.BeginVertical(EditorStyles.helpBox);
    DrawSubtleMiniLabel(label + ":");
}

/// <summary>
/// Conditionally draws a small word-wrapped help box using the custom
/// help-box background texture. Resets GUI.color to DefaultColor on exit.
/// </summary>
/// <param name="show">When false nothing is drawn (but GUI.color is still reset).</param>
/// <param name="text">Help text to display.</param>
public static void HelpBox(bool show, string text)
{
    if (show)
    {
        GUI.color = ObjectColor;
        GUIStyle helpBoxStyle = new GUIStyle(EditorStyles.helpBox);
        helpBoxStyle.wordWrap = true;
        helpBoxStyle.fontSize = 9;
        helpBoxStyle.normal.background = HelpBoxBackground;
        EditorGUILayout.LabelField(text, helpBoxStyle);
    }
    GUI.color = DefaultColor;
}

/// <summary>
/// Opens a collapsible section box headed by a small foldout control.
/// The box is ALWAYS opened (callers must always call <see cref="EndSectionBox"/>),
/// regardless of the foldout state; the return value only tells the caller
/// whether to draw the section contents.
/// </summary>
/// <param name="label">Foldout label; a colon is appended while expanded.</param>
/// <param name="foldout">Foldout state, updated in place from user interaction.</param>
/// <returns>The (possibly updated) foldout state.</returns>
public static bool BeginSectionBox(string label, ref bool foldout)
{
    GUI.color = DefaultColor;
    /*GUIStyle boxStyle = new GUIStyle(EditorStyles.helpBox);
    boxStyle.normal.background = SectionBackground;*/
    EditorGUILayout.BeginVertical(EditorStyles.helpBox);
    GUI.color = Color.Lerp(DefaultColor, Color.grey, 0.5f);
    ; // NOTE(review): stray empty statement in the original — harmless, kept verbatim
    //GUI.contentColor = DarkColor;
    GUIStyle foldoutStyle = new GUIStyle(EditorStyles.foldout);
    foldoutStyle.fontStyle = FontStyle.Normal;
    foldoutStyle.fontSize = 9;
    // NOTE(review): fontStyle is assigned Normal twice in the original; kept verbatim.
    foldoutStyle.fontStyle = FontStyle.Normal;
    foldout = EditorGUILayout.Foldout(foldout, label + (foldout ? ":" : ""), true, foldoutStyle);
    GUI.color = DefaultColor;
    //GUI.contentColor = Color.white;
    return foldout;
}

/// <summary>
/// Opens a vertical section box tinted with <paramref name="color"/> and
/// headed by a subtle mini-label. Balance with <see cref="EndSectionBox"/>.
/// </summary>
/// <param name="label">Section label (drawn with a trailing colon).</param>
/// <param name="color">Tint applied to GUI.color for the box.</param>
public static void BeginSectionBox(string label, Color color)
{
    GUI.color = color;
    /*GUIStyle boxStyle = new GUIStyle(EditorStyles.helpBox);
    boxStyle.normal.background = SectionBackground;*/
    EditorGUILayout.BeginVertical(EditorStyles.helpBox);
    /*GUIStyle foldoutStyle = new GUIStyle(EditorStyles.wordWrappedLabel);
    foldoutStyle.fontStyle = FontStyle.Normal;
    foldoutStyle.fontSize = 12;
    foldoutStyle.fontStyle = FontStyle.Bold;
    EditorGUILayout.LabelField(label + ":", foldoutStyle);*/
    DrawSubtleMiniLabel(label + ":");
}

/// <summary>Closes a box opened with any BeginSectionBox overload.</summary>
public static void EndSectionBox()
{
    EditorGUILayout.EndVertical();
}

/// <summary>
/// Opens a sub-section box using the custom section background, tinted with
/// <paramref name="sectionColor"/> and headed by a bold label.
/// Balance with <see cref="EndSubSectionBox"/>.
/// </summary>
/// <param name="label">Sub-section label (drawn with a trailing colon).</param>
/// <param name="sectionColor">Tint applied to GUI.color for the box.</param>
public static void BeginSubSectionBox(string label, Color sectionColor)
{
    GUI.color = sectionColor;
    GUIStyle boxStyle = new GUIStyle(EditorStyles.helpBox);
    boxStyle.normal.background = SectionBackground;
    EditorGUILayout.BeginVertical(boxStyle);
    EditorGUILayout.LabelField(label + ":", EditorStyles.boldLabel);
}

/// <summary>
/// Opens a default-colored sub-section box with a bold label.
/// Balance with <see cref="EndSubSectionBox"/>.
/// </summary>
/// <param name="label">Sub-section label (drawn with a trailing colon).</param>
public static void BeginSubSectionBox(string label)
{
    GUI.color = DefaultColor;
    GUIStyle boxStyle = new GUIStyle(EditorStyles.helpBox);
    boxStyle.normal.background = SectionBackground;
    EditorGUILayout.BeginVertical(boxStyle);
    EditorGUILayout.LabelField(label + ":", EditorStyles.boldLabel);
}

/// <summary>Closes a box opened with <see cref="BeginSubSectionBox(string)"/>.</summary>
public static void EndSubSectionBox()
{
    EditorGUILayout.EndVertical();
}

/// <summary>
/// Draws a small word-wrapped label blended halfway toward grey, then
/// restores the previous GUI.color.
/// </summary>
/// <param name="label">Text to display.</param>
public static void DrawSubtleMiniLabel(string label)
{
    Color tColor = GUI.color;
    GUI.color = Color.Lerp(tColor, Color.grey, 0.5f);
    EditorGUILayout.LabelField(label, EditorStyles.wordWrappedMiniLabel);
    GUI.color = tColor;
}

/// <summary>Draws a thin horizontal divider line across the inspector.</summary>
public static void DrawDivider()
{
    GUIStyle styleHR = new GUIStyle(GUI.skin.box);
    styleHR.stretchWidth = true;
    styleHR.fixedHeight = 2;
    GUILayout.Box("", styleHR);
}

/// <summary>
/// If GUI state changed this frame (and we are not in play mode), marks
/// <paramref name="target"/> dirty and flags the active scene as dirty so the
/// edit is persisted on save.
/// </summary>
/// <param name="target">Object whose serialized state was edited.</param>
public static void SaveChanges(UnityEngine.Object target)
{
    if (Application.isPlaying)
        return;
    if (GUI.changed)
    {
        EditorUtility.SetDirty(target);
        UnityEditor.SceneManagement.EditorSceneManager.MarkSceneDirty(UnityEngine.SceneManagement.SceneManager.GetActiveScene());
    }
}

/// <summary>
/// Two-object variant of <see cref="SaveChanges(UnityEngine.Object)"/>:
/// marks both targets dirty and flags the active scene dirty when GUI changed.
/// </summary>
/// <param name="target1">First edited object.</param>
/// <param name="target2">Second edited object.</param>
public static void SaveChanges(UnityEngine.Object target1, UnityEngine.Object target2)
{
    if (Application.isPlaying)
        return;
    if (GUI.changed)
    {
        EditorUtility.SetDirty(target1);
        EditorUtility.SetDirty(target2);
        UnityEditor.SceneManagement.EditorSceneManager.MarkSceneDirty(UnityEngine.SceneManagement.SceneManager.GetActiveScene());
    }
}

/// <summary>
/// Collects the names of all public, parameterless, void-returning methods
/// declared on the components attached to <paramref name="comp"/>, suitable
/// for a method-picker popup. Duplicate component types, duplicate method
/// names, and any type listed in <paramref name="ignoreTypes"/> are skipped.
/// Returns an empty array when <paramref name="comp"/> is null.
/// </summary>
/// <param name="comp">GameObject whose components are inspected.</param>
/// <param name="ignoreTypes">Optional component types to exclude.</param>
/// <returns>Array of candidate method names (possibly empty, never null).</returns>
public static string[] getMethodOptions(GameObject comp, List<System.Type> ignoreTypes = null)
{
    List<string> methods = new List<string>();
    if (comp != null)
    {
        Component[] allComponents = comp.GetComponents<Component>();
        List<System.Type> doneTypes = new List<System.Type>();
        for (int index = 0; index < allComponents.Length; index++)
        {
            System.Type compType = allComponents[index].GetType();
            if (!doneTypes.Contains(compType) && (ignoreTypes == null || !ignoreTypes.Contains(compType)))
            {
                // NOTE(review): "allMemebers" is a pre-existing misspelling of
                // "allMembers"; local name kept verbatim.
                MethodInfo[] allMemebers = compType.GetMethods();
                for (int memberIndex = 0; memberIndex < allMemebers.Length; memberIndex++)
                {
                    // Only zero-argument public void methods are usable as
                    // inspector-invokable callbacks.
                    if (allMemebers[memberIndex].IsPublic && allMemebers[memberIndex].GetParameters().Length == 0
                        && !methods.Contains(allMemebers[memberIndex].Name)
                        && allMemebers[memberIndex].ReturnType == typeof(void))
                    {
                        methods.Add(allMemebers[memberIndex].Name);
                    }
                }
                doneTypes.Add(compType);
            }
        }
    }
    return methods.ToArray();
}

/// <summary>
/// Adds a prefab to the scene.
/// </summary>
/// <param name="prefabPath">Asset path of the prefab to load.</param>
/// <param name="ignoreAlreadyInScene">If false the prefab will not be added if it exists in the hierarchy.</param>
/// <returns>A reference to the newly created prefab instance or one that exists in the scene if ignoreAlreadyInScene is false; null when the prefab asset could not be loaded.</returns>
public static GameObject AddToScene(string prefabPath, bool ignoreAlreadyInScene = true)
{
    GameObject prefab = AssetDatabase.LoadAssetAtPath(prefabPath, typeof(GameObject)) as GameObject;
    GameObject instance = null;
    if (prefab != null)
    {
        instance = FindFirstPrefabInstance(prefab);
        if (instance == null || ignoreAlreadyInScene)
        {
            // Either no instance exists yet, or duplicates are allowed.
            instance = PrefabUtility.InstantiatePrefab(prefab) as GameObject;
        }
        else
        {
            // NOTE(review): "exits" is a typo for "exists" in this runtime
            // string; left unchanged here (comment-only edit).
            Debug.LogWarning("Instance already exits in the scene: " + prefabPath);
        }
    }
    else
    {
        Debug.LogError("Could not load prefab: " + prefabPath);
    }
    return instance;
}

/// <summary>
/// Finds the first instance of a prefab in the Hierarchy.
/// </summary>
/// <param name="prefab">Prefab asset to search for.</param>
/// <returns>First instance of the prefab or null if one is not found.</returns>
public static GameObject FindFirstPrefabInstance(GameObject prefab)
{
    GameObject result = null;
    GameObject[] allObjects = (GameObject[])GameObject.FindObjectsOfType(typeof(GameObject));
    foreach (GameObject obj in allObjects)
    {
        PrefabType type = PrefabUtility.GetPrefabType(obj);
        if (type == PrefabType.PrefabInstance)
        {
            // Compare the instance's source asset against the requested prefab.
            UnityEngine.Object GO_prefab = PrefabUtility.GetPrefabParent(obj);
            if (prefab == GO_prefab)
            {
                result = obj;
                break;
            }
        }
    }
    return result;
}

/// <summary>
/// Resets the active scene's ambient lighting, reflection, skybox and fog
/// settings to a flat, neutral baseline.
/// </summary>
public static void CorrectAmbientLightingInScene()
{
    RenderSettings.ambientMode = UnityEngine.Rendering.AmbientMode.Flat;
    RenderSettings.ambientIntensity = 1.0f;
    // Normalize and set ambient light to default.
    Vector4 c = new Vector4(51.0f, 51.0f, 51.0f, 255.0f);
    c.Normalize();
    RenderSettings.ambientLight = new Color(c.x, c.y, c.z, c.w);
    RenderSettings.reflectionBounces = 1;
    RenderSettings.reflectionIntensity = 1.0f;
    RenderSettings.skybox = null;
    RenderSettings.fog = false;
}

/// <summary>
/// Returns all types in <paramref name="assembly"/> that are subclasses of
/// <paramref name="baseType"/> according to <see cref="IsSubclassOf"/>.
/// </summary>
/// <param name="baseType">Base (possibly generic) type to match against.</param>
/// <param name="assembly">Assembly whose types are scanned.</param>
/// <returns>List of matching derived types (possibly empty).</returns>
private static List<Type> GetDerivedTypes(Type baseType, Assembly assembly)
{
    Type[] types = assembly.GetTypes();
    List<Type> derivedTypes = new List<Type>();
    for (int i = 0, count = types.Length; i < count; i++)
    {
        Type type = types[i];
        if (IsSubclassOf(type, baseType))
        {
            derivedTypes.Add(type);
        }
    }
    return derivedTypes;
}

/// <summary>
/// Subclass test that also handles generic base types: when
/// <paramref name="baseType"/> is generic, each ancestor's generic type
/// definition is compared against baseType's generic type definition.
/// Returns false for null arguments or when the types are identical.
/// </summary>
/// <param name="type">Candidate derived type.</param>
/// <param name="baseType">Base type (generic or non-generic).</param>
/// <returns>True if <paramref name="type"/> derives from <paramref name="baseType"/>.</returns>
private static bool IsSubclassOf(Type type, Type baseType)
{
    if (type == null || baseType == null || type == baseType)
        return false;
    if (baseType.IsGenericType == false)
    {
        if (type.IsGenericType == false)
            // Neither side is generic — the built-in check suffices.
            return type.IsSubclassOf(baseType);
    }
    else
    {
        // Compare against the open generic definition (e.g. Foo<> rather than Foo<int>).
        baseType = baseType.GetGenericTypeDefinition();
    }
    type = type.BaseType;
    Type objectType = typeof(object);
    // Walk the inheritance chain up to (but excluding) System.Object.
    while (type != objectType && type != null)
    {
        // NOTE(review): "curentType" is a pre-existing misspelling of
        // "currentType"; local name kept verbatim.
        Type curentType = type.IsGenericType ? type.GetGenericTypeDefinition() : type;
        if (curentType == baseType)
            return true;
        type = type.BaseType;
    }
    return false;
}

/// <summary>
/// Lazily-built 2x2 texture filled with SectionColor, used as the background
/// of sub-section boxes.
/// </summary>
private static Texture2D SectionBackground
{
    get
    {
        if (sectionBackground == null)
        {
            sectionBackground = new Texture2D(2, 2);
            var pix = new Color[2 * 2];
            for (int i = 0; i < pix.Length; i++)
            {
                pix[i] = SectionColor;
            }
            sectionBackground.SetPixels(pix);
            sectionBackground.Apply();
        }
        return sectionBackground;
    }
}

/// <summary>
/// Lazily-built 2x2 texture filled with HelpBoxColor, used as the background
/// of <see cref="HelpBox"/> labels.
/// </summary>
private static Texture2D HelpBoxBackground
{
    get
    {
        if (helpBoxBackground == null)
        {
            helpBoxBackground = new Texture2D(2, 2);
            var pix = new Color[2 * 2];
            for (int i = 0; i < pix.Length; i++)
            {
                pix[i] = HelpBoxColor;
            }
            helpBoxBackground.SetPixels(pix);
            helpBoxBackground.Apply();
        }
        return helpBoxBackground;
    }
}

// Backing caches for the lazily-created background textures above.
private static Texture2D helpBoxBackground = null;
private static Texture2D sectionBackground = null;
}
}
elbruno/Blog
20170807 Holo MRDesignLab Move Resize Holograms/Assets/HUX/Editor/Utility/HUXEditorUtils.cs
C#
gpl-2.0
29,118