answer
stringlengths
17
10.2M
package com.netflix.governator.lifecycle; /** * Callback for injected instances */ public interface LifecycleListener { /** * When Guice injects an object, this callback will be notified * * @param obj object being injected */ public void objectInjected(Object obj); /** * Called when an object's lifecycle state changes * * @param obj the object * @param newState new state */ public void stateChanged(Object obj, LifecycleState newState); }
// This file is part of the OpenNMS(R) Application.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// and included code are below.
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
// Modifications:
// 2003 Jan 31: Cleaned up some unused imports.
// This program is free software; you can redistribute it and/or modify
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
// For more information contact:
// Tab Size = 8

package org.opennms.netmgt.capsd;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.config.DatabaseConnectionFactory;

/**
 * In-memory image of a single row of the <code>snmpInterface</code> table,
 * keyed by (nodeID, snmpIfIndex). A bit mask records which columns have been
 * modified since the entry was created or loaded, so <code>insert</code> and
 * <code>update</code> only touch the columns that actually changed.
 */
final class DbSnmpInterfaceEntry {

    /**
     * The SQL statement used to read a node from the database. This record is
     * keyed by the node identifier and the ifIndex.
     */
    private static final String SQL_LOAD_REC = "SELECT ipAddr, snmpIpAdEntNetMask, snmpPhysAddr, snmpIfDescr, snmpIfType, snmpIfName, snmpIfSpeed, snmpIfAdminStatus, snmpIfOperStatus, snmpIfAlias FROM snmpInterface WHERE nodeID = ? AND snmpIfIndex = ?";

    /** True if this record was loaded from the database. False if it's new. */
    private boolean m_fromDb;

    /** The node identifier (immutable half of the key). */
    private int m_nodeId;

    /** The IP address. */
    private InetAddress m_ipAddr;

    /** The SNMP ifIndex (other half of the key, -1 when undefined). */
    private int m_ifIndex;

    private InetAddress m_netmask;

    private String m_physAddr;

    private String m_ifDescription;

    private int m_ifType;

    private String m_ifName;

    private String m_ifAlias;

    private long m_ifSpeed;

    private int m_ifAdminStatus;

    private int m_ifOperStatus;

    /**
     * The bit map used to determine which elements have changed since the
     * record was created.
     */
    private int m_changed;

    // Mask fields
    private static final int CHANGED_IFADDRESS = 1 << 0;

    private static final int CHANGED_NETMASK = 1 << 1;

    private static final int CHANGED_PHYSADDR = 1 << 2;

    private static final int CHANGED_DESCRIPTION = 1 << 3;

    private static final int CHANGED_IFTYPE = 1 << 4;

    private static final int CHANGED_IFNAME = 1 << 5;

    private static final int CHANGED_IFSPEED = 1 << 6;

    private static final int CHANGED_IFADMINSTATUS = 1 << 7;

    private static final int CHANGED_IFOPERSTATUS = 1 << 8;

    private static final int CHANGED_IFALIAS = 1 << 9;

    /** Returns true if the given change-mask bit is set. */
    private boolean isChanged(int mask) {
        return (m_changed & mask) == mask;
    }

    /**
     * Inserts the new interface into the snmpInterface table of the OpenNMS
     * database. Only columns flagged in the change mask are written, except
     * ipAddr which is always written because it must not be null.
     *
     * @param c
     *            The connection to the database.
     *
     * @throws java.sql.SQLException
     *             Thrown if an error occurs with the connection
     * @throws IllegalStateException
     *             If the record already exists in the database.
     */
    private void insert(Connection c) throws SQLException {
        if (m_fromDb)
            throw new IllegalStateException("The record already exists in the database");

        Category log = ThreadCategory.getInstance(getClass());

        // Build the column list and the placeholder list in lock step.
        StringBuffer names = new StringBuffer("INSERT INTO snmpInterface (nodeID,snmpIfIndex,ipaddr");
        StringBuffer values = new StringBuffer("?,?,?");

        // We *must* have an IP address that is not null in the snmpinterface
        // table, so ipAddr is inserted unconditionally (see binding below).
        if (isChanged(CHANGED_NETMASK)) {
            values.append(",?");
            names.append(",snmpIpAdEntNetMask");
        }
        if (isChanged(CHANGED_PHYSADDR)) {
            values.append(",?");
            names.append(",snmpPhysAddr");
        }
        if (isChanged(CHANGED_DESCRIPTION)) {
            values.append(",?");
            names.append(",snmpIfDescr");
        }
        if (isChanged(CHANGED_IFTYPE)) {
            values.append(",?");
            names.append(",snmpIfType");
        }
        if (isChanged(CHANGED_IFNAME)) {
            values.append(",?");
            names.append(",snmpIfName");
        }
        if (isChanged(CHANGED_IFSPEED)) {
            values.append(",?");
            names.append(",snmpIfSpeed");
        }
        if (isChanged(CHANGED_IFADMINSTATUS)) {
            values.append(",?");
            names.append(",snmpIfAdminStatus");
        }
        if (isChanged(CHANGED_IFOPERSTATUS)) {
            values.append(",?");
            names.append(",snmpIfOperStatus");
        }
        if (isChanged(CHANGED_IFALIAS)) {
            values.append(",?");
            names.append(",snmpIfAlias");
        }
        names.append(") VALUES (").append(values).append(')');

        log.debug("DbSnmpInterfaceEntry.insert: SQL insert statment = " + names.toString());

        // Create the prepared statement and bind the values in the same
        // order the columns were appended above. Closed in finally so an
        // SQLException during binding/execution cannot leak the statement.
        PreparedStatement stmt = c.prepareStatement(names.toString());
        try {
            int ndx = 1;
            stmt.setInt(ndx++, m_nodeId);
            stmt.setInt(ndx++, m_ifIndex);

            // ipAddr may never be null in the table; fall back to 0.0.0.0
            // when no (non-null) address was supplied.
            if (isChanged(CHANGED_IFADDRESS) && m_ipAddr != null)
                stmt.setString(ndx++, m_ipAddr.getHostAddress());
            else
                stmt.setString(ndx++, "0.0.0.0");

            if (isChanged(CHANGED_NETMASK)) {
                // Null-guard mirrors update(); previously this would NPE.
                if (m_netmask == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_netmask.getHostAddress());
            }
            if (isChanged(CHANGED_PHYSADDR))
                stmt.setString(ndx++, m_physAddr);
            if (isChanged(CHANGED_DESCRIPTION))
                stmt.setString(ndx++, m_ifDescription);
            if (isChanged(CHANGED_IFTYPE))
                stmt.setInt(ndx++, m_ifType);
            if (isChanged(CHANGED_IFNAME))
                stmt.setString(ndx++, m_ifName);
            if (isChanged(CHANGED_IFSPEED))
                stmt.setLong(ndx++, m_ifSpeed);
            if (isChanged(CHANGED_IFADMINSTATUS))
                stmt.setInt(ndx++, m_ifAdminStatus);
            if (isChanged(CHANGED_IFOPERSTATUS))
                stmt.setInt(ndx++, m_ifOperStatus);
            if (isChanged(CHANGED_IFALIAS))
                stmt.setString(ndx++, m_ifAlias);

            // Run the insert
            int rc = stmt.executeUpdate();
            log.debug("DbSnmpInterfaceEntry.insert: SQL update result = " + rc);
        } finally {
            stmt.close();
        }

        // Clear the mask and mark as backed by the database.
        m_fromDb = true;
        m_changed = 0;
    }

    /**
     * Updates an existing record in the OpenNMS snmpInterface table. Only the
     * columns flagged in the change mask are included in the SET clause.
     *
     * @param c
     *            The connection used for the update.
     *
     * @throws java.sql.SQLException
     *             Thrown if an error occurs with the connection
     * @throws IllegalStateException
     *             If the record does not exist in the database.
     */
    private void update(Connection c) throws SQLException {
        if (!m_fromDb)
            throw new IllegalStateException("The record does not exists in the database");

        Category log = ThreadCategory.getInstance(getClass());

        // Build the SET clause; the comma separator becomes "," after the
        // first column is appended.
        StringBuffer sqlText = new StringBuffer("UPDATE snmpInterface SET ");
        char comma = ' ';

        if (isChanged(CHANGED_IFADDRESS)) {
            sqlText.append(comma).append("ipAddr = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_NETMASK)) {
            sqlText.append(comma).append("snmpIpAdEntNetMask = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_PHYSADDR)) {
            sqlText.append(comma).append("snmpPhysAddr = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_DESCRIPTION)) {
            sqlText.append(comma).append("snmpIfDescr = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFTYPE)) {
            sqlText.append(comma).append("snmpIfType = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFNAME)) {
            sqlText.append(comma).append("snmpIfName = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFSPEED)) {
            sqlText.append(comma).append("snmpIfSpeed = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFADMINSTATUS)) {
            sqlText.append(comma).append("snmpIfAdminStatus = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFOPERSTATUS)) {
            sqlText.append(comma).append("snmpIfOperStatus = ?");
            comma = ',';
        }
        if (isChanged(CHANGED_IFALIAS)) {
            sqlText.append(comma).append("snmpIfAlias = ?");
            comma = ',';
        }
        sqlText.append(" WHERE nodeID = ? AND snmpIfIndex = ?");

        log.debug("DbSnmpInterfaceEntry.update: SQL update statment = " + sqlText.toString());

        // Create the prepared statement and bind in the same order as the
        // SET clause above; closed in finally to avoid a statement leak.
        PreparedStatement stmt = c.prepareStatement(sqlText.toString());
        try {
            int ndx = 1;
            if (isChanged(CHANGED_IFADDRESS)) {
                // Fix for the old FIXME: the null test belongs on the value
                // being bound (m_ipAddr), not on m_ifIndex.
                if (m_ipAddr == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_ipAddr.getHostAddress());
            }
            if (isChanged(CHANGED_NETMASK)) {
                if (m_netmask == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_netmask.getHostAddress());
            }
            if (isChanged(CHANGED_PHYSADDR)) {
                if (m_physAddr == null)
                    stmt.setNull(ndx++, Types.CHAR);
                else
                    stmt.setString(ndx++, m_physAddr);
            }
            if (isChanged(CHANGED_DESCRIPTION)) {
                if (m_ifDescription == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_ifDescription);
            }
            if (isChanged(CHANGED_IFTYPE)) {
                if (m_ifType == -1)
                    stmt.setNull(ndx++, Types.INTEGER);
                else
                    stmt.setInt(ndx++, m_ifType);
            }
            if (isChanged(CHANGED_IFNAME)) {
                if (m_ifName == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_ifName);
            }
            if (isChanged(CHANGED_IFSPEED)) {
                if (m_ifSpeed == -1L)
                    stmt.setNull(ndx++, Types.INTEGER);
                else
                    stmt.setLong(ndx++, m_ifSpeed);
            }
            if (isChanged(CHANGED_IFADMINSTATUS)) {
                if (m_ifAdminStatus == -1)
                    stmt.setNull(ndx++, Types.INTEGER);
                else
                    stmt.setInt(ndx++, m_ifAdminStatus);
            }
            if (isChanged(CHANGED_IFOPERSTATUS)) {
                if (m_ifOperStatus == -1)
                    stmt.setNull(ndx++, Types.INTEGER);
                else
                    stmt.setInt(ndx++, m_ifOperStatus);
            }
            if (isChanged(CHANGED_IFALIAS)) {
                if (m_ifAlias == null)
                    stmt.setNull(ndx++, Types.VARCHAR);
                else
                    stmt.setString(ndx++, m_ifAlias);
            }

            // WHERE clause key values
            stmt.setInt(ndx++, m_nodeId);
            if (m_ifIndex == -1)
                stmt.setNull(ndx++, Types.INTEGER);
            else
                stmt.setInt(ndx++, m_ifIndex);

            // Run the update
            int rc = stmt.executeUpdate();
            log.debug("DbSnmpInterfaceEntry.update: update result = " + rc);
        } finally {
            stmt.close();
        }

        // Clear the mask; the record is already backed by the database.
        m_changed = 0;
    }

    /**
     * Load the current interface from the database. If the interface was
     * modified, the modifications are lost. The nodeid and ifIndex must be
     * set prior to this call.
     *
     * @param c
     *            The connection used to load the data.
     *
     * @return True if the record was found and loaded, false otherwise.
     *
     * @throws java.sql.SQLException
     *             Thrown if an error occurs with the connection
     * @throws IllegalStateException
     *             If the record does not exist in the database.
     */
    private boolean load(Connection c) throws SQLException {
        if (!m_fromDb)
            throw new IllegalStateException("The record does not exists in the database");

        Category log = ThreadCategory.getInstance(getClass());

        PreparedStatement stmt = c.prepareStatement(SQL_LOAD_REC);
        try {
            stmt.setInt(1, m_nodeId);
            stmt.setInt(2, m_ifIndex);

            ResultSet rset = stmt.executeQuery();
            try {
                if (!rset.next())
                    return false;

                // Extract the values in the column order of SQL_LOAD_REC.
                int ndx = 1;

                // get the IP address
                String str = rset.getString(ndx++);
                if (str != null && !rset.wasNull()) {
                    try {
                        m_ipAddr = InetAddress.getByName(str);
                    } catch (UnknownHostException e) {
                        log.warn("DbSnmpInterface.load: the ipAddr field was malformed: nodeid = " + m_nodeId + ", ifIndex = " + m_ifIndex, e);
                    }
                }

                // get the netmask
                str = rset.getString(ndx++);
                if (str != null && !rset.wasNull()) {
                    try {
                        m_netmask = InetAddress.getByName(str);
                    } catch (UnknownHostException e) {
                        log.warn("DbSnmpInterface.load: the netmask field was malformed: nodeid = " + m_nodeId + ", ipAddr = " + m_ipAddr.getHostAddress(), e);
                    }
                }

                // get the physical address
                m_physAddr = rset.getString(ndx++);
                if (rset.wasNull())
                    m_physAddr = null;

                // get the description
                m_ifDescription = rset.getString(ndx++);
                if (rset.wasNull())
                    m_ifDescription = null;

                // get the type. BUG FIX: the original assigned m_ifIndex = -1
                // here, clobbering the key field instead of resetting the type.
                m_ifType = rset.getInt(ndx++);
                if (rset.wasNull())
                    m_ifType = -1;

                // get the name
                m_ifName = rset.getString(ndx++);
                if (rset.wasNull())
                    m_ifName = null;

                // get the speed
                m_ifSpeed = rset.getLong(ndx++);
                if (rset.wasNull())
                    m_ifSpeed = -1L;

                // get the admin status
                m_ifAdminStatus = rset.getInt(ndx++);
                if (rset.wasNull())
                    m_ifAdminStatus = -1;

                // get the operational status
                m_ifOperStatus = rset.getInt(ndx++);
                if (rset.wasNull())
                    m_ifOperStatus = -1;

                // get the alias
                m_ifAlias = rset.getString(ndx++);
                if (rset.wasNull())
                    m_ifAlias = null;
            } finally {
                rset.close();
            }
        } finally {
            stmt.close();
        }

        // Clear the mask; the in-memory image now matches the database.
        m_changed = 0;
        return true;
    }

    /**
     * Default constructor. Unsupported; entries are always keyed.
     */
    private DbSnmpInterfaceEntry() {
        throw new UnsupportedOperationException("Default constructor not supported!");
    }

    /**
     * Constructs an entry that is assumed to exist in the database
     * (used by {@link #get(Connection, int, int)} before loading).
     *
     * @param nid
     *            The node identifier.
     * @param ifIndex
     *            The interface index to load
     */
    private DbSnmpInterfaceEntry(int nid, int ifIndex) {
        this(nid, ifIndex, true);
    }

    /**
     * Constructs a new interface.
     *
     * @param nid
     *            The node identifier.
     * @param ifIndex
     *            The interface index to load
     * @param exists
     *            True if the interface already exists.
     */
    private DbSnmpInterfaceEntry(int nid, int ifIndex, boolean exists) {
        m_fromDb = exists;
        m_nodeId = nid;
        m_ipAddr = null;
        m_ifIndex = ifIndex;
        m_netmask = null;
        m_physAddr = null;
        m_ifDescription = null;
        m_ifType = -1;
        m_ifName = null;
        m_ifSpeed = -1L;
        m_ifAdminStatus = -1;
        m_ifOperStatus = -1;
        m_ifAlias = null;
        m_changed = 0;
    }

    /**
     * Returns the node entry's unique identifier. This is a non-mutable
     * element. If the record does not yet exist in the database then a -1 is
     * returned.
     */
    int getNodeId() {
        return m_nodeId;
    }

    /**
     * Returns the IP address for the entry.
     */
    InetAddress getIfAddress() {
        return m_ipAddr;
    }

    void setIfAddress(InetAddress addr) {
        m_ipAddr = addr;
        m_changed |= CHANGED_IFADDRESS;
    }

    boolean hasIfAddressChanged() {
        return isChanged(CHANGED_IFADDRESS);
    }

    /** Sets the address only if non-null and different; returns true if set. */
    boolean updateIfAddress(InetAddress addr) {
        if (addr == null || addr.equals(m_ipAddr))
            return false;
        setIfAddress(addr);
        return true;
    }

    /**
     * Returns true if the ifIndex is defined.
     */
    boolean hasIfIndex() {
        return m_ifIndex != -1;
    }

    /**
     * Returns the current ifIndex
     */
    int getIfIndex() {
        return m_ifIndex;
    }

    InetAddress getNetmask() {
        return m_netmask;
    }

    void setNetmask(InetAddress mask) {
        m_netmask = mask;
        m_changed |= CHANGED_NETMASK;
    }

    boolean hasNetmaskChanged() {
        return isChanged(CHANGED_NETMASK);
    }

    boolean updateNetmask(InetAddress newNetmask) {
        if (newNetmask == null || newNetmask.equals(m_netmask))
            return false;
        setNetmask(newNetmask);
        return true;
    }

    String getPhysicalAddress() {
        return m_physAddr;
    }

    void setPhysicalAddress(String addr) {
        m_physAddr = addr;
        m_changed |= CHANGED_PHYSADDR;
    }

    boolean hasPhysicalAddressChanged() {
        return isChanged(CHANGED_PHYSADDR);
    }

    boolean updatePhysicalAddress(String newPhysAddr) {
        if (newPhysAddr == null || newPhysAddr.equals(m_physAddr))
            return false;
        setPhysicalAddress(newPhysAddr);
        return true;
    }

    String getDescription() {
        return m_ifDescription;
    }

    void setDescription(String descr) {
        m_ifDescription = descr;
        m_changed |= CHANGED_DESCRIPTION;
    }

    boolean hasDescriptionChanged() {
        return isChanged(CHANGED_DESCRIPTION);
    }

    boolean updateDescription(String newIfDescription) {
        if (newIfDescription == null || newIfDescription.equals(m_ifDescription))
            return false;
        setDescription(newIfDescription);
        return true;
    }

    String getName() {
        return m_ifName;
    }

    void setName(String name) {
        m_ifName = name;
        m_changed |= CHANGED_IFNAME;
    }

    boolean hasNameChanged() {
        return isChanged(CHANGED_IFNAME);
    }

    boolean updateName(String newIfName) {
        if (newIfName == null || newIfName.equals(m_ifName))
            return false;
        setName(newIfName);
        return true;
    }

    int getType() {
        return m_ifType;
    }

    void setType(int type) {
        m_ifType = type;
        m_changed |= CHANGED_IFTYPE;
    }

    boolean hasTypeChanged() {
        return isChanged(CHANGED_IFTYPE);
    }

    boolean updateType(int newIfType) {
        if (newIfType == -1 || newIfType == m_ifType)
            return false;
        setType(newIfType);
        return true;
    }

    long getSpeed() {
        return m_ifSpeed;
    }

    void setSpeed(long speed) {
        m_ifSpeed = speed;
        m_changed |= CHANGED_IFSPEED;
    }

    boolean hasSpeedChanged() {
        return isChanged(CHANGED_IFSPEED);
    }

    boolean updateSpeed(long newIfSpeed) {
        if (newIfSpeed == -1L || newIfSpeed == m_ifSpeed)
            return false;
        setSpeed(newIfSpeed);
        return true;
    }

    int getAdminStatus() {
        return m_ifAdminStatus;
    }

    void setAdminStatus(int status) {
        m_ifAdminStatus = status;
        m_changed |= CHANGED_IFADMINSTATUS;
    }

    boolean hasAdminStatusChanged() {
        return isChanged(CHANGED_IFADMINSTATUS);
    }

    boolean updateAdminStatus(int newIfAdminStatus) {
        if (newIfAdminStatus == -1 || newIfAdminStatus == m_ifAdminStatus)
            return false;
        setAdminStatus(newIfAdminStatus);
        return true;
    }

    int getOperationalStatus() {
        return m_ifOperStatus;
    }

    void setOperationalStatus(int status) {
        m_ifOperStatus = status;
        m_changed |= CHANGED_IFOPERSTATUS;
    }

    boolean hasOperationalStatusChanged() {
        return isChanged(CHANGED_IFOPERSTATUS);
    }

    boolean updateOperationalStatus(int newIfOperStatus) {
        if (newIfOperStatus == -1 || newIfOperStatus == m_ifOperStatus)
            return false;
        setOperationalStatus(newIfOperStatus);
        return true;
    }

    String getAlias() {
        return m_ifAlias;
    }

    void setAlias(String alias) {
        m_ifAlias = alias;
        m_changed |= CHANGED_IFALIAS;
    }

    boolean hasAliasChanged() {
        return isChanged(CHANGED_IFALIAS);
    }

    boolean updateAlias(String newIfAlias) {
        if (newIfAlias == null || newIfAlias.equals(m_ifAlias))
            return false;
        setAlias(newIfAlias);
        return true;
    }

    /**
     * Updates the interface information in the configured database. If the
     * interface does not exist then a new row in the table is created. If the
     * element already exists then its current row is updated as needed based
     * upon the current changes to the node.
     */
    void store() throws SQLException {
        if (m_changed != 0 || m_fromDb == false) {
            Connection db = null;
            try {
                db = DatabaseConnectionFactory.getInstance().getConnection();
                store(db);
                if (db.getAutoCommit() == false)
                    db.commit();
            } finally {
                try {
                    if (db != null)
                        db.close();
                } catch (SQLException e) {
                    ThreadCategory.getInstance(getClass()).warn("Exception closing JDBC connection", e);
                }
            }
        }
    }

    /**
     * Updates the interface information in the configured database. If the
     * interface does not exist then a new row in the table is created. If the
     * element already exists then its current row is updated as needed based
     * upon the current changes to the node.
     *
     * @param db
     *            The database connection used to write the record.
     */
    void store(Connection db) throws SQLException {
        if (m_changed != 0 || m_fromDb == false) {
            if (m_fromDb)
                update(db);
            else
                insert(db);
        }
    }

    /**
     * Creates a new entry. The entry is created in memory, but is not written
     * to the database until the first call to <code>store</code>.
     *
     * @param nid
     *            The node id of the interface.
     * @param ifIndex
     *            The ifIndex of the interface
     *
     * @return A new interface record.
     */
    static DbSnmpInterfaceEntry create(int nid, int ifIndex) {
        return new DbSnmpInterfaceEntry(nid, ifIndex, false);
    }

    /**
     * Retrieves a current record from the database based upon the key fields
     * of <em>nodeID</em> and <em>ifIndex</em>. If the record cannot be found
     * then a null reference is returned.
     *
     * @param nid
     *            The node id key
     * @param ifIndex
     *            the interface index.
     *
     * @return The loaded entry or null if one could not be found.
     */
    static DbSnmpInterfaceEntry get(int nid, int ifIndex) throws SQLException {
        Connection db = null;
        try {
            db = DatabaseConnectionFactory.getInstance().getConnection();
            return get(db, nid, ifIndex);
        } finally {
            try {
                if (db != null)
                    db.close();
            } catch (SQLException e) {
                ThreadCategory.getInstance(DbSnmpInterfaceEntry.class).warn("Exception closing JDBC connection", e);
            }
        }
    }

    /**
     * Retrieves a current record from the database based upon the key fields
     * of <em>nodeID</em> and <em>ifIndex</em>. If the record cannot be found
     * then a null reference is returned.
     *
     * @param db
     *            The database connection used to load the entry.
     * @param nid
     *            The node id key
     * @param ifIndex
     *            The interface index.
     *
     * @return The loaded entry or null if one could not be found.
     */
    static DbSnmpInterfaceEntry get(Connection db, int nid, int ifIndex) throws SQLException {
        DbSnmpInterfaceEntry entry = new DbSnmpInterfaceEntry(nid, ifIndex);
        if (!entry.load(db))
            entry = null;
        return entry;
    }

    /**
     * Creates a string that displays the internal contents of the record. This
     * is mainly just used for debug output since the format is ad-hoc.
     * Null addresses are printed as "null" instead of throwing an NPE.
     */
    public String toString() {
        String sep = System.getProperty("line.separator");
        StringBuffer buf = new StringBuffer();

        buf.append("from database = ").append(m_fromDb).append(sep);
        buf.append("node identifier = ").append(m_nodeId).append(sep);
        buf.append("IP Address = ").append(m_ipAddr == null ? "null" : m_ipAddr.getHostAddress()).append(sep);
        buf.append("IP Netmask = ").append(m_netmask == null ? "null" : m_netmask.getHostAddress()).append(sep);
        buf.append("MAC = ").append(m_physAddr).append(sep);
        buf.append("ifIndex = ").append(m_ifIndex).append(sep);
        buf.append("ifDescr = ").append(m_ifDescription).append(sep);
        buf.append("ifType = ").append(m_ifType).append(sep);
        buf.append("ifName = ").append(m_ifName).append(sep);
        buf.append("ifSpeed = ").append(m_ifSpeed).append(sep);
        buf.append("ifAdminStatus = ").append(m_ifAdminStatus).append(sep);
        buf.append("ifOperStatus = ").append(m_ifOperStatus).append(sep);
        buf.append("ifAlias = ").append(m_ifAlias).append(sep);

        return buf.toString();
    }

    /**
     * For debugging only
     */
    public static void main(String[] args) {
        try {
            DbSnmpInterfaceEntry entry = DbSnmpInterfaceEntry.get(Integer.parseInt(args[0]), Integer.parseInt(args[1]));
            System.out.println(entry.toString());
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}
package som.interpreter.nodes.specialized; import som.interpreter.Method; import som.interpreter.nodes.ExpressionNode; import som.interpreter.nodes.MessageNode; import som.vm.Universe; import som.vmobjects.SClass; import som.vmobjects.SInvokable; import som.vmobjects.SObject; import som.vmobjects.SSymbol; import com.oracle.truffle.api.CallTarget; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.FrameFactory; import com.oracle.truffle.api.nodes.InlinableCallSite; import com.oracle.truffle.api.nodes.Node; public class MonomorpicMessageNode extends MessageNode implements InlinableCallSite { private final SClass rcvrClass; private final SInvokable invokable; private int callCount; public MonomorpicMessageNode(final ExpressionNode receiver, final ExpressionNode[] arguments, final SSymbol selector, final Universe universe, final SClass rcvrClass, final SInvokable invokable) { super(receiver, arguments, selector, universe); this.rcvrClass = rcvrClass; this.invokable = invokable; callCount = 0; } @Override public SObject executeGeneric(final VirtualFrame frame) { callCount++; // evaluate all the expressions: first determine receiver SObject rcvr = receiver.executeGeneric(frame); // then determine the arguments SObject[] args = determineArguments(frame); SClass currentRcvrClass = classOfReceiver(rcvr, receiver); if (currentRcvrClass == rcvrClass) { return invokable.invoke(frame.pack(), rcvr, args); } else { CompilerDirectives.transferToInterpreter(); // So, it might just be a polymorphic send site. 
PolymorpicMessageNode poly = new PolymorpicMessageNode(receiver, arguments, selector, universe, rcvrClass, invokable, currentRcvrClass); replace(poly, "It is not a monomorpic send."); return doFullSend(frame, rcvr, args, currentRcvrClass); } } @Override public int getCallCount() { return callCount; } @Override public void resetCallCount() { callCount = 0; } @Override public CallTarget getCallTarget() { return invokable.getCallTarget(); } @Override public Node getInlineTree() { Method method = invokable.getTruffleInvokable(); if (method == null) { return this; } return method; } private InlinedMonomorphicMessageNode newInlinedNode( final FrameFactory frameFactory, final Method method) { return new InlinedMonomorphicMessageNode(receiver, arguments, selector, universe, rcvrClass, invokable, frameFactory, method, method.methodCloneForInlining()); } @Override public boolean inline(final FrameFactory factory) { Method method = invokable.getTruffleInvokable(); if (method == null) { return false; } InlinedMonomorphicMessageNode inlinedNode = newInlinedNode(factory, method); replace(inlinedNode, "Node got inlined"); return true; } }
/* * $Log: ReceiverBaseClassic.java,v $ * Revision 1.2 2007-10-23 12:53:20 europe\M00035F * Fix NPE when no error-storage and no inprocess-storage have been defined, but only an error-sender * * Revision 1.1 2007/10/16 12:40:36 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * moved code to ReceiverBaseClassic * * Revision 1.53 2007/10/10 08:53:00 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * transactions from JtaUtil * make runState externally accessible * * Revision 1.52 2007/10/08 13:33:31 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * changed ArrayList to List where possible * * Revision 1.51 2007/10/04 12:01:37 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * limit number of error messages written to log * * Revision 1.50 2007/10/03 08:57:04 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * changed HashMap to Map * * Revision 1.49 2007/09/27 12:55:42 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * introduction of monitoring * * Revision 1.48 2007/09/25 11:34:02 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added deprecation warning for ibi42compatibility * * Revision 1.47 2007/09/24 13:05:41 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * fixed bug in close of errorStorage * * Revision 1.46 2007/09/12 09:27:06 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added attribute pollInterval * * Revision 1.45 2007/09/05 13:05:02 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * moved copying of context to Misc * * Revision 1.44 2007/08/27 11:51:43 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * modified afterMessageProcessed handling * added attribute 'returnedSessionKeys' * * Revision 1.43 2007/08/10 11:21:49 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * catch more exceptions * * Revision 1.42 2007/06/26 12:06:08 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * tuned logging * * Revision 1.41 2007/06/26 06:56:59 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * set 
inProcessStorage type to 'E' if combined with errorStorage * * Revision 1.40 2007/06/21 07:07:06 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * removed warnings about not transacted=true * * Revision 1.39 2007/06/19 12:07:32 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * modifiy retryinterval handling * * Revision 1.38 2007/06/14 08:49:35 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * catch less specific types of exception * * Revision 1.37 2007/06/12 11:24:04 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * corrected typeSettings of transactional storages * * Revision 1.36 2007/06/08 12:49:03 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * updated javadoc * * Revision 1.35 2007/06/08 12:17:40 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * improved error handling * introduced retry mechanisme with increasing wait interval * * Revision 1.34 2007/06/08 07:49:13 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * changed error to warning * * Revision 1.33 2007/06/07 15:22:44 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * made stopping after receiving an exception configurable * * Revision 1.32 2007/05/23 09:25:17 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added support for attribute 'active' on transactional storages * * Revision 1.31 2007/05/21 12:22:47 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added setMessageLog() * * Revision 1.30 2007/05/02 11:37:51 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added attribute 'active' * * Revision 1.29 2007/02/12 14:03:45 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * Logger from LogUtil * * Revision 1.28 2007/02/05 15:01:44 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * configure inProcessStorage when it is present, not only when transacted * * Revision 1.27 2006/12/13 16:30:41 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added maxRetries to configuration javadoc * * Revision 1.26 2006/08/24 07:12:42 Gerrit van 
Brakel <gerrit.van.brakel@ibissource.org> * documented METT tracing event numbers * * Revision 1.25 2006/06/20 14:10:43 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added stylesheet attribute * * Revision 1.24 2006/04/12 16:17:43 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * retry after failed storing of message in inProcessStorage * * Revision 1.23 2006/02/20 15:42:41 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * moved METT-support to single entry point for tracing * * Revision 1.22 2006/02/09 07:57:47 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * METT tracing support * * Revision 1.21 2005/10/27 08:46:45 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * introduced RunStateEnquiries * * Revision 1.20 2005/10/26 08:52:31 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * allow for transacted="true" without inProcessStorage, (ohne Gewhr!) * * Revision 1.19 2005/10/17 11:29:24 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * fixed nullpointerexception in startRunning * * Revision 1.18 2005/09/26 11:42:10 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * added fileNameIfStopped attribute and replace from/to processing when stopped * * Revision 1.17 2005/09/13 15:42:14 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * improved handling of non-serializable messages like Poison-messages * * Revision 1.16 2005/08/08 09:44:11 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * start transactions if needed and not already started * * Revision 1.15 2005/07/19 15:27:14 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * modified closing procedure * added errorStorage * modified implementation of transactionalStorage * allowed exceptions to bubble up * assume rawmessages to be serializable for transacted processing * added ibis42compatibility attribute, avoiding exception bubbling * * Revision 1.14 2005/07/05 12:54:38 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * allow to set parameters from context 
for processRequest() methods * * Revision 1.13 2005/06/02 11:52:24 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * limited number of actively polling threads to value of attriubte numThreadsPolling * * Revision 1.12 2005/04/13 12:53:09 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * removed unused imports * * Revision 1.11 2005/03/31 08:22:49 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * fixed bug in getIdleStatistics * * Revision 1.10 2005/03/07 11:04:36 Johan Verrips <johan.verrips@ibissource.org> * PipeLineSession became a extension of HashMap, using other iterator * * Revision 1.9 2005/03/04 08:53:29 Johan Verrips <johan.verrips@ibissource.org> * Fixed IndexOutOfBoundException in getProcessStatistics due to multi threading. * Adjusted this too for getIdleStatistics * * Revision 1.8 2005/02/10 08:17:34 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * included context dump in debug * * Revision 1.7 2005/01/13 08:56:04 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * Make threadContext-attributes available in PipeLineSession * * Revision 1.6 2004/10/12 15:14:11 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * removed unused code * * Revision 1.5 2004/08/25 09:11:33 unknown <unknown@ibissource.org> * Add waitForRunstate with timeout * * Revision 1.4 2004/08/23 13:10:48 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * updated JavaDoc * * Revision 1.3 2004/08/16 14:09:58 unknown <unknown@ibissource.org> * Return returnIfStopped value in case adapter is stopped * * Revision 1.2 2004/08/09 13:46:52 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * various changes * * Revision 1.1 2004/08/03 13:04:30 Gerrit van Brakel <gerrit.van.brakel@ibissource.org> * introduction of GenericReceiver * */ package nl.nn.adapterframework.receivers; import java.io.IOException; import java.io.Serializable; import java.net.URL; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import 
java.util.List; import java.util.Map; import javax.naming.NamingException; import javax.transaction.Status; import javax.transaction.UserTransaction; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import nl.nn.adapterframework.configuration.ConfigurationException; import nl.nn.adapterframework.core.HasPhysicalDestination; import nl.nn.adapterframework.core.HasSender; import nl.nn.adapterframework.core.IAdapter; import nl.nn.adapterframework.core.IListener; import nl.nn.adapterframework.core.IMessageHandler; import nl.nn.adapterframework.core.INamedObject; import nl.nn.adapterframework.core.IPullingListener; import nl.nn.adapterframework.core.IPushingListener; import nl.nn.adapterframework.core.IReceiver; import nl.nn.adapterframework.core.IReceiverStatistics; import nl.nn.adapterframework.core.ISender; import nl.nn.adapterframework.core.ITransactionalStorage; import nl.nn.adapterframework.core.IbisExceptionListener; import nl.nn.adapterframework.core.ListenerException; import nl.nn.adapterframework.core.PipeLineResult; import nl.nn.adapterframework.core.PipeLineSession; import nl.nn.adapterframework.core.SenderException; import nl.nn.adapterframework.monitoring.EventTypeEnum; import nl.nn.adapterframework.monitoring.IMonitorAdapter; import nl.nn.adapterframework.monitoring.MonitorAdapterFactory; import nl.nn.adapterframework.monitoring.SeverityEnum; import nl.nn.adapterframework.util.ClassUtils; import nl.nn.adapterframework.util.Counter; import nl.nn.adapterframework.util.DomBuilderException; import nl.nn.adapterframework.util.JtaUtil; import nl.nn.adapterframework.util.LogUtil; import nl.nn.adapterframework.util.Misc; import nl.nn.adapterframework.util.RunStateEnquiring; import nl.nn.adapterframework.util.RunStateEnum; import nl.nn.adapterframework.util.RunStateManager; import nl.nn.adapterframework.util.Semaphore; import 
nl.nn.adapterframework.util.StatisticsKeeper; import nl.nn.adapterframework.util.TracingEventNumbers; import nl.nn.adapterframework.util.TracingUtil; import nl.nn.adapterframework.util.XmlUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.SystemUtils; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import org.apache.log4j.Logger; /** * This {@link IReceiver Receiver} may be used as a base-class for developing receivers. * * <p><b>Configuration:</b> * <table border="1"> * <tr><th>attributes</th><th>description</th><th>default</th></tr> * <tr><td>classname</td><td>name of the class, mostly a class that extends this class</td><td>&nbsp;</td></tr> * <tr><td>{@link #setName(String) name}</td> <td>name of the receiver as known to the adapter</td><td>&nbsp;</td></tr> * <tr><td>{@link #setActive(boolean) active}</td> <td>when set <code>false</code> or set to something else as "true", (even set to the empty string), the receiver is not included in the configuration</td><td>true</td></tr> * <tr><td>{@link #setNumThreads(int) numThreads}</td><td>the number of threads that may execute a pipeline concurrently (only for pulling listeners)</td><td>1</td></tr> * <tr><td>{@link #setNumThreadsPolling(int) numThreadsPolling}</td><td>the number of threads that are activily polling for messages concurrently. '0' means 'limited only by <code>numThreads</code>' (only for pulling listeners)</td><td>1</td></tr> * <tr><td>{@link #setStyleSheetName(String) styleSheetName}</td> <td></td><td>&nbsp;</td></tr> * <tr><td>{@link #setOnError(String) onError}</td><td>one of 'continue' or 'close'. 
Controls the behaviour of the receiver when it encounters an error sending a reply or receives an exception asynchronously</td><td>continue</td></tr> * <tr><td>{@link #setReturnedSessionKeys(String) returnedSessionKeys}</td><td>comma separated list of keys of session variables that should be returned to caller, for correct results as well as for erronous results. (Only for listeners that support it, like JavaListener)</td><td>&nbsp;</td></tr> * <tr><td>{@link #setTransacted(boolean) transacted}</td><td>if set to <code>true</code>, messages will be received and processed under transaction control. If processing fails, messages will be sent to the error-sender. (see below)</code></td><td><code>false</code></td></tr> * <tr><td>{@link #setMaxRetries(int) maxRetries}</td><td>The number of times a pulling listening attempt is retried after an exception is caught</td><td>3</td></tr> * <tr><td>{@link #setPollInterval(int) pollInterval}</td><td>The number of seconds waited after an unsuccesful poll attempt before another poll attempt is made.</td><td>0</td></tr> * <tr><td>{@link #setIbis42compatibility(boolean) ibis42compatibility}</td><td>if set to <code>true</code>, the result of a failed processing of a message is a formatted errormessage. 
Otherwise a listener specific error handling is performed</code></td><td><code>false</code></td></tr> * <tr><td>{@link #setBeforeEvent(int) beforeEvent}</td> <td>METT eventnumber, fired just before a message is processed by this Receiver</td><td>-1 (disabled)</td></tr> * <tr><td>{@link #setAfterEvent(int) afterEvent}</td> <td>METT eventnumber, fired just after message processing by this Receiver is finished</td><td>-1 (disabled)</td></tr> * <tr><td>{@link #setExceptionEvent(int) exceptionEvent}</td><td>METT eventnumber, fired when message processing by this Receiver resulted in an exception</td><td>-1 (disabled)</td></tr> * </table> * </p> * <p> * <table border="1"> * <tr><th>nested elements (accessible in descender-classes)</th><th>description</th></tr> * <tr><td>{@link nl.nn.adapterframework.core.IPullingListener listener}</td><td>the listener used to receive messages from</td></tr> * <tr><td>{@link nl.nn.adapterframework.core.ITransactionalStorage inProcessStorage}</td><td>mandatory for {@link #setTransacted(boolean) transacted} receivers: place to store messages during processing.</td></tr> * <tr><td>{@link nl.nn.adapterframework.core.ITransactionalStorage errorStorage}</td><td>optional for {@link #setTransacted(boolean) transacted} receivers: place to store messages if message processing has gone wrong. If no errorStorage is specified, the inProcessStorage is used for errorStorage</td></tr> * <tr><td>{@link nl.nn.adapterframework.core.ISender errorSender}</td><td>optional for {@link #setTransacted(boolean) transacted} receviers: * will be called to store messages that failed to process. If no errorSender is specified, failed messages will remain in inProcessStorage</td></tr> * </table> * </p> * <p><b>Transaction control</b><br> * If {@link #setTransacted(boolean) transacted} is set to <code>true</code>, messages will be received and processed under transaction control. 
 * This means that after a message has been read and processed and the transaction has ended, one of the following apply:
 * <table border="1">
 * <tr><th>situation</th><th>input listener</th><th>Pipeline</th><th>inProcess storage</th><th>errorSender</th><th>summary of effect</th></tr>
 * <tr><td>successful</td><td>message read and committed</td><td>message processed</td><td>unchanged</td><td>unchanged</td><td>message processed</td></tr>
 * <tr><td>processing failed</td><td>message read and committed</td><td>message processing failed and rolled back</td><td>unchanged</td><td>message sent</td><td>message only transferred from listener to errorSender</td></tr>
 * <tr><td>listening failed</td><td>unchanged: listening rolled back</td><td>no processing performed</td><td>unchanged</td><td>unchanged</td><td>no changes, input message remains on input available for listener</td></tr>
 * <tr><td>transfer to inprocess storage failed</td><td>unchanged: listening rolled back</td><td>no processing performed</td><td>unchanged</td><td>unchanged</td><td>no changes, input message remains on input available for listener</td></tr>
 * <tr><td>transfer to errorSender failed</td><td>message read and committed</td><td>message processing failed and rolled back</td><td>message present</td><td>unchanged</td><td>message only transferred from listener to inProcess storage</td></tr>
 * </table>
 * If the application or the server crashes in the middle of one or more transactions, these transactions
 * will be recovered and rolled back after the server/application is restarted.
 * Then always exactly one of
 * the following applies for any message touched at any time by Ibis by a transacted receiver:
 * <ul>
 * <li>It is processed correctly by the pipeline and removed from the input-queue,
 *     not present in inProcess storage and not sent to the errorSender</li>
 * <li>It is not processed at all by the pipeline, or processing by the pipeline has been rolled back;
 *     the message is removed from the input queue and either (one of) still in inProcess storage <i>or</i> sent to the errorSender</li>
 * </ul>
 * </p>
 *
 * <p><b>commit or rollback</b><br>
 * If {@link #setTransacted(boolean) transacted} is set to <code>true</code>, messages will be either committed or rolled back.
 * All message-processing transactions are committed, unless one or more of the following apply:
 * <ul>
 * <li>The PipeLine is transacted and the exitState of the pipeline is not equal to {@link nl.nn.adapterframework.core.PipeLine#setCommitOnState(String) commitOnState} (that defaults to 'success')</li>
 * <li>a PipeRunException or another runtime-exception has been thrown by any Pipe or by the PipeLine</li>
 * <li>the setRollBackOnly() method has been called on the userTransaction (not accessible by Pipes)</li>
 * </ul>
 * </p>
 *
 * @version Id
 * @author Gerrit van Brakel
 * @since 4.2
 */
public class ReceiverBaseClassic implements IReceiver, IReceiverStatistics, Runnable, IMessageHandler, IbisExceptionListener, HasSender, TracingEventNumbers {
	public static final String version="$RCSfile: ReceiverBaseClassic.java,v $ $Revision: 1.2 $ $Date: 2007-10-23 12:53:20 $";
	protected Logger log = LogUtil.getLogger(this);

	// monitor-event messages, fired when the receiver shuts down or suspends operation
	public static final String RCV_SHUTDOWN_MONITOR_EVENT_MSG ="RCVCLOSED Ibis Receiver shut down";
	public static final String RCV_SUSPENDED_MONITOR_EVENT_MSG="RCVSUSPND Ibis Receiver operation suspended due to exceptions";
	// retry-interval (seconds) above which a 'suspended' monitor event is fired (see run())
	public static final int RCV_SUSPENSION_MESSAGE_THRESHOLD=600;

	// result returned to the caller when a message arrives while the receiver is stopped
	private String returnIfStopped="";
	// optional resource whose content replaces returnIfStopped (loaded in configure())
	private String fileNameIfStopped = null;
	// optional from/to replacement applied to returnIfStopped (see configure())
	private String replaceFrom = null;
	private String replaceTo = null;
	// optional stylesheet applied to returnIfStopped (see configure())
	private String styleSheetName = null;
	// comma separated list of keys of session variables that are returned to the caller
	private String returnedSessionKeys=null;

	// values for the 'onError' attribute
	public static final String ONERROR_CONTINUE = "continue";
	public static final String ONERROR_CLOSE = "close";

	private boolean active=true;
	private String name;
	private String onError = ONERROR_CONTINUE;
	protected RunStateManager runState = new RunStateManager();
	private boolean ibis42compatibility=false;

	// the number of threads that may execute a pipeline concurrently (only for pulling listeners)
	private int numThreads = 1;
	// the number of threads that are actively polling for messages (concurrently, only for pulling listeners)
	private int numThreadsPolling = 1;
	// seconds to wait between unsuccessful poll attempts (see run())
	private int pollInterval=0;

	private Counter threadsProcessing = new Counter(0);
	private Counter threadsRunning = new Counter(0);
	// limits the number of concurrently polling threads; null when unlimited (see configure())
	private Semaphore pollToken = null;

	// number of messages received
	private Counter numReceived = new Counter(0);
	// per-thread-count statistics keepers (indexed by number of concurrent threads)
	private List processStatistics = new ArrayList();
	private List idleStatistics = new ArrayList();

	// the adapter that handles the messages and initiates this listener
	private IAdapter adapter;

	private IListener listener;
	private ITransactionalStorage inProcessStorage=null;
	private ISender errorSender=null;
	private ITransactionalStorage errorStorage=null;
	private ISender sender=null; // answer-sender
	private ITransactionalStorage messageLog=null;

	// maximum number of retries when storing a message in inProcessStorage fails
	private int maxRetries=3;
	private Counter retryCount = new Counter(0);

	private boolean transacted=false;

	// METT event numbers (-1 = disabled)
	private int beforeEvent=-1;
	private int afterEvent=-1;
	private int exceptionEvent=-1;

	// current back-off interval in seconds, doubled on each listen failure (capped at 3600 in run())
	int retryInterval=1;

	IMonitorAdapter monitorAdapter=null;

	/**
	 * Returns a prefix identifying this receiver, for use in log messages.
	 */
	protected String getLogPrefix() {
		return "Receiver ["+getName()+"] ";
	}

	/**
	 * Sends an informational message to the log and to the messagekeeper of the adapter.
	 */
	protected void info(String msg) {
		log.info(msg);
		if (adapter != null)
			adapter.getMessageKeeper().add(msg);
	}

	/**
	 * Sends a warning to the log and to the messagekeeper of the adapter.
	 */
	protected void warn(String msg) {
		log.warn(msg);
		if (adapter != null)
			adapter.getMessageKeeper().add("WARNING: " + msg);
	}

	/**
	 * Sends an error to the log and to the messagekeeper of the adapter.
	 */
	protected void error(String msg, Throwable t) {
		log.error(msg, t);
		if (adapter != null)
			adapter.getMessageKeeper().add("ERROR: " + msg+": "+t.getMessage());
	}

	/**
	 * Opens the sender, errorSender, inProcessStorage, messageLog and listener,
	 * and (for pulling listeners) starts the worker threads.
	 * On exit resources must be in a state that runstate is or can be set to 'STARTED'.
	 * NOTE(review): errorStorage is not opened here although closeAllResources() closes it —
	 * verify whether errorStorage is opened elsewhere or this is an omission.
	 *
	 * @throws ListenerException when any resource fails to open
	 */
	protected void openAllResources() throws ListenerException {
		// on exit resources must be in a state that runstate is or can be set to 'STARTED'
		try {
			if (getSender()!=null) {
				getSender().open();
			}
			if (getErrorSender()!=null) {
				getErrorSender().open();
			}
			if (getInProcessStorage()!=null) {
				getInProcessStorage().open();
			}
			if (getMessageLog()!=null) {
				getMessageLog().open();
			}
			if (isTransacted()) {
				// fail fast: verify a UserTransaction can be obtained before starting
				JtaUtil.getUserTransaction();
			}
		} catch (SenderException e) {
			throw new ListenerException(e);
		} catch (NamingException e) {
			throw new ListenerException(e);
		}
		getListener().open();
		if (getListener() instanceof IPullingListener){
			// start all threads
			if (getNumThreads() > 1) {
				for (int i = 1; i <= getNumThreads(); i++) {
					addThread("[" + i+"]");
				}
			} else {
				addThread(null);
			}
		}
	}

	/**
	 * Starts one worker thread running this receiver's run() loop;
	 * only meaningful for pulling listeners (no-op otherwise).
	 *
	 * @param nameSuffix suffix appended to the receiver name for the thread name, may be null
	 */
	private void addThread(String nameSuffix) {
		if (getListener() instanceof IPullingListener){
			Thread t = new Thread(this, getName() + (nameSuffix==null ? "" : nameSuffix));
			t.start();
		}
	}

	/**
	 * Signals the resources to stop; must (eventually) lead to a 'closeAllResources()'.
	 * Called while runstate is 'STOPPING'.
	 */
	protected void tellResourcesToStop() throws ListenerException {
		// must lead to a 'closeAllResources()'
		// runstate is 'STOPPING'
		// default just calls 'closeAllResources()'
		if (getListener() instanceof IPushingListener) {
			closeAllResources();
		}
		// IPullingListeners stop as their threads finish, as the runstate is set to stopping
	}

	/**
	 * Closes the listener, senders, storages and messageLog, and sets runstate to 'STOPPED'.
	 * On exit resources must be in a state that runstate can be set to 'STOPPED'.
	 */
	protected void closeAllResources() {
		// on exit resources must be in a state that runstate can be set to 'STOPPED'
		try {
			log.debug("closing Receiver ["+ getName()+ "]");
			getListener().close();
			if (getSender()!=null) {
				getSender().close();
			}
			if (getErrorSender()!=null) {
				getErrorSender().close();
			}
			if (getInProcessStorage()!=null) {
				getInProcessStorage().close();
			}
			// avoid double-close when errorStorage is the same object as inProcessStorage
			if (getErrorStorage()!=null && getErrorStorage()!=getInProcessStorage()) {
				getErrorStorage().close();
			}
			if (getMessageLog()!=null) {
				getMessageLog().close();
			}
			log.info("closed Receiver ["+ getName()+ "]");
		} catch (Exception e) {
			log.error( "Receiver [" + getName()+ "]: error closing connection", e);
		}
		runState.setRunState(RunStateEnum.STOPPED);
		info("Receiver [" + getName() + "] stopped");
	}

	/**
	 * Propagates this receiver's name to its unnamed nested components
	 * (listener, inProcessStorage, errorSender, errorStorage) for logging purposes.
	 */
	protected void propagateName() {
		IListener listener=getListener();
		if (listener!=null && StringUtils.isEmpty(listener.getName())) {
			listener.setName("listener of ["+getName()+"]");
		}
		ITransactionalStorage inProcess = getInProcessStorage();
		if (inProcess != null) {
			inProcess.setName("inProcessStorage of ["+getName()+"]");
		}
		ISender errorSender = getErrorSender();
		if (errorSender != null) {
			errorSender.setName("errorSender of ["+getName()+"]");
		}
		ITransactionalStorage errorStorage = getErrorStorage();
		if (errorStorage != null && errorStorage != inProcess) {
			errorStorage.setName("errorStorage of ["+getName()+"]");
		}
	}

	/**
	 * Configures the receiver and all its nested components (listener, senders, storages),
	 * validates the transacted setup, and prepares the returnIfStopped message
	 * (optionally loaded from a file, string-replaced and/or transformed with a stylesheet).
	 * On any ConfigurationException the runstate is set to ERROR and the exception rethrown.
	 *
	 * @throws ConfigurationException when the receiver or one of its components is misconfigured
	 */
	public void configure() throws ConfigurationException {
		try {
			propagateName();
			if (getListener()==null) {
				throw new ConfigurationException("Receiver ["+getName()+"] has no listener");
			}
			if (getListener() instanceof IPushingListener) {
				// a pushing listener calls back into this receiver for message handling and errors
				IPushingListener pl = (IPushingListener)getListener();
				pl.setHandler(this);
				pl.setExceptionListener(this);
			}
			if (getListener() instanceof IPullingListener) {
				// limit the number of concurrently polling threads, if so configured
				if (getNumThreadsPolling()>0 && getNumThreadsPolling()<getNumThreads()) {
					pollToken = new Semaphore(getNumThreadsPolling());
				}
			}
			getListener().configure();
			if (getListener() instanceof HasPhysicalDestination) {
				info("Receiver ["+getName()+"] has listener on "+((HasPhysicalDestination)getListener()).getPhysicalDestinationName());
			}
			if (getListener() instanceof HasSender) {
				// only informational
				ISender sender = ((HasSender)getListener()).getSender();
				if (sender instanceof HasPhysicalDestination) {
					info("Listener of receiver ["+getName()+"] has answer-sender on "+((HasPhysicalDestination)sender).getPhysicalDestinationName());
				}
			}
			ISender sender = getSender();
			if (sender!=null) {
				sender.configure();
				if (sender instanceof HasPhysicalDestination) {
					info("receiver ["+getName()+"] has answer-sender on "+((HasPhysicalDestination)sender).getPhysicalDestinationName());
				}
			}
			ITransactionalStorage inProcessStorage = getInProcessStorage();
			if (inProcessStorage!=null) {
				inProcessStorage.configure();
				if (inProcessStorage instanceof HasPhysicalDestination) {
					info("Receiver ["+getName()+"] has inProcessStorage in "+((HasPhysicalDestination)inProcessStorage).getPhysicalDestinationName());
				}
			}
			ISender errorSender = getErrorSender();
			if (errorSender!=null) {
				errorSender.configure();
				if (errorSender instanceof HasPhysicalDestination) {
					info("Receiver ["+getName()+"] has errorSender to "+((HasPhysicalDestination)errorSender).getPhysicalDestinationName());
				}
			}
			ITransactionalStorage errorStorage = getErrorStorage();
			if (errorStorage!=null && errorStorage != getInProcessStorage()) {
				errorStorage.configure();
				if (errorStorage instanceof HasPhysicalDestination) {
					info("Receiver ["+getName()+"] has errorStorage to "+((HasPhysicalDestination)errorStorage).getPhysicalDestinationName());
				}
			}
			ITransactionalStorage messageLog = getMessageLog();
			if (messageLog!=null) {
				messageLog.configure();
				if (messageLog instanceof HasPhysicalDestination) {
					info("Receiver ["+getName()+"] has messageLog in "+((HasPhysicalDestination)messageLog).getPhysicalDestinationName());
				}
			}
			// sanity checks for the transacted setup; stricter XA-checks are commented out below
			if (isTransacted()) {
//				if (!(getListener() instanceof IXAEnabled && ((IXAEnabled)getListener()).isTransacted())) {
//					warn("Receiver ["+getName()+"] sets transacted=true, but listener not. Transactional integrity is not guaranteed");
				if (inProcessStorage==null && getListener() instanceof IPullingListener) {
//					throw new ConfigurationException("Receiver ["+getName()+"] sets transacted=true, but has no inProcessStorage.");
					warn("Receiver ["+getName()+"] sets transacted=true, but has no inProcessStorage. Transactional integrity is not guaranteed");
//				} else {
//					if (!(inProcessStorage instanceof IXAEnabled && ((IXAEnabled)inProcessStorage).isTransacted())) {
//						warn("Receiver ["+getName()+"] sets transacted=true, but inProcessStorage not. Transactional integrity is not guaranteed");
				}
				if (errorSender==null && errorStorage==null) {
					warn("Receiver ["+getName()+"] sets transacted=true, but has no errorSender or errorStorage. Messages processed with errors will be lost");
				} else {
//					if (errorSender!=null && !(errorSender instanceof IXAEnabled && ((IXAEnabled)errorSender).isTransacted())) {
//						warn("Receiver ["+getName()+"] sets transacted=true, but errorSender is not. Transactional integrity is not guaranteed");
//					if (errorStorage!=null && !(errorStorage instanceof IXAEnabled && ((IXAEnabled)errorStorage).isTransacted())) {
//						warn("Receiver ["+getName()+"] sets transacted=true, but errorStorage is not. Transactional integrity is not guaranteed");
					if (errorStorage != null && errorStorage==inProcessStorage) {
						info("Receiver ["+getName()+"] has errorStorage in inProcessStorage, setting inProcessStorage's type to 'errorStorage'");
						errorStorage.setType("E");
					}
				}
			}
			// prepare the returnIfStopped message: file load, string replacement, stylesheet
			if (StringUtils.isNotEmpty(getFileNameIfStopped())) {
				try {
					setReturnIfStopped(Misc.resourceToString(ClassUtils.getResourceURL(this,fileNameIfStopped), SystemUtils.LINE_SEPARATOR));
				} catch (Throwable e) {
					throw new ConfigurationException("Receiver ["+getName()+"] got exception loading ["+getFileNameIfStopped()+"]", e);
				}
			}
			if (StringUtils.isNotEmpty(getReplaceFrom())) {
				setReturnIfStopped(Misc.replace(getReturnIfStopped(), getReplaceFrom(), getReplaceTo()));
			}
			if (StringUtils.isNotEmpty(styleSheetName)) {
				URL xsltSource = ClassUtils.getResourceURL(this, styleSheetName);
				if (xsltSource!=null) {
					try{
						String xsltResult = null;
						Transformer transformer = XmlUtils.createTransformer(xsltSource);
						xsltResult = XmlUtils.transformXml(transformer, getReturnIfStopped());
						setReturnIfStopped(xsltResult);
					} catch (IOException e) {
						throw new ConfigurationException("Receiver cannot retrieve ["+ styleSheetName + "], resource [" + xsltSource.toString() + "]", e);
					} catch (TransformerConfigurationException te) {
						throw new ConfigurationException("Receiver got error creating transformer from file [" + styleSheetName + "]", te);
					} catch (TransformerException te) {
						throw new ConfigurationException("Receiver got error transforming resource [" + xsltSource.toString() + "] from [" + styleSheetName + "]", te);
					} catch (DomBuilderException te) {
						throw new ConfigurationException("Receiver caught DomBuilderException", te);
					}
				}
			}
			monitorAdapter=MonitorAdapterFactory.getMonitorAdapter();
			if (adapter != null) {
				adapter.getMessageKeeper().add("Receiver ["+getName()+"] initialization complete");
			}
		} catch(ConfigurationException e){
			log.debug("Errors occured during configuration, setting runstate to ERROR");
			runState.setRunState(RunStateEnum.ERROR);
			throw e;
		}
	}

	/**
	 * Starts the receiver: opens all resources and sets runstate to STARTED.
	 * Ignored (with a messagekeeper notice) when the owning adapter is not in state STARTED;
	 * sets runstate to ERROR when opening resources fails.
	 */
	public void startRunning() {
		// if this receiver is on an adapter, the StartListening method
		// may only be executed when the adapter is started.
		if (adapter != null) {
			RunStateEnum adapterRunState = adapter.getRunState();
			if (!adapterRunState.equals(RunStateEnum.STARTED)) {
				log.warn("Receiver [" + getName() + "] on adapter [" + adapter.getName() + "] was tried to start, but the adapter is in state ["+adapterRunState+"]. Ignoring command.");
				adapter.getMessageKeeper().add("ignored start command on [" + getName() + "]; adapter is in state ["+adapterRunState+"]");
				return;
			}
		}
		try {
			String msg=("Receiver [" + getName() + "] starts listening.");
			log.info(msg);
			if (adapter != null) {
				adapter.getMessageKeeper().add(msg);
			}
			runState.setRunState(RunStateEnum.STARTING);
			openAllResources();
			runState.setRunState(RunStateEnum.STARTED);
		} catch (ListenerException e) {
			log.error("error occured while starting receiver [" + getName() + "]", e);
			if (null != adapter)
				adapter.getMessageKeeper().add("error occured while starting receiver [" + getName() + "]:" + e.getMessage());
			runState.setRunState(RunStateEnum.ERROR);
		}
	}

	/**
	 * Stops the receiver. When not in ERROR state, sets runstate to STOPPING and asks the
	 * resources to stop (pulling-listener threads then wind down by themselves);
	 * when in ERROR state, closes all resources immediately.
	 */
	public void stopRunning() {
		if (getRunState().equals(RunStateEnum.STOPPED)){
			return;
		}
		if (!getRunState().equals(RunStateEnum.ERROR)) {
			runState.setRunState(RunStateEnum.STOPPING);
			try {
				tellResourcesToStop();
			} catch (ListenerException e) {
				warn("exception stopping receiver: "+e.getMessage());
			}
		} else {
			closeAllResources();
			runState.setRunState(RunStateEnum.STOPPED);
		}
	}

	/**
	 * Starts the receiver.
This method is called by the startRunning method.<br/> * Basically: * <ul> * <li> it opens the threads</li> * <li>it calls the getRawMessage method to get a message<li> * <li> it performs the onMessage method, resulting a PipeLineResult</li> * <li>it calls the afterMessageProcessed() method of the listener<li> * <li> it optionally sends the result using the sender</li> * </ul> */ public void run() { if (threadsRunning.increase()==1) { fireMonitorEvent(EventTypeEnum.CLEARING,SeverityEnum.HARMLESS,RCV_SHUTDOWN_MONITOR_EVENT_MSG); } IPullingListener listener=null; Map threadContext=null; try { listener = (IPullingListener)getListener(); threadContext = listener.openThread(); if (threadContext==null) { threadContext = new HashMap(); } long startProcessingTimestamp; long finishProcessingTimestamp = System.currentTimeMillis(); runState.setRunState(RunStateEnum.STARTED); while (getRunState().equals(RunStateEnum.STARTED)) { boolean permissionToGo=true; if (pollToken!=null) { try { permissionToGo=false; pollToken.acquire(); permissionToGo=true; } catch (Exception e) { error("acquisition of polltoken interupted" ,e); stopRunning(); } } Object rawMessage=null; try { if (permissionToGo && getRunState().equals(RunStateEnum.STARTED)) { try { rawMessage = getRawMessage(threadContext); synchronized (listener) { if (retryInterval > RCV_SUSPENSION_MESSAGE_THRESHOLD) { fireMonitorEvent(EventTypeEnum.CLEARING,SeverityEnum.HARMLESS,RCV_SUSPENDED_MONITOR_EVENT_MSG); retryInterval=1; } } } catch (Exception e) { if (ONERROR_CONTINUE.equalsIgnoreCase(getOnError())) { long currentInterval; synchronized (listener) { currentInterval=retryInterval; retryInterval=retryInterval*2; if (retryInterval>3600) { retryInterval=3600; } } error("caught Exception retrieving message, will continue retrieving messages in ["+currentInterval+"] seconds", e); if (currentInterval*2 > RCV_SUSPENSION_MESSAGE_THRESHOLD) { 
fireMonitorEvent(EventTypeEnum.TECHNICAL,SeverityEnum.WARNING,RCV_SUSPENDED_MONITOR_EVENT_MSG); } while (getRunState().equals(RunStateEnum.STARTED) && currentInterval try { Thread.sleep(1000); } catch (Exception e2) { error("sleep interupted" ,e2); stopRunning(); } } } else { error("stopping receiver after exception in retrieving message",e); stopRunning(); } } } } finally { if (pollToken!=null) { pollToken.release(); } } if (rawMessage!=null) { try { TracingUtil.beforeEvent(this); startProcessingTimestamp = System.currentTimeMillis(); try { processRawMessage(listener,rawMessage,threadContext,finishProcessingTimestamp-startProcessingTimestamp); } catch (Exception e) { TracingUtil.exceptionEvent(this); if (ONERROR_CONTINUE.equalsIgnoreCase(getOnError())) { error("caught Exception processing message, will continue processing next message", e); } else { error("stopping receiver after exception in processing message",e); stopRunning(); } } finishProcessingTimestamp = System.currentTimeMillis(); } finally { TracingUtil.afterEvent(this); } } else { if (getPollInterval()>0) { for (int i=0; i<getPollInterval() && getRunState().equals(RunStateEnum.STARTED); i++) { Thread.sleep(1000); } } } } } catch (Throwable e) { error("error occured in receiver [" + getName() + "]",e); } finally { if (listener!=null) { try { listener.closeThread(threadContext); } catch (ListenerException e) { error("Exception closing listener of Receiver ["+getName()+"]", e); } } long stillRunning=threadsRunning.decrease(); if (stillRunning>0) { log.info("a thread of Receiver ["+getName()+"] exited, ["+stillRunning+"] are still running"); fireMonitorEvent(EventTypeEnum.TECHNICAL,SeverityEnum.WARNING,"a thread shut down, ["+stillRunning+"] are still running"); return; } fireMonitorEvent(EventTypeEnum.TECHNICAL,SeverityEnum.CRITICAL,RCV_SHUTDOWN_MONITOR_EVENT_MSG); log.info("the last thread of Receiver ["+getName()+"] exited, cleaning up"); closeAllResources(); } } protected void 
startProcessingMessage(long waitingDuration) { synchronized (threadsProcessing) { int threadCount = (int) threadsProcessing.getValue(); if (waitingDuration>=0) { getIdleStatistics(threadCount).addValue(waitingDuration); } threadsProcessing.increase(); } log.debug("receiver ["+getName()+"] starts processing message"); } protected void finishProcessingMessage(long processingDuration) { synchronized (threadsProcessing) { int threadCount = (int) threadsProcessing.decrease(); getProcessStatistics(threadCount).addValue(processingDuration); } log.debug("receiver ["+getName()+"] finishes processing message"); } public Object getRawMessage(Map threadContext) throws ListenerException { IPullingListener listener = (IPullingListener)getListener(); if (isTransacted()) { Object rawMessage; UserTransaction utx = null; try { utx = JtaUtil.getUserTransaction(); utx.begin(); } catch (Exception e) { throw new ListenerException("["+getName()+"] Exception preparing to read input message", e); // no need to send message on errorSender, did not even try to read message } try { rawMessage = listener.getRawMessage(threadContext); if (rawMessage==null) { try { utx.rollback(); } catch (Exception e) { log.warn("["+getName()+"] Exception while rolling back transaction after timeout on retrieving message", e); } return null; } } catch (Exception e) { try { utx.rollback(); } catch (Exception rbe) { log.error("["+getName()+"] Exception while rolling back transaction after catching exception", rbe); } throw new ListenerException("["+getName()+"] Exception retrieving message under transaction control",e); // no need to send message on errorSender, message will remain on input channel due to rollback } return rawMessage; } else { return listener.getRawMessage(threadContext); } } /* * Store message from inProcessStore (if present), then commit reception of message. * State upon return: * OK, result non null: transaction committed, message inProcessStore. 
	 * OK, result null: transaction committed, no inProcessStore configured or message not serializable.
	 * Exception: message Rolled Back to input.
	 */
	private String prepareToProcessMessageTransacted1(UserTransaction utx, String originalMessageId, String correlationId, Object rawMessage) throws ListenerException {
		log.info("receiver ["+getName()+"] moves message with originalMessageId ["+originalMessageId+"] correlationId ["+correlationId+"] to inProcess");
		String newMessageId=null;
		try {
			if (getInProcessStorage() == null) {
				log.warn(getLogPrefix()+"has no inProcessStorage, cannot store message before processing. Will commit read of message, and start a new transaction");
			} else {
				if (rawMessage instanceof Serializable) {
					//TODO: determine the received date more precisely
					newMessageId = getInProcessStorage().storeMessage(originalMessageId,correlationId,new Date(),"in process",(Serializable)rawMessage);
					log.debug("["+getName()+"] committing transfer of message with messageId ["+originalMessageId+"] to inProcessStorage, newMessageId ["+newMessageId+"]");
				} else {
					// non-serializable messages (e.g. poison messages) cannot be parked; only their reception is committed
					log.warn("["+getName()+"] received message of type ["+rawMessage.getClass().getName()+"] is not serializable, cannot be stored in inProcessStorage; will only commit its reception");
				}
			}
			utx.commit();
			retryCount.clear();
			return newMessageId;
		} catch (Throwable t) {
			//log.error("["+getName()+"] Exception transfering message with messageId ["+originalMessageId+"] to inProcessStorage, original message: ["+rawMessage+"]", t);
			try {
				utx.rollback();
			} catch (Exception rbe) {
				log.error("["+getName()+"] Exception while rolling back transaction for message with messageId ["+originalMessageId+"] after catching exception (that will be thrown after this line has been logged)", rbe);
			}
			// after maxRetries failed attempts to park the message, stop the receiver to avoid a tight failure loop
			long retries=retryCount.increase();
			if (retries>getMaxRetries()) {
				log.warn("["+getName()+"] stopping receiver as message cannot be stored in inProcessStorage after catching exception (that will be thrown after this line has been logged), tried ["+retries+"] times");
				stopRunning();
			} else {
				log.info("["+getName()+"] waiting for message, rolled back after catching exception (that will be thrown after this line has been logged) to reappear, retryCount=["+retries+"]");
			}
			// no need to send message on errorSender, message will remain on input channel due to rollback
			throw new ListenerException("["+getName()+"] Exception retrieving/storing message with messageId ["+originalMessageId+"] under transaction control", t);
		}
	}

	/*
	 * Start new transaction, remove message from inProcessStore (if present)
	 * State upon return:
	 * new transaction started, ready to process message, message deleted from inProcess as part of transaction.
	 * Exception: idem, but exception occurred. Message needs to be transferred to errorStorage;
	 */
	private void prepareToProcessMessageTransacted2(String newMessageId, UserTransaction utx, String originalMessageId, String correlationId) throws ListenerException {
		log.info("receiver ["+getName()+"] starts new transaction, and removes message with originalMessageId ["+originalMessageId+"] correlationId ["+correlationId+"] from inProcess");
		try {
			utx.begin();
			if (newMessageId==null) {
				log.info("receiver ["+getName()+"] cannot remove message with originalMessageId ["+originalMessageId+"] correlationId ["+correlationId+"] from inProcess, newMessageId=null, (message not serializable, or inProcessStorage does not exist)");
			} else {
				// deletion participates in the processing transaction: rolled back together with the pipeline
				log.debug("["+getName()+"] deleting message ["+newMessageId+"] correlationId ["+correlationId+"] from inProcessStorage as part of message processing transaction");
				getInProcessStorage().deleteMessage(newMessageId);
			}
		} catch (Exception e) {
			throw new ListenerException("["+getName()+"] Exception in preparation of transacted processing of message ["+newMessageId+"] correlationId ["+correlationId+"]",e);
		}
	}

	/*
	 * Ends the processing transaction: commits when the transaction is still active,
	 * otherwise (e.g. marked rollback-only by the pipeline) rolls back and moves the
	 * message from inProcessStorage to the errorSender/errorStorage.
	 * When committing fails, the message is moved to error as well, and the receiver
	 * is stopped when onError is 'close'.
	 */
	private void finishTransactedProcessingOfMessage(UserTransaction utx, String inProcessMessageId, String originalMessageId, String correlationId, String message, Date receivedDate, String comments, Object rawMessage) {
		try {
			if (utx.getStatus()==Status.STATUS_ACTIVE){
				try {
					log.info("receiver [" + getName() + "] got active transaction from pipeline, committing transaction ["+utx+"] for messageid ["+inProcessMessageId+"]");
					utx.commit();
				} catch (Exception e) {
					log.error("receiver [" + getName() + "] exception committing transaction", e);
					if (rawMessage instanceof Serializable) {
						moveInProcessToError(utx, inProcessMessageId, originalMessageId, correlationId, message, receivedDate, "exception committing transaction: "+ e.getMessage(), (Serializable)rawMessage);
					} else {
						log.error("receiver [" + getName() + "] message is not serializable, cannot store in errorStorage ["+rawMessage+"]");
					}
					if (ONERROR_CLOSE.equalsIgnoreCase(getOnError())) {
						log.info("receiver [" + getName() + "] closing after exception in committing transaction");
						stopRunning();
					}
				}
			} else {
				// transaction no longer active (e.g. rollback-only): roll back and move the message to error
				log.warn("receiver [" + getName() + "] got transaction with state ["+JtaUtil.displayTransactionStatus(utx)+"] from pipeline, rolling back transaction ["+utx+"] for messageid ["+inProcessMessageId+"]");
				try {
					utx.rollback();
				} catch (Exception e) {
					log.error("receiver [" + getName() + "] exception rolling back transaction", e);
				}
				if (rawMessage instanceof Serializable) {
					moveInProcessToError(utx, inProcessMessageId, originalMessageId, correlationId,message, receivedDate, comments, (Serializable)rawMessage);
				} else {
					log.error("receiver [" + getName() + "] message is not serializable, cannot store in errorStorage ["+rawMessage+"]");
				}
			}
		} catch (Throwable t) {
			log.error("["+getName()+"] Exception in finishTransactedProcessingOfMessage", t);
			try {
				utx.rollback();
			} catch (Exception rbe) {
				log.error("["+getName()+"] Exception while rolling back transaction after catching exception", rbe);
			}
		}
	}

	/*
	 * Moves a message from inProcessStorage to the errorSender and/or errorStorage,
	 * in a fresh transaction. When neither errorSender nor errorStorage is configured,
	 * the message remains in inProcessStorage. Failures are logged and rolled back;
	 * the message then stays in inProcessStorage.
	 */
	private void moveInProcessToError(UserTransaction utx, String inProcessMessageId, String originalMessageId, String correlationId, String message, Date receivedDate, String comments, Serializable rawMessage) {
		log.info("receiver ["+getName()+"] moves message id ["+originalMessageId+"] correlationId ["+correlationId+"] from inProcess ["+inProcessMessageId+"] to errorSender/errorStorage");
		ISender errorSender = getErrorSender();
		ITransactionalStorage errorStorage = getErrorStorage();
		if (errorSender==null && errorStorage==null) {
			log.warn("["+getName()+"] has no errorSender or errorStorage, message with id ["+inProcessMessageId+"] will remain in inProcessStorage");
			return;
		}
		try {
			utx.begin();
		} catch (Exception e) {
			log.error("["+getName()+"] Exception preparing to move input message to error sender", e);
			// no use trying again to send message on errorSender, will cause same exception!
			return;
		}
		try {
			// delete from inProcess, send/store to error, all in one transaction
			getInProcessStorage().deleteMessage(inProcessMessageId);
			if (errorSender!=null) {
				errorSender.sendMessage(correlationId, message);
			}
			if (errorStorage!=null) {
				errorStorage.storeMessage(originalMessageId, correlationId, receivedDate, comments, rawMessage);
			}
			utx.commit();
		} catch (Exception e) {
			log.error("["+getName()+"] Exception moving message with inprocess id ["+inProcessMessageId+"] correlationId ["+correlationId+"] to error sender, original message: ["+message+"]",e);
			try {
				utx.rollback();
			} catch (Exception rbe) {
				log.error("["+getName()+"] Exception while rolling back transaction for message with inprocess id ["+inProcessMessageId+"] correlationId ["+correlationId+"], original message: ["+message+"]", rbe);
			}
		}
	}

	/**
	 * Process the received message with {@link #processRequest(IListener, String, String)}.
* A messageId is generated that is unique and consists of the name of this listener and a GUID */ public String processRequest(IListener origin, String message) throws ListenerException { return processRequest(origin, null, message, null, -1); } public String processRequest(IListener origin, String correlationId, String message) throws ListenerException{ return processRequest(origin, correlationId, message, null, -1); } public String processRequest(IListener origin, String correlationId, String message, Map context) throws ListenerException { return processRequest(origin, correlationId, message, context, -1); } public String processRequest(IListener origin, String correlationId, String message, Map context, long waitingTime) throws ListenerException { if (getRunState() == RunStateEnum.STOPPED || getRunState() == RunStateEnum.STOPPING) return getReturnIfStopped(); UserTransaction utx = null; if (isTransacted()) { try { utx = JtaUtil.getUserTransaction(); if (!JtaUtil.inTransaction()) { log.debug("Receiver ["+getName()+"] starts transaction as no one is yet present"); utx.begin(); } } catch (Exception e) { throw new ListenerException("["+getName()+"] Exception obtaining usertransaction", e); } } return processMessageInAdapter(utx, origin, message, message, null, correlationId, context, waitingTime); } public void processRawMessage(IListener origin, Object message) throws ListenerException { processRawMessage(origin, message, null, -1); } public void processRawMessage(IListener origin, Object message, Map context) throws ListenerException { processRawMessage(origin, message, context, -1); } /** * All messages that for this receiver are pumped down to this method, so it actually * calls the {@link nl.nn.adapterframework.core.Adapter adapter} to process the message.<br/> * Assumes that a transation has been started where necessary */ public void processRawMessage(IListener origin, Object rawMessage, Map threadContext, long waitingDuration) throws ListenerException { 
UserTransaction utx = null; if (isTransacted()) { try { utx = JtaUtil.getUserTransaction(); if (!JtaUtil.inTransaction()) { log.debug("Receiver ["+getName()+"] starts transaction as no one is yet present"); utx.begin(); } } catch (Exception e) { throw new ListenerException("["+getName()+"] Exception obtaining usertransaction", e); } if (rawMessage==null) { try { utx.rollback(); } catch (Exception e) { log.warn("["+getName()+"] Exception while rolling back transaction after timeout on retrieving message", e); } return; } } if (rawMessage==null) { return; } if (threadContext==null) { threadContext = new HashMap(); } String message = origin.getStringFromRawMessage(rawMessage, threadContext); String correlationId = origin.getIdFromRawMessage(rawMessage, threadContext); String messageId = (String)threadContext.get("id"); processMessageInAdapter(utx, origin, rawMessage, message, messageId, correlationId, threadContext, waitingDuration); } /* * assumes message is read, and when transacted, transation is still open to be able to store it in InProcessStore */ private String processMessageInAdapter(UserTransaction utx, IListener origin, Object rawMessage, String message, String messageId, String correlationId, Map threadContext, long waitingDuration) throws ListenerException { String result=null; PipeLineResult pipeLineResult=null; long startProcessingTimestamp = System.currentTimeMillis(); log.debug(getLogPrefix()+"received message with messageId ["+messageId+"] correlationId ["+correlationId+"]"); // update processing statistics // count in processing statistics includes messages that are rolled back to input startProcessingMessage(waitingDuration); try { if (StringUtils.isEmpty(correlationId)) { correlationId=getName()+"-"+Misc.createSimpleUUID(); if (log.isDebugEnabled()) log.debug(getLogPrefix()+"generated correlationId ["+correlationId+"]"); } if (StringUtils.isEmpty(messageId)) { messageId = correlationId; } String inProcessMessageId=null; if (isTransacted()) { // 
store message in inProcessStorage, and commit transaction. inProcessMessageId = prepareToProcessMessageTransacted1(utx,messageId,correlationId,rawMessage); // If an Exception is thrown, the message is already rolled back to the input. } // from now on the message is really received, it cannot be rolled back to the input anymore numReceived.increase(); String errorMessage=""; try { if (isTransacted()) { // start new transaction, and remove message from inProcessStorage prepareToProcessMessageTransacted2(inProcessMessageId,utx,messageId,correlationId); } PipeLineSession pipelineSession = new PipeLineSession(); if (threadContext!=null) { pipelineSession.putAll(threadContext); if (log.isDebugEnabled()) { String contextDump = "PipeLineSession variables for messageId ["+messageId+"] correlationId ["+correlationId+"]:"; for (Iterator it=pipelineSession.keySet().iterator(); it.hasNext();) { String key = (String)it.next(); Object value = pipelineSession.get(key); if (key.equals("messageText")) { value = "(... see elsewhere ...)"; } contextDump+=" "+key+"=["+(value==null? 
"null": value.toString())+"]"; } log.debug(contextDump); } } try { if (isIbis42compatibility()) { pipeLineResult = adapter.processMessage(correlationId, message, pipelineSession); result=pipeLineResult.getResult(); errorMessage = result; } else { try { if (getMessageLog()!=null) { getMessageLog().storeMessage(messageId, correlationId, new Date(),"log",message); } pipeLineResult = adapter.processMessageWithExceptions(correlationId, message, pipelineSession); result=pipeLineResult.getResult(); errorMessage = "exitState ["+pipeLineResult.getState()+"], result ["+result+"]"; } catch (Throwable t) { if (isTransacted()) { try { JtaUtil.getUserTransaction().setRollbackOnly(); } catch (Throwable t2) { log.error("caught exception trying to invalidate transaction", t); } } ListenerException l; if (t instanceof ListenerException) { l = (ListenerException)t; } else { l = new ListenerException(t); } // disabled the following logging, as it is already done in the Pipeline //String msg = "receiver [" + getName() + "] caught exception in message processing"; //error(msg, l); errorMessage = l.getMessage(); throw l; } } } finally { if (log.isDebugEnabled() && StringUtils.isNotEmpty(getReturnedSessionKeys())) { log.debug("returning values of session keys ["+getReturnedSessionKeys()+"]"); } Misc.copyContext(getReturnedSessionKeys(),pipelineSession,threadContext); } try { if (getSender()!=null) { getSender().sendMessage(correlationId,result); } } catch (Exception e) { String msg = "receiver [" + getName() + "] caught exception in message post processing"; error(msg, e); errorMessage = msg+": "+e.getMessage(); if (ONERROR_CLOSE.equalsIgnoreCase(getOnError())) { log.info("receiver [" + getName() + "] closing after exception in post processing"); stopRunning(); } } } finally { if (isTransacted()) { finishTransactedProcessingOfMessage(utx,inProcessMessageId,messageId,correlationId,message, new Date(startProcessingTimestamp), errorMessage, (Serializable)rawMessage); } } } finally { try { 
origin.afterMessageProcessed(pipeLineResult,rawMessage, threadContext); } finally { long finishProcessingTimestamp = System.currentTimeMillis(); finishProcessingMessage(finishProcessingTimestamp-startProcessingTimestamp); } } log.debug(getLogPrefix()+"returning result ["+result+"] for message ["+messageId+"] correlationId ["+correlationId+"]"); return result; } public void exceptionThrown(INamedObject object, Throwable t) { String msg = getLogPrefix()+"received exception ["+t.getClass().getName()+"] from ["+object.getName()+"]"; if (ONERROR_CONTINUE.equalsIgnoreCase(getOnError())) { warn(msg+", will continue processing messages when they arrive: "+ t.getMessage()); } else { error(msg+", stopping receiver", t); stopRunning(); } } public void setRunState(RunStateEnum state) { runState.setRunState(state); } protected void fireMonitorEvent(EventTypeEnum eventType, SeverityEnum severity, String message) { if (monitorAdapter!=null) { monitorAdapter.fireEvent(getName(), eventType, severity, message); } } public void waitForRunState(RunStateEnum requestedRunState) throws InterruptedException { runState.waitForRunState(requestedRunState); } public boolean waitForRunState(RunStateEnum requestedRunState, long timeout) throws InterruptedException { return runState.waitForRunState(requestedRunState, timeout); } /** * Get the {@link RunStateEnum runstate} of this receiver. 
*/ public RunStateEnum getRunState() { return runState.getRunState(); } public boolean isInRunState(RunStateEnum someRunState) { return runState.isInState(someRunState); } protected synchronized StatisticsKeeper getProcessStatistics(int threadsProcessing) { StatisticsKeeper result; try { result = ((StatisticsKeeper)processStatistics.get(threadsProcessing)); } catch (IndexOutOfBoundsException e) { result = null; } if (result==null) { while (processStatistics.size()<threadsProcessing+1){ result = new StatisticsKeeper((processStatistics.size()+1)+" threads processing"); processStatistics.add(processStatistics.size(), result); } } return (StatisticsKeeper) processStatistics.get(threadsProcessing); } protected synchronized StatisticsKeeper getIdleStatistics(int threadsProcessing) { StatisticsKeeper result; try { result = ((StatisticsKeeper)idleStatistics.get(threadsProcessing)); } catch (IndexOutOfBoundsException e) { result = null; } if (result==null) { while (idleStatistics.size()<threadsProcessing+1){ result = new StatisticsKeeper((idleStatistics.size())+" threads processing"); idleStatistics.add(idleStatistics.size(), result); } } return (StatisticsKeeper) idleStatistics.get(threadsProcessing); } /** * Returns an iterator over the process-statistics * @return iterator */ public Iterator getProcessStatisticsIterator() { return processStatistics.iterator(); } /** * Returns an iterator over the idle-statistics * @return iterator */ public Iterator getIdleStatisticsIterator() { return idleStatistics.iterator(); } public ISender getSender() { return sender; } protected void setSender(ISender sender) { this.sender = sender; } public void setAdapter(IAdapter adapter) { this.adapter = adapter; } /** * Returns the listener * @return IPullingListener */ public IListener getListener() { return listener; protected void setListener(IListener newListener) { listener = newListener; if (listener instanceof INamedObject) { if (StringUtils.isEmpty(((INamedObject)listener).getName())) 
{ ((INamedObject) listener).setName("listener of ["+getName()+"]"); } } if (listener instanceof RunStateEnquiring) { ((RunStateEnquiring) listener).SetRunStateEnquirer(runState); } } /** * Returns the inProcessStorage. * @return ITransactionalStorage */ public ITransactionalStorage getInProcessStorage() { return inProcessStorage; } /** * Sets the inProcessStorage. * @param inProcessStorage The inProcessStorage to set */ protected void setInProcessStorage(ITransactionalStorage inProcessStorage) { if (inProcessStorage.isActive()) { this.inProcessStorage = inProcessStorage; inProcessStorage.setName("inProcessStorage of ["+getName()+"]"); if (StringUtils.isEmpty(inProcessStorage.getSlotId())) { inProcessStorage.setSlotId(getName()); } inProcessStorage.setType("I"); } } /** * Returns the errorSender. * @return ISender */ public ISender getErrorSender() { return errorSender; } public ITransactionalStorage getErrorStorage() { if (errorStorage!=null) { return errorStorage; } if (errorSender==null) { return inProcessStorage; } return null; } /** * Sets the errorSender. * @param errorSender The errorSender to set */ protected void setErrorSender(ISender errorSender) { this.errorSender = errorSender; errorSender.setName("errorSender of ["+getName()+"]"); } protected void setErrorStorage(ITransactionalStorage errorStorage) { if (errorStorage.isActive()) { this.errorStorage = errorStorage; errorStorage.setName("errorStorage of ["+getName()+"]"); if (StringUtils.isEmpty(errorStorage.getSlotId())) { errorStorage.setSlotId(getName()); } errorStorage.setType("E"); } } /** * Sets the messageLog. 
*/ protected void setMessageLog(ITransactionalStorage messageLog) { if (messageLog.isActive()) { this.messageLog = messageLog; messageLog.setName("messageLog of ["+getName()+"]"); if (StringUtils.isEmpty(messageLog.getSlotId())) { messageLog.setSlotId(getName()); } messageLog.setType("L"); } } public ITransactionalStorage getMessageLog() { return messageLog; } /** * Get the number of messages received. * @return long */ public long getMessagesReceived() { return numReceived.getValue(); } /** * Sets the name of the Receiver. * If the listener implements the {@link nl.nn.adapterframework.core.INamedObject name} interface and <code>getName()</code> * of the listener is empty, the name of this object is given to the listener. */ public void setName(String newName) { name = newName; propagateName(); } public String getName() { return name; } /** * Controls the use of XA-transactions. */ public void setTransacted(boolean transacted) { this.transacted = transacted; } public boolean isTransacted() { return transacted; } public void setOnError(String newOnError) { onError = newOnError; } public String getOnError() { return onError; } public boolean isOnErrorStop() { return ONERROR_CLOSE.equalsIgnoreCase(getOnError()); } protected IAdapter getAdapter() { return adapter; } /** * Returns a toString of this class by introspection and the toString() value of its listener. * * @return Description of the Return Value */ public String toString() { String result = super.toString(); ToStringBuilder ts=new ToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE); ts.append("name", getName() ); result += ts.toString(); result+=" listener ["+(listener==null ? "-none-" : listener.toString())+"]"; return result; } /** * Return this value when this receiver is stopped. */ public String getReturnIfStopped() { return returnIfStopped; } /** * Return this value when this receiver is stopped. 
*/ public void setReturnIfStopped (String returnIfStopped){ this.returnIfStopped=returnIfStopped; } /** * The number of threads that this receiver is configured to work with. */ public void setNumThreads(int newNumThreads) { numThreads = newNumThreads; } public int getNumThreads() { return numThreads; } public String formatException(String extrainfo, String correlationId, String message, Throwable t) { return getAdapter().formatErrorMessage(extrainfo,t,message,correlationId,null,0); } public int getNumThreadsPolling() { return numThreadsPolling; } public void setNumThreadsPolling(int i) { numThreadsPolling = i; } public boolean isIbis42compatibility() { return ibis42compatibility; } /** * * @param b * @deprecated Please consider removing this option. */ public void setIbis42compatibility(boolean b) { ibis42compatibility = b; if (ibis42compatibility) { log.warn(getLogPrefix()+" set ibis42compatibility true. This is a deprecated option. Please consider removing this setting by anticipating the use of listener specific error handling"); } } public void setFileNameIfStopped(String fileNameIfStopped) { this.fileNameIfStopped = fileNameIfStopped; } public String getFileNameIfStopped() { return fileNameIfStopped; } public void setReplaceFrom (String replaceFrom){ this.replaceFrom=replaceFrom; } public String getReplaceFrom() { return replaceFrom; } public void setReplaceTo (String replaceTo){ this.replaceTo=replaceTo; } public String getReplaceTo() { return replaceTo; } // event numbers for tracing public int getAfterEvent() { return afterEvent; } public int getBeforeEvent() { return beforeEvent; } public int getExceptionEvent() { return exceptionEvent; } public void setAfterEvent(int i) { afterEvent = i; } public void setBeforeEvent(int i) { beforeEvent = i; } public void setExceptionEvent(int i) { exceptionEvent = i; } public int getMaxRetries() { return maxRetries; } public void setMaxRetries(int i) { maxRetries = i; } public String getStyleSheetName() { return 
styleSheetName; } public void setStyleSheetName (String styleSheetName){ this.styleSheetName=styleSheetName; } public void setActive(boolean b) { active = b; } public boolean isActive() { return active; } public void setReturnedSessionKeys(String string) { returnedSessionKeys = string; } public String getReturnedSessionKeys() { return returnedSessionKeys; } public void setPollInterval(int i) { pollInterval = i; } public int getPollInterval() { return pollInterval; } }
package com.celements.validation;

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.same;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.junit.Before;
import org.junit.Test;
import org.xwiki.model.reference.DocumentReference;

import com.celements.common.test.AbstractBridgedComponentTestCase;
import com.xpn.xwiki.XWiki;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseProperty;
import com.xpn.xwiki.objects.StringProperty;
import com.xpn.xwiki.objects.classes.BaseClass;
import com.xpn.xwiki.objects.classes.PropertyClass;
import com.xpn.xwiki.web.Utils;

/**
 * Tests for {@link XClassRegexRule}: regex-based validation of request parameters
 * against the validationRegExp/validationMessage fields of the referenced XClass.
 */
public class XClassRegexRuleTest extends AbstractBridgedComponentTestCase {

  private XWikiContext context;
  private XWiki xwiki;
  private XClassRegexRule xClassRegexRule;

  // FIX: method renamed from setUp_DocFormCommandTest (copy-paste from another test class);
  // JUnit invokes it via @Before, so the rename is behavior-neutral.
  @Before
  public void setUp_XClassRegexRuleTest() throws Exception {
    context = getContext();
    xwiki = createMock(XWiki.class);
    context.setWiki(xwiki);
    xClassRegexRule = (XClassRegexRule) Utils.getComponent(IValidationRuleRole.class,
        "XClassRegexValidation");
  }

  /** An empty request map must validate without errors. */
  @Test
  public void testValidate_empty() throws XWikiException {
    Map<RequestParameter, String[]> requestMap = new HashMap<RequestParameter, String[]>();
    replayAll();
    Map<String, Set<String>> result = xClassRegexRule.validate(requestMap);
    verifyAll();
    assertTrue("Successful validation should result in an empty map",
        (result != null) && result.isEmpty());
  }

  /** Non-empty values match the /.+/ regex, so no validation errors are expected. */
  @Test
  public void testValidate_valid() throws XWikiException {
    BaseClass bclass = getBaseClass("testField");
    DocumentReference bclassDocRef1 = new DocumentReference(context.getDatabase(),
        "Test", "TestClass1");
    DocumentReference bclassDocRef2 = new DocumentReference(context.getDatabase(),
        "Test", "TestClass2");
    XWikiDocument doc1 = new XWikiDocument(bclassDocRef1);
    XWikiDocument doc2 = new XWikiDocument(bclassDocRef2);
    doc1.setXClass(bclass);
    doc2.setXClass(bclass);
    Map<RequestParameter, String[]> requestMap = new HashMap<RequestParameter, String[]>();
    String param1 = "Test.TestClass1_0_testField";
    String param2 = "Test.TestClass2_0_testField";
    requestMap.put(RequestParameter.create(param1), new String[]{"value1"});
    requestMap.put(RequestParameter.create(param2), new String[]{"value2", "asdf"});
    // param2 carries two values, hence the class document is fetched twice
    expect(xwiki.getDocument(eq(bclassDocRef1), same(context))).andReturn(doc1).once();
    expect(xwiki.getDocument(eq(bclassDocRef2), same(context))).andReturn(doc2).times(2);
    replayAll();
    Map<String, Set<String>> result = xClassRegexRule.validate(requestMap);
    verifyAll();
    assertTrue("Successful validation should result in an empty map",
        (result != null) && result.isEmpty());
  }

  /** Empty values fail the /.+/ regex; both parameters must report the validation message. */
  @Test
  public void testValidate_invalid() throws XWikiException {
    BaseClass bclass = getBaseClass("testField");
    DocumentReference bclassDocRef1 = new DocumentReference(context.getDatabase(),
        "Test", "TestClass1");
    DocumentReference bclassDocRef2 = new DocumentReference(context.getDatabase(),
        "Test", "TestClass2");
    XWikiDocument doc1 = new XWikiDocument(bclassDocRef1);
    XWikiDocument doc2 = new XWikiDocument(bclassDocRef2);
    doc1.setXClass(bclass);
    doc2.setXClass(bclass);
    Map<RequestParameter, String[]> requestMap = new HashMap<RequestParameter, String[]>();
    String param1 = "Test.TestClass1_0_testField";
    String param2 = "Test.TestClass2_0_testField";
    requestMap.put(RequestParameter.create(param1), new String[]{""});
    requestMap.put(RequestParameter.create(param2), new String[]{"", ""});
    expect(xwiki.getDocument(eq(bclassDocRef1), same(context))).andReturn(doc1).once();
    expect(xwiki.getDocument(eq(bclassDocRef2), same(context))).andReturn(doc2).times(2);
    replayAll();
    Map<String, Set<String>> result = xClassRegexRule.validate(requestMap);
    verifyAll();
    assertNotNull(result);
    assertEquals(2, result.size());
    Set<String> set1 = result.get(param1);
    assertNotNull(set1);
    assertEquals(1, set1.size());
    assertTrue(set1.contains("is empty"));
    // FIX: was result.get(param1) again, so param2's errors were never actually checked
    Set<String> set2 = result.get(param2);
    assertNotNull(set2);
    assertEquals(1, set2.size());
    assertTrue(set2.contains("is empty"));
  }

  @Test
  public void testValidateField_valid() throws XWikiException {
    BaseClass bclass = getBaseClass("testField");
    DocumentReference bclassDocRef = new DocumentReference(context.getDatabase(),
        "Test", "TestClass");
    XWikiDocument doc = new XWikiDocument(bclassDocRef);
    doc.setXClass(bclass);
    expect(xwiki.getDocument(eq(bclassDocRef), same(context))).andReturn(doc).once();
    replayAll();
    Set<String> result = xClassRegexRule.validateField("Test.TestClass", "testField", "value");
    verifyAll();
    assertTrue("Successful validation should result in an empty set",
        (result != null) && result.isEmpty());
  }

  @Test
  public void testValidateField_invalid() throws XWikiException {
    BaseClass bclass = getBaseClass("testField");
    DocumentReference bclassDocRef = new DocumentReference(context.getDatabase(),
        "Test", "TestClass");
    XWikiDocument doc = new XWikiDocument(bclassDocRef);
    doc.setXClass(bclass);
    expect(xwiki.getDocument(eq(bclassDocRef), same(context))).andReturn(doc).once();
    replayAll();
    Set<String> result = xClassRegexRule.validateField("Test.TestClass", "testField", "");
    verifyAll();
    assertNotNull(result);
    assertEquals(1, result.size());
    assertEquals("is empty", result.iterator().next());
  }

  /**
   * Builds an XClass with one field configured with regex /.+/ (non-empty) and
   * validation message "is empty".
   */
  private BaseClass getBaseClass(String fieldName) {
    BaseProperty regexProp = new StringProperty();
    regexProp.setValue("/.+/");
    BaseProperty msgProp = new StringProperty();
    msgProp.setValue("is empty");
    Map<String, BaseProperty> propfields = new HashMap<String, BaseProperty>();
    PropertyClass property = new PropertyClass();
    propfields.put("validationRegExp", regexProp);
    propfields.put("validationMessage", msgProp);
    property.setFields(propfields);
    Map<String, PropertyClass> fields = new HashMap<String, PropertyClass>();
    fields.put(fieldName, property);
    BaseClass bclass = new BaseClass();
    bclass.setFields(fields);
    return bclass;
  }

  @Test
  public void testGetFieldFromProperty_null() throws XWikiException{
    PropertyClass propclass = new PropertyClass();
    assertEquals("", xClassRegexRule.getFieldFromProperty(propclass, "test"));
    propclass.put("test", null);
    assertEquals("", xClassRegexRule.getFieldFromProperty(propclass, "test"));
  }

  @Test
  public void testGetFieldFromProperty() throws XWikiException{
    BaseProperty prop = new StringProperty();
    prop.setValue("value");
    PropertyClass propclass = new PropertyClass();
    propclass.put("test", prop);
    assertEquals("value", xClassRegexRule.getFieldFromProperty(propclass, "test"));
  }

  private void replayAll(Object ... mocks) {
    replay(xwiki);
    replay(mocks);
  }

  private void verifyAll(Object ... mocks) {
    verify(xwiki);
    verify(mocks);
  }
}
package com.safecharge.request.builder; import com.safecharge.model.CardData; import com.safecharge.model.ExternalMpi; import com.safecharge.model.ExternalTokenProvider; import com.safecharge.model.UserPaymentOption; import com.safecharge.request.SafechargeCCRequest; import com.safecharge.util.CardUtils; import com.safecharge.util.Constants; public abstract class SafechargeCCBuilder<T extends SafechargeCCBuilder<T>> extends SafechargeOrderBuilder<T> { private CardData cardData; private Constants.TransactionType transactionType; private UserPaymentOption userPaymentOption; private String orderId; private int isRebilling; private String isPartialApproval; private ExternalMpi externalMpi; private ExternalTokenProvider externalTokenProvider; private String customSiteName; private String productId; private String customData; private String relatedTransactionId; /** * Adds an order to the request. * * @param orderId the id of the order * @return this object */ public T addOrderId(String orderId) { this.orderId = orderId; return (T) this; } /** * Adds transaction type to the request. * * @param transactionType the type of the transaction. Possible types: {@link com.safecharge.util.Constants.TransactionType} * @return this object */ public T addTransactionType(Constants.TransactionType transactionType) { this.transactionType = transactionType; return (T) this; } /** * Adds user payment option (UPO) to the request. It is the payment option that will be used for the transaction. * * @param cvv the CVV code of the related credit/debit card. 
Note that CVV is not stored by Safecharge * @param userPaymentOptionId the id of the UPO to add to the request * @return this object */ public T addUserPaymentOption(String cvv, String userPaymentOptionId) { UserPaymentOption userPaymentOption = new UserPaymentOption(); userPaymentOption.setCVV(cvv); userPaymentOption.setUserPaymentOptionId(userPaymentOptionId); return addUserPaymentOption(userPaymentOption); } /** * Adds user payment option (UPO) to the request. It is the payment option that will be used for the transaction. * If CVV is required it should be set in the passed {@code userPaymentOption}. * * @param userPaymentOption the UPO to add to the request * @return this object */ public T addUserPaymentOption(UserPaymentOption userPaymentOption) { this.userPaymentOption = userPaymentOption; return (T) this; } /** * Adds card data to the request. * * @param cardNumber the number printed on the card * @param cardHolderName the name of the card's holder as printed on the card * @param expirationMonth the card's expiration month as printed on the card * @param expirationYear the card's expiration year as printed on the card * @param cardToken this token can be provided instead of the above parameters * @param cvv the CVV code printed on the back of the card * @return this object */ public T addCardData(String cardNumber, String cardHolderName, String expirationMonth, String expirationYear, String cardToken, String cvv) { CardData cardData = CardUtils.createCardDataFromParams(cardNumber, cardHolderName, expirationMonth, expirationYear, cardToken, cvv); return addCardData(cardData); } /** * Adds card data to the request. * * @param cardData {@link CardData} object to set to the request * @return this object */ public T addCardData(CardData cardData) { this.cardData = cardData; return (T) this; } /** * Flag indicating if it is a rebilling(subscription) request. 
* * @param isRebilling indicates whether this is a regular transaction (0) or a recurring/re-billing transaction (1). * Re-billing can only be performed using a UPO id, and NOT by using card data or a card token * @return this object */ public T addIsRebilling(int isRebilling) { this.isRebilling = isRebilling; return (T) this; } /** * Flag indicating if it is partial approval or not. * 0 - is not partial approval * 1 - is partial approval * * @return this object */ public T addIsPartialApproval(String isPartialApproval) { this.isPartialApproval = isPartialApproval; return (T) this; } /** * * @param externalMpi * @return */ public T addExternalMpi(ExternalMpi externalMpi) { this.externalMpi = externalMpi; return (T) this; } /** * * @param externalTokenProvider * @return */ public T addExternalTokenProvider(ExternalTokenProvider externalTokenProvider) { this.externalTokenProvider = externalTokenProvider; return (T) this; } /** * The method is used to ad to the builder custom site name which will overwrite the one of the merchant site. * @param customSiteName * @return */ public T addCustomSiteName(String customSiteName) { this.customSiteName = customSiteName; return (T) this; } /** * Adds product id to request builder. * @param productId * @return */ public T addProductId(String productId) { this.productId = productId; return (T) this; } /** * Adds custom data to request builder. * @param customData * @return */ public T addCustomData(String customData) { this.customData = customData; return (T) this; } /** * Adds relatedTransactionId data to request builder. * @param relatedTransactionId * @return */ public T addRelatedTransactionId(String relatedTransactionId) { this.relatedTransactionId = relatedTransactionId; return (T) this; } /** * Adds the common credit/debit data, collected by this builder. 
* * @param request an already created request of type T * @param <S> type parameter * @return the passed {@code request} filled with the data from this builder */ public <S extends SafechargeCCRequest> S build(S request) { super.build(request); request.setUserPaymentOption(userPaymentOption); request.setTransactionType(transactionType); request.setCardData(cardData); request.setOrderId(orderId); request.setIsRebilling(isRebilling); request.setExternalMpi(externalMpi); request.setIsPartialApproval(isPartialApproval); request.setExternalTokenProvider(externalTokenProvider); request.setCustomSiteName(customSiteName); request.setProductId(productId); request.setCustomData(customData); request.setRelatedTransactionId(relatedTransactionId); return request; } }
package com.bitplan.mediawiki.japi; import static org.junit.Assert.*; import java.util.List; import org.junit.Test; import com.bitplan.mediawiki.japi.api.Api; import com.bitplan.mediawiki.japi.api.Module; import com.bitplan.mediawiki.japi.api.Paraminfo; import com.bitplan.mediawiki.japi.api.Warnings; /** * test Api description access * * @author wf * */ public class TestApiDescription extends APITestbase { /** * * @throws Exception */ @Test public void testGetApiDescription() throws Exception { // ExampleWiki lwiki=ewm.get("mediawiki-japi-test1_24"); // com.bitplan.mediawiki.japi.Mediawiki wiki=(Mediawiki) lwiki.wiki; Mediawiki wiki = new Mediawiki("https://en.wikipedia.org"); // wiki.setDebug(true); // |phpfm|query+allpages|query+siteinfo String params = "&modules=main%7Clogin%7Clogout%7Cphpfm&helpformat=none"; Api result = wiki.getActionResult("paraminfo", params); assertNotNull(result); Warnings warnings = result.getWarnings(); assertNull(warnings); Paraminfo paraminfo = result.getParaminfo(); assertNotNull(paraminfo); List<Module> modules = paraminfo.getModules(); if (debug) { for (Module module : modules) { System.out.println(module.getName()); } } assertEquals(4, modules.size()); } }
package org.cytoscape.kegg.webservice;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import cytoscape.CyNetwork;
import cytoscape.CyNode;
import cytoscape.Cytoscape;
import cytoscape.data.CyAttributes;

/**
 * Parses tab-separated KEGG web-service responses and writes the extracted
 * identifiers into Cytoscape network and node attributes.
 */
public class KEGGResponseParser {

    private final CyAttributes netAttr;
    private final CyAttributes nodeAttr;
    // NOTE(review): the node list is snapshotted at construction time; nodes added
    // to Cytoscape afterwards will not be seen by mapModuleReaction — confirm intent.
    private final List<CyNode> nodes;

    KEGGResponseParser() {
        this.netAttr = Cytoscape.getNetworkAttributes();
        this.nodeAttr = Cytoscape.getNodeAttributes();
        this.nodes = Cytoscape.getCyNodesList();
    }

    /**
     * Extracts the module id (first space-delimited token) from each
     * tab-separated module entry and stores the ids as the network attribute
     * "KEGG.moduleID".
     *
     * @param modules tab-separated module entries, each beginning with a module id
     * @param network network whose attribute table receives the id list
     */
    public void mapModule(String modules, CyNetwork network) {
        final List<String> moduleIDs = new ArrayList<String>();
        for (final String module : modules.split("\t")) {
            final String[] parts = module.split(" ");
            moduleIDs.add(parts[0]);
        }
        netAttr.setListAttribute(network.getIdentifier(), "KEGG.moduleID",
                moduleIDs);
    }

    /**
     * Extracts the reaction id (first space-delimited token) from each
     * tab-separated reaction entry.
     *
     * @param reactions tab-separated reaction entries
     * @return list of reaction ids, one per entry
     */
    public List<String> getReactionIDs(String reactions) {
        final List<String> reactionIDs = new ArrayList<String>();
        for (String reaction : reactions.split("\t")) {
            reactionIDs.add(reaction.split(" ")[0]);
        }
        return reactionIDs;
    }

    /**
     * For every node carrying a "KEGG.reaction.list" attribute, collects the
     * module ids whose reaction lists contain one of the node's reactions
     * (after stripping the "rn:" prefix) and stores them as "KEGG.module.list".
     *
     * @param module2reactionMap module id -> reaction ids belonging to that module
     * @param network            unused here; kept for interface symmetry with the
     *                           other map* methods
     */
    public void mapModuleReaction(Map<String, List<String>> module2reactionMap,
            CyNetwork network) {
        for (CyNode node : nodes) {
            List<String> reactionIDs = nodeAttr.getListAttribute(
                    node.getIdentifier(), "KEGG.reaction.list");
            if (reactionIDs != null) {
                List<String> keggModules = new ArrayList<String>();
                for (String reactionID : reactionIDs) {
                    for (String moduleID : module2reactionMap.keySet()) {
                        if (module2reactionMap.get(moduleID).contains(
                                reactionID.replace("rn:", ""))) {
                            keggModules.add(moduleID);
                        }
                    }
                }
                nodeAttr.setListAttribute(node.getIdentifier(),
                        "KEGG.module.list", keggModules);
            }
        }
    }

    /**
     * Extracts the related-pathway id (first space-delimited token) from each
     * tab-separated entry and stores the ids as "KEGG.relpathwayID".
     *
     * @param relpathways tab-separated related-pathway entries
     * @param network     network whose attribute table receives the id list
     */
    public void mapRelpathway(String relpathways, CyNetwork network) {
        final List<String> relpathwayIDs = new ArrayList<String>();
        for (String relpathway : relpathways.split("\t")) {
            relpathwayIDs.add(relpathway.split(" ")[0]);
        }
        netAttr.setListAttribute(network.getIdentifier(), "KEGG.relpathwayID",
                relpathwayIDs);
    }

    /**
     * Extracts the disease id (first space-delimited token) from each
     * tab-separated entry and stores the ids as "KEGG.diseaseID".
     *
     * @param diseases tab-separated disease entries
     * @param network  network whose attribute table receives the id list
     */
    public void mapDisease(String diseases, CyNetwork network) {
        final List<String> diseaseIDs = new ArrayList<String>();
        for (String disease : diseases.split("\t")) {
            diseaseIDs.add(disease.split(" ")[0]);
        }
        netAttr.setListAttribute(network.getIdentifier(), "KEGG.diseaseID",
                diseaseIDs);
    }

    /**
     * Parses a dblinks section containing UMBBD and/or GO cross-references and
     * stores them as the network attributes "UMBBD.dblinks" / "GO.dblinks".
     * <p>
     * Two tab-separated sections means both databases are present (UMBBD first,
     * GO second, each of the form {@code "NAME: id id ..."}); a single section
     * is probed for either prefix.
     *
     * @param dblinks raw dblinks text from the KEGG response
     * @param network network whose attribute table receives the id lists
     */
    public void mapDblink(String dblinks, CyNetwork network) {
        // Hoist the split results instead of re-splitting the same string on
        // every access (the original re-evaluated dblinks.split(...) repeatedly).
        final String[] sections = dblinks.split("\t");
        if (sections.length == 2) {
            final List<String> umbbdIDs = new ArrayList<String>();
            final List<String> goIDs = new ArrayList<String>();
            for (String umbbdID : sections[0].split(": ")[1].split(" ")) {
                umbbdIDs.add(umbbdID);
            }
            for (String goID : sections[1].split(": ")[1].split(" ")) {
                goIDs.add(goID);
            }
            netAttr.setListAttribute(network.getIdentifier(), "UMBBD.dblinks",
                    umbbdIDs);
            netAttr.setListAttribute(network.getIdentifier(), "GO.dblinks",
                    goIDs);
        } else if (sections.length == 1) {
            final String[] umbbdParts = dblinks.split("UMBBD: ");
            if (umbbdParts.length == 2) {
                final List<String> umbbdIDs = new ArrayList<String>();
                for (String umbbdID : umbbdParts[1].split(" ")) {
                    umbbdIDs.add(umbbdID);
                }
                netAttr.setListAttribute(network.getIdentifier(),
                        "UMBBD.dblinks", umbbdIDs);
            } else {
                final String[] goParts = dblinks.split("GO: ");
                if (goParts.length == 2) {
                    final List<String> goIDs = new ArrayList<String>();
                    for (String goID : goParts[1].split(" ")) {
                        goIDs.add(goID);
                    }
                    netAttr.setListAttribute(network.getIdentifier(),
                            "GO.dblinks", goIDs);
                }
            }
        }
    }
}
package com.rgi.geopackage.core;

import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;

import utility.DatabaseUtility;

import com.rgi.common.BoundingBox;
import com.rgi.geopackage.verification.FailedRequirement;

/**
 * Access to the "Core" portion of a GeoPackage: the gpkg_spatial_ref_sys and
 * gpkg_contents tables that every conformant GeoPackage must contain.
 *
 * @author Luke Lambert
 *
 */
public class GeoPackageCore
{
    /**
     * Constructor
     *
     * @param databaseConnection
     *             The open connection to the database that contains a GeoPackage
     * @throws SQLException
     */
    public GeoPackageCore(final Connection databaseConnection) throws SQLException
    {
        this(databaseConnection, false);
    }

    /**
     * Constructor
     *
     * @param databaseConnection
     *             The open connection to the database that contains a GeoPackage
     * @param createDefaults
     *             If true, GeoPackageCore will create the default tables and entries required by all standard
     * @throws SQLException
     */
    public GeoPackageCore(final Connection databaseConnection, final boolean createDefaults) throws SQLException
    {
        this.databaseConnection = databaseConnection;

        if(createDefaults)
        {
            this.createDefaultTables();
        }
    }

    /**
     * Requirements this GeoPackage failed to meet
     *
     * @param file
     *             GeoPackage file to be verified
     * @return the Core GeoPackage requirements this GeoPackage fails to conform to
     */
    public Collection<FailedRequirement> getFailedRequirements(final File file)
    {
        return new CoreVerifier(file, this.databaseConnection).getFailedRequirements();
    }

    /**
     * Count the number of entries in a user content table
     *
     * @param content
     *             Specifies the content table whose rows will be counted
     * @return Number of rows in the table referenced by the content parameter
     * @throws SQLException
     */
    public long getRowCount(final Content content) throws SQLException
    {
        if(content == null)
        {
            throw new IllegalArgumentException("Content may not be null.");
        }

        final String rowCountSql = String.format("SELECT COUNT(*) FROM %s;",
                                                 content.getTableName());

        try(final PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(rowCountSql);
            final ResultSet         countResult       = preparedStatement.executeQuery())
        {
            // COUNT(*) always produces exactly one row, but the JDBC ResultSet
            // contract requires advancing the cursor before reading columns
            // (the original read getLong(1) without calling next())
            if(!countResult.next())
            {
                throw new SQLException(String.format("Unable to count the rows of table %s", content.getTableName()));
            }

            return countResult.getLong(1);
        }
    }

    /**
     * Adds a spatial reference system (SRS) to the gpkg_spatial_ref_sys table.
     *
     * @param name
     *             Human readable name of this spatial reference system
     * @param identifier
     *             Unique identifier for each Spatial Reference System within a GeoPackage
     * @param organization
     *             Case-insensitive name of the defining organization e.g. EPSG or epsg
     * @param organizationSrsId
     *             Numeric ID of the spatial reference system assigned by the organization
     * @param definition
     *             Well-known Text (WKT) representation of the spatial reference system
     * @param description
     *             Human readable description of this spatial reference system
     * @return the newly inserted (or pre-existing, identical) spatial reference system
     * @throws SQLException
     */
    public SpatialReferenceSystem addSpatialReferenceSystem(final String name,
                                                            final int    identifier,
                                                            final String organization,
                                                            final int    organizationSrsId,
                                                            final String definition,
                                                            final String description) throws SQLException
    {
        try
        {
            final SpatialReferenceSystem spatialReferenceSystem = this.addSpatialReferenceSystemNoCommit(name,
                                                                                                         identifier,
                                                                                                         organization,
                                                                                                         organizationSrsId,
                                                                                                         definition,
                                                                                                         description);
            this.databaseConnection.commit();

            return spatialReferenceSystem;
        }
        catch(final Exception ex)
        {
            // Undo the partial insert before propagating the failure
            this.databaseConnection.rollback();
            throw ex;
        }
    }

    /**
     * Returns a unique spatial reference system (SRS) based on an
     * organization, and its organization-assigned numeric identifier.
     *
     * @param organization
     *             Name of the defining organization
     * @param organizationSrsId
     *             Numeric identifier of the Spatial Reference System assigned by the organization
     * @return Returns the unique spatial reference system (SRS), or null
     * @throws SQLException
     */
    public SpatialReferenceSystem getSpatialReferenceSystem(final String organization, final int organizationSrsId) throws SQLException
    {
        final String srsQuerySql = String.format("SELECT %s, %s, %s, %s, %s, %s FROM %s WHERE organization COLLATE NOCASE IN (?) AND organization_coordsys_id = ?;",
                                                 "srs_name",
                                                 "srs_id",
                                                 "organization",
                                                 "organization_coordsys_id",
                                                 "definition",
                                                 "description",
                                                 GeoPackageCore.SpatialRefSysTableName);

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(srsQuerySql))
        {
            preparedStatement.setString(1, organization);
            preparedStatement.setInt   (2, organizationSrsId);

            try(ResultSet srsResult = preparedStatement.executeQuery())
            {
                // next() both tests for a row and positions the cursor on it;
                // the original used isBeforeFirst() and then read columns with
                // the cursor still before the first row, which violates the
                // JDBC ResultSet contract
                if(srsResult.next())
                {
                    return new SpatialReferenceSystem(srsResult.getString(1),
                                                      srsResult.getInt   (2),
                                                      srsResult.getString(3),
                                                      srsResult.getInt   (4),
                                                      srsResult.getString(5),
                                                      srsResult.getString(6));
                }
            }
        }

        return null;
    }

    /**
     * Returns a unique spatial reference system (SRS) based on its
     * unique identifier for each spatial reference system within a GeoPackage
     *
     * @param identifier
     *             Unique identifier for each Spatial Reference System within a GeoPackage
     * @return Returns the unique spatial reference system (SRS), or null
     * @throws SQLException
     */
    public SpatialReferenceSystem getSpatialReferenceSystem(final int identifier) throws SQLException
    {
        final String srsQuerySql = String.format("SELECT %s, %s, %s, %s, %s, %s FROM %s WHERE srs_id = ?;",
                                                 "srs_name",
                                                 "srs_id",
                                                 "organization",
                                                 "organization_coordsys_id",
                                                 "definition",
                                                 "description",
                                                 GeoPackageCore.SpatialRefSysTableName);

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(srsQuerySql))
        {
            preparedStatement.setInt(1, identifier);

            try(ResultSet srsResult = preparedStatement.executeQuery())
            {
                // See note in getSpatialReferenceSystem(String, int): next()
                // replaces the non-compliant isBeforeFirst() + column read
                if(srsResult.next())
                {
                    return new SpatialReferenceSystem(srsResult.getString(1),
                                                      srsResult.getInt   (2),
                                                      srsResult.getString(3),
                                                      srsResult.getInt   (4),
                                                      srsResult.getString(5),
                                                      srsResult.getString(6));
                }
            }
        }

        return null;
    }

    /**
     * Add a reference to a tile or feature set to content table
     * <br>
     * <br>
     * <b>**WARNING**</b> this does not do a database commit. It is expected
     * that this transaction will always be paired with others that need to be
     * committed or rollback as a single transaction.
     *
     * @param tableName
     *             The name of the tiles, feature, or extension specific content table
     * @param dataType
     *             Type of data stored in the table: "features" per clause Features, "tiles" per clause Tiles, or an implementer-defined value for other data tables per clause in an Extended GeoPackage.
     * @param identifier
     *             A human-readable identifier (e.g. short name) for the tableName content
     * @param description
     *             A human-readable description for the tableName content
     * @param boundingBox
     *             Bounding box for all content in tableName
     * @param spatialReferenceSystem
     *             Spatial Reference System (SRS)
     * @return the content entry for tableName (pre-existing or newly inserted)
     * @throws SQLException
     */
    public Content addContent(final String                 tableName,
                              final String                 dataType,
                              final String                 identifier,
                              final String                 description,
                              final BoundingBox            boundingBox,
                              final SpatialReferenceSystem spatialReferenceSystem) throws SQLException
    {
        if(tableName == null || tableName.isEmpty())
        {
            throw new IllegalArgumentException("Tile set name may not be null");
        }

        if(!tableName.matches("^[_a-zA-Z]\\w*"))
        {
            throw new IllegalArgumentException("The tile set's name must begin with a letter (A..Z, a..z) or an underscore (_) and may only be followed by letters, underscores, or numbers");
        }

        if(tableName.startsWith("gpkg_"))
        {
            throw new IllegalArgumentException("The tile set's name may not start with the reserved prefix 'gpkg_'");
        }

        if(!DatabaseUtility.tableOrViewExists(this.databaseConnection, tableName))
        {
            throw new IllegalArgumentException("Content entry references a table that does not exist");
        }

        if(dataType == null || dataType.isEmpty())
        {
            throw new IllegalArgumentException("Data type cannot be null, or empty.");
        }

        if(boundingBox == null)
        {
            throw new IllegalArgumentException("Bounding box cannot be null.");
        }

        final Content existingContent = this.getContent(tableName);

        if(existingContent != null)
        {
            // NOTE(review): this dereference throws NullPointerException when
            // spatialReferenceSystem is null, even though the insert path below
            // explicitly tolerates a null SRS — confirm whether a null SRS is
            // legal for updates of existing content
            if(!existingContent.equals(tableName, dataType, identifier, description, boundingBox, spatialReferenceSystem.getIdentifier()))
            {
                throw new IllegalArgumentException("A content entry with this table name or identifier already exists but with different properties");
            }

            return existingContent;
        }

        final String insertContent = String.format("INSERT INTO %s (%s, %s, %s, %s, %s, %s, %s, %s, %s) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
                                                   GeoPackageCore.ContentsTableName,
                                                   "table_name",
                                                   "data_type",
                                                   "identifier",
                                                   "description",
                                                   "min_x",
                                                   "min_y",
                                                   "max_x",
                                                   "max_y",
                                                   "srs_id");

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(insertContent))
        {
            final Integer srsId = spatialReferenceSystem == null ? null
                                                                 : spatialReferenceSystem.getIdentifier();

            preparedStatement.setString(1, tableName);
            preparedStatement.setString(2, dataType);
            preparedStatement.setString(3, identifier);
            preparedStatement.setString(4, description);
            preparedStatement.setObject(5, boundingBox.getMinX(), Types.DOUBLE); // Using setObject because spec allows the bounding box values to be null
            preparedStatement.setObject(6, boundingBox.getMinY(), Types.DOUBLE);
            preparedStatement.setObject(7, boundingBox.getMaxX(), Types.DOUBLE);
            preparedStatement.setObject(8, boundingBox.getMaxY(), Types.DOUBLE);
            preparedStatement.setObject(9, srsId, Types.INTEGER);                // Using setObject because the spec allows the srs id be null

            preparedStatement.executeUpdate();
        }

        return this.getContent(tableName);
    }

    /**
     * Request all of a specific type of content from the {@value #ContentsTableName} table that matches a specific spatial reference system
     *
     * @param dataType
     *             Type of content being requested e.g. "tiles", "features" or another value representing an extended GeoPackage's content
     * @param contentFactory
     *             Mechanism used to create a type that corresponds to the dataType
     * @param matchingSpatialReferenceSystem
     *             Results must reference this spatial reference system. Results are unfiltered if this parameter is null
     * @return Returns a Collection {@link Content}s of the type indicated by the {@link ContentFactory}
     * @throws SQLException
     */
    public <T extends Content> Collection<T> getContent(final String                 dataType,
                                                        final ContentFactory<T>      contentFactory,
                                                        final SpatialReferenceSystem matchingSpatialReferenceSystem) throws SQLException
    {
        if(dataType == null || dataType.isEmpty())
        {
            throw new IllegalArgumentException("Data type may not be null or empty");
        }

        if(contentFactory == null)
        {
            throw new IllegalArgumentException("Content factory may not be null");
        }

        final ArrayList<T> content = new ArrayList<>();

        final String query = String.format("SELECT %s, %s, %s, %s, %s, %s, %s, %s, %s, %s FROM %s WHERE data_type = ?%s;",
                                           "table_name",
                                           "data_type",
                                           "identifier",
                                           "description",
                                           "strftime('%Y-%m-%dT%H:%M:%fZ', last_change)",
                                           "min_x",
                                           "min_y",
                                           "max_x",
                                           "max_y",
                                           "srs_id",
                                           GeoPackageCore.ContentsTableName,
                                           matchingSpatialReferenceSystem != null ? " AND srs_id = ?"
                                                                                  : "");

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(query))
        {
            preparedStatement.setString(1, dataType);

            if(matchingSpatialReferenceSystem != null)
            {
                preparedStatement.setInt(2, matchingSpatialReferenceSystem.getIdentifier());
            }

            try(ResultSet results = preparedStatement.executeQuery())
            {
                while(results.next())
                {
                    content.add(contentFactory.create(results.getString(1),                             // table name
                                                      results.getString(2),                             // data type
                                                      results.getString(3),                             // identifier
                                                      results.getString(4),                             // description
                                                      results.getString(5),                             // last change
                                                      new BoundingBox((Double)results.getObject(7),     // min y   // Unfortunately as of Xerial's SQLite JDBC implementation 3.8.7 getObject(int columnIndex, Class<T> type) is unimplemented, so a cast is required
                                                                      (Double)results.getObject(6),     // min x
                                                                      (Double)results.getObject(9),     // max y
                                                                      (Double)results.getObject(8)),    // max x
                                                      (Integer)results.getObject(10)));                 // srs id
                }
            }
        }

        return content;
    }

    /**
     * Gets a specific entry in the contents table based on the name of the table the entry corresponds to
     *
     * @param tableName
     *             Table name to search for
     * @param contentFactory
     *             Mechanism used to create the correct subtype of Content
     * @return Returns a {@link Content} of the type indicated by the {@link ContentFactory}, or null if no entry matches
     * @throws SQLException
     */
    public <T extends Content> T getContent(final String tableName, final ContentFactory<T> contentFactory) throws SQLException
    {
        if(tableName == null || tableName.isEmpty())
        {
            throw new IllegalArgumentException("Table name may not be null or empty");
        }

        if(contentFactory == null)
        {
            throw new IllegalArgumentException("Content factory may not be null");
        }

        final String contentQuerySql = String.format("SELECT %s, %s, %s, %s, %s, %s, %s, %s, %s FROM %s WHERE table_name = ?;",
                                                     "data_type",
                                                     "identifier",
                                                     "description",
                                                     "strftime('%Y-%m-%dT%H:%M:%fZ', last_change)",
                                                     "min_x",
                                                     "min_y",
                                                     "max_x",
                                                     "max_y",
                                                     "srs_id",
                                                     GeoPackageCore.ContentsTableName);

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(contentQuerySql))
        {
            preparedStatement.setString(1, tableName);

            try(ResultSet result = preparedStatement.executeQuery())
            {
                // next() positions the cursor on the (at most one) matching row;
                // the original used isBeforeFirst() and read columns without
                // advancing, which violates the JDBC ResultSet contract
                if(result.next())
                {
                    return contentFactory.create(tableName,                                   // table name
                                                 result.getString(1),                         // data type
                                                 result.getString(2),                         // identifier
                                                 result.getString(3),                         // description
                                                 result.getString(4),                         // last change
                                                 new BoundingBox((Double)result.getObject(6), // min y   // Unfortunately as of Xerial's SQLite JDBC implementation 3.8.7 getObject(int columnIndex, Class<T> type) is unimplemented, so a cast is required
                                                                 (Double)result.getObject(5), // min x
                                                                 (Double)result.getObject(8), // max y
                                                                 (Double)result.getObject(7)),// max x
                                                 (Integer)result.getObject(9));               // srs id
                }

                return null;
            }
        }
    }

    /**
     * Adds a spatial reference system (SRS) to the gpkg_spatial_ref_sys table.
     * <br>
     * <br>
     * <b>**WARNING**</b> this does not do a database commit. It is expected
     * that this transaction will always be paired with others that need to be
     * committed or rollback as a single transaction.
     *
     * @param name
     *             Human readable name of this spatial reference system
     * @param identifier
     *             Unique identifier for each Spatial Reference System within a GeoPackage
     * @param organization
     *             Case-insensitive name of the defining organization e.g. EPSG or epsg
     * @param organizationSrsId
     *             Numeric ID of the spatial reference system assigned by the organization
     * @param definition
     *             Well-known Text (WKT) representation of the spatial reference system
     * @param description
     *             Human readable description of this spatial reference system
     * @return the newly inserted (or pre-existing, identical) spatial reference system
     * @throws SQLException
     */
    private SpatialReferenceSystem addSpatialReferenceSystemNoCommit(final String name,
                                                                     final int    identifier,
                                                                     final String organization,
                                                                     final int    organizationSrsId,
                                                                     final String definition,
                                                                     final String description) throws SQLException
    {
        if(name == null || name.isEmpty())
        {
            throw new IllegalArgumentException("Name may not be null or empty");
        }

        if(organization == null || organization.isEmpty())
        {
            throw new IllegalArgumentException("Organization may not be null or empty");
        }

        if(definition == null || definition.isEmpty())
        {
            throw new IllegalArgumentException("Definition may not be null or empty");
        }

        // TODO: It'd be nice to do an additional check to see if 'definition' was a conformant WKT SRS

        final SpatialReferenceSystem existingSrs = this.getSpatialReferenceSystem(identifier);

        if(existingSrs != null)
        {
            if(existingSrs.equals(name, identifier, organization, organizationSrsId, definition))
            {
                return existingSrs;
            }

            throw new IllegalArgumentException("A spatial reference system already exists with this identifier, but has different values for its other fields");
        }

        final String insertSpatialRef = String.format("INSERT INTO %s (%s, %s, %s, %s, %s, %s) VALUES (?, ?, ?, ?, ?, ?)",
                                                      GeoPackageCore.SpatialRefSysTableName,
                                                      "srs_name",
                                                      "srs_id",
                                                      "organization",
                                                      "organization_coordsys_id",
                                                      "definition",
                                                      "description");

        try(PreparedStatement preparedStatement = this.databaseConnection.prepareStatement(insertSpatialRef))
        {
            preparedStatement.setString(1, name);
            preparedStatement.setInt   (2, identifier);
            preparedStatement.setString(3, organization);
            preparedStatement.setInt   (4, organizationSrsId);
            preparedStatement.setString(5, definition);
            preparedStatement.setString(6, description);

            preparedStatement.executeUpdate();

            return new SpatialReferenceSystem(name,
                                              identifier,
                                              organization,
                                              organizationSrsId,
                                              definition,
                                              description);
        }
    }

    /**
     * Create the default tables, and default SRS entries
     *
     * @throws SQLException
     */
    protected void createDefaultTables() throws SQLException
    {
        try
        {
            // Create the spatial ref system table
            if(!DatabaseUtility.tableOrViewExists(this.databaseConnection, GeoPackageCore.SpatialRefSysTableName))
            {
                try(Statement statement = this.databaseConnection.createStatement())
                {
                    statement.executeUpdate(this.getSpatialReferenceSystemCreationSql());
                }
            }

            // Add the default entries to the spatial ref system table
            // See: http://www.geopackage.org/spec/#spatial_ref_sys -> 1.1.2.1.2. Table Data Values, Requirement 11
            this.addSpatialReferenceSystemNoCommit("World Geodetic System (WGS) 1984",
                                                   4326,
                                                   "EPSG",
                                                   4326,
                                                   "GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.01745329251994328,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]]",
                                                   "World Geodetic System 1984");

            this.addSpatialReferenceSystemNoCommit("Undefined Cartesian Coordinate Reference System",
                                                   -1,
                                                   "NONE",
                                                   -1,
                                                   "undefined",
                                                   "undefined Cartesian coordinate reference system");

            this.addSpatialReferenceSystemNoCommit("Undefined Geographic Coordinate Reference System",
                                                   0,
                                                   "NONE",
                                                   0,
                                                   "undefined",
                                                   "undefined Geographic coordinate reference system");

            // Create the package contents table or view
            if(!DatabaseUtility.tableOrViewExists(this.databaseConnection, GeoPackageCore.ContentsTableName))
            {
                try(Statement statement = this.databaseConnection.createStatement())
                {
                    // http://www.geopackage.org/spec/#gpkg_contents_sql
                    // http://www.geopackage.org/spec/#_contents
                    statement.executeUpdate(this.getContentsCreationSql());
                }
            }

            this.databaseConnection.commit();
        }
        catch(final Exception ex)
        {
            this.databaseConnection.rollback();
            throw ex;
        }
    }

    @SuppressWarnings("static-method")
    protected String getSpatialReferenceSystemCreationSql()
    {
        // http://www.geopackage.org/spec/#_gpkg_spatial_ref_sys
        // http://www.geopackage.org/spec/#spatial_ref_sys
        return "CREATE TABLE " + GeoPackageCore.SpatialRefSysTableName +
               "(srs_name                 TEXT    NOT NULL,             -- Human readable name of this SRS (Spatial Reference System)\n" +
               " srs_id                   INTEGER NOT NULL PRIMARY KEY, -- Unique identifier for each Spatial Reference System within a GeoPackage\n" +
               " organization             TEXT    NOT NULL,             -- Case-insensitive name of the defining organization e.g. EPSG or epsg\n" +
               " organization_coordsys_id INTEGER NOT NULL,             -- Numeric ID of the Spatial Reference System assigned by the organization\n" +
               " definition               TEXT    NOT NULL,             -- Well-known Text representation of the Spatial Reference System\n" +
               " description              TEXT);                        -- Human readable description of this SRS\n";
    }

    @SuppressWarnings("static-method")
    protected String getContentsCreationSql()
    {
        // http://www.geopackage.org/spec/#gpkg_contents_sql
        // http://www.geopackage.org/spec/#_contents
        // NOTE(review): the data_type column comment below was truncated
        // mid-sentence in the source under review; reconstructed from the
        // gpkg_contents definition in the GeoPackage specification
        return "CREATE TABLE " + GeoPackageCore.ContentsTableName + "\n" +
               "(table_name  TEXT     NOT NULL PRIMARY KEY,                                    -- The name of the tiles, or feature table\n" +
               " data_type   TEXT     NOT NULL,                                                -- Type of data stored in the table: \"features\" per clause Features, \"tiles\" per clause Tiles, or an implementer-defined value for other data tables per clause in an Extended GeoPackage\n" +
               " identifier  TEXT     UNIQUE,                                                  -- A human-readable identifier (e.g. short name) for the table_name content\n" +
               " description TEXT     DEFAULT '',                                              -- A human-readable description for the table_name content\n" +
               " last_change DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')), -- Timestamp value in ISO 8601 format as defined by the strftime function %Y-%m-%dT%H:%M:%fZ format string applied to the current time\n" +
               " min_x       DOUBLE,                                                           -- Bounding box minimum easting or longitude for all content in table_name\n" +
               " min_y       DOUBLE,                                                           -- Bounding box minimum northing or latitude for all content in table_name\n" +
               " max_x       DOUBLE,                                                           -- Bounding box maximum easting or longitude for all content in table_name\n" +
               " max_y       DOUBLE,                                                           -- Bounding box maximum northing or latitude for all content in table_name\n" +
               " srs_id      INTEGER,                                                          -- Spatial Reference System ID: gpkg_spatial_ref_sys.srs_id; when data_type is features, SHALL also match gpkg_geometry_columns.srs_id; When data_type is tiles, SHALL also match gpkg_tile_matrix_set.srs.id\n" +
               " CONSTRAINT fk_gc_r_srs_id FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys(srs_id));";
    }

    /**
     * Gets a specific entry in the contents table based on the name of the table the entry corresponds to
     *
     * @param tableName
     *             Table name to search for
     * @return Returns a {@link Content}, or null if no entry matches
     * @throws SQLException
     */
    private Content getContent(final String tableName) throws SQLException
    {
        // The lambda in the original forwarded all factory arguments to the
        // Content constructor in order, so a constructor reference is equivalent
        return this.getContent(tableName, Content::new);
    }

    private final Connection databaseConnection;

    // NOTE(review): SimpleDateFormat is not thread-safe; sharing this mutable
    // formatter as a public static field is unsafe if callers format dates
    // concurrently — consider a thread-local or (on Java 8+) DateTimeFormatter
    public final static SimpleDateFormat DateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    public final static String SpatialRefSysTableName = "gpkg_spatial_ref_sys";
    public final static String ContentsTableName      = "gpkg_contents";
}
package org.deri.any23.writer;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.deri.any23.extractor.ExtractionContext;

import com.hp.hpl.jena.graph.Node;

/**
 * A {@link TripleHandler} decorator that records per-extractor benchmark
 * statistics (call counts, triple counts, cumulative runtime) while delegating
 * all events to an underlying handler. The pseudo-extractor key "SUM"
 * accumulates totals across all extractors.
 */
public class BenchmarkTripleHandler implements TripleHandler {

    private final TripleHandler _underlyingHandler;

    /**
     * Mutable counters for one extractor. Declared static: the original was a
     * non-static inner class that carried a needless reference to the
     * enclosing handler.
     */
    private static class StatObject {
        int  methodCalls = 0;
        int  triples     = 0;
        long runtime     = 0;
        long intStart    = 0;

        /** Marks the start of a timed interval. */
        public void interimStart() {
            intStart = System.currentTimeMillis();
        }

        /** Closes the current timed interval and adds it to the total runtime. */
        public void interimStop() {
            runtime += (System.currentTimeMillis() - intStart);
            intStart = 0;
        }
    }

    final Map<String, StatObject> stats;

    public BenchmarkTripleHandler(TripleHandler tripleHandler) {
        _underlyingHandler = tripleHandler;
        stats = new HashMap<String, StatObject>();
        stats.put("SUM", new StatObject());
    }

    /**
     * Returns the stat record for the given extractor, creating it on first
     * use (replaces the repeated containsKey/put pattern).
     */
    private StatObject statsFor(String extractorName) {
        StatObject stat = stats.get(extractorName);
        if (stat == null) {
            stat = new StatObject();
            stats.put(extractorName, stat);
        }
        return stat;
    }

    /* (non-Javadoc)
     * @see org.deri.any23.writer.TripleHandler#close()
     */
    @Override
    public void close() {
        _underlyingHandler.close();
    }

    /* (non-Javadoc)
     * @see org.deri.any23.writer.TripleHandler#closeContext(org.deri.any23.extractor.ExtractionContext)
     */
    @Override
    public void closeContext(ExtractionContext context) {
        statsFor(context.getExtractorName()).interimStop();
        stats.get("SUM").interimStop();
        _underlyingHandler.closeContext(context);
    }

    /* (non-Javadoc)
     * @see org.deri.any23.writer.TripleHandler#openContext(org.deri.any23.extractor.ExtractionContext)
     */
    @Override
    public void openContext(ExtractionContext context) {
        final StatObject stat = statsFor(context.getExtractorName());
        stat.methodCalls++;
        stat.interimStart();
        final StatObject sum = stats.get("SUM");
        sum.methodCalls++;
        sum.interimStart();
        _underlyingHandler.openContext(context);
    }

    /* (non-Javadoc)
     * @see org.deri.any23.writer.TripleHandler#receiveLabel(java.lang.String, org.deri.any23.extractor.ExtractionContext)
     */
    @Override
    public void receiveLabel(String label, ExtractionContext context) {
        _underlyingHandler.receiveLabel(label, context);
    }

    /* (non-Javadoc)
     * @see org.deri.any23.writer.TripleHandler#receiveTriple(com.hp.hpl.jena.graph.Node, com.hp.hpl.jena.graph.Node, com.hp.hpl.jena.graph.Node, org.deri.any23.extractor.ExtractionContext)
     */
    @Override
    public void receiveTriple(Node s, Node p, Node o, ExtractionContext context) {
        statsFor(context.getExtractorName()).triples++;
        stats.get("SUM").triples++;
        _underlyingHandler.receiveTriple(s, p, o, context);
    }

    /**
     * Renders the collected statistics as a human-readable report.
     * <p>
     * Fix: the original removed the "SUM" entry from {@code stats} while
     * building the report, so every call after the first silently lost the
     * summary totals. The entry is now skipped during iteration instead, and
     * repeated calls produce consistent reports.
     * <p>
     * NOTE(review): the triples/ms and ms/calls figures use integer division
     * (and the original "tripls" spelling is preserved byte-for-byte to keep
     * the output format unchanged).
     *
     * @return a multi-line benchmark report
     */
    public String report() {
        StringBuilder sb = new StringBuilder();
        StatObject sum = stats.get("SUM");

        sb.append("\n>Summary: ");
        sb.append("\n   -total calls: ").append(sum.methodCalls);
        sb.append("\n   -total triples: ").append(sum.triples);
        sb.append("\n   -total runtime: ").append(sum.runtime).append(" ms!");
        if (sum.runtime != 0)
            sb.append("\n   -tripls/ms: ").append(sum.triples / sum.runtime);
        if (sum.methodCalls != 0)
            sb.append("\n   -ms/calls: ").append(sum.runtime / sum.methodCalls);

        for (Entry<String, StatObject> ent : stats.entrySet()) {
            if ("SUM".equals(ent.getKey())) {
                continue; // summary already rendered above; do NOT remove it from the map
            }
            sb.append("\n>Extractor: ").append(ent.getKey());
            sb.append("\n   -total calls: ").append(ent.getValue().methodCalls);
            sb.append("\n   -total triples: ").append(ent.getValue().triples);
            sb.append("\n   -total runtime: ").append(ent.getValue().runtime).append(" ms!");
            if (ent.getValue().runtime != 0)
                sb.append("\n   -tripls/ms: ").append(ent.getValue().triples / ent.getValue().runtime);
            if (ent.getValue().methodCalls != 0)
                sb.append("\n   -ms/calls: ").append(ent.getValue().runtime / ent.getValue().methodCalls);
        }

        return sb.toString();
    }
}
package com.github.millij.eom; import java.text.ParseException; import java.util.List; import java.util.Map; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.millij.eom.bean.Company; import com.github.millij.eom.bean.Employee; import com.github.millij.eom.exception.ExcelReadException; public class GenericExcelReaderTest { private static final Logger LOGGER = LoggerFactory.getLogger(GenericExcelReaderTest.class); // XLS private String _filepath_xls_single_sheet; private String _filepath_xls_multiple_sheets; // XLSX private String _filepath_xlsx_single_sheet; private String _filepath_xlsx_multiple_sheets; // Setup @Before public void setup() throws ParseException { // filepaths // xls _filepath_xls_single_sheet = "src/test/resources/sample-files/xls_sample_single_sheet.xls"; _filepath_xls_multiple_sheets = "src/test/resources/sample-files/xls_sample_multiple_sheets.xls"; // xlsx _filepath_xlsx_single_sheet = "src/test/resources/sample-files/xlsx_sample_single_sheet.xlsx"; _filepath_xlsx_multiple_sheets = "src/test/resources/sample-files/xlsx_sample_multiple_sheets.xlsx"; } @After public void teardown() { // nothing to do } // Tests // XLS @Ignore @Test public void test_read_xls_single_sheet() throws ExcelReadException { // Excel Reader LOGGER.info("test_read_xls_single_sheet :: Reading file - {}", _filepath_xls_single_sheet); GenericExcelReader ger = new GenericExcelReader(_filepath_xls_single_sheet); // Read List<Employee> employees = ger.read(Employee.class); Assert.assertNotNull(employees); Assert.assertTrue(employees.size() > 0); for (Employee emp : employees) { LOGGER.info("test_read_xls_single_sheet :: Output - {}", emp); } } @Ignore @Test public void test_read_xls_multiple_sheets() throws ExcelReadException { // Excel Reader LOGGER.info("test_read_xlsx_multiple_sheets :: Reading file - {}", 
_filepath_xls_multiple_sheets); GenericExcelReader ger = new GenericExcelReader(_filepath_xls_multiple_sheets); // Read Sheet 1 List<Employee> employees = ger.read(0, Employee.class); Assert.assertNotNull(employees); Assert.assertTrue(employees.size() > 0); for (Employee emp : employees) { LOGGER.info("test_read_xls_multiple_sheets :: Output - {}", emp); } // Read Sheet 2 List<Company> companies = ger.read(1, Company.class); Assert.assertNotNull(companies); Assert.assertTrue(companies.size() > 0); for (Company company : companies) { LOGGER.info("test_read_xls_multiple_sheets :: Output - {}", company); } } // XLSX @Test public void test_read_xlsx_single_sheet() throws ExcelReadException { // Excel Reader LOGGER.info("test_read_xlsx_single_sheet :: Reading file - {}", _filepath_xlsx_single_sheet); GenericExcelReader ger = new GenericExcelReader(_filepath_xlsx_single_sheet); // Read List<Employee> employees = ger.read(Employee.class); Assert.assertNotNull(employees); Assert.assertTrue(employees.size() > 0); for (Employee emp : employees) { LOGGER.info("test_read_xlsx_single_sheet :: Output - {}", emp); } } @Test public void test_read_xlsx_multiple_sheets() throws ExcelReadException { // Excel Reader LOGGER.info("test_read_xlsx_multiple_sheets :: Reading file - {}", _filepath_xlsx_multiple_sheets); GenericExcelReader ger = new GenericExcelReader(_filepath_xlsx_multiple_sheets); // Read Sheet 1 List<Employee> employees = ger.read(0, Employee.class); Assert.assertNotNull(employees); Assert.assertTrue(employees.size() > 0); for (Employee emp : employees) { LOGGER.info("test_read_xlsx_multiple_sheets :: Output - {}", emp); } // Read Sheet 2 List<Company> companies = ger.read(1, Company.class); Assert.assertNotNull(companies); Assert.assertTrue(companies.size() > 0); for (Company company : companies) { LOGGER.info("test_read_xlsx_multiple_sheets :: Output - {}", company); } } // Read to Map @Test public void test_read_xlsx_as_Map() throws ExcelReadException { // Excel 
Reader LOGGER.info("test_read_xlsx_as_Map :: Reading file - {}", _filepath_xlsx_single_sheet); GenericExcelReader ger = new GenericExcelReader(_filepath_xlsx_single_sheet); // Read List<Map<String, Object>> employees = ger.readAsMap(); Assert.assertNotNull(employees); Assert.assertTrue(employees.size() > 0); for (Map<String, Object> emp : employees) { LOGGER.info("test_read_xlsx_single_sheet :: Output - {}", emp); } } }
package com.lucythemoocher.controls; import com.lucythemoocher.game.Game; import android.util.Log; import android.view.MotionEvent; public class PlayerController { private static final int LEFT = -1; private static final int RIGHT = 1; private static final int DOWN = -1; private static final int UP = 1; private static final int DOUBLE_TOUCH_SENSIBILITY = 10; private static int hor_ = 0; private static int ver_ = 0; private static int lastTouch_ = 0; private static int lastHor_ = 0; public static void process(MotionEvent event) { //parcours de tous les points appuys for (int i=0; i<event.getPointerCount(); i++ ) { // X X X // // // Saut if (event.getY(i) < Game.getCam().h()/5) { ver_ = UP; } // // // // Deplacement droit if (event.getX(i) > 4*Game.getCam().w()/5 && event.getY(i) > Game.getCam().h()/5 && event.getY(i) < 4*Game.getCam().h()/5) { hor_ = RIGHT; } // // X // // Deplacement gauche if (event.getX(i) < Game.getCam().w()/5 && event.getY(i) > Game.getCam().h()/5 && event.getY(i) < 4*Game.getCam().h()/5) { hor_ = -1; } // // // // Attaque if (event.getY(i) > 4*Game.getCam().h()/5 && event.getX(i) > Game.getCam().w()/5 && event.getX(i) < 4*Game.getCam().w()/5) { ver_ = DOWN; } } if ( event.getAction() == MotionEvent.ACTION_UP ) { Game.moveStop(); lastHor_ = hor_; lastTouch_ = Game.getTick(); hor_ = 0; ver_ = 0; } } public static void update() { if ( lastHor_ == hor_ && (Game.getTick()-lastTouch_ < DOUBLE_TOUCH_SENSIBILITY) ) { if ( hor_ == 1 ) { Game.moveFastRight(); } else if ( hor_ == -1 ) { Game.moveFastLeft(); } } if ( hor_ == 1 ) { Game.moveRight(); } else if ( hor_ == -1 ) { Game.moveLeft(); } if ( ver_ == 1 ) { Game.moveUp(); } else if ( ver_ == -1 ) { Game.attack(); } ver_ = 0; } }
package com.tikal.jenkins.plugins.multijob;

import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.console.HyperlinkNote;
import hudson.model.Action;
import hudson.model.BallColor;
import hudson.model.BuildListener;
import hudson.model.DependecyDeclarer;
import hudson.model.DependencyGraph;
import hudson.model.DependencyGraph.Dependency;
import hudson.model.Item;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.queue.QueueTaskFuture;
import hudson.scm.ChangeLogSet;
import hudson.scm.ChangeLogSet.Entry;
import hudson.scm.SCM;
import hudson.scm.SCMRevisionState;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import hudson.model.Executor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.regex.Pattern;
import java.util.regex.Matcher;

import net.sf.json.JSONObject;
import jenkins.model.Jenkins;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileNotFoundException;

import org.jenkinsci.lib.envinject.EnvInjectLogger;
import org.jenkinsci.plugins.envinject.EnvInjectBuilderContributionAction;
import org.jenkinsci.plugins.envinject.EnvInjectBuilder;
import org.jenkinsci.plugins.envinject.service.EnvInjectActionSetter;
import org.jenkinsci.plugins.envinject.service.EnvInjectEnvVars;
import org.jenkinsci.plugins.envinject.service.EnvInjectVariableGetter;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;

import com.tikal.jenkins.plugins.multijob.MultiJobBuild.SubBuild;
import com.tikal.jenkins.plugins.multijob.PhaseJobsConfig.KillPhaseOnJobResultCondition;

import org.jenkinsci.plugins.tokenmacro.TokenMacro;

import groovy.util.*;

/**
 * Jenkins build step that runs one "phase" of a MultiJob project: it triggers
 * every configured sub-job in parallel (one {@link SubJobWorker} thread each),
 * waits for their results, optionally retries known random failures (matched
 * against a user-supplied parsing-rules file), and decides via
 * {@link ContinuationCondition} whether the parent build continues to the
 * next phase.
 */
public class MultiJobBuilder extends Builder implements DependecyDeclarer {

	/**
	 * The name of the parameter in the build.getBuildVariables() to enable the job build, regardless
	 * of scm changes.
	 */
	public static final String BUILD_ALWAYS_KEY = "hudson.scm.multijob.build.always";

	/**
	 * List of messages to show by console. Indexed by the value returned from
	 * {@link #getScmChange}; index >= 4 means the sub-job is skipped.
	 */
	private static final String[] TRIGGER_MESSAGES = {
			"	>> [%s] added to build queue.\n",
			"	>> [%s] has changes since last build. Adding to build queue.\n",
			"	>> [%s] has no changes since last build, but it will be adding to build queue.\n",
			"	>> [%s] has no changes since last build, but you have enabled the 'build always' function. Adding to build queue.\n",
			"	>> [%s] has no changes since last build, so it will be skipped.\n",
			"	>> [%s] has been disabled. Skipping it.\n"
	};

	// Phase display name shown in the UI and stored on each SubBuild.
	private String phaseName;
	// Sub-job configurations for this phase (never null, see constructor).
	private List<PhaseJobsConfig> phaseJobs;
	// When the parent build may proceed to the next phase.
	private ContinuationCondition continuationCondition = ContinuationCondition.SUCCESSFUL;

	/**
	 * Databound constructor invoked by Jenkins from the job configuration form.
	 *
	 * @param phaseName             display name of this phase
	 * @param phaseJobs             sub-job configurations (null becomes empty)
	 * @param continuationCondition condition to continue past this phase
	 */
	@DataBoundConstructor
	public MultiJobBuilder(String phaseName, List<PhaseJobsConfig> phaseJobs,
			ContinuationCondition continuationCondition) {
		this.phaseName = phaseName;
		this.phaseJobs = Util.fixNull(phaseJobs);
		this.continuationCondition = continuationCondition;
	}

	/**
	 * Expands token macros in {@code toExpand} and strips any unresolved
	 * <code>${...}</code> placeholders from the result. Expansion failures are
	 * logged and the original string is used.
	 */
	public String expandToken(String toExpand, final AbstractBuild<?, ?> build, final BuildListener listener) {
		String expandedExpression = toExpand;
		try {
			expandedExpression = TokenMacro.expandAll(build, listener, toExpand, false, null);
		} catch (Exception e) {
			listener.getLogger().println(e.getMessage());
		}
		// Remove anything that still looks like an unexpanded variable.
		Pattern pattern = Pattern.compile("(\\$\\{.+?\\})", Pattern.CASE_INSENSITIVE);
		Matcher matcher = pattern.matcher(expandedExpression);
		return matcher.replaceAll("");
	}

	/**
	 * Polls the sub-job's SCM and returns the index of the trigger message
	 * printed to the log. Callers treat a return value >= 4 as "skip this
	 * sub-job".
	 */
	private int getScmChange(AbstractProject subjob, PhaseJobsConfig phaseConfig, AbstractBuild build,
			BuildListener listener, Launcher launcher) throws IOException, InterruptedException {
		final boolean containsLastBuild = (subjob.getLastBuild() != null);
		final SCM scm = subjob.getScm();
		// No previous build => no baseline revision state, and changes assumed.
		final SCMRevisionState scmRS = (containsLastBuild
				? scm.calcRevisionsFromBuild((AbstractBuild) subjob.getLastBuild(), launcher, listener)
				: null);
		final boolean hasChanges = (containsLastBuild
				? scm.poll(subjob, launcher, subjob.getWorkspace(), listener, scmRS).hasChanges()
				: true);
		final boolean buildOnlyIfSCMChanges = phaseConfig.isBuildOnlyIfSCMChanges();
		final boolean buildAlways = Boolean.valueOf((String) (build.getBuildVariables().get(BUILD_ALWAYS_KEY)));
		// NOTE(review): the inner "!buildOnlyIfSCMChanges ? 2" branch is
		// unreachable — buildOnlyIfSCMChanges is already known true at that
		// point, so message index 2 can never be produced. Confirm intent.
		final int message = (!buildOnlyIfSCMChanges) ? 0
				: (hasChanges ? 1 : (!buildOnlyIfSCMChanges ? 2 : ((buildAlways) ? 3 : 4)));
		listener.getLogger().printf(TRIGGER_MESSAGES[message], subjob.getName());
		return message;
	}

	/**
	 * Evaluates a phase-job condition as a Groovy expression (after token
	 * expansion and lower-casing). Any evaluation error yields {@code false}.
	 */
	public boolean evalCondition(final String condition, final AbstractBuild<?, ?> build,
			final BuildListener listener) {
		try {
			return (Boolean) Eval.me(expandToken(condition, build, listener).toLowerCase().trim());
		} catch (Exception e) {
			listener.getLogger().println("Can't evaluate expression, false is assumed.");
			listener.getLogger().println(e.toString());
		}
		return false;
	}

	/**
	 * Runs the phase: filters sub-jobs (disabled / condition / SCM change),
	 * launches one worker thread per remaining sub-job, collects results from
	 * a shared queue, and applies {@link #continuationCondition} to decide the
	 * return value (false aborts the remaining phases).
	 */
	@Override
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public boolean perform(final AbstractBuild<?, ?> build, final Launcher launcher,
			final BuildListener listener) throws InterruptedException, IOException {
		Jenkins jenkins = Jenkins.getInstance();
		MultiJobBuild multiJobBuild = (MultiJobBuild) build;
		MultiJobProject thisProject = multiJobBuild.getProject();
		// Resolve each configured job name to its AbstractProject.
		Map<PhaseSubJob, PhaseJobsConfig> phaseSubJobs = new HashMap<PhaseSubJob, PhaseJobsConfig>(
				phaseJobs.size());
		for (PhaseJobsConfig phaseJobConfig : phaseJobs) {
			Item item = jenkins.getItemByFullName(phaseJobConfig.getJobName());
			if (item instanceof AbstractProject) {
				AbstractProject job = (AbstractProject) item;
				phaseSubJobs.put(new PhaseSubJob(job), phaseJobConfig);
			}
		}
		List<SubTask> subTasks = new ArrayList<SubTask>();
		for (PhaseSubJob phaseSubJob : phaseSubJobs.keySet()) {
			AbstractProject subJob = phaseSubJob.job;
			if (subJob.isDisabled()) {
				listener.getLogger().println(String.format("Skipping %s. This Job has been disabled.", subJob.getName()));
				continue;
			}
			PhaseJobsConfig phaseConfig = phaseSubJobs.get(phaseSubJob);
			// Skip when a user-defined condition evaluates to false.
			if (phaseConfig.getEnableCondition() && phaseConfig.getCondition() != null) {
				if (!evalCondition(phaseConfig.getCondition(), build, listener)) {
					listener.getLogger().println(String.format("Skipping %s. Condition is evaluate to false.", subJob.getName()));
					continue;
				}
			}
			// Skip when "build only if SCM changes" is set and nothing changed.
			if (phaseConfig.isBuildOnlyIfSCMChanges()) {
				if (getScmChange(subJob, phaseConfig, multiJobBuild, listener, launcher) >= 4) {
					continue;
				}
			}
			reportStart(listener, subJob);
			List<Action> actions = new ArrayList<Action>();
			prepareActions(multiJobBuild, subJob, phaseConfig, listener, actions);
			// Wait for any queued build of this sub-job to leave the queue.
			while (subJob.isInQueue()) {
				TimeUnit.SECONDS.sleep(subJob.getQuietPeriod());
			}
			if (!phaseConfig.isDisableJob()) {
				subTasks.add(new SubTask(subJob, phaseConfig, actions, multiJobBuild));
			} else {
				listener.getLogger().println(String.format("Warning: %s subjob is disabled.", subJob.getName()));
			}
		}
		if (subTasks.size() < 1)
			return true;
		// One worker thread per sub-task; workers report back via the queue.
		ExecutorService executor = Executors.newFixedThreadPool(subTasks.size());
		Set<Result> jobResults = new HashSet<Result>();
		BlockingQueue<SubTask> queue = new ArrayBlockingQueue<SubTask>(subTasks.size());
		for (SubTask subTask : subTasks) {
			Runnable worker = new SubJobWorker(thisProject, listener, subTask, queue);
			executor.execute(worker);
		}
		try {
			executor.shutdown();
			int resultCounter = 0;
			// Poll until every sub-task has reported a result.
			while (!executor.isTerminated()) {
				SubTask subTask = queue.poll(5, TimeUnit.SECONDS);
				if (subTask != null) {
					resultCounter++;
					if (subTask.result != null) {
						jobResults.add(subTask.result);
						checkPhaseTermination(subTask, subTasks, listener);
					}
				}
				if (subTasks.size() <= resultCounter) {
					break;
				}
			}
			executor.shutdownNow();
		} catch (InterruptedException exception) {
			// Parent build aborted: cancel all sub-jobs, give workers up to
			// 20 seconds to wind down, then re-signal the interruption.
			listener.getLogger().println("Aborting all subjobs.");
			for (SubTask _subTask : subTasks) {
				_subTask.cancelJob();
			}
			int i = 0;
			while (!executor.isTerminated() && i < 20) {
				Thread.sleep(1000);
				i++;
			}
			throw new InterruptedException();
		}
		// Any result failing the continuation condition stops the build.
		for (Result result : jobResults) {
			if (!continuationCondition.isContinue(result)) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Worker thread that drives one sub-job build to completion, including
	 * the optional known-random-failure retry strategy, and then posts its
	 * {@link SubTask} (with {@code result} set) onto the shared queue.
	 */
	public final class SubJobWorker extends Thread {
		final private MultiJobProject multiJobProject;
		final private BuildListener listener;
		private SubTask subTask;
		private BlockingQueue<SubTask> queue;
		// Lazily compiled patterns from the phase's parsing-rules file.
		private List<Pattern> compiledPatterns;

		public SubJobWorker(MultiJobProject multiJobProject, BuildListener listener, SubTask subTask,
				BlockingQueue<SubTask> queue) {
			this.multiJobProject = multiJobProject;
			this.listener = listener;
			this.subTask = subTask;
			this.queue = queue;
		}

		/**
		 * Waits for the sub-build to start and finish (polling its future),
		 * records SubBuild status on the parent, retries known random
		 * failures up to maxRetries, and always enqueues the sub-task at the
		 * end so {@code perform} can account for it.
		 */
		public void run() {
			Result result = null;
			AbstractBuild jobBuild = null;
			try {
				int maxRetries = subTask.phaseConfig.getMaxRetries();
				if (!subTask.phaseConfig.getEnableRetryStrategy()) {
					maxRetries = 0;
				}
				int retry = 0;
				boolean finish = false;
				while (retry <= maxRetries && !finish) {
					retry++;
					QueueTaskFuture<AbstractBuild> future = (QueueTaskFuture<AbstractBuild>) subTask.future;
					while (true) {
						// Cancellation requested (e.g. kill-phase condition).
						if (subTask.isCancelled()) {
							if (jobBuild != null) {
								Executor exect = jobBuild.getExecutor();
								if (exect != null) {
									exect.interrupt(Result.ABORTED);
								}
								reportFinish(listener, jobBuild, Result.ABORTED);
								abortSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild);
								finish = true;
								break;
							}
						}
						try {
							// Block (5s at a time) until the build starts.
							jobBuild = future.getStartCondition().get(5, TimeUnit.SECONDS);
						} catch (Exception e) {
							if (e instanceof TimeoutException)
								continue;
							else {
								throw e;
							}
						}
						updateSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild);
						if (future.isDone()) {
							break;
						}
						Thread.sleep(2500);
					}
					if (jobBuild != null && !finish) {
						result = jobBuild.getResult();
						reportFinish(listener, jobBuild, result);
						// Retry only completed failures with the strategy on.
						if (result.isWorseOrEqualTo(Result.FAILURE) && result.isCompleteBuild()
								&& subTask.phaseConfig.getEnableRetryStrategy()) {
							if (isKnownRandomFailure(jobBuild)) {
								if (retry <= maxRetries) {
									listener.getLogger().println("Known failure detected, retrying this build. Try " + retry + " of " + maxRetries + ".");
									updateSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild, result, true);
									// Schedule a fresh build for the next loop pass.
									subTask.GenerateFuture();
								} else {
									listener.getLogger().println("Known failure detected, max retries (" + maxRetries + ") exceeded.");
									updateSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild, result);
								}
							} else {
								listener.getLogger().println("Failed the build, the failure doesn't match the rules.");
								updateSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild, result);
								finish = true;
							}
						} else {
							updateSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild, result);
							finish = true;
						}
						// Merge the sub-build's changelog and env vars into
						// the parent build.
						ChangeLogSet<Entry> changeLogSet = jobBuild.getChangeSet();
						subTask.multiJobBuild.addChangeLogSet(changeLogSet);
						addBuildEnvironmentVariables(subTask.multiJobBuild, jobBuild, listener);
						subTask.result = result;
					}
				}
			} catch (Exception e) {
				if (e instanceof InterruptedException) {
					if (jobBuild != null) {
						reportFinish(listener, jobBuild, Result.ABORTED);
						abortSubBuild(subTask.multiJobBuild, multiJobProject, jobBuild);
						subTask.result = Result.ABORTED;
					}
				} else {
					listener.getLogger().println(e.toString());
					e.printStackTrace();
				}
			}
			// Never started at all: record a "not built" SubBuild.
			if (jobBuild == null) {
				updateSubBuild(subTask.multiJobBuild, multiJobProject, subTask.phaseConfig);
			}
			// Always report back so perform()'s result counter advances.
			queue.add(subTask);
		}

		/**
		 * Compiles the phase's parsing-rules file (one regex per line) on
		 * first use and caches the result for subsequent retries.
		 */
		private List<Pattern> getCompiledPattern() throws FileNotFoundException, InterruptedException {
			if (compiledPatterns == null) {
				compiledPatterns = new ArrayList<Pattern>();
				try {
					listener.getLogger().println("Scanning failed job console output using parsing rule file " + subTask.phaseConfig.getParsingRulesPath() + ".");
					final File rulesFile = new File(subTask.phaseConfig.getParsingRulesPath());
					final BufferedReader reader = new BufferedReader(new FileReader(rulesFile.getAbsolutePath()));
					try {
						String line;
						while ((line = reader.readLine()) != null) {
							compiledPatterns.add(Pattern.compile(line));
						}
					} finally {
						reader.close();
					}
				} catch (Exception e) {
					// NOTE(review): rethrowing fresh exceptions here drops the
					// original message/stack trace of the cause.
					if (e instanceof InterruptedException) {
						throw new InterruptedException();
					} else if (e instanceof FileNotFoundException) {
						throw new FileNotFoundException();
					} else {
						listener.getLogger().println(e.toString());
						e.printStackTrace();
					}
				}
			}
			return compiledPatterns;
		}

		/**
		 * Scans lines from a shared reader against the failure patterns and
		 * posts a single {@link LineQueue} with the outcome when done.
		 */
		private final class LineAnalyser extends Thread {
			final private BufferedReader reader;
			final private List<Pattern> patterns;
			private BlockingQueue<LineQueue> finishQueue;

			public LineAnalyser(BufferedReader reader, List<Pattern> patterns,
					BlockingQueue<LineQueue> finishQueue) {
				this.reader = reader;
				this.patterns = patterns;
				this.finishQueue = finishQueue;
			}

			public void run() {
				boolean errorFound = false;
				try {
					String line;
					// NOTE(review): the reader is shared by several analyser
					// threads without synchronization — confirm BufferedReader
					// access here is safe under the intended usage.
					while (reader.ready() && !errorFound) {
						line = reader.readLine();
						if (line != null) {
							for (Pattern pattern : patterns) {
								if (pattern.matcher(line).find()) {
									errorFound = true;
									break;
								}
							}
						}
					}
				} catch (Exception e) {
					if (e instanceof IOException) {
						// Nothing
					} else {
						listener.getLogger().println(e.toString());
						e.printStackTrace();
					}
				} finally {
					// Always report, even on error, so the caller's counter
					// reaches numberOfThreads.
					finishQueue.add(new LineQueue(errorFound));
				}
			}
		}

		/**
		 * Returns true when the failed build's console log matches one of the
		 * user-supplied "known random failure" patterns. A missing rules file
		 * counts as no match.
		 */
		private boolean isKnownRandomFailure(AbstractBuild build) throws InterruptedException {
			boolean failure = false;
			try {
				final List<Pattern> patterns = getCompiledPattern();
				final File logFile = build.getLogFile();
				final BufferedReader reader = new BufferedReader(new FileReader(logFile.getAbsolutePath()));
				try {
					int numberOfThreads = 10; // Todo : Add this in Configure section
					if (numberOfThreads < 0) {
						numberOfThreads = 1;
					}
					ExecutorService executorAnalyser = Executors.newFixedThreadPool(numberOfThreads);
					BlockingQueue<LineQueue> finishQueue = new ArrayBlockingQueue<LineQueue>(numberOfThreads);
					for (int i = 0; i < numberOfThreads; i++) {
						Runnable worker = new LineAnalyser(reader, patterns, finishQueue);
						executorAnalyser.execute(worker);
					}
					executorAnalyser.shutdown();
					int resultCounter = 0;
					// Stop early on the first reported match.
					while (!executorAnalyser.isTerminated()) {
						resultCounter++;
						LineQueue lineQueue = finishQueue.take();
						if (lineQueue.hasError()) {
							failure = true;
							break;
						} else if (numberOfThreads == resultCounter) {
							break;
						}
					}
					executorAnalyser.shutdownNow();
				} finally {
					reader.close();
				}
			} catch (Exception e) {
				if (e instanceof InterruptedException) {
					throw new InterruptedException();
				} else if (e instanceof FileNotFoundException) {
					listener.getLogger().println("Parser rules file not found.");
					failure = false;
				} else {
					listener.getLogger().println(e.toString());
					e.printStackTrace();
				}
			}
			return failure;
		}
	}

	/**
	 * Applies the kill-phase condition of a finished sub-task: when the
	 * condition matches, cancels every remaining sub-task and returns true.
	 */
	protected boolean checkPhaseTermination(SubTask subTask, List<SubTask> subTasks,
			final BuildListener listener) {
		try {
			KillPhaseOnJobResultCondition killCondition = subTask.phaseConfig.getKillPhaseOnJobResultCondition();
			if (killCondition.equals(KillPhaseOnJobResultCondition.NEVER) && subTask.result != Result.ABORTED) {
				return false;
			}
			if (killCondition.isKillPhase(subTask.result)) {
				if (subTask.result != Result.ABORTED || subTask.phaseConfig.getAbortAllJob()) {
					for (SubTask _subTask : subTasks) {
						_subTask.cancelJob();
					}
					return true;
				}
			}
		} catch (Exception e) {
			// NOTE(review): printf treats e.toString() as a format string; a
			// '%' in the exception text would throw. println would be safer.
			listener.getLogger().printf(e.toString());
			return false;
		}
		return false;
	}

	/** Logs a hyperlinked "starting" line for the given sub-job. */
	private void reportStart(BuildListener listener, AbstractProject subJob) {
		listener.getLogger().printf(
				"Starting build job %s.\n",
				HyperlinkNote.encodeTo('/' + subJob.getUrl(), subJob.getFullName()));
	}

	/** Logs a hyperlinked "finished" line with the sub-build's result. */
	private void reportFinish(BuildListener listener, AbstractBuild jobBuild, Result result) {
		listener.getLogger().println(
				"Finished Build : "
						+ HyperlinkNote.encodeTo("/" + jobBuild.getUrl() + "/",
								String.valueOf(jobBuild.getDisplayName()))
						+ " of Job : "
						+ HyperlinkNote.encodeTo('/' + jobBuild.getProject().getUrl(),
								jobBuild.getProject().getFullName())
						+ " with status : "
						+ HyperlinkNote.encodeTo('/' + jobBuild.getUrl() + "/console",
								result.toString()));
	}

	/** Records a "not built" SubBuild for a sub-job that never started. */
	private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject,
			PhaseJobsConfig phaseConfig) {
		SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(),
				phaseConfig.getJobName(), 0, phaseName, null, BallColor.NOTBUILT.getImage(),
				"not built", "");
		multiJobBuild.addSubBuild(subBuild);
	}

	/** Records/refreshes a SubBuild for a started (result not yet known) build. */
	private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject,
			AbstractBuild jobBuild) {
		SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(),
				jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, null,
				jobBuild.getIconColor().getImage(), jobBuild.getDurationString(), jobBuild.getUrl());
		multiJobBuild.addSubBuild(subBuild);
	}

	/** Records a SubBuild with its final result. */
	private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject,
			AbstractBuild jobBuild, Result result) {
		SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(),
				jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, result,
				jobBuild.getIconColor().getImage(), jobBuild.getDurationString(), jobBuild.getUrl());
		multiJobBuild.addSubBuild(subBuild);
	}

	/** Records a SubBuild with its result, flagging it as a retry attempt. */
	private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject,
			AbstractBuild jobBuild, Result result, boolean retry) {
		SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(),
				jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, result,
				jobBuild.getIconColor().getImage(), jobBuild.getDurationString(), jobBuild.getUrl(),
				retry, false);
		multiJobBuild.addSubBuild(subBuild);
	}

	/** Records a SubBuild marked as aborted. */
	private void abortSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject,
			AbstractBuild jobBuild) {
		SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(),
				jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, Result.ABORTED,
				BallColor.ABORTED.getImage(), "", jobBuild.getUrl(), false, true);
		multiJobBuild.addSubBuild(subBuild);
	}

	/**
	 * Publishes the sub-build's name/number/result into the parent build's
	 * environment (LAST_TRIGGERED_JOB_NAME, &lt;JOB&gt;_BUILD_NUMBER, etc.)
	 * via the EnvInject plugin.
	 */
	@SuppressWarnings("rawtypes")
	private void addBuildEnvironmentVariables(MultiJobBuild thisBuild, AbstractBuild jobBuild,
			BuildListener listener) {
		// Env variables map
		Map<String, String> variables = new HashMap<String, String>();
		String jobName = jobBuild.getProject().getName();
		String jobNameSafe = jobName.replaceAll("[^A-Za-z0-9]", "_")
				.toUpperCase();
		String buildNumber = Integer.toString(jobBuild.getNumber());
		String buildResult = jobBuild.getResult().toString();
		// These will always reference the last build
		variables.put("LAST_TRIGGERED_JOB_NAME", jobName);
		variables.put(jobNameSafe + "_BUILD_NUMBER", buildNumber);
		variables.put(jobNameSafe + "_BUILD_RESULT", buildResult);
		// NOTE(review): 'variables' is freshly created above, so these get()
		// calls always return null and the else-branches never run; the
		// accumulation across sub-builds presumably happens inside
		// injectEnvVars via previous-step env vars — confirm.
		if (variables.get("TRIGGERED_JOB_NAMES") == null) {
			variables.put("TRIGGERED_JOB_NAMES", jobName);
		} else {
			String triggeredJobNames = variables.get("TRIGGERED_JOB_NAMES") + "," + jobName;
			variables.put("TRIGGERED_JOB_NAMES", triggeredJobNames);
		}
		if (variables.get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe) == null) {
			variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, "1");
		} else {
			String runCount = Integer.toString(Integer.parseInt(variables
					.get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe)) + 1);
			variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, runCount);
		}
		// Set the new build variables map
		injectEnvVars(thisBuild, listener, variables);
	}

	/**
	 * Method for properly injecting environment variables via EnvInject plugin.
	 * Method based off logic in {@link EnvInjectBuilder#perform}
	 */
	private void injectEnvVars(AbstractBuild<?, ?> build, BuildListener listener,
			Map<String, String> incomingVars) {
		if (build != null && incomingVars != null) {
			EnvInjectLogger logger = new EnvInjectLogger(listener);
			FilePath ws = build.getWorkspace();
			EnvInjectActionSetter envInjectActionSetter = new EnvInjectActionSetter(ws);
			EnvInjectEnvVars envInjectEnvVarsService = new EnvInjectEnvVars(logger);
			try {
				EnvInjectVariableGetter variableGetter = new EnvInjectVariableGetter();
				Map<String, String> previousEnvVars = variableGetter
						.getEnvVarsPreviousSteps(build, logger);

				// Get current envVars
				Map<String, String> variables = new HashMap<String, String>(
						previousEnvVars);

				// Resolve variables
				final Map<String, String> resultVariables = envInjectEnvVarsService
						.getMergedVariables(variables, incomingVars);

				// Set the new build variables map
				build.addAction(new EnvInjectBuilderContributionAction(
						resultVariables));

				// Add or get the existing action to add new env vars
				envInjectActionSetter.addEnvVarsToEnvInjectBuildAction(build,
						resultVariables);
			} catch (Throwable throwable) {
				listener.getLogger()
						.println(
								"[MultiJob] - [ERROR] - Problems occurs on injecting env vars as a build step: "
										+ throwable.getMessage());
			}
		}
	}

	/**
	 * Collects the actions (parameters etc.) to pass to the sub-job build.
	 */
	@SuppressWarnings("rawtypes")
	private void prepareActions(AbstractBuild build, AbstractProject project,
			PhaseJobsConfig projectConfig, BuildListener listener, List<Action> actions)
			throws IOException, InterruptedException {
		List<Action> parametersActions = null;
		// if (projectConfig.hasProperties()) {
		parametersActions = (List<Action>) projectConfig.getActions(build, listener, project,
				projectConfig.isCurrParams());
		actions.addAll(parametersActions);
	}

	public String getPhaseName() {
		return phaseName;
	}

	public void setPhaseName(String phaseName) {
		this.phaseName = phaseName;
	}

	public List<PhaseJobsConfig> getPhaseJobs() {
		return phaseJobs;
	}

	public void setPhaseJobs(List<PhaseJobsConfig> phaseJobs) {
		this.phaseJobs = phaseJobs;
	}

	/** Returns true if any configured phase job has the given display name. */
	public boolean phaseNameExist(String phaseName) {
		for (PhaseJobsConfig phaseJob : phaseJobs) {
			if (phaseJob.getDisplayName().equals(phaseName)) {
				return true;
			}
		}
		return false;
	}

	/** Thin wrapper used as the map key for a resolved sub-job project. */
	private final static class PhaseSubJob {
		AbstractProject job;

		PhaseSubJob(AbstractProject job) {
			this.job = job;
		}
	}

	/** Descriptor registering this builder for MultiJob projects only. */
	@Extension
	public static class DescriptorImpl extends BuildStepDescriptor<Builder> {
		@SuppressWarnings("rawtypes")
		@Override
		public boolean isApplicable(Class<? extends AbstractProject> jobType) {
			return jobType.equals(MultiJobProject.class);
		}

		@Override
		public String getDisplayName() {
			return "MultiJob Phase";
		}

		@Override
		public Builder newInstance(StaplerRequest req, JSONObject formData) throws FormException {
			return req.bindJSON(MultiJobBuilder.class, formData);
		}

		@Override
		public boolean configure(StaplerRequest req, JSONObject formData) {
			save();
			return true;
		}
	}

	/**
	 * Declares (non-triggering) dependencies from the owning project to every
	 * sub-job so they show up in Jenkins' dependency graph.
	 */
	@SuppressWarnings("rawtypes")
	public void buildDependencyGraph(AbstractProject owner, DependencyGraph graph) {
		Jenkins jenkins = Jenkins.getInstance();
		List<PhaseJobsConfig> phaseJobsConfigs = getPhaseJobs();
		if (phaseJobsConfigs == null)
			return;
		for (PhaseJobsConfig project : phaseJobsConfigs) {
			Item topLevelItem = jenkins.getItemByFullName(project.getJobName());
			if (topLevelItem instanceof AbstractProject) {
				Dependency dependency = new Dependency(owner, (AbstractProject) topLevelItem) {
					@Override
					public boolean shouldTriggerBuild(AbstractBuild build, TaskListener listener,
							List<Action> actions) {
						// Sub-jobs are started explicitly by this builder.
						return false;
					}
				};
				graph.addDependency(dependency);
			}
		}
	}

	/**
	 * Renames (newName != null) or removes (newName == null) every phase job
	 * referencing {@code oldName}; returns whether anything changed.
	 */
	public boolean onJobRenamed(String oldName, String newName) {
		boolean changed = false;
		for (Iterator i = phaseJobs.iterator(); i.hasNext();) {
			// Note: this local intentionally shadows the 'phaseJobs' field.
			PhaseJobsConfig phaseJobs = (PhaseJobsConfig) i.next();
			String jobName = phaseJobs.getJobName();
			if (jobName.trim().equals(oldName)) {
				if (newName != null) {
					phaseJobs.setJobName(newName);
					changed = true;
				} else {
					i.remove();
					changed = true;
				}
			}
		}
		return changed;
	}

	/** Removes references to a deleted job (rename to null). */
	public boolean onJobDeleted(String oldName) {
		return onJobRenamed(oldName, null);
	}

	/**
	 * When the parent MultiJob build is allowed to continue to the next phase,
	 * given a sub-job result.
	 */
	public static enum ContinuationCondition {

		SUCCESSFUL("Successful") {
			@Override
			public boolean isContinue(Result result) {
				return result.equals(Result.SUCCESS);
			}
		},
		UNSTABLE("Stable or Unstable but not Failed") {
			@Override
			public boolean isContinue(Result result) {
				return result.isBetterOrEqualTo(Result.UNSTABLE);
			}
		},
		COMPLETED("Complete (always continue)") {
			@Override
			public boolean isContinue(Result result) {
				return result.equals(Result.ABORTED) ? true : result
						.isBetterOrEqualTo(Result.FAILURE);
			}
		},
		FAILURE("Failed") {
			@Override
			public boolean isContinue(Result result) {
				return result.equals(Result.ABORTED)
						|| result.isBetterOrEqualTo(Result.FAILURE);
			}
		};

		abstract public boolean isContinue(Result result);

		private ContinuationCondition(String label) {
			this.label = label;
		}

		final private String label;

		public String getLabel() {
			return label;
		}
	}

	public ContinuationCondition getContinuationCondition() {
		return continuationCondition;
	}

	public void setContinuationCondition(ContinuationCondition continuationCondition) {
		this.continuationCondition = continuationCondition;
	}
}
package com.github.mnicky.bible4j.data; import static org.testng.Assert.assertEquals; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import com.github.mnicky.bible4j.data.BibleBook; import com.github.mnicky.bible4j.data.Bookmark; import com.github.mnicky.bible4j.data.Position; /** * Unit tests for Bookmark class. */ public final class Bookmark_Test { private Bookmark b2, b3, b4; @BeforeMethod public void beforeMethod() { b2 = new Bookmark("understanding", new Verse("And how hear we every man in our own tongue, wherein we were born?", new Position(BibleBook.ACTS, 2, 8), new BibleVersion("KJV", "en"))); b3 = new Bookmark("joel", new Verse("But this is that which was spoken by the prophet Joel;", new Position(BibleBook.ACTS, 2, 16), new BibleVersion("KJV", "en"))); b4 = new Bookmark("joel", new Verse("But this is that which was spoken by the prophet Joel;", new Position(BibleBook.ACTS, 2, 16), new BibleVersion("KJV", "en"))); } @Test public void testToString() { String exp = "joel: " + "But this is that which was spoken by the prophet Joel; - ACTS 2,16; (KJV, en)"; String tested = b3.toString(); assertEquals(tested, exp); } //TODO add more equals() tests @Test public void testEqualsForTrue() { boolean exp = true; boolean act = b4.equals(b3); assertEquals(act, exp); } @Test public void testEqualsForFalse() { boolean exp = false; boolean act = b2.equals(b3); assertEquals(act, exp); } @Test public void testHashCodeForConsistency() { int h3 = b3.hashCode(); int h4 = b4.hashCode(); Assert.assertEquals(h3 == h4, true); } @Test public void testHashCodeForConsistencyWithEqualsForTrue() { int h3 = b3.hashCode(); int h4 = b4.hashCode(); boolean b = b3.equals(b4); Assert.assertEquals(h3 == h4, b); } @Test public void testHashCodeForConsistencyWithEqualsForFalse() { int h2 = b2.hashCode(); int h4 = b4.hashCode(); boolean b = b2.equals(b4); Assert.assertEquals(h2 == h4, b); } @Test public void 
shouldCompareBookmarksByPositionsOfTheirVerses() { assertEquals(b2.compareTo(b3), -1); assertEquals(b3.compareTo(b2), 1); assertEquals(b2.compareTo(b2), 0); } }
package org.glassfish.jms2lab;

import javax.annotation.Resource;
import javax.inject.Inject;
import javax.jms.ConnectionFactory;
import javax.jms.JMSContext;
import javax.jms.JMSException;
import javax.jms.Queue;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.junit.InSequence;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.junit.runner.RunWith;

/*
 * In the course of the lab, you will be completing the unit tests below using
 * the comments provided. The tests are designed to be progressively more
 * challenging. There is no need to finish all the tests in the time that we
 * have. You can always finish it on your own or simply look at the solutions
 * provided. In fact, feel free to look at the solution at any time if
 * absolutely stuck. To get the most out of the lab though, it's best to try
 * to finish the tests on your own.
 *
 * The tests use Arquillian to run your code against a running
 * GlassFish instance. The instance is assumed to be running on localhost,
 * ports 8080/4848. For the purposes of this lab, you won't need to know much
 * about Arquillian - everything has already been setup for you. Once you
 * have GlassFish running, there's no need to stop/start it during the lab.
 */
@RunWith(Arquillian.class)
public class Jms2Test {

    // Helper component (deployed with the test archive) used to receive and
    // return message payloads so individual tests can assert on them.
    @Inject
    private MessageReceiver messageReceiver;

    /*
     * Due to a bug that has been filed, Arquillian is unable to inject
     * the default connection factory bound under
     * 'java:comp/defaultJMSConnectionFactory'. As a workaround, we are using
     * a custom connection factory. In a Java EE environment, this won't be
     * necessary.
     */
    @Resource(lookup = "java:app/jms/MyConnectionFactory")
    private ConnectionFactory myConnectionFactory;

    // These are the three queues we will be using for our tests. They
    // have already been setup for you in the web.xml for the test.
    @Resource(lookup = "java:app/jms/MyQueue")
    private Queue myQueue;

    @Resource(lookup = "java:app/jms/MyQueue2")
    private Queue myQueue2;

    @Resource(lookup = "java:app/jms/RequestQueue")
    private Queue requestQueue;

    /*
     * This factory method defines the actual test artifacts we are deploying
     * to GlassFish for testing. As you add test artifacts, make sure to
     * list them here, typically using the addClass method.
     */
    @Deployment
    public static WebArchive createDeployment() {
        return ShrinkWrap
                .create(WebArchive.class, "jms2lab.war")
                .addClass(MessageReceiver.class)
                .addClass(MyMessageListener.class)
                .addAsWebInfResource("web.xml", "web.xml");
    }

    /*
     * In this very simple test, you will be sending a simple string
     * message to the injected queue 'myQueue' using all the defaults.
     *
     * To complete the test, the assertion (commented out) utilizes a message
     * receiver utility that's been provided for you. In the next test, you
     * will not use this utility but write your own message receive code.
     *
     * Hint: You could borrow the code in the receiver utility for the next
     * test :-).
     */
    @Test
    @InSequence(1)
    public void testSendStringMessage() {
        /*
         * Although Arquillian has some basic injection capabilities, it is not
         * really a Java EE environment. As a result, it currently cannot inject
         * a managed JMS context. A bug has been filed to get this fixed. For
         * now we will need to manually create and manage a JMS context from a
         * connection factory. Fortunately, this gives us an opportunity to
         * show the JMS 2 auto-closable feature :-).
         *
         * In any other proper Java EE component that you write in the lab,
         * such as an MDB or EJB, you can use an injected JMS context. That's
         * what we do in message receiver utility.
         */
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Write the code to send the message here.
        }

        // assertEquals("message1",
        // messageReceiver.receiveStringMessage(myQueue));
    }

    /*
     * In this very simple test, you will be sending a simple string
     * message to the injected queue 'myQueue' using all the defaults and then
     * receiving it.
     */
    @Test
    @InSequence(2)
    public void testReceiveStringMessage() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Write the code to send and then receive the message here.
            // Assert that you received the same message that you sent.
        }
    }

    /*
     * In this test, you will be sending a serializable object message to the
     * injected queue 'myQueue' and then receiving it.
     */
    @Test
    @InSequence(3)
    public void testSendObjectMessage() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here.
        }
    }

    /*
     * In this test, you will be sending a message to the injected queue
     * 'myQueue' and then receiving it. Try to make the message as efficient as
     * possible using message and producer level options. There isn't a right
     * or wrong answer here, so feel free to explore and be creative. Do
     * whatever you think will reduce overhead and system resource usage. Lost
     * messages are OK.
     */
    @Test
    @InSequence(4)
    public void testSendEfficientMessage() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here.
        }
    }

    /*
     * In this test, you will send and receive a message with custom and
     * built-in/provider-supplied properties using the injected queue
     * 'myQueue'.
     *
     * Hint: In this case you will need to receive the actual JMS message as
     * opposed to just the body payload. For the send, there's still no need
     * to create a JMS message, although you could if you want.
     */
    @Test
    @InSequence(5)
    public void testSendMessageWithProperties() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here. Make sure to assert properties as well as the
            // payload.
        }
    }

    /*
     * In this test, you will send and receive a message using a delivery
     * delay with the injected queue 'myQueue'. You'll have to be creative
     * in figuring out how to assert that the delay worked properly.
     *
     * Hint: Look at the information that you get with the JMS message that
     * the provider sets for you.
     */
    @Test
    @InSequence(6)
    public void testDeliveryDelay() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here.
        }
    }

    /*
     * In this test, you will send a message that will be received by a
     * JMS MDB. You will use the injected queue 'myQueue2'. The MDB,
     * MyMessageListener, has already been written for you but you should take
     * time to explore it. In the next test, you'll need to write an MDB
     * yourself.
     *
     * Hint: Remember to account for the fact that the MDB is running in a
     * separate thread from the test.
     */
    @Test
    @InSequence(7)
    public void testMessageListener() {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here.

            assertEquals("message7", MyMessageListener.getMessageText());
        }
    }

    /*
     * In this test, you'll utilize the request/response paradigm in JMS. In
     * this paradigm, you send a message to a request queue and wait for a
     * response. When you send the request, you set a reply to queue that the
     * response will be sent to. This reply to queue is often a temporary queue
     * alive only for the session. When the recipient receives the request,
     * in addition to using the reply to queue to send a response, they must
     * also set the correlation ID. The correlation ID is set to the message
     * ID of the request. You must receive the response message and assert that
     * the request/response paradigm was implemented correctly. You must
     * implement the recipient as an MDB. Use the injected queue 'requestQueue'
     * as the request queue.
     */
    @Test
    @InSequence(8)
    public void testRequestResponse() throws JMSException {
        try (JMSContext jmsContext = myConnectionFactory.createContext()) {
            // Your code here.
        }
    }
}
package com.zeljic.imgoptimizer.controllers; import java.io.File; import java.io.FilenameFilter; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ResourceBundle; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.geometry.Pos; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.scene.control.ProgressBar; import javafx.scene.control.TableCell; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TextField; import javafx.scene.control.TitledPane; import javafx.scene.control.ToggleButton; import javafx.scene.control.ToggleGroup; import javafx.scene.control.cell.CheckBoxTableCell; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.stage.DirectoryChooser; import javafx.stage.FileChooser; import javafx.util.Callback; import com.zeljic.imgoptimizer.storage.Item; import com.zeljic.imgoptimizer.storage.Storage; import com.zeljic.imgoptimizer.uil.Loader; public class BootController implements Initializable { @FXML private ToggleButton btnFiles, btnDirectory; @FXML private ToggleGroup tgType; @FXML private TextField txtLocation; @FXML private Button btnBrowse, btnOptimize, btnCancel, btnAll, btnNone, btnInverse, btnClearAll; @FXML private TableView<Item> tblMain; @FXML private TitledPane tlpSelect; @FXML private Label lblInfo; @FXML private ProgressBar pbMain; @FXML private TableColumn<Item, String> tbcName, tbcSize, tbcStatus; @FXML private TableColumn<Item, Item> tbcType; @FXML private TableColumn<Item, Boolean> tbcHash; @Override public void initialize(URL url, ResourceBundle bundle) { prepareTblMain(); } @FXML private void btnFilesOnAction(ActionEvent e) { if (tgType.getSelectedToggle() != null) { btnFiles.setSelected(true); tlpSelect.setText("Select Files"); } else { btnDirectory.setSelected(true); tlpSelect.setText("Select 
Directory"); } } @FXML private void btnDirectoryOnAction(ActionEvent e) { if (tgType.getSelectedToggle() != null) { btnDirectory.setSelected(true); tlpSelect.setText("Select Directory"); } else { btnFiles.setSelected(true); tlpSelect.setText("Select Files"); } } @FXML private void btnBrowseOnAction(ActionEvent e) { Storage tmpStorage = Storage.getInstance(); if (btnFiles.isSelected()) { FileChooser fileChooser = new FileChooser(); fileChooser.setTitle("Open Resource File"); fileChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("All Images", "*.jpg", "*.jpeg", "*.png", "*.bmp")); List<File> fcResult = fileChooser.showOpenMultipleDialog(Loader.getInstance("Boot").getStage()); if (fcResult != null && fcResult.size() > 0) { Storage.getInstance().clearStorage(); fcResult.parallelStream().forEach(f -> tmpStorage.addItem(new Item(f))); txtLocation.setText(fcResult.get(0).getParentFile().getAbsolutePath()); } } else { DirectoryChooser directoryChooser = new DirectoryChooser(); directoryChooser.setTitle("Open Resource Directory"); File folder = directoryChooser.showDialog(Loader.getInstance("Boot").getStage()); ArrayList<String> allowed = new ArrayList<String>(Arrays.asList(".jpg", ".jpeg", ".png", ".bmp")); if (folder != null) { Storage.getInstance().clearStorage(); Arrays.asList(folder.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { for (String ext : allowed) if (name.endsWith(ext)) return true; return false; } })).parallelStream().forEach(f -> tmpStorage.addItem(new Item(f))); txtLocation.setText(folder.getAbsolutePath()); } } tblMain.setItems(tmpStorage.getObservableList()); } @FXML private void btnAllOnAction(ActionEvent e) { tblMain.getItems().forEach(item -> item.getCheckProperty().set(true)); } @FXML private void btnNoneOnAction(ActionEvent e) { tblMain.getItems().forEach(item -> item.getCheckProperty().set(false)); } @FXML private void btnInverseOnAction(ActionEvent e) { tblMain.getItems().forEach(item -> 
item.getCheckProperty().set(!item.getCheckProperty().get())); } @FXML private void btnClearAllOnAction(ActionEvent e) { Storage.getInstance().getObservableList().clear(); tblMain.getItems().clear(); } private void prepareTblMain() { tbcName.setCellValueFactory(item -> item.getValue().getPathProperty()); tbcSize.setCellValueFactory(item -> item.getValue().getSizeProperty().asString()); tbcType.setCellValueFactory(item -> item.getValue().getItemProperty()); tbcHash.setCellValueFactory(item -> item.getValue().getCheckProperty()); tbcHash.setCellFactory(CheckBoxTableCell.forTableColumn(tbcHash)); tbcType.setCellFactory(new Callback<TableColumn<Item, Item>, TableCell<Item, Item>>() { @Override public TableCell<Item, Item> call(TableColumn<Item, Item> column) { TableCell<Item, Item> cell = new TableCell<Item, Item>() { @Override protected void updateItem(Item item, boolean empty) { if (empty) { setGraphic(null); return; } String path = ""; switch (item.getFileTypeProperty().get()) { case JPG: case JPEG: path = "/gfx/ico-jpg.png"; break; case PNG: path = "/gfx/ico-png.png"; break; case BMP: path = "/gfx/ico-bmp.png"; default: path = "/gfx/ico-unknown.png"; break; } setGraphic(new ImageView(new Image(getClass().getResourceAsStream(path)))); }; }; cell.setAlignment(Pos.CENTER); return cell; } }); } }
package com.evolveum.polygon.connector.csv;

import com.evolveum.polygon.connector.csv.util.CsvTestUtil;
import com.evolveum.polygon.connector.csv.util.Util;
import org.apache.commons.io.FileUtils;
import org.identityconnectors.common.logging.Log;
import org.identityconnectors.common.security.GuardedString;
import org.identityconnectors.framework.api.ConnectorFacade;
import org.identityconnectors.framework.common.exceptions.ConnectorException;
import org.identityconnectors.framework.common.objects.*;
import org.identityconnectors.framework.spi.SyncTokenResultsHandler;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;

import java.io.File;
import java.io.IOException;
import java.util.*;

import static org.testng.Assert.assertTrue;
import static org.testng.AssertJUnit.assertEquals;

/**
 * Tests for the CSV connector's sync (change detection) operation: lock-file
 * handling, header validation, delta content, token generation, and repeated
 * sync cycles driven by swapping the underlying CSV file.
 */
public class SyncOpTest extends BaseTest {

    private static final Log LOG = Log.getLog(SyncOpTest.class);

    /**
     * A leftover sync lock file must make the sync operation fail with
     * ConnectorException instead of proceeding.
     */
    @Test(expectedExceptions = ConnectorException.class)
    public void syncLock() throws Exception {
        CsvConfiguration config = createConfiguration();
        config.setTrim(true);
        ConnectorFacade connector = setupConnector("/sync.csv", config);

        // Pre-seed an old sync snapshot so a token of 1300734815289 is valid.
        File oldSyncFile = new File("./target/data.csv.sync.1300734815289");
        FileUtils.copyFile(new File(TEMPLATE_FOLDER_PATH, "sync.csv.1300734815289"), oldSyncFile);

        File lock = new File("./target/data.csv." + Util.SYNC_LOCK_EXTENSION);
        lock.createNewFile();

        try {
            // SyncToken oldToken = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
            // assertEquals("1300734815289", oldToken.getValue());
            // NOTE(review): 'deltas' is never used in this test — likely a
            // leftover from copy/paste; the lambda ignores it.
            final List<SyncDelta> deltas = new ArrayList<>();
            connector.sync(ObjectClass.ACCOUNT, new SyncToken("1300734815289"), delta -> true, null);
        } finally {
            CsvTestUtil.deleteAllSyncFiles();
            lock.delete();
        }
    }

    /**
     * Syncing against a snapshot whose headers do not match the current CSV
     * must fail before any delta is delivered to the handler.
     */
    @Test(expectedExceptions = ConnectorException.class)
    public void badHeaders() throws Exception {
        ConnectorFacade connector = setupConnector("/sync-bad.csv", createConfiguration());

        File oldSyncFile = new File("./target/data.csv.sync.1300734815289");
        FileUtils.copyFile(new File(TEMPLATE_FOLDER_PATH, "sync-bad.csv.1300734815289"), oldSyncFile);

        try {
            // SyncToken oldToken = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
            // assertEquals("1300734815289", oldToken.getValue());
            connector.sync(ObjectClass.ACCOUNT, new SyncToken("1300734815289"), delta -> {
                Assert.fail("This test should fail on headers check.");
                return false;
            }, null);
        } finally {
            CsvTestUtil.deleteAllSyncFiles();
        }

        Assert.fail("This test should fail on headers check.");
    }

    /**
     * Full happy-path sync: exactly three deltas (delete/update/create) whose
     * content matches the expected map built by createSyncDeltaTestMap.
     */
    @Test
    public void syncTest() throws Exception {
        CsvConfiguration config = createConfiguration();
        config.setTrim(true);
        ConnectorFacade connector = setupConnector("/sync.csv", config);

        File oldSyncFile = new File("./target/data.csv.sync.1300734815289");
        FileUtils.copyFile(new File(TEMPLATE_FOLDER_PATH, "sync.csv.1300734815289"), oldSyncFile);

        try {
            // SyncToken oldToken = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
            // assertEquals("1300734815289", oldToken.getValue());
            final List<SyncDelta> deltas = new ArrayList<>();
            connector.sync(ObjectClass.ACCOUNT, new SyncToken("1300734815289"), delta -> {
                deltas.add(delta);
                return true;
            }, null);

            AssertJUnit.assertEquals(3, deltas.size());

            SyncToken token = deltas.get(0).getToken();
            Map<String, SyncDelta> deltaMap = createSyncDeltaTestMap(token);

            // Every delivered delta must match its expected counterpart, and
            // every expected delta must actually be delivered.
            for (SyncDelta delta : deltas) {
                SyncDelta syncDelta = deltaMap.get(delta.getUid().getUidValue());
                deltaMap.remove(delta.getUid().getUidValue());
                assertEquals(syncDelta, delta);
            }
            assertTrue(deltaMap.isEmpty(), "deltas didn't match");
        } finally {
            CsvTestUtil.deleteAllSyncFiles();
        }
    }

    /**
     * getLatestSyncToken must return a timestamp-based token that lies between
     * the wall-clock readings taken just before and just after the call.
     */
    @Test
    public void syncActualTokenTest() throws Exception {
        CsvConfiguration config = createConfiguration();
        config.setTrim(true);
        ConnectorFacade connector = setupConnector("/sync.csv", config);

        try {
            long timestampBefore = System.currentTimeMillis();
            SyncToken token = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
            long timestampToken = Long.valueOf((String) token.getValue());
            long timestampAfter = System.currentTimeMillis();
            assertTrue(timestampToken>timestampBefore && timestampToken<timestampAfter,
                    "wrong token, expected token between "+timestampBefore+" and "+timestampAfter);
        } finally {
            CsvTestUtil.deleteAllSyncFiles();
        }
    }

    /**
     * Builds the expected deltas for syncTest, keyed by uid:
     * 'vilo' deleted, 'miso' updated, 'apple' created — all carrying the
     * given token.
     */
    private Map<String, SyncDelta> createSyncDeltaTestMap(SyncToken token) {
        Map<String, SyncDelta> map = new HashMap<String, SyncDelta>();

        SyncDeltaBuilder builder = new SyncDeltaBuilder();
        builder.setDeltaType(SyncDeltaType.DELETE);
        builder.setObjectClass(ObjectClass.ACCOUNT);
        builder.setToken(token);
        builder.setUid(new Uid("vilo"));

        ConnectorObjectBuilder cBuilder = new ConnectorObjectBuilder();
        cBuilder.setName("vilo");
        cBuilder.setUid("vilo");
        cBuilder.setObjectClass(ObjectClass.ACCOUNT);
        cBuilder.addAttribute(ATTR_FIRST_NAME, "viliam");
        cBuilder.addAttribute(ATTR_LAST_NAME, "repan");
        cBuilder.addAttribute(OperationalAttributes.PASSWORD_NAME, new GuardedString("Z29vZA==".toCharArray()));

        builder.setObject(cBuilder.build());

        map.put("vilo", builder.build());

        // NOTE(review): unlike the 'vilo' delta above, setObjectClass is not
        // called on the next two SyncDeltaBuilders — presumably the object
        // class carried by the ConnectorObject suffices; confirm equality
        // semantics if these assertions ever start failing.
        builder = new SyncDeltaBuilder();
        builder.setDeltaType(SyncDeltaType.UPDATE);
        builder.setToken(token);
        builder.setUid(new Uid("miso"));

        cBuilder = new ConnectorObjectBuilder();
        cBuilder.setName("miso");
        cBuilder.setUid("miso");
        cBuilder.setObjectClass(ObjectClass.ACCOUNT);
        cBuilder.addAttribute(ATTR_FIRST_NAME, "michal");
        cBuilder.addAttribute(ATTR_LAST_NAME, "LastnameChange");
        cBuilder.addAttribute(OperationalAttributes.PASSWORD_NAME, new GuardedString("Z29vZA==".toCharArray()));

        builder.setObject(cBuilder.build());

        map.put("miso", builder.build());

        builder = new SyncDeltaBuilder();
        builder.setDeltaType(SyncDeltaType.CREATE);
        builder.setToken(token);
        builder.setUid(new Uid("apple"));

        cBuilder = new ConnectorObjectBuilder();
        cBuilder.setName("apple");
        cBuilder.setUid("apple");
        cBuilder.setObjectClass(ObjectClass.ACCOUNT);
        cBuilder.addAttribute(ATTR_FIRST_NAME, "small");
        cBuilder.addAttribute(ATTR_LAST_NAME, "smallAppleChange");
        cBuilder.addAttribute(OperationalAttributes.PASSWORD_NAME, new GuardedString("Z29vZA==".toCharArray()));

        builder.setObject(cBuilder.build());

        map.put("apple", builder.build());

        return map;
    }

    /**
     * Repeatedly syncs for RUN_TIME ms while alternating between two CSV
     * snapshots, asserting 4 deltas per cycle and a strictly increasing token.
     */
    @Test
    public void loopSync() throws Exception {
        final long SYNC_WAIT_TIME = 200;
        final long RUN_TIME = 15 * 1000;

        CsvTestUtil.deleteAllSyncFiles();

        CsvConfiguration config = createConfiguration();
        config.setTrim(true);
        ConnectorFacade connector = setupConnector("/sync-loop-1.csv", config);

        int runCount = 0;
        try {
            SyncToken startToken = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
            switchCsvFile(true);

            long oldTokenValue;
            boolean useSecond = false;
            SyncToken token = startToken;
            long startTime = System.currentTimeMillis();
            while (startTime + RUN_TIME > System.currentTimeMillis()) {
                runCount++;
                oldTokenValue = Long.parseLong((String) token.getValue());

                final SyncToken[] tokenHolder = new SyncToken[1];
                final List<SyncDelta> deltas = new ArrayList<>();
                connector.sync(ObjectClass.ACCOUNT, token, new SyncTokenResultsHandler() {

                    @Override
                    public void handleResult(SyncToken syncToken) {
                        tokenHolder[0] = syncToken;
                    }

                    @Override
                    public boolean handle(SyncDelta delta) {
                        deltas.add(delta);
                        return true;
                    }
                }, null);
                Thread.sleep(SYNC_WAIT_TIME);

                AssertJUnit.assertEquals(4, deltas.size());

                SyncToken newToken = tokenHolder[0];
                // NOTE(review): when handleResult DID supply a token, this else
                // branch discards it and falls back to startToken — verify this
                // is intentional (it only works if handleResult is never called
                // or the fallback token still satisfies the assertion below).
                if (newToken == null && !deltas.isEmpty()) {
                    newToken = deltas.get(0).getToken();
                } else {
                    newToken = startToken;
                }
                long newTokenValue = Long.parseLong((String) newToken.getValue());
                AssertJUnit.assertTrue(newTokenValue > oldTokenValue);

                token = newToken;

                switchCsvFile(useSecond);
                useSecond = !useSecond;
            }
        } finally {
            LOG.info("Run count: {0}", runCount);
            CsvTestUtil.deleteAllSyncFiles();
        }
    }

    /**
     * Replaces the connector's CSV with one of the two loop snapshots and
     * touches it so the file's modification time (the sync token source) moves.
     */
    private void switchCsvFile(boolean useSecond) throws IOException {
        String file = useSecond ? "sync-loop-2.csv" : "sync-loop-1.csv";

        File csv = new File(CSV_FILE_PATH);
        FileUtils.copyFile(new File(TEMPLATE_FOLDER_PATH, file), csv);
        FileUtils.touch(csv);

        LOG.info("Using second={0}, time: {1}", useSecond, csv.lastModified());
    }

    /**
     * Exercises sync after deleting the snapshot file a token points at: the
     * follow-up sync must still work from the remaining state.
     */
    @Test
    public void syncTokenTest() throws Exception {
        CsvTestUtil.deleteAllSyncFiles();

        CsvConfiguration config = createConfiguration();
        config.setTrim(true);
        ConnectorFacade connector = setupConnector("/sync-loop-1.csv", config);

        SyncToken token = connector.getLatestSyncToken(ObjectClass.ACCOUNT);
        switchCsvFile(true);

        int count = 0;
        SyncToken token2 = null;
        while (count < 15) {
            count++;
            token2 = doSync(connector, token);

            switchCsvFile(false);

            File toDelete = Util.createSyncFileName(Long.parseLong((String) token2.getValue()), config.getConfig());
            if (!toDelete.delete()) {
                throw new RuntimeException("Couldn't delete " + toDelete.getName());
            }

            doSync(connector, token2);
        }
    }

    /**
     * Runs one sync pass from the given token and returns the new token —
     * preferring the handler-supplied one, else the first delta's token.
     */
    private SyncToken doSync(ConnectorFacade connector, SyncToken token) throws Exception {
        final List<SyncDelta> deltas = new ArrayList<>();
        SyncToken newToken = connector.sync(ObjectClass.ACCOUNT, token, new SyncTokenResultsHandler() {

            @Override
            public void handleResult(SyncToken syncToken) {
            }

            @Override
            public boolean handle(SyncDelta delta) {
                deltas.add(delta);
                return true;
            }
        }, null);
        Thread.sleep(200);

        if (newToken == null && !deltas.isEmpty()) {
            newToken = deltas.get(0).getToken();
        }

        LOG.info("New token is {0}", newToken);

        return newToken;
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platfroms allow them to do with
// text.
// This program is free software: you can redistribute it and/or modify
// published by the Free Software Foundation, either version 3 of the
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// @ignore
// NOTE(review): the license header above appears truncated (several lines of
// the standard boilerplate are missing) — likely an extraction artifact;
// regenerate from the upstream template rather than hand-editing.
package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;
import com.kaltura.client.types.BaseResponseProfile;

/**
 * This class was generated using generate.php
 * against an XML schema provided by Kaltura.
 *
 * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
 */
@SuppressWarnings("serial")
public class Client extends ClientBase {

	public Client(ConnectionConfiguration config) {
		super(config);
		// Identifies this SDK build to the API and requests JSON responses.
		this.setClientTag("java:18-10-26");
		this.setApiVersion("14.7.0");
		this.clientConfiguration.put("format", 1); // JSON
	}

	/**
	 * @param clientTag
	 */
	public void setClientTag(String clientTag){
		this.clientConfiguration.put("clientTag", clientTag);
	}

	/**
	 * @return String
	 */
	public String getClientTag(){
		if(this.clientConfiguration.containsKey("clientTag")){
			return(String) this.clientConfiguration.get("clientTag");
		}
		return null;
	}

	/**
	 * @param apiVersion
	 */
	public void setApiVersion(String apiVersion){
		this.clientConfiguration.put("apiVersion", apiVersion);
	}

	/**
	 * @return String
	 */
	public String getApiVersion(){
		if(this.clientConfiguration.containsKey("apiVersion")){
			return(String) this.clientConfiguration.get("apiVersion");
		}
		return null;
	}

	/**
	 * @param partnerId Impersonated partner id
	 */
	public void setPartnerId(Integer partnerId){
		this.requestConfiguration.put("partnerId", partnerId);
	}

	/**
	 * Impersonated partner id
	 *
	 * @return Integer
	 */
	public Integer getPartnerId(){
		if(this.requestConfiguration.containsKey("partnerId")){
			return(Integer) this.requestConfiguration.get("partnerId");
		}
		// Unlike the other getters (which return null), an unset partner id
		// defaults to 0.
		return 0;
	}

	/**
	 * @param ks Kaltura API session
	 */
	public void setKs(String ks){
		this.requestConfiguration.put("ks", ks);
	}

	/**
	 * Kaltura API session
	 *
	 * @return String
	 */
	public String getKs(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * @param sessionId Kaltura API session
	 */
	public void setSessionId(String sessionId){
		// "sessionId" is an alias for the same "ks" configuration entry used
		// by setKs/getKs — both accessors read and write the same value.
		this.requestConfiguration.put("ks", sessionId);
	}

	/**
	 * Kaltura API session
	 *
	 * @return String
	 */
	public String getSessionId(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * @param responseProfile Response profile - this attribute will be automatically unset after every API call.
	 */
	public void setResponseProfile(BaseResponseProfile responseProfile){
		this.requestConfiguration.put("responseProfile", responseProfile);
	}

	/**
	 * Response profile - this attribute will be automatically unset after every API call.
	 *
	 * @return BaseResponseProfile
	 */
	public BaseResponseProfile getResponseProfile(){
		if(this.requestConfiguration.containsKey("responseProfile")){
			return(BaseResponseProfile) this.requestConfiguration.get("responseProfile");
		}
		return null;
	}
}
package com.github.wz2cool.dynamic; import com.github.wz2cool.dynamic.mybatis.db.mapper.UserDao; import com.github.wz2cool.dynamic.mybatis.db.model.entity.table.User; import com.sun.javafx.tk.TKPulseListener; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringRunner; import java.util.List; import static org.junit.Assert.assertEquals; @RunWith(SpringRunner.class) @SpringBootTest @ContextConfiguration(classes = TestApplication.class) public class DynamicMapperTest { @Autowired private UserDao userDao; @Test public void testInsert() { User user = new User(); user.setId(10); user.setUsername("frank"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); } @Test public void testInsertSelective() { User user = new User(); user.setId(11); user.setUsername("frank"); int result = userDao.insertSelective(user); assertEquals(1, result); } @Test public void testDeleteByPrimaryKey() { User user = new User(); user.setId(12); user.setUsername("frank"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); result = userDao.deleteByPrimaryKey(1); assertEquals(1, result); } @Test public void testDeleteByT() { User user = new User(); user.setId(13); user.setUsername("frank"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); result = userDao.delete(user); assertEquals(1, result); } @Test public void testDeleteByDynamicQuery() { User user = new User(); user.setId(14); user.setUsername("frank14"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); DynamicQuery<User> dynamicQuery = new DynamicQuery<>(User.class); FilterDescriptor nameFilter = new FilterDescriptor( FilterCondition.AND, User.class, 
User::getUsername, FilterOperator.CONTAINS, "14"); dynamicQuery.addFilter(nameFilter); result = userDao.deleteByDynamicQuery(dynamicQuery); assertEquals(1, result); } @Test public void testUpdateByPrimaryKey() { User user = new User(); user.setId(15); user.setUsername("frank"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); user.setPassword("test12345"); result = userDao.updateByPrimaryKey(user); assertEquals(1, result); } @Test public void testUpdateByPrimaryKeySelective() { User user = new User(); user.setId(16); user.setUsername("frank"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); User updateUser = new User(); updateUser.setId(16); updateUser.setPassword("test123"); result = userDao.updateByPrimaryKeySelective(updateUser); assertEquals(1, result); } @Test public void testUpdateByDynamicQuery() { User user = new User(); user.setId(17); user.setUsername("frank17"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); User updateUser = new User(); updateUser.setId(17); updateUser.setUsername("Marry"); DynamicQuery<User> dynamicQuery = new DynamicQuery<>(User.class); FilterDescriptor nameFilter = new FilterDescriptor( FilterCondition.AND, User.class, User::getUsername, FilterOperator.CONTAINS, "17"); dynamicQuery.addFilter(nameFilter); result = userDao.updateByDynamicQuery(updateUser, dynamicQuery); assertEquals(1, result); } @Test public void testUpdate() { User user = new User(); user.setId(18); user.setUsername("frank18"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); User updateUser = new User(); updateUser.setUsername("Marry"); DynamicQuery<User> dynamicQuery = new DynamicQuery<>(User.class); FilterDescriptor nameFilter = new FilterDescriptor( FilterCondition.AND, User.class, User::getUsername, FilterOperator.CONTAINS, "18"); dynamicQuery.addFilter(nameFilter); result = 
userDao.updateSelectiveByDynamicQuery(updateUser, dynamicQuery); assertEquals(1, result); } @Test public void testSelectAll() { List<User> users = userDao.selectAll(); assertEquals(true, users.size() > 0); } @Test public void testSelectByT() { User user = new User(); user.setId(1); List<User> users = userDao.select(user); assertEquals(1, users.size()); } @Test public void testSelectOne() { User user = new User(); user.setId(1); User matchedUser = userDao.selectOne(user); assertEquals(Integer.valueOf(1), matchedUser.getId()); } @Test public void testSelect() { User user = new User(); user.setId(19); user.setUsername("frank19"); user.setPassword("frank"); int result = userDao.insert(user); assertEquals(1, result); User findUser = new User(); user.setId(19); result = userDao.selectCount(findUser); assertEquals(true, result > 0); } }
package crazypants.enderio.machine.monitor;

import static crazypants.enderio.machine.power.PowerDisplayUtil.*;

import java.awt.Color;
import java.awt.Rectangle;
import java.text.NumberFormat;

import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiTextField;

import org.lwjgl.opengl.GL11;

import crazypants.enderio.gui.CheckBoxEIO;
import crazypants.enderio.gui.TextFieldEIO;
import crazypants.enderio.machine.power.PowerDisplayUtil;
import crazypants.enderio.network.PacketHandler;
import crazypants.gui.GuiContainerBase;
import crazypants.gui.GuiToolTip;
import crazypants.render.ColorUtil;
import crazypants.render.RenderUtil;
import crazypants.util.Lang;

/**
 * Client-side GUI for the Power Monitor block. Shows two tabs: a statistics
 * ("info") tab with network power readings, and a "redstone" tab where the
 * player can enable engine control and set the start/stop power thresholds.
 * Edits made in the redstone tab are pushed to the server via
 * {@link PacketPowerMonitor} whenever a value changes.
 */
public class GuiPowerMonitor extends GuiContainerBase {

  private static final NumberFormat INT_NF = NumberFormat.getIntegerInstance();

  private static final int ICON_SIZE = 16;
  private static final int SPACING = 6;
  private static final int MARGIN = 7;

  private static final int WIDTH = 203;
  private static final int HEIGHT = 146;

  // Position/size of the vertical power-level bar on the right edge.
  private static final int POWER_X = 185;
  private static final int POWER_Y = 9;
  private static final int POWER_WIDTH = 10;
  private static final int POWER_HEIGHT = 130;
  protected static final int BOTTOM_POWER_Y = POWER_Y + POWER_HEIGHT;

  /** Tile entity whose (client-synced) fields back this GUI. */
  private final TilePowerMonitor te;

  /** True while the redstone/engine-control tab is the active tab. */
  private boolean isRedstoneMode = false;

  private CheckBoxEIO enabledB;
  private TextFieldEIO startTF;
  private TextFieldEIO endTF;

  // Pre-localized strings, resolved once in the constructor.
  private String titleStr;
  private String engineTxt1;
  private String engineTxt2;
  private String engineTxt3;
  private String engineTxt4;
  private String engineTxt5;
  private String engineTxt6;
  private String monHeading1;
  private String monHeading2;
  private String monHeading3;
  private String monHeading4;
  private String monHeading5;

  public GuiPowerMonitor(final TilePowerMonitor te) {
    super(new ContainerPowerMonitor());
    this.te = te;
    xSize = WIDTH;
    ySize = HEIGHT;

    titleStr = Lang.localize("gui.powerMonitor.engineControl");
    engineTxt1 = Lang.localize("gui.powerMonitor.engineSection1");
    engineTxt2 = Lang.localize("gui.powerMonitor.engineSection2");
    engineTxt3 = Lang.localize("gui.powerMonitor.engineSection3");
    engineTxt4 = Lang.localize("gui.powerMonitor.engineSection4");
    engineTxt5 = Lang.localize("gui.powerMonitor.engineSection5");
    engineTxt6 = Lang.localize("gui.powerMonitor.engineSection6");
    monHeading1 = Lang.localize("gui.powerMonitor.monHeading1");
    monHeading2 = Lang.localize("gui.powerMonitor.monHeading2");
    monHeading3 = Lang.localize("gui.powerMonitor.monHeading3");
    monHeading4 = Lang.localize("gui.powerMonitor.monHeading4");
    monHeading5 = Lang.localize("gui.powerMonitor.monHeading5");

    // Tooltip over the power bar: "<stored>/<max> <unit>".
    addToolTip(new GuiToolTip(new Rectangle(POWER_X, POWER_Y, POWER_WIDTH, POWER_HEIGHT), "") {
      @Override
      protected void updateText() {
        text.clear();
        text.add(formatPower(te.getEnergyStored()) + "/" + formatPower(te.getMaxEnergyStored()) + " " + PowerDisplayUtil.abrevation());
      }
    });

    // Checkbox placed immediately after the title text.
    int x = MARGIN + Minecraft.getMinecraft().fontRenderer.getStringWidth(titleStr) + SPACING;
    enabledB = new CheckBoxEIO(this, 21267, x, 8);
    enabledB.setSelectedToolTip(Lang.localize("gui.enabled"));
    enabledB.setUnselectedToolTip(Lang.localize("gui.disabled"));
    enabledB.setSelected(te.engineControlEnabled);

    // "Start" threshold field, positioned after the engineTxt2 label.
    x = MARGIN + getFontRenderer().getStringWidth(engineTxt2) + 4;
    int y = MARGIN + ICON_SIZE + ICON_SIZE + getFontRenderer().FONT_HEIGHT;
    startTF = new TextFieldEIO(getFontRenderer(), x, y, 28, 14);
    startTF.setCanLoseFocus(true);
    startTF.setMaxStringLength(3); // percentage: at most "100"
    startTF.setVisible(false);     // hidden until the redstone tab is opened
    startTF.setText(INT_NF.format(te.asPercentInt(te.startLevel)));

    // "Stop" threshold field, positioned after the engineTxt5 label.
    y = y + getFontRenderer().FONT_HEIGHT + ICON_SIZE + ICON_SIZE + 4;
    x = 5 + MARGIN + getFontRenderer().getStringWidth(engineTxt5);
    endTF = new TextFieldEIO(getFontRenderer(), x, y, 28, 14);
    endTF.setCanLoseFocus(true);
    endTF.setMaxStringLength(3);
    endTF.setVisible(false);
    endTF.setText(INT_NF.format(te.asPercentInt(te.stopLevel)));

    textFields.add(startTF);
    textFields.add(endTF);
  }

  @Override
  public void initGui() {
    super.initGui();
    buttonList.clear();
    // The checkbox is attached lazily in mouseClicked() when the redstone
    // tab is opened, so it is deliberately not initialized here.
  }

  @Override
  public boolean doesGuiPauseGame() {
    return false;
  }

  @Override
  public int getOverlayOffsetX() {
    return 0;
  }

  /**
   * Handles clicks on the two tab buttons on the right edge (x 200..220):
   * y 9..27 selects the info tab, y 34..53 the redstone tab. Switching tabs
   * attaches/detaches the checkbox and toggles the threshold fields.
   */
  @Override
  protected void mouseClicked(int x, int y, int par3) {
    super.mouseClicked(x, y, par3);
    x = (x - guiLeft);
    y = (y - guiTop);
    if(x > 200 && x < 220) {
      if(y > 9 && y < 27) {
        isRedstoneMode = false;
        enabledB.detach();
        startTF.setVisible(false);
        endTF.setVisible(false);
      } else if(y > 34 && y < 53) {
        isRedstoneMode = true;
        enabledB.onGuiInit();
        startTF.setVisible(true);
        endTF.setVisible(true);
      }
    }
  }

  @Override
  protected void drawGuiContainerBackgroundLayer(float ptick, int mouseX, int mouseY) {
    GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
    RenderUtil.bindTexture("enderio:textures/gui/powerMonitor.png");
    int sx = (width - xSize) / 2;
    int sy = (height - ySize) / 2;
    drawTexturedModalRect(sx, sy, 0, 0, xSize, ySize);

    // Fill the power bar from the bottom up, proportional to stored energy.
    int i1 = te.getEnergyStoredScaled(POWER_HEIGHT);
    drawTexturedModalRect(sx + POWER_X, sy + BOTTOM_POWER_Y - i1, 245, 0, POWER_WIDTH, i1);

    if(isRedstoneMode) {
      renderRedstoneTab(sx, sy);
    } else {
      renderInfoTab(sx, sy);
    }
    checkForModifications();
    super.drawGuiContainerBackgroundLayer(ptick, mouseX, mouseY);
  }

  /**
   * Compares the GUI widgets against the tile entity's fields and, when any
   * value changed, copies the new values in and sends an update packet to the
   * server. Invalid text-field contents (getInt() returns -1) leave the
   * corresponding threshold untouched.
   */
  private void checkForModifications() {
    if(enabledB.isSelected() != te.engineControlEnabled || getInt(startTF) != te.asPercentInt(te.startLevel)
        || getInt(endTF) != te.asPercentInt(te.stopLevel)) {
      te.engineControlEnabled = enabledB.isSelected();
      int i = getInt(startTF);
      if(i >= 0) {
        te.startLevel = te.asPercentFloat(i);
      }
      i = getInt(endTF);
      if(i >= 0) {
        te.stopLevel = te.asPercentFloat(i);
      }
      PacketHandler.INSTANCE.sendToServer(new PacketPowerMonitor(te));
    }
  }

  /**
   * Parses the field as an int in [0, 100].
   *
   * @return the parsed value, or -1 when the text is null, not a number, or
   *         out of range.
   */
  private int getInt(GuiTextField tf) {
    String txt = tf.getText();
    if(txt == null) {
      return -1;
    }
    try {
      int val = Integer.parseInt(tf.getText());
      if(val >= 0 && val <= 100) {
        return val;
      }
      return -1;
    } catch (Exception e) {
      return -1;
    }
  }

  /** Renders the engine-control (redstone) tab: title, checkbox and labels. */
  private void renderRedstoneTab(int sx, int sy) {
    drawTexturedModalRect(sx + 200, sy + SPACING, 225, 0, 20, 48);

    int left = guiLeft + MARGIN;
    int rgb;
    int x = left;
    int y = guiTop + MARGIN + SPACING;
    // Grey out all text while engine control is disabled.
    if(!enabledB.isSelected()) {
      rgb = ColorUtil.getRGB(Color.darkGray);
    } else {
      rgb = ColorUtil.getRGB(Color.black);
    }
    FontRenderer fontRenderer = getFontRenderer();
    fontRenderer.drawString(titleStr, x, y, rgb, false);

    x = left + fontRenderer.getStringWidth(titleStr) + SPACING + ICON_SIZE + SPACING;
    y = guiTop + 14;
    // BUG FIX: the original drew the checkbox twice when unselected (once
    // inside an if(!isSelected()) branch that merely re-derived the same rgb
    // value, and once unconditionally afterwards). Draw it exactly once.
    enabledB.drawButton(mc, guiLeft, guiTop);

    y += SPACING + ICON_SIZE;
    x = left;
    String txt = engineTxt1;
    fontRenderer.drawString(txt, x, y, rgb, false);

    y += SPACING + fontRenderer.FONT_HEIGHT;
    x = left;
    txt = engineTxt2;
    fontRenderer.drawString(txt, x, y, rgb, false);

    x = left + fontRenderer.getStringWidth(txt) + SPACING + startTF.getWidth() + 12;
    txt = engineTxt3;
    fontRenderer.drawString(txt, x, y, rgb, false);

    x = left;
    y += ICON_SIZE + fontRenderer.FONT_HEIGHT + SPACING;
    txt = engineTxt4;
    fontRenderer.drawString(txt, x, y, rgb, false);

    x = left;
    y += SPACING + fontRenderer.FONT_HEIGHT;
    txt = engineTxt5;
    fontRenderer.drawString(txt, x, y, rgb, false);

    x += fontRenderer.getStringWidth(txt);
    txt = engineTxt3;
    x += MARGIN + endTF.getWidth() + 10;
    fontRenderer.drawString(txt, x, y, rgb, false);
  }

  /**
   * Renders the statistics tab: five "heading + value" sections showing power
   * in conduits, capacitor banks and machines, plus average RF sent/received.
   */
  private void renderInfoTab(int sx, int sy) {
    drawTexturedModalRect(sx + 200, sy + SPACING, 225, 53, 20, 48);

    int headingCol = ColorUtil.getRGB(Color.white);
    int valuesCol = ColorUtil.getRGB(Color.black);
    int rgb;
    int x = guiLeft + MARGIN;
    int y = guiTop + MARGIN;
    int sectionGap = SPACING;
    FontRenderer fontRenderer = getFontRenderer();

    rgb = headingCol;
    StringBuilder sb = new StringBuilder();
    sb.append(monHeading1);
    fontRenderer.drawString(sb.toString(), x, y, rgb, true);

    rgb = valuesCol;
    y += fontRenderer.FONT_HEIGHT + 2;
    sb = new StringBuilder();
    sb.append(formatPower(te.powerInConduits));
    sb.append(" ");
    sb.append(PowerDisplayUtil.ofStr());
    sb.append(" ");
    sb.append(formatPower(te.maxPowerInConduits));
    sb.append(" ");
    sb.append(PowerDisplayUtil.abrevation());
    fontRenderer.drawString(sb.toString(), x, y, rgb, false);

    rgb = headingCol;
    y += fontRenderer.FONT_HEIGHT + sectionGap;
    sb = new StringBuilder();
    sb.append(monHeading2);
    fontRenderer.drawString(sb.toString(), x, y, rgb, true);

    rgb = valuesCol;
    y += fontRenderer.FONT_HEIGHT + 2;
    sb = new StringBuilder();
    sb.append(formatPower(te.powerInCapBanks));
    sb.append(" ");
    sb.append(PowerDisplayUtil.ofStr());
    sb.append(" ");
    sb.append(formatPower(te.maxPowerInCapBanks));
    sb.append(" ");
    sb.append(PowerDisplayUtil.abrevation());
    fontRenderer.drawString(sb.toString(), x, y, rgb, false);

    rgb = headingCol;
    y += fontRenderer.FONT_HEIGHT + sectionGap;
    sb = new StringBuilder();
    sb.append(monHeading3);
    fontRenderer.drawString(sb.toString(), x, y, rgb, true);

    rgb = valuesCol;
    y += fontRenderer.FONT_HEIGHT + 2;
    sb = new StringBuilder();
    sb.append(formatPower(te.powerInMachines));
    sb.append(" ");
    sb.append(PowerDisplayUtil.ofStr());
    sb.append(" ");
    sb.append(formatPower(te.maxPowerInMachines));
    sb.append(" ");
    sb.append(PowerDisplayUtil.abrevation());
    fontRenderer.drawString(sb.toString(), x, y, rgb, false);

    rgb = headingCol;
    y += fontRenderer.FONT_HEIGHT + sectionGap;
    sb = new StringBuilder();
    sb.append(monHeading4);
    fontRenderer.drawString(sb.toString(), x, y, rgb, true);

    rgb = valuesCol;
    y += fontRenderer.FONT_HEIGHT + 2;
    sb = new StringBuilder();
    sb.append(formatPowerFloat(te.aveRfSent));
    sb.append(" ");
    sb.append(PowerDisplayUtil.abrevation());
    sb.append(PowerDisplayUtil.perTickStr());
    fontRenderer.drawString(sb.toString(), x, y, rgb, false);

    rgb = headingCol;
    y += fontRenderer.FONT_HEIGHT + sectionGap;
    sb = new StringBuilder();
    sb.append(monHeading5);
    fontRenderer.drawString(sb.toString(), x, y, rgb, true);

    rgb = valuesCol;
    y += fontRenderer.FONT_HEIGHT + 2;
    sb = new StringBuilder();
    sb.append(formatPowerFloat(te.aveRfReceived));
    sb.append(" ");
    sb.append(PowerDisplayUtil.abrevation());
    sb.append(PowerDisplayUtil.perTickStr());
    fontRenderer.drawString(sb.toString(), x, y, rgb, false);
  }
}
package com.elmakers.mine.bukkit.action; import org.bukkit.configuration.ConfigurationSection; import com.elmakers.mine.bukkit.api.action.ActionHandler; import com.elmakers.mine.bukkit.api.action.CastContext; import com.elmakers.mine.bukkit.api.spell.Spell; import com.elmakers.mine.bukkit.api.spell.SpellResult; public abstract class CheckAction extends CompoundAction { protected abstract boolean isAllowed(CastContext context); @Override protected void addHandlers(Spell spell, ConfigurationSection parameters) { addHandler(spell, "actions"); addHandler(spell, "fail"); } @Override public SpellResult step(CastContext context) { boolean allowed = isAllowed(context); if (!allowed) { ActionHandler fail = getHandler("fail"); if (fail != null && fail.size() != 0) { return startActions("fail"); } } ActionHandler actions = getHandler("actions"); if (actions == null || actions.size() == 0) { return allowed ? SpellResult.CAST : SpellResult.STOP; } if (!allowed) { return SpellResult.NO_TARGET; } return startActions(); } }
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platforms allow them to do with
// text.
// This program is free software: you can redistribute it and/or modify
// published by the Free Software Foundation, either version 3 of the
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// @ignore
package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;
import com.kaltura.client.types.BaseResponseProfile;

/**
 * Generated Kaltura API client (API version 17.3.0).
 *
 * This class was generated using generate.php
 * against an XML schema provided by Kaltura.
 *
 * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
 *
 * Client-wide settings (clientTag, apiVersion, format) live in
 * clientConfiguration; per-request settings (partnerId, ks, responseProfile)
 * live in requestConfiguration. Both maps are inherited from ClientBase.
 */
@SuppressWarnings("serial")
public class Client extends ClientBase {

	public Client(ConnectionConfiguration config) {
		super(config);
		this.setClientTag("java:21-07-01");
		this.setApiVersion("17.3.0");
		this.clientConfiguration.put("format", 1); // JSON
	}

	/**
	 * @param clientTag client identification string sent with every request
	 */
	public void setClientTag(String clientTag){
		this.clientConfiguration.put("clientTag", clientTag);
	}

	/**
	 * @return the configured client tag, or null if unset
	 */
	public String getClientTag(){
		if(this.clientConfiguration.containsKey("clientTag")){
			return(String) this.clientConfiguration.get("clientTag");
		}
		return null;
	}

	/**
	 * @param apiVersion Kaltura API version string
	 */
	public void setApiVersion(String apiVersion){
		this.clientConfiguration.put("apiVersion", apiVersion);
	}

	/**
	 * @return the configured API version, or null if unset
	 */
	public String getApiVersion(){
		if(this.clientConfiguration.containsKey("apiVersion")){
			return(String) this.clientConfiguration.get("apiVersion");
		}
		return null;
	}

	/**
	 * @param partnerId Impersonated partner id
	 */
	public void setPartnerId(Integer partnerId){
		this.requestConfiguration.put("partnerId", partnerId);
	}

	/**
	 * Impersonated partner id.
	 *
	 * NOTE(review): unlike the other getters in this class, this returns 0
	 * (not null) when unset — generated behavior; confirm against the
	 * generator before relying on a null check here.
	 *
	 * @return Integer
	 */
	public Integer getPartnerId(){
		if(this.requestConfiguration.containsKey("partnerId")){
			return(Integer) this.requestConfiguration.get("partnerId");
		}
		return 0;
	}

	/**
	 * @param ks Kaltura API session
	 */
	public void setKs(String ks){
		this.requestConfiguration.put("ks", ks);
	}

	/**
	 * Kaltura API session.
	 *
	 * @return the session string, or null if unset
	 */
	public String getKs(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * Alias for {@link #setKs(String)}: stores under the same "ks" key.
	 *
	 * @param sessionId Kaltura API session
	 */
	public void setSessionId(String sessionId){
		this.requestConfiguration.put("ks", sessionId);
	}

	/**
	 * Alias for {@link #getKs()}: reads the same "ks" key.
	 *
	 * @return the session string, or null if unset
	 */
	public String getSessionId(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * @param responseProfile Response profile - this attribute will be automatically unset after every API call.
	 */
	public void setResponseProfile(BaseResponseProfile responseProfile){
		this.requestConfiguration.put("responseProfile", responseProfile);
	}

	/**
	 * Response profile - this attribute will be automatically unset after every API call.
	 *
	 * @return BaseResponseProfile, or null if unset
	 */
	public BaseResponseProfile getResponseProfile(){
		if(this.requestConfiguration.containsKey("responseProfile")){
			return(BaseResponseProfile) this.requestConfiguration.get("responseProfile");
		}
		return null;
	}
}
package com.macro.mall.dao; import com.macro.mall.dto.SmsCouponParam; import org.apache.ibatis.annotations.Param; public interface SmsCouponDao { SmsCouponParam getItem(@Param("id") Long id); }
package com.github.dozedoff.commonj.file;

import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedList;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
 * Tests for {@link FileUtil}: path tokenizing, directory/file moving with
 * structure, drive-letter stripping, and Windows filename validation.
 * Windows-specific cases are {@code @Ignore}d because they fail on Linux.
 */
@SuppressWarnings("deprecation")
@SuppressFBWarnings("DMI_HARDCODED_ABSOLUTE_FILENAME")
public class FileUtilTest {
	private LinkedList<Path> srcDirs;
	private LinkedList<Path> srcFiles;
	private LinkedList<Path> dstFiles;
	private Path srcDir;
	private Path dstDir;
	private Path simplePath;

	private static final String SIMPLE_PATH = "C:\\foo\\bar\\";
	private static final Logger LOGGER = LoggerFactory.getLogger(FileUtilTest.class);

	@Before
	public void setUp() throws Exception {
		srcDirs = new LinkedList<>();
		srcFiles = new LinkedList<>();
		dstFiles = new LinkedList<>();

		// Fresh temp tree per test: <tmp>/src/test and <tmp>/dst/test.
		Path baseDir = Files.createTempDirectory("testFileUtil");
		srcDir = baseDir.resolve("src").resolve("test");
		srcDirs.add(srcDir);
		dstDir = baseDir.resolve("dst").resolve("test");
		simplePath = Paths.get(SIMPLE_PATH);
	}

	@Test
	public void testPathTokenListLocalFileName() {
		LinkedList<String> result = (LinkedList<String>) FileUtil.pathTokenList("C:\\test\\me\\testfile.txt");
		String mustContain[] = { "C:", "test", "me", "testfile", ".txt" };
		assertThat(result, hasItems(mustContain));
	}

	@Test
	public void testPathTokenListLocalFileNameSpecialChar() {
		LinkedList<String> result = (LinkedList<String>) FileUtil.pathTokenList("C:\\test\\me\\test.me\\testfile.txt");
		String mustContain[] = { "C:", "test", "me", "testfile", ".txt", "test.me" };
		assertThat(result, hasItems(mustContain));
	}

	@Test
	public void testPathTokenListNetworkFileName() {
		LinkedList<String> result = (LinkedList<String>) FileUtil.pathTokenList("\\\\nas\\test\\me\\test.me\\testfile.txt");
		String mustContain[] = { "\\\\nas", "test", "me", "testfile", ".txt", "test.me" };
		assertThat(result, hasItems(mustContain));
	}

	@Test
	public void testDirectoryMover() throws IOException {
		/*
		 * \test a.txt dir1 b.txt dir2 c.txt
		 */
		buildStructure(srcDirs, srcFiles, srcDir);
		createFiles(srcDirs, srcFiles);

		// guard condition
		for (Path p : srcFiles) {
			assertTrue(p.toFile().exists());
		}

		// copy /src/test/ to /dst/
		FileUtil.moveDirectory(srcDir, dstDir.getParent());

		for (Path p : dstFiles) {
			assertTrue(p.toString(), p.toFile().exists());
		}
	}

	@Test
	public void testCopyFile() throws IOException {
		buildStructure(srcDirs, srcFiles, srcDir);
		createFiles(srcDirs, srcFiles);

		/*
		 * ...\srcDir\src\test\dir1\dir2\c.txt
		 *
		 * to
		 *
		 * ...\dstdir\...\srcDir\src\test\dir1\dir2\c.txt
		 */
		FileUtil.moveFileWithStructure(srcFiles.get(2), dstDir);
		assertTrue(Files.exists(dstDir.resolve(srcFiles.get(2).getRoot().relativize(srcFiles.get(2)))));
	}

	@Test(expected=IllegalArgumentException.class)
	public void testCopyFileSourceNull() throws IOException {
		FileUtil.moveFileWithStructure(null, dstDir);
	}

	@Test(expected=IllegalArgumentException.class)
	public void testCopyFileDestinationNull() throws IOException {
		FileUtil.moveFileWithStructure(srcDir, null);
	}

	@Test(expected=IllegalArgumentException.class)
	public void testCopyFileDestinationNoRootComponent() throws IOException {
		Path noRoot = Paths.get("foo/");
		FileUtil.moveFileWithStructure(noRoot, dstDir);
	}

	/** Adds dir1, dir1/dir2 and a.txt, dir1/b.txt, dir1/dir2/c.txt under base. */
	private void buildStructure(LinkedList<Path> dirs, LinkedList<Path> files, Path base) {
		dirs.add(base.resolve("dir1"));
		dirs.add(base.resolve("dir1").resolve("dir2"));

		files.add(base.resolve("a.txt"));
		files.add(base.resolve("dir1").resolve("b.txt"));
		files.add(base.resolve("dir1").resolve("dir2").resolve("c.txt"));
	}

	/** Creates the listed directories and empty files on disk. */
	private void createFiles(LinkedList<Path> dirs, LinkedList<Path> files) throws IOException {
		for (Path p : dirs) {
			// BUG FIX: condition was inverted — mkdirs() returns true on
			// SUCCESS, so the original logged "Failed" on success and was
			// silent on actual failure.
			if (!p.toFile().mkdirs()) {
				LOGGER.error("Failed to create directory {}", p);
			}
		}

		for (Path p : files) {
			// BUG FIX: same inversion — createNewFile() returns true when the
			// file was created.
			if (!p.toFile().createNewFile()) {
				LOGGER.error("Failed to create file {}", p);
			}
		}
	}

	@Test
	public void testRemoveDriveLetter() {
		Path pathToTest = buildAbsolutePath("test", "me", "now", "squirrel.jpg");
		Path path = FileUtil.removeDriveLetter(pathToTest);
		assertThat(path, is(buildRelativePath("test", "me", "now", "squirrel.jpg")));
	}

	/** Prefixes the platform's first filesystem root onto the given elements. */
	private Path buildAbsolutePath(String... elements) {
		Path absolutePath = null;

		Iterable<Path> roots = FileSystems.getDefault().getRootDirectories();
		Path root = null;

		for (Path p : roots) {
			root = p;
			break;
		}

		// guard
		if (root == null) {
			fail("No root directory found");
		}

		absolutePath = root;
		Path relativePath = buildRelativePath(elements);
		absolutePath = absolutePath.resolve(relativePath);

		return absolutePath;
	}

	private Path buildRelativePath(String... elements) {
		Path relativePath = null;
		relativePath = Paths.get("", elements);
		return relativePath;
	}

	@Test
	public void testRemoveDriveLetterNoDrive() {
		Path path = FileUtil.removeDriveLetter(Paths.get("\\test\\me\\now\\squirrel.jpg"));
		assertThat(path, is(Paths.get("\\test\\me\\now\\squirrel.jpg")));
	}

	@Test
	public void testRemoveDriveLetterNull() {
		Path p = null;
		Path path = FileUtil.removeDriveLetter(p);
		assertNull(path);
	}

	@Test
	public void testRemoveDriveLetterDirOnly() {
		Path pathToTest = buildAbsolutePath("test", "me", "now");
		Path path = FileUtil.removeDriveLetter(pathToTest);
		Path validPath = buildRelativePath("test", "me", "now");
		assertThat(path, is(validPath));
	}

	@Test
	public void testRemoveDriveLetterString() {
		Path pathToTest = buildAbsolutePath("test", "me", "now", "squirrel.jpg");
		String path = FileUtil.removeDriveLetter(pathToTest.toString());
		Path validPath = buildRelativePath("test", "me", "now", "squirrel.jpg");
		assertThat(path, is(validPath.toString()));
	}

	@Test
	public void testRemoveDriveLetterNoDriveString() {
		String path = FileUtil.removeDriveLetter("\\test\\me\\now\\squirrel.jpg");
		assertThat(path, is("\\test\\me\\now\\squirrel.jpg"));
	}

	@Ignore("tests will fail on Linux systems")
	@Test
	public void testRemoveDriveLetterOnlyDirString() {
		String path = FileUtil.removeDriveLetter("\\test\\me\\now\\");
		assertThat(path, is("\\test\\me\\now\\"));
	}

	@Test
	public void testRemoveDriveLetterNullString() {
		String s = null;
		String path = FileUtil.removeDriveLetter(s);
		assertNull(path);
	}

	@Test
	public void testRemoveDriveLetterStringDirOnly() {
		Path pathToTest = buildAbsolutePath("test", "me", "now");
		String path = FileUtil.removeDriveLetter(pathToTest).toString();
		String validPath = buildRelativePath("test", "me", "now").toString();
		assertThat(path, is(validPath));
	}

	@Test
	public void testSanitizeFilenameForWindows() {
		String original = "foo:bar";
		String clean = FileUtil.sanitizeFilenameForWindows(original);
		assertThat(clean, is("foo_bar"));
	}

	@Ignore
	@Test
	public void testHasValidWindowsFilenameFileEmpty() {
		assertThat(FileUtil.hasValidWindowsFilename(new File("")), is(false));
	}

	@Ignore("tests will fail on Linux systems")
	@Test
	public void testHasValidWindowsFilenameValidAbsolute() {
		assertThat(FileUtil.hasValidWindowsFilename(new File("C:\\foobar")), is(true));
	}

	@Ignore
	@Test
	public void testHasValidWindowsFilenameValidRelative() {
		assertThat(FileUtil.hasValidWindowsFilename(new File("baz\\foobar")), is(true));
	}

	@Ignore
	@Test
	public void testHasValidWindowsFilenameFileInvalid() {
		assertThat(FileUtil.hasValidWindowsFilename(new File("foo:bar")), is(false));
	}

	@Ignore("tests will fail on Linux systems")
	@Test
	public void testHasValidWindowsFilenameString() {
		assertThat(FileUtil.hasValidWindowsFilename("C:\\foobar"), is(true));
	}

	@Ignore("tests will fail on Linux systems")
	@Test
	public void testConvertDirPathToString() throws Exception {
		assertThat(FileUtil.convertDirPathToString(simplePath), is(SIMPLE_PATH.toLowerCase()));
	}

	@Test
	public void testConvertDirPathToStringNoEndingSlashes() throws Exception {
		String noSlashes = "c:\\foo\\bar";
		assertThat(FileUtil.convertDirPathToString(Paths.get(noSlashes)), is(SIMPLE_PATH.toLowerCase()));
	}

	@Test
	public void testConvertDirPathToStringNull() throws Exception {
		assertThat(FileUtil.convertDirPathToString(null), is(nullValue()));
	}
}
package com.romainpiel.lib.helper;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.os.Handler;

import com.romainpiel.Constants;
import com.romainpiel.lib.gif.AnimatedGifEncoder;

import java.io.ByteArrayOutputStream;

/**
 * Captures camera preview frames for a fixed duration (Constants.CAPTURE_DURATION)
 * and encodes them into an animated GIF. capture() starts a session; each
 * onPreviewFrame() call rotates/scales/crops the frame and appends it to the
 * encoder; a delayed Runnable on the UI handler finishes the GIF and delivers
 * the bytes to the listener.
 *
 * NOTE(review): onPreviewFrame runs on the camera callback thread while the
 * stop Runnable runs on the UI handler — no explicit synchronization between
 * them; confirm the encoder tolerates this.
 */
public class PreviewHelper implements Camera.PreviewCallback {

    private Handler uiHandler;
    // Display rotation of the camera in degrees; applied to each frame.
    private int angle;
    private boolean isFrontCamera;
    // Timestamp (ms) of the previously encoded frame; -1 while idle.
    private long lastTick;
    // Elapsed capture time in ms, accumulated across frames.
    private long t;
    private boolean capturing;
    private AnimatedGifEncoder gifEncoder;
    // Posted with CAPTURE_DURATION delay to end the session.
    private Runnable stopCaptureRunnable;
    // Destination buffer for the encoded GIF; recreated per capture.
    private ByteArrayOutputStream gifStream;
    private OnCaptureListener onCaptureListener;

    public PreviewHelper(Handler uiHandler) {
        this.uiHandler = uiHandler;
        this.stopCaptureRunnable = new Runnable() {
            @Override
            public void run() {
                // Finalize the GIF, report 100% progress, hand bytes to the
                // listener, then reset for the next session.
                gifEncoder.finish();
                if (onCaptureListener != null) {
                    onCaptureListener.onCaptureProgress(1f);
                    onCaptureListener.onCaptureComplete(gifStream.toByteArray());
                }
                prepareForNextCapture();
            }
        };
        this.gifEncoder = new AnimatedGifEncoder();
        this.gifEncoder.setRepeat(0); // 0 = loop forever
        prepareForNextCapture();
    }

    public boolean isCapturing() {
        return capturing;
    }

    public OnCaptureListener getOnCaptureListener() {
        return onCaptureListener;
    }

    public void setAngle(int angle) {
        this.angle = angle;
    }

    public void setFrontCamera(boolean isFrontCamera) {
        this.isFrontCamera = isFrontCamera;
    }

    public void setOnCaptureListener(OnCaptureListener onCaptureListener) {
        this.onCaptureListener = onCaptureListener;
    }

    /** Resets all per-session state back to idle. */
    private void prepareForNextCapture() {
        this.capturing = false;
        this.lastTick = -1;
        this.t = 0;
        this.gifStream = null;
    }

    /**
     * Starts a capture session (no-op if one is already running) and schedules
     * automatic completion after Constants.CAPTURE_DURATION ms.
     */
    public void capture() {
        if (!capturing) {
            gifStream = new ByteArrayOutputStream();
            gifEncoder.start(gifStream);
            uiHandler.postDelayed(stopCaptureRunnable, Constants.CAPTURE_DURATION);
            capturing = true;
            lastTick = System.currentTimeMillis();
            t = 0;
            if (onCaptureListener != null) {
                onCaptureListener.onCaptureStarted();
            }
        }
    }

    /**
     * Converts each preview frame (YUV -> JPEG -> Bitmap), rotates it by
     * {@code angle} (mirrored for the front camera), center-crops to the
     * capture aspect ratio and scales to CAPTURE_WIDTH, then appends it to the
     * GIF with a delay derived from the real inter-frame time.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (capturing) {
            long now = System.currentTimeMillis();
            float duration = now - lastTick;
            t += duration;
            if (onCaptureListener != null) {
                onCaptureListener.onCaptureProgress(((float) t) / Constants.CAPTURE_DURATION);
            }

            Camera.Parameters parameters = camera.getParameters();
            Camera.Size size = parameters.getPreviewSize();
            // Route through JPEG because YuvImage cannot be decoded directly.
            YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, output);
            Bitmap bitmap = BitmapFactory.decodeByteArray(output.toByteArray(), 0, output.size());

            // realSized: no 90°/270° rotation, so frame width/height keep
            // their roles; otherwise they swap.
            boolean realSized = angle % 180 == 0;
            // NOTE(review): if CAPTURE_WIDTH/CAPTURE_HEIGHT are ints this is
            // integer division before the float assignment — TODO confirm
            // Constants' declared types.
            float ratio = Constants.CAPTURE_WIDTH / Constants.CAPTURE_HEIGHT;
            float srcWidth = realSized ? image.getWidth() : (float) image.getHeight() / ratio;
            float srcHeight = realSized ? (float) image.getWidth() / ratio : image.getHeight();
            // NOTE(review): same potential integer-division issue here.
            float scaleFactor = realSized ? Constants.CAPTURE_WIDTH / image.getWidth() : Constants.CAPTURE_WIDTH / image.getHeight();

            Matrix matrix = new Matrix();
            // Front camera: mirror horizontally and invert rotation.
            matrix.postRotate(isFrontCamera ? -angle : angle);
            matrix.postScale(isFrontCamera ? -scaleFactor : scaleFactor, scaleFactor);

            int startX = realSized ? 0 : Math.max(0, (image.getWidth() - image.getHeight()) / 2);
            // NOTE(review): here the /2 sits OUTSIDE Math.max, unlike startX
            // where it is inside — behaviorally identical only when the
            // difference is non-negative; looks like an oversight, verify.
            int startY = realSized ? Math.max(0, (image.getHeight() - image.getWidth())) / 2 : 0;
            Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, startX, startY, (int) srcWidth, (int) srcHeight, matrix, true);

            // Frame delay scaled by CAPTURE_ACCELERATION (time-lapse effect).
            gifEncoder.setDelay((int) (duration / Constants.CAPTURE_ACCELERATION));
            gifEncoder.addFrame(rotatedBitmap);

            rotatedBitmap.recycle();
            bitmap.recycle();

            lastTick = now;
        }
    }

    /**
     * Aborts the running session: cancels the scheduled completion, closes the
     * encoder, and resets state. The listener is NOT notified of completion.
     */
    public void cancelCapture() {
        uiHandler.removeCallbacks(stopCaptureRunnable);
        gifEncoder.finish();
        prepareForNextCapture();
    }

    /** Callbacks for capture lifecycle; invoked as documented on each method. */
    public interface OnCaptureListener {
        public void onCaptureStarted();

        // progress in [0, 1], derived from accumulated frame time.
        public void onCaptureProgress(float progress);

        // gifData: the complete encoded GIF byte stream.
        public void onCaptureComplete(byte[] gifData);
    }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platforms allow them to do with
// text.
// This program is free software: you can redistribute it and/or modify
// published by the Free Software Foundation, either version 3 of the
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// @ignore
package com.kaltura.client;

import com.kaltura.client.utils.request.ConnectionConfiguration;
import com.kaltura.client.types.BaseResponseProfile;

/**
 * Generated Kaltura API client (API version 16.7.0).
 *
 * This class was generated using generate.php
 * against an XML schema provided by Kaltura.
 *
 * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
 *
 * Client-wide settings (clientTag, apiVersion, format) live in
 * clientConfiguration; per-request settings (partnerId, ks, responseProfile)
 * live in requestConfiguration. Both maps are inherited from ClientBase.
 */
@SuppressWarnings("serial")
public class Client extends ClientBase {

	public Client(ConnectionConfiguration config) {
		super(config);
		this.setClientTag("java:20-07-25");
		this.setApiVersion("16.7.0");
		this.clientConfiguration.put("format", 1); // JSON
	}

	/**
	 * @param clientTag client identification string sent with every request
	 */
	public void setClientTag(String clientTag){
		this.clientConfiguration.put("clientTag", clientTag);
	}

	/**
	 * @return the configured client tag, or null if unset
	 */
	public String getClientTag(){
		if(this.clientConfiguration.containsKey("clientTag")){
			return(String) this.clientConfiguration.get("clientTag");
		}
		return null;
	}

	/**
	 * @param apiVersion Kaltura API version string
	 */
	public void setApiVersion(String apiVersion){
		this.clientConfiguration.put("apiVersion", apiVersion);
	}

	/**
	 * @return the configured API version, or null if unset
	 */
	public String getApiVersion(){
		if(this.clientConfiguration.containsKey("apiVersion")){
			return(String) this.clientConfiguration.get("apiVersion");
		}
		return null;
	}

	/**
	 * @param partnerId Impersonated partner id
	 */
	public void setPartnerId(Integer partnerId){
		this.requestConfiguration.put("partnerId", partnerId);
	}

	/**
	 * Impersonated partner id.
	 *
	 * NOTE(review): unlike the other getters in this class, this returns 0
	 * (not null) when unset — generated behavior; confirm against the
	 * generator before relying on a null check here.
	 *
	 * @return Integer
	 */
	public Integer getPartnerId(){
		if(this.requestConfiguration.containsKey("partnerId")){
			return(Integer) this.requestConfiguration.get("partnerId");
		}
		return 0;
	}

	/**
	 * @param ks Kaltura API session
	 */
	public void setKs(String ks){
		this.requestConfiguration.put("ks", ks);
	}

	/**
	 * Kaltura API session.
	 *
	 * @return the session string, or null if unset
	 */
	public String getKs(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * Alias for {@link #setKs(String)}: stores under the same "ks" key.
	 *
	 * @param sessionId Kaltura API session
	 */
	public void setSessionId(String sessionId){
		this.requestConfiguration.put("ks", sessionId);
	}

	/**
	 * Alias for {@link #getKs()}: reads the same "ks" key.
	 *
	 * @return the session string, or null if unset
	 */
	public String getSessionId(){
		if(this.requestConfiguration.containsKey("ks")){
			return(String) this.requestConfiguration.get("ks");
		}
		return null;
	}

	/**
	 * @param responseProfile Response profile - this attribute will be automatically unset after every API call.
	 */
	public void setResponseProfile(BaseResponseProfile responseProfile){
		this.requestConfiguration.put("responseProfile", responseProfile);
	}

	/**
	 * Response profile - this attribute will be automatically unset after every API call.
	 *
	 * @return BaseResponseProfile, or null if unset
	 */
	public BaseResponseProfile getResponseProfile(){
		if(this.requestConfiguration.containsKey("responseProfile")){
			return(BaseResponseProfile) this.requestConfiguration.get("responseProfile");
		}
		return null;
	}
}
package com.github.dozedoff.commonj.net;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Response;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration tests for {@code GetBinary}, run against an embedded Jetty
 * server on localhost. The server serves the (mutable, static) byte arrays
 * {@link #testData} / {@link #testData2}, and provides special pages for
 * timeout ("wait") and error ("notok") behavior.
 */
public class GetBinaryTest {
	GetBinary getBinary;                 // fresh instance per test (see setUp)
	static Server server;                // shared embedded Jetty server
	// Payload served for Pages.data. NOTE: several tests REASSIGN this field
	// to random data before issuing a request, so its value is test-order
	// dependent by design.
	static byte[] testData = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20 };
	// Payload served for Pages.data2 (never reassigned).
	static byte[] testData2 = { 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 };
	static final int SERVER_PORT = 5400;

	// Page names; the enum constant's name() doubles as the URL path segment.
	// NOTE(review): Pages.range has no matching case in TestHandler's switch,
	// so requesting /range would hit the default branch and throw — confirm
	// whether it is intentionally unused.
	private static enum Pages {
		data, data2, range, wait, notok
	};

	HashMap<Pages, URL> pageURLs = new HashMap<>();  // per-instance URL cache
	static final int READ_TIMEOUT_CLASS = 100;   // timeout configured on GetBinary (ms)
	static final int READ_TIMEOUT_TEST = 1500;   // JUnit-level timeout budget (ms)

	/** Starts the shared Jetty server once for the whole class. */
	@BeforeClass
	public static void startServer() throws Exception {
		server = new Server(SERVER_PORT);
		server.setHandler(new TestHandler());
		server.start();
	}

	/** Stops the shared server after all tests have run. */
	@AfterClass
	public static void stopServer() throws Exception {
		server.stop();
	}

	/**
	 * Fresh GetBinary per test; re-starts the server because
	 * {@link #testConnectionFail()} stops it mid-run.
	 */
	@Before
	public void setUp() throws Exception {
		getBinary = new GetBinary();
		server.start();
	}

	/** Negative timeouts must be rejected. */
	@Test
	public void testInvalidTimeoutSetting() {
		assertThat(getBinary.setReadTimeout(-100), is(false));
	}

	/** Download via URL must return the exact bytes the server holds. */
	@Test
	public void testGetViaHttp() throws IOException {
		testData = generateRandomData(25);
		byte[] data = getBinary.getViaHttp(getURL(Pages.data));
		assertThat(data, is(testData));
	}

	/** Same as above but using the String-typed overload. */
	@Test
	public void testGetViaHttpString() throws IOException {
		testData = generateRandomData(25);
		byte[] data = getBinary.getViaHttp(getURL(Pages.data).toString());
		assertThat(data, is(testData));
	}

	/** A stalled server response must surface as SocketTimeoutException. */
	@Test(timeout = READ_TIMEOUT_TEST, expected = SocketTimeoutException.class)
	public void testGetViaHttpTimeout() throws IOException {
		getBinary.setReadTimeout(READ_TIMEOUT_CLASS);
		getBinary.getViaHttp(getURL(Pages.wait));
	}

	/** A non-200 HTTP status must surface as PageLoadException. */
	@Test(expected = PageLoadException.class)
	public void testGetViaHttpBadRequest() throws IOException {
		getBinary.getViaHttp(getURL(Pages.notok));
	}

	// NOTE: "Lenght" is a typo inherited from the GetBinary API itself; the
	// test must match the production method name.
	@Test
	public void testGetLenght() throws Exception {
		testData = generateRandomData(25);
		Long size = getBinary.getLenght(getURL(Pages.data));
		assertThat(size, is((long) testData.length));
	}

	@Test(timeout = READ_TIMEOUT_TEST, expected = SocketTimeoutException.class)
	public void testGetLenghtTimeOut() throws Exception {
		getBinary.setReadTimeout(READ_TIMEOUT_CLASS);
		getBinary.getLenght(getURL(Pages.wait));
	}

	/** Header fetch must expose the Content-Length of the served payload. */
	@Test
	public void testGetHeader() throws Exception {
		testData = generateRandomData(5000);
		Map<String, List<String>> header = getBinary.getHeader(getURL(Pages.data));
		assertThat(header.containsKey("Content-Length"), is(true));
		assertThat(header.get("Content-Length").get(0), is("5000"));
	}

	/**
	 * Range request: getRange(url, 10, 15) is (start, LENGTH), so it must
	 * return bytes [10, 25) of the payload (matches TestHandler's
	 * start-length interpretation of the Range header).
	 */
	@Test
	public void testGetRange() throws Exception {
		testData = generateRandomData(25);
		byte[] subSet = Arrays.copyOfRange(testData, 10, 25);
		byte[] data = getBinary.getRange(getURL(Pages.data), 10, 15);
		assertThat(data, is(subSet));
	}

	@Test(timeout = READ_TIMEOUT_TEST, expected = SocketTimeoutException.class)
	public void testGetRangeTimeout() throws Exception {
		getBinary.setReadTimeout(READ_TIMEOUT_CLASS);
		getBinary.getRange(getURL(Pages.wait), 10, 15);
	}

	/** One GetBinary instance must survive several sequential downloads. */
	@Test(timeout = 5000)
	public void testReUse() throws Exception {
		assertThat(getBinary.getViaHttp(getURL(Pages.data)), is(testData));
		assertThat(getBinary.getViaHttp(getURL(Pages.data2)), is(testData2));
		assertThat(getBinary.getViaHttp(getURL(Pages.data)), is(testData));
	}

	@Test(timeout = READ_TIMEOUT_TEST, expected = SocketTimeoutException.class)
	public void testConnectionTimeout() throws Exception {
		getBinary.setMaxRetry(0);
		getBinary.setReadTimeout(READ_TIMEOUT_CLASS);
		getBinary.getViaHttp(getURL(Pages.wait));
	}

	/** Stops the server so the connection itself fails (not just the read). */
	@Test(timeout = 10000, expected = SocketException.class)
	public void testConnectionFail() throws Exception {
		getBinary.setMaxRetry(0);
		server.stop();
		getBinary.getViaHttp(getURL(Pages.data));
	}

	/** Jetty handler that dispatches on the last path segment of the URI. */
	static class TestHandler extends AbstractHandler {
		@Override
		public void handle(String arg0, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
				throws IOException, ServletException {
			response.setContentType("application/octet-stream");
			response.setStatus(HttpServletResponse.SC_OK);
			baseRequest.setHandled(true);

			String pageName = extractPage(request);
			Pages selectedPage = null;

			try {
				selectedPage = Pages.valueOf(Pages.class, pageName);
			} catch (IllegalArgumentException iae) {
				fail(pageName + " is not a valid page Enum");
			} catch (NullPointerException npe) {
				fail("Page name was null");
			}

			switch (selectedPage) {
			case data:
				processDataRequest(request, response, testData);
				break;

			case data2:
				processDataRequest(request, response, testData2);
				break;

			case wait:
				// Stall longer than any client read timeout used in the tests.
				try {
					// NOTE(review): InterruptedException is swallowed and the
					// interrupt flag is not restored — acceptable for a test
					// stub, but worth confirming.
					Thread.sleep(12000);
				} catch (InterruptedException e) {
				}
				break;

			case notok:
				response.setStatus(Response.SC_BAD_REQUEST);
				break;

			default:
				throw new IllegalArgumentException("Unknown page");
			}
		}
	}

	/** True when the request carries a byte-range header. */
	private static boolean isRangeRequest(HttpServletRequest request) {
		return (request.getHeader("Range") != null && request.getHeader("Range").contains("bytes"));
	}

	/**
	 * Serves a partial response. The second number of the Range header is
	 * interpreted as a LENGTH, not an end offset ("bytes=10-15" returns 15
	 * bytes starting at 10) — this matches what testGetRange expects.
	 */
	private static void processRangeRequest(HttpServletRequest request, HttpServletResponse response, byte[] data)
			throws IOException {
		StringBuilder sb = new StringBuilder();
		sb.append(request.getHeader("Range"));
		sb.replace(0, 6, "");  // strip the leading "bytes=" prefix

		String[] marker = sb.toString().split("-");
		int start = Integer.parseInt(marker[0]);
		int offset = start + Integer.parseInt(marker[1]);

		byte[] selection = Arrays.copyOfRange(data, start, offset);
		response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT);
		response.getOutputStream().write(selection);
		response.getOutputStream().close();
	}

	/** Serves the full payload, or delegates to the range handler. */
	private static void processDataRequest(HttpServletRequest request, HttpServletResponse response, byte[] data)
			throws IOException {
		if (isRangeRequest(request)) {
			processRangeRequest(request, response, data);
		} else {
			response.getOutputStream().write(data);
			response.getOutputStream().close();
		}
	}

	/** Builds numOfBytes pseudo-random non-negative bytes. */
	private byte[] generateRandomData(int numOfBytes) {
		byte[] randomData = new byte[numOfBytes];

		for (int i = 0; i < numOfBytes; i++) {
			randomData[i] = (byte) (Math.random() * Byte.MAX_VALUE);
		}

		return randomData;
	}

	/** Extracts the last path segment of the request URI (the page name). */
	private static String extractPage(HttpServletRequest request) {
		String requestUri = request.getRequestURI();
		int pageIndexStart = requestUri.lastIndexOf("/");
		String pageName = requestUri.substring(pageIndexStart + 1);
		return pageName;
	}

	/** Returns the cached URL for the page, creating it on first use. */
	private URL getURL(Pages page) throws MalformedURLException {
		if (pageURLs.containsKey(page)) {
			return pageURLs.get(page);
		} else {
			URL pageUrl = createURL(page);
			pageURLs.put(page, pageUrl);
			return pageUrl;
		}
	}

	/** Builds http://localhost:SERVER_PORT/&lt;pageName&gt; for the page. */
	private URL createURL(Pages page) throws MalformedURLException {
		String pageName = page.toString();
		String constructedUrl = "http://localhost:" + SERVER_PORT + "/" + pageName;
		URL pageUrl = new URL(constructedUrl);
		return pageUrl;
	}
}
package hex.tree; import water.MemoryManager; import water.util.ArrayUtils; import water.util.AtomicUtils; import water.util.IcedBitSet; import water.util.MathUtils; /** A Histogram, computed in parallel over a Vec. * * <p>Sums and sums-of-squares of floats * * @author Cliff Click */ public class DRealHistogram extends DHistogram<DRealHistogram> { private double _sums[], _ssqs[]; // Sums & square-sums, shared, atomically incremented public DRealHistogram(String name, final int nbins, int nbins_cats, byte isInt, float min, float maxEx) { super(name,nbins, nbins_cats, isInt, min, maxEx); } @Override public double mean(int b) { double n = _bins[b]; return n>0 ? _sums[b]/n : 0; } @Override public double var (int b) { double n = _bins[b]; if( n==0 ) return 0; return (_ssqs[b] - _sums[b]*_sums[b]/n)/(n-1); } // Big allocation of arrays @Override void init0() { _sums = MemoryManager.malloc8d(_nbin); _ssqs = MemoryManager.malloc8d(_nbin); } // Add one row to a bin found via simple linear interpolation. // Compute response mean & variance. // Done racily instead F/J map calls, so atomic @Override void incr0( int b, double y ) { AtomicUtils.DoubleArray.add(_sums,b,y); AtomicUtils.DoubleArray.add(_ssqs,b,y*y); } // Same, except square done by caller void incr1( int b, double y, double yy ) { AtomicUtils.DoubleArray.add(_sums,b,y); AtomicUtils.DoubleArray.add(_ssqs,b,yy); } // Merge two equal histograms together. // Done in a F/J reduce, so no synchronization needed. @Override void add0( DRealHistogram dsh ) { ArrayUtils.add(_sums,dsh._sums); ArrayUtils.add(_ssqs,dsh._ssqs); } // Compute a "score" for a column; lower score "wins" (is a better split). // Score is the sum of the MSEs when the data is split at a single point. // mses[1] == MSE for splitting between bins 0 and 1. // mses[n] == MSE for splitting between bins n-1 and n. 
@Override public DTree.Split scoreMSE( int col, int min_rows ) { final int nbins = nbins(); assert nbins > 1; // Histogram arrays used for splitting, these are either the original bins // (for an ordered predictor), or sorted by the mean response (for an // unordered predictor, i.e. categorical predictor). double[] sums = _sums; double[] ssqs = _ssqs; double[] bins = _bins; int idxs[] = null; // and a reverse index mapping // For categorical (unordered) predictors, sort the bins by average // prediction then look for an optimal split. Currently limited to enums // where we're one-per-bin. No point for 3 or fewer bins as all possible // combinations (just 3) are tested without needing to sort. if( _isInt == 2 && _step == 1.0f && nbins >= 4 ) { // Sort the index by average response idxs = MemoryManager.malloc4(nbins+1); // Reverse index for( int i=0; i<nbins+1; i++ ) idxs[i] = i; final double[] avgs = MemoryManager.malloc8d(nbins+1); for( int i=0; i<nbins; i++ ) avgs[i] = _bins[i]==0 ? 0 : _sums[i]/_bins[i]; // Average response avgs[nbins] = Double.MAX_VALUE; ArrayUtils.sort(idxs, avgs); // Fill with sorted data. Makes a copy, so the original data remains in // its original order. sums = MemoryManager.malloc8d(nbins); ssqs = MemoryManager.malloc8d(nbins); bins = MemoryManager.malloc8d(nbins); for( int i=0; i<nbins; i++ ) { sums[i] = _sums[idxs[i]]; ssqs[i] = _ssqs[idxs[i]]; bins[i] = _bins[idxs[i]]; } } // Compute mean/var for cumulative bins from 0 to nbins inclusive. double sums0[] = MemoryManager.malloc8d(nbins+1); double ssqs0[] = MemoryManager.malloc8d(nbins+1); double ns0[] = MemoryManager.malloc8d(nbins+1); for( int b=1; b<=nbins; b++ ) { double m0 = sums0[b-1], m1 = sums[b-1]; double s0 = ssqs0[b-1], s1 = ssqs[b-1]; double k0 = ns0 [b-1], k1 = bins[b-1]; if( k0==0 && k1==0 ) continue; sums0[b] = m0+m1; ssqs0[b] = s0+s1; ns0 [b] = k0+k1; } double tot = ns0[nbins]; // Is any split possible with at least min_obs? 
if( tot < 2*min_rows ) return null; // If we see zero variance, we must have a constant response in this // column. Normally this situation is cut out before we even try to split, // but we might have NA's in THIS column... double var = ssqs0[nbins]*tot - sums0[nbins]*sums0[nbins]; if( var == 0 ) { assert isConstantResponse(); return null; } // If variance is really small, then the predictions (which are all at // single-precision resolution), will be all the same and the tree split // will be in vain. if( ((float)var) == 0f ) return null; // Compute mean/var for cumulative bins from nbins to 0 inclusive. double sums1[] = MemoryManager.malloc8d(nbins+1); double ssqs1[] = MemoryManager.malloc8d(nbins+1); double ns1[] = MemoryManager.malloc8d(nbins+1); for( int b=nbins-1; b>=0; b double m0 = sums1[b+1], m1 = sums[b]; double s0 = ssqs1[b+1], s1 = ssqs[b]; double k0 = ns1 [b+1], k1 = bins[b]; if( k0==0 && k1==0 ) continue; sums1[b] = m0+m1; ssqs1[b] = s0+s1; ns1 [b] = k0+k1; assert MathUtils.compare(ns0[b]+ns1[b],tot,1e-5,1e-5); } // Now roll the split-point across the bins. There are 2 ways to do this: // split left/right based on being less than some value, or being equal/ // not-equal to some value. Equal/not-equal makes sense for categoricals // but both splits could work for any integral datatype. Do the less-than // splits first. int best=0; // The no-split double best_se0=Double.MAX_VALUE; // Best squared error double best_se1=Double.MAX_VALUE; // Best squared error byte equal=0; // Ranged check for( int b=1; b<=nbins-1; b++ ) { if( bins[b] == 0 ) continue; // Ignore empty splits if( ns0[b] < min_rows ) continue; if( ns1[b] < min_rows ) break; // ns1 shrinks at the higher bin#s, so if it fails once it fails always // We're making an unbiased estimator, so that MSE==Var. 
// Then Squared Error = MSE*N = Var*N // = (ssqs/N - mean^2)*N // = ssqs - N*mean^2 // = ssqs - N*(sum/N)(sum/N) // = ssqs - sum^2/N double se0 = ssqs0[b] - sums0[b]*sums0[b]/ns0[b]; double se1 = ssqs1[b] - sums1[b]*sums1[b]/ns1[b]; if( se0 < 0 ) se0 = 0; // Roundoff error; sometimes goes negative if( se1 < 0 ) se1 = 0; // Roundoff error; sometimes goes negative if( (se0+se1 < best_se0+best_se1) || // Strictly less error? // Or tied MSE, then pick split towards middle bins (se0+se1 == best_se0+best_se1 && Math.abs(b -(nbins>>1)) < Math.abs(best-(nbins>>1))) ) { best_se0 = se0; best_se1 = se1; best = b; } } // If the bin covers a single value, we can also try an equality-based split if( _isInt > 0 && _step == 1.0f && // For any integral (not float) column _maxEx-_min > 2 && idxs==null ) { // Also need more than 2 (boolean) choices to actually try a new split pattern for( int b=1; b<=nbins-1; b++ ) { if( bins[b] < min_rows ) continue; // Ignore too small splits double N = ns0[b] + ns1[b+1]; if( N < min_rows ) continue; // Ignore too small splits double sums2 = sums0[b ]+sums1[b+1]; double ssqs2 = ssqs0[b ]+ssqs1[b+1]; double si = ssqs2 -sums2 *sums2 / N ; // Left+right, excluding 'b' double sx = ssqs [b] -sums[b]*sums[b]/bins[b]; // Just 'b' if( si < 0 ) si = 0; // Roundoff error; sometimes goes negative if( sx < 0 ) sx = 0; // Roundoff error; sometimes goes negative if( si+sx < best_se0+best_se1 ) { // Strictly less error? 
best_se0 = si; best_se1 = sx; best = b; equal = 1; // Equality check } } } // For categorical (unordered) predictors, we sorted the bins by average // prediction then found the optimal split on sorted bins IcedBitSet bs = null; // In case we need an arbitrary bitset if( idxs != null ) { // We sorted bins; need to build a bitset int min=Integer.MAX_VALUE;// Compute lower bound and span for bitset int max=Integer.MIN_VALUE; for( int i=best; i<nbins; i++ ) { min=Math.min(min,idxs[i]); max=Math.max(max,idxs[i]); } bs = new IcedBitSet(max-min+1,min); // Bitset with just enough span to cover the interesting bits for( int i=best; i<nbins; i++ ) bs.set(idxs[i]); // Reverse the index then set bits equal = (byte)(bs.max() <= 32 ? 2 : 3); // Flag for bitset split; also check max size } if( best==0 ) return null; // No place to split double se = ssqs1[0] - sums1[0]*sums1[0]/ns1[0]; // Squared Error with no split if( se <= best_se0+best_se1) return null; // Ultimately roundoff error loses, and no split actually helped double n0 = equal != 1 ? ns0[best] : ns0[best]+ ns1[best+1]; double n1 = equal != 1 ? ns1[best] : bins[best] ; double p0 = equal != 1 ? sums0[best] : sums0[best]+sums1[best+1]; double p1 = equal != 1 ? sums1[best] : sums[best] ; if( MathUtils.equalsWithinOneSmallUlp((float)(p0/n0),(float)(p1/n1)) ) return null; // No difference in predictions, which are all at 1 float ULP return new DTree.Split(col,best,bs,equal,se,best_se0,best_se1,n0,n1,p0/n0,p1/n1); } @Override public long byteSize0() { return 8*2 + // 2 more internal arrays 24+_sums.length<<3 + 24+_ssqs.length<<3 ; } }
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:18-07-15"); this.setApiVersion("14.2.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
/* * Constants.java * */ package de.uni_stuttgart.vis.vowl.owl2vowl.model; /** * @author Vincent Link, Eduard Marbach */ public class Constants { /* NODE TYPES */ public static final String TYPE_CLASS = "owl:Class"; public static final String TYPE_RDFSCLASS = "rdfs:Class"; public static final String TYPE_EQUIVALENT = "owl:equivalentClass"; public static final String TYPE_EXTERNALCLASS = "externalclass"; public static final String TYPE_DEPRECTAEDCLASS = "owl:DeprecatedClass"; public static final String TYPE_THING = "owl:Thing"; public static final String TYPE_NOTHING = "owl:Nothing"; public static final String TYPE_RDFSRESOURCE = "rdfs:Resource"; public static final String TYPE_UNION = "owl:unionOf"; public static final String TYPE_INTERSECTION = "owl:intersectionOf"; public static final String TYPE_COMPLEMENT = "owl:complementOf"; public static final String TYPE_DATATYPE = "rdfs:Datatype"; public static final String TYPE_LITERAL = "rdfs:Literal"; /* PROPERTY TYPES */ public static final String PROP_TYPE_SUBCLASS = "rdfs:SubClassOf"; public static final String PROP_TYPE_DISJOINT = "owl:disjointWith"; /* SAMPLE ONTOLOGIES */ public static final String PATH_VARIABLE = System.getProperty("user.dir") + "/OWL2VOWL/ontologies/"; public static final String IMARINE = PATH_VARIABLE + "marinetloimarine.owl"; public static final String MARINE = PATH_VARIABLE + "marinetlo.owl"; public static final String MUSIC = PATH_VARIABLE + "musicontology.rdfs"; public static final String SIOC = PATH_VARIABLE + "sioc.rdf"; public static final String GR = PATH_VARIABLE + "v1.owl"; public static final String MUTO = PATH_VARIABLE + "muto.rdf"; public static final String FOAF = PATH_VARIABLE + "foaf.rdf"; public static final String WINE = PATH_VARIABLE + "wine.rdf"; public static final String PERSONAS = PATH_VARIABLE + "personasonto.owl"; public static final String BENCHMARK1 = PATH_VARIABLE + "ontovibe/BenchmarkOntology.ttl"; public static final String BENCHMARK2 = PATH_VARIABLE + 
"ontovibe/BenchmarkOntologyModule.ttl"; public static final String EXT_GEONAMES = "http: public static final String EXT_PROV = "http: public static final String EXT_PIZZA = "http://130.88.198.11/co-ode-files/ontologies/pizza.owl"; public static final String EXT_ONTOVIBE = "http://ontovibe.visualdataweb.org/1.0"; public static final String EXT_NICETAG = "http://ns.inria.fr/nicetag/2010/09/09/voc"; /* ANNOTATIONS */ public static final String RDFS_COMMENT = "rdfs:comment"; public static final String RDFS_LABEL = "rdfs:label"; public static final String RDFS_DEFINED_BY = "rdfs:isDefinedBy"; public static final String OWL_VERSIONINFO = "owl:versionInfo"; public static final String RDFS_SUBCLASS = "rdfs:subClassOf"; public static final String RDFS_DOMAIN = "rdfs:domain"; public static final String RDFS_RANGE = "rdfs:range"; public static final String OWL_DEPRECATED = "owl:deprecated"; /* STANDARD IRIS */ public static final String GENERIC_LITERAL_URI = "http://www.w3.org/2000/01/rdf-schema#Literal"; public static final String OWL_THING_CLASS_URI = "http://www.w3.org/2002/07/owl#Thing"; /* PROPERTY ATTRIBUTES */ public static final String PROP_ATTR_FUNCT = "functional"; public static final String PROP_ATTR_OBJ = "object"; public static final String PROP_ATTR_DATA = "datatype"; public static final String PROP_ATTR_DEPR = "deprecated"; public static final String PROP_ATTR_RDF = "rdf"; public static final String PROP_ATTR_TRANS = "transitive"; public static final String PROP_ATTR_INV_FUNCT = "inverse functional"; public static final String PROP_ATTR_SUB = "subclass"; public static final String PROP_ATTR_DISJOINT = "disjoint"; public static final String PROP_ATTR_SYM = "symmetric"; public static final String PROP_ATTR_IMPORT = "external"; /* Ontology info annotations */ public static final String INFO_TITLE = "<http://purl.org/dc/elements/1.1/title>"; public static final String INFO_SEE_ALSO = "rdfs:seeAlso"; public static final String INFO_ISSUED = 
"<http://purl.org/dc/terms/issued>"; public static final String INFO_CREATOR = "<http://purl.org/dc/elements/1.1/creator>"; public static final String INFO_LICENSE = "<http://purl.org/dc/terms/licence>"; public static final String INFO_DESCRIPTION = "<http://purl.org/dc/elements/1.1/description>"; public static final String INFO_VERSION_INFO = "owl:versionInfo"; public static final String INFO_RDFS_LABEL = "rdfs:label"; /* Annotations used for axioms. */ public static final String AXIOM_OBJ_PROP_DOMAIN = "ObjectPropertyDomain"; public static final String AXIOM_OBJ_PROP_RANGE = "ObjectPropertyRange"; public static final String AXIOM_DATA_PROP_DOMAIN = "DataPropertyDomain"; public static final String AXIOM_DATA_PROP_RANGE = "DataPropertyRange"; public static final String AXIOM_OBJ_UNION = "ObjectUnionOf"; public static final String AXIOM_OBJ_INTERSECTION = "ObjectIntersectionOf"; public static final String AXIOM_OBJ_COMPLEMENT = "ObjectComplementOf"; public static final String AXIOM_DISJOINT = "DisjointClasses"; public static final String AXIOM_DISJOINTUNION = "DisjointUnion"; public static final String AXIOM_SUBCLASS = "SubClassOf"; public static final String AXIOM_CARD_MIN = "ObjectMinCardinality"; public static final String AXIOM_CARD_EXACT = "ObjectExactCardinality"; /* Languages */ public static final String LANG_UNSET = "unset"; public static final String LANG_DEFAULT = "iriBased"; }
package hex;

import hex.genmodel.GenModel;
import water.MRTask;
import water.Scope;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.MathUtils;

import java.util.Arrays;

/**
 * Model metrics for binomial (two-class) classification: AUC, log loss,
 * mean per-class error and an optional Gains/Lift table.  Instances are
 * immutable once constructed; the heavy lifting is done by the nested
 * MetricBuilderBinomial, which is filled row-by-row (perRow) and merged
 * (reduce) in a map/reduce pass.
 */
public class ModelMetricsBinomial extends ModelMetricsSupervised {
  public final AUC2 _auc;                      // AUC curve data (may be null when no rows were scored)
  public final double _logloss;                // total log loss (already normalized by weight count)
  public final double _mean_per_class_error;   // derived from the default-threshold confusion matrix
  public final GainsLift _gainsLift;           // optional Gains/Lift table (may be null)

  public ModelMetricsBinomial(Model model, Frame frame, long nobs, double mse, String[] domain,
                              double sigma, AUC2 auc, double logloss, GainsLift gainsLift) {
    super(model, frame, nobs, mse, domain, sigma);
    _auc = auc;
    _logloss = logloss;
    _gainsLift = gainsLift;
    // cm() is null when _auc is null, in which case the error is NaN.
    _mean_per_class_error = cm() == null ? Double.NaN : cm().mean_per_class_error();
  }

  /**
   * Looks up the metrics for (model, frame) in the DKV and casts them to
   * binomial metrics, failing loudly on a type mismatch.
   */
  public static ModelMetricsBinomial getFromDKV(Model model, Frame frame) {
    ModelMetrics mm = ModelMetrics.getFromDKV(model, frame);
    if( !(mm instanceof ModelMetricsBinomial) )
      throw new H2OIllegalArgumentException("Expected to find a Binomial ModelMetrics for model: " + model._key.toString()
              + " and frame: " + frame._key.toString(),
              "Expected to find a ModelMetricsBinomial for model: " + model._key.toString() + " and frame: " + frame._key.toString()
                      + " but found a: " + (mm == null ? null : mm.getClass()));
    return (ModelMetricsBinomial) mm;
  }

  @Override public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(super.toString());
    if (_auc != null) sb.append(" AUC: " + (float)_auc._auc + "\n");
    sb.append(" logloss: " + (float)_logloss + "\n");
    sb.append(" mean_per_class_error: " + (float)_mean_per_class_error + "\n");
    sb.append(" default threshold: " + (_auc == null ? 0.5 : (float)_auc.defaultThreshold()) + "\n");
    if (cm() != null) sb.append(" CM: " + cm().toASCII());
    if (_gainsLift != null) sb.append(_gainsLift);
    return sb.toString();
  }

  public double logloss() { return _logloss; }
  public double mean_per_class_error() { return _mean_per_class_error; }
  @Override public AUC2 auc_obj() { return _auc; }

  /** Confusion matrix at the AUC's default threshold; null when no AUC. */
  @Override public ConfusionMatrix cm() {
    if( _auc == null ) return null;
    double[][] cm = _auc.defaultCM();
    return cm == null ? null : new ConfusionMatrix(cm, _domain);
  }
  public GainsLift gainsLift() { return _gainsLift; }

  // expose simple metrics criteria for sorting
  public double auc() { return auc_obj()._auc; }
  public double lift_top_group() { return gainsLift().response_rates[0] / gainsLift().avg_response_rate; }

  /**
   * Build a Binomial ModelMetrics object from target-class probabilities, from actual labels, and a given domain for both labels (and domain[1] is the target class)
   * @param targetClassProbs A Vec containing target class probabilities
   * @param actualLabels A Vec containing the actual labels (can be for fewer labels than what's in domain, since the predictions can be for a small subset of the data)
   * @return ModelMetrics object
   */
  static public ModelMetricsBinomial make(Vec targetClassProbs, Vec actualLabels) {
    return make(targetClassProbs,actualLabels,actualLabels.domain());
  }

  /**
   * Build a Binomial ModelMetrics object from target-class probabilities, from actual labels, and a given domain for both labels (and domain[1] is the target class)
   * @param targetClassProbs A Vec containing target class probabilities
   * @param actualLabels A Vec containing the actual labels (can be for fewer labels than what's in domain, since the predictions can be for a small subset of the data)
   * @param domain The two class labels (domain[0] is the non-target class, domain[1] is the target class, for which probabilities are given)
   * @return ModelMetrics object
   */
  static public ModelMetricsBinomial make(Vec targetClassProbs, Vec actualLabels, String[] domain) {
    Scope.enter();
    Vec _labels = actualLabels.toCategoricalVec();
    if (domain==null) domain = _labels.domain();
    // NOTE(review): the null check below runs AFTER toCategoricalVec() was
    // already called on actualLabels — a null actualLabels would NPE above.
    if (_labels == null || targetClassProbs == null)
      throw new IllegalArgumentException("Missing actualLabels or predictedProbs for binomial metrics!");
    if (!targetClassProbs.isNumeric())
      throw new IllegalArgumentException("Predicted probabilities must be numeric per-class probabilities for binomial metrics.");
    if (targetClassProbs.min() < 0 || targetClassProbs.max() > 1)
      throw new IllegalArgumentException("Predicted probabilities must be between 0 and 1 for binomial metrics.");
    if (domain.length!=2)
      throw new IllegalArgumentException("Domain must have 2 class labels, but is " + Arrays.toString(domain) + " for binomial metrics.");
    _labels = _labels.adaptTo(domain);
    if (_labels.cardinality()!=2)
      throw new IllegalArgumentException("Adapted domain must have 2 class labels, but is " + Arrays.toString(_labels.domain()) + " for binomial metrics.");

    // Frame of [probs, labels] driven through the metric builder MRTask.
    Frame predsLabel = new Frame(targetClassProbs);
    predsLabel.add("labels", _labels);
    MetricBuilderBinomial mb = new BinomialMetrics(_labels.domain()).doAll(predsLabel)._mb;
    // NOTE(review): _labels is removed here although predsLabel (which still
    // references it) is passed to makeModelMetrics below — presumably safe
    // because the builder already consumed the data; confirm.
    _labels.remove();
    Frame preds = new Frame(targetClassProbs);
    ModelMetricsBinomial mm = (ModelMetricsBinomial)mb.makeModelMetrics(null, predsLabel, null, preds);
    mm._description = "Computed on user-given predictions and labels, using F1-optimal threshold: " + mm.auc_obj().defaultThreshold() + ".";
    Scope.exit();
    return mm;
  }

  // helper to build a ModelMetricsBinomial for a N-class problem from a Frame that contains N per-class probability columns, and the actual label as the (N+1)-th column
  private static class BinomialMetrics extends MRTask<BinomialMetrics> {
    public BinomialMetrics(String[] domain) { this.domain = domain; }
    String[] domain;                     // the two class labels
    public MetricBuilderBinomial _mb;    // per-chunk builder, merged in reduce()
    @Override public void map(Chunk[] chks) {
      _mb = new MetricBuilderBinomial(domain);
      Chunk actuals = chks[1];
      double [] ds = new double[3];      // [predicted label, p(class0), p(class1)]
      for (int i=0;i<chks[0]._len;++i) {
        ds[2] = chks[0].atd(i); //class 1 probs (user-given)
        ds[1] = 1-ds[2]; //class 0 probs
        ds[0] = GenModel.getPrediction(ds, null, ds, Double.NaN/*ignored - uses AUC's default threshold*/); //label
        _mb.perRow(ds, new float[]{(float)actuals.atd(i)}, null);
      }
    }
    @Override public void reduce(BinomialMetrics mrt) { _mb.reduce(mrt._mb); }
  }

  /**
   * Row-by-row accumulator for binomial metrics: squared error, log loss and
   * the AUC histogram.  Filled via perRow(), merged via reduce(), finalized
   * via makeModelMetrics().
   */
  public static class MetricBuilderBinomial<T extends MetricBuilderBinomial<T>> extends MetricBuilderSupervised<T> {
    protected double _logloss;       // weighted log-loss accumulator
    protected AUC2.AUCBuilder _auc;  // streaming AUC histogram

    public MetricBuilderBinomial( String[] domain ) {
      super(2,domain);
      _auc = new AUC2.AUCBuilder(AUC2.NBINS);
    }

    public double auc() {return new AUC2(_auc)._auc;}

    // Passed a float[] sized nclasses+1; ds[0] must be a prediction.  ds[1...nclasses-1] must be a class
    // distribution;
    @Override public double[] perRow(double ds[], float[] yact, Model m) {
      return perRow(ds, yact, 1, 0, m);
    }

    @Override public double[] perRow(double ds[], float[] yact, double w, double o, Model m) {
      if( Float.isNaN(yact[0]) ) return ds; // No errors if   actual   is missing
      if(ArrayUtils.hasNaNs(ds)) return ds;  // No errors if prediction has missing values (can happen for GLM)
      if(w == 0 || Double.isNaN(w)) return ds;  // zero/NaN weight: row contributes nothing
      final int iact = (int)yact[0];
      if( iact != 0 && iact != 1 ) return ds; // The actual is effectively a NaN
      _count++;
      _wcount += w;
      _wY += w*iact;
      _wYY += w*iact*iact;
      // Compute error
      double err = iact+1 < ds.length ? 1-ds[iact+1] : 1;  // Error: distance from predicting ycls as 1.0
      _sumsqe += w*err*err;           // Squared error
      assert !Double.isNaN(_sumsqe);
      // Compute log loss
      _logloss += w*MathUtils.logloss(err);
      _auc.perRow(ds[2],iact,w);
      return ds;                // Flow coding
    }

    @Override public void reduce( T mb ) {
      super.reduce(mb); // sumseq, count
      _logloss += mb._logloss;
      _auc.reduce(mb._auc);
    }

    /**
     * Create a ModelMetrics for a given model and frame
     * @param m Model
     * @param f Frame
     * @param frameWithWeights Frame that contains extra columns such as weights
     * @param preds Optional predictions (can be null), only used to compute Gains/Lift table for binomial problems
     * @return ModelMetricsBinomial
     */
    @Override public ModelMetrics makeModelMetrics(Model m, Frame f, Frame frameWithWeights, Frame preds) {
      GainsLift gl = null;
      if (_wcount > 0) {
        if (preds!=null) {
          if (frameWithWeights == null) frameWithWeights = f;
          // NOTE(review): when m==null and the last column is NOT categorical,
          // the else-branch dereferences m._parms and would NPE — the
          // work-around below assumes callers with m==null always pass the
          // actual response as the last (categorical) column; confirm.
          Vec resp = m==null && frameWithWeights.vec(f.numCols()-1).isCategorical() ?
                  frameWithWeights.vec(f.numCols()-1) //work-around for the case where we don't have a model, assume that the last column is the actual response
                  : frameWithWeights.vec(m._parms._response_column);
          if (resp != null) {
            Vec weight = m==null?null : frameWithWeights.vec(m._parms._weights_column);
            gl = calculateGainsLift(m, preds, resp, weight);
          }
        }
      }
      return makeModelMetrics(m, f, gl);
    }

    // Finalize: normalize accumulators by the weight count and wrap them in a
    // ModelMetricsBinomial (all-NaN metrics when no rows were scored).
    private ModelMetrics makeModelMetrics(Model m, Frame f, GainsLift gl) {
      double mse = Double.NaN;
      double logloss = Double.NaN;
      double sigma = Double.NaN;
      AUC2 auc = null;
      if (_wcount > 0) {
        sigma = weightedSigma();
        mse = _sumsqe / _wcount;
        logloss = _logloss / _wcount;
        auc = new AUC2(_auc);
      }
      ModelMetricsBinomial mm = new ModelMetricsBinomial(m, f, _count, mse, _domain, sigma, auc, logloss, gl);
      if (m!=null) m.addModelMetrics(mm);
      return mm;
    }

    // Best-effort Gains/Lift computation: any failure is logged and yields null.
    private GainsLift calculateGainsLift(Model m, Frame preds, Vec resp, Vec weights) {
      GainsLift gl = null;
      try {
        gl = new GainsLift(preds.lastVec(), resp, weights);
        gl.exec(m != null ? m._output._job : null);
      } catch(Throwable t) { // TODO: Why do we need to catch Throwable here?
        Log.debug("Calculating Gains-Lift failed", t);
      }
      return gl;
    }

    @Override public Frame makePredictionCache(Model m, Vec response) {
      return new Frame(response.makeVolatileDoubles(1));
    }

    @Override public void cachePrediction(double[] cdist, Chunk[] chks, int row, int cacheChunkIdx, Model m) {
      assert cdist.length == 3;
      chks[cacheChunkIdx].set(row, cdist[cdist.length - 1]);
    }

    public String toString(){
      if(_wcount == 0) return "empty, no rows";
      return "auc = " + MathUtils.roundToNDigits(auc(),3) + ", logloss = " + _logloss / _wcount;
    }
  }
}
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:19-09-29"); this.setApiVersion("15.8.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package com.spatial4j.core.shape;

import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.shape.impl.Range;

import static com.spatial4j.core.shape.SpatialRelation.CONTAINS;
import static com.spatial4j.core.shape.SpatialRelation.WITHIN;

/**
 * A base test class with utility methods to help test shapes.
 * Extends from RandomizedTest.
 *
 * Provides coordinate normalization (dateline/world-bounds aware), random
 * shape/point generators, and relation-assertion helpers for subclasses.
 */
public abstract class RandomizedShapeTest extends RandomizedTest {

  // Absolute tolerance used by ratio-based assertions below.
  protected static final double EPS = 10e-9;

  protected SpatialContext ctx;//needs to be set ASAP

  /** Used to reduce the space of numbers to increase the likelihood that
   * random numbers become equivalent, and thus trigger different code paths.
   * Also makes some random shapes easier to manually examine. */
  protected final double DIVISIBLE = 2;// even coordinates; (not always used)

  // No-arg constructor leaves ctx null; subclass must assign it before use.
  protected RandomizedShapeTest() {
  }

  public RandomizedShapeTest(SpatialContext ctx) {
    this.ctx = ctx;
  }

  /**
   * Fails the test unless every given class declares its own
   * equals(Object) and hashCode() (i.e. does not just inherit Object's).
   */
  public static void checkShapesImplementEquals( Class[] classes ) {
    for( Class clazz : classes ) {
      try {
        clazz.getDeclaredMethod( "equals", Object.class );
      } catch (Exception e) {
        fail("Shape needs to define 'equals' : " + clazz.getName());
      }
      try {
        clazz.getDeclaredMethod( "hashCode" );
      } catch (Exception e) {
        fail("Shape needs to define 'hashCode' : " + clazz.getName());
      }
    }
  }

  //These few norm methods normalize the arguments for creating a shape to
  // account for the dateline. Some tests loop past the dateline or have offsets
  // that go past it and it's easier to have them coded that way and correct for
  // it here. These norm methods should be used when needed, not frivolously.

  // Wraps longitude into [-180,180] in geo mode; pass-through otherwise.
  protected double normX(double x) {
    return ctx.isGeo() ? DistanceUtils.normLonDEG(x) : x;
  }

  // Normalizes latitude in geo mode; pass-through otherwise.
  protected double normY(double y) {
    return ctx.isGeo() ? DistanceUtils.normLatDEG(y) : y;
  }

  /**
   * Builds a rectangle from possibly-out-of-range coordinates.
   * Geo: an x-span >= 360 becomes the full world [-180,180], otherwise both
   * x edges are wrapped (minX may exceed maxX, meaning it crosses the dateline).
   * Non-geo: x edges are swapped if reversed and clamped to world bounds.
   * y edges are always swapped-if-reversed and clamped.
   */
  protected Rectangle makeNormRect(double minX, double maxX, double minY, double maxY) {
    if (ctx.isGeo()) {
      if (Math.abs(maxX - minX) >= 360) {
        minX = -180;
        maxX = 180;
      } else {
        minX = DistanceUtils.normLonDEG(minX);
        maxX = DistanceUtils.normLonDEG(maxX);
      }
    } else {
      if (maxX < minX) {
        double t = minX;
        minX = maxX;
        maxX = t;
      }
      minX = boundX(minX, ctx.getWorldBounds());
      maxX = boundX(maxX, ctx.getWorldBounds());
    }
    if (maxY < minY) {
      double t = minY;
      minY = maxY;
      maxY = t;
    }
    minY = boundY(minY, ctx.getWorldBounds());
    maxY = boundY(maxY, ctx.getWorldBounds());
    return ctx.makeRectangle(minX, maxX, minY, maxY);
  }

  // Rounds v to the nearest multiple of 'divisible'.
  // NOTE(review): the (int) cast truncates to int range — presumably fine for
  // degree-sized coordinates, but would overflow for very large v; confirm.
  public static double divisible(double v, double divisible) {
    return (int) (Math.round(v / divisible) * divisible);
  }

  protected double divisible(double v) {
    return divisible(v, DIVISIBLE);
  }

  /** reset()'s p, and confines to world bounds. Might not be divisible if
   * the world bound isn't divisible too. */
  protected Point divisible(Point p) {
    Rectangle bounds = ctx.getWorldBounds();
    double newX = boundX( divisible(p.getX()), bounds );
    double newY = boundY( divisible(p.getY()), bounds );
    p.reset(newX, newY);
    return p;
  }

  // Clamp an x coordinate into the rectangle's x range.
  static double boundX(double i, Rectangle bounds) {
    return bound(i, bounds.getMinX(), bounds.getMaxX());
  }

  // Clamp a y coordinate into the rectangle's y range.
  static double boundY(double i, Rectangle bounds) {
    return bound(i, bounds.getMinY(), bounds.getMaxY());
  }

  // Clamp i into [min, max].
  static double bound(double i, double min, double max) {
    if (i < min) return min;
    if (i > max) return max;
    return i;
  }

  protected void assertRelation(SpatialRelation expected, Shape a, Shape b) {
    assertRelation(null, expected, a, b);
  }

  /**
   * Asserts a.relate(b) == expected, and also checks the transposed relation
   * b.relate(a) for symmetry.
   */
  protected void assertRelation(String msg, SpatialRelation expected, Shape a, Shape b) {
    _assertIntersect(msg, expected, a, b);
    //check flipped a & b w/ transpose(), while we're at it
    _assertIntersect(msg, expected.transpose(), b, a);
  }

  // Core relation check. For WITHIN/CONTAINS mismatches it tolerates
  // degenerate (zero-area) shapes at the same location, including the
  // special case of coincident points at a pole.
  private void _assertIntersect(String msg, SpatialRelation expected, Shape a, Shape b) {
    SpatialRelation sect = a.relate(b);
    if (sect == expected)
      return;
    msg = ((msg == null) ? "" : msg+"\r") + a +" intersect "+b;
    if (expected == WITHIN || expected == CONTAINS) {
      if (a.getClass().equals(b.getClass())) // they are the same shape type
        assertEquals(msg,a,b);
      else {
        //they are effectively points or lines that are the same location
        assertTrue(msg,!a.hasArea());
        assertTrue(msg,!b.hasArea());
        Rectangle aBBox = a.getBoundingBox();
        Rectangle bBBox = b.getBoundingBox();
        if (aBBox.getHeight() == 0 && bBBox.getHeight() == 0
            && (aBBox.getMaxY() == 90 && bBBox.getMaxY() == 90
            || aBBox.getMinY() == -90 && bBBox.getMinY() == -90))
          ;//== a point at the pole
        else
          assertEquals(msg, aBBox, bBBox);
      }
    } else {
      assertEquals(msg,expected,sect);//always fails
    }
  }

  /**
   * Asserts actual ~= expected by relative error (absolute when the smaller
   * of the two is 0), within EPS.
   */
  protected void assertEqualsRatio(String msg, double expected, double actual) {
    double delta = Math.abs(actual - expected);
    double base = Math.min(actual, expected);
    double deltaRatio = base==0 ? delta : Math.min(delta,delta / base);
    assertEquals(msg,0,deltaRatio, EPS);
  }

  protected int randomIntBetweenDivisible(int start, int end) {
    return randomIntBetweenDivisible(start, end, (int)DIVISIBLE);
  }

  /** Returns a random integer between [start, end]. Integers between must be divisible by the 3rd argument. */
  protected int randomIntBetweenDivisible(int start, int end, int divisible) {
    // DWS: I tested this
    // r==0 and r==1 are reserved so that the exact endpoints can be returned
    // even when they are not themselves divisible.
    int divisStart = (int) Math.ceil( (start+1) / (double)divisible );
    int divisEnd = (int) Math.floor( (end-1) / (double)divisible );
    int divisRange = Math.max(0,divisEnd - divisStart + 1);
    int r = randomInt(1 + divisRange);//remember that '0' is counted
    if (r == 0)
      return start;
    if (r == 1)
      return end;
    return (r-2 + divisStart)*divisible;
  }

  /**
   * Random rectangle biased toward nearP (random world point when null);
   * coordinates are snapped to DIVISIBLE and normalized via makeNormRect.
   */
  protected Rectangle randomRectangle(Point nearP) {
    Rectangle bounds = ctx.getWorldBounds();
    if (nearP == null)
      nearP = randomPointIn(bounds);
    // rarely(): occasionally center on 0 instead of nearP to vary coverage
    Range xRange = randomRange(rarely() ? 0 : nearP.getX(), Range.xRange(bounds, ctx));
    Range yRange = randomRange(rarely() ? 0 : nearP.getY(), Range.yRange(bounds, ctx));
    return makeNormRect(
        divisible(xRange.getMin()),
        divisible(xRange.getMax()),
        divisible(yRange.getMin()),
        divisible(yRange.getMax()) );
  }

  // Gaussian-distributed range centered near 'near'; width ~ bounds/6 each way.
  private Range randomRange(double near, Range bounds) {
    double mid = near + randomGaussian() * bounds.getWidth() / 6;
    double width = Math.abs(randomGaussian()) * bounds.getWidth() / 6;//1/3rd
    return new Range(mid - width / 2, mid + width / 2);
  }

  // Half-normal sample in [0, max]; rejection-samples values beyond max.
  private double randomGaussianZeroTo(double max) {
    if (max == 0)
      return max;
    assert max > 0;
    double r;
    do {
      r = Math.abs(randomGaussian()) * (max * 0.50);
    } while (r > max);
    return r;
  }

  /**
   * Random lat/lon-style rectangle with all edges divisible by the argument.
   * Width rW is applied eastward from rX; an rX of 180 with positive width is
   * shifted to -180 so the rectangle doesn't start on the far dateline edge.
   */
  protected Rectangle randomRectangle(int divisible) {
    double rX = randomIntBetweenDivisible(-180, 180, divisible);
    double rW = randomIntBetweenDivisible(0, 360, divisible);
    double rY1 = randomIntBetweenDivisible(-90, 90, divisible);
    double rY2 = randomIntBetweenDivisible(-90, 90, divisible);
    double rYmin = Math.min(rY1,rY2);
    double rYmax = Math.max(rY1,rY2);
    if (rW > 0 && rX == 180)
      rX = -180;
    return makeNormRect(rX, rX + rW, rYmin, rYmax);
  }

  protected Point randomPoint() {
    return randomPointIn(ctx.getWorldBounds());
  }

  // Uniform-ish point inside the circle via random radius + bearing; asserts
  // the circle actually contains the generated point.
  protected Point randomPointIn(Circle c) {
    double d = c.getRadius() * randomDouble();
    double angleDEG = 360 * randomDouble();
    Point p = ctx.getDistCalc().pointOnBearing(c.getCenter(), d, angleDEG, ctx, null);
    assertEquals(CONTAINS,c.relate(p));
    return p;
  }

  // Uniform point inside the rectangle, normalized, with a containment check.
  protected Point randomPointIn(Rectangle r) {
    double x = r.getMinX() + randomDouble()*r.getWidth();
    double y = r.getMinY() + randomDouble()*r.getHeight();
    x = normX(x);
    y = normY(y);
    Point p = ctx.makePoint(x,y);
    assertEquals(CONTAINS,r.relate(p));
    return p;
  }

  // Rejection-samples points from the bounding box until one intersects the
  // shape; requires the shape to have area so the loop can terminate.
  protected Point randomPointIn(Shape shape) {
    if (!shape.hasArea())// or try the center?
      throw new UnsupportedOperationException("Need area to define shape!");
    Rectangle bbox = shape.getBoundingBox();
    Point p;
    do {
      p = randomPointIn(bbox);
    } while (!shape.relate(p).intersects());
    return p;
  }

  /** Tests that {@code left} >= {@code right}, but may be less if within some tolerance. */
  public static void assertGreaterOrEqual(double left, double right, double delta) {
    if (left > right) {
      return;
    }
    // left <= right: only acceptable when they are equal within delta.
    assertEquals(left, right, delta);
  }
}
package edu.umich.umms.worldofworkcraft.domain;

import lombok.Data;
import org.neo4j.graphdb.Direction;
import org.springframework.data.neo4j.annotation.Fetch;
import org.springframework.data.neo4j.annotation.GraphId;
import org.springframework.data.neo4j.annotation.NodeEntity;
import org.springframework.data.neo4j.annotation.RelatedTo;

import java.util.Set;

/**
 * Graph node for an achievement: a named, point-valued award associated
 * with a {@link Learner}.
 */
@NodeEntity
public class Achievement {

    @GraphId
    private Long id;         // graph-assigned node id

    private String name;     // display name of the achievement
    private Long point;      // point value of the achievement
    private Learner learner; // learner this achievement belongs to

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getPoint() {
        return point;
    }

    public void setPoint(Long point) {
        this.point = point;
    }

    public Learner getLearner() {
        return learner;
    }

    public void setLearner(Learner learner) {
        this.learner = learner;
    }
}
package com.treasure_data.jdbc; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import org.junit.Test; import com.treasure_data.client.ClientException; import com.treasure_data.jdbc.command.NullClientAPI; import com.treasure_data.model.Database; import com.treasure_data.model.DatabaseSummary; import com.treasure_data.model.TableSummary; public class TestTDDatabaseMetaData { @Test public void getCatalogSeparator() throws Exception { TDDatabaseMetaData metadata = new TDDatabaseMetaData( new NullClientAPI()); assertEquals(".", metadata.getCatalogSeparator()); } @Test public void getCatalogTerm() throws Exception { TDDatabaseMetaData metadata = new TDDatabaseMetaData( new NullClientAPI()); assertEquals("database", metadata.getCatalogTerm()); } @Test public void getCatalogs() throws Exception { NullClientAPI api = new NullClientAPI() { @Override public DatabaseSummary showDatabase() throws ClientException { return new DatabaseSummary("db01", 10, "created_at01", "updated_at01"); } }; { TDDatabaseMetaData metadata = new TDDatabaseMetaData(api); ResultSet rs = null; try { rs = metadata.getCatalogs(); try { rs.getString("TABLE_CAT"); fail(); } catch (Throwable t) { assertTrue(t instanceof SQLException); } assertTrue(rs.next()); assertEquals("db01", rs.getString(1)); assertEquals("db01", rs.getString("TABLE_CAT")); try { rs.getString("notfound"); } catch (Throwable t) { assertTrue(t instanceof SQLException); } assertFalse(rs.next()); } finally { if (rs != null) { rs.close(); } } } } @Test public void getColumns() throws Exception { NullClientAPI api = new NullClientAPI() { @Override public List<TableSummary> showTables() throws ClientException { List<TableSummary> list = new ArrayList<TableSummary>(); list.add(new TableSummary( new 
Database("mugadb"), "tbl01", 12344, "[[\"f01\",\"string\"],[\"f02\",\"int\"],[\"f03\",\"long\"]]", "2012-02-20T18:31:48Z", "2012-02-20T18:31:48Z")); list.add(new TableSummary(new Database("mugadb"), "tbl02", 12344, "[]", "2012-02-20T18:31:48Z", "2012-02-20T18:31:48Z")); return list; } }; { TDDatabaseMetaData metadata = new TDDatabaseMetaData(api); ResultSet rs = null; try { rs = metadata.getColumns(null, null, null, null); try { rs.getString(1); fail(); } catch (Throwable t) { assertTrue(t instanceof SQLException); } assertTrue(rs.next()); assertEquals("default", rs.getString("TABLE_CAT")); assertEquals(null, rs.getString("TABLE_SCHEM")); assertEquals("tbl01", rs.getString("TABLE_NAME")); assertEquals("f01", rs.getString("COLUMN_NAME")); assertEquals("string", rs.getString("TYPE_NAME")); assertTrue(rs.next()); assertEquals("default", rs.getString("TABLE_CAT")); assertEquals(null, rs.getString("TABLE_SCHEM")); assertEquals("tbl01", rs.getString("TABLE_NAME")); assertEquals("f02", rs.getString("COLUMN_NAME")); assertEquals("int", rs.getString("TYPE_NAME")); assertTrue(rs.next()); assertEquals("default", rs.getString("TABLE_CAT")); assertEquals(null, rs.getString("TABLE_SCHEM")); assertEquals("tbl01", rs.getString("TABLE_NAME")); assertEquals("f03", rs.getString("COLUMN_NAME")); assertEquals("long", rs.getString("TYPE_NAME")); assertFalse(rs.next()); } finally { if (rs != null) { rs.close(); } } } } @Test public void getTables() throws Exception { NullClientAPI api = new NullClientAPI() { @Override public List<TableSummary> showTables() throws ClientException { List<TableSummary> list = new ArrayList<TableSummary>(); list.add(new TableSummary( new Database("mugadb"), "tbl01", 12344, "[[\"f01\",\"string\"],[\"f02\",\"int\"],[\"f03\",\"long\"]]", "2012-02-20T18:31:48Z", "2012-02-20T18:31:48Z")); list.add(new TableSummary(new Database("mugadb"), "tbl02", 12344, "[]", "2012-02-20T18:31:48Z", "2012-02-20T18:31:48Z")); return list; } }; { TDDatabaseMetaData metadata = new 
TDDatabaseMetaData(new Database("mugadb"), api); ResultSet rs = null; try { rs = metadata.getTables(null, null, null, null); assertTrue(rs.next()); assertEquals("mugadb", rs.getString("TABLE_CAT")); assertEquals(null, rs.getString("TABLE_SCHEM")); assertEquals("tbl01", rs.getString("TABLE_NAME")); assertEquals("TABLE", rs.getString("TABLE_TYPE")); assertTrue(rs.next()); assertEquals("mugadb", rs.getString("TABLE_CAT")); assertEquals(null, rs.getString("TABLE_SCHEM")); assertEquals("tbl02", rs.getString("TABLE_NAME")); assertEquals("TABLE", rs.getString("TABLE_TYPE")); assertFalse(rs.next()); } finally { if (rs != null) { rs.close(); } } } } }
package water.rapids; // Since we have a single key field in H2O (different to data.table), bmerge() becomes a lot simpler (no // need for recursion through join columns) with a downside of transfer-cost should we not need all the key. import water.*; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.NewChunk; import water.fvec.Vec; import static water.rapids.SingleThreadRadixOrder.getSortedOXHeaderKey; import water.util.ArrayUtils; import water.util.Log; import java.util.Arrays; public class BinaryMerge extends DTask<BinaryMerge> { long _numRowsInResult=0; // returned to caller, so not transient int _chunkSizes[]; // TODO: only _chunkSizes.length is needed by caller, so return that length only double _timings[]; transient long _retFirst[/*n2GB*/][]; // The row number of the first right table's index key that matches transient long _retLen[/*n2GB*/][]; // How many rows does it match to? transient byte _leftKey[/*n2GB*/][/*i mod 2GB * _keySize*/]; transient byte _rightKey[][]; transient long _leftOrder[/*n2GB*/][/*i mod 2GB * _keySize*/]; transient long _rightOrder[][]; transient boolean _oneToManyMatch = false; // does any left row match to more than 1 right row? If not, can allocate and loop more efficiently, and mark the resulting key'd frame with a 'unique' index. // TODO: implement int _leftFieldSizes[], _rightFieldSizes[]; // the widths of each column in the key transient int _leftKeyNCol, _rightKeyNCol; // the number of columns in the key i.e. 
length of _leftFieldSizes and _rightFieldSizes transient int _leftKeySize, _rightKeySize; // the total width in bytes of the key, sum of field sizes transient int _numJoinCols; transient long _leftN, _rightN; transient long _leftBatchSize, _rightBatchSize; Frame _leftFrame, _rightFrame; transient long _perNodeNumRightRowsToFetch[]; transient long _perNodeNumLeftRowsToFetch[]; int _leftMSB, _rightMSB; boolean _allLeft, _allRight; BinaryMerge(Frame leftFrame, Frame rightFrame, int leftMSB, int rightMSB, int leftFieldSizes[], int rightFieldSizes[], boolean allLeft) { // In X[Y], 'left'=i and 'right'=x _leftFrame = leftFrame; _rightFrame = rightFrame; _leftMSB = leftMSB; _rightMSB = rightMSB; _leftFieldSizes = leftFieldSizes; _rightFieldSizes = rightFieldSizes; _allLeft = allLeft; _allRight = false; // TODO: pass through // TODO: set 2 Frame and 2 int[] to NULL at the end of compute2 to save some traffic back, but should be small and insignificant } @Override protected void compute2() { _timings = new double[12]; long t0 = System.nanoTime(); SingleThreadRadixOrder.OXHeader leftSortedOXHeader = DKV.getGet(getSortedOXHeaderKey(/*left=*/true, _leftMSB)); if (leftSortedOXHeader == null) { if (_allRight) throw H2O.unimpl(); // TODO pass through _allRight and implement tryComplete(); return; } SingleThreadRadixOrder.OXHeader rightSortedOXHeader = DKV.getGet(getSortedOXHeaderKey(/*left=*/false, _rightMSB)); if (rightSortedOXHeader == null) { if (_allLeft == false) { tryComplete(); return; } rightSortedOXHeader = new SingleThreadRadixOrder.OXHeader(0, 0, 0); // enables general case code to run below without needing new special case code } // both left and right MSB have some data to match _leftBatchSize = leftSortedOXHeader._batchSize; _rightBatchSize = rightSortedOXHeader._batchSize; _perNodeNumRightRowsToFetch = new long[H2O.CLOUD.size()]; _perNodeNumLeftRowsToFetch = new long[H2O.CLOUD.size()]; // get left batches _leftKey = new byte[leftSortedOXHeader._nBatch][]; 
_leftOrder = new long[leftSortedOXHeader._nBatch][]; _retFirst = new long[leftSortedOXHeader._nBatch][]; _retLen = new long[leftSortedOXHeader._nBatch][]; for (int b=0; b<leftSortedOXHeader._nBatch; ++b) { MoveByFirstByte.OXbatch oxLeft = DKV.getGet(MoveByFirstByte.getSortedOXbatchKey(/*left=*/true, _leftMSB, b)); _leftKey[b] = oxLeft._x; _leftOrder[b] = oxLeft._o; _retFirst[b] = new long[oxLeft._o.length]; _retLen[b] = new long[oxLeft._o.length]; } _leftN = leftSortedOXHeader._numRows; // get right batches _rightKey = new byte[rightSortedOXHeader._nBatch][]; _rightOrder = new long[rightSortedOXHeader._nBatch][]; for (int b=0; b<rightSortedOXHeader._nBatch; ++b) { MoveByFirstByte.OXbatch oxRight = DKV.getGet(MoveByFirstByte.getSortedOXbatchKey(/*left=*/false, _rightMSB, b)); _rightKey[b] = oxRight._x; _rightOrder[b] = oxRight._o; } _rightN = rightSortedOXHeader._numRows; _leftKeyNCol = _leftFieldSizes.length; _rightKeyNCol = _rightFieldSizes.length; _leftKeySize = ArrayUtils.sum(_leftFieldSizes); _rightKeySize = ArrayUtils.sum(_rightFieldSizes); _numJoinCols = Math.min(_leftKeyNCol, _rightKeyNCol); _timings[0] = (System.nanoTime() - t0) / 1e9; if ((_leftN != 0 || _allRight) && (_rightN != 0 || _allLeft)) { t0 = System.nanoTime(); bmerge_r(-1, _leftN, -1, _rightN); _timings[1] = (System.nanoTime() - t0) / 1e9; if (_numRowsInResult > 0) createChunksInDKV(); } //null out members before returning to calling node tryComplete(); } private int keycmp(byte x[][], long xi, byte y[][], long yi) { // TO DO - faster way closer to CPU like batches of long compare, maybe. byte xByte=0, yByte=0; xi *= _leftKeySize; yi *= _rightKeySize; // x[] and y[] are len keys. // TO DO: rationalize x and y being chunked into 2GB pieces. 
Take x[0][] and y[0][] outside loop / function // TO DO: switch to use keycmp_sameShape() for common case of all(leftFieldSizes == rightFieldSizes), although, skipping to current column will // help save repeating redundant work and saving the outer for() loop and one if() may not be worth it. int i=0, xlen=0, ylen=0, diff=0; while (i<_numJoinCols && xlen==0) { // TO DO: pass i in to start at a later key column, when known xlen = _leftFieldSizes[i]; ylen = _rightFieldSizes[i]; if (xlen!=ylen) { while (xlen>ylen && x[0][(int)xi]==0) { xi++; xlen while (ylen>xlen && y[0][(int)yi]==0) { yi++; ylen if (xlen!=ylen) return (xlen - ylen); } while (xlen>0 && (xByte=x[0][(int)xi])==(yByte=y[0][(int)yi])) { xi++; yi++; xlen i++; } return (xByte & 0xFF) - (yByte & 0xFF); // Same return value as strcmp in C. <0 => xi<yi } private void bmerge_r(long lLowIn, long lUppIn, long rLowIn, long rUppIn) { // TO DO: parallel each of the 256 bins long lLow = lLowIn, lUpp = lUppIn, rLow = rLowIn, rUpp = rUppIn; long mid, tmpLow, tmpUpp; long lr = lLow + (lUpp - lLow) / 2; // i.e. (lLow+lUpp)/2 but being robust to one day in the future someone somewhere overflowing long; e.g. 32 exabytes of 1-column ints while (rLow < rUpp - 1) { mid = rLow + (rUpp - rLow) / 2; int cmp = keycmp(_leftKey, lr, _rightKey, mid); // -1, 0 or 1, like strcmp if (cmp < 0) { rUpp = mid; } else if (cmp > 0) { rLow = mid; } else { // rKey == lKey including NA == NA // branch mid to find start and end of this group in this column // TO DO?: not if mult=first|last and col<ncol-1 tmpLow = mid; tmpUpp = mid; while (tmpLow < rUpp - 1) { mid = tmpLow + (rUpp - tmpLow) / 2; if (keycmp(_leftKey, lr, _rightKey, mid) == 0) tmpLow = mid; else rUpp = mid; } while (rLow < tmpUpp - 1) { mid = rLow + (tmpUpp - rLow) / 2; if (keycmp(_leftKey, lr, _rightKey, mid) == 0) tmpUpp = mid; else rLow = mid; } break; } } // rLow and rUpp now surround the group in the right table. 
// The left table key may (unusually, and not recommended, but sometimes needed) be duplicated. // Linear search outwards from left row. Most commonly, the first test shows this left key is unique. // This saves i) re-finding the matching rows in the right for all the dup'd left and ii) recursive bounds logic gets awkward if other left rows can find the same right rows // Related to 'allow.cartesian' in data.table. // TO DO: if index stores attribute that it is unique then we don't need this step. However, each of these while()s would run at most once in that case, which may not be worth optimizing. tmpLow = lr + 1; while (tmpLow<lUpp && keycmp(_leftKey, tmpLow, _leftKey, lr)==0) tmpLow++; lUpp = tmpLow; tmpUpp = lr - 1; while (tmpUpp>lLow && keycmp(_leftKey, tmpUpp, _leftKey, lr)==0) tmpUpp lLow = tmpUpp; // lLow and lUpp now surround the group in the left table. If left key is unique then lLow==lr-1 and lUpp==lr+1. long len = rUpp - rLow - 1; // if value found, rLow and rUpp surround it, unlike standard binary search where rLow falls on it if (len > 0 || _allLeft) { if (len > 1) _oneToManyMatch = true; _numRowsInResult += Math.max(1,len) * (lUpp-lLow-1); // 1 for NA row when _allLeft for (long j = lLow + 1; j < lUpp; j++) { // usually iterates once only for j=lr, but more than once if there are dup keys in left table { // may be a range of left dup'd join-col values, but we need to fetch each one since the left non-join columns are likely not dup'd and may be the reason for the cartesian join long globalRowNumber = _leftOrder[(int)(j / _leftBatchSize)][(int)(j % _leftBatchSize)]; int chkIdx = _leftFrame.anyVec().elem2ChunkIdx(globalRowNumber); //binary search in espc H2ONode node = _leftFrame.anyVec().chunkKey(chkIdx).home_node(); //bit mask ops on the vec key _perNodeNumLeftRowsToFetch[node.index()]++; // the key is the same within this left dup range, but still need to fetch left non-join columns } if (len==0) continue; // _allLeft must be true if len==0 int jb 
= (int)(j/_leftBatchSize); int jo = (int)(j%_leftBatchSize); _retFirst[jb][jo] = rLow + 2; // rLow surrounds row, so +1. Then another +1 for 1-based row-number. 0 (default) means nomatch and saves extra set to -1 for no match. Could be significant in large edge cases by not needing to write at all to _retFirst if it has no matches. _retLen[jb][jo] = len; //StringBuilder sb = new StringBuilder(); //sb.append("Left row " + _leftOrder[jb][jo] + " matches to " + _retLen[jb][jo] + " right rows: "); long a = _retFirst[jb][jo] -1; for (int i=0; i<_retLen[jb][jo]; i++) { long loc = a+i; //sb.append(_rightOrder[(int)(loc / _rightBatchSize)][(int)(loc % _rightBatchSize)] + " "); long globalRowNumber = _rightOrder[(int)(loc / _rightBatchSize)][(int)(loc % _rightBatchSize)]; int chkIdx = _rightFrame.anyVec().elem2ChunkIdx(globalRowNumber); //binary search in espc H2ONode node = _rightFrame.anyVec().chunkKey(chkIdx).home_node(); //bit mask ops on the vec key _perNodeNumRightRowsToFetch[node.index()]++; // just count the number per node. So we can allocate arrays precisely up front, and also to return early to use in case of memory errors or other distribution problems } //Log.info(sb); } } // TO DO: check assumption that retFirst and retLength are initialized to 0, for case of no match // Now branch (and TO DO in parallel) to merge below and merge above if (lLow > lLowIn && rLow > rLowIn) bmerge_r(lLowIn, lLow + 1, rLowIn, rLow+1); if (lUpp < lUppIn && rUpp < rUppIn) bmerge_r(lUpp-1, lUppIn, rUpp-1, rUppIn); // We don't feel tempted to reduce the global _ansN here and make a global frame, // since we want to process each MSB l/r combo individually without allocating them all. 
// Since recursive, no more code should be here (it would run too much)
}

// Materializes the merged result for this (leftMSB, rightMSB) combination:
// 1) allocates per-node batched request arrays sized from _perNodeNum{Left,Right}RowsToFetch,
// 2) walks _retFirst/_retLen to record which global row numbers to fetch from which node,
// 3) fires one GetRawRemoteRows RPC per node per batch (right then left),
// 4) copies the fetched doubles into frameLikeChunks and stores each compressed chunk in the
//    DKV under a key built by getKeyForMSBComboPerCol.
// NOTE(review): only numeric (double) columns are handled — see the inline TODOs; date/UUID/
// string columns would be silently read via atd(). Confirm callers only pass numeric frames.
private void createChunksInDKV() {
  // Collect all matches
  // Create the final frame (part) for this MSB combination
  // Cannot use a List<Long> as that's restricted to 2Bn items and also isn't an Iced datatype
  long t0 = System.nanoTime();
  // [node][batch][row]: global row numbers to fetch, and where each fetched value lands in the result
  long perNodeRightRows[][][] = new long[H2O.CLOUD.size()][][];
  long perNodeRightRowsFrom[][][] = new long[H2O.CLOUD.size()][][];
  long perNodeRightLoc[] = new long[H2O.CLOUD.size()];
  long perNodeLeftRows[][][] = new long[H2O.CLOUD.size()][][];
  long perNodeLeftRowsFrom[][][] = new long[H2O.CLOUD.size()][][];
  long perNodeLeftRowsRepeat[][][] = new long[H2O.CLOUD.size()][][];  // how many result rows each left row is recycled over (>=1)
  long perNodeLeftLoc[] = new long[H2O.CLOUD.size()];

  // Allocate memory to split this MSB combn's left and right matching rows into contiguous batches sent to the nodes they reside on
  int batchSize = (int) _leftBatchSize;  // TODO: what's the right batch size here. And why is _leftBatchSize type long?
  for (int i = 0; i < H2O.CLOUD.size(); i++) {
    if (_perNodeNumRightRowsToFetch[i] > 0) {
      int nbatch = (int) ((_perNodeNumRightRowsToFetch[i] - 1) / batchSize + 1);  // TODO: wrap in class to avoid this boiler plate
      int lastSize = (int) (_perNodeNumRightRowsToFetch[i] - (nbatch - 1) * batchSize);  // final (possibly short) batch
      assert nbatch >= 1;
      assert lastSize > 0;
      perNodeRightRows[i] = new long[nbatch][];
      perNodeRightRowsFrom[i] = new long[nbatch][];
      int b;
      for (b = 0; b < nbatch - 1; b++) {
        perNodeRightRows[i][b] = new long[batchSize];  // TO DO?: use MemoryManager.malloc()
        perNodeRightRowsFrom[i][b] = new long[batchSize];
      }
      perNodeRightRows[i][b] = new long[lastSize];
      perNodeRightRowsFrom[i][b] = new long[lastSize];
    }
    if (_perNodeNumLeftRowsToFetch[i] > 0) {
      int nbatch = (int) ((_perNodeNumLeftRowsToFetch[i] - 1) / batchSize + 1);  // TODO: wrap in class to avoid this boiler plate
      int lastSize = (int) (_perNodeNumLeftRowsToFetch[i] - (nbatch - 1) * batchSize);
      assert nbatch >= 1;
      assert lastSize > 0;
      perNodeLeftRows[i] = new long[nbatch][];
      perNodeLeftRowsFrom[i] = new long[nbatch][];
      perNodeLeftRowsRepeat[i] = new long[nbatch][];
      int b;
      for (b = 0; b < nbatch - 1; b++) {
        perNodeLeftRows[i][b] = new long[batchSize];  // TO DO?: use MemoryManager.malloc()
        perNodeLeftRowsFrom[i][b] = new long[batchSize];
        perNodeLeftRowsRepeat[i][b] = new long[batchSize];
      }
      perNodeLeftRows[i][b] = new long[lastSize];
      perNodeLeftRowsFrom[i][b] = new long[lastSize];
      perNodeLeftRowsRepeat[i][b] = new long[lastSize];
    }
  }
  _timings[2] = (System.nanoTime() - t0) / 1e9;
  t0 = System.nanoTime();

  // Loop over _retFirst and _retLen and populate the batched requests for each node helper
  // _retFirst and _retLen are the same shape
  long resultLoc=0;  // sweep upwards through the final result, filling it in
  long leftLoc=-1;  // sweep through left table along the sorted row locations.
  // TODO: hop back to original order here for [] syntax.
  for (int jb=0; jb<_retFirst.length; ++jb) {  // jb = j batch
    for (int jo=0; jo<_retFirst[jb].length; ++jo) {  // jo = j offset
      leftLoc++;  // to save jb*_retFirst[0].length + jo;
      long f = _retFirst[jb][jo];  // 1-based first matching right row; 0 means no match
      long l = _retLen[jb][jo];    // number of matching right rows
      if (f==0) {
        // left row matches to no right row
        assert l == 0;  // doesn't have to be 0 (could be 1 already if allLeft==true) but currently it should be, so check it
        if (!_allLeft) continue;
        // now insert the left row once and NA for the right columns i.e. left outer join
      }
      { // new scope so 'row' can be declared in the for() loop below and registerized (otherwise 'already defined in this scope' in that scope)
        // Fetch the left rows and mark the contiguous from-ranges each left row should be recycled over
        long row = _leftOrder[(int)(leftLoc / _leftBatchSize)][(int)(leftLoc % _leftBatchSize)];
        Vec v = _leftFrame.anyVec();
        int chkIdx = v.elem2ChunkIdx(row); //binary search in espc
        H2ONode node = v.chunkKey(chkIdx).home_node(); //bit mask ops on the vec key
        long pnl = perNodeLeftLoc[node.index()]++;  // pnl = per node location
        perNodeLeftRows[node.index()][(int)(pnl/batchSize)][(int)(pnl%batchSize)] = row;  // ask that node for global row number row
        perNodeLeftRowsFrom[node.index()][(int)(pnl/batchSize)][(int)(pnl%batchSize)] = resultLoc;  // TODO: could store the batch and offset separately? If it will be used to assign into a Vec, then that's have different shape/espc so the location is better.
        perNodeLeftRowsRepeat[node.index()][(int)(pnl/batchSize)][(int)(pnl%batchSize)] = Math.max(1,l);
      }
      if (f==0) { resultLoc++; continue; }  // _allLeft no-match row: occupies one result row (stays NA on the right)
      assert l > 0;
      for (int r=0; r<l; r++) {
        long loc = f+r-1;  // -1 because these are 0-based where 0 means no-match and 1 refers to the first row
        long row = _rightOrder[(int)(loc / _rightBatchSize)][(int)(loc % _rightBatchSize)];  // TODO: could take / and % outside loop in cases where it doesn't span a batch boundary
        // find the owning node for the row, using local operations here
        Vec v = _rightFrame.anyVec();
        int chkIdx = v.elem2ChunkIdx(row); //binary search in espc
        H2ONode node = v.chunkKey(chkIdx).home_node(); //bit mask ops on the vec key
        long pnl = perNodeRightLoc[node.index()]++;  // pnl = per node location
        perNodeRightRows[node.index()][(int)(pnl/batchSize)][(int)(pnl%batchSize)] = row;  // ask that node for global row number row
        perNodeRightRowsFrom[node.index()][(int)(pnl/batchSize)][(int)(pnl%batchSize)] = resultLoc++;  // TODO: could store the batch and offset separately? If it will be used to assign into a Vec, then that's have different shape/espc so the location is better.
      }
    }
  }
  _timings[3] = (System.nanoTime() - t0) / 1e9;
  t0 = System.nanoTime();

  // Create the chunks for the final frame from this MSB pair.
  batchSize = 1<<22;  // number of rows per chunk. 32MB for doubles, 64MB for UUIDs to fit into 256MB DKV Value limit
  int nbatch = (int) (_numRowsInResult-1)/batchSize +1;  // TODO: wrap in class to avoid this boiler plate
  int lastSize = (int)(_numRowsInResult - (nbatch-1)*batchSize);
  assert nbatch >= 1;
  assert lastSize > 0;
  _chunkSizes = new int[nbatch];
  int _numLeftCols = _leftFrame.numCols();
  // Join columns appear once in the result, taken from the left frame
  int _numColsInResult = _leftFrame.numCols() + _rightFrame.numCols() - _numJoinCols;
  double[][][] frameLikeChunks = new double[_numColsInResult][nbatch][];  //TODO: compression via int types
  for (int col=0; col<_numColsInResult; col++) {
    int b;
    for (b = 0; b < nbatch - 1; b++) {
      frameLikeChunks[col][b] = new double[batchSize];
      Arrays.fill(frameLikeChunks[col][b], Double.NaN);  // NA by default to save filling with NA for nomatches when allLeft
      _chunkSizes[b] = batchSize;
    }
    frameLikeChunks[col][b] = new double[lastSize];
    Arrays.fill(frameLikeChunks[col][b], Double.NaN);
    _chunkSizes[b] = lastSize;
  }
  _timings[4] = (System.nanoTime() - t0) / 1e9;
  t0 = System.nanoTime();

  // Launch all fetch RPCs first (both sides, every node, every batch), then collect below.
  RPC<GetRawRemoteRows> grrrsRite[][] = new RPC[H2O.CLOUD.size()][];
  RPC<GetRawRemoteRows> grrrsLeft[][] = new RPC[H2O.CLOUD.size()][];
  for (H2ONode node : H2O.CLOUD._memary) {
    int ni = node.index();
    int bUppRite = perNodeRightRows[ni] == null ? 0 : perNodeRightRows[ni].length;
    int bUppLeft = perNodeLeftRows[ni] == null ? 0 : perNodeLeftRows[ni].length;
    grrrsRite[ni] = new RPC[bUppRite];
    grrrsLeft[ni] = new RPC[bUppLeft];
    for (int b = 0; b < bUppRite; b++)
      grrrsRite[ni][b] = new RPC<>(node, new GetRawRemoteRows(_rightFrame, perNodeRightRows[ni][b])).call();
    for (int b = 0; b < bUppLeft; b++)
      grrrsLeft[ni][b] = new RPC<>(node, new GetRawRemoteRows(_leftFrame, perNodeLeftRows[ni][b])).call();
  }
  // Collect the results and scatter them into frameLikeChunks at the positions recorded above.
  for (H2ONode node : H2O.CLOUD._memary) {
    int ni = node.index();
    int bUppRite = perNodeRightRows[ni] == null ? 0 : perNodeRightRows[ni].length;
    int bUppLeft = perNodeLeftRows[ni] == null ? 0 : perNodeLeftRows[ni].length;
    for (int b = 0; b < bUppRite; b++) {
      t0 = System.nanoTime();
      GetRawRemoteRows grrr = grrrsRite[ni][b].get();  // blocks until that batch arrives
      _timings[5] += (System.nanoTime() - t0) / 1e9;
      _timings[6] += grrr.timeTaken;
      t0 = System.nanoTime();
      assert (grrr._rows == null);  // _rows is nulled on the remote before shipping back
      double[][]/*Chunk[]*/ chks = grrr._chk;
      for (int col = 0; col < _numColsInResult - _numLeftCols; col++) {
        // TODO: currently join columns must be the first _numJoinCols. Relax.
        double chk[] = chks[_numJoinCols + col];
        for (int row = 0; row < chk.length /*.len()*/; row++) {
          long actualRowInMSBCombo = perNodeRightRowsFrom[ni][b][row];
          int whichChunk = (int) (actualRowInMSBCombo / batchSize);
          int offset = (int) (actualRowInMSBCombo % batchSize);
          frameLikeChunks[_numLeftCols + col][whichChunk][offset] = chk[row]; // colForBatch.atd(row); TODO: this only works for numeric columns (not for date, UUID, strings, etc.)
        }
      }
      _timings[7] += (System.nanoTime() - t0) / 1e9;
    }
    for (int b = 0; b < bUppLeft; b++) {
      t0 = System.nanoTime();
      GetRawRemoteRows grrr = grrrsLeft[ni][b].get();
      _timings[8] += (System.nanoTime() - t0) / 1e9;
      _timings[9] += grrr.timeTaken;
      t0 = System.nanoTime();
      assert (grrr._rows == null);
      double[][]/*Chunk[]*/ chks = grrr._chk;
      for (int col = 0; col < chks.length; ++col) {
        double chk[] = chks[col];
        for (int row = 0; row < chk.length; row++) {
          long actualRowInMSBCombo = perNodeLeftRowsFrom[ni][b][row];
          // Left values are recycled over all the right rows they matched (cartesian expansion)
          for (int rep = 0; rep < perNodeLeftRowsRepeat[ni][b][row]; rep++) {
            long a = actualRowInMSBCombo + rep;
            int whichChunk = (int) (a / batchSize);  // TO DO: loop into batches to save / and % for each repeat and still cater for crossing multiple batch boundaries
            int offset = (int) (a % batchSize);
            frameLikeChunks[col][whichChunk][offset] = chk[row]; // colForBatch.atd(row); TODO: this only works for numeric columns (not for date, UUID, strings, etc.)
          }
        }
      }
    }
    _timings[10] += (System.nanoTime() - t0) / 1e9;
  }
  t0 = System.nanoTime();

  // compress all chunks and store them
  Futures fs = new Futures();
  for (int col=0; col<_numColsInResult; col++) {
    for (int b = 0; b < nbatch; b++) {
      Chunk ck = new NewChunk(frameLikeChunks[col][b]).compress();
      DKV.put(getKeyForMSBComboPerCol(/*_leftFrame, _rightFrame,*/ _leftMSB, _rightMSB, col, b), ck, fs, true);
      frameLikeChunks[col][b]=null; //free mem as early as possible (it's now in the store)
    }
  }
  _timings[11] = (System.nanoTime() - t0) / 1e9;
  fs.blockForPending();
}

// Deterministic DKV key for one chunk of the merged result: identified by the
// (leftMSB, rightMSB) pair, the result column, and the batch (chunk) number.
// Homed on the node owning rightMSB — see the TODO about homing locally instead.
static Key getKeyForMSBComboPerCol(/*Frame leftFrame, Frame rightFrame,*/ int leftMSB, int rightMSB, int col /*final table*/, int batch) {
  return Key.make("__binary_merge__Chunk_for_col" + col + "_batch" + batch
          // + rightFrame._key.toString() + "_joined_with" + leftFrame._key.toString()
          + "_leftMSB"+leftMSB + "_rightMSB" + rightMSB,
          (byte)1, Key.HIDDEN_USER_KEY, false, MoveByFirstByte.ownerOfMSB(rightMSB)
  ); //TODO home locally
}

// Distributed task: runs on the node that homes the requested rows and reads, for every
// column of _fr, the double value at each requested global row number. Ships back _chk
// (per-column arrays aligned with the request order) and nulls out _rows to save wire size.
class GetRawRemoteRows extends DTask<GetRawRemoteRows> {
  double[/*col*/][] _chk; //null on the way to remote node, non-null on the way back
  long[/*rows*/] _rows; //which rows to fetch from remote node, non-null on the way to remote, null on the way back
  double timeTaken;  // remote-side elapsed seconds, accumulated into _timings[6]/_timings[9] by the caller
  Frame _fr;
  GetRawRemoteRows(Frame fr, long[] rows) {
    _rows = rows;
    _fr = fr;
    _priority = nextThrPriority();  // bump locally AND ship this priority to the worker where the priority() getter will query it
  }
  @Override public byte priority() { return _priority; }
  private byte _priority;
  // Raise the priority, so that if a thread blocks here, we are guaranteed
  // the task completes (perhaps using a higher-priority thread from the
  // upper thread pools). This prevents thread deadlock.
  // Remember that this gets queried on both the caller and the sender, of course.
  @Override protected void compute2() {
    assert(_rows!=null);
    assert(_chk ==null);
    long t0 = System.nanoTime();
    _chk = new double[_fr.numCols()][_rows.length];
    // Resolve each requested global row to (chunk index, offset within chunk) once, up front
    int cidx[] = new int[_rows.length];
    int offset[] = new int[_rows.length];
    Vec anyVec = _fr.anyVec();
    for (int row=0; row<_rows.length; row++) {
      cidx[row] = anyVec.elem2ChunkIdx(_rows[row]);  // binary search of espc array. TODO: sort input row numbers to avoid
      offset[row] = (int)(_rows[row] - anyVec.espc()[cidx[row]]);
    }
    Chunk c[] = new Chunk[anyVec.nChunks()];
    for (int col=0; col<_fr.numCols(); col++) {
      Vec v = _fr.vec(col);
      // Only home chunks are materialized; requested rows are assumed to be homed here — TODO confirm
      for (int i=0; i<c.length; i++) c[i] = v.chunkKey(i).home() ? v.chunkForChunkIdx(i) : null;
      for (int row=0; row<_rows.length; row++) {
        _chk[col][row] = c[cidx[row]].atd(offset[row]);
      }
    }
    // tell remote node to fill up Chunk[/*batch*/][/*rows*/]
    // perNodeRows[node] has perNodeRows[node].length batches of row numbers to fetch
    _rows=null;
    assert(_chk !=null);
    timeTaken = (System.nanoTime() - t0) / 1e9;
    tryComplete();
  }
}
}
// This file is part of the Kaltura Collaborative Media Suite which allows users // to do with audio, video, and animation what Wiki platfroms allow them to do with // text. // This program is free software: you can redistribute it and/or modify // published by the Free Software Foundation, either version 3 of the // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // @ignore package com.kaltura.client; import com.kaltura.client.utils.request.ConnectionConfiguration; import com.kaltura.client.types.BaseResponseProfile; /** * This class was generated using generate.php * against an XML schema provided by Kaltura. * * MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN. */ @SuppressWarnings("serial") public class Client extends ClientBase { public Client(ConnectionConfiguration config) { super(config); this.setClientTag("java:19-12-11"); this.setApiVersion("15.12.0"); this.clientConfiguration.put("format", 1); // JSON } /** * @param clientTag */ public void setClientTag(String clientTag){ this.clientConfiguration.put("clientTag", clientTag); } /** * @return String */ public String getClientTag(){ if(this.clientConfiguration.containsKey("clientTag")){ return(String) this.clientConfiguration.get("clientTag"); } return null; } /** * @param apiVersion */ public void setApiVersion(String apiVersion){ this.clientConfiguration.put("apiVersion", apiVersion); } /** * @return String */ public String getApiVersion(){ if(this.clientConfiguration.containsKey("apiVersion")){ return(String) this.clientConfiguration.get("apiVersion"); } return null; } /** * @param partnerId Impersonated partner id */ public void setPartnerId(Integer partnerId){ this.requestConfiguration.put("partnerId", partnerId); } /** * Impersonated partner id * * @return Integer */ public Integer getPartnerId(){ if(this.requestConfiguration.containsKey("partnerId")){ return(Integer) this.requestConfiguration.get("partnerId"); } return 0; 
} /** * @param ks Kaltura API session */ public void setKs(String ks){ this.requestConfiguration.put("ks", ks); } /** * Kaltura API session * * @return String */ public String getKs(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param sessionId Kaltura API session */ public void setSessionId(String sessionId){ this.requestConfiguration.put("ks", sessionId); } /** * Kaltura API session * * @return String */ public String getSessionId(){ if(this.requestConfiguration.containsKey("ks")){ return(String) this.requestConfiguration.get("ks"); } return null; } /** * @param responseProfile Response profile - this attribute will be automatically unset after every API call. */ public void setResponseProfile(BaseResponseProfile responseProfile){ this.requestConfiguration.put("responseProfile", responseProfile); } /** * Response profile - this attribute will be automatically unset after every API call. * * @return BaseResponseProfile */ public BaseResponseProfile getResponseProfile(){ if(this.requestConfiguration.containsKey("responseProfile")){ return(BaseResponseProfile) this.requestConfiguration.get("responseProfile"); } return null; } }
package com.wizzardo.tools.collections.flow; import com.wizzardo.tools.collections.flow.flows.FlowProcessOnEnd; import org.junit.Assert; import org.junit.Test; import java.util.*; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; public class FlowTest { @Test public void test_grouping_1() { List<ArrayList<Integer>> result = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer it) { return it % 2 == 0; } }) .flatMap(new Mapper<FlowGroup<Boolean, Integer>, FlowProcessOnEnd<?, ArrayList<Integer>>>() { @Override public FlowProcessOnEnd<?, ArrayList<Integer>> map(FlowGroup<Boolean, Integer> group) { return group.toList(); } }) .toSortedList(new Comparator<List<Integer>>() { @Override public int compare(List<Integer> o1, List<Integer> o2) { return o1.size() < o2.size() ? -1 : (o1.size() == o2.size() ? 0 : 1); } }).get(); Assert.assertEquals(2, result.size()); Assert.assertEquals(1, result.get(0).size()); Assert.assertEquals(Integer.valueOf(2), result.get(0).get(0)); Assert.assertEquals(2, result.get(1).size()); Assert.assertEquals(Integer.valueOf(1), result.get(1).get(0)); Assert.assertEquals(Integer.valueOf(3), result.get(1).get(1)); } @Test public void test_grouping_2() { List<ArrayList<Integer>> result = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer it) { return it % 2 == 0; } }) .filter(new Filter<FlowGroup<Boolean, Integer>>() { @Override public boolean allow(FlowGroup<Boolean, Integer> group) { return group.getKey(); } }) .flatMap(new Mapper<FlowGroup<Boolean, Integer>, FlowProcessOnEnd<?, ArrayList<Integer>>>() { int counter = 0; @Override public FlowProcessOnEnd<?, ArrayList<Integer>> map(FlowGroup<Boolean, Integer> group) { Assert.assertEquals("should be executed only once", 1, ++counter); return group.toList(); } }) .toSortedList(new Comparator<List<Integer>>() { @Override public int compare(List<Integer> o1, List<Integer> o2) { 
return o1.size() < o2.size() ? -1 : (o1.size() == o2.size() ? 0 : 1); } }) .get(); Assert.assertEquals(1, result.size()); Assert.assertEquals(1, result.get(0).size()); Assert.assertEquals(Integer.valueOf(2), result.get(0).get(0)); } @Test public void test_grouping_3() { List<Integer> result = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer it) { return it % 2 == 0; } }) .flatMap(new Mapper<FlowGroup<Boolean, Integer>, FlowProcessOnEnd<?, Integer>>() { @Override public FlowProcessOnEnd<?, Integer> map(FlowGroup<Boolean, Integer> group) { return group.first(); } }) .toSortedList() .get(); Assert.assertEquals(2, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0)); Assert.assertEquals(Integer.valueOf(2), result.get(1)); } @Test public void test_grouping_4() { final AtomicInteger counter = new AtomicInteger(); Integer result = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer it) { counter.incrementAndGet(); return it % 2 == 0; } }) .flatMap(new Mapper<FlowGroup<Boolean, Integer>, FlowProcessOnEnd<?, Integer>>() { @Override public FlowProcessOnEnd<?, Integer> map(FlowGroup<Boolean, Integer> group) { return group.first(); } }) .first() .get(); Assert.assertEquals(Integer.valueOf(1), result); Assert.assertEquals(1, counter.get()); } static class Person { final String name; final int age; final long salary; Person(String name, int age, long salary) { this.name = name; this.age = age; this.salary = salary; } } @Test public void test_grouping_5() { Map<Integer, Map<Long, List<Person>>> result = Flow.of( new Person("Paul", 24, 20000), new Person("Mark", 24, 30000), new Person("Will", 28, 28000), new Person("William", 28, 28000) ) .groupBy(new Mapper<Person, Integer>() { @Override public Integer map(Person person) { return person.age; } }) .toMap(new Mapper<FlowGroup<Integer, Person>, Map<Long, List<Person>>>() { @Override public Map<Long, List<Person>> 
map(FlowGroup<Integer, Person> ageGroup) { return ageGroup .groupBy(new Mapper<Person, Long>() { @Override public Long map(Person person) { return person.salary; } }) .toMap(new Mapper<FlowGroup<Long, Person>, List<Person>>() { @Override public List<Person> map(FlowGroup<Long, Person> salaryGroup) { return salaryGroup.toList().get(); } }); } }); Assert.assertEquals(2, result.size()); Assert.assertEquals(2, result.get(24).size()); Assert.assertEquals(1, result.get(24).get(20000l).size()); Assert.assertEquals("Paul", result.get(24).get(20000l).get(0).name); Assert.assertEquals(2, result.get(24).size()); Assert.assertEquals(1, result.get(24).get(30000l).size()); Assert.assertEquals("Mark", result.get(24).get(30000l).get(0).name); Assert.assertEquals(1, result.get(28).size()); Assert.assertEquals(2, result.get(28).get(28000l).size()); Assert.assertEquals("Will", result.get(28).get(28000l).get(0).name); Assert.assertEquals("William", result.get(28).get(28000l).get(1).name); } @Test public void test_grouping_6() { List<ArrayList<Map<String, List<Person>>>> result = Flow.of( new Person("Paul", 24, 20000), new Person("Mark", 24, 30000), new Person("Will", 28, 28000), new Person("William", 28, 28000) ) .groupBy(new Mapper<Person, Integer>() { @Override public Integer map(Person person) { return person.age; } }) .flatMap(new Mapper<FlowGroup<Integer, Person>, FlowProcessOnEnd<?, ArrayList<Map<String, List<Person>>>>>() { @Override public FlowProcessOnEnd<?, ArrayList<Map<String, List<Person>>>> map(FlowGroup<Integer, Person> integerPersonFlowGroup) { return integerPersonFlowGroup.groupBy(new Mapper<Person, Long>() { @Override public Long map(Person person) { return person.salary; } }).flatMap(new Mapper<FlowGroup<Long, Person>, FlowProcessOnEnd<?, Map<String, List<Person>>>>() { @Override public FlowProcessOnEnd<?, Map<String, List<Person>>> map(FlowGroup<Long, Person> longPersonFlowGroup) { return longPersonFlowGroup.groupBy(new Mapper<Person, String>() { @Override public 
String map(Person person) { return person.name; } }).toMapFlow(); } }).toList(); } }) .toList().get(); Assert.assertEquals(2, result.size()); Assert.assertEquals(2, result.get(0).size()); Assert.assertEquals(1, result.get(0).get(0).size()); Assert.assertEquals(1, result.get(0).get(0).get("Paul").size()); Assert.assertEquals("Paul", result.get(0).get(0).get("Paul").get(0).name); Assert.assertEquals(1, result.get(0).get(1).size()); Assert.assertEquals(1, result.get(0).get(1).get("Mark").size()); Assert.assertEquals("Mark", result.get(0).get(1).get("Mark").get(0).name); Assert.assertEquals(1, result.get(1).size()); Assert.assertEquals(2, result.get(1).get(0).size()); Assert.assertEquals(1, result.get(1).get(0).get("Will").size()); Assert.assertEquals("Will", result.get(1).get(0).get("Will").get(0).name); Assert.assertEquals(1, result.get(1).get(0).get("William").size()); Assert.assertEquals("William", result.get(1).get(0).get("William").get(0).name); } @Test public void test_sorted_list() { List<Integer> result = Flow.of(3, 2, 1).toSortedList().get(); Assert.assertEquals(3, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0)); Assert.assertEquals(Integer.valueOf(2), result.get(1)); Assert.assertEquals(Integer.valueOf(3), result.get(2)); } @Test public void test_each() { final AtomicInteger counter = new AtomicInteger(); Flow.of(1, 2, 3).each(new Consumer<Integer>() { @Override public void consume(Integer integer) { counter.incrementAndGet(); } }).execute(); Assert.assertEquals(3, counter.get()); } @Test public void test_each_2() { final AtomicInteger counter = new AtomicInteger(); Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .each(new Consumer<FlowGroup<Boolean, Integer>>() { @Override public void consume(FlowGroup<Boolean, Integer> group) { counter.incrementAndGet(); } }).execute(); Assert.assertEquals(2, counter.get()); } @Test public void test_each_3() { 
final AtomicInteger counter = new AtomicInteger(); Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .each(new Consumer<FlowGroup<Boolean, Integer>>() { @Override public void consume(FlowGroup<Boolean, Integer> group) { counter.incrementAndGet(); } }) .first() .get(); Assert.assertEquals(1, counter.get()); } @Test public void test_first() { Assert.assertEquals(Integer.valueOf(1), Flow.of(1, 2, 3).first().get()); } @Test public void test_stop_after_first() { final AtomicInteger counter = new AtomicInteger(); Assert.assertEquals(Integer.valueOf(1), Flow.of(1, 2, 3).each(new Consumer<Integer>() { @Override public void consume(Integer integer) { counter.incrementAndGet(); } }).first().get()); Assert.assertEquals(1, counter.get()); } @Test public void test_last() { Assert.assertEquals(Integer.valueOf(3), Flow.of(1, 2, 3).last().get()); } @Test public void test_min() { Assert.assertEquals(Integer.valueOf(1), Flow.of(1, 2, 3).min().get()); Assert.assertEquals(Integer.valueOf(1), Flow.of(3, 2, 1).min().get()); } @Test public void test_min_2() { Comparator<Number> comparator = new Comparator<Number>() { @Override public int compare(Number o1, Number o2) { return Integer.valueOf(o1.intValue()).compareTo(o2.intValue()); } }; Assert.assertEquals(Integer.valueOf(1), Flow.of(1, 2, 3).min(comparator).get()); Assert.assertEquals(Integer.valueOf(1), Flow.of(3, 2, 1).min(comparator).get()); } @Test public void test_max() { Assert.assertEquals(Integer.valueOf(3), Flow.of(1, 2, 3).max().get()); Assert.assertEquals(Integer.valueOf(3), Flow.of(3, 2, 1).max().get()); } @Test public void test_max_2() { Comparator<Number> comparator = new Comparator<Number>() { @Override public int compare(Number o1, Number o2) { return Integer.valueOf(o1.intValue()).compareTo(o2.intValue()); } }; Assert.assertEquals(Integer.valueOf(3), Flow.of(1, 2, 3).max(comparator).get()); Assert.assertEquals(Integer.valueOf(3), 
Flow.of(3, 2, 1).max(comparator).get()); } @Test public void test_max_3() { Assert.assertEquals(Integer.valueOf(6), Flow.of(1, 2, 3) .max() .map(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer * 2; } }).first().get() ); Assert.assertEquals(Integer.valueOf(6), Flow.of(3, 2, 1) .max() .map(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer * 2; } }).first().get()); } @Test public void test_reduce() { Assert.assertEquals(Integer.valueOf(3), Flow.of(1, 2, 3).reduce(new Reducer<Integer>() { @Override public Integer reduce(Integer a, Integer b) { return a > b ? a : b; } }).get()); Assert.assertEquals(Integer.valueOf(3), Flow.of(3, 2, 1).reduce(new Reducer<Integer>() { @Override public Integer reduce(Integer a, Integer b) { return a > b ? a : b; } }).get()); } @Test public void test_reduce2() { Assert.assertEquals(Integer.valueOf(6), Flow.of(1, 2, 3).reduce(new Reducer<Integer>() { @Override public Integer reduce(Integer a, Integer b) { return a > b ? a : b; } }).map(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer * 2; } }).first().get()); Assert.assertEquals(Integer.valueOf(6), Flow.of(3, 2, 1).reduce(new Reducer<Integer>() { @Override public Integer reduce(Integer a, Integer b) { return a > b ? 
a : b; } }).map(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer * 2; } }).first().get()); } @Test public void test_collect() { List<Integer> list = new ArrayList<Integer>(); List<Integer> result = Flow.of(1, 2, 3) .collect(list, new BiConsumer<List<Integer>, Integer>() { @Override public void consume(List<Integer> integers, Integer integer) { integers.add(integer * 2); } }) .get(); Assert.assertSame(list, result); Assert.assertEquals(3, result.size()); Assert.assertEquals(Integer.valueOf(2), result.get(0)); Assert.assertEquals(Integer.valueOf(4), result.get(1)); Assert.assertEquals(Integer.valueOf(6), result.get(2)); } @Test public void test_merge() { List<Integer> result = Flow.of(1, 2, 3, 4, 5, 6) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .merge() .toList() .get(); Assert.assertEquals(6, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0)); Assert.assertEquals(Integer.valueOf(2), result.get(1)); Assert.assertEquals(Integer.valueOf(3), result.get(2)); Assert.assertEquals(Integer.valueOf(4), result.get(3)); Assert.assertEquals(Integer.valueOf(5), result.get(4)); Assert.assertEquals(Integer.valueOf(6), result.get(5)); } @Test public void test_merge_2() { List<Integer> result = Flow.of(1, 2, 3, 4, 5, 6) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .filter(new Filter<FlowGroup<Boolean, Integer>>() { @Override public boolean allow(FlowGroup<Boolean, Integer> group) { return group.getKey(); } }) .merge() .toList() .get(); Assert.assertEquals(3, result.size()); Assert.assertEquals(Integer.valueOf(2), result.get(0)); Assert.assertEquals(Integer.valueOf(4), result.get(1)); Assert.assertEquals(Integer.valueOf(6), result.get(2)); } @Test public void test_merge_3() { List<Integer> result = Flow.of(new int[]{1, 2}, new int[]{3, 4}, new int[]{5, 6}) .merge(new 
/* Tail of a JUnit 4 test class for the fluent Flow pipeline API (class header is above this chunk).
   NOTE(review): source is whitespace-collapsed; comments below map each physical line's contents.
   This line: end of a merge test flattening int[] chunks via Flow.of(int[]); test_merge_4 (groupBy parity,
   keep even group, merge halved values); test_count; test_map (Integer -> String); start of test_filter. */ Mapper<int[], Flow<Integer>>() { @Override public Flow<Integer> map(int[] ints) { return Flow.of(ints); } }) .toList() .get(); Assert.assertEquals(6, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0)); Assert.assertEquals(Integer.valueOf(2), result.get(1)); Assert.assertEquals(Integer.valueOf(3), result.get(2)); Assert.assertEquals(Integer.valueOf(4), result.get(3)); Assert.assertEquals(Integer.valueOf(5), result.get(4)); Assert.assertEquals(Integer.valueOf(6), result.get(5)); } @Test public void test_merge_4() { List<Integer> result = Flow.of(1, 2, 3, 4, 5, 6) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .filter(new Filter<FlowGroup<Boolean, Integer>>() { @Override public boolean allow(FlowGroup<Boolean, Integer> group) { return group.getKey(); } }) .merge(new Mapper<FlowGroup<Boolean, Integer>, Flow<Integer>>() { @Override public Flow<Integer> map(FlowGroup<Boolean, Integer> group) { return group.map(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer / 2; } }); } }) .toList() .get(); Assert.assertEquals(3, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0)); Assert.assertEquals(Integer.valueOf(2), result.get(1)); Assert.assertEquals(Integer.valueOf(3), result.get(2)); } @Test public void test_count() { int result = Flow.of(1, 2, 3, 4, 5, 6).count().get(); Assert.assertEquals(6, result); } @Test public void test_map() { List<String> result = Flow.of(1, 2, 3) .map(new Mapper<Integer, String>() { @Override public String map(Integer integer) { return integer.toString(); } }).toList().get(); Assert.assertEquals(3, result.size()); Assert.assertEquals("1", result.get(0)); Assert.assertEquals("2", result.get(1)); Assert.assertEquals("3", result.get(2)); } @Test public void test_filter() { List<Integer> result = Flow.of(1, 2, 3, 4) .filter(new Filter<Integer>() { @Override public boolean allow(Integer integer) {
/* End of test_filter (keep evens); test_of_iterable / test_of_iterable_2 (count, first); test_of_iterator;
   test_do_nothing (unstarted pipeline must not pull from the iterator — every Iterator method throws);
   test_toMap with explicit group-to-list mapper; start of test_toMap_2 (groupBy then toMap()). */ return integer % 2 == 0; } }).toList().get(); Assert.assertEquals(2, result.size()); Assert.assertEquals(Integer.valueOf(2), result.get(0)); Assert.assertEquals(Integer.valueOf(4), result.get(1)); } @Test public void test_of_iterable() { List<Integer> list = new ArrayList<Integer>(); list.add(1); list.add(2); list.add(3); int result = Flow.of(list).count().get(); Assert.assertEquals(3, result); } @Test public void test_of_iterable_2() { List<Integer> list = new ArrayList<Integer>(); list.add(1); list.add(2); list.add(3); int result = Flow.of(list).first().get(); Assert.assertEquals(1, result); } @Test public void test_of_iterator() { List<Integer> list = new ArrayList<Integer>(); list.add(1); list.add(2); list.add(3); int result = Flow.of(list.iterator()).first().get(); Assert.assertEquals(1, result); } @Test public void test_do_nothing() { Flow.of(new Iterator() { @Override public boolean hasNext() { throw new IllegalStateException("should not be called"); } @Override public Object next() { throw new IllegalStateException("should not be called"); } @Override public void remove() { throw new IllegalStateException("should not be called"); } }).start(); } @Test public void test_toMap() { Map<Boolean, List<Integer>> map = Flow.of(1, 2, 3) .toMap(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }, Flow.<Boolean, Integer>flowGroupListMapper()); Assert.assertEquals(2, map.size()); Assert.assertEquals(1, map.get(true).size()); Assert.assertEquals(Integer.valueOf(2), map.get(true).get(0)); Assert.assertEquals(2, map.get(false).size()); Assert.assertEquals(Integer.valueOf(1), map.get(false).get(0)); Assert.assertEquals(Integer.valueOf(3), map.get(false).get(1)); } @Test public void test_toMap_2() { Map<Boolean, List<Integer>> map = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .toMap(); Assert.assertEquals(2, map.size());
/* End of test_toMap_2 assertions; test_toMap_3 (groupBy + filter + toMap keeps only the even group);
   test_toMap_4 (key mapper plus explicit group -> List<String> value mapper); start of test_toMap_6
   (map-Supplier overload using a TreeMap). */ Assert.assertEquals(1, map.get(true).size()); Assert.assertEquals(Integer.valueOf(2), map.get(true).get(0)); Assert.assertEquals(2, map.get(false).size()); Assert.assertEquals(Integer.valueOf(1), map.get(false).get(0)); Assert.assertEquals(Integer.valueOf(3), map.get(false).get(1)); } @Test public void test_toMap_3() { Map<Boolean, List<Integer>> map = Flow.of(1, 2, 3) .groupBy(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }) .filter(new Filter<FlowGroup<Boolean, Integer>>() { @Override public boolean allow(FlowGroup<Boolean, Integer> group) { return group.getKey(); } }) .toMap(); Assert.assertEquals(1, map.size()); Assert.assertEquals(1, map.get(true).size()); Assert.assertEquals(Integer.valueOf(2), map.get(true).get(0)); } @Test public void test_toMap_4() { Map<Boolean, List<String>> map = Flow.of(1, 2, 3) .toMap(new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }, new Mapper<FlowGroup<Boolean, Integer>, List<String>>() { @Override public List<String> map(FlowGroup<Boolean, Integer> group) { return group.map(new Mapper<Integer, String>() { @Override public String map(Integer integer) { return integer.toString(); } }).toList().get(); } }); Assert.assertEquals(2, map.size()); Assert.assertEquals(1, map.get(true).size()); Assert.assertEquals("2", map.get(true).get(0)); Assert.assertEquals(2, map.get(false).size()); Assert.assertEquals("1", map.get(false).get(0)); Assert.assertEquals("3", map.get(false).get(1)); } @Test public void test_toMap_6() { Map<Boolean, List<Integer>> map = Flow.of(1, 2, 3) .toMap(new Supplier<Map<Boolean, FlowGroup<Boolean, Integer>>>() { @Override public Map<Boolean, FlowGroup<Boolean, Integer>> supply() { return new TreeMap<Boolean, FlowGroup<Boolean, Integer>>(); } }, new Mapper<Integer, Boolean>() { @Override public Boolean map(Integer integer) { return integer % 2 == 0; } }); Assert.assertEquals(2, map.size());
/* End of test_toMap_6 assertions; test_join; improveCoverage (bare Flow.get() is null); primitive-array
   factory tests test_of_ints / test_of_longs / test_of_shorts / test_of_bytes / test_of_floats, each also
   checking that a stopped pipeline never feeds the IllegalState processor; start of test_of_doubles. */ Assert.assertEquals(1, map.get(true).size()); Assert.assertEquals(Integer.valueOf(2), map.get(true).get(0)); Assert.assertEquals(2, map.get(false).size()); Assert.assertEquals(Integer.valueOf(1), map.get(false).get(0)); Assert.assertEquals(Integer.valueOf(3), map.get(false).get(1)); } @Test public void test_join() { Assert.assertEquals("1,2,3", Flow.of(1, 2, 3).join(",").get()); } @Test public void improveCoverage() { Assert.assertEquals(null, new Flow().get()); } @Test public void test_of_ints() { Assert.assertEquals("1,2,3", Flow.of(new int[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new int[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_longs() { Assert.assertEquals("1,2,3", Flow.of(new long[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new long[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_shorts() { Assert.assertEquals("1,2,3", Flow.of(new short[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new short[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_bytes() { Assert.assertEquals("1,2,3", Flow.of(new byte[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new byte[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_floats() { Assert.assertEquals("1.0,2.0,3.0", Flow.of(new float[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new float[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_doubles() { Assert.assertEquals("1.0,2.0,3.0", Flow.of(new double[]{1, 2, 3}).join(",").get()); IllegalState flow = Flow.of(new double[]{1, 2, 3}).then(new IllegalState()); flow.stop(); flow.execute();
/* End of test_of_doubles; test_of_booleans; test_of_chars; helper class IllegalState — a FlowProcessor
   that throws if it ever receives an element (used to prove stopped pipelines stay idle); test_of_map
   (iterating Map.Entry); test_each_with_index and _2 (ConsumerWithInt index runs 0,1,2, also after
   groupBy); start of test_each_with_index_3. */ Assert.assertEquals(null, flow.get()); } @Test public void test_of_booleans() { Assert.assertEquals("true,false", Flow.of(new boolean[]{true, false}).join(",").get()); IllegalState flow = Flow.of(new boolean[]{true, false}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } @Test public void test_of_chars() { Assert.assertEquals("a,b,c", Flow.of(new char[]{'a', 'b', 'c'}).join(",").get()); IllegalState flow = Flow.of(new char[]{'a', 'b', 'c'}).then(new IllegalState()); flow.stop(); flow.execute(); Assert.assertEquals(null, flow.get()); } static class IllegalState extends FlowProcessor { @Override public void process(Object o) { throw new IllegalStateException(); } } @Test public void test_of_map() { Map<Integer, String> map = new HashMap<Integer, String>(); map.put(1, "1"); map.put(2, "2"); map.put(3, "3"); String result = Flow.of(map).map(new Mapper<Map.Entry<Integer, String>, Integer>() { @Override public Integer map(Map.Entry<Integer, String> entry) { return entry.getKey(); } }).join(", ").get(); Assert.assertEquals("1, 2, 3", result); } @Test public void test_each_with_index() { final StringBuilder sb = new StringBuilder(); Flow.of(2, 4, 6).each(new ConsumerWithInt<Integer>() { @Override public void consume(int i, Integer integer) { if (sb.length() != 0) sb.append(", "); sb.append(i); } }).execute(); Assert.assertEquals("0, 1, 2", sb.toString()); } @Test public void test_each_with_index_2() { final StringBuilder sb = new StringBuilder(); Flow.of(2, 4, 6) .groupBy(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer; } }) .each(new ConsumerWithInt<FlowGroup<Integer, Integer>>() { @Override public void consume(int i, FlowGroup<Integer, Integer> integerIntegerFlowGroup) { if (sb.length() != 0) sb.append(", "); sb.append(i); } }) .first() .get(); Assert.assertEquals("0, 1, 2", sb.toString()); } @Test public void test_each_with_index_3() { final StringBuilder sb = new StringBuilder();
/* test_each_with_index_3 / _4: first() short-circuits so only index 0 is consumed (plain and grouped);
   test_any / test_all / test_none predicate combinators; start of test_none_and (none() result mapped
   to "yes"/"no"). */ Flow.of(2, 4, 6) .each(new ConsumerWithInt<Integer>() { @Override public void consume(int i, Integer integer) { if (sb.length() != 0) sb.append(", "); sb.append(i); } }) .first() .get(); Assert.assertEquals("0", sb.toString()); } @Test public void test_each_with_index_4() { final StringBuilder sb = new StringBuilder(); Flow.of(2, 4, 6) .groupBy(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer; } }) .each(new ConsumerWithInt<FlowGroup<Integer, Integer>>() { @Override public void consume(int i, FlowGroup<Integer, Integer> integerIntegerFlowGroup) { if (sb.length() != 0) sb.append(", "); sb.append(i); } }) .first() .get(); Assert.assertEquals("0", sb.toString()); } @Test public void test_any() { Assert.assertTrue(Flow.of(1, 2, 3).any(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 == 0; } }).get()); Assert.assertFalse(Flow.of(1, 3, 5).any(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 == 0; } }).get()); } @Test public void test_all() { Assert.assertTrue(Flow.of(1, 3, 5).all(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 != 0; } }).get()); Assert.assertFalse(Flow.of(1, 2, 3).all(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 != 0; } }).get()); } @Test public void test_none() { Assert.assertTrue(Flow.of(1, 3, 5).none(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 == 0; } }).get()); Assert.assertFalse(Flow.of(1, 2, 3).none(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 != 0; } }).get()); } @Test public void test_none_and() { Assert.assertEquals("yes", Flow.of(1, 3, 5) .none(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 == 0; } }) .map(new Mapper<Boolean, String>() { @Override public String map(Boolean
/* End of test_none_and (both yes and no branches); test_skip / test_skip_2 (skip on plain and grouped
   flows — note grouped variant reads the public field group.key); test_limit / _2 / _3 (limit on plain
   and grouped flows, and limit before toMap); start of test_limit_4 (limit must stop pulling from a
   Supplier after 3 items — asserts the supplier is called at most 5 times). */ aBoolean) { return aBoolean ? "yes" : "no"; } }) .first() .get() ); Assert.assertEquals("no", Flow.of(1, 2, 3) .none(new Filter<Integer>() { @Override public boolean allow(Integer integer) { return integer % 2 != 0; } }) .map(new Mapper<Boolean, String>() { @Override public String map(Boolean aBoolean) { return aBoolean ? "yes" : "no"; } }) .first() .get() ); } @Test public void test_skip() { Assert.assertEquals("3,4,5", Flow.of(1, 2, 3, 4, 5).skip(2).join(",").get()); } @Test public void test_skip_2() { Assert.assertEquals("3,4,5", Flow.of(1, 2, 3, 4, 5) .groupBy(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer; } }) .skip(2) .map(new Mapper<FlowGroup<Integer, Integer>, Integer>() { @Override public Integer map(FlowGroup<Integer, Integer> group) { return group.key; } }) .join(",") .get()); } @Test public void test_limit() { Assert.assertEquals("1,2,3", Flow.of(1, 2, 3, 4, 5).limit(3).join(",").get()); } @Test public void test_limit_2() { Assert.assertEquals("1,2,3", Flow.of(1, 2, 3, 4, 5) .groupBy(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer; } }) .limit(3) .map(new Mapper<FlowGroup<Integer, Integer>, Integer>() { @Override public Integer map(FlowGroup<Integer, Integer> group) { return group.key; } }) .join(",") .get()); } @Test public void test_limit_3() { Assert.assertEquals("{1=[1], 2=[2], 3=[3]}", Flow.of(1, 2, 3, 4, 5) .groupBy(new Mapper<Integer, Integer>() { @Override public Integer map(Integer integer) { return integer; } }) .limit(3) .toMap() .toString() ); } @Test public void test_limit_4() { final AtomicInteger i = new AtomicInteger(); Assert.assertEquals("0,1,2", Flow.of(new Supplier<Integer>() { @Override public Integer supply() { Assert.assertTrue(i.get() < 5); return i.get() < 5 ?
/* End of test_limit_4; test_supplier (null from Supplier terminates the flow); test_async (parallel map
   preserves the element set, order unspecified — containsAll); test_async_non_blocking (execute()
   returns before async work runs; sleep-based timing assertion); test_async_executor_service (1 thread,
   3 x 20ms jobs => > 60ms serial); start of the 2-thread variant. NOTE(review): these sleep-based
   timing tests are inherently flaky on loaded CI machines. */ i.getAndIncrement() : null; } }).limit(3).join(",").get()); } @Test public void test_supplier() { final int[] ints = new int[]{1, 2, 3}; final AtomicInteger i = new AtomicInteger(); Assert.assertEquals("1,2,3", Flow.of(new Supplier<Integer>() { @Override public Integer supply() { return i.get() < ints.length ? ints[i.getAndIncrement()] : null; } }).join(",").get()); } @Test public void test_async() { List<String> result = Flow.of("a", "b", "c").async(new Mapper<String, Flow<String>>() { @Override public Flow<String> map(String s) { return Flow.of(s.toUpperCase()); } }).toList().get(); Assert.assertEquals(3, result.size()); Assert.assertTrue(result.containsAll(Arrays.asList("A", "B", "C"))); } @Test public void test_async_non_blocking() { final AtomicInteger counter = new AtomicInteger(); Flow.of("a", "b", "c") .async(new Mapper<String, Flow<String>>() { @Override public Flow<String> map(String s) { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } counter.incrementAndGet(); return Flow.of(s); } }).execute(); Assert.assertEquals(0, counter.get()); try { Thread.sleep(350); } catch (InterruptedException e) { e.printStackTrace(); } Assert.assertEquals(3, counter.get()); } @Test public void test_async_executor_service() { long time = System.currentTimeMillis(); List<String> result = Flow.of("a", "b", "c") .async(Executors.newFixedThreadPool(1), 1, new Mapper<String, Flow<String>>() { @Override public Flow<String> map(String s) { try { Thread.sleep(20); } catch (InterruptedException e) { e.printStackTrace(); } return Flow.of(s.toUpperCase()); } }).toList().get(); time = System.currentTimeMillis() - time; Assert.assertTrue(time > 60); Assert.assertEquals(3, result.size()); Assert.assertTrue(result.containsAll(Arrays.asList("A", "B", "C"))); } @Test public void test_async_executor_service_2() { long time = System.currentTimeMillis(); List<String> result = Flow.of("a", "b", "c") .async(Executors.newFixedThreadPool(2), 1, new Mapper<String,
/* End of 2-thread executor test (3 x 20ms on 2 threads => > 40ms); test_async_queue_limit (bounded
   queue of 5 back-pressures the upstream each(): 1 processed + 5 queued + 1 blocked = 7 seen);
   test_async_process_only_first_after (first() after async consumes exactly one element downstream);
   closing brace of the test class. */ Flow<String>>() { @Override public Flow<String> map(String s) { try { Thread.sleep(20); } catch (InterruptedException e) { e.printStackTrace(); } return Flow.of(s.toUpperCase()); } }).toList().get(); time = System.currentTimeMillis() - time; Assert.assertTrue(time > 40); Assert.assertEquals(3, result.size()); Assert.assertTrue(result.containsAll(Arrays.asList("A", "B", "C"))); } @Test public void test_async_queue_limit() { final AtomicInteger before = new AtomicInteger(); long time = System.currentTimeMillis(); String result = Flow.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) .each(new Consumer<Integer>() { @Override public void consume(Integer integer) { before.incrementAndGet(); } }) .async(Executors.newFixedThreadPool(1), 5, new Mapper<Integer, Flow<String>>() { @Override public Flow<String> map(Integer i) { try { Thread.sleep(20); } catch (InterruptedException e) { e.printStackTrace(); } return Flow.of(String.valueOf(i)); } }) .first() .get(); time = System.currentTimeMillis() - time; Assert.assertTrue(time > 20); Assert.assertEquals(7, before.get()); // 1 processed + 5 in queue + 1 waiting to be added Assert.assertEquals("1", result); } @Test public void test_async_process_only_first_after() { final AtomicInteger after = new AtomicInteger(); String result = Flow.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) .async(Executors.newFixedThreadPool(1), 5, new Mapper<Integer, Flow<String>>() { @Override public Flow<String> map(Integer i) { return Flow.of(String.valueOf(i)); } }) .each(new Consumer<String>() { @Override public void consume(String s) { after.incrementAndGet(); try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); } } }) .first() .get(); Assert.assertEquals(1, after.get()); Assert.assertEquals("1", result); } }
/* Head of mho.wheels.iterables.RandomProviderProperties — property-based tests for RandomProvider,
   driven by the TestProperties base class. The class continues past this chunk (cut mid-declaration of
   propertiesRangeDown_int at the end). Source is whitespace-collapsed; comments map each line.
   This line: package, imports, class declaration, RANDOM_PROVIDER_CHARS (charset used by toString
   property checks), constructor, and the start of testBothModes() — a flat registry invoking every
   properties* method. */ package mho.wheels.iterables; import mho.wheels.math.BinaryFraction; import mho.wheels.numberUtils.BigDecimalUtils; import mho.wheels.numberUtils.FloatingPointUtils; import mho.wheels.ordering.Ordering; import mho.wheels.ordering.comparators.ListBasedComparator; import mho.wheels.ordering.comparators.WithNullComparator; import mho.wheels.random.IsaacPRNG; import mho.wheels.structures.*; import mho.wheels.testing.TestProperties; import org.jetbrains.annotations.NotNull; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.*; import java.util.function.Function; import java.util.function.Predicate; import static mho.wheels.iterables.IterableUtils.*; import static mho.wheels.ordering.Ordering.*; import static mho.wheels.testing.Testing.*; public class RandomProviderProperties extends TestProperties { private static final String RANDOM_PROVIDER_CHARS = " ,-0123456789@PR[]adeimnorv"; public RandomProviderProperties() { super("RandomProvider"); } @Override protected void testBothModes() { propertiesConstructor(); propertiesConstructor_List_Integer(); propertiesGetScale(); propertiesGetSecondaryScale(); propertiesGetTertiaryScale(); propertiesGetSeed(); propertiesWithScale(); propertiesWithSecondaryScale(); propertiesWithTertiaryScale(); propertiesCopy(); propertiesDeepCopy(); propertiesReset(); propertiesGetId(); propertiesIntegers(); propertiesLongs(); propertiesBooleans(); propertiesUniformSample_Iterable(); propertiesUniformSample_String(); propertiesOrderings(); propertiesRoundingModes(); propertiesPositiveBytes(); propertiesPositiveShorts(); propertiesPositiveIntegers(); propertiesPositiveLongs(); propertiesNegativeBytes(); propertiesNegativeShorts(); propertiesNegativeIntegers(); propertiesNegativeLongs(); propertiesNaturalBytes(); propertiesNaturalShorts(); propertiesNaturalIntegers(); propertiesNaturalLongs(); propertiesNonzeroBytes(); propertiesNonzeroShorts(); propertiesNonzeroIntegers();
/* testBothModes registry continued: primitive generators, rangeUp/rangeDown/range over the integral
   and char types, the geometric-distribution integer generators, BigInteger generators/ranges,
   BinaryFraction generators/ranges, floating-point generators (signed/nonzero/uniform variants), and
   the float/double range families. */ propertiesNonzeroLongs(); propertiesBytes(); propertiesShorts(); propertiesAsciiCharacters(); propertiesCharacters(); propertiesRangeUp_byte(); propertiesRangeUp_short(); propertiesRangeUp_int(); propertiesRangeUp_long(); propertiesRangeUp_char(); propertiesRangeDown_byte(); propertiesRangeDown_short(); propertiesRangeDown_int(); propertiesRangeDown_long(); propertiesRangeDown_char(); propertiesRange_byte_byte(); propertiesRange_short_short(); propertiesRange_int_int(); propertiesRange_long_long(); propertiesRange_BigInteger_BigInteger(); propertiesRange_char_char(); propertiesPositiveIntegersGeometric(); propertiesNegativeIntegersGeometric(); propertiesNaturalIntegersGeometric(); propertiesNonzeroIntegersGeometric(); propertiesIntegersGeometric(); propertiesRangeUpGeometric(); propertiesRangeDownGeometric(); propertiesPositiveBigIntegers(); propertiesNegativeBigIntegers(); propertiesNaturalBigIntegers(); propertiesNonzeroBigIntegers(); propertiesBigIntegers(); propertiesRangeUp_BigInteger(); propertiesRangeDown_BigInteger(); propertiesPositiveBinaryFractions(); propertiesNegativeBinaryFractions(); propertiesNonzeroBinaryFractions(); propertiesBinaryFractions(); propertiesRangeUp_BinaryFraction(); propertiesRangeDown_BinaryFraction(); propertiesRange_BinaryFraction_BinaryFraction(); propertiesPositiveFloats(); propertiesNegativeFloats(); propertiesNonzeroFloats(); propertiesFloats(); propertiesPositiveDoubles(); propertiesNegativeDoubles(); propertiesNonzeroDoubles(); propertiesDoubles(); propertiesPositiveFloatsUniform(); propertiesNegativeFloatsUniform(); propertiesNonzeroFloatsUniform(); propertiesFloatsUniform(); propertiesPositiveDoublesUniform(); propertiesNegativeDoublesUniform(); propertiesNonzeroDoublesUniform(); propertiesDoublesUniform(); propertiesRangeUp_float(); propertiesRangeDown_float(); propertiesRange_float_float(); propertiesRangeUp_double(); propertiesRangeDown_double(); propertiesRange_double_double(); propertiesRangeUpUniform_float();
/* Registry continued: uniform float/double ranges, BigDecimal generators and (canonical) ranges,
   wrapper combinators (withElement/withNull/optionals), dependent pairs, shuffling/permutations,
   and the list/string/bag/subset generator families. */ propertiesRangeDownUniform_float(); propertiesRangeUniform_float_float(); propertiesRangeUpUniform_double(); propertiesRangeDownUniform_double(); propertiesRangeUniform_double_double(); propertiesPositiveBigDecimals(); propertiesNegativeBigDecimals(); propertiesNonzeroBigDecimals(); propertiesBigDecimals(); propertiesPositiveCanonicalBigDecimals(); propertiesNegativeCanonicalBigDecimals(); propertiesNonzeroCanonicalBigDecimals(); propertiesCanonicalBigDecimals(); propertiesRangeUp_BigDecimal(); propertiesRangeDown_BigDecimal(); propertiesRange_BigDecimal_BigDecimal(); propertiesRangeUpCanonical_BigDecimal(); propertiesRangeDownCanonical_BigDecimal(); propertiesRangeCanonical_BigDecimal_BigDecimal(); propertiesWithElement(); propertiesWithNull(); propertiesOptionals(); propertiesNullableOptionals(); propertiesDependentPairsInfinite(); propertiesShuffle(); propertiesPermutationsFinite(); propertiesStringPermutations(); propertiesPrefixPermutations(); propertiesStrings_int_String(); propertiesStrings_int(); propertiesLists(); propertiesStrings_String(); propertiesStrings(); propertiesListsAtLeast(); propertiesStringsAtLeast_int_String(); propertiesStringsAtLeast_int(); propertiesDistinctStrings_int_String(); propertiesDistinctStrings_int(); propertiesDistinctLists(); propertiesDistinctStrings_String(); propertiesDistinctStrings(); propertiesDistinctListsAtLeast(); propertiesDistinctStringsAtLeast_int_String(); propertiesDistinctStringsAtLeast_int(); propertiesStringBags_int_String(); propertiesStringBags_int(); propertiesBags(); propertiesStringBags_String(); propertiesStringBags(); propertiesBagsAtLeast(); propertiesStringBagsAtLeast_int_String(); propertiesStringBagsAtLeast_int(); propertiesStringSubsets_int_String(); propertiesStringSubsets_int(); propertiesSubsets(); propertiesStringSubsets_String(); propertiesStringSubsets(); propertiesSubsetsAtLeast(); propertiesStringSubsetsAtLeast_int_String(); propertiesStringSubsetsAtLeast_int(); propertiesEithers();
/* End of the testBothModes registry (choose/cartesianProduct/sublists/maps/RandomProvider meta-
   generators/equals/hashCode/toString); simpleTestWithNulls — shared helper asserting a predicate over
   TINY_LIMIT samples and that the iterator rejects remove(); simpleTest — same but additionally
   null-rejecting; propertiesConstructor (every fresh RandomProvider validates); start of
   propertiesConstructor_List_Integer (seed-list constructor: default scales 32/8/2, then the
   wrong-size-seed failure cases). */ propertiesChoose(); propertiesCartesianProduct(); propertiesRepeatingIterables(); propertiesRepeatingIterablesDistinctAtLeast(); propertiesSublists(); propertiesSubstrings(); propertiesListsWithElement(); propertiesStringsWithChar_char_String(); propertiesStringsWithChar_char(); propertiesSubsetsWithElement(); propertiesStringSubsetsWithChar_char_String(); propertiesStringSubsetsWithChar_char(); propertiesListsWithSublists(); propertiesStringsWithSubstrings_Iterable_String_String(); propertiesStringsWithSubstrings_Iterable_String(); propertiesMaps(); propertiesRandomProvidersFixedScales(); propertiesRandomProvidersDefault(); propertiesRandomProvidersDefaultSecondaryAndTertiaryScale(); propertiesRandomProvidersDefaultTertiaryScale(); propertiesRandomProviders(); propertiesEquals(); propertiesHashCode(); propertiesToString(); } private static <T> void simpleTestWithNulls( @NotNull RandomProvider rp, @NotNull Iterable<T> xs, @NotNull Predicate<T> predicate ) { rp.reset(); assertTrue(rp, all(predicate, take(TINY_LIMIT, xs))); rp.reset(); testNoRemove(TINY_LIMIT, xs); } private static <T> void simpleTest( @NotNull RandomProvider rp, @NotNull Iterable<T> xs, @NotNull Predicate<T> predicate ) { simpleTestWithNulls(rp, xs, x -> x != null && predicate.test(x)); } private void propertiesConstructor() { initialize("RandomProvider()"); //noinspection unused for (Void v : take(LIMIT, repeat((Void) null))) { RandomProvider rp = new RandomProvider(); rp.validate(); } } private void propertiesConstructor_List_Integer() { initialize("RandomProvider(List<Integer>)"); for (List<Integer> is : take(LIMIT, P.lists(IsaacPRNG.SIZE, P.integers()))) { RandomProvider rp = new RandomProvider(is); rp.validate(); assertEquals(is, rp.getScale(), 32); assertEquals(is, rp.getSecondaryScale(), 8); assertEquals(is, rp.getTertiaryScale(), 2); } Iterable<List<Integer>> isFail = filterInfinite(js -> js.size() != IsaacPRNG.SIZE, P.lists(P.integers())); for (List<Integer> is : take(LIMIT, isFail)) { try
/* End of the wrong-size-seed failure loop (IllegalArgumentException expected); getScale /
   getSecondaryScale / getTertiaryScale round-trip through the matching with* setter; getSeed
   (IsaacPRNG.SIZE-length seed fully reconstructs the provider); propertiesWithScale (withScale sets
   only scale, is invertible and idempotent); start of propertiesWithSecondaryScale. */ { new RandomProvider(is); fail(is); } catch (IllegalArgumentException ignored) {} } } private void propertiesGetScale() { initialize("getScale()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { int scale = rp.getScale(); assertEquals(rp, rp.withScale(scale), rp); } } private void propertiesGetSecondaryScale() { initialize("getSecondaryScale()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { int secondaryScale = rp.getSecondaryScale(); assertEquals(rp, rp.withSecondaryScale(secondaryScale), rp); } } private void propertiesGetTertiaryScale() { initialize("getTertiaryScale()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { int tertiaryScale = rp.getTertiaryScale(); assertEquals(rp, rp.withTertiaryScale(tertiaryScale), rp); } } private void propertiesGetSeed() { initialize("getSeed()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { List<Integer> seed = rp.getSeed(); assertEquals(rp, seed.size(), IsaacPRNG.SIZE); assertEquals( rp, new RandomProvider(seed).withScale(rp.getScale()).withSecondaryScale(rp.getSecondaryScale()) .withTertiaryScale(rp.getTertiaryScale()), rp ); } } private void propertiesWithScale() { initialize("withScale(int)"); for (Pair<RandomProvider, Integer> p : take(LIMIT, P.pairs(P.randomProviders(), P.naturalIntegers()))) { RandomProvider rp = p.a.withScale(p.b); rp.validate(); assertEquals(p, rp.getScale(), p.b); assertEquals(p, rp.getSecondaryScale(), p.a.getSecondaryScale()); assertEquals(p, rp.getTertiaryScale(), p.a.getTertiaryScale()); assertEquals(p, rp.getSeed(), p.a.getSeed()); inverse(x -> x.withScale(p.b), (RandomProvider y) -> y.withScale(p.a.getScale()), p.a); } for (RandomProvider rp : take(LIMIT, P.randomProviders())) { idempotent(x -> x.withScale(rp.getScale()), rp); } } private void propertiesWithSecondaryScale() { initialize("withSecondaryScale(int)"); for (Pair<RandomProvider, Integer> p : take(LIMIT, P.pairs(P.randomProviders(), P.naturalIntegers()))) { RandomProvider rp =
/* End of propertiesWithSecondaryScale and propertiesWithTertiaryScale (each with* touches only its own
   scale, invertible + idempotent); propertiesCopy — shallow copy shares PRNG state, so equality
   survives consuming an integer; propertiesDeepCopy — deep copy diverges once the original advances;
   start of propertiesReset (withScale-derived provider tracks its parent's state). */ p.a.withSecondaryScale(p.b); rp.validate(); assertEquals(p, rp.getScale(), p.a.getScale()); assertEquals(p, rp.getSecondaryScale(), p.b); assertEquals(p, rp.getTertiaryScale(), p.a.getTertiaryScale()); assertEquals(p, rp.getSeed(), p.a.getSeed()); inverse( x -> x.withSecondaryScale(p.b), (RandomProvider y) -> y.withSecondaryScale(p.a.getSecondaryScale()), p.a ); } for (RandomProvider rp : take(LIMIT, P.randomProviders())) { idempotent(x -> x.withSecondaryScale(rp.getSecondaryScale()), rp); } } private void propertiesWithTertiaryScale() { initialize("withTertiaryScale(int)"); for (Pair<RandomProvider, Integer> p : take(LIMIT, P.pairs(P.randomProviders(), P.naturalIntegers()))) { RandomProvider rp = p.a.withTertiaryScale(p.b); rp.validate(); assertEquals(p, rp.getScale(), p.a.getScale()); assertEquals(p, rp.getSecondaryScale(), p.a.getSecondaryScale()); assertEquals(p, rp.getTertiaryScale(), p.b); assertEquals(p, rp.getSeed(), p.a.getSeed()); inverse( x -> x.withTertiaryScale(p.b), (RandomProvider y) -> y.withTertiaryScale(p.a.getTertiaryScale()), p.a ); } for (RandomProvider rp : take(LIMIT, P.randomProviders())) { idempotent(x -> x.withTertiaryScale(rp.getTertiaryScale()), rp); } } private void propertiesCopy() { initialize("copy()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { RandomProvider copy = rp.copy(); assertEquals(rp, rp, copy); head(rp.integers()); assertEquals(rp, rp, copy); } } private void propertiesDeepCopy() { initialize("deepCopy()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { RandomProvider copy = rp.deepCopy(); assertEquals(rp, rp, copy); head(rp.integers()); assertNotEquals(rp, rp, copy); } } private void propertiesReset() { initialize("reset()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { RandomProvider rpDependent = rp.withScale(10); RandomProvider original = rp.deepCopy(); RandomProvider dependent = original.withScale(10); assertEquals(rp, rpDependent, dependent); head(rp.integers());
/* End of propertiesReset (reset() restores both the provider and its scale-derived sibling);
   propertiesGetId (smoke call); integers/longs/booleans generators (booleans must hit both values);
   uniformSample(Iterable) — samples stay in the source list, empty list rejected; start of
   uniformSample(String). */ assertNotEquals(rp, rp, original); assertNotEquals(rp, rpDependent, dependent); rp.reset(); assertEquals(rp, rp, original); assertEquals(rp, rpDependent, dependent); } } private void propertiesGetId() { initialize("getId()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { rp.getId(); } } private void propertiesIntegers() { initialize("integers()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Integer> is = rp.integers(); simpleTest(rp, is, i -> true); } } private void propertiesLongs() { initialize("longs()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Long> ls = rp.longs(); simpleTest(rp, ls, l -> true); } } private void propertiesBooleans() { initialize("booleans()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Boolean> bs = rp.booleans(); simpleTest(rp, bs, b -> true); for (boolean b : ExhaustiveProvider.INSTANCE.booleans()) { assertTrue(rp, elem(b, bs)); } } } private void propertiesUniformSample_Iterable() { initialize("uniformSample(Iterable<T>)"); Iterable<Pair<RandomProvider, List<Integer>>> ps = P.pairs( P.randomProvidersDefault(), P.withScale(4).listsAtLeast(1, P.withNull(P.integersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps)) { Iterable<Integer> is = p.a.uniformSample(p.b); simpleTestWithNulls(p.a, is, p.b::contains); assertEquals(is, isEmpty(is), p.b.isEmpty()); } for (RandomProvider rp : take(LIMIT,P.randomProvidersDefault())) { try { rp.uniformSample(Collections.emptyList()); fail(rp); } catch (IllegalArgumentException ignored) {} } } private void propertiesUniformSample_String() { initialize("uniformSample(String)"); for (Pair<RandomProvider, String> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.stringsAtLeast(1)))) { Iterable<Character> cs = p.a.uniformSample(p.b); simpleTest(p.a, cs, c -> elem(c, cs)); assertEquals(cs, isEmpty(cs), p.b.isEmpty()); } for (RandomProvider rp :
/* End of uniformSample(String) — empty string rejected; orderings/roundingModes generators must cover
   every enum constant; sign-constrained generators positiveBytes/Shorts/Integers/Longs (> 0) and
   negativeBytes (< 0); start of negativeShorts. */ take(LIMIT,P.randomProvidersDefault())) { try { rp.uniformSample(""); fail(rp); } catch (IllegalArgumentException ignored) {} } } private void propertiesOrderings() { initialize("orderings()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Ordering> os = rp.orderings(); simpleTest(rp, os, o -> true); for (Ordering o : ExhaustiveProvider.INSTANCE.orderings()) { assertTrue(rp, elem(o, os)); } } } private void propertiesRoundingModes() { initialize("roundingModes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<RoundingMode> rms = rp.roundingModes(); simpleTest(rp, rms, rm -> true); for (RoundingMode rm : ExhaustiveProvider.INSTANCE.roundingModes()) { assertTrue(rp, elem(rm, rms)); } } } private void propertiesPositiveBytes() { initialize("positiveBytes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Byte> bs = rp.positiveBytes(); simpleTest(rp, bs, b -> b > 0); } } private void propertiesPositiveShorts() { initialize("positiveShorts()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Short> ss = rp.positiveShorts(); simpleTest(rp, ss, s -> s > 0); } } private void propertiesPositiveIntegers() { initialize("positiveIntegers()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Integer> is = rp.positiveIntegers(); simpleTest(rp, is, i -> i > 0); } } private void propertiesPositiveLongs() { initialize("positiveLongs()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Long> ls = rp.positiveLongs(); simpleTest(rp, ls, l -> l > 0); } } private void propertiesNegativeBytes() { initialize("negativeBytes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Byte> bs = rp.negativeBytes(); simpleTest(rp, bs, b -> b < 0); } } private void propertiesNegativeShorts() { initialize("negativeShorts()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
/* negativeShorts/Integers/Longs (< 0); naturalBytes/Shorts/Integers/Longs (>= 0);
   nonzeroBytes/Shorts (!= 0); start of nonzeroIntegers. */ Iterable<Short> ss = rp.negativeShorts(); simpleTest(rp, ss, s -> s < 0); } } private void propertiesNegativeIntegers() { initialize("negativeIntegers()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Integer> is = rp.negativeIntegers(); simpleTest(rp, is, i -> i < 0); } } private void propertiesNegativeLongs() { initialize("negativeLongs()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Long> ls = rp.negativeLongs(); simpleTest(rp, ls, l -> l < 0); } } private void propertiesNaturalBytes() { initialize("naturalBytes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Byte> bs = rp.naturalBytes(); simpleTest(rp, bs, b -> b >= 0); } } private void propertiesNaturalShorts() { initialize("naturalShorts()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Short> ss = rp.naturalShorts(); simpleTest(rp, ss, s -> s >= 0); } } private void propertiesNaturalIntegers() { initialize("naturalIntegers()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Integer> is = rp.naturalIntegers(); simpleTest(rp, is, i -> i >= 0); } } private void propertiesNaturalLongs() { initialize("naturalLongs()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Long> ls = rp.naturalLongs(); simpleTest(rp, ls, l -> l >= 0); } } private void propertiesNonzeroBytes() { initialize("nonzeroBytes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Byte> bs = rp.nonzeroBytes(); simpleTest(rp, bs, b -> b != 0); } } private void propertiesNonzeroShorts() { initialize("nonzeroShorts()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Short> ss = rp.nonzeroShorts(); simpleTest(rp, ss, s -> s != 0); } } private void propertiesNonzeroIntegers() { initialize("nonzeroIntegers()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Integer> is =
/* End of nonzeroIntegers; nonzeroLongs; unconstrained bytes/shorts; asciiCharacters (< 128) and
   characters; rangeUp(byte)/rangeUp(short) — values >= bound, and rangeUp(MAX_VALUE) degenerates to
   repeat(MAX_VALUE); start of rangeUp(int). */ rp.nonzeroIntegers(); simpleTest(rp, is, i -> i != 0); } } private void propertiesNonzeroLongs() { initialize("nonzeroLongs()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Long> ls = rp.nonzeroLongs(); simpleTest(rp, ls, l -> l != 0); } } private void propertiesBytes() { initialize("bytes()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Byte> bs = rp.bytes(); simpleTest(rp, bs, b -> true); } } private void propertiesShorts() { initialize("shorts()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Short> ss = rp.shorts(); simpleTest(rp, ss, s -> true); } } private void propertiesAsciiCharacters() { initialize("asciiCharacters()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Character> cs = rp.asciiCharacters(); simpleTest(rp, cs, c -> c < 128); } } private void propertiesCharacters() { initialize("characters()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { Iterable<Character> cs = rp.characters(); simpleTest(rp, cs, c -> true); } } private void propertiesRangeUp_byte() { initialize("rangeUp(byte)"); for (Pair<RandomProvider, Byte> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.bytes()))) { Iterable<Byte> bs = p.a.rangeUp(p.b); simpleTest(p.a, bs, b -> b >= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeUp(Byte.MAX_VALUE), repeat(Byte.MAX_VALUE)); } } private void propertiesRangeUp_short() { initialize("rangeUp(short)"); for (Pair<RandomProvider, Short> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.shorts()))) { Iterable<Short> ss = p.a.rangeUp(p.b); simpleTest(p.a, ss, s -> s >= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeUp(Short.MAX_VALUE), repeat(Short.MAX_VALUE)); } } private void propertiesRangeUp_int() { initialize("rangeUp(int)"); for (Pair<RandomProvider, Integer> p :
/* End of rangeUp(int); rangeUp(long) and rangeUp(char) with the same >= bound and MAX_VALUE
   degenerate checks; rangeDown(byte)/rangeDown(short) (<= bound, MIN_VALUE degenerates to
   repeat(MIN_VALUE)); the chunk is cut mid-declaration of propertiesRangeDown_int — its body
   continues in the next chunk. */ take(LIMIT, P.pairs(P.randomProvidersDefault(), P.integers()))) { Iterable<Integer> is = p.a.rangeUp(p.b); simpleTest(p.a, is, i -> i >= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeUp(Integer.MAX_VALUE), repeat(Integer.MAX_VALUE)); } } private void propertiesRangeUp_long() { initialize("rangeUp(long)"); for (Pair<RandomProvider, Long> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.longs()))) { Iterable<Long> ls = p.a.rangeUp(p.b); simpleTest(p.a, ls, l -> l >= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeUp(Long.MAX_VALUE), repeat(Long.MAX_VALUE)); } } private void propertiesRangeUp_char() { initialize("rangeUp(char)"); Iterable<Pair<RandomProvider, Character>> ps = P.pairs(P.randomProvidersDefault(), P.characters()); for (Pair<RandomProvider, Character> p : take(LIMIT, ps)) { Iterable<Character> cs = p.a.rangeUp(p.b); simpleTest(p.a, cs, c -> c >= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeUp(Character.MAX_VALUE), repeat(Character.MAX_VALUE)); } } private void propertiesRangeDown_byte() { initialize("rangeDown(byte)"); for (Pair<RandomProvider, Byte> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.bytes()))) { Iterable<Byte> bs = p.a.rangeDown(p.b); simpleTest(p.a, bs, b -> b <= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeDown(Byte.MIN_VALUE), repeat(Byte.MIN_VALUE)); } } private void propertiesRangeDown_short() { initialize("rangeDown(short)"); for (Pair<RandomProvider, Short> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.shorts()))) { Iterable<Short> ss = p.a.rangeDown(p.b); simpleTest(p.a, ss, s -> s <= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeDown(Short.MIN_VALUE), repeat(Short.MIN_VALUE)); } } private void propertiesRangeDown_int()
{ initialize("rangeDown(int)"); for (Pair<RandomProvider, Integer> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.integers()))) { Iterable<Integer> is = p.a.rangeDown(p.b); simpleTest(p.a, is, i -> i <= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeDown(Integer.MIN_VALUE), repeat(Integer.MIN_VALUE)); } } private void propertiesRangeDown_long() { initialize("rangeDown(long)"); for (Pair<RandomProvider, Long> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.longs()))) { Iterable<Long> ls = p.a.rangeDown(p.b); simpleTest(p.a, ls, l -> l <= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeDown(Long.MIN_VALUE), repeat(Long.MIN_VALUE)); } } private void propertiesRangeDown_char() { initialize("rangeDown(char)"); Iterable<Pair<RandomProvider, Character>> ps = P.pairs(P.randomProvidersDefault(), P.characters()); for (Pair<RandomProvider, Character> p : take(LIMIT, ps)) { Iterable<Character> cs = p.a.rangeDown(p.b); simpleTest(p.a, cs, b -> b <= p.b); } for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { aeqit(rp, TINY_LIMIT, rp.rangeDown(Character.MIN_VALUE), repeat(Character.MIN_VALUE)); } } private void propertiesRange_byte_byte() { initialize("range(byte, byte)"); Iterable<Triple<RandomProvider, Byte, Byte>> ts = P.triples(P.randomProvidersDefault(), P.bytes(), P.bytes()); for (Triple<RandomProvider, Byte, Byte> t : take(LIMIT, ts)) { Iterable<Byte> bs = t.a.range(t.b, t.c); simpleTest(t.a, bs, b -> b >= t.b && b <= t.c); assertEquals(t, t.b > t.c, isEmpty(bs)); } for (Pair<RandomProvider, Byte> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.bytes()))) { aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b)); } } private void propertiesRange_short_short() { initialize("range(short, short)"); Iterable<Triple<RandomProvider, Short, Short>> ts = P.triples( P.randomProvidersDefault(), P.shorts(), P.shorts() ); for 
// Continuation of propertiesRange_short_short(): the method's header precedes this chunk.
(Triple<RandomProvider, Short, Short> t : take(LIMIT, ts)) {
            Iterable<Short> ss = t.a.range(t.b, t.c);
            simpleTest(t.a, ss, s -> s >= t.b && s <= t.c);
            // A range is empty exactly when the lower bound exceeds the upper bound.
            assertEquals(t, t.b > t.c, isEmpty(ss));
        }
        for (Pair<RandomProvider, Short> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.shorts()))) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
    }

    // range(int, int): values lie in [b, c]; empty iff b > c; singleton range is constant.
    private void propertiesRange_int_int() {
        initialize("range(int, int)");
        Iterable<Triple<RandomProvider, Integer, Integer>> ts = P.triples(
                P.randomProvidersDefault(),
                P.integers(),
                P.integers()
        );
        for (Triple<RandomProvider, Integer, Integer> t : take(LIMIT, ts)) {
            Iterable<Integer> is = t.a.range(t.b, t.c);
            simpleTest(t.a, is, i -> i >= t.b && i <= t.c);
            assertEquals(t, t.b > t.c, isEmpty(is));
        }
        for (Pair<RandomProvider, Integer> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.integers()))) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
    }

    // range(long, long): same contract at long width.
    private void propertiesRange_long_long() {
        initialize("range(long, long)");
        Iterable<Triple<RandomProvider, Long, Long>> ts = P.triples(P.randomProvidersDefault(), P.longs(), P.longs());
        for (Triple<RandomProvider, Long, Long> t : take(LIMIT, ts)) {
            Iterable<Long> ls = t.a.range(t.b, t.c);
            simpleTest(t.a, ls, l -> l >= t.b && l <= t.c);
            assertEquals(t, t.b > t.c, isEmpty(ls));
        }
        for (Pair<RandomProvider, Long> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.longs()))) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
    }

    // range(BigInteger, BigInteger): same contract; BigInteger has no <=/>= operators,
    // so the comparisons use the ge/le/gt helpers (defined elsewhere).
    private void propertiesRange_BigInteger_BigInteger() {
        initialize("range(BigInteger, BigInteger)");
        Iterable<Triple<RandomProvider, BigInteger, BigInteger>> ts = P.triples(
                P.randomProvidersDefault(),
                P.bigIntegers(),
                P.bigIntegers()
        );
        for (Triple<RandomProvider, BigInteger, BigInteger> t : take(LIMIT, ts)) {
            Iterable<BigInteger> is = t.a.range(t.b, t.c);
            simpleTest(t.a, is, i -> ge(i, t.b) && le(i, t.c));
            assertEquals(t, gt(t.b, t.c), isEmpty(is));
        }
        Iterable<Pair<RandomProvider, BigInteger>> ps =
                P.pairs(P.randomProvidersDefault(), P.bigIntegers());
        for (Pair<RandomProvider, BigInteger> p : take(LIMIT, ps)) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
    }

    // range(char, char): same contract at char width.
    private void propertiesRange_char_char() {
        initialize("range(char, char)");
        Iterable<Triple<RandomProvider, Character, Character>> ts = P.triples(
                P.randomProvidersDefault(),
                P.characters(),
                P.characters()
        );
        for (Triple<RandomProvider, Character, Character> t : take(LIMIT, ts)) {
            Iterable<Character> cs = t.a.range(t.b, t.c);
            simpleTest(t.a, cs, c -> c >= t.b && c <= t.c);
            assertEquals(t, t.b > t.c, isEmpty(cs));
        }
        Iterable<Pair<RandomProvider, Character>> ps = P.pairs(P.randomProvidersDefault(), P.characters());
        for (Pair<RandomProvider, Character> p : take(LIMIT, ps)) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
    }

    // positiveIntegersGeometric(): values > 0 when the provider's scale is >= 2;
    // a scale < 2 must make the generator throw IllegalStateException.
    private void propertiesPositiveIntegersGeometric() {
        initialize("positiveIntegersGeometric()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<Integer> is = rp.positiveIntegersGeometric();
            simpleTest(rp, is, i -> i > 0);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.positiveIntegersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // negativeIntegersGeometric(): mirror of the positive case (values < 0); the
    // failure loop continues past this chunk line.
    private void propertiesNegativeIntegersGeometric() {
        initialize("negativeIntegersGeometric()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<Integer> is = rp.negativeIntegersGeometric();
            simpleTest(rp, is, i -> i < 0);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
// Continuation of propertiesNegativeIntegersGeometric(): the try-block opens on the
// previous chunk line.
rp.negativeIntegersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // naturalIntegersGeometric(): values >= 0 for scale in (0, Integer.MAX_VALUE);
    // scale < 1 or scale == Integer.MAX_VALUE must throw IllegalStateException.
    private void propertiesNaturalIntegersGeometric() {
        initialize("naturalIntegersGeometric()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() > 0 && x.getScale() != Integer.MAX_VALUE,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<Integer> is = rp.naturalIntegersGeometric();
            simpleTest(rp, is, i -> i >= 0);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 1,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.naturalIntegersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            try {
                rp.withScale(Integer.MAX_VALUE).naturalIntegersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // nonzeroIntegersGeometric(): values != 0 when scale >= 2; scale < 2 must throw.
    private void propertiesNonzeroIntegersGeometric() {
        initialize("nonzeroIntegersGeometric()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<Integer> is = rp.nonzeroIntegersGeometric();
            simpleTest(rp, is, i -> i != 0);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.nonzeroIntegersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // integersGeometric(): unrestricted values for scale in (0, Integer.MAX_VALUE);
    // out-of-range scales must throw IllegalStateException.
    private void propertiesIntegersGeometric() {
        initialize("integersGeometric()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() > 0 && x.getScale() != Integer.MAX_VALUE,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<Integer> is = rp.integersGeometric();
            simpleTest(rp, is, i -> true);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 1,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.integersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            try {
                rp.withScale(Integer.MAX_VALUE).integersGeometric();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeUpGeometric(int): values >= a when the scale exceeds a; the second clause
    // (p.b >= 1 || scale < Integer.MAX_VALUE + p.b) guards the generator's internal
    // offset against int overflow — NOTE(review): this mirrors the generator's own
    // precondition; confirm against RandomProvider.rangeUpGeometric.
    private void propertiesRangeUpGeometric() {
        initialize("rangeUpGeometric(int)");
        Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite(
                p -> p.a.getScale() > p.b && (p.b >= 1 || p.a.getScale() < Integer.MAX_VALUE + p.b),
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.integersGeometric())
        );
        for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) {
            Iterable<Integer> is = p.a.rangeUpGeometric(p.b);
            simpleTest(p.a, is, i -> i >= p.b);
        }
        Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite(
                p -> p.a.getScale() <= p.b || p.b < 1 && p.a.getScale() >= Integer.MAX_VALUE + p.b,
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.integersGeometric())
        );
        for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeUpGeometric(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeDownGeometric(int): values <= a when the scale is below a; mirrored overflow
    // guard. The failure loop continues past this chunk line.
    private void propertiesRangeDownGeometric() {
        initialize("rangeDownGeometric(int)");
        Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite(
                p -> p.a.getScale() < p.b && (p.b <= -1 || p.a.getScale() > p.b - Integer.MAX_VALUE),
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.integersGeometric())
        );
        for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) {
            Iterable<Integer> is = p.a.rangeDownGeometric(p.b);
            simpleTest(p.a, is, i -> i <= p.b);
        }
        Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite(
                p -> p.a.getScale() >= p.b || p.b > -1 && p.a.getScale() <= p.b - Integer.MAX_VALUE,
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.integersGeometric())
        );
        for (Pair<RandomProvider, Integer> p : take(LIMIT,
// Continuation of propertiesRangeDownGeometric(): the for-loop opens on the previous
// chunk line.
psFail)) {
            try {
                p.a.rangeDownGeometric(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
    }

    // positiveBigIntegers(): signum == 1 when scale >= 2; scale < 2 must throw.
    private void propertiesPositiveBigIntegers() {
        initialize("positiveBigIntegers()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BigInteger> is = rp.positiveBigIntegers();
            simpleTest(rp, is, i -> i.signum() == 1);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.positiveBigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) { }
        }
    }

    // negativeBigIntegers(): signum == -1 when scale >= 2; scale < 2 must throw.
    private void propertiesNegativeBigIntegers() {
        initialize("negativeBigIntegers()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BigInteger> is = rp.negativeBigIntegers();
            simpleTest(rp, is, i -> i.signum() == -1);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.negativeBigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // naturalBigIntegers(): signum != -1 for scale in (0, Integer.MAX_VALUE); scale < 1
    // or scale == Integer.MAX_VALUE must throw. The last failure loop continues past
    // this chunk line.
    private void propertiesNaturalBigIntegers() {
        initialize("naturalBigIntegers()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() > 0 && x.getScale() != Integer.MAX_VALUE,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BigInteger> is = rp.naturalBigIntegers();
            simpleTest(rp, is, i -> i.signum() != -1);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 1,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.naturalBigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        for
        (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            try {
                rp.withScale(Integer.MAX_VALUE).naturalBigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // nonzeroBigIntegers(): never BigInteger.ZERO when scale >= 2; scale < 2 must throw.
    private void propertiesNonzeroBigIntegers() {
        initialize("nonzeroBigIntegers()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BigInteger> is = rp.nonzeroBigIntegers();
            simpleTest(rp, is, i -> !i.equals(BigInteger.ZERO));
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.nonzeroBigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // bigIntegers(): unrestricted values for scale in (0, Integer.MAX_VALUE);
    // out-of-range scales must throw.
    private void propertiesBigIntegers() {
        initialize("bigIntegers()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() > 0 && x.getScale() != Integer.MAX_VALUE,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BigInteger> is = rp.bigIntegers();
            simpleTest(rp, is, i -> true);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 1,
                P.randomProvidersDefaultSecondaryAndTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.bigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            try {
                rp.withScale(Integer.MAX_VALUE).bigIntegers();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeUp(BigInteger): the scale must exceed the bound's minimum bit length
    // (0 for negative bounds). The conditional expression continues past this chunk line.
    private void propertiesRangeUp_BigInteger() {
        initialize("rangeUp(BigInteger)");
        Iterable<Pair<RandomProvider, BigInteger>> ps = filterInfinite(
                p -> {
                    int minBitLength = p.b.signum() == -1 ?
// Continuation of propertiesRangeUp_BigInteger(): the ternary's condition opens on the
// previous chunk line; a negative bound has minimum bit length 0.
0 : p.b.bitLength();
                    return p.a.getScale() > minBitLength &&
                            (minBitLength == 0 || p.a.getScale() != Integer.MAX_VALUE);
                },
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.bigIntegers())
        );
        for (Pair<RandomProvider, BigInteger> p : take(LIMIT, ps)) {
            Iterable<BigInteger> is = p.a.rangeUp(p.b);
            simpleTest(p.a, is, i -> ge(i, p.b));
        }
        // Complement of the condition above: these providers must throw.
        Iterable<Pair<RandomProvider, BigInteger>> psFail = filterInfinite(
                p -> {
                    int minBitLength = p.b.signum() == -1 ? 0 : p.b.bitLength();
                    return p.a.getScale() <= minBitLength ||
                            minBitLength != 0 && p.a.getScale() == Integer.MAX_VALUE;
                },
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.bigIntegers())
        );
        for (Pair<RandomProvider, BigInteger> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeUp(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeDown(BigInteger): mirror of rangeUp — the minimum bit length comes from the
    // negated bound, 0 for positive bounds. The psFail ternary continues on the next line.
    private void propertiesRangeDown_BigInteger() {
        initialize("rangeDown(BigInteger)");
        Iterable<Pair<RandomProvider, BigInteger>> ps = filterInfinite(
                p -> {
                    int minBitLength = p.b.signum() == 1 ? 0 : p.b.negate().bitLength();
                    return p.a.getScale() > minBitLength &&
                            (minBitLength == 0 || p.a.getScale() != Integer.MAX_VALUE);
                },
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.bigIntegers())
        );
        for (Pair<RandomProvider, BigInteger> p : take(LIMIT, ps)) {
            Iterable<BigInteger> is = p.a.rangeDown(p.b);
            simpleTest(p.a, is, i -> le(i, p.b));
        }
        Iterable<Pair<RandomProvider, BigInteger>> psFail = filterInfinite(
                p -> {
                    int minBitLength = p.b.signum() == 1 ?
                            0 : p.b.negate().bitLength();
                    return p.a.getScale() <= minBitLength ||
                            minBitLength != 0 && p.a.getScale() == Integer.MAX_VALUE;
                },
                P.pairs(P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.bigIntegers())
        );
        for (Pair<RandomProvider, BigInteger> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeDown(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
    }

    // positiveBinaryFractions(): signum == 1 when scale >= 2 and secondary scale > 0;
    // either precondition failing must throw IllegalStateException. rp.reset() is called
    // before simpleTest — presumably to rewind the provider's state; the earlier
    // primitive properties don't do this (see RandomProvider.reset).
    private void propertiesPositiveBinaryFractions() {
        initialize("positiveBinaryFractions()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2 && x.getSecondaryScale() > 0,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BinaryFraction> bfs = rp.positiveBinaryFractions();
            rp.reset();
            simpleTest(rp, bfs, bf -> bf.signum() == 1);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.positiveBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale());
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.positiveBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // negativeBinaryFractions(): mirror of the positive case (signum == -1).
    private void propertiesNegativeBinaryFractions() {
        initialize("negativeBinaryFractions()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2 && x.getSecondaryScale() > 0,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BinaryFraction> bfs = rp.negativeBinaryFractions();
            rp.reset();
            simpleTest(rp, bfs, bf -> bf.signum() == -1);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.negativeBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1,
                P.randomProvidersDefaultTertiaryScale());
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.negativeBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // nonzeroBinaryFractions(): never BinaryFraction.ZERO under the same preconditions.
    private void propertiesNonzeroBinaryFractions() {
        initialize("nonzeroBinaryFractions()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() >= 2 && x.getSecondaryScale() > 0,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BinaryFraction> bfs = rp.nonzeroBinaryFractions();
            rp.reset();
            // Reference comparison — assumes BinaryFraction.ZERO is the canonical
            // instance; confirm against BinaryFraction.
            simpleTest(rp, bfs, bf -> bf != BinaryFraction.ZERO);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 2,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.nonzeroBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale());
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.nonzeroBinaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // binaryFractions(): unrestricted values; here scale only needs to be > 0.
    private void propertiesBinaryFractions() {
        initialize("binaryFractions()");
        Iterable<RandomProvider> rps = filterInfinite(
                x -> x.getScale() > 0 && x.getSecondaryScale() > 0,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rps)) {
            Iterable<BinaryFraction> bfs = rp.binaryFractions();
            rp.reset();
            simpleTest(rp, bfs, bf -> true);
        }
        Iterable<RandomProvider> rpsFail = filterInfinite(
                x -> x.getScale() < 1,
                P.randomProvidersDefaultTertiaryScale()
        );
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.binaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
        rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale());
        for (RandomProvider rp : take(LIMIT, rpsFail)) {
            try {
                rp.binaryFractions();
                fail(rp);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeUp(BinaryFraction): body continues on the next chunk line.
    private void propertiesRangeUp_BinaryFraction() {
// Continuation of propertiesRangeUp_BinaryFraction(): the method header precedes this
// chunk. Values must be >= the bound; scale < 1 or secondary scale < 1 must throw.
initialize("rangeUp(BinaryFraction)");
        Iterable<Pair<RandomProvider, BinaryFraction>> ps = P.pairs(
                filterInfinite(
                        x -> x.getScale() > 0 && x.getSecondaryScale() > 0,
                        P.randomProvidersDefaultTertiaryScale()
                ),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, ps)) {
            Iterable<BinaryFraction> bfs = p.a.rangeUp(p.b);
            simpleTest(p.a, bfs, bf -> ge(bf, p.b));
        }
        Iterable<Pair<RandomProvider, BinaryFraction>> psFail = P.pairs(
                filterInfinite(x -> x.getScale() < 1, P.randomProvidersDefaultTertiaryScale()),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeUp(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
        psFail = P.pairs(
                filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeUp(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
    }

    // rangeDown(BinaryFraction): mirror of rangeUp — values must be <= the bound.
    private void propertiesRangeDown_BinaryFraction() {
        initialize("rangeDown(BinaryFraction)");
        Iterable<Pair<RandomProvider, BinaryFraction>> ps = P.pairs(
                filterInfinite(
                        x -> x.getScale() > 0 && x.getSecondaryScale() > 0,
                        P.randomProvidersDefaultTertiaryScale()
                ),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, ps)) {
            Iterable<BinaryFraction> bfs = p.a.rangeDown(p.b);
            simpleTest(p.a, bfs, bf -> le(bf, p.b));
        }
        Iterable<Pair<RandomProvider, BinaryFraction>> psFail = P.pairs(
                filterInfinite(x -> x.getScale() < 1, P.randomProvidersDefaultTertiaryScale()),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeDown(p.b);
                fail(p);
            } catch (IllegalStateException ignored) {}
        }
        psFail = P.pairs(
                filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()),
                P.binaryFractions()
        );
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, psFail)) {
            try {
                p.a.rangeDown(p.b);
                fail(p);
            } catch
            (IllegalStateException ignored) {}
        }
    }

    // range(BinaryFraction, BinaryFraction): values in [b, c]; empty iff b > c; singleton
    // range is constant; scale < 1 or scale == Integer.MAX_VALUE must throw.
    private void propertiesRange_BinaryFraction_BinaryFraction() {
        initialize("range(BinaryFraction, BinaryFraction)");
        Iterable<Triple<RandomProvider, BinaryFraction, BinaryFraction>> ts = P.triples(
                filterInfinite(
                        x -> x.getScale() > 0 && x.getScale() != Integer.MAX_VALUE,
                        P.randomProvidersDefaultSecondaryAndTertiaryScale()
                ),
                P.binaryFractions(),
                P.binaryFractions()
        );
        for (Triple<RandomProvider, BinaryFraction, BinaryFraction> t : take(LIMIT, ts)) {
            Iterable<BinaryFraction> bfs = t.a.range(t.b, t.c);
            simpleTest(t.a, bfs, bf -> ge(bf, t.b) && le(bf, t.c));
            assertEquals(t, gt(t.b, t.c), isEmpty(bfs));
        }
        Iterable<Pair<RandomProvider, BinaryFraction>> ps =
                P.pairs(P.randomProvidersDefault(), P.binaryFractions());
        for (Pair<RandomProvider, BinaryFraction> p : take(LIMIT, ps)) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
        }
        Iterable<Triple<RandomProvider, BinaryFraction, BinaryFraction>> tsFail = P.triples(
                filterInfinite(x -> x.getScale() < 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()),
                P.binaryFractions(),
                P.binaryFractions()
        );
        for (Triple<RandomProvider, BinaryFraction, BinaryFraction> t : take(LIMIT, tsFail)) {
            try {
                t.a.range(t.b, t.c);
                fail(t);
            } catch (IllegalStateException ignored) {}
        }
        tsFail = P.triples(P.randomProvidersDefault(), P.binaryFractions(), P.binaryFractions());
        for (Triple<RandomProvider, BinaryFraction, BinaryFraction> t : take(LIMIT, tsFail)) {
            try {
                t.a.withScale(Integer.MAX_VALUE).range(t.b, t.c);
                fail(t);
            } catch (IllegalStateException ignored) {}
        }
    }

    // positiveFloats() must yield only values > 0.
    private void propertiesPositiveFloats() {
        initialize("positiveFloats()");
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> fs = rp.positiveFloats();
            simpleTest(rp, fs, f -> f > 0);
        }
    }

    // negativeFloats() must yield only values < 0.
    private void propertiesNegativeFloats() {
        initialize("negativeFloats()");
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> fs = rp.negativeFloats();
            simpleTest(rp, fs, f -> f < 0);
        }
    }
// nonzeroFloats() must never yield 0 (positive or negative zero).
    private void propertiesNonzeroFloats() {
        initialize("nonzeroFloats()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.nonzeroFloats();
            simpleTest(provider, values, x -> x != 0);
        }
    }

    // floats() has no restriction; only generic generator properties apply.
    private void propertiesFloats() {
        initialize("floats()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.floats();
            simpleTest(provider, values, x -> true);
        }
    }

    // positiveDoubles() must yield only values > 0.
    private void propertiesPositiveDoubles() {
        initialize("positiveDoubles()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.positiveDoubles();
            simpleTest(provider, values, x -> x > 0);
        }
    }

    // negativeDoubles() must yield only values < 0.
    private void propertiesNegativeDoubles() {
        initialize("negativeDoubles()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.negativeDoubles();
            simpleTest(provider, values, x -> x < 0);
        }
    }

    // nonzeroDoubles() must never yield 0.
    private void propertiesNonzeroDoubles() {
        initialize("nonzeroDoubles()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.nonzeroDoubles();
            simpleTest(provider, values, x -> x != 0);
        }
    }

    // doubles() has no restriction.
    private void propertiesDoubles() {
        initialize("doubles()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.doubles();
            simpleTest(provider, values, x -> true);
        }
    }

    // positiveFloatsUniform(): positive and finite (no infinities or NaN).
    private void propertiesPositiveFloatsUniform() {
        initialize("positiveFloatsUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.positiveFloatsUniform();
            simpleTest(provider, values, x -> x > 0 && Float.isFinite(x));
        }
    }

    // negativeFloatsUniform(): negative and finite.
    private void propertiesNegativeFloatsUniform() {
        initialize("negativeFloatsUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.negativeFloatsUniform();
            simpleTest(provider, values, x -> x < 0 && Float.isFinite(x));
        }
    }

    // nonzeroFloatsUniform(): nonzero and finite.
    private void propertiesNonzeroFloatsUniform() {
        initialize("nonzeroFloatsUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.nonzeroFloatsUniform();
            simpleTest(provider, values, x -> x != 0 && Float.isFinite(x));
        }
    }

    // floatsUniform(): finite and never negative zero.
    private void propertiesFloatsUniform() {
        initialize("floatsUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Float> values = provider.floatsUniform();
            simpleTest(provider, values, x -> Float.isFinite(x) && !FloatingPointUtils.isNegativeZero(x));
        }
    }

    // positiveDoublesUniform(): positive and finite.
    private void propertiesPositiveDoublesUniform() {
        initialize("positiveDoublesUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.positiveDoublesUniform();
            simpleTest(provider, values, x -> x > 0 && Double.isFinite(x));
        }
    }

    // negativeDoublesUniform(): negative and finite.
    private void propertiesNegativeDoublesUniform() {
        initialize("negativeDoublesUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.negativeDoublesUniform();
            simpleTest(provider, values, x -> x < 0 && Double.isFinite(x));
        }
    }

    // nonzeroDoublesUniform(): nonzero and finite.
    private void propertiesNonzeroDoublesUniform() {
        initialize("nonzeroDoublesUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.nonzeroDoublesUniform();
            simpleTest(provider, values, x -> x != 0 && Double.isFinite(x));
        }
    }

    // doublesUniform(): finite and never negative zero.
    private void propertiesDoublesUniform() {
        initialize("doublesUniform()");
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            Iterable<Double> values = provider.doublesUniform();
            simpleTest(provider, values, x -> Double.isFinite(x) && !FloatingPointUtils.isNegativeZero(x));
        }
    }

    // rangeUp(float): values >= the (non-NaN) bound; rangeUp(+Infinity) can only yield
    // +Infinity, so the output must be constant.
    private void propertiesRangeUp_float() {
        initialize("rangeUp(float)");
        Iterable<Pair<RandomProvider, Float>> pairs = P.pairs(
                P.randomProvidersDefault(),
                filter(x -> !Float.isNaN(x), P.floats())
        );
        for (Pair<RandomProvider, Float> pair : take(LIMIT, pairs)) {
            Iterable<Float> values = pair.a.rangeUp(pair.b);
            simpleTest(pair.a, values, x -> x >= pair.b);
        }
        for (RandomProvider provider : take(LIMIT, P.randomProvidersDefault())) {
            aeqit(provider, TINY_LIMIT, provider.rangeUp(Float.POSITIVE_INFINITY), repeat(Float.POSITIVE_INFINITY));
        }
    }

    // rangeDown(float): body continues on the next chunk line.
    private void propertiesRangeDown_float() {
        initialize("rangeDown(float)");
// Continuation of propertiesRangeDown_float(): the method header precedes this chunk.
// Values must be <= the (non-NaN) bound; rangeDown(-Infinity) must be constant.
Iterable<Pair<RandomProvider, Float>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(f -> !Float.isNaN(f), P.floats())
        );
        for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) {
            Iterable<Float> fs = p.a.rangeDown(p.b);
            simpleTest(p.a, fs, f -> f <= p.b);
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            aeqit(rp, TINY_LIMIT, rp.rangeDown(Float.NEGATIVE_INFINITY), repeat(Float.NEGATIVE_INFINITY));
        }
    }

    // range(float, float): values in [b, c]; empty iff b > c; singleton range is constant;
    // a NaN endpoint must raise ArithmeticException.
    private void propertiesRange_float_float() {
        initialize("range(float, float)");
        Iterable<Triple<RandomProvider, Float, Float>> ts = P.triples(
                P.randomProvidersDefault(),
                filter(f -> !Float.isNaN(f), P.floats()),
                filter(f -> !Float.isNaN(f), P.floats())
        );
        for (Triple<RandomProvider, Float, Float> t : take(LIMIT, ts)) {
            Iterable<Float> fs = t.a.range(t.b, t.c);
            simpleTest(t.a, fs, f -> f >= t.b && f <= t.c);
            assertEquals(t, t.b > t.c, isEmpty(fs));
        }
        // Zero is excluded here — presumably because +0.0f/-0.0f compare equal but are
        // distinct values, which would break the constant-sequence check; confirm.
        Iterable<Pair<RandomProvider, Float>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(f -> !Float.isNaN(f) && f != 0.0f, P.floats())
        );
        for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
            try {
                p.a.range(Float.NaN, p.b);
                fail(p);
            } catch (ArithmeticException ignored) {}
            try {
                p.a.range(p.b, Float.NaN);
                fail(p);
            } catch (ArithmeticException ignored) {}
        }
    }

    // rangeUp(double): same contract as rangeUp(float) at double width.
    private void propertiesRangeUp_double() {
        initialize("rangeUp(double)");
        Iterable<Pair<RandomProvider, Double>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(d -> !Double.isNaN(d), P.doubles())
        );
        for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) {
            Iterable<Double> ds = p.a.rangeUp(p.b);
            simpleTest(p.a, ds, d -> d >= p.b);
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            aeqit(rp, TINY_LIMIT, rp.rangeUp(Double.POSITIVE_INFINITY), repeat(Double.POSITIVE_INFINITY));
        }
    }

    // rangeDown(double): same contract as rangeDown(float) at double width.
    private void propertiesRangeDown_double() {
        initialize("rangeDown(double)");
        Iterable<Pair<RandomProvider, Double>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(d -> !Double.isNaN(d),
                        P.doubles())
        );
        for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) {
            Iterable<Double> ds = p.a.rangeDown(p.b);
            simpleTest(p.a, ds, d -> d <= p.b);
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            aeqit(rp, TINY_LIMIT, rp.rangeDown(Double.NEGATIVE_INFINITY), repeat(Double.NEGATIVE_INFINITY));
        }
    }

    // range(double, double): same contract as range(float, float) at double width.
    private void propertiesRange_double_double() {
        initialize("range(double, double)");
        Iterable<Triple<RandomProvider, Double, Double>> ts = P.triples(
                P.randomProvidersDefault(),
                filter(d -> !Double.isNaN(d), P.doubles()),
                filter(d -> !Double.isNaN(d), P.doubles())
        );
        for (Triple<RandomProvider, Double, Double> t : take(LIMIT, ts)) {
            Iterable<Double> ds = t.a.range(t.b, t.c);
            simpleTest(t.a, ds, f -> f >= t.b && f <= t.c);
            assertEquals(t, t.b > t.c, isEmpty(ds));
        }
        Iterable<Pair<RandomProvider, Double>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(d -> !Double.isNaN(d) && d != 0.0, P.doubles())
        );
        for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) {
            aeqit(p, TINY_LIMIT, p.a.range(p.b, p.b), repeat(p.b));
            try {
                p.a.range(Double.NaN, p.b);
                fail(p);
            } catch (ArithmeticException ignored) {}
            try {
                p.a.range(p.b, Double.NaN);
                fail(p);
            } catch (ArithmeticException ignored) {}
        }
    }

    // rangeUpUniform(float): values >= the finite bound, finite, never negative zero.
    private void propertiesRangeUpUniform_float() {
        initialize("rangeUpUniform(float)");
        Iterable<Pair<RandomProvider, Float>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(Float::isFinite, P.floats())
        );
        for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) {
            Iterable<Float> fs = p.a.rangeUpUniform(p.b);
            simpleTest(p.a, fs, f -> f >= p.b && Float.isFinite(f) && !FloatingPointUtils.isNegativeZero(f));
        }
    }

    // rangeDownUniform(float): mirror of rangeUpUniform — the predicate continues past
    // this chunk line.
    private void propertiesRangeDownUniform_float() {
        initialize("rangeDownUniform(float)");
        Iterable<Pair<RandomProvider, Float>> ps = P.pairs(
                P.randomProvidersDefault(),
                filter(Float::isFinite, P.floats())
        );
        for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) {
            Iterable<Float> fs = p.a.rangeDownUniform(p.b);
            simpleTest(p.a, fs, f -> f <= p.b && Float.isFinite(f) &&
!FloatingPointUtils.isNegativeZero(f)); } } private void propertiesRangeUniform_float_float() { initialize("rangeUniform(float, float)"); Iterable<Triple<RandomProvider, Float, Float>> ts = P.triples( P.randomProvidersDefault(), filter(Float::isFinite, P.floats()), filter(Float::isFinite, P.floats()) ); for (Triple<RandomProvider, Float, Float> t : take(LIMIT, ts)) { Iterable<Float> fs = t.a.rangeUniform(t.b, t.c); simpleTest( t.a, fs, f -> f >= t.b && f <= t.c && Float.isFinite(f) && !FloatingPointUtils.isNegativeZero(f) ); assertEquals(t, t.b > t.c, isEmpty(fs)); } Iterable<Pair<RandomProvider, Float>> ps = P.pairs( P.randomProvidersDefault(), filter(f -> Float.isFinite(f) && !FloatingPointUtils.isNegativeZero(f), P.floats()) ); for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) { aeqit(p, TINY_LIMIT, p.a.rangeUniform(p.b, p.b), repeat(p.b)); } ps = P.pairs(P.randomProvidersDefault(), filter(Float::isFinite, P.floats())); for (Pair<RandomProvider, Float> p : take(LIMIT, ps)) { try { p.a.rangeUniform(Float.NaN, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(Float.NEGATIVE_INFINITY, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(Float.POSITIVE_INFINITY, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Float.NaN); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Float.NEGATIVE_INFINITY); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Float.POSITIVE_INFINITY); fail(p); } catch (ArithmeticException ignored) {} } } private void propertiesRangeUpUniform_double() { initialize("rangeUpUniform(double)"); Iterable<Pair<RandomProvider, Double>> ps = P.pairs( P.randomProvidersDefault(), filter(Double::isFinite, P.doubles()) ); for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) { Iterable<Double> ds = p.a.rangeUpUniform(p.b); simpleTest(p.a, ds, d -> d >= p.b && Double.isFinite(d) && 
!FloatingPointUtils.isNegativeZero(d)); } } private void propertiesRangeDownUniform_double() { initialize("rangeDownUniform(double)"); Iterable<Pair<RandomProvider, Double>> ps = P.pairs( P.randomProvidersDefault(), filter(Double::isFinite, P.doubles()) ); for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) { Iterable<Double> ds = p.a.rangeDownUniform(p.b); simpleTest(p.a, ds, d -> d <= p.b && Double.isFinite(d) && !FloatingPointUtils.isNegativeZero(d)); } } private void propertiesRangeUniform_double_double() { initialize("rangeUniform(double, double)"); Iterable<Triple<RandomProvider, Double, Double>> ts = P.triples( P.randomProvidersDefault(), filter(Double::isFinite, P.doubles()), filter(Double::isFinite, P.doubles()) ); for (Triple<RandomProvider, Double, Double> t : take(LIMIT, ts)) { Iterable<Double> ds = t.a.rangeUniform(t.b, t.c); simpleTest( t.a, ds, d -> d >= t.b && d <= t.c && Double.isFinite(d) && !FloatingPointUtils.isNegativeZero(d) ); assertEquals(t, t.b > t.c, isEmpty(ds)); } Iterable<Pair<RandomProvider, Double>> ps = P.pairs( P.randomProvidersDefault(), filter(d -> Double.isFinite(d) && !FloatingPointUtils.isNegativeZero(d), P.doubles()) ); for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) { aeqit(p, TINY_LIMIT, p.a.rangeUniform(p.b, p.b), repeat(p.b)); } ps = P.pairs(P.randomProvidersDefault(), filter(Double::isFinite, P.doubles())); for (Pair<RandomProvider, Double> p : take(LIMIT, ps)) { try { p.a.rangeUniform(Double.NaN, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(Double.NEGATIVE_INFINITY, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(Double.POSITIVE_INFINITY, p.b); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Double.NaN); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Double.NEGATIVE_INFINITY); fail(p); } catch (ArithmeticException ignored) {} try { p.a.rangeUniform(p.b, Double.POSITIVE_INFINITY); 
fail(p); } catch (ArithmeticException ignored) {} } } private void propertiesPositiveBigDecimals() { initialize("positiveBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.positiveBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> bd.signum() == 1); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.positiveBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.positiveBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesNegativeBigDecimals() { initialize("negativeBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.negativeBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> bd.signum() == -1); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.negativeBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.negativeBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesNonzeroBigDecimals() { initialize("nonzeroBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, 
P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.nonzeroBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> ne(bd, BigDecimal.ZERO)); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.nonzeroBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.nonzeroBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesBigDecimals() { initialize("bigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.bigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> true); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 1, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.bigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.bigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesPositiveCanonicalBigDecimals() { initialize("positiveCanonicalBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.positiveCanonicalBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> bd.signum() == 1 && BigDecimalUtils.isCanonical(bd)); } Iterable<RandomProvider> rpsFail = 
filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.positiveCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.positiveCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesNegativeCanonicalBigDecimals() { initialize("negativeCanonicalBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.negativeCanonicalBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> bd.signum() == -1 && BigDecimalUtils.isCanonical(bd)); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.negativeCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.negativeCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesNonzeroCanonicalBigDecimals() { initialize("nonzeroCanonicalBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.nonzeroCanonicalBigDecimals(); rp.reset(); simpleTest(rp, bds, bd -> ne(bd, BigDecimal.ZERO) && BigDecimalUtils.isCanonical(bd)); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for 
(RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.nonzeroCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } rpsFail = filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.nonzeroCanonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesCanonicalBigDecimals() { initialize("canonicalBigDecimals()"); Iterable<RandomProvider> rps = filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { Iterable<BigDecimal> bds = rp.canonicalBigDecimals(); rp.reset(); simpleTest(rp, bds, BigDecimalUtils::isCanonical); } Iterable<RandomProvider> rpsFail = filterInfinite( x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.canonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.canonicalBigDecimals(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesRangeUp_BigDecimal() { initialize("rangeUp(BigDecimal)"); Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { Iterable<BigDecimal> bds = p.a.rangeUp(p.b); simpleTest(p.a, bds, bd -> ge(bd, p.b)); } Iterable<Pair<RandomProvider, BigDecimal>> psFail = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeUp(p.b); fail(p); } catch (IllegalStateException ignored) {} } psFail = P.pairs( 
filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeUp(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesRangeDown_BigDecimal() { initialize("rangeDown(BigDecimal)"); Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { Iterable<BigDecimal> bds = p.a.rangeDown(p.b); simpleTest(p.a, bds, bd -> le(bd, p.b)); } Iterable<Pair<RandomProvider, BigDecimal>> psFail = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeDown(p.b); fail(p); } catch (IllegalStateException ignored) {} } psFail = P.pairs( filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeDown(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesRange_BigDecimal_BigDecimal() { initialize("range(BigDecimal, BigDecimal)"); Iterable<Triple<RandomProvider, BigDecimal, BigDecimal>> ts = P.triples( filterInfinite( x -> x.getScale() > 0 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, ts)) { Iterable<BigDecimal> bds = t.a.range(t.b, t.c); simpleTest(t.a, bds, bd -> ge(bd, t.b) && le(bd, t.c)); assertEquals(t, gt(t.b, t.c), isEmpty(bds)); } Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() > 0 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for 
(Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { assertTrue(p, all(bd -> eq(bd, p.b), take(TINY_LIMIT, p.a.range(p.b, p.b)))); } Iterable<Triple<RandomProvider, BigDecimal, BigDecimal>> tsFail = P.triples( filterInfinite(x -> x.getScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, tsFail)) { try { t.a.range(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = P.triples( filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, tsFail)) { try { t.a.range(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesRangeUpCanonical_BigDecimal() { initialize("rangeUpCanonical(BigDecimal)"); Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { Iterable<BigDecimal> bds = p.a.rangeUpCanonical(p.b); simpleTest(p.a, bds, bd -> ge(bd, p.b) && BigDecimalUtils.isCanonical(bd)); } Iterable<Pair<RandomProvider, BigDecimal>> psFail = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeUpCanonical(p.b); fail(p); } catch (IllegalStateException ignored) {} } psFail = P.pairs( filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeUpCanonical(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesRangeDownCanonical_BigDecimal() { initialize("rangeDownCanonical(BigDecimal)"); 
Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() >= 2 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { Iterable<BigDecimal> bds = p.a.rangeDownCanonical(p.b); simpleTest(p.a, bds, bd -> le(bd, p.b) && BigDecimalUtils.isCanonical(bd)); } Iterable<Pair<RandomProvider, BigDecimal>> psFail = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeDownCanonical(p.b); fail(p); } catch (IllegalStateException ignored) {} } psFail = P.pairs( filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, psFail)) { try { p.a.rangeDownCanonical(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesRangeCanonical_BigDecimal_BigDecimal() { initialize("rangeCanonical(BigDecimal, BigDecimal)"); Iterable<Triple<RandomProvider, BigDecimal, BigDecimal>> ts = P.triples( filterInfinite( x -> x.getScale() > 0 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, ts)) { Iterable<BigDecimal> bds = t.a.rangeCanonical(t.b, t.c); simpleTest(t.a, bds, bd -> ge(bd, t.b) && le(bd, t.c) && BigDecimalUtils.isCanonical(bd)); assertEquals(t, gt(t.b, t.c), isEmpty(bds)); } Iterable<Pair<RandomProvider, BigDecimal>> ps = P.pairs( filterInfinite( x -> x.getScale() > 0 && x.getSecondaryScale() > 0, P.randomProvidersDefaultTertiaryScale() ), P.bigDecimals() ); for (Pair<RandomProvider, BigDecimal> p : take(LIMIT, ps)) { aeqit(p, TINY_LIMIT, p.a.rangeCanonical(p.b, p.b), repeat(BigDecimalUtils.canonicalize(p.b))); } Iterable<Triple<RandomProvider, BigDecimal, BigDecimal>> tsFail = 
P.triples( filterInfinite(x -> x.getScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, tsFail)) { try { t.a.rangeCanonical(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = P.triples( filterInfinite(x -> x.getSecondaryScale() < 1, P.randomProvidersDefaultTertiaryScale()), P.bigDecimals(), P.bigDecimals() ); for (Triple<RandomProvider, BigDecimal, BigDecimal> t : take(LIMIT, tsFail)) { try { t.a.rangeCanonical(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesWithElement() { initialize("withElement(T, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = P.triples( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withNull(P.integersGeometric()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { List<Integer> withElement = toList(take(TINY_LIMIT, t.a.withElement(t.b, t.c))); testNoRemove(TINY_LIMIT, t.a.withElement(t.b, t.c)); List<Integer> filteredResult = toList(filter(x -> !Objects.equals(x, t.b), withElement)); assertEquals( t, filteredResult, toList(take(filteredResult.size(), filterInfinite(x -> !Objects.equals(x, t.b), t.c))) ); } Iterable<Triple<RandomProvider, Integer, List<Integer>>> tsFail1 = P.triples( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withNull(P.integersGeometric()), P.withScale(4).lists(P.withNull(P.integersGeometric())) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, tsFail1)) { try { toList(t.a.withElement(t.b, t.c)); fail(t); } catch (IllegalArgumentException ignored) {} } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail2 = P.triples( filterInfinite(x -> x.getScale() < 2, 
P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withNull(P.integersGeometric()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail2)) { try { t.a.withElement(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesWithNull() { initialize("withNull(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { List<Integer> withNull = toList(take(TINY_LIMIT, p.a.withNull(p.b))); testNoRemove(TINY_LIMIT, p.a.withNull(p.b)); List<Integer> filteredResult = toList(filter(x -> x != null, withNull)); assertEquals(p, filteredResult, toList(take(filteredResult.size(), p.b))); } Iterable<Pair<RandomProvider, List<Integer>>> psFail1 = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withScale(4).lists(P.integersGeometric()) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, psFail1)) { try { toList(p.a.withNull(p.b)); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Pair<RandomProvider, Iterable<Integer>>> psFail2 = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.integers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, psFail2)) { try { p.a.withNull(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesOptionals() { initialize("optionals(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { 
List<Optional<Integer>> os = toList(take(TINY_LIMIT, p.a.optionals(p.b))); testNoRemove(TINY_LIMIT, p.a.optionals(p.b)); List<Integer> filteredResult = toList(optionalFilter(os)); assertEquals(p, filteredResult, toList(take(filteredResult.size(), p.b))); } Iterable<Pair<RandomProvider, List<Integer>>> psFail = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withScale(4).lists(P.integersGeometric()) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, psFail)) { try { toList(p.a.optionals(p.b)); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Pair<RandomProvider, Iterable<Integer>>> psFail2 = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.integers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, psFail2)) { try { p.a.optionals(p.b); fail(p); } catch (IllegalStateException ignored) {} } psFail2 = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, psFail2)) { try { toList(p.a.optionals(p.b)); fail(p); } catch (NullPointerException ignored) {} } } private void propertiesNullableOptionals() { initialize("nullableOptionals(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { List<NullableOptional<Integer>> os = toList(take(TINY_LIMIT, p.a.nullableOptionals(p.b))); testNoRemove(TINY_LIMIT, p.a.nullableOptionals(p.b)); List<Integer> filteredResult = toList(nullableOptionalFilter(os)); assertEquals(p, filteredResult, toList(take(filteredResult.size(), p.b))); } Iterable<Pair<RandomProvider, 
List<Integer>>> psFail = P.pairs( filterInfinite(x -> x.getScale() >= 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withScale(4).lists(P.withNull(P.integersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, psFail)) { try { toList(p.a.nullableOptionals(p.b)); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Pair<RandomProvider, Iterable<Integer>>> psFail2 = P.pairs( filterInfinite(x -> x.getScale() < 2, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, psFail2)) { try { p.a.nullableOptionals(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesDependentPairsInfinite() { initialize("dependentPairsInfinite(Iterable<A>, Function<A, Iterable<B>>)"); RandomProvider RP = RandomProvider.example(); IterableProvider PS = P.withScale(4); Function<List<Integer>, Iterable<Map<Integer, List<Integer>>>> f = xs -> filterInfinite( m -> !all(p -> isEmpty(p.b), fromMap(m)), PS.maps(xs, map(IterableUtils::unrepeat, PS.listsAtLeast(1, P.integersGeometric()))) ); Function< Pair<List<Integer>, Map<Integer, List<Integer>>>, Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>> > g = p -> { Iterable<Pair<Integer, List<Integer>>> values = fromMap(p.b); Map<Integer, Iterable<Integer>> transformedValues = toMap( map(e -> new Pair<>(e.a, cycle(e.b)), values) ); return new Pair<>(cycle(p.a), new FiniteDomainFunction<>(transformedValues)); }; Iterable<Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>>> ps = map( g, nub( P.dependentPairsInfinite( nub(map(IterableUtils::unrepeat, PS.listsAtLeast(1, P.integersGeometric()))), f ) ) ); for (Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>> p : take(LIMIT, ps)) { Iterable<Pair<Integer, Integer>> pairs = RP.dependentPairsInfinite(p.a, p.b); testNoRemove(TINY_LIMIT, pairs); 
assertTrue(p, all(q -> q != null, take(TINY_LIMIT, pairs))); } Iterable<Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>>> psFail = map( p -> p.b, P.dependentPairs( filterInfinite(r -> r.b.domainSize() != 0, ps), q -> map(k -> new Pair<>(q.a, q.b.set(k, null)), P.uniformSample(toList(q.b.domain()))) ) ); for (Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>> p : take(LIMIT, psFail)) { try { toList(RP.dependentPairsInfinite(p.a, p.b)); fail(p); } catch (NullPointerException ignored) {} } f = xs -> { if (xs.isEmpty()) { return repeat(new HashMap<>()); } else { return filterInfinite( m -> !all(p -> isEmpty(p.b), fromMap(m)), PS.maps(xs, PS.lists(P.integersGeometric())) ); } }; g = p -> { Iterable<Pair<Integer, List<Integer>>> values = fromMap(p.b); Map<Integer, Iterable<Integer>> transformedValues = toMap( map(e -> new Pair<>(e.a, (Iterable<Integer>) e.b), values) ); return new Pair<>(cycle(p.a), new FiniteDomainFunction<>(transformedValues)); }; Iterable<Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>>> psFail2 = map( g, nub(P.dependentPairsInfinite(nub(map(IterableUtils::unrepeat, PS.lists(P.integersGeometric()))), f)) ); for (Pair<Iterable<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>> p : take(LIMIT, psFail2)) { try { toList(RP.dependentPairsInfinite(p.a, p.b)); fail(p); } catch (NoSuchElementException ignored) {} } Iterable<Pair<List<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>>> psFail3 = map( p -> new Pair<>( p.a, new FiniteDomainFunction<>(toMap(map(e -> new Pair<>(e.a, cycle(e.b)), fromMap(p.b)))) ), nub(P.dependentPairsInfinite(PS.listsAtLeast(1, P.integersGeometric()), f)) ); for (Pair<List<Integer>, FiniteDomainFunction<Integer, Iterable<Integer>>> p : take(LIMIT, psFail3)) { try { toList(RP.dependentPairsInfinite(p.a, p.b)); fail(p); } catch (NoSuchElementException ignored) {} } } private void propertiesShuffle() { initialize("shuffle(List<T>)"); 
Iterable<Pair<RandomProvider, List<Integer>>> providerListPairs = P.pairs(
                P.randomProvidersDefault(),
                P.withScale(4).lists(P.withNull(P.naturalIntegersGeometric()))
        );
        for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, providerListPairs)) {
            // Shuffle a mutable copy, then check it is a permutation of the original
            // by sorting both sides under the same list-based comparator.
            List<Integer> mutableCopy = toList(p.b);
            p.a.shuffle(mutableCopy);
            Comparator<Integer> cmp = new ListBasedComparator<>(p.b);
            assertEquals(p, sort(cmp, p.b), sort(cmp, mutableCopy));
        }
    }

    // Properties of RandomProvider.permutationsFinite(List<T>): every emitted list is a
    // permutation of the input; empty and singleton inputs yield a constant stream.
    private void propertiesPermutationsFinite() {
        initialize("permutationsFinite(List<T>)");
        Iterable<Pair<RandomProvider, List<Integer>>> pairs = P.pairs(
                P.randomProvidersDefault(),
                P.withScale(4).lists(P.withNull(P.naturalIntegersGeometric()))
        );
        for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, pairs)) {
            Comparator<Integer> cmp = new ListBasedComparator<>(p.b);
            List<Integer> canonicalOrder = sort(cmp, p.b);
            simpleTest(p.a, p.a.permutationsFinite(p.b), perm -> sort(cmp, perm).equals(canonicalOrder));
        }
        // The empty list has exactly one permutation.
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            List<Integer> empty = Collections.emptyList();
            aeqit(rp, TINY_LIMIT, rp.permutationsFinite(empty), repeat(empty));
        }
        // A singleton list also has exactly one permutation.
        Iterable<Pair<RandomProvider, Integer>> singletonPairs = P.pairs(
                P.randomProvidersDefault(),
                P.withNull(P.integersGeometric())
        );
        for (Pair<RandomProvider, Integer> p : take(LIMIT, singletonPairs)) {
            List<Integer> singleton = Collections.singletonList(p.b);
            aeqit(p, TINY_LIMIT, p.a.permutationsFinite(singleton), repeat(singleton));
        }
    }

    // Properties of RandomProvider.stringPermutations(String): every emitted string is a
    // permutation of the input; "" and one-character strings yield a constant stream.
    private void propertiesStringPermutations() {
        initialize("stringPermutations(String)");
        Iterable<Pair<RandomProvider, String>> pairs =
                P.pairs(P.randomProvidersDefault(), P.withScale(4).strings());
        for (Pair<RandomProvider, String> p : take(LIMIT, pairs)) {
            Comparator<Character> cmp = new ListBasedComparator<>(toList(p.b));
            String canonicalOrder = sort(cmp, p.b);
            simpleTest(p.a, p.a.stringPermutations(p.b), perm -> sort(cmp, perm).equals(canonicalOrder));
        }
        for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) {
            aeqit(rp, TINY_LIMIT, rp.stringPermutations(""), repeat(""));
        }
        for
(Pair<RandomProvider, Character> p : take(LIMIT, P.pairs(P.randomProvidersDefault(), P.characters()))) { String s = Character.toString(p.b); aeqit(p, TINY_LIMIT, p.a.stringPermutations(s), repeat(s)); } } private void propertiesPrefixPermutations() { initialize("prefixPermutations(Iterable<T>)"); Iterable<Pair<RandomProvider, List<Integer>>> ps = P.pairs( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withScale(4).lists(P.withNull(P.naturalIntegersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps)) { Comparator<Integer> comparator = new ListBasedComparator<>(p.b); List<Integer> sorted = sort(comparator, p.b); simpleTest(p.a, p.a.prefixPermutations(p.b), xs -> sort(comparator, toList(xs)).equals(sorted)); } Iterable<RandomProvider> rps = filterInfinite( rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { List<Integer> xs = Collections.emptyList(); aeqit(rp, TINY_LIMIT, map(IterableUtils::toList, rp.prefixPermutations(xs)), repeat(xs)); } Iterable<Pair<RandomProvider, Integer>> ps2 = P.pairs( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withNull(P.integersGeometric()) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps2)) { List<Integer> xs = Collections.singletonList(p.b); aeqit(p, TINY_LIMIT, map(IterableUtils::toList, p.a.prefixPermutations(xs)), repeat(xs)); } Iterable<Pair<RandomProvider, Iterable<Integer>>> ps3 = P.pairs( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps3)) { simpleTest(p.a, p.a.prefixPermutations(p.b), xs -> true); for (Iterable<Integer> xs : take(TINY_LIMIT, p.a.prefixPermutations(p.b))) { simpleTestWithNulls(p.a, xs, x -> true); } } Iterable<Pair<RandomProvider, 
List<Integer>>> psFail = P.pairs( filterInfinite(rp -> rp.getScale() < 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.withScale(4).lists(P.withNull(P.naturalIntegersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, psFail)) { try { toList(p.a.prefixPermutations(p.b)); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesStrings_int_String() { initialize("strings(int, String)"); Iterable<Triple<RandomProvider, String, Integer>> ts = map( p -> new Triple<>(p.a, p.b.a, p.b.b), P.pairs( P.randomProvidersDefault(), P.pairsLogarithmicOrder( P.withScale(4).stringsAtLeast(1), P.withScale(4).naturalIntegersGeometric() ) ) ); for (Triple<RandomProvider, String, Integer> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.strings(t.c, t.b), s -> s.length() == t.c && isSubsetOf(s, t.b)); } Iterable<Pair<RandomProvider, String>> ps = P.pairs( P.randomProvidersDefault(), P.withScale(4).stringsAtLeast(1) ); for (Pair<RandomProvider, String> p : take(LIMIT, ps)) { aeqit(p, TINY_LIMIT, p.a.strings(0, p.b), repeat("")); } Iterable<Triple<RandomProvider, Character, Integer>> ts2 = map( p -> new Triple<>(p.a, p.b.a, p.b.b), P.pairs( P.randomProvidersDefault(), P.pairsLogarithmicOrder(P.characters(), P.withScale(4).naturalIntegersGeometric()) ) ); for (Triple<RandomProvider, Character, Integer> t : take(LIMIT, ts2)) { aeqit(t, TINY_LIMIT, t.a.strings(t.c, Character.toString(t.b)), repeat(replicate(t.c, t.b.charValue()))); } Iterable<Pair<RandomProvider, Integer>> psFail = P.pairs(P.randomProvidersDefault(), P.positiveIntegers()); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.strings(p.b, ""); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Triple<RandomProvider, String, Integer>> tsFail = P.triples( P.randomProvidersDefault(), P.stringsAtLeast(1), P.negativeIntegers() ); for (Triple<RandomProvider, String, Integer> t : take(LIMIT, tsFail)) { try { t.a.strings(t.c, t.b); fail(t); } catch 
(IllegalArgumentException ignored) {} } } private void propertiesStrings_int() { initialize("strings(int)"); Iterable<Pair<RandomProvider, Integer>> ps = P.pairsLogarithmicOrder( P.randomProvidersDefault(), P.withScale(4).naturalIntegersGeometric() ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.strings(p.b), s -> s.length() == p.b); } Iterable<Pair<RandomProvider, Integer>> psFail = P.pairs(P.randomProvidersDefault(), P.negativeIntegers()); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.strings(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } } private void propertiesLists() { initialize("lists(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.lists(p.b), is -> true); } Iterable<Pair<RandomProvider, List<Integer>>> ps2 = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).listsAtLeast(1, P.withNull(P.withScale(4).integersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { simpleTest(p.a, p.a.lists(p.a.uniformSample(p.b)), is -> isSubsetOf(is, p.b)); } for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { try { toList(p.a.lists(p.b)); fail(p); } catch (NoSuchElementException ignored) {} } } private void propertiesStrings_String() { initialize("strings(String)"); Iterable<Pair<RandomProvider, String>> ps = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).stringsAtLeast(1) ); for (Pair<RandomProvider, String> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.strings(p.b), s -> isSubsetOf(s, p.b)); } 
Iterable<RandomProvider> rpsFail = filterInfinite( s -> s.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.strings(""); fail(rp); } catch (IllegalArgumentException ignored) {} } } private void propertiesStrings() { initialize("strings()"); Iterable<RandomProvider> rpsFail = filterInfinite( s -> s.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { simpleTest(rp, rp.strings(), s -> true); } } private void propertiesListsAtLeast() { initialize("listsAtLeast(int, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.listsAtLeast(t.b, t.c), is -> is.size() >= t.b); } Iterable<Triple<RandomProvider, Integer, List<Integer>>> ts2 = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).listsAtLeast(1, P.withNull(P.withScale(4).integersGeometric())) ) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, ts2)) { simpleTest(t.a, t.a.listsAtLeast(t.b, t.a.uniformSample(t.c)), is -> isSubsetOf(is, t.c)); } for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, ts2)) { try { toList(t.a.listsAtLeast(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), 
P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.listsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = filterInfinite( t -> t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.listsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringsAtLeast_int_String() { initialize("stringsAtLeast(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.stringsAtLeast(t.b, t.c), s -> s.length() >= t.b && isSubsetOf(s, t.c)); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.stringsAtLeast(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Triple<RandomProvider, Integer, String>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = filterInfinite( t -> 
t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringsAtLeast_int() { initialize("stringsAtLeast(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringsAtLeast(p.b), s -> s.length() >= p.b); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringsAtLeast(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } psFail = filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringsAtLeast(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesDistinctStrings_int_String() { initialize("distinctStrings(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, 
String> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.distinctStrings(t.b, t.c), s -> s.length() == t.b && isSubsetOf(s, t.c) && unique(s) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.distinctStrings(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } if (P instanceof ExhaustiveProvider) { Iterable<Triple<RandomProvider, Integer, String>> tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b < 0 ? 0 : p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.distinctStrings(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } } } private void propertiesDistinctStrings_int() { initialize("distinctStrings(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), filterInfinite(i -> i <= (1 << 16), P.withScale(4).naturalIntegersGeometric()) ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.distinctStrings(p.b), s -> s.length() >= p.b && unique(s)); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.distinctStrings(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } } 
// Tests distinctLists(Iterable<T>): random lists drawn from a source iterable,
// where every generated list contains no repeated elements.
private void propertiesDistinctLists() {
    initialize("distinctLists(Iterable<T>)");
    // Infinite (repeating) input: every generated list must be duplicate-free.
    Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs(
            filterInfinite(
                    rp -> rp.getScale() > 0,
                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
            ),
            P.withScale(4).repeatingIterables(P.withNull(P.naturalIntegers()))
    );
    for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) {
        simpleTest(p.a, p.a.distinctLists(p.b), IterableUtils::unique);
    }
    // Finite sample via uniformSample: results must also be subsets of the
    // sampled list.
    Iterable<Pair<RandomProvider, List<Integer>>> ps2 = P.pairs(
            filterInfinite(
                    rp -> rp.getScale() > 0,
                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
            ),
            P.withScale(4).listsAtLeast(1, P.withNull(P.withScale(4).integersGeometric()))
    );
    for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) {
        simpleTest(p.a, p.a.distinctLists(p.a.uniformSample(p.b)), is -> isSubsetOf(is, p.b) && unique(is));
    }
    // Passing a finite List directly (instead of an infinite Iterable) fails.
    for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) {
        try {
            toList(p.a.distinctLists(p.b));
            fail(p);
        } catch (NoSuchElementException ignored) {}
    }
}

// Tests distinctStrings(String): random strings over a given alphabet with no
// repeated characters.
private void propertiesDistinctStrings_String() {
    initialize("distinctStrings(String)");
    Iterable<Pair<RandomProvider, String>> ps = P.pairs(
            filterInfinite(
                    rp -> rp.getScale() > 0,
                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
            ),
            P.withScale(4).stringsAtLeast(1)
    );
    for (Pair<RandomProvider, String> p : take(LIMIT, ps)) {
        simpleTest(p.a, p.a.distinctStrings(p.b), s -> isSubsetOf(s, p.b) && unique(s));
    }
    // An empty alphabet is rejected.
    // NOTE(review): this failure case uses randomProvidersDefaultTertiaryScale(),
    // while most sibling failure cases use ...SecondaryAndTertiaryScale() —
    // confirm the asymmetry is intentional.
    Iterable<RandomProvider> rpsFail = filterInfinite(
            s -> s.getScale() > 0,
            P.withScale(4).randomProvidersDefaultTertiaryScale()
    );
    for (RandomProvider rp : take(LIMIT, rpsFail)) {
        try {
            rp.distinctStrings("");
            fail(rp);
        } catch (IllegalArgumentException ignored) {}
    }
}

// Tests distinctStrings(): random strings with no repeated characters, drawn
// from the full character range.
private void propertiesDistinctStrings() {
    initialize("distinctStrings()");
    Iterable<RandomProvider> rpsFail = filterInfinite(
            s -> s.getScale() > 0,
            P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
    );
    for (RandomProvider rp : take(LIMIT,
rpsFail)) { simpleTest(rp, rp.distinctStrings(), IterableUtils::unique); } } private void propertiesDistinctListsAtLeast() { initialize("distinctListsAtLeast(int, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.distinctListsAtLeast(t.b, t.c), is -> is.size() >= t.b && unique(is)); } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts2 = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).rangeUpGeometric(2) ) ), p -> P.withScale(p.a.getScale()).repeatingIterablesDistinctAtLeast( p.b, P.withNull(P.naturalIntegersGeometric()) ) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts2)) { simpleTest(t.a, t.a.distinctListsAtLeast(t.b, t.c), is -> isSubsetOf(is, t.c) && unique(is)); } Iterable<Triple<RandomProvider, Integer, List<Integer>>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).listsAtLeast(1, P.withNull(P.withScale(4).integersGeometric())) ) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.distinctListsAtLeast(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail2 = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), 
P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail2)) { try { t.a.distinctListsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail2 = filterInfinite( t -> t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail2)) { try { t.a.distinctListsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesDistinctStringsAtLeast_int_String() { initialize("distinctStringsAtLeast(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.distinctStringsAtLeast(t.b, t.c), s -> s.length() >= t.b && isSubsetOf(s, t.c) && unique(s) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.distinctStringsAtLeast(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } if (P instanceof ExhaustiveProvider) { Iterable<Triple<RandomProvider, Integer, String>> tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( 
P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b < 0 ? 0 : p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.distinctStringsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(MEDIUM_LIMIT, tsFail)) { try { t.a.distinctStringsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } } private void propertiesDistinctStringsAtLeast_int() { initialize("distinctStringsAtLeast(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.distinctStringsAtLeast(p.b), s -> s.length() >= p.b && unique(s)); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.distinctStringsAtLeast(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } psFail = filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, 
Integer> p : take(LIMIT, psFail)) { try { p.a.distinctStringsAtLeast(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesStringBags_int_String() { initialize("stringBags(int, String)"); Iterable<Triple<RandomProvider, String, Integer>> ts = map( p -> new Triple<>(p.a, p.b.a, p.b.b), P.pairs( P.randomProvidersDefault(), P.pairsLogarithmicOrder( P.withScale(4).stringsAtLeast(1), P.withScale(4).naturalIntegersGeometric() ) ) ); for (Triple<RandomProvider, String, Integer> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.stringBags(t.c, t.b), s -> s.length() == t.c && isSubsetOf(s, t.b) && weaklyIncreasing(toList(s)) ); } Iterable<Pair<RandomProvider, String>> ps = P.pairs( P.randomProvidersDefault(), P.withScale(4).stringsAtLeast(1) ); for (Pair<RandomProvider, String> p : take(LIMIT, ps)) { aeqit(p, TINY_LIMIT, p.a.stringBags(0, p.b), repeat("")); } Iterable<Triple<RandomProvider, Character, Integer>> ts2 = map( p -> new Triple<>(p.a, p.b.a, p.b.b), P.pairs( P.randomProvidersDefault(), P.pairsLogarithmicOrder(P.characters(), P.withScale(4).naturalIntegersGeometric()) ) ); for (Triple<RandomProvider, Character, Integer> t : take(LIMIT, ts2)) { aeqit( t, TINY_LIMIT, t.a.stringBags(t.c, Character.toString(t.b)), repeat(replicate(t.c, t.b.charValue())) ); } Iterable<Pair<RandomProvider, Integer>> psFail = P.pairs(P.randomProvidersDefault(), P.positiveIntegers()); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringBags(p.b, ""); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Triple<RandomProvider, String, Integer>> tsFail = P.triples( P.randomProvidersDefault(), P.stringsAtLeast(1), P.negativeIntegers() ); for (Triple<RandomProvider, String, Integer> t : take(LIMIT, tsFail)) { try { t.a.stringBags(t.c, t.b); fail(t); } catch (IllegalArgumentException ignored) {} } } private void propertiesStringBags_int() { initialize("stringBags(int)"); Iterable<Pair<RandomProvider, Integer>> ps = 
P.pairsLogarithmicOrder( P.randomProvidersDefault(), P.withScale(4).naturalIntegersGeometric() ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringBags(p.b), s -> s.length() == p.b && weaklyIncreasing(toList(s))); } Iterable<Pair<RandomProvider, Integer>> psFail = P.pairs(P.randomProvidersDefault(), P.negativeIntegers()); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringBags(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } } private void propertiesBags() { initialize("bags(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.prefixPermutations(EP.naturalIntegers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.bags(p.b), IterableUtils::weaklyIncreasing); } Iterable<Pair<RandomProvider, List<Integer>>> ps2 = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).listsAtLeast(1, P.withScale(4).integersGeometric()) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { simpleTest(p.a, p.a.bags(p.a.uniformSample(p.b)), is -> isSubsetOf(is, p.b) && weaklyIncreasing(is)); } for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { try { toList(p.a.bags(p.b)); fail(p); } catch (NoSuchElementException ignored) {} } Iterable<Pair<RandomProvider, Iterable<Integer>>> psFail = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.prefixPermutations(P.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, psFail)) { try { toList(p.a.bags(p.b)); fail(p); } catch (NullPointerException ignored) {} } } private void propertiesStringBags_String() { initialize("stringBags(String)"); Iterable<Pair<RandomProvider, String>> ps = 
P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).stringsAtLeast(1) ); for (Pair<RandomProvider, String> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringBags(p.b), s -> isSubsetOf(s, p.b) && weaklyIncreasing(toList(s))); } Iterable<RandomProvider> rpsFail = filterInfinite( s -> s.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.stringBags(""); fail(rp); } catch (IllegalArgumentException ignored) {} } } private void propertiesStringBags() { initialize("stringBags()"); Iterable<RandomProvider> rpsFail = filterInfinite( s -> s.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { simpleTest(rp, rp.stringBags(), s -> weaklyIncreasing(toList(s))); } } private void propertiesBagsAtLeast() { initialize("bagsAtLeast(int, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.bagsAtLeast(t.b, t.c), is -> is.size() >= t.b && weaklyIncreasing(is)); } Iterable<Triple<RandomProvider, Integer, List<Integer>>> ts2 = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).listsAtLeast(1, P.withScale(4).integersGeometric()) ) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, ts2)) { simpleTest(t.a, t.a.bagsAtLeast(t.b, t.a.uniformSample(t.c)), is -> isSubsetOf(is, t.c)); } for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, ts2)) { try { 
toList(t.a.bagsAtLeast(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.bagsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = filterInfinite( t -> t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.bagsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(P.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.bagsAtLeast(t.b, t.c)); fail(t); } catch (NullPointerException ignored) {} } } private void propertiesStringBagsAtLeast_int_String() { initialize("stringBagsAtLeast(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.stringBagsAtLeast(t.b, t.c), s -> s.length() >= t.b && isSubsetOf(s, t.c) && weaklyIncreasing(toList(s)) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, 
P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.stringBagsAtLeast(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } Iterable<Triple<RandomProvider, Integer, String>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringBagsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = filterInfinite( t -> t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringBagsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringBagsAtLeast_int() { initialize("stringBagsAtLeast(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest( p.a, p.a.stringBagsAtLeast(p.b), s -> s.length() >= p.b && weaklyIncreasing(toList(s)) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringBagsAtLeast(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } psFail = filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( 
P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringBagsAtLeast(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesStringSubsets_int_String() { initialize("stringSubsets(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.stringSubsets(t.b, t.c), s -> s.length() == t.b && isSubsetOf(s, t.c) && increasing(toList(s)) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.stringSubsets(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } if (P instanceof ExhaustiveProvider) { Iterable<Triple<RandomProvider, Integer, String>> tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b < 0 ? 
0 : p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringSubsets(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } } } private void propertiesStringSubsets_int() { initialize("distinctStrings(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), filterInfinite(i -> i <= (1 << 16), P.withScale(4).naturalIntegersGeometric()) ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringSubsets(p.b), s -> s.length() >= p.b && increasing(toList(s))); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringSubsets(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } } private void propertiesSubsets() { initialize("subsets(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).repeatingIterables(P.naturalIntegers()) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.subsets(p.b), IterableUtils::increasing); } Iterable<Pair<RandomProvider, List<Integer>>> ps2 = P.pairs( filterInfinite( rp -> rp.getScale() > 0, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withScale(4).listsAtLeast(1, P.withScale(4).integersGeometric()) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { simpleTest(p.a, p.a.subsets(p.a.uniformSample(p.b)), is -> isSubsetOf(is, p.b) && increasing(is)); } for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps2)) { try { toList(p.a.subsets(p.b)); fail(p); } catch 
(NoSuchElementException ignored) {}
    }
}

// Tests stringSubsets(String): random strings drawn from an alphabet, with
// distinct characters in increasing order.
private void propertiesStringSubsets_String() {
    initialize("stringSubsets(String)");
    Iterable<Pair<RandomProvider, String>> ps = P.pairs(
            filterInfinite(
                    rp -> rp.getScale() > 0,
                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
            ),
            P.withScale(4).stringsAtLeast(1)
    );
    for (Pair<RandomProvider, String> p : take(LIMIT, ps)) {
        simpleTest(p.a, p.a.stringSubsets(p.b), s -> isSubsetOf(s, p.b) && increasing(toList(s)));
    }
    // An empty alphabet is rejected.
    // NOTE(review): this failure case uses randomProvidersDefaultTertiaryScale(),
    // while most sibling failure cases use ...SecondaryAndTertiaryScale() —
    // confirm the asymmetry is intentional.
    Iterable<RandomProvider> rpsFail = filterInfinite(
            s -> s.getScale() > 0,
            P.withScale(4).randomProvidersDefaultTertiaryScale()
    );
    for (RandomProvider rp : take(LIMIT, rpsFail)) {
        try {
            rp.stringSubsets("");
            fail(rp);
        } catch (IllegalArgumentException ignored) {}
    }
}

// Tests stringSubsets(): random strings of distinct, increasing characters over
// the full character range.
private void propertiesStringSubsets() {
    initialize("stringSubsets()");
    Iterable<RandomProvider> rpsFail = filterInfinite(
            s -> s.getScale() > 0,
            P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale()
    );
    for (RandomProvider rp : take(LIMIT, rpsFail)) {
        simpleTest(rp, rp.stringSubsets(), s -> increasing(toList(s)));
    }
}

// Tests subsetsAtLeast(int, Iterable<T>): subsets constrained to a minimum
// size.
private void propertiesSubsetsAtLeast() {
    initialize("subsetsAtLeast(int, Iterable<T>)");
    Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = filterInfinite(
            t -> t.a.getScale() > t.b,
            P.triples(
                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(),
                    P.withScale(4).naturalIntegersGeometric(),
                    P.prefixPermutations(EP.naturalIntegers())
            )
    );
    for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) {
        simpleTest(t.a, t.a.subsetsAtLeast(t.b, t.c), is -> is.size() >= t.b && increasing(is));
    }
    // Inputs built to contain at least t.b distinct elements, so the minimum
    // size is always satisfiable.
    Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts2 = map(
            p -> new Triple<>(p.a.a, p.a.b, p.b),
            P.dependentPairsInfinite(
                    filterInfinite(
                            p -> p.a.getScale() > p.b,
                            P.pairs(
                                    P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(),
                                    P.withScale(4).rangeUpGeometric(2)
                            )
                    ),
                    p -> P.withScale(p.a.getScale())
                            .repeatingIterablesDistinctAtLeast(p.b,
P.naturalIntegersGeometric()) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts2)) { simpleTest(t.a, t.a.subsetsAtLeast(t.b, t.c), is -> isSubsetOf(is, t.c) && increasing(is)); } Iterable<Triple<RandomProvider, Integer, List<Integer>>> tsFail = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.withScale(4).listsAtLeast(1, P.withScale(4).integersGeometric()) ) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.subsetsAtLeast(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail2 = filterInfinite( t -> t.a.getScale() > t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail2)) { try { t.a.subsetsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail2 = filterInfinite( t -> t.a.getScale() <= t.b, P.triples( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric(), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail2)) { try { t.a.subsetsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringSubsetsAtLeast_int_String() { initialize("stringSubsetsAtLeast(int, String)"); Iterable<Triple<RandomProvider, Integer, String>> ts = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && 
nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, ts)) { simpleTest( t.a, t.a.stringSubsetsAtLeast(t.b, t.c), s -> s.length() >= t.b && isSubsetOf(s, t.c) && increasing(toList(s)) ); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { toList(p.a.stringSubsetsAtLeast(p.b, "")); fail(p); } catch (IllegalArgumentException ignored) {} } if (P instanceof ExhaustiveProvider) { Iterable<Triple<RandomProvider, Integer, String>> tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b < 0 ? 
0 : p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(LIMIT, tsFail)) { try { t.a.stringSubsetsAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } tsFail = map( p -> new Triple<>(p.a.a, p.a.b, p.b), P.dependentPairsInfinite( filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ), p -> filterInfinite( s -> !s.isEmpty() && nub(s).length() >= p.b, P.withScale(p.a.getScale()).stringsAtLeast(p.b) ) ) ); for (Triple<RandomProvider, Integer, String> t : take(MEDIUM_LIMIT, tsFail)) { try { t.a.stringSubsetsAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } } private void propertiesStringSubsetsAtLeast_int() { initialize("stringSubsetsAtLeast(int)"); Iterable<Pair<RandomProvider, Integer>> ps = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringSubsetsAtLeast(p.b), s -> s.length() >= p.b && increasing(toList(s))); } Iterable<Pair<RandomProvider, Integer>> psFail = filterInfinite( p -> p.a.getScale() > p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).negativeIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringSubsetsAtLeast(p.b); fail(p); } catch (IllegalArgumentException ignored) {} } psFail = filterInfinite( p -> p.a.getScale() <= p.b, P.pairs( P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale(), P.withScale(4).naturalIntegersGeometric() ) ); for (Pair<RandomProvider, Integer> p : take(LIMIT, psFail)) { try { p.a.stringSubsetsAtLeast(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesEithers() { initialize("either(Iterable<A>, Iterable<B>)"); 
Iterable<Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>>> ts = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.eithers(t.b, t.c), e -> true); } Iterable<Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>>> tsFail = P.triples( filterInfinite(rp -> rp.getScale() <= 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.eithers(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), map(xs -> ((Iterable<Integer>) xs), P.lists(P.integersGeometric())), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.eithers(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } tsFail = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()), map(xs -> ((Iterable<Integer>) xs), P.lists(P.integersGeometric())) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.eithers(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } } private void propertiesChoose() { initialize("choose(Iterable<A>, Iterable<B>)"); Iterable<Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>>> ts = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), 
P.prefixPermutations(EP.naturalIntegers()), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.choose(t.b, t.c), i -> true); } Iterable<Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>>> tsFail = P.triples( filterInfinite(rp -> rp.getScale() <= 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.choose(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), map(xs -> ((Iterable<Integer>) xs), P.lists(P.integersGeometric())), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.choose(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } tsFail = P.triples( filterInfinite(rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.naturalIntegers()), map(xs -> ((Iterable<Integer>) xs), P.lists(P.integersGeometric())) ); for (Triple<RandomProvider, Iterable<Integer>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { toList(t.a.choose(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } } private void propertiesCartesianProduct() { initialize("cartesianProduct(List<List<T>>)"); Iterable<Pair<RandomProvider, List<List<Integer>>>> ps = P.pairs( P.randomProvidersDefault(), P.withScale(4).listsAtLeast(1, P.withScale(4).listsAtLeast(1, P.withNull(P.integersGeometric()))) ); for (Pair<RandomProvider, List<List<Integer>>> p : take(LIMIT, ps)) { simpleTest( p.a, p.a.cartesianProduct(p.b), xs -> xs.size() == p.b.size() && and(zipWith(List::contains, p.b, xs)) ); } 
Iterable<Pair<RandomProvider, List<List<Integer>>>> psFail = P.pairs( P.randomProvidersDefault(), P.withScale(4).listsWithElement( Collections.emptyList(), P.withScale(4).listsAtLeast(1, P.withNull(P.integersGeometric())) ) ); for (Pair<RandomProvider, List<List<Integer>>> p : take(LIMIT, psFail)) { try { toList(p.a.cartesianProduct(p.b)); fail(p); } catch (IllegalArgumentException ignored) {} } psFail = P.pairs( P.randomProvidersDefault(), P.withScale(4).listsWithElement( null, P.withScale(4).listsAtLeast(1, P.withNull(P.integersGeometric())) ) ); for (Pair<RandomProvider, List<List<Integer>>> p : take(LIMIT, psFail)) { try { toList(p.a.cartesianProduct(p.b)); fail(p); } catch (NullPointerException ignored) {} } } private void propertiesRepeatingIterables() { initialize("repeatingIterables(Iterable<T>)"); Iterable<Pair<RandomProvider, Iterable<Integer>>> ps = P.pairs( filterInfinite(rp -> rp.getScale() > 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Pair<RandomProvider, Iterable<Integer>> p : take(LIMIT, ps)) { simpleTest( p.a, p.a.repeatingIterables(p.b), ys -> lengthAtLeast(TINY_LIMIT, ys) ); } } private void propertiesRepeatingIterablesDistinctAtLeast() { initialize("repeatingIterablesDistinctAtLeast(int, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = map( p -> new Triple<>(p.a, p.b.b, p.b.a), filterInfinite( p -> p.a.getScale() > p.b.b, P.pairs( P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.pairsLogarithmicOrder( P.prefixPermutations(EP.withNull(EP.naturalIntegers())), P.withScale(4).rangeUpGeometric(2) ) ) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(TINY_LIMIT, ts)) { simpleTest( t.a, t.a.repeatingIterablesDistinctAtLeast(t.b, t.c), ys -> { List<Integer> tys = toList(take(TINY_LIMIT, ys)); Set<Integer> distinctElements = new HashSet<>(); Iterator<Integer> ysi = ys.iterator(); while (distinctElements.size() < t.b) { 
distinctElements.add(ysi.next()); } return tys.size() == TINY_LIMIT; } ); } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail = map( p -> new Triple<>(p.a, p.b.b, p.b.a), filterInfinite( p -> p.a.getScale() <= p.b.b, P.pairs( P.randomProvidersDefaultSecondaryAndTertiaryScale(), P.pairsLogarithmicOrder( P.prefixPermutations(EP.withNull(EP.naturalIntegers())), P.withScale(4).rangeUpGeometric(2) ) ) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(TINY_LIMIT, tsFail)) { try { t.a.repeatingIterablesDistinctAtLeast(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } tsFail = map( p -> new Triple<>(p.a, p.b.b, p.b.a), filterInfinite( p -> p.a.getScale() > p.b.b, P.pairs( filterInfinite( rp -> rp.getScale() < 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ), P.pairsLogarithmicOrder( P.prefixPermutations(EP.withNull(EP.naturalIntegers())), P.integers() ) ) ) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(TINY_LIMIT, tsFail)) { try { t.a.repeatingIterablesDistinctAtLeast(t.b, t.c); fail(t); } catch (IllegalArgumentException ignored) {} } } private void propertiesSublists() { initialize("sublists(List<T>)"); Iterable<Pair<RandomProvider, List<Integer>>> ps = P.pairs( P.randomProvidersDefault(), P.withScale(4).lists(P.withNull(P.integersGeometric())) ); for (Pair<RandomProvider, List<Integer>> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.sublists(p.b), xs -> isInfixOf(xs, p.b)); } } private void propertiesSubstrings() { initialize("substrings(String<T>)"); Iterable<Pair<RandomProvider, String>> ps = P.pairs(P.randomProvidersDefault(), P.withScale(4).strings()); for (Pair<RandomProvider, String> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.substrings(p.b), p.b::contains); } } private void propertiesListsWithElement() { initialize("listsWithElement(T, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = P.triples( filterInfinite( rp -> rp.getScale() >= 3, 
P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withNull(P.integersGeometric()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.listsWithElement(t.b, t.c), xs -> xs.contains(t.b)); } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail = P.triples( filterInfinite( rp -> rp.getScale() < 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withNull(P.integersGeometric()), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.listsWithElement(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } Iterable<Triple<RandomProvider, Integer, List<Integer>>> tsFail2 = P.triples( filterInfinite( rp -> rp.getScale() >= 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.withNull(P.integersGeometric()), P.lists(P.withNull(P.withScale(4).integersGeometric())) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, tsFail2)) { try { toList(t.a.listsWithElement(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } } private void propertiesStringsWithChar_char_String() { initialize("stringsWithChar(char, String)"); Iterable<Triple<RandomProvider, Character, String>> ts = filterInfinite( t -> nub(t.c).length() != 1 || head(t.c) != t.b, P.triples( filterInfinite( rp -> rp.getScale() >= 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Character, String> t : take(LIMIT, ts)) { String combined = cons(t.b, t.c); simpleTest(t.a, t.a.stringsWithChar(t.b, t.c), s -> elem(t.b, s) && isSubsetOf(s, combined)); } Iterable<Triple<RandomProvider, Character, String>> tsFail = filterInfinite( t -> nub(t.c).length() != 1 || head(t.c) != t.b, P.triples( filterInfinite( rp -> 
rp.getScale() < 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Character, String> t : take(LIMIT, tsFail)) { try { t.a.stringsWithChar(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } Iterable<Pair<RandomProvider, Character>> psFail = P.pairs( filterInfinite( rp -> rp.getScale() >= 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters() ); for (Pair<RandomProvider, Character> p : take(LIMIT, psFail)) { try { p.a.stringsWithChar(p.b, ""); fail(p); } catch (IllegalArgumentException ignored) {} } } private void propertiesStringsWithChar_char() { initialize("stringsWithChar(char)"); Iterable<Pair<RandomProvider, Character>> ps = P.pairs( filterInfinite( rp -> rp.getScale() >= 3, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters() ); for (Pair<RandomProvider, Character> p : take(MEDIUM_LIMIT, ps)) { simpleTest(p.a, p.a.stringsWithChar(p.b), s -> elem(p.b, s)); } } private void propertiesSubsetsWithElement() { initialize("subsetsWithElement(T, Iterable<T>)"); Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> ts = P.triples( filterInfinite( rp -> rp.getScale() >= 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.integersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.subsetsWithElement(t.b, t.c), xs -> xs.contains(t.b) && increasing(xs)); } Iterable<Triple<RandomProvider, Integer, Iterable<Integer>>> tsFail = P.triples( filterInfinite( rp -> rp.getScale() < 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.integersGeometric(), P.prefixPermutations(EP.naturalIntegers()) ); for (Triple<RandomProvider, Integer, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.subsetsWithElement(t.b, t.c); fail(t); } catch 
(IllegalStateException ignored) {} } Iterable<Triple<RandomProvider, Integer, List<Integer>>> tsFail2 = P.triples( filterInfinite( rp -> rp.getScale() >= 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.integersGeometric(), P.lists(P.withScale(4).integersGeometric()) ); for (Triple<RandomProvider, Integer, List<Integer>> t : take(LIMIT, tsFail2)) { try { toList(t.a.subsetsWithElement(t.b, t.c)); fail(t); } catch (NoSuchElementException ignored) {} } } private void propertiesStringSubsetsWithChar_char_String() { initialize("stringSubsetsWithChar(char, String)"); Iterable<Triple<RandomProvider, Character, String>> ts = filterInfinite( t -> nub(t.c).length() != 1 || head(t.c) != t.b, P.triples( filterInfinite( rp -> rp.getScale() >= 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Character, String> t : take(LIMIT, ts)) { String combined = cons(t.b, t.c); simpleTest( t.a, t.a.stringSubsetsWithChar(t.b, t.c), s -> elem(t.b, s) && isSubsetOf(s, combined) && increasing(toList(s)) ); } Iterable<Triple<RandomProvider, Character, String>> tsFail = filterInfinite( t -> nub(t.c).length() != 1 || head(t.c) != t.b, P.triples( filterInfinite( rp -> rp.getScale() < 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters(), P.withScale(4).stringsAtLeast(1) ) ); for (Triple<RandomProvider, Character, String> t : take(LIMIT, tsFail)) { try { t.a.stringSubsetsWithChar(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } Iterable<Pair<RandomProvider, Character>> psFail = P.pairs( filterInfinite( rp -> rp.getScale() >= 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters() ); for (Pair<RandomProvider, Character> p : take(LIMIT, psFail)) { try { p.a.stringSubsetsWithChar(p.b, ""); fail(p); } catch (IllegalArgumentException ignored) {} } } private void propertiesStringSubsetsWithChar_char() { 
initialize("stringSubsetsWithChar(char)"); Iterable<Pair<RandomProvider, Character>> ps = P.pairs( filterInfinite( rp -> rp.getScale() >= 2, P.withScale(4).randomProvidersDefaultSecondaryAndTertiaryScale() ), P.characters() ); for (Pair<RandomProvider, Character> p : take(MEDIUM_LIMIT, ps)) { simpleTest(p.a, p.a.stringSubsetsWithChar(p.b), s -> elem(p.b, s) && increasing(toList(s))); } } private void propertiesListsWithSublists() { initialize("listsWithSublists(Iterable<List<T>>, Iterable<T>)"); Iterable<Triple<RandomProvider, Iterable<List<Integer>>, Iterable<Integer>>> ts = P.triples( filterInfinite(rp -> rp.getScale() > 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.lists(EP.withNull(EP.naturalIntegers()))), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Iterable<List<Integer>>, Iterable<Integer>> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.listsWithSublists(t.b, t.c), xs -> true); } Iterable<Triple<RandomProvider, Iterable<List<Integer>>, Iterable<Integer>>> tsFail = P.triples( filterInfinite(rp -> rp.getScale() <= 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.lists(EP.withNull(EP.naturalIntegers()))), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, Iterable<List<Integer>>, Iterable<Integer>> t : take(LIMIT, tsFail)) { try { t.a.listsWithSublists(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringsWithSubstrings_Iterable_String_String() { initialize("stringsWithSubstrings(Iterable<String>, String)"); Iterable<Triple<RandomProvider, Iterable<String>, String>> ts = P.triples( filterInfinite(rp -> rp.getScale() > 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.strings()), P.withScale(4).stringsAtLeast(1) ); for (Triple<RandomProvider, Iterable<String>, String> t : take(LIMIT, ts)) { simpleTest(t.a, t.a.stringsWithSubstrings(t.b, t.c), s -> 
true); } Iterable<Triple<RandomProvider, Iterable<String>, String>> tsFail = P.triples( filterInfinite(rp -> rp.getScale() <= 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.strings()), P.withScale(4).stringsAtLeast(1) ); for (Triple<RandomProvider, Iterable<String>, String> t : take(LIMIT, tsFail)) { try { t.a.stringsWithSubstrings(t.b, t.c); fail(t); } catch (IllegalStateException ignored) {} } } private void propertiesStringsWithSubstrings_Iterable_String() { initialize("stringsWithSubstrings(Iterable<String>)"); Iterable<Pair<RandomProvider, Iterable<String>>> ps = P.pairs( filterInfinite(rp -> rp.getScale() > 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.strings()) ); for (Pair<RandomProvider, Iterable<String>> p : take(LIMIT, ps)) { simpleTest(p.a, p.a.stringsWithSubstrings(p.b), s -> true); } Iterable<Pair<RandomProvider, Iterable<String>>> psFail = P.pairs( filterInfinite(rp -> rp.getScale() <= 1, P.randomProvidersDefaultSecondaryAndTertiaryScale()), P.prefixPermutations(EP.strings()) ); for (Pair<RandomProvider, Iterable<String>> p : take(LIMIT, psFail)) { try { p.a.stringsWithSubstrings(p.b); fail(p); } catch (IllegalStateException ignored) {} } } private void propertiesMaps() { initialize("maps(List<Integer>, List<Integer>)"); Comparator<Integer> withNullComparator = new WithNullComparator<>(); Iterable<Triple<RandomProvider, List<Integer>, Iterable<Integer>>> ts = P.triples( P.randomProvidersDefault(), P.withScale(4).distinctLists(P.withNull(P.integersGeometric())), P.prefixPermutations(EP.withNull(EP.naturalIntegers())) ); for (Triple<RandomProvider, List<Integer>, Iterable<Integer>> t : take(MEDIUM_LIMIT, ts)) { List<Integer> sortedKeys = sort(withNullComparator, t.b); simpleTest(t.a, t.a.maps(t.b, t.c), m -> sort(withNullComparator, m.keySet()).equals(sortedKeys)); } } private void propertiesRandomProvidersFixedScales() { initialize("randomProvidersFixedScales(int, int, int)"); 
Iterable<Quadruple<RandomProvider, Integer, Integer, Integer>> ts = P.quadruples( P.randomProvidersDefault(), P.integersGeometric(), P.integersGeometric(), P.integersGeometric() ); for (Quadruple<RandomProvider, Integer, Integer, Integer> q : take(LIMIT, ts)) { simpleTest( q.a, q.a.randomProvidersFixedScales(q.b, q.c, q.d), rp -> rp.getScale() == q.b && rp.getSecondaryScale() == q.c && rp.getTertiaryScale() == q.d ); for (RandomProvider rp : take(TINY_LIMIT, q.a.randomProvidersFixedScales(q.b, q.c, q.d))) { rp.validate(); } } } private void propertiesRandomProvidersDefault() { initialize("randomProvidersDefault()"); for (RandomProvider rp : take(LIMIT, P.randomProvidersDefault())) { simpleTest( rp, rp.randomProvidersDefault(), s -> s.getScale() == 32 && s.getSecondaryScale() == 8 && s.getTertiaryScale() == 2 ); for (RandomProvider s : take(TINY_LIMIT, rp.randomProvidersDefault())) { s.validate(); } } } private void propertiesRandomProvidersDefaultSecondaryAndTertiaryScale() { initialize("randomProvidersDefaultSecondaryAndTertiaryScale()"); Iterable<RandomProvider> rps = filterInfinite( rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { simpleTest( rp, rp.randomProvidersDefaultSecondaryAndTertiaryScale(), s -> s.getSecondaryScale() == 8 && s.getTertiaryScale() == 2 ); for (RandomProvider s : take(TINY_LIMIT, rp.randomProvidersDefaultSecondaryAndTertiaryScale())) { s.validate(); } } Iterable<RandomProvider> rpsFail = filterInfinite( rp -> rp.getScale() <= 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.randomProvidersDefaultSecondaryAndTertiaryScale(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesRandomProvidersDefaultTertiaryScale() { initialize("randomProvidersDefaultTertiaryScale()"); Iterable<RandomProvider> rps = filterInfinite( rp -> rp.getScale() > 0, 
P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { simpleTest(rp, rp.randomProvidersDefaultTertiaryScale(), s -> s.getTertiaryScale() == 2); for (RandomProvider s : take(TINY_LIMIT, rp.randomProvidersDefaultTertiaryScale())) { s.validate(); } } Iterable<RandomProvider> rpsFail = filterInfinite( rp -> rp.getScale() <= 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.randomProvidersDefaultTertiaryScale(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesRandomProviders() { initialize("randomProviders()"); Iterable<RandomProvider> rps = filterInfinite( rp -> rp.getScale() > 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rps)) { simpleTest(rp, rp.randomProviders(), s -> true); for (RandomProvider s : take(TINY_LIMIT, rp.randomProviders())) { s.validate(); } } Iterable<RandomProvider> rpsFail = filterInfinite( rp -> rp.getScale() <= 0, P.randomProvidersDefaultSecondaryAndTertiaryScale() ); for (RandomProvider rp : take(LIMIT, rpsFail)) { try { rp.randomProviders(); fail(rp); } catch (IllegalStateException ignored) {} } } private void propertiesEquals() { initialize("equals(Object)"); propertiesEqualsHelper(LIMIT, P, IterableProvider::randomProviders); } private void propertiesHashCode() { initialize("hashCode()"); propertiesHashCodeHelper(LIMIT, P, IterableProvider::randomProviders); } private void propertiesToString() { initialize("toString()"); for (RandomProvider rp : take(LIMIT, P.randomProviders())) { String s = rp.toString(); assertTrue(rp, isSubsetOf(s, RANDOM_PROVIDER_CHARS)); } } }
package io.autoscaling.ingestion.verticles;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.kinesis.AmazonKinesisAsyncClient;
import com.amazonaws.services.kinesis.model.ListStreamsRequest;
import com.amazonaws.services.kinesis.model.ListStreamsResult;
import com.amazonaws.services.kinesis.model.PutRecordRequest;
import com.amazonaws.services.kinesis.model.PutRecordResult;
import io.autoscaling.ingestion.exceptions.KinesisException;
import io.autoscaling.ingestion.helper.AmazonUtil;
import io.autoscaling.ingestion.helper.Constants;
import io.autoscaling.proto.AddressBookProtos;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.MultiMap;
import io.vertx.core.eventbus.EventBus;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;

/**
 * Vert.x verticle that consumes integer IDs from the event bus, wraps each one in a
 * protobuf {@code AddressBook} message, and forwards it to an AWS Kinesis stream.
 *
 * <p>Lifecycle: {@link #start()} creates the Kinesis client and registers the event-bus
 * consumer on {@code Constants.EVENTBUS_ADDRESS}; {@link #stop()} shuts the client down.
 */
public class KinesisVerticle extends AbstractVerticle {

    private static final Logger LOGGER = LoggerFactory.getLogger(KinesisVerticle.class);

    /** Async Kinesis client; created in start(), released in stop(). */
    private AmazonKinesisAsyncClient kinesisAsyncClient;

    @Override
    public void start() throws Exception {
        EventBus eb = vertx.eventBus();

        kinesisAsyncClient = createClient();

        eb.consumer(Constants.EVENTBUS_ADDRESS, message -> {
            try {
                // The partition key travels in the message headers, the ID in the body.
                MultiMap multiMap = message.headers();
                String partitionKey = multiMap.get(Constants.MESSAGE_KEY);

                Integer randomId = (Integer) message.body();
                byte[] byteMessage = createMessage(randomId);
                ByteBuffer buf = ByteBuffer.wrap(byteMessage);

                sendMessageToKinesis(buf, partitionKey);

                // Now send back reply
                message.reply("OK");
            } catch (KinesisException exc) {
                // Delivery failed; the sender receives no reply (no message.fail here —
                // preserved existing behavior).
                LOGGER.error(exc);
            }
        });

        LOGGER.info("Receiver ready!");
    }

    @Override
    public void stop() throws Exception {
        if (kinesisAsyncClient != null) {
            kinesisAsyncClient.shutdown();
        }
    }

    /**
     * Sends one record to the Kinesis stream named by {@code Constants.STREAM_NAME} and
     * blocks until the async put completes.
     *
     * @param payload      the record data
     * @param partitionKey the Kinesis partition key for this record
     * @throws KinesisException if the client has not been initialized
     */
    protected void sendMessageToKinesis(ByteBuffer payload, String partitionKey) throws KinesisException {
        if (kinesisAsyncClient == null) {
            throw new KinesisException("AmazonKinesisAsyncClient is not initialized");
        }

        PutRecordRequest putRecordRequest = new PutRecordRequest();
        putRecordRequest.setStreamName(Constants.STREAM_NAME);
        putRecordRequest.setPartitionKey(partitionKey);

        LOGGER.info("Writing to streamName " + Constants.STREAM_NAME + " using partitionkey " + partitionKey);

        putRecordRequest.setData(payload);

        Future<PutRecordResult> futureResult = kinesisAsyncClient.putRecordAsync(putRecordRequest);
        try {
            PutRecordResult recordResult = futureResult.get();
            LOGGER.info("Sent message to Kinesis: " + recordResult.toString());
        } catch (InterruptedException iexc) {
            // FIX: restore the interruption status instead of silently swallowing it,
            // so callers/executors can still observe the interrupt.
            Thread.currentThread().interrupt();
            LOGGER.error(iexc);
        } catch (ExecutionException eexc) {
            LOGGER.error(eexc);
        }
    }

    /**
     * Builds a serialized protobuf AddressBook containing a single hard-coded person
     * ("Jon Doe") with the given ID.
     *
     * @param id the person ID to embed in the message
     * @return the protobuf-encoded AddressBook bytes
     */
    private byte[] createMessage(int id) {
        AddressBookProtos.Person.Builder personBuilder = AddressBookProtos.Person.newBuilder();
        personBuilder.setId(id);
        personBuilder.setName("Jon Doe");
        personBuilder.setEmail("jon.doe@test.com");

        AddressBookProtos.Person.PhoneNumber.Builder phoneNumber =
                AddressBookProtos.Person.PhoneNumber.newBuilder().setNumber("049 0176 0815");
        phoneNumber.setType(AddressBookProtos.Person.PhoneType.MOBILE);
        personBuilder.addPhone(phoneNumber);

        AddressBookProtos.Person person = personBuilder.build();

        AddressBookProtos.AddressBook.Builder addressBookBuilder = AddressBookProtos.AddressBook.newBuilder();
        addressBookBuilder.addPerson(person);
        AddressBookProtos.AddressBook addressBook = addressBookBuilder.build();

        return addressBook.toByteArray();
    }

    // NOTE(review): currently unused inside this class — possibly kept for subclasses or
    // future validation; confirm before removing.
    private boolean isValid(String str) {
        return str != null && !str.isEmpty();
    }

    /**
     * Creates and configures the async Kinesis client: default SDK timeouts/retry policy,
     * credentials from the default provider chain, and the region reported by AmazonUtil.
     */
    private AmazonKinesisAsyncClient createClient() {
        // Building Kinesis configuration
        int connectionTimeout = ClientConfiguration.DEFAULT_CONNECTION_TIMEOUT;
        int maxConnection = ClientConfiguration.DEFAULT_MAX_CONNECTIONS;
        RetryPolicy retryPolicy = ClientConfiguration.DEFAULT_RETRY_POLICY;
        int socketTimeout = ClientConfiguration.DEFAULT_SOCKET_TIMEOUT;
        boolean useReaper = ClientConfiguration.DEFAULT_USE_REAPER;
        String userAgent = ClientConfiguration.DEFAULT_USER_AGENT;

        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setConnectionTimeout(connectionTimeout);
        clientConfiguration.setMaxConnections(maxConnection);
        clientConfiguration.setRetryPolicy(retryPolicy);
        clientConfiguration.setSocketTimeout(socketTimeout);
        clientConfiguration.setUseReaper(useReaper);
        clientConfiguration.setUserAgent(userAgent);

        // Reading credentials from ENV-variables
        AWSCredentialsProvider awsCredentialsProvider = new DefaultAWSCredentialsProviderChain();

        // Configuring Kinesis-client with configuration
        AmazonKinesisAsyncClient kinesisAsyncClient =
                new AmazonKinesisAsyncClient(awsCredentialsProvider, clientConfiguration);
        Regions myRegion = Regions.fromName(AmazonUtil.getInstance().getRegion());
        kinesisAsyncClient.withRegion(Region.getRegion(myRegion));

        return kinesisAsyncClient;
    }
}
package org.pwsafe.lib.file;

import junit.framework.TestCase;

import org.pwsafe.util.FileConverter;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;

/**
 * Round-trip tests for version 1 PasswordSafe database files: create, save,
 * re-open, and convert V1 -> V2 -> V1.
 */
public class PwsFileV1Test extends TestCase {

	private static final String PASSPHRASE = "passphrase";

	/**
	 * Tests that the library can create a version 1 database, read from the created file and
	 * round trip convert to V2.
	 */
	public void testV1File() throws Exception {
		// Reserve a unique temp file name, then delete it so the library can
		// create the database file itself.
		Path tmpFile = Files.createTempFile("javapasswordsafe_v1", ".dat");
		assertTrue(tmpFile.toFile().delete());
		String v1FileName = tmpFile.toString();
		try {
			PwsRecord rec = null;
			final PwsFile v1File = new PwsFileV1();
			v1File.setStorage(new PwsFileStorage(v1FileName));
			v1File.setPassphrase(new StringBuilder(PASSPHRASE));

			// First record: title + password only.
			rec = v1File.newRecord();
			rec.setField(new PwsStringField(PwsRecordV1.TITLE, "Entry number 1"));
			rec.setField(new PwsStringField(PwsRecordV1.PASSWORD, "Password 1"));
			v1File.add(rec);

			// Second record: also exercises username and multi-line notes.
			rec = v1File.newRecord();
			rec.setField(new PwsStringField(PwsRecordV1.TITLE, "Entry number 2"));
			rec.setField(new PwsStringField(PwsRecordV1.PASSWORD, "Password 2"));
			rec.setField(new PwsStringField(PwsRecordV1.USERNAME, "Username 2"));
			rec.setField(new PwsStringField(PwsRecordV1.NOTES, "Notes line 1\r\nNotes line 2"));
			v1File.add(rec);

			assertTrue("Modified flag is not TRUE", v1File.isModified());
			assertEquals("Record count is not = 2", 2, v1File.getRecordCount());
			v1File.save();
			assertTrue("Modified flag is not FALSE", !v1File.isModified());
			v1File.dispose();
			// dispose() is expected to clear the in-memory passphrase.
			assertEquals(null, v1File.getPassphrase());

			// Re-open the saved file and verify both records survived.
			PwsFile anotherV1File = new PwsFileV1(new PwsFileStorage(v1FileName), PASSPHRASE);
			anotherV1File.readAll();
			anotherV1File.close();
			assertEquals(2, anotherV1File.getRecordCount());

			// Convert V1 -> V2 -> V1 and verify the record content round-trips.
			PwsFileV1 thirdV1File = new PwsFileV1(new PwsFileStorage(v1FileName), PASSPHRASE);
			thirdV1File.readAll();
			thirdV1File.close();
			final PwsFileV2 newTempFile = (PwsFileV2) FileConverter.convertV1ToV2(thirdV1File);
			assertEquals(2, newTempFile.getRecordCount());
			final PwsFileV1 newFile = (PwsFileV1) FileConverter.convertV2ToV1(newTempFile);
			assertEquals(2, newFile.getRecordCount());
			assertEquals(thirdV1File.getRecord(0), newFile.getRecord(0));
		} finally {
			// The database file must exist (and be removable) at the end.
			assertTrue(tmpFile.toFile().delete());
		}
	}
}
package de.quaddy_services.ptc.edit;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.junit.jupiter.api.Test;

import de.quaddy_services.ptc.enterprise.EnterpriseUtil;
import de.quaddy_services.ptc.store.PosAndContent;
import de.quaddy_services.ptc.store.Task;
import de.quaddy_services.ptc.store.TaskHistory;

/**
 * Verifies that loading the task file into the editor and saving it back
 * without any modification leaves the file's content unchanged.
 */
public class EditNoChangeTest {

	private static final File TEST_FILE = new File("src/test/resources/de/quaddy_services/ptc/edit/EditNoChangeTest.txt");

	/**
	 * Copies the reference file to a temp location, runs it through the
	 * editor round trip without edits, and asserts every line survived.
	 *
	 * @throws IOException on file access problems
	 */
	@Test
	public void testNoChange() throws IOException {
		List<String> tempOrig = readFile(TEST_FILE);
		final File tempTestFile = File.createTempFile("EditNoChangeTest", ".txt");
		tempTestFile.deleteOnExit();
		copyFile(TEST_FILE, tempTestFile);
		// TaskHistory pointed at the temp copy; backups are suppressed so the
		// test leaves no artifacts behind.
		TaskHistory tempTaskHistory = new TaskHistory() {
			@Override
			public File getActualFile() {
				return tempTestFile;
			}

			@Override
			public void backupFile() {
				// no backup file for test
			}
		};
		TaskEditor tempTaskEditor = new TaskEditor();
		long tempStartPos;
		List<PosAndContent<Task>> tempTasks;
		tempTasks = tempTaskHistory.getLastLinesForEdit();
		tempStartPos = tempTasks.get(0).getPosInFile();
		tempTaskEditor.setTasks(tempTasks, new EnterpriseUtil());
		// tempTaskEditor.show
		// Nothing was edited, so no tasks should be flagged as deleted; this
		// loop mirrors the production flow anyway.
		for (Iterator<PosAndContent<Task>> i = tempTasks.iterator(); i.hasNext();) {
			PosAndContent<Task> tempPosAndContent = i.next();
			if (tempTaskEditor.getDeletedTasks().contains(tempPosAndContent)) {
				i.remove();
			}
		}
		tempTaskHistory.saveTasks(tempStartPos, tempTasks);
		List<String> tempSaved = readFile(tempTestFile);
		Iterator<String> tempSaveI = tempSaved.iterator();
		for (String tempString : tempOrig) {
			assertEquals(tempString, tempSaveI.next());
		}
		System.out.println("ok");
	}

	/**
	 * Reads all lines of the given file.
	 *
	 * Fixed: the previous implementation looped on BufferedReader#ready(),
	 * which only reports whether a read would block and is not a reliable
	 * end-of-stream test, so it could silently drop trailing lines. It also
	 * leaked the reader when a read failed; try-with-resources closes it.
	 *
	 * @param aTestFile file to read
	 * @return the lines of the file, in order
	 * @throws IOException on read problems
	 */
	private List<String> readFile(File aTestFile) throws IOException {
		List<String> tempLines = new ArrayList<String>();
		try (BufferedReader tempReader = new BufferedReader(new FileReader(aTestFile))) {
			String tempLine;
			while ((tempLine = tempReader.readLine()) != null) {
				tempLines.add(tempLine);
			}
		}
		return tempLines;
	}

	/**
	 * Copies aFrom to aTo. Fixed: try-with-resources now closes both streams
	 * even when a read/write fails (the previous version leaked them on error).
	 *
	 * @param aFrom source file
	 * @param aTo target file
	 * @throws FileNotFoundException if the source is missing
	 * @throws IOException on copy problems
	 */
	private void copyFile(File aFrom, File aTo) throws FileNotFoundException, IOException {
		try (FileInputStream tempReader = new FileInputStream(aFrom);
				FileOutputStream tempWriter = new FileOutputStream(aTo)) {
			int tempRead;
			byte[] tempBuff = new byte[1024 * 20];
			while (0 <= (tempRead = tempReader.read(tempBuff))) {
				tempWriter.write(tempBuff, 0, tempRead);
			}
		}
	}
}
package org.auslides.security.rest;

import org.auslides.security.config.RepositoryConfig;
import org.auslides.security.config.ShiroConfig;
import org.auslides.security.model.Permission;
import org.auslides.security.model.Role;
import org.auslides.security.model.User;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.auslides.security.Application;
import org.auslides.security.repository.UserRepository;
import java.util.Arrays;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authc.credential.DefaultPasswordService;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.IntegrationTest;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.TestRestTemplate;
import org.springframework.boot.test.TestRestTemplate.HttpClientOption;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.springframework.test.context.web.WebAppConfiguration;
import static org.testng.AssertJUnit.assertEquals;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * TestNG integration tests for the /users REST endpoints, run against a full
 * Spring Boot application with Shiro security. Requires the app to be
 * reachable on localhost:8080.
 */
@SpringApplicationConfiguration(classes = {Application.class, ShiroConfig.class, RepositoryConfig.class})
@WebAppConfiguration
@IntegrationTest
@TestExecutionListeners(inheritListeners = false, listeners = {DependencyInjectionTestExecutionListener.class})
public class UserControllerTest extends AbstractTestNGSpringContextTests {

    private final String BASE_URL = "http://localhost:8080/users";
    private final String USER_NAME = "Balala";
    private final String USER_EMAIL = "balala@gmail.com";
    private final String USER_PWD = "1111";

    @Autowired
    private DefaultPasswordService passwordService;

    @Autowired
    private UserRepository userRepo;

    /**
     * Wipes the user repository and seeds exactly one active ADMIN user whose
     * password is stored hashed via the Shiro password service.
     */
    @BeforeClass
    public void setUp() {
        userRepo.deleteAll();
        final Permission p1 = new Permission();
        p1.setName("VIEW_USER_ROLES");
        // define roles
        final Role roleAdmin = new Role();
        roleAdmin.setName("ADMIN");
        roleAdmin.getPermissions().add(p1);
        // define user
        final User user = new User();
        user.setActive(true);
        user.setEmail(USER_EMAIL);
        user.setName(USER_NAME);
        user.setPassword(passwordService.encryptPassword(USER_PWD));
        user.getRoles().add(roleAdmin);
        userRepo.save(user);
    }

    /** The seeded repository should contain exactly the one user from setUp(). */
    @Test
    public void test_count() {
        assertEquals(1, userRepo.count());
    }

    /** Posts valid credentials as JSON to /users/auth and expects HTTP 200. */
    @Test
    public void test_authenticate_success() throws JsonProcessingException {
        // authenticate
        HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
        headers.setContentType(MediaType.APPLICATION_JSON);
        final String json = new ObjectMapper().writeValueAsString(
                new UsernamePasswordToken(USER_EMAIL, USER_PWD));
        System.out.println(json);
        // Cookie-enabled template so the session established by Shiro sticks.
        final ResponseEntity<String> response = new TestRestTemplate(
                HttpClientOption.ENABLE_COOKIES).exchange(BASE_URL.concat("/auth"),
                HttpMethod.POST, new HttpEntity<>(json, headers), String.class);
        assertThat(response.getStatusCode(), equalTo(HttpStatus.OK));
    }

    /**
     * Posts a wrong password to /users/auth.
     * NOTE(review): this also asserts HTTP 200 — presumably the endpoint
     * reports authentication failure in the response body rather than via the
     * status code; confirm against the controller and strengthen the assertion.
     */
    @Test
    public void test_authenticate_failure() throws JsonProcessingException {
        // authenticate
        HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
        headers.setContentType(MediaType.APPLICATION_JSON);
        final String json = new ObjectMapper().writeValueAsString(
                new UsernamePasswordToken(USER_EMAIL, "wrong password"));
        System.out.println(json);
        final ResponseEntity<String> response = new TestRestTemplate(
                HttpClientOption.ENABLE_COOKIES).exchange(BASE_URL.concat("/auth"),
                HttpMethod.POST, new HttpEntity<>(json, headers), String.class);
        assertThat(response.getStatusCode(), equalTo(HttpStatus.OK));
    }
}
package gov.usgs.cida.wqp.exception; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import org.junit.jupiter.api.Test; public class WqpExceptionTest { public static class Klass { public void doSomething() throws WqpException { try { int div = 1/ 0; } catch (Exception e) { throw new WqpException(WqpExceptionId.URL_PARSING_EXCEPTION, Klass.class, "doSomething", "div by zero"); } } } @Test public void test() { WqpException ex = null; String trace = null; try { new Klass().doSomething(); fail("test should throw exception"); } catch (WqpException e) { ex = e; trace = e.traceBack(); } assertNotNull(trace); System.out.println(trace); assertTrue(trace.contains("doSomething")); assertTrue(trace.contains("div by zero")); assertEquals(trace, ex.getMessage()); assertEquals(trace, ex.toString()); assertEquals("doSomething", ex.getMethod()); assertEquals("div by zero", ex.getMessageOnly()); assertEquals(Klass.class.getName(), ex.getClassname()); assertEquals(null, ex.getPrevious()); } }
package it.sauronsoftware.ftp4j.listparsers;

import it.sauronsoftware.ftp4j.FTPFile;
import it.sauronsoftware.ftp4j.FTPListParseException;
import it.sauronsoftware.ftp4j.FTPListParser;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.Locale;
import java.util.Properties;
import java.util.StringTokenizer;

/**
 * This parser can handle the standard MLST/MLSD responses (RFC 3659).
 *
 * @author Carlo Pelliccia
 * @since 1.5
 */
public class MLSDListParser implements FTPListParser {

	/**
	 * Date format 1 for MLSD date facts (supports millis).
	 */
	private static final DateFormat MLSD_DATE_FORMAT_1 = new SimpleDateFormat("yyyyMMddHHmmss.SSS Z");

	/**
	 * Date format 2 for MLSD date facts (doesn't support millis).
	 */
	private static final DateFormat MLSD_DATE_FORMAT_2 = new SimpleDateFormat("yyyyMMddHHmmss Z");

	/**
	 * Parses each line as an MLSD entry; entries the parser decides to skip
	 * (cdir/pdir/unknown types) are omitted from the result.
	 */
	public FTPFile[] parse(String[] lines) throws FTPListParseException {
		ArrayList list = new ArrayList();
		for (int i = 0; i < lines.length; i++) {
			FTPFile file = parseLine(lines[i]);
			if (file != null) {
				list.add(file);
			}
		}
		int size = list.size();
		FTPFile[] ret = new FTPFile[size];
		for (int i = 0; i < size; i++) {
			ret[i] = (FTPFile) list.get(i);
		}
		return ret;
	}

	/**
	 * Parses a line as a MLSD response element.
	 *
	 * @param line
	 *            The line.
	 * @return The file, or null if the line has to be ignored.
	 * @throws FTPListParseException
	 *             If the line is not a valid MLSD entry.
	 */
	private FTPFile parseLine(String line) throws FTPListParseException {
		// List line format is <FACTS WITH NO SPACES SEPARATED WITH SEMICOLON> <SPACE> <FILENAME>
		// Example of line that failed before: Type=file;Size=25730;Modify=19940728095854;Perm=; cap;mux.tar.z
		int nameIndex = line.indexOf(" ");
		// Throw exception if no name in response line
		if (nameIndex == -1) {
			throw new FTPListParseException();
		}
		// Extract the file name.
		String name = line.substring(nameIndex + 1);
		// Extract the facts string
		String factsLine = line.substring(0, nameIndex);
		ArrayList list = new ArrayList();
		StringTokenizer st = new StringTokenizer(factsLine, ";");
		while (st.hasMoreElements()) {
			String aux = st.nextToken().trim();
			if (aux.length() > 0) {
				list.add(aux);
			}
		}
		// If no facts, throw exception
		if (list.size() == 0) {
			throw new FTPListParseException();
		}
		// Parses the facts.
		Properties facts = new Properties();
		for (Iterator i = list.iterator(); i.hasNext();) {
			String aux = (String) i.next();
			int sep = aux.indexOf('=');
			if (sep == -1) {
				throw new FTPListParseException();
			}
			String key = aux.substring(0, sep).trim();
			String value = aux.substring(sep + 1, aux.length()).trim();
			if (key.length() == 0 || value.length() == 0) {
				throw new FTPListParseException();
			}
			// RFC 3659 fact names are case-insensitive (servers send e.g.
			// "Type=" or "type="); store them lowercased so the lookups below
			// match regardless of the server's casing. Locale.ENGLISH avoids
			// locale-dependent case mapping (e.g. the Turkish dotless i).
			facts.setProperty(key.toLowerCase(Locale.ENGLISH), value);
		}
		// Type.
		int type;
		String typeString = facts.getProperty("type");
		if (typeString == null) {
			throw new FTPListParseException();
		} else if ("file".equalsIgnoreCase(typeString)) {
			type = FTPFile.TYPE_FILE;
		} else if ("dir".equalsIgnoreCase(typeString)) {
			type = FTPFile.TYPE_DIRECTORY;
		} else if ("cdir".equalsIgnoreCase(typeString)) {
			// Current directory. Skips...
			return null;
		} else if ("pdir".equalsIgnoreCase(typeString)) {
			// Parent directory. Skips...
			return null;
		} else {
			// Unknown... (link?)... Skips...
			return null;
		}
		// Last modification date.
		Date modifiedDate = null;
		String modifyString = facts.getProperty("modify");
		if (modifyString != null) {
			// The fact is UTC per RFC 3659; append an explicit zone for parsing.
			modifyString += " +0000";
			try {
				// DateFormat is not thread-safe: synchronize on the shared instance.
				synchronized (MLSD_DATE_FORMAT_1) {
					modifiedDate = MLSD_DATE_FORMAT_1.parse(modifyString);
				}
			} catch (ParseException e1) {
				try {
					synchronized (MLSD_DATE_FORMAT_2) {
						modifiedDate = MLSD_DATE_FORMAT_2.parse(modifyString);
					}
				} catch (ParseException e2) {
					// Unparseable date: leave modifiedDate null rather than fail the entry.
					;
				}
			}
		}
		// Size.
		long size = 0;
		String sizeString = facts.getProperty("size");
		if (sizeString != null) {
			try {
				size = Long.parseLong(sizeString);
			} catch (NumberFormatException e) {
				// Malformed size: keep the 0 default.
				;
			}
			if (size < 0) {
				size = 0;
			}
		}
		// Done!
		FTPFile ret = new FTPFile();
		ret.setType(type);
		ret.setModifiedDate(modifiedDate);
		ret.setSize(size);
		ret.setName(name);
		return ret;
	}
}
package harmony.mastermind.testutil;

import java.util.List;

import harmony.mastermind.commons.exceptions.IllegalValueException;
import harmony.mastermind.model.TaskManager;
import harmony.mastermind.model.task.*;
import harmony.mastermind.model.task.UniqueTaskList.TaskNotFoundException;

/**
 * Test fixture providing a canonical set of tasks (pending, manually-added and
 * archived) plus helpers to load them into a TaskManager.
 *
 * NOTE(review): the task fields are static but (re)assigned in the
 * constructor, so constructing a second instance rebuilds the shared fixture.
 */
public class TypicalTestTasks {

    // task1-task4: default pending tasks; task5-task6: manual inputs;
    // task7-task8: tasks that get archived by loadTaskManagerWithSampleData.
    public static TestTask task1, task2, task3, task4, task5, task6, task7, task8;

    //@@author A0124797R
    public TypicalTestTasks() {
        try {
            task1 = new TaskBuilder().withName("do laundry")
                    .withTags("chores").build();
            task2 = new TaskBuilder().withName("finish assignment").build();
            task3 = new TaskBuilder().withName("cs2105 assignment")
                    .withStartDate("23 oct 1pm").withEndDate("23 oct 5pm")
                    .withTags("examPrep").build();
            task4 = new TaskBuilder().withName("complete cs2103 lecture quiz")
                    .withEndDate("25 oct 2359")
                    .withTags("homework").build();

            //manual inputs
            task5 = new TaskBuilder().withName("do past year papers")
                    .withEndDate("22 oct 2359")
                    .withTags("homework").build();
            task6 = new TaskBuilder().withName("sweep floor").withEndDate("21 oct 2359").build();

            //completed tasks
            task7 = new TaskBuilder().withName("lecture").build();
            task8 = new TaskBuilder().withName("submit PR").withEndDate("22 oct 2359").build();
        } catch (IllegalValueException e) {
            // The builder inputs above are hard-coded and known-valid.
            e.printStackTrace();
            assert false : "not possible";
        }
    }

    //@@author A0124797R
    /**
     * Loads the given TaskManager with task1-task4 pending and task7/task8
     * added then immediately marked (archived).
     */
    public static void loadTaskManagerWithSampleData(TaskManager tm) {
        try {
            tm.addTask(new Task(task1));
            tm.addTask(new Task(task2));
            tm.addTask(new Task(task3));
            tm.addTask(new Task(task4));
            tm.addTask(new Task(task7));
            tm.markTask(new Task(task7));
            tm.addTask(new Task(task8));
            tm.markTask(new Task(task8));
        } catch (UniqueTaskList.DuplicateTaskException e) {
            // The fixture tasks are all distinct.
            assert false : "not possible";
        } catch (TaskNotFoundException e) {
            // markTask is called right after addTask, so this should not occur.
            e.printStackTrace();
        }
    }

    //@@author A0124797R
    /** @return the four pending fixture tasks, in insertion order. */
    public TestTask[] getTypicalTasks() {
        return new TestTask[]{task1, task2, task3, task4};
    }

    //@@author A0124797R
    /** @return the two archived fixture tasks, already marked. */
    public TestTask[] getTypicalArchivedTasks() {
        return new TestTask[]{task7.mark(),task8.mark()};
    }

    /** @return a fresh TaskManager pre-loaded with the sample data. */
    public TaskManager getTypicalTaskManager(){
        TaskManager tm = new TaskManager();
        loadTaskManagerWithSampleData(tm);
        return tm;
    }
}
package io.ebean.docker.commands;

import io.ebean.docker.container.Container;
import io.ebean.docker.container.ContainerConfig;
import io.ebean.docker.container.ContainerFactory;
import org.junit.Ignore;
import org.junit.Test;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Manual (ignored) tests that start an Oracle docker container via the
 * OracleContainer commands and the generic ContainerFactory.
 */
public class OracleContainerTest {

  /** Exercises the explicit start modes on OracleContainer directly. */
  @Ignore
  @Test
  public void start() {

    OracleConfig config = new OracleConfig("latest");
    config.setUser("test_start");
    //config.setPort("15221");

    OracleContainer container = new OracleContainer(config);

    container.startWithCreate();
    container.startContainerOnly();
    container.startWithDropCreate();

    //container.stopOnly();
  }

  /**
   * Drives the same container through the ContainerFactory and runs a little
   * SQL against it to prove the connection works.
   */
  @Ignore
  @Test
  public void viaContainerFactory() {

    Properties properties = new Properties();
    properties.setProperty("oracle.version", "latest");
    //properties.setProperty("oracle.containerName", "junk_oracle");
    //properties.setProperty("oracle.port", "1521");
    //properties.setProperty("oracle.dbName", "test_rob");
    properties.setProperty("oracle.dbUser", "test_robino");
    //properties.setProperty("oracle.startMode", "dropcreate");

    ContainerFactory factory = new ContainerFactory(properties);
    factory.startContainers();

    Container container = factory.container("oracle");
    ContainerConfig config = container.config();

    // Cycle through the start modes; each start should be idempotent/safe.
    config.setStartMode("dropCreate");
    container.start();

    config.setStartMode("container");
    container.start();

    config.setStartMode("create");
    container.start();

    try (Connection connection = config.createConnection()) {

      exeSql(connection, "create table test_junk (acol integer)");
      exeSql(connection, "insert into test_junk (acol) values (42)");
      exeSql(connection, "insert into test_junk (acol) values (43)");

      // docker exec -it ut_oracle bash
      // $ORACLE_HOME/bin/sqlplus system/oracle
      // $ORACLE_HOME/bin/sqlplus test_robino/test

    } catch (SQLException e) {
      throw new RuntimeException(e);
    }

    //container.stop();
  }

  /**
   * Executes one SQL statement. Fixed: the PreparedStatement is now closed via
   * try-with-resources even when execute() throws (it previously leaked).
   *
   * @param connection open connection to run against
   * @param sql        the statement to execute
   * @throws SQLException if execution fails
   */
  private void exeSql(Connection connection, String sql) throws SQLException {
    try (PreparedStatement st = connection.prepareStatement(sql)) {
      st.execute();
    }
  }
}
package modtweaker2.mods.factorization;

import static modtweaker2.helpers.ReflectionHelper.getStaticObject;

import java.util.List;

import modtweaker2.helpers.ReflectionHelper;
import net.minecraft.item.ItemStack;

/**
 * Reflection-based bridge to the Factorization mod's ore-processing recipe
 * lists and recipe constructors. Reflection is used so this class still loads
 * when the Factorization mod (and its classes) are absent at runtime.
 */
public class FactorizationHelper {
    // Live recipe lists pulled out of Factorization's tile entities; they stay
    // null when the mod is not installed or its internals have changed.
    @SuppressWarnings("rawtypes")
    public static List lacerator = null;
    @SuppressWarnings("rawtypes")
    public static List slag = null;
    @SuppressWarnings("rawtypes")
    public static List crystallizer = null;

    static {
        try {
            lacerator = getStaticObject(Class.forName("factorization.oreprocessing.TileEntityGrinder"), "recipes");
            slag = ReflectionHelper.getObject(getStaticObject(Class.forName("factorization.oreprocessing.TileEntitySlagFurnace"), "recipes"), "list");
            crystallizer = ReflectionHelper.getObject(getStaticObject(Class.forName("factorization.oreprocessing.TileEntityCrystallizer"), "recipes"), "list");
        } catch (Exception e) { /* deliberately swallowed: best-effort — the mod may not be loaded */ }
    }

    // Utility class: no instances.
    private FactorizationHelper() {}

    /**
     * Builds a Factorization GrinderRecipe via reflection.
     *
     * @param input       recipe input (the mod accepts several input kinds, hence Object)
     * @param output      the ground result
     * @param probability chance factor passed through to the recipe
     * @return the reflectively constructed GrinderRecipe instance
     */
    public static Object getLaceratorRecipe(Object input, ItemStack output, float probability) {
        return ReflectionHelper.getInstance(ReflectionHelper.getConstructor("factorization.oreprocessing.TileEntityGrinder$GrinderRecipe", Object.class, ItemStack.class, float.class), input, output, probability);
    }

    /**
     * Builds a Factorization SmeltingResult (slag furnace recipe) via reflection.
     *
     * @param input   recipe input stack
     * @param chance1 chance for the first output
     * @param output1 first output stack
     * @param chance2 chance for the second output
     * @param output2 second output stack
     * @return the reflectively constructed SmeltingResult instance
     */
    public static Object getSlagFurnaceRecipe(ItemStack input, float chance1, ItemStack output1, float chance2, ItemStack output2) {
        return ReflectionHelper.getInstance(ReflectionHelper.getConstructor("factorization.oreprocessing.TileEntitySlagFurnace$SmeltingResult", ItemStack.class, float.class, ItemStack.class, float.class, ItemStack.class), input, chance1, output1, chance2, output2);
    }

    /**
     * Builds a Factorization CrystalRecipe via reflection.
     *
     * Note the argument order: the mod's constructor is
     * (input, output, output_count, solution), so output_count is passed
     * before solution even though this method declares solution first.
     *
     * @param input        recipe input stack
     * @param output       crystallized output stack
     * @param solution     the solution stack the recipe requires
     * @param output_count output amount factor
     * @return the reflectively constructed CrystalRecipe instance
     */
    public static Object getCrystallizerRecipe(ItemStack input, ItemStack output, ItemStack solution, float output_count) {
        return ReflectionHelper.getInstance(ReflectionHelper.getConstructor("factorization.oreprocessing.TileEntityCrystallizer$CrystalRecipe", ItemStack.class, ItemStack.class, float.class, ItemStack.class), input, output, output_count, solution);
    }
}
package org.fluentd.logger.sender;

import org.fluentd.logger.util.MockFluentd;
import org.fluentd.logger.util.MockFluentd.MockProcess;
import org.junit.Test;
import org.msgpack.MessagePack;
import org.msgpack.unpacker.Unpacker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Tests for RawSocketSender against an in-process MockFluentd server:
 * basic emits, bulk emits, multiple servers, connect timeout, and
 * buffering/resending after a dropped connection.
 */
public class TestRawSocketSender {

    /** Emits two records and verifies both arrive with their tags and fields. */
    @Test
    public void testNormal01() throws Exception {
        // start mock fluentd
        int port = MockFluentd.randomPort();
        final List<Event> elist = new ArrayList<Event>();
        MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    // Read events until the peer closes the connection (EOF).
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elist.add(e);
                    }
                    //socket.close();
                } catch (EOFException e) {
                    // ignore
                }
            }
        });
        fluentd.start();

        // start senders
        Sender sender = new RawSocketSender("localhost", port);
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("t1k1", "t1v1");
        data.put("t1k2", "t1v2");
        sender.emit("tag.label1", data);

        Map<String, Object> data2 = new HashMap<String, Object>();
        data2.put("t2k1", "t2v1");
        data2.put("t2k2", "t2v2");
        sender.emit("tag.label2", data2);

        // close sender sockets
        sender.close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentd.close();

        // check data
        assertEquals(2, elist.size());
        {
            Event e = elist.get(0);
            assertEquals("tag.label1", e.tag);
            assertEquals("t1v1", e.data.get("t1k1"));
            assertEquals("t1v2", e.data.get("t1k2"));
        }
        {
            Event e = elist.get(1);
            assertEquals("tag.label2", e.tag);
            assertEquals("t2v1", e.data.get("t2k1"));
            assertEquals("t2v2", e.data.get("t2k2"));
        }
    }

    /** Emits 10000 records and verifies none are lost. */
    @Test
    public void testNormal02() throws Exception {
        // start mock fluentd
        int port = MockFluentd.randomPort(); // Use a random port available
        final List<Event> elist = new ArrayList<Event>();
        MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elist.add(e);
                    }
                    //socket.close();
                } catch (EOFException e) {
                    // ignore
                }
            }
        });
        fluentd.start();

        // start senders
        Sender sender = new RawSocketSender("localhost", port);
        int count = 10000;
        for (int i = 0; i < count; i++) {
            // NOTE(review): the tag is the literal "tag:i" for every record
            // (no concatenation); only the record count is asserted, so this
            // has no effect on the test — but confirm it was not meant to be
            // "tag:" + i.
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            sender.emit(tag, record);
        }

        // close sender sockets
        sender.close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentd.close();

        // check data
        assertEquals(count, elist.size());
    }

    /**
     * Two independent senders to two mock servers; the first sender is created
     * before its server starts, exercising reconnect/buffering behavior.
     */
    @Test
    public void testNormal03() throws Exception {
        // start mock fluentds
        final MockFluentd[] fluentds = new MockFluentd[2];
        final List[] elists = new List[2];
        final int[] ports = new int[2];
        ports[0] = MockFluentd.randomPort();

        // Created before the server is listening: the initial connect is
        // expected to fail and the sender should buffer/retry.
        RawSocketSender rawSocketSender = new RawSocketSender("localhost", ports[0]);

        elists[0] = new ArrayList<Event>();
        fluentds[0] = new MockFluentd(ports[0], new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elists[0].add(e);
                    }
                    //socket.close();
                } catch (EOFException e) {
                    // ignore
                }
            }
        });
        fluentds[0].start();
        ports[1] = MockFluentd.randomPort();
        elists[1] = new ArrayList<Event>();
        fluentds[1] = new MockFluentd(ports[1], new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elists[1].add(e);
                    }
                    //socket.close();
                } catch (EOFException e) {
                    // ignore
                }
            }
        });
        fluentds[1].start();

        // start senders
        Sender[] senders = new Sender[2];
        int[] counts = new int[2];
        senders[0] = rawSocketSender;
        counts[0] = 10000;
        for (int i = 0; i < counts[0]; i++) {
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            senders[0].emit(tag, record);
        }
        senders[1] = new RawSocketSender("localhost", ports[1]);
        counts[1] = 10000;
        for (int i = 0; i < counts[1]; i++) {
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            senders[1].emit(tag, record);
        }

        // close sender sockets
        senders[0].close();
        senders[1].close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentds[0].close();
        fluentds[1].close();

        // check data
        assertEquals(counts[0], elists[0].size());
        assertEquals(counts[1], elists[1].size());
    }

    /**
     * Connecting to 192.0.2.1 (TEST-NET-1, unroutable) with a 200ms timeout
     * must return promptly rather than hang; the flag proves the constructor
     * (and close) finished.
     */
    @Test
    public void testTimeout() throws InterruptedException {
        final AtomicBoolean socketFinished = new AtomicBoolean(false);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(new Runnable() {
            @Override
            public void run() {
                RawSocketSender socketSender = null;
                try {
                    // try to connect to test network
                    socketSender = new RawSocketSender("192.0.2.1", 24224, 200, 8 * 1024);
                } finally {
                    if (socketSender != null) {
                        socketSender.close();
                    }
                    socketFinished.set(true);
                }
            }
        });
        // Busy-wait (yielding) until the connect attempt has finished.
        while(!socketFinished.get())
            Thread.yield();
        assertTrue(socketFinished.get());
        executor.shutdownNow();
    }

    /**
     * Kills the server-side connection mid-stream and verifies the sender
     * buffers the failed record and resends everything in order once the
     * connection is re-established.
     */
    @Test
    public void testBufferingAndResending() throws InterruptedException, IOException {
        final ConcurrentLinkedQueue<Event> readEvents = new ConcurrentLinkedQueue<Event>();
        final CountDownLatch countDownLatch = new CountDownLatch(4);
        int port = MockFluentd.randomPort();
        MockProcess mockProcess = new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        readEvents.add(e);
                        countDownLatch.countDown();
                    }
                } catch (EOFException e) {
                    // e.printStackTrace();
                }
            }
        };
        MockFluentd fluentd = new MockFluentd(port, mockProcess);
        fluentd.start();

        Sender sender = new RawSocketSender("localhost", port);
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("key0", "v0");
        sender.emit("tag0", data);

        // close fluentd to make the next sending failed
        TimeUnit.MILLISECONDS.sleep(500);
        fluentd.closeClientSockets();
        TimeUnit.MILLISECONDS.sleep(500);

        data = new HashMap<String, Object>();
        data.put("key0", "v1");
        sender.emit("tag0", data);

        // wait to avoid the suppression of reconnection
        TimeUnit.MILLISECONDS.sleep(500);

        data = new HashMap<String, Object>();
        data.put("key0", "v2");
        sender.emit("tag0", data);

        data = new HashMap<String, Object>();
        data.put("key0", "v3");
        sender.emit("tag0", data);

        countDownLatch.await(500, TimeUnit.MILLISECONDS);

        sender.close();
        fluentd.close();

        // All four records must arrive, in emit order, despite the dropped
        // connection between v0 and v1.
        assertEquals(4, readEvents.size());
        Event event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v0"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v1"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v2"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v3"));
    }
}
package jenkins.plugins.coverity.ws;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.xml.datatype.DatatypeConfigurationException;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import com.coverity.ws.v9.CovRemoteServiceException_Exception;

import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Result;
import jenkins.model.Jenkins;
import jenkins.plugins.coverity.CIMInstance;
import jenkins.plugins.coverity.CIMStream;
import jenkins.plugins.coverity.CoverityBuildAction;
import jenkins.plugins.coverity.CoverityPublisher;
import jenkins.plugins.coverity.CoverityPublisher.DescriptorImpl;
import jenkins.plugins.coverity.DefectFilters;
import jenkins.plugins.coverity.Utils.CoverityPublisherBuilder;
import jenkins.plugins.coverity.Utils.TestableConsoleLogger;
import jenkins.plugins.coverity.ws.TestWebServiceFactory.TestDefectService;

/**
 * Unit tests for {@code DefectReader}: fetching merged defects from a stubbed
 * Coverity defect web service, attaching them to the build as a
 * {@link CoverityBuildAction}, paging through large result sets, failing the
 * build when configured to, and skipping the fetch entirely.
 *
 * Uses PowerMock to stub the static {@link Jenkins#getInstance()} lookup.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest(Jenkins.class)
public class DefectReaderTest {
    @Mock
    private Jenkins jenkins;

    @Mock
    private AbstractBuild<?, ?> build;

    @Mock
    private BuildListener listener;

    @Mock
    private DescriptorImpl descriptor;

    // captures everything the reader logs so tests can assert exact messages
    private TestableConsoleLogger consoleLogger;

    // test double for the Coverity defect web service; tests seed it with defects
    private TestDefectService defectService;

    private static final String cimInstanceName = "cim-instance";

    /**
     * Wires the mock Jenkins instance, the capturing console logger, and a
     * {@link TestDefectService}-backed {@link CIMInstance} into the global
     * plugin configuration ({@link DescriptorImpl}) under {@code cimInstanceName}.
     */
    @Before
    public void setup() throws IOException {
        // setup jenkins
        PowerMockito.mockStatic(Jenkins.class);
        when(Jenkins.getInstance()).thenReturn(jenkins);

        // setup console logger
        consoleLogger = new TestableConsoleLogger();
        when(listener.getLogger()).thenReturn(consoleLogger.getPrintStream());

        // setup global configuration (DescriptorImpl) with TestWebService
        CIMInstance cimInstance = mock(CIMInstance.class);
        defectService = (TestDefectService)new TestWebServiceFactory().getDefectService(cimInstance);
        when(cimInstance.getDefectService()).thenReturn(defectService);
        when(descriptor.getInstance(cimInstanceName)).thenReturn(cimInstance);
        when(jenkins.getDescriptorOrDie(CoverityPublisher.class)).thenReturn(descriptor);
    }

    /**
     * Builds a {@link DefectFilters} instance whose values match every defect
     * produced by {@link TestDefectService#setupMergedDefects}.
     *
     * NOTE(review): a {@link Descriptor.FormException} is swallowed here and
     * {@code null} is returned, which would surface later as an opaque
     * NullPointerException in the test under setup failure — consider rethrowing.
     */
    private DefectFilters getMatchingDefectFilters(){
        try {
            return new DefectFilters(
                Arrays.asList("Undecided"),
                new ArrayList<>(Arrays.asList("High", "Medium", "Low")),
                Arrays.asList("Unclassified"),
                Arrays.asList("Unspecified", "Major", "Moderate", "Minor"),
                Arrays.asList("Default.Other"),
                Arrays.asList("TEST_CHECKER"),
                "2017-01-01");
        } catch(Descriptor.FormException e) {
            return null;
        }
    }

    /**
     * No filters configured: all 10 stubbed defects are fetched, attached to the
     * build action, and the expected progress/summary messages are logged.
     */
    @Test
    public void getLatestDefectsForBuild_withNoDefectFilters_addDefectsToBuildAction() throws ParseException, DatatypeConfigurationException, IOException, CovRemoteServiceException_Exception {
        when(jenkins.getRootUrl()).thenReturn("rootUrl/");
        when(build.getUrl()).thenReturn("buildUrl/");
        CIMStream cimStream = new CIMStream(cimInstanceName, "test-project", "test-stream", null);
        CoverityPublisher publisher = new CoverityPublisherBuilder().withCimStream(cimStream).build();

        defectService.setupMergedDefects(10);

        DefectReader reader = new DefectReader(build, listener, publisher);

        Boolean result = reader.getLatestDefectsForBuild();
        assertTrue(result);

        // assert build action added to build with expected defect count
        ArgumentCaptor<CoverityBuildAction> buildAction = ArgumentCaptor.forClass(CoverityBuildAction.class);
        verify(build).addAction(buildAction.capture());
        assertEquals(10, buildAction.getValue().getDefects().size());

        // verify all expected log messages were written
        consoleLogger.verifyMessages(
            "[Coverity] Fetching defects for stream \"test-stream\"",
            "[Coverity] Found 10 defects matching all filters",
            "Coverity details: rootUrl/buildUrl/coverity_defects");
    }

    /**
     * Filters configured to match every stubbed defect: all 3 defects pass the
     * filters and are attached to the build action.
     */
    @Test
    public void getLatestDefectsForBuild_withMatchingDefectFilters_addDefectsToBuildAction() throws ParseException, DatatypeConfigurationException, IOException, CovRemoteServiceException_Exception {
        when(jenkins.getRootUrl()).thenReturn("rootUrl/");
        when(build.getUrl()).thenReturn("buildUrl/");
        CIMStream cimStream = new CIMStream(cimInstanceName, "test-project", "test-stream", getMatchingDefectFilters());
        CoverityPublisher publisher = new CoverityPublisherBuilder().withCimStream(cimStream).build();

        defectService.setupMergedDefects(3);

        DefectReader reader = new DefectReader(build, listener, publisher);

        Boolean result = reader.getLatestDefectsForBuild();
        assertTrue(result);

        // assert build action added to build with expected defect count
        ArgumentCaptor<CoverityBuildAction> buildAction = ArgumentCaptor.forClass(CoverityBuildAction.class);
        verify(build).addAction(buildAction.capture());
        assertEquals(3, buildAction.getValue().getDefects().size());

        // verify all expected log messages were written
        consoleLogger.verifyMessages(
            "[Coverity] Fetching defects for stream \"test-stream\"",
            "[Coverity] Found 3 defects matching all filters",
            "Coverity details: rootUrl/buildUrl/coverity_defects");
    }

    /**
     * More defects than one web-service page (1,000): the reader pages through
     * the results, logging progress per page, and ends up with all 3,750 defects.
     */
    @Test
    public void getLatestDefectsForBuild_withOverOneThousandMatchingDefects_addDefectsToBuildAction() throws Descriptor.FormException, ParseException, DatatypeConfigurationException, IOException, CovRemoteServiceException_Exception {
        when(jenkins.getRootUrl()).thenReturn("rootUrl/");
        when(build.getUrl()).thenReturn("buildUrl/");
        CIMStream cimStream = new CIMStream(cimInstanceName, "test-project", "test-stream", getMatchingDefectFilters());
        CoverityPublisher publisher = new CoverityPublisherBuilder().withCimStream(cimStream).build();

        defectService.setupMergedDefects(3750);

        DefectReader reader = new DefectReader(build, listener, publisher);

        Boolean result = reader.getLatestDefectsForBuild();
        assertTrue(result);

        // assert build action added to build with expected defect count
        ArgumentCaptor<CoverityBuildAction> buildAction = ArgumentCaptor.forClass(CoverityBuildAction.class);
        verify(build).addAction(buildAction.capture());
        assertEquals(3750, buildAction.getValue().getDefects().size());

        // verify all expected log messages were written
        consoleLogger.verifyMessages(
            "[Coverity] Fetching defects for stream \"test-stream\"",
            "[Coverity] Fetching defects for stream \"test-stream\" (fetched 1,000 of 3,750)",
            "[Coverity] Fetching defects for stream \"test-stream\" (fetched 2,000 of 3,750)",
            "[Coverity] Fetching defects for stream \"test-stream\" (fetched 3,000 of 3,750)",
            "[Coverity] Found 3,750 defects matching all filters",
            "Coverity details: rootUrl/buildUrl/coverity_defects");
    }

    /**
     * With {@code failBuild} enabled and matching defects found, the reader
     * returns {@code false} (build marked failed) and logs no details link.
     */
    @Test
    public void getLatestDefectsForBuild_withMatchingDefectFilters_setsBuildResultAsFailed() throws ParseException, DatatypeConfigurationException, IOException, CovRemoteServiceException_Exception {
        // set initial state as success (result can only get worse)
        when(build.getResult()).thenReturn(Result.SUCCESS);
        CIMStream cimStream = new CIMStream(cimInstanceName, "test-project", "test-stream", getMatchingDefectFilters());
        CoverityPublisher publisher = new CoverityPublisherBuilder().withCimStream(cimStream).withFailBuild(true).build();

        defectService.setupMergedDefects(3);

        DefectReader reader = new DefectReader(build, listener, publisher);

        Boolean result = reader.getLatestDefectsForBuild();
        assertFalse(result);

        // verify all expected log messages were written
        consoleLogger.verifyMessages(
            "[Coverity] Fetching defects for stream \"test-stream\"",
            "[Coverity] Found 3 defects matching all filters");
    }

    /**
     * With {@code skipFetchingDefects} set, the reader does nothing and
     * returns {@code false}.
     */
    @Test
    public void getLatestDefectsForBuild_skipsFetchingDefects() throws Descriptor.FormException, ParseException, DatatypeConfigurationException, IOException, CovRemoteServiceException_Exception {
        CoverityPublisher publisher = new CoverityPublisherBuilder().withSkipFetchingDefects(true).build();

        DefectReader reader = new DefectReader(build, listener, publisher);

        Boolean result = reader.getLatestDefectsForBuild();
        assertFalse(result);
    }
}
package net.acesinc.data.json.generator.types; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.GregorianCalendar; /** * * @author andrewserff */ public abstract class BaseDateType extends TypeHandler { private Date min; private Date max; public static final SimpleDateFormat INPUT_DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd"); public BaseDateType() { } @Override public void setLaunchArguments(String[] launchArguments) { super.setLaunchArguments(launchArguments); try { if (launchArguments.length == 0) { min = INPUT_DATE_FORMAT.parse("1970/01/01"); max = new Date(); } else if (launchArguments.length == 1) { //min only min = INPUT_DATE_FORMAT.parse(stripQuotes(launchArguments[0])); max = new Date(); } else if (launchArguments.length == 2) { min = INPUT_DATE_FORMAT.parse(stripQuotes(launchArguments[0])); max = INPUT_DATE_FORMAT.parse(stripQuotes(launchArguments[1])); } } catch (ParseException ex) { throw new IllegalArgumentException("Provided date is invalid. 
Please use the format [ yyyy/MM/dd ]", ex); } if (!min.before(max) && !min.equals(max)) { throw new IllegalArgumentException("Min Date must be before Max Date"); } } public Date getRandomDate() { GregorianCalendar gc = new GregorianCalendar(); GregorianCalendar minCal = new GregorianCalendar(); minCal.setTime(min); GregorianCalendar maxCal = new GregorianCalendar(); maxCal.setTime(max); int year = getRand().nextInt(minCal.get(GregorianCalendar.YEAR), maxCal.get(GregorianCalendar.YEAR)); gc.set(GregorianCalendar.YEAR, year); int month = -1; if (minCal.get(GregorianCalendar.YEAR) == maxCal.get(GregorianCalendar.YEAR)) { month = getRand().nextInt(minCal.get(GregorianCalendar.MONTH), maxCal.get(GregorianCalendar.MONTH)); } else if (year == minCal.get(GregorianCalendar.YEAR)) { month = getRand().nextInt(minCal.get(GregorianCalendar.MONTH), gc.getActualMaximum(GregorianCalendar.MONTH)); } else if (year == maxCal.get(GregorianCalendar.YEAR)) { month = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.MONTH), maxCal.get(GregorianCalendar.MONTH)); } else { month = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.MONTH), gc.getActualMaximum(GregorianCalendar.MONTH)); } gc.set(GregorianCalendar.MONTH, month); int day = -1; if (minCal.get(GregorianCalendar.YEAR) == maxCal.get(GregorianCalendar.YEAR) && minCal.get(GregorianCalendar.MONTH) == maxCal.get(GregorianCalendar.MONTH)) { day = getRand().nextInt(minCal.get(GregorianCalendar.DAY_OF_MONTH), maxCal.get(GregorianCalendar.DAY_OF_MONTH)); } else if (year == minCal.get(GregorianCalendar.YEAR) && month == minCal.get(GregorianCalendar.MONTH)) { day = getRand().nextInt(minCal.get(GregorianCalendar.DAY_OF_MONTH), gc.getActualMaximum(GregorianCalendar.DAY_OF_MONTH)); } else if (year == maxCal.get(GregorianCalendar.YEAR) && month == maxCal.get(GregorianCalendar.MONTH)) { day = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.DAY_OF_MONTH), maxCal.get(GregorianCalendar.DAY_OF_MONTH)); } else { day = 
getRand().nextInt(1, gc.getActualMaximum(GregorianCalendar.DAY_OF_MONTH)); } gc.set(GregorianCalendar.DAY_OF_MONTH, day); //generate a random time too int hour = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.HOUR_OF_DAY), gc.getActualMaximum(GregorianCalendar.HOUR_OF_DAY)); gc.set(GregorianCalendar.HOUR_OF_DAY, hour); int min = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.MINUTE), gc.getActualMaximum(GregorianCalendar.MINUTE)); gc.set(GregorianCalendar.MINUTE, min); int sec = getRand().nextInt(gc.getActualMinimum(GregorianCalendar.SECOND), gc.getActualMaximum(GregorianCalendar.SECOND)); gc.set(GregorianCalendar.SECOND, sec); return gc.getTime(); } }
package jenkins.plugins.git; import hudson.FilePath; import hudson.Launcher; import hudson.model.Action; import hudson.model.Actionable; import hudson.model.Run; import hudson.model.TaskListener; import hudson.plugins.git.UserRemoteConfig; import hudson.plugins.git.extensions.impl.IgnoreNotifyCommit; import hudson.scm.SCMRevisionState; import hudson.plugins.git.GitSCM; import hudson.plugins.git.extensions.GitSCMExtension; import hudson.plugins.git.extensions.impl.BuildChooserSetting; import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.util.StreamTaskListener; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.UUID; import jenkins.plugins.git.traits.BranchDiscoveryTrait; import jenkins.plugins.git.traits.IgnoreOnPushNotificationTrait; import jenkins.plugins.git.traits.TagDiscoveryTrait; import jenkins.scm.api.SCMHead; import jenkins.scm.api.SCMRevision; import jenkins.scm.api.SCMSource; import static org.hamcrest.Matchers.*; import jenkins.scm.api.SCMSourceOwner; import jenkins.scm.api.metadata.PrimaryInstanceMetadataAction; import jenkins.scm.api.trait.SCMSourceTrait; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.Issue; import org.jvnet.hudson.test.JenkinsRule; import org.mockito.Mockito; import static org.junit.Assert.*; import static org.mockito.Mockito.when; /** * Tests for {@link AbstractGitSCMSource} */ public class AbstractGitSCMSourceTest { @Rule public JenkinsRule r = new JenkinsRule(); @Rule public GitSampleRepoRule sampleRepo = new GitSampleRepoRule(); @Rule public GitSampleRepoRule sampleRepo2 = new GitSampleRepoRule(); // TODO AbstractGitSCMSourceRetrieveHeadsTest *sounds* like it would be the right place, but it does not in fact retrieve any heads! 
// Verifies that fetch() discovers branch heads (in predictable TreeMap order),
// that the cached repository is reused on a second fetch, and that a branch
// created afterwards shows up on the next fetch.
@Issue("JENKINS-37482")
@Test
public void retrieveHeads() throws Exception {
    sampleRepo.init();
    sampleRepo.git("checkout", "-b", "dev");
    sampleRepo.write("file", "modified");
    sampleRepo.git("commit", "--all", "--message=dev");
    SCMSource source = new GitSCMSource(null, sampleRepo.toString(), "", "*", "", true);
    TaskListener listener = StreamTaskListener.fromStderr();
    // SCMHeadObserver.Collector.result is a TreeMap so order is predictable:
    assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString());
    // And reuse cache:
    assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString());
    sampleRepo.git("checkout", "-b", "dev2");
    sampleRepo.write("file", "modified again");
    sampleRepo.git("commit", "--all", "--message=dev2");
    // After changing data:
    assertEquals("[SCMHead{'dev'}, SCMHead{'dev2'}, SCMHead{'master'}]", source.fetch(listener).toString());
}

// Verifies that a trait-based GitSCMSource returns no heads until the
// BranchDiscoveryTrait is added, then behaves like retrieveHeads() above.
@Test
public void retrieveHeadsRequiresBranchDiscovery() throws Exception {
    sampleRepo.init();
    sampleRepo.git("checkout", "-b", "dev");
    sampleRepo.write("file", "modified");
    sampleRepo.git("commit", "--all", "--message=dev");
    GitSCMSource source = new GitSCMSource(sampleRepo.toString());
    TaskListener listener = StreamTaskListener.fromStderr();
    // no traits configured yet, so no branches are discovered
    // SCMHeadObserver.Collector.result is a TreeMap so order is predictable:
    assertEquals("[]", source.fetch(listener).toString());
    source.setTraits(Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait()));
    assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString());
    // And reuse cache:
    assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString());
    sampleRepo.git("checkout", "-b", "dev2");
    sampleRepo.write("file", "modified again");
    sampleRepo.git("commit", "--all", "--message=dev2");
    // After changing data:
    assertEquals("[SCMHead{'dev'}, SCMHead{'dev2'}, SCMHead{'master'}]", source.fetch(listener).toString());
}

@Issue("JENKINS-46207")
@Test
public void
retrieveHeadsSupportsTagDiscovery_ignoreTagsWithoutTagDiscoveryTrait() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev"); sampleRepo.git("tag", "lightweight"); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2"); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[]", source.fetch(listener).toString()); source.setTraits(Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait())); assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString()); // And reuse cache: assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString()); sampleRepo.git("checkout", "-b", "dev2"); sampleRepo.write("file", "modified again"); sampleRepo.git("commit", "--all", "--message=dev2"); // After changing data: assertEquals("[SCMHead{'dev'}, SCMHead{'dev2'}, SCMHead{'master'}]", source.fetch(listener).toString()); } @Issue("JENKINS-46207") @Test public void retrieveHeadsSupportsTagDiscovery_findTagsWithTagDiscoveryTrait() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev-commit-message"); long beforeLightweightTag = System.currentTimeMillis(); sampleRepo.git("tag", "lightweight"); long afterLightweightTag = System.currentTimeMillis(); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2-commit-message"); long beforeAnnotatedTag = System.currentTimeMillis(); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); long 
afterAnnotatedTag = System.currentTimeMillis(); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3-commit-message"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); source.setTraits(new ArrayList<SCMSourceTrait>()); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[]", source.fetch(listener).toString()); source.setTraits(Arrays.asList(new BranchDiscoveryTrait(), new TagDiscoveryTrait())); Set<SCMHead> scmHeadSet = source.fetch(listener); long now = System.currentTimeMillis(); for (SCMHead scmHead : scmHeadSet) { if (scmHead instanceof GitTagSCMHead) { GitTagSCMHead tagHead = (GitTagSCMHead) scmHead; // FAT file system time stamps only resolve to 2 second boundary // EXT3 file system time stamps only resolve to 1 second boundary long fileTimeStampFuzz = isWindows() ? 2000L : 1000L; if (scmHead.getName().equals("lightweight")) { long timeStampDelta = afterLightweightTag - tagHead.getTimestamp(); assertThat(timeStampDelta, is(both(greaterThanOrEqualTo(0L)).and(lessThanOrEqualTo(afterLightweightTag - beforeLightweightTag + fileTimeStampFuzz)))); } else if (scmHead.getName().equals("annotated")) { long timeStampDelta = afterAnnotatedTag - tagHead.getTimestamp(); assertThat(timeStampDelta, is(both(greaterThanOrEqualTo(0L)).and(lessThanOrEqualTo(afterAnnotatedTag - beforeAnnotatedTag + fileTimeStampFuzz)))); } else { fail("Unexpected tag head '" + scmHead.getName() + "'"); } } } assertEquals("[SCMHead{'annotated'}, SCMHead{'dev'}, SCMHead{'lightweight'}, SCMHead{'master'}]", scmHeadSet.toString()); // And reuse cache: assertEquals("[SCMHead{'annotated'}, SCMHead{'dev'}, SCMHead{'lightweight'}, SCMHead{'master'}]", source.fetch(listener).toString()); sampleRepo.git("checkout", "-b", "dev2"); sampleRepo.write("file", "modified again"); sampleRepo.git("commit", "--all", "--message=dev2"); // After changing data: 
assertEquals("[SCMHead{'annotated'}, SCMHead{'dev'}, SCMHead{'dev2'}, SCMHead{'lightweight'}, SCMHead{'master'}]", source.fetch(listener).toString()); } @Issue("JENKINS-46207") @Test public void retrieveHeadsSupportsTagDiscovery_onlyTagsWithoutBranchDiscoveryTrait() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev"); sampleRepo.git("tag", "lightweight"); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2"); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); source.setTraits(new ArrayList<SCMSourceTrait>()); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[]", source.fetch(listener).toString()); source.setTraits(Collections.<SCMSourceTrait>singletonList(new TagDiscoveryTrait())); assertEquals("[SCMHead{'annotated'}, SCMHead{'lightweight'}]", source.fetch(listener).toString()); // And reuse cache: assertEquals("[SCMHead{'annotated'}, SCMHead{'lightweight'}]", source.fetch(listener).toString()); } @Issue("JENKINS-45953") @Test public void retrieveRevisions() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev"); sampleRepo.git("tag", "lightweight"); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2"); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); source.setTraits(new ArrayList<SCMSourceTrait>()); TaskListener listener = 
StreamTaskListener.fromStderr(); assertThat(source.fetchRevisions(listener), hasSize(0)); source.setTraits(Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait())); assertThat(source.fetchRevisions(listener), containsInAnyOrder("dev", "master")); source.setTraits(Collections.<SCMSourceTrait>singletonList(new TagDiscoveryTrait())); assertThat(source.fetchRevisions(listener), containsInAnyOrder("annotated", "lightweight")); source.setTraits(Arrays.asList(new BranchDiscoveryTrait(), new TagDiscoveryTrait())); assertThat(source.fetchRevisions(listener), containsInAnyOrder("dev", "master", "annotated", "lightweight")); } public static abstract class ActionableSCMSourceOwner extends Actionable implements SCMSourceOwner { } @Test public void retrievePrimaryHead_NotDuplicated() throws Exception { retrievePrimaryHead(false); } @Test public void retrievePrimaryHead_Duplicated() throws Exception { retrievePrimaryHead(true); } public void retrievePrimaryHead(boolean duplicatePrimary) throws Exception { sampleRepo.init(); sampleRepo.write("file.txt", ""); sampleRepo.git("add", "file.txt"); sampleRepo.git("commit", "--all", "--message=add-empty-file"); sampleRepo.git("checkout", "-b", "new-primary"); sampleRepo.write("file.txt", "content"); sampleRepo.git("add", "file.txt"); sampleRepo.git("commit", "--all", "--message=add-file"); if (duplicatePrimary) { // If more than one branch points to same sha1 as new-primary and the // command line git implementation is older than 2.8.0, then the guesser // for primary won't be able to choose between the two alternatives. // The next line illustrates that case with older command line git. 
sampleRepo.git("checkout", "-b", "new-primary-duplicate", "new-primary"); } sampleRepo.git("checkout", "master"); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.git("symbolic-ref", "HEAD", "refs/heads/new-primary"); SCMSource source = new GitSCMSource(null, sampleRepo.toString(), "", "*", "", true); ActionableSCMSourceOwner owner = Mockito.mock(ActionableSCMSourceOwner.class); when(owner.getSCMSource(source.getId())).thenReturn(source); when(owner.getSCMSources()).thenReturn(Collections.singletonList(source)); source.setOwner(owner); TaskListener listener = StreamTaskListener.fromStderr(); Map<String, SCMHead> headByName = new TreeMap<String, SCMHead>(); for (SCMHead h: source.fetch(listener)) { headByName.put(h.getName(), h); } if (duplicatePrimary) { assertThat(headByName.keySet(), containsInAnyOrder("master", "dev", "new-primary", "new-primary-duplicate")); } else { assertThat(headByName.keySet(), containsInAnyOrder("master", "dev", "new-primary")); } List<Action> actions = source.fetchActions(null, listener); GitRemoteHeadRefAction refAction = null; for (Action a: actions) { if (a instanceof GitRemoteHeadRefAction) { refAction = (GitRemoteHeadRefAction) a; break; } } final boolean CLI_GIT_LESS_THAN_280 = !sampleRepo.gitVersionAtLeast(2, 8); if (duplicatePrimary && CLI_GIT_LESS_THAN_280) { assertThat(refAction, is(nullValue())); } else { assertThat(refAction, notNullValue()); assertThat(refAction.getName(), is("new-primary")); when(owner.getAction(GitRemoteHeadRefAction.class)).thenReturn(refAction); when(owner.getActions(GitRemoteHeadRefAction.class)).thenReturn(Collections.singletonList(refAction)); actions = source.fetchActions(headByName.get("new-primary"), null, listener); } PrimaryInstanceMetadataAction primary = null; for (Action a: actions) { if (a instanceof PrimaryInstanceMetadataAction) { primary = (PrimaryInstanceMetadataAction) a; break; } } if (duplicatePrimary && CLI_GIT_LESS_THAN_280) { assertThat(primary, is(nullValue())); } else { 
assertThat(primary, notNullValue()); } } @Issue("JENKINS-31155") @Test public void retrieveRevision() throws Exception { sampleRepo.init(); sampleRepo.write("file", "v1"); sampleRepo.git("commit", "--all", "--message=v1"); sampleRepo.git("tag", "v1"); String v1 = sampleRepo.head(); sampleRepo.write("file", "v2"); sampleRepo.git("commit", "--all", "--message=v2"); // master sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "v3"); sampleRepo.git("commit", "--all", "--message=v3"); // dev // SCM.checkout does not permit a null build argument, unfortunately. Run<?,?> run = r.buildAndAssertSuccess(r.createFreeStyleProject()); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); source.setTraits(Arrays.asList(new BranchDiscoveryTrait(), new TagDiscoveryTrait())); StreamTaskListener listener = StreamTaskListener.fromStderr(); // Test retrieval of branches: assertEquals("v2", fileAt("master", run, source, listener)); assertEquals("v3", fileAt("dev", run, source, listener)); // Tags: assertEquals("v1", fileAt("v1", run, source, listener)); // And commit hashes: assertEquals("v1", fileAt(v1, run, source, listener)); assertEquals("v1", fileAt(v1.substring(0, 7), run, source, listener)); // Nonexistent stuff: assertNull(fileAt("nonexistent", run, source, listener)); assertNull(fileAt("1234567", run, source, listener)); assertNull(fileAt("", run, source, listener)); assertNull(fileAt("\n", run, source, listener)); assertThat(source.fetchRevisions(listener), hasItems("master", "dev", "v1")); // we do not care to return commit hashes or other references } private String fileAt(String revision, Run<?,?> run, SCMSource source, TaskListener listener) throws Exception { SCMRevision rev = source.fetch(revision, listener); if (rev == null) { return null; } else { FilePath ws = new FilePath(run.getRootDir()).child("tmp-" + revision); source.build(rev.getHead(), rev).checkout(run, new Launcher.LocalLauncher(listener), ws, listener, null, SCMRevisionState.NONE); 
return ws.child("file").readToString(); } } @Issue("JENKINS-37727") @Test public void pruneRemovesDeletedBranches() throws Exception { sampleRepo.init(); /* Write a file to the master branch */ sampleRepo.write("master-file", "master-content-" + UUID.randomUUID().toString()); sampleRepo.git("add", "master-file"); sampleRepo.git("commit", "--message=master-branch-commit-message"); /* Write a file to the dev branch */ sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("dev-file", "dev-content-" + UUID.randomUUID().toString()); sampleRepo.git("add", "dev-file"); sampleRepo.git("commit", "--message=dev-branch-commit-message"); /* Fetch from sampleRepo */ GitSCMSource source = new GitSCMSource(null, sampleRepo.toString(), "", "*", "", true); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString()); // And reuse cache: assertEquals("[SCMHead{'dev'}, SCMHead{'master'}]", source.fetch(listener).toString()); /* Create dev2 branch and write a file to it */ sampleRepo.git("checkout", "-b", "dev2", "master"); sampleRepo.write("dev2-file", "dev2-content-" + UUID.randomUUID().toString()); sampleRepo.git("add", "dev2-file"); sampleRepo.git("commit", "--message=dev2-branch-commit-message"); // Verify new branch is visible assertEquals("[SCMHead{'dev'}, SCMHead{'dev2'}, SCMHead{'master'}]", source.fetch(listener).toString()); /* Delete the dev branch */ sampleRepo.git("branch", "-D", "dev"); /* Fetch and confirm dev branch was pruned */ assertEquals("[SCMHead{'dev2'}, SCMHead{'master'}]", source.fetch(listener).toString()); } @Test public void testSpecificRevisionBuildChooser() throws Exception { sampleRepo.init(); /* Write a file to the master branch */ sampleRepo.write("master-file", "master-content-" + UUID.randomUUID().toString()); sampleRepo.git("add", "master-file"); sampleRepo.git("commit", 
"--message=master-branch-commit-message"); /* Fetch from sampleRepo */ GitSCMSource source = new GitSCMSource(sampleRepo.toString()); source.setTraits(Collections.<SCMSourceTrait>singletonList(new IgnoreOnPushNotificationTrait())); List<GitSCMExtension> extensions = new ArrayList<GitSCMExtension>(); assertThat(source.getExtensions(), is(empty())); LocalBranch localBranchExtension = new LocalBranch("**"); extensions.add(localBranchExtension); source.setExtensions(extensions); assertThat(source.getExtensions(), contains( allOf( instanceOf(LocalBranch.class), hasProperty("localBranch", is("**") ) ) )); SCMHead head = new SCMHead("master"); SCMRevision revision = new AbstractGitSCMSource.SCMRevisionImpl(head, "beaded4deed2bed4feed2deaf78933d0f97a5a34"); // because we are ignoring push notifications we also ignore commits extensions.add(new IgnoreNotifyCommit()); /* Check that BuildChooserSetting not added to extensions by build() */ GitSCM scm = (GitSCM) source.build(head); assertThat(scm.getExtensions(), containsInAnyOrder( allOf( instanceOf(LocalBranch.class), hasProperty("localBranch", is("**") ) ), // no BuildChooserSetting instanceOf(IgnoreNotifyCommit.class), instanceOf(GitSCMSourceDefaults.class) )); /* Check that BuildChooserSetting has been added to extensions by build() */ GitSCM scmRevision = (GitSCM) source.build(head, revision); assertThat(scmRevision.getExtensions(), containsInAnyOrder( allOf( instanceOf(LocalBranch.class), hasProperty("localBranch", is("**") ) ), instanceOf(BuildChooserSetting.class), instanceOf(IgnoreNotifyCommit.class), instanceOf(GitSCMSourceDefaults.class) )); } @Test public void testCustomRemoteName() throws Exception { sampleRepo.init(); GitSCMSource source = new GitSCMSource(null, sampleRepo.toString(), "", "upstream", null, "*", "", true); SCMHead head = new SCMHead("master"); GitSCM scm = (GitSCM) source.build(head); List<UserRemoteConfig> configs = scm.getUserRemoteConfigs(); assertEquals(1, configs.size()); UserRemoteConfig 
config = configs.get(0); assertEquals("upstream", config.getName()); GitSCMSource source = new GitSCMSource(null, sampleRepo.toString(), "", null, "+refs/heads/*:refs/remotes/origin/* +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/*", "*", "", true); assertEquals("+refs/heads/*:refs/remotes/origin/* +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/*", config.getRefspec());
package net.arcation.allegiance.listeners; import net.arcation.allegiance.Allegiance; import net.arcation.allegiance.data.PlayerData; import net.arcation.allegiance.targets.PlaytimeTarget; import net.arcation.allegiance.util.Hash; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.player.PlayerKickEvent; import org.bukkit.event.player.PlayerQuitEvent; import java.util.HashMap; import java.util.UUID; public class PlaytimeListener implements Listener, Runnable { private final PlaytimeTarget target; private final Allegiance allegiance; private static final String PLAYTIME_KEY = "P"; //We're just going to store one int for their previous location //If the location they are at at (currentTime + playTimeCheck) has the EXACT hash of the location they are at now //AND they are different locations, ... then I don't know what to tell you. Deal with it. 
private final HashMap<UUID,Integer> locationHashes; private long lastUpdate; public PlaytimeListener(Allegiance allegiance,PlaytimeTarget target, long playTimeCheck) { this.allegiance = allegiance; this.target = target; locationHashes = new HashMap<>(); //Bukkit.getPluginManager().registerEvents(this,allegiance); Bukkit.getScheduler().runTaskTimer(allegiance,this,1000*60,playTimeCheck); lastUpdate = System.currentTimeMillis(); } @Override public void run() { allegiance.log("Running scheduled play time check..."); int incrementAmount = (int)((System.currentTimeMillis()-lastUpdate)/1000); //increment amount in seconds for(Player player : Bukkit.getOnlinePlayers()) { UUID id = player.getUniqueId(); Location loc = player.getLocation(); int currentHash = Hash.HashCode(loc.getBlockX(),loc.getBlockY(),loc.getBlockZ()); if(!locationHashes.containsKey(id)) { locationHashes.put(id,currentHash); continue; //Sorry you don't get a playtime increment if you weren't on at the last check } //Get their old location hash int oldHash = locationHashes.get(id); //Put in their new location hash locationHashes.put(id,currentHash); if(currentHash == oldHash) continue; //Sorry you don't get a playtime increment because you are in the same location (probably) PlayerData data = allegiance.getPlayer(id); data.increaseTarget(target,incrementAmount); } lastUpdate = System.currentTimeMillis(); } // @EventHandler(priority = EventPriority.MONITOR,ignoreCancelled = true) // public void onPlayerLeave(PlayerKickEvent event) // handleLeave(event.getPlayer().getUniqueId()); // @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) // public void onPlayerLeave(PlayerQuitEvent event) // handleLeave(event.getPlayer().getUniqueId()); // private void handleLeave(UUID id) // if(locationHashes.containsKey(id)) // locationHashes.remove(id); }
package org.kohsuke.github; import com.github.tomakehurst.wiremock.core.WireMockConfiguration; import com.github.tomakehurst.wiremock.extension.responsetemplating.ResponseTemplateTransformer; import com.github.tomakehurst.wiremock.extension.responsetemplating.helpers.HandlebarsCurrentDateHelper; import org.apache.commons.io.IOUtils; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.hamcrest.StringDescription; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.kohsuke.github.junit.GitHubWireMockRule; import wiremock.com.github.jknack.handlebars.Helper; import wiremock.com.github.jknack.handlebars.Options; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.*; import static org.junit.Assume.assumeFalse; import static org.junit.Assume.assumeTrue; /** * @author Liam Newman */ public abstract class AbstractGitHubWireMockTest extends Assert { private final GitHubBuilder githubBuilder = createGitHubBuilder(); final static String GITHUB_API_TEST_ORG = "github-api-test-org"; final static String STUBBED_USER_LOGIN = "placeholder-user"; final static String STUBBED_USER_PASSWORD = "placeholder-password"; protected boolean useDefaultGitHub = true; protected final Set<String> tempGitHubRepositories = new HashSet<>(); /** * {@link GitHub} instance for use during test. Traffic will be part of snapshot when taken. 
*/ protected GitHub gitHub; private GitHub gitHubBeforeAfter; protected final String baseFilesClassPath = this.getClass().getName().replace('.', '/'); protected final String baseRecordPath = "src/test/resources/" + baseFilesClassPath + "/wiremock"; @Rule public final GitHubWireMockRule mockGitHub; protected final TemplatingHelper templating = new TemplatingHelper(); public AbstractGitHubWireMockTest() { mockGitHub = new GitHubWireMockRule(this.getWireMockOptions()); } protected WireMockConfiguration getWireMockOptions() { return WireMockConfiguration.options().dynamicPort().usingFilesUnderDirectory(baseRecordPath); } private static GitHubBuilder createGitHubBuilder() { GitHubBuilder builder = new GitHubBuilder(); try { File f = new File(System.getProperty("user.home"), ".github.kohsuke2"); if (f.exists()) { Properties props = new Properties(); FileInputStream in = null; try { in = new FileInputStream(f); props.load(in); } finally { IOUtils.closeQuietly(in); } // use the non-standard credential preferentially, so that developers of this library do not have // to clutter their event stream. 
builder = GitHubBuilder.fromProperties(props); } else { builder = GitHubBuilder.fromEnvironment(); builder = GitHubBuilder.fromCredentials(); } } catch (IOException e) { } return builder.withRateLimitHandler(RateLimitHandler.FAIL); } protected GitHubBuilder getGitHubBuilder() { GitHubBuilder builder = githubBuilder.clone(); if (!mockGitHub.isUseProxy()) { // This sets the user and password to a placeholder for wiremock testing builder.oauthToken = null; builder.withPassword(STUBBED_USER_LOGIN, STUBBED_USER_PASSWORD); } return builder; } @Before public void wireMockSetup() throws Exception { GitHubBuilder builder = getGitHubBuilder().withEndpoint(mockGitHub.apiServer().baseUrl()); if (useDefaultGitHub) { gitHub = builder.build(); } if (mockGitHub.isUseProxy()) { gitHubBeforeAfter = getGitHubBuilder().withEndpoint("https://api.github.com/").build(); } else { gitHubBeforeAfter = null; } } protected void snapshotNotAllowed() { assumeFalse("Test contains hand written mappings. Only valid when not taking a snapshot.", mockGitHub.isTakeSnapshot()); } protected void requireProxy(String reason) { assumeTrue("Test only valid when proxying (-Dtest.github.useProxy to enable): " + reason, mockGitHub.isUseProxy()); } protected void verifyAuthenticated(GitHub instance) { assertThat( "GitHub connection believes it is anonymous. Make sure you set GITHUB_OAUTH or both GITHUB_LOGIN and GITHUB_PASSWORD environment variables", instance.isAnonymous(), Matchers.is(false)); } protected GHUser getUser() { return getUser(gitHub); } protected static GHUser getUser(GitHub gitHub) { try { return gitHub.getMyself(); } catch (IOException e) { throw new RuntimeException(e.getMessage(), e); } } /** * Creates a temporary repository that will be deleted at the end of the test. Repository name is based on the * current test method. * * @return a temporary repository * @throws IOException * if repository could not be created or retrieved. 
*/ protected GHRepository getTempRepository() throws IOException { return getTempRepository("temp-" + this.mockGitHub.getMethodName()); } /** * Creates a temporary repository that will be deleted at the end of the test. * * @param name * string name of the the repository * * @return a temporary repository * @throws IOException * if repository could not be created or retrieved. */ protected GHRepository getTempRepository(String name) throws IOException { String fullName = GITHUB_API_TEST_ORG + '/' + name; if (mockGitHub.isUseProxy()) { cleanupRepository(fullName); GHRepository repository = getGitHubBeforeAfter().getOrganization(GITHUB_API_TEST_ORG) .createRepository(name) .description("A test repository for testing the github-api project: " + name) .homepage("http://github-api.kohsuke.org/") .autoInit(true) .wiki(true) .downloads(true) .issues(true) .private_(false) .create(); try { Thread.sleep(3000); } catch (InterruptedException e) { throw new RuntimeException(e.getMessage(), e); } } return gitHub.getRepository(fullName); } @Before @After public void cleanupTempRepositories() throws IOException { if (mockGitHub.isUseProxy()) { for (String fullName : tempGitHubRepositories) { cleanupRepository(fullName); } } } protected void cleanupRepository(String fullName) throws IOException { if (mockGitHub.isUseProxy()) { tempGitHubRepositories.add(fullName); try { GHRepository repository = getGitHubBeforeAfter().getRepository(fullName); if (repository != null) { repository.delete(); } } catch (GHFileNotFoundException e) { // Repo already deleted } } } /** * {@link GitHub} instance for use before/after test. Traffic will not be part of snapshot when taken. Should only * be used when isUseProxy() or isTakeSnapShot(). 
* * @return a github instance after checking Authentication */ public GitHub getGitHubBeforeAfter() { verifyAuthenticated(gitHubBeforeAfter); return gitHubBeforeAfter; } protected void kohsuke() { // No-op for now // Generally this means the test is doing something that requires additional access rights // Not always clear which ones. // TODO: Add helpers that assert the expected rights using gitHubBeforeAfter and only when proxy is enabled // String login = getUserTest().getLogin(); // assumeTrue(login.equals("kohsuke") || login.equals("kohsuke2")); } public static <T> void assertThat(T actual, Matcher<? super T> matcher) { assertThat("", actual, matcher); } public static <T> void assertThat(String reason, T actual, Matcher<? super T> matcher) { if (!matcher.matches(actual)) { Description description = new StringDescription(); description.appendText(reason) .appendText(System.lineSeparator()) .appendText("Expected: ") .appendDescriptionOf(matcher) .appendText(System.lineSeparator()) .appendText(" but: "); matcher.describeMismatch(actual, description); throw new AssertionError(description.toString()); } } public static void assertThat(String reason, boolean assertion) { if (!assertion) { throw new AssertionError(reason); } } public static void assertEquals(Object expected, Object actual) { assertThat(actual, Matchers.equalTo(expected)); } public static void assertNotEquals(Object expected, Object actual) { assertThat(actual, Matchers.not(expected)); } public static void assertNotNull(Object actual) { assertThat(actual, Matchers.notNullValue()); } public static void assertNull(Object actual) { assertThat(actual, Matchers.nullValue()); } public static void assertTrue(Boolean condition) { assertThat(condition, Matchers.is(true)); } public static void assertFalse(Boolean condition) { assertThat(condition, Matchers.is(false)); } protected static class TemplatingHelper { public Date testStartDate = new Date(); public ResponseTemplateTransformer newResponseTransformer() { 
testStartDate = new Date(); return ResponseTemplateTransformer.builder() .global(true) .maxCacheEntries(0L) .helper("testStartDate", new Helper<Object>() { private HandlebarsCurrentDateHelper helper = new HandlebarsCurrentDateHelper(); @Override public Object apply(final Object context, final Options options) throws IOException { return this.helper.apply(TemplatingHelper.this.testStartDate, options); } }) .build(); } } }
package jenkins.plugins.git; import hudson.model.TaskListener; import hudson.util.StreamTaskListener; import jenkins.plugins.git.traits.BranchDiscoveryTrait; import jenkins.scm.api.trait.SCMSourceTrait; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.TestExtension; import java.util.Collections; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; /** * Currently the test aims to functionally validate estimation of size of .git repo from a cached directory * TODO Test estimation with API extension point */ public class GitRepoSizeEstimatorTest { @Rule public JenkinsRule jenkins = new JenkinsRule(); @Rule public GitSampleRepoRule sampleRepo = new GitSampleRepoRule(); static final String GitBranchSCMHead_DEV_MASTER = "[GitBranchSCMHead{name='dev', ref='refs/heads/dev'}, GitBranchSCMHead{name='master', ref='refs/heads/master'}]"; /* In the scenario of not having a cache or an implemented extension point, the estimation class should recommend NONE which means keep the impl as is. */ @Test public void testSizeEstimationWithNoGitCache() throws Exception { GitSCMSource instance = new GitSCMSource("https://github.com/rishabhBudhouliya/git-plugin.git"); GitRepoSizeEstimator repoSizeEstimator = new GitRepoSizeEstimator(instance); String tool = repoSizeEstimator.getGitTool(); // The class should make no recommendation since it can't find a .git cached directory assertThat(tool, is("NONE")); } /* In the case of having a cached .git repository, the estimator class should estimate the size of the local checked out repository and ultimately provide a suggestion on the base of decided heuristic. 
*/ @Test public void testSizeEstimationWithGitCache() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev"); sampleRepo.git("tag", "lightweight"); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2"); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[]", source.fetch(listener).toString()); source.setTraits(Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait())); assertEquals(GitBranchSCMHead_DEV_MASTER, source.fetch(listener).toString()); GitRepoSizeEstimator repoSizeEstimator = new GitRepoSizeEstimator(source); /* Since the size of repository is 21.785 KiBs, the estimator should suggest "jgit" as an implementation */ assertThat(repoSizeEstimator.getGitTool(), containsString("git")); } @Test public void testSizeEstimationWithGithubAPI() { String remote = "https://github.com/rishabhBudhouliya/git-plugin.git"; GitRepoSizeEstimator sizeEstimator = new GitRepoSizeEstimator(remote); assertThat(sizeEstimator.getGitTool(), is("git")); } @Test public void testSizeEstimationWithBitbucketAPIs() { String remote = "https://bitbucket.com/rishabhBudhouliya/git-plugin.git"; GitRepoSizeEstimator sizeEstimator = new GitRepoSizeEstimator(remote); assertThat(sizeEstimator.getGitTool(), is("NONE")); } @org.jvnet.hudson.test.TestExtension public static class TestExtensionGithub extends GitRepoSizeEstimator.RepositorySizeAPI { @Override public boolean acceptsRemote(String remote) { return remote.contains("github"); } @Override public Long getSizeOfRepository(String remote) { long mockedSize = 500; return mockedSize; 
} } @org.jvnet.hudson.test.TestExtension public static class TestExtensionGitlab extends GitRepoSizeEstimator.RepositorySizeAPI { @Override public boolean acceptsRemote(String remote) { return remote.contains("gitlab"); } @Override public Long getSizeOfRepository(String remote) { long mockedSize = 1000; return mockedSize; } } }
package net.floodlightcontroller.pktinhistory; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import net.floodlightcontroller.core.IFloodlightProviderService; import net.floodlightcontroller.core.types.SwitchMessagePair; import org.openflow.protocol.OFMessage; import org.openflow.protocol.OFType; import net.floodlightcontroller.core.FloodlightContext; import net.floodlightcontroller.core.IOFMessageListener; import net.floodlightcontroller.core.IOFSwitch; import net.floodlightcontroller.core.module.FloodlightModuleContext; import net.floodlightcontroller.core.module.FloodlightModuleException; import net.floodlightcontroller.core.module.IFloodlightModule; import net.floodlightcontroller.core.module.IFloodlightService; public class PktInHistory implements IFloodlightModule, IPktinHistoryService, IOFMessageListener { protected IFloodlightProviderService floodlightProvider; protected ConcurrentCircularBuffer<SwitchMessagePair> buffer; @Override public String getName() { return "PktInHistory"; } @Override public boolean isCallbackOrderingPrereq(OFType type, String name) { // TODO Auto-generated method stub return false; } @Override public boolean isCallbackOrderingPostreq(OFType type, String name) { // TODO Auto-generated method stub return false; } @Override public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) { switch(msg.getType()) { case PACKET_IN: buffer.add(new SwitchMessagePair(sw, msg)); break; default: break; } return Command.CONTINUE; } @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IPktinHistoryService.class); return l; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? 
extends IFloodlightService>, IFloodlightService> (); m.put(IPktinHistoryService.class, this); return m; } @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>> (); l.add(IFloodlightProviderService.class); return l; } @Override public void init(FloodlightModuleContext context) throws FloodlightModuleException { floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class); buffer = new ConcurrentCircularBuffer<SwitchMessagePair>(SwitchMessagePair.class, 100); } @Override public void startUp(FloodlightModuleContext context) { floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this); } @Override public ConcurrentCircularBuffer<SwitchMessagePair> getBuffer() { return buffer; } }
package jenkins.plugins.git; import hudson.model.TaskListener; import hudson.util.StreamTaskListener; import jenkins.plugins.git.traits.BranchDiscoveryTrait; import jenkins.scm.api.trait.SCMSourceTrait; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; import java.util.Collections; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; /** * Currently the test aims to functionally validate estimation of size of .git repo from a cached directory * TODO Test estimation with API extension point */ public class GitRepoSizeEstimatorTest { @Rule public JenkinsRule jenkins = new JenkinsRule(); @Rule public GitSampleRepoRule sampleRepo = new GitSampleRepoRule(); static final String GitBranchSCMHead_DEV_MASTER = "[GitBranchSCMHead{name='dev', ref='refs/heads/dev'}, GitBranchSCMHead{name='master', ref='refs/heads/master'}]"; /* In the scenario of not having a cache or an implemented extension point, the estimation class should recommend NONE which means keep the impl as is. */ @Test public void testSizeEstimationWithNoGitCache() throws Exception { GitSCMSource instance = new GitSCMSource("https://github.com/rishabhBudhouliya/git-plugin.git"); GitRepoSizeEstimator repoSizeEstimator = new GitRepoSizeEstimator(instance); String tool = repoSizeEstimator.getGitTool(); // The class should make no recommendation since it can't find a .git cached directory assertThat(tool.equals("NONE"), is(true)); } /* In the case of having a cached .git repository, the estimator class should estimate the size of the local checked out repository and ultimately provide a suggestion on the base of decided heuristic. 
*/ @Test public void testSizeEstimationWithGitCache() throws Exception { sampleRepo.init(); sampleRepo.git("checkout", "-b", "dev"); sampleRepo.write("file", "modified"); sampleRepo.git("commit", "--all", "--message=dev"); sampleRepo.git("tag", "lightweight"); sampleRepo.write("file", "modified2"); sampleRepo.git("commit", "--all", "--message=dev2"); sampleRepo.git("tag", "-a", "annotated", "-m", "annotated"); sampleRepo.write("file", "modified3"); sampleRepo.git("commit", "--all", "--message=dev3"); GitSCMSource source = new GitSCMSource(sampleRepo.toString()); TaskListener listener = StreamTaskListener.fromStderr(); // SCMHeadObserver.Collector.result is a TreeMap so order is predictable: assertEquals("[]", source.fetch(listener).toString()); source.setTraits(Collections.<SCMSourceTrait>singletonList(new BranchDiscoveryTrait())); assertEquals(GitBranchSCMHead_DEV_MASTER, source.fetch(listener).toString()); GitRepoSizeEstimator repoSizeEstimator = new GitRepoSizeEstimator(source); /* Since the size of repository is 21.785 KiBs, the estimator should suggest "jgit" as an implementation */ assertThat(repoSizeEstimator.getGitTool(), containsString("git")); } }
package net.interfax.rest.client.impl; import net.interfax.rest.client.InterFAXClient; import net.interfax.rest.client.config.ClientConfig; import net.interfax.rest.client.config.ClientCredentials; import net.interfax.rest.client.config.ConfigLoader; import net.interfax.rest.client.domain.APIResponse; import net.interfax.rest.client.util.ArrayUtil; import org.apache.tika.Tika; import org.apache.tika.io.IOUtils; import org.glassfish.jersey.client.RequestEntityProcessing; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import org.glassfish.jersey.media.multipart.MultiPart; import org.glassfish.jersey.media.multipart.MultiPartFeature; import org.glassfish.jersey.media.multipart.file.FileDataBodyPart; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.net.URI; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; public class InterFAXJerseyClient implements InterFAXClient { private static String username; private static String password; private static String scheme; private static String hostname; private static String outboundFaxesEndpoint; private static String outboundDocumentsEndpoint; private static Client client; private static Tika tika; private final ReentrantLock reentrantLock = new ReentrantLock(); private static final Logger log = LoggerFactory.getLogger(InterFAXJerseyClient.class); public InterFAXJerseyClient(String username, String password) { InterFAXJerseyClient.username = username; InterFAXJerseyClient.password = password; initializeClient(username, password); } public InterFAXJerseyClient() { 
initialiseCredentials(); initializeClient(username, password); } @Override public APIResponse sendFax(final String faxNumber, final File fileToSendAsFax) { Response response = null; APIResponse apiResponse = null; try { String contentType = tika.detect(fileToSendAsFax); URI outboundFaxesUri = UriBuilder.fromUri(outboundFaxesEndpoint).queryParam("faxNumber", faxNumber).build(); WebTarget target = client.target(outboundFaxesUri); response = target .request() .header("Content-Type", contentType) .post(Entity.entity(fileToSendAsFax, contentType)); apiResponse = new APIResponse(); apiResponse.setStatusCode(response.getStatus()); copyHeadersToAPIResponse(response, apiResponse); if (response.hasEntity()) apiResponse.setResponseBody(response.readEntity(String.class)); } catch (Exception e) { log.error("Exception occurred while sending fax", e); } finally { if (response != null) response.close(); } return apiResponse; } @Override public APIResponse sendFax(final String faxNumber, final File[] filesToSendAsFax) { Response response = null; APIResponse apiResponse = null; try { MultiPart multiPart = new MultiPart(); int count = 1; for (File file : filesToSendAsFax) { String contentType = tika.detect(file); String entityName = "file"+count++; FileDataBodyPart fileDataBodyPart = new FileDataBodyPart(entityName, file, MediaType.valueOf(contentType)); multiPart.bodyPart(fileDataBodyPart); } URI outboundFaxesUri = UriBuilder.fromUri(outboundFaxesEndpoint).queryParam("faxNumber", faxNumber).build(); WebTarget target = client.target(outboundFaxesUri); target.register(MultiPartFeature.class); response = target .request() .header("Content-Type", "multipart/mixed") .post(Entity.entity(multiPart, multiPart.getMediaType())); apiResponse = new APIResponse(); apiResponse.setStatusCode(response.getStatus()); copyHeadersToAPIResponse(response, apiResponse); if (response.hasEntity()) apiResponse.setResponseBody(response.readEntity(String.class)); } catch (Exception e) { log.error("Exception 
occurred while sending fax", e); } finally { if (response != null) response.close(); } return apiResponse; } @Override public APIResponse uploadDocument(final File fileToUpload) { Response response = null; APIResponse apiResponse = new APIResponse(); try { // create document upload session URI outboundDocumentsUri = UriBuilder .fromPath(outboundDocumentsEndpoint) .scheme(scheme) .host(hostname) .port(8089) .queryParam("size", fileToUpload.length()) .queryParam("name", fileToUpload.getName()) .build(); WebTarget target = client.target(outboundDocumentsUri); response = target .request() .header("Content-Length", 0) .post(null); apiResponse = new APIResponse(); apiResponse.setStatusCode(response.getStatus()); copyHeadersToAPIResponse(response, apiResponse); if (response.hasEntity()) apiResponse.setResponseBody(response.readEntity(String.class)); // upload chunks if (apiResponse.getStatusCode() == Response.Status.CREATED.getStatusCode()) { String uploadChunkToDocumentEndpoint = URI .create(apiResponse.getHeaders().get("Location").get(0).toString()) .getPath(); InputStream inputStream = new FileInputStream(fileToUpload); byte[] bytes = IOUtils.toByteArray(inputStream); int chunkSize = 1024*1024; byte[][] chunks = ArrayUtil.chunkArray(bytes, chunkSize); int bytesUploaded = 0; for (int i=0; i<chunks.length; i++) { boolean lastChunk = false; if (i == chunks.length-1) { lastChunk = true; } apiResponse = uploadChunk(uploadChunkToDocumentEndpoint, chunks[i], bytesUploaded, bytesUploaded+chunks[i].length-1, lastChunk); bytesUploaded += chunks[i].length; } } } catch (Exception e) { log.error("Exception occurred while sending fax", e); apiResponse.setStatusCode(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); } finally { if (response != null) response.close(); } return apiResponse; } public APIResponse uploadChunk( String uploadChunkToDocumentEndpoint, byte[] bytesToUpload, int startByteRange, int endByteRange, boolean lastChunk) { Response response = null; APIResponse 
apiResponse = new APIResponse(); try { URI uploadChunkToDocumentUri = UriBuilder .fromPath(uploadChunkToDocumentEndpoint) .scheme(scheme) .host(hostname) .port(8089) .build(); WebTarget target = client.target(uploadChunkToDocumentUri); response = target .request() .header("Range", "bytes="+startByteRange+"-"+endByteRange) .post(Entity.entity(bytesToUpload, MediaType.APPLICATION_OCTET_STREAM_TYPE)); int expectedResponseCode = lastChunk ? Response.Status.OK.getStatusCode(): Response.Status.ACCEPTED.getStatusCode(); if (response.getStatus() == expectedResponseCode) { log.info( "chunk uploaded at {}; totalByesUploaded = {}; lastChunk = {}", uploadChunkToDocumentEndpoint, endByteRange, lastChunk ); } else { // TODO: define and use a custom exception throw new Exception("Unexpected response code when uploading chunk"+response.getStatus()); } apiResponse.setStatusCode(response.getStatus()); copyHeadersToAPIResponse(response, apiResponse); } catch (Exception e) { log.error("Exception occurred while sending fax", e); apiResponse.setStatusCode(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); } finally { if (response != null) response.close(); } return apiResponse; } public void closeClient() { client.close(); } private void copyHeadersToAPIResponse(Response response, APIResponse apiResponse) { Map<String, List<Object>> headers = new HashMap<>(); response.getHeaders().forEach(headers::put); apiResponse.setHeaders(headers); } private void initialiseCredentials() { ClientCredentials clientCredentials = new ConfigLoader<>(ClientCredentials.class, "interfax-api-credentials.yaml").getTestConfig(); username = clientCredentials.getUsername(); password = clientCredentials.getPassword(); } private void initializeClient(String username, String password) { reentrantLock.lock(); try { if (client != null) return; // build client ClientConfig clientConfig = new ConfigLoader<>(ClientConfig.class, "interfax-api-config.yaml").getTestConfig(); HttpAuthenticationFeature 
httpAuthenticationFeature = HttpAuthenticationFeature.basic(username, password); client = ClientBuilder.newClient(); client.register(httpAuthenticationFeature); client.register(MultiPartFeature.class); client.register(RequestEntityProcessing.CHUNKED); // required for the document upload API, to set Content-Length header System.setProperty("sun.net.http.allowRestrictedHeaders", "true"); // for automatically deriving content type given a file tika = new Tika(); // read config from yaml scheme = clientConfig.getInterFAX().getScheme(); hostname = clientConfig.getInterFAX().getHostname(); outboundFaxesEndpoint = clientConfig.getInterFAX().getOutboundFaxesEndpoint(); outboundDocumentsEndpoint = clientConfig.getInterFAX().getOutboundDocumentsEndpoint(); } finally { reentrantLock.unlock(); } } }
package net.onrc.onos.intent.runtime;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.onrc.onos.datagrid.IDatagridService;
import net.onrc.onos.datagrid.IEventChannel;
import net.onrc.onos.datagrid.IEventChannelListener;
import net.onrc.onos.intent.Intent;
import net.onrc.onos.intent.Intent.IntentState;
import net.onrc.onos.intent.IntentMap;
import net.onrc.onos.intent.IntentOperation;
import net.onrc.onos.intent.IntentOperation.Operator;
import net.onrc.onos.intent.IntentOperationList;
import net.onrc.onos.intent.PathIntent;
import net.onrc.onos.intent.PathIntentMap;
import net.onrc.onos.intent.ShortestPathIntent;
import net.onrc.onos.intent.persist.PersistIntent;
import net.onrc.onos.ofcontroller.networkgraph.DeviceEvent;
import net.onrc.onos.ofcontroller.networkgraph.INetworkGraphListener;
import net.onrc.onos.ofcontroller.networkgraph.INetworkGraphService;
import net.onrc.onos.ofcontroller.networkgraph.LinkEvent;
import net.onrc.onos.ofcontroller.networkgraph.PortEvent;
import net.onrc.onos.ofcontroller.networkgraph.SwitchEvent;
import net.onrc.onos.registry.controller.IControllerRegistryService;

/**
 * Floodlight module that compiles high-level (application) intents into
 * low-level path intents, persists the resulting operations, and publishes
 * them on a datagrid event channel. It also listens for network-graph
 * changes (to trigger rerouting of affected paths) and for path-intent
 * state notifications (to reflect state back onto parent intents).
 *
 * @author Toshio Koide (t-koide@onlab.us)
 */
public class PathCalcRuntimeModule implements IFloodlightModule, IPathCalcRuntimeService, INetworkGraphListener, IEventChannelListener<Long, IntentStateList> {
    // Compiler that turns high-level intent operations into path-intent operations.
    private PathCalcRuntime runtime;
    private IDatagridService datagridService;
    private INetworkGraphService networkGraphService;
    // Application-level intents, keyed by intent id.
    private IntentMap highLevelIntents;
    // Low-level (path) intents derived from the high-level ones.
    private PathIntentMap pathIntents;
    private IControllerRegistryService controllerRegistry;
    // Used to persist path-intent operations (only when this instance is leader).
    private PersistIntent persistIntent;
    // Channel used to publish path-intent operations to other components.
    private IEventChannel<Long, IntentOperationList> opEventChannel;
    private static final String INTENT_OP_EVENT_CHANNEL_NAME = "onos.pathintent";
    private static final String INTENT_STATE_EVENT_CHANNEL_NAME = "onos.pathintent_state";
    private static final Logger log = LoggerFactory.getLogger(PathCalcRuntimeModule.class);

    // ================================================================================
    // private methods
    // ================================================================================

    /**
     * Re-submits the parent intents of the given (installed) path intents so
     * that new paths are calculated for them.
     *
     * @param oldPaths path intents whose paths were affected by a topology
     *                 change; may be null or empty, in which case nothing happens
     */
    private void reroutePaths(Collection<Intent> oldPaths) {
        if (oldPaths == null || oldPaths.isEmpty())
            return;
        IntentOperationList reroutingOperation = new IntentOperationList();
        for (Intent intent : oldPaths) {
            PathIntent pathIntent = (PathIntent) intent;
            // NOTE(review): contains(pathIntent) is checked against a list of
            // IntentOperation objects, so it looks like it can never match a
            // PathIntent — confirm whether the intent was to de-duplicate by
            // parent intent instead.
            if (pathIntent.getState().equals(IntentState.INST_ACK)
                    && !reroutingOperation.contains(pathIntent)) {
                reroutingOperation.add(Operator.ADD, pathIntent.getParentIntent());
            }
        }
        executeIntentOperations(reroutingOperation);
    }

    /**
     * Emits a step marker together with a nanosecond timestamp, used for
     * coarse performance tracing of the intent pipeline.
     *
     * <p>Fixed: these routine trace markers were logged at ERROR level,
     * flooding the error log with non-error output; DEBUG is the
     * appropriate level for measurement traces.
     *
     * @param step name of the pipeline step being timed
     */
    private void log(String step) {
        log.debug("Step:{}, Time:{}", step, System.nanoTime());
    }

    // ================================================================================
    // IFloodlightModule implementations
    // ================================================================================

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        Collection<Class<? extends IFloodlightService>> l = new ArrayList<>(1);
        l.add(IPathCalcRuntimeService.class);
        return l;
    }

    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<>();
        m.put(IPathCalcRuntimeService.class, this);
        return m;
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        Collection<Class<? extends IFloodlightService>> l = new ArrayList<>(2);
        l.add(IDatagridService.class);
        l.add(INetworkGraphService.class);
        return l;
    }

    @Override
    public void init(FloodlightModuleContext context) throws FloodlightModuleException {
        datagridService = context.getServiceImpl(IDatagridService.class);
        networkGraphService = context.getServiceImpl(INetworkGraphService.class);
        controllerRegistry = context.getServiceImpl(IControllerRegistryService.class);
    }

    @Override
    public void startUp(FloodlightModuleContext context) {
        highLevelIntents = new IntentMap();
        runtime = new PathCalcRuntime(networkGraphService.getNetworkGraph());
        pathIntents = new PathIntentMap();
        opEventChannel = datagridService.createChannel(INTENT_OP_EVENT_CHANNEL_NAME, Long.class, IntentOperationList.class);
        datagridService.addListener(INTENT_STATE_EVENT_CHANNEL_NAME, this, Long.class, IntentStateList.class);
        networkGraphService.registerNetworkGraphListener(this);
        persistIntent = new PersistIntent(controllerRegistry, networkGraphService);
    }

    // ================================================================================
    // IPathCalcRuntimeService implementations
    // ================================================================================

    /**
     * Applies the given high-level intent operations: updates the in-memory
     * intent maps, compiles path intents, persists them, removes error
     * results, and publishes the resulting path-intent operations.
     *
     * @param list high-level intent operations to execute
     * @return the compiled (and published) low-level path-intent operations
     */
    @Override
    public IntentOperationList executeIntentOperations(IntentOperationList list) {
        // update the map of high-level intents
        log("begin_updateInMemoryIntents");
        highLevelIntents.executeOperations(list);

        // intents that were already installed are marked for rerouting
        IntentStateList states = new IntentStateList();
        for (IntentOperation op : list) {
            if (op.intent.getState().equals(IntentState.INST_ACK))
                states.put(op.intent.getId(), IntentState.REROUTE_REQ);
        }
        highLevelIntents.changeStates(states);
        log("end_updateInMemoryIntents");

        // calculate path-intents (low-level operations)
        log("begin_calcPathIntents");
        IntentOperationList pathIntentOperations = runtime.calcPathIntents(list, highLevelIntents, pathIntents);
        log("end_calcPathIntents");

        // persist calculated low-level operations into data store
        log("begin_persistPathIntents");
        long key = persistIntent.getKey();
        persistIntent.persistIfLeader(key, pathIntentOperations);
        log("end_persistPathIntents");

        // remove error-intents and reflect them to high-level intents
        log("begin_removeErrorIntents");
        states.clear();
        Iterator<IntentOperation> i = pathIntentOperations.iterator();
        while (i.hasNext()) {
            IntentOperation op = i.next();
            if (op.operator.equals(Operator.ERROR)) {
                states.put(op.intent.getId(), IntentState.INST_NACK);
                i.remove();
            }
        }
        highLevelIntents.changeStates(states);
        log("end_removeErrorIntents");

        // update the map of path intents and publish the path operations
        log("begin_updateInMemoryPathIntents");
        pathIntents.executeOperations(pathIntentOperations);
        log("end_updateInMemoryPathIntents");

        // Demo special: add a complete path to remove operation
        log("begin_addPathToRemoveOperation");
        for (IntentOperation op : pathIntentOperations) {
            if (op.operator.equals(Operator.REMOVE)) {
                op.intent = pathIntents.getIntent(op.intent.getId());
            }
        }
        log("end_addPathToRemoveOperation");

        // send notification
        log("begin_sendNotification");
        opEventChannel.addEntry(key, pathIntentOperations);
        log("end_sendNotification");
        return pathIntentOperations;
    }

    @Override
    public IntentMap getHighLevelIntents() {
        return highLevelIntents;
    }

    @Override
    public IntentMap getPathIntents() {
        return pathIntents;
    }

    @Override
    public void purgeIntents() {
        highLevelIntents.purge();
        pathIntents.purge();
    }

    // ================================================================================
    // INetworkGraphListener implementations
    // ================================================================================

    /**
     * Reacts to topology changes by collecting the path intents affected by
     * the change and rerouting them. Additions cause all path intents to be
     * re-evaluated; removals only affect intents using the removed element.
     */
    @Override
    public void networkGraphEvents(Collection<SwitchEvent> addedSwitchEvents,
            Collection<SwitchEvent> removedSwitchEvents,
            Collection<PortEvent> addedPortEvents,
            Collection<PortEvent> removedPortEvents,
            Collection<LinkEvent> addedLinkEvents,
            Collection<LinkEvent> removedLinkEvents,
            Collection<DeviceEvent> addedDeviceEvents,
            Collection<DeviceEvent> removedDeviceEvents) {
        log("called_networkGraphEvents");
        HashSet<Intent> affectedPaths = new HashSet<>();

        if (addedLinkEvents.size() > 0 || addedPortEvents.size() > 0 || addedSwitchEvents.size() > 0) {
            // something came up: a better path may now exist for any intent
            log("begin_getAllIntents");
            affectedPaths.addAll(getPathIntents().getAllIntents());
            log("end_getAllIntents");
        } else {
            // something went down: only intents touching the removed
            // links/ports/switches need rerouting
            log("begin_getIntentsByLink");
            for (LinkEvent linkEvent : removedLinkEvents)
                affectedPaths.addAll(pathIntents.getIntentsByLink(linkEvent));
            log("end_getIntentsByLink");

            log("begin_getIntentsByPort");
            for (PortEvent portEvent : removedPortEvents)
                affectedPaths.addAll(pathIntents.getIntentsByPort(portEvent.getDpid(), portEvent.getNumber()));
            log("end_getIntentsByPort");

            log("begin_getIntentsByDpid");
            for (SwitchEvent switchEvent : removedSwitchEvents)
                affectedPaths.addAll(pathIntents.getIntentsByDpid(switchEvent.getDpid()));
            log("end_getIntentsByDpid");
        }
        reroutePaths(affectedPaths);
        log("finished_networkGraphEvents");
    }

    // ================================================================================
    // IEventChannelListener implementations
    // ================================================================================

    @Override
    public void entryAdded(IntentStateList value) {
        log("called_EntryAdded");
        // an added entry is handled exactly like an updated one
        entryUpdated(value);
    }

    @Override
    public void entryRemoved(IntentStateList value) {
        // do nothing
    }

    /**
     * Propagates path-intent state changes up to the parent (high-level)
     * intents, then records the new states on the path intents themselves.
     */
    @Override
    public void entryUpdated(IntentStateList value) {
        // TODO draw state transition diagram in multiple ONOS instances and update this method
        log("called_EntryUpdated");

        // reflect state changes of path-level intent into application-level intents
        log("begin_changeStateByNotification");
        IntentStateList parentStates = new IntentStateList();
        for (Entry<String, IntentState> entry : value.entrySet()) {
            PathIntent pathIntent = (PathIntent) pathIntents.getIntent(entry.getKey());
            if (pathIntent == null) continue;

            // only propagate to a ShortestPathIntent parent that still refers
            // to this path intent (ignore stale notifications)
            Intent parentIntent = pathIntent.getParentIntent();
            if (parentIntent == null
                    || !(parentIntent instanceof ShortestPathIntent)
                    || !((ShortestPathIntent) parentIntent).getPathIntentId().equals(pathIntent.getId()))
                continue;

            IntentState state = entry.getValue();
            switch (state) {
            case INST_REQ:
            case INST_ACK:
            case INST_NACK:
            case DEL_REQ:
            case DEL_ACK:
            case DEL_PENDING:
                parentStates.put(parentIntent.getId(), state);
                break;
            default:
                break;
            }
        }
        highLevelIntents.changeStates(parentStates);
        pathIntents.changeStates(value);
        log("end_changeStateByNotification");
    }
}
package tech.greenfield.vertx.irked; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import io.vertx.core.DeploymentOptions; import io.vertx.core.json.JsonObject; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.RunTestOnContext; import io.vertx.ext.unit.junit.Timeout; import io.vertx.ext.unit.junit.VertxUnitRunner; import tech.greenfield.vertx.irked.annotations.Get; import tech.greenfield.vertx.irked.server.Server; @RunWith(VertxUnitRunner.class) public class TestBadController { public class TestControllerBadField extends Controller { @Get("/") String invalidHandler = "test"; } public class TestControllerBadMethod extends Controller { @Get("/") boolean invalidMethod() { return true; } } @ClassRule public static RunTestOnContext rule = new RunTestOnContext(); @Rule public Timeout timeoutRule = Timeout.seconds(3600); final Integer port = 1234; @Test public void testInvalidFieldHandlerError(TestContext context) { Server server = new Server(new TestControllerBadField()); DeploymentOptions options = new DeploymentOptions().setConfig(new JsonObject().put("port", port)); rule.vertx().deployVerticle(server, options, context.asyncAssertFailure()); } @Test public void testInvalidMethodHandlerError(TestContext context) { Server server = new Server(new TestControllerBadMethod()); DeploymentOptions options = new DeploymentOptions().setConfig(new JsonObject().put("port", port)); rule.vertx().deployVerticle(server, options, context.asyncAssertFailure()); } }
package net.openhft.chronicle.wire;

import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.bytes.BytesUtil;
import org.junit.Ignore;
import org.junit.Test;

import java.io.IOException;

import static org.junit.Assert.assertEquals;

/**
 * Exercises {@link YamlTokeniser} against the example documents from the
 * YAML specification (chapter 2). Each test tokenises a resource file and
 * compares the full token stream, rendered one "TOKEN text" pair per line,
 * against the expected output.
 */
public class YamlTokeniserTest {

    /**
     * Tokenises the given classpath YAML resource and renders every token
     * (until {@code YamlToken.NONE}) as "TOKEN text" lines.
     *
     * @param resource classpath location of the YAML file to tokenise
     * @return the rendered token stream, one token per line
     * @throws AssertionError if the resource cannot be read
     */
    public static String doTest(String resource) {
        try {
            Bytes bytes = BytesUtil.readFile(resource);
            YamlTokeniser yn = new YamlTokeniser(bytes);
            StringBuilder sb = new StringBuilder();
            // Drain the tokeniser; NONE marks end of input.
            for (YamlToken t; (t = yn.next()) != YamlToken.NONE; ) {
                sb.append(t).append(' ').append(yn.text()).append('\n');
            }
            return sb.toString();
        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }

    // Spec example 2.1: sequence of scalars.
    @Test
    public void eg2_1() {
        assertEquals("DIRECTIVES_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Mark McGwire\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Sammy Sosa\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Ken Griffey\n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_1_SequenceOfScalars.yaml"));
    }

    // Spec example 2.3: mapping of scalars to sequences.
    @Test
    public void eg2_3() {
        assertEquals("DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT american\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Boston Red Sox\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Detroit Tigers\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT New York Yankees\n" +
                "SEQUENCE_END \n" +
                "MAPPING_KEY \n" +
                "TEXT national\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT New York Mets\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Chicago Cubs\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Atlanta Braves\n" +
                "SEQUENCE_END \n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_3_MappingScalarsToSequences.yaml"));
    }

    // Spec example 2.4: sequence of mappings.
    @Test
    public void eg2_4() {
        assertEquals("DIRECTIVES_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT name\n" +
                "TEXT Mark McGwire\n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 65\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.278\n" +
                "MAPPING_END \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT name\n" +
                "TEXT Sammy Sosa\n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 63\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.288\n" +
                "MAPPING_END \n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_4_SequenceOfMappings.yaml"));
    }

    // Same as eg2_4 but against a cleaned-up copy of the input file.
    @Test
    public void eg2_4B() {
        assertEquals("DIRECTIVES_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT name\n" +
                "TEXT Mark McGwire\n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 65\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.278\n" +
                "MAPPING_END \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT name\n" +
                "TEXT Sammy Sosa\n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 63\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.288\n" +
                "MAPPING_END \n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_4_SequenceOfMappings-fixed.yaml"));
    }

    // Spec example 2.6: mapping of mappings.
    @Test
    public void eg2_6() {
        assertEquals("DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT Mark McGwire\n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 65\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.278\n" +
                "MAPPING_END \n" +
                "MAPPING_KEY \n" +
                "TEXT Sammy Sosa\n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "TEXT 63\n" +
                "MAPPING_KEY \n" +
                "TEXT avg\n" +
                "TEXT 0.288\n" +
                "MAPPING_END \n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_6_MappingOfMappings.yaml"));
    }

    // Spec example 2.7: two documents in a stream, including comments.
    @Test
    public void eg2_7() {
        assertEquals("COMMENT Ranking of 1998 home runs\n" +
                "DIRECTIVES_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Mark McGwire\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Sammy Sosa\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Ken Griffey\n" +
                "COMMENT Team ranking\n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n" +
                "DIRECTIVES_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Chicago Cubs\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT St Louis Cardinals\n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_7_TwoDocumentsInAStream.yaml"));
    }

    // Spec example 2.8: play-by-play feed, two explicit documents.
    @Test
    public void eg2_8() {
        assertEquals("DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT time\n" +
                "TEXT 20:03:20\n" +
                "MAPPING_KEY \n" +
                "TEXT player\n" +
                "TEXT Sammy Sosa\n" +
                "MAPPING_KEY \n" +
                "TEXT action\n" +
                "TEXT strike (miss)\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n" +
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT time\n" +
                "TEXT 20:03:47\n" +
                "MAPPING_KEY \n" +
                "TEXT player\n" +
                "TEXT Sammy Sosa\n" +
                "MAPPING_KEY \n" +
                "TEXT action\n" +
                "TEXT grand slam\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_8_PlayByPlayFeed.yaml"));
    }

    // Spec example 2.9: single document with two comments.
    @Test
    public void eg2_9() {
        assertEquals("DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "COMMENT 1998 hr ranking\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Mark McGwire\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Sammy Sosa\n" +
                "SEQUENCE_END \n" +
                "MAPPING_KEY \n" +
                "TEXT rbi\n" +
                "COMMENT 1998 rbi ranking\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Sammy Sosa\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Ken Griffey\n" +
                "SEQUENCE_END \n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_9_SingleDocumentWithTwoComments.yaml"));
    }

    // Spec example 2.10: anchor and alias for a repeated node.
    @Test
    public void eg2_10() {
        assertEquals("DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT hr\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Mark McGwire\n" +
                "COMMENT Following node labeled SS\n" +
                "SEQUENCE_ENTRY \n" +
                "ANCHOR SS\n" +
                "TEXT Sammy Sosa\n" +
                "SEQUENCE_END \n" +
                "MAPPING_KEY \n" +
                "TEXT rbi\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "ALIAS SS\n" +
                "COMMENT Subsequent occurrence\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Ken Griffey\n" +
                "SEQUENCE_END \n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_10_NodeAppearsTwiceInThisDocument.yaml"));
    }

    // Spec example 2.11: sequences used as mapping keys.
    @Test
    public void eg2_11() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Detroit Tigers\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Chicago cubs\n" +
                "SEQUENCE_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT 2001-07-23\n" +
                "SEQUENCE_END \n" +
                "MAPPING_KEY \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT New York Yankees\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT Atlanta Braves\n" +
                "SEQUENCE_END \n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT 2001-07-02\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT 2001-08-12\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT 2001-08-14\n" +
                "SEQUENCE_END \n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_11MappingBetweenSequences.yaml"));
    }

    // Spec example 2.12: compact nested mapping.
    @Test
    public void eg2_12() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "COMMENT Products purchased\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT item\n" +
                "TEXT Super Hoop\n" +
                "MAPPING_KEY \n" +
                "TEXT quantity\n" +
                "TEXT 1\n" +
                "MAPPING_END \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT item\n" +
                "TEXT Basketball\n" +
                "MAPPING_KEY \n" +
                "TEXT quantity\n" +
                "TEXT 4\n" +
                "MAPPING_END \n" +
                "SEQUENCE_ENTRY \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT item\n" +
                "TEXT Big Shoes\n" +
                "MAPPING_KEY \n" +
                "TEXT quantity\n" +
                "TEXT 1\n" +
                "MAPPING_END \n" +
                "SEQUENCE_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_12CompactNestedMapping.yaml"));
    }

    // Spec example 2.17: quoted scalars with escapes.
    @Test
    public void eg2_17() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT unicode\n" +
                "TEXT Sosa did fine.\\u263A\n" +
                "MAPPING_KEY \n" +
                "TEXT control\n" +
                "TEXT \\b1998\\t1999\\t2000\\n\n" +
                "MAPPING_KEY \n" +
                "TEXT hex esc\n" +
                "TEXT \\x0d\\x0a is \\r\\n\n" +
                "MAPPING_KEY \n" +
                "TEXT single\n" +
                "TEXT \"Howdy!\" he cried.\n" +
                "MAPPING_KEY \n" +
                "TEXT quoted\n" +
                "TEXT # Not a \n" +
                "TEXT comment\n" +
                "TEXT .\n" +
                "MAPPING_KEY \n" +
                "TEXT tie-fighter\n" +
                "TEXT |\\-*-/|\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_17QuotedScalars.yaml"));
    }

    // Spec example 2.19: integers in various notations.
    @Test
    public void eg2_19() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 12345\n" +
                "MAPPING_KEY \n" +
                "TEXT decimal\n" +
                "TEXT +12345\n" +
                "MAPPING_KEY \n" +
                "TEXT octal\n" +
                "TEXT 0o14\n" +
                "MAPPING_KEY \n" +
                "TEXT hexadecimal\n" +
                "TEXT 0xC\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_19Integers.yaml"));
    }

    // Spec example 2.20: floating-point values.
    @Test
    public void eg2_20() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 1.23015e+3\n" +
                "MAPPING_KEY \n" +
                "TEXT exponential\n" +
                "TEXT 12.3015e+02\n" +
                "MAPPING_KEY \n" +
                "TEXT fixed\n" +
                "TEXT 1230.15\n" +
                "MAPPING_KEY \n" +
                "TEXT negative infinity\n" +
                "TEXT -.inf\n" +
                "MAPPING_KEY \n" +
                "TEXT not a number\n" +
                "TEXT .NaN\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_20FloatingPoint.yaml"));
    }

    // Spec example 2.21: miscellaneous scalar forms (null, booleans, strings).
    @Test
    public void eg2_21() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT null\n" +
                "MAPPING_KEY \n" +
                "TEXT booleans\n" +
                "SEQUENCE_START \n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT true\n" +
                "SEQUENCE_ENTRY \n" +
                "TEXT false\n" +
                "SEQUENCE_END \n" +
                "MAPPING_KEY \n" +
                "TEXT string\n" +
                "TEXT 012345\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_21Miscellaneous.yaml"));
    }

    // Spec example 2.22: timestamps.
    @Test
    public void eg2_22() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_22Timestamps.yaml"));
    }

    // NOTE(review): the ignored tests below all carry the expected output of
    // eg2_22 — apparently copied as a placeholder until the tokeniser handles
    // these inputs. The real expected streams still need to be written.
    @Ignore("TODO FIX")
    @Test
    public void eg2_23() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_23VariousExplicitTags.yaml"));
    }

    @Ignore("TODO FIX")
    @Test
    public void eg2_24() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_24GlobalTags.yaml"));
    }

    @Ignore("TODO FIX")
    @Test
    public void eg2_25() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_25UnorderedSets.yaml"));
    }

    @Ignore("TODO FIX")
    @Test
    public void eg2_26() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_26OrderedMappings.yaml"));
    }

    @Ignore("TODO FIX")
    @Test
    public void eg2_27() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_27Invoice.yaml"));
    }

    @Ignore("TODO FIX")
    @Test
    public void eg2_28() {
        assertEquals(
                "DIRECTIVES_END \n" +
                "MAPPING_START \n" +
                "MAPPING_KEY \n" +
                "TEXT canonical\n" +
                "TEXT 2001-12-15T02:59:43.1Z\n" +
                "MAPPING_KEY \n" +
                "TEXT iso8601\n" +
                "TEXT 2001-12-14t21:59:43.10-05:00\n" +
                "MAPPING_KEY \n" +
                "TEXT spaced\n" +
                "TEXT 2001-12-14 21:59:43.10 -5\n" +
                "MAPPING_KEY \n" +
                "TEXT date\n" +
                "TEXT 2002-12-14\n" +
                "MAPPING_END \n" +
                "DOCUMENT_END \n",
                doTest("yaml/spec/2_28LogFile.yaml"));
    }
}
package net.sourceforge.cilib.problem;

import net.sourceforge.cilib.type.types.Type;

/**
 * This class represents a solution to an {@link net.sourceforge.cilib.problem.OptimisationProblem}.
 * It is responsible for keeping track of the optimisation problem and position of the solution within the search
 * space.
 *
 * <p>Instances are immutable: both the position and the fitness are cloned on
 * construction and never modified afterwards.
 *
 * @author Edwin Peer
 */
public final class OptimisationSolution implements Solution, Comparable<OptimisationSolution> {
    private static final long serialVersionUID = 2119444179382452329L;

    private final Type position;
    private final Fitness fitness;

    /**
     * Constructs a new instance of {@code OptimisationSolution}.
     *
     * @param position The position of the solution within the search space of the problem.
     * @param fitness The fitness of the optimisation solution.
     */
    public OptimisationSolution(Type position, Fitness fitness) {
        // Defensive copies keep this instance independent of the caller's objects.
        this.position = position.getClone();
        this.fitness = fitness.getClone();
    }

    /**
     * {@inheritDoc} Two solutions are equal when both their positions and
     * their fitnesses are equal.
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if ((other == null) || (this.getClass() != other.getClass())) {
            return false;
        }

        OptimisationSolution otherSolution = (OptimisationSolution) other;
        return this.position.equals(otherSolution.position)
                && this.fitness.equals(otherSolution.fitness);
    }

    /**
     * {@inheritDoc} Consistent with {@link #equals(Object)}: combines the
     * hash codes of the position and the fitness.
     */
    @Override
    public int hashCode() {
        int hash = 7;
        hash = 31 * hash + (this.position == null ? 0 : this.position.hashCode());
        // BUG FIX: the second term previously null-checked this.position but
        // hashed this.fitness, risking an NPE when fitness is null while
        // position is not. Guard and hash the same field.
        hash = 31 * hash + (this.fitness == null ? 0 : this.fitness.hashCode());
        return hash;
    }

    /**
     * Returns the position of this solution within the search space of the problem.
     *
     * @return The position of this solution in search space.
     */
    public Type getPosition() {
        return this.position;
    }

    /**
     * Returns the fitness of this solution according to {@link net.sourceforge.cilib.problem.OptimisationProblem#getFitness(Type, boolean)}.
     * Calling this function does not contribute to the number of fitness evaulations maintained by
     * {@link net.sourceforge.cilib.problem.OptimisationProblem}.
     *
     * @return The fitness of this solution.
     */
    public Fitness getFitness() {
        return this.fitness;
    }

    /**
     * {@inheritDoc} Orders solutions by fitness only.
     */
    @Override
    public int compareTo(OptimisationSolution other) {
        return this.fitness.compareTo(other.fitness);
    }
}
package uk.co.epsilontechnologies.watcher;

import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.http.HttpMethod;
import org.springframework.http.client.ClientHttpRequest;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.web.client.RequestCallback;
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.client.RestTemplate;
import uk.co.epsilontechnologies.primer.Primer;
import uk.co.epsilontechnologies.primer.PrimerStatics;

import java.io.IOException;
import java.util.Arrays;

import static org.junit.Assert.assertEquals;
import static uk.co.epsilontechnologies.watcher.WatcherStatics.when;

/**
 * Integration tests for {@link Watcher}: each test sends an HTTP request
 * through the watcher (listening on port 9000), which proxies it to a primed
 * stub server on port 9001, and verifies that the configured request captors
 * observed the expected parameter, header, or body values.
 */
public class WatcherTest {

    private final RestTemplate restTemplate = new RestTemplate();
    // Watcher proxies /test traffic from port 9000 to localhost:9001.
    private final Watcher watcher = new Watcher("/test", 9000, "localhost", 9001);
    // Primer stubs the downstream /test endpoint on port 9001.
    private final Primer primer = new Primer("/test", 9001);

    @Before
    public void setUp() {
        this.watcher.start();
        this.primer.start();
    }

    @After
    public void tearDown() {
        this.watcher.stop();
        this.primer.stop();
    }

    /** Captures a query-string parameter value from a GET request. */
    @Test
    public void shouldCaptureParameterValue() {

        // arrange
        PrimerStatics.when(primer.get("/get")).thenReturn(PrimerStatics.response(200));
        final ParameterValueRequestCaptor parameterValueCaptor = new ParameterValueRequestCaptor("key");
        when(this.watcher.get("/get")).thenCapture(parameterValueCaptor);

        // act
        restTemplate.execute("http://localhost:9000/test/get?key=value", HttpMethod.GET, new TestRequestCallback(), new TestResponseExtractor());

        // assert
        assertEquals("value", parameterValueCaptor.getCapturedValue());
    }

    /** Captures an HTTP header value from a GET request. */
    @Test
    public void shouldCaptureHeaderValue() {

        // arrange
        PrimerStatics.when(primer.get("/get")).thenReturn(PrimerStatics.response(200));
        final HeaderValueRequestCaptor headerValueRequestCaptor = new HeaderValueRequestCaptor("key");
        when(this.watcher.get("/get")).thenCapture(headerValueRequestCaptor);

        // act
        restTemplate.execute("http://localhost:9000/test/get", HttpMethod.GET, new TestRequestCallback(null, "value"), new TestResponseExtractor());

        // assert
        assertEquals("value", headerValueRequestCaptor.getCapturedValue());
    }

    /** Captures a regex group from the body of a POST request. */
    @Test
    public void shouldCaptureBodyRegExValue() {

        // arrange
        PrimerStatics.when(primer.post("/post", "\\{ \"key\" : \"([a-z]{5})\" }")).thenReturn(PrimerStatics.response(200));
        final RegExBodyRequestCaptor regExBodyRequestCaptor = new RegExBodyRequestCaptor("\\{ \"key\" : \"([a-z]{5})\" }");
        when(this.watcher.post("/post", "\\{ \"key\" : \"([a-z]{5})\" }")).thenCapture(regExBodyRequestCaptor);

        // act
        restTemplate.execute("http://localhost:9000/test/post", HttpMethod.POST, new TestRequestCallback("{ \"key\" : \"value\" }", null), new TestResponseExtractor());

        // assert
        assertEquals(1, regExBodyRequestCaptor.getCapturedValue().size());
        assertEquals("value", regExBodyRequestCaptor.getFirstCapturedValue());
    }

    /** Captures the full body of a POST request. */
    @Test
    public void shouldCaptureBodyValue() {

        // arrange
        PrimerStatics.when(primer.post("/post", "\\{ \"key\" : \"([a-z]{5})\" }")).thenReturn(PrimerStatics.response(200));
        final BodyRequestCaptor bodyRequestCaptor = new BodyRequestCaptor();
        when(this.watcher.post("/post", "\\{ \"key\" : \"([a-z]{5})\" }")).thenCapture(bodyRequestCaptor);

        // act
        restTemplate.execute("http://localhost:9000/test/post", HttpMethod.POST, new TestRequestCallback("{ \"key\" : \"value\" }", null), new TestResponseExtractor());

        // assert
        assertEquals("{ \"key\" : \"value\" }", bodyRequestCaptor.getCapturedValue());
    }

    /** Registers several captors for one request and checks all of them fire. */
    @Test
    public void shouldCaptureMultipleValuesSimultaneously() {

        // arrange
        PrimerStatics.when(primer.get("/get")).thenReturn(PrimerStatics.response(200));
        final ParameterValueRequestCaptor parameterValueCaptor = new ParameterValueRequestCaptor("key");
        final HeaderValueRequestCaptor headerValueRequestCaptor = new HeaderValueRequestCaptor("key");
        when(this.watcher.get("/get")).thenCapture(parameterValueCaptor, headerValueRequestCaptor);

        // act
        restTemplate.execute("http://localhost:9000/test/get?key=value", HttpMethod.GET, new TestRequestCallback(null, "value"), new TestResponseExtractor());

        // assert
        assertEquals("value", parameterValueCaptor.getCapturedValue());
        assertEquals("value", headerValueRequestCaptor.getCapturedValue());
    }

    /**
     * Request callback that optionally writes a body and/or a "key" header
     * onto the outgoing request.
     */
    static class TestRequestCallback implements RequestCallback {

        // Optional request body; not written when null.
        private String body;
        // Optional value for the "key" header; not set when null.
        private String keyHeaderValue;

        TestRequestCallback() {
            this(null, null);
        }

        TestRequestCallback(final String body, final String keyHeaderValue) {
            this.body = body;
            this.keyHeaderValue = keyHeaderValue;
        }

        @Override
        public void doWithRequest(final ClientHttpRequest clientHttpRequest) throws IOException {
            if (body != null) {
                IOUtils.write(body, clientHttpRequest.getBody());
            }
            if (keyHeaderValue != null) {
                clientHttpRequest.getHeaders().put("key", Arrays.asList(keyHeaderValue));
            }
        }
    }

    /** Reads the response body as a plain string. */
    static class TestResponseExtractor implements ResponseExtractor<String> {

        @Override
        public String extractData(ClientHttpResponse clientHttpResponse) throws IOException {
            return IOUtils.toString(clientHttpResponse.getBody());
        }
    }
}
package net.sf.jabref.exporter; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import java.io.File; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.Scanner; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.base.Charsets; import net.sf.jabref.BibDatabaseContext; import net.sf.jabref.Defaults; import net.sf.jabref.Globals; import net.sf.jabref.JabRefPreferences; import net.sf.jabref.MetaData; import net.sf.jabref.groups.GroupTreeNode; import net.sf.jabref.groups.structure.AllEntriesGroup; import net.sf.jabref.groups.structure.ExplicitGroup; import net.sf.jabref.groups.structure.GroupHierarchyType; import net.sf.jabref.importer.ImportFormatReader; import net.sf.jabref.importer.ParserResult; import net.sf.jabref.importer.fileformat.BibtexParser; import net.sf.jabref.logic.cleanup.FieldFormatterCleanup; import net.sf.jabref.logic.config.SaveOrderConfig; import net.sf.jabref.logic.formatter.casechanger.LowerCaseFormatter; import net.sf.jabref.logic.labelpattern.AbstractLabelPattern; import net.sf.jabref.logic.labelpattern.DatabaseLabelPattern; import net.sf.jabref.model.EntryTypes; import net.sf.jabref.model.database.BibDatabase; import net.sf.jabref.model.database.BibDatabaseMode; import net.sf.jabref.model.entry.BibEntry; import net.sf.jabref.model.entry.BibtexEntryTypes; import net.sf.jabref.model.entry.BibtexString; import net.sf.jabref.model.entry.CustomEntryType; import net.sf.jabref.model.entry.IdGenerator; public class BibDatabaseWriterTest { private BibDatabaseWriter databaseWriter; private StringWriter stringWriter; private BibDatabase database; private MetaData metaData; private BibDatabaseContext bibtexContext; @BeforeClass public static void setUpClass() { Globals.prefs = 
JabRefPreferences.getInstance(); } @Before public void setUp() { databaseWriter = new BibDatabaseWriter(); stringWriter = new StringWriter(); database = new BibDatabase(); metaData = new MetaData(); bibtexContext = new BibDatabaseContext(database, metaData, new Defaults(BibDatabaseMode.BIBTEX)); } @Test(expected = NullPointerException.class) public void writeWithNullWriterThrowsException() throws IOException { databaseWriter.writePartOfDatabase(null, bibtexContext, Collections.emptyList(), mock(SavePreferences.class)); } @Test(expected = NullPointerException.class) public void writeWithNullContextThrowsException() throws IOException { databaseWriter.writePartOfDatabase(mock(Writer.class), null, Collections.emptyList(), new SavePreferences()); } @Test(expected = NullPointerException.class) public void writeWithNullEntriesThrowsException() throws IOException { databaseWriter.writePartOfDatabase(mock(Writer.class), bibtexContext, null, new SavePreferences()); } @Test(expected = NullPointerException.class) public void writeWithNullPreferencesThrowsException() throws IOException { databaseWriter.writePartOfDatabase(mock(Writer.class), bibtexContext, Collections.emptyList(), null); } @Test public void writeEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals("% Encoding: US-ASCII" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writePreamble() throws IOException { database.setPreamble("Test preamble"); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Preamble{Test preamble}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writePreambleAndEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); 
database.setPreamble("Test preamble"); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals("% Encoding: US-ASCII" + Globals.NEWLINE + Globals.NEWLINE + "@Preamble{Test preamble}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeEntry() throws IOException { BibEntry entry = new BibEntry(); entry.setType(BibtexEntryTypes.ARTICLE); database.insertEntry(entry); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.singletonList(entry), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Article{," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeEncodingAndEntry() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); BibEntry entry = new BibEntry(); entry.setType(BibtexEntryTypes.ARTICLE); database.insertEntry(entry); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.singletonList(entry), preferences); assertEquals("% Encoding: US-ASCII" + Globals.NEWLINE + Globals.NEWLINE + "@Article{," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeEpilogue() throws IOException { database.setEpilog("Test epilog"); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "Test epilog" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeEpilogueAndEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); database.setEpilog("Test epilog"); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals("% Encoding: US-ASCII" + 
Globals.NEWLINE + Globals.NEWLINE + "Test epilog" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeMetadata() throws IOException { DatabaseLabelPattern labelPattern = new DatabaseLabelPattern(); labelPattern.setDefaultValue("test"); metaData.setLabelPattern(labelPattern); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: keypatterndefault:test;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeMetadataAndEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); DatabaseLabelPattern labelPattern = new DatabaseLabelPattern(); labelPattern.setDefaultValue("test"); metaData.setLabelPattern(labelPattern); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals("% Encoding: US-ASCII" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: keypatterndefault:test;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeGroups() throws IOException { GroupTreeNode groupRoot = new GroupTreeNode(new AllEntriesGroup()); groupRoot.add(new GroupTreeNode(new ExplicitGroup("test", GroupHierarchyType.INCLUDING))); metaData.setGroups(groupRoot); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); // @formatter:off assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: groupstree:" + Globals.NEWLINE + "0 AllEntriesGroup:;" + Globals.NEWLINE + "1 ExplicitGroup:test\\;2\\;;" + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: groupsversion:3;}" + Globals.NEWLINE, stringWriter.toString()); // @formatter:on } @Test public void writeGroupsAndEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); GroupTreeNode groupRoot = new GroupTreeNode(new 
AllEntriesGroup()); groupRoot.add(new GroupTreeNode(new ExplicitGroup("test", GroupHierarchyType.INCLUDING))); metaData.setGroups(groupRoot); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); // @formatter:off assertEquals( "% Encoding: US-ASCII" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: groupstree:" + Globals.NEWLINE + "0 AllEntriesGroup:;" + Globals.NEWLINE + "1 ExplicitGroup:test\\;2\\;;" + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: groupsversion:3;}" + Globals.NEWLINE, stringWriter.toString()); // @formatter:on } @Test public void writeString() throws IOException { database.addString(new BibtexString("id", "name", "content")); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@String{name = {content}}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeStringAndEncoding() throws IOException { SavePreferences preferences = new SavePreferences().withEncoding(Charsets.US_ASCII); database.addString(new BibtexString("id", "name", "content")); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals("% Encoding: US-ASCII" + Globals.NEWLINE + Globals.NEWLINE + "@String{name = {content}}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeEntryWithCustomizedTypeAlsoWritesTypeDeclaration() throws IOException { EntryTypes.addOrModifyCustomEntryType(new CustomEntryType("customizedType", "required", "optional")); BibEntry entry = new BibEntry(); entry.setType("customizedType"); database.insertEntry(entry); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.singletonList(entry), new SavePreferences()); assertEquals( Globals.NEWLINE + "@Customizedtype{," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: 
databaseType:bibtex;}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-entrytype: Customizedtype: req[required] opt[optional]}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void roundtrip() throws IOException { File testBibtexFile = new File("src/test/resources/testbib/complex.bib"); Charset encoding = StandardCharsets.UTF_8; ParserResult result = BibtexParser.parse(ImportFormatReader.getReader(testBibtexFile, encoding)); SavePreferences preferences = new SavePreferences().withEncoding(encoding).withSaveInOriginalOrder(true); BibDatabaseContext context = new BibDatabaseContext(result.getDatabase(), result.getMetaData(), new Defaults(BibDatabaseMode.BIBTEX)); databaseWriter.writePartOfDatabase(stringWriter, context, result.getDatabase().getEntries(), preferences); try (Scanner scanner = new Scanner(testBibtexFile,encoding.name())) { assertEquals(scanner.useDelimiter("\\A").next(), stringWriter.toString()); } } @Test public void writeSavedSerializationOfEntryIfUnchanged() throws IOException { BibEntry entry = new BibEntry(); entry.setType(BibtexEntryTypes.ARTICLE); entry.setField("author", "Mr. author"); entry.setParsedSerialization("presaved serialization"); entry.setChanged(false); database.insertEntry(entry); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.singletonList(entry), new SavePreferences()); assertEquals("presaved serialization" + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void reformatEntryIfAskedToDoSo() throws IOException { BibEntry entry = new BibEntry(); entry.setType(BibtexEntryTypes.ARTICLE); entry.setField("author", "Mr. 
author"); entry.setParsedSerialization("wrong serialization"); entry.setChanged(false); database.insertEntry(entry); SavePreferences preferences = new SavePreferences().withReformatFile(true); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.singletonList(entry), preferences); assertEquals(Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {Mr. author}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeSavedSerializationOfStringIfUnchanged() throws IOException { BibtexString string = new BibtexString("id", "name", "content"); string.setParsedSerialization("serialization"); database.addString(string); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals("serialization", stringWriter.toString()); } @Test public void reformatStringIfAskedToDoSo() throws IOException { BibtexString string = new BibtexString("id", "name", "content"); string.setParsedSerialization("wrong serialization"); database.addString(string); SavePreferences preferences = new SavePreferences().withReformatFile(true); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), preferences); assertEquals(Globals.NEWLINE + "@String{name = {content}}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeSaveActions() throws Exception { FieldFormatterCleanups saveActions = new FieldFormatterCleanups(true, Collections.singletonList(new FieldFormatterCleanup("title", new LowerCaseFormatter()))); metaData.setSaveActions(saveActions); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: saveActions:enabled;" + Globals.NEWLINE + "title[lower_case]" + Globals.NEWLINE + ";}" + Globals.NEWLINE, stringWriter.toString()); 
} @Test public void writeSaveOrderConfig() throws Exception { SaveOrderConfig saveOrderConfig = new SaveOrderConfig(false, new SaveOrderConfig.SortCriterion("author", false), new SaveOrderConfig.SortCriterion("year", true), new SaveOrderConfig.SortCriterion("abstract", false)); metaData.setSaveOrderConfig(saveOrderConfig); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: saveOrderConfig:specified;author;false;year;true;abstract;false;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeCustomKeyPattern() throws Exception { AbstractLabelPattern pattern = new DatabaseLabelPattern(); pattern.setDefaultValue("test"); pattern.addLabelPattern("article", "articleTest"); metaData.setLabelPattern(pattern); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: keypattern_article:articleTest;}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: keypatterndefault:test;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeBiblatexMode() throws Exception { metaData.setMode(BibDatabaseMode.BIBLATEX); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: databaseType:biblatex;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeProtectedFlag() throws Exception { metaData.markAsProtected(); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: protectedFlag:true;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeContentSelectors() throws Exception { metaData.setContentSelectors("title", Arrays.asList("testWord", "word2")); 
databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: selector_title:testWord;word2;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeFileDirectories() throws Exception { metaData.setDefaultFileDirectory("\\Literature\\"); metaData.setUserFileDirectory("defaultOwner-user", "D:\\Documents"); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals(Globals.NEWLINE + "@Comment{jabref-meta: fileDirectory:\\\\Literature\\\\;}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: fileDirectory-defaultOwner-user:D:\\\\Documents;}" + Globals.NEWLINE, stringWriter.toString()); } @Test public void writeNotEmptyContentSelectors() throws Exception { metaData.setContentSelectors("title", Collections.singletonList("")); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals("", stringWriter.toString()); } @Test public void writeNotCompletelyEmptyContentSelectors() throws Exception { metaData.setContentSelectors("title", Collections.emptyList()); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, Collections.emptyList(), new SavePreferences()); assertEquals("", stringWriter.toString()); } @Test public void writeEntriesSorted() throws IOException { SaveOrderConfig saveOrderConfig = new SaveOrderConfig(false, new SaveOrderConfig.SortCriterion("author", false), new SaveOrderConfig.SortCriterion("year", true), new SaveOrderConfig.SortCriterion("abstract", false)); metaData.setSaveOrderConfig(saveOrderConfig); BibEntry firstEntry = new BibEntry(); firstEntry.setType(BibtexEntryTypes.ARTICLE); firstEntry.setField("author", "A"); firstEntry.setField("year", "2000"); BibEntry secondEntry = new BibEntry(); secondEntry.setType(BibtexEntryTypes.ARTICLE); secondEntry.setField("author", "A"); 
secondEntry.setField("year", "2010"); BibEntry thirdEntry = new BibEntry(); thirdEntry.setType(BibtexEntryTypes.ARTICLE); thirdEntry.setField("author", "B"); thirdEntry.setField("year", "2000"); database.insertEntry(secondEntry); database.insertEntry(thirdEntry); database.insertEntry(firstEntry); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, database.getEntries(), new SavePreferences()); assertEquals( Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {A}," + Globals.NEWLINE + " year = {2000}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {A}," + Globals.NEWLINE + " year = {2010}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {B}," + Globals.NEWLINE + " year = {2000}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: saveOrderConfig:specified;author;false;year;true;abstract;false;}" + Globals.NEWLINE , stringWriter.toString()); } @Test public void writeEntriesInOriginalOrderWhenNoSaveOrderConfigIsSetInMetadata() throws IOException { BibEntry firstEntry = new BibEntry(IdGenerator.next()); firstEntry.setType(BibtexEntryTypes.ARTICLE); firstEntry.setField("author", "A"); firstEntry.setField("year", "2010"); BibEntry secondEntry = new BibEntry(IdGenerator.next()); secondEntry.setType(BibtexEntryTypes.ARTICLE); secondEntry.setField("author", "B"); secondEntry.setField("year", "2000"); BibEntry thirdEntry = new BibEntry(IdGenerator.next()); thirdEntry.setType(BibtexEntryTypes.ARTICLE); thirdEntry.setField("author", "A"); thirdEntry.setField("year", "2000"); database.insertEntry(firstEntry); database.insertEntry(secondEntry); database.insertEntry(thirdEntry); SavePreferences preferences = new SavePreferences().withSaveInOriginalOrder(false); databaseWriter.writePartOfDatabase(stringWriter, bibtexContext, 
database.getEntries(), preferences); assertEquals( Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {A}," + Globals.NEWLINE + " year = {2010}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {B}," + Globals.NEWLINE + " year = {2000}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Article{," + Globals.NEWLINE + " author = {A}," + Globals.NEWLINE + " year = {2000}," + Globals.NEWLINE + "}" + Globals.NEWLINE + Globals.NEWLINE + "@Comment{jabref-meta: databaseType:bibtex;}" + Globals.NEWLINE , stringWriter.toString()); } }
package net.spy.memcached.protocol.binary; import java.util.Collection; import java.util.Collections; import net.spy.memcached.ops.GetAndTouchOperation; import net.spy.memcached.ops.GetOperation; import net.spy.memcached.ops.GetlOperation; import net.spy.memcached.ops.GetsOperation; import net.spy.memcached.ops.OperationCallback; import net.spy.memcached.ops.OperationStatus; class GetOperationImpl extends OperationImpl implements GetOperation, GetsOperation, GetlOperation, GetAndTouchOperation { static final int GET_CMD=0x00; static final int GETL_CMD=0x94; static final int GAT_CMD=0x1d; /** * Length of the extra header stuff for a GET response. */ static final int EXTRA_HDR_LEN=4; private final String key; private final int exp; private final int cmd; public GetOperationImpl(String k, GetOperation.Callback cb) { super(GET_CMD, generateOpaque(), cb); key=k; exp=0; cmd=GET_CMD; } public GetOperationImpl(String k, GetsOperation.Callback cb) { super(GET_CMD, generateOpaque(), cb); key=k; exp=0; cmd=GET_CMD; } public GetOperationImpl(String k, int e, GetlOperation.Callback cb) { super(GETL_CMD, generateOpaque(), cb); key=k; exp=e; cmd=GETL_CMD; } public GetOperationImpl(String k, int e, GetAndTouchOperation.Callback cb) { super(GAT_CMD, generateOpaque(), cb); key=k; exp=e; cmd=GAT_CMD; } @Override public void initialize() { if (cmd == GETL_CMD) { prepareBuffer(key, 0, EMPTY_BYTES, 0, exp); } else if (cmd == GAT_CMD) { prepareBuffer(key, 0, EMPTY_BYTES, exp); } else { prepareBuffer(key, 0, EMPTY_BYTES); } } @Override protected void decodePayload(byte[] pl) { final int flags=decodeInt(pl, 0); final byte[] data=new byte[pl.length - EXTRA_HDR_LEN]; System.arraycopy(pl, EXTRA_HDR_LEN, data, 0, pl.length-EXTRA_HDR_LEN); // Assume we're processing a get unless the cast fails. 
OperationCallback cb = getCallback(); if (cb instanceof GetOperation.Callback) { GetOperation.Callback gcb=(GetOperation.Callback)cb; gcb.gotData(key, flags, data); } else if (cb instanceof GetsOperation.Callback) { GetsOperation.Callback gcb=(GetsOperation.Callback)cb; gcb.gotData(key, flags, responseCas, data); } else if (cb instanceof GetlOperation.Callback) { GetlOperation.Callback gcb=(GetlOperation.Callback)cb; gcb.gotData(key, flags, responseCas, data); } else if (cb instanceof GetAndTouchOperation.Callback) { GetAndTouchOperation.Callback gcb=(GetAndTouchOperation.Callback)cb; gcb.gotData(key, flags, responseCas, data); } else { throw new ClassCastException("Couldn't convert " + cb + "to a relevent op"); } getCallback().receivedStatus(STATUS_OK); } @Override protected OperationStatus getStatusForErrorCode(int errCode, byte[] errPl) { OperationStatus baseStatus = super.getStatusForErrorCode(errCode, errPl); if (baseStatus != null) { return baseStatus; } return errCode == ERR_NOT_FOUND ? NOT_FOUND_STATUS : null; } public Collection<String> getKeys() { return Collections.singleton(key); } }
package com.bomberman.client; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; import org.json.JSONArray; import org.json.JSONObject; public class Client { private MessageQueue receivedMsgs; private ClientReceiver cr; private DatagramSocket dsocket; private int playerid = 0; private int powerups; private int bombs; private int lives = 1; private boolean isGameOn = false; private boolean gameOver = false; private boolean isDebug = false; private JSONObject game = null; // State of the game private InetAddress server_ip; private int server_port; public Client(String IPAddress, int port) throws SocketException, UnknownHostException, InterruptedException { this.receivedMsgs = new MessageQueue(); dsocket = new DatagramSocket(); cr = new ClientReceiver(receivedMsgs, dsocket); this.server_ip = InetAddress.getByName(IPAddress); this.server_port = port; cr.start(); } /** * Start a new game on the server. * Must have connected first and have a player id. */ public void startGame() { isGameOn = true; JSONObject startMsg = new JSONObject(); startMsg.put("command", "button"); startMsg.put("button", "start"); startMsg.put("pid", playerid); send(startMsg.toString()); } /** * Create a new game. */ public void newGame() { gameOver = false; startGame(); } /** * Flush all the messages from the received messages queue. */ public void flushMessages() { receivedMsgs.clear(); } /** * End the game by sending a message to the server * telling it that we want to end the game. 
*/ public void endGame() { if(isGameOn){ JSONObject endMsg = new JSONObject(); endMsg.put("pid", this.playerid); endMsg.put("command", "button"); endMsg.put("button", "end"); send(endMsg.toString()); } } /** * Send a message to the server * @param msg String message to send */ public void send(String s) { DatagramPacket data = new DatagramPacket(s.getBytes(), s.length(), server_ip, server_port); try { dsocket.send(data); } catch (IOException e) { e.printStackTrace(); } } /** * Receive a message from the server (including broadcasts) * @return String message received */ public String receive() { String s; synchronized(receivedMsgs) { while(receivedMsgs.isEmpty()) { try { receivedMsgs.wait(); } catch (InterruptedException e) { e.printStackTrace(); return ""; } } s = receivedMsgs.pop(); } if(isDebug) System.out.println("Received message: " + s); return s; } /** * Receive messages that are not broadcast messages. * * @return The first message received that isnt a broadcast */ public String receiveNoBroadcasts() { boolean isBroadcast = true; String msg = ""; JSONObject resp; while(isBroadcast) { msg = receive(); resp = new JSONObject(msg); if(!resp.get("type").equals("broadcast")) { isBroadcast = false; } } return msg; } /** * Connect to the server as a player. Get a player id. */ public void connect(String type) { JSONObject connMsg = new JSONObject(); connMsg.put("command", "join"); connMsg.put("type", type); send(connMsg.toString()); } /** * Quit the client receiving of messages by * terminating the client receiver. * * @throws InterruptedException */ public void quit() throws InterruptedException { cr.requestQuit(); cr.join(); } /** * Request to the server that it reset its state * To be typically used for debugging and testing. 
*/ public void resetServer() { JSONObject resetMsg = new JSONObject(); resetMsg.put("command", "reset"); send(resetMsg.toString()); receive(); // wait until it replies } /** * Set the state of the game * @param s String of the game state */ public void setState(String s) { JSONObject resp = new JSONObject(s); if(resp.getString("type").equals("game_over")) { isGameOn = false; gameOver = true; } else if(resp.getString("type").equals("player_join") && resp.getString("resp").equals("Success")) { playerid = resp.getInt("pid"); } else if(resp.getString("type").equals("spectator_join") && resp.getString("resp").equals("Success")) { playerid = -1; } if(resp.keySet().contains("game")) { isGameOn = true; this.game = resp.getJSONObject("game"); } if(resp.keySet().contains("players")) { if(this.isPlayer()){ this.powerups = resp.getJSONObject("players").getJSONObject("" + playerid).getInt("powerups"); this.bombs = resp.getJSONObject("players").getJSONObject("" + playerid).getInt("bombs"); this.lives = resp.getJSONObject("players").getJSONObject("" + playerid).getInt("lives"); } else{ this.powerups = resp.getJSONObject("players").getJSONObject("1").getInt("powerups"); this.bombs = resp.getJSONObject("players").getJSONObject("1").getInt("bombs"); this.lives = resp.getJSONObject("players").getJSONObject("1").getInt("lives"); } } } /** * Get the board cell letter. 1 is P1, * 2 = P2, B is bomb, F is fire for example * @param type the integer value of the game object type * @return the String representation of that game object type */ public String getGameBoardTypeLetter(int type) { switch(type) { case 6: return "B"; case 10: return "F"; default: return (new Integer(type)).toString(); } } /** * Stringify the board so that we can display this to the view. * @param board board to stringify * @return string representation of the board. 
*/ public String stringifyBoard(JSONArray board) { String result = ""; for(int col = 0; col < board.length(); col++) { for(int row = 0; row < board.length(); row++) { result += "[" + getGameBoardTypeLetter(board.getJSONArray(row).getInt(col)) + "]"; } result += "\n"; } return result; } String[] actions = {"up", "down", "right", "left", "deploy"}; /** * Move the player client in a specific direction. * @param d Action type of the direction you want to go. Options are UP, DOWN, LEFT, RIGHT */ public void move(Action d) { JSONObject moveMsg = new JSONObject(); moveMsg.put("command", "move"); moveMsg.put("direction", actions[d.ordinal()]); moveMsg.put("pid", playerid); send(moveMsg.toString()); } /** * Deploy (drop) a bomb where the client player * is currently standing on the game board. */ public void deployBomb() { System.out.println("Player " + playerid +" deploying bomb!"); JSONObject bombMsg = new JSONObject(); bombMsg.put("command", "button"); bombMsg.put("button", actions[Action.BOMB.ordinal()]); bombMsg.put("pid", playerid); send(bombMsg.toString()); } /** * Get the state of the game * @return String representation of the game state */ public String getGameBoard() { String board = (this.game == null) ? "" : stringifyBoard(this.game.getJSONArray("board")); return board; } /** * Returns the id of the client. * -1 is spectator, * positive number is player * 0 is not connected. * @return int representation of the client id */ public int getPlayerID() { return playerid; } /** * Returns whether a game is currently in progress (not game over, not lobby) * @return true if game on, false otherwise */ public boolean isGameOn() { return isGameOn; } /** * Returns whether the game of the client is over. * @return true if game over, false otherwise */ public boolean isGameOver() { return gameOver; } /** * Get the number of powerups that the player has. 
* @return int of the number of powerups */ public int getPowerups() { return powerups; } /** * Return the number of bombs that the client (player) has. * @return int of the number of bombs */ public int getBombs() { return bombs; } /** * Returns whether the client is a player. * @return true if player, false otherwise. */ public boolean isPlayer() { return this.playerid > 0; } /** * Returns whether the client is a spectator. * @return true if spectator, false otherwise */ public boolean isSpectator() { return this.playerid < 0; } /** * Returns whether the client is connected * as either a spectator or a player. * @return true if connected, false otherwise. */ public boolean isConnected() { return this.playerid != 0; } /** * Load a game from a string representation of a board. * @param board String representation of board */ public void loadGame(String board) { JSONObject game = new JSONObject(); game.put("game", new JSONObject(board)); game.put("command", "load"); send(game.toString()); } /** * Get the string representation of the board to save * to a file so that we can load it back in later. * @return String representation of board */ public String getBoardToSave(){ JSONObject board = new JSONObject(); board.put("game",this.game); board.put("command", "load"); return board.toString(); } /** * @return the lives */ public int getLives() { return this.lives; } }
package retrofit;

import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import retrofit.client.Request;
import retrofit.client.Response;
import rx.Observable;
import rx.Scheduler;
import rx.Subscriber;
import rx.schedulers.Schedulers;

import static retrofit.RestAdapter.LogLevel;
import static retrofit.RetrofitError.unexpectedError;

/**
 * Wraps mock implementations of Retrofit service interfaces so that they exhibit
 * configurable fake-network behavior: a round-trip delay, a plus-or-minus variance
 * on that delay, and a random error percentage.
 */
public final class MockRestAdapter {
  private static final int DEFAULT_DELAY_MS = 2000; // Network calls will take 2 seconds.
  private static final int DEFAULT_VARIANCE_PCT = 40; // Delay varies by plus-or-minus 40%.
  private static final int DEFAULT_ERROR_PCT = 3; // 3% of network calls will fail.
  private static final int ERROR_DELAY_FACTOR = 3; // Network errors will be scaled by this value.

  /**
   * Create a new {@link MockRestAdapter} which will act as a factory for mock services. Some of
   * the configuration of the supplied {@link RestAdapter} will be used generating mock behavior.
   */
  public static MockRestAdapter from(RestAdapter restAdapter) {
    return new MockRestAdapter(restAdapter);
  }

  /** A listener invoked when the network behavior values for a {@link MockRestAdapter} change. */
  public interface ValueChangeListener {
    void onMockValuesChanged(long delayMs, int variancePct, int errorPct);

    /** No-op listener used as the default so callers never have to null-check. */
    ValueChangeListener EMPTY = new ValueChangeListener() {
      @Override public void onMockValuesChanged(long delayMs, int variancePct, int errorPct) {
      }
    };
  }

  private final RestAdapter restAdapter;
  private final MockRxSupport mockRxSupport; // null when RxJava is absent from the classpath.
  final Random random = new Random();

  private ValueChangeListener listener = ValueChangeListener.EMPTY;
  private int delayMs = DEFAULT_DELAY_MS;
  private int variancePct = DEFAULT_VARIANCE_PCT;
  private int errorPct = DEFAULT_ERROR_PCT;

  private MockRestAdapter(RestAdapter restAdapter) {
    this.restAdapter = restAdapter;
    // Only touch Rx types when RxJava is actually present, otherwise loading
    // MockRxSupport would fail.
    if (Platform.HAS_RX_JAVA) {
      mockRxSupport = new MockRxSupport(restAdapter);
    } else {
      mockRxSupport = null;
    }
  }

  /** Set a listener to be notified when any mock value changes. */
  public void setValueChangeListener(ValueChangeListener listener) {
    this.listener = listener;
  }

  private void notifyValueChangeListener() {
    listener.onMockValuesChanged(delayMs, variancePct, errorPct);
  }

  /** Set the network round trip delay, in milliseconds. */
  public void setDelay(long delayMs) {
    if (delayMs < 0) {
      throw new IllegalArgumentException("Delay must be positive value.");
    }
    if (delayMs > Integer.MAX_VALUE) {
      throw new IllegalArgumentException("Delay value too large. Max: " + Integer.MAX_VALUE);
    }
    // Only fire the change listener when the value actually changed.
    if (this.delayMs != delayMs) {
      this.delayMs = (int) delayMs;
      notifyValueChangeListener();
    }
  }

  /** The network round trip delay, in milliseconds */
  public long getDelay() {
    return delayMs;
  }

  /** Set the plus-or-minus variance percentage of the network round trip delay. */
  public void setVariancePercentage(int variancePct) {
    if (variancePct < 0 || variancePct > 100) {
      throw new IllegalArgumentException("Variance percentage must be between 0 and 100.");
    }
    if (this.variancePct != variancePct) {
      this.variancePct = variancePct;
      notifyValueChangeListener();
    }
  }

  /** The plus-or-minus variance percentage of the network round trip delay. */
  public int getVariancePercentage() {
    return variancePct;
  }

  /** Set the percentage of calls to {@link #calculateIsFailure()} that return {@code true}. */
  public void setErrorPercentage(int errorPct) {
    if (errorPct < 0 || errorPct > 100) {
      throw new IllegalArgumentException("Error percentage must be between 0 and 100.");
    }
    if (this.errorPct != errorPct) {
      this.errorPct = errorPct;
      notifyValueChangeListener();
    }
  }

  /** The percentage of calls to {@link #calculateIsFailure()} that return {@code true}. */
  public int getErrorPercentage() {
    return errorPct;
  }

  /**
   * Randomly determine whether this call should result in a network failure.
   * <p>
   * This method is exposed for implementing other, non-Retrofit services which exhibit similar
   * network behavior. Retrofit services automatically will exhibit network behavior when wrapped
   * using {@link #create(Class, Object)}.
   */
  public boolean calculateIsFailure() {
    // randomValue is uniform in [1, 100], so the comparison yields errorPct% true.
    int randomValue = random.nextInt(100) + 1;
    return randomValue <= errorPct;
  }

  /**
   * Get the delay (in milliseconds) that should be used for triggering a network error.
   * <p>
   * Because we are triggering an error, use a random delay between 0 and three times the normal
   * network delay to simulate a flaky connection failing anywhere from quickly to slowly.
   * <p>
   * This method is exposed for implementing other, non-Retrofit services which exhibit similar
   * network behavior. Retrofit services automatically will exhibit network behavior when wrapped
   * using {@link #create(Class, Object)}.
   */
  public int calculateDelayForError() {
    return random.nextInt(delayMs * ERROR_DELAY_FACTOR);
  }

  /**
   * Get the delay (in milliseconds) that should be used for delaying a network call response.
   * <p>
   * This method is exposed for implementing other, non-Retrofit services which exhibit similar
   * network behavior. Retrofit services automatically will exhibit network behavior when wrapped
   * using {@link #create(Class, Object)}.
   */
  public int calculateDelayForCall() {
    float errorPercent = variancePct / 100f; // e.g., 20 / 100f == 0.2f
    float lowerBound = 1f - errorPercent; // 0.2f --> 0.8f
    float upperBound = 1f + errorPercent; // 0.2f --> 1.2f
    float bound = upperBound - lowerBound; // 1.2f - 0.8f == 0.4f
    float delayPercent = (random.nextFloat() * bound) + lowerBound; // 0.8 + (rnd * 0.4)
    return (int) (delayMs * delayPercent);
  }

  /**
   * Wrap the supplied mock implementation of a service so that it exhibits the delay and error
   * characteristics of a real network.
   *
   * @see #setDelay(long)
   * @see #setVariancePercentage(int)
   * @see #setErrorPercentage(int)
   */
  @SuppressWarnings("unchecked")
  public <T> T create(Class<T> service, T mockService) {
    Utils.validateServiceClass(service);
    return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
        new MockHandler(mockService, restAdapter.getMethodInfoCache(service)));
  }

  /**
   * Dynamic-proxy handler that routes every service-interface call through the
   * mock implementation while injecting the configured delay/error behavior.
   */
  private class MockHandler implements InvocationHandler {
    private final Object mockService;
    private final Map<Method, RestMethodInfo> methodInfoCache;

    public MockHandler(Object mockService, Map<Method, RestMethodInfo> methodInfoCache) {
      this.mockService = mockService;
      this.methodInfoCache = methodInfoCache;
    }

    @Override public Object invoke(Object proxy, Method method, final Object[] args)
        throws Throwable {
      // If the method is a method from Object then defer to normal invocation.
      if (method.getDeclaringClass() == Object.class) {
        return method.invoke(this, args);
      }

      // Load or create the details cache for the current method.
      final RestMethodInfo methodInfo = RestAdapter.getMethodInfo(methodInfoCache, method);

      if (methodInfo.isSynchronous) {
        try {
          return invokeSync(methodInfo, restAdapter.requestInterceptor, args);
        } catch (RetrofitError error) {
          // Give the adapter's error handler a chance to substitute the exception.
          Throwable newError = restAdapter.errorHandler.handleError(error);
          if (newError == null) {
            throw new IllegalStateException("Error handler returned null for wrapped exception.",
                error);
          }
          throw newError;
        }
      }

      if (restAdapter.httpExecutor == null || restAdapter.callbackExecutor == null) {
        throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
      }

      // Apply the interceptor synchronously, recording the interception so we can replay it later.
      // This way we still defer argument serialization to the background thread.
      final RequestInterceptorTape interceptorTape = new RequestInterceptorTape();
      restAdapter.requestInterceptor.intercept(interceptorTape);

      if (methodInfo.isObservable) {
        return mockRxSupport.createMockObservable(this, methodInfo, interceptorTape, args);
      }

      // Callback-style method: do the mock work on the HTTP executor.
      restAdapter.httpExecutor.execute(new Runnable() {
        @Override public void run() {
          invokeAsync(methodInfo, interceptorTape, args);
        }
      });
      return null; // Asynchronous methods should have return type of void.
    }

    /**
     * Build the {@link Request} that the real adapter would have sent, so that
     * logging and URL reporting match real behavior.
     */
    private Request buildRequest(RestMethodInfo methodInfo, RequestInterceptor interceptor,
        Object[] args) throws Throwable {
      methodInfo.init();

      // Begin building a normal request.
      String apiUrl = restAdapter.server.getUrl();
      RequestBuilder requestBuilder = new RequestBuilder(apiUrl, methodInfo, restAdapter.converter);
      requestBuilder.setArguments(args);

      // Run it through the interceptor.
      interceptor.intercept(requestBuilder);

      Request request = requestBuilder.build();

      if (restAdapter.logLevel.log()) {
        request = restAdapter.logAndReplaceRequest("MOCK", request);
      }

      return request;
    }

    /**
     * Execute a synchronous mock call: possibly simulate a network failure,
     * invoke the mock, then sleep out the remainder of the configured delay.
     * {@link MockHttpException} thrown by the mock is converted to a
     * {@link MockHttpRetrofitError}.
     */
    private Object invokeSync(RestMethodInfo methodInfo, RequestInterceptor interceptor,
        Object[] args) throws Throwable {
      Request request = buildRequest(methodInfo, interceptor, args);
      String url = request.getUrl();

      if (calculateIsFailure()) {
        sleep(calculateDelayForError());
        IOException exception = new IOException("Mock network error!");
        if (restAdapter.logLevel.log()) {
          restAdapter.logException(exception, url);
        }
        throw RetrofitError.networkError(url, exception);
      }

      LogLevel logLevel = restAdapter.logLevel;
      RestAdapter.Log log = restAdapter.log;

      int callDelay = calculateDelayForCall();
      long beforeNanos = System.nanoTime();
      try {
        Object returnValue = methodInfo.method.invoke(mockService, args);

        // Sleep for whatever amount of time is left to satisfy the network delay, if any.
        long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - beforeNanos);
        sleep(callDelay - tookMs);

        if (logLevel.log()) {
          log.log(String.format("<--- MOCK 200 %s (%sms)", url, callDelay));
          if (logLevel.ordinal() >= LogLevel.FULL.ordinal()) {
            log.log(returnValue + ""); // Hack to convert toString while supporting null.
            log.log("<--- END MOCK");
          }
        }

        return returnValue;
      } catch (InvocationTargetException e) {
        Throwable innerEx = e.getCause();
        if (!(innerEx instanceof MockHttpException)) {
          // Any non-HTTP exception from the mock propagates unchanged.
          throw innerEx;
        }
        MockHttpException httpEx = (MockHttpException) innerEx;
        Response response = httpEx.toResponse(restAdapter.converter);

        // Sleep for whatever amount of time is left to satisfy the network delay, if any.
        long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - beforeNanos);
        sleep(callDelay - tookMs);

        if (logLevel.log()) {
          log.log(String.format("<---- MOCK %s %s (%sms)", httpEx.code, url, callDelay));
          if (logLevel.ordinal() >= LogLevel.FULL.ordinal()) {
            log.log(httpEx.responseBody + ""); // Hack to convert toString while supporting null.
            log.log("<--- END MOCK");
          }
        }

        throw new MockHttpRetrofitError(url, response, httpEx.responseBody);
      }
    }

    /**
     * Execute an asynchronous (Callback-style) mock call on the HTTP executor;
     * results and errors are delivered through the callback executor. The last
     * element of {@code args} is assumed to be the caller's {@link Callback}.
     */
    private void invokeAsync(RestMethodInfo methodInfo, RequestInterceptor interceptorTape,
        Object[] args) {
      Request request;
      try {
        request = buildRequest(methodInfo, interceptorTape, args);
      } catch (final Throwable throwable) {
        // Request construction failed: surface the error on the callback executor.
        restAdapter.callbackExecutor.execute(new Runnable() {
          @Override public void run() {
            throw new RuntimeException(throwable);
          }
        });
        return;
      }

      LogLevel logLevel = restAdapter.logLevel;
      RestAdapter.Log log = restAdapter.log;

      long beforeNanos = System.nanoTime();
      int callDelay = calculateDelayForCall();

      final String url = request.getUrl();
      final Callback realCallback = (Callback) args[args.length - 1];

      if (calculateIsFailure()) {
        sleep(calculateDelayForError());
        IOException exception = new IOException("Mock network error!");
        if (restAdapter.logLevel.log()) {
          restAdapter.logException(exception, url);
        }
        RetrofitError error = RetrofitError.networkError(url, exception);
        Throwable cause = restAdapter.errorHandler.handleError(error);
        // If the error handler substituted a different throwable, wrap it.
        final RetrofitError e = cause == error ? error : unexpectedError(error.getUrl(), cause);
        restAdapter.callbackExecutor.execute(new Runnable() {
          @Override public void run() {
            realCallback.failure(e);
          }
        });
        return;
      }

      // Replace the normal callback with one which supports the delay.
      Object[] newArgs = new Object[args.length];
      System.arraycopy(args, 0, newArgs, 0, args.length - 1);
      newArgs[args.length - 1] = new DelayingCallback(beforeNanos, callDelay, url, realCallback);

      try {
        methodInfo.method.invoke(mockService, newArgs);
      } catch (Throwable throwable) {
        // NOTE(review): getCause() is only non-null for wrapper exceptions such as
        // InvocationTargetException; a direct throwable here would make innerEx null
        // and end up wrapped in RuntimeException(null) below -- confirm intended.
        final Throwable innerEx = throwable.getCause();
        if (!(innerEx instanceof MockHttpException)) {
          restAdapter.callbackExecutor.execute(new Runnable() {
            @Override public void run() {
              if (innerEx instanceof RuntimeException) {
                throw (RuntimeException) innerEx;
              }
              throw new RuntimeException(innerEx);
            }
          });
          return;
        }
        MockHttpException httpEx = (MockHttpException) innerEx;
        Response response = httpEx.toResponse(restAdapter.converter);

        // Sleep for whatever amount of time is left to satisfy the network delay, if any.
        long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - beforeNanos);
        sleep(callDelay - tookMs);

        if (logLevel.log()) {
          log.log(String.format("<---- MOCK %s %s (%sms)", httpEx.code, url, callDelay));
          if (logLevel.ordinal() >= LogLevel.FULL.ordinal()) {
            log.log(httpEx.responseBody + ""); // Hack to convert toString while supporting null.
            log.log("<--- END MOCK");
          }
        }

        RetrofitError error = new MockHttpRetrofitError(url, response, httpEx.responseBody);
        Throwable cause = restAdapter.errorHandler.handleError(error);
        final RetrofitError e = cause == error ? error : unexpectedError(error.getUrl(), cause);
        restAdapter.callbackExecutor.execute(new Runnable() {
          @Override public void run() {
            realCallback.failure(e);
          }
        });
      }
    }

    /**
     * Callback decorator that sleeps out the remaining mock network delay before
     * forwarding a success to the real callback (on the callback executor).
     * Calling {@link #failure} directly is unsupported by design.
     */
    private class DelayingCallback implements Callback {
      private final long beforeNanos;
      private final String url;
      private final Callback realCallback;
      private final long callDelay;

      private DelayingCallback(long beforeNanos, int callDelay, String url, Callback realCallback) {
        this.beforeNanos = beforeNanos;
        this.callDelay = callDelay;
        this.url = url;
        this.realCallback = realCallback;
      }

      @Override public void success(final Object object, final Response response) {
        LogLevel logLevel = restAdapter.logLevel;
        RestAdapter.Log log = restAdapter.log;

        // Sleep for whatever amount of time is left to satisfy the network delay, if any.
        long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - beforeNanos);
        sleep(callDelay - tookMs);

        if (logLevel.log()) {
          log.log(String.format("<--- MOCK 200 %s (%sms)", url, callDelay));
          if (logLevel.ordinal() >= LogLevel.FULL.ordinal()) {
            log.log(object + ""); // Hack to convert toString while supporting null.
            log.log("<--- END MOCK");
          }
        }

        restAdapter.callbackExecutor.execute(new Runnable() {
          @SuppressWarnings("unchecked") @Override public void run() {
            realCallback.success(object, response);
          }
        });
      }

      @Override public void failure(final RetrofitError error) {
        restAdapter.callbackExecutor.execute(new Runnable() {
          @Override public void run() {
            throw new IllegalStateException(
                "Calling failure directly is not supported. Throw MockHttpException instead.");
          }
        });
      }
    }
  }

  /**
   * Waits a given number of milliseconds (of uptimeMillis) before returning. Similar to {@link
   * Thread#sleep(long)}, but does not throw {@link InterruptedException}; {@link
   * Thread#interrupt()} events are deferred until the next interruptible operation. Does not
   * return until at least the specified number of milliseconds has elapsed.
   *
   * @param ms to sleep before returning, in milliseconds of uptime.
   */
  private static void sleep(long ms) {
    // This implementation is modified from Android's SystemClock#sleep.
    long start = uptimeMillis();
    long duration = ms;
    boolean interrupted = false;
    while (duration > 0) {
      try {
        Thread.sleep(duration);
      } catch (InterruptedException e) {
        interrupted = true;
      }
      duration = start + ms - uptimeMillis();
    }

    if (interrupted) {
      // Important: we don't want to quietly eat an interrupt() event,
      // so we make sure to re-interrupt the thread so that the next
      // call to Thread.sleep() or Object.wait() will be interrupted.
      Thread.currentThread().interrupt();
    }
  }

  private static long uptimeMillis() {
    return System.nanoTime() / 1000000L;
  }

  /** Indirection to avoid VerifyError if RxJava isn't present. */
  private static class MockRxSupport {
    private final Scheduler scheduler;
    private final ErrorHandler errorHandler;

    MockRxSupport(RestAdapter restAdapter) {
      scheduler = Schedulers.executor(restAdapter.httpExecutor);
      errorHandler = restAdapter.errorHandler;
    }

    Observable createMockObservable(final MockHandler mockHandler, final RestMethodInfo methodInfo,
        final RequestInterceptor interceptor, final Object[] args) {
      return Observable.create(new Observable.OnSubscribe<Object>() {
        @Override public void call(Subscriber<? super Object> subscriber) {
          try {
            // The mock method itself returns an Observable; subscribe through to it.
            Observable observable =
                (Observable) mockHandler.invokeSync(methodInfo, interceptor, args);
            //noinspection unchecked
            observable.subscribe(subscriber);
          } catch (RetrofitError e) {
            subscriber.onError(errorHandler.handleError(e));
          } catch (Throwable e) {
            subscriber.onError(e);
          }
        }
      }).subscribeOn(scheduler);
    }
  }
}
package ogiba.styleablesharedialog.ShareDialog;

import android.content.ComponentName;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Configuration;
import android.graphics.Point;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;

import ogiba.styleablesharedialog.R;
import ogiba.styleablesharedialog.ShareDialog.Models.ShareActionModel;
import ogiba.styleablesharedialog.ShareDialog.Utils.DisplayType;
import ogiba.styleablesharedialog.ShareDialog.Utils.SizeType;

/**
 * Styleable bottom share dialog that lists applications able to receive the
 * shared content and forwards the chosen share intent to them.
 * <p>
 * NOTE(review): the field declarations (shareType, dialogTitle, numberOfRows,
 * header/footer layout ids, sizeType, adapter, view references, etc.) do not
 * appear in this extracted chunk -- verify they exist in the original file.
 */
public class ShareDialog extends DialogFragment implements ShareItemsAdapter.OnShareActionSelect {

    /**
     * Create new instance of {@link ShareDialog} with custom type
     *
     * @param shareType - type of shared content in string.
     * @return currently created instance of {@link ShareDialog}
     */
    public static ShareDialog newInstance(String shareType) {
        Bundle args = new Bundle();
        args.putString(Builder.TAG_TYPE_TEXT, shareType);

        ShareDialog fragment = new ShareDialog();
        fragment.setStyle(DialogFragment.STYLE_NORMAL, R.style.StyleableDialog);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Create new instance of {@link ShareDialog} using the default text type.
     *
     * @return currently created instance of {@link ShareDialog}
     */
    public static ShareDialog newInstance() {
        Bundle args = new Bundle();
        args.putString(Builder.TAG_TYPE_TEXT, TYPE_TEXT);

        ShareDialog fragment = new ShareDialog();
        fragment.setStyle(DialogFragment.STYLE_NORMAL, R.style.StyleableDialog);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Create instance of {@link ShareDialog} with {@link Bundle}
     *
     * @param args {@link Bundle} arguments
     * @return current instance of {@link ShareDialog}
     */
    public static ShareDialog newInstance(Bundle args) {
        ShareDialog fragment = new ShareDialog();
        fragment.setStyle(DialogFragment.STYLE_NORMAL, R.style.StyleableDialog);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Shows currently created instance of {@link ShareDialog}
     *
     * @param fragmentManager instance of {@link FragmentManager} used to shows {@link ShareDialog}
     */
    public void show(FragmentManager fragmentManager) {
        this.show(fragmentManager, SHARE_DIALOG_TAG);
    }

    /**
     * Shows currently created instance of {@link ShareDialog} via {@link FragmentTransaction}
     *
     * @param fragmentTransaction instance of {@link FragmentTransaction} used to shows {@link ShareDialog}
     */
    public void show(FragmentTransaction fragmentTransaction) {
        this.show(fragmentTransaction, SHARE_DIALOG_TAG);
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        parseExtras();

        if (savedInstanceState != null)
            parseSavedInstance(savedInstanceState);
    }

    @Override
    public void onStart() {
        super.onStart();
        Window window = getDialog().getWindow();
        Ratio sizeRatio = checkDialogRatioSize();

        if (window != null) {
            // Scale the dialog relative to the current display size and pin it
            // to the bottom of the screen.
            Point size = new Point();
            getActivity().getWindowManager().getDefaultDisplay().getSize(size);
            window.setLayout((int) (size.x * sizeRatio.x), (int) (size.y * sizeRatio.y));
            window.setGravity(Gravity.BOTTOM);
        }
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.dialog_share, container, false);
        bindViews(view);
        setupAdapter();
        setupTitle();
        attachCustomLayoutToView();
        setupRecyclerView();
        return view;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        shareActionModels = getSharableApps();
        if (shareActionModels.size() > 0)
            adapter.setItems(shareActionModels);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putString(Builder.TAG_TEXT_CONTENT, shareTextContent);
        outState.putStringArrayList(Builder.TAG_TEXT_LIST_CONTENT, shareTextListContent);
    }

    @Override
    public void onSelect(ShareActionModel model, int position) {
        shareContent(model);
    }

    /** Reads the Builder-provided configuration from the fragment arguments. */
    private void parseExtras() {
        Bundle args = getArguments();
        if (args != null) {
            this.shareType = args.getString(Builder.TAG_TYPE_TEXT);
            this.dialogTitle = args.getString(Builder.TAG_TITLE);
            this.dialogTitleTintColor = args.getInt(Builder.TAG_TITLE_TINT);
            this.dialogTitleTintBackground = args.getInt(Builder.TAG_TITLE_BACKGROUND);
            this.numberOfRows = args.getInt(Builder.TAG_ROWS_NUMBER);
            this.headerLayoutID = args.getInt(Builder.TAG_LAYOUT_HEADER);
            this.footerLayoutID = args.getInt(Builder.TAG_LAYOUT_FOOTER);
            this.isHorizontal = args.getBoolean(Builder.TAG_ORIENTATION_TAG);
            this.showAsList = args.getBoolean(Builder.TAG_LIST_FORM);
        }
    }

    /** Restores share content after a configuration change. */
    private void parseSavedInstance(Bundle instance) {
        this.shareTextContent = instance.getString(Builder.TAG_TEXT_CONTENT);
        // Fix: the list content is written in onSaveInstanceState but was never
        // restored, losing multi-item share data across configuration changes.
        this.shareTextListContent = instance.getStringArrayList(Builder.TAG_TEXT_LIST_CONTENT);
    }

    private void attachCustomLayoutToView() {
        attachCustomHeader();
        attachCustomFooter();
    }

    /** Inflates the user-supplied header layout in place of the default header. */
    private void attachCustomHeader() {
        if (headerLayoutID == null || headerLayoutID == 0)
            return;

        headerContainer.removeAllViews();
        LayoutInflater.from(getContext()).inflate(headerLayoutID, headerContainer);
    }

    /** Inflates the user-supplied footer layout and makes its container visible. */
    private void attachCustomFooter() {
        if (footerLayoutID == null || footerLayoutID == 0)
            return;

        footerContainer.removeAllViews();
        footerContainer.setVisibility(View.VISIBLE);
        LayoutInflater.from(getContext()).inflate(footerLayoutID, footerContainer);
    }

    protected void bindViews(View layout) {
        this.headerContainer = (ViewGroup) layout.findViewById(R.id.above_container);
        this.footerContainer = (ViewGroup) layout.findViewById(R.id.below_container);
        this.sharableAppList = (RecyclerView) layout.findViewById(R.id.app_list);

        // The default title view is only looked up when no custom header replaces it.
        if (headerLayoutID == null || headerLayoutID == 0)
            this.titleView = (TextView) layout.findViewById(R.id.title);
    }

    /** Maps the configured {@link SizeType} to width/height screen ratios. */
    private Ratio checkDialogRatioSize() {
        final Ratio ratio;
        switch (sizeType) {
            case FILL_WIDTH:
                ratio = new Ratio(1.0, 0.6);
                break;
            case WINDOWED:
                ratio = new Ratio(0.8, 0.6);
                break;
            default:
                ratio = new Ratio(1.0, 0.6);
                break;
        }
        return ratio;
    }

    private void setupAdapter() {
        displayType = checkDisplayType();

        this.adapter = new ShareItemsAdapter(getContext(), displayType);
        this.adapter.setCallbackListener(this);
    }

    /** Resolves orientation + list flags into a single {@link DisplayType}. */
    private DisplayType checkDisplayType() {
        final DisplayType type;
        if (isHorizontal && !showAsList)
            type = DisplayType.HORIZONTAL;
        else if (!isHorizontal && showAsList)
            type = DisplayType.LIST;
        else
            type = DisplayType.DEFAULT;
        return type;
    }

    private void setupTitle() {
        if (titleView == null)
            return;

        if (dialogTitleTintColor != null && dialogTitleTintColor != 0)
            titleView.setTextColor(dialogTitleTintColor);

        if (dialogTitleTintBackground != null)
            titleView.setBackgroundColor(dialogTitleTintBackground);

        if (dialogTitle != null && !dialogTitle.equals(""))
            titleView.setText(dialogTitle);
        else
            titleView.setText(R.string.default_dialog_title);
    }

    private void setupRecyclerView() {
        if (sharableAppList == null)
            return;

        checkNumberOfRows();
        int managerOrientation = checkManagerOrientation();

        RecyclerView.LayoutManager layoutManager =
                new GridLayoutManager(getContext(), numberOfRows, managerOrientation, false);
        sharableAppList.setLayoutManager(layoutManager);
        sharableAppList.setAdapter(adapter);
    }

    /** Fills in a sensible default row count when none was configured. */
    private void checkNumberOfRows() {
        switch (displayType) {
            case DEFAULT:
            case HORIZONTAL:
                if (numberOfRows == null || numberOfRows == 0) {
                    numberOfRows = 4;
                    // Fewer rows in landscape so horizontal items stay readable.
                    if (getActivity().getResources().getConfiguration().orientation
                            == Configuration.ORIENTATION_LANDSCAPE && isHorizontal)
                        numberOfRows = 2;
                }
                break;
            case LIST:
                numberOfRows = 1;
                break;
        }
    }

    private int checkManagerOrientation() {
        if (isHorizontal)
            return LinearLayoutManager.HORIZONTAL;
        else
            return LinearLayoutManager.VERTICAL;
    }

    /** Fires the share intent at the application the user picked. */
    private void shareContent(ShareActionModel model) {
        String intentAction = Intent.ACTION_SEND;
        if (shareTextListContent != null) {
            intentAction = Intent.ACTION_SEND_MULTIPLE;
        }

        Intent intent = new Intent(intentAction);
        if (model.getAppInfo() != null)
            intent.setComponent(new ComponentName(model.getAppInfo().activityInfo.packageName,
                    model.getAppInfo().activityInfo.name));
        intent.setType(shareType);

        if (shareTextListContent != null) {
            // NOTE(review): EXTRA_STREAM is normally used for Uri payloads; for a
            // list of text values EXTRA_TEXT may be expected by receivers -- confirm.
            intent.putStringArrayListExtra(Intent.EXTRA_STREAM, shareTextListContent);
        } else {
            intent.putExtra(Intent.EXTRA_TEXT, shareTextContent);
        }

        startActivity(intent);
    }

    /**
     * Queries the package manager for activities able to handle an ACTION_SEND
     * intent of the configured share type.
     *
     * @return models wrapping each matching {@link ResolveInfo}
     */
    protected ArrayList<ShareActionModel> getSharableApps() {
        PackageManager pm = getActivity().getPackageManager();
        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.setType(shareType);
        List<ResolveInfo> apps = pm.queryIntentActivities(intent, PackageManager.GET_META_DATA);

        ArrayList<ShareActionModel> shareActionModels = new ArrayList<>();
        for (ResolveInfo resolveInfo : apps) {
            shareActionModels.add(new ShareActionModel(resolveInfo));
        }
        return shareActionModels;
    }

    public void setShareContent(String shareTextContent) {
        this.shareTextContent = shareTextContent;
    }

    /**
     * Not supported in current version of {@link ShareDialog}
     *
     * @param shareListContent {@link ArrayList} of strings that should be shared
     */
    public void setShareContent(ArrayList<String> shareListContent) {
        this.shareTextListContent = shareListContent;
    }

    /**
     * {@link Builder} is responsible for setting up parameters of {@link ShareDialog}
     */
    public static class Builder {
        private static final String TAG_TYPE_TEXT = "text";
        private static final String TAG_TITLE = "title";
        private static final String TAG_TITLE_TINT = "titleTintColor";
        private static final String TAG_TITLE_BACKGROUND = "titleBackgroundColor";
        private static final String TAG_LAYOUT_HEADER = "layoutHeader";
        private static final String TAG_LAYOUT_FOOTER = "layoutFooter";
        private static final String TAG_ROWS_NUMBER = "numberOfRows";
        private static final String TAG_ORIENTATION_TAG = "orientation";
        private static final String TAG_TEXT_CONTENT = "simpleTextContent";
        private static final String TAG_TEXT_LIST_CONTENT = "textListContent";
        private static final String TAG_LIST_FORM = "listForm";

        private String type;
        private String title;
        private Integer titleTintColor;
        private Integer titleBackgroundColor;
        private Integer headerLayoutId;
        private Integer footerLayoutId;
        private Integer numberOfSections;
        private boolean isHorizontal = false;
        private boolean showAsList = false;

        /**
         * Allows to specify the MIME type of the shared content.
         *
         * @param type {@link String} value that represent type of sharing content. For example: "text/*"
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setType(String type) {
            this.type = type;
            return this;
        }

        /**
         * Allows to specifies title of {@link ShareDialog}. Methods works only for default header.
         * If custom header is set this method will not make changes on header
         *
         * @param title {@link String} value that represent title
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setTitle(String title) {
            this.title = title;
            return this;
        }

        /**
         * Allows to set color of default header title
         *
         * @param color {@link Integer} value that represents selected color
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setTitleTintColor(int color) {
            this.titleTintColor = color;
            return this;
        }

        /**
         * Allows to set background color of default header. This method do not take impact
         * if custom layout was added.
         *
         * @param color {@link Integer} value that represents selected color
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setTitleBackgroundColor(int color) {
            this.titleBackgroundColor = color;
            return this;
        }

        /**
         * Sets provided layout as header.
         *
         * @param layoutID {@link Integer} value that represent custom layout
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setHeaderLayout(int layoutID) {
            this.headerLayoutId = layoutID;
            return this;
        }

        /**
         * Sets provided layout as footer
         *
         * @param layoutID {@link Integer} value that represent custom layout
         * @return instance of currently created {@link ShareDialog.Builder}
         */
        public Builder setFooterLayout(int layoutID) {
            this.footerLayoutId = layoutID;
            return this;
        }

        /**
         * Not supported in current version
         *
         * @param numberOfSections number of elements sections
         * @return current instance of {@link ShareDialog.Builder}
         */
        private Builder setSectionNumber(Integer numberOfSections) {
            this.numberOfSections = numberOfSections;
            return this;
        }

        /**
         * Provides possibility to change orientation of {@link ShareDialog} {@link RecyclerView}
         *
         * @param isHorizontal {@link Boolean} flag that informs library to provide for user
         *                     {@link ShareDialog} in required orientation. Can be
         *                     switched between Vertical and Horizontal
         * @return current instance of {@link ShareDialog.Builder}
         */
        public Builder changeOrientation(boolean isHorizontal) {
            this.isHorizontal = isHorizontal;
            return this;
        }

        /**
         * Provides possibility to change items presentation from grid to list
         *
         * @param showAsList {@link Boolean} flag that informs library what kind of items styles is
         *                   required. Can be switched between Grid and List
         * @return current instance of {@link ShareDialog.Builder}
         */
        public Builder showAsList(boolean showAsList) {
            this.showAsList = showAsList;
            return this;
        }

        /**
         * Collects all set properties and build new instance of {@link ShareDialog}
         *
         * @return {@link ShareDialog}
         */
        public ShareDialog build() {
            Bundle args = new Bundle();
            // Fix: previously the constant TYPE_TEXT was stored here, silently
            // discarding the value supplied via setType(String).
            if (type != null)
                args.putString(TAG_TYPE_TEXT, type);
            if (title != null)
                args.putString(TAG_TITLE, title);
            if (titleTintColor != null)
                args.putInt(TAG_TITLE_TINT, titleTintColor);
            if (titleBackgroundColor != null)
                args.putInt(TAG_TITLE_BACKGROUND, titleBackgroundColor);
            if (headerLayoutId != null)
                args.putInt(TAG_LAYOUT_HEADER, headerLayoutId);
            if (footerLayoutId != null)
                args.putInt(TAG_LAYOUT_FOOTER, footerLayoutId);
            if (numberOfSections != null && numberOfSections > 0)
                args.putInt(TAG_ROWS_NUMBER, numberOfSections);

            args.putBoolean(TAG_ORIENTATION_TAG, isHorizontal);
            args.putBoolean(TAG_LIST_FORM, showAsList);

            return ShareDialog.newInstance(args);
        }
    }

    /** Simple width/height ratio pair used for sizing the dialog window. */
    class Ratio {
        double x;
        double y;

        Ratio(double x, double y) {
            this.x = x;
            this.y = y;
        }
    }
}
package org.dynalang.dynalink.beans;

import static org.dynalang.dynalink.beans.TestBeansLinker.createCallSiteDescriptor;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.dynalang.dynalink.DynamicLinker;
import org.dynalang.dynalink.DynamicLinkerFactory;
import org.dynalang.dynalink.linker.CallSiteDescriptor;
import org.dynalang.dynalink.linker.GuardedInvocation;
import org.dynalang.dynalink.linker.GuardingDynamicLinker;
import org.dynalang.dynalink.support.LinkRequestImpl;

import junit.framework.TestCase;

/**
 * Tests the {@code dyn:getLength} operation (and the {@code dyn:getProp:length}
 * property form) against arrays, collections, and maps, both when the call site
 * type allows early binding (no guard needed) and when late binding forces
 * relinking between receiver categories.
 */
public class TestLengthGetter extends TestCase {

    /**
     * Early-bound length getter must work for every primitive and reference
     * array type when the call site's static type is the exact array class.
     */
    public void testEarlyBoundArrayLengthGetter() throws Throwable {
        testEarlyBoundArrayLengthGetter(byte[].class);
        testEarlyBoundArrayLengthGetter(short[].class);
        testEarlyBoundArrayLengthGetter(char[].class);
        testEarlyBoundArrayLengthGetter(int[].class);
        testEarlyBoundArrayLengthGetter(long[].class);
        testEarlyBoundArrayLengthGetter(float[].class);
        testEarlyBoundArrayLengthGetter(double[].class);
        testEarlyBoundArrayLengthGetter(Object[].class);
        testEarlyBoundArrayLengthGetter(String[].class);
    }

    /**
     * Helper that asks the linker for a guarded invocation for the given
     * descriptor and sample arguments (no link-request services needed here).
     */
    private static GuardedInvocation getGuardedInvocation(GuardingDynamicLinker linker,
            CallSiteDescriptor descriptor, Object... args) throws Exception {
        return linker.getGuardedInvocation(new LinkRequestImpl(descriptor, args), null);
    }

    private static void testEarlyBoundArrayLengthGetter(Class<?> arrayClass) throws Throwable {
        testEarlyBoundArrayLengthGetter(arrayClass, "getLength", true);
    }

    /**
     * Links a length getter for {@code arrayClass} using operation {@code op}
     * and checks the returned handle reports the length of a 2-element array.
     *
     * @param arrayClass static receiver type of the simulated call site
     * @param op         operation name appended to the "dyn:" prefix
     * @param early      when true, the invocation must be unguarded (early bound)
     */
    private static void testEarlyBoundArrayLengthGetter(Class<?> arrayClass, String op, boolean early)
            throws Throwable {
        final BeansLinker bl = new BeansLinker();
        final CallSiteDescriptor csd = createCallSiteDescriptor("dyn:" + op,
                MethodType.methodType(int.class, arrayClass));
        final Object array = Array.newInstance(arrayClass.getComponentType(), 2);
        final GuardedInvocation inv = getGuardedInvocation(bl, csd, array);
        if(early) {
            // early bound, as call site guarantees we'll pass an array
            assertNull(inv.getGuard());
        }
        final MethodHandle mh = inv.getInvocation();
        assertNotNull(mh);
        assertEquals(csd.getMethodType(), mh.type());
        assertEquals(2, mh.invokeWithArguments(array));
    }

    private static void testArrayLengthPropertyGetter(Class<?> arrayClass) throws Throwable {
        // property form goes through the generic property getter, so it is guarded
        testEarlyBoundArrayLengthGetter(arrayClass, "getProp:length", false);
    }

    /**
     * The "length" property form must also resolve for every array type.
     *
     * FIX: this was declared {@code public static void}; JUnit 3 discovers test
     * methods as public instance {@code void test*()} methods, so the static
     * modifier was inconsistent with the other tests and runner-fragile. Made it
     * an instance method like its siblings.
     */
    public void testArrayLengthPropertyGetter() throws Throwable {
        testArrayLengthPropertyGetter(byte[].class);
        testArrayLengthPropertyGetter(short[].class);
        testArrayLengthPropertyGetter(char[].class);
        testArrayLengthPropertyGetter(int[].class);
        testArrayLengthPropertyGetter(long[].class);
        testArrayLengthPropertyGetter(float[].class);
        testArrayLengthPropertyGetter(double[].class);
        testArrayLengthPropertyGetter(Object[].class);
        testArrayLengthPropertyGetter(String[].class);
    }

    /**
     * A call site statically typed as {@link List} must get an unguarded
     * (early-bound) size-based length getter.
     */
    public void testEarlyBoundCollectionLengthGetter() throws Throwable {
        final BeansLinker bl = new BeansLinker();
        final CallSiteDescriptor csd = createCallSiteDescriptor("dyn:getLength",
                MethodType.methodType(int.class, List.class));
        final GuardedInvocation inv = getGuardedInvocation(bl, csd, Collections.EMPTY_LIST);
        assertNull(inv.getGuard());
        final MethodHandle mh = inv.getInvocation();
        assertNotNull(mh);
        assertEquals(csd.getMethodType(), mh.type());
        // explicit Object[] wrapping avoids the invokeWithArguments(List) overload
        assertEquals(0, mh.invokeWithArguments(new Object[] { Collections.EMPTY_LIST }));
        assertEquals(2, mh.invokeWithArguments(new Object[] { Arrays.asList(new Object[] { "a", "b" }) }));
    }

    /**
     * A call site statically typed as {@link Map} must get an unguarded
     * (early-bound) size-based length getter.
     */
    public void testEarlyBoundMapLengthGetter() throws Throwable {
        final BeansLinker bl = new BeansLinker();
        final CallSiteDescriptor csd = createCallSiteDescriptor("dyn:getLength",
                MethodType.methodType(int.class, Map.class));
        final GuardedInvocation inv = getGuardedInvocation(bl, csd, Collections.EMPTY_MAP);
        assertNull(inv.getGuard());
        final MethodHandle mh = inv.getInvocation();
        assertNotNull(mh);
        assertEquals(csd.getMethodType(), mh.type());
        assertEquals(0, mh.invokeWithArguments(Collections.EMPTY_MAP));
        assertEquals(1, mh.invokeWithArguments(Collections.singletonMap("a", "b")));
    }

    /**
     * With an {@link Object}-typed call site the linker must relink when the
     * receiver category changes (array -> collection -> map -> array) but NOT
     * within a category, since one guarded handle covers all arrays, all
     * collections, and all maps respectively.
     */
    public void testLateBoundLengthGetter() throws Throwable {
        final DynamicLinker linker = new DynamicLinkerFactory().createLinker();
        final RelinkCountingCallSite callSite = new RelinkCountingCallSite("dyn:getLength",
                MethodType.methodType(int.class, Object.class));
        linker.link(callSite);
        assertEquals(0, callSite.getRelinkCount());
        MethodHandle callSiteInvoker = callSite.dynamicInvoker();
        assertEquals(2, callSiteInvoker.invokeWithArguments(new int[2]));
        assertEquals(1, callSite.getRelinkCount());
        assertEquals(3, callSiteInvoker.invokeWithArguments(new Object[] { new Object[3] }));
        // No relink - length getter applies to all array classes
        assertEquals(1, callSite.getRelinkCount());
        assertEquals(4, callSiteInvoker.invokeWithArguments(new long[4]));
        // Still no relink
        assertEquals(1, callSite.getRelinkCount());
        assertEquals(5, callSiteInvoker.invokeWithArguments(new Object[] { Arrays.asList(new Object[5]) }));
        // Relinked for collections
        assertEquals(2, callSite.getRelinkCount());
        assertEquals(0, callSiteInvoker.invokeWithArguments(new HashSet<Object>()));
        // No relink for various collection types
        assertEquals(2, callSite.getRelinkCount());
        assertEquals(1, callSiteInvoker.invokeWithArguments(Collections.singletonMap("1", "2")));
        // Relinked for maps
        assertEquals(3, callSite.getRelinkCount());
        assertEquals(0, callSiteInvoker.invokeWithArguments(new HashMap<Object, Object>()));
        // No relink for various map types
        assertEquals(3, callSite.getRelinkCount());
        assertEquals(6, callSiteInvoker.invokeWithArguments(new long[6]));
        // Relinked again for arrays
        assertEquals(4, callSite.getRelinkCount());
    }
}
package net.wasdev.gameon.room; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Scanner; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.atomic.AtomicBoolean; public class Engine { private static class ItemDesc { String name; String description; boolean takeable; boolean clearStateOnDrop; String state=""; interface CommandHandler{ public void processCommand(ItemDesc item, String execBy, String cmd, Room room); } CommandHandler handler; public ItemDesc(String name, String description){ this.name=name; this.description=description;this.takeable=false;this.clearStateOnDrop=true; } public ItemDesc(String name, String description, boolean takeable, boolean clearStateOnDrop){ this.name=name; this.description=description;this.takeable=takeable;this.clearStateOnDrop=clearStateOnDrop; } public ItemDesc(String name, String description, boolean takeable){ this.name=name; this.description=description;this.takeable=takeable;this.clearStateOnDrop=true; } public ItemDesc(String name, String description, boolean takeable, boolean clearStateOnDrap, CommandHandler handler){ this.name=name; this.description=description;this.takeable=takeable;this.handler=handler;this.clearStateOnDrop=clearStateOnDrap; } } private static class ContainerDesc extends ItemDesc{ Collection<ItemDesc> items; Collection<ItemDesc> defaultItems; //if you can't access the container, you don't get to know whats in it. //it may be out of reach, closed, or locked. 
interface AccessVerificationHandler{ public boolean verifyAccess(ItemDesc item, String execBy, Room room); } AccessVerificationHandler access; public ContainerDesc(String name, String description, boolean takeable,boolean clearStateOnDrop,ItemDesc[] items,AccessVerificationHandler access){ super(name,description,takeable,clearStateOnDrop); this.access=access; this.items=new CopyOnWriteArraySet<ItemDesc>(Arrays.asList(items)); this.defaultItems = Collections.unmodifiableSet(new HashSet<ItemDesc>(this.items)); } public ContainerDesc(String name, String description, boolean takeable,boolean clearStateOnDrop,ItemDesc[] items,AccessVerificationHandler access, CommandHandler handler){ super(name,description,takeable,clearStateOnDrop,handler); this.access=access; this.items=new CopyOnWriteArraySet<ItemDesc>(Arrays.asList(items)); this.defaultItems = Collections.unmodifiableSet(new HashSet<ItemDesc>(this.items)); } public ContainerDesc(String name, String description, boolean takeable,boolean clearStateOnDrop,ItemDesc[] items,CommandHandler handler){ super(name,description,takeable,clearStateOnDrop,handler); this.items=new CopyOnWriteArraySet<ItemDesc>(Arrays.asList(items)); this.defaultItems = Collections.unmodifiableSet(new HashSet<ItemDesc>(this.items)); } public ContainerDesc(String name, String description, boolean takeable,boolean clearStateOnDrop,ItemDesc[] items){ super(name,description,takeable,clearStateOnDrop); this.items=new CopyOnWriteArraySet<ItemDesc>(Arrays.asList(items)); this.defaultItems = Collections.unmodifiableSet(new HashSet<ItemDesc>(this.items)); } } private static class RoomDesc { String id; String name; String description; Collection<ItemDesc> items; Collection<ItemDesc> defaultItems; Collection<String> exits; Collection<String> defaultExits; public RoomDesc(String id, String name, String description, ItemDesc[] items, String[] exits){ this.id=id; this.name=name; this.description=description; this.items=new 
CopyOnWriteArraySet<ItemDesc>(Arrays.asList(items)); this.exits=new CopyOnWriteArraySet<String>(Arrays.asList(exits)); this.defaultItems = Collections.unmodifiableSet(new HashSet<ItemDesc>(this.items)); this.defaultExits = Collections.unmodifiableSet(new HashSet<String>(this.exits)); } } private static class User { String id; String username; Collection<ItemDesc> inventory; public User(String id, String username){ this.id=id; this.username=username; this.inventory=new HashSet<ItemDesc>(); } } private interface Command { public String getVerb(); public abstract void process(String execBy, String cmd, Room room); } private static ItemDesc findItemInInventory(String itemName, User execBy){ for(ItemDesc item : execBy.inventory){ if(item.name.equalsIgnoreCase(itemName)){ return item; } } return null; } private static ItemDesc findItemInRoom(String itemName, Room room){ for(ItemDesc item : room.roomDesc.items){ if(item.name.equalsIgnoreCase(itemName)){ return item; } } return null; } private static ItemDesc findItemInContainers(String itemName, Room room){ for(ItemDesc item : room.roomDesc.items){ if(item instanceof ContainerDesc){ ContainerDesc box = (ContainerDesc)item; for(ItemDesc boxItem : box.items){ if(boxItem.name.equalsIgnoreCase(itemName)){ return boxItem; } } } } return null; } private static ItemDesc findItemInRoomOrInventory(User execBy, String itemName, Room room){ ItemDesc result = findItemInRoom(itemName, room); if(result==null) result = findItemInInventory(itemName, execBy); return result; } private static String getCommandWithoutVerbAsString(String cmd){ Scanner s = new Scanner(cmd); //skip the verb.. s.next(); StringBuilder builder = new StringBuilder(); boolean first=true; while(s.hasNext()){ if(!first) builder.append(" "); builder.append(s.next()); first=false; } return builder.toString(); } private static String getCommandWithoutVerbAndItemAsString(String cmd, ItemDesc item){ Scanner s = new Scanner(cmd); //skip the verb.. 
s.next(); StringBuilder builder = new StringBuilder(); boolean first=true; while(s.hasNext()){ if(!first) builder.append(" "); builder.append(s.next().toUpperCase()); if(builder.toString().equals(item.name.toUpperCase())){ builder = new StringBuilder(); } first=false; } return builder.toString(); } private static String getItemNameFromCommand(String cmd, Room room, User execBy){ List<String> allItems = new ArrayList<String>(); for(ItemDesc item : room.roomDesc.items){ allItems.add(item.name.trim().toUpperCase()); if(item instanceof ContainerDesc){ ContainerDesc box = (ContainerDesc)item; for(ItemDesc boxItem: box.items){ allItems.add(boxItem.name.trim().toUpperCase()); } } } for(ItemDesc item : execBy.inventory){ allItems.add(item.name.trim().toUpperCase()); if(item instanceof ContainerDesc){ ContainerDesc box = (ContainerDesc)item; for(ItemDesc boxItem: box.items){ allItems.add(boxItem.name.trim().toUpperCase()); } } } //sort so we process longer item names first =) Collections.sort(allItems, new Comparator<String>() { @Override public int compare(String o1, String o2) { int o1len = o1.length(); int o2len = o2.length(); if(o1len>o2len){ return -1; }else if(o1len<o2len){ return 1; }else return o1.compareTo(o2); } }); String uCmd = cmd.toUpperCase(); for(String item: allItems){ if(uCmd.startsWith(item)) return item; } return null; } private static String getFirstWordFromCommand(String cmd){ Scanner s = new Scanner(cmd); if(s.hasNext()){ return s.next().trim().toUpperCase(); }else{ return null; } } public static class Room { RoomDesc roomDesc; Map<String,User> userMap = new ConcurrentHashMap<String,User>(); Map<String,Command> commandMap = new HashMap<String,Command>(); interface RoomResponseProcessor { //"Player message :: from("+senderId+") onlyForSelf("+String.valueOf(selfMessage)+") others("+String.valueOf(othersMessage)+")" public void playerEvent(String senderId, String selfMessage, String othersMessage); //"Message sent to everyone :: "+s public void 
roomEvent(String s); public void locationEvent(String senderId, String roomName, String roomDescription, Object exits, List<String>objects, List<String>inventory); } public static class DebugResponseProcessor implements RoomResponseProcessor { public void playerEvent(String senderId, String selfMessage, String othersMessage){ System.out.println("Player message :: from("+senderId+") onlyForSelf("+String.valueOf(selfMessage)+") others("+String.valueOf(othersMessage)+")"); } public void roomEvent(String s){ System.out.println("Message sent to everyone :: "+s); } public void locationEvent(String senderId, String roomName, String roomDescription, Object exits, List<String>objects, List<String>inventory){ System.out.println("Location: "+roomName+" (For "+senderId+") "+roomDescription); if(!objects.isEmpty()){ System.out.println("You can see the following items: "+objects); } if(!inventory.isEmpty()){ System.out.println("You are carrying "+inventory); } } } static RoomResponseProcessor rrp = new DebugResponseProcessor(); public Room(RoomDesc r, List<Command> globalCommands){ roomDesc = r; for(Command c: globalCommands){ commandMap.put(c.getVerb(), c); } } public void locationEvent(String senderId, String roomName, String roomDescription, Object exits, List<String>objects, List<String>inventory){ rrp.locationEvent(senderId, roomName, roomDescription, exits, objects, inventory); } public void playerEvent(String senderId, String selfMessage, String othersMessage){ rrp.playerEvent(senderId,selfMessage,othersMessage); } public void roomEvent(String s){ rrp.roomEvent(s); } public void setRoomResponseProcessor(RoomResponseProcessor rrp){ Room.rrp = rrp; } public void addUserToRoom(String id, String username){ User u = new User(id,username); if(!userMap.containsKey(id)){ userMap.put(id, u); this.roomEvent(u.username+" enters the room."); } } public void removeUserFromRoom(String id){ if(userMap.containsKey(id)){ User u = userMap.get(id); //drop all items in the users inventory 
when they leave. Iterator<ItemDesc> itemIter = u.inventory.iterator(); while(itemIter.hasNext()){ ItemDesc item = itemIter.next(); //add to the room this.roomDesc.items.add(item); //remove from the user. itemIter.remove(); this.playerEvent(id, "You drop the "+item.name, u.username+" drops the "+item.name); } userMap.remove(id); this.roomEvent(u.username+" leaves the room."); }else{ System.out.println("Unable to remove "+id+" from room "+roomDesc.id+" because user is not known to the room"); } } public void command(String userid, String cmd){ String command = getFirstWordFromCommand(cmd); if(command!=null){ Command c = commandMap.get(command); if(c!=null){ c.process(userid, cmd, this); }else{ playerEvent(userid,"\"I'm sorry dave, I don't know how to do that.\"",null); } }else{ playerEvent(userid,"You feel a disturbance in the force.",null); } } public String getRoomId(){ return roomDesc.id; } public String getRoomName(){ return roomDesc.name; } public String getRoomDescription(){ return roomDesc.description; } } private static class Look implements Command { public Look(){ } public String getVerb(){ return "LOOK"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ //did they do /look ? or /look object or /look at object ? String restOfCommand = getCommandWithoutVerbAsString(cmd); if(restOfCommand.length()==0){ List<String> invItems = new ArrayList<String>(); List<String> roomItems = new ArrayList<String>(); for(ItemDesc i : room.roomDesc.items){ roomItems.add(i.name); } for(ItemDesc i : u.inventory){ invItems.add(i.name); } room.locationEvent(execBy, room.roomDesc.id, room.roomDesc.description, null, roomItems, invItems); }else{ //priority goes to looking if we can match an item next.. 
in case anyone adds an "AT AT" as an item ;p String item = getItemNameFromCommand(restOfCommand, room, u); if(item==null){ String nextWord = getFirstWordFromCommand(restOfCommand); if("AT".equalsIgnoreCase(nextWord)){ restOfCommand = getCommandWithoutVerbAsString(restOfCommand); item = getItemNameFromCommand(restOfCommand, room, u); } } if(item!=null){ //delegate to examine. Examine examineCommand = new Examine(); examineCommand.process(execBy, "EXAMINE "+item, room); }else{ room.playerEvent(execBy, "You pull out your magnifying glass to look at '"+restOfCommand+"' but realise you have no idea what that is.", null); } } } } } private static class Inventory implements Command { public Inventory(){ } public String getVerb(){ return "INVENTORY"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ if(u.inventory.isEmpty()){ room.playerEvent(execBy,"You do not appear to be carrying anything.",null); }else{ StringBuilder sb = new StringBuilder(); sb.append("You are carrying; "); boolean first=true; for(ItemDesc item : u.inventory){ if(!first)sb.append(", "); sb.append(item.name); first=false; } room.playerEvent(execBy,sb.toString(),null); } } } } private static class Take implements Command { public Take(){ } public String getVerb(){ return "TAKE"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ String restOfCommand = getCommandWithoutVerbAsString(cmd); String itemName = getItemNameFromCommand(restOfCommand, room, u); //see if we can find the item in the room. ItemDesc item = findItemInRoom(itemName, room); if(item!=null){ restOfCommand = getCommandWithoutVerbAndItemAsString(cmd, item).trim(); if("".equals(restOfCommand.trim())){ if(item.takeable){ //we have found a match! //add to the player u.inventory.add(item); //remove from the room. 
//(using copy on write, so it's safe to call remove on list, else we'd call remove on the iter) room.roomDesc.items.remove(item); room.playerEvent(execBy, "You pick up the "+item.name, u.username+" picks up the "+item.name); }else{ room.playerEvent(execBy, "You try really hard to pick up the "+item.name+" but it's just too tiring.", u.username+" tries to pick up the "+item.name+" and fails."); } }else{ room.playerEvent(execBy, "You reach out to take the "+item.name+" but then are confused by what you meant by '"+restOfCommand+"' so leave it there instead.",null); } }else{ //item was not in room.. this gets a little messy.. as we now need to find items inside containers item = findItemInContainers(itemName, room); if(item!=null){ restOfCommand = getCommandWithoutVerbAndItemAsString(cmd, item); String nextWord = getFirstWordFromCommand(restOfCommand); if("FROM".equalsIgnoreCase(nextWord)){ //skip from restOfCommand = getCommandWithoutVerbAsString(restOfCommand); //from what? String otherItemName = getItemNameFromCommand(restOfCommand, room, u); if(otherItemName!=null){ ItemDesc otherItem = findItemInRoomOrInventory(u, otherItemName, room); if(otherItem instanceof ContainerDesc){ ContainerDesc box = (ContainerDesc)otherItem; //access check.. 
boolean accessAllowed = true; if(box.access !=null ){ accessAllowed = box.access.verifyAccess(box, execBy, room); } if(accessAllowed){ if(box.items.contains(item)){ room.playerEvent(execBy, "You take the "+item.name+" from the "+otherItem.name, u.username+" takes the "+item.name+" from the "+otherItem.name); box.items.remove(item); u.inventory.add(item); }else{ room.playerEvent(execBy, "You look in the "+otherItem.name+" but the "+item.name+" does not appear to be there to take.", null); } }else{ room.playerEvent(execBy, "You appear unable to take things from "+box.name, null); } }else{ room.playerEvent(execBy, "The "+otherItemName+" doesn't look like the kind of thing you should be rummaging around inside.", null); } }else{ room.playerEvent(execBy, "I'm really not sure where to find '"+restOfCommand+"' to do that with", null); } }else{ String originalInputWithoutCommand = getCommandWithoutVerbAsString(cmd); room.playerEvent(execBy, "You reach out to take the "+originalInputWithoutCommand+" but then are confused by what you meant by '"+restOfCommand+"' so leave it there instead.",null); } }else{ if(restOfCommand.trim().length()>0){ room.playerEvent(execBy, "You search for the "+restOfCommand+" to pick up, but cannot seem to locate it anywhere!",null); }else{ room.playerEvent(execBy, "Here is a list of words that rhyme with Take, Rake, Lake, Bake, Fake. If you wish to pick up an item, you need to say which item you wish to Take.",null); } } } }else{ System.out.println("Cannot process take command for user "+execBy+" as they are not known to the room"); } } } private static class Drop implements Command { public Drop(){ } public String getVerb(){ return "DROP"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ String itemName = getCommandWithoutVerbAsString(cmd); //see if we can find the item in the user. 
ItemDesc item = findItemInInventory(itemName, u); if(item!=null){ //add to the room room.roomDesc.items.add(item); //remove from the user. //(using copy on write, so it's safe to call remove on list, else we'd call remove on the iter) u.inventory.remove(item); //if the item requested it.. clear its state. if(item.clearStateOnDrop){ item.state=""; } room.playerEvent(execBy, "You drop the "+item.name, u.username+" drops the "+item.name); }else{ if(itemName.trim().length()>0){ room.playerEvent(execBy, "You try to drop the "+itemName+" but it appears you don't actually have one of those.", null); }else{ room.playerEvent(execBy, "The bassline drops away.. leaving the crowd without a beat. Alternatively, specify which item you wish to drop next time.", null); } } }else{ System.out.println("Cannot process drop command for user "+execBy+" as they are not known to the room"); } } } private static class Quit implements Command { public Quit(){ } public String getVerb(){ return "QUIT"; } public void process(String execBy, String cmd, Room room){ room.playerEvent(execBy,"QUIT?? HA! You can NEVER leave !!! 
Muahahah!",null); } } private static class Examine implements Command { public Examine(){ } public String getVerb(){ return "EXAMINE"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ String itemName = getCommandWithoutVerbAsString(cmd); //see if we can find the item in the room or inventory ItemDesc item = findItemInRoomOrInventory(u, itemName, room); if(item!=null){ if(item instanceof ContainerDesc){ ContainerDesc box = (ContainerDesc)item; StringBuilder result = new StringBuilder(); result.append(item.description); result.append(" "); boolean accessAllowed = true; if(box.access !=null ){ accessAllowed = box.access.verifyAccess(box, execBy, room); } if(accessAllowed){ if(box.items.isEmpty()){ result.append("The "+box.name+" appears to be empty."); }else{ result.append("There appear to be the following items inside the "+box.name); List<String> itemNames = new ArrayList<String>(); for(ItemDesc i : box.items){ itemNames.add(i.name); } result.append(itemNames.toString()); } }else{ result.append("Maybe there's something inside, you can't tell."); } room.playerEvent(execBy, result.toString(), null); }else{ room.playerEvent(execBy, item.description, null); } }else{ if(itemName.trim().length()==0){ room.playerEvent(execBy, "You want to examine what??!",null); }else{ room.playerEvent(execBy, "You search for the "+itemName+" to examine, but cannot seem to locate it anywhere!",null); } } }else{ System.out.println("Cannot process examine command for user "+execBy+" as they are not known to the room"); } } } private static class Use implements Command { public Use(){ } public String getVerb(){ return "USE"; } public void process(String execBy, String cmd, Room room){ User u = room.userMap.get(execBy); if(u!=null){ String restOfCommand = getCommandWithoutVerbAsString(cmd); String itemName = getItemNameFromCommand(restOfCommand, room, u); //see if we can find the item in the room or inventory ItemDesc item = 
findItemInRoomOrInventory(u, itemName, room); if(item==null){ if(restOfCommand.trim().length()==0){ room.playerEvent(execBy, "Normally, in these text adventurey things, you'd specify the item you wish to use, but you win a prize for being different.",null); }else{ room.playerEvent(execBy, "You search for the "+restOfCommand+" to use, but cannot seem to locate it anywhere!",null); } }else{ if(item.handler!=null){ item.handler.processCommand(item, execBy, cmd, room); }else{ room.playerEvent(execBy, "You stare confused at the "+itemName+" unsure quite what to do with it!",null); } } }else{ System.out.println("Cannot process use command for user "+execBy+" as they are not known to the room"); } } } private static class Help implements Command { public Help(){ } public String getVerb(){ return "HELP"; } public void process(String execBy, String cmd, Room room){ List<String> currentCmds = new ArrayList<String>(); for(Command c: globalCommands){ currentCmds.add(c.getVerb()); } room.playerEvent(execBy, "The following commands are supported: "+currentCmds,null); } } private static List<Command> globalCommands = Arrays.asList(new Command[]{new Look(), new Inventory(), new Drop(), new Take(), new Quit(), new Use(), new Examine(), new Help()}); String mugEmpty = "A Somewhat sturdy container for liquids, with a small handle."; String mugFull = "A Somewhat sturdy container for liquids, with a small handle, full of steaming hot coffee."; ItemDesc mug = new ItemDesc("Mug",mugEmpty,true,true,new ItemDesc.CommandHandler(){ @Override public void processCommand(ItemDesc item, String execBy, String cmd, Room room) { //allow use of mug with coffee machine User u = room.userMap.get(execBy); if(u!=null){ String restOfCmd = getCommandWithoutVerbAndItemAsString(cmd,item); String next = getFirstWordFromCommand(restOfCmd); if(next==null){ if(item.state.equals("full")){ room.playerEvent(execBy, "You drink the entire cup of coffee.",u.username+" drinks the mug of coffee."); item.description = 
mugEmpty; item.state="empty"; }else{ //note that default state is "" not "empty", so the else block works great here. room.playerEvent(execBy, "You place the mug on your head. Nothing Happens. You put it back.",null); } }else{ if("WITH".equals(next)){ restOfCmd = getCommandWithoutVerbAsString(restOfCmd); ItemDesc otherItem = findItemInRoom(restOfCmd, room); if(otherItem!=null){ if(otherItem.equals(coffeeMachine)){ ItemDesc playerMug = findItemInInventory(item.name, u); if(playerMug!=null){ if(playerMug.state.equals("full")){ room.playerEvent(execBy, "You attempt to fill the already full cup with more coffee. Coffee goes everywhere, you desperately clean up the coffee hoping nobody noticed.",u.username+" spills coffee all over the floor, then cleans it up."); }else{ room.playerEvent(execBy, "You make a hot cup of coffee.",u.username+" makes a mug of coffee."); playerMug.description = mugFull; playerMug.state="full"; } }else{ room.playerEvent(execBy, "You try to telepathically make ther mug interact with the coffee machine, and fail. Perhaps you should take the mug first?",null); } }else{ room.playerEvent(execBy, "You try several times to use the "+item.name+" with the "+otherItem.name+" but can't seem to figure out how.",null); } }else{ room.playerEvent(execBy, "You aren't quite sure what "+restOfCmd+" is, or how to use it with "+item.name,null); } }else{ room.playerEvent(execBy, "I'm sorry dave, I cannot do that!", null); } } }else{ //player not in room anymore. } }}); ItemDesc coffeeMachine = new ItemDesc("Coffee Machine","A machine for making coffee, it appears to be functional.",false,false,new ItemDesc.CommandHandler(){ @Override public void processCommand(ItemDesc item, String execBy, String cmd, Room room) { //allow use of coffee machine with mug User u = room.userMap.get(execBy); if(u!=null){ //remove the 'use itemname' String restOfCmd = getCommandWithoutVerbAndItemAsString(cmd,item); //is the next word 'with' ? 
String next = getFirstWordFromCommand(restOfCmd); if(next==null){ room.playerEvent(execBy, "You randomly press buttons on the coffee machine, hot liquid spills all over the floor, you mop it up, you decide that's probably not how this machine is supposed to be used.",u.username+" uses the coffee machine, spilling coffee everywhere, then quietly mops it up while mumbling about reading instruction manuals"); }else{ if("WITH".equals(next)){ //remove with.. restOfCmd = getCommandWithoutVerbAsString(restOfCmd); //assume rest of string is an item. ItemDesc otherItem = findItemInRoomOrInventory(u, restOfCmd, room); if(otherItem!=null){ if(otherItem.equals(mug)){ ItemDesc playerMug = findItemInInventory(restOfCmd, u); if(playerMug!=null){ if(playerMug.state.equals("full")){ room.playerEvent(execBy, "You attempt to fill the already full cup with more coffee. Coffee goes everywhere, you desperately clean up the coffee hoping nobody noticed.",u.username+" spills coffee all over the floor, then cleans it up."); }else{ room.playerEvent(execBy, "You make a hot cup of coffee.",u.username+" makes a mug of coffee."); playerMug.description = mugFull; playerMug.state="full"; } }else{ room.playerEvent(execBy, "You try to telepathically make the mug interact with the coffee machine, and fail. Perhaps you should take the mug first?",null); } }else{ room.playerEvent(execBy, "You try several times to use the "+item.name+" with the "+otherItem.name+" but can't seem to figure out how.",null); } }else{ room.playerEvent(execBy, "You aren't quite sure what "+restOfCmd+" is, or how to use it with "+item.name,null); } }else{ room.playerEvent(execBy, "I'm sorry dave, I cannot do that!", null); } } }else{ //player not in room anymore. 
} }}); ItemDesc stilettoHeels = new ItemDesc("Stilettos", "A bright red pair of six inch stilleto heels.",true,true,new ItemDesc.CommandHandler(){ @Override public void processCommand(ItemDesc item, String execBy, String cmd, Room room) { //allow use of heels with player (just use heels, must be in inventory) //allow use of heels with cupboard (if in inventory) //both will set state of heels to 'worn by playerid' //heels have 'clear state on drop' set, so if dropped, removes that state. //allow use of coffee machine with mug User u = room.userMap.get(execBy); if(u!=null){ boolean itemIsInInventory = findItemInInventory(item.name, u)!=null; //remove the 'use itemname' String restOfCmd = getCommandWithoutVerbAndItemAsString(cmd,item); //is the next word 'with' ? String next = getFirstWordFromCommand(restOfCmd); if(next==null){ //player tried to use heels, are they in player inventory? if(itemIsInInventory){ //yes, player has item in inventory room.playerEvent(execBy, "You look at the heels carefully, and realise they are just your size. You slip your feet into the shoes, and slowly stand up. You feel taller!",u.username+" wears the stilettos."); item.state = "wornby:"+u.id; }else{ //no, item is in room. room.playerEvent(execBy, "From here, it looks like they might be your size, but you can't be sure, perhaps if you picked them up?",null); } }else{ if("WITH".equals(next)){ //remove with.. restOfCmd = getCommandWithoutVerbAsString(restOfCmd); //assume rest of string is an item. ItemDesc otherItem = findItemInRoomOrInventory(u, restOfCmd, room); if(otherItem!=null){ if(otherItem.equals(cupboard)){ //player tried to use heels, are they in player inventory? if(itemIsInInventory){ //yes, player has item in inventory room.playerEvent(execBy, "You look at the heels carefully, and realise they are just your size. You slip your feet into the shoes, and slowly stand up. 
You feel tall enough to reach the cupboard now!",u.username+" wears the stilettos."); item.state = "wornby:"+u.id; }else{ //no, item is in room. room.playerEvent(execBy, "From here, it looks like they might be your size, but you can't be sure, perhaps if you picked them up?",null); } }else{ room.playerEvent(execBy, "You try several times to use the "+item.name+" with the "+otherItem.name+" but can't seem to figure out how.",null); } }else{ room.playerEvent(execBy, "You aren't quite sure what "+restOfCmd+" is, or how to use it with "+item.name,null); } }else{ room.playerEvent(execBy, "I'm sorry dave, I cannot do that!", null); } } }else{ //player not in room anymore. } }}); ItemDesc jukebox = new ContainerDesc("Jukebox", "A gaudy looking unit, it has seen better days.", false,false, new ItemDesc[]{},new ItemDesc.CommandHandler(){ //we really only want one jukebox to play at once ;p AtomicBoolean isPlaying = new AtomicBoolean(false); class JukeBoxPlayer implements Runnable { Room room; public JukeBoxPlayer(Room room){ this.room =room; } @Override public void run() { if(isPlaying.compareAndSet(false,true)){ try{ room.roomEvent("The jukebox sings \"Never gonna give you up.. \""); Thread.sleep(1000*10); room.roomEvent("The jukebox sings \"Never gonna let you down.. \""); Thread.sleep(1000*10); room.roomEvent("The jukebox sings \"Never gonna run around.. \""); Thread.sleep(1000*10); room.roomEvent("The jukebox sings \"And desert you.. \""); Thread.sleep(1000*5); room.roomEvent("The jukebox emits a bright arc of light, and a small puff of smoke.. and stops working."); Thread.sleep(1000*1); }catch(InterruptedException io){ //ignore. 
} ContainerDesc box = (ContainerDesc)jukebox; box.items.remove(fuse); ContainerDesc cupboardBox = (ContainerDesc)cupboard; cupboardBox.items.add(fuse); room.roomEvent("You experience an odd feeling of deja vu."); isPlaying.compareAndSet(true, false); } } } @Override public void processCommand(ItemDesc item, String execBy, String cmd, Room room) { //allow use of jukebox. //should only work if jukebox contains fuse User u = room.userMap.get(execBy); if(u!=null){ ContainerDesc jb = (ContainerDesc)item; //remove the 'use itemname' String restOfCmd = getCommandWithoutVerbAndItemAsString(cmd,item); //is the next word 'with' ? String next = getFirstWordFromCommand(restOfCmd); if(next==null){ if(jb.items.contains(fuse)){ room.playerEvent(execBy, "The jukebox plays music, you are so happy!",u.username+" makes the jukebox play music."); (new Thread(new JukeBoxPlayer(room))).start(); }else{ room.playerEvent(execBy, "The jukebox appears to be non functional, there's a large slot marked 15A that appears to be empty.",null); } }else{ if("WITH".equals(next)){ //remove with.. restOfCmd = getCommandWithoutVerbAsString(restOfCmd); //assume rest of string is an item. ItemDesc otherItem = findItemInRoomOrInventory(u, restOfCmd, room); if(otherItem!=null){ if(otherItem.equals(fuse)){ //is the fuse in the users inventory.. boolean itemIsInInventory = findItemInInventory(otherItem.name, u)!=null; if(itemIsInInventory){ //yes, player has item in inventory room.playerEvent(execBy, "You take the fuse, and insert it into the jukebox. Fingers crossed!",u.username+" installs the fuse into the jukebox."); jb.items.add(fuse); u.inventory.remove(fuse); }else{ //no, item is in room. 
room.playerEvent(execBy, "That fuse looks remarkably like it might fit in that jukebox, but the fuse is all the way over there, perhaps you should take the fuse first?",null); } }else{ room.playerEvent(execBy, "You try several times to use the "+item.name+" with the "+otherItem.name+" but can't seem to figure out how.",null); } }else{ room.playerEvent(execBy, "You aren't quite sure what "+restOfCmd+" is, or how to use it with "+item.name,null); } }else{ room.playerEvent(execBy, "I'm sorry dave, I cannot do that!", null); } } }else{ //player not in room anymore. } }}); ItemDesc fuse = new ItemDesc("Fuse","A small 5 amp cartridge fuse, it appears to be functional.",true,false,new ItemDesc.CommandHandler(){ @Override public void processCommand(ItemDesc item, String execBy, String cmd, Room room) { //allow use of fuse with jukebox //using fuse with jukebox should move it from player inv into jukebox. User u = room.userMap.get(execBy); if(u!=null){ //remove the 'use itemname' String restOfCmd = getCommandWithoutVerbAndItemAsString(cmd,item); //is the next word 'with' ? String next = getFirstWordFromCommand(restOfCmd); if(next==null){ room.playerEvent(execBy, "The thing about fuses, is they are not all that interesting, they don't play video games, and they don't have buttons to press.",null); }else{ if("WITH".equals(next)){ //remove with.. restOfCmd = getCommandWithoutVerbAsString(restOfCmd); //assume rest of string is an item. ItemDesc otherItem = findItemInRoomOrInventory(u, restOfCmd, room); if(otherItem!=null){ if(otherItem.equals(jukebox)){ //is the fuse in the users inventory.. boolean itemIsInInventory = findItemInInventory(item.name, u)!=null; if(itemIsInInventory){ //yes, player has item in inventory room.playerEvent(execBy, "You take the fuse, and insert it into the jukebox. 
Fingers crossed!",u.username+" installs the fuse into the jukebox."); ContainerDesc jb = (ContainerDesc)otherItem; jb.items.add(fuse); u.inventory.remove(fuse); }else{ //no, item is in room. room.playerEvent(execBy, "That fuse looks remarkably like it might fit in that jukebox, but the fuse is all the way over there, perhaps you should take the fuse first?",null); } }else{ room.playerEvent(execBy, "You try several times to use the "+item.name+" with the "+otherItem.name+" but can't seem to figure out how.",null); } }else{ room.playerEvent(execBy, "You aren't quite sure what "+restOfCmd+" is, or how to use it with "+item.name,null); } }else{ room.playerEvent(execBy, "I'm sorry dave, I cannot do that!", null); } } }else{ //player not in room anymore. } }}); ItemDesc cupboard = new ContainerDesc("Cupboard","A wall mounted cupboard above the Jukebox, it's just out of your reach.", false,false, new ItemDesc[]{fuse},new ContainerDesc.AccessVerificationHandler() { @Override public boolean verifyAccess(ItemDesc item, String execBy, Room room) { //only allow access if execBy player has item heels in inventory, and state is 'worn by execBy' User u = room.userMap.get(execBy); if(u!=null){ if(u.inventory.contains(stilettoHeels) && stilettoHeels.state.equals("wornby:"+u.id)){ return true; } } return false; }}); RoomDesc bar = new RoomDesc("RecRoom", "Rec Room","A dimly lit shabbily decorated room, that appears tired and dated. 
It looks like someone attempted to provide kitchen facilities here once, but you really wouldn't want to eat anything off those surfaces!", new ItemDesc[] {mug,coffeeMachine,stilettoHeels,jukebox,cupboard}, new String[]{}); Collection<Room> rooms = new ArrayList<Room>(Arrays.asList(new Room[]{new Room(bar,globalCommands)})); public Collection<Room> getRooms(){ return Collections.unmodifiableCollection(rooms); } private Engine(){ } private static final Engine engine = new Engine(); public static Engine getEngine(){ return engine; } public static void main(String[] args){ Engine e = new Engine(); Collection<Room> rooms = e.getRooms(); Room current = rooms.iterator().next(); //go interactive ;p System.out.println(" current.addUserToRoom("oz", "Ozzy"); //make player look at first room. current.command("oz", "LOOK"); Scanner input = new Scanner(System.in); String cmd = input.nextLine(); while(!"EXIT".equals(cmd.toUpperCase())){ current.command("oz", cmd); cmd = input.nextLine(); } current.removeUserFromRoom("oz"); } }
package org.fujaba.graphengine.unitTests;

import org.junit.Test;

import java.util.ArrayList;

import org.fujaba.graphengine.GraphEngine;
import org.fujaba.graphengine.Match;
import org.fujaba.graphengine.PatternEngine;
import org.fujaba.graphengine.graph.Graph;
import org.fujaba.graphengine.graph.Node;
import org.fujaba.graphengine.pattern.PatternAttribute;
import org.fujaba.graphengine.pattern.PatternEdge;
import org.fujaba.graphengine.pattern.PatternGraph;
import org.fujaba.graphengine.pattern.PatternNode;
import org.junit.Assert;

/**
 * This class is for testing Patterns.
 *
 * Most tests use the classic ferryman's problem (wolf/goat/cabbage) as the
 * fixture: graphs are built by the private helpers at the bottom of the class,
 * pattern rules describe the allowed moves, and the PatternEngine is exercised
 * for matching, match application and reachability-graph calculation.
 *
 * @author Philipp Kolodziej
 */
public class PatternTest {

	/**
	 * Round-trips the transport rule through Gson twice and compares the two
	 * serialized forms. Since there's no check for isomorphism between two
	 * PatternGraphs, this only works if the serialized String is the same.
	 */
	@Test
	public void testPatternSerialization() {
		PatternGraph patternGraph = getTransportRule();
		String toJson = GraphEngine.getGson().toJson(patternGraph); // hand-made graph to json
		PatternGraph fromJson = GraphEngine.getGson().fromJson(toJson, PatternGraph.class); // json from hand-made to object
		String backToJson = GraphEngine.getGson().toJson(fromJson); // automatically built object to json
		PatternGraph fromJson2 = GraphEngine.getGson().fromJson(backToJson, PatternGraph.class); // json from automatically built object to object
		String backToJson2 = GraphEngine.getGson().toJson(fromJson2); // automatically built object to json
		Assert.assertEquals(backToJson, backToJson2);
	}

	/**
	 * Checks that the transport rule matches the initial ferryman graph:
	 * once when only a single match is requested, three times (one per cargo
	 * species) when all matches are requested.
	 */
	@Test
	public void testPatternMatching() {
		PatternGraph transportRule = getTransportRule();
		Graph ferrymansGraph = getFerrymansGraph();

		ArrayList<Match> matches = PatternEngine.matchPattern(ferrymansGraph, transportRule, true);
		Assert.assertEquals(1, matches.size()); // finds a match (at all) for the transport rule
		matches = PatternEngine.matchPattern(ferrymansGraph, transportRule, false);
		Assert.assertEquals(3, matches.size()); // finds all 3 matches for the transport rule
	}

	/**
	 * Applies each of the three transport matches and checks that every
	 * resulting graph differs (is non-isomorphic) from the starting graph.
	 */
	@Test
	public void testApplyingMatches() {
		PatternGraph transportRule = getTransportRule();
		Graph ferrymansGraph = getFerrymansGraph();

		ArrayList<Match> matches = PatternEngine.matchPattern(ferrymansGraph, transportRule, false);
		Assert.assertEquals(3, matches.size());

		Graph wolfTransported = PatternEngine.applyMatch(matches.get(0));
		Graph goatTransported = PatternEngine.applyMatch(matches.get(1));
		Graph cabbageTransported = PatternEngine.applyMatch(matches.get(2));

		Assert.assertTrue(!GraphEngine.isIsomorphTo(ferrymansGraph, wolfTransported));
		Assert.assertTrue(!GraphEngine.isIsomorphTo(ferrymansGraph, goatTransported));
		Assert.assertTrue(!GraphEngine.isIsomorphTo(ferrymansGraph, cabbageTransported));
	}

	/**
	 * Walks the known 7-turn solution of the ferryman's problem by hand,
	 * checking after each turn that nobody gets eaten and that the expected
	 * number of follow-up moves is available, and finally that the reached
	 * graph is isomorphic to the expected solution graph.
	 */
	@Test
	public void testSolvingFerrymansProblem() {
		PatternGraph eatingRule = getEatingRule();
		PatternGraph transportRule = getTransportRule();
		PatternGraph emptyTransportRule = getEmptyTransportRule();
		Graph ferrymansGraph = getFerrymansGraph();
		Graph ferrymansSolutionGraph = getFerrymansSolutionGraph();

		ArrayList<Match> matches;
		Graph current = ferrymansGraph;

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(3, matches.size()); // he could bring each species
		current = PatternEngine.applyMatch(matches.get(1)); // 1st turn: we know he has to bring the goat first

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, emptyTransportRule, false);
		Assert.assertEquals(1, matches.size()); // he could go back alone
		current = PatternEngine.applyMatch(matches.get(0)); // 2nd turn: he has to go back

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(2, matches.size()); // he could now bring wolf or cabbage
		current = PatternEngine.applyMatch(matches.get(0)); // 3rd turn: he brings the wolf

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(2, matches.size()); // he could now bring back wolf or goat
		current = PatternEngine.applyMatch(matches.get(1)); // 4th turn: he brings back the goat

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(2, matches.size()); // he could now bring goat or cabbage
		current = PatternEngine.applyMatch(matches.get(1)); // 5th turn: he brings the cabbage

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, emptyTransportRule, false);
		Assert.assertEquals(1, matches.size()); // he could go back alone
		current = PatternEngine.applyMatch(matches.get(0)); // 6th turn: he has to go back

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(1, matches.size()); // he could now finally bring the goat and he's done
		current = PatternEngine.applyMatch(matches.get(0)); // 7th turn: he brings the goat and completes the challenge

		matches = PatternEngine.matchPattern(current, eatingRule, false);
		Assert.assertEquals(0, matches.size()); // noone gets eaten
		matches = PatternEngine.matchPattern(current, transportRule, false);
		Assert.assertEquals(3, matches.size()); // he could now start over and bring each species

		Assert.assertTrue(GraphEngine.isIsomorphTo(ferrymansSolutionGraph, current)); // the solution is as expected
	}

	/**
	 * Uses the corrected rules (which forbid leaving two species alone that
	 * eat each other) to calculate the full reachability graph, and checks
	 * that the solution graph is contained in it.
	 */
	@Test
	public void testSolvingFerrymansProblemWithReachabilityGraph() {
		PatternGraph transportRule = getCorrectTransportRule();
		PatternGraph emptyTransportRule = getCorrectEmptyTransportRule();
		Graph ferrymansGraph = getFerrymansGraph();

		ArrayList<ArrayList<PatternGraph>> patterns = new ArrayList<ArrayList<PatternGraph>>();
		ArrayList<PatternGraph> priorityLevel = new ArrayList<PatternGraph>();
		priorityLevel.add(transportRule);
		priorityLevel.add(emptyTransportRule);
		patterns.add(priorityLevel);

		// calculate reachability graph with corrected rules:
		Graph rg = PatternEngine.calculateReachabilityGraph(ferrymansGraph, patterns);

		// check if the solution is contained in the reachability graph:
		Assert.assertNotNull(PatternEngine.findGraphInReachabilityGraph(rg, getFerrymansSolutionGraph()));
	}

	/**
	 * Example for negative pattern nodes: find all cars that have no blue
	 * wheel. Two cars are built - car 'A' with four black wheels and car 'B'
	 * with one blue wheel - so exactly one match (car 'A') is expected.
	 */
	@Test
	public void testNegativePatternVariants() {
		Graph carGraph = new Graph();

		// build car 'A' with 4 black wheels:
		Node carA = new Node().setAttribute("type", "Car");
		Node wheelA1 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		Node wheelA2 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		Node wheelA3 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		Node wheelA4 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		carGraph.addNode(carA).addNode(wheelA1).addNode(wheelA2).addNode(wheelA3).addNode(wheelA4);
		carA.addEdge("has", wheelA1).addEdge("has", wheelA2).addEdge("has", wheelA3).addEdge("has", wheelA4);

		// build a car 'B' with 3 black wheels and 1 blue wheel:
		Node carB = new Node().setAttribute("type", "Car");
		Node wheelB1 = new Node().setAttribute("type", "Wheel").setAttribute("color", "blue");
		Node wheelB2 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		Node wheelB3 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		Node wheelB4 = new Node().setAttribute("type", "Wheel").setAttribute("color", "black");
		carGraph.addNode(carB).addNode(wheelB1).addNode(wheelB2).addNode(wheelB3).addNode(wheelB4);
		carB.addEdge("has", wheelB1).addEdge("has", wheelB2).addEdge("has", wheelB3).addEdge("has", wheelB4);
		carA.addEdge("next", carB);

		// build a pattern that says 'car without a blue wheel':
		PatternGraph carWithoutBlueWheel = new PatternGraph();
		PatternNode car = new PatternNode()
				.setAttributeMatchExpression("#{type} == 'Car'");
		PatternNode wheel = new PatternNode()
				.setAction("!=").setAttributeMatchExpression("#{type} == 'Wheel' && #{color} == 'blue'");
		carWithoutBlueWheel.addPatternNode(car).addPatternNode(wheel);
		car.addPatternEdge(new PatternEdge().setSource(car).setName("has").setTarget(wheel));

		// test if only the car without blue wheels is found:
		ArrayList<Match> matches = PatternEngine.matchPattern(carGraph, carWithoutBlueWheel, false);
		Assert.assertEquals(1, matches.size());

		// test if the reachability graph doesn't go rogue and somehow find multiple graphs:
		ArrayList<ArrayList<PatternGraph>> patterns = new ArrayList<ArrayList<PatternGraph>>();
		ArrayList<PatternGraph> priorityLevel = new ArrayList<PatternGraph>();
		priorityLevel.add(carWithoutBlueWheel);
		patterns.add(priorityLevel);
		Graph rg = PatternEngine.calculateReachabilityGraph(carGraph, patterns);
		Assert.assertEquals(1, rg.getNodes().size());
	}

	/**
	 * Method to obtain the 'eating rule' of the ferryman's problem graph.
	 * Matches a cargo that eats another cargo on the same bank while the
	 * ferry is NOT at that bank; applying it removes the eaten cargo.
	 * @return the 'eating rule' of the ferryman's problem graph
	 */
	private PatternGraph getEatingRule() {
		PatternNode cargoEats = new PatternNode(), cargoGetsEaten = new PatternNode(), ferry = new PatternNode(), bank = new PatternNode();
		return new PatternGraph()
		.addPatternNode(cargoEats
			.setAttributeMatchExpression("#{type} == 'Cargo'")
			.addPatternEdge(new PatternEdge()
				.setSource(cargoEats)
				.setName("at")
				.setTarget(bank)
			).addPatternEdge(new PatternEdge()
				.setAction("-")
				.setSource(cargoEats)
				.setName("eats")
				.setTarget(cargoGetsEaten)
			)
		).addPatternNode(cargoGetsEaten.setAction("-")
			.addPatternAttribute(new PatternAttribute()
				.setAction("-")
				.setName("type")
				.setValue("#{type} == 'Cargo'")
			).addPatternEdge(new PatternEdge()
				.setAction("-")
				.setSource(cargoGetsEaten)
				.setName("at")
				.setTarget(bank)
			)
		).addPatternNode(ferry
			.setAttributeMatchExpression("#{type} == 'Ferry'")
			.addPatternEdge(new PatternEdge()
				.setAction("!=")
				.setSource(ferry)
				.setName("at")
				.setTarget(bank)
			)
		).addPatternNode(bank
			.setAttributeMatchExpression("#{type} == 'Bank'")
		);
	}

	/**
	 * Method to obtain the (uncorrected) 'transport rule' of the ferryman's problem graph.
	 * Note: This rule allows for the ferryman to leave two species alone, that eat each other!
	 * @return the (uncorrected) 'transport rule' of the ferryman's problem graph
	 */
	private PatternGraph getTransportRule() {
		PatternNode cargo = new PatternNode(), ferry = new PatternNode(), bankHere = new PatternNode(), bankThere = new PatternNode();
		return new PatternGraph()
		.addPatternNode(cargo
			.setAttributeMatchExpression("#{type} == 'Cargo'")
			.addPatternEdge(new PatternEdge()
				.setAction("-")
				.setSource(cargo)
				.setName("at")
				.setTarget(bankHere)
			).addPatternEdge(new PatternEdge()
				.setAction("+")
				.setSource(cargo)
				.setName("at")
				.setTarget(bankThere)
			)
		).addPatternNode(ferry
			.setAttributeMatchExpression("#{type} == 'Ferry'")
			.addPatternEdge(new PatternEdge()
				.setAction("-")
				.setSource(ferry)
				.setName("at")
				.setTarget(bankHere)
			).addPatternEdge(new PatternEdge()
				.setAction("+")
				.setSource(ferry)
				.setName("at")
				.setTarget(bankThere)
			)
		).addPatternNode(bankHere
			.setAttributeMatchExpression("#{type} == 'Bank'")
			.addPatternEdge(new PatternEdge()
				.setSource(bankHere)
				.setName("opposite")
				.setTarget(bankThere)
			)
		).addPatternNode(bankThere
			.setAttributeMatchExpression("#{type} == 'Bank'")
		);
	}

	/**
	 * Method to obtain the (uncorrected) 'empty transport rule' of the ferryman's problem graph.
	 * Note: This rule allows for the ferryman to leave two species alone, that eat each other!
	 * @return the (uncorrected) 'empty transport rule' of the ferryman's problem graph
	 */
	private PatternGraph getEmptyTransportRule() {
		PatternNode ferry = new PatternNode(), bankHere = new PatternNode(), bankThere = new PatternNode();
		return new PatternGraph()
		.addPatternNode(ferry
			.setAttributeMatchExpression("#{type} == 'Ferry'")
			.addPatternEdge(new PatternEdge()
				.setAction("-")
				.setSource(ferry)
				.setName("at")
				.setTarget(bankHere)
			).addPatternEdge(new PatternEdge()
				.setAction("+")
				.setSource(ferry)
				.setName("at")
				.setTarget(bankThere)
			)
		).addPatternNode(bankHere
			.setAttributeMatchExpression("#{type} == 'Bank'")
			.addPatternEdge(new PatternEdge()
				.setSource(bankHere)
				.setName("opposite")
				.setTarget(bankThere)
			)
		).addPatternNode(bankThere
			.setAttributeMatchExpression("#{type} == 'Bank'")
		);
	}

	/**
	 * Method to obtain the (corrected) 'transport rule' of the ferryman's problem graph.
	 * Note: This rule doesn't allow the ferryman to leave two species alone, that eat each other!
	 * The negative nodes 'eater' and 'getsEaten' forbid a match whenever a
	 * predator/prey pair would remain together on the departure bank.
	 * @return the (corrected) 'transport rule' of the ferryman's problem graph
	 */
	private PatternGraph getCorrectTransportRule() {
		PatternGraph pattern = new PatternGraph();
		PatternNode cargo = new PatternNode(), ferry = new PatternNode(), bankHere = new PatternNode(), bankThere = new PatternNode(), eater = new PatternNode(), getsEaten = new PatternNode();
		pattern.addPatternNode(cargo).addPatternNode(ferry).addPatternNode(bankHere).addPatternNode(bankThere).addPatternNode(eater).addPatternNode(getsEaten);
		cargo.setAttributeMatchExpression("#{type} == 'Cargo'");
		cargo.addPatternEdge(new PatternEdge()
			.setAction("-")
			.setSource(cargo)
			.setName("at")
			.setTarget(bankHere)
		);
		cargo.addPatternEdge(new PatternEdge()
			.setAction("+")
			.setSource(cargo)
			.setName("at")
			.setTarget(bankThere)
		);
		ferry.setAttributeMatchExpression("#{type} == 'Ferry'");
		ferry.addPatternEdge(new PatternEdge()
			.setAction("-")
			.setSource(ferry)
			.setName("at")
			.setTarget(bankHere)
		);
		ferry.addPatternEdge(new PatternEdge()
			.setAction("+")
			.setSource(ferry)
			.setName("at")
			.setTarget(bankThere)
		);
		bankHere.setAttributeMatchExpression("#{type} == 'Bank'");
		bankHere.addPatternEdge(new PatternEdge()
			.setSource(bankHere)
			.setName("opposite")
			.setTarget(bankThere)
		);
		bankThere.setAttributeMatchExpression("#{type} == 'Bank'");
		eater.setAttributeMatchExpression("#{type} == 'Cargo'");
		eater.addPatternEdge(new PatternEdge()
			.setSource(eater)
			.setName("eats")
			.setTarget(getsEaten));
		eater.addPatternEdge(new PatternEdge()
			.setSource(eater)
			.setName("at")
			.setTarget(bankHere));
		eater.setAction("!=");
		getsEaten.setAttributeMatchExpression("#{type} == 'Cargo'");
		// FIX: the original built this edge with setTarget() called twice and
		// no setSource(), leaving the edge without a source node; the edge
		// must run from the prey node to the departure bank (mirrors 'eater').
		getsEaten.addPatternEdge(new PatternEdge()
			.setSource(getsEaten)
			.setName("at")
			.setTarget(bankHere));
		getsEaten.setAction("!=");
		return pattern;
	}

	/**
	 * Method to obtain the (corrected) 'empty transport rule' of the ferryman's problem graph.
	 * Note: This rule doesn't allow the ferryman to leave two species alone, that eat each other!
	 * @return the (corrected) 'empty transport rule' of the ferryman's problem graph
	 */
	private PatternGraph getCorrectEmptyTransportRule() {
		PatternGraph pattern = new PatternGraph();
		PatternNode ferry = new PatternNode(), bankHere = new PatternNode(), bankThere = new PatternNode(), eater = new PatternNode(), getsEaten = new PatternNode();
		pattern.addPatternNode(ferry).addPatternNode(bankHere).addPatternNode(bankThere).addPatternNode(eater).addPatternNode(getsEaten);
		ferry.setAttributeMatchExpression("#{type} == 'Ferry'");
		ferry.addPatternEdge(new PatternEdge()
			.setAction("-")
			.setSource(ferry)
			.setName("at")
			.setTarget(bankHere)
		);
		ferry.addPatternEdge(new PatternEdge()
			.setAction("+")
			.setSource(ferry)
			.setName("at")
			.setTarget(bankThere)
		);
		bankHere.setAttributeMatchExpression("#{type} == 'Bank'");
		bankHere.addPatternEdge(new PatternEdge()
			.setSource(bankHere)
			.setName("opposite")
			.setTarget(bankThere)
		);
		bankThere.setAttributeMatchExpression("#{type} == 'Bank'");
		eater.setAttributeMatchExpression("#{type} == 'Cargo'");
		eater.addPatternEdge(new PatternEdge()
			.setSource(eater)
			.setName("eats")
			.setTarget(getsEaten));
		eater.addPatternEdge(new PatternEdge()
			.setSource(eater)
			.setName("at")
			.setTarget(bankHere));
		eater.setAction("!=");
		getsEaten.setAttributeMatchExpression("#{type} == 'Cargo'");
		// FIX: same sourceless-edge bug as in getCorrectTransportRule();
		// the prey node must be the source of its 'at' edge.
		getsEaten.addPatternEdge(new PatternEdge()
			.setSource(getsEaten)
			.setName("at")
			.setTarget(bankHere));
		getsEaten.setAction("!=");
		return pattern;
	}

	/**
	 * Method to obtain the initial situation of the ferryman's problem as a graph.
	 * Everything (wolf, goat, cabbage, ferry) starts on the north bank.
	 * @return the initial situation of the ferryman's problem as a graph.
	 */
	private Graph getFerrymansGraph() {
		Graph ferrymansGraph = new Graph();
		Node wolf = new Node(), goat = new Node(), cabbage = new Node(), ferry = new Node(), north = new Node(), south = new Node();
		ferrymansGraph.addNode(wolf).addNode(goat).addNode(cabbage).addNode(ferry).addNode(north).addNode(south);
		wolf.setAttribute("type", "Cargo").setAttribute("species", "Wolf").addEdge("eats", goat).addEdge("at", north);
		goat.setAttribute("type", "Cargo").setAttribute("species", "Goat").addEdge("eats", cabbage).addEdge("at", north);
		cabbage.setAttribute("type", "Cargo").setAttribute("species", "Cabbage").addEdge("at", north);
		ferry.setAttribute("type", "Ferry").addEdge("at", north);
		north.setAttribute("type", "Bank").setAttribute("side", "north").addEdge("opposite", south);
		south.setAttribute("type", "Bank").setAttribute("side", "south").addEdge("opposite", north);
		return ferrymansGraph;
	}

	/**
	 * Method to obtain the solution of the ferryman's problem as a graph.
	 * Identical to the initial graph except everything is on the south bank.
	 * @return the solution of the ferryman's problem as a graph.
	 */
	private Graph getFerrymansSolutionGraph() {
		Graph ferrymansGraph = new Graph();
		Node wolf = new Node(), goat = new Node(), cabbage = new Node(), ferry = new Node(), north = new Node(), south = new Node();
		ferrymansGraph.addNode(wolf).addNode(goat).addNode(cabbage).addNode(ferry).addNode(north).addNode(south);
		wolf.setAttribute("type", "Cargo").setAttribute("species", "Wolf").addEdge("eats", goat).addEdge("at", south);
		goat.setAttribute("type", "Cargo").setAttribute("species", "Goat").addEdge("eats", cabbage).addEdge("at", south);
		cabbage.setAttribute("type", "Cargo").setAttribute("species", "Cabbage").addEdge("at", south);
		ferry.setAttribute("type", "Ferry").addEdge("at", south);
		north.setAttribute("type", "Bank").setAttribute("side", "north").addEdge("opposite", south);
		south.setAttribute("type", "Bank").setAttribute("side", "south").addEdge("opposite", north);
		return ferrymansGraph;
	}
}
package org.littleshoot.proxy; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import org.jboss.netty.handler.codec.http.HttpRequest; import org.junit.Test; /** * Test for HTTP request URI rules. */ public class HttpRequestUriRuleTest { @Test public void testMatching() throws Exception { final HttpRequestPathMatcher matcher = new HttpRequestPathMatcher("/search"); final HttpRequest httpRequest = createRequest("/search?hl=en&source=hp&q=bop&aq=f&aqi=g10&aql=&oq="); assertTrue("Rule should have matches request", matcher.filterResponses(httpRequest)); } @Test public void testIsNotMatching() throws Exception { final HttpRequestPathMatcher matcher = new HttpRequestPathMatcher("/webhp"); final HttpRequest httpRequest = createRequest("/search?hl=en&source=hp&q=bop&aq=f&aqi=g10&aql=&oq="); assertFalse("Rule should have matches request", matcher.filterResponses(httpRequest)); } private HttpRequest createRequest(String uri) { final HttpRequest httpRequest = mock(HttpRequest.class); when(httpRequest.getUri()).thenReturn(uri); return httpRequest; } }
package org.mythtv.services.api.test.v027;

import org.junit.Ignore;
import org.junit.Test;
import org.mythtv.services.api.Bool;
import org.mythtv.services.api.ETagInfo;
import org.mythtv.services.api.v027.DvrOperations;
import org.mythtv.services.api.v027.beans.ProgramList;
import org.mythtv.services.api.v027.beans.RecRuleList;
import org.springframework.http.ResponseEntity;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Integration tests for the MythTV v0.27 DVR service operations.
 *
 * All methods are currently annotated with {@code @Ignore} only — most are
 * stubs that call {@code fail("Not yet implemented")}, and the implemented
 * ones hit a live backend (note: they also hard-code backend data such as
 * schedule id 2386, so they cannot run against an arbitrary server).
 *
 * NOTE(review): in JUnit 4, {@code @Ignore} without {@code @Test} means
 * these methods are never registered as tests at all (the {@code Test}
 * import is otherwise unused) — presumably they should carry both
 * annotations once enabled; confirm before re-activating.
 *
 * @author Sebastien Astie
 */
public class DvrApiTest extends BaseApiTester {

    // Operations facade under test; obtained from the connected service in prepareOperations().
    protected DvrOperations operations;

    /** Hook from BaseApiTester: fetch the DVR operations endpoint to exercise. */
    @Override
    protected void prepareOperations() {
        operations = mythservices.dvrOperations();
    }

    @Ignore
    public void testAddRecordSchedule() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testDisableRecordSchedule() throws Exception {
        fail("Not yet implemented");
    }

    // Enables a hard-coded recording schedule (id 2386) on the backend and
    // only checks that a response came back — not the Bool payload itself.
    @Ignore
    public void testEnableRecordSchedule() throws Exception {
        ResponseEntity<Bool> boolResponseEntity = operations.enableRecordSchedule(2386);
        assertNotNull(boolResponseEntity);
    }

    @Ignore
    public void testGetConflictList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetEncoderList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetExpiringList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetRecGroupList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetRecordSchedule() throws Exception {
        fail("Not yet implemented");
    }

    // Fetches the full schedule list (startIndex 0, count 0, no ETag) and
    // expects the backend to have at least one recording rule defined.
    @Ignore
    public void testGetRecordScheduleList() throws Exception {
        ResponseEntity<RecRuleList> responseEntity = operations.getRecordScheduleList(0, 0, ETagInfo.createEmptyETag());
        assertNotNull(responseEntity);
        RecRuleList list = responseEntity.getBody();
        assertNotNull(list);
        assertNotNull(list.getRecRules());
        assertTrue(list.getRecRules().length > 0);
    }

    @Ignore
    public void testGetRecorded() throws Exception {
        fail("Not yet implemented");
    }

    // Fetches the recorded-program list (descending=true, all filters null,
    // no ETag) and expects the backend to have at least one recording.
    @Ignore
    public void testGetRecordedList() throws Exception {
        ResponseEntity<ProgramList> response = operations.getRecordedList(true,null,null,null,null, null, ETagInfo.createEmptyETag());
        assertNotNull(response);
        ProgramList list = response.getBody();
        assertNotNull(list);
        assertNotNull(list.getPrograms());
        assertTrue(list.getPrograms().length > 0);
    }

    @Ignore
    public void testGetTitleInfoList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetTitleList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testGetUpcomingList() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testRemoveRecordSchedule() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testRemoveRecorded() throws Exception {
        fail("Not yet implemented");
    }

    @Ignore
    public void testUpdateRecordSchedule() throws Exception {
        fail("Not yet implemented");
    }
}
package org.asteriskjava.manager; import org.asteriskjava.manager.event.ManagerEvent; import org.asteriskjava.util.DaemonThreadFactory; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** * Proxies a ManagerEventListener and dispatches events asynchronously by using * a single threaded executor.<p> * Use this proxy to prevent the reader thread from being blocked while your * application processes {@link org.asteriskjava.manager.event.ManagerEvent}s. * If you want to use the {@link org.asteriskjava.manager.ManagerConnection} for * sending actions in your {@link org.asteriskjava.manager.ManagerEventListener} * using a proxy like this one is mandatory; otherwise you will always run into * a timeout because the reader thread that is supposed to read the response to * your action is still blocked processing the event.<p> * If in doubt use the proxy as it won't hurt.<p> * Example: * <pre> * ManagerConnection connection; * ManagerEventListener myListener; * ... * connection.addEventListener(new ManagerEventListenerProxy(myListener)); * </pre> * * @author srt * @author fink * @since 0.3 */ public class ManagerEventListenerProxy implements ManagerEventListener { private final ThreadPoolExecutor executor; private final ManagerEventListener target; /** * Creates a new ManagerEventListenerProxy that notifies the given target * asynchronously when new events are received. * * @param target the target listener to invoke. 
* @see Executors#newSingleThreadExecutor(ThreadFactory) */ public ManagerEventListenerProxy(ManagerEventListener target) { executor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(), new DaemonThreadFactory()); this.target = target; if (target == null) { throw new NullPointerException("ManagerEventListener target is null!"); } }//new @Override public void onManagerEvent(final ManagerEvent event) { executor.execute(new Runnable() { @Override public void run() { target.onManagerEvent(event); } }); }//onManagerEvent public void shutdown() { executor.shutdown(); } public static class Access { public static int getThreadQueueSize (ManagerEventListenerProxy proxy) { return proxy.executor.getQueue().size(); } }//Access }
package teetime.framework.pipe;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.junit.Assert;
import org.junit.Test;

import teetime.util.classpath.ClassForNameResolver;

import com.google.common.base.Charsets;
import com.google.common.io.Files;

/**
 * Tests for {@link PipeFactoryLoader}: loading pipe-factory class lists from
 * classpath resources, merging several configuration files, and the
 * interaction with the global {@link PipeFactoryRegistry} singleton.
 *
 * NOTE(review): these tests read files relative to the working directory
 * ("conf/...", "src/test/resources/...") — presumably the module root; they
 * will fail if run from elsewhere. The last test also mutates the
 * PipeFactoryRegistry.INSTANCE singleton via register(), so test order and
 * JVM reuse matter.
 */
public class PipeFactoryLoaderTest {

	/** An empty configuration resource must yield an empty factory list. */
	@Test
	public void emptyConfig() throws IOException {
		List<IPipeFactory> list = PipeFactoryLoader.loadPipeFactoriesFromClasspath("data/empty-test.conf");
		Assert.assertEquals(true, list.isEmpty());
	}

	/**
	 * Loading the standard configuration must produce exactly one factory per
	 * line of the backing file (the classpath resource and the file under
	 * conf/ are assumed to be the same content — TODO confirm).
	 */
	@Test
	public void singleConfig() throws IOException {
		List<IPipeFactory> list = PipeFactoryLoader.loadPipeFactoriesFromClasspath("pipe-factories.conf");
		int lines = Files.readLines(new File("conf/pipe-factories.conf"), Charsets.UTF_8).size();
		Assert.assertEquals(lines, list.size());
	}

	/**
	 * Merging two configuration files must yield only factories named in one
	 * of them; afterwards the registry must return the default factories for
	 * the standard config and switch to the test factories once they are
	 * explicitly registered.
	 */
	@Test
	public void multipleConfigs() throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {
		List<URL> files = new ArrayList<URL>();
		File pipeConfig = new File("conf/pipe-factories.conf");
		File testConfig = new File("src/test/resources/data/normal-test.conf");
		files.add(testConfig.toURI().toURL());
		files.add(pipeConfig.toURI().toURL());
		List<IPipeFactory> pipeFactories = PipeFactoryLoader.mergeFiles(files);
		// The merged class-name universe: every line of both config files.
		List<String> contents = Files.readLines(pipeConfig, Charsets.UTF_8);
		contents.addAll(Files.readLines(testConfig, Charsets.UTF_8));
		// Check if all read factories are contained in one of the files
		for (IPipeFactory iPipeFactory : pipeFactories) {
			Assert.assertTrue(contents.indexOf(iPipeFactory.getClass().getCanonicalName()) != -1);
		}
		// Second part of the test: PipeFactoryRegistry
		PipeFactoryRegistry pipeRegistry = PipeFactoryRegistry.INSTANCE;
		ClassForNameResolver<IPipeFactory> classResolver = new ClassForNameResolver<IPipeFactory>(IPipeFactory.class);
		// Look for the "normal" pipes
		// For each default factory, the registry keyed by (communication,
		// ordering, growable) must currently return that same factory class.
		for (String className : Files.readLines(pipeConfig, Charsets.UTF_8)) {
			IPipeFactory pipeFactory = classResolver.classForName(className).newInstance();
			IPipeFactory returnedFactory = pipeRegistry.getPipeFactory(pipeFactory.getThreadCommunication(), pipeFactory.getOrdering(), pipeFactory.isGrowable());
			Assert.assertEquals(pipeFactory.getClass().getCanonicalName(), returnedFactory.getClass().getCanonicalName());
		}
		// Second "and a half" part
		for (String className : Files.readLines(testConfig, Charsets.UTF_8)) {
			IPipeFactory pipeFactory = classResolver.classForName(className).newInstance();
			// Still old factory
			IPipeFactory returnedFactory = pipeRegistry.getPipeFactory(pipeFactory.getThreadCommunication(), pipeFactory.getOrdering(), pipeFactory.isGrowable());
			Assert.assertNotEquals(pipeFactory.getClass().getCanonicalName(), returnedFactory.getClass().getCanonicalName());
			// Overload factory and check for the new one
			pipeRegistry.register(pipeFactory);
			returnedFactory = pipeRegistry.getPipeFactory(pipeFactory.getThreadCommunication(), pipeFactory.getOrdering(), pipeFactory.isGrowable());
			Assert.assertEquals(pipeFactory.getClass().getCanonicalName(), returnedFactory.getClass().getCanonicalName());
		}
	}
}
package org.b3log.symphony.processor.channel; import org.b3log.latke.Keys; import org.b3log.latke.ioc.LatkeBeanManager; import org.b3log.latke.ioc.Lifecycle; import org.b3log.latke.logging.Logger; import org.b3log.latke.model.User; import org.b3log.latke.service.ServiceException; import org.b3log.symphony.model.Pointtransfer; import org.b3log.symphony.service.ActivityMgmtService; import org.b3log.symphony.service.UserQueryService; import org.json.JSONException; import org.json.JSONObject; import javax.inject.Inject; import javax.websocket.CloseReason; import javax.websocket.OnClose; import javax.websocket.OnError; import javax.websocket.OnMessage; import javax.websocket.OnOpen; import javax.websocket.Session; import javax.websocket.server.ServerEndpoint; import java.util.Map; import java.util.Queue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; @ServerEndpoint(value = "/gobang-game-channel", configurator = Channels.WebSocketConfigurator.class) public class GobangChannel { /** * Session set. */ public static final Map<String, Session> SESSIONS = new ConcurrentHashMap<String, Session>(); /** * . * String1userId * ChessGame */ public static final Map<String, ChessGame> chessPlaying = new ConcurrentHashMap<String, ChessGame>(); /** * Map. * Stringplayer1, * Stringplayer2 */ public static final Map<String, String> antiPlayer = new ConcurrentHashMap<String, String>(); public static final Queue<ChessGame> chessRandomWait = new ConcurrentLinkedQueue<ChessGame>(); /** * Logger. */ private static final Logger LOGGER = Logger.getLogger(GobangChannel.class); /** * Activity management service. */ @Inject private ActivityMgmtService activityMgmtService; /** * Called when the socket connection with the browser is established. 
* * @param session session */ @OnOpen public void onConnect(final Session session) { final JSONObject user = (JSONObject) Channels.getHttpSessionAttribute(session, User.USER); if (null == user) { return; } final String userId = user.optString(Keys.OBJECT_ID); final String userName = user.optString(User.USER_NAME); boolean playing = false; LOGGER.debug("new connection from " + userName); SESSIONS.put(userId, session); for (String temp : chessPlaying.keySet()) { ChessGame chessGame = chessPlaying.get(temp); if (userId.equals(chessGame.getPlayer1())){ recoverGame(userId,userName,chessGame.getPlayer2(),chessGame); chessGame.setPlayState1(true); playing = true; }else if(userId.equals(chessGame.getPlayer2())){ recoverGame(userId,userName,chessGame.getPlayer1(),chessGame); chessGame.setPlayState2(true); playing = true; } } if (playing) { return; } else { ChessGame chessGame=null; JSONObject sendText = new JSONObject(); do{ chessGame = chessRandomWait.poll(); }while(chessRandomWait.size() > 0 && SESSIONS.get(chessGame.getPlayer1()) == null); if(chessGame==null){ chessGame = new ChessGame(userId,userName); chessRandomWait.add(chessGame); sendText.put("type", 3); sendText.put("playerName", userName); sendText.put("message", ""); session.getAsyncRemote().sendText(sendText.toString()); }else if(userId.equals(chessGame.getPlayer1())){ chessRandomWait.add(chessGame); sendText.put("type", 3); sendText.put("playerName", userName); sendText.put( "message", ""); session.getAsyncRemote().sendText(sendText.toString()); } else { final LatkeBeanManager beanManager = Lifecycle.getBeanManager(); chessGame.setPlayer2(userId); chessGame.setName2(userName); chessGame.setPlayState2(true); chessGame.setStep(1); chessPlaying.put(chessGame.getPlayer1(), chessGame); antiPlayer.put(chessGame.getPlayer1(), chessGame.getPlayer2()); final ActivityMgmtService activityMgmtService = beanManager.getReference(ActivityMgmtService.class); sendText.put("type", 4); sendText.put("message", "<" + userName + 
">"); sendText.put("player", chessGame.getPlayer1()); SESSIONS.get(chessGame.getPlayer1()).getAsyncRemote().sendText(sendText.toString()); sendText.put("message", "~<"+chessGame.getName1()+">"); sendText.put("player", chessGame.getPlayer2()); session.getAsyncRemote().sendText(sendText.toString()); JSONObject r1=activityMgmtService.startGobang(chessGame.getPlayer1()); JSONObject r2=activityMgmtService.startGobang(chessGame.getPlayer2()); } } } /** * Called when the connection closed. * * @param session session * @param closeReason close reason */ @OnClose public void onClose(final Session session, final CloseReason closeReason) { removeSession(session); } /** * Called when a message received from the browser. * * @param message message */ @OnMessage public void onMessage(final String message) throws JSONException { JSONObject jsonObject = new JSONObject(message); final String player = jsonObject.optString("player"); final String anti = getAntiPlayer(player); JSONObject sendText = new JSONObject(); final LatkeBeanManager beanManager = Lifecycle.getBeanManager(); switch (jsonObject.optInt("type")) { case 1: LOGGER.debug(jsonObject.optString("message")); final UserQueryService userQueryService = beanManager.getReference(UserQueryService.class); sendText.put("type", 1); try { sendText.put("player", userQueryService.getUser(player).optString(User.USER_NAME)); } catch (ServiceException e) { LOGGER.error("service not avaliable"); } sendText.put("message", jsonObject.optString("message")); SESSIONS.get(anti).getAsyncRemote().sendText(sendText.toString()); break; case 2: ChessGame chessGame = chessPlaying.keySet().contains(player) ? 
chessPlaying.get(player) : chessPlaying.get(anti); int x = jsonObject.optInt("x"); int y = jsonObject.optInt("y"); int size = jsonObject.optInt("size"); if (chessGame != null) { if(chessGame.getChess()[x / size][y / size] != 0){ return; } boolean flag = false; if (player.equals(chessGame.getPlayer1())) { if (chessGame.getStep() != 1) { return; } else { sendText.put("color", "black"); chessGame.getChess()[x / size][y / size] = 1; flag = chessGame.chessCheck(1); chessGame.setStep(2); } } else { if (chessGame.getStep() != 2) { return; } else { sendText.put("color", "white"); chessGame.getChess()[x / size][y / size] = 2; flag = chessGame.chessCheck(2); chessGame.setStep(1); } } sendText.put("type", 2); sendText.put("player", player); sendText.put("posX", x); sendText.put("posY", y); if (flag) { sendText.put("result", "You Win"); } SESSIONS.get(player).getAsyncRemote().sendText(sendText.toString()); if (flag) { sendText.put("result", "You Lose"); } SESSIONS.get(anti).getAsyncRemote().sendText(sendText.toString()); if (flag) { final ActivityMgmtService activityMgmtService = beanManager.getReference(ActivityMgmtService.class); activityMgmtService.collectGobang(player, Pointtransfer.TRANSFER_SUM_C_ACTIVITY_GOBANG_START * 2); chessPlaying.remove(chessGame); } } break; /* *case 5:// for(ChessGame cg:chessRandomWait){ if(cg.getPlayer1().equals(jsonObject.optString("player"))){ chessRandomWait.remove(cg); break; } } break;*/ } } /** * Called in case of an error. * * @param session session * @param error error */ @OnError public void onError(final Session session, final Throwable error) { removeSession(session); } /** * Removes the specified session. 
* * @param session the specified session */ private void removeSession(final Session session) { for (String player : SESSIONS.keySet()) { if (session.equals(SESSIONS.get(player))) { if(chessPlaying.get(player)!=null){ ChessGame chessGame=chessPlaying.get(player); chessGame.setPlayState1(false); if(!chessGame.isPlayState2()){ chessPlaying.remove(player); antiPlayer.remove(player); }else{ JSONObject sendText = new JSONObject(); sendText.put("type",6); sendText.put("message",""); SESSIONS.get(chessGame.getPlayer2()).getAsyncRemote().sendText(sendText.toString()); } }else if(chessPlaying.get(getAntiPlayer(player))!=null){ String player1=getAntiPlayer(player); ChessGame chessGame=chessPlaying.get(player1); chessGame.setPlayState2(false); if(!chessGame.isPlayState1()){ chessPlaying.remove(player1); antiPlayer.remove(player1); }else{ JSONObject sendText = new JSONObject(); sendText.put("type",6); sendText.put("message",""); SESSIONS.get(chessGame.getPlayer1()).getAsyncRemote().sendText(sendText.toString()); } }else{ for(ChessGame chessGame:chessRandomWait){ if(player.equals(chessGame.getPlayer1())){ chessRandomWait.remove(chessGame); } } } SESSIONS.remove(player); } } } private String getAntiPlayer(String player) { String anti = antiPlayer.get(player); if (null == anti || anti.equals("")) { for (String temp : antiPlayer.keySet()) { if (player.equals(antiPlayer.get(temp))) { anti = temp; } } } return anti; } private void recoverGame(String userId,String userName,String antiUserId,ChessGame chessGame){ JSONObject sendText = new JSONObject(); sendText.put("type", 5); sendText.put("chess",chessGame.getChess()); sendText.put("message", ""+(chessGame.getStep()==1?chessGame.getName1():chessGame.getName2())+""); sendText.put("playerName", userName); sendText.put("player", userId); SESSIONS.get(userId).getAsyncRemote().sendText(sendText.toString()); sendText=new JSONObject(); sendText.put("type",6); sendText.put("message", 
""+(chessGame.getStep()==1?chessGame.getName1():chessGame.getName2())+""); SESSIONS.get(antiUserId).getAsyncRemote().sendText(sendText.toString()); } } class ChessGame { private long chessId; private String player1; private String player2; private String name1; private String name2; private boolean playState1; private boolean playState2; private int state;//012 private int[][] chess = null; private int step;//1-player1,2-player2; private long starttime; public ChessGame(String player1,String name1) { this.chessId = System.currentTimeMillis(); this.player1 = player1; this.name1 = name1; this.playState1=true; this.playState2=false; this.chess = new int[20][20]; this.starttime = System.currentTimeMillis(); for (int i = 0; i < 20; i++) { for (int j = 0; j < 20; j++) { chess[i][j] = 0; } } } public boolean chessCheck(int step) { for (int i = 0; i < this.chess.length; i++) { int count = 0; for (int j = 0; j < this.chess[i].length; j++) { if (this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } for (int j = 0; j < this.chess[0].length; j++) { int count = 0; for (int i = 0; i < this.chess.length; i++) { if (this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } for (int x = 0, y = 0; x < this.chess.length; x++) { int count = 0; for (int i = x, j = y; i < this.chess.length; i++, j++) { if (this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } for (int x = 0, y = 0; y < this.chess[0].length; y++) { int count = 0; for (int i = x, j = y; j < this.chess.length; i++, j++) { if (this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } //x-1,y+1 for (int x = 0, y = 0; x < this.chess.length; x++) { int count = 0; for (int i = x, j = y; i >= 0; i if 
(this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } for (int x = this.chess.length - 1, y = 0; y < this.chess[0].length; y++) { int count = 0; for (int i = x, j = y; j < this.chess.length; i if (this.chess[i][j] == step) { count++; } else if (this.chess[i][j] != step && count < 5) { count = 0; } } if (count >= 5) { return true; } } return false; } public long getChessId() { return chessId; } public void setChessId(long chessId) { this.chessId = chessId; } public String getPlayer1() { return player1; } public void setPlayer1(String player1) { this.player1 = player1; } public String getPlayer2() { return player2; } public void setPlayer2(String player2) { this.player2 = player2; } public int getState() { return state; } public void setState(int state) { this.state = state; } public int getStep() { return step; } public void setStep(int step) { this.step = step; } public int[][] getChess() { return chess; } public void setChess(int[][] chess) { this.chess = chess; } public long getStarttime() { return starttime; } public void setStarttime(long starttime) { this.starttime = starttime; } public boolean isPlayState1() { return playState1; } public void setPlayState1(boolean playState1) { this.playState1 = playState1; } public boolean isPlayState2() { return playState2; } public void setPlayState2(boolean playState2) { this.playState2 = playState2; } public String getName1() { return name1; } public void setName1(String name1) { this.name1 = name1; } public String getName2() { return name2; } public void setName2(String name2) { this.name2 = name2; } }
package org.apache.commons.io.testtools;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.util.Arrays;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;

import junit.framework.AssertionFailedError;
import junit.framework.TestCase;

/**
 * Base class for testcases doing tests with files.
 *
 * Provides a shared test directory, deterministic test-data generation, and
 * content-comparison assertions for file-based IO tests.
 *
 * @author Jeremias Maerki
 */
public abstract class FileBasedTestCase extends TestCase {

    // Lazily-resolved shared directory for generated test files.
    private static File testDir;

    public FileBasedTestCase(String name) {
        super(name);
    }

    /**
     * Returns the shared test directory ("test/io/" resolved against the
     * working directory), creating the File handle on first use.
     */
    public static File getTestDirectory() {
        if (testDir == null) {
            testDir = (new File("test/io/")).getAbsoluteFile();
        }
        return testDir;
    }

    /**
     * Creates a file of the given size filled with the deterministic byte
     * pattern from {@link #generateTestData(OutputStream, long)}.
     *
     * @param file the file to create; its parent directory must already exist
     * @param size number of bytes to write
     * @throws IOException if the parent directory is missing or writing fails
     */
    protected void createFile(final File file, final long size)
            throws IOException {
        if (!file.getParentFile().exists()) {
            throw new IOException("Cannot create file " + file
                + " as the parent directory does not exist");
        }
        final BufferedOutputStream output =
            new BufferedOutputStream(new java.io.FileOutputStream(file));
        try {
            generateTestData(output, size);
        } finally {
            // Quietly closes the stream even if generation threw.
            IOUtils.shutdownStream(output);
        }
    }

    /**
     * Returns {@code size} bytes of the deterministic test pattern as an
     * in-memory array.
     */
    protected byte[] generateTestData(final long size) {
        try {
            ByteArrayOutputStream baout = new ByteArrayOutputStream();
            generateTestData(baout, size);
            return baout.toByteArray();
        } catch (IOException ioe) {
            // In-memory stream cannot actually fail.
            throw new RuntimeException("This should never happen: " + ioe.getMessage());
        }
    }

    /**
     * Writes {@code size} bytes of a repeating 1..127 pattern to the stream.
     * The pattern avoids 0 and stays in the ASCII range so the data survives
     * round-trips through Readers and Writers.
     */
    protected void generateTestData(final OutputStream out, final long size)
                throws IOException {
        for (int i = 0; i < size; i++) {
            //output.write((byte)'X');

            // nice varied byte pattern compatible with Readers and Writers
            out.write( (byte)( (i % 127) + 1) );
        }
    }

    /**
     * Returns a File in the test directory for the given name, deleting any
     * leftover file from a previous run.
     */
    protected File newFile(String filename) throws IOException {
        final File destination = new File( getTestDirectory(), filename );
        /*
        assertTrue( filename + "Test output data file shouldn't previously exist",
                    !destination.exists() );
        */
        if (destination.exists()) {
            FileUtils.forceDelete(destination);
        }
        return destination;
    }

    /**
     * Asserts that {@code file} exists and has the same content as
     * {@code referenceFile}.
     */
    protected void checkFile( final File file, final File referenceFile )
                throws Exception {
        assertTrue( "Check existence of output file", file.exists() );
        assertEqualContent( referenceFile, file );
    }

    /** Assert that the content of two files is the same. */
    private void assertEqualContent( final File f0, final File f1 )
        throws IOException
    {
        /* This doesn't work because the filesize isn't updated until the file
         * is closed.
        assertTrue( "The files " + f0 + " and " + f1 +
                    " have differing file sizes (" + f0.length() +
                    " vs " + f1.length() + ")", ( f0.length() == f1.length() ) );
        */
        final InputStream is0 = new java.io.FileInputStream( f0 );
        try {
            final InputStream is1 = new java.io.FileInputStream( f1 );
            try {
                final byte[] buf0 = new byte[ 1024 ];
                final byte[] buf1 = new byte[ 1024 ];
                int n0 = 0;
                int n1 = 0;
                // NOTE(review): this assumes each read() returns the same
                // number of bytes from both streams, and compares the WHOLE
                // 1024-byte buffers (including bytes beyond n0 left over from
                // the previous iteration). Works for same-length local files
                // read in lockstep, but partial reads of differing lengths
                // would fail spuriously — confirm before reusing on other
                // stream types.
                while( -1 != n0 ) {
                    n0 = is0.read( buf0 );
                    n1 = is1.read( buf1 );
                    assertTrue( "The files " + f0 + " and " + f1 +
                                " have differing number of bytes available (" + n0 +
                                " vs " + n1 + ")", ( n0 == n1 ) );
                    assertTrue( "The files " + f0 + " and " + f1 +
                                " have different content", Arrays.equals( buf0, buf1 ) );
                }
            } finally {
                is1.close();
            }
        } finally {
            is0.close();
        }
    }

    /** Assert that the content of a file is equal to that in a byte[]. */
    protected void assertEqualContent( final byte[] b0, final File file )
        throws IOException
    {
        final InputStream is = new java.io.FileInputStream( file );
        try {
            byte[] b1 = new byte[ b0.length ];
            int numRead = is.read( b1 );
            // Single read plus available()==0 assumes the whole file arrives
            // in one read — holds for small local files used in these tests.
            assertTrue( "Different number of bytes", numRead == b0.length && is.available() == 0 );
            // NOTE(review): unusual style — the assertion lives in the
            // for-loop's update clause (runs once per iteration before i++).
            for( int i = 0;
                 i < numRead;
                 assertTrue( "Byte " + i + " differs (" + b0[ i ] + " != " + b1[ i ] + ")",
                   b0[ i ] == b1[ i ] ),
                 i++ );
        } finally {
            is.close();
        }
    }

    /**
     * Asserts that the stream is still open by writing a byte to it; a
     * closed stream makes the copy-under-test guilty of closing its output.
     */
    protected void checkWrite(final OutputStream output) throws Exception {
        try {
            new java.io.PrintStream(output).write(0);
        } catch (final Throwable t) {
            throw new AssertionFailedError(
                "The copy() method closed the stream "
                + "when it shouldn't have. "
                + t.getMessage());
        }
    }

    /**
     * Asserts that the writer is still open by writing a char to it.
     */
    protected void checkWrite(final Writer output) throws Exception {
        try {
            new java.io.PrintWriter(output).write('a');
        } catch (final Throwable t) {
            throw new AssertionFailedError(
                "The copy() method closed the stream "
                + "when it shouldn't have. "
                + t.getMessage());
        }
    }

    /**
     * Deletes the file if it exists, failing the test when deletion fails.
     */
    protected void deleteFile( final File file )
        throws Exception {
        if (file.exists()) {
            assertTrue("Couldn't delete file: " + file, file.delete());
        }
    }
}
package org.buddycloud.channelserver.channel; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.log4j.Logger; import org.dom4j.Element; import org.dom4j.dom.DOMElement; public class ValidateEntry { public static final String MISSING_CONTENT_ELEMENT = "content-required"; public static final String CONTENT_TEXT = "text"; public static final String CONTENT_XHTML = "xhtml"; private static Logger LOGGER = Logger.getLogger(ValidateEntry.class); private Element entry; private String errorMsg = ""; private String inReplyTo; private Element meta; private Element media; Map<String, String> params = new HashMap<String, String>(); private Element geoloc; public ValidateEntry(Element entry) { this.entry = entry; } public String getErrorMessage() { return this.errorMsg; } /** * This is a big hackety-hack. */ public boolean isValid() { if (this.entry == null) { this.errorMsg = "Dude, the entry is missing."; return false; } Element id = this.entry.element("id"); if ((id == null) || (true == id.getText().isEmpty())) { if (null != id) id.detach(); LOGGER.debug("ID of the entry was missing. We add a default one to it: 1"); this.entry.addElement("id").setText("1"); } Element title = this.entry.element("title"); if (null == title) { LOGGER.debug("Title of the entry was missing. We add a default one to it: 'Post'."); title = this.entry.addElement("title"); title.setText("Post"); } this.params.put("title", title.getText()); Element content = this.entry.element("content"); if (null == content) { this.errorMsg = MISSING_CONTENT_ELEMENT; return false; } this.params.put("content", content.getText()); Element updated = this.entry.element("updated"); if (null == updated) { String updateTime = Conf.formatDate(new Date()); LOGGER.debug("Update of the entry was missing. 
We add a default one to it: '" + updateTime + "'."); this.entry.addElement("updated").setText(updateTime); } this.geoloc = this.entry.element("geoloc"); Element reply = this.entry.element("in-reply-to"); if (null != reply) { inReplyTo = reply.attributeValue("ref"); if (-1 != inReplyTo.indexOf(",")) { String[] tokens = inReplyTo.split(","); inReplyTo = tokens[2]; } } Element meta = this.entry.element("meta"); if (null != meta) { this.meta = meta; } Element media = this.entry.element("media"); if (null != media) { this.media = media; } return true; } public Element createBcCompatible(String bareJID, String channelServerJID, String node) { Element entry = new DOMElement("entry", new org.dom4j.Namespace("", "http: entry.add(new org.dom4j.Namespace("activity", "http://activitystrea.ms/spec/1.0/")); String id = UUID.randomUUID().toString(); String postType = "note"; entry.addElement("id").setText( "tag:" + channelServerJID + "," + node + "," + id); entry.addElement("title").setText(this.params.get("title")); entry.addElement("content").setText(this.params.get("content")); String publishedDate = Conf.formatDate(new Date()); entry.addElement("published").setText(publishedDate); entry.addElement("updated").setText(publishedDate); Element author = entry.addElement("author"); author.addElement("name").setText(bareJID); author.addElement("uri").setText("acct:" + bareJID); author.addElement("activity:object-type").setText("person"); if (this.geoloc != null) { entry.add(this.geoloc.createCopy()); } if (this.inReplyTo != null) { Element reply = entry.addElement("in-reply-to"); reply.addNamespace("", "http://purl.org/syndication/thread/1.0"); reply.addAttribute("ref", inReplyTo); postType = "comment"; } this.geoloc = this.entry.element("geoloc"); entry.addElement("activity:verb").setText("post"); Element activity_object = entry.addElement("activity:object"); activity_object.addElement("activity:object-type").setText(postType); if (null != meta) { entry.add(meta.createCopy()); } if 
(null != media) { entry.add(media.createCopy()); } return entry; } }
package org.ditacommunity.i18n.collation; import java.text.CollationKey; import java.text.Collator; import java.util.HashMap; import java.util.Locale; import com.ibm.icu.text.RuleBasedCollator; import net.sf.saxon.expr.sort.AtomicMatchKey; import net.sf.saxon.lib.StringCollator; /** * Rules-based collator that uses the CC-CEDICT Simplified Chinese * dictionary to do collation of Simplified Chinese. */ public class ZhCnAwareCollator extends Collator implements java.util.Comparator<Object>, StringCollator { private final com.ibm.icu.text.Collator delegate; private final Locale locale ; private final boolean isZhCn; private HashMap<String, ZhCnAwareCollationKey> colKeyCache = new HashMap<String, ZhCnAwareCollationKey>(); private ZhCnDictionary zhCnDictionary; private String collationURI; public ZhCnAwareCollator(String collationURI) { this.collationURI = collationURI; this.locale = Locale.getDefault(); this.isZhCn = Locale.SIMPLIFIED_CHINESE == locale; this.delegate = RuleBasedCollator.getInstance(locale); } public ZhCnAwareCollator(Locale locale) { this.delegate = RuleBasedCollator.getInstance(locale); this.locale = locale; this.isZhCn = Locale.SIMPLIFIED_CHINESE == locale; } @Override public synchronized CollationKey getCollationKey(String source) { if (this.colKeyCache.containsKey(source)) { return this.colKeyCache.get(source); } ZhCnAwareCollationKey colKey = new ZhCnAwareCollationKey(delegate, source); this.colKeyCache.put(source, colKey); return colKey; } @Override public int hashCode() { return delegate.hashCode(); } /** * Gets the Collator for the desired locale. Always returns a * ZhCnAwareCollator. * * @param desiredLocale the desired locale. * @return the Collator for the desired locale. 
* @see java.util.Locale * @see java.util.ResourceBundle */ public static synchronized Collator getInstance(Locale desiredLocale) { if (false) { System.out.print("getInstance(): desiredLocale=" + desiredLocale); } Collator collator = new ZhCnAwareCollator(desiredLocale); return collator; } @Override public int compare(String source, String target) { int result; if (isZhCn) { try { result = zhCnCompare(source, target); } catch (Exception e) { System.out.println(" + [ERROR] ZhCnAwareCollator.compare(): " + e.getClass().getSimpleName()); e.printStackTrace(); result = delegate.compare(source, target); } } else { result = delegate.compare(source, target); } return result; } /** * Use the Simplified Chinese sort keys to do the comparison. The sort keys * are then passed to the base ICU collator so that its normal rule-based * configuration rules are then applied. For Simplified Chinese the sort key * is the pinyin transliteration of the Chinese word, so the base part of the sort * key is always in latin script. * @param source Source string * @param target Target string * @return Comparison value. */ private int zhCnCompare(String source, String target) { String sourceSortKey = getZhCnSortKey(source); String targetSortKey = getZhCnSortKey(target); return delegate.compare(sourceSortKey, targetSortKey); } /** * Given a string that may contain Simplified Chinese ideographs, return * the appropiate sort key, which is the pinyin transliteration as the * primary key and the original text as the secondary key. * @param source Source string. * @return Sort key for use by the RuleBasedCollator.compare() method. */ protected String getZhCnSortKey(String source) { if (this.colKeyCache.containsKey(source)) { return colKeyCache.get(source).getSortKey(); } if (zhCnDictionary == null) { // Because the dictionary is in the jar this should never fail in normal operation. 
this.zhCnDictionary = new ZhCnDictionary(); } String pinyin = ZhCnDictionary.getPinYin(source); // FIXME: Construct proper primary/secondary ICU sort key. return pinyin + source; } @Override public int compare(Object o1, Object o2) { return delegate.compare(o1, o2); } public void setCollationURI(String collationURI) { this.collationURI = collationURI; } @Override public String getCollationURI() { return this.collationURI; } @Override public int compareStrings(CharSequence charSequence, CharSequence charSequence1) { return zhCnCompare(charSequence.toString(), charSequence.toString()); } @Override public boolean comparesEqual(CharSequence charSequence, CharSequence charSequence1) { throw new UnsupportedOperationException(); } @Override public AtomicMatchKey getCollationKey(CharSequence charSequence) { throw new UnsupportedOperationException(); } /** * Gets the locale associated with the collators * @return */ public Locale getLocale() { return this.locale; } }
package org.lightmare.jpa.datasource;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

import javax.naming.Context;
import javax.sql.DataSource;

import org.apache.log4j.Logger;
import org.lightmare.config.Configuration;
import org.lightmare.jndi.JndiManager;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.reflect.MetaUtils;

/**
 * Parses xml and property files to initialize and cache {@link DataSource}
 * objects
 *
 * @author levan
 *
 */
public class DataSourceInitializer {

    // Paths of data source configuration files that have been parsed
    private static final Set<String> INITIALIZED_SOURCES = Collections
            .synchronizedSet(new HashSet<String>());

    // JNDI names of data sources that have been registered
    private static final Set<String> INITIALIZED_NAMES = Collections
            .synchronizedSet(new HashSet<String>());

    public static final Logger LOG = Logger
            .getLogger(DataSourceInitializer.class);

    // Connection properties
    public static final String DRIVER_PROPERTY = "driver";
    public static final String USER_PROPERTY = "user";
    public static final String PASSWORD_PROPERTY = "password";
    public static final String URL_PROPERTY = "url";
    public static final String JNDI_NAME_PROPERTY = "jndiname";
    public static final String NAME_PROPERTY = "name";

    private DataSourceInitializer() {
    }

    private static boolean checkForDataSource(String path) {
        return ObjectUtils.available(path);
    }

    /**
     * Reads the configured JNDI name from data source {@link Properties}
     *
     * @param properties data source configuration
     * @return the JNDI name, or <code>null</code> if not configured
     */
    public static String getJndiName(Properties properties) {
        String jndiName = properties
                .getProperty(DataSourceInitializer.JNDI_NAME_PROPERTY);
        return jndiName;
    }

    /**
     * Loads jdbc driver class
     *
     * @param driver fully qualified driver class name
     */
    public static void initializeDriver(String driver) throws IOException {
        MetaUtils.initClassForName(driver);
    }

    /**
     * Initializes data source from the configuration file at the passed path,
     * unless that path has already been initialized
     *
     * @param path data source configuration file path
     * @throws IOException
     */
    public static void initializeDataSource(String path) throws IOException {
        if (checkForDataSource(path)
                && !DataSourceInitializer.checkDSPath(path)) {
            FileParsers parsers = new FileParsers();
            parsers.parseStandaloneXml(path);
        }
    }

    /**
     * Initializes all data sources configured in the passed
     * {@link Configuration}
     *
     * @param config application configuration
     * @throws IOException
     */
    public static void initializeDataSources(Configuration config)
            throws IOException {
        Collection<String> paths = config.getDataSourcePath();
        if (ObjectUtils.available(paths)) {
            for (String path : paths) {
                initializeDataSource(path);
            }
        }
    }

    /**
     * Initializes and registers {@link DataSource} object in jndi by
     * {@link Properties} {@link Context}
     *
     * @param properties pooling / connection properties for the data source
     * @throws IOException
     */
    public static void registerDataSource(Properties properties)
            throws IOException {
        InitDataSource initDataSource = InitDataSourceFactory.get(properties);
        initDataSource.create();
        // Caches jndiName for data source
        String jndiName = getJndiName(properties);
        INITIALIZED_NAMES.add(jndiName);
    }

    public static void setDsAsInitialized(String datasourcePath) {
        INITIALIZED_SOURCES.add(datasourcePath);
    }

    public static void removeInitialized(String datasourcePath) {
        INITIALIZED_SOURCES.remove(datasourcePath);
    }

    public static boolean checkDSPath(String datasourcePath) {
        return INITIALIZED_SOURCES.contains(datasourcePath);
    }

    /**
     * Closes and unbinds from context data source with specified jndi name
     *
     * @param jndiName JNDI name the data source is bound under
     * @throws IOException
     */
    public static void close(String jndiName) throws IOException {
        JndiManager jndiManager = new JndiManager();
        DataSource dataSource = jndiManager.lookup(jndiName);
        if (ObjectUtils.notNull(dataSource)) {
            cleanUp(dataSource);
        }
        jndiManager.unbind(jndiName);
        INITIALIZED_NAMES.remove(jndiName);
    }

    /**
     * Closes and unbinds from context all existing sources
     *
     * @throws IOException
     */
    public static void closeAll() throws IOException {
        // Snapshot to avoid concurrent modification while close() removes names
        Set<String> dataSources = new HashSet<String>(INITIALIZED_NAMES);
        for (String jndiName : dataSources) {
            close(jndiName);
        }
    }

    /**
     * Closes and unbinds from {@link Context} all data sources from passed file
     * path
     *
     * @param dataSourcePath data source configuration file path
     * @throws IOException
     */
    public static void undeploy(String dataSourcePath) throws IOException {
        Collection<String> jndiNames = FileParsers
                .dataSourceNames(dataSourcePath);
        // BUGFIX: guard on the collection being iterated, not on the path
        // string (which is always available at this point)
        if (ObjectUtils.available(jndiNames)) {
            for (String jndiName : jndiNames) {
                close(jndiName);
            }
        }
        removeInitialized(dataSourcePath);
    }

    /**
     * Clean and destroy data source
     *
     * @param dataSource data source to destroy
     */
    public static void cleanUp(DataSource dataSource) {
        InitDataSourceFactory.destroy(dataSource);
    }
}
package dk.aau.sw402F15.CodeGenerator;

import dk.aau.sw402F15.Symboltable.Scope;
import dk.aau.sw402F15.Symboltable.ScopeDepthFirstAdapter;
import dk.aau.sw402F15.Symboltable.Symbol;
import dk.aau.sw402F15.Symboltable.SymbolArray;
import dk.aau.sw402F15.Symboltable.SymbolFunction;
import dk.aau.sw402F15.Symboltable.Type.SymbolType;
import dk.aau.sw402F15.parser.node.*;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;

import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.EmptyStackException;

/**
 * Walks the AST and emits PLC mnemonic code (instruction list) plus a symbol
 * table file. D addresses hold data words, W addresses hold bits, and H
 * addresses form an expression evaluation stack.
 */
public class CodeGenerator extends ScopeDepthFirstAdapter {
    private int jumpLabel = 0;
    private int returnlabel;
    private int nextDAddress = -2;
    private int nextWAddress = 0;
    private int nextHAddress = 0;
    private ArrayList<String> functions;
    PrintWriter instructionWriter;
    PrintWriter symbolWriter;

    /** Returns the next free D (data) address, optionally advancing the allocator. */
    public String getNextDAddress(boolean increment) {
        if (nextDAddress > 32763) throw new OutOfMemoryError();
        if (increment) return "D" + (nextDAddress += 2);
        else return "D" + nextDAddress;
    }

    /** Returns the D address immediately before the current one, without mutating state. */
    public String getPreviousDAddress() {
        // BUGFIX: removed an unused local that suggested (but never performed)
        // a different computation.
        return "D" + (nextDAddress - 2);
    }

    /** Returns the next free W (bit) address, optionally advancing the allocator. */
    public String getNextWAddress(boolean increment) {
        if (nextWAddress > 508) throw new OutOfMemoryError();
        if (increment) return "W" + ((nextWAddress += 1) * 100);
        else return "W" + nextWAddress * 100;
    }

    /** Returns the current H (stack) address and bumps the stack pointer by one slot. */
    public String stackPointer(boolean increment) {
        if (nextHAddress > 4091) throw new OutOfMemoryError();
        int currentAddress = nextHAddress;
        nextHAddress += 4;
        // NOTE(review): in the original both branches returned the same value
        // and the pointer advanced regardless of 'increment'; behavior kept,
        // but the flag's intent should be confirmed.
        return "H" + currentAddress;
    }

    /** Emits code pushing a constant or symbol reference onto the H stack. */
    public <T> void push(T value) {
        if (value.getClass() == Integer.class)
            Emit("MOVL(498) &" + value + " " + stackPointer(true), true);
        else if (value.getClass() == Float.class || value.getClass() == Double.class)
            // PLC float literals use a comma as the decimal separator
            Emit("+F(454) +0,0 +" + value.toString().replace(".", ",") + " " + stackPointer(true), true);
        else if (value.getClass() == String.class)
            Emit("MOVL(498) " + value + " " + stackPointer(true), true);
        else throw new ClassFormatError();
    }

    /** Pops the top H-stack slot and returns its address. */
    public String pop() {
        if (nextHAddress < 0) throw new EmptyStackException();
        return "H" + (nextHAddress -= 4);
    }

    /** Returns the address of the top H-stack slot without popping. */
    public String peek() {
        return "H" + nextHAddress;
    }

    /**
     * Opens the output writers and emits the program prologue: an init section
     * run on the first cycle and the entry call of the run subroutine.
     */
    public CodeGenerator(Scope scope, ArrayList functions) {
        super(scope, scope);
        this.functions = functions;

        try {
            // create writer instances
            instructionWriter = new PrintWriter("InstructionList.txt", "UTF-8");
            symbolWriter = new PrintWriter("SymbolList.txt", "UTF-8");

            // here we call the init method
            Emit("LD P_First_Cycle", true);
            // reset all addresses
            Emit("SSET(630) " + getNextDAddress(true) + " &32767", true);
            Emit("SSET(630) " + stackPointer(true) + " &1535", true);
            Emit("SBS(091) 0", true);

            // here we call the run Method
            Emit("LD P_On", true);
            Emit("SBS(091) 1", true);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void outStart(Start node){
        Emit("END(001)", true);
        instructionWriter.close();
        symbolWriter.close();
    }

    @Override
    public void caseAAssignmentExpr(AAssignmentExpr node) {
        inAAssignmentExpr(node);
        // Evaluate the RHS first so its value is on the stack for outA...
        if(node.getRight() != null) {
            node.getRight().apply(this);
        }
        if(node.getLeft() != null) {
            node.getLeft().apply(this);
        }
        outAAssignmentExpr(node);
    }

    @Override
    public void outAAssignmentExpr(AAssignmentExpr node) {
        super.outAAssignmentExpr(node);
        if(node.getLeft().getClass() == AArrayExpr.class){
            // Array targets are written through the indirect address in D
            Emit("MOVL(498) " + pop() + " @" + getNextDAddress(false), true);
        } else if (!(node.getLeft() instanceof APortOutputExpr)){
            Emit("MOVL(498) " + pop() + " " + node.getLeft(), true);
        }
    }

    @Override
    public void caseAArrayExpr(AArrayExpr node){
        // Lookup validates that the symbol exists and is an array
        SymbolArray symbol = (SymbolArray) currentScope.getSymbolOrThrow(node.getName().getText(), node.getName());
        push(node.getName().getText());
        node.getExpr().apply(this);
        Emit("*(420) " + pop() + " &2 " + stackPointer(true), true);
        Emit("+(400) " + pop() + " " + pop() + " " + stackPointer(true), true); // offset + start = location
        Emit("MOVL(498) " + pop() + " " + getNextDAddress(false), true);

        Node parent = node.parent();
        if (parent.getClass() != AAssignmentExpr.class || ((AAssignmentExpr)parent).getLeft() != node) {
            // Reads push the dereferenced element value back on the stack
            Emit("MOVL(498) @" + getNextDAddress(false) + " " + stackPointer(true), true);
        }
    }

    @Override
    public void caseADeclaration(ADeclaration node){
        super.caseADeclaration(node);
    }

    @Override
    public void outABreakStatement(ABreakStatement node){
        super.outABreakStatement(node);
        Emit("BREAK(514)", true);
    }

    @Override
    public void outACaseStatement(ACaseStatement node){
        //throw new NotImplementedException();
    }

    @Override
    public void outAIncrementExpr(AIncrementExpr node) {
        super.outAIncrementExpr(node);
        Emit("++(590) " + node.getName(), true);
    }

    @Override
    public void outADecrementExpr(ADecrementExpr node) {
        super.outADecrementExpr(node);
        Emit("--(592) " + node.getName(), true);
    }

    @Override
    public void outACompareAndExpr(ACompareAndExpr node){
        super.outACompareAndExpr(node);
        Emit("ANDW(034) D" + (nextDAddress - 4) + " " + getNextDAddress(false) + " " + getNextDAddress(true), true);
    }

    @Override
    public void outACompareOrExpr(ACompareOrExpr node){
        super.outACompareOrExpr(node);
        Emit("ORW(035) D" + (nextDAddress - 4) + " " + getNextDAddress(false) + " " + getNextDAddress(true), true);
    }

    @Override
    public void outACompareEqualExpr(ACompareEqualExpr node){
        super.outACompareEqualExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        Emit("AND=(300)" + " " + arg2 + " " + arg1, true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outACompareGreaterExpr(ACompareGreaterExpr node){
        super.outACompareGreaterExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        Emit("AND>(320)" + " " + arg2 + " " + arg1, true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outACompareGreaterOrEqualExpr(ACompareGreaterOrEqualExpr node){
        super.outACompareGreaterOrEqualExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        Emit("AND>=(325)" + " " + arg2 + " " + arg1, true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outACompareLessExpr(ACompareLessExpr node){
        super.outACompareLessExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        if (node.parent().getClass() == AWhileStatement.class) {
            // While conditions also need the inverse test to clear the flag
            Emit("AND<(310)" + " " + arg1 + " " + arg2, true);
            Emit("SET " + getNextWAddress(false), true);
            Emit("AND<(310)" + " " + arg2 + " " + arg1, true);
            Emit("RSET " + getNextWAddress(false), true);
        } else {
            Emit("AND<(310)" + " " + arg2 + " " + arg1, true);
            Emit("SET " + getNextWAddress(false), true);
        }
    }

    @Override
    public void outACompareLessOrEqualExpr(ACompareLessOrEqualExpr node) {
        super.outACompareLessOrEqualExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        Emit("AND<=(315)" + " " + arg2 + " " + arg1, true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outACompareNotEqualExpr(ACompareNotEqualExpr node) {
        super.outACompareNotEqualExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        Emit("AND<>(305)" + " " + arg2 + " " + arg1, true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outAContinueStatement(AContinueStatement node){
        //throw new NotImplementedException();
    }

    /** Dispatches declarations to the matching declare*/
    @Override
    public void outADeclaration(ADeclaration node) {
        Symbol symbol = currentScope.getSymbolOrThrow(node.getName().getText(), node);
        if (symbol.getType().equals(SymbolType.Boolean())) {
            if (node.getExpr() != null){
                declareAndAssignBool(node.getName().getText(), pop());
            } else {
                declareBool(node.getName().getText());
            }
        } else if (symbol.getType().equals(SymbolType.Int())) {
            if (node.getExpr() != null){
                declareAndAssignInt(node.getName().getText(), pop());
            } else {
                declareInt(node.getName().getText());
            }
        } else if (symbol.getType().equals(SymbolType.Char())) {
            throw new NotImplementedException();
        } else if (symbol.getType().equals(SymbolType.Decimal())) {
            if (node.getExpr() != null){
                declareAndAssignDecimal(node.getName().getText(), pop());
            } else {
                declareDecimal(node.getName().getText());
            }
        } else if (symbol.getType().equals(SymbolType.Timer())) {
            // NOTE(review): a second, unreachable Timer() branch that threw
            // NotImplementedException was removed here.
            if (node.getExpr() != null){
                declareAndAssignTimer(node.getName().getText(), pop());
            } else {
                declareTimer(node.getName().getText());
            }
        } else if (symbol.getType().equals(SymbolType.Array())) {
            declareArray((SymbolArray)symbol);
        } else if (symbol.getType().equals(SymbolType.Void())){
            // Method is a void function
            throw new NotImplementedException();
        } else if (symbol.getType().equals(SymbolType.Type.Function)) {
            throw new NotImplementedException();
        } else if (symbol.getType().equals(SymbolType.Type.Struct)) {
            throw new NotImplementedException();
        } else {
            // throw new RuntimeException(); // TODO Need new Exception. Pretty unknown error though
        }
    }

    /** Reserves a run of D words for an array and emits its base pointer. */
    private void declareArray(SymbolArray symbol){
        ADeclaration node = (ADeclaration) symbol.getNode();
        AArrayDefinition array = (AArrayDefinition) node.getArray();
        int size = Integer.parseInt(array.getNumber().getText());

        int address = nextDAddress;
        for (int i = 0; i < size; i++){
            getNextDAddress(true);
        }

        String name = symbol.getName();
        Emit(name + "\tINT\t D"+ address + "\t\t0\t", false);
        Emit("MOVL(498) &" + (address + 2) + " " + name, true);
    }

    private void declareInt(String name){
        // get next free address in symbolList
        String address = getNextDAddress(true);
        // Declare
        Emit(name + "\tINT\t" + address + "\t\t0\t", false);
    }

    private void declareAndAssignInt(String name, String value){
        // get next free address in symbolList
        String address = getNextDAddress(true);
        // Declare
        Emit(name + "\tINT\t" + address + "\t\t0\t", false);
        // Assign
        Emit("MOVL(498) " + value + " " + name, true);
    }

    private void declareBool(String name){
        // get next free address in symbolList
        String address = getNextWAddress(true);
        // Declare
        Emit(name + "\tBOOL\t" + address + "\t\t0\t", false);
    }

    private void declareAndAssignBool(String name, String value){
        // get next free address in symbolList
        String address = getNextWAddress(true);
        // Declare
        Emit(name + "\tBOOL\t" + address + "\t\t0\t", false);
        // assign
        // NOTE(review): these look like instructions yet are written with the
        // symbol-list flag (false) — behavior preserved; confirm intent.
        Emit("LD P_On",false);
        Emit("OUT TR0",false);
        Emit("AND " + value,false);
        Emit("SET " + name,false);
        Emit("LD TR0",false);
        Emit("ANDNOT " + value,false);
        Emit("RSET " + name,false);
    }

    private void declareDecimal(String name){
        // get next free address in symbolList
        String address = getNextDAddress(true);
        // Declare
        Emit(name + "\tREAL\t" + address + "\t\t0\t", false);
    }

    private void declareAndAssignDecimal(String name, String value){
        // get next free address in symbolList
        String address = getNextDAddress(true);
        // Declare
        Emit(name + "\tREAL\t" + address + "\t\t0\t", false);
        // Assign
        Emit("MOVL(498) " + value + " " + name, true);
    }

    private void declareTimer(String name){
        throw new NotImplementedException();
        // get next free address in symbolList
        //String address = getNextDAddress(true);
        //Emit(node.getName().getText() + "\tTIMER\tD" + getNextDAddress(true) + "\t\t0\t", false);
        // Declare
        //Emit(name + "\tBOOL\t" + address + ".00\t\t0\t", false);
        // Assign
        //Emit("MOVL(498) &" + value + " " + address, true);
    }

    private void declareAndAssignTimer(String name, String value){
        throw new NotImplementedException();
    }

    @Override
    public void outADefaultStatement(ADefaultStatement node){
        //throw new NotImplementedException();
    }

    @Override
    public void inAFunctionCallExpr(AFunctionCallExpr node){
        SymbolFunction function = (SymbolFunction) currentScope.getSymbolOrThrow(node.getName().getText(), node);
        if (function.getReturnType().equals(SymbolType.Type.Void)) {
            Emit("LD P_On", true);
            Emit("SBS(091) " + functions.indexOf(node.getName().getText()), true);
        } else {
            Emit("MCRO(099) " + functions.indexOf(node.getName().getText()) + " " + getNextDAddress(true) + " " + pop(), true);
        }
        //Emit("SBS(091) " + getFunctionNumber(true), true);
    }

    @Override
    public void inAFunctionRootDeclaration(AFunctionRootDeclaration node){
        super.inAFunctionRootDeclaration(node);
        Emit("SBN(092) " + functions.indexOf(node.getName().getText()), true);
        if (!node.getStatements().isEmpty())
            Emit("LD P_First_Cycle", true);
        //returnlabel = getNextJump();
    }

    @Override
    public void outAFunctionRootDeclaration(AFunctionRootDeclaration node) {
        super.outAFunctionRootDeclaration(node);
        //Emit("JME(005) #" + returnlabel, true);
        Emit("RET(093)", true);
        //Emit("END(001)", true);
    }

    @Override
    public void outAIdentifierExpr(AIdentifierExpr node){
        push(node.getName().getText());
    }

    @Override
    public void outAMemberExpr(AMemberExpr node){
        //throw new NotImplementedException();
    }

    @Override
    public void outANegationExpr(ANegationExpr node){
        super.outANegationExpr(node);
        Emit("NOT " + getNextDAddress(false), true);
    }

    @Override
    public void outAPortAnalogInputExpr(APortAnalogInputExpr node){
        //throw new NotImplementedException();
    }

    @Override
    public void outAPortAnalogOutputExpr(APortAnalogOutputExpr node){
        //throw new NotImplementedException();
    }

    @Override
    public void outAPortInputExpr(APortInputExpr node){
        //throw new NotImplementedException();
    }

    @Override
    public void outAPortOutputExpr(APortOutputExpr node){
        super.outAPortOutputExpr(node);
        Emit("LD " + getNextWAddress(false), true);
        Emit("OUT " + node.getExpr(), true);
    }

    @Override
    public void outAReturnStatement(AReturnStatement node){
        super.outAReturnStatement(node);
        //Emit("JMP(004) #" + returnlabel, true);
        //Emit("RET(093)", true);
    }

    @Override
    public void outASwitchStatement(ASwitchStatement node){
        //throw new NotImplementedException();
    }

    @Override
    public void outATrueExpr(ATrueExpr node){
        super.outATrueExpr(node);
        Emit("MOVL(498) #1 " + getNextDAddress(true), true);
        Emit("SET " + getNextWAddress(true), true);
    }

    @Override
    public void outAFalseExpr(AFalseExpr node) {
        super.outAFalseExpr(node);
        Emit("MOVL(498) #0 " + getNextDAddress(true), true);
        Emit("RSET " + getNextWAddress(true), true);
    }

    @Override
    public void outATypeCastExpr(ATypeCastExpr node){
        //throw new NotImplementedException();
    }

    @Override
    public void caseABranchStatement(ABranchStatement node) {
        //Do not call super as this function handles calls of the child classes
        if (node.getRight() != null) {
            // If - else statement
            int ifLabel = getNextJump();
            int elseLabel = getNextJump();

            node.getCondition().apply(this);
            Emit("CJP(510) #" + ifLabel, true);
            node.getRight().apply(this);
            Emit("JMP(004) #" + elseLabel, true);
            Emit("JME(005) #" + ifLabel, true);
            node.getLeft().apply(this);
            Emit("JME(005) #" + elseLabel, true);
        } else {
            // If statement
            int label = getNextJump();

            node.getCondition().apply(this);
            Emit("CJPN(511) #" + label, true);
            node.getLeft().apply(this);
            Emit("JME(005) #" + label, true);
        }
    }

    @Override
    public void caseASwitchStatement(ASwitchStatement node){
        //throw new NotImplementedException();
    }

    @Override
    public void caseAWhileStatement(AWhileStatement node){
        int jumpLabel = getNextJump();
        int loopLabel = getNextJump();

        //node.getCondition().apply(this);
        Emit("LDNOT " + getNextWAddress(true), true);
        Emit("JMP(004) #" + jumpLabel, true);
        node.getStatement().apply(this);
        node.getCondition().apply(this);
        Emit("JME(005) #" + jumpLabel, true);
        Emit("LD P_On", true);
    }

    @Override
    public void outAIntegerExpr(AIntegerExpr node) {
        super.outAIntegerExpr(node);
        push(Integer.parseInt(node.getIntegerLiteral().getText())); // TODO Ouch, a hack...
    }

    @Override
    public void outADecimalExpr(ADecimalExpr node) {
        super.outADecimalExpr(node);
        if (!(node.parent() instanceof APortOutputExpr))
            push(Float.parseFloat(node.getDecimalLiteral().getText()));
    }

    @Override
    public void outAAddExpr(AAddExpr node) {
        super.outAAddExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        if (node.getRight() instanceof ADecimalExpr)
            Emit("+F(454) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
        else
            Emit("+(400) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
    }

    @Override
    public void outADivExpr(ADivExpr node) {
        super.outADivExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        if (node.getRight() instanceof ADecimalExpr)
            Emit("/F(457) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
        else
            // BUGFIX: integer branch popped two extra values instead of using
            // the operands already popped into arg1/arg2.
            Emit("/(430) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
    }

    @Override
    public void outAMultiExpr(AMultiExpr node) {
        super.outAMultiExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        if (node.getRight() instanceof ADecimalExpr)
            Emit("*F(456) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
        else
            // BUGFIX: integer branch popped two extra values instead of using
            // the operands already popped into arg1/arg2.
            Emit("*(420) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
    }

    @Override
    public void outASubExpr(ASubExpr node) {
        super.outASubExpr(node);
        String arg1 = pop();
        String arg2 = pop();
        if (node.getRight() instanceof ADecimalExpr)
            Emit("-F(455) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
        else
            // BUGFIX: integer branch popped two extra values instead of using
            // the operands already popped into arg1/arg2.
            Emit("-(410) " + arg1 + " " + arg2 + " " + stackPointer(true), true);
    }

    /** Allocates the next jump label number (PLC supports at most 255). */
    private int getNextJump(){
        jumpLabel = jumpLabel + 1;
        if(jumpLabel > 255)
            throw new IndexOutOfBoundsException();
        return jumpLabel;
    }

    /** Writes a line to the instruction list or, when instruction is false, to the symbol list. */
    protected void Emit(String s, boolean instruction){
        if (instruction) {
            // Write to InstructionList, if instruction
            instructionWriter.println(s);
        } else {
            // Otherwise it's a symbol, then write to SymbolList
            symbolWriter.println(s);
        }
    }
}
package org.parboiled.transform.process; import com.github.parboiled1.grappa.annotations.WillBeFinal; import com.github.parboiled1.grappa.transform.CodeBlock; import com.github.parboiled1.grappa.transform.asm.LoadingOpcode; import com.google.common.base.Preconditions; import me.qmx.jitescript.util.CodegenUtils; import org.objectweb.asm.Type; import org.objectweb.asm.tree.AbstractInsnNode; import org.objectweb.asm.tree.FieldInsnNode; import org.objectweb.asm.tree.FieldNode; import org.objectweb.asm.tree.InsnList; import org.objectweb.asm.tree.InsnNode; import org.objectweb.asm.tree.MethodInsnNode; import org.objectweb.asm.tree.VarInsnNode; import org.parboiled.common.Factory; import org.parboiled.transform.InstructionGraphNode; import org.parboiled.transform.InstructionGroup; import org.parboiled.transform.ParserClassNode; import org.parboiled.transform.RuleMethod; import javax.annotation.Nonnull; import static org.objectweb.asm.Opcodes.DUP; import static org.objectweb.asm.Opcodes.PUTFIELD; /** * Inserts action group class instantiation code at the groups respective * placeholders. 
*/ @WillBeFinal(version = "1.1") public class RuleMethodRewriter implements RuleMethodProcessor { private RuleMethod method; private int actionNr = 0; private int varInitNr = 0; @Override public boolean appliesTo(@Nonnull final ParserClassNode classNode, @Nonnull final RuleMethod method) { Preconditions.checkNotNull(classNode, "classNode"); Preconditions.checkNotNull(method, "method"); return method.containsExplicitActions() || method.containsVars(); } @Override public void process(@Nonnull final ParserClassNode classNode, @Nonnull final RuleMethod method) throws Exception { this.method = Preconditions.checkNotNull(method, "method"); actionNr = 0; varInitNr = 0; for (final InstructionGroup group: method.getGroups()) { createNewGroupClassInstance(group); initializeFields(group); final InstructionGraphNode root = group.getRoot(); final AbstractInsnNode rootInsn = root.getInstruction(); if (root.isActionRoot()) method.instructions.remove(rootInsn); else // if (root.isVarInitRoot()) // TODO: replace with Supplier ((MethodInsnNode) rootInsn).desc = CodegenUtils.sig(void.class, Factory.class); } method.setBodyRewritten(); } private void createNewGroupClassInstance(final InstructionGroup group) { final String internalName = group.getGroupClassType().getInternalName(); final InstructionGraphNode root = group.getRoot(); final AbstractInsnNode rootInsn = root.getInstruction(); final InsnList insnList = method.instructions; final String constant = method.name + (root.isActionRoot() ? 
"_Action" + ++actionNr : "_VarInit" + ++varInitNr); final CodeBlock block = CodeBlock.newCodeBlock(); block.newobj(internalName) .dup() .ldc(constant) .invokespecial(internalName, "<init>", CodegenUtils.sig(void.class, String.class)); if (root.isActionRoot() && method.hasSkipActionsInPredicatesAnnotation()) block.dup().invokevirtual(internalName, "setSkipInPredicates", CodegenUtils.sig(void.class)); insnList.insertBefore(rootInsn, block.getInstructionList()); } private void initializeFields(final InstructionGroup group) { final String internalName = group.getGroupClassType().getInternalName(); InsnList insnList; AbstractInsnNode rootInsn; int opcode; VarInsnNode varNode; FieldInsnNode fieldNode; for (final FieldNode field: group.getFields()) { insnList = method.instructions; rootInsn = group.getRoot().getInstruction(); // TODO: replace with method in CodeBlock? opcode = LoadingOpcode.forType((Type) field.value); varNode = new VarInsnNode(opcode, field.access); fieldNode = new FieldInsnNode(PUTFIELD, internalName, field.name, field.desc); insnList.insertBefore(rootInsn, new InsnNode(DUP)); // the FieldNodes access and value members have been reused for the // var index / Type respectively! insnList.insertBefore(rootInsn, varNode); insnList.insertBefore(rootInsn, fieldNode); } } }
package start; import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.image.Image; import javafx.stage.Stage; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; public class Main extends Application { @Override public void start(Stage stage) throws Exception { Parent root = FXMLLoader.load(getClass().getResource("sketch.fxml")); Scene scene = new Scene(root, 1200, 700); scene.getStylesheets().add("minimalistStyle.css"); // scene.getStylesheets().add("gruvjan.css"); stage.setTitle("Conan"); stage.getIcons().add(new Image("icon.png")); stage.setScene(scene); stage.show(); } public static void main(String[] args) { launch(args); } }
package us.kbase.workspace.test.kbase; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static us.kbase.common.test.TestCommon.set; import static us.kbase.common.test.controllers.ControllerCommon.makeTempDirs; import java.io.BufferedWriter; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.commons.io.FileUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.collect.ImmutableMap; import nl.jqno.equalsverifier.EqualsVerifier; import software.amazon.awssdk.regions.Region; import us.kbase.common.test.MapBuilder; import us.kbase.common.test.TestCommon; import us.kbase.workspace.kbase.BackendType; import us.kbase.workspace.kbase.KBaseWorkspaceConfig; import us.kbase.workspace.kbase.KBaseWorkspaceConfig.KBaseWorkspaceConfigException; import us.kbase.workspace.kbase.KBaseWorkspaceConfig.ListenerConfig; public class KBaseWorkspaceConfigTest { private final static String CI_SERV = "https://ci.kbase.us/services/"; private final static String AUTH_LEGACY_URL = CI_SERV + "auth/api/legacy/KBase/Sessions/Login"; private final static String IGNORE_HANDLE = "Ignoring Handle Service config. 
Objects with handle IDs will fail typechecking."; private final static String MISSING_PARAM = "Must provide param %s in config file"; private static Path TEMP_DIR; @BeforeClass public static void setUpClass() throws Exception { TEMP_DIR = makeTempDirs( Paths.get(TestCommon.getTempDir()), KBaseWorkspaceConfigTest.class.getSimpleName(), Collections.emptyList()); } @AfterClass public static void tearDownClass() throws Exception { if (TEMP_DIR != null && TestCommon.getDeleteTempFiles()) { FileUtils.deleteDirectory(TEMP_DIR.toFile()); } } private <T> List<T> list(@SuppressWarnings("unchecked") T... things) { return Arrays.asList(things); } @Test public void equals() throws Exception { EqualsVerifier.forClass(KBaseWorkspaceConfig.class).usingGetClass().verify(); EqualsVerifier.forClass(ListenerConfig.class).usingGetClass().verify(); } @Test public void listenerConfig() throws Exception { final ListenerConfig lc = new ListenerConfig("class", ImmutableMap.of("foo", "bar")); assertThat("incorrect class", lc.getListenerClass(), is("class")); assertThat("incorrect config", lc.getConfig(), is(ImmutableMap.of("foo", "bar"))); } @Test public void listenerConfigImmutable() throws Exception { final Map<String, String> cfg = new HashMap<>(); cfg.put("foo", "bar"); final ListenerConfig lc = new ListenerConfig("class", cfg); try { lc.getConfig().put("baz", "bat"); fail("expected exception"); } catch (UnsupportedOperationException e) { // test passes. 
}
		cfg.put("baz", "bat");
		assertThat("incorrect config", lc.getConfig(), is(ImmutableMap.of("foo", "bar")));
	}

	/** A null or whitespace-only class name, or a null config map, must be rejected. */
	@Test
	public void listenerConstructFailBadArgs() throws Exception {
		final Map<String, String> mt = Collections.emptyMap();
		listenerConstructFail(null, mt, new IllegalArgumentException(
				"listenerClass cannot be null or whitespace only"));
		listenerConstructFail(" \t ", mt, new IllegalArgumentException(
				"listenerClass cannot be null or whitespace only"));
		listenerConstructFail("l", null, new NullPointerException("config"));
	}

	// Expects ListenerConfig construction to throw an exception matching `expected`.
	private void listenerConstructFail(
			final String clazz,
			final Map<String, String> config,
			final Exception expected) {
		try {
			new ListenerConfig(clazz, config);
			fail("expected exception");
		} catch (Exception got) {
			TestCommon.assertExceptionCorrect(got, expected);
		}
	}

	/** A null config map must be rejected by the map-based constructor. */
	@Test
	public void constructFailNullMap() throws Exception {
		try {
			new KBaseWorkspaceConfig(null);
			fail("expected exception");
		} catch (Exception got) {
			TestCommon.assertExceptionCorrect(got, new NullPointerException("config"));
		}
	}

	/** Loading from a missing file or a file lacking the requested section must fail. */
	@Test
	public void loadFromFileFail() throws Exception {
		final Path fakefile = Paths.get("superfakefile_princesspeachismyirlgf");
		failLoadFromFile(fakefile, "thing", new KBaseWorkspaceConfigException(String.format(
				"Could not read from configuration file superfakefile_princesspeachismyirlgf: " +
				"%s (No such file or directory)", fakefile.toAbsolutePath())));
		final Path notsofakefile = TEMP_DIR.resolve("imnotfakeimnotfake");
		Files.write(notsofakefile, list(""), StandardCharsets.UTF_8);
		failLoadFromFile(notsofakefile, "thing", new KBaseWorkspaceConfigException(String.format(
				"The configuration file %s/imnotfakeimnotfake has no section thing", TEMP_DIR)));
		final Path realfile = TEMP_DIR.resolve("keepinitreal");
		Files.write(realfile, list("[Workspace]", "mongodb-host=foo"), StandardCharsets.UTF_8);
		failLoadFromFile(realfile, null, new KBaseWorkspaceConfigException(String.format(
				"The configuration file %s/keepinitreal has no section null", TEMP_DIR)));
		failLoadFromFile(realfile, "Workspacey", new KBaseWorkspaceConfigException(String.format(
				"The configuration file %s/keepinitreal has no section Workspacey", TEMP_DIR)));
	}

	// Expects file-based construction to throw an exception matching `expected`.
	private void failLoadFromFile(
			final Path file,
			final String section,
			final Exception expected) {
		try {
			new KBaseWorkspaceConfig(file, section);
			fail("expected exception");
		} catch (Exception got) {
			TestCommon.assertExceptionCorrect(got, expected);
		}
	}

	/**
	 * Expected getter values for a KBaseWorkspaceConfig, built fluently per test.
	 * Defaults mirror the config class's defaults for absent parameters.
	 */
	private static class ExpectedConfig {
		public URL auth2URL = null;
		public URL authURL = null;
		public Set<String> adminReadOnlyRoles = set();
		public Set<String> adminRoles = set();
		public String workspaceAdmin = null;
		public String mongohost = null;
		public String mongoDBname = null;
		public String typeDBname = null;
		public String mongoPwd = null;
		public String mongoUser = null;
		public List<ListenerConfig> listenerConfigs = Collections.emptyList();
		public BackendType backendType = null;
		public URL backendURL = null;
		public String backendUser = null;
		public String backendToken = null;
		// renamed from the misspelled "backendContiner"
		public String backendContainer = null;
		public Region backendRegion = null;
		public boolean backendTrustAllCerts = false;
		public URL handleServiceURL = null;
		public String handleServiceToken = null;
		public boolean ignoreHandleService = false;
		public URL bytestreamURL = null;
		public String bytestreamUser = null;
		public String bytestreamToken = null;
		public URL sampleServiceURL = null;
		public String sampleServiceToken = null;
		public String tempDir = null;
		public List<String> infoMessages = Collections.emptyList();
		public String paramReport = null;
		public boolean hasErrors = false;
		public List<String> errors = Collections.emptyList();

		public ExpectedConfig withAdminReadOnlyRoles(final Set<String> adminReadOnlyRoles) {
			this.adminReadOnlyRoles = adminReadOnlyRoles;
			return this;
		}

		public ExpectedConfig withAdminRoles(final Set<String> adminRoles) {
			this.adminRoles = adminRoles;
			return this;
		}

		public ExpectedConfig withAuth2URL(final URL auth2url) {
			auth2URL = auth2url;
			return this;
		}

		public ExpectedConfig withAuthURL(final URL authURL) {
			this.authURL = authURL;
			return this;
		}

		public ExpectedConfig withBackendToken(final String backendToken) {
			this.backendToken = backendToken;
			return this;
		}

		public ExpectedConfig withBackendType(final BackendType backendType) {
			this.backendType = backendType;
			return this;
		}

		public ExpectedConfig withBackendURL(final URL backendURL) {
			this.backendURL = backendURL;
			return this;
		}

		public ExpectedConfig withBackendUser(final String backendUser) {
			this.backendUser = backendUser;
			return this;
		}

		public ExpectedConfig withBackendContainer(final String backendContainer) {
			this.backendContainer = backendContainer;
			return this;
		}

		public ExpectedConfig withBackendRegion(final Region backendRegion) {
			this.backendRegion = backendRegion;
			return this;
		}

		public ExpectedConfig withBackendTrustAllCerts(final boolean backendTrustAllCerts) {
			this.backendTrustAllCerts = backendTrustAllCerts;
			return this;
		}

		public ExpectedConfig withMongoDBname(final String mongoDBname) {
			this.mongoDBname = mongoDBname;
			return this;
		}

		public ExpectedConfig withErrors(final List<String> errors) {
			this.errors = errors;
			return this;
		}

		public ExpectedConfig withHandleServiceToken(final String handleServiceToken) {
			this.handleServiceToken = handleServiceToken;
			return this;
		}

		public ExpectedConfig withHandleServiceURL(final URL handleServiceURL) {
			this.handleServiceURL = handleServiceURL;
			return this;
		}

		public ExpectedConfig withMongohost(final String mongohost) {
			this.mongohost = mongohost;
			return this;
		}

		public ExpectedConfig withInfoMessages(final List<String> infoMessages) {
			this.infoMessages = infoMessages;
			return this;
		}

		public ExpectedConfig withListenerConfigs(final List<ListenerConfig> listenerConfigs) {
			this.listenerConfigs = listenerConfigs;
			return this;
		}

		public ExpectedConfig withMongoPwd(final String mongoPwd) {
			this.mongoPwd = mongoPwd;
			return this;
		}

		public ExpectedConfig withMongoUser(final String mongoUser) {
			this.mongoUser = mongoUser;
			return this;
		}

		public ExpectedConfig withParamReport(final String paramReport) {
			this.paramReport = paramReport;
			return this;
		}

		public ExpectedConfig withBytestreamToken(final String bytestreamToken) {
			this.bytestreamToken = bytestreamToken;
			return this;
		}

		public ExpectedConfig withBytestreamURL(final URL bytestreamURL) {
			this.bytestreamURL = bytestreamURL;
			return this;
		}

		public ExpectedConfig withBytestreamUser(final String bytestreamUser) {
			this.bytestreamUser = bytestreamUser;
			return this;
		}

		public ExpectedConfig withSampleServiceToken(final String sampleServiceToken) {
			this.sampleServiceToken = sampleServiceToken;
			return this;
		}

		public ExpectedConfig withSampleServiceURL(final URL sampleServiceURL) {
			this.sampleServiceURL = sampleServiceURL;
			return this;
		}

		public ExpectedConfig withTempDir(final String tempDir) {
			this.tempDir = tempDir;
			return this;
		}

		public ExpectedConfig withTypeDBname(final String typeDBname) {
			this.typeDBname = typeDBname;
			return this;
		}

		public ExpectedConfig withWorkspaceAdmin(final String workspaceAdmin) {
			this.workspaceAdmin = workspaceAdmin;
			return this;
		}

		public ExpectedConfig withHasErrors(final boolean hasErrors) {
			this.hasErrors = hasErrors;
			return this;
		}

		public ExpectedConfig withIgnoreHandleService(final boolean ignoreHandleService) {
			this.ignoreHandleService = ignoreHandleService;
			return this;
		}
	}

	/**
	 * Builds a config both from the raw map and from an ini file containing the same
	 * entries (null values written as empty strings), and asserts every getter on both
	 * resulting configs against the expected values.
	 */
	private void assertConfigCorrect(final Map<String, String> cfg, final ExpectedConfig exp)
			throws Exception {
		final Path tempfile = Files.createTempFile(TEMP_DIR, TestCommon.getMethodName(2), "");
		try (final BufferedWriter w = Files.newBufferedWriter(tempfile)) {
			w.write("[Workspace]\n");
			for (final Entry<String, String> e: cfg.entrySet()) {
				w.write(String.format(
						"%s=%s\n", e.getKey(), e.getValue() == null ? "" : e.getValue()));
			}
		}
		for (final KBaseWorkspaceConfig kwc: list(
				new KBaseWorkspaceConfig(cfg),
				new KBaseWorkspaceConfig(tempfile, "Workspace")
				)) {
			assertThat("incorrect admin read roles",
					kwc.getAdminReadOnlyRoles(), is(exp.adminReadOnlyRoles));
			assertThat("incorrect admin roles", kwc.getAdminRoles(), is(exp.adminRoles));
			assertThat("incorrect auth2 url", kwc.getAuth2URL(), is(exp.auth2URL));
			assertThat("incorrect auth url", kwc.getAuthURL(), is(exp.authURL));
			assertThat("incorrect backend token", kwc.getBackendToken(), is(exp.backendToken));
			assertThat("incorrect backend type", kwc.getBackendType(), is(exp.backendType));
			assertThat("incorrect backend url", kwc.getBackendURL(), is(exp.backendURL));
			assertThat("incorrect backend user", kwc.getBackendUser(), is(exp.backendUser));
			assertThat("incorrect backend container",
					kwc.getBackendContainer(), is(exp.backendContainer));
			assertThat("incorrect backend region",
					kwc.getBackendRegion(), is(exp.backendRegion));
			assertThat("incorrect backend trust certs",
					kwc.getBackendTrustAllCerts(), is(exp.backendTrustAllCerts));
			assertThat("incorrect db", kwc.getDBname(), is(exp.mongoDBname));
			assertThat("incorrect errors", kwc.getErrors(), is(exp.errors));
			assertThat("incorrect srvc token",
					kwc.getHandleServiceToken(), is(exp.handleServiceToken));
			assertThat("incorrect srvc url",
					kwc.getHandleServiceURL(), is(exp.handleServiceURL));
			assertThat("incorrect host", kwc.getHost(), is(exp.mongohost));
			assertThat("incorrect info msgs", kwc.getInfoMessages(), is(exp.infoMessages));
			assertThat("incorrect listeners",
					kwc.getListenerConfigs(), is(exp.listenerConfigs));
			assertThat("incorrect mongo pwd", kwc.getMongoPassword(), is(exp.mongoPwd));
			assertThat("incorrect mongo user", kwc.getMongoUser(), is(exp.mongoUser));
			assertThat("incorrect param report", kwc.getParamReport(), is(exp.paramReport));
			assertThat("incorrect bytestream token",
					kwc.getBytestreamToken(), is(exp.bytestreamToken));
			assertThat("incorrect bytestream url",
					kwc.getBytestreamURL(), is(exp.bytestreamURL));
			assertThat("incorrect bytestream user",
					kwc.getBytestreamUser(), is(exp.bytestreamUser));
			assertThat("incorrect sample token",
					kwc.getSampleServiceToken(), is(exp.sampleServiceToken));
			assertThat("incorrect sample url",
					kwc.getSampleServiceURL(), is(exp.sampleServiceURL));
			assertThat("incorrect temp dir", kwc.getTempDir(), is(exp.tempDir));
			assertThat("incorrect type db", kwc.getTypeDBName(), is(exp.typeDBname));
			assertThat("incorrect ws admin",
					kwc.getWorkspaceAdmin(), is(exp.workspaceAdmin));
			assertThat("incorrect has err", kwc.hasErrors(), is(exp.hasErrors));
			assertThat("incorrect ignore hs",
					kwc.ignoreHandleService(), is(exp.ignoreHandleService));
		}
	}

	/** A minimal valid config should produce a config with defaults for everything else. */
	@Test
	public void configMinimal() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", "somehost")
				.with("mongodb-database", "somedb")
				.with("mongodb-type-database", "typedb")
				.with("backend-type", " GridFS ")
				.with("temp-dir", "temp")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "blearg")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=GridFS\n";
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withBackendType(BackendType.GridFS)
						.withInfoMessages(Arrays.asList(IGNORE_HANDLE))
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withIgnoreHandleService(true)
				);
	}

	/** Every parameter supplied (S3 backend); values should be trimmed. */
	@Test
	public void configMaximalS3() throws Exception {
		// also tests full sample service config
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " typedb ")
				.with("temp-dir", " temp ")
				.with("auth-service-url", " " + AUTH_LEGACY_URL + " ")
				.with("auth2-service-url", " " + CI_SERV + "auth ")
				.with("mongodb-user", " muser ")
				.with("mongodb-pwd", " mpwd ")
				.with("ws-admin", " wsadminuser ")
				.with("auth2-ws-admin-read-only-roles", " role1, , role2 , ")
				.with("auth2-ws-admin-full-roles", " role3, , role4 , ")
				.with("backend-type", " S3 ")
				.with("backend-url", " http://localhost:34567 ")
				.with("backend-user", " someuser ")
				.with("backend-token", " token token token ")
				.with("backend-container", " mahbukkit ")
				.with("backend-region", " a-lovely-region ")
				.with("backend-trust-all-ssl-certificates", " true ")
				.with("handle-service-token", " hstoken ")
				.with("handle-manager-token", " hmtoken ") // test service takes precedence
				.with("handle-manager-url", " " + CI_SERV + "handle_mngr ")
				.with("handle-service-url", " " + CI_SERV + "handle_service ")
				.with("bytestream-url", " " + CI_SERV + "shock-api2 ")
				.with("bytestream-user", " otheruser ")
				.with("bytestream-token", " token token ")
				.with("sample-service-url", " " + CI_SERV + "sample_service2 ")
				.with("sample-service-administrator-token", " sstoken2 ")
				.with("listeners", "listener1, , listener2 , ")
				.with("listener-listener1-class", " us.kbase.MyListener ")
				.with("listener-listener1-config-key1", "value1")
				.with("listener-listener1-config-key2", "value2")
				.with("listener-listener2-class", " us.kbase.MyListener2 ")
				.with("listener-listener2-config-key1", "value3")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"mongodb-user=muser\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"auth2-ws-admin-read-only-roles=role1, , role2 ,\n" +
				"auth2-ws-admin-full-roles=role3, , role4 ,\n" +
				"backend-type=S3\n" +
				"backend-url=http://localhost:34567\n" +
				"backend-user=someuser\n" +
				"backend-region=a-lovely-region\n" +
				"backend-container=mahbukkit\n" +
				"backend-trust-all-ssl-certificates=true\n" +
				"handle-service-url=" + CI_SERV + "handle_service\n" +
				"bytestream-url=" + CI_SERV + "shock-api2\n" +
				"bytestream-user=otheruser\n" +
				"sample-service-url=" + CI_SERV + "sample_service2\n" +
				"mongodb-pwd=[redacted for your safety and comfort]\n" +
				"listeners=us.kbase.MyListener,us.kbase.MyListener2\n";
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withAdminReadOnlyRoles(set("role1", "role2"))
						.withAdminRoles(set("role3", "role4"))
						.withWorkspaceAdmin("wsadminuser")
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withMongoUser("muser")
						.withMongoPwd("mpwd")
						.withListenerConfigs(Arrays.asList(
								new ListenerConfig("us.kbase.MyListener",
										ImmutableMap.of("key1", "value1", "key2", "value2")),
								new ListenerConfig("us.kbase.MyListener2",
										ImmutableMap.of("key1", "value3"))))
						.withBackendType(BackendType.S3)
						.withBackendURL(new URL("http://localhost:34567"))
						.withBackendUser("someuser")
						.withBackendToken("token token token")
						.withBackendContainer("mahbukkit")
						.withBackendRegion(Region.of("a-lovely-region"))
						.withBackendTrustAllCerts(true)
						.withHandleServiceURL(new URL(CI_SERV + "handle_service"))
						.withHandleServiceToken("hstoken")
						.withBytestreamURL(new URL(CI_SERV + "shock-api2"))
						.withBytestreamUser("otheruser")
						.withBytestreamToken("token token")
						.withSampleServiceURL(new URL(CI_SERV + "sample_service2"))
						.withSampleServiceToken("sstoken2")
						.withTempDir("temp")
						.withParamReport(paramReport)
				);
	}

	/** Whitespace-only optional values should be treated as absent. */
	@Test
	public void configWithWhitespace() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", "somehost")
				.with("mongodb-database", "somedb")
				.with("mongodb-type-database", " typedb ")
				.with("temp-dir", "temp")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("mongodb-user", " \t ")
				.with("mongodb-pwd", " \t ")
				.with("ws-admin", " \t ")
				.with("ignore-handle-service", " \t ")
				.with("auth2-ws-admin-read-only-roles", " \t ")
				.with("auth2-ws-admin-full-roles", " \t ")
				.with("backend-type", " GridFS ")
				.with("backend-token", " \t ")
				.with("backend-user", " \t ")
				.with("backend-url", " \t ")
				.with("backend-container", " \t ")
				.with("backend-trust-all-ssl-certificates", " \t ")
				.with("backend-region", " \t ")
				.with("handle-manager-token", " hmtoken ") // test that backwards compat ok
				.with("handle-service-url", CI_SERV + "handle_service")
				.with("bytestream-token", " \t ")
				.with("bytestream-user", " \t ")
				.with("bytestream-url", " \t ")
				.with("sample-service-url", " " + CI_SERV + "sample_service3 ")
				.with("sample-service-administrator-token", " sstoken3 ")
				.with("listeners", " \t ")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=GridFS\n" +
				"handle-service-url=" + CI_SERV + "handle_service\n" +
				"sample-service-url=" + CI_SERV + "sample_service3\n";
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withBackendType(BackendType.GridFS)
						.withHandleServiceURL(new URL(CI_SERV + "handle_service"))
						.withHandleServiceToken("hmtoken")
						.withSampleServiceURL(new URL(CI_SERV + "sample_service3"))
						.withSampleServiceToken("sstoken3")
						.withParamReport(paramReport)
						.withTempDir("temp")
				);
	}

	/** All returned collections must be unmodifiable. */
	@Test
	public void immutable() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", "somehost")
				.with("mongodb-database", "somedb")
				.with("temp-dir", "temp")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "true")
				.with("auth2-ws-admin-read-only-roles", " r1 \t ")
				.with("auth2-ws-admin-full-roles", " \t r3 ")
				.with("listeners", "listener1, , , ")
				.with("listener-listener1-class", " us.kbase.MyListener ")
				.build();
		final KBaseWorkspaceConfig kwc = new KBaseWorkspaceConfig(cfg);
		try {
			kwc.getAdminReadOnlyRoles().add("foo");
			fail("expected exception");
		} catch (UnsupportedOperationException e) {
			// test passed
		}
		try {
			kwc.getAdminRoles().add("foo");
			fail("expected exception");
		} catch (UnsupportedOperationException e) {
			// test passed
		}
		try {
			kwc.getErrors().add("foo");
			fail("expected exception");
		} catch (UnsupportedOperationException e) {
			// test passed
		}
		try {
			kwc.getInfoMessages().add("foo");
			fail("expected exception");
		} catch (UnsupportedOperationException e) {
			// test passed
		}
		try {
			kwc.getListenerConfigs().add(new ListenerConfig("c", Collections.emptyMap()));
			fail("expected exception");
		} catch (UnsupportedOperationException e) {
			// test passed
		}
	}

	/** Null required entries should be collected as errors, not thrown. */
	@Test
	public void configFailNullRequiredEntries() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", null)
				.with("mongodb-database", null)
				.with("mongodb-type-database", null)
				.with("mongodb-user", "user")
				.with("mongodb-pwd", null)
				.with("temp-dir", null)
				.with("auth-service-url", null)
				.with("auth2-service-url", null)
				.with("backend-type", null)
				.with("handle-manager-token", null)
				.with("handle-service-url", null)
				.with("sample-service-url", "https://foo.com")
				.with("sample-service-administrator-token", null)
				.with("listeners", "listener1, , listener2 , ")
				.with("listener-listener1-class", null)
				.with("listener-listener1-config-key1", "value1")
				.with("listener-listener1-config-key2", "value2")
				.with("listener-listener2-class", "us.kbase.MyListener2")
				.with("listener-listener2-config-", "value3")
				.build();
		final List<String> errors = Arrays.asList(
				String.format(MISSING_PARAM, "mongodb-host"),
				String.format(MISSING_PARAM, "mongodb-database"),
				String.format(MISSING_PARAM, "mongodb-type-database"),
				String.format(MISSING_PARAM, "temp-dir"),
				String.format(MISSING_PARAM, "backend-type"),
				String.format(MISSING_PARAM, "auth-service-url"),
				String.format(MISSING_PARAM, "auth2-service-url"),
				"If sample-service-url is supplied, sample-service-administrator-token is " +
						"required",
				"Must provide both mongodb-user and mongodb-pwd params in config file if " +
						"MongoDB authentication is to be used",
				String.format(MISSING_PARAM, "handle-service-url"),
				String.format(MISSING_PARAM, "handle-service-token"),
				"Missing listener class: listener-listener1-class",
				"Invalid listener configuration item: listener-listener2-config-");
		final String paramReport = "mongodb-user=user\nsample-service-url=https://foo.com\n";
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withParamReport(paramReport)
						.withSampleServiceURL(new URL("https://foo.com"))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** Whitespace-only required entries should produce the same errors as null entries. */
	@Test
	public void configFailWhitespaceRequiredEntries() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " \t ")
				.with("mongodb-database", " \t ")
				.with("mongodb-type-database", " \t ")
				.with("mongodb-user", " \t ")
				.with("mongodb-pwd", "pwd")
				.with("backend-type", " \t ")
				.with("temp-dir", " \t ")
				.with("auth-service-url", " \t ")
				.with("auth2-service-url", " \t ")
				.with("handle-manager-token", " \t ")
				.with("handle-service-url", " \t ")
				.with("sample-service-url", "https://foo2.com")
				.with("sample-service-administrator-token", " \t ")
				.with("listeners", "listener1, , listener2 , ")
				.with("listener-listener1-class", " \t ")
				.with("listener-listener1-config-key1", "value1")
				.with("listener-listener1-config-key2", "value2")
				.with("listener-listener2-class", "us.kbase.MyListener2")
				.with("listener-listener2-config- \t ", "value3")
				.build();
		final List<String> errors = Arrays.asList(
				String.format(MISSING_PARAM, "mongodb-host"),
				String.format(MISSING_PARAM, "mongodb-database"),
				String.format(MISSING_PARAM, "mongodb-type-database"),
				String.format(MISSING_PARAM, "temp-dir"),
				String.format(MISSING_PARAM, "backend-type"),
				String.format(MISSING_PARAM, "auth-service-url"),
				String.format(MISSING_PARAM, "auth2-service-url"),
				"If sample-service-url is supplied, sample-service-administrator-token is " +
						"required",
				"Must provide both mongodb-user and mongodb-pwd params in config file if " +
						"MongoDB authentication is to be used",
				String.format(MISSING_PARAM, "handle-service-url"),
				String.format(MISSING_PARAM, "handle-service-token"),
				"Missing listener class: listener-listener1-class",
				"Invalid listener configuration item: listener-listener2-config-");
		final String paramReport = "sample-service-url=https://foo2.com\n";
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withParamReport(paramReport)
						.withSampleServiceURL(new URL("https://foo2.com"))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** Malformed URLs should each produce an invalid-url error. */
	@Test
	public void configFailBadURLs() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " typedb ")
				.with("backend-type", "S3")
				.with("backend-token", " bet ")
				.with("backend-user", " buser ")
				.with("backend-url", " crappy ass url for backend ")
				.with("backend-container", "foo")
				.with("backend-region", " over there")
				.with("bytestream-url", " crappy ass url for shock ")
				.with("temp-dir", " temp ")
				.with("auth-service-url", " crappy ass url ")
				.with("auth2-service-url", " crappy ass url2 ")
				.with("handle-manager-token", " hmtoken ")
				.with("handle-service-url", " crappy ass url4 ")
				.with("sample-service-url", " crappy ass url5 ")
				// NOTE(review): key differs from "sample-service-administrator-token" used
				// elsewhere in this file — confirm whether that is deliberate
				.with("sample-service-administration-token", " t ")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=crappy ass url\n" +
				"auth2-service-url=crappy ass url2\n" +
				"backend-type=S3\n" +
				"backend-url=crappy ass url for backend\n" +
				"backend-user=buser\n" +
				"backend-region=over there\n" +
				"backend-container=foo\n" +
				"handle-service-url=crappy ass url4\n";
		final String err = "Invalid url for parameter %s: crappy ass url%s";
		final List<String> errors = Arrays.asList(
				String.format(err, "auth-service-url", ""),
				String.format(err, "auth2-service-url", "2"),
				String.format(err, "backend-url", " for backend"),
				String.format(err, "bytestream-url", " for shock"),
				String.format(err, "sample-service-url", "5"),
				String.format(err, "handle-service-url", "4"));
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withBackendType(BackendType.S3)
						.withBackendUser("buser")
						.withBackendToken("bet")
						.withBackendContainer("foo")
						.withBackendRegion(Region.of("over there"))
						.withHandleServiceToken("hmtoken")
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** The standard and type databases must not share a name. */
	@Test
	public void configFailDuplicateDBs() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " somedb ")
				.with("backend-type", "GridFS")
				.with("temp-dir", " temp ")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "foo")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=somedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=GridFS\n";
		final List<String> errors = Arrays.asList(
				"The parameters mongodb-database and mongodb-type-database have the same " +
						"value, somedb");
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("somedb")
						.withBackendType(BackendType.GridFS)
						.withIgnoreHandleService(true)
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withInfoMessages(Arrays.asList(IGNORE_HANDLE))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** An unknown backend type should be reported as an error. */
	@Test
	public void configFailIllegalBackendType() throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " typedb ")
				.with("backend-type", " GreedFS ")
				.with("temp-dir", " temp ")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "foo")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=GreedFS\n";
		final List<String> errors = Arrays.asList("Illegal backend type: GreedFS");
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withIgnoreHandleService(true)
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withInfoMessages(Arrays.asList(IGNORE_HANDLE))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** Supplying a bytestream url without both user and token must be an error. */
	@Test
	public void configFailBytestreamParamsMissing() throws Exception {
		configFailBytestreamParamsMissing(null, "user", "bytestream-user=user\n");
		configFailBytestreamParamsMissing(" \t ", "user", "bytestream-user=user\n");
		configFailBytestreamParamsMissing("token", null, "");
		configFailBytestreamParamsMissing("token", " \t ", "");
	}

	// Runs the bytestream missing-parameter check for one token/user combination.
	private void configFailBytestreamParamsMissing(
			String bytestreamToken,
			String bytestreamUser,
			final String paramReportLast)
			throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " typedb ")
				.with("backend-type", "GridFS")
				.with("bytestream-token", bytestreamToken)
				.with("bytestream-user", bytestreamUser)
				.with("bytestream-url", "https://foo.com")
				.with("temp-dir", " temp ")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "foo")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=GridFS\n" +
				"bytestream-url=https://foo.com\n" +
				paramReportLast;
		final List<String> errors = Arrays.asList(
				"Must provide bytestream-user and bytestream-token parameters in config file if " +
						"bytestream-url is provided");
		// normalize null / whitespace-only inputs to null, matching config behavior
		bytestreamToken = bytestreamToken == null || bytestreamToken.trim().isEmpty() ?
				null : bytestreamToken;
		bytestreamUser = bytestreamUser == null || bytestreamUser.trim().isEmpty() ?
				null : bytestreamUser;
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withBackendType(BackendType.GridFS)
						.withIgnoreHandleService(true)
						.withBytestreamURL(new URL("https://foo.com"))
						.withBytestreamUser(bytestreamUser)
						.withBytestreamToken(bytestreamToken)
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withInfoMessages(Arrays.asList(IGNORE_HANDLE))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}

	/** All S3 parameters are required when the backend type is S3. */
	@Test
	public void configFailS3ParamsMissing() throws Exception {
		configFailS3ParamsMissing(null);
		configFailS3ParamsMissing(" \t ");
	}

	// Runs the S3 missing-parameter check with all S3 params set to `backendParam`.
	private void configFailS3ParamsMissing(final String backendParam) throws Exception {
		final Map<String, String> cfg = MapBuilder.<String, String>newHashMap()
				.with("mongodb-host", " somehost ")
				.with("mongodb-database", " somedb ")
				.with("mongodb-type-database", " typedb ")
				.with("backend-type", "S3")
				.with("backend-token", backendParam)
				.with("backend-user", backendParam)
				.with("backend-url", backendParam)
				.with("backend-container", backendParam)
				.with("backend-region", backendParam)
				.with("temp-dir", " temp ")
				.with("auth-service-url", AUTH_LEGACY_URL)
				.with("auth2-service-url", CI_SERV + "auth")
				.with("ignore-handle-service", "foo")
				.build();
		final String paramReport =
				"mongodb-host=somehost\n" +
				"mongodb-database=somedb\n" +
				"mongodb-type-database=typedb\n" +
				"auth-service-url=" + AUTH_LEGACY_URL + "\n" +
				"auth2-service-url=" + CI_SERV + "auth\n" +
				"backend-type=S3\n";
		final String err = "Must provide S3 param %s in config file";
		final List<String> errors = Arrays.asList(
				String.format(err, "backend-token"),
				String.format(err, "backend-url"),
				String.format(err, "backend-user"),
				String.format(err, "backend-container"),
				String.format(err, "backend-region"));
		assertConfigCorrect(
				cfg,
				new ExpectedConfig()
						.withAuth2URL(new URL(CI_SERV + "auth"))
						.withAuthURL(new URL(AUTH_LEGACY_URL))
						.withMongohost("somehost")
						.withMongoDBname("somedb")
						.withTypeDBname("typedb")
						.withBackendType(BackendType.S3)
						.withIgnoreHandleService(true)
						.withParamReport(paramReport)
						.withTempDir("temp")
						.withInfoMessages(Arrays.asList(IGNORE_HANDLE))
						.withHasErrors(true)
						.withErrors(errors)
				);
	}
}
package org.petapico.npop.fingerprint;

import java.util.ArrayList;
import java.util.List;

import net.trustyuri.TrustyUriUtils;
import net.trustyuri.rdf.RdfHasher;
import net.trustyuri.rdf.RdfPreprocessor;

import org.nanopub.Nanopub;
import org.nanopub.NanopubUtils;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.ContextStatementImpl;
import org.openrdf.model.impl.URIImpl;
import org.petapico.npop.Fingerprint.FingerprintHandler;

/**
 * Fingerprint handler for DisGeNET nanopublications: hashes a normalized copy of
 * the assertion and provenance graphs so that nanopubs differing only in
 * version-specific URIs, GDA identifiers, or import timestamps get the same
 * fingerprint.
 */
public class DisgenetFingerprints implements FingerprintHandler {

	// Placeholder URIs substituted for nanopub-specific resources during
	// normalization, so equivalent nanopubs hash identically.
	private static final URI assertionUriPlaceholder = new URIImpl("http://purl.org/nanopub/placeholders/assertion");
	private static final URI provUriPlaceholder = new URIImpl("http://purl.org/nanopub/placeholders/provenance");
	private static final URI timestampPlaceholder = new URIImpl("http://purl.org/nanopub/placeholders/timestamp");
	private static final URI disgenetGdaPlaceholder = new URIImpl("http://purl.org/nanopub/placeholders/disgenet-gda");
	// PAV 1.x and 2.0 "importedOn" predicates; both are normalized to the 2.0 form below.
	private static final URI pav1importedOn = new URIImpl("http://purl.org/pav/importedOn");
	private static final URI pav2importedOn = new URIImpl("http://purl.org/pav/2.0/importedOn");

	/**
	 * Computes the fingerprint of the given nanopub: normalizes its statements,
	 * runs the trusty-URI preprocessor keyed on the nanopub's own artifact code,
	 * and hashes the result.
	 *
	 * @throws RuntimeException if the nanopub URI is not a trusty URI
	 */
	@Override
	public String getFingerprint(Nanopub np) {
		String artifactCode = TrustyUriUtils.getArtifactCode(np.getUri().toString());
		if (artifactCode == null) {
			throw new RuntimeException("Not a trusty URI: " + np.getUri());
		}
		List<Statement> statements = getNormalizedStatements(np);
		statements = RdfPreprocessor.run(statements, artifactCode);
		String fingerprint = RdfHasher.makeArtifactCode(statements);
		// Drop the two-character prefix of the artifact code, keeping the hash part.
		return fingerprint.substring(2);
	}

	// Returns the assertion and provenance statements rewritten into placeholder
	// graphs, with DisGeNET GDA subjects and import timestamps replaced by
	// placeholders. Head and pubinfo statements are dropped.
	private List<Statement> getNormalizedStatements(Nanopub np) {
		List<Statement> statements = NanopubUtils.getStatements(np);
		List<Statement> n = new ArrayList<>();
		for (Statement st : statements) {
			boolean isInAssertion = st.getContext().equals(np.getAssertionUri());
			boolean isInProvenance = st.getContext().equals(np.getProvenanceUri());
			// Only assertion and provenance graphs contribute to the fingerprint.
			if (!isInProvenance && !isInAssertion) continue;
			URI graphURI;
			if (isInAssertion) {
				graphURI = assertionUriPlaceholder;
			} else {
				graphURI = provUriPlaceholder;
			}
			Resource subj = st.getSubject();
			URI pred = st.getPredicate();
			Value obj = st.getObject();
			if (isInAssertion) {
				String subjS = subj.stringValue();
				// Gene-disease-association subjects carry per-release identifiers;
				// collapse them to a single placeholder.
				if (subjS.startsWith("http://rdf.disgenet.org/resource/gda/DGN") || subjS.startsWith("http://rdf.disgenet.org/gene-disease-association.ttl#DGN")) {
					subj = disgenetGdaPlaceholder;
				}
			} else if (isInProvenance) {
				// Normalize both PAV importedOn variants and blank out the timestamp.
				if (pred.equals(pav1importedOn) || pred.equals(pav2importedOn)) {
					pred = pav2importedOn;
					obj = timestampPlaceholder;
				}
				if (subj.equals(np.getAssertionUri())) {
					subj = assertionUriPlaceholder;
				}
			}
			n.add(new ContextStatementImpl((Resource) transform(subj), (URI) transform(pred), transform(obj), graphURI));
		}
		return n;
	}

	// Maps version-specific DisGeNET VoID URIs (and ECO ontology URIs) to
	// version-independent forms so different releases fingerprint identically.
	private Value transform(Value v) {
		if (v instanceof URI) {
			String s = ((URI) v).stringValue();
			if (s.matches("http://rdf.disgenet.org/v.*/void.*")) {
				if (s.matches("http://rdf.disgenet.org/v.*/void.*-20......")) {
					// NOTE(review): the remainder of this method is corrupted in this copy —
					// the regex/replacement string literals below are truncated and the code
					// does not compile as-is. Recover the original text from version control
					// before editing; kept byte-identical here.
					String r = s.replaceFirst("^http://rdf.disgenet.org/v.*/void.*(/| return new URIImpl(r); } else { String r = s.replaceFirst("^http://rdf.disgenet.org/v.*/void.*(/| return new URIImpl(r); } } else if (s.startsWith("http://purl.obolibrary.org/obo/eco.owl return new URIImpl(s.replace("http://purl.obolibrary.org/obo/eco.owl } } return v; } }
package bisq.monitor.metric; import java.io.File; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.springframework.core.env.PropertySource; import bisq.common.Clock; import bisq.common.app.Version; import bisq.common.proto.network.NetworkEnvelope; import bisq.common.proto.network.NetworkProtoResolver; import bisq.core.app.BisqEnvironment; import bisq.core.btc.BaseCurrencyNetwork; import bisq.core.btc.BtcOptionKeys; import bisq.core.network.p2p.seed.DefaultSeedNodeRepository; import bisq.core.network.p2p.seed.SeedNodeAddressLookup; import bisq.core.proto.network.CoreNetworkProtoResolver; import bisq.core.proto.persistable.CorePersistenceProtoResolver; import bisq.monitor.AvailableTor; import bisq.monitor.Metric; import bisq.monitor.Monitor; import bisq.monitor.Reporter; import bisq.monitor.ThreadGate; import bisq.network.p2p.network.Connection; import bisq.network.p2p.network.MessageListener; import bisq.network.p2p.network.NetworkNode; import bisq.network.p2p.network.SetupListener; import bisq.network.p2p.network.TorNetworkNode; import bisq.network.p2p.peers.PeerManager; import bisq.network.p2p.peers.keepalive.KeepAliveManager; import bisq.network.p2p.peers.peerexchange.PeerExchangeManager; import bisq.network.p2p.storage.messages.BroadcastMessage; import lombok.extern.slf4j.Slf4j; /** * Contacts a list of hosts and asks them for all the data we do not have. The * answers are then compiled into buckets of message types. Based on these * buckets, the Metric reports (for each host) the message types observed and * their number along with a relative comparison between all hosts. 
* * @author Florian Reimair * */ @Slf4j public class P2PNetworkLoad extends Metric implements MessageListener, SetupListener { private static final String TOR_PROXY_PORT = "run.torProxyPort"; private static final String MAX_CONNECTIONS = "run.maxConnections"; private NetworkNode networkNode; private final File torHiddenServiceDir = new File("metric_p2pNetworkLoad"); private final ThreadGate hsReady = new ThreadGate(); private Map<String, Counter> buckets = new ConcurrentHashMap<>(); private KeepAliveManager keepAliveManager; /** * Efficient way to count message occurrences. */ private class Counter { private int value = 1; /** * atomic get and reset * * @return the current value */ synchronized int getAndReset() { try { return value; } finally { value = 0; } } synchronized void increment() { value++; } } public P2PNetworkLoad(Reporter reporter) { super(reporter); Version.setBaseCryptoNetworkId(0); // set to BTC_MAINNET } @Override protected void execute() { // in case we do not have a NetworkNode up and running, we create one if (null == networkNode) { // prepare the gate hsReady.engage(); // start the network node networkNode = new TorNetworkNode(Integer.parseInt(configuration.getProperty(TOR_PROXY_PORT, "9053")), new CoreNetworkProtoResolver(), false, new AvailableTor(Monitor.TOR_WORKING_DIR, torHiddenServiceDir.getName())); networkNode.start(this); // wait for the HS to be published hsReady.await(); // boot up P2P node File storageDir = new File("/tmp/bisq-metric-storage"); String seedNodes = ""; try { BisqEnvironment environment = new BisqEnvironment(new PropertySource<String>("name") { @Override public String getProperty(String name) { if(BtcOptionKeys.BASE_CURRENCY_NETWORK.equals(name)) return BaseCurrencyNetwork.BTC_MAINNET.name(); return ""; } }); int maxConnections = Integer.parseInt(configuration.getProperty(MAX_CONNECTIONS, "12")); NetworkProtoResolver networkProtoResolver = new CoreNetworkProtoResolver(); CorePersistenceProtoResolver 
persistenceProtoResolver = new CorePersistenceProtoResolver(null, networkProtoResolver, storageDir); DefaultSeedNodeRepository seedNodeRepository = new DefaultSeedNodeRepository( new SeedNodeAddressLookup(environment, false, 0, null, seedNodes)); PeerManager peerManager = new PeerManager(networkNode, seedNodeRepository, new Clock(), persistenceProtoResolver, maxConnections, storageDir); PeerExchangeManager peerExchangeManager = new PeerExchangeManager(networkNode, seedNodeRepository, peerManager); // updates the peer list every now and then as well peerExchangeManager .requestReportedPeersFromSeedNodes(seedNodeRepository.getSeedNodeAddresses().iterator().next()); // irgendeine // seednode // nehmen keepAliveManager = new KeepAliveManager(networkNode, peerManager); keepAliveManager.start(); networkNode.addMessageListener(this); } catch (Throwable e) { e.printStackTrace(); } } // report Map<String, String> report = new HashMap<>(); // - get snapshot so we do not loose data Set<String> keys = new HashSet<>(buckets.keySet()); // - transfer values to report keys.forEach(key -> report.put(key, String.valueOf(buckets.get(key).getAndReset()))); // - report reporter.report(report, "bisq." + getName()); } @Override public void onMessage(NetworkEnvelope networkEnvelope, Connection connection) { // TODO check if we already have this very message if (networkEnvelope instanceof BroadcastMessage) { try { buckets.get(networkEnvelope.getClass().getSimpleName()).increment(); } catch (NullPointerException e) { // use exception handling because we hardly ever need to add a fresh bucket buckets.put(networkEnvelope.getClass().getSimpleName(), new Counter()); } } } @Override public void onTorNodeReady() { } @Override public void onHiddenServicePublished() { // open the gate hsReady.proceed(); } @Override public void onSetupFailed(Throwable throwable) { } @Override public void onRequestCustomBridges() { } @Override public void shutdown() { keepAliveManager.shutDown(); super.shutdown(); } }
package org.pfaa.geologica.registration;

import java.util.List;

import org.pfaa.chemica.item.IndustrialMaterialItem;
import org.pfaa.chemica.model.Strength;
import org.pfaa.chemica.processing.TemperatureLevel;
import org.pfaa.chemica.registration.RecipeRegistry;
import org.pfaa.chemica.registration.RecipeUtils;
import org.pfaa.chemica.util.ChanceStack;
import org.pfaa.core.block.CompositeBlock;
import org.pfaa.geologica.GeoMaterial;
import org.pfaa.geologica.Geologica;
import org.pfaa.geologica.GeologicaBlocks;
import org.pfaa.geologica.GeologicaItems;
import org.pfaa.geologica.block.GeoBlock;
import org.pfaa.geologica.block.LooseGeoBlock;
import org.pfaa.geologica.block.ProxyBlock;
import org.pfaa.geologica.block.StairsBlock;
import org.pfaa.geologica.block.WallBlock;
import org.pfaa.geologica.integration.TCIntegration;
import org.pfaa.geologica.processing.Ore;

import cpw.mods.fml.common.Loader;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.block.Block;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.CraftingManager;
import net.minecraft.item.crafting.IRecipe;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapedOreRecipe;
import net.minecraftforge.oredict.ShapelessOreRecipe;

/**
 * Registers all Geologica recipes (smelting, crafting, comminution, stone
 * tools, vanilla compatibility) with the chemica {@link RecipeRegistry}.
 *
 * NOTE(review): this class references {@code Material}, {@code Crude},
 * {@code Collections} and {@code IndustrialMaterial} without visible imports,
 * and several crafting-shape string literals below appear truncated at '#'
 * during extraction — verify against version control.
 */
public class RecipeRegistration {

	// Target registry provided by the chemica mod; all machine recipes go here.
	private static final RecipeRegistry registry = org.pfaa.chemica.registration.RecipeRegistration.getTarget();

	/** Entry point: registers every recipe category. */
	public static void init() {
		registerSmeltingRecipes();
		registerCraftingRecipes();
		registerCommunitionRecipes();
		registerStoneToolRecipes();
		registerCompatibilityRecipes();
	}

	/**
	 * Recipes that map Geologica materials back to vanilla items/blocks via
	 * the ore dictionary, so other mods' recipes keep working.
	 */
	private static void registerCompatibilityRecipes() {
		GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Blocks.cobblestone), "cobblestone"));
		GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Blocks.stone), "stone"));
		GameRegistry.addShapelessRecipe(new ItemStack(Items.coal),
				GeologicaItems.CRUDE_LUMP.of(GeoMaterial.BITUMINOUS_COAL));
	}

	/** Stone tools can be made from any strength tier of broken rock. */
	private static void registerStoneToolRecipes() {
		registerStoneToolRecipes(GeologicaBlocks.WEAK_RUBBLE);
		registerStoneToolRecipes(GeologicaBlocks.MEDIUM_COBBLE);
		registerStoneToolRecipes(GeologicaBlocks.STRONG_COBBLE);
		registerStoneToolRecipes(GeologicaBlocks.VERY_STRONG_COBBLE);
		if (Loader.isModLoaded("TConstruct"))
			TCIntegration.addStoneMaterials(); // TODO: implement this
	}

	// TODO (chemica): add metal tool recipes, once we have ingots
	// - Valid metals/alloys: all types of steel, magnesium
	// - When GT loaded, tools are automatic, based on material system
	// - Should probably try to make tools balanced with Metallurgy/Railcraft tools
	//   - No need for a plain steel pickaxe when those mods are loaded
	//   - Perhaps we could just check if the oredict-based recipe already exists?
	// - When Tinker's Construct is loaded, we skip this (but our materials are registered)

	/** Registers each vanilla stone tool shape against the given rock block. */
	private static void registerStoneToolRecipes(GeoBlock block) {
		registerStoneToolRecipe(block, Items.stone_pickaxe);
		registerStoneToolRecipe(block, Items.stone_axe);
		registerStoneToolRecipe(block, Items.stone_shovel);
		registerStoneToolRecipe(block, Items.stone_hoe);
		registerStoneToolRecipe(block, Items.stone_sword);
	}

	/**
	 * Clones the vanilla shaped recipe for {@code tool}, substituting our rock
	 * block for cobblestone, and producing a pre-damaged tool (weaker rock
	 * yields a more worn tool). Mutating the vanilla recipe's ingredient list
	 * (origIngredients) widens it to accept any "cobblestone" ore-dict entry.
	 */
	private static void registerStoneToolRecipe(GeoBlock block, Item tool) {
		// Initial damage scales with how weak the source rock is.
		ItemStack damaged = new ItemStack(tool, 1,
				(int)(getInitialStoneToolDamage(block.getStrength()) * tool.getMaxDamage()));
		ItemStack material = new ItemStack(block, 1, OreDictionary.WILDCARD_VALUE);
		ItemStack cobblestone = new ItemStack(Blocks.cobblestone, 1, OreDictionary.WILDCARD_VALUE);
		@SuppressWarnings("unchecked")
		List<IRecipe> recipes = (List<IRecipe>)CraftingManager.getInstance().getRecipeList();
		for (IRecipe recipe : recipes) {
			ItemStack output = recipe.getRecipeOutput();
			if (output != null && output.getItem() == tool && recipe instanceof ShapedOreRecipe) {
				ShapedOreRecipe shapedRecipe = (ShapedOreRecipe)recipe;
				Object[] origIngredients = shapedRecipe.getInput();
				Object[] ingredients = origIngredients.clone();
				for (int i = 0; i < ingredients.length; i++) {
					if (ingredients[i] instanceof List) {
						@SuppressWarnings("unchecked")
						List<ItemStack> oreIngredient = (List<ItemStack>)ingredients[i];
						for (ItemStack ingredient : oreIngredient) {
							if (ingredient.getItem() == cobblestone.getItem()) {
								// our copy uses the rock block; the vanilla
								// recipe is widened to the oredict wildcard
								ingredients[i] = material;
								origIngredients[i] = cobblestone;
							}
						}
					} else if (ingredients[i] instanceof ItemStack) {
						if (((ItemStack)ingredients[i]).getItem() == cobblestone.getItem()) {
							ingredients[i] = material;
						}
					}
				}
				// add-then-break: safe despite iterating, since we exit the
				// loop before the iterator advances again
				recipes.add(RecipeUtils.recreateOreRecipe(shapedRecipe, damaged, ingredients));
				break;
			}
		}
	}

	// Fraction of max durability a freshly crafted stone tool starts with used.
	private static float getInitialStoneToolDamage(Strength strength) {
		return Geologica.getConfiguration().getInitialStoneToolDamage(strength);
	}

	/** Broken rock smelts back into its intact form, per GeoBlock. */
	private static void registerSmeltingRecipes() {
		for (Block block : GeologicaBlocks.getBlocks()) {
			if (block instanceof GeoBlock) {
				registerSmeltingRecipes((GeoBlock)block);
			}
		}
	}

	private static void registerSmeltingRecipes(GeoBlock output) {
		// Smelting temperature tier tracks the rock's strength tier.
		TemperatureLevel temp = TemperatureLevel.values()[output.getStrength().ordinal()];
		GeoBlock input = output.getBrokenRockBlock();
		if (input != null) {
			registerSmeltingRecipesByMeta(input, output, temp);
		}
	}

	/** Slab/wall/brick/stairs crafting recipes for each stone variant. */
	private static void registerCraftingRecipes() {
		registerSlabRecipe(GeologicaBlocks.MEDIUM_COBBLE, GeologicaBlocks.MEDIUM_COBBLE_SLAB);
		registerSlabRecipe(GeologicaBlocks.STRONG_COBBLE, GeologicaBlocks.STRONG_COBBLE_SLAB);
		registerSlabRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK, GeologicaBlocks.MEDIUM_STONE_BRICK_SLAB);
		registerSlabRecipe(GeologicaBlocks.STRONG_STONE_BRICK, GeologicaBlocks.STRONG_STONE_BRICK_SLAB);
		registerSlabRecipe(GeologicaBlocks.VERY_STRONG_STONE_BRICK, GeologicaBlocks.VERY_STRONG_STONE_BRICK_SLAB);
		registerSlabRecipe(GeologicaBlocks.MEDIUM_STONE, GeologicaBlocks.MEDIUM_STONE_SLAB);
		registerSlabRecipe(GeologicaBlocks.STRONG_STONE, GeologicaBlocks.STRONG_STONE_SLAB);
		registerSlabRecipe(GeologicaBlocks.VERY_STRONG_STONE, GeologicaBlocks.VERY_STRONG_STONE_SLAB);
		registerWallRecipe(GeologicaBlocks.MEDIUM_COBBLE, GeologicaBlocks.MEDIUM_COBBLE_WALL);
		registerWallRecipe(GeologicaBlocks.STRONG_COBBLE, GeologicaBlocks.STRONG_COBBLE_WALL);
		registerWallRecipe(GeologicaBlocks.VERY_STRONG_COBBLE, GeologicaBlocks.VERY_STRONG_COBBLE_WALL);
		registerWallRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK, GeologicaBlocks.MEDIUM_STONE_BRICK_WALL);
		registerWallRecipe(GeologicaBlocks.STRONG_STONE_BRICK, GeologicaBlocks.STRONG_STONE_BRICK_WALL);
		registerWallRecipe(GeologicaBlocks.VERY_STRONG_STONE_BRICK, GeologicaBlocks.VERY_STRONG_STONE_BRICK_WALL);
		registerBrickRecipe(GeologicaBlocks.MEDIUM_STONE, GeologicaBlocks.MEDIUM_STONE_BRICK);
		registerBrickRecipe(GeologicaBlocks.STRONG_STONE, GeologicaBlocks.STRONG_STONE_BRICK);
		registerBrickRecipe(GeologicaBlocks.VERY_STRONG_STONE, GeologicaBlocks.VERY_STRONG_STONE_BRICK);
		registerBrickRecipe(GeologicaBlocks.WEAK_CLAY, GeologicaBlocks.WEAK_CLAY_BRICK);
		registerStairsRecipe(GeologicaBlocks.MEDIUM_COBBLE_STAIRS__LIMESTONE);
		registerStairsRecipe(GeologicaBlocks.STRONG_COBBLE_STAIRS__GRANITE);
		registerStairsRecipe(GeologicaBlocks.STRONG_COBBLE_STAIRS__MARBLE);
		registerStairsRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK_STAIRS__LIMESTONE);
		registerStairsRecipe(GeologicaBlocks.STRONG_STONE_BRICK_STAIRS__GRANITE);
		registerStairsRecipe(GeologicaBlocks.STRONG_STONE_BRICK_STAIRS__MARBLE);
		registerQuarterToBlockRecipe(GeologicaItems.EARTHY_CLUMP);
	}

	/** Comminution = crushing/grinding rock into smaller products. */
	private static void registerCommunitionRecipes() {
		for (Block block : GeologicaBlocks.getBlocks()) {
			registerCommunitionRecipes(block);
		}
	}

	/**
	 * Dispatches a block to the right comminution chain: stairs/walls use
	 * their model block; rock crushes to its broken form; loose rock grinds;
	 * ores and crudes get their own multi-stage chains.
	 */
	private static void registerCommunitionRecipes(Block block) {
		// NOTE(review): Material here is presumably
		// net.minecraft.block.material.Material — import not visible; confirm.
		if (block.getMaterial() != Material.rock) {
			return;
		}
		if (block instanceof StairsBlock) {
			registerCrushingRecipes((StairsBlock)block);
			return;
		}
		GeoBlock geoBlock = null;
		if (block instanceof WallBlock) {
			geoBlock = (GeoBlock)((ProxyBlock)block).getModelBlock();
		} else if (block instanceof GeoBlock) {
			geoBlock = (GeoBlock)block;
		}
		if (geoBlock != null) {
			GeoBlock broken = geoBlock.getBrokenRockBlock();
			if (broken != null) {
				registerCrushingRecipes(block, broken);
			} else if (block instanceof LooseGeoBlock) {
				registerGrindingRecipes((GeoBlock)block);
			} else if (Ore.class.isAssignableFrom(geoBlock.getComposition())) {
				registerOreCommunitionRecipes((GeoBlock)block);
			} else if (Crude.class.isAssignableFrom(geoBlock.getComposition())) {
				registerCrudeCommunitionRecipes((GeoBlock)block);
			}
		}
	}

	/** Stairs crush to the broken form of their model block (one variant). */
	private static void registerCrushingRecipes(StairsBlock input) {
		GeoBlock model = (GeoBlock)input.getModelBlock();
		GeoBlock output = model.getBrokenRockBlock();
		if (output == null) {
			return;
		}
		int damage = input.getModelBlockMeta();
		registry.registerCrushingRecipe(new ItemStack(input), new ItemStack(output, 1, damage), model.getStrength());
	}

	/** Meta-for-meta crushing of a whole block into its broken counterpart. */
	private static void registerCrushingRecipes(Block input, GeoBlock output) {
		for(GeoMaterial material : output.getGeoMaterials()) {
			int damage = output.getMeta(material);
			registry.registerCrushingRecipe(new ItemStack(input, 1, damage),
					new ItemStack(output, 1, damage), material.getStrength());
		}
	}

	/** Ore: block crushes to 2x crushed ore; 1x crushed then grinds to dust. */
	private static void registerOreCommunitionRecipes(GeoBlock input) {
		for(GeoMaterial material : input.getGeoMaterials()) {
			ItemStack crushed = GeologicaItems.ORE_CRUSHED.getItemStack(material, 2);
			registry.registerCrushingRecipe(input.getItemStack(material), crushed, input.getStrength());
			registerGrindingRecipes(crushed.splitStack(1), material);
		}
	}

	private static void registerGrindingRecipes(GeoBlock input) {
		for(GeoMaterial material : input.getGeoMaterials()) {
			registerGrindingRecipes(input.getItemStack(material), material);
		}
	}

	/**
	 * Registers grinding of {@code input} into its composition's primary
	 * product plus secondaries; if the material sits inside a host rock, the
	 * host's secondaries are added at 20% of their normal chance.
	 */
	private static void registerGrindingRecipes(ItemStack input, GeoMaterial material) {
		ItemStack primary = RecipeUtils.getPrimaryGrindingOutput(material.getComposition());
		List<ChanceStack> secondaries = RecipeUtils.getSecondaryGrindingOutputs(material.getComposition(), false);
		// NOTE(review): IndustrialMaterial import not visible — confirm.
		IndustrialMaterial host = material.getHost();
		if (host instanceof GeoMaterial) {
			List<ChanceStack> hostSecondaries =
					RecipeUtils.getSecondaryGrindingOutputs(((GeoMaterial)host).getComposition(), true);
			for (ChanceStack hostSecondary : hostSecondaries) {
				secondaries.add(hostSecondary.weightChance(0.2F));
			}
		}
		registry.registerGrindingRecipe(input, primary, secondaries, material.getStrength());
	}

	/** Crude (e.g. coal): block crushes to 2x lump; 1x lump grinds to dust. */
	private static void registerCrudeCommunitionRecipes(GeoBlock input) {
		for(GeoMaterial material : input.getGeoMaterials()) {
			ItemStack lump = GeologicaItems.CRUDE_LUMP.getItemStack(material, 2);
			registry.registerCrushingRecipe(input.getItemStack(material), lump, input.getStrength());
			ItemStack dust = GeologicaItems.CRUDE_DUST.getItemStack(material);
			// NOTE(review): Collections import not visible — confirm.
			registry.registerGrindingRecipe(lump.copy().splitStack(1), dust,
					Collections.<ChanceStack> emptyList(), input.getStrength());
		}
	}

	// NOTE(review): the crafting-shape string literals in the three methods
	// below were truncated during extraction (likely '#' shape characters
	// lost); restore from version control.
	private static void registerSlabRecipe(CompositeBlock input, Block output) {
		registerCraftingRecipesByMeta(input, output, 6, "
	}

	private static void registerWallRecipe(CompositeBlock input, Block output) {
		registerCraftingRecipesByMeta(input, output, 6, "
	}

	private static void registerBrickRecipe(CompositeBlock input, Block output) {
		registerCraftingRecipesByMeta(input, output, 4, "
	}

	/**
	 * Registers one shaped recipe per meta variant of {@code input}, keyed by
	 * the first character of the shape, yielding {@code outputSize} outputs.
	 */
	private static void registerCraftingRecipesByMeta(CompositeBlock input, Block output, int outputSize, String... shape) {
		for(int meta = 0; meta < input.getMetaCount(); meta++) {
			ItemStack outputStack = new ItemStack(output, outputSize, meta);
			ItemStack inputStack = new ItemStack(input, 1, meta);
			GameRegistry.addRecipe(outputStack, shape, shape[0].charAt(0), inputStack);
		}
	}

	/** 2x2 of quarter-items (e.g. clay clumps) crafts the native block. */
	private static void registerQuarterToBlockRecipe(IndustrialMaterialItem<GeoMaterial> input) {
		for(GeoMaterial material : input.getIndustrialMaterials()) {
			ItemStack outputStack = GeoBlock.getNative(material).getItemStack(1);
			ItemStack inputStack = input.getItemStack(material);
			GameRegistry.addRecipe(outputStack, "##", "##", '#', inputStack);
		}
	}

	/** Four stairs from the stairs' model block variant. */
	private static void registerStairsRecipe(StairsBlock output) {
		ItemStack outputStack = new ItemStack(output, 4, output.getModelBlockMeta());
		ItemStack inputStack = new ItemStack(output.getModelBlock(), 1, output.getModelBlockMeta());
		// NOTE(review): shape string literal truncated during extraction.
		GameRegistry.addRecipe(outputStack, "
	}

	/** Meta-for-meta smelting of input into output at the given temperature. */
	private static void registerSmeltingRecipesByMeta(CompositeBlock input, Block output, TemperatureLevel temp) {
		for(int meta = 0; meta < input.getMetaCount(); meta++) {
			ItemStack outputStack = new ItemStack(output, 1, meta);
			ItemStack inputStack = new ItemStack(input, 1, meta);
			registry.registerSmeltingRecipe(inputStack, outputStack, temp);
		}
	}
}
package org.pfaa.geologica.registration; import java.util.List; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.item.crafting.CraftingManager; import net.minecraft.item.crafting.FurnaceRecipes; import net.minecraft.item.crafting.IRecipe; import net.minecraftforge.fluids.FluidContainerRegistry; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fluids.FluidStack; import net.minecraftforge.oredict.OreDictionary; import net.minecraftforge.oredict.ShapedOreRecipe; import net.minecraftforge.oredict.ShapelessOreRecipe; import org.pfaa.RecipeUtils; import org.pfaa.block.CompositeBlock; import org.pfaa.chemica.model.Compound.Compounds; import org.pfaa.geologica.GeoMaterial; import org.pfaa.geologica.GeoMaterial.Strength; import org.pfaa.geologica.Geologica; import org.pfaa.geologica.GeologicaBlocks; import org.pfaa.geologica.GeologicaItems; import org.pfaa.geologica.block.BrickGeoBlock; import org.pfaa.geologica.block.BrokenGeoBlock; import org.pfaa.geologica.block.ChanceDropRegistry; import org.pfaa.geologica.block.GeoBlock; import org.pfaa.geologica.block.LooseGeoBlock; import org.pfaa.geologica.block.ProxyBlock; import org.pfaa.geologica.block.SlabBlock; import org.pfaa.geologica.block.StairsBlock; import org.pfaa.geologica.block.VanillaOreOverrideBlock; import org.pfaa.geologica.block.WallBlock; import org.pfaa.geologica.integration.FMPIntegration; import org.pfaa.geologica.integration.IC2Integration; import org.pfaa.geologica.integration.TCIntegration; import org.pfaa.geologica.integration.TEIntegration; import org.pfaa.geologica.processing.Aggregate; import org.pfaa.geologica.processing.Crude; import org.pfaa.geologica.processing.IndustrialMineral.IndustrialMinerals; import org.pfaa.geologica.processing.Ore; import 
org.pfaa.geologica.processing.OreMineral.SmeltingTemperature; import com.google.common.base.CaseFormat; import cpw.mods.fml.common.Loader; import cpw.mods.fml.common.registry.GameRegistry; public class RecipeRegistration { public static void init() { registerOres(); addSmeltingRecipes(); addCraftingRecipes(); addGrindingRecipes(); addMeltingRecipes(); addStoneToolRecipes(); addStoneAbstractionRecipesForBrokenMods(); registerMicroblocks(); registerOreDrops(); } private static void addStoneAbstractionRecipesForBrokenMods() { GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Blocks.cobblestone), "cobblestone")); GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Blocks.stone), "stone")); } private static void addStoneToolRecipes() { addStoneToolRecipes(GeologicaBlocks.WEAK_RUBBLE); addStoneToolRecipes(GeologicaBlocks.MEDIUM_COBBLE); addStoneToolRecipes(GeologicaBlocks.STRONG_COBBLE); addStoneToolRecipes(GeologicaBlocks.VERY_STRONG_COBBLE); if (Loader.isModLoaded("TConstruct")) TCIntegration.addStoneMaterials(); } private static void addStoneToolRecipes(GeoBlock block) { addStoneToolRecipe(block, Items.stone_pickaxe); addStoneToolRecipe(block, Items.stone_axe); addStoneToolRecipe(block, Items.stone_shovel); addStoneToolRecipe(block, Items.stone_hoe); addStoneToolRecipe(block, Items.stone_sword); } private static void addStoneToolRecipe(GeoBlock block, Item tool) { ItemStack damaged = new ItemStack(tool, 1, (int)(getInitialStoneToolDamage(block.getStrength()) * tool.getMaxDamage())); ItemStack material = new ItemStack(block, 1, OreDictionary.WILDCARD_VALUE); ItemStack cobblestone = new ItemStack(Blocks.cobblestone, 1, OreDictionary.WILDCARD_VALUE); List<IRecipe> recipes = (List<IRecipe>)CraftingManager.getInstance().getRecipeList(); for (IRecipe recipe : recipes) { ItemStack output = recipe.getRecipeOutput(); if (output != null && output.getItem() == tool && recipe instanceof ShapedOreRecipe) { ShapedOreRecipe shapedRecipe = (ShapedOreRecipe)recipe; 
Object[] origIngredients = shapedRecipe.getInput(); Object[] ingredients = origIngredients.clone(); for (int i = 0; i < ingredients.length; i++) { if (ingredients[i] instanceof List) { for (ItemStack ingredient : (List<ItemStack>)ingredients[i]) { if (ingredient.getItem() == cobblestone.getItem()) { ingredients[i] = material; origIngredients[i] = cobblestone; } } } else if (ingredients[i] instanceof ItemStack) { if (((ItemStack)ingredients[i]).getItem() == cobblestone.getItem()) { ingredients[i] = material; } } } recipes.add(RecipeUtils.recreateOreRecipe(shapedRecipe, damaged, ingredients)); break; } } } private static float getInitialStoneToolDamage(Strength strength) { return Geologica.getConfiguration().getInitialStoneToolDamage(strength); } private static void registerOres() { oreDictifyGeoBlocks(); oreDictifyStoneBrick(); } private static void addSmeltingRecipes() { addSmeltingRecipesByMeta(GeologicaBlocks.WEAK_RUBBLE, GeologicaBlocks.WEAK_STONE, SmeltingTemperature.MEDIUM); addSmeltingRecipesByMeta(GeologicaBlocks.MEDIUM_COBBLE, GeologicaBlocks.MEDIUM_STONE, SmeltingTemperature.MEDIUM); addSmeltingRecipesByMeta(GeologicaBlocks.STRONG_COBBLE, GeologicaBlocks.STRONG_STONE, SmeltingTemperature.HIGH); } private static void addCraftingRecipes() { addSlabRecipe(GeologicaBlocks.MEDIUM_COBBLE, GeologicaBlocks.MEDIUM_COBBLE_SLAB); addSlabRecipe(GeologicaBlocks.STRONG_COBBLE, GeologicaBlocks.STRONG_COBBLE_SLAB); addSlabRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK, GeologicaBlocks.MEDIUM_STONE_BRICK_SLAB); addSlabRecipe(GeologicaBlocks.STRONG_STONE_BRICK, GeologicaBlocks.STRONG_STONE_BRICK_SLAB); addSlabRecipe(GeologicaBlocks.VERY_STRONG_STONE_BRICK, GeologicaBlocks.VERY_STRONG_STONE_BRICK_SLAB); addSlabRecipe(GeologicaBlocks.MEDIUM_STONE, GeologicaBlocks.MEDIUM_STONE_SLAB); addSlabRecipe(GeologicaBlocks.STRONG_STONE, GeologicaBlocks.STRONG_STONE_SLAB); addSlabRecipe(GeologicaBlocks.VERY_STRONG_STONE, GeologicaBlocks.VERY_STRONG_STONE_SLAB); 
addWallRecipe(GeologicaBlocks.MEDIUM_COBBLE, GeologicaBlocks.MEDIUM_COBBLE_WALL); addWallRecipe(GeologicaBlocks.STRONG_COBBLE, GeologicaBlocks.STRONG_COBBLE_WALL); addWallRecipe(GeologicaBlocks.VERY_STRONG_COBBLE, GeologicaBlocks.VERY_STRONG_COBBLE_WALL); addWallRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK, GeologicaBlocks.MEDIUM_STONE_BRICK_WALL); addWallRecipe(GeologicaBlocks.STRONG_STONE_BRICK, GeologicaBlocks.STRONG_STONE_BRICK_WALL); addWallRecipe(GeologicaBlocks.VERY_STRONG_STONE_BRICK, GeologicaBlocks.VERY_STRONG_STONE_BRICK_WALL); addBrickRecipe(GeologicaBlocks.MEDIUM_STONE, GeologicaBlocks.MEDIUM_STONE_BRICK); addBrickRecipe(GeologicaBlocks.STRONG_STONE, GeologicaBlocks.STRONG_STONE_BRICK); addBrickRecipe(GeologicaBlocks.VERY_STRONG_STONE, GeologicaBlocks.VERY_STRONG_STONE_BRICK); addStairsRecipe(GeologicaBlocks.MEDIUM_COBBLE_STAIRS__LIMESTONE); addStairsRecipe(GeologicaBlocks.STRONG_COBBLE_STAIRS__GRANITE); addStairsRecipe(GeologicaBlocks.STRONG_COBBLE_STAIRS__MARBLE); addStairsRecipe(GeologicaBlocks.MEDIUM_STONE_BRICK_STAIRS__LIMESTONE); addStairsRecipe(GeologicaBlocks.STRONG_STONE_BRICK_STAIRS__GRANITE); addStairsRecipe(GeologicaBlocks.STRONG_STONE_BRICK_STAIRS__MARBLE); } private static void addGrindingRecipes() { addStoneGrindingRecipes(); addCobbleGrindingRecipes(); } private static void addCobbleGrindingRecipes() { addCobbleGrindingRecipe(GeoMaterial.ANDESITE, Blocks.sand, IndustrialMinerals.FELDSPAR, 0.1); addCobbleGrindingRecipe(GeoMaterial.BRECCIA, new ItemStack(Blocks.gravel, 2)); addCobbleGrindingRecipe(GeoMaterial.CARBONATITE, Blocks.sand, Compounds.CaCO3, 0.5); addCobbleGrindingRecipe(GeoMaterial.CONGLOMERATE, new ItemStack(Blocks.sand), new ItemStack(Blocks.gravel), 1.0); addCobbleGrindingRecipe(GeoMaterial.CLAYSTONE, new ItemStack(GeologicaItems.CLAY_DUST, 2)); addCobbleGrindingRecipe(GeoMaterial.DIORITE, Blocks.sand, IndustrialMinerals.FELDSPAR, 0.1); addCobbleGrindingRecipe(GeoMaterial.GABBRO, Blocks.sand, IndustrialMinerals.FELDSPAR, 0.2); 
addCobbleGrindingRecipe(GeoMaterial.GNEISS, Blocks.sand, IndustrialMinerals.FELDSPAR, 0.1); addCobbleGrindingRecipe(GeoMaterial.GRANITE, Blocks.sand, IndustrialMinerals.QUARTZ, 0.1); addCobbleGrindingRecipe(GeoMaterial.GREENSCHIST, Blocks.sand, IndustrialMinerals.CHRYSOTILE, 0.1); addCobbleGrindingRecipe(GeoMaterial.HORNFELS, Blocks.sand, IndustrialMinerals.MICA, 0.2); addCobbleGrindingRecipe(GeoMaterial.LIMESTONE, Blocks.sand, Compounds.CaCO3, 0.5); addCobbleGrindingRecipe(GeoMaterial.MARBLE, Blocks.sand, Compounds.CaCO3, 1.0); addCobbleGrindingRecipe(GeoMaterial.MUDSTONE, new ItemStack(Blocks.sand), new ItemStack(GeologicaItems.CLAY_DUST), 0.1); addCobbleGrindingRecipe(GeoMaterial.PEGMATITE, IndustrialMinerals.FELDSPAR, IndustrialMinerals.QUARTZ, 1.0); addCobbleGrindingRecipe(GeoMaterial.PERIDOTITE, Blocks.sand, IndustrialMinerals.OLIVINE, 0.5); addCobbleGrindingRecipe(GeoMaterial.RHYOLITE, Blocks.sand, IndustrialMinerals.QUARTZ, 0.1); addCobbleGrindingRecipe(GeoMaterial.SCHIST, Blocks.sand, IndustrialMinerals.MICA, 0.2); addCobbleGrindingRecipe(GeoMaterial.SERPENTINITE, Blocks.sand, IndustrialMinerals.CHRYSOTILE, 0.1); addCobbleGrindingRecipe(GeoMaterial.SLATE, Blocks.sand, IndustrialMinerals.MICA, 0.1); addCobbleGrindingRecipe(GeoMaterial.SKARN, Blocks.sand, IndustrialMinerals.WOLLASTONITE, 0.1); addCobbleGrindingRecipe(GeoMaterial.QUARTZITE, new ItemStack(Blocks.sand, 2)); } private static void addStoneGrindingRecipes() { addStoneGrindingRecipes(GeologicaBlocks.WEAK_STONE, GeologicaBlocks.WEAK_RUBBLE); addStoneGrindingRecipes(GeologicaBlocks.MEDIUM_STONE, GeologicaBlocks.MEDIUM_COBBLE); addStoneGrindingRecipes(GeologicaBlocks.STRONG_STONE, GeologicaBlocks.STRONG_COBBLE); addStoneGrindingRecipes(GeologicaBlocks.VERY_STRONG_STONE, GeologicaBlocks.VERY_STRONG_COBBLE); } private static void addMeltingRecipes() { addStoneMeltingRecipes(GeologicaBlocks.MEDIUM_STONE); addStoneMeltingRecipes(GeologicaBlocks.MEDIUM_COBBLE); 
addStoneMeltingRecipes(GeologicaBlocks.STRONG_STONE); addStoneMeltingRecipes(GeologicaBlocks.STRONG_COBBLE); addStoneMeltingRecipes(GeologicaBlocks.VERY_STRONG_STONE); addStoneMeltingRecipes(GeologicaBlocks.VERY_STRONG_COBBLE); } private static void addStoneMeltingRecipes(GeoBlock block) { addMeltingRecipe(new ItemStack(block), new FluidStack(FluidRegistry.LAVA, FluidContainerRegistry.BUCKET_VOLUME), block.getStrength()); } private static void addMeltingRecipe(ItemStack solid, FluidStack liquid, Strength strength) { TEIntegration.addCrucibleRecipe(solid, liquid, strength); } private static void addSlabRecipe(CompositeBlock input, Block output) { addCraftingRecipesByMeta(input, output, 6, " } private static void addWallRecipe(CompositeBlock input, Block output) { addCraftingRecipesByMeta(input, output, 6, " } private static void addBrickRecipe(CompositeBlock input, Block output) { addCraftingRecipesByMeta(input, output, 4, " } private static void addCraftingRecipesByMeta(CompositeBlock input, Block output, int outputSize, String... 
shape) { for(int meta = 0; meta < input.getMetaCount(); meta++) { ItemStack outputStack = new ItemStack(output, outputSize, meta); ItemStack inputStack = new ItemStack(input, 1, meta); GameRegistry.addRecipe(outputStack, shape, shape[0].charAt(0), inputStack); } } private static void addStairsRecipe(StairsBlock output) { ItemStack outputStack = new ItemStack(output, 4, output.getModelBlockMeta()); ItemStack inputStack = new ItemStack(output.getModelBlock(), 1, output.getModelBlockMeta()); GameRegistry.addRecipe(outputStack, " } private static void addSmeltingRecipesByMeta(CompositeBlock input, Block output, SmeltingTemperature temperature) { for(int meta = 0; meta < input.getMetaCount(); meta++) { ItemStack outputStack = new ItemStack(output, 1, meta); FurnaceRecipes.smelting().func_151394_a(new ItemStack(input, 1, meta), outputStack, 0); TEIntegration.addFurnaceRecipe(new ItemStack(input, 1, meta), outputStack, temperature); } } // FIXME: remove this hack when we get this into Forge private static void oreDictifyStoneBrick() { OreDictionary.registerOre("stoneBricks", Blocks.stonebrick); } private static void oreDictifyGeoBlocks() { for (Block block : GeologicaBlocks.getBlocks()) { oreDictify(block); } } private static void oreDictify(Block block) { if (block instanceof GeoBlock) { oreDictify((GeoBlock)block); } else if (block instanceof ProxyBlock) { oreDictify((ProxyBlock)block); } else if (block instanceof VanillaOreOverrideBlock) { oreDictify((VanillaOreOverrideBlock)block); } } private static void oreDictify(GeoBlock block) { for (GeoMaterial material : block.getGeoMaterials()) { if (block.hasComposition(Aggregate.class)) { oreDictifyAggregate(block, material); } else if (block.hasComposition(Ore.class) || (block.hasComposition(Crude.class) && block.getMaterial() == Material.rock)) { oreDictifyOre(block, material); } } } private static void oreDictifyOre(GeoBlock block, GeoMaterial substance) { ItemStack oreStack = block.getItemStack(substance); String postfix 
= substance.getOreDictKey(); if (postfix != null && !Geologica.isTechnical()) { oreDictifyOre(postfix, oreStack); } oreDictifyOre(substance.getLowerName(), oreStack); } private static String oreDictKey(String prefix, String postfix) { return prefix + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, postfix); } private static void oreDictifyOre(String postfix, ItemStack itemStack) { String key = oreDictKey("ore", postfix); OreDictionary.registerOre(key, itemStack); ItemStack smeltingOutput = getSmeltingOutput(key); if (smeltingOutput != null) { FurnaceRecipes.smelting().func_151394_a(itemStack, smeltingOutput, 0); } } private static ItemStack getSmeltingOutput(String key) { List<ItemStack> ores = OreDictionary.getOres(key); for (ItemStack ore : ores) { ItemStack output = FurnaceRecipes.smelting().getSmeltingResult(ore); if (output != null) { return output; } } return null; } private static String getAggregateOreDictKey(GeoBlock block) { if (block.getMaterial() == Material.clay && block instanceof IntactGeoBlock) { return "clay"; } else if (block.getMaterial() == Material.sand) { return "sand"; } else if (block instanceof BrokenGeoBlock) { return "cobblestone"; } else if (block instanceof BrickGeoBlock) { return "stoneBricks"; } else if (block instanceof LooseGeoBlock) { return "rubble"; } return "stone"; } private static void oreDictify(ProxyBlock block) { String prefix = null; if (block instanceof StairsBlock) { prefix = "stair"; } else if (block instanceof SlabBlock) { prefix = "slab"; } else if (block instanceof WallBlock) { prefix = "wall"; } String postfix = getAggregateOreDictKey((GeoBlock)block.getModelBlock()); String key = oreDictKey(prefix, postfix); OreDictionary.registerOre(key, new ItemStack((Block)block, 1, OreDictionary.WILDCARD_VALUE)); } private static void oreDictifyAggregate(GeoBlock block, GeoMaterial material) { String key = getAggregateOreDictKey(block); ItemStack itemStack = block.getItemStack(material); OreDictionary.registerOre(key, 
itemStack); } private static void oreDictify(VanillaOreOverrideBlock block) { String name = Block.blockRegistry.getNameForObject(block); if (name != null) { String material = name.substring(name.indexOf(':') + 1, name.length() - 3); OreDictionary.registerOre(oreDictKey("ore", material), new ItemStack(block, 1, OreDictionary.WILDCARD_VALUE)); } } private static GeoBlock getCobbleBlock(Strength strength) { switch(strength) { case WEAK: return GeologicaBlocks.WEAK_RUBBLE; case MEDIUM: return GeologicaBlocks.MEDIUM_COBBLE; case STRONG: return GeologicaBlocks.STRONG_COBBLE; case VERY_STRONG: return GeologicaBlocks.VERY_STRONG_COBBLE; } return null; } private static void addCobbleGrindingRecipe(GeoMaterial material, IndustrialMinerals primaryDust, IndustrialMinerals secondaryDust, double secondaryChance) { addCobbleGrindingRecipe(material, GeologicaItems.CRUDE_DUST.getItemStack(primaryDust, 2), secondaryDust, secondaryChance); } private static void addCobbleGrindingRecipe(GeoMaterial material, Block primaryOutput, IndustrialMinerals secondaryDust, double secondaryChance) { addCobbleGrindingRecipe(material, new ItemStack(primaryOutput), secondaryDust, secondaryChance); } private static void addCobbleGrindingRecipe(GeoMaterial material, ItemStack primaryOutput, IndustrialMinerals secondaryDust, double secondaryChance) { ItemStack secondaryOutput = GeologicaItems.CRUDE_DUST.getItemStack(secondaryDust); addCobbleGrindingRecipe(material, primaryOutput, secondaryOutput, secondaryChance); } private static void addCobbleGrindingRecipe(GeoMaterial material, Block primaryOutput, Compounds secondaryDust, double secondaryChance) { addCobbleGrindingRecipe(material, new ItemStack(primaryOutput), secondaryDust, secondaryChance); } private static void addCobbleGrindingRecipe(GeoMaterial material, ItemStack primaryOutput, Compounds secondaryDust, double secondaryChance) { //ItemStack secondaryOutput = ChemicaItems.DUST.getItemStack(secondaryDust); //addCobbleGrindingRecipe(material, 
primaryOutput, secondaryOutput, secondaryChance); } private static void addCobbleGrindingRecipe(GeoMaterial material, ItemStack primaryOutput, ItemStack secondaryOutput, double secondaryChance) { ItemStack input = getCobbleBlock(material.getStrength()).getItemStack(material); addGrindingRecipe(input, primaryOutput, secondaryOutput, secondaryChance, material.getStrength()); } private static void addCobbleGrindingRecipe(GeoMaterial material, ItemStack primaryOutput) { ItemStack input = getCobbleBlock(material.getStrength()).getItemStack(material); addGrindingRecipe(input, primaryOutput, null, 0, material.getStrength()); } private static void addStoneGrindingRecipes(GeoBlock intact, GeoBlock broken) { for (GeoMaterial material : intact.getGeoMaterials()) { addGrindingRecipe(intact.getItemStack(material), broken.getItemStack(material), null, 0, intact.getStrength()); } } private static void addGrindingRecipe(ItemStack input, ItemStack output, ItemStack secondaryOutput, double secondaryChance, Strength strength) { TEIntegration.addPulverizerRecipe(input, output, secondaryOutput, secondaryChance, strength); if (strength == Strength.WEAK || strength == Strength.MEDIUM) { IC2Integration.addMaceratorRecipe(input, output); } } private static void registerMicroblocks() { if (Loader.isModLoaded("ForgeMicroblock")) { FMPIntegration.registerMicroblock(GeologicaBlocks.WEAK_STONE); FMPIntegration.registerMicroblock(GeologicaBlocks.MEDIUM_STONE); FMPIntegration.registerMicroblock(GeologicaBlocks.STRONG_STONE); FMPIntegration.registerMicroblock(GeologicaBlocks.VERY_STRONG_STONE); FMPIntegration.registerMicroblock(GeologicaBlocks.MEDIUM_COBBLE); FMPIntegration.registerMicroblock(GeologicaBlocks.STRONG_COBBLE); FMPIntegration.registerMicroblock(GeologicaBlocks.VERY_STRONG_COBBLE); } } private static void registerOreDrops() { ChanceDropRegistry drops = ChanceDropRegistry.instance(); registerOreDrop(drops, GeoMaterial.CONGLOMERATE, "nuggetCopper", 1, 3, 0.1F, true); 
registerOreDrop(drops, GeoMaterial.GARNET_SAND, Items.gold_nugget, 4, 4, 0.1F, true); registerOreDrop(drops, GeoMaterial.GARNET_SAND, "nuggetElectrum", 2, 2, 0.05F, true); registerOreDrop(drops, GeoMaterial.GARNET_SAND, "nuggetSilver", 1, 2, 0.05F, true); if (Geologica.getConfiguration().isVanillaOreGemDropEnabled()) { registerOreDrop(drops, GeoMaterial.COAL, Items.coal, 1, 0, 1.0F, true); registerOreDrop(drops, GeoMaterial.DIAMOND, Items.diamond, 1, 0, 1.0F, true); registerOreDrop(drops, GeoMaterial.LAPIS, "gemLapis", 4, 5, 1.0F, true); registerOreDrop(drops, GeoMaterial.EMERALD, Items.emerald, 1, 0, 1.0F, true); registerOreDrop(drops, GeoMaterial.REDSTONE, Items.redstone, 4, 2, 1.0F, false); } } private static void registerOreDrop(ChanceDropRegistry drops, GeoMaterial material, String key, int quantity, int bonus, float chance, boolean fortuneMultiplies) { List<ItemStack> ores = OreDictionary.getOres(key); if (ores.size() > 0) { ItemStack drop = ores.get(0).copy(); drop.stackSize = quantity; drops.addChanceDrop(material, drop, bonus, chance, fortuneMultiplies); } } private static void registerOreDrop(ChanceDropRegistry drops, GeoMaterial material, Item item, int quantity, int bonus, float chance, boolean fortuneMultiplies) { registerOreDrop(drops, material, new ItemStack(item, quantity), bonus, chance, fortuneMultiplies); } private static void registerOreDrop(ChanceDropRegistry drops, GeoMaterial material, ItemStack item, int bonus, float chance, boolean fortuneMultiplies) { drops.addChanceDrop(material, item, bonus, chance, fortuneMultiplies); } }
package org.sagebionetworks.web.client.view; import org.gwtbootstrap3.client.ui.Heading; import org.sagebionetworks.web.client.DisplayUtils; import org.sagebionetworks.web.client.presenter.EntityPresenter; import org.sagebionetworks.web.client.widget.LoadingSpinner; import com.google.gwt.core.client.GWT; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import com.google.web.bindery.event.shared.binder.EventBinder; public class EntityViewImpl implements EntityView { public interface EntityViewImplUiBinder extends UiBinder<Widget, EntityViewImpl> {} @UiField SimplePanel entityPageTopPanel; @UiField SimplePanel openInvitesPanel; @UiField SimplePanel synAlertContainer; @UiField LoadingSpinner loadingUI; @UiField Heading accessDependentMessage; private Widget widget; @Inject public EntityViewImpl(EntityViewImplUiBinder binder) { widget = binder.createAndBindUi(this); Window.scrollTo(0, 0); // scroll user to top of page // TODO : need to dynamically set the header widget //headerWidget.setMenuItemActive(MenuItems.PROJECTS); } @Override public void setEntityPageTopWidget(IsWidget entityPageTopWidget) { entityPageTopPanel.clear(); entityPageTopPanel.setWidget(entityPageTopWidget); } @Override public void setOpenTeamInvitesWidget(IsWidget openTeamInvitesWidget) { openInvitesPanel.clear(); openInvitesPanel.setWidget(openTeamInvitesWidget); } @Override public Widget asWidget() { return widget; } @Override public void showErrorMessage(String message) { DisplayUtils.showErrorMessage(message); } @Override public void setLoadingVisible(boolean isVisible) { loadingUI.setVisible(isVisible); } @Override public void showInfo(String message) { DisplayUtils.showInfo(message); } @Override public void setSynAlertWidget(IsWidget 
synAlert) { synAlert.asWidget().addStyleName("min-height-400 margin-top-60"); synAlertContainer.setWidget(synAlert); } @Override public void clear() { openInvitesPanel.setVisible(false); accessDependentMessage.setVisible(false); loadingUI.setVisible(false); Window.scrollTo(0, 0); // scroll user to top of page } @Override public void setEntityPageTopVisible(boolean isVisible) { entityPageTopPanel.setVisible(isVisible); } @Override public void setAccessDependentMessageVisible(boolean isVisible) { accessDependentMessage.setVisible(isVisible); } @Override public void setOpenTeamInvitesVisible(boolean isVisible) { openInvitesPanel.setVisible(isVisible); } /** Event binder code **/ interface EntityViewBinder extends EventBinder<EntityPresenter> {}; private final EntityViewBinder eventBinder = GWT.create(EntityViewBinder.class); @Override public EventBinder<EntityPresenter> getEventBinder() { return eventBinder; } }
package org.spongepowered.api.block.tile.carrier;

import org.spongepowered.api.entity.projectile.source.BlockProjectileSource;

/**
 * Represents a Dispenser.
 *
 * <p>A dispenser is both an inventory carrier (via
 * {@link TileEntityCarrier}) and a source that can launch projectiles
 * (via {@link BlockProjectileSource}).</p>
 */
public interface Dispenser extends TileEntityCarrier, BlockProjectileSource {

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): a flowerpot check on a dispenser looks like a
     * copy/paste artifact from another tile-entity interface template —
     * confirm whether this method actually belongs here before anything
     * implements or calls it.</p>
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();

}
package org.spongepowered.api.event.world;

import org.spongepowered.api.Game;
import org.spongepowered.api.world.World;

/**
 * Called when a {@link Game} unloads a {@link World} level.
 */
public interface WorldUnloadEvent extends WorldEvent {

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): a flowerpot check makes no sense on a world-unload
     * event; this is almost certainly a copy/paste artifact from a
     * tile-entity interface template. Confirm and remove before anything
     * implements or calls it.</p>
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();

}
package nakadi;

import com.google.gson.GsonBuilder;
import io.reactivex.Flowable;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import okhttp3.OkHttpClient;

/**
 * Essentially a dummy main class to keep the Shadow plugin happy, but also lets us check the
 * shading step works ok.
 */
public class NakadiClientMain {

  /**
   * Exercises a handful of shaded dependencies (Gson, RxJava, OkHttp) so a broken
   * shading step fails fast at runtime.
   *
   * @param args ignored
   * @throws Exception if the DNS lookup fails
   */
  public static void main(String[] args) throws Exception {
    // hello is static — no need to instantiate this class to call it.
    hello("thing one", "thing two");

    OkHttpClient client = new OkHttpClient();
    try {
      final List<InetAddress> localhost = client.dns().lookup("localhost");
      System.out.println("OkHttpClient found: " + localhost);
    } finally {
      // Release OkHttp's non-daemon dispatcher threads and pooled connections
      // so the JVM can exit promptly instead of waiting on idle timeouts.
      client.dispatcher().executorService().shutdown();
      client.connectionPool().evictAll();
    }
  }

  /**
   * Greets each supplied name: serializes the names to JSON on stdout
   * (exercises Gson), then emits a "Hello ...!" line per name through an
   * RxJava {@link Flowable}.
   *
   * @param names names to greet
   */
  public static void hello(String... names) {
    List<String> strings = new ArrayList<>(names.length);
    Collections.addAll(strings, names);
    new GsonBuilder().create().toJson(strings, System.out);
    Flowable.fromIterable(strings).subscribe(s -> System.out.println("Hello " + s + "!"));
  }
}
package org.spongepowered.common.block; import net.minecraft.core.BlockPos; import net.minecraft.nbt.CompoundTag; import net.minecraft.server.level.ServerLevel; import net.minecraft.world.level.block.Blocks; import net.minecraft.world.level.block.entity.BlockEntity; import org.apache.logging.log4j.Level; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.framework.qual.DefaultQualifier; import org.spongepowered.api.ResourceKey; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.entity.BlockEntityArchetype; import org.spongepowered.api.data.DataHolder; import org.spongepowered.api.data.DataManipulator; import org.spongepowered.api.data.Key; import org.spongepowered.api.data.persistence.AbstractDataBuilder; import org.spongepowered.api.data.persistence.DataContainer; import org.spongepowered.api.data.persistence.DataView; import org.spongepowered.api.data.persistence.InvalidDataException; import org.spongepowered.api.data.persistence.Queries; import org.spongepowered.api.data.value.Value; import org.spongepowered.api.world.BlockChangeFlag; import org.spongepowered.api.world.BlockChangeFlags; import org.spongepowered.api.world.server.ServerLocation; import org.spongepowered.api.world.server.storage.ServerWorldProperties; import org.spongepowered.common.SpongeCommon; import org.spongepowered.common.bridge.data.DataCompoundHolder; import org.spongepowered.common.bridge.data.DataContainerHolder; import org.spongepowered.common.data.holder.SpongeImmutableDataHolder; import org.spongepowered.common.data.persistence.NBTTranslator; import org.spongepowered.common.data.provider.nbt.NBTDataType; import org.spongepowered.common.data.provider.nbt.NBTDataTypes; import 
org.spongepowered.common.event.tracking.BlockChangeFlagManager; import org.spongepowered.common.event.tracking.PhaseContext; import org.spongepowered.common.event.tracking.PhaseTracker; import org.spongepowered.common.event.tracking.phase.block.BlockPhase; import org.spongepowered.common.util.Constants; import org.spongepowered.common.util.DataUtil; import org.spongepowered.common.util.PrettyPrinter; import org.spongepowered.common.util.VecHelper; import org.spongepowered.common.world.BlockChange; import org.spongepowered.common.world.SpongeBlockChangeFlag; import org.spongepowered.math.vector.Vector3i; import java.lang.ref.WeakReference; import java.util.Arrays; import java.util.Deque; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.StringJoiner; import java.util.UUID; import java.util.concurrent.ConcurrentLinkedDeque; @DefaultQualifier(NonNull.class) public final class SpongeBlockSnapshot implements BlockSnapshot, SpongeImmutableDataHolder<BlockSnapshot>, DataContainerHolder.Immutable<BlockSnapshot>, DataCompoundHolder { private final BlockState blockState; private final ResourceKey worldKey; private final Vector3i pos; final @Nullable CompoundTag compound; // Internal use only private final BlockPos blockPos; private final SpongeBlockChangeFlag changeFlag; @Nullable WeakReference<ServerLevel> world; @MonotonicNonNull public BlockChange blockChange; // used for post event SpongeBlockSnapshot(final BuilderImpl builder, final boolean copyCompound) { this.blockState = Objects.requireNonNull(builder.blockState); this.worldKey = Objects.requireNonNull(builder.worldKey); this.pos = Objects.requireNonNull(builder.coordinates); this.blockPos = VecHelper.toBlockPos(this.pos); if (copyCompound) { // defensive copy as the builder may further be modified this.compound = builder.compound == null ? null : builder.compound.copy(); } else { // pooled builder has been reset so this won't be modified. 
this.compound = builder.compound; } this.changeFlag = builder.flag; this.world = builder.worldRef; builder.worldRef = null; } SpongeBlockSnapshot() { this.blockState = (BlockState) Blocks.AIR.defaultBlockState(); this.worldKey = Constants.World.INVALID_WORLD_KEY; this.pos = Vector3i.ZERO; this.blockPos = BlockPos.ZERO; this.compound = null; this.changeFlag = null; } @Override public BlockState state() { return this.blockState; } public net.minecraft.world.level.block.state.BlockState nativeState() { return ((net.minecraft.world.level.block.state.BlockState) this.blockState); } @Override public BlockSnapshot withState(final BlockState blockState) { return this.createBuilder().blockState(blockState).build(); } @Override public BlockSnapshot withContainer(final DataContainer container) { return BuilderImpl.pooled().build(container).get(); } @Override public ResourceKey world() { return this.worldKey; } @Override public Vector3i position() { return this.pos; } @Override public Optional<ServerLocation> location() { return this.getServerWorld() .map(world -> ServerLocation.of((org.spongepowered.api.world.server.ServerWorld) world, this.pos)); } @Override public BlockSnapshot withLocation(final ServerLocation location) { return SpongeBlockSnapshot.BuilderImpl.pooled().from(this).position(location.blockPosition()).world(location.worldKey()).build(); } @Override public boolean restore(final boolean force, final BlockChangeFlag flag) { // TODO - rewrite with the PhaseTracker being the hook or use SpongeImplHooks to do the restore. final Optional<ServerLevel> optionalWorld = Optional.ofNullable(this.world.get()); if (!optionalWorld.isPresent()) { return false; } final ServerLevel world = optionalWorld.get(); // We need to deterministically define the context as nullable if we don't need to enter. // this way we guarantee an exit. 
try (final PhaseContext<?> context = BlockPhase.State.RESTORING_BLOCKS.createPhaseContext(PhaseTracker.SERVER)) { context.buildAndSwitch(); final BlockPos pos = VecHelper.toBlockPos(this.pos); if (!net.minecraft.world.level.Level.isInWorldBounds(pos)) { // Invalid position. Inline this check return false; } final net.minecraft.world.level.block.state.BlockState current = world.getBlockState(pos); final net.minecraft.world.level.block.state.BlockState replaced = (net.minecraft.world.level.block.state.BlockState) this.blockState; if (!force && (current.getBlock() != replaced.getBlock() || current != replaced)) { return false; } // Prevent Shulker Boxes from dropping when restoring BlockSnapshot // if (current.getBlock().getClass() == BlockShulkerBox.class) { // world.bridge$removeTileEntity(pos); world.removeBlockEntity(pos); world.setBlock(pos, replaced, BlockChangeFlagManager.andNotifyClients(flag).getRawFlag()); if (this.compound != null) { @Nullable BlockEntity te = world.getBlockEntity(pos); if (te != null) { te.load((net.minecraft.world.level.block.state.BlockState) this.blockState, this.compound); } else { // Because, some mods will "unintentionally" only obey some of the rules but not all. // In cases like this, we need to directly just say "fuck it" and deserialize from the compound directly. try { te = BlockEntity.loadStatic((net.minecraft.world.level.block.state.BlockState) this.blockState, this.compound); if (te != null) { world.getChunk(pos).setBlockEntity(pos, te); } } catch (final Exception e) { // Seriously? The mod should be broken then. final PrettyPrinter printer = new PrettyPrinter(60).add("Unable to restore").centre().hr() .add("A mod is not correctly deserializing a TileEntity that is being restored. ") .addWrapped(60, "Note that this is not the fault of Sponge. Sponge is understanding that " + "a block is supposed to have a TileEntity, but the mod is breaking the contract" + "on how to re-create the tile entity. 
Please open an issue with the offending mod.") .add("Here's the provided compound:"); printer.add(); try { printer.addWrapped(80, "%s : %s", "This compound", this.compound); } catch (final Throwable error) { printer.addWrapped( 80, "Unable to get the string of this compound. Printing out some of the entries to better assist" ); } printer.add() .add("Desired World: " + this.worldKey) .add("Position: " + this.pos) .add("Desired BlockState: " + this.blockState); printer.add(); printer.log(SpongeCommon.logger(), Level.ERROR); return true; // I mean, I guess. the block was set up, but not the tile entity. } } if (te != null) { te.setChanged(); } } // Finally, mark the location as being updated. world.getChunkSource().blockChanged(pos); return true; } } @Override public Optional<UUID> creator() { return Optional.empty(); } @Override public Optional<UUID> notifier() { return Optional.empty(); } @Override public Optional<BlockEntityArchetype> createArchetype() { throw new UnsupportedOperationException("Not implemented yet, please fix when this is called"); } @Override public BlockSnapshot withRawData(final DataView container) throws InvalidDataException { return BuilderImpl.pooled().buildContent(container).orElseThrow(InvalidDataException::new); } @Override public boolean validateRawData(final DataView container) { return BuilderImpl.pooled().buildContent(container).isPresent(); } @Override public BlockSnapshot copy() { return this; } @Override public int contentVersion() { return 1; } @Override public DataContainer toContainer() { final DataContainer container = DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED) .set(Queries.CONTENT_VERSION, this.contentVersion()) .set(Queries.WORLD_KEY, this.worldKey.asString()) .createView(Constants.Sponge.SNAPSHOT_WORLD_POSITION) .set(Queries.POSITION_X, this.pos.x()) .set(Queries.POSITION_Y, this.pos.y()) .set(Queries.POSITION_Z, this.pos.z()) .container() .set(Constants.Block.BLOCK_STATE, this.blockState); if (this.compound 
!= null) { container.set(Constants.Sponge.UNSAFE_NBT, NBTTranslator.INSTANCE.translateFrom(this.compound)); } return container; } public Optional<ServerLevel> getServerWorld() { @Nullable ServerLevel world = this.world != null ? this.world.get() : null; if (world == null) { world = (ServerLevel) Sponge.server().worldManager().world(this.worldKey).orElse(null); if (world != null) { this.world = new WeakReference<>(world); } } return Optional.ofNullable(world); } public Optional<CompoundTag> getCompound() { return this.compound == null ? Optional.empty() : Optional.of(this.compound.copy()); } public BuilderImpl createBuilder() { final BuilderImpl builder = BuilderImpl.pooled(); builder.blockState(this.blockState) .position(this.pos); if (this.world != null && this.world.get() != null) { builder.world(this.world.get()); } else { builder.world(this.worldKey); } if (this.compound != null) { builder.addUnsafeCompound(this.compound); } return builder; } @Override public DataContainer data$getDataContainer() { if (this.compound == null) { return DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED); } return NBTTranslator.INSTANCE.translate(this.compound); } @Override public BlockSnapshot data$withDataContainer(final DataContainer container) { final BuilderImpl builder = this.createBuilder(); builder.compound = NBTTranslator.INSTANCE.translate(container);; return builder.build(); } @Override public CompoundTag data$getCompound() { return this.compound == null ? 
new CompoundTag() : this.compound.copy(); } @Override public void data$setCompound(final CompoundTag nbt) { // do nothing this is immutable } @Override public List<DataHolder> impl$delegateDataHolder() { return Arrays.asList(this, this.state(), this.state().type()); } @Override public NBTDataType data$getNBTDataType() { return NBTDataTypes.BLOCK_ENTITY; } // Used internally for restores public SpongeBlockChangeFlag getChangeFlag() { return this.changeFlag; } public BlockPos getBlockPos() { return this.blockPos; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || this.getClass() != o.getClass()) { return false; } final SpongeBlockSnapshot that = (SpongeBlockSnapshot) o; return this.blockState.equals(that.blockState) && this.changeFlag == that.changeFlag && Objects.equals(this.worldKey, that.worldKey) && Objects.equals(this.pos, that.pos) && Objects.equals(this.compound, that.compound); } @Override public int hashCode() { return Objects .hash(this.blockState, this.worldKey, this.pos, this.changeFlag, this.compound); } @Override public String toString() { return new StringJoiner(", ", SpongeBlockSnapshot.class.getSimpleName() + "[", "]") .add("world=" + this.worldKey) .add("position=" + this.blockPos) .add("blockState=" + this.blockState) .toString(); } public static final class BuilderImpl extends AbstractDataBuilder<@NonNull BlockSnapshot> implements BlockSnapshot.Builder { private static final Deque<BuilderImpl> pool = new ConcurrentLinkedDeque<>(); public static BuilderImpl unpooled() { return new BuilderImpl(false); } public static BuilderImpl pooled() { final BuilderImpl builder = BuilderImpl.pool.pollFirst(); if (builder != null) { return builder.reset(); } return new BuilderImpl(true); } BlockState blockState; ResourceKey worldKey; @Nullable UUID creatorUniqueId; @Nullable UUID notifierUniqueId; Vector3i coordinates; @Nullable List<DataManipulator.Immutable> manipulators; @Nullable CompoundTag compound; 
SpongeBlockChangeFlag flag = (SpongeBlockChangeFlag) BlockChangeFlags.ALL; @Nullable WeakReference<ServerLevel> worldRef; private final boolean pooled; private BuilderImpl(final boolean pooled) { super(BlockSnapshot.class, 1); this.pooled = pooled; } @Override public @NonNull BuilderImpl world(final @NonNull ServerWorldProperties worldProperties) { this.worldKey = Objects.requireNonNull(worldProperties).key(); return this; } public BuilderImpl world(final ResourceKey key) { this.worldKey = Objects.requireNonNull(key); return this; } public BuilderImpl world(final ServerLevel world) { this.worldKey = ((org.spongepowered.api.world.server.ServerWorld) Objects.requireNonNull(world)).key(); this.worldRef = new WeakReference<>(world); return this; } @Override public @NonNull BuilderImpl blockState(final @NonNull BlockState blockState) { this.blockState = Objects.requireNonNull(blockState); return this; } public BuilderImpl blockState(final net.minecraft.world.level.block.state.BlockState blockState) { this.blockState = Objects.requireNonNull((BlockState) blockState); return this; } @Override public @NonNull BuilderImpl position(final @NonNull Vector3i position) { this.coordinates = Objects.requireNonNull(position); if (this.compound != null) { this.compound.putInt(Constants.Sponge.BlockSnapshot.TILE_ENTITY_POSITION_X, position.x()); this.compound.putInt(Constants.Sponge.BlockSnapshot.TILE_ENTITY_POSITION_Y, position.y()); this.compound.putInt(Constants.Sponge.BlockSnapshot.TILE_ENTITY_POSITION_Z, position.z()); } return this; } @Override public BlockSnapshot.@NonNull Builder from(final @NonNull ServerLocation location) { return this.from(location.createSnapshot()); } @Override public @NonNull BuilderImpl creator(final UUID uuid) { this.creatorUniqueId = Objects.requireNonNull(uuid); return this; } @Override public @NonNull BuilderImpl notifier(final UUID uuid) { this.notifierUniqueId = Objects.requireNonNull(uuid); return this; } @Override public <V> 
BlockSnapshot.@NonNull Builder add(final @NonNull Key<@NonNull ? extends Value<V>> key, final @NonNull V value) { Objects.requireNonNull(key); Objects.requireNonNull(value); this.blockState = this.blockState.with(key, value) .orElseThrow(() -> new IllegalArgumentException(String.format("Key %s is not supported for block state %s", key.key().asString(), this.blockState.toString()))); return this; } @Override public @NonNull BuilderImpl from(final BlockSnapshot holder) { Objects.requireNonNull(holder); this.blockState = holder.state(); this.worldKey = holder.world(); if (holder.creator().isPresent()) { this.creatorUniqueId = holder.creator().get(); } if (holder.notifier().isPresent()) { this.notifierUniqueId = holder.notifier().get(); } this.coordinates = holder.position(); return this; } public BuilderImpl from(final SpongeBlockSnapshot snapshot) { Objects.requireNonNull(snapshot); this.blockState = snapshot.state(); this.worldKey = snapshot.world(); this.worldRef = snapshot.world; if (snapshot.compound != null) { // make a copy so that any changes to this compound in the builder // (position) won't accidently be reflected in the original snapshot. 
this.compound = snapshot.compound.copy(); } else { this.compound = null; } this.coordinates = snapshot.position(); this.flag = snapshot.getChangeFlag(); return this; } public BlockState getBlockState() { return this.blockState; } public ResourceKey getWorldKey() { return this.worldKey; } public @Nullable UUID getCreatorUniqueId() { return this.creatorUniqueId; } public Vector3i getCoordinates() { return this.coordinates; } public @Nullable List<DataManipulator.Immutable> getManipulators() { return this.manipulators; } public @Nullable CompoundTag getCompound() { return this.compound; } public SpongeBlockChangeFlag getFlag() { return this.flag; } @Override public @NonNull BuilderImpl reset() { this.blockState = (BlockState) Blocks.AIR.defaultBlockState(); this.worldKey = Constants.World.INVALID_WORLD_KEY; this.creatorUniqueId = null; this.notifierUniqueId = null; this.coordinates = null; this.manipulators = null; this.compound = null; this.flag = null; return this; } @Override public @NonNull SpongeBlockSnapshot build() { Objects.requireNonNull(this.blockState, "BlockState cannot be null!"); final SpongeBlockSnapshot spongeBlockSnapshot = new SpongeBlockSnapshot(this, !this.pooled); this.reset(); if (this.pooled) { BuilderImpl.pool.push(this); } return spongeBlockSnapshot; } @Override protected @NonNull Optional<BlockSnapshot> buildContent(final DataView container) throws InvalidDataException { if (!container.contains(Constants.Block.BLOCK_STATE, Constants.Sponge.SNAPSHOT_WORLD_POSITION)) { return Optional.empty(); } // if we have no world-key check if we can find by uuid if (!container.contains(Queries.WORLD_KEY)) { if (!container.contains(Constants.Sponge.BlockSnapshot.WORLD_UUID)) { return Optional.empty(); } final UUID uuid = UUID.fromString(container.getString(Constants.Sponge.BlockSnapshot.WORLD_UUID).get()); Sponge.server().worldManager().worldKey(uuid).ifPresent(worldKey -> container.set(Queries.WORLD_KEY, worldKey)); } DataUtil.checkDataExists(container, 
Constants.Block.BLOCK_STATE); DataUtil.checkDataExists(container, Queries.WORLD_KEY); final BuilderImpl builder = BuilderImpl.pooled(); final ResourceKey worldKey = container.getResourceKey(Queries.WORLD_KEY).get(); final Vector3i coordinate = DataUtil.getPosition3i(container); final Optional<String> creatorUuid = container.getString(Queries.CREATOR_ID); final Optional<String> notifierUuid = container.getString(Queries.NOTIFIER_ID); final BlockState blockState = container.getSerializable(Constants.Block.BLOCK_STATE, BlockState.class).get(); builder.blockState(blockState).world(worldKey).position(coordinate); creatorUuid.ifPresent(s -> builder.creator(UUID.fromString(s))); notifierUuid.ifPresent(s -> builder.notifier(UUID.fromString(s))); container.getView(Constants.Sponge.UNSAFE_NBT) .map(dataView -> NBTTranslator.INSTANCE.translate(dataView)) .ifPresent(builder::addUnsafeCompound); return Optional.of(builder.build()); } public BuilderImpl addUnsafeCompound(final CompoundTag compound) { Objects.requireNonNull(compound); this.compound = compound.copy(); return this; } public BuilderImpl flag(final BlockChangeFlag flag) { this.flag = (SpongeBlockChangeFlag) flag; return this; } public BuilderImpl tileEntity(final BlockEntity added) { this.compound = null; final CompoundTag tag = new CompoundTag(); added.save(tag); this.compound = tag; return this; } } public static final class FactoryImpl implements Factory { private static final SpongeBlockSnapshot EMPTY = new SpongeBlockSnapshot(); @Override public BlockSnapshot empty() { return FactoryImpl.EMPTY; } } }
package org.unitedinternet.cosmo.dav.impl;

import carldav.jackrabbit.webdav.property.CustomDavPropertySet;
import carldav.jackrabbit.webdav.version.report.CustomReportType;
import org.apache.commons.lang.StringEscapeUtils;
import org.unitedinternet.cosmo.dav.CosmoDavException;
import org.unitedinternet.cosmo.dav.DavResourceFactory;
import org.unitedinternet.cosmo.dav.DavResourceLocator;
import org.unitedinternet.cosmo.dav.caldav.CaldavConstants;
import org.unitedinternet.cosmo.dav.caldav.property.AddressbookHomeSet;
import org.unitedinternet.cosmo.dav.caldav.property.CalendarHomeSet;
import org.unitedinternet.cosmo.dav.property.*;
import org.unitedinternet.cosmo.model.hibernate.User;
import org.unitedinternet.cosmo.server.ServerConstants;

import javax.servlet.http.HttpServletResponse;
import javax.xml.namespace.QName;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;
import java.util.Set;

import static carldav.CarldavConstants.*;
import static org.springframework.http.HttpHeaders.ETAG;
import static org.springframework.http.HttpHeaders.LAST_MODIFIED;

/**
 * DAV resource representing a single user principal. Exposes the user's
 * identity and calendar/addressbook home sets as live WebDAV properties and
 * renders a minimal HTML page for GET requests.
 */
public class DavUserPrincipal extends DavResourceBase implements CaldavConstants {

    /** The user this principal resource represents. */
    private final User user;

    /**
     * Creates the principal resource and registers the live properties it
     * serves via PROPFIND.
     *
     * @param user    backing user entity
     * @param locator locator identifying this resource's URL
     * @param factory resource factory used for related lookups
     * @throws CosmoDavException on initialization failure in the superclass
     */
    public DavUserPrincipal(User user, DavResourceLocator locator, DavResourceFactory factory)
            throws CosmoDavException {
        super(locator, factory);

        registerLiveProperty(GET_LAST_MODIFIED);
        registerLiveProperty(DISPLAY_NAME);
        registerLiveProperty(IS_COLLECTION);
        registerLiveProperty(RESOURCE_TYPE);
        registerLiveProperty(GET_ETAG);
        registerLiveProperty(CALENDAR_HOME_SET);
        registerLiveProperty(PRINCIPAL_URL);
        registerLiveProperty(ADDRESSBOOK_HOME_SET);

        this.user = user;
    }

    /** Methods supported by a principal resource (read-only). */
    public String getSupportedMethods() {
        return "OPTIONS, GET, PROPFIND";
    }

    /** A principal is a leaf resource, never a collection. */
    public boolean isCollection() {
        return false;
    }

    /**
     * Modification time of the resource; -1 means "unknown", which suppresses
     * the Last-Modified header in {@link #writeHead}.
     */
    public long getModificationTime() {
        return -1; //TODO user.getModifiedDate().getTime();
    }

    /** Principals always exist once constructed from a known user. */
    public boolean exists() {
        return true;
    }

    /** Display name shown in PROPFIND and on the HTML page. */
    public String getDisplayName() {
        return user.getEmail();
    }

    /** ETag; null means "none", which suppresses the ETag header. */
    public String getETag() {
        return null; //TODO"\"" + user.getEntityTag() + "\"";
    }

    /** No additional DAV resource types beyond the defaults. */
    protected Set<QName> getResourceTypes() {
        return Collections.emptySet();
    }

    /** No reports are supported on a principal resource. */
    public Set<CustomReportType> getReportTypes() {
        return Collections.emptySet();
    }

    /**
     * Populates the live property set advertised by this resource:
     * identity properties plus the CalDAV/CardDAV home-set locations.
     */
    protected void loadLiveProperties(CustomDavPropertySet properties) {
        properties.add(new DisplayName(getDisplayName()));
        properties.add(new ResourceType(getResourceTypes()));
        properties.add(new IsCollection(isCollection()));
        //TODO properties.add(new Etag(user.getEntityTag()));
        //TODO properties.add(new LastModified(user.getModifiedDate()));
        properties.add(new CalendarHomeSet("/" + ServerConstants.SVC_DAV, user));
        properties.add(new PrincipalUrl(getResourceLocator(), user));
        properties.add(new AddressbookHomeSet("/" + ServerConstants.SVC_DAV, user));
    }

    /**
     * Writes response headers for GET/HEAD: content type plus Last-Modified
     * and ETag when they are known (see getModificationTime/getETag).
     */
    public void writeHead(final HttpServletResponse response) throws IOException {
        response.setContentType(TEXT_HTML_VALUE);
        if (getModificationTime() >= 0) {
            response.addDateHeader(LAST_MODIFIED, getModificationTime());
        }
        if (getETag() != null) {
            response.setHeader(ETAG, getETag());
        }
    }

    /**
     * Renders a small HTML page listing this principal's WebDAV properties
     * and a link to the principal's home collection. All dynamic text is
     * HTML-escaped. Uses try-with-resources so the writer is closed (and
     * flushed) on every path, including exceptions.
     */
    public void writeBody(final HttpServletResponse response) throws IOException {
        try (PrintWriter writer =
                 new PrintWriter(new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8))) {
            writer.write("<html>\n<head><title>");
            writer.write(StringEscapeUtils.escapeHtml(getDisplayName()));
            writer.write("</title></head>\n");
            writer.write("<body>\n");
            writer.write("<h1>");
            writer.write(StringEscapeUtils.escapeHtml(getDisplayName()));
            writer.write("</h1>\n");

            writer.write("<h2>Properties</h2>\n");
            writer.write("<dl>\n");
            for (final Map.Entry<String, WebDavProperty> i : getWebDavProperties().entrySet()) {
                WebDavProperty prop = i.getValue();
                String text = prop.getValueText();
                writer.write("<dt>");
                writer.write(StringEscapeUtils.escapeHtml(prop.getName().toString()));
                writer.write("</dt><dd>");
                generateHrefIfNecessary(writer, prop, text);
                writer.write("</dd>\n");
            }
            writer.write("</dl>\n");

            writer.write("<p>\n");
            final DavResourceLocator principalLocator =
                getResourceLocator().getFactory().createPrincipalLocator(getResourceLocator().getContext(), user);
            writer.write("<a href=\"");
            writer.write(principalLocator.getHref(true));
            writer.write("\">");
            writer.write("Home collection");
            writer.write("</a><br>\n");

            writer.write("</body>");
            writer.write("</html>\n");
        }
    }
}
package org.vetmeduni.tools.implemented;

import htsjdk.samtools.*;
import htsjdk.samtools.fastq.FastqWriter;
import htsjdk.samtools.fastq.FastqWriterFactory;
import htsjdk.samtools.util.ProgressLogger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.vetmeduni.io.readers.SamReaderSanger;
import org.vetmeduni.io.writers.PairFastqWriters;
import org.vetmeduni.methods.barcodes.BarcodeDictionary;
import org.vetmeduni.methods.barcodes.BarcodeDictionaryFactory;
import org.vetmeduni.methods.barcodes.BarcodeMethods;
import org.vetmeduni.tools.AbstractTool;
import org.vetmeduni.tools.CommonOptions;
import org.vetmeduni.utils.IOUtils;
import org.vetmeduni.utils.fastq.ProgressLoggerExtension;
import org.vetmeduni.utils.record.SAMRecordUtils;

import java.io.File;
import java.io.IOException;

/**
 * Tool that converts a barcoded BAM/SAM file into FASTQ, assigning each read
 * to a sample by matching barcodes stored in BAM tags against a barcode
 * dictionary. Reads whose barcodes cannot be matched are written to a
 * separate "_discarded" output.
 */
public class BarcodedBamToFastq extends AbstractTool {

    /**
     * Parses the command line, opens the input reader and dispatches to the
     * single-end or pair-end processing routine.
     *
     * @param args raw command-line arguments
     * @return 0 on success, 1 on user/input errors, 2 on unexpected errors
     */
    @Override
    public int run(String[] args) {
        try {
            // parsing command line
            CommandLine cmd = programParser(args);
            String inputString = cmd.getOptionValue("i");
            String outputPrefix = cmd.getOptionValue("o");
            String barcodes = cmd.getOptionValue("bc");
            int[] max = getIntArrayOptions(cmd.getOptionValues("m"), BarcodeMethods.DEFAULT_MISMATCHES);
            String[] tags = cmd.getOptionValues("t");
            logger.debug("Maximum mistmaches (", max.length, "): ", max);
            logger.debug("Tags (", tags.length, "): ", tags);
            // either one global mismatch threshold or one per tag
            if (max.length != 1 && max.length != tags.length) {
                throw new ParseException("Number of maximum mismatches provided and number of tags does not match");
            }
            // FINISH PARSING: log the command line (not longer in the param file)
            logCmdLine(args);
            // open the barcode dictionary
            BarcodeDictionary barcodeDict = BarcodeDictionaryFactory
                .createDefaultDictionary(new File(barcodes), tags.length);
            logger.info("Loaded barcode file for ", barcodeDict.numberOfUniqueSamples(), " samples with ",
                barcodeDict.numberOfSamples(), " different barcode sets");
            BarcodeMethods methods = new BarcodeMethods(barcodeDict);
            // open the bam file
            SamReader input;
            // if the format is maintained
            if (CommonOptions.isMaintained(logger, cmd)) {
                // if the format is maintained, create a default sam reader
                input = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
                                        .open(new File(inputString));
            } else {
                // if not, standardize qualities to Sanger encoding
                input = new SamReaderSanger(new File(inputString), ValidationStringency.SILENT);
            }
            // ensure the reader is closed on every path, not only on success
            try {
                // single end processing
                if (cmd.hasOption("s")) {
                    runSingle(input, outputPrefix, !(cmd.hasOption(CommonOptions.disableZippedOutput.getOpt())),
                        methods, max, tags);
                } else {
                    runPaired(input, outputPrefix, !(cmd.hasOption(CommonOptions.disableZippedOutput.getOpt())),
                        methods, max, tags);
                }
            } finally {
                input.close();
            }
        } catch (ParseException e) {
            // errors raised while parsing the command line: show usage
            printUsage(e.getMessage());
            return 1;
        } catch (IOException | SAMException e) {
            // expected errors: IO if the user provides bad inputs, SAM if there are problems in the files
            logger.error(e.getMessage());
            logger.debug(e);
            return 1;
        } catch (Exception e) {
            // unknown exception: only logged at debug level
            logger.debug(e);
            return 2;
        }
        return 0;
    }

    /**
     * Processes an interleaved pair-end BAM: reads records two at a time,
     * matches their barcodes and writes matched pairs to the main writers and
     * unmatched pairs to the discarded writers.
     *
     * @param reader       open reader over the interleaved input
     * @param outputPrefix prefix for the FASTQ outputs
     * @param gzip         whether outputs are gzip-compressed
     * @param methods      barcode matcher built from the dictionary
     * @param max          maximum mismatches (one global value or one per tag)
     * @param tags         BAM tags holding the barcodes
     */
    private void runPaired(SamReader reader, String outputPrefix, boolean gzip, BarcodeMethods methods, int[] max,
        String[] tags) {
        PairFastqWriters writers = new PairFastqWriters(outputPrefix, gzip);
        PairFastqWriters discarded = new PairFastqWriters(String.format("%s_discarded", outputPrefix), gzip);
        // close both writer pairs even if matching/writing throws mid-stream
        try {
            SAMRecordIterator it = reader.iterator();
            ProgressLoggerExtension progress = new ProgressLoggerExtension(logger, 1000000, "Processed", "pairs");
            int unknown = 0;
            while (it.hasNext()) {
                SAMRecord record1 = it.next();
                // an interleaved file must contain an even number of records
                if (!it.hasNext()) {
                    throw new SAMException("Truncated interleaved BAM file");
                }
                SAMRecord record2 = it.next();
                // barcodes are taken from the first read of the pair only
                String[] barcodes = getBarcodeFromTags(record1, tags);
                String best = methods.getBestBarcode(max, barcodes);
                if (best.equals(BarcodeMethods.UNKNOWN_STRING)) {
                    // unmatched: tag names with the raw barcodes and discard
                    SAMRecordUtils.addBarcodeToName(record1, String.join("", barcodes));
                    SAMRecordUtils.addBarcodeToName(record2, String.join("", barcodes));
                    discarded.writePairs(SAMRecordUtils.toFastqRecord(record1, 1),
                        SAMRecordUtils.toFastqRecord(record2, 2));
                    unknown++;
                } else {
                    SAMRecordUtils.addBarcodeToName(record1, best);
                    SAMRecordUtils.addBarcodeToName(record2, best);
                    writers.writePairs(SAMRecordUtils.toFastqRecord(record1, 1),
                        SAMRecordUtils.toFastqRecord(record2, 2));
                }
                progress.record(record1);
            }
            logger.info(progress.numberOfVariantsProcessed());
            BarcodeDictionary dict = methods.getDictionary();
            logger.info("Found ", unknown, " pairs with unknown barcodes");
            for (int i = 0; i < dict.numberOfSamples(); i++) {
                logger.info("Found ", dict.getValueFor(i), " pairs for ", dict.getCombinedBarcodesFor(i), " (",
                    dict.getSampleNames().get(i), ")");
            }
        } finally {
            writers.close();
            discarded.close();
        }
    }

    /**
     * Processes a single-end BAM: matches each record's barcodes and writes
     * it to the main FASTQ writer or, if unmatched, to the discarded writer.
     *
     * @param reader       open reader over the input
     * @param outputPrefix prefix for the FASTQ outputs
     * @param gzip         whether outputs are gzip-compressed
     * @param methods      barcode matcher built from the dictionary
     * @param max          maximum mismatches (one global value or one per tag)
     * @param tags         BAM tags holding the barcodes
     */
    private void runSingle(SamReader reader, String outputPrefix, boolean gzip, BarcodeMethods methods, int[] max,
        String[] tags) {
        FastqWriterFactory factory = new FastqWriterFactory();
        FastqWriter writer = factory.newWriter(new File(IOUtils.makeInputFastqWithDefaults(outputPrefix, gzip)));
        FastqWriter discarded = factory
            .newWriter(new File(IOUtils.makeInputFastqWithDefaults(outputPrefix + "_discarded", gzip)));
        // close both writers even if matching/writing throws mid-stream
        try {
            ProgressLogger progress = new ProgressLogger(logger);
            int unknown = 0;
            for (SAMRecord record : reader) {
                String[] barcodes = getBarcodeFromTags(record, tags);
                String best = methods.getBestBarcode(max, barcodes);
                if (best.equals(BarcodeMethods.UNKNOWN_STRING)) {
                    SAMRecordUtils.addBarcodeToName(record, String.join("", barcodes));
                    discarded.write(SAMRecordUtils.toFastqRecord(record, null));
                    unknown++;
                } else {
                    SAMRecordUtils.addBarcodeToName(record, best);
                    writer.write(SAMRecordUtils.toFastqRecord(record, null));
                }
                progress.record(record);
            }
            BarcodeDictionary dict = methods.getDictionary();
            logger.info("Found ", unknown, " records with unknown barcodes");
            for (int i = 0; i < dict.numberOfSamples(); i++) {
                logger.info("Found ", dict.getValueFor(i), " records for ", dict.getSampleNames().get(i), " (",
                    dict.getCombinedBarcodesFor(i), ")");
            }
        } finally {
            writer.close();
            discarded.close();
        }
    }

    /**
     * Extracts the barcode stored under each of the given tags.
     *
     * @param record record to read the tags from
     * @param tags   tag names, one per barcode
     * @return the barcodes in the same order as {@code tags}
     */
    private static String[] getBarcodeFromTags(SAMRecord record, String... tags) {
        String[] toReturn = new String[tags.length];
        for (int i = 0; i < tags.length; i++) {
            toReturn[i] = getBarcodeFromTag(record, tags[i]);
        }
        return toReturn;
    }

    /**
     * Extracts a single barcode from a tag.
     *
     * @param record record to read the tag from
     * @param tag    tag name
     * @return the barcode value
     * @throws SAMException if the record does not carry the tag
     */
    private static String getBarcodeFromTag(SAMRecord record, String tag) {
        String barcode = record.getStringAttribute(tag);
        if (barcode == null) {
            throw new SAMException(tag + " not found in record " + record);
        }
        return barcode;
    }

    /**
     * Builds the command-line options accepted by this tool.
     *
     * @return the populated {@link Options} instance
     */
    @Override
    protected Options programOptions() {
        Option input = Option.builder("i").longOpt("input")
                             .desc("Input BAM/SAM file. If pair-end, it should be interleaved").hasArg()
                             .argName("INPUT.bam").numberOfArgs(1).required().build();
        Option output = Option.builder("o").longOpt("output").desc("FASTQ output prefix").hasArg()
                              .argName("OUTPUT_PREFIX").numberOfArgs(1).required().build();
        Option tag = Option.builder("t").longOpt("tag").desc(
            "Tag in the BAM file for the stored barcodes. It should be provided the same number of times as barcodes provided in the file.")
                           .hasArg().numberOfArgs(1).argName("TAG").required().build();
        Option barcodes = Option.builder("bc").longOpt("barcodes").desc(
            "Tab-delimited file with the first column with the sample name and the following containing the barcodes (1 or 2 depending on the barcoding method)")
                                .hasArg().numberOfArgs(1).argName("BARCODES.tab").required().build();
        Option max = Option.builder("m").longOpt("maximum-mismatches").desc(
            "Maximum number of mismatches alowwed for a matched barcode. It could be provided only once for use in all barcodes or the same number of times as barcodes provided in the file. [Default="
                + BarcodeMethods.DEFAULT_MISMATCHES + "]").hasArg().numberOfArgs(1).argName("INT").required(false)
                           .build();
        Option single = Option.builder("s").longOpt("single").desc("Switch to single-end parsing").hasArg(false)
                              .required(false).build();
        Options options = new Options();
        options.addOption(single);
        options.addOption(tag);
        options.addOption(barcodes);
        options.addOption(max);
        options.addOption(output);
        options.addOption(input);
        // add common options
        options.addOption(CommonOptions.maintainFormat); // maintain the format
        options.addOption(CommonOptions.disableZippedOutput); // disable zipped output
        // options.addOption(CommonOptions.parallel); // TODO: parallelization allowed
        return options;
    }
}
package refinedstorage.tile.data;

import net.minecraft.item.ItemStack;
import net.minecraft.network.PacketBuffer;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.DataSerializer;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fml.common.network.ByteBufUtils;
import refinedstorage.tile.ClientCraftingTask;
import refinedstorage.tile.ClientNode;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Custom {@link DataSerializer} implementations used to sync Refined Storage
 * tile data over the network. Each serializer's write() and read() must stay
 * byte-for-byte symmetric; do not reorder the buffer operations.
 * NOTE(review): all createKey() implementations return null — presumably the
 * keys are created elsewhere; confirm before relying on them.
 */
public final class RefinedStorageSerializers {
    /**
     * Serializes a list of client-side network nodes as:
     * size, then per node: item stack, amount, energy usage.
     */
    public static final DataSerializer<List<ClientNode>> CLIENT_NODE_SERIALIZER = new DataSerializer<List<ClientNode>>() {
        @Override
        public void write(PacketBuffer buf, List<ClientNode> nodes) {
            buf.writeInt(nodes.size());

            for (ClientNode node : nodes) {
                ByteBufUtils.writeItemStack(buf, node.getStack());
                buf.writeInt(node.getAmount());
                buf.writeInt(node.getEnergyUsage());
            }
        }

        @Override
        public List<ClientNode> read(PacketBuffer buf) {
            List<ClientNode> nodes = new ArrayList<>();

            int size = buf.readInt();

            // mirror of write(): stack, amount, energy usage per node
            for (int i = 0; i < size; ++i) {
                nodes.add(new ClientNode(ByteBufUtils.readItemStack(buf), buf.readInt(), buf.readInt()));
            }

            return nodes;
        }

        @Override
        public DataParameter<List<ClientNode>> createKey(int id) {
            return null;
        }
    };

    /**
     * Serializes a list of client-side crafting tasks as:
     * size, then per task: info string, output count, output stacks.
     * NOTE(review): read() flattens each task into one ClientCraftingTask per
     * output stack (not one per written task) and then reverses the list —
     * this asymmetry with write() looks intentional for display purposes, but
     * verify against the GUI code that consumes it.
     */
    public static final DataSerializer<List<ClientCraftingTask>> CLIENT_CRAFTING_TASK_SERIALIZER = new DataSerializer<List<ClientCraftingTask>>() {
        @Override
        public void write(PacketBuffer buf, List<ClientCraftingTask> tasks) {
            buf.writeInt(tasks.size());

            for (ClientCraftingTask task : tasks) {
                ByteBufUtils.writeUTF8String(buf, task.getInfo());

                buf.writeInt(task.getOutputs().length);

                for (ItemStack output : task.getOutputs()) {
                    ByteBufUtils.writeItemStack(buf, output);
                }
            }
        }

        @Override
        public List<ClientCraftingTask> read(PacketBuffer buf) {
            int size = buf.readInt();

            List<ClientCraftingTask> tasks = new ArrayList<>();

            for (int i = 0; i < size; ++i) {
                String info = ByteBufUtils.readUTF8String(buf);

                int outputs = buf.readInt();

                // one task entry per output stack; 'i' is the written task index
                for (int j = 0; j < outputs; ++j) {
                    tasks.add(new ClientCraftingTask(ByteBufUtils.readItemStack(buf), i, info));
                }
            }

            // newest-first ordering for display
            Collections.reverse(tasks);

            return tasks;
        }

        @Override
        public DataParameter<List<ClientCraftingTask>> createKey(int id) {
            return null;
        }
    };

    /**
     * Serializes a nullable FluidStack as:
     * presence flag, then (if present) fluid name, amount, NBT tag.
     */
    public static final DataSerializer<FluidStack> FLUID_STACK_SERIALIZER = new DataSerializer<FluidStack>() {
        @Override
        public void write(PacketBuffer buf, FluidStack value) {
            if (value == null) {
                // absent marker; read() returns null for it
                buf.writeBoolean(false);
            } else {
                buf.writeBoolean(true);
                ByteBufUtils.writeUTF8String(buf, FluidRegistry.getFluidName(value));
                buf.writeInt(value.amount);
                buf.writeNBTTagCompoundToBuffer(value.tag);
            }
        }

        @Override
        public FluidStack read(PacketBuffer buf) throws IOException {
            if (buf.readBoolean()) {
                return new FluidStack(FluidRegistry.getFluid(ByteBufUtils.readUTF8String(buf)), buf.readInt(), buf.readNBTTagCompoundFromBuffer());
            }

            return null;
        }

        @Override
        public DataParameter<FluidStack> createKey(int id) {
            return null;
        }
    };
}
package ru.VirtaMarketAnalyzer.data;

import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Immutable value object describing a production option whose output quality
 * is above the market average. Gson field names are shortened via
 * {@link SerializedName} to keep the serialized JSON compact.
 * NOTE(review): "cheaperThenMarket" is a pre-existing spelling of
 * "cheaperThanMarket" kept for API/JSON compatibility.
 */
public final class ProductionAboveAverage {
    // manufacture identifier ("mi" in JSON)
    @SerializedName("mi")
    final private String manufactureID;
    // factory specialization ("s" in JSON)
    @SerializedName("s")
    final private String specialization;
    // produced product identifier ("pi" in JSON)
    @SerializedName("pi")
    final private String productID;
    // production volume ("v" in JSON)
    @SerializedName("v")
    final private long volume;
    // output quality ("q" in JSON)
    @SerializedName("q")
    final private double quality;
    // production cost ("c" in JSON)
    @SerializedName("c")
    final private double cost;
    // technology level ("tl" in JSON)
    @SerializedName("tl")
    final private double techLvl;
    // remaining ingredient stock ("ir" in JSON)
    @SerializedName("ir")
    final private List<ProductRemain> ingredientsRemain;
    // maximum workplaces count ("mwc" in JSON)
    @SerializedName("mwc")
    final private long maxWorkplacesCount;
    // whether producing is cheaper than buying on the market ("ctm" in JSON)
    @SerializedName("ctm")
    final private boolean cheaperThenMarket;

    /**
     * Creates a fully-populated, immutable instance; all values are stored
     * as given (the ingredient list is not defensively copied).
     */
    public ProductionAboveAverage(final String manufactureID
            , final String specialization
            , final String productID
            , final long volume
            , final double quality
            , final double cost
            , final List<ProductRemain> ingredientsRemain
            , final double techLvl
            , final long maxWorkplacesCount
            , final boolean cheaperThenMarket
    ) {
        this.manufactureID = manufactureID;
        this.specialization = specialization;
        this.productID = productID;
        this.volume = volume;
        this.quality = quality;
        this.cost = cost;
        this.ingredientsRemain = ingredientsRemain;
        this.techLvl = techLvl;
        this.maxWorkplacesCount = maxWorkplacesCount;
        this.cheaperThenMarket = cheaperThenMarket;
    }

    public String getManufactureID() {
        return manufactureID;
    }

    public String getSpecialization() {
        return specialization;
    }

    public String getProductID() {
        return productID;
    }

    public long getVolume() {
        return volume;
    }

    public double getQuality() {
        return quality;
    }

    public double getCost() {
        return cost;
    }

    public double getTechLvl() {
        return techLvl;
    }

    public List<ProductRemain> getIngredientsRemain() {
        return ingredientsRemain;
    }

    public long getMaxWorkplacesCount() {
        return maxWorkplacesCount;
    }

    public boolean isCheaperThenMarket() {
        return cheaperThenMarket;
    }
}
package us.guihouse.projector.projection;

import java.awt.*;

import com.sun.javafx.embed.EmbeddedSceneInterface;
import javafx.embed.swing.JFXPanel;
import javafx.embed.swing.SwingNode;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.layout.Pane;
import javafx.scene.web.WebView;

import javax.swing.*;

/**
 * Projectable that renders a JavaFX WebView onto the projection canvas by
 * hosting it in a JFXPanel and painting that panel into the supplied
 * Graphics2D. The layering is JavaFX WebView -> Scene -> JFXPanel (Swing)
 * -> SwingNode -> Pane, which lets the web content live inside a JavaFX
 * scene graph while still being paintable via Swing.
 *
 * @author guilherme
 */
public class ProjectionWebView implements Projectable {

    // supplies the target width/height of the projection surface
    private final CanvasDelegate delegate;
    // SwingNode that embeds the JFXPanel back into the JavaFX scene graph
    private SwingNode node;
    // JavaFX container exposed to callers via getNode()
    private Pane container;
    // the actual web content
    private WebView webView;
    // Swing panel hosting the JavaFX scene; painted in paintComponent()
    private JFXPanel panel;
    // optional externally-managed size hint (simple accessor pair below)
    private Dimension maxSize;
    private Scene scene;

    public ProjectionWebView(CanvasDelegate delegate) {
        this.delegate = delegate;
    }

    /** Paints the hosted panel (and thus the web view) into the canvas. */
    @Override
    public void paintComponent(Graphics2D g) {
        panel.paint(g);
    }

    @Override
    public CanvasDelegate getCanvasDelegate() {
        return delegate;
    }

    /**
     * Forces every layer of the embedding chain to the delegate's current
     * width/height. The Swing portion is resized on the EDT, walking up the
     * parent chain so no ancestor clamps the panel smaller than the canvas.
     */
    @Override
    public void rebuildLayout() {
        int width = delegate.getWidth();
        int height = delegate.getHeight();

        webView.setPrefWidth(width);
        webView.setPrefHeight(height);
        webView.setMinWidth(width);
        webView.setMinHeight(height);

        node.minWidth(width);
        node.minHeight(height);
        node.resize(width, height);

        panel.setMinimumSize(new Dimension(width, height));
        panel.setPreferredSize(new Dimension(width, height));

        // Swing geometry must be mutated on the EDT
        SwingUtilities.invokeLater(() -> {
            Component root = panel;

            while (root != null) {
                root.setMinimumSize(new Dimension(width, height));
                root.setBounds(0, 0, width, height);
                root = root.getParent();
            }
        });
    }

    /**
     * Lazily builds the WebView/Scene/JFXPanel/SwingNode/Pane chain on first
     * call, then (re)applies the layout. Subsequent calls only re-layout.
     */
    @Override
    public void init() {
        if (webView == null) {
            webView = new WebView();

            int width = delegate.getWidth();
            int height = delegate.getHeight();

            scene = new Scene(webView, width, height);

            panel = new JFXPanel() {
                @Override
                public void setBounds(int x, int y, int width, int height) {
                    // workaround: reject attempts by the Swing layout to
                    // shrink the panel below the canvas size — presumably a
                    // layout feedback loop; confirm before removing
                    if (width >= delegate.getWidth() || height >= delegate.getHeight()) {
                        super.setBounds(x, y, width, height);
                    } else {
                        System.out.println("bug");
                    }
                }
            };

            panel.setScene(scene);

            this.node = new SwingNode() {
                @Override
                public boolean isResizable() {
                    // keep the node at the size set in rebuildLayout()
                    return false;
                }
            };
            node.setContent(panel);

            this.container = new Pane();
            container.setMinWidth(width);
            container.setMinHeight(height);
            container.getChildren().add(node);
        }

        rebuildLayout();
    }

    /** No resources to release; the JavaFX objects are GC-managed. */
    @Override
    public void finish() {

    }

    public Dimension getMaxSize() {
        return maxSize;
    }

    public void setMaxSize(Dimension maxSize) {
        this.maxSize = maxSize;
    }

    public WebView getWebView() {
        return webView;
    }

    public void setWebView(WebView webView) {
        this.webView = webView;
    }

    /** Returns the JavaFX node to attach to the projection scene graph. */
    public Node getNode() {
        return container;
    }
}
package works.chatterbox.chatterbox.channels;

import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import ninja.leaping.configurate.ConfigurationNode;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import works.chatterbox.chatterbox.Chatterbox;
import works.chatterbox.chatterbox.channels.files.FormatFiles;
import works.chatterbox.chatterbox.channels.radius.Radius;
import works.chatterbox.chatterbox.channels.worlds.WorldRecipients;
import works.chatterbox.chatterbox.wrappers.CPlayer;

import java.io.File;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * A chat channel backed by a node of the plugin configuration. Values not
 * present on the channel's own node fall back to the master channel node.
 */
public class ConfigChannel implements Channel {

    // shared, cached reader for file-based formats
    private final static FormatFiles formatFiles = new FormatFiles();
    // configuration node this channel is defined by
    private final ConfigurationNode node;
    // players currently in this channel
    private final Set<CPlayer> members = Sets.newHashSet();
    private final Chatterbox chatterbox;

    /**
     * Creates a channel by locating its node in the {@code channels} list by
     * (case-insensitive) name.
     *
     * @param chatterbox plugin instance
     * @param name       channel name to look up
     * @throws IllegalStateException if no channel with that name exists
     */
    public ConfigChannel(@NotNull final Chatterbox chatterbox, @NotNull final String name) {
        Preconditions.checkNotNull(chatterbox, "chatterbox was null");
        Preconditions.checkNotNull(name, "name was null");
        this.chatterbox = chatterbox;
        this.node = chatterbox.getConfiguration().getNode("channels").getChildrenList().stream()
            .filter(node -> name.equalsIgnoreCase(node.getNode(ChannelConfiguration.NAME.getKey()).getString()))
            .findFirst()
            .orElseThrow(() -> new IllegalStateException("No channel by the name " + name));
    }

    /**
     * Creates a channel directly from its configuration node.
     *
     * @param chatterbox plugin instance
     * @param node       channel node
     */
    public ConfigChannel(@NotNull final Chatterbox chatterbox, @NotNull final ConfigurationNode node) {
        Preconditions.checkNotNull(chatterbox, "chatterbox was null");
        Preconditions.checkNotNull(node, "node was null");
        this.chatterbox = chatterbox;
        this.node = node;
    }

    /**
     * Determines the correct node under {@code format} to use for the format. If a {@code file} node is specified, this
     * will load the file and return its contents. If not, this will return the contents of the {@code text} node. If
     * both are missing, this returns null.
     *
     * @return Format or null
     */
    @Nullable
    private String determineFormat() {
        final ConfigurationNode format = this.getConfiguration(ChannelConfiguration.FORMAT, node -> node);
        Preconditions.checkState(format != null, "No format specified for " + this.getName());
        if (!format.getNode(ChannelConfiguration.FORMAT_FILE.getKey()).isVirtual()) {
            return this.getFileFormat(format);
        }
        if (!format.getNode(ChannelConfiguration.FORMAT_TEXT.getKey()).isVirtual()) {
            return this.getTextFormat(format);
        }
        return null;
    }

    /**
     * Gets a configuration value from the given {@link ChannelConfiguration}.
     * <p>This gets all parent nodes, then the node corresponding to {@code configuration}, which is then checked to see
     * if it is virtual. If it is virtual, null is returned. If not, {@code function} is applied to the node. If
     * {@code function} returns null for the channel, it is applied to the master node. If it still returns null, null
     * is returned. If either the channel or master value is not null, that value is returned.
     *
     * @param configuration ChannelConfiguration that the value is desired of
     * @param function      Function to apply on the node represented by {@code configuration}, on channel and possibly master
     * @param <T>           Type that {@code function} returns, which this method returns
     * @return Desired value, from channel or master, or null
     */
    @Nullable
    private <T> T getConfiguration(@NotNull final ChannelConfiguration configuration, @NotNull final Function<ConfigurationNode, T> function) {
        Preconditions.checkNotNull(configuration, "configuration was null");
        Preconditions.checkNotNull(function, "function was null");
        return this.localOrMaster(node -> {
            // descend through the configured parent path, then the key itself
            ConfigurationNode child = node;
            for (final String parent : configuration.getParents()) {
                child = child.getNode(parent);
            }
            child = child.getNode(configuration.getKey());
            return child.isVirtual() ? null : function.apply(child);
        });
    }

    /**
     * Gets the cached contents of the {@code file} node in the given node. If there is any error, null will be
     * returned.
     *
     * @param formatNode Format node
     * @return File contents or null
     */
    @Nullable
    private String getFileFormat(@NotNull final ConfigurationNode formatNode) {
        Preconditions.checkNotNull(formatNode, "formatNode was null");
        return ConfigChannel.formatFiles.getFileContents(
            new File(this.chatterbox.getDataFolder(), formatNode.getNode(ChannelConfiguration.FORMAT_FILE.getKey()).getString())
        );
    }

    /**
     * Returns the value of the {@code text} node inside of the given node. If such a node is not present, null will be
     * returned.
     *
     * @param formatNode Format node
     * @return Node contents or null
     */
    @Nullable
    private String getTextFormat(@NotNull final ConfigurationNode formatNode) {
        Preconditions.checkNotNull(formatNode, "formatNode was null");
        return formatNode.getNode(ChannelConfiguration.FORMAT_TEXT.getKey()).getString();
    }

    /**
     * Applies {@code function} to this channel's node; if that yields null,
     * applies it to the master channel node as a fallback.
     *
     * @param function extractor to run on the node(s)
     * @return Channel value, master value, or null
     */
    @Nullable
    private <T> T localOrMaster(@NotNull final Function<ConfigurationNode, T> function) {
        Preconditions.checkNotNull(function, "function was null");
        final T local = function.apply(this.node);
        return local == null ? function.apply(this.chatterbox.getAPI().getChannelAPI().getMaster()) : local;
    }

    @Override
    public void addMember(@NotNull final CPlayer cp) {
        Preconditions.checkNotNull(cp, "cp was null");
        // Set.add returns false when already present; only notify on change
        if (this.members.add(cp)) {
            cp.joinChannel(this);
        }
    }

    @NotNull
    @Override
    public String getFormat() {
        final String format = this.determineFormat();
        Preconditions.checkState(format != null, "No format specified for " + this.getName());
        return format;
    }

    @Override
    @Nullable
    public String getJSONSection(@NotNull final String sectionName) {
        Preconditions.checkNotNull(sectionName, "sectionName was null");
        // null when the section is absent on both channel and master
        return this.getConfiguration(ChannelConfiguration.FORMAT_JSON, node -> node.getNode(sectionName).getString());
    }

    /**
     * {@inheritDoc}
     * <p>Note: the set returned is immutable.
     */
    @NotNull
    @Override
    public Set<CPlayer> getMembers() {
        return ImmutableSet.copyOf(this.members);
    }

    @NotNull
    @Override
    public String getName() {
        // May not use master
        final String name = this.node.getNode(ChannelConfiguration.NAME.getKey()).getString();
        Preconditions.checkState(name != null, "No name specified for channel");
        return name;
    }

    @Nullable
    @Override
    public Radius getRadius() {
        return this.getConfiguration(ChannelConfiguration.RADIUS, node -> node.getValue(input -> {
            if (!(input instanceof Map)) return null;
            @SuppressWarnings("unchecked")
            final Map<String, Object> internal = (Map<String, Object>) input;
            // TODO: Better way to handle this
            if (!Boolean.parseBoolean(internal.get(ChannelConfiguration.RADIUS_ENABLED.getKey()).toString())) {
                return null;
            }
            return new Radius(
                Double.parseDouble(internal.get(ChannelConfiguration.RADIUS_HORIZONTAL.getKey()).toString()),
                Double.parseDouble(internal.get(ChannelConfiguration.RADIUS_VERTICAL.getKey()).toString())
            );
        }));
    }

    @Nullable
    @Override
    public String getRecipientSection(@NotNull final String sectionName) {
        Preconditions.checkNotNull(sectionName, "sectionName was null");
        // null when the section is absent on both channel and master
        return this.getConfiguration(ChannelConfiguration.FORMAT_RECIPIENT, node -> node.getNode(sectionName).getString());
    }

    @NotNull
    @Override
    public String getTag() {
        // May not use master
        final String tag = this.node.getNode(ChannelConfiguration.TAG.getKey()).getString();
        Preconditions.checkState(tag != null, "No tag specified for channel");
        return tag;
    }

    @NotNull
    @Override
    public WorldRecipients getWorldRecipients() {
        final Boolean toAll = this.getConfiguration(ChannelConfiguration.WORLDS_ALL, ConfigurationNode::getBoolean);
        final Boolean toSelf = this.getConfiguration(ChannelConfiguration.WORLDS_SELF, ConfigurationNode::getBoolean);
        final Map<String, Boolean> individual = this.getConfiguration(
            ChannelConfiguration.WORLDS_INDIVIDUAL,
            node -> node.getChildrenMap().entrySet().stream()
                .collect(Collectors.toMap(
                    entry -> entry.getKey().toString(),
                    entry -> entry.getValue().getBoolean()
                ))
        );
        Preconditions.checkState(toAll != null, "No all worlds option specified for " + this.getName());
        Preconditions.checkState(toSelf != null, "No self world option specified for " + this.getName());
        Preconditions.checkArgument(individual != null, "No individual worlds option specified for " + this.getName());
        return new WorldRecipients(individual, toSelf, toAll);
    }

    @Override
    public boolean isPermanent() {
        final Boolean permanent = this.getConfiguration(ChannelConfiguration.PERMANENT, ConfigurationNode::getBoolean);
        Preconditions.checkState(permanent != null, "No permanent setting was specified for " + this.getName());
        return permanent;
    }

    @Override
    public void removeMember(@NotNull final CPlayer cp) {
        Preconditions.checkNotNull(cp, "cp was null");
        // Set.remove returns false when not present; only notify on change
        if (this.members.remove(cp)) {
            cp.leaveChannel(this);
        }
    }

    /**
     * Returns the {@link ConfigurationNode} that this ConfigChannel is based upon. This represents one element of the
     * channel list in the {@code config.yml}.
     * <p>This may be useful for developers to access custom config options.
     *
     * @return Node
     */
    @NotNull
    public ConfigurationNode getNode() {
        return this.node;
    }

    @Override
    public String toString() {
        // NOTE(review): Objects.toStringHelper is deprecated in newer Guava
        // (MoreObjects.toStringHelper); kept as-is for the bundled version
        return Objects.toStringHelper(this)
            .add("name", this.getName())
            .add("tag", this.getTag())
            .add("members", this.members)
            .toString();
    }
}
/*
 * @author wangzhongqiu
 *
 * Study notes on java.util.concurrent.locks (reconstructed in English from a
 * mis-encoded comment; original non-ASCII text was lost):
 *
 * (0) Unsafe and CAS:
 * Unsafe is a JVM/JDK-internal class whose methods are Java native methods.
 * CAS is performed via unsafe.compareAndSwapInt(this, stateOffset, expect, update),
 * which maps to an atomic CPU compare-and-swap instruction.
 * CAS is lock-free but suffers from the ABA problem.
 *
 * (0) LockSupport:
 * park() blocks the current thread (WAITING state);
 * unpark(thread) wakes the given thread.
 *
 * (1) ReentrantLock is built on a volatile int state, CAS, and a FIFO wait queue.
 * ReentrantLock is implemented on top of AQS in plain Java; unlike the
 * JVM-level synchronized, AbstractQueuedSynchronizer also provides Condition
 * objects for ReentrantLock (analogous to wait/notify).
 *
 * Class structure:
 * ReentrantLock implements Lock and holds a Sync sync field;
 * Sync extends AbstractQueuedSynchronizer (a CLH-queue variant);
 * AbstractQueuedSynchronizer extends AbstractOwnableSynchronizer;
 * AQS fields: long stateOffset; int state; Node tail; Node head;
 * Node(waitStatus, prev, next, thread);
 * AbstractOwnableSynchronizer field: Thread exclusiveOwnerThread;
 * NonfairSync extends Sync; FairSync extends Sync.
 *
 * NonfairSync.lock():
 * First tries the fast path: if compareAndSetState(0, 1) succeeds, call
 * setExclusiveOwnerThread(Thread.currentThread()).
 * Otherwise falls back to acquire(1), which combines CAS with the FIFO queue:
 * tryAcquire CASes state from 0, or, if the current thread already owns the
 * lock, reentrantly does state + 1 and updates exclusiveOwnerThread.
 * On failure: acquireQueued(addWaiter(Node.EXCLUSIVE), arg).
 * addWaiter appends a node at tail (looping with compareAndSetHead /
 * compareAndSetTail as needed).
 * acquireQueued loops: if the node's predecessor is head and tryAcquire
 * succeeds, the node becomes the new head and the method returns the
 * interrupted flag; otherwise it sets the predecessor's waitStatus to
 * SIGNAL (-1) and calls LockSupport.park(this), checking
 * Thread.interrupted() after waking, then continues the loop.
 * If acquireQueued returns true, the interrupt is restored via
 * Thread.currentThread().interrupt().
 *
 * unlock():
 * tryRelease(1) decrements state and clears the owner when it reaches 0;
 * unparkSuccessor(h) unparks the successor of head h when h.waitStatus != 0.
 */
package zhongqiu.common.jdk5.concurrent.locks;