code stringlengths 3 1.18M | language stringclasses 1
value |
|---|---|
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import org.apache.commons.dbcp.*;
import org.apache.commons.pool.ObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPoolFactory;
import org.apache.commons.pool.impl.GenericObjectPool;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import javax.sql.DataSource;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;
public class DataSourceInit {
    /** log4j category */
    private static Logger log = Logger.getLogger(DataSourceInit.class);

    /** Lazily-created, shared pooling DataSource. Guarded by the class lock. */
    private static DataSource dataSource = null;

    /**
     * Return the shared pooled DataSource, creating and caching it on first
     * use. Configuration is read via ConfigurationManager (db.driver, db.url,
     * db.username, db.password, plus pool-sizing properties).
     *
     * @return the pooled DataSource (never null)
     * @throws SQLException if the pool cannot be initialized; non-SQL
     *         failures are logged and rethrown wrapped in a SQLException
     */
    public static synchronized DataSource getDatasource() throws SQLException
    {
        // synchronized: without it, two threads racing the null check could
        // each build (and leak) a connection pool
        if (dataSource != null)
        {
            return dataSource;
        }
        try
        {
            // Register basic JDBC driver
            Class<?> driverClass = Class.forName(ConfigurationManager
                    .getProperty("db.driver"));
            Driver basicDriver = (Driver) driverClass.newInstance();
            DriverManager.registerDriver(basicDriver);

            // Read pool configuration parameters or use defaults.
            // getIntProperty returns 0 both when a property is unset and when
            // it is explicitly 0 — and 0 is a legal value — so presence is
            // checked separately with getProperty().
            int maxConnections = ConfigurationManager
                    .getIntProperty("db.maxconnections");
            if (ConfigurationManager.getProperty("db.maxconnections") == null)
            {
                maxConnections = 30;
            }
            int maxWait = ConfigurationManager.getIntProperty("db.maxwait");
            if (ConfigurationManager.getProperty("db.maxwait") == null)
            {
                maxWait = 5000;
            }
            int maxIdle = ConfigurationManager.getIntProperty("db.maxidle");
            if (ConfigurationManager.getProperty("db.maxidle") == null)
            {
                maxIdle = -1;
            }
            boolean useStatementPool = ConfigurationManager.getBooleanProperty("db.statementpool", true);

            // Create the object pool that backs the DataSource
            ObjectPool connectionPool = new GenericObjectPool(
                    null, // PoolableObjectFactory - wired in below
                    maxConnections, // max connections
                    GenericObjectPool.WHEN_EXHAUSTED_BLOCK,
                    maxWait, // block at most maxWait ms for a free connection
                    maxIdle, // max idle connections (-1 = unlimited)
                    true, // validate when we borrow connections from pool
                    false // don't bother validating returned connections
            );

            // ConnectionFactory the pool will use to create connections
            ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(
                    ConfigurationManager.getProperty("db.url"),
                    ConfigurationManager.getProperty("db.username"),
                    ConfigurationManager.getProperty("db.password"));

            // Oracle has a slightly different validation query
            String validationQuery = "SELECT 1";
            if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
            {
                validationQuery = "SELECT 1 FROM DUAL";
            }

            GenericKeyedObjectPoolFactory statementFactory = null;
            if (useStatementPool)
            {
                // The statement pool caches prepared statements. Grow without
                // bound rather than block: an unclosed statement can never be
                // reused, so a hard cap could make a page fail after N views.
                // GROW_WHEN_EXHAUSTED risks a slow memory leak, but that beats
                // an outright failure.
                // FIXME: Perhaps this decision should be derived from config parameters?
                GenericKeyedObjectPool.Config statementFactoryConfig = new GenericKeyedObjectPool.Config();
                statementFactoryConfig.whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
                statementFactory = new GenericKeyedObjectPoolFactory(null, statementFactoryConfig);
            }

            // The PoolableConnectionFactory wraps the "real" Connections with
            // the classes implementing the pooling functionality; constructing
            // it registers it with connectionPool (commons-dbcp 1.x behavior).
            PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(
                    connectionFactory, connectionPool, statementFactory,
                    validationQuery, // validation query
                    false, // read only is not default for now
                    false); // Autocommit defaults to none

            // Finally, create the PoolingDataSource and hand it the pool
            PoolingDataSource poolingDataSource = new PoolingDataSource();
            poolingDataSource.setPool(connectionPool);

            dataSource = poolingDataSource;
            return poolingDataSource;
        }
        catch (Exception e)
        {
            // Need to be able to catch other exceptions. Pretend they are
            // SQLExceptions, but do log
            log.warn("Exception initializing DB pool", e);
            throw new SQLException(e.toString(), e);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.log4j.Level;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
/**
* Executes SQL queries.
*
* @author Peter Breton
* @author Jim Downing
* @version $Revision: 6092 $
*/
public class DatabaseManager
{
/** log4j category */
private static Logger log = Logger.getLogger(DatabaseManager.class);

/** True if initialization has been done */
private static boolean initialized = false;

/** Cached INSERT SQL, keyed by table name. */
private static Map<String, String> insertSQL = new HashMap<String, String>();

/** True when db.name is "oracle"; otherwise PostgreSQL is assumed. */
private static boolean isOracle = false;
private static boolean isPostgres = false;

static
{
    if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
    {
        isOracle = true;
        isPostgres = false;
    }
    else
    {
        isOracle = false;
        isPostgres = true;
    }
}

/** DataSource (retrieved from jndi) */
private static DataSource dataSource = null;

/** Optional SQL run on each connection borrowed from the pool. */
private static String sqlOnBorrow = null;

/** Name to use for the pool */
private static String poolName = "dspacepool";

/**
 * This regular expression is used to perform sanity checks
 * on database names (i.e. tables and columns).
 *
 * Note: the character class is 0-9; the previous "1-9" range wrongly
 * rejected any identifier containing the digit zero.
 *
 * FIXME: Regular expressions can be slow to solve this in the future we should
 * probably create a system where we don't pass in column and table names to these low
 * level database methods. This approach is highly exploitable for injection
 * type attacks because we are unable to determine where the input came from. Instead
 * we could pass in static integer constants which are then mapped to their sql name.
 */
private static final Pattern DB_SAFE_NAME = Pattern.compile("^[a-zA-Z_0-9.]+$");

/**
 * A map of database column information. The key is the table name, a
 * String; the value is a map from column name to ColumnInfo.
 */
private static Map<String, Map<String, ColumnInfo>> info = new HashMap<String, Map<String, ColumnInfo>>();

/**
 * Protected Constructor to prevent instantiation except by derived classes.
 */
protected DatabaseManager()
{
}

/**
 * @return true if the configured database is Oracle.
 */
public static boolean isOracle()
{
    return isOracle;
}
/**
* Set the constraint check to deferred (commit time)
*
* @param context
* The context object
* @param constraintName
* the constraint name to deferred
* @throws SQLException
*/
public static void setConstraintDeferred(Context context,
        String constraintName) throws SQLException
{
    Statement statement = null;
    try
    {
        statement = context.getDBConnection().createStatement();
        // NOTE(review): constraintName is concatenated into the SQL text;
        // callers must only pass trusted, server-side constraint names.
        statement.execute("SET CONSTRAINTS " + constraintName + " DEFERRED");
        // (fix: the statement was previously closed here AND in finally)
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // close failure is not actionable; primary outcome already set
            }
        }
    }
}
/**
* Set the constraint check to immediate (every query)
*
* @param context
* The context object
* @param constraintName
* the constraint name to check immediately after every query
* @throws SQLException
*/
public static void setConstraintImmediate(Context context,
        String constraintName) throws SQLException
{
    Statement statement = null;
    try
    {
        statement = context.getDBConnection().createStatement();
        // NOTE(review): constraintName is concatenated into the SQL text;
        // callers must only pass trusted, server-side constraint names.
        statement.execute("SET CONSTRAINTS " + constraintName + " IMMEDIATE");
        // (fix: the statement was previously closed here AND in finally)
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // close failure is not actionable; primary outcome already set
            }
        }
    }
}
/**
* Return an iterator with the results of the query. The table parameter
* indicates the type of result. If table is null, the column names are read
* from the ResultSetMetaData.
*
* @param context
* The context object
* @param table
* The name of the table which results
* @param query
* The SQL query
* @param parameters
* A set of SQL parameters to be included in query. The order of
* the parameters must correspond to the order of their reference
* within the query.
* @return A TableRowIterator with the results of the query
* @exception SQLException
* If a database error occurs
*/
public static TableRowIterator queryTable(Context context, String table, String query, Object... parameters) throws SQLException
{
    if (log.isDebugEnabled())
    {
        StringBuilder sb = new StringBuilder("Running query \"").append(query).append("\" with parameters: ");
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // String.valueOf renders null parameters as "null" instead of
            // throwing an NPE (fix: was parameters[i].toString())
            sb.append(String.valueOf(parameters[i]));
        }
        log.debug(sb.toString());
    }
    PreparedStatement statement = context.getDBConnection().prepareStatement(query);
    try
    {
        loadParameters(statement, parameters);
        // The iterator takes ownership of the statement and closes it
        TableRowIterator retTRI = new TableRowIterator(statement.executeQuery(), canonicalize(table));
        retTRI.setStatement(statement);
        return retTRI;
    }
    catch (SQLException sqle)
    {
        // Ownership never transferred; close the statement ourselves
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException s)
            {
                // swallow: the original exception is the one to report
            }
        }
        throw sqle;
    }
}
/**
* Return an iterator with the results of the query.
*
* @param context
* The context object
* @param query
* The SQL query
* @param parameters
* A set of SQL parameters to be included in query. The order of
* the parameters must correspond to the order of their reference
* within the query.
* @return A TableRowIterator with the results of the query
* @exception SQLException
* If a database error occurs
*/
public static TableRowIterator query(Context context, String query,
        Object... parameters) throws SQLException
{
    if (log.isDebugEnabled())
    {
        // StringBuilder (not StringBuffer): no cross-thread sharing here
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // String.valueOf renders null parameters as "null" instead of
            // throwing an NPE (fix: was parameters[i].toString())
            sb.append(String.valueOf(parameters[i]));
        }
        log.debug("Running query \"" + query + "\" with parameters: " + sb.toString());
    }
    PreparedStatement statement = context.getDBConnection().prepareStatement(query);
    try
    {
        loadParameters(statement, parameters);
        // The iterator takes ownership of the statement and closes it
        TableRowIterator retTRI = new TableRowIterator(statement.executeQuery());
        retTRI.setStatement(statement);
        return retTRI;
    }
    catch (SQLException sqle)
    {
        // Ownership never transferred; close the statement ourselves
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException s)
            {
                // swallow: the original exception is the one to report
            }
        }
        throw sqle;
    }
}
/**
* Return the single row result to this query, or null if no result. If more
* than one row results, only the first is returned.
*
* @param context
* Current DSpace context
* @param query
* The SQL query
* @param parameters
* A set of SQL parameters to be included in query. The order of
* the parameters must correspond to the order of their reference
* within the query.
* @return A TableRow object, or null if no result
* @exception SQLException
* If a database error occurs
*/
public static TableRow querySingle(Context context, String query,
        Object... parameters) throws SQLException
{
    // Run the query and hand back only the first row (if any); the
    // iterator is always closed, even on failure.
    TableRowIterator rows = null;
    try
    {
        rows = query(context, query, parameters);
        if (rows.hasNext())
        {
            return rows.next();
        }
        return null;
    }
    finally
    {
        if (rows != null)
        {
            rows.close();
        }
    }
}
/**
* Return the single row result to this query, or null if no result. If more
* than one row results, only the first is returned.
*
* @param context
* Current DSpace context
* @param table
* The name of the table which results
* @param query
* The SQL query
* @param parameters
* A set of SQL parameters to be included in query. The order of
* the parameters must correspond to the order of their reference
* within the query.
* @return A TableRow object, or null if no result
* @exception SQLException
* If a database error occurs
*/
public static TableRow querySingleTable(Context context, String table,
        String query, Object... parameters) throws SQLException
{
    // Run the table query and return at most one row; the iterator is
    // always closed. (queryTable either returns or throws, so by the time
    // we enter the try block the iterator is non-null.)
    TableRowIterator rows = queryTable(context, canonicalize(table), query, parameters);
    try
    {
        return rows.hasNext() ? rows.next() : null;
    }
    finally
    {
        rows.close();
    }
}
/**
* Execute an update, insert or delete query. Returns the number of rows
* affected by the query.
*
* @param context
* Current DSpace context
* @param query
* The SQL query to execute
* @param parameters
* A set of SQL parameters to be included in query. The order of
* the parameters must correspond to the order of their reference
* within the query.
* @return The number of rows affected by the query.
* @exception SQLException
* If a database error occurs
*/
public static int updateQuery(Context context, String query, Object... parameters) throws SQLException
{
    if (log.isDebugEnabled())
    {
        StringBuilder sb = new StringBuilder("Running query \"").append(query).append("\" with parameters: ");
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // String.valueOf renders null parameters as "null" instead of
            // throwing an NPE (fix: was parameters[i].toString())
            sb.append(String.valueOf(parameters[i]));
        }
        log.debug(sb.toString());
    }
    PreparedStatement statement = null;
    try
    {
        statement = context.getDBConnection().prepareStatement(query);
        loadParameters(statement, parameters);
        return statement.executeUpdate();
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // close failure is not actionable; the update result (or
                // the primary exception) takes precedence
            }
        }
    }
}
/**
* Create a new row in the given table, and assigns a unique id.
*
* @param context
* Current DSpace context
* @param table
* The RDBMS table in which to create the new row
* @return The newly created row
*/
public static TableRow create(Context context, String table)
    throws SQLException
{
    // Build an empty row for the table, persist it (which also assigns
    // the primary key), then hand it back to the caller.
    TableRow newRow = new TableRow(canonicalize(table), getColumnNames(table));
    insert(context, newRow);
    return newRow;
}
/**
* Find a table row by its primary key. Returns the row, or null if no row
* with that primary key value exists.
*
* @param context
* Current DSpace context
* @param table
* The table in which to find the row
* @param id
* The primary key value
* @return The row resulting from the query, or null if no row with that
* primary key value exists.
* @exception SQLException
* If a database error occurs
*/
public static TableRow find(Context context, String table, int id)
    throws SQLException
{
    // Look the row up by its primary key column
    String ctable = canonicalize(table);
    String pkColumn = getPrimaryKeyColumn(ctable);
    return findByUnique(context, ctable, pkColumn, Integer.valueOf(id));
}
/**
* Find a table row by a unique value. Returns the row, or null if no row
* with that primary key value exists. If multiple rows with the value
* exist, one is returned.
*
* @param context
* Current DSpace context
* @param table
* The table to use to find the object
* @param column
* The name of the unique column
* @param value
* The value of the unique column
* @return The row resulting from the query, or null if no row with that
* value exists.
* @exception SQLException
* If a database error occurs
*/
public static TableRow findByUnique(Context context, String table,
        String column, Object value) throws SQLException
{
    String ctable = canonicalize(table);
    // Identifier sanity checks: table and column names are interpolated
    // into the SQL text, so reject anything outside the safe character set
    if (!DB_SAFE_NAME.matcher(ctable).matches())
    {
        throw new SQLException("Unable to execute select query because table name (" + ctable + ") contains non alphanumeric characters.");
    }
    if (!DB_SAFE_NAME.matcher(column).matches())
    {
        throw new SQLException("Unable to execute select query because column name (" + column + ") contains non alphanumeric characters.");
    }
    // The value itself is bound as a parameter, not interpolated
    String sql = "select * from " + ctable + " where " + column + " = ? ";
    return querySingleTable(context, ctable, sql, value);
}
/**
* Delete a table row via its primary key. Returns the number of rows
* deleted.
*
* @param context
* Current DSpace context
* @param table
* The table to delete from
* @param id
* The primary key value
* @return The number of rows deleted
* @exception SQLException
* If a database error occurs
*/
public static int delete(Context context, String table, int id)
    throws SQLException
{
    // Delete by primary key value
    String ctable = canonicalize(table);
    String pkColumn = getPrimaryKeyColumn(ctable);
    return deleteByValue(context, ctable, pkColumn, Integer.valueOf(id));
}
/**
* Delete all table rows with the given value. Returns the number of rows
* deleted.
*
* @param context
* Current DSpace context
* @param table
* The table to delete from
* @param column
* The name of the column
* @param value
* The value of the column
* @return The number of rows deleted
* @exception SQLException
* If a database error occurs
*/
public static int deleteByValue(Context context, String table,
        String column, Object value) throws SQLException
{
    String ctable = canonicalize(table);
    // Identifier sanity checks: table and column names are interpolated
    // into the SQL text, so reject anything outside the safe character set
    if (!DB_SAFE_NAME.matcher(ctable).matches())
    {
        throw new SQLException("Unable to execute delete query because table name (" + ctable + ") contains non alphanumeric characters.");
    }
    if (!DB_SAFE_NAME.matcher(column).matches())
    {
        throw new SQLException("Unable to execute delete query because column name (" + column + ") contains non alphanumeric characters.");
    }
    // The value itself is bound as a parameter, not interpolated
    String sql = "delete from " + ctable + " where " + column + " = ? ";
    return updateQuery(context, sql, value);
}
/**
* Obtain an RDBMS connection.
*
* @return A new database connection.
* @exception SQLException
* If a database error occurs, or a connection cannot be
* obtained.
*/
public static Connection getConnection() throws SQLException
{
    initialize();
    if (dataSource == null)
    {
        // Initialization did not produce a usable pool; callers must
        // handle a null connection (preserved historical behavior)
        return null;
    }
    Connection conn = dataSource.getConnection();
    if (!StringUtils.isEmpty(sqlOnBorrow))
    {
        // Run the configured "on borrow" SQL against the fresh connection
        PreparedStatement pstmt = conn.prepareStatement(sqlOnBorrow);
        try
        {
            pstmt.execute();
        }
        finally
        {
            pstmt.close();
        }
    }
    return conn;
}
public static DataSource getDataSource()
{
    // Accessor has no throws clause, so surface init failures unchecked
    try
    {
        initialize();
    }
    catch (SQLException sqle)
    {
        throw new IllegalStateException(sqle.getMessage(), sqle);
    }
    return dataSource;
}
/**
* Release resources associated with this connection.
*
* @param c
* The connection to release
*/
public static void freeConnection(Connection c)
{
    // Best-effort close; a null connection is a no-op and close failures
    // are logged rather than propagated
    if (c == null)
    {
        return;
    }
    try
    {
        c.close();
    }
    catch (SQLException e)
    {
        log.warn(e.getMessage(), e);
    }
}
/**
* Create a table row object that can be passed into the insert method, not
* commonly used unless the table has a referential integrity constraint.
*
* @param table
* The RDBMS table in which to create the new row
* @return The newly created row
* @throws SQLException
*/
public static TableRow row(String table) throws SQLException
{
    // Build an in-memory row (not persisted) carrying the table's columns
    List<String> columns = getColumnNames(table);
    return new TableRow(canonicalize(table), columns);
}
/**
* Insert a table row into the RDBMS.
*
* @param context
* Current DSpace context
* @param row
* The row to insert
* @exception SQLException
* If a database error occurs
*/
public static void insert(Context context, TableRow row) throws SQLException
{
    // Dispatch on the configured database flavor, then record the
    // generated primary key back onto the row
    int newID = isPostgres
            ? doInsertPostgres(context, row)
            : doInsertGeneric(context, row);
    row.setColumn(getPrimaryKeyColumn(row), newID);
}
/**
* Update changes to the RDBMS. Note that if the update fails, the values in
* the row will NOT be reverted.
*
* @param context
* Current DSpace context
* @param row
* The row to update
* @return The number of rows affected (1 or 0)
* @exception SQLException
* If a database error occurs
*/
public static int update(Context context, TableRow row) throws SQLException
{
    String table = row.getTable();
    ColumnInfo pk = getPrimaryKeyColumnInfo(table);

    // Collect only the non-primary-key columns whose values have changed
    List<ColumnInfo> changed = new ArrayList<ColumnInfo>();
    StringBuilder sql = new StringBuilder("update ").append(table).append(" set ");
    boolean first = true;
    for (ColumnInfo col : getColumnInfo(table))
    {
        if (col.isPrimaryKey() || !row.hasColumnChanged(col.getName()))
        {
            continue;
        }
        if (!first)
        {
            sql.append(", ");
        }
        sql.append(col.getName()).append(" = ?");
        changed.add(col);
        first = false;
    }

    if (changed.isEmpty())
    {
        // Nothing changed; report success without touching the database
        return 1;
    }

    sql.append(" where ").append(pk.getName()).append(" = ?");
    changed.add(pk);
    return executeUpdate(context.getDBConnection(), sql.toString(), changed, row);
}
/**
* Delete row from the RDBMS.
*
* @param context
* Current DSpace context
* @param row
* The row to delete
* @return The number of rows affected (1 or 0)
* @exception SQLException
* If a database error occurs
*/
public static int delete(Context context, TableRow row) throws SQLException
{
    // Validate the row is deletable before issuing any SQL
    String table = row.getTable();
    if (table == null)
    {
        throw new IllegalArgumentException("Row not associated with a table");
    }
    String pk = getPrimaryKeyColumn(row);
    if (row.isColumnNull(pk))
    {
        throw new IllegalArgumentException("Primary key value is null");
    }
    return delete(context, table, row.getIntColumn(pk));
}
/**
* Return metadata about a table.
*
* @param table
* The name of the table
* @return An array of ColumnInfo objects
* @exception SQLException
* If a database error occurs
*/
static Collection<ColumnInfo> getColumnInfo(String table) throws SQLException
{
    // Null map means the table is unknown; mirror that with a null result
    Map<String, ColumnInfo> byName = getColumnInfoInternal(table);
    if (byName == null)
    {
        return null;
    }
    return byName.values();
}
/**
* Return info about column in table.
*
* @param table
* The name of the table
* @param column
* The name of the column
* @return Information about the column
* @exception SQLException
* If a database error occurs
*/
static ColumnInfo getColumnInfo(String table, String column)
    throws SQLException
{
    // Null map means the table is unknown; mirror that with a null result
    Map<String, ColumnInfo> byName = getColumnInfoInternal(table);
    if (byName == null)
    {
        return null;
    }
    return byName.get(column);
}
/**
* Return the names of all the columns of the given table.
*
* @param table
* The name of the table
* @return The names of all the columns of the given table, as a List. Each
* element of the list is a String.
* @exception SQLException
* If a database error occurs
*/
static List<String> getColumnNames(String table) throws SQLException
{
    // Project the table's ColumnInfo records down to their names
    List<String> names = new ArrayList<String>();
    for (ColumnInfo col : getColumnInfo(table))
    {
        names.add(col.getName());
    }
    return names;
}
/**
* Return the names of all the columns of the ResultSet.
*
* @param meta
* The ResultSetMetaData
* @return The names of all the columns of the given table, as a List. Each
* element of the list is a String.
* @exception SQLException
* If a database error occurs
*/
static List<String> getColumnNames(ResultSetMetaData meta) throws SQLException
{
    // Collect the column labels; JDBC column indexes are 1-based
    int count = meta.getColumnCount();
    List<String> labels = new ArrayList<String>(count);
    for (int col = 1; col <= count; col++)
    {
        labels.add(meta.getColumnLabel(col));
    }
    return labels;
}
/**
* Return the canonical name for a table.
*
* @param table
* The name of the table.
* @return The canonical name of the table.
*/
static String canonicalize(String table)
{
    if (table == null)
    {
        return null;
    }
    // Oracle expects upper-case table names; the default (Postgres)
    // wants lower-case
    return isOracle ? table.toUpperCase() : table.toLowerCase();
}
////////////////////////////////////////
// SQL loading methods
////////////////////////////////////////
/**
* Load SQL into the RDBMS.
*
* @param sql
* The SQL to load.
* @throws SQLException
* If a database error occurs
*/
public static void loadSql(String sql) throws SQLException
{
    try
    {
        loadSql(new StringReader(sql));
    }
    catch (IOException ioe)
    {
        // Reading from an in-memory StringReader should never fail, but
        // do not swallow it silently if it somehow does (fix: was an
        // empty catch block)
        log.error("IOException reading SQL from in-memory string", ioe);
    }
}
/**
* Load SQL from a reader into the RDBMS.
*
* @param r
* The Reader from which to read the SQL.
* @throws SQLException
* If a database error occurs
* @throws IOException
* If an error occurs obtaining data from the reader
*/
public static void loadSql(Reader r) throws SQLException, IOException
{
    BufferedReader reader = new BufferedReader(r);
    StringBuilder sqlBuilder = new StringBuilder();
    String sql = null;
    String line = null;
    Connection connection = null;
    Statement statement = null;
    try
    {
        connection = getConnection();
        connection.setAutoCommit(true);
        statement = connection.createStatement();
        boolean inquote = false;
        while ((line = reader.readLine()) != null)
        {
            // Strip "--" comments
            int commentStart = line.indexOf("--");
            String input = (commentStart != -1) ? line.substring(0, commentStart) : line;
            // Empty line, skip
            if (input.trim().equals(""))
            {
                continue;
            }
            // Put it on the SQL buffer, removing all semicolons from the
            // sql file (the terminator is detected separately below)
            sqlBuilder.append(input.replace(';', ' '));
            // Add a space
            sqlBuilder.append(" ");
            // Track whether we are inside a single-quoted string literal
            // that spans lines, by counting quote characters.
            int index = 0;
            int count = 0;
            int inputlen = input.length();
            while ((index = input.indexOf('\'', count)) != -1)
            {
                // Flip the value of inquote
                inquote = !inquote;
                // Move past this quote
                count = index + 1;
                // Make sure we do not exceed the string length
                if (count >= inputlen)
                {
                    break;
                }
            }
            // If we are in a quote, keep going.
            // Note that this is STILL a simple heuristic that is not
            // guaranteed to be correct
            if (inquote)
            {
                continue;
            }
            // Statement is only complete once we see its semicolon
            int endMarker = input.indexOf(';', index);
            if (endMarker == -1)
            {
                continue;
            }
            sql = sqlBuilder.toString();
            if (log.isDebugEnabled())
            {
                log.debug("Running database query \"" + sql + "\"");
            }
            try
            {
                // Use execute, not executeQuery (which expects results) or
                // executeUpdate
                statement.execute(sql);
            }
            catch (SQLWarning sqlw)
            {
                if (log.isDebugEnabled())
                {
                    log.debug("Got SQL Warning: " + sqlw, sqlw);
                }
            }
            catch (SQLException sqle)
            {
                String msg = "Got SQL Exception: " + sqle;
                String sqlmessage = sqle.getMessage();
                // These are Postgres-isms:
                // There's no easy way to check if a table exists before
                // creating it, so we always drop tables, then create them
                boolean isDrop = ((sql != null) && (sqlmessage != null)
                        && (sql.toUpperCase().startsWith("DROP"))
                        && (sqlmessage.indexOf("does not exist") != -1));
                // Creating a view causes a bogus warning
                boolean isNoResults = ((sql != null)
                        && (sqlmessage != null)
                        && (sql.toUpperCase().startsWith("CREATE VIEW")
                            || sql.toUpperCase().startsWith("CREATE FUNCTION"))
                        && (sqlmessage.indexOf("No results were returned") != -1));
                // If the messages are bogus, give them a low priority
                if (isDrop || isNoResults)
                {
                    if (log.isDebugEnabled())
                    {
                        log.debug(msg, sqle);
                    }
                }
                // Otherwise, we need to know!
                else
                {
                    if (log.isEnabledFor(Level.WARN))
                    {
                        log.warn(msg, sqle);
                    }
                }
            }
            // Reset SQL buffer for the next statement
            sqlBuilder = new StringBuilder();
            sql = null;
        }
    }
    finally
    {
        // Close the statement BEFORE its owning connection (fix: the
        // connection was closed first, which could leak the statement if
        // Connection.close threw); guard the statement close so the
        // connection still gets closed.
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                log.warn("Unable to close statement", sqle);
            }
        }
        if (connection != null)
        {
            connection.close();
        }
    }
}
////////////////////////////////////////
// Helper methods
////////////////////////////////////////
/**
* Convert the current row in a ResultSet into a TableRow object.
*
* @param results
* A ResultSet to process
* @param table
* The name of the table
* @return A TableRow object with the data from the ResultSet
* @exception SQLException
* If a database error occurs
*/
static TableRow process(ResultSet results, String table) throws SQLException
{
    // No explicit column list; the three-argument overload derives the
    // columns from the table name or the ResultSet metadata
    List<String> noColumnNames = null;
    return process(results, table, noColumnNames);
}
/**
* Convert the current row in a ResultSet into a TableRow object.
*
* @param results
* A ResultSet to process
* @param table
* The name of the table
* @param pColumnNames
* The name of the columns in this resultset
* @return A TableRow object with the data from the ResultSet
* @exception SQLException
* If a database error occurs
*/
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException
{
    ResultSetMetaData meta = results.getMetaData();
    int columns = meta.getColumnCount() + 1;
    // If we haven't been passed the column names try to generate them
    // from the metadata / table
    List<String> columnNames = pColumnNames != null ? pColumnNames :
        ((table == null) ? getColumnNames(meta) : getColumnNames(table));
    TableRow row = new TableRow(canonicalize(table), columnNames);
    // Process the columns in order
    // (This ensures maximum backwards compatibility with
    // old JDBC drivers)
    for (int i = 1; i < columns; i++)
    {
        String name = meta.getColumnName(i);
        int jdbctype = meta.getColumnType(i);
        switch (jdbctype)
        {
        case Types.BIT:
            row.setColumn(name, results.getBoolean(i));
            break;
        case Types.INTEGER:
        case Types.NUMERIC:
            if (isOracle)
            {
                // Oracle reports integral columns as NUMERIC; narrow to
                // int when the value fits
                long longValue = results.getLong(i);
                if (longValue <= (long) Integer.MAX_VALUE)
                {
                    row.setColumn(name, (int) longValue);
                }
                else
                {
                    row.setColumn(name, longValue);
                }
            }
            else
            {
                row.setColumn(name, results.getInt(i));
            }
            break;
        case Types.DECIMAL:
        case Types.BIGINT:
            row.setColumn(name, results.getLong(i));
            break;
        case Types.DOUBLE:
            row.setColumn(name, results.getDouble(i));
            break;
        case Types.CLOB:
            if (isOracle)
            {
                row.setColumn(name, results.getString(i));
            }
            else
            {
                throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
            }
            break;
        case Types.VARCHAR:
            try
            {
                byte[] bytes = results.getBytes(i);
                if (bytes != null)
                {
                    // Decode explicitly as UTF-8. Fix: reuse the bytes
                    // already fetched instead of calling getBytes(i) a
                    // second time.
                    String mystring = new String(bytes, "UTF-8");
                    row.setColumn(name, mystring);
                }
                else
                {
                    row.setColumn(name, results.getString(i));
                }
            }
            catch (UnsupportedEncodingException e)
            {
                // Should be unreachable: UTF-8 is a mandatory JVM charset
                log.error("Unable to parse text from database", e);
            }
            break;
        case Types.DATE:
            row.setColumn(name, results.getDate(i));
            break;
        case Types.TIME:
            row.setColumn(name, results.getTime(i));
            break;
        case Types.TIMESTAMP:
            row.setColumn(name, results.getTimestamp(i));
            break;
        default:
            throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
        }
        // Determines if the last column was null, and sets the tablerow
        // accordingly
        if (results.wasNull())
        {
            row.setColumnNull(name);
        }
    }
    // Now that we've prepped the TableRow, reset the flags so that we can
    // detect which columns have changed
    row.resetChanged();
    return row;
}
/**
* Return the name of the primary key column. We assume there's only one
* primary key per table; if there are more, only the first one will be
* returned.
*
* @param row
* The TableRow to return the primary key for.
* @return The name of the primary key column, or null if the row has no
* primary key.
* @exception SQLException
* If a database error occurs
*/
public static String getPrimaryKeyColumn(TableRow row) throws SQLException
{
    // Delegate to the table-name variant
    String table = row.getTable();
    return getPrimaryKeyColumn(table);
}
/**
* Return the name of the primary key column in the given table. We assume
* there's only one primary key per table; if there are more, only the first
* one will be returned.
*
* @param table
* The name of the RDBMS table
* @return The name of the primary key column, or null if the table has no
* primary key.
* @exception SQLException
* If a database error occurs
*/
protected static String getPrimaryKeyColumn(String table)
    throws SQLException
{
    // Null ColumnInfo means the table has no primary key
    ColumnInfo pk = getPrimaryKeyColumnInfo(table);
    if (pk == null)
    {
        return null;
    }
    return pk.getName();
}
/**
 * Return column information for the primary key column, or null if the
 * table has no primary key. Only the first primary key found is returned
 * when a table defines several.
 *
 * @param table
 *            The name of the RDBMS table
 * @return A ColumnInfo object, or null if the table has no primary key.
 * @exception SQLException
 *                If a database error occurs
 */
static ColumnInfo getPrimaryKeyColumnInfo(String table) throws SQLException
{
    // Scan the (cached) column metadata for the first primary-key column
    for (ColumnInfo candidate : getColumnInfo(canonicalize(table)))
    {
        if (candidate.isPrimaryKey())
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Execute SQL as a PreparedStatement on Connection. Bind parameters in
 * columns to the values in the table row before executing.
 * <p>
 * (Fixed javadoc: this method is void — the previous doc claimed it
 * returned the number of rows affected; use {@code executeUpdate} for
 * that.)
 *
 * @param connection
 *            The SQL connection
 * @param sql
 *            The query to execute
 * @param columns
 *            The columns to bind
 * @param row
 *            The row
 * @exception SQLException
 *                If a database error occurs
 */
private static void execute(Connection connection, String sql, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    PreparedStatement statement = null;
    if (log.isDebugEnabled())
    {
        log.debug("Running query \"" + sql + "\"");
    }
    try
    {
        statement = connection.prepareStatement(sql);
        loadParameters(statement, columns, row);
        statement.execute();
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // deliberately ignored: a close failure must not mask an
                // exception thrown by execute() above
            }
        }
    }
}
/**
 * Run the given SQL as a PreparedStatement on the connection, binding the
 * listed columns from the row, and report how many rows were changed.
 *
 * @param connection the SQL connection
 * @param sql the insert/update/delete statement to run
 * @param columns the columns to bind, in placeholder order
 * @param row the row supplying the bound values
 * @return the number of rows affected by the statement
 * @exception SQLException if a database error occurs
 */
private static int executeUpdate(Connection connection, String sql, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    if (log.isDebugEnabled())
    {
        log.debug("Running query \"" + sql + "\"");
    }
    PreparedStatement statement = connection.prepareStatement(sql);
    try
    {
        loadParameters(statement, columns, row);
        return statement.executeUpdate();
    }
    finally
    {
        try
        {
            statement.close();
        }
        catch (SQLException sqle)
        {
            // ignore close failures; the update outcome is what matters
        }
    }
}
/**
 * Return metadata about a table's columns, consulting the in-memory cache
 * first and querying the database only on a miss.
 *
 * @param table
 *            The name of the table
 * @return A map from column name to ColumnInfo.
 * @exception SQLException
 *                If a database error occurs
 */
private static Map<String, ColumnInfo> getColumnInfoInternal(String table) throws SQLException
{
    String ctable = canonicalize(table);
    Map<String, ColumnInfo> cached = info.get(ctable);
    if (cached == null)
    {
        // Cache miss: hit the database once and remember the answer
        cached = retrieveColumnInfo(ctable);
        info.put(ctable, cached);
    }
    return cached;
}
/**
 * Read metadata about a table from the database.
 *
 * @param table
 *            The RDBMS table, optionally qualified as "catalog.table".
 * @return A map of information about the columns. The key is the name of
 *         the column, a String; the value is a ColumnInfo object.
 * @exception SQLException
 *                If there is a problem retrieving information from the
 *                RDBMS.
 */
private static Map<String, ColumnInfo> retrieveColumnInfo(String table) throws SQLException
{
    Connection connection = null;
    ResultSet pkcolumns = null;
    ResultSet columns = null;
    try
    {
        String schema = ConfigurationManager.getProperty("db.schema");
        String catalog = null;
        // Split a "catalog.table" qualified name into its two parts
        int dotIndex = table.indexOf('.');
        if (dotIndex > 0)
        {
            catalog = table.substring(0, dotIndex);
            table = table.substring(dotIndex + 1, table.length());
            // NOTE(review): warn-level output here looks like leftover
            // debugging; consider demoting to debug
            log.warn("catalog: " + catalog);
            log.warn("table: " + table);
        }
        connection = getConnection();
        DatabaseMetaData metadata = connection.getMetaData();
        Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
        // FIX: per JDBC, getMaxTableNameLength() returns 0 when the limit
        // is unknown/unlimited; the old code then called substring(0, -1)
        // and threw. Only truncate for a real positive limit.
        int max = metadata.getMaxTableNameLength();
        String tname = (max > 0 && table.length() >= max) ? table
                .substring(0, max - 1) : table;
        pkcolumns = metadata.getPrimaryKeys(catalog, schema, tname);
        Set<String> pks = new HashSet<String>();
        // Column 4 of the getPrimaryKeys result set is COLUMN_NAME
        while (pkcolumns.next())
        {
            pks.add(pkcolumns.getString(4));
        }
        columns = metadata.getColumns(catalog, schema, tname, null);
        while (columns.next())
        {
            // Column 4 is COLUMN_NAME, column 5 is DATA_TYPE (java.sql.Types)
            String column = columns.getString(4);
            ColumnInfo cinfo = new ColumnInfo();
            cinfo.setName(column);
            cinfo.setType((int) columns.getShort(5));
            if (pks.contains(column))
            {
                cinfo.setIsPrimaryKey(true);
            }
            results.put(column, cinfo);
        }
        return Collections.unmodifiableMap(results);
    }
    finally
    {
        if (pkcolumns != null)
        {
            try { pkcolumns.close(); } catch (SQLException sqle) { }
        }
        if (columns != null)
        {
            try { columns.close(); } catch (SQLException sqle) { }
        }
        if (connection != null)
        {
            try { connection.close(); } catch (SQLException sqle) { }
        }
    }
}
/**
 * Provide a means for a (web) application to cleanly terminate the connection pool.
 * <p>
 * NOTE(review): this only drops the DataSource reference and clears the
 * initialized flag; no pool close is visible here — confirm the pool
 * itself is shut down elsewhere if that is required.
 *
 * @throws SQLException
 */
public static synchronized void shutdown() throws SQLException
{
    if (initialized)
    {
        dataSource = null;
        initialized = false;
    }
}
/**
 * Initialize the DatabaseManager: locate a container-managed (JNDI)
 * DataSource if one is configured, otherwise build the internal pool, and
 * prepare any schema-selection SQL to run when a connection is borrowed.
 */
private static synchronized void initialize() throws SQLException
{
    if (initialized)
    {
        return;
    }
    try
    {
        // Prefer a container-managed DataSource when "db.jndi" is set
        String jndiName = ConfigurationManager.getProperty("db.jndi");
        if (!StringUtils.isEmpty(jndiName))
        {
            try
            {
                javax.naming.Context ctx = new InitialContext();
                javax.naming.Context env = ctx == null ? null : (javax.naming.Context)ctx.lookup("java:/comp/env");
                dataSource = (DataSource)(env == null ? null : env.lookup(jndiName));
            }
            catch (Exception e)
            {
                log.error("Error retrieving JNDI context: " + jndiName, e);
            }
            if (dataSource != null)
            {
                if (isOracle)
                {
                    // Oracle: switch to the configured schema on borrow
                    sqlOnBorrow = "ALTER SESSION SET current_schema=" + ConfigurationManager.getProperty("db.username").trim().toUpperCase();
                }
                log.debug("Using JNDI dataSource: " + jndiName);
            }
            else
            {
                log.info("Unable to locate JNDI dataSource: " + jndiName);
            }
        }
        // FIX: SET SEARCH_PATH is PostgreSQL syntax and must only apply to
        // non-Oracle databases. The previous guard (isOracle) would have
        // overwritten the Oracle ALTER SESSION borrow SQL above with
        // Postgres-only SQL, and never applied the search path on Postgres.
        if (!isOracle)
        {
            if (!StringUtils.isEmpty(ConfigurationManager.getProperty("db.postgres.schema")))
            {
                sqlOnBorrow = "SET SEARCH_PATH TO " + ConfigurationManager.getProperty("db.postgres.schema").trim();
            }
        }
        if (dataSource == null)
        {
            if (!StringUtils.isEmpty(jndiName))
            {
                log.info("Falling back to creating own Database pool");
            }
            dataSource = DataSourceInit.getDatasource();
        }
        initialized = true;
    }
    catch (SQLException se)
    {
        // Simply throw up SQLExceptions
        throw se;
    }
    catch (Exception e)
    {
        // Need to be able to catch other exceptions. Pretend they are
        // SQLExceptions, but do log
        log.warn("Exception initializing DB pool", e);
        throw new SQLException(e.toString(), e);
    }
}
/**
 * Bind the given parameter values, in order, onto the prepared statement's
 * placeholders. Only a select number of datatypes are supported by the
 * JDBC driver.
 *
 * @param statement
 *            The unparameterized statement.
 * @param parameters
 *            The parameters to be set on the statement.
 */
protected static void loadParameters(PreparedStatement statement, Object[] parameters) throws SQLException
{
    statement.clearParameters();
    int position = 0;
    for (Object value : parameters)
    {
        // JDBC parameter indexes are 1-based
        position++;
        if (value == null)
        {
            throw new SQLException("Attempting to insert null value into SQL query.");
        }
        if (value instanceof String)
        {
            statement.setString(position, (String) value);
        }
        else if (value instanceof Long)
        {
            statement.setLong(position, ((Long) value).longValue());
        }
        else if (value instanceof Integer)
        {
            statement.setInt(position, ((Integer) value).intValue());
        }
        else if (value instanceof Short)
        {
            statement.setShort(position, ((Short) value).shortValue());
        }
        else if (value instanceof Date)
        {
            statement.setDate(position, (Date) value);
        }
        else if (value instanceof Time)
        {
            statement.setTime(position, (Time) value);
        }
        else if (value instanceof Timestamp)
        {
            statement.setTimestamp(position, (Timestamp) value);
        }
        else if (value instanceof Double)
        {
            statement.setDouble(position, ((Double) value).doubleValue());
        }
        else if (value instanceof Float)
        {
            statement.setFloat(position, ((Float) value).floatValue());
        }
        else
        {
            throw new SQLException("Attempting to insert unknown datatype ("+value.getClass().getName()+") into SQL statement.");
        }
    }
}
/**
 * Bind the values of the listed columns from the row onto the prepared
 * statement, positionally in column order. Null columns are bound with
 * setNull using the column's declared JDBC type.
 *
 * @param statement the statement whose placeholders are filled
 * @param columns the columns to bind, in placeholder order
 * @param row the row supplying the values
 * @throws SQLException if a database error occurs
 */
private static void loadParameters(PreparedStatement statement, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    int count = 0;
    for (ColumnInfo info : columns)
    {
        // JDBC parameter indexes are 1-based
        count++;
        String column = info.getCanonicalizedName();
        int jdbctype = info.getType();
        if (row.isColumnNull(column))
        {
            statement.setNull(count, jdbctype);
        }
        else
        {
            switch (jdbctype)
            {
                case Types.BIT:
                    statement.setBoolean(count, row.getBooleanColumn(column));
                    break;
                case Types.INTEGER:
                    // Oracle mode reads INTEGER columns via the long accessor
                    if (isOracle)
                    {
                        statement.setLong(count, row.getLongColumn(column));
                    }
                    else
                    {
                        statement.setInt(count, row.getIntColumn(column));
                    }
                    break;
                case Types.NUMERIC:
                case Types.DECIMAL:
                    statement.setLong(count, row.getLongColumn(column));
                    // FIXME should be BigDecimal if TableRow supported that
                    break;
                case Types.BIGINT:
                    statement.setLong(count, row.getLongColumn(column));
                    break;
                case Types.CLOB:
                    if (isOracle)
                    {
                        // Support CLOBs in place of TEXT columns in Oracle
                        statement.setString(count, row.getStringColumn(column));
                    }
                    else
                    {
                        throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
                    }
                    break;
                case Types.VARCHAR:
                    statement.setString(count, row.getStringColumn(column));
                    break;
                case Types.DATE:
                    // Date-ish columns are stored as java.util.Date in TableRow
                    // and converted to the matching java.sql type here
                    statement.setDate(count, new java.sql.Date(row.getDateColumn(column).getTime()));
                    break;
                case Types.TIME:
                    statement.setTime(count, new Time(row.getDateColumn(column).getTime()));
                    break;
                case Types.TIMESTAMP:
                    statement.setTimestamp(count, new Timestamp(row.getDateColumn(column).getTime()));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
            }
        }
    }
}
/**
 * Postgres-specific row insert, combining getnextid() and the INSERT into
 * a single statement (via RETURNING) for efficiency.
 *
 * @param context the current DSpace context, supplying the DB connection
 * @param row the row to insert
 * @return the primary key value assigned by the database
 * @throws SQLException if a database error occurs
 */
private static int doInsertPostgres(Context context, TableRow row) throws SQLException
{
    String table = row.getTable();
    Collection<ColumnInfo> info = getColumnInfo(table);
    Collection<ColumnInfo> params = new ArrayList<ColumnInfo>();
    String primaryKey = getPrimaryKeyColumn(table);
    String sql = insertSQL.get(table);
    boolean firstColumn = true;
    boolean foundPrimaryKey = false;
    if (sql == null)
    {
        // Generate SQL and filter parameter columns
        StringBuilder insertBuilder = new StringBuilder("INSERT INTO ").append(table).append(" ( ");
        StringBuilder valuesBuilder = new StringBuilder(") VALUES ( ");
        for (ColumnInfo col : info)
        {
            if (firstColumn)
            {
                firstColumn = false;
            }
            else
            {
                insertBuilder.append(",");
                valuesBuilder.append(",");
            }
            insertBuilder.append(col.getName());
            if (!foundPrimaryKey && col.isPrimaryKey())
            {
                // Primary key is generated inside the database itself
                valuesBuilder.append("getnextid('").append(table).append("')");
                foundPrimaryKey = true;
            }
            else
            {
                valuesBuilder.append('?');
                params.add(col);
            }
        }
        // FIX: reuse the primaryKey looked up above; the old code ignored
        // that local and called getPrimaryKeyColumn(table) a second time
        sql = insertBuilder.append(valuesBuilder.toString()).append(") RETURNING ").append(primaryKey).toString();
        insertSQL.put(table, sql);
    }
    else
    {
        // Already have SQL, just filter parameter columns
        for (ColumnInfo col : info)
        {
            if (!foundPrimaryKey && col.isPrimaryKey())
            {
                foundPrimaryKey = true;
            }
            else
            {
                params.add(col);
            }
        }
    }
    PreparedStatement statement = null;
    if (log.isDebugEnabled())
    {
        log.debug("Running query \"" + sql + "\"");
    }
    ResultSet rs = null;
    try
    {
        statement = context.getDBConnection().prepareStatement(sql);
        loadParameters(statement, params, row);
        // RETURNING yields a single-row result set holding the new key
        rs = statement.executeQuery();
        rs.next();
        return rs.getInt(1);
    }
    finally
    {
        if (rs != null)
        {
            try
            {
                rs.close();
            }
            catch (SQLException sqle)
            {
            }
        }
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
            }
        }
    }
}
/**
 * Generic version of row insertion with separate id get / insert: first
 * fetch a fresh primary key (Oracle sequence or Postgres getnextid()),
 * store it on the row, then run a plain parameterized INSERT.
 *
 * @param context the current DSpace context, supplying the DB connection
 * @param row the row to insert; its primary key column is set here
 * @return the primary key value assigned to the new row
 * @throws SQLException if a database error occurs
 */
private static int doInsertGeneric(Context context, TableRow row) throws SQLException
{
    int newID = -1;
    String table = row.getTable();
    PreparedStatement statement = null;
    ResultSet rs = null;
    try
    {
        // Get an ID (primary key) for this row by using the "getnextid"
        // SQL function in Postgres, or directly with sequences in Oracle
        if (isOracle)
        {
            // assumes a sequence named <table>_seq exists per table
            statement = context.getDBConnection().prepareStatement("SELECT " + table + "_seq" + ".nextval FROM dual");
        }
        else
        {
            statement = context.getDBConnection().prepareStatement("SELECT getnextid(?) AS result");
            loadParameters(statement, new Object[] { table });
        }
        rs = statement.executeQuery();
        rs.next();
        newID = rs.getInt(1);
    }
    finally
    {
        if (rs != null)
        {
            try { rs.close(); } catch (SQLException sqle) { }
        }
        if (statement != null)
        {
            try { statement.close(); } catch (SQLException sqle) { }
        }
    }
    if (newID < 0)
    {
        throw new SQLException("Unable to retrieve sequence ID");
    }
    // Set the ID in the table row object
    row.setColumn(getPrimaryKeyColumn(table), newID);
    Collection<ColumnInfo> info = getColumnInfo(table);
    // Build (and cache) the INSERT statement for this table
    String sql = insertSQL.get(table);
    if (sql == null)
    {
        StringBuilder sqlBuilder = new StringBuilder().append("INSERT INTO ").append(table).append(" ( ");
        boolean firstColumn = true;
        for (ColumnInfo col : info)
        {
            if (firstColumn)
            {
                sqlBuilder.append(col.getName());
                firstColumn = false;
            }
            else
            {
                sqlBuilder.append(",").append(col.getName());
            }
        }
        sqlBuilder.append(") VALUES ( ");
        // Values to insert: one placeholder per column (the key column is
        // bound like any other, since the row now carries its value)
        firstColumn = true;
        for (int i = 0; i < info.size(); i++)
        {
            if (firstColumn)
            {
                sqlBuilder.append("?");
                firstColumn = false;
            }
            else
            {
                sqlBuilder.append(",").append("?");
            }
        }
        // Watch the syntax
        sqlBuilder.append(")");
        sql = sqlBuilder.toString();
        insertSQL.put(table, sql);
    }
    execute(context.getDBConnection(), sql, info, row);
    return newID;
}
/**
 * Main method used to perform a simple connectivity test on the database.
 * Prints the configured connection settings and attempts one connection.
 *
 * @param args The command line arguments (unused)
 */
public static void main(String[] args)
{
    // Get something from dspace.cfg to get the log lines out the way
    String url = ConfigurationManager.getProperty("db.url");
    // Try to connect to the database
    System.out.println("\nAttempting to connect to database: ");
    System.out.println(" - URL: " + url);
    System.out.println(" - Driver: " + ConfigurationManager.getProperty("db.driver"));
    System.out.println(" - Username: " + ConfigurationManager.getProperty("db.username"));
    // SECURITY FIX: never echo the real password to the console; only
    // report whether one is configured
    String password = ConfigurationManager.getProperty("db.password");
    System.out.println(" - Password: " + ((password == null) ? "(not set)" : "****"));
    System.out.println(" - Schema: " + ConfigurationManager.getProperty("db.schema"));
    System.out.println("\nTesting connection...");
    try
    {
        Connection connection = DatabaseManager.getConnection();
        connection.close();
    }
    catch (SQLException sqle)
    {
        System.err.println("\nError: ");
        System.err.println(" - " + sqle);
        System.err.println("\nPlease see the DSpace documentation for assistance.\n");
        System.exit(1);
    }
    System.out.println("Connected successfully!\n");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import org.apache.log4j.Logger;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Represents the results of a database query: a thin forward-only iterator
 * over a JDBC ResultSet that materializes each row as a TableRow.
 * <p>
 * The iterator owns the ResultSet (and, optionally, the Statement that
 * produced it) and closes both when exhausted or when close() is called.
 *
 * @author Peter Breton
 * @version $Revision: 5915 $
 */
public class TableRowIterator
{
    private static final Logger log = Logger.getLogger(TableRowIterator.class);
    /**
     * Results from a query
     */
    private ResultSet results;
    /**
     * Statement used to submit the query; retained only so it can be
     * closed together with the results (see setStatement / close)
     */
    private Statement statemt = null;
    /**
     * The name of the RDBMS table
     */
    private String table;
    /**
     * True if there is a next row
     */
    private boolean hasNext = true;
    /**
     * True if we have already advanced to the next row.
     */
    private boolean hasAdvanced = false;
    /**
     * Column names for the results in this table
     */
    List<String> columnNames = null;
    /**
     * Constructor
     *
     * @param results -
     *            A JDBC ResultSet
     */
    TableRowIterator(ResultSet results)
    {
        // FIX: dropped the redundant "statemt = null" that followed this
        // delegation — the field initializer and the three-arg constructor
        // already leave statemt null
        this(results, null);
    }
    /**
     * Constructor
     *
     * @param results -
     *            A JDBC ResultSet
     * @param table -
     *            The name of the table
     */
    TableRowIterator(ResultSet results, String table)
    {
        this(results, table, null);
    }
    /**
     * Constructor
     *
     * @param results -
     *            A JDBC ResultSet
     * @param table -
     *            The name of the table, or null to derive column names
     *            from the ResultSet metadata
     * @param columnNames -
     *            The column names to use, or null to look them up
     */
    TableRowIterator(ResultSet results, String table, List<String> columnNames)
    {
        this.results = results;
        this.table = table;
        if (columnNames == null)
        {
            try
            {
                this.columnNames = (table == null) ? DatabaseManager.getColumnNames(results.getMetaData()) : DatabaseManager.getColumnNames(table);
            }
            catch (SQLException e)
            {
                // best-effort: leave columnNames null and let callers cope
                this.columnNames = null;
            }
        }
        else
        {
            this.columnNames = Collections.unmodifiableList(columnNames);
        }
    }
    /**
     * Finalize -- this method is called when this object is GC-ed.
     * Acts as a safety net to release JDBC resources.
     */
    protected void finalize() throws Throwable
    {
        close();
        super.finalize();
    }
    /**
     * setStatement -- this method saves the statement used to do the query. We
     * must keep this so that the statement can be closed when we are finished.
     *
     * @param st -
     *            The statement used to do the query that created this
     *            TableRowIterator
     */
    public void setStatement(Statement st)
    {
        statemt = st;
    }
    /**
     * Advance to the next row and return it. Returns null if there are no more
     * rows.
     *
     * @return - The next row, or null if no more rows
     * @exception SQLException -
     *                If a database error occurs while fetching values
     */
    public TableRow next() throws SQLException
    {
        if (results == null)
        {
            return null;
        }
        if (!hasNext())
        {
            return null;
        }
        // hasNext() advanced the cursor; mark it as consumed
        hasAdvanced = false;
        return DatabaseManager.process(results, table, columnNames);
    }
    /**
     * Return true if there are more rows, false otherwise
     *
     * @return - true if there are more rows, false otherwise
     * @exception SQLException -
     *                If a database error occurs while fetching values
     */
    public boolean hasNext() throws SQLException
    {
        if (results == null)
        {
            close();
            return false;
        }
        // Only advance the cursor once per next(); repeated hasNext() calls
        // return the cached answer
        if (hasAdvanced)
        {
            return hasNext;
        }
        hasAdvanced = true;
        hasNext = results.next();
        // No more results
        if (!hasNext)
        {
            close();
        }
        return hasNext;
    }
    /**
     * Saves all the values returned by iterator into a list.
     *
     * As a side effect the result set is closed and no more
     * operations can be performed on this object.
     *
     * @return - A list of all the values returned by the iterator.
     * @exception SQLException -
     *                If a database error occurs while fetching values
     */
    public List<TableRow> toList() throws SQLException
    {
        List<TableRow> resultsList = new ArrayList<TableRow>();
        try
        {
            while (hasNext())
            {
                resultsList.add(next());
            }
        }
        finally
        {
            // Close the connection after converting it to a list.
            this.close();
        }
        return resultsList;
    }
    /**
     * Close the Iterator and release any associated resources.
     * Safe to call repeatedly.
     */
    public void close()
    {
        try
        {
            if (results != null)
            {
                results.close();
                results = null;
            }
        }
        catch (SQLException sqle)
        {
        }
        // try to close the statement if we have one
        try
        {
            if (statemt != null)
            {
                statemt.close();
                statemt = null;
            }
        }
        catch (SQLException sqle)
        {
        }
        columnNames = null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
/**
* Represents a column in an RDBMS table.
*/
public class ColumnInfo
{
/** The name of the column */
private String name;
private String canonicalizedName;
/** The JDBC type of the column */
private int type;
/** True if this column is a primary key */
private boolean isPrimaryKey;
/**
* Constructor
*/
ColumnInfo()
{
}
/**
* Constructor
*/
ColumnInfo(String name, int type)
{
this.name = name;
this.type = type;
this.canonicalizedName = canonicalize(name);
}
/**
* Return the column name.
*
* @return - The column name
*/
public String getName()
{
return name;
}
/**
* Return the column name.
*
* @return - The column name
*/
public String getCanonicalizedName()
{
return canonicalizedName;
}
/**
* Set the column name
*
* @param v -
* The column name
*/
void setName(String v)
{
name = v;
canonicalizedName = canonicalize(name);
}
/**
* Return the JDBC type. This is one of the constants from java.sql.Types.
*
* @return - The JDBC type
* @see java.sql.Types
*/
public int getType()
{
return type;
}
/**
* Set the JDBC type. This should be one of the constants from
* java.sql.Types.
*
* @param v -
* The JDBC type
* @see java.sql.Types
*/
void setType(int v)
{
type = v;
}
/**
* Return true if this column is a primary key.
*
* @return True if this column is a primary key, false otherwise.
*/
public boolean isPrimaryKey()
{
return isPrimaryKey;
}
/**
* Set whether this column is a primary key.
*
* @param v
* True if this column is a primary key.
*/
void setIsPrimaryKey(boolean v)
{
this.isPrimaryKey = v;
}
/*
* Return true if this object is equal to other, false otherwise.
*
* @return True if this object is equal to other, false otherwise.
*/
public boolean equals(Object other)
{
if (!(other instanceof ColumnInfo))
{
return false;
}
ColumnInfo theOther = (ColumnInfo) other;
return ((name != null) ? name.equals(theOther.name)
: (theOther.name == null))
&& (type == theOther.type)
&& (isPrimaryKey == theOther.isPrimaryKey);
}
/*
* Return a hashCode for this object.
*
* @return A hashcode for this object.
*/
public int hashCode()
{
return new StringBuffer().append(name).append(type)
.append(isPrimaryKey).toString().hashCode();
}
static String canonicalize(String column)
{
return column.toLowerCase();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.util.Set;
/**
 * TaskQueue objects manage access to named queues of task entries.
 * Entries represent curation task requests that have been deferred.
 * The queue supports concurrent non-blocking writers, but controls
 * read access to a single reader possessing a ticket (first come,
 * first serve). After the read, the queue remains locked until
 * released by the reader, after which it is typically purged.
 *
 * @author richardrodgers
 */
public interface TaskQueue {
    /**
     * Returns the list of names of the active queues.
     *
     * @return the names of all active queues
     */
    String[] queueNames();
    /**
     * Queues a single entry to a named queue.
     *
     * @param queueName
     *            the name of the queue on which to write
     * @param entry
     *            the task entry
     * @throws IOException
     *             if the entry cannot be written to the queue
     */
    void enqueue(String queueName, TaskQueueEntry entry) throws IOException;
    /**
     * Queues a set of task entries to a named queue.
     *
     * @param queueName
     *            the name of the queue on which to write
     * @param entrySet
     *            the set of task entries
     * @throws IOException
     *             if the entries cannot be written to the queue
     */
    void enqueue(String queueName, Set<TaskQueueEntry> entrySet) throws IOException;
    /**
     * Returns the set of task entries from the named queue. The operation locks
     * the queue from any further enqueue or dequeue operations until a
     * <code>release</code> is called. The ticket may be any number, but a
     * timestamp should guarantee sufficient uniqueness.
     *
     * @param queueName
     *            the name of the queue to read
     * @param ticket
     *            a token which must be presented to release the queue
     * @return set
     *            the current set of queued task entries
     * @throws IOException
     *             if the queue cannot be read
     */
    Set<TaskQueueEntry> dequeue(String queueName, long ticket) throws IOException;
    /**
     * Releases the lock upon the named queue, deleting it if <code>removeEntries</code>
     * is set to true. Implementations should ignore a ticket that does not
     * match the one presented at dequeue time.
     *
     * @param queueName
     *            the name of the queue to release
     * @param ticket
     *            a token that was presented when queue was dequeued.
     * @param removeEntries
     *            flag to indicate whether entries may be deleted
     */
    void release(String queueName, long ticket, boolean removeEntries);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
/**
 * RequiredMetadata task compares item metadata with fields
 * marked as required in input-forms.xml. The task succeeds if all
 * required fields are present in the item metadata, otherwise it fails.
 * Primarily a curation task demonstrator.
 *
 * @author richardrodgers
 */
@Suspendable
public class RequiredMetadata extends AbstractCurationTask
{
    // reader for the input-forms.xml submission form definitions
    private DCInputsReader reader = null;
    // cache of required-field lists
    // NOTE(review): entries are stored under inputs.getFormName() (see
    // getReqList) but looked up by collection handle or "default" — the
    // cache only hits when those keys coincide; confirm this is intended
    private Map<String, List<String>> reqMap = new HashMap<String, List<String>>();
    /**
     * Initialize the task by loading the input-forms configuration.
     *
     * @param curator the Curator controlling this task
     * @param taskId the configured name of this task
     * @throws IOException if input-forms.xml cannot be read
     */
    @Override
    public void init(Curator curator, String taskId) throws IOException
    {
        super.init(curator, taskId);
        try
        {
            reader = new DCInputsReader();
        }
        catch (DCInputsReaderException dcrE)
        {
            // surface configuration failures as IOException, preserving cause
            throw new IOException(dcrE.getMessage(), dcrE);
        }
    }
    /**
     * Perform the curation task upon passed DSO
     *
     * @param dso the DSpace object
     * @return CURATE_SUCCESS if all required fields are present,
     *         CURATE_FAIL if any are missing, CURATE_SKIP for non-items
     * @throws IOException
     */
    @Override
    public int perform(DSpaceObject dso) throws IOException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Item item = (Item)dso;
            int count = 0;
            try
            {
                StringBuilder sb = new StringBuilder();
                String handle = item.getHandle();
                if (handle == null)
                {
                    // we are still in workflow - no handle assigned
                    handle = "in workflow";
                }
                sb.append("Item: ").append(handle);
                // NOTE(review): assumes getOwningCollection() is non-null;
                // confirm this task is never run on collection-less items
                for (String req : getReqList(item.getOwningCollection().getHandle()))
                {
                    DCValue[] vals = item.getMetadata(req);
                    if (vals.length == 0)
                    {
                        sb.append(" missing required field: ").append(req);
                        count++;
                    }
                }
                if (count == 0)
                {
                    sb.append(" has all required fields");
                }
                report(sb.toString());
                setResult(sb.toString());
            }
            catch (DCInputsReaderException dcrE)
            {
                throw new IOException(dcrE.getMessage(), dcrE);
            }
            catch (SQLException sqlE)
            {
                throw new IOException(sqlE.getMessage(), sqlE);
            }
            return (count == 0) ? Curator.CURATE_SUCCESS : Curator.CURATE_FAIL;
        }
        else
        {
            setResult("Object skipped");
            return Curator.CURATE_SKIP;
        }
    }
    /**
     * Build (or fetch from cache) the list of "schema.element.qualifier"
     * names marked required for the form associated with the given handle.
     *
     * @param handle the collection handle used to select the input form
     * @return the list of required field names (may be empty, never null)
     * @throws DCInputsReaderException if the form cannot be resolved
     */
    private List<String> getReqList(String handle) throws DCInputsReaderException
    {
        List<String> reqList = reqMap.get(handle);
        if (reqList == null)
        {
            reqList = reqMap.get("default");
        }
        if (reqList == null)
        {
            reqList = new ArrayList<String>();
            DCInputSet inputs = reader.getInputs(handle);
            // walk every page and row of the form, collecting required fields
            for (int i = 0; i < inputs.getNumberPages(); i++)
            {
                for (DCInput input : inputs.getPageRows(i, true, true))
                {
                    if (input.isRequired())
                    {
                        StringBuilder sb = new StringBuilder();
                        sb.append(input.getSchema()).append(".");
                        sb.append(input.getElement()).append(".");
                        String qual = input.getQualifier();
                        if (qual == null)
                        {
                            qual = "";
                        }
                        sb.append(qual);
                        reqList.add(sb.toString());
                    }
                }
            }
            // cache under the form's name (not the handle) for reuse
            reqMap.put(inputs.getFormName(), reqList);
        }
        return reqList;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
/**
 * FileTaskQueue provides a TaskQueue implementation based on flat files
 * for the queues and semaphores (lock files). A queue is a directory of
 * numbered "queueN" segment files, each guarded by a "lockN" file.
 *
 * @author richardrodgers
 */
public class FileTaskQueue implements TaskQueue
{
    // FIX: register the logger under the concrete class (was
    // TaskQueue.class), so log output is attributed to this implementation
    private static Logger log = Logger.getLogger(FileTaskQueue.class);
    // base directory for curation task queues
    private String tqDir = ConfigurationManager.getProperty("curate", "taskqueue.dir");
    // ticket held by the current reader, or -1L when no read is in progress
    private long readTicket = -1L;
    // indexes of the queue segments locked by the current reader
    private List<Integer> readList = new ArrayList<Integer>();
    public FileTaskQueue()
    {
    }
    @Override
    public String[] queueNames()
    {
        return new File(tqDir).list();
    }
    /**
     * Queue a single entry by delegating to the set-based overload.
     */
    @Override
    public synchronized void enqueue(String queueName, TaskQueueEntry entry)
            throws IOException
    {
        // FIX: was a raw Set; use the parameterized type
        Set<TaskQueueEntry> entrySet = new HashSet<TaskQueueEntry>();
        entrySet.add(entry);
        enqueue(queueName, entrySet);
    }
    @Override
    public synchronized void enqueue(String queueName, Set<TaskQueueEntry> entrySet)
            throws IOException
    {
        // don't block or fail - iterate until an unlocked queue found/created
        int queueIdx = 0;
        File qDir = ensureQueue(queueName);
        while (true)
        {
            File lock = new File(qDir, "lock" + Integer.toString(queueIdx));
            // Check for lock, and create one if it doesn't exist.
            // If the lock file already exists, this will return false
            if (lock.createNewFile())
            {
                // append set contents to queue
                BufferedWriter writer = null;
                try
                {
                    File queue = new File(qDir, "queue" + Integer.toString(queueIdx));
                    writer = new BufferedWriter(new FileWriter(queue, true));
                    Iterator<TaskQueueEntry> iter = entrySet.iterator();
                    while (iter.hasNext())
                    {
                        writer.write(iter.next().toString());
                        writer.newLine();
                    }
                }
                finally
                {
                    if (writer != null)
                    {
                        writer.close();
                    }
                }
                // remove lock
                if (!lock.delete())
                {
                    log.error("Unable to remove lock: " + lock.getName());
                }
                break;
            }
            queueIdx++;
        }
    }
    @Override
    public synchronized Set<TaskQueueEntry> dequeue(String queueName, long ticket)
            throws IOException
    {
        Set<TaskQueueEntry> entrySet = new HashSet<TaskQueueEntry>();
        // only one read may be in flight; a second caller gets an empty set
        if (readTicket == -1L)
        {
            // hold the ticket & copy all Ids available, locking queues
            // stop when no more queues or one found locked
            File qDir = ensureQueue(queueName);
            readTicket = ticket;
            int queueIdx = 0;
            while (true)
            {
                File queue = new File(qDir, "queue" + Integer.toString(queueIdx));
                File lock = new File(qDir, "lock" + Integer.toString(queueIdx));
                // If the queue file exists, atomically check for a lock file and create one if it doesn't exist
                // If the lock file exists already, then this simply returns false
                if (queue.exists() && lock.createNewFile()) {
                    // read contents from file
                    BufferedReader reader = null;
                    try
                    {
                        reader = new BufferedReader(new FileReader(queue));
                        String entryStr = null;
                        while ((entryStr = reader.readLine()) != null)
                        {
                            entryStr = entryStr.trim();
                            if (entryStr.length() > 0)
                            {
                                entrySet.add(new TaskQueueEntry(entryStr));
                            }
                        }
                    }
                    finally
                    {
                        if (reader != null)
                        {
                            reader.close();
                        }
                    }
                    // remember which segments we locked, for release()
                    readList.add(queueIdx);
                }
                else
                {
                    break;
                }
                queueIdx++;
            }
        }
        return entrySet;
    }
    @Override
    public synchronized void release(String queueName, long ticket, boolean remove)
    {
        // only the ticket-holder may release
        if (ticket == readTicket)
        {
            readTicket = -1L;
            File qDir = ensureQueue(queueName);
            // remove locks & queues (if flag true)
            for (Integer queueIdx : readList)
            {
                File lock = new File(qDir, "lock" + Integer.toString(queueIdx));
                if (remove)
                {
                    File queue = new File(qDir, "queue" + Integer.toString(queueIdx));
                    if (!queue.delete())
                    {
                        log.error("Unable to delete queue file: " + queue.getName());
                    }
                }
                if (!lock.delete())
                {
                    log.error("Unable to delete lock file: " + lock.getName());
                }
            }
            readList.clear();
        }
    }
    /**
     * Ensure the directory for the named queue exists, creating it if needed.
     */
    private File ensureQueue(String queueName)
    {
        // create directory structures as needed
        File baseDir = new File(tqDir, queueName);
        if (!baseDir.exists() && !baseDir.mkdirs())
        {
            throw new IllegalStateException("Unable to create directories");
        }
        return baseDir;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Annotation type for CurationTasks. A task is suspendable if it may
 * be suspended (halted) when a condition detected by the curation framework
 * occurs. The current implementation monitors and uses the status code
 * returned from the task to determine suspension, together with the
 * 'invocation mode' - optionally set by the caller on the curation object.
 * Thus, it effectively means that if a task is iterating over a collection,
 * the first error, or failure will halt the process.
 * This insures that the status code and result of the failure are preserved.
 *
 * @author richardrodgers
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Suspendable
{
    // by default, suspension occurs however task is invoked
    Curator.Invoked invoked() default Curator.Invoked.ANY;
    // by default, either ERROR or FAILURE status codes trigger suspension
    // (presumably -1 = ERROR and 1 = FAILURE per Curator's status
    // constants — confirm against Curator)
    int[] statusCodes() default {-1, 1};
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* Utils contains a few commonly occurring methods.
*
* @author richardrodgers
*/
public class Utils
{
    /** Size in bytes of the transfer buffer used by checksum() and copy().
        Each call allocates its own buffer: the previous shared static buffer
        forced every concurrent checksum/copy in the JVM through a single
        lock, serializing unrelated operations. */
    private static final int BUFF_SIZE = 4096;
    /**
     * Calculates and returns a checksum for the passed file using the passed
     * algorithm. The file stream is opened and closed by this method.
     *
     * @param file
     *        file on which to calculate checksum
     * @param algorithm
     *        string for algorithm: 'MD5', 'SHA1', etc
     * @return checksum
     *        string of the calculated checksum
     *
     * @throws IOException
     */
    public static String checksum(File file, String algorithm) throws IOException
    {
        InputStream in = null;
        String chkSum = null;
        try
        {
            in = new FileInputStream(file);
            chkSum = checksum(in, algorithm);
        }
        finally
        {
            if (in != null)
            {
                in.close();
            }
        }
        return chkSum;
    }
    /**
     * Calculates and returns a checksum for the passed IO stream using the
     * passed algorithm. Stream closure is the caller's responsibility.
     *
     * @param in
     *        input stream on which to calculate checksum
     * @param algorithm
     *        string for algorithm: 'MD5', 'SHA1', etc
     * @return checksum
     *        string of the calculated checksum
     *
     * @throws IOException if the stream fails or the algorithm is unknown
     */
    public static String checksum(InputStream in, String algorithm) throws IOException
    {
        try
        {
            DigestInputStream din = new DigestInputStream(in,
                    MessageDigest.getInstance(algorithm));
            byte[] buffer = new byte[BUFF_SIZE];
            // reading drives the digest; the bytes themselves are discarded
            while (din.read(buffer) != -1)
            {
                // no-op: DigestInputStream updates the digest on each read
            }
            return toHex(din.getMessageDigest().digest());
        } catch (NoSuchAlgorithmException nsaE) {
            throw new IOException(nsaE.getMessage(), nsaE);
        }
    }
    /**
     * Reasonably efficient Hex checksum converter. Returns null (not "")
     * when data is null or empty - callers rely on this.
     *
     * @param data
     *        byte array
     * @return hexString
     *        checksum, or null if data is null or empty
     */
    static final char[] HEX_CHARS = "0123456789abcdef".toCharArray();
    public static String toHex(byte[] data) {
        if ((data == null) || (data.length == 0)) {
            return null;
        }
        char[] chars = new char[2 * data.length];
        for (int i = 0; i < data.length; ++i) {
            // high nibble then low nibble
            chars[2 * i] = HEX_CHARS[(data[i] & 0xF0) >>> 4];
            chars[2 * i + 1] = HEX_CHARS[data[i] & 0x0F];
        }
        return new String(chars);
    }
    /**
     * Performs a buffered copy from one file into another. Both streams are
     * opened and closed by this method.
     *
     * @param inFile source file
     * @param outFile destination file
     * @throws IOException
     */
    public static void copy(File inFile, File outFile) throws IOException
    {
        FileInputStream in = null;
        FileOutputStream out = null;
        try
        {
            in = new FileInputStream(inFile);
            out = new FileOutputStream(outFile);
            copy(in, out);
        }
        finally
        {
            if (in != null)
            {
                in.close();
            }
            if (out != null)
            {
                out.close();
            }
        }
    }
    /**
     * Performs a buffered copy from one IO stream into another. Note that
     * stream closure is responsibility of caller.
     *
     * @param in
     *        input stream
     * @param out
     *        output stream
     * @throws IOException
     */
    public static void copy(InputStream in, OutputStream out) throws IOException
    {
        byte[] buffer = new byte[BUFF_SIZE];
        int count;
        while ((count = in.read(buffer)) != -1)
        {
            // write out exactly the bytes just read
            out.write(buffer, 0, count);
        }
        // needed to flush cache
        out.flush();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Community;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.eperson.EPerson;
/**
* CurationCli provides command-line access to Curation tools and processes.
*
* @author richardrodgers
*/
public class CurationCli
{
    /**
     * Command-line entry point. Parses options, validates them, builds a
     * Curator, then either runs the configured tasks against a single handle
     * (or all top-level communities for 'all'), or drains a named task queue.
     * Exits with status 1 on invalid arguments or unknown eperson.
     *
     * @param args command-line arguments (see -h for usage)
     * @throws Exception on any unhandled processing failure
     */
    public static void main(String[] args) throws Exception
    {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("t", "task", true,
                "curation task name");
        options.addOption("T", "taskfile", true,
                "file containing curation task names");
        options.addOption("i", "id", true,
                "Id (handle) of object to perform task on, or 'all' to perform on whole repository");
        options.addOption("q", "queue", true,
                "name of task queue to process");
        options.addOption("e", "eperson", true,
                "email address of curating eperson");
        options.addOption("r", "reporter", true,
                "reporter to manage results - use '-' to report to console. If absent, no reporting");
        options.addOption("v", "verbose", false,
                "report activity to stdout");
        options.addOption("h", "help", false, "help");
        CommandLine line = parser.parse(options, args);
        // collected option values; null means "not supplied"
        String taskName = null;
        String taskFileName = null;
        String idName = null;
        String taskQueueName = null;
        String ePersonName = null;
        String reporterName = null;
        boolean verbose = false;
        if (line.hasOption('h'))
        {
            // print usage with examples and exit cleanly
            HelpFormatter help = new HelpFormatter();
            help.printHelp("CurationCli\n", options);
            System.out
                    .println("\nwhole repo: CurationCli -t estimate -i all");
            System.out
                    .println("single item: CurationCli -t generate -i itemId");
            System.out
                    .println("task queue: CurationCli -q monthly");
            System.exit(0);
        }
        if (line.hasOption('t'))
        { // task
            taskName = line.getOptionValue('t');
        }
        if (line.hasOption('T'))
        { // task file
            taskFileName = line.getOptionValue('T');
        }
        if (line.hasOption('i'))
        { // id
            idName = line.getOptionValue('i');
        }
        if (line.hasOption('q'))
        { // task queue
            taskQueueName = line.getOptionValue('q');
        }
        if (line.hasOption('e'))
        { // eperson
            ePersonName = line.getOptionValue('e');
        }
        if (line.hasOption('r'))
        { // report file
            reporterName = line.getOptionValue('r');
        }
        if (line.hasOption('v'))
        { // verbose
            verbose = true;
        }
        // now validate the args: need a target (id or queue) ...
        if (idName == null && taskQueueName == null)
        {
            System.out.println("Id must be specified: a handle, 'all', or a task queue (-h for help)");
            System.exit(1);
        }
        // ... and something to run (task, task file, or queue)
        if (taskName == null && taskFileName == null && taskQueueName == null)
        {
            System.out.println("A curation task or queue must be specified (-h for help)");
            System.exit(1);
        }
        Context c = new Context();
        if (ePersonName != null)
        {
            // run as the named eperson (subject to authorization)
            EPerson ePerson = EPerson.findByEmail(c, ePersonName);
            if (ePerson == null)
            {
                System.out.println("EPerson not found: " + ePersonName);
                System.exit(1);
            }
            c.setCurrentUser(ePerson);
        }
        else
        {
            // no eperson supplied: run with authorization disabled
            c.setIgnoreAuthorization(true);
        }
        Curator curator = new Curator();
        if (reporterName != null)
        {
            curator.setReporter(reporterName);
        }
        // we are operating in batch mode, if anyone cares.
        curator.setInvoked(Curator.Invoked.BATCH);
        // load curation tasks: single -t task takes precedence over -T file;
        // neither is read here when processing a queue (tasks come from entries)
        if (taskName != null)
        {
            if (verbose)
            {
                System.out.println("Adding task: " + taskName);
            }
            curator.addTask(taskName);
            if (verbose && ! curator.hasTask(taskName))
            {
                System.out.println("Task: " + taskName + " not resolved");
            }
        }
        else if (taskQueueName == null)
        {
            // load taskFile - one task name per line
            BufferedReader reader = null;
            try
            {
                reader = new BufferedReader(new FileReader(taskFileName));
                while ((taskName = reader.readLine()) != null)
                {
                    if (verbose)
                    {
                        System.out.println("Adding task: " + taskName);
                    }
                    curator.addTask(taskName);
                }
            }
            finally
            {
                if (reader != null)
                {
                    reader.close();
                }
            }
        }
        // run tasks against object
        long start = System.currentTimeMillis();
        if (verbose)
        {
            System.out.println("Starting curation");
        }
        if (idName != null)
        {
            if (verbose)
            {
                System.out.println("Curating id: " + idName);
            }
            if ("all".equals(idName))
            {
                // run on all top-level communities
                for (Community comm : Community.findAllTop(c))
                {
                    if (verbose)
                    {
                        System.out.println("Curating community: " + comm.getHandle());
                    }
                    curator.curate(comm);
                }
            }
            else
            {
                curator.curate(c, idName);
            }
        }
        else
        {
            // process the task queue
            TaskQueue queue = (TaskQueue)PluginManager.getSinglePlugin("curate", TaskQueue.class);
            if (queue == null)
            {
                System.out.println("No implementation configured for queue");
                throw new UnsupportedOperationException("No queue service available");
            }
            // use current time as our reader 'ticket'
            long ticket = System.currentTimeMillis();
            Iterator<TaskQueueEntry> entryIter = queue.dequeue(taskQueueName, ticket).iterator();
            while (entryIter.hasNext())
            {
                TaskQueueEntry entry = entryIter.next();
                if (verbose)
                {
                    System.out.println("Curating id: " + entry.getObjectId());
                }
                // each entry carries its own task list - reset the curator
                curator.clear();
                // does entry relate to a DSO or workflow object?
                // (handles contain '/'; workflow ids do not)
                if (entry.getObjectId().indexOf("/") > 0)
                {
                    for (String task : entry.getTaskNames())
                    {
                        curator.addTask(task);
                    }
                    curator.curate(c, entry.getObjectId());
                }
                else
                {
                    // make eperson who queued task the effective user
                    EPerson agent = EPerson.findByEmail(c, entry.getEpersonId());
                    if (agent != null)
                    {
                        c.setCurrentUser(agent);
                    }
                    WorkflowCurator.curate(curator, c, entry.getObjectId());
                }
            }
            // release our ticket, discarding the processed entries
            queue.release(taskQueueName, ticket, true);
        }
        c.complete();
        if (verbose)
        {
            long elapsed = System.currentTimeMillis() - start;
            System.out.println("Ending curation. Elapsed time: " + elapsed);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Annotation type for CurationTasks. A task is mutative if it
 * alters (transforms, mutates) its target object.
*
* @author richardrodgers
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Mutative
{
    // Marker annotation: no elements. Its presence on a task class alone
    // signals to the curation framework that the task may alter its target.
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
* CurationTask describes a rather generic ability to perform an operation
* upon a DSpace object.
*
* @author richardrodgers
*/
public interface CurationTask
{
    /**
     * Initialize task - parameters inform the task of its invoking curator.
     * Since the curator can provide services to the task, this represents
     * curation DI.
     *
     * @param curator the Curator controlling this task
     * @param taskId identifier task should use in invoking services
     * @throws IOException if initialization fails
     */
    void init(Curator curator, String taskId) throws IOException;
    /**
     * Perform the curation task upon passed DSO
     *
     * @param dso the DSpace object
     * @return status code - one of the Curator.CURATE_ values
     * @throws IOException if the task cannot be performed
     */
    int perform(DSpaceObject dso) throws IOException;
    /**
     * Perform the curation task for passed id
     *
     * @param ctx DSpace context object
     * @param id persistent ID for DSpace object
     * @return status code - one of the Curator.CURATE_ values
     * @throws IOException if the task cannot be performed
     */
    int perform(Context ctx, String id) throws IOException;
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* ProfileFormats is a task that creates a distribution table of Bitstream
 * formats for its passed object. Primarily a curation task demonstrator.
*
* @author richardrodgers
*/
@Distributive
public class ProfileFormats extends AbstractCurationTask
{
    // map of formats to occurrences, rebuilt on each perform()
    private Map<String, Integer> fmtTable = new HashMap<String, Integer>();
    /**
     * Perform the curation task upon passed DSO. Being @Distributive, the
     * task visits each contained item itself via distribute().
     *
     * @param dso the DSpace object
     * @return Curator.CURATE_SUCCESS
     * @throws IOException
     */
    @Override
    public int perform(DSpaceObject dso) throws IOException
    {
        fmtTable.clear();
        distribute(dso);
        formatResults();
        return Curator.CURATE_SUCCESS;
    }
    /**
     * Tallies the short description of every bitstream format found in any
     * bundle of the passed item.
     */
    @Override
    protected void performItem(Item item) throws SQLException, IOException
    {
        for (Bundle bundle : item.getBundles())
        {
            for (Bitstream bs : bundle.getBitstreams())
            {
                String fmt = bs.getFormat().getShortDescription();
                // single get + conditional put replaces the former
                // null-branching increment
                Integer count = fmtTable.get(fmt);
                fmtTable.put(fmt, (count == null) ? 1 : count + 1);
            }
        }
    }
    /**
     * Formats the tally as one line per format ("count (supportLevel)
     * description") and forwards it to both report() and setResult().
     */
    private void formatResults() throws IOException
    {
        try
        {
            Context c = new Context();
            StringBuilder sb = new StringBuilder();
            // iterate entries directly - avoids a second map lookup per key
            for (Map.Entry<String, Integer> entry : fmtTable.entrySet())
            {
                BitstreamFormat bsf = BitstreamFormat.findByShortDescription(c, entry.getKey());
                sb.append(String.format("%6d", entry.getValue())).append(" (").
                   append(bsf.getSupportLevelText().charAt(0)).append(") ").
                   append(bsf.getDescription()).append("\n");
            }
            report(sb.toString());
            setResult(sb.toString());
            c.complete();
        }
        catch (SQLException sqlE)
        {
            throw new IOException(sqlE.getMessage(), sqlE);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
// above package assignment temporary pending better aysnch release process
// package org.dspace.ctask.integrity;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/** ClamScan.java
*
* A set of methods to scan using the
* clamav daemon.
*
* TODO: add a check for the inputstream size limit
*
* @author wbossons
*/
@Suspendable(invoked=Curator.Invoked.INTERACTIVE)
public class ClamScan extends AbstractCurationTask
{
    private static final int DEFAULT_CHUNK_SIZE = 4096;//2048
    // clamd protocol command bytes ('z' prefix = NUL-terminated variant)
    private static final byte[] INSTREAM = "zINSTREAM\0".getBytes();
    private static final byte[] PING = "zPING\0".getBytes();
    private static final byte[] STATS = "nSTATS\n".getBytes();//prefix with z
    private static final byte[] IDSESSION = "zIDSESSION\0".getBytes();
    private static final byte[] END = "zEND\0".getBytes();
    private static final String PLUGIN_PREFIX = "clamav";
    private static final String INFECTED_MESSAGE = "had virus detected.";
    private static final String CLEAN_MESSAGE = "had no viruses detected.";
    private static final String CONNECT_FAIL_MESSAGE = "Unable to connect to virus service - check setup";
    private static final String SCAN_FAIL_MESSAGE = "Error encountered using virus service - check setup";
    private static final String NEW_ITEM_HANDLE = "in workflow";
    private static Logger log = Logger.getLogger(ClamScan.class);
    // connection settings, loaded from configuration in init()
    // NOTE(review): static but written per-instance in init() - assumes one
    // configuration per JVM; confirm before running tasks concurrently
    private static String host = null;
    private static int port = 0;
    private static int timeout = 0;
    private static boolean failfast = true;
    // per-performance state
    private int status = Curator.CURATE_UNSET;
    private List<String> results = null;
    private Socket socket = null;
    private DataOutputStream dataOutputStream = null;
    /**
     * Reads clamd connection parameters from the 'clamav' module
     * configuration.
     */
    @Override
    public void init(Curator curator, String taskId) throws IOException
    {
        super.init(curator, taskId);
        host = ConfigurationManager.getProperty(PLUGIN_PREFIX, "service.host");
        port = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "service.port");
        timeout = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "socket.timeout");
        failfast = ConfigurationManager.getBooleanProperty(PLUGIN_PREFIX, "scan.failfast");
    }
    /**
     * Scans every bitstream in the item's ORIGINAL bundle against the clamd
     * service. Non-item objects are skipped.
     *
     * @param dso the DSpace object
     * @return CURATE_SKIP for non-items, CURATE_ERROR on service trouble,
     *         CURATE_FAIL if any bitstream is infected, else CURATE_SUCCESS
     */
    @Override
    public int perform(DSpaceObject dso) throws IOException
    {
        status = Curator.CURATE_SKIP;
        logDebugMessage("The target dso is " + dso.getName());
        if (dso instanceof Item)
        {
            status = Curator.CURATE_SUCCESS;
            Item item = (Item)dso;
            try
            {
                openSession();
            }
            catch (IOException ioE)
            {
                // no point going further - set result and error out
                closeSession();
                setResult(CONNECT_FAIL_MESSAGE);
                return Curator.CURATE_ERROR;
            }
            try
            {
                // guard against items lacking an ORIGINAL bundle: the
                // previous version indexed [0] unconditionally and threw
                // ArrayIndexOutOfBoundsException for such items
                Bundle[] bundles = item.getBundles("ORIGINAL");
                results = new ArrayList<String>();
                if (bundles.length > 0)
                {
                    for (Bitstream bitstream : bundles[0].getBitstreams())
                    {
                        InputStream inputstream = bitstream.retrieve();
                        logDebugMessage("Scanning " + bitstream.getName() + " . . . ");
                        int bstatus = scan(bitstream, inputstream, getItemHandle(item));
                        inputstream.close();
                        if (bstatus == Curator.CURATE_ERROR)
                        {
                            // no point going further - set result and error out
                            setResult(SCAN_FAIL_MESSAGE);
                            status = bstatus;
                            break;
                        }
                        if (failfast && bstatus == Curator.CURATE_FAIL)
                        {
                            // stop at the first infected bitstream
                            status = bstatus;
                            break;
                        }
                        else if (bstatus == Curator.CURATE_FAIL &&
                                 status == Curator.CURATE_SUCCESS)
                        {
                            // record the failure but keep scanning
                            status = bstatus;
                        }
                    }
                }
            }
            catch (AuthorizeException authE)
            {
                throw new IOException(authE.getMessage(), authE);
            }
            catch (SQLException sqlE)
            {
                throw new IOException(sqlE.getMessage(), sqlE);
            }
            finally
            {
                closeSession();
            }
            if (status != Curator.CURATE_ERROR)
            {
                formatResults(item);
            }
        }
        return status;
    }
    /**
     * Opens a socket to the clamd service, sets the configured timeout and
     * starts an IDSESSION.
     *
     * @throws IOException if connection or session setup fails
     */
    private void openSession() throws IOException
    {
        socket = new Socket();
        try
        {
            logDebugMessage("Connecting to " + host + ":" + port);
            socket.connect(new InetSocketAddress(host, port));
        }
        catch (IOException e)
        {
            log.error("Failed to connect to clamd . . .", e);
            throw (e);
        }
        try
        {
            socket.setSoTimeout(timeout);
        }
        catch (SocketException e)
        {
            log.error("Could not set socket timeout . . . " + timeout + "ms", e);
            throw (new IOException(e));
        }
        try
        {
            dataOutputStream = new DataOutputStream(socket.getOutputStream());
        }
        catch (IOException e)
        {
            log.error("Failed to open OutputStream . . . ", e);
            throw (e);
        }
        try
        {
            dataOutputStream.write(IDSESSION);
        }
        catch (IOException e)
        {
            log.error("Error initiating session with IDSESSION command . . . ", e);
            throw (e);
        }
    }
    /**
     * Sends END to close the IDSESSION in clamd (best effort) and closes the
     * socket.
     */
    private void closeSession()
    {
        if (dataOutputStream != null)
        {
            try
            {
                dataOutputStream.write(END);
            }
            catch (IOException e)
            {
                log.error("Exception closing dataOutputStream", e);
            }
        }
        try
        {
            logDebugMessage("Closing the socket for ClamAv daemon . . . ");
            socket.close();
        }
        catch (IOException e)
        {
            log.error("Exception closing socket", e);
        }
    }
    /**
     * Issues the INSTREAM command, streams the bitstream content to clamd in
     * length-prefixed chunks, and interprets the daemon's response.
     *
     * @param bitstream the bitstream, for reporting results
     * @param inputstream the InputStream to read
     * @param itemHandle the item handle, for reporting results
     * @return CURATE_FAIL if infected, CURATE_SUCCESS if clean,
     *         CURATE_ERROR on any protocol/IO problem
     */
    // shared scratch buffer - makes scan() non-reentrant; assumed safe
    // because the framework invokes a task serially - TODO confirm
    static final byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];
    private int scan(Bitstream bitstream, InputStream inputstream, String itemHandle)
    {
        try
        {
            dataOutputStream.write(INSTREAM);
        }
        catch (IOException e)
        {
            log.error("Error writing INSTREAM command . . .");
            return Curator.CURATE_ERROR;
        }
        // Loop until EOF (-1). InputStream.read may legitimately return
        // fewer bytes than requested without being at end-of-stream, so the
        // previous 'while (read == DEFAULT_CHUNK_SIZE)' condition could
        // silently truncate the data sent for scanning.
        int read = 0;
        while (read != -1)
        {
            try
            {
                read = inputstream.read(buffer);
            }
            catch (IOException e)
            {
                log.error("Failed attempting to read the InputStream . . . ");
                return Curator.CURATE_ERROR;
            }
            if (read > 0)
            {
                try
                {
                    dataOutputStream.writeInt(read);
                    dataOutputStream.write(buffer, 0, read);
                }
                catch (IOException e)
                {
                    log.error("Could not write to the socket . . . ");
                    return Curator.CURATE_ERROR;
                }
            }
        }
        try
        {
            // a zero-length chunk terminates the INSTREAM transfer
            dataOutputStream.writeInt(0);
            dataOutputStream.flush();
        }
        catch (IOException e)
        {
            log.error("Error writing zero-length chunk to socket") ;
            return Curator.CURATE_ERROR;
        }
        try
        {
            read = socket.getInputStream().read(buffer);
        }
        catch (IOException e)
        {
            log.error( "Error reading result from socket");
            return Curator.CURATE_ERROR;
        }
        if (read > 0)
        {
            String response = new String(buffer, 0, read);
            logDebugMessage("Response: " + response);
            // clamd reports infections with a line containing 'FOUND'
            if (response.indexOf("FOUND") != -1)
            {
                String itemMsg = "item - " + itemHandle + ": ";
                String bsMsg = "bitstream - " + bitstream.getName() +
                               ": SequenceId - " + bitstream.getSequenceID() + ": infected";
                report(itemMsg + bsMsg);
                results.add(bsMsg);
                return Curator.CURATE_FAIL;
            }
            else
            {
                return Curator.CURATE_SUCCESS;
            }
        }
        // empty response - treat as a service error
        return Curator.CURATE_ERROR;
    }
    /**
     * Builds the human-readable result string (clean/infected summary plus
     * per-bitstream detail) and sets it on the curator.
     */
    private void formatResults(Item item) throws IOException
    {
        StringBuilder sb = new StringBuilder();
        sb.append("Item: ").append(getItemHandle(item)).append(" ");
        if (status == Curator.CURATE_FAIL)
        {
            sb.append(INFECTED_MESSAGE);
            int count = 0;
            for (String scanresult : results)
            {
                sb.append("\n").append(scanresult).append("\n");
                count++;
            }
            sb.append(count).append(" virus(es) found. ")
              .append(" failfast: ").append(failfast);
        }
        else
        {
            sb.append(CLEAN_MESSAGE);
        }
        setResult(sb.toString());
    }
    /** Returns the item's handle, or a placeholder for items still in workflow. */
    private static String getItemHandle(Item item)
    {
        String handle = item.getHandle();
        return (handle != null) ? handle: NEW_ITEM_HANDLE;
    }
    /** Logs at debug level, avoiding message construction when disabled. */
    private void logDebugMessage(String message)
    {
        if (log.isDebugEnabled())
        {
            log.debug(message);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.util.Arrays;
import java.util.List;
/**
* TaskQueueEntry defines the record or entry in the named task queues.
* Regular immutable value object class.
*
* @author richardrodgers
*/
public final class TaskQueueEntry
{
    private final String epersonId;
    private final String submitTime;
    // comma-separated task names
    private final String tasks;
    private final String objId;
    /**
     * TaskQueueEntry constructor with enumerated field values.
     *
     * @param epersonId email of the enqueuing agent
     * @param submitTime enqueue timestamp (epoch millis)
     * @param taskNames list of task (plugin) names
     * @param objId object identifier (handle or workflow id)
     */
    public TaskQueueEntry(String epersonId, long submitTime,
                          List<String> taskNames, String objId)
    {
        this.epersonId = epersonId;
        this.submitTime = Long.toString(submitTime);
        // join with ',' - delimiter-before-append avoids the
        // substring(0, -1) exception the old code threw on an empty list
        StringBuilder sb = new StringBuilder();
        for (String tName : taskNames)
        {
            if (sb.length() > 0)
            {
                sb.append(",");
            }
            sb.append(tName);
        }
        this.tasks = sb.toString();
        this.objId = objId;
    }
    /**
     * Constructor with a pipe-separated list of field values.
     *
     * @param entry
     *        list of field values separated by '|'s
     */
    public TaskQueueEntry(String entry)
    {
        String[] tokens = entry.split("\\|");
        epersonId = tokens[0];
        submitTime = tokens[1];
        tasks = tokens[2];
        objId = tokens[3];
    }
    /**
     * Returns the epersonId (email) of the agent who enqueued this task entry.
     *
     * @return epersonId
     *         name of EPerson (email) or 'unknown' if none recorded.
     */
    public String getEpersonId()
    {
        return epersonId;
    }
    /**
     * Returns the timestamp of when this entry was enqueued.
     *
     * @return time
     *         Submission timestamp
     */
    public long getSubmitTime()
    {
        // parseLong avoids the needless boxing of Long.valueOf
        return Long.parseLong(submitTime);
    }
    /**
     * Return the list of tasks associated with this entry.
     *
     * @return tasks
     *         the list of task names (Plugin names)
     */
    public List<String> getTaskNames()
    {
        return Arrays.asList(tasks.split(","));
    }
    /**
     * Returns the object identifier.
     * @return objId
     *         usually a handle or workflow id
     */
    public String getObjectId()
    {
        return objId;
    }
    /**
     * Returns a string representation of the entry
     * @return string
     *         pipe-separated field values
     */
    @Override
    public String toString()
    {
        return epersonId + "|" + submitTime + "|" + tasks + "|" + objId;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.ItemIterator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.handle.HandleManager;
/**
 * Curator orchestrates and manages the application of one or more curation
* tasks to a DSpace object. It provides common services and runtime
* environment to the tasks.
*
* @author richardrodgers
*/
public class Curator
{
    // status code values
    /** Curator unable to find requested task */
    public static final int CURATE_NOTASK = -3;
    /** no assigned status code - typically because task not yet performed */
    public static final int CURATE_UNSET = -2;
    /** task encountered a error in processing */
    public static final int CURATE_ERROR = -1;
    /** task completed successfully */
    public static final int CURATE_SUCCESS = 0;
    /** task failed */
    public static final int CURATE_FAIL = 1;
    /** task was not applicable to passed object */
    public static final int CURATE_SKIP = 2;
    // invocation modes - used by Suspendable tasks
    public static enum Invoked { INTERACTIVE, BATCH, ANY };
    private static Logger log = Logger.getLogger(Curator.class);
    // task name -> runner wrapper (holds status/result of last performance)
    private Map<String, TaskRunner> trMap = new HashMap<String, TaskRunner>();
    // task names in the order they were added (current execution order)
    private List<String> perfList = new ArrayList<String>();
    // lazily-resolved queue plugin, see queue()
    private TaskQueue taskQ = null;
    // name of reporting stream; '-' means stdout, null means no reporting
    private String reporter = null;
    // invocation mode consulted by Suspendable tasks; null until assigned
    private Invoked iMode = null;
    /**
     * No-arg constructor
     */
    public Curator()
    {
    }
    /**
     * Add a task to the set to be performed. Caller should make no assumptions
     * on execution ordering. Unresolvable tasks or tasks whose init fails are
     * logged and silently not added.
     *
     * @param taskName - logical name of task
     * @return this curator - to support concatenating invocation style
     */
    public Curator addTask(String taskName)
    {
        CurationTask task = (CurationTask)PluginManager.getNamedPlugin("curate", CurationTask.class, taskName);
        if (task != null)
        {
            try
            {
                task.init(this, taskName);
                trMap.put(taskName, new TaskRunner(task, taskName));
                // performance order currently FIFO - to be revisited
                perfList.add(taskName);
            }
            catch (IOException ioE)
            {
                log.error("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
            }
        }
        else
        {
            log.error("Task: '" + taskName + "' does not resolve");
        }
        return this;
    }
    /**
     * Returns whether this curator has the specified task
     *
     * @param taskName - logical name of the task
     * @return true if task has been configured, else false
     */
    public boolean hasTask(String taskName)
    {
        return perfList.contains(taskName);
    }
    /**
     * Removes a task from the set to be performed.
     *
     * @param taskName - logical name of the task
     * @return this curator - to support concatenating invocation style
     */
    public Curator removeTask(String taskName)
    {
        trMap.remove(taskName);
        perfList.remove(taskName);
        return this;
    }
    /**
     * Assigns invocation mode.
     *
     * @param mode one of INTERACTIVE, BATCH, ANY
     * @return this curator - to support concatenating invocation style
     */
    public Curator setInvoked(Invoked mode)
    {
        iMode = mode;
        return this;
    }
    /**
     * Sets the reporting stream for this curator.
     *
     * @param reporter name of reporting stream. The name '-'
     *                 causes reporting to standard out.
     * @return the Curator instance
     */
    public Curator setReporter(String reporter)
    {
        this.reporter = reporter;
        return this;
    }
    /**
     * Performs all configured tasks upon object identified by id. If
     * the object can be resolved as a handle, the DSO will be the
     * target object; otherwise the raw id is handed to each task's
     * perform(Context, String).
     *
     * @param c a DSpace context
     * @param id an object identifier
     * @throws IOException
     */
    public void curate(Context c, String id) throws IOException
    {
        if (id == null)
        {
            log.error("curate - null id");
            return;
        }
        try
        {
            DSpaceObject dso = HandleManager.resolveToObject(c, id);
            if (dso != null)
            {
                curate(dso);
            }
            else
            {
                for (String taskName : perfList)
                {
                    trMap.get(taskName).run(c, id);
                }
            }
        }
        catch (SQLException sqlE)
        {
            throw new IOException(sqlE.getMessage(), sqlE);
        }
    }
    /**
     * Performs all configured tasks upon DSpace object. Items and
     * Distributive tasks are invoked directly on the object; otherwise
     * collections and communities are walked recursively so the task
     * sees every contained object.
     *
     * @param dso the DSpace object
     * @throws IOException
     */
    public void curate(DSpaceObject dso) throws IOException
    {
        if (dso == null)
        {
            log.error("curate - null dso");
            return;
        }
        int type = dso.getType();
        for (String taskName : perfList)
        {
            TaskRunner tr = trMap.get(taskName);
            // do we need to iterate over the object ?
            if (type == Constants.ITEM ||
                tr.task.getClass().isAnnotationPresent(Distributive.class))
            {
                tr.run(dso);
            }
            else if (type == Constants.COLLECTION)
            {
                doCollection(tr, (Collection)dso);
            }
            else if (type == Constants.COMMUNITY)
            {
                doCommunity(tr, (Community)dso);
            }
        }
    }
    /**
     * Places a curation request for the object identified by id on a
     * managed queue named by the queueId. The entry records the current
     * user and this curator's task list.
     *
     * @param c A DSpace context
     * @param id an object Id
     * @param queueId name of a queue. If queue does not exist, it will
     *                be created automatically.
     * @throws IOException
     */
    public void queue(Context c, String id, String queueId) throws IOException
    {
        if (taskQ == null)
        {
            taskQ = (TaskQueue)PluginManager.getSinglePlugin("curate", TaskQueue.class);
        }
        if (taskQ != null)
        {
            taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(),
                          System.currentTimeMillis(), perfList, id));
        }
        else
        {
            log.error("curate - no TaskQueue implemented");
        }
    }
    /**
     * Removes all configured tasks from the Curator.
     */
    public void clear()
    {
        trMap.clear();
        perfList.clear();
    }
    /**
     * Adds a message to the configured reporting stream.
     *
     * @param message the message to output to the reporting stream.
     */
    public void report(String message)
    {
        // Stub for now - only console reporting is implemented
        if ("-".equals(reporter))
        {
            System.out.println(message);
        }
    }
    /**
     * Returns the status code for the latest performance of the named task.
     *
     * @param taskName the task name
     * @return the status code - one of CURATE_ values
     */
    public int getStatus(String taskName)
    {
        TaskRunner tr = trMap.get(taskName);
        return (tr != null) ? tr.statusCode : CURATE_NOTASK;
    }
    /**
     * Returns the result string for the latest performance of the named task.
     *
     * @param taskName the task name
     * @return the result string, or <code>null</code> if task has not set it.
     */
    public String getResult(String taskName)
    {
        TaskRunner tr = trMap.get(taskName);
        return (tr != null) ? tr.result : null;
    }
    /**
     * Assigns a result to the performance of the named task.
     *
     * @param taskName the task name
     * @param result a string indicating results of performing task.
     */
    public void setResult(String taskName, String result)
    {
        TaskRunner tr = trMap.get(taskName);
        if (tr != null)
        {
            tr.setResult(result);
        }
    }
    /**
     * Returns whether a given DSO is a 'container' - collection or community
     * @param dso a DSpace object
     * @return true if a container, false otherwise
     */
    public static boolean isContainer(DSpaceObject dso)
    {
        return (dso.getType() == Constants.COMMUNITY ||
                dso.getType() == Constants.COLLECTION);
    }
    /**
     * Recursively runs the task over a community, its subcommunities and
     * collections. Stops and returns false as soon as any run signals
     * suspension.
     */
    private boolean doCommunity(TaskRunner tr, Community comm) throws IOException
    {
        try
        {
            if (! tr.run(comm))
            {
                return false;
            }
            for (Community subcomm : comm.getSubcommunities())
            {
                if (! doCommunity(tr, subcomm))
                {
                    return false;
                }
            }
            for (Collection coll : comm.getCollections())
            {
                if (! doCollection(tr, coll))
                {
                    return false;
                }
            }
        }
        catch (SQLException sqlE)
        {
            throw new IOException(sqlE.getMessage(), sqlE);
        }
        return true;
    }
    /**
     * Runs the task over a collection and each of its items. Stops and
     * returns false as soon as any run signals suspension.
     */
    private boolean doCollection(TaskRunner tr, Collection coll) throws IOException
    {
        try
        {
            if (! tr.run(coll))
            {
                return false;
            }
            ItemIterator iter = coll.getItems();
            while (iter.hasNext())
            {
                if (! tr.run(iter.next()))
                {
                    return false;
                }
            }
        }
        catch (SQLException sqlE)
        {
            throw new IOException(sqlE.getMessage(), sqlE);
        }
        return true;
    }
    /**
     * Wraps a task with its last status/result and its Suspendable
     * configuration (read once from annotations at construction).
     * run() returns false when the task's status should suspend curation.
     */
    private class TaskRunner
    {
        CurationTask task = null;
        String taskName = null;
        int statusCode = CURATE_UNSET;
        String result = null;
        // Suspendable configuration; mode == null means not suspendable
        Invoked mode = null;
        int[] codes = null;
        public TaskRunner(CurationTask task, String name)
        {
            this.task = task;
            taskName = name;
            parseAnnotations(task.getClass());
        }
        public boolean run(DSpaceObject dso) throws IOException
        {
            if (dso == null)
            {
                throw new IOException("DSpaceObject is null");
            }
            statusCode = task.perform(dso);
            return ! suspend(statusCode);
        }
        public boolean run(Context c, String id) throws IOException
        {
            if (c == null || id == null)
            {
                throw new IOException("Context or identifier is null");
            }
            statusCode = task.perform(c, id);
            return ! suspend(statusCode);
        }
        public void setResult(String result)
        {
            this.result = result;
        }
        // caches the task's @Suspendable settings, if present
        private void parseAnnotations(Class tClass)
        {
            Suspendable suspendAnn = (Suspendable)tClass.getAnnotation(Suspendable.class);
            if (suspendAnn != null)
            {
                mode = suspendAnn.invoked();
                codes = suspendAnn.statusCodes();
            }
        }
        // true when the task is suspendable in the curator's current
        // invocation mode and the status code is one of its trigger codes
        private boolean suspend(int code)
        {
            if (mode != null && (mode.equals(Invoked.ANY) || mode.equals(iMode)))
            {
                for (int i : codes)
                {
                    if (code == i)
                    {
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import org.dspace.content.Item;
import java.util.Arrays;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
// Warning - static import ahead!
import static javax.xml.stream.XMLStreamConstants.*;
/**
* WorkflowCurator manages interactions between curation and workflow.
* Specifically, it is invoked in WorkflowManager to allow the
* performance of curation tasks during workflow.
*
* @author richardrodgers
*/
public class WorkflowCurator {

    /** log4j logger */
    private static Logger log = Logger.getLogger(WorkflowCurator.class);

    /** Curation configuration file: [dspace.dir]/config/workflow-curation.xml */
    private static File cfgFile = new File(ConfigurationManager.getProperty("dspace.dir") +
                                           File.separator + "config" + File.separator +
                                           "workflow-curation.xml");

    /** Map of collection handle (or "default") to its configured task set */
    private static Map<String, TaskSet> tsMap = new HashMap<String, TaskSet>();

    /** Recognized workflow step names, in workflow order */
    private static final String[] flowSteps = { "step1", "step2", "step3", "archive" };

    static {
        try {
            loadTaskConfig();
        } catch (IOException e) {
            // include the cause so configuration failures are diagnosable
            log.fatal("Unable to load config: " + cfgFile.getAbsolutePath(), e);
        }
    }

    /**
     * Returns whether any curation is configured for the workflow item's
     * collection at its current workflow step.
     *
     * @param wfi the workflow item
     * @return true if curation tasks are configured for this step
     */
    public static boolean needsCuration(WorkflowItem wfi) {
        return getFlowStep(wfi) != null;
    }

    /**
     * Determines and executes curation on a Workflow item.
     *
     * @param c the context
     * @param wfi the workflow item
     * @return true if curation was completed or not required,
     *         false if tasks were queued for later completion,
     *         or item was rejected
     * @throws AuthorizeException
     * @throws IOException
     * @throws SQLException
     */
    public static boolean doCuration(Context c, WorkflowItem wfi)
            throws AuthorizeException, IOException, SQLException {
        FlowStep step = getFlowStep(wfi);
        if (step != null) {
            Curator curator = new Curator();
            // are we going to perform, or just put on queue?
            if (step.queue != null) {
                for (Task task : step.tasks) {
                    curator.addTask(task.name);
                }
                curator.queue(c, String.valueOf(wfi.getID()), step.queue);
                wfi.update();
                return false;
            } else {
                return curate(curator, c, wfi);
            }
        }
        return true;
    }

    /**
     * Determines and executes curation of a Workflow item, advancing the
     * workflow if curation succeeds.
     *
     * @param curator the curator to use
     * @param c the user context
     * @param wfId the workflow id
     * @return true if the item was curated and advanced, false otherwise
     * @throws AuthorizeException
     * @throws IOException
     * @throws SQLException
     */
    public static boolean curate(Curator curator, Context c, String wfId)
            throws AuthorizeException, IOException, SQLException {
        WorkflowItem wfi = WorkflowItem.find(c, Integer.parseInt(wfId));
        if (wfi != null) {
            if (curate(curator, c, wfi)) {
                WorkflowManager.advance(c, wfi, c.getCurrentUser(), false, true);
                return true;
            }
        } else {
            log.warn(LogManager.getHeader(c, "No workflow item found for id: " + wfId, null));
        }
        return false;
    }

    /**
     * Executes the configured curation tasks for the item's current workflow
     * step, notifying configured contacts and honoring any "reject"/"approve"
     * powers the tasks hold.
     *
     * @param curator the curator to use
     * @param c the user context
     * @param wfi the workflow item
     * @return true if the submission may proceed, false if it was rejected
     * @throws AuthorizeException
     * @throws IOException
     * @throws SQLException
     */
    public static boolean curate(Curator curator, Context c, WorkflowItem wfi)
            throws AuthorizeException, IOException, SQLException {
        FlowStep step = getFlowStep(wfi);
        if (step != null) {
            // assign collection to item in case task needs it
            Item item = wfi.getItem();
            item.setOwningCollection(wfi.getCollection());
            for (Task task : step.tasks) {
                curator.addTask(task.name);
                curator.curate(item);
                int status = curator.getStatus(task.name);
                String result = curator.getResult(task.name);
                String action = "none";
                if (status == Curator.CURATE_FAIL) {
                    // task failed - notify any contacts the task has assigned
                    if (task.powers.contains("reject")) {
                        action = "reject";
                    }
                    notifyContacts(c, wfi, task, "fail", action, result);
                    // if task so empowered, reject submission and terminate
                    if ("reject".equals(action)) {
                        WorkflowManager.reject(c, wfi, c.getCurrentUser(),
                                               task.name + ": " + result);
                        return false;
                    }
                } else if (status == Curator.CURATE_SUCCESS) {
                    if (task.powers.contains("approve")) {
                        action = "approve";
                    }
                    notifyContacts(c, wfi, task, "success", action, result);
                    if ("approve".equals(action)) {
                        // cease further task processing and advance submission
                        return true;
                    }
                } else if (status == Curator.CURATE_ERROR) {
                    notifyContacts(c, wfi, task, "error", action, result);
                }
                curator.clear();
            }
        }
        return true;
    }

    /**
     * Notifies the contacts the task configures for the given outcome status.
     *
     * @param c the user context
     * @param wfi the workflow item
     * @param task the task whose contacts to notify
     * @param status outcome key: "fail", "success" or "error"
     * @param action action taken ("none", "reject" or "approve")
     * @param message the task's result message
     */
    private static void notifyContacts(Context c, WorkflowItem wfi, Task task,
                                       String status, String action, String message)
            throws AuthorizeException, IOException, SQLException {
        EPerson[] epa = resolveContacts(c, task.getContacts(status), wfi);
        if (epa.length > 0) {
            WorkflowManager.notifyOfCuration(c, wfi, epa, task.name, action, message);
        }
    }

    /**
     * Resolves a list of contact designators to EPersons. Designators may be
     * the literals "$flowgroup", "$colladmin", "$siteadmin", an eperson email
     * address, or a group name.
     */
    private static EPerson[] resolveContacts(Context c, List<String> contacts,
                                             WorkflowItem wfi)
            throws AuthorizeException, IOException, SQLException {
        List<EPerson> epList = new ArrayList<EPerson>();
        for (String contact : contacts) {
            // decode contacts
            if ("$flowgroup".equals(contact)) {
                // special literal for current flowgroup
                int step = state2step(wfi.getState());
                // make sure this is a real workflow step (not "archive" = 4)
                if (step < 4) {
                    Group wfGroup = wfi.getCollection().getWorkflowGroup(step);
                    if (wfGroup != null) {
                        epList.addAll(Arrays.asList(Group.allMembers(c, wfGroup)));
                    }
                }
            } else if ("$colladmin".equals(contact)) {
                Group adGroup = wfi.getCollection().getAdministrators();
                if (adGroup != null) {
                    epList.addAll(Arrays.asList(Group.allMembers(c, adGroup)));
                }
            } else if ("$siteadmin".equals(contact)) {
                EPerson siteEp = EPerson.findByEmail(c,
                        ConfigurationManager.getProperty("mail.admin"));
                if (siteEp != null) {
                    epList.add(siteEp);
                }
            } else if (contact.indexOf("@") > 0) {
                // little shaky heuristic here - assume an eperson email name
                EPerson ep = EPerson.findByEmail(c, contact);
                if (ep != null) {
                    epList.add(ep);
                }
            } else {
                // assume it is an arbitrary group name
                Group group = Group.findByName(c, contact);
                if (group != null) {
                    epList.addAll(Arrays.asList(Group.allMembers(c, group)));
                }
            }
        }
        return epList.toArray(new EPerson[epList.size()]);
    }

    /**
     * Looks up the configured flow step for the item's collection (falling
     * back to the "default" mapping) at the item's current workflow state.
     *
     * @return the matching flow step, or null if none is configured
     */
    private static FlowStep getFlowStep(WorkflowItem wfi) {
        Collection coll = wfi.getCollection();
        String key = tsMap.containsKey(coll.getHandle()) ? coll.getHandle() : "default";
        TaskSet ts = tsMap.get(key);
        if (ts != null) {
            int myStep = state2step(wfi.getState());
            for (FlowStep fstep : ts.steps) {
                if (fstep.step == myStep) {
                    return fstep;
                }
            }
        }
        return null;
    }

    // Maps a workflow state constant to a step number 1-3, or 4 for "archive"
    private static int state2step(int state) {
        if (state <= WorkflowManager.WFSTATE_STEP1POOL)
        {
            return 1;
        }
        if (state <= WorkflowManager.WFSTATE_STEP2POOL)
        {
            return 2;
        }
        if (state <= WorkflowManager.WFSTATE_STEP3POOL)
        {
            return 3;
        }
        return 4;
    }

    // Maps a configured step name ("step1".."step3", "archive") to its number,
    // or -1 (with a warning) for an unrecognized name
    private static int stepName2step(String name) {
        for (int i = 0; i < flowSteps.length; i++) {
            if (flowSteps[i].equals(name)) {
                return i + 1;
            }
        }
        // invalid stepName - log
        log.warn("Invalid step: '" + name + "' provided");
        return -1;
    }

    /**
     * Parses the workflow curation configuration file and populates the
     * handle-to-taskset map. Invoked once from the static initializer.
     *
     * @throws IOException if the file cannot be read or is not valid XML
     */
    private static void loadTaskConfig() throws IOException {
        Map<String, String> collMap = new HashMap<String, String>();
        Map<String, TaskSet> setMap = new HashMap<String, TaskSet>();
        TaskSet taskSet = null;
        FlowStep flowStep = null;
        Task task = null;
        String type = null;
        FileInputStream fis = null;
        try {
            XMLInputFactory factory = XMLInputFactory.newInstance();
            fis = new FileInputStream(cfgFile);
            XMLStreamReader reader = factory.createXMLStreamReader(fis, "UTF-8");
            while (reader.hasNext()) {
                int event = reader.next();
                if (event == START_ELEMENT) {
                    String eName = reader.getLocalName();
                    if ("mapping".equals(eName)) {
                        // attribute 0 = collection handle, attribute 1 = taskset name
                        collMap.put(reader.getAttributeValue(0),
                                    reader.getAttributeValue(1));
                    } else if ("taskset".equals(eName)) {
                        taskSet = new TaskSet(reader.getAttributeValue(0));
                    } else if ("flowstep".equals(eName)) {
                        // an optional second attribute names a queue for deferred curation
                        int count = reader.getAttributeCount();
                        String queue = (count == 2) ?
                                       reader.getAttributeValue(1) : null;
                        flowStep = new FlowStep(reader.getAttributeValue(0), queue);
                    } else if ("task".equals(eName)) {
                        task = new Task(reader.getAttributeValue(0));
                    } else if ("workflow".equals(eName)) {
                        type = "power";
                    } else if ("notify".equals(eName)) {
                        type = reader.getAttributeValue(0);
                    }
                } else if (event == CHARACTERS) {
                    // text content belongs to the current task: either a power
                    // ("workflow" element) or a contact for the current notify status
                    if (task != null) {
                        if ("power".equals(type)) {
                            task.addPower(reader.getText());
                        } else {
                            task.addContact(type, reader.getText());
                        }
                    }
                } else if (event == END_ELEMENT) {
                    // NOTE(review): assumes well-formed config - a <task> outside a
                    // <flowstep> (etc.) would NPE here
                    String eName = reader.getLocalName();
                    if ("task".equals(eName)) {
                        flowStep.addTask(task);
                        task = null;
                    } else if ("flowstep".equals(eName)) {
                        taskSet.addStep(flowStep);
                    } else if ("taskset".equals(eName)) {
                        setMap.put(taskSet.setName, taskSet);
                    }
                }
            }
            reader.close();
            // stitch maps together: resolve each collection mapping to its task set
            for (Map.Entry<String, String> collEntry : collMap.entrySet()) {
                if (! "none".equals(collEntry.getValue()) && setMap.containsKey(collEntry.getValue())) {
                    tsMap.put(collEntry.getKey(), setMap.get(collEntry.getValue()));
                }
            }
        } catch (XMLStreamException xsE) {
            throw new IOException(xsE.getMessage(), xsE);
        } finally {
            // ensure the stream is released even if parsing fails - closing the
            // XMLStreamReader does not close the underlying stream
            if (fis != null) {
                try {
                    fis.close();
                } catch (IOException ioE) {
                    log.warn("Failed to close " + cfgFile.getAbsolutePath(), ioE);
                }
            }
        }
    }

    /** A named set of flow steps, mapped to one or more collections. */
    private static class TaskSet {
        public String setName = null;
        public List<FlowStep> steps = null;

        public TaskSet(String setName) {
            this.setName = setName;
            steps = new ArrayList<FlowStep>();
        }

        public void addStep(FlowStep step) {
            steps.add(step);
        }
    }

    /** The tasks to run at one workflow step, optionally on a named queue. */
    private static class FlowStep {
        public int step = -1;
        public String queue = null;
        public List<Task> tasks = null;

        public FlowStep(String stepStr, String queueStr) {
            this.step = stepName2step(stepStr);
            this.queue = queueStr;
            tasks = new ArrayList<Task>();
        }

        public void addTask(Task task) {
            tasks.add(task);
        }
    }

    /** A configured task: its name, powers, and per-status contact lists. */
    private static class Task {
        public String name = null;
        public List<String> powers = new ArrayList<String>();
        public Map<String, List<String>> contacts = new HashMap<String, List<String>>();

        public Task(String name) {
            this.name = name;
        }

        public void addPower(String power) {
            powers.add(power);
        }

        public void addContact(String status, String contact) {
            List<String> sContacts = contacts.get(status);
            if (sContacts == null) {
                sContacts = new ArrayList<String>();
                contacts.put(status, sContacts);
            }
            sContacts.add(contact);
        }

        /** Returns the contacts for a status, or an empty list if none. */
        public List<String> getContacts(String status) {
            List<String> ret = contacts.get(status);
            return (ret != null) ? ret : new ArrayList<String>();
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
/**
* AbstractCurationTask encapsulates a few common patterns of task use,
* resources, and convenience methods.
*
* @author richardrodgers
*/
public abstract class AbstractCurationTask implements CurationTask
{
// invoking curator
protected Curator curator = null;
// curator-assigned taskId
protected String taskId = null;
@Override
public void init(Curator curator, String taskId) throws IOException
{
this.curator = curator;
this.taskId = taskId;
}
@Override
public abstract int perform(DSpaceObject dso) throws IOException;
/**
* Distributes a task through a DSpace container - a convenience method
* for tasks declaring the <code>@Distributive</code> property. Users must
* override the 'performItem' invoked by this method.
*
* @param dso
* @throws IOException
*/
protected void distribute(DSpaceObject dso) throws IOException
{
try
{
int type = dso.getType();
if (Constants.ITEM == type)
{
performItem((Item)dso);
}
else if (Constants.COLLECTION == type)
{
ItemIterator iter = ((Collection)dso).getItems();
while (iter.hasNext())
{
performItem(iter.next());
}
}
else if (Constants.COMMUNITY == type)
{
Community comm = (Community)dso;
for (Community subcomm : comm.getSubcommunities())
{
distribute(subcomm);
}
for (Collection coll : comm.getCollections())
{
distribute(coll);
}
}
}
catch (SQLException sqlE)
{
throw new IOException(sqlE.getMessage(), sqlE);
}
}
/**
* Performs task upon an Item. Must be overridden if <code>distribute</code>
* method is used.
*
* @param item
* @throws SQLException
* @throws IOException
*/
protected void performItem(Item item) throws SQLException, IOException
{
// no-op - override when using 'distribute' method
}
@Override
public int perform(Context ctx, String id) throws IOException
{
DSpaceObject dso = dereference(ctx, id);
return (dso != null) ? perform(dso) : Curator.CURATE_FAIL;
}
/**
* Returns a DSpaceObject for passed identifier, if it exists
*
* @param ctx
* DSpace context
* @param id
* canonical id of object
* @return dso
* DSpace object, or null if no object with id exists
* @throws IOException
*/
protected DSpaceObject dereference(Context ctx, String id) throws IOException
{
try
{
return HandleManager.resolveToObject(ctx, id);
}
catch (SQLException sqlE)
{
throw new IOException(sqlE.getMessage(), sqlE);
}
}
/**
* Sends message to the reporting stream
*
* @param message
* the message to stream
*/
protected void report(String message)
{
curator.report(message);
}
/**
* Assigns the result of the task performance
*
* @param result
* the result string
*/
protected void setResult(String result)
{
curator.setResult(taskId, result);
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Annotation type for CurationTasks. A task is distributive if it
 * distributes its performance to the component parts of its target object.
 * This usually implies container iteration.
 *
 * @author richardrodgers
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Distributive
{
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.dspace.text.filter.*;
import org.dspace.sort.AbstractTextFilterOFD;
/**
* MARC 21 title ordering delegate implementation
*
* @author Graham Triggs
*/
public class OrderFormatTitleMarc21 extends AbstractTextFilterOFD
{
    {
        // Filter order is significant: strip the MARC 21 initial article word
        // first, then decompose diacritics, strip leading non-alphanumerics,
        // and finally lowercase and trim.
        filters = new TextFilter[] { new MARC21InitialArticleWord(),
                                     new DecomposeDiactritics(),
                                     new StripLeadingNonAlphaNum(),
                                     new LowerCaseAndTrim() };
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
/**
* Standard date ordering delegate implementation. The only "special" need is
* for treat with date with only "small" year < 4 digit
*
* @author Andrea Bollini
*/
public class OrderFormatDate implements OrderFormatDelegate
{
    /**
     * Left-pads the year portion of a date string with zeros so that years
     * of fewer than four digits sort correctly (e.g. "87" -> "0087",
     * "687-11-24" -> "0687-11-24"); other values pass through unchanged.
     *
     * @param value the date string
     * @param language ignored for dates
     * @return the zero-padded sort string
     */
    public String makeSortString(String value, String language)
    {
        int hyphenIdx = value.indexOf('-');
        int pad = 0;

        if (hyphenIdx >= 0 && hyphenIdx < 4)
        {
            // year portion before the first hyphen is under 4 digits
            pad = 4 - hyphenIdx;
        }
        else if (value.length() < 4)
        {
            // bare year of fewer than 4 digits
            pad = 4 - value.length();
        }

        if (pad == 0)
        {
            return value;
        }

        StringBuilder padded = new StringBuilder();
        for (int i = 0; i < pad; i++)
        {
            padded.append('0');
        }
        return padded.append(value).toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
/**
* Class to mediate with the sort configuration
*
* @author Richard Jones
*
*/
public class SortOption
{
    private static final Logger log = Logger.getLogger(SortOption.class);

    public static final String ASCENDING = "ASC";
    public static final String DESCENDING = "DESC";

    /** the sort configuration number */
    private int number;

    /** the name of the sort */
    private String name;

    /** the metadata field to sort on */
    private String metadata;

    /** the type of data we are sorting by */
    private String type;

    /** the metadata broken down into bits for convenience */
    private String[] mdBits;

    /** should the sort option be visible for user selection */
    private boolean visible;

    /** the sort options available for this index */
    private static Set<SortOption> sortOptionsSet = null;

    static {
        try
        {
            // Load all webui.itemlist.sort-option.<n> definitions, starting at 1
            Set<SortOption> newSortOptionsSet = new HashSet<SortOption>();
            int idx = 1;
            String option;

            while ((option = ConfigurationManager.getProperty("webui.itemlist.sort-option." + idx)) != null)
            {
                SortOption so = new SortOption(idx, option);
                newSortOptionsSet.add(so);
                idx++;
            }

            SortOption.sortOptionsSet = newSortOptionsSet;
        }
        catch (SortException se)
        {
            log.fatal("Unable to load SortOptions", se);
        }
    }

    /**
     * Construct a new SortOption object with the given parameters
     *
     * @param number the sort configuration number
     * @param name the name of the sort
     * @param md the metadata field to sort on (schema.element[.qualifier])
     * @param type the type of data being sorted
     * @throws SortException if the metadata field is malformed
     */
    public SortOption(int number, String name, String md, String type)
            throws SortException
    {
        this.name = name;
        this.type = type;
        this.metadata = md;
        this.number = number;
        this.visible = true;
        generateMdBits();
    }

    /**
     * Construct a new SortOption object using the definition from the configuration
     *
     * @param number the sort configuration number
     * @param definition configured value of form "name:metadata:type[:hide]"
     * @throws SortException if the definition does not match the expected form
     */
    public SortOption(int number, String definition)
            throws SortException
    {
        this.number = number;

        String rx = "(\\w+):([\\w\\.\\*]+):(\\w+):?(\\w*)";
        Pattern pattern = Pattern.compile(rx);
        Matcher matcher = pattern.matcher(definition);

        if (!matcher.matches())
        {
            throw new SortException("Sort Order configuration is not valid: webui.itemlist.sort-option." +
                    number + " = " + definition);
        }

        name = matcher.group(1);
        metadata = matcher.group(2);
        type = matcher.group(3);

        // If the option is configured to be hidden, then set the visible flag to false
        // otherwise, flag it as visible (true)
        if (matcher.groupCount() > 3 && "hide".equalsIgnoreCase(matcher.group(4)))
        {
            visible = false;
        }
        else
        {
            visible = true;
        }

        generateMdBits();
    }

    /**
     * @return Returns the metadata.
     */
    public String getMetadata()
    {
        return metadata;
    }

    /**
     * @param metadata The metadata to set.
     */
    public void setMetadata(String metadata)
    {
        this.metadata = metadata;
    }

    /**
     * @return Returns the name.
     */
    public String getName()
    {
        return name;
    }

    /**
     * @param name The name to set.
     */
    public void setName(String name)
    {
        this.name = name;
    }

    /**
     * @return Returns the type.
     */
    public String getType()
    {
        return type;
    }

    /**
     * @param type The type to set.
     */
    public void setType(String type)
    {
        this.type = type;
    }

    /**
     * @return Returns the number.
     */
    public int getNumber()
    {
        return number;
    }

    /**
     * @param number The number to set.
     */
    public void setNumber(int number)
    {
        this.number = number;
    }

    /**
     * Should this sort option be made visible in the UI
     * @return true if visible, false otherwise
     */
    public boolean isVisible()
    {
        return visible;
    }

    /**
     * @return a copy of the 3 element array of the metadata bits
     */
    public String[] getMdBits()
    {
        return (String[]) ArrayUtils.clone(mdBits);
    }

    /**
     * Tell the class to generate the metadata bits
     *
     * @throws SortException if the metadata field is malformed
     */
    private void generateMdBits()
            throws SortException
    {
        try
        {
            mdBits = interpretField(metadata, null);
        }
        catch (IOException e)
        {
            throw new SortException(e);
        }
    }

    /**
     * Take a string representation of a metadata field, and return it as an array.
     * This is just a convenient utility method to basically break the metadata
     * representation up by its delimiter (.), and stick it in an array, inserting
     * the value of the init parameter when there is no metadata field part.
     *
     * @param mfield the string representation of the metadata
     * @param init the default value of the array elements
     * @return a three element array with schema, element and qualifier respectively
     * @throws IOException if the field has fewer than 2 or more than 3 components
     */
    public final String[] interpretField(String mfield, String init)
            throws IOException
    {
        StringTokenizer sta = new StringTokenizer(mfield, ".");
        String[] field = {init, init, init};

        int i = 0;
        while (sta.hasMoreTokens())
        {
            if (i >= field.length)
            {
                // previously this overflowed with an ArrayIndexOutOfBoundsException;
                // report the configuration error explicitly instead
                throw new IOException("the metadata field may have at most schema, " +
                        "element and qualifier components. You supplied: " + mfield);
            }
            field[i++] = sta.nextToken();
        }

        // error checks to make sure we have at least a schema and element for both
        if (field[0] == null || field[1] == null)
        {
            throw new IOException("at least a schema and element must be " +
                    "specified in configuration. You supplied: " + mfield);
        }

        return field;
    }

    /**
     * Is this a date field
     *
     * @return true if the type is "date"
     */
    public boolean isDate()
    {
        return "date".equals(type);
    }

    /**
     * Is the default sort option
     *
     * @return true if this is option number 0
     */
    public boolean isDefault()
    {
        return number == 0;
    }

    /**
     * Return all the configured sort options
     * @return the set of configured options
     * @throws SortException if the options failed to load at startup
     */
    public static Set<SortOption> getSortOptions() throws SortException
    {
        if (SortOption.sortOptionsSet == null)
        {
            throw new SortException("Sort options not loaded");
        }

        return SortOption.sortOptionsSet;
    }

    /**
     * Get the defined sort option by number (.1, .2, etc)
     * @param number the configured option number
     * @return the matching option, or null if none matches
     * @throws SortException if the options failed to load at startup
     */
    public static SortOption getSortOption(int number) throws SortException
    {
        for (SortOption so : SortOption.getSortOptions())
        {
            if (so.getNumber() == number)
            {
                return so;
            }
        }

        return null;
    }

    /**
     * Get the default sort option - initially, just the first one defined
     * @return the first configured option, or null if none are configured
     * @throws SortException if the options failed to load at startup
     */
    public static SortOption getDefaultSortOption() throws SortException
    {
        for (SortOption so : getSortOptions())
        {
            return so;
        }

        return null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.apache.log4j.Logger;
import org.dspace.text.filter.TextFilter;
import org.dspace.sort.OrderFormatDelegate;
/**
* Helper class for creating order delegates.
*
* To configure the filters create a subclass and, in an object initializer,
* create an array of classes that implement TextFilter:
*
* class MyLocaleDelegate extends AbstractTextFilterOFD {
* {
* filters = new TextFilter[] { new LocaleOrderingFilter(); }
* }
* }
*
* The order they are in the array, is the order that they are executed.
* (this may be important for some filters - read their documentation!)
*
* Example configurations that could be used:
* { new DecomposeDiactritics(), new StripDiacritics(), new LowerCaseAndTrim() }
* - Decompose and then strip the diacritics, lowercase and trim the string.
*
* { new MARC21InitialArticleWord(), new DecomposeDiactritics(), new LowerCaseTrim() }
* - Parse the initial article words based on the Library of Congress list of
* definite/indefinite article words, decompose diacritics, and lowercase/trim.
*
* { new LowerCaseTrim(), new LocaleOrderingFilter() }
* - Lowercase the string, then make a locale dependent sort text
* (note that the sort text is not human readable)
*
* @author Graham Triggs
*/
public abstract class AbstractTextFilterOFD implements OrderFormatDelegate
{
    private static final Logger log = Logger.getLogger(AbstractTextFilterOFD.class);

    // Initialised in subclass in an object initializer
    protected TextFilter[] filters;

    /**
     * Prepare the appropriate sort string for the given value in the
     * given language, by passing the value through each configured filter
     * in turn. Language should be supplied with the ISO-639-1
     * or ISO-639-2 standards. For example "en" or "eng".
     *
     * @param value the string value
     * @param language the language to interpret in
     * @return the filtered sort string
     */
    public String makeSortString(String value, String language)
    {
        if (filters == null)
        {
            // Misconfigured subclass: log and return the value unfiltered
            log.error("No filters defined for " + this.getClass().getName());
            return value;
        }

        String lang = normalizeLanguage(language);
        for (TextFilter filter : filters)
        {
            value = (lang != null) ? filter.filter(value, lang)
                                   : filter.filter(value);
        }
        return value;
    }

    /**
     * Reduces a language tag such as "en_US" or "english" to a bare two-
     * or three-character code; returns null for null input.
     */
    private static String normalizeLanguage(String language)
    {
        if (language == null)
        {
            return null;
        }
        if (language.length() > 2 && language.charAt(2) == '_')
        {
            language = language.substring(0, 2);
        }
        if (language.length() > 3)
        {
            language = language.substring(0, 3);
        }
        return language;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.StandardInitialArticleWord;
import org.dspace.text.filter.TextFilter;
import org.dspace.sort.AbstractTextFilterOFD;
/**
* Standard title ordering delegate implementation
*
* @author Graham Triggs
*/
public class OrderFormatTitle extends AbstractTextFilterOFD
{
    {
        // Filter order is significant: strip the initial article word first,
        // then decompose diacritics, then lowercase and trim.
        filters = new TextFilter[] { new StandardInitialArticleWord(),
                                     new DecomposeDiactritics(),
                                     new LowerCaseAndTrim() };
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
/**
* Just a quick SortException class to give us the relevant data type
*/
public class SortException extends Exception
{
    /** Creates an exception with neither message nor cause. */
    public SortException()
    {
    }

    /**
     * Creates an exception with a descriptive message.
     *
     * @param message description of the failure
     */
    public SortException(String message)
    {
        super(message);
    }

    /**
     * Creates an exception with a message and an underlying cause.
     *
     * @param message description of the failure
     * @param cause the underlying exception
     */
    public SortException(String message, Throwable cause)
    {
        super(message, cause);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause the underlying exception
     */
    public SortException(Throwable cause)
    {
        super(cause);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.TextFilter;
import org.dspace.sort.AbstractTextFilterOFD;
/**
* Standard text ordering delegate implementation
*
* @author Graham Triggs
*/
public class OrderFormatText extends AbstractTextFilterOFD
{
    {
        // Filter order is significant: decompose diacritics before
        // lowercasing and trimming.
        filters = new TextFilter[] { new DecomposeDiactritics(),
                                     new LowerCaseAndTrim() };
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
/**
* Interface for browse order delegates
*
* @author Graham Triggs
*/
public interface OrderFormatDelegate
{
    /**
     * Prepare the appropriate sort string for the given value in the
     * given language. Language should be supplied with the ISO-639-1
     * or ISO-639-2 standards. For example "en" or "eng".
     *
     * @param value the string value
     * @param language the language to interpret in
     * @return the sort string derived from the value
     */
    public String makeSortString(String value, String language);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.dspace.core.PluginManager;
/**
* Class implementing static helpers for anywhere that interacts with the sort columns
* (ie. ItemsByAuthor.sort_author, ItemsByTitle.sort_title)
*
* This class maps index 'types' to delegates that implement the sort string creation
*
* Types can be defined or configured using the plugin manager:
*
* plugin.named.org.dspace.sort.OrderFormatDelegate=
* org.dspace.sort.OrderFormatTitleMarc21=title
* org.dspace.sort.OrderFormatAuthor=author
*
* The following standard types have been defined by default, but can be reconfigured
* via the plugin manager:
*
* author = org.dspace.sort.OrderFormatAuthor
* title = org.dspace.sort.OrderFormatTitle
* text = org.dspace.sort.OrderFormatText
*
* IMPORTANT - If you change any of the orderings, you need to rebuild the browse sort columns
* (ie. run 'index-all', or 'dsrun org.dspace.browse.InitializeBrowse')
*
* @author Graham Triggs
* @version $Revision: 5844 $
*/
public class OrderFormat
{
    public static final String AUTHOR = "author";
    public static final String TITLE = "title";
    public static final String TEXT = "text";
    public static final String DATE = "date";
    public static final String AUTHORITY = "authority";

    // Array of all available order delegates - avoids excessive calls to plugin manager
    private static final String[] delegates = PluginManager.getAllPluginNames(OrderFormatDelegate.class);

    // Built-in fallback delegates, used when no plugin is configured for a type
    private static final OrderFormatDelegate authorDelegate = new OrderFormatAuthor();
    private static final OrderFormatDelegate titleDelegate = new OrderFormatTitle();
    private static final OrderFormatDelegate textDelegate = new OrderFormatText();
    private static final OrderFormatDelegate dateDelegate = new OrderFormatDate();
    private static final OrderFormatDelegate authorityDelegate = new OrderFormatText();

    /**
     * Generate a sort string for the given DC metadata. A configured plugin
     * delegate for the type is preferred; otherwise one of the built-in
     * defaults is used; if the type is unknown the value passes through.
     */
    public static String makeSortString(String value, String language, String type)
    {
        // If there is no value, return null
        if (value == null)
        {
            return null;
        }

        // No named index supplied - pass the value through untouched
        if (type == null || type.length() == 0)
        {
            return value;
        }

        // Use a delegate if one is configured, else fall back to the defaults
        OrderFormatDelegate delegate = getDelegate(type);
        if (delegate == null)
        {
            delegate = getDefaultDelegate(type);
        }

        return (delegate != null) ? delegate.makeSortString(value, language) : value;
    }

    /**
     * Returns the built-in delegate for a standard type name
     * (case-insensitive), or null for an unrecognized type.
     */
    private static OrderFormatDelegate getDefaultDelegate(String type)
    {
        if (AUTHOR.equalsIgnoreCase(type))
        {
            return authorDelegate;
        }
        if (TITLE.equalsIgnoreCase(type))
        {
            return titleDelegate;
        }
        if (TEXT.equalsIgnoreCase(type))
        {
            return textDelegate;
        }
        if (DATE.equalsIgnoreCase(type))
        {
            return dateDelegate;
        }
        if (AUTHORITY.equalsIgnoreCase(type))
        {
            return authorityDelegate;
        }
        return null;
    }

    /**
     * Retrieve the named delegate from the plugin manager, or null if no
     * delegate is configured under that name.
     */
    private static OrderFormatDelegate getDelegate(String name)
    {
        if (name == null || name.length() == 0)
        {
            return null;
        }

        // Check the cached array of names to see if the delegate has been configured
        for (String candidate : delegates)
        {
            if (candidate.equals(name))
            {
                return (OrderFormatDelegate) PluginManager.getNamedPlugin(OrderFormatDelegate.class, name);
            }
        }
        return null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.TextFilter;
import org.dspace.sort.AbstractTextFilterOFD;
/**
* Standard author ordering delegate implementation
*
* @author Graham Triggs
*/
public class OrderFormatAuthor extends AbstractTextFilterOFD
{
    // Instance initialiser: configure the text filter chain consumed by the
    // AbstractTextFilterOFD base class. Order matters - diacritics are
    // decomposed first, then the value is lower-cased and trimmed.
    {
        filters = new TextFilter[] {
            new DecomposeDiactritics(),
            new LowerCaseAndTrim()
        };
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.SQLException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Abstract base class for DSpace objects
*/
public abstract class DSpaceObject
{
    // Accumulates information to add to the "detail" element of a content
    // Event, e.g. to document metadata fields touched, etc.
    // StringBuilder replaces the former StringBuffer: the cache is private
    // to this object and nothing in this class shares it across threads,
    // so StringBuffer's per-call synchronization is unnecessary overhead.
    private StringBuilder eventDetails = null;

    /**
     * Reset the cache of event details.
     */
    protected void clearDetails()
    {
        eventDetails = null;
    }

    /**
     * Add a string to the cache of event details.  Automatically
     * separates entries with a comma.
     * Subclass can just start calling addDetails, since it creates
     * the cache if it needs to.
     *
     * @param d detail string to add.
     */
    protected void addDetails(String d)
    {
        if (eventDetails == null)
        {
            // Lazily create the cache on first use
            eventDetails = new StringBuilder(d);
        }
        else
        {
            eventDetails.append(", ").append(d);
        }
    }

    /**
     * @return summary of event details, or null if there are none.
     */
    protected String getDetails()
    {
        return (eventDetails == null ? null : eventDetails.toString());
    }

    /**
     * Get the type of this object, found in Constants
     *
     * @return type of the object
     */
    public abstract int getType();

    /**
     * Get the internal ID (database primary key) of this object
     *
     * @return internal ID of object
     */
    public abstract int getID();

    /**
     * Get the Handle of the object. This may return <code>null</code>
     *
     * @return Handle of the object, or <code>null</code> if it doesn't have
     *         one
     */
    public abstract String getHandle();

    /**
     * Get a proper name for the object. This may return <code>null</code>.
     * Name should be suitable for display in a user interface.
     *
     * @return Name for the object, or <code>null</code> if it doesn't have
     *         one
     */
    public abstract String getName();

    /**
     * Generic find for when the precise type of a DSO is not known, just the
     * pair of type number and database ID.
     *
     * @param context - the context
     * @param type - type number
     * @param id - id within table of type'd objects
     * @return the object found, or null if it does not exist.
     * @throws SQLException only upon failure accessing the database.
     */
    public static DSpaceObject find(Context context, int type, int id)
            throws SQLException
    {
        // Dispatch on the object type constant; each case delegates to the
        // concrete class's own finder.
        switch (type)
        {
            case Constants.BITSTREAM : return Bitstream.find(context, id);
            case Constants.BUNDLE    : return Bundle.find(context, id);
            case Constants.ITEM      : return Item.find(context, id);
            case Constants.COLLECTION: return Collection.find(context, id);
            case Constants.COMMUNITY : return Community.find(context, id);
            case Constants.GROUP     : return Group.find(context, id);
            case Constants.EPERSON   : return EPerson.find(context, id);
            case Constants.SITE      : return Site.find(context, id);
        }
        // Unknown type number
        return null;
    }

    /**
     * Return the dspace object where an ADMIN action right is sufficient to
     * grant the initial authorize check.
     * <p>
     * Default behaviour is ADMIN right on the object grant right on all other
     * action on the object itself. Subclass should override this method as
     * need.
     *
     * @param action
     *            ID of action being attempted, from
     *            <code>org.dspace.core.Constants</code>. The ADMIN action is
     *            not a valid parameter for this method, an
     *            IllegalArgumentException should be thrown
     * @return the dspace object, if any, where an ADMIN action is sufficient to
     *         grant the original action
     * @throws SQLException
     * @throws IllegalArgumentException
     *             if the ADMIN action is supplied as parameter of the method
     *             call
     */
    public DSpaceObject getAdminObject(int action) throws SQLException
    {
        if (action == Constants.ADMIN)
        {
            throw new IllegalArgumentException("Illegal call to the DSpaceObject.getAdminObject method");
        }
        return this;
    }

    /**
     * Return the dspace object that "own" the current object in the hierarchy.
     * Note that this method has a meaning slightly different from the
     * getAdminObject because it is independent of the action but it is in a way
     * related to it. It defines the "first" dspace object <b>OTHER</b> then the
     * current one, where allowed ADMIN actions imply allowed ADMIN actions on
     * the object self.
     *
     * @return the dspace object that "own" the current object in
     *         the hierarchy
     * @throws SQLException
     */
    public DSpaceObject getParentObject() throws SQLException
    {
        // No parent by default; subclasses override where a hierarchy exists
        return null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.Serializable;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.dspace.sort.OrderFormat;
/**
* Compare two Items by their DCValues.
*
* The DCValues to be compared are specified by the element, qualifier and
* language parameters to the constructor. If the Item has more than one
* matching DCValue, then the max parameter to the constructor specifies whether
* the maximum or minimum lexicographic value will be used.
*
* @author Peter Breton
* @version $Revision: 5844 $
*/
public class ItemComparator implements Comparator, Serializable
{
    /** Dublin Core element to compare on */
    private String element;

    /** Dublin Core qualifier to compare on */
    private String qualifier;

    /** Language of the values to compare */
    private String language;

    /** If true, pick the lexicographic maximum of multiple values; else the minimum */
    private boolean max;

    /**
     * Constructor.
     *
     * @param element
     *            The Dublin Core element
     * @param qualifier
     *            The Dublin Core qualifier
     * @param language
     *            The language for the DCValues
     * @param max
     *            If true, and there is more than one DCValue for element,
     *            qualifier and language, then use the maximum value
     *            lexicographically; otherwise use the minimum value.
     */
    public ItemComparator(String element, String qualifier, String language,
            boolean max)
    {
        this.element = element;
        this.qualifier = qualifier;
        this.language = language;
        this.max = max;
    }

    /**
     * Compare two Items by checking their DCValues for element, qualifier,
     * and language. A missing (null) value sorts before a present one.
     *
     * @param first the first object; must be an org.dspace.content.Item
     * @param second the second object; must be an org.dspace.content.Item
     * @return a positive value if first sorts after second, a negative value
     *         if it sorts before, and 0 if they are equivalent
     * @throws IllegalArgumentException if either argument is not an Item
     */
    public int compare(Object first, Object second)
    {
        if (!(first instanceof Item) || !(second instanceof Item))
        {
            throw new IllegalArgumentException("Arguments must be Items");
        }

        // Reduce each item to its single chosen metadata value
        String left = getValue((Item) first);
        String right = getValue((Item) second);

        if (left == null)
        {
            return (right == null) ? 0 : -1;
        }
        if (right == null)
        {
            return 1;
        }

        // Plain lexicographic comparison of the chosen values
        return left.compareTo(right);
    }

    /**
     * Two ItemComparators are equal when they have the same element,
     * qualifier, language and max settings.
     *
     * @param obj the object to compare to
     * @return true if the other object is equal to this one
     */
    public boolean equals(Object obj)
    {
        if (!(obj instanceof ItemComparator))
        {
            return false;
        }

        ItemComparator that = (ItemComparator) obj;
        return (max == that.max)
                && equalsWithNull(element, that.element)
                && equalsWithNull(qualifier, that.qualifier)
                && equalsWithNull(language, that.language);
    }

    public int hashCode()
    {
        return new HashCodeBuilder().append(element).append(qualifier).append(language).append(max).toHashCode();
    }

    /**
     * Null-tolerant string equality: two nulls are equal, a null never
     * equals a non-null.
     */
    private boolean equalsWithNull(String first, String second)
    {
        if (first == null)
        {
            return second == null;
        }
        return first.equals(second);
    }

    /**
     * Choose the canonical value from an item for comparison. Returns null
     * when there are no values, the sole value when there is exactly one,
     * and otherwise the lexicographic maximum or minimum according to the
     * constructor's max flag.
     *
     * @param item the item to inspect
     * @return the chosen value, or null
     */
    private String getValue(Item item)
    {
        // The overall array and each element are guaranteed non-null
        DCValue[] candidates = item.getDC(element, qualifier, language);

        if (candidates.length == 0)
        {
            return null;
        }
        if (candidates.length == 1)
        {
            return normalizeTitle(candidates[0]);
        }

        // Remember which DCValue each string came from. For duplicate
        // strings the later occurrence wins (Map.put overwrite semantics).
        Map<String, Integer> indexByValue = new HashMap<String, Integer>();
        for (int i = 0; i < candidates.length; i++)
        {
            if (candidates[i].value != null)
            {
                indexByValue.put(candidates[i].value, Integer.valueOf(i));
            }
        }

        if (indexByValue.isEmpty())
        {
            return null;
        }

        Set<String> distinct = indexByValue.keySet();
        String chosen = max ? Collections.max(distinct) : Collections.min(distinct);
        return normalizeTitle(candidates[indexByValue.get(chosen).intValue()]);
    }

    /**
     * Normalize a DCValue for sorting. Only title values are transformed;
     * everything else is compared verbatim.
     */
    private String normalizeTitle(DCValue value)
    {
        if ("title".equals(element))
        {
            return OrderFormat.makeSortString(value.value, value.language, OrderFormat.TITLE);
        }
        return value.value;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.embargo.EmbargoManager;
import org.dspace.event.Event;
import org.dspace.handle.HandleManager;
/**
* Support to install an Item in the archive.
*
* @author dstuve
* @version $Revision: 5844 $
*/
public class InstallItem
{
    /**
     * Take an InProgressSubmission and turn it into a fully-archived Item,
     * creating a new Handle.
     *
     * @param c
     *            DSpace Context
     * @param is
     *            submission to install
     *
     * @return the fully archived Item
     */
    public static Item installItem(Context c, InProgressSubmission is)
            throws SQLException, IOException, AuthorizeException
    {
        // Delegate with no supplied handle so a fresh one is minted
        return installItem(c, is, null);
    }

    /**
     * Take an InProgressSubmission and turn it into a fully-archived Item.
     *
     * @param c current context
     * @param is
     *            submission to install
     * @param suppliedHandle
     *            the existing Handle to give to the installed item
     *
     * @return the fully archived Item
     */
    public static Item installItem(Context c, InProgressSubmission is,
            String suppliedHandle) throws SQLException,
            IOException, AuthorizeException
    {
        Item item = is.getItem();
        String handle;

        // if no previous handle supplied, create one
        if (suppliedHandle == null)
        {
            // create a new handle for this item
            handle = HandleManager.createHandle(c, item);
        }
        else
        {
            // assign the supplied handle to this item
            handle = HandleManager.createHandle(c, item, suppliedHandle);
        }

        // Record the handle as an identifier.uri metadata value
        populateHandleMetadata(item, handle);

        // this is really just to flush out fatal embargo metadata
        // problems before we set inArchive.
        DCDate liftDate = EmbargoManager.getEmbargoDate(c, item);

        populateMetadata(c, item, liftDate);

        return finishItem(c, item, is, liftDate);
    }

    /**
     * Turn an InProgressSubmission into a fully-archived Item, for
     * a "restore" operation such as ingestion of an AIP to recreate an
     * archive.  This does NOT add any descriptive metadata (e.g. for
     * provenance) to preserve the transparency of the ingest.  The
     * ingest mechanism is assumed to have set all relevant technical
     * and administrative metadata fields.
     *
     * @param c current context
     * @param is
     *            submission to install
     * @param suppliedHandle
     *            the existing Handle to give the installed item, or null
     *            to create a new one.
     *
     * @return the fully archived Item
     */
    public static Item restoreItem(Context c, InProgressSubmission is,
            String suppliedHandle)
            throws SQLException, IOException, AuthorizeException
    {
        Item item = is.getItem();
        String handle;

        // if no handle supplied
        if (suppliedHandle == null)
        {
            // create a new handle for this item
            handle = HandleManager.createHandle(c, item);
            //only populate handle metadata for new handles
            // (existing handles should already be in the metadata -- as it was restored by ingest process)
            populateHandleMetadata(item, handle);
        }
        else
        {
            // assign the supplied handle to this item
            handle = HandleManager.createHandle(c, item, suppliedHandle);
        }

        //NOTE: this method specifically skips over "populateMetadata()"
        // As this is a "restore" all the metadata should have already been restored

        //@TODO: Do we actually want a "Restored on ..." provenance message? Or perhaps kick off an event?

        // No embargo lift date: restored items keep whatever access state they had
        return finishItem(c, item, is, null);
    }

    /**
     * Record the item's handle as an identifier.uri Dublin Core value,
     * unless that exact value is already present on the item.
     *
     * @param item the item being installed
     * @param handle the handle assigned to the item
     */
    private static void populateHandleMetadata(Item item, String handle)
            throws SQLException, IOException, AuthorizeException
    {
        String handleref = HandleManager.getCanonicalForm(handle);

        // Add handle as identifier.uri DC value.
        // First check that identifier doesn't already exist.
        boolean identifierExists = false;
        DCValue[] identifiers = item.getDC("identifier", "uri", Item.ANY);
        for (DCValue identifier : identifiers)
        {
            if (handleref.equals(identifier.value))
            {
                identifierExists = true;
            }
        }
        if (!identifierExists)
        {
            item.addDC("identifier", "uri", null, handleref);
        }
    }

    // fill in metadata needed by new Item: accession date, availability date
    // (unless embargoed), issue date if missing, and a provenance record.
    private static void populateMetadata(Context c, Item item, DCDate embargoLiftDate)
            throws SQLException, IOException, AuthorizeException
    {
        // create accession date
        DCDate now = DCDate.getCurrent();
        item.addDC("date", "accessioned", null, now.toString());

        // add date available if not under embargo, otherwise it will
        // be set when the embargo is lifted.
        if (embargoLiftDate == null)
        {
            item.addDC("date", "available", null, now.toString());
        }

        // create issue date if not present
        DCValue[] currentDateIssued = item.getDC("date", "issued", Item.ANY);
        if (currentDateIssued.length == 0)
        {
            // Date-only precision: the -1 arguments omit the time fields
            DCDate issued = new DCDate(now.getYear(),now.getMonth(),now.getDay(),-1,-1,-1);
            item.addDC("date", "issued", null, issued.toString());
        }

        String provDescription = "Made available in DSpace on " + now
                + " (GMT). " + getBitstreamProvenanceMessage(item);

        if (currentDateIssued.length != 0)
        {
            DCDate d = new DCDate(currentDateIssued[0].value);
            provDescription = provDescription + "  Previous issue date: "
                    + d.toString();
        }

        // Add provenance description
        item.addDC("description", "provenance", "en", provDescription);
    }

    // final housekeeping when adding new Item to archive
    // common between installing and "restoring" items.
    private static Item finishItem(Context c, Item item, InProgressSubmission is, DCDate embargoLiftDate)
            throws SQLException, IOException, AuthorizeException
    {
        // create collection2item mapping
        is.getCollection().addItem(item);

        // set owning collection
        item.setOwningCollection(is.getCollection());

        // set in_archive=true
        item.setArchived(true);

        // save changes ;-)
        item.update();

        // Notify interested parties of newly archived Item
        c.addEvent(new Event(Event.INSTALL, Constants.ITEM, item.getID(),
                item.getHandle()));

        // remove in-progress submission
        is.deleteWrapper();

        // remove the item's policies and replace them with
        // the defaults from the collection
        item.inheritCollectionDefaultPolicies(is.getCollection());

        // set embargo lift date and take away read access if indicated.
        if (embargoLiftDate != null)
        {
            EmbargoManager.setEmbargo(c, item, embargoLiftDate);
        }

        return item;
    }

    /**
     * Generate provenance-worthy description of the bitstreams contained in an
     * item.
     *
     * @param myitem  the item generate description for
     *
     * @return provenance description
     */
    public static String getBitstreamProvenanceMessage(Item myitem)
            throws SQLException
    {
        // Get non-internal format bitstreams
        Bitstream[] bitstreams = myitem.getNonInternalBitstreams();

        // Create provenance description
        StringBuilder myMessage = new StringBuilder();
        myMessage.append("No. of bitstreams: ").append(bitstreams.length).append("\n");

        // Add sizes and checksums of bitstreams
        for (int j = 0; j < bitstreams.length; j++)
        {
            myMessage.append(bitstreams[j].getName()).append(": ")
                    .append(bitstreams[j].getSize()).append(" bytes, checksum: ")
                    .append(bitstreams[j].getChecksum()).append(" (")
                    .append(bitstreams[j].getChecksumAlgorithm()).append(")\n");
        }

        return myMessage.toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Class representing a schema in DSpace.
* <p>
* The schema object exposes a name which can later be used to generate
* namespace prefixes in RDF or XML, e.g. the core DSpace Dublin Core schema
* would have a name of <code>'dc'</code>.
* </p>
*
* @author Martin Hald
* @version $Revision: 6027 $
* @see org.dspace.content.MetadataValue
* @see org.dspace.content.MetadataField
*/
public class MetadataSchema
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(MetadataSchema.class);

    /** Numeric Identifier of built-in Dublin Core schema. */
    public static final int DC_SCHEMA_ID = 1;

    /** Short Name of built-in Dublin Core schema. */
    public static final String DC_SCHEMA = "dc";

    /** The row in the table representing this type */
    private TableRow row;

    // Database primary key of this schema
    private int schemaID;

    // XML namespace URI of this schema
    private String namespace;

    // Short name of this schema, e.g. "dc"
    private String name;

    // cache of schema by ID (Integer)
    // NOTE(review): decache() nulls these maps without synchronization while
    // initCache() is synchronized; a concurrent decache between the
    // isCacheInitialized() check and a map lookup could in principle NPE --
    // confirm whether callers are effectively single-threaded here.
    private static Map<Integer, MetadataSchema> id2schema = null;

    // cache of schema by short name
    private static Map<String, MetadataSchema> name2schema = null;

    /**
     * Default constructor.
     */
    public MetadataSchema()
    {
    }

    /**
     * Object constructor.
     *
     * @param schemaID  database key ID number
     * @param namespace XML namespace URI
     * @param name      short name of schema
     */
    public MetadataSchema(int schemaID, String namespace, String name)
    {
        this.schemaID = schemaID;
        this.namespace = namespace;
        this.name = name;
    }

    /**
     * Immutable object constructor for creating a new schema.
     *
     * @param namespace XML namespace URI
     * @param name      short name of schema
     */
    public MetadataSchema(String namespace, String name)
    {
        this.namespace = namespace;
        this.name = name;
    }

    /**
     * Constructor for loading the metadata schema from the database.
     *
     * @param row table row object from which to populate this schema.
     */
    public MetadataSchema(TableRow row)
    {
        if (row != null)
        {
            this.schemaID = row.getIntColumn("metadata_schema_id");
            this.namespace = row.getStringColumn("namespace");
            this.name = row.getStringColumn("short_id");
            this.row = row;
        }
    }

    /**
     * Two schemas are equal when they are of the same class and share the
     * same schema ID and namespace.
     */
    @Override
    public boolean equals(Object obj)
    {
        if (obj == null)
        {
            return false;
        }
        if (getClass() != obj.getClass())
        {
            return false;
        }
        final MetadataSchema other = (MetadataSchema) obj;
        if (this.schemaID != other.schemaID)
        {
            return false;
        }
        // Null-safe namespace comparison
        if ((this.namespace == null) ? (other.namespace != null) : !this.namespace.equals(other.namespace))
        {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode()
    {
        // Derived from the same fields as equals(): schemaID and namespace
        int hash = 5;
        hash = 67 * hash + this.schemaID;
        hash = 67 * hash + (this.namespace != null ? this.namespace.hashCode() : 0);
        return hash;
    }

    /**
     * Get the schema namespace.
     *
     * @return namespace String
     */
    public String getNamespace()
    {
        return namespace;
    }

    /**
     * Set the schema namespace.
     *
     * @param namespace XML namespace URI
     */
    public void setNamespace(String namespace)
    {
        this.namespace = namespace;
    }

    /**
     * Get the schema name.
     *
     * @return name String
     */
    public String getName()
    {
        return name;
    }

    /**
     * Set the schema name.
     *
     * @param name short name of schema
     */
    public void setName(String name)
    {
        this.name = name;
    }

    /**
     * Get the schema record key number.
     *
     * @return schema record key
     */
    public int getSchemaID()
    {
        return schemaID;
    }

    /**
     * Creates a new metadata schema in the database, out of this object.
     *
     * @param context
     *            DSpace context object
     * @throws SQLException
     * @throws AuthorizeException if the current user is not an administrator
     * @throws NonUniqueMetadataException if the name or namespace is taken
     */
    public void create(Context context) throws SQLException,
            AuthorizeException, NonUniqueMetadataException
    {
        // Check authorisation: Only admins may create metadata schemas
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only administrators may modify the metadata registry");
        }

        // Ensure the schema name is unique
        if (!uniqueShortName(context, name))
        {
            throw new NonUniqueMetadataException("Please make the name " + name
                    + " unique");
        }

        // Ensure the schema namespace is unique
        if (!uniqueNamespace(context, namespace))
        {
            throw new NonUniqueMetadataException("Please make the namespace " + namespace
                    + " unique");
        }

        // Create a table row and update it with the values
        row = DatabaseManager.row("MetadataSchemaRegistry");
        row.setColumn("namespace", namespace);
        row.setColumn("short_id", name);
        DatabaseManager.insert(context, row);

        // invalidate our fast-find cache.
        decache();

        // Remember the new row number
        this.schemaID = row.getIntColumn("metadata_schema_id");

        log
                .info(LogManager.getHeader(context, "create_metadata_schema",
                        "metadata_schema_id="
                                + row.getIntColumn("metadata_schema_id")));
    }

    /**
     * Get the schema object corresponding to this namespace URI.
     *
     * @param context DSpace context
     * @param namespace namespace URI to match
     * @return metadata schema object or null if none found.
     * @throws SQLException
     */
    public static MetadataSchema findByNamespace(Context context,
            String namespace) throws SQLException
    {
        // Grab rows from DB
        TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataSchemaRegistry",
                "SELECT * FROM MetadataSchemaRegistry WHERE namespace= ? ",
                namespace);

        TableRow row = null;
        try
        {
            // Take the first matching row, if any
            if (tri.hasNext())
            {
                row = tri.next();
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        if (row == null)
        {
            return null;
        }
        else
        {
            return new MetadataSchema(row);
        }
    }

    /**
     * Update the metadata schema in the database.
     *
     * @param context DSpace context
     * @throws SQLException
     * @throws AuthorizeException if the current user is not an administrator
     * @throws NonUniqueMetadataException if the name or namespace is taken
     */
    public void update(Context context) throws SQLException,
            AuthorizeException, NonUniqueMetadataException
    {
        // Check authorisation: Only admins may update the metadata registry
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only administrators may modify the metadata registry");
        }

        // Ensure the schema name is unique
        if (!uniqueShortName(context, name))
        {
            throw new NonUniqueMetadataException("Please make the name " + name
                    + " unique");
        }

        // Ensure the schema namespace is unique
        if (!uniqueNamespace(context, namespace))
        {
            throw new NonUniqueMetadataException("Please make the namespace " + namespace
                    + " unique");
        }

        row.setColumn("namespace", getNamespace());
        row.setColumn("short_id", getName());
        DatabaseManager.update(context, row);

        // invalidate the fast-find cache after a DB change
        decache();

        log.info(LogManager.getHeader(context, "update_metadata_schema",
                "metadata_schema_id=" + getSchemaID() + "namespace="
                        + getNamespace() + "name=" + getName()));
    }

    /**
     * Delete the metadata schema.
     *
     * @param context DSpace context
     * @throws SQLException
     * @throws AuthorizeException if the current user is not an administrator
     */
    public void delete(Context context) throws SQLException, AuthorizeException
    {
        // Check authorisation: Only admins may create DC types
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only administrators may modify the metadata registry");
        }

        log.info(LogManager.getHeader(context, "delete_metadata_schema",
                "metadata_schema_id=" + getSchemaID()));

        DatabaseManager.delete(context, row);

        // invalidate the fast-find cache after a DB change
        decache();
    }

    /**
     * Return all metadata schemas.
     *
     * @param context DSpace context
     * @return array of metadata schemas
     * @throws SQLException
     */
    public static MetadataSchema[] findAll(Context context) throws SQLException
    {
        List<MetadataSchema> schemas = new ArrayList<MetadataSchema>();

        // Get all the metadataschema rows
        TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataSchemaRegistry",
                "SELECT * FROM MetadataSchemaRegistry ORDER BY metadata_schema_id");

        try
        {
            // Make into DC Type objects
            while (tri.hasNext())
            {
                schemas.add(new MetadataSchema(tri.next()));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        // Convert list into an array
        MetadataSchema[] typeArray = new MetadataSchema[schemas.size()];
        return (MetadataSchema[]) schemas.toArray(typeArray);
    }

    /**
     * Return true if and only if the passed namespace is not used by any
     * other schema (i.e. by any row with a different schema ID).
     *
     * @param context DSpace context
     * @param namespace namespace URI to match
     * @return true or false
     * @throws SQLException
     */
    private boolean uniqueNamespace(Context context, String namespace)
            throws SQLException
    {
        int count = 0;
        Connection con = context.getDBConnection();
        PreparedStatement statement = null;
        ResultSet rs = null;

        try
        {
            // Count rows with the same namespace but a different ID;
            // excluding our own ID makes this safe for update() as well.
            TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
            String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
                    "WHERE metadata_schema_id != ? " +
                    "AND namespace= ? ";
            statement = con.prepareStatement(query);
            statement.setInt(1,schemaID);
            statement.setString(2,namespace);

            rs = statement.executeQuery();

            if (rs.next())
            {
                count = rs.getInt(1);
            }
        }
        finally
        {
            // Close JDBC resources; a close failure is deliberately ignored
            if (rs != null)
            {
                try { rs.close(); } catch (SQLException sqle) { }
            }

            if (statement != null)
            {
                try { statement.close(); } catch (SQLException sqle) { }
            }
        }

        return (count == 0);
    }

    /**
     * Return true if and only if the passed short name is not used by any
     * other schema (i.e. by any row with a different schema ID).
     *
     * @param context DSpace context
     * @param name short name of schema
     * @return true or false
     * @throws SQLException
     */
    private boolean uniqueShortName(Context context, String name)
            throws SQLException
    {
        int count = 0;
        Connection con = context.getDBConnection();
        PreparedStatement statement = null;
        ResultSet rs = null;

        try
        {
            // Same pattern as uniqueNamespace(), keyed on short_id instead
            TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
            String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
                    "WHERE metadata_schema_id != ? " +
                    "AND short_id = ? ";
            statement = con.prepareStatement(query);
            statement.setInt(1,schemaID);
            statement.setString(2,name);

            rs = statement.executeQuery();

            if (rs.next())
            {
                count = rs.getInt(1);
            }
        }
        finally
        {
            // Close JDBC resources; a close failure is deliberately ignored
            if (rs != null)
            {
                try { rs.close(); } catch (SQLException sqle) { }
            }

            if (statement != null)
            {
                try { statement.close(); } catch (SQLException sqle) { }
            }
        }

        return (count == 0);
    }

    /**
     * Get the schema corresponding with this numeric ID.
     * The ID is a database key internal to DSpace.
     *
     * @param context
     *            context, in case we need to read it in from DB
     * @param id
     *            the schema ID
     * @return the metadata schema object
     * @throws SQLException
     */
    public static MetadataSchema find(Context context, int id)
            throws SQLException
    {
        if (!isCacheInitialized())
        {
            initCache(context);
        }

        Integer iid = Integer.valueOf(id);

        // sanity check
        if (!id2schema.containsKey(iid))
        {
            return null;
        }

        return id2schema.get(iid);
    }

    /**
     * Get the schema corresponding with this short name.
     *
     * @param context
     *            context, in case we need to read it in from DB
     * @param shortName
     *            the short name for the schema
     * @return the metadata schema object
     * @throws SQLException
     */
    public static MetadataSchema find(Context context, String shortName)
            throws SQLException
    {
        // If we are not passed a valid schema name then return
        if (shortName == null)
        {
            return null;
        }

        if (!isCacheInitialized())
        {
            initCache(context);
        }

        if (!name2schema.containsKey(shortName))
        {
            return null;
        }

        return name2schema.get(shortName);
    }

    // invalidate the cache e.g. after something modifies DB state.
    private static void decache()
    {
        id2schema = null;
        name2schema = null;
    }

    // true only when both lookup caches have been populated
    private static boolean isCacheInitialized()
    {
        return (id2schema != null && name2schema != null);
    }

    // load caches if necessary
    private static synchronized void initCache(Context context) throws SQLException
    {
        if (!isCacheInitialized())
        {
            log.info("Loading schema cache for fast finds");
            // Build into fresh maps and only publish them once complete,
            // so readers never see a partially-filled cache.
            Map<Integer, MetadataSchema> new_id2schema = new HashMap<Integer, MetadataSchema>();
            Map<String, MetadataSchema> new_name2schema = new HashMap<String, MetadataSchema>();

            TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataSchemaRegistry",
                    "SELECT * from MetadataSchemaRegistry");

            try
            {
                while (tri.hasNext())
                {
                    TableRow row = tri.next();

                    MetadataSchema s = new MetadataSchema(row);
                    new_id2schema.put(Integer.valueOf(s.schemaID), s);
                    new_name2schema.put(s.name, s);
                }
            }
            finally
            {
                // close the TableRowIterator to free up resources
                if (tri != null)
                {
                    tri.close();
                }
            }

            id2schema = new_id2schema;
            name2schema = new_name2schema;
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.service;
import org.dspace.content.dao.ItemDAO;
import org.dspace.content.dao.ItemDAOFactory;
import org.dspace.content.Bitstream;
import org.dspace.content.Thumbnail;
import org.dspace.core.Context;
import java.sql.SQLException;
public class ItemService
{
    /**
     * Locate the thumbnail for an item, if one exists.
     *
     * When the item's ORIGINAL bundle has a primary bitstream, the thumbnail
     * named after it (primary name + ".jpg") is looked up in the THUMBNAIL
     * bundle; HTML primaries never yield a thumbnail. Otherwise the first
     * THUMBNAIL bitstream is used, optionally paired with the first ORIGINAL
     * bitstream when requireOriginal is set.
     *
     * @param context the DSpace context
     * @param itemId the item whose thumbnail is wanted
     * @param requireOriginal whether to also resolve the original bitstream
     * @return the Thumbnail, or null when none is available
     * @throws SQLException on database access failure
     */
    public static Thumbnail getThumbnail(Context context, int itemId, boolean requireOriginal) throws SQLException
    {
        ItemDAO dao = ItemDAOFactory.getInstance(context);

        Bitstream original = dao.getPrimaryBitstream(itemId, "ORIGINAL");
        Bitstream thumb;

        if (original == null)
        {
            // No primary bitstream: fall back to the first bitstreams
            if (requireOriginal)
            {
                original = dao.getFirstBitstream(itemId, "ORIGINAL");
            }
            thumb = dao.getFirstBitstream(itemId, "THUMBNAIL");
        }
        else
        {
            // HTML primaries are never thumbnailed
            if (original.getFormat().getMIMEType().equals("text/html"))
            {
                return null;
            }
            // Thumbnails are named after the primary bitstream
            thumb = dao.getNamedBitstream(itemId, "THUMBNAIL", original.getName() + ".jpg");
        }

        if (thumb == null)
        {
            return null;
        }
        return new Thumbnail(thumb, original);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
 * DSpace object that represents a metadata field, which is
 * defined by a combination of schema, element, and qualifier.  Every
 * metadata element belongs in a field.
 *
 * @author Martin Hald
 * @version $Revision: 5844 $
 * @see org.dspace.content.MetadataValue
 * @see org.dspace.content.MetadataSchema
 */
public class MetadataField
{
    private int fieldID = 0;
    private int schemaID = 0;
    private String element;
    private String qualifier;
    private String scopeNote;

    /** log4j logger */
    private static Logger log = Logger.getLogger(MetadataField.class);

    /** The row in the table representing this type */
    private TableRow row;

    // Cache of fields keyed by database ID; built lazily by initCache() and
    // invalidated by decache() whenever the registry is modified.
    private static Map<Integer, MetadataField> id2field = null;

    /**
     * Default constructor.
     */
    public MetadataField()
    {
    }

    /**
     * Constructor creating a field within a schema.
     *
     * @param schema schema to which the field belongs
     */
    public MetadataField(MetadataSchema schema)
    {
        this.schemaID = schema.getSchemaID();
    }

    /**
     * Full constructor for new metadata field elements.
     *
     * @param schema schema to which the field belongs
     * @param element element of the field
     * @param qualifier qualifier of the field
     * @param scopeNote scope note of the field
     */
    public MetadataField(MetadataSchema schema, String element,
            String qualifier, String scopeNote)
    {
        this.schemaID = schema.getSchemaID();
        this.element = element;
        this.qualifier = qualifier;
        this.scopeNote = scopeNote;
    }

    /**
     * Full constructor for existing metadata field elements.
     *
     * @param schemaID schema to which the field belongs
     * @param fieldID database ID of field.
     * @param element element of the field
     * @param qualifier qualifier of the field
     * @param scopeNote scope note of the field
     */
    public MetadataField(int schemaID, int fieldID, String element,
            String qualifier, String scopeNote)
    {
        this.schemaID = schemaID;
        this.fieldID = fieldID;
        this.element = element;
        this.qualifier = qualifier;
        this.scopeNote = scopeNote;
    }

    /**
     * Constructor to load the object from the database.
     *
     * @param row database row from which to populate object.
     */
    public MetadataField(TableRow row)
    {
        if (row != null)
        {
            this.fieldID = row.getIntColumn("metadata_field_id");
            this.schemaID = row.getIntColumn("metadata_schema_id");
            this.element = row.getStringColumn("element");
            this.qualifier = row.getStringColumn("qualifier");
            this.scopeNote = row.getStringColumn("scope_note");
            this.row = row;
        }
    }

    /**
     * Get the element name.
     *
     * @return element name
     */
    public String getElement()
    {
        return element;
    }

    /**
     * Set the element name.
     *
     * @param element new value for element
     */
    public void setElement(String element)
    {
        this.element = element;
    }

    /**
     * Get the metadata field id.
     *
     * @return metadata field id
     */
    public int getFieldID()
    {
        return fieldID;
    }

    /**
     * Get the qualifier.
     *
     * @return qualifier
     */
    public String getQualifier()
    {
        return qualifier;
    }

    /**
     * Set the qualifier.
     *
     * @param qualifier new value for qualifier
     */
    public void setQualifier(String qualifier)
    {
        this.qualifier = qualifier;
    }

    /**
     * Get the schema record key.
     *
     * @return schema record key
     */
    public int getSchemaID()
    {
        return schemaID;
    }

    /**
     * Set the schema record key.
     *
     * @param schemaID new value for key
     */
    public void setSchemaID(int schemaID)
    {
        this.schemaID = schemaID;
    }

    /**
     * Get the scope note.
     *
     * @return scope note
     */
    public String getScopeNote()
    {
        return scopeNote;
    }

    /**
     * Set the scope note.
     *
     * @param scopeNote new value for scope note
     */
    public void setScopeNote(String scopeNote)
    {
        this.scopeNote = scopeNote;
    }

    /**
     * Creates a new metadata field.
     *
     * @param context
     *            DSpace context object
     * @throws IOException
     * @throws AuthorizeException if the caller is not an administrator
     * @throws SQLException
     * @throws NonUniqueMetadataException if the element/qualifier pair already
     *             exists in the schema
     */
    public void create(Context context) throws IOException, AuthorizeException,
            SQLException, NonUniqueMetadataException
    {
        // Check authorisation: Only admins may create DC types
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only administrators may modify the metadata registry");
        }

        // Ensure the element and qualifier are unique within a given schema.
        if (!unique(context, schemaID, element, qualifier))
        {
            throw new NonUniqueMetadataException("Please make " + element + "."
                    + qualifier + " unique within schema #" + schemaID);
        }

        // Create a table row and update it with the values
        row = DatabaseManager.row("MetadataFieldRegistry");
        row.setColumn("metadata_schema_id", schemaID);
        row.setColumn("element", element);
        row.setColumn("qualifier", qualifier);
        row.setColumn("scope_note", scopeNote);
        DatabaseManager.insert(context, row);
        // Registry changed: invalidate the ID cache
        decache();

        // Remember the new row number
        this.fieldID = row.getIntColumn("metadata_field_id");

        log.info(LogManager.getHeader(context, "create_metadata_field",
                "metadata_field_id=" + row.getIntColumn("metadata_field_id")));
    }

    /**
     * Retrieves the metadata field from the database.
     *
     * @param context dspace context
     * @param schemaID schema by ID
     * @param element element name
     * @param qualifier qualifier (may be ANY or null)
     * @return recalled metadata field, or null if not found
     * @throws SQLException
     * @throws AuthorizeException
     */
    public static MetadataField findByElement(Context context, int schemaID,
            String element, String qualifier) throws SQLException,
            AuthorizeException
    {
        // Grab rows from DB; qualifier needs a dedicated IS NULL clause
        TableRowIterator tri;
        if (qualifier == null)
        {
            tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
                    "SELECT * FROM MetadataFieldRegistry WHERE metadata_schema_id= ? " +
                    "AND element= ? AND qualifier is NULL ",
                    schemaID, element);
        }
        else
        {
            tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
                    "SELECT * FROM MetadataFieldRegistry WHERE metadata_schema_id= ? " +
                    "AND element= ? AND qualifier= ? ",
                    schemaID, element, qualifier);
        }

        TableRow row = null;
        try
        {
            if (tri.hasNext())
            {
                row = tri.next();
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        if (row == null)
        {
            return null;
        }
        else
        {
            return new MetadataField(row);
        }
    }

    /**
     * Retrieve all Dublin Core types from the registry
     *
     * @param context dspace context
     * @return an array of all the Dublin Core types
     * @throws SQLException
     */
    public static MetadataField[] findAll(Context context) throws SQLException
    {
        List<MetadataField> fields = new ArrayList<MetadataField>();

        // Get all the metadatafieldregistry rows
        TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataFieldRegistry",
                "SELECT mfr.* FROM MetadataFieldRegistry mfr, MetadataSchemaRegistry msr where mfr.metadata_schema_id= msr.metadata_schema_id ORDER BY msr.short_id, mfr.element, mfr.qualifier");
        try
        {
            // Make into DC Type objects
            while (tri.hasNext())
            {
                fields.add(new MetadataField(tri.next()));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        // Convert list into an array
        MetadataField[] typeArray = new MetadataField[fields.size()];
        return (MetadataField[]) fields.toArray(typeArray);
    }

    /**
     * Return all metadata fields that are found in a given schema.
     *
     * @param context dspace context
     * @param schemaID schema by db ID
     * @return array of metadata fields
     * @throws SQLException
     */
    public static MetadataField[] findAllInSchema(Context context, int schemaID)
            throws SQLException
    {
        List<MetadataField> fields = new ArrayList<MetadataField>();

        // Get all the metadatafieldregistry rows
        TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
                "SELECT * FROM MetadataFieldRegistry WHERE metadata_schema_id= ? " +
                " ORDER BY element, qualifier", schemaID);
        try
        {
            // Make into DC Type objects
            while (tri.hasNext())
            {
                fields.add(new MetadataField(tri.next()));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        // Convert list into an array
        MetadataField[] typeArray = new MetadataField[fields.size()];
        return (MetadataField[]) fields.toArray(typeArray);
    }

    /**
     * Update the metadata field in the database.
     *
     * @param context dspace context
     * @throws SQLException
     * @throws AuthorizeException if the caller is not an administrator
     * @throws NonUniqueMetadataException if the change would duplicate an
     *             existing element/qualifier pair
     * @throws IOException
     */
    public void update(Context context) throws SQLException,
            AuthorizeException, NonUniqueMetadataException, IOException
    {
        // Check authorisation: Only admins may update the metadata registry
        if (!AuthorizeManager.isAdmin(context))
        {
            // FIX: corrected typo "modiffy" in error message
            throw new AuthorizeException(
                    "Only administrators may modify the Dublin Core registry");
        }

        // Check to see if the schema ID was altered. If is was then we will
        // query to ensure that there is not already a duplicate name field.
        if (row.getIntColumn("metadata_schema_id") != schemaID)
        {
            if (MetadataField.hasElement(context, schemaID, element, qualifier))
            {
                // FIX: corrected typo "Duplcate" in error message
                throw new NonUniqueMetadataException(
                        "Duplicate field name found in target schema");
            }
        }

        // Ensure the element and qualifier are unique within a given schema.
        if (!unique(context, schemaID, element, qualifier))
        {
            // FIX: message was truncated; completed to match create()
            throw new NonUniqueMetadataException("Please make " + element + "."
                    + qualifier + " unique within schema #" + schemaID);
        }

        row.setColumn("metadata_schema_id", schemaID);
        row.setColumn("element", element);
        row.setColumn("qualifier", qualifier);
        row.setColumn("scope_note", scopeNote);
        DatabaseManager.update(context, row);
        // Registry changed: invalidate the ID cache
        decache();

        // FIX: added comma separators so the log fields are parseable
        log.info(LogManager.getHeader(context, "update_metadatafieldregistry",
                "metadata_field_id=" + getFieldID() + ",element=" + getElement()
                + ",qualifier=" + getQualifier()));
    }

    /**
     * Return true if and only if the schema has a field with the given element
     * and qualifier pair.
     *
     * @param context dspace context
     * @param schemaID schema by ID
     * @param element element name
     * @param qualifier qualifier name
     * @return true if the field exists
     * @throws SQLException
     * @throws AuthorizeException
     */
    private static boolean hasElement(Context context, int schemaID,
            String element, String qualifier) throws SQLException,
            AuthorizeException
    {
        return MetadataField.findByElement(context, schemaID, element,
                qualifier) != null;
    }

    /**
     * Delete the metadata field.
     *
     * @param context dspace context
     * @throws SQLException
     * @throws AuthorizeException if the caller is not an administrator
     */
    public void delete(Context context) throws SQLException, AuthorizeException
    {
        // Check authorisation: Only admins may create DC types
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only administrators may modify the metadata registry");
        }

        log.info(LogManager.getHeader(context, "delete_metadata_field",
                "metadata_field_id=" + getFieldID()));

        DatabaseManager.delete(context, row);
        // Registry changed: invalidate the ID cache
        decache();
    }

    /**
     * A sanity check that ensures a given element and qualifier are unique
     * within a given schema. The check happens in code as we cannot use a
     * database constraint.
     *
     * @param context dspace context
     * @param schemaID
     * @param element
     * @param qualifier
     * @return true if unique
     * @throws AuthorizeException
     * @throws SQLException
     * @throws IOException
     */
    private boolean unique(Context context, int schemaID, String element,
            String qualifier) throws IOException, SQLException,
            AuthorizeException
    {
        int count = 0;
        Connection con = null;
        PreparedStatement statement = null;
        ResultSet rs = null;

        try
        {
            // Note: the connection belongs to the Context, so it is NOT
            // closed here -- only the statement and result set are.
            con = context.getDBConnection();
            TableRow reg = DatabaseManager.row("MetadataFieldRegistry");

            String qualifierClause = "";
            if (qualifier == null)
            {
                qualifierClause = "and qualifier is null";
            }
            else
            {
                qualifierClause = "and qualifier = ?";
            }

            // Exclude this field's own row so update() doesn't see itself
            // as a duplicate (fieldID is 0 for a not-yet-created field).
            String query = "SELECT COUNT(*) FROM " + reg.getTable()
                    + " WHERE metadata_schema_id= ? "
                    + " and metadata_field_id != ? "
                    + " and element= ? " + qualifierClause;

            statement = con.prepareStatement(query);
            statement.setInt(1,schemaID);
            statement.setInt(2,fieldID);
            statement.setString(3,element);
            if (qualifier != null)
            {
                statement.setString(4,qualifier);
            }

            rs = statement.executeQuery();
            if (rs.next())
            {
                count = rs.getInt(1);
            }
        }
        finally
        {
            if (rs != null)
            {
                try { rs.close(); } catch (SQLException sqle) { }
            }
            if (statement != null)
            {
                try { statement.close(); } catch (SQLException sqle) { }
            }
        }

        return (count == 0);
    }

    /**
     * Return the HTML FORM key for the given field.
     *
     * @param schema
     * @param element
     * @param qualifier
     * @return HTML FORM key
     */
    public static String formKey(String schema, String element, String qualifier)
    {
        if (qualifier == null)
        {
            return schema + "_" + element;
        }
        else
        {
            return schema + "_" + element + "_" + qualifier;
        }
    }

    /**
     * Find the field corresponding to the given numeric ID.  The ID is
     * a database key internal to DSpace.
     *
     * @param context
     *            context, in case we need to read it in from DB
     * @param id
     *            the metadata field ID
     * @return the metadata field object, or null if no such field exists
     * @throws SQLException
     */
    public static MetadataField find(Context context, int id)
            throws SQLException
    {
        if (!isCacheInitialized())
        {
            initCache(context);
        }

        // 'sanity check' first.
        Integer iid = Integer.valueOf(id);
        if (!id2field.containsKey(iid))
        {
            return null;
        }

        return id2field.get(iid);
    }

    // invalidate the cache e.g. after something modifies DB state.
    private static void decache()
    {
        id2field = null;
    }

    private static boolean isCacheInitialized()
    {
        return id2field != null;
    }

    // load caches if necessary
    private static synchronized void initCache(Context context) throws SQLException
    {
        if (!isCacheInitialized())
        {
            // Build into a local map and publish it in one assignment, so
            // readers never observe a half-filled cache.
            Map<Integer, MetadataField> new_id2field = new HashMap<Integer, MetadataField>();
            log.info("Loading MetadataField elements into cache.");

            // Grab rows from DB
            TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
                    "SELECT * from MetadataFieldRegistry");
            try
            {
                while (tri.hasNext())
                {
                    TableRow row = tri.next();
                    int fieldID = row.getIntColumn("metadata_field_id");
                    new_id2field.put(Integer.valueOf(fieldID), new MetadataField(row));
                }
            }
            finally
            {
                // close the TableRowIterator to free up resources
                if (tri != null)
                {
                    tri.close();
                }
            }

            id2field = new_id2field;
        }
    }

    /**
     * Return <code>true</code> if <code>obj</code> is the same MetadataField
     * as this object, <code>false</code> otherwise
     *
     * @param obj
     *            object to compare to
     *
     * @return <code>true</code> if object passed in represents the same
     *         MetadataField as this object
     */
    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
        {
            return true;
        }
        if (obj == null)
        {
            return false;
        }
        if (getClass() != obj.getClass())
        {
            return false;
        }
        final MetadataField other = (MetadataField) obj;
        if (this.fieldID != other.fieldID)
        {
            return false;
        }
        if (this.schemaID != other.schemaID)
        {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode()
    {
        int hash = 7;
        hash = 47 * hash + this.fieldID;
        hash = 47 * hash + this.schemaID;
        return hash;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.MissingResourceException;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.browse.ItemCounter;
import org.dspace.browse.ItemCountException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Class representing a community
* <P>
* The community's metadata (name, introductory text etc.) is loaded into'
* memory. Changes to this metadata are only reflected in the database after
* <code>update</code> is called.
*
* @author Robert Tansley
* @version $Revision: 5844 $
*/
public class Community extends DSpaceObject
{
/** log4j category */
private static Logger log = Logger.getLogger(Community.class);
/** Our context */
private Context ourContext;
/** The table row corresponding to this item */
private TableRow communityRow;
/** The logo bitstream */
private Bitstream logo;
/** Handle, if any */
private String handle;
/** Flag set when data is modified, for events */
private boolean modified;
/** Flag set when metadata is modified, for events */
private boolean modifiedMetadata;
/** The default group of administrators */
private Group admins;
/**
 * Construct a community object from a database row.
 * <p>
 * Loads the logo bitstream (if any), resolves the community's Handle, and
 * registers this instance in the context cache so subsequent finds return
 * the same object.
 *
 * @param context
 *            the context this object exists in
 * @param row
 *            the corresponding row in the table
 * @throws SQLException if the logo bitstream or Handle lookup fails
 */
Community(Context context, TableRow row) throws SQLException
{
    ourContext = context;
    communityRow = row;
    // Get the logo bitstream; a null column means the community has no logo
    if (communityRow.isColumnNull("logo_bitstream_id"))
    {
        logo = null;
    }
    else
    {
        logo = Bitstream.find(ourContext, communityRow
                .getIntColumn("logo_bitstream_id"));
    }
    // Get our Handle if any
    handle = HandleManager.findHandle(context, this);
    // Cache ourselves so context.fromCache() finds this instance
    context.cache(this, row.getIntColumn("community_id"));
    modified = false;
    modifiedMetadata = false;
    // Resolve the default administrators group from the "admin" column
    // (groupFromColumn is defined later in this class)
    admins = groupFromColumn("admin");
    clearDetails();
}
/**
 * Get a community from the database, loading in its metadata.
 *
 * @param context
 *            DSpace context object
 * @param id
 *            ID of the community
 *
 * @return the community, or null if the ID is invalid.
 */
public static Community find(Context context, int id) throws SQLException
{
    // The context cache guarantees one in-memory object per database row
    Community cached = (Community) context.fromCache(Community.class, id);
    if (cached != null)
    {
        return cached;
    }

    TableRow row = DatabaseManager.find(context, "community", id);

    if (row == null)
    {
        if (log.isDebugEnabled())
        {
            log.debug(LogManager.getHeader(context, "find_community",
                    "not_found,community_id=" + id));
        }
        return null;
    }

    if (log.isDebugEnabled())
    {
        log.debug(LogManager.getHeader(context, "find_community",
                "community_id=" + id));
    }
    return new Community(context, row);
}
/**
 * Create a new community (top-level if <code>parent</code> is null), with a
 * newly minted Handle.
 *
 * @param parent
 *            the parent community, or null for a top-level community
 * @param context
 *            DSpace context object
 *
 * @return the newly created community
 */
public static Community create(Community parent, Context context)
        throws SQLException, AuthorizeException
{
    // Delegate to the handle-aware variant; null requests a fresh Handle
    return create(parent, context, null);
}
/**
 * Create a new community (top-level if <code>parent</code> is null), with
 * either a pre-determined Handle or a newly minted one.
 * <p>
 * Also installs the default 'anonymous READ' policy and fires CREATE (and,
 * for top-level communities, Site ADD) events.
 *
 * @param parent the parent community, or null for a top-level community
 * @param context
 *            DSpace context object
 * @param handle the pre-determined Handle to assign to the new community,
 *            or null to mint a new one
 *
 * @return the newly created community
 * @throws AuthorizeException if the caller is neither a system admin nor
 *             authorized to ADD to the parent
 */
public static Community create(Community parent, Context context, String handle)
        throws SQLException, AuthorizeException
{
    // Either a system admin, or someone with ADD rights on the parent
    if (!(AuthorizeManager.isAdmin(context) ||
          (parent != null && AuthorizeManager.authorizeActionBoolean(context, parent, Constants.ADD))))
    {
        throw new AuthorizeException(
                "Only administrators can create communities");
    }
    TableRow row = DatabaseManager.create(context, "community");
    Community c = new Community(context, row);
    try
    {
        c.handle = (handle == null) ?
                   HandleManager.createHandle(context, c) :
                   HandleManager.createHandle(context, c, handle);
    }
    catch(IllegalStateException ie)
    {
        //If an IllegalStateException is thrown, then an existing object is already using this handle
        //Remove the community we just created -- as it is incomplete
        try
        {
            // NOTE(review): c cannot be null here (constructor succeeded);
            // the cleanup is deliberately best-effort, hence the broad catch
            if(c!=null)
            {
                c.delete();
            }
        } catch(Exception e) { }
        //pass exception on up the chain
        throw ie;
    }
    if(parent != null)
    {
        parent.addSubcommunity(c);
    }
    // create the default authorization policy for communities
    // of 'anonymous' READ
    Group anonymousGroup = Group.find(context, 0);
    ResourcePolicy myPolicy = ResourcePolicy.create(context);
    myPolicy.setResource(c);
    myPolicy.setAction(Constants.READ);
    myPolicy.setGroup(anonymousGroup);
    myPolicy.update();
    context.addEvent(new Event(Event.CREATE, Constants.COMMUNITY, c.getID(), c.handle));
    // if creating a top-level Community, simulate an ADD event at the Site.
    if (parent == null)
    {
        context.addEvent(new Event(Event.ADD, Constants.SITE, Site.SITE_ID, Constants.COMMUNITY, c.getID(), c.handle));
    }
    log.info(LogManager.getHeader(context, "create_community",
            "community_id=" + row.getIntColumn("community_id"))
            + ",handle=" + c.handle);
    return c;
}
/**
 * Get a list of all communities in the system. These are alphabetically
 * sorted by community name.
 *
 * @param context
 *            DSpace context object
 *
 * @return the communities in the system
 */
public static Community[] findAll(Context context) throws SQLException
{
    List<Community> found = new ArrayList<Community>();
    TableRowIterator rows = DatabaseManager.queryTable(context, "community",
            "SELECT * FROM community ORDER BY name");
    try
    {
        while (rows.hasNext())
        {
            TableRow r = rows.next();
            // Prefer the cached instance so each row maps to one object
            Community cached = (Community) context.fromCache(
                    Community.class, r.getIntColumn("community_id"));
            found.add(cached == null ? new Community(context, r) : cached);
        }
    }
    finally
    {
        // release the iterator's database resources
        if (rows != null)
        {
            rows.close();
        }
    }
    return found.toArray(new Community[found.size()]);
}
/**
 * Get a list of all top-level communities in the system. These are
 * alphabetically sorted by community name. A top-level community is one
 * without a parent community.
 *
 * @param context
 *            DSpace context object
 *
 * @return the top-level communities in the system
 */
public static Community[] findAllTop(Context context) throws SQLException
{
    // A top-level community is any community with no community2community parent
    TableRowIterator rows = DatabaseManager.queryTable(context, "community",
            "SELECT * FROM community WHERE NOT community_id IN "
                    + "(SELECT child_comm_id FROM community2community) "
                    + "ORDER BY name");
    List<Community> found = new ArrayList<Community>();
    try
    {
        while (rows.hasNext())
        {
            TableRow r = rows.next();
            // Prefer the cached instance so each row maps to one object
            Community cached = (Community) context.fromCache(
                    Community.class, r.getIntColumn("community_id"));
            found.add(cached == null ? new Community(context, r) : cached);
        }
    }
    finally
    {
        // release the iterator's database resources
        if (rows != null)
        {
            rows.close();
        }
    }
    return found.toArray(new Community[found.size()]);
}
/**
 * Get the internal database ID of this community.
 *
 * @return the internal identifier
 */
public int getID()
{
    return communityRow.getIntColumn("community_id");
}
/**
 * Get this community's Handle, lazily resolving it from the Handle table
 * if it was not loaded at construction time.
 *
 * @return the Handle, or null if none could be resolved
 * @see org.dspace.content.DSpaceObject#getHandle()
 */
public String getHandle()
{
    if (handle == null)
    {
        try
        {
            handle = HandleManager.findHandle(this.ourContext, this);
        }
        catch (SQLException e)
        {
            // FIX: this exception was previously swallowed silently (TODO
            // stub); log it so handle-lookup failures are diagnosable.
            // Callers still just see a null handle.
            log.warn(LogManager.getHeader(ourContext, "get_handle",
                    "community_id=" + getID()), e);
        }
    }
    return handle;
}
/**
 * Get the value of a metadata field.
 *
 * @param field
 *            the name of the metadata field to get
 *
 * @return the value of the metadata field, or the empty string if unset
 *
 * @exception IllegalArgumentException
 *                if the requested metadata field doesn't exist
 */
public String getMetadata(String field)
{
    String value = communityRow.getStringColumn(field);
    // Normalise a missing value to the empty string for callers
    if (value == null)
    {
        return "";
    }
    return value;
}
/**
 * Set a metadata value. A blank or null "name" is replaced with the
 * localized "untitled" label so the community always has a display name.
 *
 * @param field
 *            the name of the metadata field to set
 * @param value
 *            value to set the field to (null clears the field)
 *
 * @exception IllegalArgumentException
 *                if the requested metadata field doesn't exist
 * @exception MissingResourceException
 */
public void setMetadata(String field, String value)throws MissingResourceException
{
    boolean clearingName = (field.trim()).equals("name")
            && (value == null || value.trim().equals(""));
    if (clearingName)
    {
        // Substitute a localized placeholder rather than a blank name
        try
        {
            value = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled");
        }
        catch (MissingResourceException e)
        {
            value = "Untitled";
        }
    }

    // Store null as a null column; otherwise trim excess whitespace
    if (value == null)
    {
        communityRow.setColumnNull(field);
    }
    else
    {
        communityRow.setColumn(field, value.trim());
    }

    modifiedMetadata = true;
    addDetails(field);
}
/**
 * Get the display name of this community.
 *
 * @return the community name (empty string if unset)
 */
public String getName()
{
    return getMetadata("name");
}
/**
 * Get the logo for the community. <code>null</code> is returned if the
 * community does not have a logo.
 *
 * @return the logo of the community, or <code>null</code>
 */
public Bitstream getLogo()
{
    return logo;
}
/**
 * Give the community a logo. Passing in <code>null</code> removes any
 * existing logo. You will need to set the format of the new logo bitstream
 * before it will work, for example to "JPEG". Note that
 * <code>update</code> will need to be called for the change to take
 * effect. Setting a logo and not calling <code>update</code> later may
 * result in a previous logo lying around as an "orphaned" bitstream.
 *
 * @param is the stream to use as the new logo, or null to remove the logo
 *
 * @return the new logo bitstream, or <code>null</code> if there is no
 *         logo (<code>null</code> was passed in)
 * @throws AuthorizeException if the caller may not edit this community
 *             (or, for removal, lacks DELETE rights)
 */
public Bitstream setLogo(InputStream is) throws AuthorizeException,
        IOException, SQLException
{
    // Check authorisation
    // authorized to remove the logo when DELETE rights
    // authorized when canEdit
    if (!((is == null) && AuthorizeManager.authorizeActionBoolean(
            ourContext, this, Constants.DELETE)))
    {
        canEdit();
    }
    // First, delete any existing logo
    if (logo != null)
    {
        log.info(LogManager.getHeader(ourContext, "remove_logo",
                "community_id=" + getID()));
        communityRow.setColumnNull("logo_bitstream_id");
        logo.delete();
        logo = null;
    }
    if (is != null)
    {
        Bitstream newLogo = Bitstream.create(ourContext, is);
        communityRow.setColumn("logo_bitstream_id", newLogo.getID());
        logo = newLogo;
        // now create policy for logo bitstream
        // to match our READ policy
        List<ResourcePolicy> policies = AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.READ);
        AuthorizeManager.addPolicies(ourContext, policies, newLogo);
        log.info(LogManager.getHeader(ourContext, "set_logo",
                "community_id=" + getID() + "logo_bitstream_id="
                        + newLogo.getID()));
    }
    // Row is changed in memory only; persisted by update()
    modified = true;
    return logo;
}
/**
 * Update the community metadata (including logo) to the database, firing
 * MODIFY / MODIFY_METADATA events as appropriate.
 *
 * @throws AuthorizeException if the caller may not edit this community
 */
public void update() throws SQLException, IOException, AuthorizeException
{
    // Check authorisation
    canEdit();
    log.info(LogManager.getHeader(ourContext, "update_community",
            "community_id=" + getID()));
    DatabaseManager.update(ourContext, communityRow);
    // Fire events only for the kinds of change actually made, then reset
    // the dirty flags so a second update() is a no-op
    if (modified)
    {
        ourContext.addEvent(new Event(Event.MODIFY, Constants.COMMUNITY, getID(), null));
        modified = false;
    }
    if (modifiedMetadata)
    {
        ourContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.COMMUNITY, getID(), getDetails()));
        modifiedMetadata = false;
        clearDetails();
    }
}
/**
 * Create a default administrators group if one does not already exist.
 * Returns either the newly created group or the previously existing one.
 * Note that other groups may also be administrators.
 *
 * @return the default group of editors associated with this community
 * @throws SQLException
 * @throws AuthorizeException if the caller may not manage the admin group
 */
public Group createAdministrators() throws SQLException, AuthorizeException
{
    // Check authorisation - Must be an Admin to create more Admins
    AuthorizeUtil.authorizeManageAdminGroup(ourContext, this);
    if (admins == null)
    {
        //turn off authorization so that Community Admins can create Sub-Community Admins
        ourContext.turnOffAuthorisationSystem();
        admins = Group.create(ourContext);
        ourContext.restoreAuthSystemState();
        admins.setName("COMMUNITY_" + getID() + "_ADMIN");
        admins.update();
    }
    // Grant the group ADMIN rights on this community (idempotent intent;
    // runs even when the group already existed)
    AuthorizeManager.addPolicy(ourContext, this, Constants.ADMIN, admins);
    // register this as the admin group
    communityRow.setColumn("admin", admins.getID());
    // Row change is persisted by update()
    modified = true;
    return admins;
}
/**
 * Remove the administrators group, if no group has already been created
 * then return without error. This will merely dereference the current
 * administrators group from the community so that it may be deleted
 * without violating database constraints.
 *
 * @throws AuthorizeException if the caller may not remove the admin group
 */
public void removeAdministrators() throws SQLException, AuthorizeException
{
    // Must be an Admin of the parent community (or system admin)
    AuthorizeUtil.authorizeRemoveAdminGroup(ourContext, this);

    // Nothing to do when no administrative group exists
    if (admins == null)
    {
        return;
    }

    // Drop the foreign-key reference; the group itself is not deleted here
    communityRow.setColumnNull("admin");
    admins = null;
    modified = true;
}
/**
 * Get the default group of administrators, if there is one. Note that the
 * authorization system may allow others to be administrators for the
 * community.
 * <P>
 * The default group of administrators for community 100 is the one called
 * <code>community_100_admin</code>.
 *
 * @return group of administrators, or <code>null</code> if there is no
 *         default group.
 */
public Group getAdministrators()
{
    return admins;
}
/**
 * Get the collections in this community. Throws an SQLException because
 * creating a community object won't load in all collections.
 *
 * @return array of Collection objects, sorted by name
 */
public Collection[] getCollections() throws SQLException
{
    List<Collection> members = new ArrayList<Collection>();

    // Join through the community2collection mapping table
    TableRowIterator rows = DatabaseManager.queryTable(
            ourContext,"collection",
            "SELECT collection.* FROM collection, community2collection WHERE " +
            "community2collection.collection_id=collection.collection_id " +
            "AND community2collection.community_id= ? ORDER BY collection.name",
            getID());

    try
    {
        while (rows.hasNext())
        {
            TableRow r = rows.next();
            // Prefer the cached instance so each row maps to one object
            Collection cached = (Collection) ourContext.fromCache(
                    Collection.class, r.getIntColumn("collection_id"));
            members.add(cached == null ? new Collection(ourContext, r) : cached);
        }
    }
    finally
    {
        // release the iterator's database resources
        if (rows != null)
        {
            rows.close();
        }
    }

    return members.toArray(new Collection[members.size()]);
}
/**
 * Get the immediate sub-communities of this community. Throws an
 * SQLException because creating a community object won't load in all
 * collections.
 *
 * @return array of Community objects, sorted by name
 */
public Community[] getSubcommunities() throws SQLException
{
    List<Community> children = new ArrayList<Community>();

    // Join through the community2community mapping table (this = parent)
    TableRowIterator rows = DatabaseManager.queryTable(
            ourContext,"community",
            "SELECT community.* FROM community, community2community WHERE " +
            "community2community.child_comm_id=community.community_id " +
            "AND community2community.parent_comm_id= ? ORDER BY community.name",
            getID());

    try
    {
        while (rows.hasNext())
        {
            TableRow r = rows.next();
            // Prefer the cached instance so each row maps to one object
            Community cached = (Community) ourContext.fromCache(
                    Community.class, r.getIntColumn("community_id"));
            children.add(cached == null ? new Community(ourContext, r) : cached);
        }
    }
    finally
    {
        // release the iterator's database resources
        if (rows != null)
        {
            rows.close();
        }
    }

    return children.toArray(new Community[children.size()]);
}
/**
 * Return the parent community of this community, or null if the community
 * is top-level.
 *
 * @return the immediate parent community, or null if top-level
 */
public Community getParentCommunity() throws SQLException
{
    Community parent = null;

    // Join through the community2community mapping table (this = child)
    TableRowIterator rows = DatabaseManager.queryTable(
            ourContext,"community",
            "SELECT community.* FROM community, community2community WHERE " +
            "community2community.parent_comm_id=community.community_id " +
            "AND community2community.child_comm_id= ? ",
            getID());

    try
    {
        // At most one parent row is expected
        if (rows.hasNext())
        {
            TableRow r = rows.next();
            // Prefer the cached instance so each row maps to one object
            Community cached = (Community) ourContext.fromCache(
                    Community.class, r.getIntColumn("community_id"));
            parent = (cached == null) ? new Community(ourContext, r) : cached;
        }
    }
    finally
    {
        // release the iterator's database resources
        if (rows != null)
        {
            rows.close();
        }
    }

    return parent;
}
/**
 * Return an array of parent communities of this community, in ascending
 * order. If community is top-level, return an empty array.
 *
 * @return an array of parent communities, empty if top-level
 */
public Community[] getAllParents() throws SQLException
{
    List<Community> ancestors = new ArrayList<Community>();

    // Walk up the hierarchy until a top-level community is reached
    for (Community p = getParentCommunity(); p != null; p = p.getParentCommunity())
    {
        ancestors.add(p);
    }

    return ancestors.toArray(new Community[ancestors.size()]);
}
    /**
     * Create a new collection within this community, letting the system mint
     * a Handle for it. The collection is created without any workflow groups
     * or default submitter group.
     *
     * @return the new collection
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not ADD to this
     *             community
     */
    public Collection createCollection() throws SQLException,
            AuthorizeException
    {
        // Delegate to the Handle-aware variant with no pre-determined Handle
        return createCollection(null);
    }
    /**
     * Create a new collection within this community. The collection is created
     * without any workflow groups or default submitter group.
     *
     * @param handle the pre-determined Handle to assign to the new collection,
     *            or null to mint a new one
     * @return the new collection
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not ADD to this
     *             community
     */
    public Collection createCollection(String handle) throws SQLException,
            AuthorizeException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
        Collection c = Collection.create(ourContext, handle);
        addCollection(c);
        return c;
    }
/**
* Add an exisiting collection to the community
*
* @param c
* collection to add
*/
public void addCollection(Collection c) throws SQLException,
AuthorizeException
{
// Check authorisation
AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
log.info(LogManager.getHeader(ourContext, "add_collection",
"community_id=" + getID() + ",collection_id=" + c.getID()));
// Find out if mapping exists
TableRowIterator tri = DatabaseManager.queryTable(ourContext,
"community2collection",
"SELECT * FROM community2collection WHERE " +
"community_id= ? AND collection_id= ? ",getID(),c.getID());
try
{
if (!tri.hasNext())
{
// No existing mapping, so add one
TableRow mappingRow = DatabaseManager.row("community2collection");
mappingRow.setColumn("community_id", getID());
mappingRow.setColumn("collection_id", c.getID());
ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COLLECTION, c.getID(), c.getHandle()));
DatabaseManager.insert(ourContext, mappingRow);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
}
    /**
     * Create a new sub-community within this community, letting the system
     * mint a Handle for it.
     *
     * @return the new community
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not ADD to this
     *             community
     */
    public Community createSubcommunity() throws SQLException,
            AuthorizeException
    {
        // Delegate to the Handle-aware variant with no pre-determined Handle
        return createSubcommunity(null);
    }
    /**
     * Create a new sub-community within this community.
     *
     * @param handle the pre-determined Handle to assign to the new community,
     *            or null to mint a new one
     * @return the new community
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not ADD to this
     *             community
     */
    public Community createSubcommunity(String handle) throws SQLException,
            AuthorizeException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
        Community c = create(this, ourContext, handle);
        addSubcommunity(c);
        return c;
    }
/**
* Add an exisiting community as a subcommunity to the community
*
* @param c
* subcommunity to add
*/
public void addSubcommunity(Community c) throws SQLException,
AuthorizeException
{
// Check authorisation
AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
log.info(LogManager.getHeader(ourContext, "add_subcommunity",
"parent_comm_id=" + getID() + ",child_comm_id=" + c.getID()));
// Find out if mapping exists
TableRowIterator tri = DatabaseManager.queryTable(ourContext,
"community2community",
"SELECT * FROM community2community WHERE parent_comm_id= ? "+
"AND child_comm_id= ? ",getID(), c.getID());
try
{
if (!tri.hasNext())
{
// No existing mapping, so add one
TableRow mappingRow = DatabaseManager.row("community2community");
mappingRow.setColumn("parent_comm_id", getID());
mappingRow.setColumn("child_comm_id", c.getID());
ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COMMUNITY, c.getID(), c.getHandle()));
DatabaseManager.insert(ourContext, mappingRow);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
}
    /**
     * Remove a collection from this community. If the collection belongs to
     * no other community, it is orphaned and therefore deleted outright.
     *
     * @param c
     *            collection to remove
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not REMOVE from this
     *             community
     * @throws IOException if deleting the orphaned collection's content fails
     */
    public void removeCollection(Collection c) throws SQLException,
            AuthorizeException, IOException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
        // Will the collection become an orphan? Count how many communities
        // currently map to it.
        TableRow trow = DatabaseManager.querySingle(ourContext,
                "SELECT COUNT(DISTINCT community_id) AS num FROM community2collection WHERE collection_id= ? ",
                c.getID());
        // Defer the FK constraint so the collection row can be deleted
        // before its community2collection mapping is removed below.
        DatabaseManager.setConstraintDeferred(ourContext, "comm2coll_collection_fk");
        if (trow.getLongColumn("num") == 1)
        {
            // Orphan; delete it
            c.delete();
        }
        log.info(LogManager.getHeader(ourContext, "remove_collection",
                "community_id=" + getID() + ",collection_id=" + c.getID()));
        // Remove any mappings
        DatabaseManager.updateQuery(ourContext,
                "DELETE FROM community2collection WHERE community_id= ? "+
                "AND collection_id= ? ", getID(), c.getID());
        // Re-enable immediate constraint checking now that both sides are gone
        DatabaseManager.setConstraintImmediate(ourContext, "comm2coll_collection_fk");
        ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COLLECTION, c.getID(), c.getHandle()));
    }
    /**
     * Remove a sub-community from this community. If the sub-community has no
     * other parent, it is orphaned and its whole substructure is deleted via
     * rawDelete().
     *
     * @param c
     *            subcommunity to remove
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the current user may not REMOVE from this
     *             community
     * @throws IOException if deleting the orphaned community's content fails
     */
    public void removeSubcommunity(Community c) throws SQLException,
            AuthorizeException, IOException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
        // Will the subcommunity become an orphan? Count its parents.
        TableRow trow = DatabaseManager.querySingle(ourContext,
                "SELECT COUNT(DISTINCT parent_comm_id) AS num FROM community2community WHERE child_comm_id= ? ",
                c.getID());
        // Defer the FK constraint so the child community row can be deleted
        // before its community2community mapping is removed below.
        DatabaseManager.setConstraintDeferred(ourContext, "com2com_child_fk");
        if (trow.getLongColumn("num") == 1)
        {
            // Orphan; delete it
            c.rawDelete();
        }
        log.info(LogManager.getHeader(ourContext, "remove_subcommunity",
                "parent_comm_id=" + getID() + ",child_comm_id=" + c.getID()));
        // Remove any mappings
        DatabaseManager.updateQuery(ourContext,
                "DELETE FROM community2community WHERE parent_comm_id= ? " +
                " AND child_comm_id= ? ", getID(),c.getID());
        // NOTE(review): here the event is fired before the constraint is made
        // immediate again, the opposite order of removeCollection() — looks
        // harmless but worth confirming it is intentional.
        ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COMMUNITY, c.getID(), c.getHandle()));
        DatabaseManager.setConstraintImmediate(ourContext, "com2com_child_fk");
    }
    /**
     * Delete the community, including the metadata and logo. Collections and
     * subcommunities that are then orphans are deleted. If this community has
     * a parent, deletion is delegated to the parent's removeSubcommunity()
     * after all subcommunities have been deleted first.
     */
    public void delete() throws SQLException, AuthorizeException, IOException
    {
        // Check authorisation
        // FIXME: If this was a subcommunity, it is first removed from it's
        // parent.
        // This means the parentCommunity == null
        // But since this is also the case for top-level communities, we would
        // give everyone rights to remove the top-level communities.
        // The same problem occurs in removing the logo
        if (!AuthorizeManager.authorizeActionBoolean(ourContext,
                getParentCommunity(), Constants.REMOVE))
        {
            // No REMOVE on the parent: require DELETE on this community itself
            AuthorizeManager
                    .authorizeAction(ourContext, this, Constants.DELETE);
        }
        // If not a top-level community, have parent remove me; this
        // will call rawDelete() before removing the linkage
        Community parent = getParentCommunity();
        if (parent != null)
        {
            // remove the subcommunities first (each recurses through delete())
            Community[] subcommunities = getSubcommunities();
            for (int i = 0; i < subcommunities.length; i++)
            {
                subcommunities[i].delete();
            }
            // now let the parent remove the community
            parent.removeSubcommunity(this);
            return;
        }
        // Top-level community: delete directly
        rawDelete();
    }
    /**
     * Internal method that removes this community and all of its children
     * (collections and subcommunities) from the database, without regard to
     * any parent community it may still be linked from. Callers are expected
     * to have handled the parent linkage (see delete()).
     */
    private void rawDelete() throws SQLException, AuthorizeException, IOException
    {
        log.info(LogManager.getHeader(ourContext, "delete_community",
                "community_id=" + getID()));
        ourContext.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, getID(), getHandle()));
        // Remove from cache
        ourContext.removeCached(this, getID());
        // Remove collections (orphaned ones are deleted by removeCollection)
        Collection[] cols = getCollections();
        for (int i = 0; i < cols.length; i++)
        {
            removeCollection(cols[i]);
        }
        // delete subcommunities recursively
        Community[] comms = getSubcommunities();
        for (int j = 0; j < comms.length; j++)
        {
            comms[j].delete();
        }
        // Remove the logo
        setLogo(null);
        // Remove all authorization policies
        AuthorizeManager.removeAllPolicies(ourContext, this);
        // get rid of the content count cache if it exists
        try
        {
            ItemCounter ic = new ItemCounter(ourContext);
            ic.remove(this);
        }
        catch (ItemCountException e)
        {
            // FIXME: upside down exception handling due to lack of good
            // exception framework
            throw new IllegalStateException(e.getMessage(),e);
        }
        // Remove any Handle
        HandleManager.unbindHandle(ourContext, this);
        // Delete community row
        DatabaseManager.delete(ourContext, communityRow);
        // Remove administrators group - must happen after deleting community
        Group g = getAdministrators();
        if (g != null)
        {
            g.delete();
        }
    }
/**
* Return <code>true</code> if <code>other</code> is the same Community
* as this object, <code>false</code> otherwise
*
* @param other
* object to compare to
*
* @return <code>true</code> if object passed in represents the same
* community as this object
*/
public boolean equals(Object other)
{
if (!(other instanceof Community))
{
return false;
}
return (getID() == ((Community) other).getID());
}
public int hashCode()
{
return new HashCodeBuilder().append(getID()).toHashCode();
}
/**
* Utility method for reading in a group from a group ID in a column. If the
* column is null, null is returned.
*
* @param col
* the column name to read
* @return the group referred to by that column, or null
* @throws SQLException
*/
private Group groupFromColumn(String col) throws SQLException
{
if (communityRow.isColumnNull(col))
{
return null;
}
return Group.find(ourContext, communityRow.getIntColumn(col));
}
    /**
     * Return the type of this DSpace object, as defined in
     * {@link org.dspace.core.Constants}.
     *
     * @return Constants.COMMUNITY
     */
    public int getType()
    {
        return Constants.COMMUNITY;
    }
/**
* return TRUE if context's user can edit community, false otherwise
*
* @return boolean true = current user can edit community
*/
public boolean canEditBoolean() throws java.sql.SQLException
{
try
{
canEdit();
return true;
}
catch (AuthorizeException e)
{
return false;
}
}
public void canEdit() throws AuthorizeException, SQLException
{
Community[] parents = getAllParents();
for (int i = 0; i < parents.length; i++)
{
if (AuthorizeManager.authorizeActionBoolean(ourContext, parents[i],
Constants.WRITE))
{
return;
}
if (AuthorizeManager.authorizeActionBoolean(ourContext, parents[i],
Constants.ADD))
{
return;
}
}
AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
}
/**
* counts items in this community
*
* @return total items
*/
public int countItems() throws SQLException
{
int total = 0;
// add collection counts
Collection[] cols = getCollections();
for ( int i = 0; i < cols.length; i++)
{
total += cols[i].countItems();
}
// add sub-community counts
Community[] comms = getSubcommunities();
for ( int j = 0; j < comms.length; j++ )
{
total += comms[j].countItems();
}
return total;
}
public DSpaceObject getAdminObject(int action) throws SQLException
{
DSpaceObject adminObject = null;
switch (action)
{
case Constants.REMOVE:
if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion())
{
adminObject = this;
}
break;
case Constants.DELETE:
if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion())
{
adminObject = getParentCommunity();
}
break;
case Constants.ADD:
if (AuthorizeConfiguration.canCommunityAdminPerformSubelementCreation())
{
adminObject = this;
}
break;
default:
adminObject = this;
break;
}
return adminObject;
}
public DSpaceObject getParentObject() throws SQLException
{
Community pCommunity = getParentCommunity();
if (pCommunity != null)
{
return pCommunity;
}
else
{
return null;
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.event.Event;
import org.dspace.storage.bitstore.BitstreamStorageManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Class representing bitstreams stored in the DSpace system.
* <P>
* When modifying the bitstream metadata, changes are not reflected in the
* database until <code>update</code> is called. Note that you cannot alter
* the contents of a bitstream; you need to create a new bitstream.
*
* @author Robert Tansley
* @version $Revision: 5844 $
*/
public class Bitstream extends DSpaceObject
{
    /** log4j logger */
    private static final Logger log = Logger.getLogger(Bitstream.class);
    /** Our context */
    private final Context bContext;
    /** The row in the table representing this bitstream */
    private final TableRow bRow;
    /** The bitstream format corresponding to this bitstream */
    private BitstreamFormat bitstreamFormat;
    /** Flag set when data is modified, for events */
    private boolean modified;
    /** Flag set when metadata is modified, for events */
    private boolean modifiedMetadata;
    /**
     * Private constructor for creating a Bitstream object based on the contents
     * of a DB table row. Also caches the new object in the context.
     *
     * @param context
     *            the context this object exists in
     * @param row
     *            the corresponding row in the table
     * @throws SQLException
     */
    Bitstream(Context context, TableRow row) throws SQLException
    {
        bContext = context;
        bRow = row;
        // Get the bitstream format
        bitstreamFormat = BitstreamFormat.find(context, row
                .getIntColumn("bitstream_format_id"));
        if (bitstreamFormat == null)
        {
            // No format: use "Unknown"
            bitstreamFormat = BitstreamFormat.findUnknown(context);
            // Panic if we can't find it
            if (bitstreamFormat == null)
            {
                throw new IllegalStateException("No Unknown bitstream format");
            }
        }
        // Cache ourselves
        context.cache(this, row.getIntColumn("bitstream_id"));
        modified = false;
        modifiedMetadata = false;
        clearDetails();
    }
    /**
     * Get a bitstream from the database. The bitstream metadata is loaded into
     * memory.
     *
     * @param context
     *            DSpace context object
     * @param id
     *            ID of the bitstream
     *
     * @return the bitstream, or null if the ID is invalid.
     * @throws SQLException
     */
    public static Bitstream find(Context context, int id) throws SQLException
    {
        // First check the cache
        Bitstream fromCache = (Bitstream) context
                .fromCache(Bitstream.class, id);
        if (fromCache != null)
        {
            return fromCache;
        }
        TableRow row = DatabaseManager.find(context, "bitstream", id);
        if (row == null)
        {
            if (log.isDebugEnabled())
            {
                log.debug(LogManager.getHeader(context, "find_bitstream",
                        "not_found,bitstream_id=" + id));
            }
            return null;
        }
        // not null, return Bitstream
        if (log.isDebugEnabled())
        {
            log.debug(LogManager.getHeader(context, "find_bitstream",
                    "bitstream_id=" + id));
        }
        return new Bitstream(context, row);
    }
    /**
     * Get all bitstreams in the system, reusing cached instances where they
     * exist.
     *
     * @param context DSpace context object
     * @return an array of all bitstreams
     * @throws SQLException
     */
    public static Bitstream[] findAll(Context context) throws SQLException
    {
        TableRowIterator tri = DatabaseManager.queryTable(context, "bitstream",
                "SELECT * FROM bitstream");
        List<Bitstream> bitstreams = new ArrayList<Bitstream>();
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                // First check the cache
                Bitstream fromCache = (Bitstream) context.fromCache(
                        Bitstream.class, row.getIntColumn("bitstream_id"));
                if (fromCache != null)
                {
                    bitstreams.add(fromCache);
                }
                else
                {
                    bitstreams.add(new Bitstream(context, row));
                }
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        Bitstream[] bitstreamArray = new Bitstream[bitstreams.size()];
        bitstreamArray = bitstreams.toArray(bitstreamArray);
        return bitstreamArray;
    }
    /**
     * Create a new bitstream, with a new ID. The checksum and file size are
     * calculated. This method is not public, and does not check authorisation;
     * other methods such as Bundle.createBitstream() will check authorisation.
     * The newly created bitstream has the "unknown" format.
     *
     * @param context
     *            DSpace context object
     * @param is
     *            the bits to put in the bitstream
     *
     * @return the newly created bitstream
     * @throws IOException
     * @throws SQLException
     */
    static Bitstream create(Context context, InputStream is)
            throws IOException, SQLException
    {
        // Store the bits
        int bitstreamID = BitstreamStorageManager.store(context, is);
        log.info(LogManager.getHeader(context, "create_bitstream",
                "bitstream_id=" + bitstreamID));
        // Set the format to "unknown"
        Bitstream bitstream = find(context, bitstreamID);
        bitstream.setFormat(null);
        context.addEvent(new Event(Event.CREATE, Constants.BITSTREAM, bitstreamID, null));
        return bitstream;
    }
    /**
     * Register a new bitstream, with a new ID. The checksum and file size
     * are calculated. This method is not public, and does not check
     * authorisation; other methods such as Bundle.createBitstream() will
     * check authorisation. The newly created bitstream has the "unknown"
     * format.
     *
     * @param context DSpace context object
     * @param assetstore corresponds to an assetstore in dspace.cfg
     * @param bitstreamPath the path and filename relative to the assetstore
     * @return the newly registered bitstream
     * @throws IOException
     * @throws SQLException
     */
    static Bitstream register(Context context,
            int assetstore, String bitstreamPath)
            throws IOException, SQLException
    {
        // Store the bits
        int bitstreamID = BitstreamStorageManager.register(
                context, assetstore, bitstreamPath);
        log.info(LogManager.getHeader(context,
                "create_bitstream",
                "bitstream_id=" + bitstreamID));
        // Set the format to "unknown"
        Bitstream bitstream = find(context, bitstreamID);
        bitstream.setFormat(null);
        context.addEvent(new Event(Event.CREATE, Constants.BITSTREAM, bitstreamID, "REGISTER"));
        return bitstream;
    }
    /**
     * Get the internal identifier of this bitstream
     *
     * @return the internal identifier
     */
    public int getID()
    {
        return bRow.getIntColumn("bitstream_id");
    }
    /**
     * Bitstreams are not assigned Handles.
     *
     * @return always null
     */
    public String getHandle()
    {
        // No Handles for bitstreams
        return null;
    }
    /**
     * Get the sequence ID of this bitstream
     *
     * @return the sequence ID
     */
    public int getSequenceID()
    {
        return bRow.getIntColumn("sequence_id");
    }
    /**
     * Set the sequence ID of this bitstream
     *
     * @param sid
     *            the ID
     */
    public void setSequenceID(int sid)
    {
        bRow.setColumn("sequence_id", sid);
        modifiedMetadata = true;
        addDetails("SequenceID");
    }
    /**
     * Get the name of this bitstream - typically the filename, without any path
     * information
     *
     * @return the name of the bitstream
     */
    public String getName()
    {
        return bRow.getStringColumn("name");
    }
    /**
     * Set the name of the bitstream
     *
     * @param n
     *            the new name of the bitstream
     */
    public void setName(String n)
    {
        bRow.setColumn("name", n);
        modifiedMetadata = true;
        addDetails("Name");
    }
    /**
     * Get the source of this bitstream - typically the filename with path
     * information (if originally provided) or the name of the tool that
     * generated this bitstream
     *
     * @return the source of the bitstream
     */
    public String getSource()
    {
        return bRow.getStringColumn("source");
    }
    /**
     * Set the source of the bitstream
     *
     * @param n
     *            the new source of the bitstream
     */
    public void setSource(String n)
    {
        bRow.setColumn("source", n);
        modifiedMetadata = true;
        addDetails("Source");
    }
    /**
     * Get the description of this bitstream - optional free text, typically
     * provided by a user at submission time
     *
     * @return the description of the bitstream
     */
    public String getDescription()
    {
        return bRow.getStringColumn("description");
    }
    /**
     * Set the description of the bitstream
     *
     * @param n
     *            the new description of the bitstream
     */
    public void setDescription(String n)
    {
        bRow.setColumn("description", n);
        modifiedMetadata = true;
        addDetails("Description");
    }
    /**
     * Get the checksum of the content of the bitstream, for integrity checking
     *
     * @return the checksum
     */
    public String getChecksum()
    {
        return bRow.getStringColumn("checksum");
    }
    /**
     * Get the algorithm used to calculate the checksum
     *
     * @return the algorithm, e.g. "MD5"
     */
    public String getChecksumAlgorithm()
    {
        return bRow.getStringColumn("checksum_algorithm");
    }
    /**
     * Get the size of the bitstream
     *
     * @return the size in bytes
     */
    public long getSize()
    {
        return bRow.getLongColumn("size_bytes");
    }
    /**
     * Set the user's format description. This implies that the format of the
     * bitstream is uncertain, and the format is set to "unknown."
     *
     * @param desc
     *            the user's description of the format
     * @throws SQLException
     */
    public void setUserFormatDescription(String desc) throws SQLException
    {
        // FIXME: Would be better if this didn't throw an SQLException,
        // but we need to find the unknown format!
        setFormat(null);
        bRow.setColumn("user_format_description", desc);
        modifiedMetadata = true;
        addDetails("UserFormatDescription");
    }
    /**
     * Get the user's format description. Returns null if the format is known by
     * the system.
     *
     * @return the user's format description.
     */
    public String getUserFormatDescription()
    {
        return bRow.getStringColumn("user_format_description");
    }
    /**
     * Get the description of the format - either the user's or the description
     * of the format defined by the system.
     *
     * @return a description of the format.
     */
    public String getFormatDescription()
    {
        if (bitstreamFormat.getShortDescription().equals("Unknown"))
        {
            // Get user description if there is one
            String desc = bRow.getStringColumn("user_format_description");
            if (desc == null)
            {
                return "Unknown";
            }
            return desc;
        }
        // not null or Unknown
        return bitstreamFormat.getShortDescription();
    }
    /**
     * Get the format of the bitstream
     *
     * @return the format of this bitstream
     */
    public BitstreamFormat getFormat()
    {
        return bitstreamFormat;
    }
    /**
     * Set the format of the bitstream. If the user has supplied a type
     * description, it is cleared. Passing in <code>null</code> sets the type
     * of this bitstream to "unknown".
     *
     * @param f
     *            the format of this bitstream, or <code>null</code> for
     *            unknown
     * @throws SQLException
     */
    public void setFormat(BitstreamFormat f) throws SQLException
    {
        // FIXME: Would be better if this didn't throw an SQLException,
        // but we need to find the unknown format!
        if (f == null)
        {
            // Use "Unknown" format
            bitstreamFormat = BitstreamFormat.findUnknown(bContext);
        }
        else
        {
            bitstreamFormat = f;
        }
        // Remove user type description
        bRow.setColumnNull("user_format_description");
        // Update the ID in the table row
        bRow.setColumn("bitstream_format_id", bitstreamFormat.getID());
        modified = true;
    }
    /**
     * Update the bitstream metadata. Note that the content of the bitstream
     * cannot be changed - for that you need to create a new bitstream.
     *
     * @throws SQLException
     * @throws AuthorizeException
     */
    public void update() throws SQLException, AuthorizeException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(bContext, this, Constants.WRITE);
        log.info(LogManager.getHeader(bContext, "update_bitstream",
                "bitstream_id=" + getID()));
        if (modified)
        {
            bContext.addEvent(new Event(Event.MODIFY, Constants.BITSTREAM, getID(), null));
            modified = false;
        }
        if (modifiedMetadata)
        {
            bContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.BITSTREAM, getID(), getDetails()));
            modifiedMetadata = false;
            clearDetails();
        }
        DatabaseManager.update(bContext, bRow);
    }
    /**
     * Delete the bitstream, including any mappings to bundles. The stored
     * bits themselves are removed via BitstreamStorageManager.
     *
     * @throws SQLException
     */
    void delete() throws SQLException
    {
        // Oracle needs '' rather than NULL when clearing the primary
        // bitstream reference below
        boolean oracle = false;
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            oracle = true;
        }
        // changed to a check on remove
        // Check authorisation
        //AuthorizeManager.authorizeAction(bContext, this, Constants.DELETE);
        log.info(LogManager.getHeader(bContext, "delete_bitstream",
                "bitstream_id=" + getID()));
        bContext.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, getID(), String.valueOf(getSequenceID())));
        // Remove from cache
        bContext.removeCached(this, getID());
        // Remove policies
        AuthorizeManager.removeAllPolicies(bContext, this);
        // Remove references to primary bitstreams in bundle
        String query = "update bundle set primary_bitstream_id = ";
        query += (oracle ? "''" : "Null") + " where primary_bitstream_id = ? ";
        DatabaseManager.updateQuery(bContext,
                query, bRow.getIntColumn("bitstream_id"));
        // Remove bitstream itself
        BitstreamStorageManager.delete(bContext, bRow
                .getIntColumn("bitstream_id"));
    }
    /**
     * Bitstreams are only logically deleted (via a flag in the database).
     * This method allows us to verify if the bitstream is still valid.
     *
     * @return true if the bitstream has been deleted
     */
    boolean isDeleted() throws SQLException
    {
        String query = "select count(*) as mycount from Bitstream where deleted = '1' and bitstream_id = ? ";
        TableRowIterator tri = DatabaseManager.query(bContext, query, bRow.getIntColumn("bitstream_id"));
        long count = 0;
        try
        {
            // A COUNT query always yields one row, but guard with hasNext()
            // rather than risk an exception on an empty result set
            if (tri.hasNext())
            {
                TableRow r = tri.next();
                count = r.getLongColumn("mycount");
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        return count == 1;
    }
    /**
     * Retrieve the contents of the bitstream
     *
     * @return a stream from which the bitstream can be read.
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    public InputStream retrieve() throws IOException, SQLException,
            AuthorizeException
    {
        // Maybe should return AuthorizeException??
        AuthorizeManager.authorizeAction(bContext, this, Constants.READ);
        return BitstreamStorageManager.retrieve(bContext, bRow
                .getIntColumn("bitstream_id"));
    }
    /**
     * Get the bundles this bitstream appears in
     *
     * @return array of <code>Bundle</code> s this bitstream appears in
     * @throws SQLException
     */
    public Bundle[] getBundles() throws SQLException
    {
        // Get the bundle table rows
        TableRowIterator tri = DatabaseManager.queryTable(bContext, "bundle",
                "SELECT bundle.* FROM bundle, bundle2bitstream WHERE " +
                "bundle.bundle_id=bundle2bitstream.bundle_id AND " +
                "bundle2bitstream.bitstream_id= ? ",
                bRow.getIntColumn("bitstream_id"));
        // Build a list of Bundle objects
        List<Bundle> bundles = new ArrayList<Bundle>();
        try
        {
            while (tri.hasNext())
            {
                TableRow r = tri.next();
                // First check the cache
                Bundle fromCache = (Bundle) bContext.fromCache(Bundle.class, r
                        .getIntColumn("bundle_id"));
                if (fromCache != null)
                {
                    bundles.add(fromCache);
                }
                else
                {
                    bundles.add(new Bundle(bContext, r));
                }
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        Bundle[] bundleArray = new Bundle[bundles.size()];
        // List<Bundle>.toArray(Bundle[]) already returns Bundle[];
        // the former (Bundle[]) cast was redundant
        bundleArray = bundles.toArray(bundleArray);
        return bundleArray;
    }
    /**
     * return type found in Constants
     *
     * @return int Constants.BITSTREAM
     */
    public int getType()
    {
        return Constants.BITSTREAM;
    }
    /**
     * Determine if this bitstream is registered
     *
     * @return true if the bitstream is registered, false otherwise
     */
    public boolean isRegisteredBitstream() {
        return BitstreamStorageManager
                .isRegisteredBitstream(bRow.getStringColumn("internal_id"));
    }
    /**
     * Get the asset store number where this bitstream is stored
     *
     * @return the asset store number of the bitstream
     */
    public int getStoreNumber() {
        return bRow.getIntColumn("store_number");
    }
    /**
     * Return the object this bitstream is authorized "through": the first
     * item of its first bundle; failing that, the collection or community
     * whose logo it is; otherwise null.
     *
     * @return the parent object, or null if none can be determined
     * @throws SQLException
     */
    public DSpaceObject getParentObject() throws SQLException
    {
        Bundle[] bundles = getBundles();
        if (bundles != null && (bundles.length > 0 && bundles[0] != null))
        {
            // the ADMIN action is not allowed on Bundle object so skip to the item
            Item[] items = bundles[0].getItems();
            if (items != null && items.length > 0)
            {
                return items[0];
            }
            else
            {
                return null;
            }
        }
        else
        {
            // is the bitstream a logo for a community or a collection?
            TableRow qResult = DatabaseManager.querySingle(bContext,
                       "SELECT collection_id FROM collection " +
                       "WHERE logo_bitstream_id = ?",getID());
            if (qResult != null)
            {
                return Collection.find(bContext,qResult.getIntColumn("collection_id"));
            }
            else
            {
                // is the bitstream related to a community?
                qResult = DatabaseManager.querySingle(bContext,
                        "SELECT community_id FROM community " +
                        "WHERE logo_bitstream_id = ?",getID());
                if (qResult != null)
                {
                    return Community.find(bContext,qResult.getIntColumn("community_id"));
                }
                else
                {
                    return null;
                }
            }
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.core.PluginManager;
import org.dspace.app.mediafilter.MediaFilter;
import org.jdom.Element;
/**
* Packager plugin to ingest a
* METS (Metadata Encoding & Transmission Standard) package
* that conforms to the DSpace METS SIP (Submission Information Package) Profile.
* See <a href="http://www.loc.gov/standards/mets/">http://www.loc.gov/standards/mets/</a>
* for more information on METS, and
* <a href="http://www.dspace.org/standards/METS/SIP/profilev0p9p1/metssipv0p9p1.pdf">
* http://www.dspace.org/standards/METS/SIP/profilev0p9p1/metssipv0p9p1.pdf</a>
* (or a similar file in the /standards/METS/SIP resource hierarchy)
* for more information about the DSpace METS SIP profile.
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 5844 $
* @see org.dspace.content.packager.METSManifest
* @see AbstractMETSIngester
* @see AbstractPackageIngester
* @see PackageIngester
*/
public class DSpaceMETSIngester
extends AbstractMETSIngester
{
    // first part of required mets@PROFILE value
    private static final String PROFILE_START = "DSpace METS SIP Profile";
    // Validate the manifest by checking only the profile name: the PROFILE
    // attribute must be present and start with the DSpace METS SIP prefix.
    @Override
    void checkManifest(METSManifest manifest)
        throws MetadataValidationException
    {
        String profile = manifest.getProfile();
        if (profile == null)
        {
            throw new MetadataValidationException("Cannot accept METS with no PROFILE attribute!");
        }
        else if (!profile.startsWith(PROFILE_START))
        {
            throw new MetadataValidationException("METS has unacceptable PROFILE value, profile=" + profile);
        }
    }
    /**
     * Choose DMD section(s) to crosswalk.
     * <p>
     * The algorithm is:<br>
     * 1. Use whatever the <code>dmd</code> parameter specifies as the primary DMD.<br>
     * 2. If (1) is unspecified, find MODS (preferably) or DC as primary DMD.<br>
     * 3. If (1) or (2) succeeds, crosswalk it and ignore all other DMDs with
     *    same GROUPID<br>
     * 4. Crosswalk remaining DMDs not eliminated already.
     */
    @Override
    public void crosswalkObjectDmd(Context context, DSpaceObject dso,
                                  METSManifest manifest,
                                  MdrefManager callback,
                                  Element dmds[], PackageParameters params)
        throws CrosswalkException, PackageValidationException,
               AuthorizeException, SQLException, IOException
    {
        // Index of the chosen primary DMD section, -1 while none found
        int found = -1;
        // Check to see what dmdSec the user specified in the 'dmd' parameter
        String userDmd = null;
        if (params != null)
        {
            userDmd = params.getProperty("dmd");
        }
        if (userDmd != null && userDmd.length() > 0)
        {
            // NOTE(review): no break, so the LAST matching dmdSec wins when
            // several share the requested MDTYPE — confirm this is intended.
            for (int i = 0; i < dmds.length; ++i)
            {
                if (userDmd.equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }
        // MODS is preferred, if nothing specified by user
        if (found == -1)
        {
            for (int i = 0; i < dmds.length; ++i)
            {
                //NOTE: METS standard actually says this should be MODS (all uppercase). But,
                // just in case, we're going to be a bit more forgiving.
                if ("MODS".equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }
        // DC acceptable if no MODS
        if (found == -1)
        {
            for (int i = 0; i < dmds.length; ++i)
            {
                //NOTE: METS standard actually says this should be DC (all uppercase). But,
                // just in case, we're going to be a bit more forgiving.
                if ("DC".equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }
        String groupID = null;
        if (found >= 0)
        {
            // Crosswalk the primary DMD first
            manifest.crosswalkItemDmd(context, params, dso, dmds[found], callback);
            groupID = dmds[found].getAttributeValue("GROUPID");
            if (groupID != null)
            {
                // Crosswalk every other DMD whose GROUPID differs from the
                // primary's (same-group DMDs are alternate representations).
                // NOTE(review): DMDs with no GROUPID at all are skipped here,
                // and nothing further is crosswalked when the primary has no
                // GROUPID — confirm against the SIP profile.
                for (int i = 0; i < dmds.length; ++i)
                {
                    String g = dmds[i].getAttributeValue("GROUPID");
                    if (g != null && !g.equals(groupID))
                    {
                        manifest.crosswalkItemDmd(context, params, dso, dmds[i], callback);
                    }
                }
            }
        }
        else
        {
            // otherwise take the first. Don't xwalk more than one because
            // each xwalk _adds_ metadata, and could add duplicate fields.
            if (dmds.length > 0)
            {
                manifest.crosswalkItemDmd(context, params, dso, dmds[0], callback);
            }
        }
    }
/**
 * Policy: For DSpace deposit license, take deposit license
 * supplied by explicit argument first, else use collection's
 * default deposit license.
 * For Creative Commons, look for a rightsMd containing a CC license.
 *
 * @param context    DSpace context
 * @param item       item being ingested
 * @param license    explicit license text supplied by the caller (may be null)
 * @param collection owning collection (source of the default license)
 * @param params     packager parameters (unused here)
 */
@Override
public void addLicense(Context context, Item item, String license,
                        Collection collection, PackageParameters params)
    throws PackageValidationException,
           AuthorizeException, SQLException, IOException
{
    // Only attach a deposit license when the item does not already have one.
    boolean hasLicense = (PackageUtils.findDepositLicense(context, item) != null);
    if (!hasLicense)
    {
        PackageUtils.addDepositLicense(context, license, item, collection);
    }
}
/**
 * Hook for post-ingest fixups on the object; this SIP ingester needs none.
 */
@Override
public void finishObject(Context context, DSpaceObject dso,
                         PackageParameters params)
    throws PackageValidationException, CrosswalkException,
           AuthorizeException, SQLException, IOException
{
    // nothing to do.
}
/**
 * This SIP ingester only produces Items.
 *
 * @return {@link Constants#ITEM} always
 */
@Override
public int getObjectType(METSManifest manifest)
    throws PackageValidationException
{
    return Constants.ITEM;
}
/**
 * Return the name a derived file would have if a MediaFilter had created
 * it, or null when no MediaFilter produces the given bundle.
 * Only needed when importing a SIP without canonical DSpace derived
 * file naming.
 *
 * @param bundleName name of the (derived) bundle, e.g. THUMBNAIL
 * @param origName   name of the source file in the ORIGINAL bundle
 * @return the filtered name, or null if no plugin owns this bundle
 */
private String makeDerivedFilename(String bundleName, String origName)
{
    // Ask each configured MediaFilter plugin whether it produces this
    // bundle; the first match dictates the derived filename.
    for (String pluginName : PluginManager.getAllPluginNames(MediaFilter.class))
    {
        MediaFilter filter = (MediaFilter) PluginManager.getNamedPlugin(MediaFilter.class, pluginName);
        if (bundleName.equals(filter.getBundleName()))
        {
            return filter.getFilteredName(origName);
        }
    }
    return null;
}
/**
 * Take a second pass over files to correct names of derived files
 * (e.g. thumbnails, extracted text) to what DSpace expects.
 *
 * @param context  DSpace context
 * @param bs       the just-ingested bitstream
 * @param mfile    the mets:file element it came from
 * @param manifest the METS manifest
 * @param params   packager parameters (unused here)
 */
@Override
public void finishBitstream(Context context,
                            Bitstream bs,
                            Element mfile,
                            METSManifest manifest,
                            PackageParameters params)
    throws MetadataValidationException, SQLException, AuthorizeException, IOException
{
    String bundleName = METSManifest.getBundleName(mfile);

    // Files in the ORIGINAL bundle keep their names; only derived
    // bundles (THUMBNAIL, TEXT, ...) may need renaming.
    if (bundleName.equals(Constants.CONTENT_BUNDLE_NAME))
    {
        return;
    }

    String opath = manifest.getOriginalFilePath(mfile);
    if (opath == null)
    {
        return;
    }

    // Rename to the name the corresponding MediaFilter would have used.
    String newName = makeDerivedFilename(bundleName, opath);
    if (newName != null)
    {
        bs.setName(newName);
        bs.update();
    }
}
/**
 * Name used to distinguish DSpace Configuration entries for this subclass.
 */
@Override
public String getConfigurationName()
{
    return "dspaceSIP";
}
/**
 * Content probing is not supported by this ingester.
 *
 * @param context DSpace context (unused)
 * @param in      package stream (unused)
 * @param params  packager parameters (unused)
 * @return never returns normally
 * @throws UnsupportedOperationException always
 */
public boolean probe(Context context, InputStream in, PackageParameters params)
{
    // FIX: the message previously said "PDF package ingester" -- a
    // copy/paste error; this is the DSpace METS SIP ingester.
    throw new UnsupportedOperationException("DSpace METS SIP package ingester does not implement probe()");
}
/**
 * Returns a user help string which should describe the
 * additional valid command-line options that this packager
 * implementation will accept when using the <code>-o</code> or
 * <code>--option</code> flags with the Packager script.
 *
 * @return a string describing additional command-line options available
 * with this packager
 */
@Override
public String getParameterHelp()
{
    // Superclass help first, then the one extra option this class adds.
    StringBuilder help = new StringBuilder(super.getParameterHelp());
    help.append("\n\n")
        .append("* dmd=[dmdSecType]  " +
                "Type of the METS <dmdSec> which should be used for primary item metadata (defaults to MODS, then DC)");
    return help.toString();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.IOException;
import java.sql.SQLException;
import org.jdom.Element;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Context;
import org.dspace.core.Constants;
/**
* Subclass of the METS packager framework to ingest a DSpace
* Archival Information Package (AIP). The AIP is intended to be, foremost,
* a _complete_ and _accurate_ representation of one object in the DSpace
* object model. An AIP contains all of the information needed to restore
* the object precisely in another DSpace archive instance.
* <p>
* This ingester recognizes two distinct types of AIPs: "Manifest-Only" and "External".
* The Manifest-Only AIP, which is selected by specifying a PackageParameters
* key "manifestOnly" with the value "true", refers to all its contents by
* reference only. For Community or Collection AIPs this means all references to their
* child objects are just via Handles. For Item AIPs all Bitreams are just
* referenced by their asset store location instead of finding them in the "package".
* The Manifest-Only AIP package format is simply a METS XML document serialized into a file.
* <p>
* An "external" AIP (the default), is a conventional Zip-file based package
* that includes copies of all bitstreams referenced by the object as well
* as a serialized METS XML document in the path "mets.xml".
*
* Configuration keys:
*
* # instructs which xwalk plugin to use for a given type of metadata
* mets.dspaceAIP.ingest.crosswalk.{mdSecName} = {pluginName}
* mets.dspaceAIP.ingest.crosswalk.DC = QDC
* mets.dspaceAIP.ingest.crosswalk.DSpaceDepositLicense = NULLSTREAM
*
* # Option to save METS manifest in the item: (default is false)
* mets.default.ingest.preserveManifest = false
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 1.1 $
*
* @see AbstractMETSIngester
* @see AbstractPackageIngester
* @see PackageIngester
* @see org.dspace.content.packager.METSManifest
*/
public class DSpaceAIPIngester
       extends AbstractMETSIngester
{
    /** log4j category */
    private static Logger log = Logger.getLogger(DSpaceAIPIngester.class);

    /**
     * Ensure it's an AIP generated by the complementary AIP disseminator.
     */
    @Override
    void checkManifest(METSManifest manifest)
        throws MetadataValidationException
    {
        String profile = manifest.getProfile();
        if (profile == null)
        {
            throw new MetadataValidationException("Cannot accept METS with no PROFILE attribute!");
        }
        // AIPs require an EXACT profile match (the SIP ingester, by
        // contrast, only checks the prefix).
        else if (!profile.equals(DSpaceAIPDisseminator.PROFILE_1_0))
        {
            throw new MetadataValidationException("METS has unacceptable PROFILE attribute, profile=" + profile);
        }
    }

    /**
     * Choose DMD section(s) to crosswalk.
     * <p>
     * The algorithm is:<br>
     * 1. Use whatever the <code>dmd</code> parameter specifies as the primary DMD.<br>
     * 2. If (1) is unspecified, find DIM (preferably) or MODS as primary DMD.<br>
     * 3. If (1) or (2) succeeds, crosswalk it and ignore all other DMDs with
     *    same GROUPID<br>
     * 4. Crosswalk remaining DMDs not eliminated already.
     */
    @Override
    public void crosswalkObjectDmd(Context context, DSpaceObject dso,
                                   METSManifest manifest,
                                   MdrefManager callback,
                                   Element dmds[], PackageParameters params)
        throws CrosswalkException, PackageValidationException,
               AuthorizeException, SQLException, IOException
    {
        // Index into dmds[] of the chosen primary DMD, or -1 if none.
        int found = -1;

        // Check to see what dmdSec the user specified in the 'dmd' parameter
        String userDmd = null;
        if (params != null)
        {
            userDmd = params.getProperty("dmd");
        }
        if (userDmd != null && userDmd.length() > 0)
        {
            // No break, so the LAST dmdSec matching the requested type wins.
            for (int i = 0; i < dmds.length; ++i)
            {
                if (userDmd.equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }

        // DIM is preferred, if nothing specified by user
        if (found == -1)
        {
            // DIM is preferred for AIP
            for (int i = 0; i < dmds.length; ++i)
            {
                //NOTE: METS standard actually says this should be DIM (all uppercase). But,
                // just in case, we're going to be a bit more forgiving.
                if ("DIM".equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }

        // MODS is acceptable otherwise..
        if (found == -1)
        {
            for (int i = 0; i < dmds.length; ++i)
            {
                //NOTE: METS standard actually says this should be MODS (all uppercase). But,
                // just in case, we're going to be a bit more forgiving.
                if ("MODS".equalsIgnoreCase(manifest.getMdType(dmds[i])))
                {
                    found = i;
                }
            }
        }

        String groupID = null;
        if (found >= 0)
        {
            // Crosswalk the primary DMD first, then every dmdSec with a
            // DIFFERENT GROUPID; same-GROUPID sections (alternate renditions
            // of the same metadata) and GROUPID-less sections are skipped.
            manifest.crosswalkItemDmd(context, params, dso, dmds[found], callback);
            groupID = dmds[found].getAttributeValue("GROUPID");

            if (groupID != null)
            {
                for (int i = 0; i < dmds.length; ++i)
                {
                    String g = dmds[i].getAttributeValue("GROUPID");
                    if (g != null && !g.equals(groupID))
                    {
                        manifest.crosswalkItemDmd(context, params, dso, dmds[i], callback);
                    }
                }
            }
        }
        // otherwise take the first.  Don't xwalk more than one because
        // each xwalk _adds_ metadata, and could add duplicate fields.
        else if (dmds.length > 0)
        {
            manifest.crosswalkItemDmd(context, params, dso, dmds[0], callback);
        }
        // it's an error if there is nothing to crosswalk:
        else
        {
            throw new MetadataValidationException("DSpaceAIPIngester: Could not find an acceptable object-wide DMD section in manifest.");
        }
    }

    /**
     * Ignore license when restoring a manifest-only AIP, since it should
     * be a bitstream in the AIP already.
     * Otherwise: Check item for license first; then, take deposit
     * license supplied by explicit argument next, else use collection's
     * default deposit license.
     * Normally the rightsMD crosswalks should provide a license.
     */
    @Override
    public void addLicense(Context context, Item item, String license,
                            Collection collection, PackageParameters params)
        throws PackageValidationException,
               AuthorizeException, SQLException, IOException
    {
        boolean newLicense = false;

        if(!params.restoreModeEnabled())
        {
            //AIP is not being restored/replaced, so treat it like a SIP -- every new SIP needs a new license
            newLicense = true;
        }

        // Add deposit license if there isn't one in the object,
        // and it's not a restoration of an "manifestOnly" AIP:
        if (!params.getBooleanProperty("manifestOnly", false) &&
            PackageUtils.findDepositLicense(context, item) == null)
        {
            newLicense = true;
        }

        if(newLicense)
        {
            PackageUtils.addDepositLicense(context, license, item, collection);
        }
    }

    /**
     * Last chance to fix up a DSpace Object.
     * <P>
     * For AIPs, if the object is an Item, we may want to make sure all of its
     * metadata fields already exist in the database (otherwise, the database
     * will throw errors when we attempt to save/update the Item)
     *
     * @param context DSpace Context
     * @param dso DSpace object
     * @param params Packager Parameters
     */
    @Override
    public void finishObject(Context context, DSpaceObject dso, PackageParameters params)
        throws PackageValidationException, CrosswalkException,
               AuthorizeException, SQLException, IOException
    {
        if(dso.getType()==Constants.ITEM)
        {
            // Check if 'createMetadataFields' option is enabled (default=true)
            // This defaults to true as by default we should attempt to restore as much metadata as we can.
            // When 'createMetadataFields' is set to false, an ingest will fail if it attempts to ingest content to a missing metadata field.
            if (params.getBooleanProperty("createMetadataFields", true))
            {
                // We want to verify that all the Metadata Fields we've crosswalked
                // actually *exist* in the DB.  If not, we'll try to create them
                createMissingMetadataFields(context, (Item) dso);
            }
        }
    }

    /**
     * Nothing extra to do to bitstream after ingestion.
     */
    @Override
    public void finishBitstream(Context context,
                                Bitstream bs,
                                Element mfile,
                                METSManifest manifest,
                                PackageParameters params)
        throws MetadataValidationException, SQLException, AuthorizeException, IOException
    {
        // nothing to do.
    }

    /**
     * Return the type of DSpaceObject in this package; it is
     * in the TYPE attribute of the mets:mets element.
     */
    @Override
    public int getObjectType(METSManifest manifest)
        throws PackageValidationException
    {
        Element mets = manifest.getMets();
        String typeStr = mets.getAttributeValue("TYPE");
        if (typeStr == null || typeStr.length() == 0)
        {
            throw new PackageValidationException("Manifest is missing the required mets@TYPE attribute.");
        }
        // TYPE is written as e.g. "DSpace ITEM"; strip the "DSpace " prefix
        // (7 characters) before looking up the type constant.
        if (typeStr.startsWith("DSpace "))
        {
            typeStr = typeStr.substring(7);
        }
        int type = Constants.getTypeID(typeStr);
        if (type < 0)
        {
            throw new PackageValidationException("Manifest has unrecognized value in mets@TYPE attribute: " + typeStr);
        }
        return type;
    }

    /**
     * Name used to distinguish DSpace Configuration entries for this subclass.
     */
    @Override
    public String getConfigurationName()
    {
        return "dspaceAIP";
    }

    /**
     * Verifies that all the unsaved, crosswalked metadata fields that have
     * been added to an Item actually exist in our Database.  If they don't
     * exist, they are created within the proper database tables.
     * <P>
     * This method must be called *before* item.update(), as the call to update()
     * will throw a SQLException when attempting to save any fields which
     * don't already exist in the database.
     * <P>
     * NOTE: This will NOT create a missing Metadata Schema (e.g. "dc" schema),
     * as we do not have enough info to create schemas on the fly.
     *
     * @param context - DSpace Context
     * @param item - Item whose unsaved metadata fields we are testing
     * @throws AuthorizeException if a metadata field doesn't exist and current user is not authorized to create it (i.e. not an Admin)
     * @throws PackageValidationException if a metadata schema doesn't exist, as we cannot autocreate a schema
     */
    protected static void createMissingMetadataFields(Context context, Item item)
        throws PackageValidationException, AuthorizeException, IOException, SQLException
    {
        // Get all metadata fields/values currently added to this Item
        DCValue allMD[] = item.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);

        // For each field, we'll check if it exists. If not, we'll create it.
        for(DCValue md : allMD)
        {
            MetadataSchema mdSchema = null;
            MetadataField mdField = null;
            try
            {
                //Try to access this Schema
                mdSchema = MetadataSchema.find(context, md.schema);
                //If Schema found, try to locate field from database
                if(mdSchema!=null)
                {
                    mdField = MetadataField.findByElement(context, mdSchema.getSchemaID(), md.element, md.qualifier);
                }
            }
            catch(SQLException se)
            {
                //If a SQLException error is thrown, then this field does NOT exist in DB
                //Set field to null, so we know we need to create it
                mdField = null;
            }

            // If our Schema was not found, we have a problem
            // We cannot easily create a Schema automatically -- as we don't know its Namespace
            if(mdSchema==null)
            {
                throw new PackageValidationException("Unknown Metadata Schema encountered (" + md.schema + ") when attempting to ingest an Item. You will need to create this Metadata Schema in DSpace Schema Registry before the Item can be ingested.");
            }

            // If our Metadata Field is null, we will attempt to create it in the proper Schema
            if(mdField==null)
            {
                try
                {
                    //initialize field (but don't set a scope note) & create it
                    mdField = new MetadataField(mdSchema, md.element, md.qualifier, null);
                    // NOTE: Only Adminstrators can create Metadata Fields -- create() will throw an AuthorizationException for non-Admins
                    mdField.create(context);
                    //log that field was created
                    log.info("Located a missing metadata field (schema:'" + mdSchema.getName() +"', element:'"+ md.element +"', qualifier:'"+ md.qualifier +"') while ingesting Item. This missing field has been created in the DSpace Metadata Field Registry.");
                }
                catch(NonUniqueMetadataException ne)
                {   // This exception should never happen, as we already checked to make sure the field doesn't exist.
                    // But, we'll catch it anyways so that the Java compiler doesn't get upset
                    throw new SQLException("Unable to create Metadata Field (element='" + md.element + "', qualifier='" + md.qualifier + "') in Schema "+ mdSchema.getName() +".", ne);
                }
            }
        }
    }

    /**
     * Returns a user help string which should describe the
     * additional valid command-line options that this packager
     * implementation will accept when using the <code>-o</code> or
     * <code>--option</code> flags with the Packager script.
     *
     * @return a string describing additional command-line options available
     * with this packager
     */
    @Override
    public String getParameterHelp()
    {
        String parentHelp = super.getParameterHelp();

        //Return superclass help info, plus the extra parameters/options that this class supports
        return parentHelp +
                "\n\n" +
                "* createMetadataFields=[boolean]      " +
                "If true, ingest attempts to create any missing metadata fields." +
                "If false, ingest will fail if a metadata field is encountered which doesn't already exist. (default = true)" +
                "\n\n" +
                "* dmd=[dmdSecType]      " +
                "Type of the METS <dmdSec> which should be used to restore item metadata (defaults to DIM, then MODS)";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.jdom.Namespace;
/**
* Plugin to export all Group and EPerson objects in XML, perhaps for reloading.
*
* @author Mark Wood
*/
public class RoleDisseminator implements PackageDisseminator
{

    /** log4j category */
    private static final Logger log = Logger.getLogger(RoleDisseminator.class);

    /**
     * DSpace Roles XML Namespace in JDOM form.
     */
    public static final Namespace DSROLES_NS =
        Namespace.getNamespace("dsroles", "http://www.dspace.org/xmlns/dspace/dspace-roles");

    // Element and attribute names used in the DSpaceRoles XML document.
    public static final String DSPACE_ROLES = "DSpaceRoles";
    public static final String ID = "ID";
    public static final String GROUPS = "Groups";
    public static final String GROUP = "Group";
    public static final String NAME = "Name";
    public static final String TYPE = "Type";
    public static final String MEMBERS = "Members";
    public static final String MEMBER = "Member";
    public static final String MEMBER_GROUPS = "MemberGroups";
    public static final String MEMBER_GROUP = "MemberGroup";
    public static final String EPERSONS = "People";
    public static final String EPERSON = "Person";
    public static final String EMAIL = "Email";
    public static final String NETID = "Netid";
    public static final String FIRST_NAME = "FirstName";
    public static final String LAST_NAME = "LastName";
    public static final String LANGUAGE = "Language";
    public static final String PASSWORD_HASH = "PasswordHash";
    public static final String CAN_LOGIN = "CanLogin";
    public static final String REQUIRE_CERTIFICATE = "RequireCertificate";
    public static final String SELF_REGISTERED = "SelfRegistered";

    // Valid type values for Groups (only used when Group is associated with a Community or Collection)
    public static final String GROUP_TYPE_ADMIN = "ADMIN";
    public static final String GROUP_TYPE_SUBMIT = "SUBMIT";
    public static final String GROUP_TYPE_WORKFLOW_STEP_1 = "WORKFLOW_STEP_1";
    public static final String GROUP_TYPE_WORKFLOW_STEP_2 = "WORKFLOW_STEP_2";
    public static final String GROUP_TYPE_WORKFLOW_STEP_3 = "WORKFLOW_STEP_3";
/**
 * Serialize the Groups and EPersons associated with {@code object} into
 * {@code pkgFile} as a DSpaceRoles XML document.  Password hashes are
 * emitted only when the "passwords" parameter is present.
 *
 * @see org.dspace.content.packager.PackageDisseminator#disseminate(org.dspace.core.Context,
 *      org.dspace.content.DSpaceObject,
 *      org.dspace.content.packager.PackageParameters, java.io.File)
 */
@Override
public void disseminate(Context context, DSpaceObject object,
                        PackageParameters params, File pkgFile)
    throws PackageException, CrosswalkException,
           AuthorizeException, SQLException, IOException
{
    boolean emitPasswords = params.containsKey("passwords");

    FileOutputStream out = null;
    try
    {
        // Open the target file and stream the XML straight into it.
        out = new FileOutputStream(pkgFile);
        writeToStream(context, object, out, emitPasswords);
    }
    finally
    {
        // Always release the file handle, even if serialization failed.
        if (out != null)
        {
            out.close();
        }
    }
}
/**
 * Make serialized users and groups available on an InputStream, for code
 * which wants to read one.
 *
 * @param context DSpace context
 * @param object the DSpaceObject whose roles are serialized
 * @param emitPasswords true if password hashes should be included.
 * @return the stream of XML representing users and groups.
 * @throws IOException
 *             if a PipedOutputStream or PipedInputStream cannot be created.
 */
InputStream asStream(Context context, DSpaceObject object, boolean emitPasswords)
    throws IOException
{
    // Wire up a pipe: a background thread writes the XML into one end
    // while the caller reads from the other.
    PipedOutputStream sink = new PipedOutputStream();
    PipedInputStream source = new PipedInputStream(sink);

    Thread writerThread = new Thread(new Serializer(context, object, sink, emitPasswords));
    writerThread.start();

    return source;
}
/**
 * Embody a thread for serializing users and groups.
 * <p>
 * NOTE(review): non-static inner class -- it relies on the enclosing
 * RoleDisseminator instance in order to call writeToStream().
 *
 * @author mwood
 */
private class Serializer implements Runnable
{
    private Context context;
    private DSpaceObject object;
    // Destination stream; closed by run() when serialization completes.
    private OutputStream stream;
    // Whether password hashes are included in the output.
    private boolean emitPasswords;

    @SuppressWarnings("unused")
    private Serializer() {}

    /**
     * @param context DSpace context used for lookups during serialization
     * @param object the DSpaceObject
     * @param stream receives serialized user and group objects.  Will be
     *          closed when serialization is complete.
     * @param emitPasswords true if password hashes should be included.
     */
    Serializer(Context context, DSpaceObject object, OutputStream stream, boolean emitPasswords)
    {
        this.context = context;
        this.object = object;
        this.stream = stream;
        this.emitPasswords = emitPasswords;
    }

    @Override
    public void run()
    {
        try
        {
            writeToStream(context, object, stream, emitPasswords);
            stream.close();
        }
        catch (IOException e)
        {
            // Errors here surface to the reading side as a broken pipe;
            // log the underlying cause for diagnosis.
            log.error(e);
        }
        catch (PackageException e)
        {
            log.error(e);
        }
    }
}
/**
 * Serialize users and groups to a stream.
 *
 * @param context the DSpace context
 * @param object the DSpaceObject whose associated roles are exported
 * @param stream receives the output.  Is not closed by this method.
 * @param emitPasswords true if password hashes should be included.
 * @throws PackageException wrapping any underlying failure (XML stream
 *         errors, SQL errors, etc.)
 */
private void writeToStream(Context context, DSpaceObject object, OutputStream stream,
                           boolean emitPasswords)
    throws PackageException
{
    try
    {
        //First, find all Groups/People associated with our current Object
        Group[] groups = findAssociatedGroups(context, object);
        EPerson[] people = findAssociatedPeople(context, object);

        //Only continue if we've found Groups or People which we need to disseminate
        // (note: nothing at all is written when both are empty/null)
        if((groups!=null && groups.length>0) ||
           (people!=null && people.length>0))
        {
            XMLOutputFactory factory = XMLOutputFactory.newInstance();
            XMLStreamWriter writer;

            writer = factory.createXMLStreamWriter(stream, "UTF-8");
            writer.setDefaultNamespace(DSROLES_NS.getURI());
            writer.writeStartDocument("UTF-8", "1.0");
            writer.writeStartElement(DSPACE_ROLES);

            //Only disseminate a <Groups> element if some groups exist
            if(groups!=null)
            {
                writer.writeStartElement(GROUPS);

                for (Group group : groups)
                {
                    writeGroup(context, object, group, writer);
                }

                writer.writeEndElement(); // GROUPS
            }

            //Only disseminate an <People> element if some people exist
            if(people!=null)
            {
                writer.writeStartElement(EPERSONS);

                for (EPerson eperson : people)
                {
                    writeEPerson(eperson, writer, emitPasswords);
                }

                writer.writeEndElement(); // EPERSONS
            }

            writer.writeEndElement(); // DSPACE_ROLES
            writer.writeEndDocument();
            writer.close();
        }//end if Groups or People exist
    }
    catch (Exception e)
    {
        // Boundary catch: wrap every checked failure in a PackageException.
        throw new PackageException(e);
    }
}
/**
 * Not supported: disseminate() already writes all roles to a single
 * external file, so there is nothing for disseminateAll() to add.
 *
 * @throws PackageException always
 * @see
 * org.dspace.content.packager.PackageDisseminator#disseminateAll(org.dspace
 * .core.Context, org.dspace.content.DSpaceObject,
 * org.dspace.content.packager.PackageParameters, java.io.File)
 */
@Override
public List<File> disseminateAll(Context context, DSpaceObject dso,
                 PackageParameters params, File pkgFile)
    throws PackageException, CrosswalkException,
           AuthorizeException, SQLException, IOException
{
    throw new PackageException("disseminateAll() is not implemented, as disseminate() method already handles dissemination of all roles to an external file.");
}
/**
 * MIME type of the generated package: always XML.
 *
 * @see
 * org.dspace.content.packager.PackageDisseminator#getMIMEType(org.dspace
 * .content.packager.PackageParameters)
 */
@Override
public String getMIMEType(PackageParameters params)
{
    return "application/xml";
}
/**
 * Emit XML describing a single Group.
 *
 * @param context
 *            the DSpace Context
 * @param relatedObject
 *            the DSpaceObject related to this group (if any)
 * @param group
 *            the Group to describe
 * @param writer
 *            the XML stream the description is written to
 */
private void writeGroup(Context context, DSpaceObject relatedObject, Group group, XMLStreamWriter writer)
    throws XMLStreamException, PackageException
{
    //Translate the Group name for export.  This ensures that groups with Internal IDs in their names
    // (e.g. COLLECTION_1_ADMIN) are properly translated using the corresponding Handle or external identifier.
    String exportGroupName = PackageUtils.translateGroupNameForExport(context, group.getName());

    //If translated group name is returned as "null", this means the Group name
    // had an Internal Collection/Community ID embedded, which could not be
    // translated properly to a Handle.  We will NOT export these groups,
    // as they could cause conflicts or data integrity problems if they are
    // imported into another DSpace system.
    if(exportGroupName==null)
    {
        return;
    }

    writer.writeStartElement(GROUP);
    writer.writeAttribute(ID, String.valueOf(group.getID()));
    writer.writeAttribute(NAME, exportGroupName);

    // The Type attribute (ADMIN, SUBMIT, ...) is only written when the
    // group plays a known role for the related Community/Collection.
    String groupType = getGroupType(relatedObject, group);
    if(groupType!=null && !groupType.isEmpty())
    {
        writer.writeAttribute(TYPE, groupType);
    }

    //Add People to Group (if any belong to this group)
    if(group.getMembers().length>0)
    {
        writer.writeStartElement(MEMBERS);
        for (EPerson member : group.getMembers())
        {
            writer.writeEmptyElement(MEMBER);
            writer.writeAttribute(ID, String.valueOf(member.getID()));
            writer.writeAttribute(NAME, member.getName());
        }
        writer.writeEndElement();
    }

    //Add Groups as Member Groups (if any belong to this group)
    if(group.getMemberGroups().length>0)
    {
        writer.writeStartElement(MEMBER_GROUPS);
        for (Group member : group.getMemberGroups())
        {
            String exportMemberName = PackageUtils.translateGroupNameForExport(context, member.getName());
            //Only export member group if its name can be properly translated for export.  As noted above,
            // we don't want groups that are *unable* to be accurately translated causing issues on import.
            if(exportMemberName!=null)
            {
                writer.writeEmptyElement(MEMBER_GROUP);
                writer.writeAttribute(ID, String.valueOf(member.getID()));
                writer.writeAttribute(NAME, exportMemberName);
            }
        }
        writer.writeEndElement();
    }

    writer.writeEndElement();
}
/**
 * Return a Group Type string (see RoleDisseminator.GROUP_TYPE_* constants)
 * which describes the type of group and its relation to the given object.
 * <P>
 * As a basic example, if the Group is a Collection Administration group,
 * the Group Type string returned should be "ADMIN"
 * <P>
 * If type string cannot be determined, null is returned.
 *
 * @param dso
 *            the related DSpaceObject
 * @param group
 *            the group
 * @return a group type string or null
 */
private String getGroupType(DSpaceObject dso, Group group)
{
    if (dso == null || group == null)
    {
        return null;
    }

    switch (dso.getType())
    {
        case Constants.COMMUNITY:
            Community community = (Community) dso;
            // The only recognized role at Community level is ADMIN.
            if (group.equals(community.getAdministrators()))
            {
                return GROUP_TYPE_ADMIN;
            }
            break;

        case Constants.COLLECTION:
            Collection collection = (Collection) dso;
            // Test each known Collection role in turn.
            if (group.equals(collection.getAdministrators()))
            {
                return GROUP_TYPE_ADMIN;
            }
            if (group.equals(collection.getSubmitters()))
            {
                return GROUP_TYPE_SUBMIT;
            }
            if (group.equals(collection.getWorkflowGroup(1)))
            {
                return GROUP_TYPE_WORKFLOW_STEP_1;
            }
            if (group.equals(collection.getWorkflowGroup(2)))
            {
                return GROUP_TYPE_WORKFLOW_STEP_2;
            }
            if (group.equals(collection.getWorkflowGroup(3)))
            {
                return GROUP_TYPE_WORKFLOW_STEP_3;
            }
            break;

        default:
            break;
    }

    //by default, return null
    return null;
}
/**
 * Emit XML describing a single EPerson.
 *
 * @param eperson
 *            the EPerson to describe
 * @param writer
 *            the XML stream the description is written to
 * @param emitPassword
 *            do not export the password hash unless true
 */
private void writeEPerson(EPerson eperson, XMLStreamWriter writer,
        boolean emitPassword) throws XMLStreamException
{
    writer.writeStartElement(EPERSON);
    writer.writeAttribute(ID, String.valueOf(eperson.getID()));

    // NOTE(review): assumes getEmail() is non-null here; writeCharacters
    // on a null would fail -- confirm against EPerson invariants.
    writer.writeStartElement(EMAIL);
    writer.writeCharacters(eperson.getEmail());
    writer.writeEndElement();

    // Optional fields: only emitted when present.
    if(eperson.getNetid()!=null)
    {
        writer.writeStartElement(NETID);
        writer.writeCharacters(eperson.getNetid());
        writer.writeEndElement();
    }

    if(eperson.getFirstName()!=null)
    {
        writer.writeStartElement(FIRST_NAME);
        writer.writeCharacters(eperson.getFirstName());
        writer.writeEndElement();
    }

    if(eperson.getLastName()!=null)
    {
        writer.writeStartElement(LAST_NAME);
        writer.writeCharacters(eperson.getLastName());
        writer.writeEndElement();
    }

    if(eperson.getLanguage()!=null)
    {
        writer.writeStartElement(LANGUAGE);
        writer.writeCharacters(eperson.getLanguage());
        writer.writeEndElement();
    }

    if (emitPassword)
    {
        writer.writeStartElement(PASSWORD_HASH);
        writer.writeCharacters(eperson.getPasswordHash());
        writer.writeEndElement();
    }

    // Boolean flags are represented by presence/absence of empty elements.
    if (eperson.canLogIn())
    {
        writer.writeEmptyElement(CAN_LOGIN);
    }

    if (eperson.getRequireCertificate())
    {
        writer.writeEmptyElement(REQUIRE_CERTIFICATE);
    }

    if (eperson.getSelfRegistered())
    {
        writer.writeEmptyElement(SELF_REGISTERED);
    }

    writer.writeEndElement();
}
/**
 * Find all Groups associated with this DSpace Object.
 * <P>
 * If object is SITE, all groups are returned.
 * <P>
 * If object is COMMUNITY or COLLECTION, only groups associated with
 * those objects are returned (if any).
 * <P>
 * For all other objects, null is returned.
 *
 * @param context The DSpace context
 * @param object the DSpace object
 * @return array of all associated groups, or null when there are none
 */
private Group[] findAssociatedGroups(Context context, DSpaceObject object)
    throws SQLException
{
    if(object.getType()==Constants.SITE)
    {
        // @TODO FIXME -- if there was a way to ONLY export Groups which are NOT
        // associated with a Community or Collection, we should be doing that instead!
        return Group.findAll(context, Group.NAME);
    }
    else if(object.getType()==Constants.COMMUNITY)
    {
        Community community = (Community) object;
        ArrayList<Group> list = new ArrayList<Group>();

        //check for admin group
        if(community.getAdministrators()!=null)
        {
            list.add(community.getAdministrators());
        }

        // FINAL CATCH-ALL -> Find any other groups where name begins with "COMMUNITY_<ID>_"
        // (There should be none, but this code is here just in case)
        addWithoutDuplicates(list, Group.search(context, "COMMUNITY\\_" + community.getID() + "\\_"));

        return toArrayOrNull(list);
    }
    else if(object.getType()==Constants.COLLECTION)
    {
        Collection collection = (Collection) object;
        ArrayList<Group> list = new ArrayList<Group>();

        //check for admin group
        if(collection.getAdministrators()!=null)
        {
            list.add(collection.getAdministrators());
        }
        //check for submitters group
        if(collection.getSubmitters()!=null)
        {
            list.add(collection.getSubmitters());
        }
        //check for workflow step groups 1..3
        if(collection.getWorkflowGroup(1)!=null)
        {
            list.add(collection.getWorkflowGroup(1));
        }
        if(collection.getWorkflowGroup(2)!=null)
        {
            list.add(collection.getWorkflowGroup(2));
        }
        if(collection.getWorkflowGroup(3)!=null)
        {
            list.add(collection.getWorkflowGroup(3));
        }

        // FINAL CATCH-ALL -> Find any other groups where name begins with "COLLECTION_<ID>_"
        // (Necessary cause XMLUI allows you to generate a 'COLLECTION_<ID>_DEFAULT_READ' group)
        addWithoutDuplicates(list, Group.search(context, "COLLECTION\\_" + collection.getID() + "\\_"));

        return toArrayOrNull(list);
    }

    //by default, return nothing
    return null;
}

/**
 * Append each candidate group not already present in the list.
 * (Extracted: this merge loop was duplicated verbatim in the
 * COMMUNITY and COLLECTION branches above.)
 */
private static void addWithoutDuplicates(List<Group> list, Group[] candidates)
{
    for(Group g : candidates)
    {
        if(!list.contains(g))
        {
            list.add(g);
        }
    }
}

/**
 * Convert the list to an array, or return null when it is empty --
 * preserving the original method's "null means no groups" contract.
 */
private static Group[] toArrayOrNull(List<Group> list)
{
    if(list.isEmpty())
    {
        return null;
    }
    return list.toArray(new Group[list.size()]);
}
/**
* Find all EPeople associated with this DSpace Object.
* <P>
* If object is SITE, all people are returned.
* <P>
* For all other objects, null is returned.
*
* @param context The DSpace context
* @param object the DSpace object
* @return array of all associated EPerson objects
*/
private EPerson[] findAssociatedPeople(Context context, DSpaceObject object)
throws SQLException
{
if(object.getType()==Constants.SITE)
{
return EPerson.findAll(context, EPerson.EMAIL);
}
//by default, return nothing
return null;
}
/**
* Returns a user help string which should describe the
* additional valid command-line options that this packager
* implementation will accept when using the <code>-o</code> or
* <code>--option</code> flags with the Packager script.
*
* @return a string describing additional command-line options available
* with this packager
*/
@Override
public String getParameterHelp()
{
return "* passwords=[boolean] " +
"If true, user password hashes are also exported (so that they can be later restored). If false, user passwords are not exported. (Default is false)";
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Context;
/**
* Plugin Interface to produce Dissemination Information Package (DIP)
* of a DSpace object.
* <p>
* An implementation translates DSpace objects to some external
* "package" format. A package is a single data stream (or file)
* containing enough information to reconstruct the object. It can be
* anything from an archive like a Zip file with a manifest and metadata,
* to a simple manifest containing external references to the content,
* to a self-contained file such as a PDF.
* <p>
* A DIP implementation has two methods: <code>disseminate</code>
* to produce the package itself, and <code>getMIMEType</code> to
* identify its Internet format type (needed when transmitting the package
* over HTTP).
* <p>
* Both of these methods are given an attribute-values
* list of "parameters", which may modify their actions. Since the
* format output by <code>disseminate</code> may be affected by
* parameters, it is given to the <code>getMIMEType</code> method as well.
* The parameters list is a generalized mechanism to pass parameters
* from the package requestor to the packager, since different packagers will
* understand different sets of parameters.
*
* @author Larry Stone
* @version $Revision: 5844 $
* @see PackageParameters
*/
public interface PackageDisseminator
{
    /**
     * Export the object (Item, Collection, or Community) as a
     * "package" written to the indicated File. Package is any serialized
     * representation of the item, at the discretion of the implementing
     * class. It does not have to include content bitstreams.
     * <p>
     * Use the <code>params</code> parameter list to adjust the way the
     * package is made, e.g. including a "<code>metadataOnly</code>"
     * parameter might make the package a bare manifest in XML
     * instead of a Zip file including manifest and contents.
     * <p>
     * Throws an exception if the chosen object is not acceptable or there is
     * a failure creating the package.
     *
     * @param context DSpace context.
     * @param object DSpace object (item, collection, etc)
     * @param params Properties-style list of options specific to this packager
     * @param pkgFile File where export package should be written
     * @throws PackageException if package cannot be created or there is
     *  a fatal error in creating it.
     * @throws CrosswalkException if there is a failure translating metadata.
     * @throws AuthorizeException if the caller lacks READ access to the object.
     * @throws SQLException if a database error occurs.
     * @throws IOException if the package file cannot be written.
     */
    void disseminate(Context context, DSpaceObject object,
                     PackageParameters params, File pkgFile)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException;

    /**
     * Recursively export one or more DSpace Objects as a series of packages.
     * This method will export the given DSpace Object as well as all referenced
     * DSpaceObjects (e.g. child objects) into a series of packages. The
     * initial object is exported to the location specified by the pkgFile.
     * All other generated packages are recursively exported to the same directory.
     * <p>
     * Package is any serialized representation of the item, at the discretion
     * of the implementing class. It does not have to include content bitstreams.
     * <p>
     * Use the <code>params</code> parameter list to adjust the way the
     * package is made, e.g. including a "<code>metadataOnly</code>"
     * parameter might make the package a bare manifest in XML
     * instead of a Zip file including manifest and contents.
     * <p>
     * Throws an exception if the initial object is not acceptable or there is
     * a failure creating the packages.
     * <p>
     * A packager <em>may</em> choose not to implement <code>disseminateAll</code>,
     * or simply forward the call to <code>disseminate</code> if it is unable to
     * support recursive dissemination.
     *
     * @param context DSpace context.
     * @param dso initial DSpace object
     * @param params Properties-style list of options specific to this packager
     * @param pkgFile File where initial package should be written. All other
     *          packages will be written to the same directory as this File.
     * @return List of all package Files which were successfully disseminated
     * @throws PackageException if a package cannot be created or there is
     *  a fatal error in creating it.
     * @throws CrosswalkException if there is a failure translating metadata.
     * @throws AuthorizeException if the caller lacks READ access to an object.
     * @throws SQLException if a database error occurs.
     * @throws IOException if a package file cannot be written.
     */
    List<File> disseminateAll(Context context, DSpaceObject dso,
                     PackageParameters params, File pkgFile)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException;

    /**
     * Identifies the MIME-type of this package, e.g. <code>"application/zip"</code>.
     * Required when sending the package via HTTP, to
     * provide the Content-Type header.
     *
     * @param params the parameters that will be (or were) given to
     *          <code>disseminate</code>, since they may affect the output format
     * @return the MIME type (content-type header) of the package to be returned
     */
    String getMIMEType(PackageParameters params);

    /**
     * Returns a user help string which should describe the
     * additional valid command-line options that this packager
     * implementation will accept when using the <code>-o</code> or
     * <code>--option</code> flags with the Packager script.
     *
     * @return a string describing additional command-line options available
     * with this packager
     */
    String getParameterHelp();
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.ArrayList;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommons;
import edu.harvard.hul.ois.mets.Agent;
import edu.harvard.hul.ois.mets.Mets;
import edu.harvard.hul.ois.mets.MetsHdr;
import edu.harvard.hul.ois.mets.Role;
import edu.harvard.hul.ois.mets.helper.MetsException;
import edu.harvard.hul.ois.mets.Type;
import edu.harvard.hul.ois.mets.Name;
import edu.harvard.hul.ois.mets.helper.PCData;
/**
* Packager plugin to produce a
* METS (Metadata Encoding & Transmission Standard) package
* that is accepted as a DSpace METS SIP (Submission Information Package).
* See <a href="http://www.loc.gov/standards/mets/">http://www.loc.gov/standards/mets/</a>
* for more information on METS.
* <p>
* This class does not produce a true DSpace DIP, because there was no
* DIP standard when it was implemented. It does contain some features
* beyond the requirements of a SIP (e.g. deposit licenses), anticipating
* the DIP specification.
* <p>
* DSpaceMETSDisseminator was intended to be an useful example of a packager
* plugin, and a way to create packages acceptable to the METS SIP importer.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class DSpaceMETSDisseminator
    extends AbstractMETSDisseminator
{
    /**
     * Identifier for the package we produce, i.e. DSpace METS SIP
     * Profile. Though not strictly true, there is no DIP standard yet
     * so it's the most meaningful label we can apply.
     */
    private static final String PROFILE_LABEL = "DSpace METS SIP Profile 1.0";

    // MDTYPE value for deposit license -- "magic string"
    // NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
    private static final String DSPACE_DEPOSIT_LICENSE_MDTYPE = "DSpaceDepositLicense:DSPACE_DEPLICENSE";

    // MDTYPE value for CC license in RDF -- "magic string"
    // NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
    private static final String CREATIVE_COMMONS_RDF_MDTYPE = "CreativeCommonsRDF:DSPACE_CCRDF";

    // MDTYPE value for CC license in Text -- "magic string"
    // NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
    private static final String CREATIVE_COMMONS_TEXT_MDTYPE = "CreativeCommonsText:DSPACE_CCTXT";

    /**
     * Return identifier string for the profile this produces.
     *
     * @return string name of profile.
     */
    @Override
    public String getProfile()
    {
        return PROFILE_LABEL;
    }

    /**
     * Returns name of METS fileGrp corresponding to a DSpace bundle name.
     * They are mostly the same except for bundle "ORIGINAL" maps to "CONTENT".
     * Don't worry about the metadata bundles since they are not
     * packaged as fileGrps, but in *mdSecs.
     * @param bname name of DSpace bundle.
     * @return string name of fileGrp
     */
    @Override
    public String bundleToFileGrp(String bname)
    {
        if (bname.equals("ORIGINAL"))
        {
            return "CONTENT";
        }
        else
        {
            return bname;
        }
    }

    /**
     * Create metsHdr element - separate so subclasses can override.
     *
     * @param context DSpace context
     * @param dso the object being packaged
     * @param params packager parameters
     * @return new metsHdr element naming this DSpace instance as custodian
     */
    @Override
    public MetsHdr makeMetsHdr(Context context, DSpaceObject dso,
                               PackageParameters params)
    {
        MetsHdr metsHdr = new MetsHdr();

        // FIXME: CREATEDATE is now: maybe should be item create?
        metsHdr.setCREATEDATE(new Date());

        // Agent: this DSpace instance (dspace.name) acts as custodian.
        Agent agent = new Agent();
        agent.setROLE(Role.CUSTODIAN);
        agent.setTYPE(Type.ORGANIZATION);
        Name name = new Name();
        name.getContent()
                .add(new PCData(ConfigurationManager
                                .getProperty("dspace.name")));
        agent.getContent().add(name);
        metsHdr.getContent().add(agent);

        return metsHdr;
    }

    /**
     * Get DMD choice for Item. It defaults to MODS, but is overridden
     * by the package parameters if they contain any "dmd" keys. The
     * params may contain one or more values for "dmd"; each of those is
     * the name of a crosswalk plugin, optionally followed by colon and
     * its METS MDTYPE name.
     */
    @Override
    public String[] getDmdTypes(Context context, DSpaceObject dso, PackageParameters params)
        throws SQLException, IOException, AuthorizeException
    {
        // XXX FIXME maybe let dmd choices be configured in DSpace config?

        // "dmd" params override the default; otherwise fall back to MODS.
        String[] result = null;
        if (params != null)
        {
            result = params.getProperties("dmd");
        }
        if (result == null || result.length == 0)
        {
            result = new String[] { "MODS" };
        }
        return result;
    }

    /**
     * Get name of technical metadata crosswalk for Bitstreams.
     * Default is PREMIS. This is both the name of the crosswalk plugin
     * and the METS MDTYPE.
     */
    @Override
    public String[] getTechMdTypes(Context context, DSpaceObject dso, PackageParameters params)
        throws SQLException, IOException, AuthorizeException
    {
        // Only Bitstreams get technical metadata; all other types get none.
        if (dso.getType() == Constants.BITSTREAM)
        {
            return new String[] { "PREMIS" };
        }
        else
        {
            return new String[0];
        }
    }

    /** This profile adds no sourceMD sections. */
    @Override
    public String[] getSourceMdTypes(Context context, DSpaceObject dso, PackageParameters params)
        throws SQLException, IOException, AuthorizeException
    {
        return new String[0];
    }

    /** This profile adds no digiprovMD sections. */
    @Override
    public String[] getDigiprovMdTypes(Context context, DSpaceObject dso, PackageParameters params)
        throws SQLException, IOException, AuthorizeException
    {
        return new String[0];
    }

    /**
     * Build the in-package filename for a bitstream:
     * "bitstream_<ID>" plus the format's first file extension, if any.
     */
    @Override
    public String makeBitstreamURL(Bitstream bitstream, PackageParameters params)
    {
        String base = "bitstream_" + String.valueOf(bitstream.getID());
        String[] ext = bitstream.getFormat().getExtensions();
        return (ext.length > 0) ? base + "." + ext[0] : base;
    }

    /**
     * Add rights MD (licenses) for DSpace item. These
     * may include a deposit license, and Creative Commons.
     */
    @Override
    public String[] getRightsMdTypes(Context context, DSpaceObject dso, PackageParameters params)
        throws SQLException, IOException, AuthorizeException
    {
        List<String> result = new ArrayList<String>();

        if (dso.getType() == Constants.ITEM)
        {
            Item item = (Item)dso;
            if (PackageUtils.findDepositLicense(context, item) != null)
            {
                result.add(DSPACE_DEPOSIT_LICENSE_MDTYPE);
            }

            // Prefer the RDF form of the CC license; fall back to the text form.
            if (CreativeCommons.getLicenseRdfBitstream(item) != null)
            {
                result.add(CREATIVE_COMMONS_RDF_MDTYPE);
            }
            else if (CreativeCommons.getLicenseTextBitstream(item) != null)
            {
                result.add(CREATIVE_COMMONS_TEXT_MDTYPE);
            }
        }

        return result.toArray(new String[result.size()]);
    }

    // This is where we'd elaborate on the default structMap; nothing to add, yet.
    @Override
    public void addStructMap(Context context, DSpaceObject dso,
                             PackageParameters params, Mets mets)
        throws SQLException, IOException, AuthorizeException, MetsException
    {
    }

    // only exclude metadata bundles from package.
    @Override
    public boolean includeBundle(Bundle bundle)
    {
        return ! PackageUtils.isMetaInfoBundle(bundle);
    }

    /**
     * Returns a user help string which should describe the
     * additional valid command-line options that this packager
     * implementation will accept when using the <code>-o</code> or
     * <code>--option</code> flags with the Packager script.
     *
     * @return a string describing additional command-line options available
     * with this packager
     */
    @Override
    public String getParameterHelp()
    {
        String parentHelp = super.getParameterHelp();

        //Return superclass help info, plus the extra parameter/option that this class supports
        return parentHelp +
                "\n\n" +
                "* dmd=[dmdSecType]      " +
                "(Repeatable) Type(s) of the METS <dmdSec> which should be created in the dissemination package (defaults to MODS)";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.apache.log4j.Logger;
/**
* This is a superclass for exceptions representing a failure when
* importing or exporting a package. E.g., unacceptable package format
* or contents. Implementations should throw one of the more specific
* exceptions. This class is intended for declarations and catch clauses.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class PackageException extends Exception
{
    /**
     * Create a new exception with no message.
     */
    public PackageException()
    {
        super();
    }

    /**
     * Create a new exception with the given message.
     * @param message - message text.
     */
    public PackageException(String message)
    {
        super(message);
    }

    /**
     * Create a new exception wrapping the given underlying cause.
     * @param cause - exception specifying the cause of this failure.
     */
    public PackageException(Throwable cause)
    {
        super(cause);
    }

    /**
     * Create a new exception wrapping it around another exception.
     * @param message - message text.
     * @param cause - exception specifying the cause of this failure.
     */
    public PackageException(String message, Throwable cause)
    {
        super(message, cause);
    }

    /**
     * Write details of this exception to the indicated logger.
     * Dump a stack trace to the log to aid in debugging.
     * @param log logger to receive the error report.
     */
    public void log(Logger log)
    {
        log.error(toString());
        Throwable cause = getCause();
        if (cause != null)
        {
            // Unwrap one level of nesting, so the trace shows the more
            // specific underlying failure rather than a wrapper exception.
            if (cause.getCause() != null)
            {
                cause = cause.getCause();
            }
            StringWriter sw = new StringWriter();
            cause.printStackTrace(new PrintWriter(sw));
            log.error(sw.toString());
        }
    }

    /**
     * @return class name and message, followed by the chained cause when present.
     */
    @Override
    public String toString()
    {
        String base = getClass().getName() + ": " + getMessage();
        return (getCause() == null) ? base :
                base + ", Reason: "+getCause().toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
/**
* This represents a failure when importing or exporting a package
* caused by invalid unacceptable package format or contents; for
* example, missing files that were mentioned in the manifest, or
* extra files not in manifest, or lack of a manifest.
* <p>
* When throwing a PackageValidationException, be sure the message
* includes enough specific information to let the end user diagnose
* the problem, i.e. what files appear to be missing from the manifest
* or package, or the details of a checksum error on a file.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class PackageValidationException extends PackageException
{
    /**
     * Create a new exception with the given message.
     * @param message - diagnostic message.
     */
    public PackageValidationException(String message)
    {
        super(message);
    }
    /**
     * Create a new exception wrapping it around another exception.
     * @param exception - exception specifying the cause of this failure.
     */
    public PackageValidationException(Exception exception)
    {
        super(exception);
    }
    /**
     * Create a new exception with a diagnostic message, wrapping it
     * around another exception which is recorded as the cause.
     * @param message - diagnostic message.
     * @param exception - exception specifying the cause of this failure.
     */
    public PackageValidationException(String message, Exception exception)
    {
        super(message, exception);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import edu.harvard.hul.ois.mets.AmdSec;
import edu.harvard.hul.ois.mets.BinData;
import edu.harvard.hul.ois.mets.Checksumtype;
import edu.harvard.hul.ois.mets.Div;
import edu.harvard.hul.ois.mets.DmdSec;
import edu.harvard.hul.ois.mets.MdRef;
import edu.harvard.hul.ois.mets.FLocat;
import edu.harvard.hul.ois.mets.FileGrp;
import edu.harvard.hul.ois.mets.FileSec;
import edu.harvard.hul.ois.mets.Fptr;
import edu.harvard.hul.ois.mets.Mptr;
import edu.harvard.hul.ois.mets.Loctype;
import edu.harvard.hul.ois.mets.MdWrap;
import edu.harvard.hul.ois.mets.Mdtype;
import edu.harvard.hul.ois.mets.Mets;
import edu.harvard.hul.ois.mets.MetsHdr;
import edu.harvard.hul.ois.mets.StructMap;
import edu.harvard.hul.ois.mets.TechMD;
import edu.harvard.hul.ois.mets.SourceMD;
import edu.harvard.hul.ois.mets.DigiprovMD;
import edu.harvard.hul.ois.mets.RightsMD;
import edu.harvard.hul.ois.mets.helper.MdSec;
import edu.harvard.hul.ois.mets.XmlData;
import edu.harvard.hul.ois.mets.helper.Base64;
import edu.harvard.hul.ois.mets.helper.MetsElement;
import edu.harvard.hul.ois.mets.helper.MetsException;
import edu.harvard.hul.ois.mets.helper.MetsValidator;
import edu.harvard.hul.ois.mets.helper.MetsWriter;
import edu.harvard.hul.ois.mets.helper.PreformedXML;
import java.io.File;
import java.io.FileOutputStream;
import org.apache.log4j.Logger;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Community;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.crosswalk.AbstractPackagerWrappingCrosswalk;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.CrosswalkObjectNotSupported;
import org.dspace.content.crosswalk.DisseminationCrosswalk;
import org.dspace.content.crosswalk.StreamDisseminationCrosswalk;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.core.Utils;
import org.dspace.license.CreativeCommons;
import org.jdom.Element;
import org.jdom.Namespace;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
/**
* Base class for disseminator of
* METS (Metadata Encoding & Transmission Standard) Package.<br>
* See <a href="http://www.loc.gov/standards/mets/">http://www.loc.gov/standards/mets/</a>
* <p>
* This is a generic packager framework intended to be subclassed to create
* packagers for more specific METS "profiles". METS is an
* abstract and flexible framework that can encompass many
* different kinds of metadata and inner package structures.
* <p>
* <b>Package Parameters:</b><br>
* <ul>
* <li><code>manifestOnly</code> -- if true, generate a standalone XML
* document of the METS manifest instead of a complete package. Any
* other metadata (such as licenses) will be encoded inline.
* Default is <code>false</code>.</li>
*
* <li><code>unauthorized</code> -- this determines what is done when the
* packager encounters a Bundle or Bitstream it is not authorized to
* read. By default, it just quits with an AuthorizeException.
* If this option is present, it must be one of the following values:
* <ul>
* <li><code>skip</code> -- simply exclude unreadable content from package.</li>
* <li><code>zero</code> -- include unreadable bitstreams as 0-length files;
* unreadable Bundles will still cause authorize errors.</li></ul></li>
* </ul>
*
* @author Larry Stone
* @author Robert Tansley
* @author Tim Donohue
* @version $Revision: 5932 $
*/
public abstract class AbstractMETSDisseminator
extends AbstractPackageDisseminator
{
/** log4j category */
private static Logger log = Logger.getLogger(AbstractMETSDisseminator.class);
// JDOM xml output writer - indented format for readability.
private static XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
// for gensym()
private int idCounter = 1;
/**
* Default date/time (in milliseconds since epoch) to set for Zip Entries
* for DSpace Objects which don't have a Last Modified date. If we don't
* set our own date/time, then it will default to current system date/time.
* This is less than ideal, as it causes the md5 checksum of Zip file to
* change whenever Zip is regenerated (even if compressed files are unchanged)
* 1036368000 seconds * 1000 = Nov 4, 2002 GMT (the date DSpace 1.0 was released)
*/
private static final int DEFAULT_MODIFIED_DATE = 1036368000 * 1000;
/**
* Suffix for Template objects (e.g. Item Templates)
*/
protected static final String TEMPLATE_TYPE_SUFFIX = " Template";
/**
* Wrapper for a table of streams to add to the package, such as
* mdRef'd metadata. Key is relative pathname of file, value is
* <code>InputStream</code> with contents to put in it. Some
* superclasses will put streams in this table when adding an mdRef
* element to e.g. a rightsMD segment.
*/
protected static class MdStreamCache
{
private Map<MdRef,InputStream> extraFiles = new HashMap<MdRef,InputStream>();
public void addStream(MdRef key, InputStream md)
{
extraFiles.put(key, md);
}
public Map<MdRef,InputStream> getMap()
{
return extraFiles;
}
public void close()
throws IOException
{
for (InputStream is : extraFiles.values())
{
is.close();
}
}
}
/**
* Make a new unique ID symbol with specified prefix.
* @param prefix the prefix of the identifier, constrained to XML ID schema
* @return a new string identifier unique in this session (instance).
*/
protected synchronized String gensym(String prefix)
{
return prefix + "_" + String.valueOf(idCounter++);
}
@Override
public String getMIMEType(PackageParameters params)
{
return (params != null &&
(params.getBooleanProperty("manifestOnly", false))) ?
"text/xml" : "application/zip";
}
/**
* Export the object (Item, Collection, or Community) as a
* "package" on the indicated OutputStream. Package is any serialized
* representation of the item, at the discretion of the implementing
* class. It does not have to include content bitstreams.
* <p>
* Use the <code>params</code> parameter list to adjust the way the
* package is made, e.g. including a "<code>metadataOnly</code>"
* parameter might make the package a bare manifest in XML
* instead of a Zip file including manifest and contents.
* <p>
* Throws an exception of the chosen object is not acceptable or there is
* a failure creating the package.
*
* @param context DSpace context.
* @param object DSpace object (item, collection, etc)
* @param params Properties-style list of options specific to this packager
* @param pkgFile File where export package should be written
* @throws PackageValidationException if package cannot be created or there is
* a fatal error in creating it.
*/
@Override
public void disseminate(Context context, DSpaceObject dso,
PackageParameters params, File pkgFile)
throws PackageValidationException, CrosswalkException, AuthorizeException, SQLException, IOException
{
FileOutputStream outStream = null;
try
{
//Make sure our package file exists
if(!pkgFile.exists())
{
PackageUtils.createFile(pkgFile);
}
//Open up an output stream to write to package file
outStream = new FileOutputStream(pkgFile);
// Generate a true manifest-only "package", no external files/data & no need to zip up
if (params != null && params.getBooleanProperty("manifestOnly", false))
{
Mets manifest = makeManifest(context, dso, params, null);
//only validate METS if specified (default = true)
if(params.getBooleanProperty("validate", true))
{
manifest.validate(new MetsValidator());
}
manifest.write(new MetsWriter(outStream));
}
else
{
// make a Zip-based package
writeZipPackage(context, dso, params, outStream);
}//end if/else
}//end try
catch (MetsException e)
{
String errorMsg = "Error exporting METS for DSpace Object, type="
+ Constants.typeText[dso.getType()] + ", handle="
+ dso.getHandle() + ", dbID="
+ String.valueOf(dso.getID());
// We don't pass up a MetsException, so callers don't need to
// know the details of the METS toolkit
log.error(errorMsg,e);
throw new PackageValidationException(errorMsg, e);
}
finally
{
//Close stream / stop writing to file
if (outStream != null)
{
outStream.close();
}
}
}
/**
* Make a Zipped up METS package for the given DSpace Object
*
* @param context DSpace Context
* @param dso The DSpace Object
* @param params Parameters to the Packager script
* @param pkg Package output stream
* @throws PackageValidationException
* @throws AuthorizeException
* @throws SQLException
* @throws IOException
*/
protected void writeZipPackage(Context context, DSpaceObject dso,
PackageParameters params, OutputStream pkg)
throws PackageValidationException, CrosswalkException, MetsException,
AuthorizeException, SQLException, IOException
{
long lmTime = 0;
if (dso.getType() == Constants.ITEM)
{
lmTime = ((Item) dso).getLastModified().getTime();
}
// map of extra streams to put in Zip (these are located during makeManifest())
MdStreamCache extraStreams = new MdStreamCache();
ZipOutputStream zip = new ZipOutputStream(pkg);
zip.setComment("METS archive created by DSpace " + Util.getSourceVersion());
Mets manifest = makeManifest(context, dso, params, extraStreams);
// copy extra (metadata, license, etc) bitstreams into zip, update manifest
if (extraStreams != null)
{
for (Map.Entry<MdRef, InputStream> ment : extraStreams.getMap().entrySet())
{
MdRef ref = ment.getKey();
// Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be
// included in our Package (if their bundles are already included in the <filSec> section of manifest).
// So, do a special check to see if we need to link up extra License <mdRef> entries to the bitstream in the <fileSec>.
// (this ensures that we don't accidentally add the same License file to our package twice)
linkLicenseRefsToBitstreams(context, params, dso, ref);
//If this 'mdRef' is NOT already linked up to a file in the package,
// then its file must be missing. So, we are going to add a new
// file to the Zip package.
if(ref.getXlinkHref()==null || ref.getXlinkHref().isEmpty())
{
InputStream is = ment.getValue();
// create a hopefully unique filename within the Zip
String fname = gensym("metadata");
// link up this 'mdRef' to point to that file
ref.setXlinkHref(fname);
if (log.isDebugEnabled())
{
log.debug("Writing EXTRA stream to Zip: " + fname);
}
//actually add the file to the Zip package
ZipEntry ze = new ZipEntry(fname);
if (lmTime != 0)
{
ze.setTime(lmTime);
}
else //Set a default modified date so that checksum of Zip doesn't change if Zip contents are unchanged
{
ze.setTime(DEFAULT_MODIFIED_DATE);
}
zip.putNextEntry(ze);
Utils.copy(is, zip);
zip.closeEntry();
is.close();
}
}
}
// write manifest after metadata.
ZipEntry me = new ZipEntry(METSManifest.MANIFEST_FILE);
if (lmTime != 0)
{
me.setTime(lmTime);
}
else //Set a default modified date so that checksum of Zip doesn't change if Zip contents are unchanged
{
me.setTime(DEFAULT_MODIFIED_DATE);
}
zip.putNextEntry(me);
// can only validate now after fixing up extraStreams
// note: only validate METS if specified (default = true)
if(params.getBooleanProperty("validate", true))
{
manifest.validate(new MetsValidator());
}
manifest.write(new MetsWriter(zip));
zip.closeEntry();
//write any bitstreams associated with DSpace object to zip package
addBitstreamsToZip(context, dso, params, zip);
zip.close();
}
/**
 * Add Bitstreams associated with a given DSpace Object into an
 * existing ZipOutputStream.
 * <P>
 * For Items, every bitstream in each included (non-metadata) bundle is
 * written, subject to READ authorization. For Collections and Communities,
 * only the logo bitstream (if any) is written.
 *
 * @param context DSpace Context
 * @param dso The DSpace Object
 * @param params Parameters to the Packager script (may be null). The
 *               "unauthorized" property controls unreadable content:
 *               "skip" omits it, "zero" writes a zero-length entry,
 *               anything else raises an AuthorizeException.
 * @param zip Zip output
 * @throws AuthorizeException if content is unreadable and "unauthorized"
 *               is neither "skip" nor "zero"
 * @throws SQLException on database error
 * @throws IOException on error writing to the Zip stream
 */
protected void addBitstreamsToZip(Context context, DSpaceObject dso,
        PackageParameters params, ZipOutputStream zip)
    throws PackageValidationException, AuthorizeException, SQLException,
    IOException
{
    // how to handle unauthorized bundle/bitstream:
    String unauth = (params == null) ? null : params.getProperty("unauthorized");

    // copy all non-meta bitstreams into zip
    if (dso.getType() == Constants.ITEM)
    {
        Item item = (Item) dso;

        //get last modified time
        long lmTime = item.getLastModified().getTime();

        Bundle bundles[] = item.getBundles();
        for (int i = 0; i < bundles.length; i++)
        {
            if (!includeBundle(bundles[i]))
            {
                continue;
            }

            // unauthorized bundle?
            if (!AuthorizeManager.authorizeActionBoolean(context,
                        bundles[i], Constants.READ))
            {
                if (unauth != null &&
                    (unauth.equalsIgnoreCase("skip")))
                {
                    log.warn("Skipping Bundle[\"" + bundles[i].getName() + "\"] because you are not authorized to read it.");
                    continue;
                }
                else
                {
                    throw new AuthorizeException("Not authorized to read Bundle named \"" + bundles[i].getName() + "\"");
                }
            }

            Bitstream[] bitstreams = bundles[i].getBitstreams();
            for (int k = 0; k < bitstreams.length; k++)
            {
                boolean auth = AuthorizeManager.authorizeActionBoolean(context,
                        bitstreams[k], Constants.READ);
                if (auth ||
                    (unauth != null && unauth.equalsIgnoreCase("zero")))
                {
                    String zname = makeBitstreamURL(bitstreams[k], params);
                    ZipEntry ze = new ZipEntry(zname);
                    if (log.isDebugEnabled())
                    {
                        log.debug(new StringBuilder().append("Writing CONTENT stream of bitstream(").append(bitstreams[k].getID()).append(") to Zip: ").append(zname).append(", size=").append(bitstreams[k].getSize()).toString());
                    }
                    if (lmTime != 0)
                    {
                        ze.setTime(lmTime);
                    }
                    else //Set a default modified date so that checksum of Zip doesn't change if Zip contents are unchanged
                    {
                        ze.setTime(DEFAULT_MODIFIED_DATE);
                    }
                    ze.setSize(auth ? bitstreams[k].getSize() : 0);
                    zip.putNextEntry(ze);
                    if (auth)
                    {
                        // FIX: close the bitstream's InputStream even when the
                        // copy fails part-way (previously leaked on exception).
                        InputStream input = bitstreams[k].retrieve();
                        try
                        {
                            Utils.copy(input, zip);
                        }
                        finally
                        {
                            input.close();
                        }
                    }
                    else
                    {
                        log.warn("Adding zero-length file for Bitstream, SID="
                                + String.valueOf(bitstreams[k].getSequenceID())
                                + ", not authorized for READ.");
                    }
                    zip.closeEntry();
                }
                else if (unauth != null &&
                         unauth.equalsIgnoreCase("skip"))
                {
                    log.warn("Skipping Bitstream, SID=" + String.valueOf(bitstreams[k].getSequenceID()) + ", not authorized for READ.");
                }
                else
                {
                    throw new AuthorizeException("Not authorized to read Bitstream, SID=" + String.valueOf(bitstreams[k].getSequenceID()));
                }
            }
        }
    }
    // Coll, Comm just add logo bitstream to content if there is one
    else if (dso.getType() == Constants.COLLECTION ||
             dso.getType() == Constants.COMMUNITY)
    {
        Bitstream logoBs = dso.getType() == Constants.COLLECTION ?
                               ((Collection) dso).getLogo() :
                               ((Community) dso).getLogo();
        if (logoBs != null)
        {
            String zname = makeBitstreamURL(logoBs, params);
            ZipEntry ze = new ZipEntry(zname);
            if (log.isDebugEnabled())
            {
                log.debug("Writing CONTENT stream of bitstream(" + String.valueOf(logoBs.getID()) + ") to Zip: " + zname + ", size=" + String.valueOf(logoBs.getSize()));
            }
            ze.setSize(logoBs.getSize());
            //Set a default modified date so that checksum of Zip doesn't change if Zip contents are unchanged
            ze.setTime(DEFAULT_MODIFIED_DATE);
            zip.putNextEntry(ze);
            // FIX: the logo InputStream was never closed before (resource leak).
            InputStream logoIn = logoBs.retrieve();
            try
            {
                Utils.copy(logoIn, zip);
            }
            finally
            {
                logoIn.close();
            }
            zip.closeEntry();
        }
    }
}
// Assign the MDTYPE attribute of an <mdWrap>. When the requested type is
// not in the METS MDTYPE vocabulary (Mdtype.parse() throws MetsException),
// fall back to MDTYPE="OTHER" and record the raw name in OTHERMDTYPE.
protected void setMdType(MdWrap mdWrap, String mdtype)
{
    Mdtype parsed;
    try
    {
        parsed = Mdtype.parse(mdtype);
    }
    catch (MetsException notInVocabulary)
    {
        // Unknown type: preserve the name via OTHERMDTYPE instead.
        mdWrap.setMDTYPE(Mdtype.OTHER);
        mdWrap.setOTHERMDTYPE(mdtype);
        return;
    }
    mdWrap.setMDTYPE(parsed);
}
// Assign the MDTYPE attribute of an <mdRef>. When the requested type is
// not in the METS MDTYPE vocabulary (Mdtype.parse() throws MetsException),
// fall back to MDTYPE="OTHER" and record the raw name in OTHERMDTYPE.
protected void setMdType(MdRef mdRef, String mdtype)
{
    Mdtype parsed;
    try
    {
        parsed = Mdtype.parse(mdtype);
    }
    catch (MetsException notInVocabulary)
    {
        // Unknown type: preserve the name via OTHERMDTYPE instead.
        mdRef.setMDTYPE(Mdtype.OTHER);
        mdRef.setOTHERMDTYPE(mdtype);
        return;
    }
    mdRef.setMDTYPE(parsed);
}
/**
 * Create an element wrapped around a metadata reference (either mdWrap
 * or mdRef); i.e. dmdSec, techMd, sourceMd, etc. Checks for
 * XML-DOM oriented crosswalk first, then if not found looks for
 * stream crosswalk of the same name.
 *
 * @param context DSpace Context
 * @param dso DSpace Object we are generating METS manifest for
 * @param mdSecClass class of mdSec (TechMD, RightsMD, DigiProvMD, etc)
 * @param typeSpec Type of metadata going into this mdSec (e.g. MODS, DC, PREMIS, etc)
 * @param params the PackageParameters
 * @param extraStreams list of extra files which need to be added to final dissemination package
 *
 * @return mdSec element or null if xwalk returns empty results.
 *
 * @throws SQLException on database error
 * @throws PackageValidationException if the mdSec cannot be instantiated or no matching crosswalk plugin exists
 * @throws CrosswalkException on crosswalk failure
 * @throws IOException on I/O error
 * @throws AuthorizeException on authorization failure during crosswalk
 */
protected MdSec makeMdSec(Context context, DSpaceObject dso, Class mdSecClass,
                          String typeSpec, PackageParameters params,
                          MdStreamCache extraStreams)
    throws SQLException, PackageValidationException, CrosswalkException,
    IOException, AuthorizeException
{
    try
    {
        //create our metadata element (dmdSec, techMd, sourceMd, rightsMD etc.)
        // NOTE: requires mdSecClass to have a public no-arg constructor;
        // instantiation failures are converted to PackageValidationException below.
        MdSec mdSec = (MdSec) mdSecClass.newInstance();
        mdSec.setID(gensym(mdSec.getLocalName()));
        String parts[] = typeSpec.split(":", 2);
        String xwalkName, metsName;

        //determine the name of the crosswalk to use to generate metadata
        // for dmdSecs this is the part *after* the colon in the 'type' (see getDmdTypes())
        // for all other mdSecs this is usually just corresponds to type name.
        if (parts.length > 1)
        {
            metsName = parts[0];
            xwalkName = parts[1];
        }
        else
        {
            metsName = typeSpec;
            xwalkName = typeSpec;
        }

        // First, check to see if the crosswalk we are using is a normal DisseminationCrosswalk
        boolean xwalkFound = PluginManager.hasNamedPlugin(DisseminationCrosswalk.class, xwalkName);

        if (xwalkFound)
        {
            // Find the crosswalk we will be using to generate the metadata for this mdSec
            DisseminationCrosswalk xwalk = (DisseminationCrosswalk)
                PluginManager.getNamedPlugin(DisseminationCrosswalk.class, xwalkName);

            if (xwalk.canDisseminate(dso))
            {
                // Check if our Crosswalk actually wraps another Packager Plugin
                if (xwalk instanceof AbstractPackagerWrappingCrosswalk)
                {
                    // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                    // (which essentially allow us to customize the output of the crosswalk)
                    AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) xwalk;
                    wrapper.setPackagingParameters(params);
                }

                //For a normal DisseminationCrosswalk, we will be expecting an XML (DOM) based result.
                // So, we are going to wrap this XML result in an <mdWrap> element
                MdWrap mdWrap = new MdWrap();
                setMdType(mdWrap, metsName);
                XmlData xmlData = new XmlData();
                // crosswalkToMetsElement() returns null when the crosswalk
                // produced no output; in that case the whole mdSec is omitted.
                if (crosswalkToMetsElement(xwalk, dso, xmlData) != null)
                {
                    mdWrap.getContent().add(xmlData);
                    mdSec.getContent().add(mdWrap);
                    return mdSec;
                }
                else
                {
                    return null;
                }
            }
            else
            {
                // crosswalk cannot handle this object type -- omit the mdSec.
                return null;
            }
        }
        // If we didn't find the correct crosswalk, we will check to see if this is
        // a StreamDisseminationCrosswalk -- a Stream crosswalk disseminates to an OutputStream
        else
        {
            StreamDisseminationCrosswalk sxwalk = (StreamDisseminationCrosswalk)
                PluginManager.getNamedPlugin(StreamDisseminationCrosswalk.class, xwalkName);
            if (sxwalk != null)
            {
                if (sxwalk.canDisseminate(context, dso))
                {
                    // Check if our Crosswalk actually wraps another Packager Plugin
                    if (sxwalk instanceof AbstractPackagerWrappingCrosswalk)
                    {
                        // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                        // (which essentially allow us to customize the output of the crosswalk)
                        AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) sxwalk;
                        wrapper.setPackagingParameters(params);
                    }

                    // Disseminate crosswalk output to an outputstream
                    // NOTE(review): the crosswalk output is buffered entirely in
                    // memory here; very large metadata outputs could be an issue.
                    ByteArrayOutputStream disseminateOutput = new ByteArrayOutputStream();
                    sxwalk.disseminate(context, dso, disseminateOutput);
                    // Convert output to an inputstream, so we can write to manifest or Zip file
                    ByteArrayInputStream crosswalkedStream = new ByteArrayInputStream(disseminateOutput.toByteArray());

                    //If we are capturing extra files to put into a Zip package
                    if (extraStreams != null)
                    {
                        //Create an <mdRef> -- we'll just reference the file by name in Zip package
                        MdRef mdRef = new MdRef();
                        //add the crosswalked Stream to list of files to add to Zip package later
                        extraStreams.addStream(mdRef, crosswalkedStream);

                        //set properties on <mdRef>
                        // Note, filename will get set on this <mdRef> later,
                        // when we process all the 'extraStreams'
                        mdRef.setMIMETYPE(sxwalk.getMIMEType());
                        setMdType(mdRef, metsName);
                        mdRef.setLOCTYPE(Loctype.URL);
                        mdSec.getContent().add(mdRef);
                    }
                    else
                    {
                        //If we are *not* capturing extra streams to add to Zip package later,
                        // that means we are likely only generating a METS manifest
                        // (i.e. manifestOnly = true)
                        // In this case, the best we can do is take the crosswalked
                        // Stream, base64 encode it, and add in an <mdWrap> field

                        // First, create our <mdWrap>
                        MdWrap mdWrap = new MdWrap();
                        mdWrap.setMIMETYPE(sxwalk.getMIMEType());
                        setMdType(mdWrap, metsName);

                        // Now, create our <binData> and add base64 encoded contents to it.
                        BinData binData = new BinData();
                        Base64 base64 = new Base64(crosswalkedStream);
                        binData.getContent().add(base64);
                        mdWrap.getContent().add(binData);
                        mdSec.getContent().add(mdWrap);
                    }

                    return mdSec;
                }
                else
                {
                    // stream crosswalk cannot handle this object -- omit the mdSec.
                    return null;
                }
            }
            else
            {
                throw new PackageValidationException("Cannot find " + xwalkName + " crosswalk plugin, either DisseminationCrosswalk or StreamDisseminationCrosswalk");
            }
        }
    }
    catch (InstantiationException e)
    {
        throw new PackageValidationException("Error instantiating Mdsec object: " + e.toString(), e);
    }
    catch (IllegalAccessException e)
    {
        throw new PackageValidationException("Error instantiating Mdsec object: " + e.toString(), e);
    }
}
// Append techMd / rightsMD / sourceMd / digiprovMD children to the given
// amdSec; the mdSecClass argument selects which element type is created.
// mdTypes[] entries are "[metsName:]PluginName" strings and may be empty.
protected void addToAmdSec(AmdSec fAmdSec, String mdTypes[], Class mdSecClass,
                           Context context, DSpaceObject dso,
                           PackageParameters params,
                           MdStreamCache extraStreams)
    throws SQLException, PackageValidationException, CrosswalkException,
    IOException, AuthorizeException
{
    for (String mdType : mdTypes)
    {
        // makeMdSec() returns null when the crosswalk yields no output;
        // in that case nothing is added for this type.
        MdSec md = makeMdSec(context, dso, mdSecClass, mdType, params, extraStreams);
        if (md != null)
        {
            fAmdSec.getContent().add(md);
        }
    }
}
// Build an <amdSec> holding all administrative metadata (tech, rights,
// source, digiprov) for the object and attach it to the METS document.
// Returns the generated ID of the new <amdSec>, or null when no
// administrative metadata types are configured for this object at all.
protected String addAmdSec(Context context, DSpaceObject dso, PackageParameters params,
                           Mets mets, MdStreamCache extraStreams)
    throws SQLException, PackageValidationException, CrosswalkException,
    IOException, AuthorizeException
{
    String techMdTypes[] = getTechMdTypes(context, dso, params);
    String rightsMdTypes[] = getRightsMdTypes(context, dso, params);
    String sourceMdTypes[] = getSourceMdTypes(context, dso, params);
    String digiprovMdTypes[] = getDigiprovMdTypes(context, dso, params);

    int totalTypes = techMdTypes.length + rightsMdTypes.length
            + sourceMdTypes.length + digiprovMdTypes.length;

    // Nothing configured => do not emit an (empty) <amdSec>.
    if (totalTypes == 0)
    {
        return null;
    }

    String amdID = gensym("amd");
    AmdSec fAmdSec = new AmdSec();
    fAmdSec.setID(amdID);
    addToAmdSec(fAmdSec, techMdTypes, TechMD.class, context, dso, params, extraStreams);
    addToAmdSec(fAmdSec, rightsMdTypes, RightsMD.class, context, dso, params, extraStreams);
    addToAmdSec(fAmdSec, sourceMdTypes, SourceMD.class, context, dso, params, extraStreams);
    addToAmdSec(fAmdSec, digiprovMdTypes, DigiprovMD.class, context, dso, params, extraStreams);
    mets.getContent().add(fAmdSec);
    return amdID;
}
// Produce the most "persistent" identifier available for the object:
// a Handle-based URN when the object has a Handle, otherwise a synthetic
// identifier built from the object type and its database ID.
protected String makePersistentID(DSpaceObject dso)
{
    String handle = dso.getHandle();
    if (handle != null)
    {
        return getHandleURN(handle);
    }
    // No Handle: punt to the much-less-satisfactory database ID and type.
    return "DSpace_DB_" + Constants.typeText[dso.getType()] + "_" + String.valueOf(dso.getID());
}
/**
 * Write out a METS manifest.
 * Mostly lifted from Rob Tansley's METS exporter.
 * <P>
 * Builds the in-memory Mets document: header, dmdSecs, amdSecs, a
 * type-specific fileSec (Item bitstreams or Community/Collection logo),
 * and a LOGICAL structMap linking everything together.
 */
protected Mets makeManifest(Context context, DSpaceObject dso,
                            PackageParameters params,
                            MdStreamCache extraStreams)
    throws MetsException, PackageValidationException, CrosswalkException, AuthorizeException, SQLException, IOException
{
    // Create the METS manifest in memory
    Mets mets = new Mets();

    // Default identifier is the database ID; prefer the Handle when present.
    String identifier = "DB-ID-" + dso.getID();

    if (dso.getHandle() != null)
    {
        identifier = dso.getHandle().replace('/', '-');
    }

    // this ID should be globally unique (format: DSpace_[objType]_[handle with slash replaced with a dash])
    mets.setID("DSpace_" + Constants.typeText[dso.getType()] + "_" + identifier);

    // identifies the object described by this document
    mets.setOBJID(makePersistentID(dso));
    mets.setTYPE(getObjectTypeString(dso));

    // this is the signature by which the ingester will recognize
    // a document it can expect to interpret.
    mets.setPROFILE(getProfile());

    // <metsHdr> is optional -- subclasses may return null to omit it.
    MetsHdr metsHdr = makeMetsHdr(context, dso, params);
    if (metsHdr != null)
    {
        mets.getContent().add(metsHdr);
    }

    // add DMD sections
    // Each type element MAY be either just a MODS-and-crosswalk name, OR
    // a combination "MODS-name:crosswalk-name" (e.g. "DC:qDC").
    String dmdTypes[] = getDmdTypes(context, dso, params);

    // record of ID of each dmdsec to make DMDID in structmap.
    String dmdId[] = new String[dmdTypes.length];
    for (int i = 0; i < dmdTypes.length; ++i)
    {
        // makeMdSec() returns null when the crosswalk yields nothing;
        // such entries leave a null slot in dmdId[].
        MdSec dmdSec = makeMdSec(context, dso, DmdSec.class, dmdTypes[i], params, extraStreams);
        if (dmdSec != null)
        {
            mets.getContent().add(dmdSec);
            dmdId[i] = dmdSec.getID();
        }
    }

    // add object-wide technical/source MD segments, get ID string:
    // Put that ID in ADMID of first div in structmap.
    String objectAMDID = addAmdSec(context, dso, params, mets, extraStreams);

    // Create simple structMap: initial div represents the Object's
    // contents, its children are e.g. Item bitstreams (content only),
    // Collection's members, or Community's members.
    StructMap structMap = new StructMap();
    structMap.setID(gensym("struct"));
    structMap.setTYPE("LOGICAL");
    structMap.setLABEL("DSpace Object");
    Div div0 = new Div();
    div0.setID(gensym("div"));
    div0.setTYPE("DSpace Object Contents");
    structMap.getContent().add(div0);

    // fileSec is optional, let object type create it if needed.
    FileSec fileSec = null;

    // Item-specific manifest - license, bitstreams as Files, etc.
    if (dso.getType() == Constants.ITEM)
    {
        // this tags file ID and group identifiers for bitstreams.
        String bitstreamIDstart = "bitstream_";
        Item item = (Item)dso;

        // how to handle unauthorized bundle/bitstream:
        String unauth = (params == null) ? null : params.getProperty("unauthorized");

        // fileSec - all non-metadata bundles go into fileGrp,
        // and each bitstream therein into a file.
        // Create the bitstream-level techMd and div's for structmap
        // at the same time so we can connect the IDREFs to IDs.
        fileSec = new FileSec();
        Bundle[] bundles = item.getBundles();
        for (int i = 0; i < bundles.length; i++)
        {
            if (!includeBundle(bundles[i]))
            {
                continue;
            }

            // unauthorized bundle?
            // NOTE: This must match the logic in disseminate()
            if (!AuthorizeManager.authorizeActionBoolean(context,
                        bundles[i], Constants.READ))
            {
                if (unauth != null &&
                    (unauth.equalsIgnoreCase("skip")))
                {
                    continue;
                }
                else
                {
                    throw new AuthorizeException("Not authorized to read Bundle named \"" + bundles[i].getName() + "\"");
                }
            }

            Bitstream[] bitstreams = bundles[i].getBitstreams();

            // Create a fileGrp, USE = permuted Bundle name
            FileGrp fileGrp = new FileGrp();
            String bName = bundles[i].getName();
            if ((bName != null) && !bName.equals(""))
            {
                fileGrp.setUSE(bundleToFileGrp(bName));
            }

            // add technical metadata for a bundle
            String techBundID = addAmdSec(context, bundles[i], params, mets, extraStreams);
            if (techBundID != null)
            {
                fileGrp.setADMID(techBundID);
            }

            // watch for primary bitstream
            // -1 is the sentinel for "no primary bitstream"
            int primaryBitstreamID = -1;
            boolean isContentBundle = false;
            if ((bName != null) && bName.equals("ORIGINAL"))
            {
                isContentBundle = true;
                primaryBitstreamID = bundles[i].getPrimaryBitstreamID();
            }

            // For each bitstream, add to METS manifest
            for (int bits = 0; bits < bitstreams.length; bits++)
            {
                // Check for authorization.  Handle unauthorized
                // bitstreams to match the logic in disseminate(),
                // i.e. "unauth=zero" means include a 0-length bitstream,
                // "unauth=skip" means to ignore it (and exclude from
                // manifest).
                boolean auth = AuthorizeManager.authorizeActionBoolean(context,
                        bitstreams[bits], Constants.READ);
                if (!auth)
                {
                    if (unauth != null && unauth.equalsIgnoreCase("skip"))
                    {
                        continue;
                    }
                    else if (!(unauth != null && unauth.equalsIgnoreCase("zero")))
                    {
                        throw new AuthorizeException("Not authorized to read Bitstream, SID=" + String.valueOf(bitstreams[bits].getSequenceID()));
                    }
                }

                // File IDs are derived from the bitstream's sequence ID,
                // so they are stable across repeated disseminations.
                String sid = String.valueOf(bitstreams[bits].getSequenceID());
                String fileID = bitstreamIDstart + sid;
                edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File();
                file.setID(fileID);
                file.setSEQ(bitstreams[bits].getSequenceID());
                fileGrp.getContent().add(file);

                // set primary bitstream in structMap
                if (bitstreams[bits].getID() == primaryBitstreamID)
                {
                    Fptr fptr = new Fptr();
                    fptr.setFILEID(fileID);
                    div0.getContent().add(0, fptr);
                }

                // if this is content, add to structmap too:
                if (isContentBundle)
                {
                    div0.getContent().add(makeFileDiv(fileID, getObjectTypeString(bitstreams[bits])));
                }

                /*
                 * If we're in THUMBNAIL or TEXT bundles, the bitstream is
                 * extracted text or a thumbnail, so we use the name to work
                 * out which bitstream to be in the same group as
                 */
                String groupID = "GROUP_" + bitstreamIDstart + sid;
                if ((bundles[i].getName() != null)
                        && (bundles[i].getName().equals("THUMBNAIL") ||
                            bundles[i].getName().startsWith("TEXT")))
                {
                    // Try and find the original bitstream, and chuck the
                    // derived bitstream in the same group
                    Bitstream original = findOriginalBitstream(item,
                            bitstreams[bits]);
                    if (original != null)
                    {
                        groupID = "GROUP_" + bitstreamIDstart
                                + original.getSequenceID();
                    }
                }
                file.setGROUPID(groupID);
                file.setMIMETYPE(bitstreams[bits].getFormat().getMIMEType());
                // Unauthorized-but-included ("unauth=zero") files report size 0.
                file.setSIZE(auth ? bitstreams[bits].getSize() : 0);

                // Translate checksum and type to METS
                String csType = bitstreams[bits].getChecksumAlgorithm();
                String cs = bitstreams[bits].getChecksum();
                if (auth && cs != null && csType != null)
                {
                    try
                    {
                        file.setCHECKSUMTYPE(Checksumtype.parse(csType));
                        file.setCHECKSUM(cs);
                    }
                    catch (MetsException e)
                    {
                        // Unknown algorithm name: omit checksum rather than fail.
                        log.warn("Cannot set bitstream checksum type="+csType+" in METS.");
                    }
                }

                // FLocat: point to location of bitstream contents.
                FLocat flocat = new FLocat();
                flocat.setLOCTYPE(Loctype.URL);
                flocat.setXlinkHref(makeBitstreamURL(bitstreams[bits], params));
                file.getContent().add(flocat);

                // technical metadata for bitstream
                String techID = addAmdSec(context, bitstreams[bits], params, mets, extraStreams);
                if (techID != null)
                {
                    file.setADMID(techID);
                }
            }
            fileSec.getContent().add(fileGrp);
        }
    }
    else if (dso.getType() == Constants.COLLECTION)
    {
        Collection collection = (Collection)dso;
        // NOTE(review): this ItemIterator is never explicitly closed --
        // confirm whether ii.close() is needed to free DB resources.
        ItemIterator ii = collection.getItems();
        while (ii.hasNext())
        {
            //add a child <div> for each item in collection
            Item item = ii.next();
            Div childDiv = makeChildDiv(getObjectTypeString(item), item, params);
            if(childDiv!=null)
            {
                div0.getContent().add(childDiv);
            }
        }

        // add metadata & info for Template Item, if exists
        Item templateItem = collection.getTemplateItem();
        if(templateItem!=null)
        {
            String templateDmdId[] = new String[dmdTypes.length];
            // index where we should add the first template item <dmdSec>.
            // Index = number of <dmdSecs> already added + number of <metsHdr> = # of dmdSecs + 1
            // (Note: in order to be a valid METS file, all dmdSecs must be before the 1st amdSec)
            int dmdIndex = dmdTypes.length + 1;
            //For each type of dmdSec specified,
            // add a new dmdSec which contains the Template Item metadata
            // (Note: Template Items are only metadata -- they have no content files)
            for (int i = 0; i < dmdTypes.length; ++i)
            {
                MdSec templateDmdSec = makeMdSec(context, templateItem, DmdSec.class, dmdTypes[i], params, extraStreams);
                if (templateDmdSec != null)
                {
                    mets.getContent().add(dmdIndex, templateDmdSec);
                    dmdIndex++;
                    templateDmdId[i] = templateDmdSec.getID();
                }
            }

            //Now add a child <div> in structMap to represent that Template Item
            Div templateItemDiv = new Div();
            templateItemDiv.setID(gensym("div"));
            templateItemDiv.setTYPE(getObjectTypeString(templateItem) + TEMPLATE_TYPE_SUFFIX);
            //Link up the dmdSec(s) for the Template Item to this <div>
            // NOTE(review): if dmdTypes is empty or every templateDmdSec came
            // back null, substring(1) below throws / emits literal "null"
            // entries -- confirm getDmdTypes() always yields usable types.
            StringBuilder templateDmdIds = new StringBuilder();
            for (String currdmdId : templateDmdId)
            {
                templateDmdIds.append(" ").append(currdmdId);
            }
            templateItemDiv.setDMDID(templateDmdIds.substring(1));
            //add this child <div> before the listing of normal Items
            div0.getContent().add(0, templateItemDiv);
        }

        // add link to Collection Logo, if one exists
        Bitstream logoBs = collection.getLogo();
        if (logoBs != null)
        {
            fileSec = new FileSec();
            addLogoBitstream(logoBs, fileSec, div0, params);
        }
    }
    else if (dso.getType() == Constants.COMMUNITY)
    {
        // Subcommunities are directly under "DSpace Object Contents" <div>,
        // but are labeled as Communities.
        Community subcomms[] = ((Community)dso).getSubcommunities();
        for (int i = 0; i < subcomms.length; ++i)
        {
            //add a child <div> for each subcommunity in this community
            Div childDiv = makeChildDiv(getObjectTypeString(subcomms[i]), subcomms[i], params);
            if(childDiv!=null)
            {
                div0.getContent().add(childDiv);
            }
        }
        // Collections are also directly under "DSpace Object Contents" <div>,
        // but are labeled as Collections.
        Collection colls[] = ((Community)dso).getCollections();
        for (int i = 0; i < colls.length; ++i)
        {
            //add a child <div> for each collection in this community
            Div childDiv = makeChildDiv(getObjectTypeString(colls[i]), colls[i], params);
            if(childDiv!=null)
            {
                div0.getContent().add(childDiv);
            }
        }
        //add Community logo bitstream
        Bitstream logoBs = ((Community)dso).getLogo();
        if (logoBs != null)
        {
            fileSec = new FileSec();
            addLogoBitstream(logoBs, fileSec, div0, params);
        }
    }
    else if (dso.getType() == Constants.SITE)
    {
        // This is a site-wide <structMap>, which just lists the top-level
        // communities.  Each top level community is referenced by a div.
        Community comms[] = Community.findAllTop(context);
        for (int i = 0; i < comms.length; ++i)
        {
            //add a child <div> for each top level community in this site
            Div childDiv = makeChildDiv(getObjectTypeString(comms[i]),
                    comms[i], params);
            if(childDiv!=null)
            {
                div0.getContent().add(childDiv);
            }
        }
    }

    //Only add the <fileSec> to the METS file if it has content.  A <fileSec> must have content.
    if (fileSec != null && fileSec.getContent()!=null && !fileSec.getContent().isEmpty())
    {
        mets.getContent().add(fileSec);
    }

    mets.getContent().add(structMap);

    // set links to metadata for object -- after type-specific
    // code since that can add to the object metadata.
    // NOTE(review): if dmdTypes is empty, dmdIds is empty and substring(1)
    // throws StringIndexOutOfBoundsException; null dmdId[] entries (from
    // skipped crosswalks) appear as literal "null" -- confirm upstream.
    StringBuilder dmdIds = new StringBuilder();
    for (String currdmdId : dmdId)
    {
        dmdIds.append(" ").append(currdmdId);
    }
    div0.setDMDID(dmdIds.substring(1));
    if (objectAMDID != null)
    {
        div0.setADMID(objectAMDID);
    }

    // Does subclass have something to add to structMap?
    addStructMap(context, dso, params, mets);

    return mets;
}
// Install a Community/Collection logo bitstream into the METS document:
// adds a <fileGrp USE="LOGO"> containing one <file> to the given fileSec,
// and an <fptr> referencing it as the first child of the main structMap div.
protected void addLogoBitstream(Bitstream logoBs, FileSec fileSec, Div div0, PackageParameters params)
{
    String fileID = gensym("logo");

    edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File();
    file.setID(fileID);
    file.setMIMETYPE(logoBs.getFormat().getMIMEType());
    file.setSIZE(logoBs.getSize());

    // Carry the stored checksum over into METS when its algorithm name
    // is one the METS vocabulary understands; otherwise omit it.
    String checksumType = logoBs.getChecksumAlgorithm();
    String checksum = logoBs.getChecksum();
    if (checksum != null && checksumType != null)
    {
        try
        {
            file.setCHECKSUMTYPE(Checksumtype.parse(checksumType));
            file.setCHECKSUM(checksum);
        }
        catch (MetsException e)
        {
            log.warn("Cannot set bitstream checksum type="+checksumType+" in METS.");
        }
    }

    // <FLocat> points at the actual bitstream location.
    FLocat flocat = new FLocat();
    flocat.setLOCTYPE(Loctype.URL);
    flocat.setXlinkHref(makeBitstreamURL(logoBs, params));
    file.getContent().add(flocat);

    // Wrap the <file> in a <fileGrp USE="LOGO">.
    FileGrp logoGroup = new FileGrp();
    logoGroup.setUSE("LOGO");
    logoGroup.getContent().add(file);
    fileSec.getContent().add(logoGroup);

    // Reference the logo from the first position of the structMap's div0.
    Fptr fptr = new Fptr();
    fptr.setFILEID(fileID);
    div0.getContent().add(0, fptr);
}
// Build a <div> element of the given TYPE whose sole child is an <fptr>
// referencing the file with the given ID.
protected Div makeFileDiv(String fileID, String type)
{
    Fptr filePointer = new Fptr();
    filePointer.setFILEID(fileID);

    Div fileDiv = new Div();
    fileDiv.setID(gensym("div"));
    fileDiv.setTYPE(type);
    fileDiv.getContent().add(filePointer);
    return fileDiv;
}
/**
 * Create a &lt;div&gt; element with &lt;mptr&gt; which references a child
 * object via its handle (and via a local file name, when recursively disseminating
 * all child objects).
 *
 * @param type type attr value for the &lt;div&gt;
 * @param dso object for which to create the div
 * @param params packager parameters (may be null, like everywhere else
 *               in this class)
 * @return the new &lt;div&gt; element
 */
protected Div makeChildDiv(String type, DSpaceObject dso, PackageParameters params)
{
    String handle = dso.getHandle();

    //start <div>
    Div div = new Div();
    div.setID(gensym("div"));
    div.setTYPE(type);

    //make sure we have a handle
    if (handle == null || handle.length() == 0)
    {
        log.warn("METS Disseminator is skipping " + type + " without handle: " + dso.toString());
    }
    else
    {
        //create <mptr> with handle reference
        Mptr mptr = new Mptr();
        mptr.setID(gensym("mptr"));
        mptr.setLOCTYPE(Loctype.HANDLE);
        mptr.setXlinkHref(handle);
        div.getContent().add(mptr);
    }

    //determine file extension of child references,
    //based on whether we are exporting just a manifest or a full Zip pkg
    // FIX: guard against null 'params' -- every other method in this class
    // treats params as optional ((params == null) ? ... ), but this line
    // previously dereferenced it unconditionally and could NPE. With no
    // params we default to the full-package ("zip") extension.
    String childFileExtension =
        (params != null && params.getBooleanProperty("manifestOnly", false)) ? "xml" : "zip";

    // Always create <mptr> with file-name reference to child package
    // This is what DSpace will expect the child package to be named during ingestion
    // (NOTE: without this reference, DSpace will be unable to restore any child objects during ingestion)
    Mptr mptr2 = new Mptr();
    mptr2.setID(gensym("mptr"));
    mptr2.setLOCTYPE(Loctype.URL);
    //we get the name of the child package from the Packager -- as it is what will actually create this child pkg file
    mptr2.setXlinkHref(PackageUtils.getPackageName(dso, childFileExtension));
    div.getContent().add(mptr2);

    return div;
}
// Express a handle in canonical URN ("hdl:") form -- note that
// HandleManager's canonicalize currently returns HTTP URL format,
// hence this local helper. Handles already in "hdl:" form pass through.
protected String getHandleURN(String handle)
{
    return handle.startsWith("hdl:") ? handle : ("hdl:" + handle);
}
/**
 * For a bitstream that's a thumbnail or extracted text, find the
 * corresponding bitstream it was derived from, in the ORIGINAL bundle.
 * <P>
 * The original's filename is assumed to be the derived bitstream's
 * filename minus its 4-character extension (".jpg" or ".txt").
 *
 * @param item
 *            the item we're dealing with
 * @param derived
 *            the derived bitstream
 *
 * @return the corresponding original bitstream (or null)
 * @throws SQLException on database error
 */
protected static Bitstream findOriginalBitstream(Item item, Bitstream derived)
    throws SQLException
{
    // Filename of original will be filename of the derived bitstream
    // minus the extension (last 4 chars - .jpg or .txt)
    // FIX: guard against a null or too-short name, which previously threw
    // StringIndexOutOfBoundsException / NullPointerException.
    String derivedName = derived.getName();
    if (derivedName == null || derivedName.length() < 4)
    {
        return null;
    }
    String originalFilename = derivedName.substring(0, derivedName.length() - 4);

    // First find "ORIGINAL" bundle(s), then the matching bitstream within.
    Bundle[] bundles = item.getBundles();
    for (int i = 0; i < bundles.length; i++)
    {
        if ((bundles[i].getName() != null)
                && bundles[i].getName().equals("ORIGINAL"))
        {
            // Now find the corresponding bitstream
            Bitstream[] bitstreams = bundles[i].getBitstreams();
            for (int bsnum = 0; bsnum < bitstreams.length; bsnum++)
            {
                // FIX: compare via the known-non-null originalFilename so a
                // nameless bitstream in ORIGINAL cannot trigger an NPE.
                if (originalFilename.equals(bitstreams[bsnum].getName()))
                {
                    return bitstreams[bsnum];
                }
            }
        }
    }

    // Didn't find it
    return null;
}
// Get result from crosswalk plugin and add it to the document,
// including namespaces and schema.
// returns the new/modified element upon success, or null when the
// crosswalk produced no output (caller then omits the mdSec) or the
// object type is unsupported by the crosswalk.
private MetsElement crosswalkToMetsElement(DisseminationCrosswalk xwalk,
                                           DSpaceObject dso, MetsElement me)
    throws CrosswalkException,
    IOException, SQLException, AuthorizeException
{
    try
    {
        // add crosswalk's namespaces and schemaLocation to this element:
        String raw = xwalk.getSchemaLocation();
        String sloc[] = raw == null ? null : raw.split("\\s+");
        Namespace ns[] = xwalk.getNamespaces();
        for (int i = 0; i < ns.length; ++i)
        {
            String uri = ns[i].getURI();
            // schemaLocation is whitespace-separated; when this namespace's
            // URI matches the first token, register its schema URL too.
            if (sloc != null && sloc.length > 1 && uri.equals(sloc[0]))
            {
                me.setSchema(ns[i].getPrefix(), uri, sloc[1]);
            }
            else
            {
                me.setSchema(ns[i].getPrefix(), uri);
            }
        }

        // add result of crosswalk
        // The JDOM output is serialized to a string and embedded as
        // pre-formed XML rather than as a live DOM.
        PreformedXML pXML = null;
        if (xwalk.preferList())
        {
            List<Element> res = xwalk.disseminateList(dso);
            if (!(res == null || res.isEmpty()))
            {
                pXML = new PreformedXML(outputter.outputString(res));
            }
        }
        else
        {
            Element res = xwalk.disseminateElement(dso);
            if (res != null)
            {
                pXML = new PreformedXML(outputter.outputString(res));
            }
        }
        if (pXML != null)
        {
            me.getContent().add(pXML);
            return me;
        }
        // empty crosswalk result -- signal caller to skip this element.
        return null;
    }
    catch (CrosswalkObjectNotSupported e)
    {
        // ignore this xwalk if object is unsupported.
        if (log.isDebugEnabled())
        {
            log.debug("Skipping MDsec because of CrosswalkObjectNotSupported: dso=" + dso.toString() + ", xwalk=" + xwalk.getClass().getName());
        }
        return null;
    }
}
/**
 * Cleanup our license file reference links, as Deposit Licenses &amp; CC Licenses can be
 * added two ways (and we only want to add them to zip package *once*):
 * (1) Added as a normal Bitstream (assuming LICENSE and CC_LICENSE bundles will be included in pkg)
 * (2) Added via a 'rightsMD' crosswalk (as they are rights information/metadata on an Item)
 * <p>
 * So, if they are being added by *both*, then we want to just link the rightsMD &lt;mdRef&gt; entry so
 * that it points to the Bitstream location.  This implementation is a bit 'hackish', but it's
 * the best we can do, as the Harvard METS API doesn't allow us to go back and crawl an entire
 * METS file to look for these inconsistencies/duplications.
 *
 * @param context current DSpace Context
 * @param params current Packager Parameters
 * @param dso current DSpace Object
 * @param mdRef the rightsMD &lt;mdRef&gt; element
 * @throws SQLException on database error
 * @throws IOException on I/O error
 * @throws AuthorizeException on authorization failure
 */
protected void linkLicenseRefsToBitstreams(Context context, PackageParameters params,
                                           DSpaceObject dso, MdRef mdRef)
    throws SQLException, IOException, AuthorizeException
{
    // Only <mdRef>s typed MDTYPE="OTHER" can be one of our license references.
    if (mdRef.getMDTYPE() == null || mdRef.getMDTYPE() != Mdtype.OTHER
            || mdRef.getOTHERMDTYPE() == null)
    {
        return;
    }

    String otherType = mdRef.getOTHERMDTYPE();

    //If this <mdRef> is a reference to a DSpace Deposit License
    if (otherType.equals("DSpaceDepositLicense"))
    {
        Item item = (Item) dso;
        //Are we already including the LICENSE bundle's bitstreams in this package?
        Bundle licenseBundles[] = item.getBundles(Constants.LICENSE_BUNDLE_NAME);
        if (licenseBundles != null && licenseBundles.length > 0 && includeBundle(licenseBundles[0]))
        {
            //Since the LICENSE bitstreams are included, point the <mdRef>
            // at the in-package license bitstream path instead.
            Bitstream licenseBs = PackageUtils.findDepositLicense(context, item);
            mdRef.setXlinkHref(makeBitstreamURL(licenseBs, params));
        }
    }
    //If this <mdRef> is a reference to a Creative Commons Textual License
    else if (otherType.equals("CreativeCommonsText"))
    {
        Item item = (Item) dso;
        //Are we already including the CC-LICENSE bundle's bitstreams in this package?
        Bundle ccBundles[] = item.getBundles(CreativeCommons.CC_BUNDLE_NAME);
        if (ccBundles != null && ccBundles.length > 0 && includeBundle(ccBundles[0]))
        {
            //Point the <mdRef> at the in-package CC textual license bitstream.
            Bitstream ccText = CreativeCommons.getLicenseTextBitstream(item);
            mdRef.setXlinkHref(makeBitstreamURL(ccText, params));
        }
    }
    //If this <mdRef> is a reference to a Creative Commons RDF License
    else if (otherType.equals("CreativeCommonsRDF"))
    {
        Item item = (Item) dso;
        //Are we already including the CC-LICENSE bundle's bitstreams in this package?
        Bundle ccBundles[] = item.getBundles(CreativeCommons.CC_BUNDLE_NAME);
        if (ccBundles != null && ccBundles.length > 0 && includeBundle(ccBundles[0]))
        {
            //Point the <mdRef> at the in-package CC RDF license bitstream.
            Bitstream ccRdf = CreativeCommons.getLicenseRdfBitstream(item);
            mdRef.setXlinkHref(makeBitstreamURL(ccRdf, params));
        }
    }
}
/**
 * Build a string which will be used as the "Type" of this object in
 * the METS manifest.
 * <P>
 * Default format is "DSpace [Type-as-string]" (e.g. "DSpace ITEM",
 * "DSpace COLLECTION").
 *
 * @param dso DSpaceObject to create type-string for
 * @return a string which will represent this object Type in METS
 * @see org.dspace.core.Constants
 */
public String getObjectTypeString(DSpaceObject dso)
{
    StringBuilder typeString = new StringBuilder("DSpace ");
    typeString.append(Constants.typeText[dso.getType()]);
    return typeString.toString();
}
/**
 * Returns a user help string which should describe the
 * additional valid command-line options that this packager
 * implementation will accept when using the <code>-o</code> or
 * <code>--option</code> flags with the Packager script.
 *
 * @return a string describing additional command-line options available
 * with this packager
 */
@Override
public String getParameterHelp()
{
    // Assemble the help text incrementally; the resulting string is
    // identical to the documented option list.
    StringBuilder help = new StringBuilder();
    help.append("* manifestOnly=[boolean] ")
        .append("If true, only export the METS manifest (mets.xml) and don't export content files (defaults to false).")
        .append("\n\n")
        .append("* unauthorized=[value] ")
        .append("If 'skip', skip over any files which the user doesn't have authorization to read. ")
        .append("If 'zero', create a zero-length file for any files the user doesn't have authorization to read. ")
        .append("By default, an AuthorizationException will be thrown for any files the user cannot read.");
    return help.toString();
}
/**
 * Return identifier for bitstream in an Item; when making a package,
 * this is the archive member name (e.g. in Zip file). In a bare
 * manifest, it might be an external URL. The name should be in URL
 * format ("file:" may be elided for in-archive filenames). It should
 * be deterministic, since this gets called twice for each bitstream
 * when building archive.
 *
 * @param bitstream the bitstream to build an identifier for
 * @param params the PackageParameters passed to the disseminator
 * @return deterministic identifier string for the bitstream, in URL format
 */
public abstract String makeBitstreamURL(Bitstream bitstream, PackageParameters params);
/**
 * Create metsHdr element - separate so subclasses can override.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator
 * @return a new MetsHdr element for the METS document
 */
public abstract MetsHdr makeMetsHdr(Context context, DSpaceObject dso,
        PackageParameters params);
/**
 * Returns name of METS profile to which this package conforms, e.g.
 * "DSpace METS DIP Profile 1.0"
 *
 * @return string name of the METS profile this package conforms to.
 */
public abstract String getProfile();
/**
 * Returns fileGrp's USE attribute value corresponding to a DSpace bundle name.
 *
 * @param bname name of DSpace bundle.
 * @return string value to use for the USE attribute of the fileGrp
 *         representing that bundle
 */
public abstract String bundleToFileGrp(String bname);
/**
 * Get the types of Item-wide DMD to include in package.
 * Each element of the returned array is a String, which
 * MAY be just a simple name, naming both the Crosswalk Plugin and
 * the METS "MDTYPE", <em>or</em> a colon-separated pair consisting of
 * the METS name followed by a colon and the Crosswalk Plugin name.
 * E.g. the type string <code>"DC:qualifiedDublinCore"</code> tells it to
 * create a METS section with <code>MDTYPE="DC"</code> and use the plugin
 * named "qualifiedDublinCore" to obtain the data.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator.
 * @return array of metadata type strings, never null.
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 */
public abstract String [] getDmdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException;
/**
 * Get the type string of the technical metadata to create for each
 * object and each Bitstream in an Item. The type string may be a
 * simple name or colon-separated compound as specified for
 * <code>getDmdTypes()</code> above.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator.
 * @return array of metadata type strings, never null.
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 */
public abstract String[] getTechMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException;
/**
 * Get the type string of the source metadata to create for each
 * object and each Bitstream in an Item. The type string may be a
 * simple name or colon-separated compound as specified for
 * <code>getDmdTypes()</code> above.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator.
 * @return array of metadata type strings, never null.
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 */
public abstract String[] getSourceMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException;
/**
 * Get the type string of the "digiprov" (digital provenance)
 * metadata to create for each object and each Bitstream in an Item.
 * The type string may be a simple name or colon-separated compound
 * as specified for <code>getDmdTypes()</code> above.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator.
 * @return array of metadata type strings, never null.
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 */
public abstract String[] getDigiprovMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException;
/**
 * Get the type string of the "rights" (permission and/or license)
 * metadata to create for each object and each Bitstream in an Item.
 * The type string may be a simple name or colon-separated compound
 * as specified for <code>getDmdTypes()</code> above.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator.
 * @return array of metadata type strings, never null.
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 */
public abstract String[] getRightsMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException;
/**
 * Add any additional <code>structMap</code> elements to the
 * METS document, as required by this subclass. A simple default
 * structure map which fulfills the minimal DSpace METS DIP/SIP
 * requirements is already present, so this does not need to do anything.
 *
 * @param context the DSpace context
 * @param dso the DSpace object being disseminated
 * @param params the PackageParameters passed to the disseminator
 * @param mets the METS document to which to add structMaps
 * @throws SQLException if a database error occurs
 * @throws IOException if an I/O error occurs
 * @throws AuthorizeException if the current user is not authorized
 * @throws MetsException if the METS document cannot be modified
 */
public abstract void addStructMap(Context context, DSpaceObject dso,
        PackageParameters params, Mets mets)
    throws SQLException, IOException, AuthorizeException, MetsException;
/**
 * Decide whether a bundle's contents belong in this package.
 *
 * @param bundle the bundle to test
 * @return true when this bundle should be included as "content"
 * in the package.. e.g. DSpace SIP does not include metadata bundles.
 */
public abstract boolean includeBundle(Bundle bundle);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.jdom.Element;
/**
 * Base class for package ingester of METS (Metadata Encoding &amp; Transmission
 * Standard) Packages.<br>
* See <a href="http://www.loc.gov/standards/mets/">
* http://www.loc.gov/standards/mets/</a>.
* <p>
* This is a generic packager framework intended to be subclassed to create
* ingesters for more specific METS "profiles". METS is an abstract and flexible
* framework that can encompass many different kinds of metadata and inner
* package structures.
*
* <p>
* <b>Package Parameters:</b>
* <ul>
* <li><code>validate</code> -- true/false attempt to schema-validate the METS
* manifest.</li>
* <li><code>manifestOnly</code> -- package consists only of a manifest
* document.</li>
* <li><code>ignoreHandle</code> -- true/false, ignore AIP's idea of handle
* when ingesting.</li>
* <li><code>ignoreParent</code> -- true/false, ignore AIP's idea of parent
* when ingesting.</li>
* </ul>
* <p>
* <b>Configuration Properties:</b>
* <ul>
* <li><code>mets.CONFIGNAME.ingest.preserveManifest</code> - if <em>true</em>,
* the METS manifest itself is preserved in a bitstream named
* <code>mets.xml</code> in the <code>METADATA</code> bundle. If it is
* <em>false</em> (the default), the manifest is discarded after ingestion.</li>
*
* <li><code>mets.CONFIGNAME.ingest.manifestBitstreamFormat</code> - short name
* of the bitstream format to apply to the manifest; MUST be specified when
* preserveManifest is true.</li>
*
* <li><code>mets.default.ingest.crosswalk.MD_SEC_NAME</code> = PLUGIN_NAME
* Establishes a default crosswalk plugin for the given type of metadata in a
* METS mdSec (e.g. "DC", "MODS"). The plugin may be either a stream or
* XML-oriented ingestion crosswalk. Subclasses can override the default mapping
* with their own, substituting their configurationName for "default" in the
* configuration property key above.</li>
*
* <li><code>mets.CONFIGNAME.ingest.useCollectionTemplate</code> - if
* <em>true</em>, when an item is created, use the collection template. If it is
* <em>false</em> (the default), any existing collection template is ignored.</li>
* </ul>
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 6137 $
* @see org.dspace.content.packager.METSManifest
* @see AbstractPackageIngester
* @see PackageIngester
*/
public abstract class AbstractMETSIngester extends AbstractPackageIngester
{
/** log4j category */
private static Logger log = Logger.getLogger(AbstractMETSIngester.class);
/**
 * An instance of ZipMdrefManager holds the state needed to retrieve the
 * contents of an external metadata stream referenced by an
 * <code>mdRef</code> element in a Zipped up METS manifest.
 * <p>
 * Initialize it with the Content (ORIGINAL) Bundle containing all of the
 * metadata bitstreams. Match an mdRef by finding the bitstream with the
 * same name.
 */
protected static final class MdrefManager implements METSManifest.Mdref
{
    // Package file against which mdRef paths are resolved (may be null
    // when no package was supplied).
    private File packageFile = null;
    // Ingestion parameters, forwarded to getFileInputStream().
    private PackageParameters params;

    /** Bind this manager to a package file and its ingestion parameters. */
    private MdrefManager(File packageFile, PackageParameters params)
    {
        super();
        this.packageFile = packageFile;
        this.params = params;
    }

    /**
     * Make the contents of an external resource mentioned in an
     * <code>mdRef</code> element available as an <code>InputStream</code>.
     * See the <code>METSManifest.MdRef</code> interface for details.
     *
     * @param mdref
     *            the METS mdRef element to locate the input for.
     * @return the input stream of its content.
     * @see METSManifest
     */
    @Override
    public InputStream getInputStream(Element mdref)
            throws MetadataValidationException, IOException
    {
        // Resolve the referenced file name first -- this may itself
        // reject a malformed mdRef element.
        String referencedPath = METSManifest.getFileName(mdref);
        if (packageFile == null)
        {
            throw new MetadataValidationException(
                    "Failed referencing mdRef element, because there is no package specified.");
        }
        // Delegate to AbstractMETSIngester to open the referenced
        // external metadata file from within the package.
        return AbstractMETSIngester.getFileInputStream(packageFile, params,
                referencedPath);
    }
}// end MdrefManager class
/**
 * Create a new DSpace object out of a METS content package. All contents
 * are dictated by the METS manifest. Package is a ZIP archive (or
 * optionally bare manifest XML document). In a Zip, all files relative to
 * top level and the manifest (as per spec) in mets.xml.
 *
 * @param context
 *            DSpace context.
 * @param parent
 *            parent under which to create new object (may be null -- in
 *            which case ingester must determine parent from package or
 *            throw an error).
 * @param pkgFile
 *            The package file to ingest
 * @param params
 *            Properties-style list of options (interpreted by each
 *            packager).
 * @param license
 *            may be null, which takes default license.
 * @return DSpaceObject created by ingest, or null when ingestObject()
 *         skipped the object (e.g. "skipIfParentMissing" was set and the
 *         parent could not be found).
 *
 * @throws PackageValidationException
 *             if package is unacceptable or there is a fatal error turning
 *             it into a DSpaceObject.
 * @throws CrosswalkException if metadata crosswalking fails
 * @throws AuthorizeException if the current user is not authorized
 * @throws SQLException if a database error occurs
 * @throws IOException if the package cannot be read
 */
@Override
public DSpaceObject ingest(Context context, DSpaceObject parent,
        File pkgFile, PackageParameters params, String license)
        throws PackageValidationException, CrosswalkException,
        AuthorizeException, SQLException, IOException
{
    // parsed out METS Manifest from the file.
    METSManifest manifest = null;
    // new DSpace object created
    DSpaceObject dso = null;
    try
    {
        log.info(LogManager.getHeader(context, "package_parse",
                "Parsing package for ingest, file=" + pkgFile.getName()));
        // Parse our ingest package, extracting out the METS manifest in the
        // package
        manifest = parsePackage(context, pkgFile, params);
        // must have a METS Manifest to ingest anything
        if (manifest == null)
        {
            throw new PackageValidationException(
                    "No METS Manifest found (filename="
                            + METSManifest.MANIFEST_FILE
                            + "). Package is unacceptable!");
        }
        // validate our manifest
        checkManifest(manifest);
        // if we are not restoring an object (i.e. we are submitting a new
        // object) then, default the 'ignoreHandle' option to true (as a new
        // object should get a new handle by default)
        // NOTE: these addProperty() calls mutate the caller-supplied params
        // object; ingestObject() below relies on seeing these defaults.
        if (!params.restoreModeEnabled()
                && !params.containsKey("ignoreHandle"))
        { // ignore the handle in the manifest, and instead create a new
          // handle
            params.addProperty("ignoreHandle", "true");
        }
        // if we have a Parent Object, default 'ignoreParent' option to True
        // (this will ignore the Parent specified in manifest)
        if (parent != null && !params.containsKey("ignoreParent"))
        { // ignore the parent in the manifest, and instead use the
          // specified parent object
            params.addProperty("ignoreParent", "true");
        }
        // Actually ingest the object described by the METS Manifest
        dso = ingestObject(context, parent, manifest, pkgFile, params,
                license);
        // if ingestion was successful
        if (dso != null)
        {
            // Log whether we finished an ingest (create new obj) or a restore
            // (restore previously existing obj)
            String action = "package_ingest";
            if (params.restoreModeEnabled())
            {
                action = "package_restore";
            }
            log.info(LogManager.getHeader(context, action,
                    "Created new Object, type="
                            + Constants.typeText[dso.getType()] + ", handle="
                            + dso.getHandle() + ", dbID="
                            + String.valueOf(dso.getID())));
            // Check if the Packager is currently running recursively.
            // If so, this means the Packager will attempt to recursively
            // ingest all referenced child packages.
            if (params.recursiveModeEnabled())
            {
                // Retrieve list of all Child object METS file paths from the
                // current METS manifest.
                // This is our list of known child packages
                String[] childFilePaths = manifest.getChildMetsFilePaths();
                // Save this list to our AbstractPackageIngester (and note which
                // DSpaceObject the pkgs relate to).
                // NOTE: The AbstractPackageIngester itself will perform the
                // recursive ingest call, based on these child pkg references
                for (int i = 0; i < childFilePaths.length; i++)
                {
                    addPackageReference(dso, childFilePaths[i]);
                }
            }
        }//end if dso not null
        return dso;
    }
    catch (SQLException se)
    {
        // no need to really clean anything up,
        // transaction rollback will get rid of it anyway.
        // (The assignment below only clears the local reference; callers
        // never see 'dso' since the exception is rethrown.)
        dso = null;
        // Pass this exception on to the next handler.
        throw se;
    }
}
/**
 * Parse a given input package, ultimately returning the METS manifest out
 * of the package. METS manifest is assumed to be a file named 'mets.xml'
 *
 * @param context
 *            DSpace Context
 * @param pkgFile
 *            package to parse
 * @param params
 *            Ingestion parameters
 * @return parsed out METSManifest
 * @throws IOException if the package cannot be read
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if the current user is not authorized
 * @throws MetadataValidationException if the manifest is missing or invalid
 */
protected METSManifest parsePackage(Context context, File pkgFile,
        PackageParameters params) throws IOException, SQLException,
        AuthorizeException, MetadataValidationException
{
    // whether or not to validate the METSManifest before processing
    // (default=false)
    // (Even though it's preferable to validate -- it's costly and takes a
    // lot of time, unless you cache schemas locally)
    boolean validate = params.getBooleanProperty("validate", false);
    // parsed out METS Manifest from the file.
    METSManifest manifest = null;
    // try to locate the METS Manifest in package
    // 1. read "package" stream: it will be either bare Manifest
    // or Package contents into bitstreams, depending on params:
    if (params.getBooleanProperty("manifestOnly", false))
    {
        // parse the bare METS manifest and sanity-check it.
        // FIX: close the stream even if parsing throws (was leaked before).
        InputStream manifestStream = new FileInputStream(pkgFile);
        try
        {
            manifest = METSManifest.create(manifestStream,
                    validate, getConfigurationName());
        }
        finally
        {
            manifestStream.close();
        }
    }
    else
    {
        ZipFile zip = new ZipFile(pkgFile);
        try
        {
            // Retrieve the manifest file entry (named mets.xml)
            ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
            // FIX: fail with a meaningful error when the package lacks a
            // manifest -- previously getInputStream(null) would throw an
            // obscure NullPointerException.
            if (manifestEntry == null)
            {
                throw new MetadataValidationException(
                        "No METS Manifest found (filename="
                                + METSManifest.MANIFEST_FILE
                                + "). Package is unacceptable!");
            }
            // parse the manifest and sanity-check it.
            manifest = METSManifest.create(zip.getInputStream(manifestEntry),
                    validate, getConfigurationName());
        }
        finally
        {
            // FIX: always close the Zip file, even when parsing fails
            // (we'll extract the other files from zip when we need them)
            zip.close();
        }
    }
    // return our parsed out METS manifest
    return manifest;
}
/**
 * Ingest/import a single DSpace Object, based on the associated METS
 * Manifest and the parameters passed to the METSIngester
 *
 * @param context
 *            DSpace Context
 * @param parent
 *            Parent DSpace Object (may be null; if so it is resolved from
 *            the manifest, except for SITE objects)
 * @param manifest
 *            the parsed METS Manifest
 * @param pkgFile
 *            the full package file (which may include content files if a
 *            zip)
 * @param params
 *            Parameters passed to METSIngester
 * @param license
 *            DSpace license agreement
 * @return completed result as a DSpace object, or null when the object was
 *         skipped because its parent was missing and "skipIfParentMissing"
 *         was set
 * @throws IOException if package contents cannot be read
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if the current user is not authorized
 * @throws CrosswalkException if metadata crosswalking fails
 * @throws MetadataValidationException if the manifest is invalid
 * @throws PackageValidationException if the package is unacceptable
 */
protected DSpaceObject ingestObject(Context context, DSpaceObject parent,
        METSManifest manifest, File pkgFile, PackageParameters params,
        String license) throws IOException, SQLException,
        AuthorizeException, CrosswalkException,
        MetadataValidationException, PackageValidationException
{
    // type of DSpace Object (one of the type constants)
    int type;
    // -- Step 1 --
    // Extract basic information (type, parent, handle) about DSpace object
    // represented by manifest
    type = getObjectType(manifest);
    // if no parent passed in (or ignoreParent is false),
    // attempt to determine parent DSpace object from manifest
    if (type != Constants.SITE
            && (parent == null || !params.getBooleanProperty(
                    "ignoreParent", false)))
    {
        try
        {
            // get parent object from manifest
            parent = getParentObject(context, manifest);
        }
        catch (UnsupportedOperationException e)
        {
            // If user specified to skip item ingest if any "missing parent" error message occur
            if (params.getBooleanProperty("skipIfParentMissing", false))
            {
                // log a warning instead of throwing an error
                log.warn(LogManager.getHeader(context, "package_ingest",
                        "SKIPPING ingest of object '" + manifest.getObjID()
                                + "' as parent DSpace Object could not be found. "
                                + "If you are running a recursive ingest, it is likely this object will be created as soon as its parent is created."));
                // return a null object (nothing ingested as parent was missing)
                return null;
            }
            else
            {
                // else, throw exception upward to display to user
                throw e;
            }
        }
    }
    String handle = null;
    // if we are *not* ignoring the handle in manifest (i.e. ignoreHandle is
    // false)
    if (!params.getBooleanProperty("ignoreHandle", false))
    {
        // get handle from manifest
        handle = getObjectHandle(manifest);
    }
    // -- Step 2 --
    // Create our DSpace Object based on info parsed from manifest, and
    // packager params
    DSpaceObject dso;
    try
    {
        dso = PackageUtils.createDSpaceObject(context, parent,
                type, handle, params);
    }
    catch (SQLException sqle)
    {
        throw new PackageValidationException("Exception while ingesting "
                + pkgFile.getPath(), sqle);
    }
    // if we are uninitialized, throw an error -- something's wrong!
    if (dso == null)
    {
        // FIX: 'parent' may legitimately be null here (e.g. SITE objects
        // never resolve a parent) -- the old code dereferenced it
        // unconditionally, so an NPE could mask the real error message.
        throw new PackageValidationException(
                "Unable to initialize object specified by package (type='"
                        + type + "', handle='" + handle + "' and parent='"
                        + (parent == null ? "null" : parent.getHandle())
                        + "').");
    }
    // -- Step 3 --
    // Run our Administrative metadata crosswalks!
    // initialize callback object which will retrieve external inputstreams
    // for any <mdRef>'s found in METS
    MdrefManager callback = new MdrefManager(pkgFile, params);
    // Crosswalk the sourceMD first, so that we make sure to fill in
    // submitter info (and any other initial applicable info)
    manifest.crosswalkObjectSourceMD(context, params, dso, callback);
    // Next, crosswalk techMD, digiprovMD, rightsMD
    manifest.crosswalkObjectOtherAdminMD(context, params, dso, callback);
    // -- Step 4 --
    // Run our Descriptive metadata (dublin core, etc) crosswalks!
    crosswalkObjectDmd(context, dso, manifest, callback, manifest
            .getItemDmds(), params);
    // For Items, also sanity-check the metadata for minimum requirements.
    if (type == Constants.ITEM)
    {
        PackageUtils.checkItemMetadata((Item) dso);
    }
    // -- Step 5 --
    // Add all content files as bitstreams on new DSpace Object
    if (type == Constants.ITEM)
    {
        Item item = (Item) dso;
        // Check if this item is still in a user's workspace.
        // It should be, as we haven't completed its install yet.
        WorkspaceItem wsi = WorkspaceItem.findByItem(context, item);
        // Get collection this item is being submitted to
        Collection collection = item.getOwningCollection();
        if (collection == null)
        {
            // Get the collection this workspace item belongs to
            if (wsi != null)
            {
                collection = wsi.getCollection();
            }
        }
        // save manifest as a bitstream in Item if desired
        if (preserveManifest())
        {
            addManifestBitstream(context, item, manifest);
        }
        // save all other bitstreams in Item
        addBitstreams(context, item, manifest, pkgFile, params, callback);
        // have subclass manage license since it may be extra package file.
        addLicense(context, item, license, collection, params);
        // Finally, if item is still in the workspace, then we actually need
        // to install it into the archive & assign its handle.
        if (wsi != null)
        {
            // Finish creating the item. This actually assigns the handle,
            // and will either install item immediately or start a workflow, based on params
            PackageUtils.finishCreateItem(context, wsi, handle, params);
        }
    } // end if ITEM
    else if (type == Constants.COLLECTION || type == Constants.COMMUNITY)
    {
        // Add logo if one is referenced from manifest
        addContainerLogo(context, dso, manifest, pkgFile, params);
        if (type == Constants.COLLECTION)
        {
            // Add template item if one is referenced from manifest (only for Collections)
            addTemplateItem(context, dso, manifest, pkgFile, params, callback);
        }
    }// end if Community/Collection
    else if (type == Constants.SITE)
    {
        // Do nothing -- Crosswalks will handle anything necessary to ingest at Site-level
    }
    else
    {
        throw new PackageValidationException(
                "Unknown DSpace Object type in package, type="
                        + String.valueOf(type));
    }
    // -- Step 6 --
    // Finish things up!
    // Subclass hook for final checks and rearrangements
    // (this allows subclasses to do some final validation / changes as
    // necessary)
    finishObject(context, dso, params);
    // Update the object to make sure all changes are committed
    PackageUtils.updateDSpaceObject(dso);
    return dso;
}
/**
 * Replace the contents of a single DSpace Object, based on the associated
 * METS Manifest and the parameters passed to the METSIngester.
 *
 * @param context
 *            DSpace Context
 * @param dso
 *            DSpace Object to replace (modified in place and returned)
 * @param manifest
 *            the parsed METS Manifest
 * @param pkgFile
 *            the full package file (which may include content files if a
 *            zip)
 * @param params
 *            Parameters passed to METSIngester
 * @param license
 *            DSpace license agreement
 * @return completed result as a DSpace object (the same object passed in,
 *         with its bitstreams and metadata replaced)
 * @throws IOException if package contents cannot be read
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if the current user is not authorized
 * @throws CrosswalkException if metadata crosswalking fails
 * @throws MetadataValidationException if the manifest is invalid
 * @throws PackageValidationException if the package is unacceptable (e.g.
 *             its object type does not match the object being replaced)
 */
protected DSpaceObject replaceObject(Context context, DSpaceObject dso,
        METSManifest manifest, File pkgFile, PackageParameters params,
        String license) throws IOException, SQLException,
        AuthorizeException, CrosswalkException,
        MetadataValidationException, PackageValidationException
{
    // -- Step 1 --
    // Before going forward with the replace, let's verify these objects are
    // of the same TYPE! (We don't want to go around trying to replace a
    // COMMUNITY with an ITEM -- that's dangerous.)
    int manifestType = getObjectType(manifest);
    if (manifestType != dso.getType())
    {
        throw new PackageValidationException(
                "The object type of the METS manifest ("
                        + Constants.typeText[manifestType]
                        + ") does not match up with the object type ("
                        + Constants.typeText[dso.getType()]
                        + ") of the DSpaceObject to be replaced!");
    }
    if (log.isDebugEnabled())
    {
        log.debug("Object to be replaced (handle=" + dso.getHandle()
                + ") is " + Constants.typeText[dso.getType()] + " id="
                + dso.getID());
    }
    // -- Step 2 --
    // Clear out current object (as we are replacing all its contents &
    // metadata)
    // remove all files attached to this object
    // (For communities/collections this just removes the logo bitstream)
    PackageUtils.removeAllBitstreams(dso);
    // clear out all metadata values associated with this object
    PackageUtils.clearAllMetadata(dso);
    // @TODO -- We are currently NOT clearing out the following during a
    // replace. So, even after a replace, the following information may be
    // retained in the system:
    // o Rights/Permissions in system or on objects
    // o Collection item templates or Content Source info (e.g. OAI
    // Harvesting collections)
    // o Item status (embargo, withdrawn) or mappings to other collections
    // -- Step 3 --
    // Run our Administrative metadata crosswalks!
    // initialize callback object which will retrieve external inputstreams
    // for any <mdRef>s found in METS
    MdrefManager callback = new MdrefManager(pkgFile, params);
    // Crosswalk the sourceMD first, so that we make sure to fill in
    // submitter info (and any other initial applicable info)
    manifest.crosswalkObjectSourceMD(context, params, dso, callback);
    // Next, crosswalk techMD, digiprovMD, rightsMD
    manifest.crosswalkObjectOtherAdminMD(context, params, dso, callback);
    // -- Step 4 --
    // Add all content files as bitstreams on new DSpace Object
    if (dso.getType() == Constants.ITEM)
    {
        Item item = (Item) dso;
        // save manifest as a bitstream in Item if desired
        if (preserveManifest())
        {
            addManifestBitstream(context, item, manifest);
        }
        // save all other bitstreams in Item
        addBitstreams(context, item, manifest, pkgFile, params, callback);
        // have subclass manage license since it may be extra package file.
        // NOTE: unlike ingestObject(), the owning Collection is taken from
        // the existing object's parent here.
        addLicense(context, item, license, (Collection) dso
                .getParentObject(), params);
        // FIXME ?
        // should set lastModifiedTime e.g. when ingesting AIP.
        // maybe only do it in the finishObject() callback for AIP.
    } // end if ITEM
    else if (dso.getType() == Constants.COLLECTION
            || dso.getType() == Constants.COMMUNITY)
    {
        // Add logo if one is referenced from manifest
        addContainerLogo(context, dso, manifest, pkgFile, params);
    } // end if Community/Collection
    else if (dso.getType() == Constants.SITE)
    {
        // Do nothing -- Crosswalks will handle anything necessary to replace at Site-level
    }
    // -- Step 5 --
    // Run our Descriptive metadata (dublin core, etc) crosswalks!
    crosswalkObjectDmd(context, dso, manifest, callback, manifest
            .getItemDmds(), params);
    // For Items, also sanity-check the metadata for minimum requirements.
    if (dso.getType() == Constants.ITEM)
    {
        PackageUtils.checkItemMetadata((Item) dso);
    }
    // -- Step 6 --
    // Finish things up!
    // Subclass hook for final checks and rearrangements
    // (this allows subclasses to do some final validation / changes as
    // necessary)
    finishObject(context, dso, params);
    // Update the object to make sure all changes are committed
    PackageUtils.updateDSpaceObject(dso);
    return dso;
}
/**
 * Add Bitstreams to an Item, based on the files listed in the METS Manifest
 *
 * @param context
 *            DSpace Context
 * @param item
 *            DSpace Item
 * @param manifest
 *            METS Manifest
 * @param pkgFile
 *            the full package file (which may include content files if a
 *            zip)
 * @param params
 *            Ingestion Parameters
 * @param mdRefCallback
 *            MdrefManager storing info about mdRefs in manifest
 * @throws SQLException if a database error occurs
 * @throws IOException if file content cannot be read
 * @throws AuthorizeException if the current user is not authorized
 * @throws MetadataValidationException if the manifest is invalid
 * @throws CrosswalkException if bitstream metadata crosswalking fails
 * @throws PackageValidationException if a manifest file element is invalid
 */
protected void addBitstreams(Context context, Item item,
        METSManifest manifest, File pkgFile, PackageParameters params,
        MdrefManager mdRefCallback) throws SQLException, IOException,
        AuthorizeException, MetadataValidationException,
        CrosswalkException, PackageValidationException
{
    // Step 1 -- determine which manifest file (if any) is flagged as the
    // primary (or logo) bitstream, so we can mark it once created.
    String primaryID = null;
    Element primaryFile = manifest.getPrimaryOrLogoBitstream();
    if (primaryFile != null)
    {
        primaryID = primaryFile.getAttributeValue("ID");
        if (log.isDebugEnabled())
        {
            log.debug("Got primary bitstream file ID=\"" + primaryID + "\"");
        }
    }
    // Step 2 -- walk every content file named in the manifest and attach
    // each one to the Item as a new Bitstream.
    boolean setPrimaryBitstream = false;
    BitstreamFormat unknownFormat = BitstreamFormat.findUnknown(context);
    for (Element fileElement : manifest.getContentFiles())
    {
        // basic validation -- every file element needs an ID attribute
        String fileID = fileElement.getAttributeValue("ID");
        if (fileID == null)
        {
            throw new PackageValidationException(
                    "Invalid METS Manifest: file element without ID attribute.");
        }
        // path/name of the file as recorded in the manifest
        String path = METSManifest.getFileName(fileElement);
        // open the file's content, either from inside the package or
        // from the external location it references
        InputStream fileStream = getFileInputStream(pkgFile, params, path);
        // determine which bundle this file belongs in, reusing an
        // existing bundle of that name when the Item already has one
        String bundleName = METSManifest.getBundleName(fileElement);
        Bundle targetBundle;
        Bundle existing[] = item.getBundles(bundleName);
        if (existing != null && existing.length > 0)
        {
            targetBundle = existing[0];
        }
        else
        {
            targetBundle = item.createBundle(bundleName);
        }
        // create the bitstream from the stream & name it after its path
        Bitstream bitstream = targetBundle.createBitstream(fileStream);
        bitstream.setName(path);
        // apply any administrative metadata for this bitstream found in
        // (or referenced by) the METS manifest
        manifest.crosswalkBitstream(context, params, bitstream, fileID,
                mdRefCallback);
        // flag the bundle's primary bitstream, if this is it
        if (primaryID != null && fileID.equals(primaryID))
        {
            targetBundle.setPrimaryBitstreamID(bitstream.getID());
            targetBundle.update();
            setPrimaryBitstream = true;
        }
        // subclass hook: allow default bitstream info to be adjusted
        finishBitstream(context, bitstream, fileElement, manifest, params);
        // Last-ditch attempt to divine the format, if crosswalk failed to
        // set it: first guess from MIME type, then from "name" extension.
        if (bitstream.getFormat().equals(unknownFormat))
        {
            if (log.isDebugEnabled())
            {
                log.debug("Guessing format of Bitstream left un-set: "
                        + bitstream.toString());
            }
            String mimeType = fileElement.getAttributeValue("MIMETYPE");
            BitstreamFormat guessed = (mimeType == null) ? null
                    : BitstreamFormat.findByMIMEType(context, mimeType);
            if (guessed == null)
            {
                guessed = FormatIdentifier.guessFormat(context, bitstream);
            }
            bitstream.setFormat(guessed);
        }
        bitstream.update();
    }// end for each manifest file
    // Step 3 -- warn if the declared primary bitstream never showed up
    if (primaryID != null && !setPrimaryBitstream)
    {
        log.warn("Could not find primary bitstream file ID=\"" + primaryID
                + "\" in manifest file \"" + pkgFile.getAbsolutePath()
                + "\"");
    }
}
/**
 * Save/Preserve the METS Manifest as a Bitstream attached to the given
 * DSpace item. The manifest content is stored as <code>mets.xml</code> in
 * the item's METADATA bundle, flagged with the bitstream format configured
 * for this ingester.
 *
 * @param context
 *            DSpace Context
 * @param item
 *            DSpace Item
 * @param manifest
 *            The METS Manifest
 * @throws IOException if manifest content cannot be read
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if the current user is not authorized
 * @throws PackageValidationException if no manifest bitstream format is
 *             configured for this ingester
 */
protected void addManifestBitstream(Context context, Item item,
        METSManifest manifest) throws IOException, SQLException,
        AuthorizeException, PackageValidationException
{
    // The manifest lives in the METADATA bundle.
    Bundle metadataBundle = item.createBundle(Constants.METADATA_BUNDLE_NAME);
    // Turn the manifest's XML content into a bitstream named mets.xml.
    Bitstream metsBitstream = metadataBundle.createBitstream(manifest
            .getMetsAsStream());
    metsBitstream.setName(METSManifest.MANIFEST_FILE);
    metsBitstream.setSource(METSManifest.MANIFEST_FILE);
    metsBitstream.update();
    // Look up the "magic" bitstream format that identifies a manifest;
    // it MUST be configured whenever manifests are being preserved.
    String formatName = getManifestBitstreamFormat();
    if (formatName == null)
    {
        throw new PackageValidationException(
                "Configuration Error: No Manifest BitstreamFormat configured for METS ingester type="
                        + getConfigurationName());
    }
    BitstreamFormat manifestFormat = PackageUtils
            .findOrCreateBitstreamFormat(context, formatName,
                    "application/xml", formatName + " package manifest");
    metsBitstream.setFormat(manifestFormat);
    metsBitstream.update();
}
/**
* Add a Logo to a Community or Collection container object based on a METS
* Manifest.
*
* @param context
* DSpace Context
* @param dso
* DSpace Container Object
* @param manifest
* METS Manifest
* @param pkgFile
* the full package file (which may include content files if a
* zip)
* @param params
* Ingestion Parameters
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
* @throws MetadataValidationException
* @throws PackageValidationException
*/
protected void addContainerLogo(Context context, DSpaceObject dso,
METSManifest manifest, File pkgFile, PackageParameters params)
throws SQLException, IOException, AuthorizeException,
MetadataValidationException, PackageValidationException
{
Element logoRef = manifest.getPrimaryOrLogoBitstream();
// only continue if a logo specified in manifest
if (logoRef != null)
{
// Find ID of logo file
String logoID = logoRef.getAttributeValue("ID");
// Loop through manifest content files to find actual logo file
for (Iterator<Element> mi = manifest
.getContentFiles().iterator(); mi.hasNext();)
{
Element mfile = mi.next();
if (logoID.equals(mfile.getAttributeValue("ID")))
{
String path = METSManifest.getFileName(mfile);
// extract the file input stream from package (or retrieve
// externally, if it is an externally referenced file)
InputStream fileStream = getFileInputStream(pkgFile,
params, path);
// Add this logo to the Community/Collection
if (dso.getType() == Constants.COLLECTION)
{
((Collection) dso).setLogo(fileStream);
}
else
{
((Community) dso).setLogo(fileStream);
}
break;
}
}// end for each file in manifest
}// end if logo reference found
}
/**
* Add a Template Item to a Collection container object based on a METS
* Manifest.
*
* @param context
* DSpace Context
* @param dso
* DSpace Container Object
* @param manifest
* METS Manifest
* @param pkgFile
* the full package file (which may include content files if a
* zip)
* @param params
* Ingestion Parameters
* @param callback
* the MdrefManager (manages all external metadata files
* referenced by METS <code>mdref</code> elements)
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
* @throws MetadataValidationException
* @throws PackageValidationException
*/
protected void addTemplateItem(Context context, DSpaceObject dso,
METSManifest manifest, File pkgFile, PackageParameters params,
MdrefManager callback)
throws SQLException, IOException, AuthorizeException,
CrosswalkException, PackageValidationException
{
//Template items only valid for collections
if(dso.getType()!=Constants.COLLECTION)
return;
Collection collection = (Collection) dso;
//retrieve list of all <div>s representing child objects from manifest
List childObjList = manifest.getChildObjDivs();
if(childObjList!=null && !childObjList.isEmpty())
{
Element templateItemDiv = null;
Iterator childIterator = childObjList.iterator();
//Search for the child with a type of "DSpace ITEM Template"
while(childIterator.hasNext())
{
Element childDiv = (Element) childIterator.next();
String childType = childDiv.getAttributeValue("TYPE");
//should be the only child of type "ITEM" with "Template" for a suffix
if(childType.contains(Constants.typeText[Constants.ITEM]) &&
childType.endsWith(AbstractMETSDisseminator.TEMPLATE_TYPE_SUFFIX))
{
templateItemDiv = childDiv;
break;
}
}
//If an Template Item was found, create it with the specified metadata
if(templateItemDiv!=null)
{
//make sure this templateItemDiv is associated with one or more dmdSecs
String templateDmdIds = templateItemDiv.getAttributeValue("DMDID");
if(templateDmdIds!=null)
{
//create our template item & get a reference to it
collection.createTemplateItem();
Item templateItem = collection.getTemplateItem();
//get a reference to the dmdSecs which describe the metadata for this template item
Element[] templateDmds = manifest.getDmdElements(templateDmdIds);
// Run our Descriptive metadata (dublin core, etc) crosswalks to add metadata to template item
crosswalkObjectDmd(context, templateItem, manifest, callback, templateDmds, params);
// update the template item to save metadata changes
PackageUtils.updateDSpaceObject(templateItem);
}
}
}
}
    /**
     * Replace an existing DSpace object with the contents of a METS-based
     * package. All contents are dictated by the METS manifest. Package is a ZIP
     * archive (or optionally bare manifest XML document). In a Zip, all files
     * relative to top level and the manifest (as per spec) in mets.xml.
     * <P>
     * This method is similar to ingest(), except that if the object already
     * exists in DSpace, it is emptied of files and metadata. The METS-based
     * package is then used to ingest new values for these.
     * <P>
     * When no existing object can be resolved (neither passed in nor found via
     * the manifest's OBJID handle), the call degrades to a plain restore-style
     * ingest rather than failing.
     *
     * @param context
     *            DSpace Context
     * @param dsoToReplace
     *            DSpace Object to be replaced (may be null if it will be
     *            specified in the METS manifest itself)
     * @param pkgFile
     *            The package file to ingest
     * @param params
     *            Parameters passed from the packager script
     * @return DSpaceObject created by ingest.
     * @throws PackageValidationException
     *             if package is unacceptable or there is a fatal error turning
     *             it into a DSpace Object.
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     * @throws CrosswalkException
     */
    @Override
    public DSpaceObject replace(Context context, DSpaceObject dsoToReplace,
            File pkgFile, PackageParameters params)
            throws PackageValidationException, CrosswalkException,
            AuthorizeException, SQLException, IOException
    {
        // parsed out METS Manifest from the file.
        METSManifest manifest = null;
        // resulting DSpace Object
        DSpaceObject dso = null;
        try
        {
            log.info(LogManager.getHeader(context, "package_parse",
                    "Parsing package for replace, file=" + pkgFile.getName()));
            // Parse our ingest package, extracting out the METS manifest in the
            // package
            manifest = parsePackage(context, pkgFile, params);
            // must have a METS Manifest to replace anything
            if (manifest == null)
            {
                throw new PackageValidationException(
                        "No METS Manifest found (filename="
                                + METSManifest.MANIFEST_FILE
                                + "). Package is unacceptable!");
            }
            // It's possible that the object to replace will be passed in as
            // null. Let's determine the handle of the object to replace.
            if (dsoToReplace == null)
            {
                // since we don't know what we are replacing, we'll have to
                // try to determine it from the parsed manifest.
                // Handle of object described by METS should be in OBJID.
                String handleURI = manifest.getObjID();
                String handle = decodeHandleURN(handleURI);
                try
                {
                    // Attempt to resolve this handle to an existing object
                    dsoToReplace = HandleManager.resolveToObject(context,
                            handle);
                }
                catch (IllegalStateException ie)
                {
                    // Deliberately swallowed: we don't care if this errors
                    // out -- we can continue whether or not an object exists
                    // with this handle (dsoToReplace simply stays null).
                }
            }
            // NOTE: At this point, it's still possible we don't have an object
            // to replace. This could happen when there is actually no existing
            // object in DSpace using that handle. (In which case, we're
            // actually just doing a "restore" -- so we aren't going to throw an
            // error or complain.)
            // If we were unable to find the object to replace, then assume we
            // are restoring it
            if (dsoToReplace == null)
            {
                // As this object doesn't already exist, we will perform an
                // ingest of a new object in order to restore it.
                // NOTE: passing 'null' as parent object in order to force
                // ingestObject() method to determine parent using manifest.
                dso = ingestObject(context, null, manifest, pkgFile, params,
                        null);
                //if ingestion was successful
                if(dso!=null)
                {
                    // Log that we created an object
                    log.info(LogManager.getHeader(context, "package_replace",
                            "Created new Object, type="
                                    + Constants.typeText[dso.getType()]
                                    + ", handle=" + dso.getHandle() + ", dbID="
                                    + String.valueOf(dso.getID())));
                }
            }
            else
            // otherwise, we found the DSpaceObject to replace -- so, replace
            // it!
            {
                // Actually replace the object described by the METS Manifest.
                // NOTE: This will perform an in-place replace of all metadata
                // and files currently associated with the object.
                dso = replaceObject(context, dsoToReplace, manifest, pkgFile,
                        params, null);
                // Log that we replaced an object
                log.info(LogManager.getHeader(context, "package_replace",
                        "Replaced Object, type="
                                + Constants.typeText[dso.getType()]
                                + ", handle=" + dso.getHandle() + ", dbID="
                                + String.valueOf(dso.getID())));
            }
            //if ingest/restore/replace successful
            if(dso!=null)
            {
                // Check if the Packager is currently running recursively.
                // If so, this means the Packager will attempt to recursively
                // replace all referenced child packages.
                if (params.recursiveModeEnabled())
                {
                    // Retrieve list of all Child object METS file paths from the
                    // current METS manifest.
                    // This is our list of known child packages.
                    String[] childFilePaths = manifest.getChildMetsFilePaths();
                    // Save this list to our AbstractPackageIngester (and note which
                    // DSpaceObject the pkgs relate to).
                    // NOTE: The AbstractPackageIngester itself will perform the
                    // recursive ingest call, based on these child pkg references.
                    for (int i = 0; i < childFilePaths.length; i++)
                    {
                        addPackageReference(dso, childFilePaths[i]);
                    }
                }
            }
            return dso;
        }
        catch (SQLException se)
        {
            // no need to really clean anything up,
            // transaction rollback will get rid of it anyway, and will also
            // restore everything to previous state.
            dso = null;
            // Pass this exception on to the next handler.
            throw se;
        }
    }
// whether or not to save manifest as a bitstream in METADATA bundle.
protected boolean preserveManifest()
{
return ConfigurationManager.getBooleanProperty("mets."
+ getConfigurationName() + ".ingest.preserveManifest", false);
}
// return short name of manifest bitstream format
protected String getManifestBitstreamFormat()
{
return ConfigurationManager.getProperty("mets."
+ getConfigurationName() + ".ingest.manifestBitstreamFormat");
}
// whether or not to use Collection Templates when creating a new item
protected boolean useCollectionTemplate()
{
return ConfigurationManager.getBooleanProperty("mets."
+ getConfigurationName() + ".ingest.useCollectionTemplate",
false);
}
/**
* Parse the hdl: URI/URN format into a raw Handle.
*
* @param value
* handle URI string
* @return raw handle (with 'hdl:' prefix removed)
*/
protected String decodeHandleURN(String value)
{
if (value != null && value.startsWith("hdl:"))
{
return value.substring(4);
}
else
{
return null;
}
}
/**
* Remove an existing DSpace Object (called during a replace)
*
* @param dso
* DSpace Object
*/
protected void removeObject(Context context, DSpaceObject dso)
throws AuthorizeException, SQLException, IOException
{
if (log.isDebugEnabled())
{
log.debug("Removing object " + Constants.typeText[dso.getType()]
+ " id=" + dso.getID());
}
switch (dso.getType())
{
case Constants.ITEM:
Item item = (Item) dso;
Collection[] collections = item.getCollections();
// Remove item from all the collections it is in
for (Collection collection : collections)
{
collection.removeItem(item);
}
// Note: when removing an item from the last collection it will
// be removed from the system. So there is no need to also call
// an item.delete() method.
// Remove item from cache immediately
context.removeCached(item, item.getID());
// clear object
item = null;
break;
case Constants.COLLECTION:
Collection collection = (Collection) dso;
Community[] communities = collection.getCommunities();
// Remove collection from all the communities it is in
for (Community community : communities)
{
community.removeCollection(collection);
}
// Note: when removing a collection from the last community it will
// be removed from the system. So there is no need to also call
// an collection.delete() method.
// Remove collection from cache immediately
context.removeCached(collection, collection.getID());
// clear object
collection = null;
break;
case Constants.COMMUNITY:
// Just remove the Community entirely
Community community = (Community) dso;
community.delete();
// Remove community from cache immediately
context.removeCached(community, community.getID());
// clear object
community = null;
break;
}
}
/**
* Determines what parent DSpace object is referenced in this METS doc.
* <p>
* This is a default implementation which assumes the parent will be
* specified in a <structMap LABEL="Parent">. You should override this
* method if your METS manifest specifies the parent object in another
* location.
*
* @param context
* DSpace Context
* @param manifest
* METS manifest
* @returns a DSpace Object which is the parent (or null, if not found)
* @throws PackageValidationException
* if parent reference cannot be found in manifest
* @throws MetadataValidationException
* @throws SQLException
*/
public DSpaceObject getParentObject(Context context, METSManifest manifest)
throws PackageValidationException, MetadataValidationException,
SQLException
{
DSpaceObject parent = null;
// look for a Parent Object link in manifest <structmap>
String parentLink = manifest.getParentOwnerLink();
// verify we have a valid Parent Object
if (parentLink != null && parentLink.length() > 0)
{
parent = HandleManager.resolveToObject(context, parentLink);
if (parent == null)
{
throw new UnsupportedOperationException(
"Could not find a parent DSpaceObject referenced as '"
+ parentLink
+ "' in the METS Manifest for object "
+ manifest.getObjID()
+ ". A parent DSpaceObject must be specified from either the 'packager' command or noted in the METS Manifest itself.");
}
}
else
{
throw new UnsupportedOperationException(
"Could not find a parent DSpaceObject where we can ingest the packaged object "
+ manifest.getObjID()
+ ". A parent DSpaceObject must be specified from either the 'packager' command or noted in the METS Manifest itself.");
}
return parent;
}
/**
* Determines the handle of the DSpace object represented in this METS doc.
* <p>
* This is a default implementation which assumes the handle of the DSpace
* Object can be found in the <mets> @OBJID attribute. You should
* override this method if your METS manifest specifies the handle in
* another location.
*
* @param manifest
* METS manifest
* @returns handle as a string (or null, if not found)
* @throws PackageValidationException
* if handle cannot be found in manifest
*/
public String getObjectHandle(METSManifest manifest)
throws PackageValidationException, MetadataValidationException,
SQLException
{
// retrieve handle URI from manifest
String handleURI = manifest.getObjID();
// decode this URI (by removing the 'hdl:' prefix)
String handle = decodeHandleURN(handleURI);
if (handle == null || handle.length() == 0)
{
throw new PackageValidationException(
"The DSpace Object handle required to ingest this package could not be resolved in manifest. The <mets OBJID='hdl:xxxx'> is missing.");
}
return handle;
}
/**
* Retrieve the inputStream for a File referenced from a specific path
* within a METS package.
* <p>
* If the packager is set to 'manifest-only' (i.e. pkgFile is just a
* manifest), we assume the file is available for download via a URL.
* <p>
* Otherwise, the pkgFile is a Zip, so the file should be retrieved from
* within that Zip package.
*
* @param pkgFile
* the full package file (which may include content files if a
* zip)
* @param params
* Parameters passed to METSIngester
* @param path
* the File path (either path in Zip package or a URL)
* @return the InputStream for the file
*/
protected static InputStream getFileInputStream(File pkgFile,
PackageParameters params, String path)
throws MetadataValidationException, IOException
{
// If this is a manifest only package (i.e. not a zip file)
if (params.getBooleanProperty("manifestOnly", false))
{
// NOTE: since we are only dealing with a METS manifest,
// we will assume all external files are available via URLs.
try
{
// attempt to open a connection to given URL
URL fileURL = new URL(path);
URLConnection connection = fileURL.openConnection();
// open stream to access file contents
return connection.getInputStream();
}
catch (IOException io)
{
log
.error("Unable to retrieve external file from URL '"
+ path
+ "' for manifest-only METS package. All externally referenced files must be retrievable via URLs.");
// pass exception upwards
throw io;
}
}
else
{
// open the Zip package
ZipFile zipPackage = new ZipFile(pkgFile);
// Retrieve the manifest file entry by name
ZipEntry manifestEntry = zipPackage.getEntry(path);
// Get inputStream associated with this file
return zipPackage.getInputStream(manifestEntry);
}
}
/**
* Returns a user help string which should describe the
* additional valid command-line options that this packager
* implementation will accept when using the <code>-o</code> or
* <code>--option</code> flags with the Packager script.
*
* @return a string describing additional command-line options available
* with this packager
*/
@Override
public String getParameterHelp()
{
return "* ignoreHandle=[boolean] " +
"If true, the ingester will ignore any Handle specified in the METS manifest itself, and instead create a new Handle during the ingest process (this is the default when running in Submit mode, using the -s flag). " +
"If false, the ingester attempts to restore the Handles specified in the METS manifest (this is the default when running in Restore/replace mode, using the -r flag). " +
"\n\n" +
"* ignoreParent=[boolean] " +
"If true, the ingester will ignore any Parent object specified in the METS manifest itself, and instead ingest under a new Parent object (this is the default when running in Submit mode, using the -s flag). The new Parent object must be specified via the -p flag. " +
"If false, the ingester attempts to restore the object directly under its old Parent (this is the default when running in Restore/replace mode, using the -r flag). " +
"\n\n" +
"* manifestOnly=[boolean] " +
"Specify true if the ingest package consists of just a METS manifest (mets.xml), without any content files (defaults to false)." +
"\n\n" +
"* validate=[boolean] " +
"If true, enable XML validation of METS file using schemas in document (default is true).";
}
    /**
     * Profile-specific tests to validate manifest. The implementation can
     * access the METS document through the <code>manifest</code> variable, an
     * instance of <code>METSManifest</code>.
     *
     * @param manifest
     *            the METSManifest to validate
     * @throws MetadataValidationException
     *             if there is a fatal problem with the METS document's
     *             conformance to the expected profile.
     */
    abstract void checkManifest(METSManifest manifest)
            throws MetadataValidationException;
    /**
     * Select the <code>dmdSec</code> element(s) to apply to the Item. The
     * implementation is responsible for choosing which (if any) of the metadata
     * sections to crosswalk to get the descriptive metadata for the item being
     * ingested. It is responsible for calling the crosswalk, using the
     * manifest's helper i.e.
     * <code>manifest.crosswalkItemDmd(context,item,dmdElement,callback);</code>
     * (The <code>callback</code> argument is a reference to itself since the
     * class also implements the <code>METSManifest.MdRef</code> interface to
     * fetch package files referenced by mdRef elements.)
     * <p>
     * Note that <code>item</code> and <code>manifest</code> are available as
     * protected fields from the superclass.
     *
     * @param context
     *            the DSpace context
     * @param dso
     *            the DSpace Object receiving the metadata
     * @param manifest
     *            the METSManifest
     * @param callback
     *            the MdrefManager (manages all external metadata files
     *            referenced by METS <code>mdref</code> elements)
     * @param dmds
     *            array of Elements, each a METS <code>dmdSec</code> that
     *            applies to the Item as a whole.
     * @param params
     *            the Packager Parameters
     */
    public abstract void crosswalkObjectDmd(Context context, DSpaceObject dso,
            METSManifest manifest, MdrefManager callback, Element dmds[],
            PackageParameters params) throws CrosswalkException,
            PackageValidationException, AuthorizeException, SQLException,
            IOException;
    /**
     * Add license(s) to Item based on contents of METS and other policies. The
     * implementation of this method controls exactly what licenses are added to
     * the new item, including the DSpace deposit license. It is given the
     * collection (which is the source of a default deposit license), an
     * optional user-supplied deposit license (in the form of a String), and the
     * METS manifest. It should invoke <code>manifest.getItemRightsMD()</code>
     * to get an array of <code>rightsMd</code> elements which might contain
     * other license information of interest, e.g. a Creative Commons license.
     * <p>
     * This framework does not add any licenses by default.
     * <p>
     * Note that crosswalking rightsMD sections can also add a deposit or CC
     * license to the object.
     *
     * @param context
     *            the DSpace context
     * @param item
     *            the Item receiving the license(s)
     * @param license
     *            optional user-supplied Deposit License text (may be null)
     * @param collection
     *            DSpace Collection to which the item is being submitted.
     * @param params
     *            the Packager Parameters
     */
    public abstract void addLicense(Context context, Item item, String license,
            Collection collection, PackageParameters params)
            throws PackageValidationException, AuthorizeException,
            SQLException, IOException;
    /**
     * Hook for final "finishing" operations on the new Object. This method is
     * called when the new Object is otherwise complete and ready to be
     * returned. The implementation should use this opportunity to make whatever
     * final checks and modifications are necessary.
     *
     * @param context
     *            the DSpace context
     * @param dso
     *            the DSpace Object
     * @param params
     *            the Packager Parameters
     */
    public abstract void finishObject(Context context, DSpaceObject dso,
            PackageParameters params) throws PackageValidationException,
            CrosswalkException, AuthorizeException, SQLException, IOException;
    /**
     * Determines what type of DSpace object is represented in this METS doc.
     *
     * @param manifest
     *            the METSManifest to inspect
     * @return one of the object types in Constants.
     */
    public abstract int getObjectType(METSManifest manifest)
            throws PackageValidationException;
    /**
     * Subclass-dependent final processing on a Bitstream; could include fixing
     * up the name, bundle, or other attributes.
     *
     * @param context
     *            the DSpace context
     * @param bs
     *            the Bitstream to finish
     * @param mfile
     *            the METS <code>file</code> element the bitstream came from
     * @param manifest
     *            the METSManifest
     * @param params
     *            the Packager Parameters
     */
    public abstract void finishBitstream(Context context, Bitstream bs,
            Element mfile, METSManifest manifest, PackageParameters params)
            throws MetadataValidationException, SQLException,
            AuthorizeException, IOException;
    /**
     * Returns the keyword that makes the configuration keys of this subclass
     * unique, e.g. if it returns NAME, the key would be:
     * "mets.NAME.ingest.preserveManifest = true"
     */
    public abstract String getConfigurationName();
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.AbstractPackagerWrappingCrosswalk;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.CrosswalkObjectNotSupported;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.content.crosswalk.IngestionCrosswalk;
import org.dspace.content.crosswalk.StreamIngestionCrosswalk;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.jdom.Document;
import org.jdom.Content;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.jdom.xpath.XPath;
/**
* <P>
* Manage the METS manifest document for METS importer classes,
* such as the package importer <code>org.dspace.content.packager.MetsSubmission</code>
* and the federated importer <code>org.dspace.app.mets.FederatedMETSImport</code>
* </P>
* <P>
* It can parse the METS document, build an internal model, and give the importers
* access to that model. It also crosswalks
* all of the descriptive and administrative metadata in the METS
* manifest into the target DSpace Item, under control of the importer.
* </P>
*
* <P>
* It reads the following DSpace Configuration entries:
* </P>
* <UL>
* <LI>Local XML schema (XSD) declarations, in the general format:
* <br><code>mets.xsd.<em>identifier</em> = <em>namespace</em> <em>xsd-URL</em></code>
* <br> eg. <code>mets.xsd.dc = http://purl.org/dc/elements/1.1/ dc.xsd</code>
* <br>Add a separate config entry for each schema.
* </LI>
* <p><LI>Crosswalk plugin mappings:
* These tell it the name of the crosswalk plugin to invoke for metadata sections
* with a particular value of <code>MDTYPE</code> (or <code>OTHERMDTYPE</code>)
* By default, the crosswalk mechanism will look for a plugin with the
* same name as the metadata type (e.g. <code>"MODS"</code>,
* <code>"DC"</code>). This example line invokes the <code>QDC</code>
* plugin when <code>MDTYPE="DC"</code>
* <br><code>mets.submission.crosswalk.DC = QDC </code>
* <br> general format is:
* <br><code>mets.submission.crosswalk.<em>mdType</em> = <em>pluginName</em> </code>
* </LI>
* </UL>
*
*
* @author Robert Tansley
* @author WeiHua Huang
* @author Rita Lee
* @author Larry Stone
* @see org.dspace.content.packager.MetsSubmission
* @see org.dspace.app.mets.FederatedMETSImport
*/
public class METSManifest
{
    /**
     * Callback interface to retrieve data streams in mdRef elements.
     * "Package" or file reader returns an input stream for the
     * given relative path, e.g. to dereference <code>mdRef</code> elements.
     */
    public interface Mdref
    {
        /**
         * Make the contents of an external resource mentioned in
         * an <code>mdRef</code> element available as an <code>InputStream</code>.
         * The implementation must use the information in the
         * <code>mdRef</code> element, and the state in the object that
         * implements this interface, to find the actual metadata content.
         * <p>
         * For example, an implementation that ingests a directory of
         * files on the local filesystem would get a relative pathname
         * out of the <code>mdRef</code> and open that file.
         *
         * @param mdRef JDOM element of mdRef in the METS manifest.
         * @return stream containing the metadata mentioned in mdRef.
         * @throws MetadataValidationException if the mdRef is unacceptable or missing required information.
         * @throws PackageValidationException if the referenced package content is invalid or missing.
         * @throws IOException if it is returned by services called by this method.
         * @throws SQLException if it is returned by services called by this method.
         * @throws AuthorizeException if it is returned by services called by this method.
         */
        public InputStream getInputStream(Element mdRef)
            throws MetadataValidationException, PackageValidationException,
                IOException, SQLException, AuthorizeException;
    }
    /** log4j category */
    private static final Logger log = Logger.getLogger(METSManifest.class);
    /** Canonical filename of METS manifest within a package or as a bitstream. */
    public static final String MANIFEST_FILE = "mets.xml";
    /** Prefix of DSpace configuration lines that map METS metadata type to
     * crosswalk plugin names.
     */
    public static final String CONFIG_METS_PREFIX = "mets.";
    /** Prefix of config lines identifying local XML Schema (XSD) files. */
    private static final String CONFIG_XSD_PREFIX = CONFIG_METS_PREFIX+"xsd.";
    /** Dublin core element namespace */
    private static final Namespace dcNS = Namespace
            .getNamespace("http://purl.org/dc/elements/1.1/");
    /** Dublin core term namespace (for qualified DC) */
    private static final Namespace dcTermNS = Namespace
            .getNamespace("http://purl.org/dc/terms/");
    /** METS namespace -- includes "mets" prefix for use in XPaths */
    public static final Namespace metsNS = Namespace
            .getNamespace("mets", "http://www.loc.gov/METS/");
    /** XLink namespace -- includes "xlink" prefix for use in XPaths */
    public static final Namespace xlinkNS = Namespace
            .getNamespace("xlink", "http://www.w3.org/1999/xlink");
    /** Root element of the current METS manifest. */
    private Element mets = null;
    /** All mdRef elements in the manifest (lazily collected; see getMdFiles). */
    private List mdFiles = null;
    /** <file> elements in "original" file group (bundle); lazily collected. */
    private List<Element> contentFiles = null;
    /** Builder to use for mdRef streams, inherited from create(). */
    private SAXBuilder parser = null;
    /** Name of packager who created this manifest object, for looking up configuration entries. */
    private String configName;
    // Space-separated list of local schema declarations; built once at class
    // load time, since it depends only on the DSpace configuration.
    private static String localSchemas;
static
{
String dspace_dir = ConfigurationManager.getProperty("dspace.dir");
File xsdPath1 = new File(dspace_dir+"/config/schemas/");
File xsdPath2 = new File(dspace_dir+"/config/");
Enumeration<String> pe = (Enumeration<String>)ConfigurationManager.propertyNames();
StringBuffer result = new StringBuffer();
while (pe.hasMoreElements())
{
// config lines have the format:
// mets.xsd.{identifier} = {namespace} {xsd-URL}
// e.g.
// mets.xsd.dc = http://purl.org/dc/elements/1.1/ dc.xsd
// (filename is relative to {dspace_dir}/config/schemas/)
String key = pe.nextElement();
if (key.startsWith(CONFIG_XSD_PREFIX))
{
String spec = ConfigurationManager.getProperty(key);
String val[] = spec.trim().split("\\s+");
if (val.length == 2)
{
File xsd = new File(xsdPath1, val[1]);
if (!xsd.exists())
{
xsd = new File(xsdPath2, val[1]);
}
if (!xsd.exists())
{
log.warn("Schema file not found for config entry=\"" + spec + "\"");
}
else
{
try
{
String u = xsd.toURL().toString();
if (result.length() > 0)
{
result.append(" ");
}
result.append(val[0]).append(" ").append(u);
}
catch (java.net.MalformedURLException e)
{
log.warn("Skipping badly formed XSD URL: "+e.toString());
}
}
}
else
{
log.warn("Schema config entry has wrong format, entry=\"" + spec + "\"");
}
}
}
localSchemas = result.toString();
if (log.isDebugEnabled())
{
log.debug("Got local schemas = \"" + localSchemas + "\"");
}
}
/**
* Default constructor, only called internally.
* @param builder XML parser (for parsing mdRef'd files and binData)
* @param mets parsed METS document
*/
private METSManifest(SAXBuilder builder, Element mets, String configName)
{
super();
this.mets = mets;
parser = builder;
this.configName = configName;
}
/**
 * Create a new manifest object from a serialized METS XML document.
 * Parses the document read from the input stream, optionally validating.
 * @param is input stream containing serialized XML
 * @param validate if true, enable XML schema validation of the document
 *          (and of any sub-documents parsed later with the same builder).
 * @param configName configuration name used to resolve crosswalk mappings
 * @throws MetadataValidationException if there is any error parsing
 *          or validating the METS.
 * @return new METSManifest object.
 */
public static METSManifest create(InputStream is, boolean validate, String configName)
    throws IOException,
           MetadataValidationException
{
    SAXBuilder builder = new SAXBuilder(validate);
    builder.setIgnoringElementContentWhitespace(true);
    if (validate)
    {
        builder.setFeature("http://apache.org/xml/features/validation/schema", true);
    }
    // Point the parser at local copies of schemas (from the configuration)
    // to speed up validation.
    if (localSchemas.length() > 0)
    {
        builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
    }
    Document metsDocument;
    try
    {
        metsDocument = builder.build(is);
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("Error validating METS in "
                + is.toString(), je);
    }
    return new METSManifest(builder, metsDocument.getRootElement(), configName);
}
/**
 * Gets name of the profile to which this METS document conforms.
 * @return value of the PROFILE attribute of the mets element, or null if none.
 */
public String getProfile()
{
    return mets.getAttributeValue("PROFILE");
}
/**
 * Return the OBJID attribute of the METS manifest.
 * This is where the Handle URI/URN of the object can be found.
 *
 * @return OBJID attribute of METS manifest, or null if none.
 */
public String getObjID()
{
    return mets.getAttributeValue("OBJID");
}
/**
 * Gets all <code>file</code> elements which make up
 * the item's content. The list is computed once and cached.
 * @return a List of <code>Element</code>s, possibly empty but never null.
 */
public List<Element> getContentFiles()
    throws MetadataValidationException
{
    if (contentFiles != null)
    {
        return contentFiles;
    }
    contentFiles = new ArrayList<Element>();
    Element fileSec = mets.getChild("fileSec", metsNS);
    if (fileSec != null)
    {
        // Collect every <file> from every <fileGrp>, in document order.
        for (Iterator fgi = fileSec.getChildren("fileGrp", metsNS).iterator(); fgi.hasNext();)
        {
            Element fg = (Element) fgi.next();
            for (Iterator fi = fg.getChildren("file", metsNS).iterator(); fi.hasNext();)
            {
                contentFiles.add((Element) fi.next());
            }
        }
    }
    return contentFiles;
}
/**
 * Gets list of all <code>mdRef</code> elements in the METS
 * document. Used by the ingester to e.g. check that all
 * required files are present. The list is computed once and cached.
 * @return a List of <code>Element</code>s.
 */
public List getMdFiles()
    throws MetadataValidationException
{
    if (mdFiles != null)
    {
        return mdFiles;
    }
    try
    {
        // Use a namespace with a known prefix so the XPath resolves.
        XPath mdRefPath = XPath.newInstance("descendant::mets:mdRef");
        mdRefPath.addNamespace(metsNS);
        mdFiles = mdRefPath.selectNodes(mets);
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("Failed while searching for mdRef elements in manifest: ", je);
    }
    return mdFiles;
}
/**
 * Get the "original" file element for a derived file.
 * Finds the original from which this was derived by matching the GROUPID
 * attribute that binds it to its original. For instance, the file for
 * a thumbnail image would have the same GROUPID as its full-size version.
 * <p>
 * NOTE: This pattern of relating derived files through the GROUPID
 * attribute is peculiar to the DSpace METS SIP profile, and may not be
 * generally useful with other sorts of METS documents.
 * @param file METS file element of derived file
 * @return file path of original or null if none found.
 */
public String getOriginalFilePath(Element file)
{
    String groupID = file.getAttributeValue("GROUPID");
    if (groupID == null || groupID.equals(""))
    {
        return null;
    }
    try
    {
        // NOTE(review): groupID is interpolated into the XPath without
        // escaping; a GROUPID containing a double quote would break the
        // expression — confirm the attribute values are trusted.
        XPath xpath = XPath.newInstance(
            "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\""+groupID+"\"]");
        xpath.addNamespace(metsNS);
        List originals = xpath.selectNodes(mets);
        if (originals.isEmpty())
        {
            return null;
        }
        if (log.isDebugEnabled())
        {
            log.debug("Got ORIGINAL file for derived=" + file.toString());
        }
        Element flocat = ((Element) originals.get(0)).getChild("FLocat", metsNS);
        return (flocat == null) ? null : flocat.getAttributeValue("href", xlinkNS);
    }
    catch (JDOMException je)
    {
        log.warn("Got exception on XPATH looking for Original file, "+je.toString());
        return null;
    }
}
// Translate a METS bundle name to its DSpace equivalent: "CONTENT" maps
// to the DSpace content bundle and "MANIFESTMD" to the metadata bundle;
// any other name passes through unchanged.
private static String normalizeBundleName(String in)
{
    if (in.equals("CONTENT"))
    {
        return Constants.CONTENT_BUNDLE_NAME;
    }
    if (in.equals("MANIFESTMD"))
    {
        return Constants.METADATA_BUNDLE_NAME;
    }
    return in;
}
/**
 * Get the DSpace bundle name corresponding to the <code>USE</code>
 * attribute of the file group enclosing this <code>file</code> element.
 *
 * @param file the <code>file</code> element whose parent fileGrp is examined
 * @return DSpace bundle name
 * @throws MetadataValidationException when there is no USE attribute on the enclosing fileGrp.
 */
public static String getBundleName(Element file)
    throws MetadataValidationException
{
    String use = file.getParentElement().getAttributeValue("USE");
    if (use == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: every fileGrp element must have a USE attribute.");
    }
    return normalizeBundleName(use);
}
/**
 * Get the "local" file name of this <code>file</code> or <code>mdRef</code> element.
 * By "local" we mean the reference to the actual resource containing
 * the data for this file, e.g. a relative path within a Zip or tar archive
 * if the METS is serving as a manifest for that sort of package.
 * @param file a <code>file</code> or <code>mdRef</code> element
 * @return "local" file name (i.e. relative to package or content
 *  directory) corresponding to this <code>file</code> or <code>mdRef</code> element.
 * @throws MetadataValidationException when there is not enough information to find a resource identifier.
 */
public static String getFileName(Element file)
    throws MetadataValidationException
{
    Element ref;
    if (file.getName().equals("file"))
    {
        ref = file.getChild("FLocat", metsNS);
        if (ref == null)
        {
            // Distinguish "no location at all" from the forbidden
            // embedded-content alternative, for a clearer error message.
            if (file.getChild("FContent", metsNS) == null)
            {
                throw new MetadataValidationException("Invalid METS Manifest: Every file element must have FLocat child.");
            }
            else
            {
                throw new MetadataValidationException("Invalid METS Manifest: file element has forbidden FContent child, only FLocat is allowed.");
            }
        }
    }
    else if (file.getName().equals("mdRef"))
    {
        // mdRef carries its own location attributes.
        ref = file;
    }
    else
    {
        // FIXED: message previously said "recognized" where it meant
        // "unrecognized", inverting its meaning.
        throw new MetadataValidationException("getFileName() called with unrecognized element type: " + file.toString());
    }
    String loctype = ref.getAttributeValue("LOCTYPE");
    if (loctype != null && loctype.equals("URL"))
    {
        String result = ref.getAttributeValue("href", xlinkNS);
        if (result == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: FLocat/mdRef is missing the required xlink:href attribute.");
        }
        return result;
    }
    throw new MetadataValidationException("Invalid METS Manifest: FLocat/mdRef does not have LOCTYPE=\"URL\" attribute.");
}
/**
 * Returns file element corresponding to primary bitstream.
 * There is <i>ONLY</i> a primary bitstream if the first {@code div} under
 * first {@code structMap} has an {@code fptr}.
 *
 * @return file element of Item's primary bitstream, or null if there is none.
 */
public Element getPrimaryOrLogoBitstream()
    throws MetadataValidationException
{
    Element fptr = getObjStructDiv().getChild("fptr", metsNS);
    if (fptr == null)
    {
        return null;
    }
    String fileID = fptr.getAttributeValue("FILEID");
    if (fileID == null)
    {
        throw new MetadataValidationException("fptr for Primary Bitstream is missing the required FILEID attribute.");
    }
    Element file = getElementByXPath("descendant::mets:file[@ID=\""+fileID+"\"]", false);
    if (file == null)
    {
        throw new MetadataValidationException("Cannot find file element for Primary Bitstream: looking for ID=" + fileID);
    }
    return file;
}
/**
 * Get the metadata type from within a *mdSec element.
 * Resolves MDTYPE="OTHER" through the OTHERMDTYPE attribute.
 * @param mdSec metadata section element (mdRef or mdWrap child expected)
 * @return metadata type name.
 * @throws MetadataValidationException if no type can be determined.
 */
public String getMdType(Element mdSec)
    throws MetadataValidationException
{
    // Prefer mdRef; fall back to mdWrap.
    Element md = mdSec.getChild("mdRef", metsNS);
    if (md == null)
    {
        md = mdSec.getChild("mdWrap", metsNS);
    }
    if (md == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element has neither mdRef nor mdWrap child.");
    }
    String type = md.getAttributeValue("MDTYPE");
    if ("OTHER".equals(type))
    {
        type = md.getAttributeValue("OTHERMDTYPE");
    }
    if (type == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: " + md.getName() + " has no MDTYPE or OTHERMDTYPE attribute.");
    }
    return type;
}
/**
 * Returns MIME type of metadata content, if available.
 * @param mdSec metadata section element to inspect
 * @return MIME type word, or null if none is available.
 */
public String getMdContentMimeType(Element mdSec)
    throws MetadataValidationException
{
    Element mdWrap = mdSec.getChild("mdWrap", metsNS);
    if (mdWrap != null)
    {
        String mimeType = mdWrap.getAttributeValue("MIMETYPE");
        // A bare xmlData child with no declared MIMETYPE is implicitly XML.
        if (mimeType == null && mdWrap.getChild("xmlData", metsNS) != null)
        {
            mimeType = "text/xml";
        }
        return mimeType;
    }
    Element mdRef = mdSec.getChild("mdRef", metsNS);
    return (mdRef == null) ? null : mdRef.getAttributeValue("MIMETYPE");
}
/**
 * Return contents of a *md element as a List of XML Element objects.
 * Gets content, dereferencing mdRef if necessary, or decoding and parsing
 * a binData that contains XML.
 * @param mdSec metadata section element (must have exactly one mdRef or mdWrap child)
 * @param callback resolver used to open streams for mdRef references
 * @return contents of metadata section, or empty list if no XML content is available.
 * @throws MetadataValidationException if METS is invalid, or there is an error parsing the XML.
 */
public List<Element> getMdContentAsXml(Element mdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           IOException, SQLException, AuthorizeException
{
    try
    {
        // Sanity check: we can only deal with exactly one mdRef|mdWrap
        // child; report more than one as an error with diagnostic detail.
        // (Some XML parsers include prettyprinting newlines as text
        // content objects, which would show up in this child list.)
        List mdc = mdSec.getChildren();
        if (mdc.size() > 1)
        {
            String id = mdSec.getAttributeValue("ID");
            // CHANGED: StringBuffer -> StringBuilder (no shared use, no
            // need for synchronization).
            StringBuilder sb = new StringBuilder();
            for (Iterator mi = mdc.iterator(); mi.hasNext();)
            {
                sb.append(", ").append(((Content)mi.next()).toString());
            }
            // FIXED: message previously ran "ID=<id>Kids=" together with
            // no separator.
            throw new MetadataValidationException("Cannot parse METS with "+mdSec.getQualifiedName()+" element that contains more than one child, size="+String.valueOf(mdc.size())+", ID="+id+", Kids="+sb.toString());
        }
        Element mdWrap = mdSec.getChild("mdWrap", metsNS);
        if (mdWrap != null)
        {
            // Inline XML content is returned directly.
            Element xmlData = mdWrap.getChild("xmlData", metsNS);
            if (xmlData != null)
            {
                return xmlData.getChildren();
            }
            Element bin = mdWrap.getChild("binData", metsNS);
            if (bin == null)
            {
                throw new MetadataValidationException("Invalid METS Manifest: mdWrap element with neither xmlData nor binData child.");
            }
            // binData: only decode and parse it if it claims to be XML.
            String mimeType = mdWrap.getAttributeValue("MIMETYPE");
            if (mimeType != null && mimeType.equalsIgnoreCase("text/xml"))
            {
                byte value[] = Base64.decodeBase64(bin.getText().getBytes());
                Document mdd = parser.build(new ByteArrayInputStream(value));
                List<Element> result = new ArrayList<Element>(1);
                result.add(mdd.getRootElement());
                return result;
            }
            log.warn("Ignoring binData section because MIMETYPE is not XML, but: "+mimeType);
            return new ArrayList<Element>(0);
        }
        // No mdWrap: the content must be referenced externally via mdRef.
        Element mdRef = mdSec.getChild("mdRef", metsNS);
        if (mdRef == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element with neither mdRef nor mdWrap child.");
        }
        String mimeType = mdRef.getAttributeValue("MIMETYPE");
        if (mimeType != null && mimeType.equalsIgnoreCase("text/xml"))
        {
            Document mdd = parser.build(callback.getInputStream(mdRef));
            List<Element> result = new ArrayList<Element>(1);
            result.add(mdd.getRootElement());
            return result;
        }
        log.warn("Ignoring mdRef section because MIMETYPE is not XML, but: "+mimeType);
        return new ArrayList<Element>(0);
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("Error parsing or validating metadata section in mdRef or binData within "+mdSec.toString(), je);
    }
}
/**
 * Return contents of a *md element as a stream.
 * Gets content, dereferencing mdRef if necessary, or decoding
 * a binData element if necessary.
 * @param mdSec metadata section element to read
 * @param callback resolver used to open streams for mdRef references
 * @return Stream containing contents of metadata section. Never returns null.
 * @throws MetadataValidationException if METS format does not contain any metadata.
 */
public InputStream getMdContentAsStream(Element mdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           IOException, SQLException, AuthorizeException
{
    Element mdWrap = mdSec.getChild("mdWrap", metsNS);
    if (mdWrap == null)
    {
        // No inline wrapper: content must be referenced externally.
        Element mdRef = mdSec.getChild("mdRef", metsNS);
        if (mdRef == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element with neither mdRef nor mdWrap child.");
        }
        return callback.getInputStream(mdRef);
    }
    Element xmlData = mdWrap.getChild("xmlData", metsNS);
    if (xmlData != null)
    {
        // NOTE(review): getBytes() encodes with the platform default
        // charset, which may disagree with the XML's declared encoding —
        // confirm whether an explicit charset should be used here.
        XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat());
        return new ByteArrayInputStream(
                outputPretty.outputString(xmlData.getChildren()).getBytes());
    }
    Element bin = mdWrap.getChild("binData", metsNS);
    if (bin == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: mdWrap element with neither xmlData nor binData child.");
    }
    return new ByteArrayInputStream(Base64.decodeBase64(bin.getText().getBytes()));
}
/**
 * Return the {@code <div>} which describes this DSpace Object (and its contents)
 * from the {@code <structMap>}. In all cases, this is the first {@code <div>}
 * in the first {@code <structMap>}.
 *
 * @return Element which is the DSpace Object Contents {@code <div>}
 * @throws MetadataValidationException if structMap or its first div is missing.
 */
public Element getObjStructDiv()
    throws MetadataValidationException
{
    // First <structMap> holds the object's structural description.
    Element sm = mets.getChild("structMap", metsNS);
    if (sm == null)
    {
        throw new MetadataValidationException("METS document is missing the required structMap element.");
    }
    // First <div> within it describes the object itself.
    Element result = sm.getChild("div", metsNS);
    if (result == null)
    {
        throw new MetadataValidationException("METS document is missing the required first div element in first structMap.");
    }
    if (log.isDebugEnabled())
    {
        log.debug("Got getObjStructDiv result=" + result.toString());
    }
    // FIXED: removed redundant (Element) cast — result already has that type.
    return result;
}
/**
 * Get child object {@code <div>}s from the METS Manifest {@code <structMap>}.
 * These {@code <div>}s reference the location of any child objects' METS manifests.
 *
 * @return a List of {@code Element}s, each a {@code <div>}. May be empty but NOT null.
 * @throws MetadataValidationException if the object's structMap div is missing.
 */
public List getChildObjDivs()
    throws MetadataValidationException
{
    // Children of the object's own <div> reference the child METS manifests.
    return getObjStructDiv().getChildren("div", metsNS);
}
/**
 * Retrieve the file paths for the children objects' METS Manifest files.
 * These file paths are located in the {@code <mptr>} where @LOCTYPE=URL.
 *
 * @return array of Strings, corresponding to relative file paths of children
 *         METS manifests. May be empty but never null.
 * @throws MetadataValidationException if the object's structMap div is missing.
 */
public String[] getChildMetsFilePaths()
    throws MetadataValidationException
{
    List<String> childPathList = new ArrayList<String>();
    // Each child <div> may carry <mptr> pointers; the one with
    // LOCTYPE=URL holds the manifest path in its xlink:href.
    // (JDOM getChildren() returns an empty list, never null, so the
    // former explicit null checks were redundant and have been removed.)
    for (Iterator childIterator = getChildObjDivs().iterator(); childIterator.hasNext();)
    {
        Element childDiv = (Element) childIterator.next();
        for (Iterator mptrIterator = childDiv.getChildren("mptr", metsNS).iterator(); mptrIterator.hasNext();)
        {
            Element mptr = (Element) mptrIterator.next();
            String locType = mptr.getAttributeValue("LOCTYPE");
            if ("URL".equals(locType))
            {
                String filePath = mptr.getAttributeValue("href", xlinkNS);
                if (filePath != null && filePath.length() > 0)
                {
                    childPathList.add(filePath);
                }
            }
        }
    }
    // toArray(T[]) already returns String[]; no cast needed.
    return childPathList.toArray(new String[childPathList.size()]);
}
/**
 * Return the reference to the Parent Object from the "Parent" {@code <structMap>}.
 * This parent object is the owner of the current object.
 *
 * @return Link to the Parent Object (this is the Handle of that Parent),
 *         or null if the mptr carries no href.
 * @throws MetadataValidationException if the Parent structMap or its div is missing.
 */
public String getParentOwnerLink()
    throws MetadataValidationException
{
    // Locate the <structMap LABEL='Parent'> among all structMaps.
    Element parentStructMap = null;
    List<Element> structMaps = mets.getChildren("structMap", metsNS);
    for (Element structMap : structMaps)
    {
        String label = structMap.getAttributeValue("LABEL");
        if (label != null && label.equalsIgnoreCase("Parent"))
        {
            parentStructMap = structMap;
            break;
        }
    }
    if (parentStructMap == null)
    {
        throw new MetadataValidationException("METS document is missing the required structMap[@LABEL='Parent'] element.");
    }
    Element linkDiv = parentStructMap.getChild("div", metsNS);
    if (linkDiv == null)
    {
        throw new MetadataValidationException("METS document is missing the required first div element in structMap[@LABEL='Parent'].");
    }
    // The link lives in the <mptr>'s @xlink:href attribute.
    Element mptr = linkDiv.getChild("mptr", metsNS);
    return (mptr == null) ? null : mptr.getAttributeValue("href", xlinkNS);
}
// Return a single Element node found by a one-off XPath expression.
// Use only when the path varies each time you call it (the Pattern is
// not cached). When nullOk is true, a non-match returns null; otherwise
// any non-Element result is a validation error.
private Element getElementByXPath(String path, boolean nullOk)
    throws MetadataValidationException
{
    try
    {
        XPath xp = XPath.newInstance(path);
        xp.addNamespace(metsNS);
        xp.addNamespace(xlinkNS);
        Object node = xp.selectSingleNode(mets);
        if (node instanceof Element)
        {
            return (Element) node;
        }
        if (node == null && nullOk)
        {
            return null;
        }
        throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"");
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\""+path+"\"", je);
    }
}
// Find the crosswalk plugin for the indicated metadata type (e.g. "DC",
// "MODS"). The configuration may map the metadata type to a different
// crosswalk name, either per-packager:
//     mets.<packagerName>.ingest.crosswalk.MDNAME = XWALKNAME
// or for METS in general:
//     mets.default.ingest.crosswalk.MDNAME = XWALKNAME
// If neither key is set, the type name itself is used as the plugin name.
private Object getCrosswalk(String type, Class clazz)
{
    String xwalkName = ConfigurationManager.getProperty(
        CONFIG_METS_PREFIX + configName + ".ingest.crosswalk." + type);
    if (xwalkName == null)
    {
        xwalkName = ConfigurationManager.getProperty(
            CONFIG_METS_PREFIX + "default.ingest.crosswalk." + type);
    }
    if (xwalkName == null)
    {
        xwalkName = type;
    }
    return PluginManager.getNamedPlugin(clazz, xwalkName);
}
/**
 * Gets all dmdSec elements containing metadata for the DSpace Item.
 *
 * @return array of Elements, each a dmdSec. May be empty but NOT null.
 * @throws MetadataValidationException if the METS is missing a reference to item-wide
 *  DMDs in the correct place.
 */
public Element[] getItemDmds()
    throws MetadataValidationException
{
    // div@DMDID is an IDREFS attribute: a space-separated list of IDs.
    String dmds = getObjStructDiv().getAttributeValue("DMDID");
    if (dmds == null)
    {
        throw new MetadataValidationException("Invalid METS: Missing reference to Item descriptive metadata, first div on first structmap must have a DMDID attribute.");
    }
    return getDmdElements(dmds);
}
/**
 * Gets all dmdSec elements named in a space-separated ID list.
 *
 * @param dmdList space-separated list of DMDIDs
 * @return array of Elements, each a dmdSec. May be empty but NOT null.
 * @throws MetadataValidationException if a referenced dmdSec cannot be resolved.
 */
public Element[] getDmdElements(String dmdList)
    throws MetadataValidationException
{
    // Guard clause: no list means no sections.
    if (dmdList == null || dmdList.isEmpty())
    {
        return new Element[0];
    }
    String[] ids = dmdList.split("\\s+");
    Element[] sections = new Element[ids.length];
    for (int i = 0; i < ids.length; ++i)
    {
        sections[i] = getElementByXPath("mets:dmdSec[@ID=\""+ids[i]+"\"]", false);
    }
    return sections;
}
/**
 * Return rights metadata section(s) relevant to the item as a whole.
 * @return array of rightsMD elements, possibly empty but never null.
 * @throws MetadataValidationException if METS is invalid, e.g. referenced amdSec is missing.
 */
public Element[] getItemRightsMD()
    throws MetadataValidationException
{
    // div@ADMID is an IDREFS attribute: a space-separated list of IDs.
    Element objDiv = getObjStructDiv();
    String admRefs = objDiv.getAttributeValue("ADMID");
    if (admRefs == null)
    {
        if (log.isDebugEnabled())
        {
            log.debug("getItemRightsMD: No ADMID references found.");
        }
        return new Element[0];
    }
    List<Element> rightsElements = new ArrayList<Element>();
    for (String amdID : admRefs.split("\\s+"))
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\""+amdID+"\"]", false);
        List rmds = amdSec.getChildren("rightsMD", metsNS);
        if (!rmds.isEmpty())
        {
            rightsElements.addAll(rmds);
        }
    }
    return rightsElements.toArray(new Element[rightsElements.size()]);
}
/**
 * Invokes appropriate crosswalks on Item-wide descriptive metadata.
 * Thin wrapper around the generic section crosswalker, kept as a separate
 * entry point so callers can express intent (descriptive vs. admin metadata).
 * @param context DSpace context
 * @param params packaging parameters passed through to the crosswalk
 * @param dso object receiving the crosswalked metadata
 * @param dmdSec dmdSec element whose content is to be crosswalked
 * @param callback resolver for externally-referenced (mdRef) metadata streams
 */
public void crosswalkItemDmd(Context context, PackageParameters params,
                             DSpaceObject dso,
                             Element dmdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    crosswalkXmd(context, params, dso, dmdSec, callback);
}
/**
 * Crosswalk all technical, provenance and rights metadata sections that
 * belong to the whole object.
 * @param context DSpace context
 * @param params packaging parameters passed through to each crosswalk
 * @param dso object receiving the crosswalked metadata
 * @param callback resolver for externally-referenced (mdRef) metadata streams
 * @throws MetadataValidationException if METS is invalid, e.g. referenced amdSec is missing.
 */
public void crosswalkObjectOtherAdminMD(Context context, PackageParameters params,
                                        DSpaceObject dso, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    // CHANGED: the three copy-pasted per-section loops are deduplicated.
    // Processing order within each amdSec is preserved:
    // techMD, then digiprovMD, then rightsMD.
    String[] sectionNames = { "techMD", "digiprovMD", "rightsMD" };
    for (String amdID : getAmdIDs())
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\""+amdID+"\"]", false);
        for (String sectionName : sectionNames)
        {
            for (Iterator ti = amdSec.getChildren(sectionName, metsNS).iterator(); ti.hasNext();)
            {
                crosswalkXmd(context, params, dso, (Element)ti.next(), callback);
            }
        }
    }
}
/**
 * Just crosswalk the sourceMD sections; used to set the handle and parent of an AIP.
 * @return true if any metadata section was actually crosswalked, false otherwise
 */
public boolean crosswalkObjectSourceMD(Context context, PackageParameters params,
                                       DSpaceObject dso, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    boolean found = false;
    for (String amdID : getAmdIDs())
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\""+amdID+"\"]", false);
        for (Iterator si = amdSec.getChildren("sourceMD", metsNS).iterator(); si.hasNext();)
        {
            crosswalkXmd(context, params, dso, (Element)si.next(), callback);
            found = true;
        }
    }
    return found;
}
/**
 * Get an array of all ADMID values referenced by this object's div.
 *
 * @return array of ADMID strings; may be empty but never null.
 * @throws MetadataValidationException if the structMap/div is missing.
 */
private String[] getAmdIDs()
    throws MetadataValidationException
{
    // div@ADMID is an IDREFS attribute: a space-separated list of IDs.
    Element objDiv = getObjStructDiv();
    String amds = objDiv.getAttributeValue("ADMID");
    if (amds == null)
    {
        if (log.isDebugEnabled())
        {
            // FIXED: message previously named "crosswalkObjectTechMD",
            // which is not the method this code lives in.
            log.debug("getAmdIDs: No ADMID references found.");
        }
        return new String[0];
    }
    return amds.split("\\s+");
}
/**
 * Crosswalk *any* kind of metadata section - techMD, rightsMD, etc.
 * Dispatch strategy: first look for an XML {@code IngestionCrosswalk} for
 * the section's metadata type; if none is registered, fall back to a
 * {@code StreamIngestionCrosswalk} fed from the section's mdRef (external
 * file) or mdWrap/binData (embedded base64) content. Sections whose
 * metadata does not apply to this kind of object are skipped with a
 * warning rather than aborting the ingest.
 */
private void crosswalkXmd(Context context, PackageParameters params,
                          DSpaceObject dso,
                          Element xmd, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    String type = getMdType(xmd);
    //First, try to find the IngestionCrosswalk to use
    IngestionCrosswalk xwalk = (IngestionCrosswalk)getCrosswalk(type, IngestionCrosswalk.class);
    // If metadata is not simply applicable to object,
    // let it go with a warning.
    try
    {
        // If we found the IngestionCrosswalk, crosswalk our XML-based content
        if (xwalk != null)
        {
            // Check if our Crosswalk actually wraps another Packager Plugin
            if(xwalk instanceof AbstractPackagerWrappingCrosswalk)
            {
                // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                // (which essentially allow us to customize the ingest process of the crosswalk)
                AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) xwalk;
                wrapper.setPackagingParameters(params);
            }
            xwalk.ingest(context, dso, getMdContentAsXml(xmd,callback));
        }
        // Otherwise, try stream-based crosswalk
        else
        {
            StreamIngestionCrosswalk sxwalk =
              (StreamIngestionCrosswalk)getCrosswalk(type, StreamIngestionCrosswalk.class);
            if (sxwalk != null)
            {
                // Check if our Crosswalk actually wraps another Packager Plugin
                if(sxwalk instanceof AbstractPackagerWrappingCrosswalk)
                {
                    // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                    // (which essentially allow us to customize the ingest process of the crosswalk)
                    AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) sxwalk;
                    wrapper.setPackagingParameters(params);
                }
                // If we found a Stream-based crosswalk that matches, we now want to
                // locate the stream we are crosswalking. This stream should be
                // referenced in METS via an <mdRef> element
                // (which is how METS references external files)
                Element mdRef = xmd.getChild("mdRef", metsNS);
                if (mdRef != null)
                {
                    // Stream is closed in the finally block even if ingest throws.
                    InputStream in = null;
                    try
                    {
                        in = callback.getInputStream(mdRef);
                        sxwalk.ingest(context, dso, in,
                                      mdRef.getAttributeValue("MIMETYPE"));
                    }
                    finally
                    {
                        if (in != null)
                        {
                            in.close();
                        }
                    }
                } // If we couldn't find an <mdRef>, then we'll try an <mdWrap>
                  // with a <binData> element instead.
                  // (this is how METS wraps embedded base64-encoded content streams)
                else
                {
                    Element mdWrap = xmd.getChild("mdWrap", metsNS);
                    if (mdWrap != null)
                    {
                        Element bin = mdWrap.getChild("binData", metsNS);
                        if (bin == null)
                        {
                            throw new MetadataValidationException("Invalid METS Manifest: mdWrap element for streaming crosswalk without binData child.");
                        }
                        else
                        {
                            // Decode the embedded base64 payload and feed it to the crosswalk.
                            byte value[] = Base64.decodeBase64(bin.getText().getBytes());
                            sxwalk.ingest(context, dso,
                                          new ByteArrayInputStream(value),
                                          mdWrap.getAttributeValue("MIMETYPE"));
                        }
                    }
                    else
                    {
                        throw new MetadataValidationException("Cannot process METS Manifest: " +
                            "Metadata of type=" + type + " requires a reference to a stream (mdRef), which was not found in " + xmd.getName());
                    }
                }
            }
            else
            {
                throw new MetadataValidationException("Cannot process METS Manifest: " +
                    "No crosswalk found for contents of " + xmd.getName() + " element, MDTYPE=" + type);
            }
        }
    }
    catch (CrosswalkObjectNotSupported e)
    {
        // Best-effort: sections inapplicable to this object type are skipped, not fatal.
        log.warn("Skipping metadata section "+xmd.getName()+", type="+type+" inappropriate for this type of object: Object="+dso.toString()+", error="+e.toString());
    }
}
/**
 * Crosswalk the metadata associated with a particular <code>file</code>
 * element into the bitstream it corresponds to.
 * @param context a dspace context.
 * @param params any PackageParameters which may affect how bitstreams are crosswalked
 * @param bitstream bitstream target of the crosswalk
 * @param fileId value of ID attribute in the file element responsible
 *    for the contents of that bitstream.
 * @param callback resolver for externally-referenced (mdRef) metadata streams
 */
public void crosswalkBitstream(Context context, PackageParameters params,
                               Bitstream bitstream,
                               String fileId, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    Element file = getElementByXPath("descendant::mets:file[@ID=\""+fileId+"\"]", false);
    if (file == null)
    {
        throw new MetadataValidationException("Failed in Bitstream crosswalk, Could not find file element with ID=" + fileId);
    }
    // In DSpace METS SIP spec, admin metadata is only "highly
    // recommended", not "required", so it is OK if there is no ADMID.
    String amds = file.getAttributeValue("ADMID");
    if (amds == null)
    {
        log.warn("Got no bitstream ADMID, file@ID="+fileId);
        return;
    }
    // CHANGED: indexed loop replaced with enhanced-for (idiomatic,
    // identical iteration order).
    for (String amdID : amds.split("\\s+"))
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\""+amdID+"\"]", false);
        for (Iterator ti = amdSec.getChildren("techMD", metsNS).iterator(); ti.hasNext();)
        {
            crosswalkXmd(context, params, bitstream, (Element)ti.next(), callback);
        }
        for (Iterator ti = amdSec.getChildren("sourceMD", metsNS).iterator(); ti.hasNext();)
        {
            crosswalkXmd(context, params, bitstream, (Element)ti.next(), callback);
        }
    }
}
/**
 * Gets the root element of the METS document.
 * @return root <code>mets</code> element.
 */
public Element getMets()
{
    return mets;
}
/**
 * Return entire METS document as an InputStream.
 *
 * @return entire METS document as a stream of its pretty-printed serialization
 */
public InputStream getMetsAsStream()
{
    XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat());
    // FIXED: the XML declaration written by the pretty format advertises
    // UTF-8, but getBytes() previously encoded with the platform default
    // charset, producing corrupt output on non-UTF-8 platforms. Encode
    // explicitly as UTF-8 to match the declaration.
    return new ByteArrayInputStream(
            outputPretty.outputString(mets)
                        .getBytes(java.nio.charset.Charset.forName("UTF-8")));
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.List;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.apache.pdfbox.cos.COSDocument;
import org.apache.pdfbox.pdfparser.PDFParser;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.DCDate;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.core.Utils;
/**
* Accept a PDF file by itself as a SIP.
* <p>
* This is mainly a proof-of-concept to demonstrate the flexibility
* of the packager and crosswalk plugins.
* <p>
* To import, open up the PDF and try to extract sufficient metadata
* from its InfoDict.
* <p>
* Export is a crude hack: if the item has a bitstream containing PDF,
* send that, otherwise it fails. Do not attempt to insert metadata.
*
* @author Larry Stone
* @version $Revision: 5844 $
* @see PackageIngester
* @see PackageDisseminator
*/
public class PDFPackager
extends SelfNamedPlugin
implements PackageIngester, PackageDisseminator
{
/** log4j category */
private static final Logger log = Logger.getLogger(PDFPackager.class);
/** Name of the DSpace bitstream format assigned to ingested PDFs. */
private static final String BITSTREAM_FORMAT_NAME = "Adobe PDF";
/** Names under which this self-named plugin registers itself; exposed
    to callers only as a defensive copy via getPluginNames(). */
private static String aliases[] = { "PDF", "Adobe PDF", "pdf", "application/pdf" };
/**
 * @return the names under which this plugin is registered (defensive copy).
 */
public static String[] getPluginNames()
{
    // Clone so callers cannot mutate the shared alias table.
    // (Array clone() is equivalent to ArrayUtils.clone for a non-null array.)
    return aliases.clone();
}
// Utility to grovel through the registered bitstream formats: assigns
// the first non-internal format whose MIME type matches (case-insensitively);
// leaves the bitstream's format untouched when nothing matches.
private static void setFormatToMIMEType(Context context, Bitstream bs, String mimeType)
    throws SQLException
{
    for (BitstreamFormat format : BitstreamFormat.findNonInternal(context))
    {
        if (format.getMIMEType().equalsIgnoreCase(mimeType))
        {
            bs.setFormat(format);
            break;
        }
    }
}
/**
* Create new Item out of the ingested package, in the indicated
* collection. It creates a workspace item, which the application
* can then install if it chooses to bypass Workflow.
* <p>
* This is a VERY crude import of a single Adobe PDF (Portable
* Document Format) file, using the document's embedded metadata
* for package metadata. If the PDF file hasn't got the minimal
* metadata available, it is rejected.
* <p>
* @param context DSpace context.
* @param parent collection under which to create new item.
* @param pkgFile The package file to ingest
* @param params package parameters (none recognized)
* @param license may be null, which takes default license.
* @return workspace item created by ingest.
* @throws PackageException if package is unacceptable or there is
* a fatal error turning it into an Item.
*/
public DSpaceObject ingest(Context context, DSpaceObject parent,
File pkgFile, PackageParameters params,
String license)
throws PackageValidationException, CrosswalkException,
AuthorizeException, SQLException, IOException
{
boolean success = false;
Bundle original = null;
Bitstream bs = null;
WorkspaceItem wi = null;
try
{
// Save the PDF in a bitstream first, since the parser
// has to read it as well, and we cannot "rewind" it after that.
wi = WorkspaceItem.create(context, (Collection)parent, false);
Item myitem = wi.getItem();
original = myitem.createBundle("ORIGINAL");
InputStream fileStream = null;
try
{
fileStream = new FileInputStream(pkgFile);
bs = original.createBitstream(fileStream);
}
finally
{
if (fileStream != null)
{
fileStream.close();
}
}
bs.setName("package.pdf");
setFormatToMIMEType(context, bs, "application/pdf");
bs.update();
if (log.isDebugEnabled())
{
log.debug("Created bitstream ID=" + String.valueOf(bs.getID()) + ", parsing...");
}
crosswalkPDF(context, myitem, bs.retrieve());
wi.update();
context.commit();
success = true;
log.info(LogManager.getHeader(context, "ingest",
"Created new Item, db ID="+String.valueOf(myitem.getID())+
", WorkspaceItem ID="+String.valueOf(wi.getID())));
myitem = PackageUtils.finishCreateItem(context, wi, null, params);
return myitem;
}
finally
{
// get rid of bitstream and item if ingest fails
if (!success)
{
if (original != null && bs != null)
{
original.removeBitstream(bs);
}
if (wi != null)
{
wi.deleteAll();
}
}
context.commit();
}
}
/**
* IngestAll() cannot be implemented for a PDF ingester, because there's only one PDF to ingest
*/
public List<DSpaceObject> ingestAll(Context context, DSpaceObject parent, File pkgFile,
PackageParameters params, String license)
throws PackageException, UnsupportedOperationException,
CrosswalkException, AuthorizeException,
SQLException, IOException
{
throw new UnsupportedOperationException("PDF packager does not support the ingestAll() operation at this time.");
}
/**
* Replace is not implemented.
*/
public DSpaceObject replace(Context context, DSpaceObject dso,
File pkgFile, PackageParameters params)
throws PackageException, UnsupportedOperationException,
CrosswalkException, AuthorizeException,
SQLException, IOException
{
throw new UnsupportedOperationException("PDF packager does not support the replace() operation at this time.");
}
/**
* ReplaceAll() cannot be implemented for a PDF ingester, because there's only one PDF to ingest
*/
public List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
File pkgFile, PackageParameters params)
throws PackageException, UnsupportedOperationException,
CrosswalkException, AuthorizeException,
SQLException, IOException
{
throw new UnsupportedOperationException("PDF packager does not support the replaceAll() operation at this time.");
}
/**
* VERY crude dissemination: just look for the first
* bitstream with the PDF package type, and toss it out.
* Works on packages importer with this packager, and maybe some others.
*/
public void disseminate(Context context, DSpaceObject dso,
PackageParameters params, File pkgFile)
throws PackageValidationException, CrosswalkException,
AuthorizeException, SQLException, IOException
{
if (dso.getType() != Constants.ITEM)
{
throw new PackageValidationException("This disseminator can only handle objects of type ITEM.");
}
Item item = (Item)dso;
BitstreamFormat pdff = BitstreamFormat.findByShortDescription(context,
BITSTREAM_FORMAT_NAME);
if (pdff == null)
{
throw new PackageValidationException("Cannot find BitstreamFormat \"" + BITSTREAM_FORMAT_NAME + "\"");
}
Bitstream pkgBs = PackageUtils.getBitstreamByFormat(item, pdff, Constants.DEFAULT_BUNDLE_NAME);
if (pkgBs == null)
{
throw new PackageValidationException("Cannot find Bitstream with format \"" + BITSTREAM_FORMAT_NAME + "\"");
}
//Make sure our package file exists
if(!pkgFile.exists())
{
PackageUtils.createFile(pkgFile);
}
//open up output stream to copy bistream to file
FileOutputStream out = null;
try
{
//open up output stream to copy bistream to file
out = new FileOutputStream(pkgFile);
Utils.copy(pkgBs.retrieve(), out);
}
finally
{
if (out != null)
{
out.close();
}
}
}
/**
* disseminateAll() cannot be implemented for a PDF disseminator, because there's only one PDF to disseminate
*/
public List<File> disseminateAll(Context context, DSpaceObject dso,
PackageParameters params, File pkgFile)
throws PackageException, CrosswalkException,
AuthorizeException, SQLException, IOException
{
throw new UnsupportedOperationException("PDF packager does not support the disseminateAll() operation at this time.");
}
/**
* Identifies the MIME-type of this package, i.e. "application/pdf".
*
* @return the MIME type (content-type header) of the package to be returned
*/
public String getMIMEType(PackageParameters params)
{
return "application/pdf";
}
private void crosswalkPDF(Context context, Item item, InputStream metadata)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
COSDocument cos = null;
try
{
PDFParser parser = new PDFParser(metadata);
parser.parse();
cos = parser.getDocument();
// sanity check: PDFBox breaks on encrypted documents, so give up.
if(cos.getEncryptionDictionary() != null)
{
throw new MetadataValidationException("This packager cannot accept an encrypted PDF document.");
}
/* PDF to DC "crosswalk":
*
* NOTE: This is not in a crosswalk plugin because (a) it isn't
* useful anywhere else, and more importantly, (b) the source
* data is not XML so it doesn't fit the plugin's interface.
*
* pattern of crosswalk -- PDF dict entries to DC:
* Title -> title.null
* Author -> contributor.author
* CreationDate -> date.created
* ModDate -> date.created
* Creator -> description.provenance (application that created orig)
* Producer -> description.provenance (convertor to pdf)
* Subject -> description.abstract
* Keywords -> subject.other
* date is java.util.Calendar
*/
PDDocument pd = new PDDocument(cos);
PDDocumentInformation docinfo = pd.getDocumentInformation();
String title = docinfo.getTitle();
// sanity check: item must have a title.
if (title == null)
{
throw new MetadataValidationException("This PDF file is unacceptable, it does not have a value for \"Title\" in its Info dictionary.");
}
if (log.isDebugEnabled())
{
log.debug("PDF Info dict title=\"" + title + "\"");
}
item.addDC("title", null, "en", title);
String value = docinfo.getAuthor();
if (value != null)
{
item.addDC("contributor", "author", null, value);
if (log.isDebugEnabled())
{
log.debug("PDF Info dict author=\"" + value + "\"");
}
}
value = docinfo.getCreator();
if (value != null)
{
item.addDC("description", "provenance", "en",
"Application that created the original document: " + value);
}
value = docinfo.getProducer();
if (value != null)
{
item.addDC("description", "provenance", "en",
"Original document converted to PDF by: " + value);
}
value = docinfo.getSubject();
if (value != null)
{
item.addDC("description", "abstract", null, value);
}
value = docinfo.getKeywords();
if (value != null)
{
item.addDC("subject", "other", null, value);
}
// Take either CreationDate or ModDate as "date.created",
// Too bad there's no place to put "last modified" in the DC.
Calendar calValue = docinfo.getCreationDate();
if (calValue == null)
{
calValue = docinfo.getModificationDate();
}
if (calValue != null)
{
item.addDC("date", "created", null,
(new DCDate(calValue.getTime())).toString());
}
item.update();
}
finally
{
if (cos != null)
{
cos.close();
}
}
}
/**
* Returns a user help string which should describe the
* additional valid command-line options that this packager
* implementation will accept when using the <code>-o</code> or
* <code>--option</code> flags with the Packager script.
*
* @return a string describing additional command-line options available
* with this packager
*/
@Override
public String getParameterHelp()
{
return "No additional options available.";
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
/**
* An abstract implementation of a DSpace Package Ingester, which
* implements a few helper/utility methods that most (all?) PackageIngesters
* may find useful.
* <P>
* First, implements recursive functionality in ingestAll() and replaceAll()
* methods of the PackageIngester interface. These methods are setup to
* recursively call ingest() and replace() respectively.
* <P>
* Finally, it also implements several utility methods (createDSpaceObject(),
* finishCreateItem(), updateDSpaceObject()) which subclasses may find useful.
* This classes will allow subclasses to easily create/update objects without
* having to worry too much about normal DSpace submission workflows (which is
* taken care of in these utility methods).
* <P>
* All Package ingesters should either extend this abstract class
* or implement <code>PackageIngester</code> to better suit their needs.
*
* @author Tim Donohue
* @see PackageIngester
*/
public abstract class AbstractPackageIngester
    implements PackageIngester
{
    /** log4j category */
    private static Logger log = Logger.getLogger(AbstractPackageIngester.class);

    /**
     * References to other packages -- these are the next packages to ingest recursively
     * Key = DSpace Object just ingested, Value = List of all packages relating to a DSpaceObject
     **/
    private Map<DSpaceObject,List<String>> packageReferences = new HashMap<DSpaceObject,List<String>>();

    /** List of all successfully ingested/replaced DSpace objects */
    private List<DSpaceObject> dsoIngestedList = new ArrayList<DSpaceObject>();

    /**
     * Recursively create one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object is created under the indicated parent.  All other
     * objects are created based on their relationship to the initial object.
     * <p>
     * For example, a scenario may be to create a Collection based on a
     * collection-level package, and also create an Item for every item-level
     * package referenced by the collection-level package.
     * <p>
     * The output of this method is one or more newly created <code>DspaceObject<code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>ingestAll</code>,
     * or simply forward the call to <code>ingest</code> if it is unable to support
     * recursive ingestion.
     * <p>
     * The deposit license (Only significant for Item) is passed
     * explicitly as a string since there is no place for it in many
     * package formats.  It is optional and may be given as
     * <code>null</code>.
     *
     * @param context  DSpace context.
     * @param parent   parent under which to create the initial object
     *        (may be null -- in which case ingester must determine parent from package
     *         or throw an error).
     * @param pkgFile  The initial package file to ingest
     * @param params   Properties-style list of options (interpreted by each packager).
     * @param license  may be null, which takes default license.
     * @return List of DSpaceObjects created
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *         is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *  implement <code>ingestAll</code>
     */
    @Override
    public List<DSpaceObject> ingestAll(Context context, DSpaceObject parent, File pkgFile,
                                PackageParameters params, String license)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException
    {
        //If unset, make sure the Parameters specifies this is a recursive ingest
        if(!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        //Initial DSpace Object to ingest
        DSpaceObject dso = null;

        //try to ingest the first package
        try
        {
            //actually ingest pkg using provided PackageIngester
            dso = ingest(context, parent, pkgFile, params, license);
        }
        catch(IllegalStateException ie)
        {
            // NOTE: if we encounter an IllegalStateException, this means the
            // handle is already in use and this object already exists.

            //if we are skipping over (i.e. keeping) existing objects
            if(params.keepExistingModeEnabled())
            {
                log.warn(LogManager.getHeader(context, "skip_package_ingest", "Object already exists, package-skipped=" + pkgFile));
            }
            else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
            {
                throw ie;
            }
        }

        //as long as our first object was ingested successfully
        if(dso!=null)
        {
            //add to list of successfully ingested objects
            addToIngestedList(dso);

            //We can only recursively ingest non-Items
            //(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if(dso.getType()!=Constants.ITEM)
            {
                //Check if we found child package references when ingesting this latest DSpaceObject
                List<String> childPkgRefs = getPackageReferences(dso);

                //we can only recursively ingest child packages
                //if we have references to them
                if(childPkgRefs!=null && !childPkgRefs.isEmpty())
                {
                    //Recursively ingest each child package, using this current object as the parent DSpace Object
                    for(String childPkgRef : childPkgRefs)
                    {
                        // Remember where the additions start
                        int oldSize = dsoIngestedList.size();

                        //Assume package reference is relative to current package location
                        File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);

                        //fun, it's recursive! -- ingested referenced package as a child of current object
                        ingestAll(context, dso, childPkg, params, license);

                        // If a new child Item was ingested under a Collection,
                        // make sure it is mapped into that Collection.
                        mapNewChildItemToCollection(dso, oldSize);
                    }
                }//end if child pkgs
            }//end if not an Item
        }//end if DSpaceObject not null

        //Return list of all objects ingested
        return getIngestedList();
    }

    /**
     * Recursively replace one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object to replace is indicated by <code>dso</code>.  All other
     * objects are replaced based on information provided in the referenced packages.
     * <p>
     * For example, a scenario may be to replace a Collection based on a
     * collection-level package, and also replace *every* Item in that collection
     * based on the item-level packages referenced by the collection-level package.
     * <p>
     * Please note that since the <code>dso</code> input only specifies the
     * initial object to replace, any additional objects to replace must be
     * determined based on the referenced packages (or initial package itself).
     * <p>
     * The output of this method is one or more replaced <code>DspaceObject<code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>replaceAll</code>,
     * since it somewhat contradicts the archival nature of DSpace. It also
     * may choose to forward the call to <code>replace</code> if it is unable to
     * support recursive replacement.
     *
     * @param context  DSpace context.
     * @param dso initial existing DSpace Object to be replaced, may be null
     *            if object to replace can be determined from package
     * @param pkgFile  The package file to ingest.
     * @param params Properties-style list of options specific to this packager
     * @return List of DSpaceObjects replaced
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *         is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *  implement <code>replaceAll</code>
     */
    @Override
    public List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
                                File pkgFile, PackageParameters params)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException
    {
        //If unset, make sure the Parameters specifies this is a recursive replace
        if(!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        //actually ingest pkg using provided PackageIngester, and replace object
        //NOTE: 'dso' may be null!  If it is null, the PackageIngester must determine
        //      the object to be replaced from the package itself.
        DSpaceObject replacedDso = replace(context, dso, pkgFile, params);

        //as long as our object was replaced successfully
        if(replacedDso!=null)
        {
            //add to list of successfully replaced objects
            addToIngestedList(replacedDso);

            //We can only recursively replace non-Items
            //(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if(replacedDso.getType()!=Constants.ITEM)
            {
                //Check if we found child package references when replacing this latest DSpaceObject
                List<String> childPkgRefs = getPackageReferences(replacedDso);

                //we can only recursively ingest child packages
                //if we have references to them
                if(childPkgRefs!=null && !childPkgRefs.isEmpty())
                {
                    //Recursively replace each child package
                    for(String childPkgRef : childPkgRefs)
                    {
                        // Remember where the additions start
                        int oldSize = dsoIngestedList.size();

                        //Assume package reference is relative to current package location
                        File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);

                        //fun, it's recursive! -- replaced referenced package as a child of current object
                        // Pass object to replace as 'null', as we don't know which object to replace.
                        replaceAll(context, null, childPkg, params);

                        // If a new child Item was ingested under a Collection,
                        // make sure it is mapped into that Collection.
                        mapNewChildItemToCollection(replacedDso, oldSize);
                    }
                }//end if child pkgs
            }//end if not an Item
        }//end if DSpaceObject not null

        //Return list of all objects replaced
        return getIngestedList();
    }

    /**
     * After a recursive ingestAll()/replaceAll() call, map the first newly
     * added child Item into <code>parent</code> when the parent is a Collection.
     * <P>
     * A Collection can map to Items that it does not "own"; if a Collection
     * package has an Item as a child, it should be mapped regardless of
     * ownership.  The mapping is only performed when new objects were actually
     * added to the ingested list (i.e. dsoIngestedList grew past oldSize).
     * Since running ingestAll()/replaceAll() on an Item ingests at most one
     * Item, mapping the first addition is sufficient.
     *
     * @param parent  the DSpaceObject the child package was ingested under
     * @param oldSize size of the ingested list before the recursive call
     */
    private void mapNewChildItemToCollection(DSpaceObject parent, int oldSize)
        throws SQLException, AuthorizeException, IOException
    {
        if (Constants.COLLECTION == parent.getType() && dsoIngestedList.size() > oldSize)
        {
            Item childItem = (Item) dsoIngestedList.get(oldSize);
            Collection collection = (Collection) parent;
            if (!childItem.isIn(collection))
            {
                collection.addItem(childItem);
            }
        }
    }

    /**
     * During ingestion process, some submission information packages (SIPs)
     * may reference other packages to be ingested (recursively).
     * <P>
     * This method collects all references to other packages, so that we
     * can choose to recursively ingest them, as necessary, alongside the
     * DSpaceObject created from the original SIP.
     * <P>
     * References are collected based on the DSpaceObject created from the SIP
     * (this way we keep the context of these references).
     *
     * @param dso  DSpaceObject whose SIP referenced another package
     * @param packageRef  A reference to another package, which can be ingested after this one
     */
    public void addPackageReference(DSpaceObject dso, String packageRef)
    {
        // Look up (or lazily create) the reference list for this object,
        // then append the new reference.
        List<String> packageRefValues = packageReferences.get(dso);
        if (packageRefValues == null)
        {
            packageRefValues = new ArrayList<String>();
            packageReferences.put(dso, packageRefValues);
        }
        packageRefValues.add(packageRef);
    }

    /**
     * Return a list of known SIP references from a newly created DSpaceObject.
     * <P>
     * These references should detail where another package exists which
     * should be ingested alongside the current DSpaceObject.
     * <P>
     * The <code>AbstractPackageIngester</code> or an equivalent SIP handler is expected
     * to understand how to deal with these package references.
     *
     * @param dso  DSpaceObject whose SIP referenced other SIPs
     * @return List of Strings which are the references to external submission ingestion packages
     *         (may be null if no SIPs were referenced)
     */
    public List<String> getPackageReferences(DSpaceObject dso)
    {
        return packageReferences.get(dso);
    }

    /**
     * Add DSpaceObject to list of successfully ingested/replaced objects.
     * Duplicates are skipped, so each object appears at most once.
     *
     * @param dso DSpaceObject
     */
    protected void addToIngestedList(DSpaceObject dso)
    {
        //add to list of successfully ingested objects
        if(!dsoIngestedList.contains(dso))
        {
            dsoIngestedList.add(dso);
        }
    }

    /**
     * Return List of all DSpaceObjects which have been ingested/replaced by
     * this instance of the Ingester.
     * <P>
     * This list can be useful in reporting back to the user what content has
     * been added or replaced.  It's used by ingestAll() and replaceAll() to
     * return this list of everything that was ingested/replaced.
     *
     * @return List of DSpaceObjects which have been added/replaced
     */
    protected List<DSpaceObject> getIngestedList()
    {
        return dsoIngestedList;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.List;
import java.util.ArrayList;
import org.apache.log4j.Logger;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Site;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommons;
import edu.harvard.hul.ois.mets.Agent;
import edu.harvard.hul.ois.mets.Loctype;
import edu.harvard.hul.ois.mets.Mets;
import edu.harvard.hul.ois.mets.MetsHdr;
import edu.harvard.hul.ois.mets.Name;
import edu.harvard.hul.ois.mets.Role;
import edu.harvard.hul.ois.mets.Div;
import edu.harvard.hul.ois.mets.Mptr;
import edu.harvard.hul.ois.mets.StructMap;
import edu.harvard.hul.ois.mets.Type;
import edu.harvard.hul.ois.mets.helper.MetsException;
import edu.harvard.hul.ois.mets.helper.PCData;
import java.net.URLEncoder;
import java.util.Date;
import org.dspace.core.Utils;
/**
* Subclass of the METS packager framework to disseminate a DSpace
* Archival Information Package (AIP). The AIP is intended to be, foremost,
* a _complete_ and _accurate_ representation of one object in the DSpace
* object model. An AIP contains all of the information needed to restore
* the object precisely in another DSpace archive instance.
* <p>
* Configuration keys:
* <p>
* The following take as values a space-and-or-comma-separated list
* of plugin names that name *either* a DisseminationCrosswalk or
* StreamDisseminationCrosswalk plugin. Shown are the default values.
* The value may be a simple crosswalk name, or a METS MDsec-name followed by
* a colon and the crosswalk name e.g. "DSpaceDepositLicense:DSPACE_DEPLICENSE"
*
* # MD types to put in the sourceMD section of the object.
* aip.disseminate.sourceMD = AIP-TECHMD
*
* # MD types to put in the techMD section of the object (and member Bitstreams if an Item)
* aip.disseminate.techMD = PREMIS
*
* # MD types to put in digiprovMD section of the object.
* #aip.disseminate.digiprovMD =
*
* # MD types to put in the rightsMD section of the object.
* aip.disseminate.rightsMD = DSpaceDepositLicense:DSPACE_DEPLICENSE, \
* CreativeCommonsRDF:DSPACE_CCRDF, CreativeCommonsText:DSPACE_CCTXT, METSRights
*
* # MD types to put in dmdSec's corresponding the object.
* aip.disseminate.dmd = MODS, DIM
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 1.1 $
* @see AbstractMETSDisseminator
* @see AbstractPackageDisseminator
*/
public class DSpaceAIPDisseminator extends AbstractMETSDisseminator
{
private static final Logger log = Logger.getLogger(DSpaceAIPDisseminator.class);
/**
* Unique identifier for the profile of the METS document.
* To ensure uniqueness, it is the URL that the XML schema document would
* have _if_ there were to be one. There is no schema at this time.
*/
public static final String PROFILE_1_0 =
"http://www.dspace.org/schema/aip/mets_aip_1_0.xsd";
/** TYPE of the div containing AIP's parent handle in its mptr. */
public static final String PARENT_DIV_TYPE = "AIP Parent Link";
// Default MDTYPE value for deposit license -- "magic string"
// NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
private static final String DSPACE_DEPOSIT_LICENSE_MDTYPE =
"DSpaceDepositLicense:DSPACE_DEPLICENSE";
// Default MDTYPE value for CC license in RDF -- "magic string"
// NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
private static final String CREATIVE_COMMONS_RDF_MDTYPE =
"CreativeCommonsRDF:DSPACE_CCRDF";
// Default MDTYPE value for CC license in Text -- "magic string"
// NOTE: format is <label-for-METS>:<DSpace-crosswalk-name>
private static final String CREATIVE_COMMONS_TEXT_MDTYPE =
"CreativeCommonsText:DSPACE_CCTXT";
// dissemination parameters passed to the AIP Disseminator
private PackageParameters disseminateParams = null;
@Override
public void disseminate(Context context, DSpaceObject dso,
PackageParameters params, File pkgFile)
throws PackageValidationException, CrosswalkException, AuthorizeException, SQLException, IOException
{
//Before disseminating anything, save the passed in PackageParameters, so they can be used by all methods
disseminateParams = params;
boolean disseminate = true; //by default, always disseminate
//if user specified to only disseminate objects updated *after* a specific date
// (Note: this only works for Items right now, as DSpace doesn't store a
// last modified date for Collections or Communities)
if(disseminateParams.containsKey("updatedAfter") && dso.getType()==Constants.ITEM)
{
Date afterDate = Utils.parseISO8601Date(disseminateParams.getProperty("updatedAfter"));
//if null is returned, we couldn't parse the date!
if(afterDate==null)
{
throw new IOException("Invalid date passed in via 'updatedAfter' option. Date must be in ISO-8601 format, and include both a day and time (e.g. 2010-01-01T00:00:00).");
}
//check when this item was last modified.
Item i = (Item) dso;
if(i.getLastModified().after(afterDate))
{
disseminate = true;
}
else
{
disseminate = false;
}
}
if(disseminate)
{
//just do a normal dissemination as specified by AbstractMETSDisseminator
super.disseminate(context, dso, params, pkgFile);
}
}
/**
* Return identifier string for the METS profile this produces.
*
* @return string name of profile.
*/
@Override
public String getProfile()
{
return PROFILE_1_0;
}
/**
* Returns name of METS fileGrp corresponding to a DSpace bundle name.
* For AIP the mapping is direct.
*
* @param bname name of DSpace bundle.
* @return string name of fileGrp
*/
@Override
public String bundleToFileGrp(String bname)
{
return bname;
}
/**
* Create the metsHdr element for the AIP METS Manifest.
* <p>
* CREATEDATE is time at which the package (i.e. this manifest) was created.
* LASTMODDATE is last-modified time of the target object, if available.
* Agent describes the archive this belongs to.
*
* @param context DSpace Context
* @param dso current DSpace Object
* @param params Packager Parameters
* @return List of crosswalk names to run
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
*/
@Override
public MetsHdr makeMetsHdr(Context context, DSpaceObject dso,
PackageParameters params)
{
MetsHdr metsHdr = new MetsHdr();
// Note: we specifically do not add a CREATEDATE to <metsHdr>
// as for AIPs we want md5 checksums to be identical if no content
// has changed. Adding a CREATEDATE changes checksum each time.
// Add a LASTMODDATE for items
if (dso.getType() == Constants.ITEM)
{
metsHdr.setLASTMODDATE(((Item) dso).getLastModified());
}
// Agent Custodian - name custodian, the DSpace Archive, by handle.
Agent agent = new Agent();
agent.setROLE(Role.CUSTODIAN);
agent.setTYPE(Type.OTHER);
agent.setOTHERTYPE("DSpace Archive");
Name name = new Name();
name.getContent()
.add(new PCData(Site.getSiteHandle()));
agent.getContent().add(name);
metsHdr.getContent().add(agent);
// Agent Creator - name creator, which is a specific version of DSpace.
Agent agentCreator = new Agent();
agentCreator.setROLE(Role.CREATOR);
agentCreator.setTYPE(Type.OTHER);
agentCreator.setOTHERTYPE("DSpace Software");
Name creatorName = new Name();
creatorName.getContent()
.add(new PCData("DSpace " + Util.getSourceVersion()));
agentCreator.getContent().add(creatorName);
metsHdr.getContent().add(agentCreator);
return metsHdr;
}
/**
* Return the name of all crosswalks to run for the dmdSec section of
* the METS Manifest.
* <p>
* Default is DIM (DSpace Internal Metadata) and MODS.
*
* @param context DSpace Context
* @param dso current DSpace Object
* @param params Packager Parameters
* @return List of crosswalk names to run
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
*/
@Override
public String [] getDmdTypes(Context context, DSpaceObject dso, PackageParameters params)
throws SQLException, IOException, AuthorizeException
{
String dmdTypes = ConfigurationManager.getProperty("aip.disseminate.dmd");
if (dmdTypes == null)
{
String result[] = new String[2];
result[0] = "MODS";
result[1] = "DIM";
return result;
}
else
{
return dmdTypes.split("\\s*,\\s*");
}
}
/**
 * Return the names of all crosswalks to run for the techMD section of
 * the METS Manifest.
 * <p>
 * If the {@code aip.disseminate.techMD} configuration property is set,
 * its comma-separated value wins; otherwise the default is PREMIS for
 * Bitstreams and nothing for every other object type.
 *
 * @param context DSpace Context
 * @param dso current DSpace Object
 * @param params Packager Parameters
 * @return List of crosswalk names to run
 * @throws SQLException
 * @throws IOException
 * @throws AuthorizeException
 */
@Override
public String[] getTechMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException
{
    // A configured crosswalk list overrides the built-in defaults.
    String configured = ConfigurationManager.getProperty("aip.disseminate.techMD");
    if (configured != null)
    {
        return configured.split("\\s*,\\s*");
    }
    // Default: PREMIS applies only to Bitstreams.
    return (dso.getType() == Constants.BITSTREAM)
            ? new String[] { "PREMIS" }
            : new String[0];
}
/**
 * Return the names of all crosswalks to run for the sourceMD section of
 * the METS Manifest.
 * <p>
 * If the {@code aip.disseminate.sourceMD} configuration property is set,
 * its comma-separated value wins; otherwise the default is AIP-TECHMD.
 * <p>
 * In an AIP, the sourceMD element MUST include the original persistent
 * identifier (Handle) of the object, and the original persistent ID
 * (Handle) of its parent in the archive, so that it can be restored.
 *
 * @param context DSpace Context
 * @param dso current DSpace Object
 * @param params Packager Parameters
 * @return List of crosswalk names to run
 * @throws SQLException
 * @throws IOException
 * @throws AuthorizeException
 */
@Override
public String[] getSourceMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException
{
    // A configured crosswalk list overrides the built-in default.
    String configured = ConfigurationManager.getProperty("aip.disseminate.sourceMD");
    if (configured != null)
    {
        return configured.split("\\s*,\\s*");
    }
    return new String[] { "AIP-TECHMD" };
}
/**
 * Return the names of all crosswalks to run for the digiprovMD section of
 * the METS Manifest.
 * <p>
 * If the {@code aip.disseminate.digiprovMD} configuration property is set,
 * its comma-separated value is used; by default, none are returned.
 *
 * @param context DSpace Context
 * @param dso current DSpace Object
 * @param params Packager Parameters
 * @return List of crosswalk names to run
 * @throws SQLException
 * @throws IOException
 * @throws AuthorizeException
 */
@Override
public String[] getDigiprovMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException
{
    String configured = ConfigurationManager.getProperty("aip.disseminate.digiprovMD");
    // No digiprov crosswalks by default.
    return (configured == null)
            ? new String[0]
            : configured.split("\\s*,\\s*");
}
/**
 * Return the names of all crosswalks to run for the rightsMD section of
 * the METS Manifest.
 * <p>
 * If the {@code aip.disseminate.rightsMD} configuration property is set,
 * its comma-separated value wins. Otherwise, by default, the Deposit
 * License and any CC License are added for Items, and METSRights info
 * is added for all objects.
 *
 * @param context DSpace Context
 * @param dso current DSpace Object
 * @param params Packager Parameters
 * @return List of crosswalk names to run
 * @throws SQLException
 * @throws IOException
 * @throws AuthorizeException
 */
@Override
public String[] getRightsMdTypes(Context context, DSpaceObject dso, PackageParameters params)
    throws SQLException, IOException, AuthorizeException
{
    // A configured crosswalk list overrides all built-in defaults.
    String configured = ConfigurationManager.getProperty("aip.disseminate.rightsMD");
    if (configured != null)
    {
        return configured.split("\\s*,\\s*");
    }

    List<String> defaults = new ArrayList<String>();
    // License crosswalks only apply to Items.
    if (dso.getType() == Constants.ITEM)
    {
        Item item = (Item) dso;
        // Disseminate the Deposit License, if one is on file for this item.
        if (PackageUtils.findDepositLicense(context, item) != null)
        {
            defaults.add(DSPACE_DEPOSIT_LICENSE_MDTYPE);
        }
        // Prefer the RDF form of a CC license; fall back to the text form.
        if (CreativeCommons.getLicenseRdfBitstream(item) != null)
        {
            defaults.add(CREATIVE_COMMONS_RDF_MDTYPE);
        }
        else if (CreativeCommons.getLicenseTextBitstream(item) != null)
        {
            defaults.add(CREATIVE_COMMONS_TEXT_MDTYPE);
        }
    }
    // METSRights info is always included by default.
    defaults.add("METSRights");
    return defaults.toArray(new String[defaults.size()]);
}
/**
 * Get the URL by which the METS manifest refers to a Bitstream
 * member within the same package. In other words, this is generally
 * a relative path link to where the Bitstream file is within the Zipped
 * up AIP.
 * <p>
 * For a manifest-only AIP, this is a reference to an HTTP URL where
 * the bitstream should be able to be downloaded from.
 * An external AIP names a file in the package
 * with a relative URL, that is, relative pathname.
 *
 * @param bitstream the Bitstream
 * @param params Packager Parameters
 * @return String in URL format naming path to bitstream.
 */
@Override
public String makeBitstreamURL(Bitstream bitstream, PackageParameters params)
{
    boolean manifestOnly = (params != null)
            && params.getBooleanProperty("manifestOnly", false);
    if (!manifestOnly)
    {
        // Full AIP: relative file name based on the bitstream ID, plus
        // the format's primary extension when one is defined.
        String base = "bitstream_" + String.valueOf(bitstream.getID());
        String[] extensions = bitstream.getFormat().getExtensions();
        return (extensions.length > 0) ? (base + "." + extensions[0]) : base;
    }

    // Bare manifest: build an external "persistent(-ish)" URI instead.
    // Format: {site-base-url}/bitstream/{item-handle}/{sequence-id}/{bitstream-name}
    try
    {
        // Walk bundle -> item to find the handle of the parent Item of
        // this bitstream, if there is one.
        String handle = null;
        Bundle[] bundles = bitstream.getBundles();
        if (bundles.length > 0)
        {
            Item[] items = bundles[0].getItems();
            if (items.length > 0)
            {
                handle = items[0].getHandle();
            }
        }
        if (handle != null)
        {
            return ConfigurationManager
                    .getProperty("dspace.url")
                    + "/bitstream/"
                    + handle
                    + "/"
                    + String.valueOf(bitstream.getSequenceID())
                    + "/"
                    + URLEncoder.encode(bitstream.getName(), "UTF-8");
        }
        else
        {
            return ConfigurationManager
                    .getProperty("dspace.url")
                    + "/retrieve/"
                    + String.valueOf(bitstream.getID());
        }
    }
    catch (SQLException e)
    {
        log.error("Database problem", e);
    }
    catch (UnsupportedEncodingException e)
    {
        log.error("Unknown character set", e);
    }
    // We should only get here if we failed to build a nice URL above,
    // so, by default, just return the bitstream name.
    return bitstream.getName();
}
/**
 * Adds another structMap element to contain the "parent link" that
 * is an essential part of every AIP. This is a structmap of one
 * div, which contains an mptr indicating the Handle of the parent
 * of this object in the archive. The div has a unique TYPE attribute
 * value, "AIP Parent Link", and the mptr has a LOCTYPE of "HANDLE"
 * and an xlink:href containing the raw Handle value.
 * <p>
 * Note that the parent Handle has to be stored here because the
 * parent is needed to create a DSpace Object when restoring the
 * AIP; it cannot be determined later once the ingester parses it
 * out of the metadata when the crosswalks are run. So, since the
 * crosswalks require an object to operate on, and creating the
 * object requires a parent, we cannot depend on metadata processed
 * by crosswalks (e.g. AIP techMd) for the parent, it has to be at
 * a higher level in the AIP manifest. The structMap is an obvious
 * and standards-compliant location for it.
 *
 * @param context DSpace context
 * @param dso Current DSpace object
 * @param params Packager Parameters
 * @param mets METS manifest
 * @throws SQLException
 * @throws IOException
 * @throws AuthorizeException
 * @throws MetsException
 */
@Override
public void addStructMap(Context context, DSpaceObject dso,
                           PackageParameters params, Mets mets)
    throws SQLException, IOException, AuthorizeException, MetsException
{
    // Find the Handle of this object's parent in the archive.
    String parentHandle = null;
    switch (dso.getType())
    {
        case Constants.ITEM:
            // NOTE(review): assumes an owning Collection exists;
            // getOwningCollection() may be null for in-progress items -- confirm callers.
            parentHandle = ((Item)dso).getOwningCollection().getHandle();
            break;
        case Constants.COLLECTION:
            // A Collection's parent is its first owning Community.
            parentHandle = (((Collection)dso).getCommunities())[0].getHandle();
            break;
        case Constants.COMMUNITY:
            // Top-level communities are parented by the Site object.
            Community parent = ((Community)dso).getParentCommunity();
            if (parent == null)
            {
                parentHandle = Site.getSiteHandle();
            }
            else
            {
                parentHandle = parent.getHandle();
            }
            // FIX: this case previously fell through into Constants.SITE.
            // That was harmless only because the SITE case is empty; make
            // the termination explicit so a future edit cannot break it.
            break;
        case Constants.SITE:
            // The Site is the root of the archive and has no parent.
            break;
    }
    // Parent Handle should only be null if we are creating a site-wide AIP
    if(parentHandle!=null)
    {
        // add a structMap to contain div pointing to parent:
        StructMap structMap = new StructMap();
        structMap.setID(gensym("struct"));
        structMap.setTYPE("LOGICAL");
        structMap.setLABEL("Parent");
        Div div0 = new Div();
        div0.setID(gensym("div"));
        div0.setTYPE(PARENT_DIV_TYPE);
        div0.setLABEL("Parent of this DSpace Object");
        Mptr mptr = new Mptr();
        mptr.setID(gensym("mptr"));
        mptr.setLOCTYPE(Loctype.HANDLE);
        // The raw Handle value is enough for the ingester to resolve the parent.
        mptr.setXlinkHref(parentHandle);
        div0.getContent().add(mptr);
        structMap.getContent().add(div0);
        mets.getContent().add(structMap);
    }
}
/**
 * By default, include all bundles in AIP as content.
 * <P>
 * However, if the user specified a comma separated list of bundle names
 * via the "includeBundles" option, then check if this bundle is in that
 * list. If it is, return true. If it is not, return false.
 * <p>
 * Whitespace around each configured name is ignored, so both
 * "ORIGINAL,LICENSE" and "ORIGINAL, LICENSE" work.
 *
 * @param bundle Bundle to check for
 * @return true if bundle should be disseminated when disseminating Item AIPs
 */
@Override
public boolean includeBundle(Bundle bundle)
{
    //Check the 'includeBundles' option to see if a list of bundles was provided (default = "all")
    String bundleList = this.disseminateParams.getProperty("includeBundles", "all");
    if(bundleList.equalsIgnoreCase("all"))
    {
        return true; //all bundles should be disseminated
    }

    //Check if this bundle is in our list of bundles to include.
    // FIX: trim each configured name -- previously "ONE, TWO" never
    // matched bundle "TWO" because the leading space was kept.
    for(String bundleName : bundleList.split(","))
    {
        if(bundle.getName().equals(bundleName.trim()))
        {
            return true;
        }
    }
    //if not in the 'includeBundles' list, then return false
    return false;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * Create EPersons and Groups from a file of external representations.
 *
 * @author mwood
 */
public class RoleIngester implements PackageIngester
{
    private static final Logger log = LoggerFactory
            .getLogger(RoleIngester.class);

    /**
     * Common code to ingest roles from a Document.
     * <p>
     * EPersons are ingested first so that Groups can reference them as
     * members. Groups are then created (and their EPerson members added)
     * in one pass, and group-in-group memberships are wired up in a
     * second pass once every group exists.
     *
     * @param context
     *          DSpace Context
     * @param parent
     *          the Parent DSpaceObject
     * @param params
     *          Packager parameters; replace/keep-existing modes control
     *          how collisions with existing EPersons/Groups are handled
     * @param document
     *          the XML Document
     * @throws SQLException
     * @throws AuthorizeException
     * @throws PackageException
     */
    static void ingestDocument(Context context, DSpaceObject parent,
            PackageParameters params, Document document)
            throws SQLException, AuthorizeException, PackageException
    {
        // FIX: guard against an anonymous context (no current user). The
        // previous unconditional dereference threw NullPointerException.
        EPerson currentUser = context.getCurrentUser();
        String myEmail = (currentUser != null) ? currentUser.getEmail() : null;
        String myNetid = (currentUser != null) ? currentUser.getNetid() : null;

        // Ingest users (EPersons) first so Groups can use them
        NodeList users = document
                .getElementsByTagName(RoleDisseminator.EPERSON);
        for (int i = 0; i < users.getLength(); i++)
        {
            Element user = (Element) users.item(i);
            // int userID = Integer.valueOf(user.getAttribute("ID")); // FIXME
            // no way to set ID!
            NodeList emails = user.getElementsByTagName(RoleDisseminator.EMAIL);
            NodeList netids = user.getElementsByTagName(RoleDisseminator.NETID);
            EPerson eperson;
            EPerson collider;
            String email = null;
            String netid = null;
            String identity;
            // Identify the EPerson by email if given, falling back to netid.
            if (emails.getLength() > 0)
            {
                email = emails.item(0).getTextContent();
                if (email.equals(myEmail))
                {
                    continue; // Cannot operate on my own EPerson!
                }
                identity = email;
                collider = EPerson.findByEmail(context, identity);
                // collider = EPerson.find(context, userID);
            }
            else if (netids.getLength() > 0)
            {
                netid = netids.item(0).getTextContent();
                if (netid.equals(myNetid))
                {
                    continue; // Cannot operate on my own EPerson!
                }
                identity = netid;
                collider = EPerson.findByNetid(context, identity);
            }
            else
            {
                throw new PackageException("EPerson has neither email nor netid.");
            }
            // Decide what to do about a pre-existing EPerson with this identity.
            if (null != collider)
            {
                if (params.replaceModeEnabled()) // -r -f
                {
                    eperson = collider;
                }
                else if (params.keepExistingModeEnabled()) // -r -k
                {
                    log.warn("Existing EPerson {} was not restored from the package.", identity);
                    continue;
                }
                else
                {
                    throw new PackageException("EPerson " + identity + " already exists.");
                }
            }
            else
            {
                eperson = EPerson.create(context);
                log.info("Created EPerson {}.", identity);
            }
            eperson.setEmail(email);
            eperson.setNetid(netid);
            // Copy the optional profile fields, clearing any that are absent
            // so a replace fully reflects the package contents.
            NodeList data;
            data = user.getElementsByTagName(RoleDisseminator.FIRST_NAME);
            if (data.getLength() > 0)
            {
                eperson.setFirstName(data.item(0).getTextContent());
            }
            else
            {
                eperson.setFirstName(null);
            }
            data = user.getElementsByTagName(RoleDisseminator.LAST_NAME);
            if (data.getLength() > 0)
            {
                eperson.setLastName(data.item(0).getTextContent());
            }
            else
            {
                eperson.setLastName(null);
            }
            data = user.getElementsByTagName(RoleDisseminator.LANGUAGE);
            if (data.getLength() > 0)
            {
                eperson.setLanguage(data.item(0).getTextContent());
            }
            else
            {
                eperson.setLanguage(null);
            }
            // Boolean flags are encoded by mere presence of the element.
            data = user.getElementsByTagName(RoleDisseminator.CAN_LOGIN);
            eperson.setCanLogIn(data.getLength() > 0);
            data = user.getElementsByTagName(RoleDisseminator.REQUIRE_CERTIFICATE);
            eperson.setRequireCertificate(data.getLength() > 0);
            data = user.getElementsByTagName(RoleDisseminator.SELF_REGISTERED);
            eperson.setSelfRegistered(data.getLength() > 0);
            data = user.getElementsByTagName(RoleDisseminator.PASSWORD_HASH);
            if (data.getLength() > 0)
            {
                eperson.setPasswordHash(data.item(0).getTextContent());
            }
            else
            {
                eperson.setPasswordHash(null);
            }
            // Actually write Eperson info to DB
            // NOTE: this update() doesn't call a commit(). So, Eperson info
            // may still be rolled back if a subsequent error occurs
            eperson.update();
        }

        // Now ingest the Groups
        NodeList groups = document.getElementsByTagName(RoleDisseminator.GROUP);

        // Create the groups and add their EPerson members
        for (int groupx = 0; groupx < groups.getLength(); groupx++)
        {
            Element group = (Element) groups.item(groupx);
            String name = group.getAttribute(RoleDisseminator.NAME);
            try
            {
                //Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
                // TODO: is this necessary? can we leave it in format with Handle in place of <ID>?
                // For now, this is necessary, because we don't want to accidentally
                // create a new group COLLECTION_hdl:123/34_ADMIN, which is equivalent
                // to an existing COLLECTION_45_ADMIN group
                name = PackageUtils.translateGroupNameForImport(context, name);
            }
            catch(PackageException pe)
            {
                // If an error is thrown, then this Group corresponds to a
                // Community or Collection that doesn't currently exist in the
                // system. So, log a warning & skip it for now.
                log.warn("Skipping group named '" + name + "' as it seems to correspond to a Community or Collection that does not exist in the system. " +
                         "If you are performing an AIP restore, you can ignore this warning as the Community/Collection AIP will likely create this group once it is processed.");
                continue;
            }
            Group groupObj = null; // The group to restore
            Group collider = Group.findByName(context, name); // Existing group?
            if (null != collider)
            { // Group already exists, so empty it
                if (params.replaceModeEnabled()) // -r -f
                {
                    for (Group member : collider.getMemberGroups())
                    {
                        collider.removeMember(member);
                    }
                    for (EPerson member : collider.getMembers())
                    {
                        // Remove all group members *EXCEPT* we don't ever want
                        // to remove the current user from the list of Administrators
                        // (otherwise remainder of ingest will fail).
                        // NOTE: Group 1 is the Administrators group by DSpace convention.
                        if(!(collider.equals(Group.find(context, 1)) &&
                             member.equals(context.getCurrentUser())))
                        {
                            collider.removeMember(member);
                        }
                    }
                    log.info("Existing Group {} was cleared. Its members will be replaced.", name);
                    groupObj = collider;
                }
                else if (params.keepExistingModeEnabled()) // -r -k
                {
                    log.warn("Existing Group {} was not replaced from the package.",
                             name);
                    continue;
                }
                else
                {
                    throw new PackageException("Group " + name + " already exists");
                }
            }
            else
            { // No such group exists -- so, we'll need to create it!
                // First Check if this is a "typed" group (i.e. Community or Collection associated Group)
                // If so, we'll create it via the Community or Collection
                String type = group.getAttribute(RoleDisseminator.TYPE);
                if(type!=null && !type.isEmpty() && parent!=null)
                {
                    //What type of dspace object is this group associated with
                    if(parent.getType()==Constants.COLLECTION)
                    {
                        Collection collection = (Collection) parent;
                        // Create this Collection-associated group, based on its group type
                        if(type.equals(RoleDisseminator.GROUP_TYPE_ADMIN))
                        {
                            groupObj = collection.createAdministrators();
                        }
                        else if(type.equals(RoleDisseminator.GROUP_TYPE_SUBMIT))
                        {
                            groupObj = collection.createSubmitters();
                        }
                        else if(type.equals(RoleDisseminator.GROUP_TYPE_WORKFLOW_STEP_1))
                        {
                            groupObj = collection.createWorkflowGroup(1);
                        }
                        else if(type.equals(RoleDisseminator.GROUP_TYPE_WORKFLOW_STEP_2))
                        {
                            groupObj = collection.createWorkflowGroup(2);
                        }
                        else if(type.equals(RoleDisseminator.GROUP_TYPE_WORKFLOW_STEP_3))
                        {
                            groupObj = collection.createWorkflowGroup(3);
                        }
                    }
                    else if(parent.getType()==Constants.COMMUNITY)
                    {
                        Community community = (Community) parent;
                        // Create this Community-associated group, based on its group type
                        if(type.equals(RoleDisseminator.GROUP_TYPE_ADMIN))
                        {
                            groupObj = community.createAdministrators();
                        }
                    }
                    //Ignore all other dspace object types
                }
                //If group not yet created, create it with the given name
                if(groupObj==null)
                {
                    groupObj = Group.create(context);
                }
                // Always set the name: parent.createBlop() is guessing
                groupObj.setName(name);
                log.info("Created Group {}.", groupObj.getName());
            }
            // Add EPeople to newly created Group
            NodeList members = group.getElementsByTagName(RoleDisseminator.MEMBER);
            for (int memberx = 0; memberx < members.getLength(); memberx++)
            {
                Element member = (Element) members.item(memberx);
                String memberName = member.getAttribute(RoleDisseminator.NAME);
                EPerson memberEPerson = EPerson.findByEmail(context, memberName);
                if (null != memberEPerson)
                {
                    groupObj.addMember(memberEPerson);
                }
                else
                {
                    throw new PackageValidationException("EPerson " + memberName
                            + " not found, not added to " + name);
                }
            }
            // Actually write Group info to DB
            // NOTE: this update() doesn't call a commit(). So, Group info
            // may still be rolled back if a subsequent error occurs
            groupObj.update();
        }

        // Go back and add Group members, now that all groups exist
        for (int groupx = 0; groupx < groups.getLength(); groupx++)
        {
            Element group = (Element) groups.item(groupx);
            String name = group.getAttribute(RoleDisseminator.NAME);
            try
            {
                // Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
                name = PackageUtils.translateGroupNameForImport(context, name);
            }
            catch(PackageException pe)
            {
                // If an error is thrown, then this Group corresponds to a
                // Community or Collection that doesn't currently exist in the
                // system. So,skip it for now.
                // (NOTE: We already logged a warning about this group earlier as
                // this is the second time we are looping through all groups)
                continue;
            }
            // Find previously created group
            Group groupObj = Group.findByName(context, name);
            NodeList members = group
                    .getElementsByTagName(RoleDisseminator.MEMBER_GROUP);
            for (int memberx = 0; memberx < members.getLength(); memberx++)
            {
                Element member = (Element) members.item(memberx);
                String memberName = member.getAttribute(RoleDisseminator.NAME);
                //Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
                memberName = PackageUtils.translateGroupNameForImport(context, memberName);
                // Find previously created group
                Group memberGroup = Group.findByName(context, memberName);
                // FIX: report a missing member group explicitly instead of the
                // NullPointerException addMember(null) used to throw. Mirrors
                // the EPerson-member handling above.
                if (null == memberGroup)
                {
                    throw new PackageValidationException("Group " + memberName
                            + " not found, not added to " + name);
                }
                groupObj.addMember(memberGroup);
            }
            // Actually update Group info in DB
            // NOTE: Group info may still be rolled back if a subsequent error occurs
            groupObj.update();
        }
    }

    /**
     * Ingest roles from an InputStream.
     *
     * @param context
     *          DSpace Context
     * @param parent
     *          the Parent DSpaceObject
     * @param params
     *          Packager parameters
     * @param stream
     *          the XML Document InputStream
     * @throws PackageException
     * @throws SQLException
     * @throws AuthorizeException
     */
    public static void ingestStream(Context context, DSpaceObject parent,
            PackageParameters params, InputStream stream)
            throws PackageException, SQLException, AuthorizeException
    {
        Document document;

        try
        {
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setIgnoringComments(true);
            dbf.setCoalescing(true);
            DocumentBuilder db = dbf.newDocumentBuilder();
            document = db.parse(stream);
        }
        catch (ParserConfigurationException e)
        {
            throw new PackageException(e);
        }
        catch (SAXException e)
        {
            throw new PackageException(e);
        }
        catch (IOException e)
        {
            throw new PackageException(e);
        }
        /*
         * TODO ? finally { close(stream); }
         */
        ingestDocument(context, parent, params, document);
    }

    /**
     * Ingest roles from a package file. Does not create a DSpaceObject;
     * always returns null.
     *
     * @see
     * org.dspace.content.packager.PackageIngester#ingest(org.dspace.core.Context
     * , org.dspace.content.DSpaceObject, java.io.File,
     * org.dspace.content.packager.PackageParameters, java.lang.String)
     */
    @Override
    public DSpaceObject ingest(Context context, DSpaceObject parent,
            File pkgFile, PackageParameters params, String license)
            throws PackageException, CrosswalkException, AuthorizeException,
            SQLException, IOException
    {
        Document document;

        try
        {
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setIgnoringComments(true);
            dbf.setCoalescing(true);
            DocumentBuilder db = dbf.newDocumentBuilder();
            document = db.parse(pkgFile);
        }
        catch (ParserConfigurationException e)
        {
            throw new PackageException(e);
        }
        catch (SAXException e)
        {
            throw new PackageException(e);
        }
        ingestDocument(context, parent, params, document);

        /* Does not create a DSpaceObject */
        return null;
    }

    /**
     * Not supported: ingest() already handles ingestion of all roles from
     * an external file, so there is nothing to recurse over.
     *
     * @see
     * org.dspace.content.packager.PackageIngester#ingestAll(org.dspace.core
     * .Context, org.dspace.content.DSpaceObject, java.io.File,
     * org.dspace.content.packager.PackageParameters, java.lang.String)
     */
    @Override
    public List<DSpaceObject> ingestAll(Context context, DSpaceObject parent,
            File pkgFile, PackageParameters params, String license)
            throws PackageException, UnsupportedOperationException,
            CrosswalkException, AuthorizeException, SQLException, IOException
    {
        throw new PackageException(
                "ingestAll() is not implemented, as ingest() method already handles ingestion of all roles from an external file.");
    }

    /**
     * Replace roles from a package file.
     *
     * @see
     * org.dspace.content.packager.PackageIngester#replace(org.dspace.core.Context
     * , org.dspace.content.DSpaceObject, java.io.File,
     * org.dspace.content.packager.PackageParameters)
     */
    @Override
    public DSpaceObject replace(Context context, DSpaceObject dso,
            File pkgFile, PackageParameters params) throws PackageException,
            UnsupportedOperationException, CrosswalkException,
            AuthorizeException, SQLException, IOException
    {
        //Just call ingest() -- this will perform a replacement as necessary
        return ingest(context, dso, pkgFile, params, null);
    }

    /**
     * Not supported: replace() already handles replacement of all roles
     * from an external file.
     *
     * @see
     * org.dspace.content.packager.PackageIngester#replaceAll(org.dspace.core
     * .Context, org.dspace.content.DSpaceObject, java.io.File,
     * org.dspace.content.packager.PackageParameters)
     */
    @Override
    public List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
            File pkgFile, PackageParameters params) throws PackageException,
            UnsupportedOperationException, CrosswalkException,
            AuthorizeException, SQLException, IOException
    {
        throw new PackageException(
                "replaceAll() is not implemented, as replace() method already handles replacement of all roles from an external file.");
    }

    /**
     * Returns a user help string which should describe the
     * additional valid command-line options that this packager
     * implementation will accept when using the <code>-o</code> or
     * <code>--option</code> flags with the Packager script.
     *
     * @return a string describing additional command-line options available
     * with this packager
     */
    @Override
    public String getParameterHelp()
    {
        return "No additional options available.";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Context;
/**
 * Plugin Interface to interpret a Submission Information Package (SIP)
 * and create (or replace) a DSpace Object from its contents.
 * <p>
 * A package is a single data stream containing enough information to
 * construct an Object (i.e. an Item, Collection, or Community). It
 * can be anything from an archive like a Zip file with a manifest and
 * metadata, to a simple manifest containing external references to the
 * content, to a self-contained file such as a PDF. The interpretation
 * of the package is entirely at the discretion of the implementing
 * class.
 * <p>
 * The ingest methods are also given an attribute-value
 * list of "parameters" which may modify their actions.
 * The parameters list is a generalized mechanism to pass parameters
 * from the requestor to the packager, since different packagers will
 * understand different sets of parameters.
 *
 * @author Larry Stone
 * @author Tim Donohue
 * @version $Revision: 5844 $
 * @see PackageParameters
 * @see AbstractPackageIngester
 */
public interface PackageIngester
{
    /**
     * Create a new DSpaceObject out of the ingested package. The object
     * is created under the indicated parent. This creates a
     * <code>DSpaceObject</code>. For Items, it is up to the caller to
     * decide whether to install it or submit it to normal DSpace Workflow.
     * <p>
     * The deposit license (only significant for Items) is passed
     * explicitly as a string since there is no place for it in many
     * package formats. It is optional and may be given as
     * <code>null</code>.
     * <p>
     * Use the <code>ingestAll</code> method to perform a recursive ingest of all
     * packages which are referenced by an initial package.
     *
     * @param context DSpace context.
     * @param parent parent under which to create new object
     *        (may be null -- in which case ingester must determine parent from package
     *        or throw an error).
     * @param pkgFile The package file to ingest
     * @param params Properties-style list of options (interpreted by each packager).
     * @param license may be null, which takes default license.
     * @return DSpaceObject created by ingest.
     *
     * @throws PackageValidationException if package is unacceptable or there is
     *  a fatal error turning it into a DSpaceObject.
     */
    DSpaceObject ingest(Context context, DSpaceObject parent, File pkgFile,
                        PackageParameters params, String license)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException;

    /**
     * Recursively create one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object is created under the indicated parent. All other
     * objects are created based on their relationship to the initial object.
     * <p>
     * For example, a scenario may be to create a Collection based on a
     * collection-level package, and also create an Item for every item-level
     * package referenced by the collection-level package.
     * <p>
     * The output of this method is one or more newly created <code>DSpaceObject</code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>ingestAll</code>,
     * or simply forward the call to <code>ingest</code> if it is unable to support
     * recursive ingestion.
     * <p>
     * The deposit license (only significant for Items) is passed
     * explicitly as a string since there is no place for it in many
     * package formats. It is optional and may be given as
     * <code>null</code>.
     *
     * @param context DSpace context.
     * @param parent parent under which to create the initial object
     *        (may be null -- in which case ingester must determine parent from package
     *        or throw an error).
     * @param pkgFile The initial package file to ingest
     * @param params Properties-style list of options (interpreted by each packager).
     * @param license may be null, which takes default license.
     * @return List of DSpaceObjects created
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *  is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *  implement <code>ingestAll</code>
     */
    List<DSpaceObject> ingestAll(Context context, DSpaceObject parent, File pkgFile,
                                 PackageParameters params, String license)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException;

    /**
     * Replace an existing DSpace Object with contents of the ingested package.
     * The packager <em>may</em> choose not to implement <code>replace</code>,
     * since it somewhat contradicts the archival nature of DSpace.
     * The exact function of this method is highly implementation-dependent.
     * <p>
     * Use the <code>replaceAll</code> method to perform a recursive replace of
     * objects referenced by a set of packages.
     *
     * @param context DSpace context.
     * @param dso existing DSpace Object to be replaced, may be null
     *        if object to replace can be determined from package
     * @param pkgFile The package file to ingest.
     * @param params Properties-style list of options specific to this packager
     * @return DSpaceObject with contents replaced
     *
     * @throws PackageValidationException if package is unacceptable or there is
     *  a fatal error turning it into an Item.
     * @throws UnsupportedOperationException if this packager does not
     *  implement <code>replace</code>.
     */
    DSpaceObject replace(Context context, DSpaceObject dso,
                         File pkgFile, PackageParameters params)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException;

    /**
     * Recursively replace one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object to replace is indicated by <code>dso</code>. All other
     * objects are replaced based on information provided in the referenced packages.
     * <p>
     * For example, a scenario may be to replace a Collection based on a
     * collection-level package, and also replace *every* Item in that collection
     * based on the item-level packages referenced by the collection-level package.
     * <p>
     * Please note that since the <code>dso</code> input only specifies the
     * initial object to replace, any additional objects to replace must be
     * determined based on the referenced packages (or initial package itself).
     * <p>
     * The output of this method is one or more replaced <code>DSpaceObject</code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>replaceAll</code>,
     * since it somewhat contradicts the archival nature of DSpace. It also
     * may choose to forward the call to <code>replace</code> if it is unable to
     * support recursive replacement.
     *
     * @param context DSpace context.
     * @param dso initial existing DSpace Object to be replaced, may be null
     *        if object to replace can be determined from package
     * @param pkgFile The package file to ingest.
     * @param params Properties-style list of options specific to this packager
     * @return List of DSpaceObjects replaced
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *  is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *  implement <code>replaceAll</code>
     */
    List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
                                  File pkgFile, PackageParameters params)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException;

    /**
     * Returns a user help string which should describe the
     * additional valid command-line options that this packager
     * implementation will accept when using the <code>-o</code> or
     * <code>--option</code> flags with the Packager script.
     *
     * @return a string describing additional command-line options available
     * with this packager
     */
    String getParameterHelp();
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* An abstract implementation of a DSpace Package Disseminator, which
* implements a few helper/utility methods that most (all?) PackageDisseminators
* may find useful.
* <P>
* First, implements recursive functionality in the disseminateAll()
 * method of the PackageDisseminator interface.  This method is setup to
* recursively call disseminate() method.
* <P>
* All Package disseminators should either extend this abstract class
* or implement <code>PackageDisseminator</code> to better suit their needs.
*
* @author Tim Donohue
* @see PackageDisseminator
*/
public abstract class AbstractPackageDisseminator
        implements PackageDisseminator
{
    /** List of all successfully disseminated package files.
     *  Accumulated across calls; exposed via getPackageList(). */
    private List<File> packageFileList = new ArrayList<File>();

    /**
     * Recursively export one or more DSpace Objects as a series of packages.
     * This method will export the given DSpace Object as well as all referenced
     * DSpaceObjects (e.g. child objects) into a series of packages. The
     * initial object is exported to the location specified by <code>pkgFile</code>.
     * All other packages are exported to the same directory location.
     * <p>
     * Package is any serialized representation of the item, at the discretion
     * of the implementing class. It does not have to include content bitstreams.
     * <br>
     * Use the <code>params</code> parameter list to adjust the way the
     * package is made, e.g. including a "<code>metadataOnly</code>"
     * parameter might make the package a bare manifest in XML
     * instead of a Zip file including manifest and contents.
     * <br>
     * Throws an exception if the initial object is not acceptable or there is
     * a failure creating the package.
     *
     * @param context DSpace context.
     * @param dso     initial DSpace object
     * @param params  Properties-style list of options specific to this packager
     * @param pkgFile File where initial package should be written. All other
     *                packages will be written to the same directory as this File.
     * @return List of all package Files disseminated so far by this instance,
     *         including the initial package and all recursively created ones
     * @throws PackageException if a package cannot be created or there is
     *         a fatal error in creating it.
     */
    @Override
    public List<File> disseminateAll(Context context, DSpaceObject dso,
                     PackageParameters params, File pkgFile)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException
    {
        //If unset, make sure the Parameters specifies this is a recursive dissemination
        if(!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        //try to disseminate the first object using provided PackageDisseminator
        disseminate(context, dso, params, pkgFile);

        //check if package was disseminated
        if(pkgFile.exists())
        {
            //add to list of successfully disseminated packages
            addToPackageList(pkgFile);

            //We can only recursively disseminate non-Items
            //(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if(dso.getType()!=Constants.ITEM)
            {
                //Determine where first file package was disseminated to, as all
                //others will be written to same directory
                String pkgDirectory = pkgFile.getCanonicalFile().getParent();
                if(!pkgDirectory.endsWith(File.separator))
                {
                    pkgDirectory += File.separator;
                }
                //Child packages reuse the initial package's file extension
                String fileExtension = PackageUtils.getFileExtension(pkgFile.getName());

                //recursively disseminate content, based on object type
                switch (dso.getType())
                {
                    case Constants.COLLECTION :
                        //Also find all Items in this Collection and disseminate
                        Collection collection = (Collection) dso;
                        // NOTE(review): the ItemIterator is never explicitly
                        // closed here -- confirm whether close() is required
                        // in this DSpace version to release DB resources.
                        ItemIterator iterator = collection.getItems();
                        while(iterator.hasNext())
                        {
                            Item item = iterator.next();

                            //disseminate all items (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(item, fileExtension);
                            disseminateAll(context, item, params, new File(childFileName));
                        }

                        break;
                    case Constants.COMMUNITY :
                        //Also find all SubCommunities in this Community and disseminate
                        Community community = (Community) dso;
                        Community[] subcommunities = community.getSubcommunities();
                        for(int i=0; i<subcommunities.length; i++)
                        {
                            //disseminate all sub-communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(subcommunities[i], fileExtension);
                            disseminateAll(context, subcommunities[i], params, new File(childFileName));
                        }

                        //Also find all Collections in this Community and disseminate
                        Collection[] collections = community.getCollections();
                        for(int i=0; i<collections.length; i++)
                        {
                            //disseminate all collections (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(collections[i], fileExtension);
                            disseminateAll(context, collections[i], params, new File(childFileName));
                        }

                        break;
                    case Constants.SITE :
                        //Also find all top-level Communities and disseminate
                        Community[] topCommunities = Community.findAllTop(context);
                        for(int i=0; i<topCommunities.length; i++)
                        {
                            //disseminate all top-level communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(topCommunities[i], fileExtension);
                            disseminateAll(context, topCommunities[i], params, new File(childFileName));
                        }

                        break;
                }//end switch
            }//end if not an Item
        }//end if pkgFile exists

        //return list of all successfully disseminated packages
        return getPackageList();
    }

    /**
     * Add a File to the list of successfully disseminated package files.
     * Duplicate Files (by File equality) are only recorded once.
     *
     * @param f the package File to record
     */
    protected void addToPackageList(File f)
    {
        //add to list of successfully disseminated packages
        if(!packageFileList.contains(f))
        {
            packageFileList.add(f);
        }
    }

    /**
     * Return List of all package Files which have been disseminated by
     * this instance of the Disseminator.
     * <P>
     * This list can be useful in reporting back to the user what content has
     * been disseminated as packages.  It's used by disseminateAll() to report
     * what packages were created.
     *
     * @return List of Files which correspond to the disseminated packages
     */
    protected List<File> getPackageList()
    {
        return packageFileList;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.util.Enumeration;
import java.util.Properties;
import javax.servlet.ServletRequest;
/**
* Parameter list for SIP and DIP packagers. It's really just
* a Java Properties object extended so each parameter can have
* multiple values. This was necessary so it can represent Servlet
* parameters, which have multiple values. It is also helpful to
* indicate e.g. metadata choices for package formats like METS that
* allow many different metadata segments.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class PackageParameters extends Properties
{
    private static final long serialVersionUID = 1L;

    // Use non-printing FS (file separator) as arg-sep token, like Perl $;
    private static final String SEPARATOR = "\034";

    // Regular expression to match the separator token:
    private static final String SEPARATOR_REGEX = "\\034";

    /** Creates an empty parameter list. */
    public PackageParameters()
    {
        super();
    }

    /**
     * Creates a parameter list backed by the given default properties.
     *
     * @param defaults default property values
     */
    public PackageParameters(Properties defaults)
    {
        super(defaults);
    }

    /**
     * Creates new parameters object with the parameter values from
     * a servlet request object.  Multi-valued request parameters are
     * joined into a single property value with the internal separator
     * token; recover them with {@link #getProperties(String)}.
     *
     * @param request - the request from which to take the values
     * @return new parameters object.
     */
    public static PackageParameters create(ServletRequest request)
    {
        PackageParameters result = new PackageParameters();

        Enumeration pe = request.getParameterNames();
        while (pe.hasMoreElements())
        {
            String name = (String) pe.nextElement();
            String[] v = request.getParameterValues(name);
            if (v.length == 0)
            {
                result.setProperty(name, "");
            }
            else if (v.length == 1)
            {
                result.setProperty(name, v[0]);
            }
            else
            {
                // Join multiple values with the non-printing separator token.
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < v.length; ++i)
                {
                    if (i > 0)
                    {
                        sb.append(SEPARATOR);
                    }
                    sb.append(v[i]);
                }
                result.setProperty(name, sb.toString());
            }
        }
        return result;
    }

    /**
     * Adds a value to a property; if property already has value(s),
     * this is tacked onto the end, otherwise it acts like setProperty().
     *
     * @param key - the key to be placed into this property list.
     * @param value - the new value to add, corresponding to this key.
     * @return the previous value of the specified key in this property list, or
     *         null if it did not have one.
     */
    public Object addProperty(String key, String value)
    {
        String oldVal = getProperty(key);
        if (oldVal == null)
        {
            setProperty(key, value);
        }
        else
        {
            setProperty(key, oldVal + SEPARATOR + value);
        }
        return oldVal;
    }

    /**
     * Returns multiple property values in an array.
     *
     * @param key - the key to look for in this property list.
     * @return all values in an array, or null if this property is unset.
     */
    public String[] getProperties(String key)
    {
        String val = getProperty(key);
        if (val == null)
        {
            return null;
        }
        else
        {
            return val.split(SEPARATOR_REGEX);
        }
    }

    /**
     * Returns boolean form of property with selectable default
     * @param key the key to look for in this property list.
     * @param defaultAnswer default to return if there is no such property
     * @return the boolean derived from the value of property, or default
     *         if it was not specified.
     */
    public boolean getBooleanProperty(String key, boolean defaultAnswer)
    {
        String stringValue = getProperty(key);

        if (stringValue == null)
        {
            return defaultAnswer;
        }
        else
        {
            // "true", "on" and "yes" (case-insensitive) all count as true.
            return stringValue.equalsIgnoreCase("true") ||
                    stringValue.equalsIgnoreCase("on") ||
                    stringValue.equalsIgnoreCase("yes");
        }
    }

    /**
     * Utility method to tell if workflow is enabled for Item ingestion.
     * Checks the Packager parameters.
     * <p>
     * Defaults to 'true' if previously unset, as by default all
     * DSpace Workflows should be enabled.
     *
     * @return boolean result
     */
    public boolean workflowEnabled()
    {
        return getBooleanProperty("useWorkflow", true);
    }

    /***
     * Utility method to enable/disable workflow for Item ingestion.
     *
     * @param value boolean value (true = workflow enabled, false = workflow disabled)
     */
    public void setWorkflowEnabled(boolean value)
    {
        // BUGFIX: use setProperty() rather than addProperty().  These mode
        // flags are single-valued; appending left stale values behind (e.g.
        // "false\034true"), which getBooleanProperty() always reads as false,
        // so re-enabling a previously disabled mode silently failed.
        setProperty("useWorkflow", String.valueOf(value));
    }

    /***
     * Utility method to tell if restore mode is enabled.
     * Checks the Packager parameters.
     * <p>
     * Restore mode attempts to restore a missing/deleted object completely
     * (including handle), based on contents of a package.
     * <p>
     * NOTE: restore mode should throw an error if it attempts to restore an
     * object which already exists.  Use 'keep-existing' or 'replace' mode to
     * either skip-over (keep) or replace existing objects.
     * <p>
     * Defaults to 'false' if previously unset. NOTE: 'replace' mode and
     * 'keep-existing' mode are special types of "restores".  So, when either
     * replaceModeEnabled() or keepExistingModeEnabled() is true, this method
     * should also return true.
     *
     * @return boolean result
     */
    public boolean restoreModeEnabled()
    {
        return (getBooleanProperty("restoreMode", false) ||
                replaceModeEnabled() ||
                keepExistingModeEnabled());
    }

    /***
     * Utility method to enable/disable restore mode.
     * <p>
     * Restore mode attempts to restore a missing/deleted object completely
     * (including handle), based on a given package's contents.
     * <p>
     * NOTE: restore mode should throw an error if it attempts to restore an
     * object which already exists.  Use 'keep-existing' or 'replace' mode to
     * either skip-over (keep) or replace existing objects.
     *
     * @param value boolean value (true = restore enabled, false = restore disabled)
     */
    public void setRestoreModeEnabled(boolean value)
    {
        // Single-valued flag: replace, don't append (see setWorkflowEnabled).
        setProperty("restoreMode", String.valueOf(value));
    }

    /***
     * Utility method to tell if replace mode is enabled.
     * Checks the Packager parameters.
     * <p>
     * Replace mode attempts to overwrite an existing object and replace it
     * with the contents of a package. Replace mode is considered a special type
     * of "restore", where the current object is being restored to a previous state.
     * <p>
     * Defaults to 'false' if previously unset.
     *
     * @return boolean result
     */
    public boolean replaceModeEnabled()
    {
        return getBooleanProperty("replaceMode", false);
    }

    /***
     * Utility method to enable/disable replace mode.
     * <p>
     * Replace mode attempts to overwrite an existing object and replace it
     * with the contents of a package. Replace mode is considered a special type
     * of "restore", where the current object is being restored to a previous state.
     *
     * @param value boolean value (true = replace enabled, false = replace disabled)
     */
    public void setReplaceModeEnabled(boolean value)
    {
        // Single-valued flag: replace, don't append (see setWorkflowEnabled).
        setProperty("replaceMode", String.valueOf(value));
    }

    /***
     * Utility method to tell if 'keep-existing' mode is enabled.
     * Checks the Packager parameters.
     * <p>
     * Keep-Existing mode is identical to 'restore' mode, except that it
     * skips over any objects which are found to already be existing. It
     * essentially restores all missing objects, but keeps existing ones intact.
     * <p>
     * Defaults to 'false' if previously unset.
     *
     * @return boolean result
     */
    public boolean keepExistingModeEnabled()
    {
        return getBooleanProperty("keepExistingMode", false);
    }

    /***
     * Utility method to enable/disable 'keep-existing' mode.
     * <p>
     * Keep-Existing mode is identical to 'restore' mode, except that it
     * skips over any objects which are found to already be existing. It
     * essentially restores all missing objects, but keeps existing ones intact.
     *
     * @param value boolean value (true = keep-existing enabled, false = keep-existing disabled)
     */
    public void setKeepExistingModeEnabled(boolean value)
    {
        // Single-valued flag: replace, don't append (see setWorkflowEnabled).
        setProperty("keepExistingMode", String.valueOf(value));
    }

    /***
     * Utility method to tell if Items should use a Collection's template
     * when they are created.
     * <p>
     * Defaults to 'false' if previously unset.
     *
     * @return boolean result
     */
    public boolean useCollectionTemplate()
    {
        return getBooleanProperty("useCollectionTemplate", false);
    }

    /***
     * Utility method to enable/disable Collection Template for Item ingestion.
     * <p>
     * When enabled, the Item will be installed using the parent collection's
     * Item Template
     *
     * @param value boolean value (true = template enabled, false = template disabled)
     */
    public void setUseCollectionTemplate(boolean value)
    {
        // Single-valued flag: replace, don't append (see setWorkflowEnabled).
        setProperty("useCollectionTemplate", String.valueOf(value));
    }

    /***
     * Utility method to tell if recursive mode is enabled.
     * Checks the Packager parameters.
     * <p>
     * Recursive mode should be enabled anytime one of the *All() methods
     * is called (e.g. ingestAll(), replaceAll() or disseminateAll()).  It
     * recursively performs the same action on all related objects.
     * <p>
     * Defaults to 'false' if previously unset.
     *
     * @return boolean result
     */
    public boolean recursiveModeEnabled()
    {
        return getBooleanProperty("recursiveMode", false);
    }

    /***
     * Utility method to enable/disable recursive mode.
     * <p>
     * Recursive mode should be enabled anytime one of the *All() methods
     * is called (e.g. ingestAll(), replaceAll() or disseminateAll()).  It
     * recursively performs the same action on all related objects.
     *
     * @param value boolean value (true = recursion enabled, false = recursion disabled)
     */
    public void setRecursiveModeEnabled(boolean value)
    {
        // Single-valued flag: replace, don't append (see setWorkflowEnabled).
        setProperty("recursiveMode", String.valueOf(value));
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.InstallItem;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.handle.HandleManager;
import org.dspace.license.CreativeCommons;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
/**
* Container class for code that is useful to many packagers.
*
* @author Larry Stone
* @version $Revision: 6137 $
*/
public class PackageUtils
{
    /** log4j category */
    private static final Logger log = Logger.getLogger(PackageUtils.class);

    // Map of metadata elements for Communities and Collections
    // Format is alternating key/value in a straight array; use this
    // to initialize hash tables that convert to and from.
    private static final String ccMetadataMap[] =
    {
        // getMetadata() -> DC element.term
        "name", "dc.title",
        "introductory_text", "dc.description",
        "short_description", "dc.description.abstract",
        "side_bar_text", "dc.description.tableofcontents",
        "copyright_text", "dc.rights",
        "provenance_description", "dc.provenance",
        "license", "dc.rights.license"
    };

    // HashMaps to convert Community/Collection metadata to/from Dublin Core
    // (useful when crosswalking Communities/Collections)
    private static final Map<String,String> ccMetadataToDC = new HashMap<String,String>();
    private static final Map<String,String> ccDCToMetadata = new HashMap<String,String>();

    static
    {
        // Build both lookup directions from the flat key/value array above.
        for (int i = 0; i < ccMetadataMap.length; i += 2)
        {
            ccMetadataToDC.put(ccMetadataMap[i], ccMetadataMap[i+1]);
            ccDCToMetadata.put(ccMetadataMap[i+1], ccMetadataMap[i]);
        }
    }
/**
* Translate a Dublin Core metadata field into a Container's (Community or Collection)
* database column for that metadata entry.
* <P>
* e.g. "dc.title" would translate to the "name" database column
* <P>
* This method is of use when crosswalking Community or Collection metadata for ingest,
* as most ingest Crosswalks tend to deal with translating to DC-based metadata.
*
* @param dcField The dublin core metadata field
* @return The Community or Collection DB column where this metadata info is stored.
*/
public static String dcToContainerMetadata(String dcField)
{
return ccDCToMetadata.get(dcField);
}
/**
* Translate a Container's (Community or Collection) database column into
* a valid Dublin Core metadata field. This is the opposite of 'dcToContainerMetadata()'.
* <P>
* e.g. the "name" database column would translate to "dc.title"
* <P>
* This method is of use when crosswalking Community or Collection metadata for dissemination,
* as most dissemination Crosswalks tend to deal with translating from DC-based metadata.
*
*
* @param databaseField The Community or Collection DB column
* @return The Dublin Core metadata field that this metadata translates to.
*/
public static String containerMetadataToDC(String databaseField)
{
return ccMetadataToDC.get(databaseField);
}
/**
* Test that item has adequate metadata.
* Check item for the minimal DC metadata required to ingest a
* new item, and throw a PackageValidationException if test fails.
* Used by all SIP processors as a common sanity test.
*
* @param item - item to test.
*/
public static void checkItemMetadata(Item item)
throws PackageValidationException
{
DCValue t[] = item.getDC( "title", null, Item.ANY);
if (t == null || t.length == 0)
{
throw new PackageValidationException("Item cannot be created without the required \"title\" DC metadata.");
}
}
/**
* Add DSpace Deposit License to an Item.
* Utility function to add the a user-supplied deposit license or
* a default one if none was given; creates new bitstream in the
* "LICENSE" bundle and gives it the special license bitstream format.
*
* @param context - dspace context
* @param license - license string to add, may be null to invoke default.
* @param item - the item.
* @param collection - get the default license from here.
*/
    public static void addDepositLicense(Context context, String license,
                                        Item item, Collection collection)
        throws SQLException, IOException, AuthorizeException
    {
        // Fall back to the owning collection's default license when the
        // caller did not supply one.
        if (license == null)
        {
            license = collection.getLicense();
        }
        // NOTE(review): getBytes() encodes with the platform default charset
        // here -- confirm whether the license text should be UTF-8 instead.
        InputStream lis = new ByteArrayInputStream(license.getBytes());

        Bundle lb;
        //If LICENSE bundle is missing, create it
        Bundle[] bundles = item.getBundles(Constants.LICENSE_BUNDLE_NAME);
        if(bundles==null || bundles.length==0)
        {
            lb = item.createBundle(Constants.LICENSE_BUNDLE_NAME);
        }
        else
        {
            // Reuse the first existing LICENSE bundle.
            lb = bundles[0];
        }

        //Create the License bitstream
        Bitstream lbs = lb.createBitstream(lis);
        lis.close();
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(context, "License");
        if (bf == null)
        {
            // No registered "License" format; guess one from the content.
            bf = FormatIdentifier.guessFormat(context, lbs);
        }
        lbs.setFormat(bf);
        lbs.setName(Constants.LICENSE_BITSTREAM_NAME);
        lbs.setSource(Constants.LICENSE_BITSTREAM_NAME);
        lbs.update();
    }
/**
* Find bitstream by its Name, looking in all bundles.
*
* @param item Item whose bitstreams to search.
* @param name Bitstream's name to match.
* @return first bitstream found or null.
*/
    public static Bitstream getBitstreamByName(Item item, String name)
        throws SQLException
    {
        // Convenience overload: null bundle name means "search all bundles".
        return getBitstreamByName(item, name, null);
    }
/**
* Find bitstream by its Name, looking in specific named bundle.
*
* @param item - dspace item whose bundles to search.
* @param bsName - name of bitstream to match.
* @param bnName - bundle name to match, or null for all.
* @return the format found or null if none found.
*/
public static Bitstream getBitstreamByName(Item item, String bsName, String bnName)
throws SQLException
{
Bundle[] bundles;
if (bnName == null)
{
bundles = item.getBundles();
}
else
{
bundles = item.getBundles(bnName);
}
for (int i = 0; i < bundles.length; i++)
{
Bitstream[] bitstreams = bundles[i].getBitstreams();
for (int k = 0; k < bitstreams.length; k++)
{
if (bsName.equals(bitstreams[k].getName()))
{
return bitstreams[k];
}
}
}
return null;
}
/**
* Find bitstream by its format, looking in a specific bundle.
* Used to look for particularly-typed Package Manifest bitstreams.
*
* @param item - dspace item whose bundles to search.
* @param bsf - BitstreamFormat object to match.
* @param bnName - bundle name to match, or null for all.
* @return the format found or null if none found.
*/
public static Bitstream getBitstreamByFormat(Item item,
BitstreamFormat bsf, String bnName)
throws SQLException
{
int fid = bsf.getID();
Bundle[] bundles;
if (bnName == null)
{
bundles = item.getBundles();
}
else
{
bundles = item.getBundles(bnName);
}
for (int i = 0; i < bundles.length; i++)
{
Bitstream[] bitstreams = bundles[i].getBitstreams();
for (int k = 0; k < bitstreams.length; k++)
{
if (bitstreams[k].getFormat().getID() == fid)
{
return bitstreams[k];
}
}
}
return null;
}
/**
 * Predicate: does this bundle contain meta-information? I.e.
* does this bundle contain descriptive metadata or other metadata
* such as license bitstreams? If so we probably don't want to put
* it into the "content" section of a package; hence this predicate.
*
* @param bn -- the bundle
* @return true if this bundle name indicates it is a meta-info bundle.
*/
public static boolean isMetaInfoBundle(Bundle bn)
{
return (bn.getName().equals(Constants.LICENSE_BUNDLE_NAME) ||
bn.getName().equals(CreativeCommons.CC_BUNDLE_NAME) ||
bn.getName().equals(Constants.METADATA_BUNDLE_NAME));
}
/**
* Stream wrapper that does not allow its wrapped stream to be
* closed. This is needed to work around problem when loading
* bitstreams from ZipInputStream. The Bitstream constructor
* invokes close() on the input stream, which would prematurely end
* the ZipInputStream.
* Example:
* <pre>
* ZipEntry ze = zip.getNextEntry();
* Bitstream bs = bundle.createBitstream(new PackageUtils.UnclosableInputStream(zipInput));
* </pre>
*/
    public static class UnclosableInputStream extends FilterInputStream
    {
        /**
         * @param in the stream to wrap; close() calls on this wrapper
         *           will NOT be forwarded to it.
         */
        public UnclosableInputStream(InputStream in)
        {
            super(in);
        }

        /**
         * Do nothing, to prevent wrapped stream from being closed prematurely.
         * The caller remains responsible for closing the underlying stream.
         */
        @Override
        public void close()
        {
        }
    }
/**
* Find or create a bitstream format to match the given short
* description.
* Used by packager ingesters to obtain a special bitstream
* format for the manifest (and/or metadata) file.
* <p>
* NOTE: When creating a new format, do NOT set any extensions, since
* we don't want any file with the same extension, which may be something
* generic like ".xml", to accidentally get set to this format.
* @param context - the context.
* @param shortDesc - short descriptive name, used to locate existing format.
* @param MIMEType - MIME content-type
* @param desc - long description
* @return BitstreamFormat object that was found or created. Never null.
*/
    public static BitstreamFormat findOrCreateBitstreamFormat(Context context,
            String shortDesc, String MIMEType, String desc)
        throws SQLException, AuthorizeException
    {
        // Delegate with defaults: support level KNOWN, not an internal format.
        return findOrCreateBitstreamFormat(context, shortDesc, MIMEType, desc, BitstreamFormat.KNOWN, false);
    }
/**
* Find or create a bitstream format to match the given short
* description.
* Used by packager ingesters to obtain a special bitstream
* format for the manifest (and/or metadata) file.
* <p>
* NOTE: When creating a new format, do NOT set any extensions, since
* we don't want any file with the same extension, which may be something
* generic like ".xml", to accidentally get set to this format.
* @param context - the context.
* @param shortDesc - short descriptive name, used to locate existing format.
* @param MIMEType - mime content-type
* @param desc - long description
 * @param supportLevel support level to set on a newly created format
 *          (e.g. BitstreamFormat.KNOWN).
 * @param internal value for the 'internal' flag of a new format if created.
* @return BitstreamFormat object that was found or created. Never null.
*/
public static BitstreamFormat findOrCreateBitstreamFormat(Context context,
String shortDesc, String MIMEType, String desc, int supportLevel, boolean internal)
throws SQLException, AuthorizeException
{
BitstreamFormat bsf = BitstreamFormat.findByShortDescription(context,
shortDesc);
// not found, try to create one
if (bsf == null)
{
bsf = BitstreamFormat.create(context);
bsf.setShortDescription(shortDesc);
bsf.setMIMEType(MIMEType);
bsf.setDescription(desc);
bsf.setSupportLevel(supportLevel);
bsf.setInternal(internal);
bsf.update();
}
return bsf;
}
/**
* Utility to find the license bitstream from an item
*
* @param context
* DSpace context
* @param item
* the item
* @return the license bitstream or null
*
* @throws IOException
* if the license bitstream can't be read
*/
    public static Bitstream findDepositLicense(Context context, Item item)
        throws SQLException, IOException, AuthorizeException
    {
        // get license format ID
        int licenseFormatId = -1;
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(context,
                "License");
        if (bf != null)
        {
            licenseFormatId = bf.getID();
        }

        Bundle[] bundles = item.getBundles(Constants.LICENSE_BUNDLE_NAME);
        for (int i = 0; i < bundles.length; i++)
        {
            // Assume license will be in its own bundle
            Bitstream[] bitstreams = bundles[i].getBitstreams();

            for(int j=0; j < bitstreams.length; j++)
            {
                // The License should have a file format of "License"
                if (bitstreams[j].getFormat().getID() == licenseFormatId)
                {
                    //found a bitstream with format "License" -- return it
                    return bitstreams[j];
                }
            }

            // If we couldn't find a bitstream with format = "License",
            // we will just assume the first bitstream is the deposit license
            // (usually a safe assumption as it is in the LICENSE bundle)
            // NOTE(review): this fallback returns from inside the bundle loop,
            // so a "License"-format bitstream in a *later* bundle would never
            // be reached -- confirm the LICENSE bundle is always unique.
            if(bitstreams.length>0)
            {
                return bitstreams[0];
            }
        }

        // Oops! No license!
        return null;
    }
/*=====================================================
* Utility Methods -- may be useful for subclasses
*====================================================*/
/**
* Create the specified DSpace Object, based on the passed
* in Package Parameters (along with other basic info required
* to create the object)
*
* @param context DSpace Context
* @param parent Parent Object
* @param type Type of new Object
* @param handle Handle of new Object (may be null)
* @param params Properties-style list of options (interpreted by each packager).
* @return newly created DSpace Object (or null)
* @throws AuthorizeException
* @throws SQLException
* @throws IOException
*/
    public static DSpaceObject createDSpaceObject(Context context, DSpaceObject parent, int type, String handle, PackageParameters params)
        throws AuthorizeException, SQLException, IOException
    {
        DSpaceObject dso = null;

        switch (type)
        {
            case Constants.COLLECTION:
                // NOTE(review): assumes 'parent' is a non-null Community for
                // Collection creation -- confirm callers never pass null here.
                dso = ((Community)parent).createCollection(handle);
                return dso;

            case Constants.COMMUNITY:
                // top-level community?
                if (parent == null || parent.getType() == Constants.SITE)
                {
                    dso = Community.create(null, context, handle);
                }
                else
                {
                    dso = ((Community) parent).createSubcommunity(handle);
                }
                return dso;

            case Constants.ITEM:
                //Initialize a WorkspaceItem
                //(Note: Handle is not set until item is finished)
                WorkspaceItem wsi = WorkspaceItem.create(context, (Collection)parent, params.useCollectionTemplate());

                // Please note that we are returning an Item which is *NOT* yet in the Archive,
                // and doesn't yet have a handle assigned.
                // This Item will remain "incomplete" until 'PackageUtils.finishCreateItem()' is called
                return wsi.getItem();

            case Constants.SITE:
                // The Site object always exists; just look it up.
                return Site.find(context, Site.SITE_ID);
        }

        // Unsupported object type: nothing created.
        return null;
    }
/**
* Perform any final tasks on a newly created WorkspaceItem in order to finish
* ingestion of an Item.
* <p>
* This may include starting up a workflow for the new item, restoring it,
* or archiving it (based on params passed in)
*
* @param context DSpace Context
* @param wsi Workspace Item that requires finishing
* @param handle Handle to assign to item (may be null)
* @param params Properties-style list of options (interpreted by each packager).
* @return finished Item
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
    public static Item finishCreateItem(Context context, WorkspaceItem wsi, String handle, PackageParameters params)
        throws IOException, SQLException, AuthorizeException
    {
        // if we are restoring/replacing existing object using the package
        // (restore mode takes precedence over any workflow setting)
        if (params.restoreModeEnabled())
        {
            // Restore & install item immediately
            //(i.e. skip over any Collection workflows, as we are essentially restoring item from backup)
            InstallItem.restoreItem(context, wsi, handle);

            //return newly restored item
            return wsi.getItem();
        }
        // if we are treating package as a SIP, and we are told to respect workflows
        else if (params.workflowEnabled())
        {
            // Start an item workflow
            // (NOTICE: The specified handle is ignored, as Workflows *always* end in a new handle being assigned)
            WorkflowItem wfi = WorkflowManager.startWithoutNotify(context, wsi);

            // return item with workflow started
            return wfi.getItem();
        }
        // default: skip workflow, but otherwise normal submission (i.e. package treated like a SIP)
        else
        {
            // Install item immediately with the specified handle
            InstallItem.installItem(context, wsi, handle);

            // return newly installed item
            return wsi.getItem();
        }
    }//end finishCreateItem
/**
* Commit all recent changes to DSpaceObject.
* <p>
* This method is necessary as there is no generic 'update()' on a DSpaceObject
*
* @param dso DSpaceObject to update
*/
    public static void updateDSpaceObject(DSpaceObject dso)
        throws AuthorizeException, SQLException, IOException
    {
        // Null objects are tolerated as a no-op.
        if (dso != null)
        {
            switch (dso.getType())
            {
                case Constants.BITSTREAM:
                    ((Bitstream)dso).update();
                    break;
                case Constants.ITEM:
                    ((Item)dso).update();
                    break;
                case Constants.COLLECTION:
                    ((Collection)dso).update();
                    break;
                case Constants.COMMUNITY:
                    ((Community)dso).update();
                    break;
                // Any other object type is silently ignored (no case here).
            }
        }
    }
/**
* Utility method to retrieve the file extension off of a filename.
*
* @param filename Full filename
* @return file extension
*/
public static String getFileExtension(String filename)
{
// Extract the file extension off of a filename
String extension = filename;
int lastDot = filename.lastIndexOf('.');
if (lastDot != -1)
{
extension = filename.substring(lastDot + 1);
}
return extension;
}
/**
* Returns name of a dissemination information package (DIP), based on the
* DSpace object and a provided fileExtension
* <p>
* Format: [dspace-obj-type]@[handle-with-dashes].[fileExtension]
* OR [dspace-obj-type]@internal-id-[dspace-ID].[fileExtension]
*
* @param dso DSpace Object to create file name for
* @param fileExtension file Extension of output file.
* @return filename of a DIP representing the DSpace Object
*/
public static String getPackageName(DSpaceObject dso, String fileExtension)
{
String handle = dso.getHandle();
// if Handle is empty, use internal ID for name
if(handle==null || handle.isEmpty())
{
handle = "internal-id-" + dso.getID();
}
else // if Handle exists, replace '/' with '-' to meet normal file naming conventions
{
handle = handle.replace("/", "-");
}
//Get type name
int typeID = dso.getType();
String type = Constants.typeText[typeID];
//check if passed in file extension already starts with "."
if(!fileExtension.startsWith("."))
{
fileExtension = "." + fileExtension;
}
//Here we go, here's our magical file name!
//Format: typeName@handle.extension
return type + "@" + handle + fileExtension;
}
/**
* Creates the specified file (along with all parent directories) if it doesn't already
* exist. If the file already exists, nothing happens.
*
* @param file
* @return boolean true if succeeded, false otherwise
* @throws IOException
*/
public static boolean createFile(File file)
throws IOException
{
boolean success = false;
//Check if file exists
if(!file.exists())
{
//file doesn't exist yet, does its parent directory exist?
File parentFile = file.getCanonicalFile().getParentFile();
//create the parent directory structure
if ((null != parentFile) && !parentFile.exists() && !parentFile.mkdirs())
{
log.error("Unable to create parent directory");
}
//create actual file
success = file.createNewFile();
}
return success;
}
/**
* Remove all bitstreams (files) associated with a DSpace object.
* <P>
* If this object is an Item, it removes all bundles & bitstreams. If this
* object is a Community or Collection, it removes all logo bitstreams.
* <P>
* This method is useful for replace functionality.
*
* @param dso The object to remove all bitstreams from
*/
public static void removeAllBitstreams(DSpaceObject dso)
throws SQLException, IOException, AuthorizeException
{
//If we are dealing with an Item
if(dso.getType()==Constants.ITEM)
{
Item item = (Item) dso;
// Get a reference to all Bundles in Item (which contain the bitstreams)
Bundle[] bunds = item.getBundles();
// Remove each bundle -- this will in turn remove all bitstreams associated with this Item.
for (int i = 0; i < bunds.length; i++)
{
item.removeBundle(bunds[i]);
}
}
else if (dso.getType()==Constants.COLLECTION)
{
Collection collection = (Collection) dso;
//clear out the logo for this collection
collection.setLogo(null);
}
else if (dso.getType()==Constants.COMMUNITY)
{
Community community = (Community) dso;
//clear out the logo for this community
community.setLogo(null);
}
}
/**
* Removes all metadata associated with a DSpace object.
* <P>
* This method is useful for replace functionality.
*
* @param dso The object to remove all metadata from
*/
public static void clearAllMetadata(DSpaceObject dso)
throws SQLException, IOException, AuthorizeException
{
//If we are dealing with an Item
if(dso.getType()==Constants.ITEM)
{
Item item = (Item) dso;
//clear all metadata entries
item.clearMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
}
//Else if collection, clear its database table values
else if (dso.getType()==Constants.COLLECTION)
{
Collection collection = (Collection) dso;
// Use the MetadataToDC map (defined privately in this class)
// to clear out all the Collection database fields.
for(String dbField : ccMetadataToDC.keySet())
{
try
{
collection.setMetadata(dbField, null);
}
catch(IllegalArgumentException ie)
{
// ignore the error -- just means the field doesn't exist in DB
// Communities & Collections don't include the exact same metadata fields
}
}
}
//Else if community, clear its database table values
else if (dso.getType()==Constants.COMMUNITY)
{
Community community = (Community) dso;
// Use the MetadataToDC map (defined privately in this class)
// to clear out all the Community database fields.
for(String dbField : ccMetadataToDC.keySet())
{
try
{
community.setMetadata(dbField, null);
}
catch(IllegalArgumentException ie)
{
// ignore the error -- just means the field doesn't exist in DB
// Communities & Collections don't include the exact same metadata fields
}
}
}
}
    /** Recognize and pick apart likely "magic" group names, e.g.
     *  "COLLECTION_123_ADMIN" -> (obj type, internal ID, group type). */
    private static final Pattern groupAnalyzer
            = Pattern.compile("^(COMMUNITY|COLLECTION)_([0-9]+)_(.+)");
    /** Lookaside list for translations we've already done, so we don't generate
     * multiple names for the same group (the replacement names contain a
     * random component and would otherwise differ on every call)
     */
    private static final Map<String, String> orphanGroups = new HashMap<String, String>();
/**
* When DSpace creates Default Group Names they are of a very specific format,
* for example:
* <ul>
* <li> COMMUNITY_[ID]_ADMIN </li>
* <li> COLLECTION_[ID]_ADMIN </li>
* <li> COLLECTION_[ID]_SUBMIT </li>
* <li> COLLECTION_[ID]_WORKFLOW_STEP_# </li>
* </ul>
* <p>
* Although these names work fine within DSpace, the DSpace internal ID
* (represented by [ID] above) becomes meaningless when content is exported
* outside of DSpace. In order to make these Group names meaningful outside
* of DSpace, they must be translated into a different format:
* <li> COMMUNITY_[HANDLE]_ADMIN (e.g. COMMUNITY_hdl:123456789/10_ADMIN), etc.
* <p>
* This format replaces the internal ID with an external Handle identifier
* (which is expected to be more meaningful even when content is exported
* from DSpace).
* <p>
* This method prepares group names for export by replacing any found
* internal IDs with the appropriate external Handle identifier. If
* the group name doesn't have an embedded internal ID, it is returned
* as is. If the group name contains an embedded internal ID, but the
* corresponding Handle cannot be determined, then it will be translated to
* GROUP_[random]_[objectType]_[groupType] and <em>not</em> re-translated on
* import.
* <p>
* This method may be useful to any Crosswalks/Packagers which deal with
* import/export of DSpace Groups.
* <p>
* Also see the translateGroupNameForImport() method which does the opposite
* of this method.
*
* @param context current DSpace Context
* @param groupName Group's name
* @return the group name, with any internal IDs translated to Handles
*/
public static String translateGroupNameForExport(Context context, String groupName)
throws PackageException
{
// See if this resembles a default Group name
Matcher matched = groupAnalyzer.matcher(groupName);
if (!matched.matches())
return groupName;
// It does! Pick out the components
String objType = matched.group(1);
String objID = matched.group(2);
String groupType = matched.group(3);
try
{
//We'll translate this internal ID into a Handle
//First, get the object via the Internal ID
DSpaceObject dso = DSpaceObject.find(context, Constants
.getTypeID(objType), Integer.parseInt(objID));
if(dso==null)
{
// No such object. Change the name to something harmless.
String newName;
if (orphanGroups.containsKey(groupName))
newName = orphanGroups.get(groupName);
else
{
newName= "GROUP_" + Utils.generateHexKey() + "_"
+ objType + "_" + groupType;
orphanGroups.put(groupName, newName);
// A given group should only be translated once, since the
// new name contains unique random elements which would be
// different every time.
}
// Just log a warning -- it's possible this Group was not
// cleaned up when the associated DSpace Object was removed.
// So, we don't want to throw an error and stop all other processing.
log.warn("DSpace Object (ID='" + objID
+ "', type ='" + objType
+ "') no longer exists -- translating " + groupName
+ " to " + newName + ".");
return newName;
}
//Create an updated group name, using the Handle to replace the InternalID
// Format: <DSpace-Obj-Type>_hdl:<Handle>_<Group-Type>
return objType + "_" + "hdl:" + dso.getHandle() + "_" + groupType;
}
catch (SQLException sqle)
{
throw new PackageException("Database error while attempting to translate group name ('" + groupName + "') for export.", sqle);
}
}
/**
* This method does the exact opposite of the translateGroupNameForExport()
* method. It prepares group names for import by replacing any found
* external Handle identifiers with the appropriate DSpace Internal
* identifier. As a basic example, it would change a group named
* "COLLECTION_hdl:123456789/10_ADMIN" to a name similar to
* "COLLECTION_11_ADMIN (where '11' is the internal ID of that Collection).
* <P>
* If the group name either doesn't have an embedded handle, then it is
* returned as is. If it has an embedded handle, but the corresponding
* internal ID cannot be determined, then an error is thrown. It is up
* to the calling method whether that error should be displayed to the user
* or if the group should just be skipped (since its associated object
* doesn't currently exist).
* <p>
* This method may be useful to any Crosswalks/Packagers which deal with
* import/export of DSpace Groups.
* <p>
* Also see the translateGroupNameForExport() method which does the opposite
* of this method.
*
* @param context current DSpace Context
* @param groupName Group's name
* @return the group name, with any Handles translated to internal IDs
*/
public static String translateGroupNameForImport(Context context, String groupName)
throws PackageException
{
// Check if this looks like a default Group name -- must have at LEAST two underscores surrounded by other characters
if(!groupName.matches("^.+_.+_.+$"))
{
//if this is not a valid default group name, just return group name as-is (no crosswalking necessary)
return groupName;
}
//Pull apart default group name into its three main parts
// Format: <DSpace-Obj-Type>_<DSpace-Obj-ID>_<Group-Type>
// (e.g. COLLECTION_123_ADMIN)
String objType = groupName.substring(0, groupName.indexOf('_'));
String tmpEndString = groupName.substring(groupName.indexOf('_')+1);
String objID = tmpEndString.substring(0, tmpEndString.indexOf('_'));
String groupType = tmpEndString.substring(tmpEndString.indexOf('_')+1);
try
{
if(objID.startsWith("hdl:"))
{
//We'll translate this handle into an internal ID
//Format for Handle => "hdl:<handle-prefix>/<handle-suffix>"
// (e.g. "hdl:123456789/10")
//First, get the object via the Handle
DSpaceObject dso = HandleManager.resolveToObject(context, objID.substring(4));
if(dso==null)
{
//throw an error as we cannot accurately rename/recreate this Group without its related DSpace Object
throw new PackageException("Unable to translate Handle to Internal ID in group named '" + groupName + "' as DSpace Object (Handle='" + objID + "') does not exist.");
}
//verify our group specified object Type corresponds to this object's type
if(Constants.getTypeID(objType)!=dso.getType())
{
throw new PackageValidationException("DSpace Object referenced by handle '" + objID + "' does not correspond to the object type specified by Group named '" + groupName + "'. This Group doesn't seem to correspond to this DSpace Object!");
}
//Create an updated group name, using the Internal ID to replace the Handle
// Format: <DSpace-Obj-Type>_<DSpace-Obj-ID>_<Group-Type>
return objType + "_" + dso.getID() + "_" + groupType;
}
else // default -- return group name as is
{
return groupName;
}
}
catch (SQLException sqle)
{
throw new PackageException("Database error while attempting to translate group name ('" + groupName + "') for import.", sqle);
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.util.Locale;
/**
 * Utility class for dealing with languages stored as DC values, e.g. "en_US"
 * (language then country) or bare "fr".
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class DCLanguage
{
    /** The country code (e.g. "US"); "" when absent */
    private String country;

    /** The language code (e.g. "en"). Special values: "" and "other". */
    private String language;

    /**
     * Construct a language object from a database entry
     *
     * @param l
     *            the language text from the database
     */
    public DCLanguage(String l)
    {
        setLanguage(l);
    }

    /**
     * Write the language out to the database
     *
     * @return the language in a form for writing to the DCValue table:
     *         "", "language", or "language_country" (e.g. "en_US")
     */
    public String toString()
    {
        if (language.equals(""))
        {
            return "";
        }
        else if (country.equals(""))
        {
            return language;
        }
        else
        {
            // FIXED: emit "language_country" (e.g. "en_US") so the value
            // round-trips through setLanguage(), which reads the first two
            // characters as the language and the text after position 3 as the
            // country. The previous "country_language" ordering produced
            // values like "US_en" that setLanguage() would mis-parse,
            // swapping the two codes.
            return language + "_" + country;
        }
    }

    /**
     * Set the language and country
     *
     * @param l
     *            The language and country code, e.g. "en_US" or "fr".
     *            null, "other", 2-character and 5-character forms are
     *            recognized; anything else resets both codes to "".
     */
    public final void setLanguage(String l)
    {
        if (l == null)
        {
            language = "";
            country = "";
        }
        else if ("other".equals(l))
        {
            language = "other";
            country = "";
        }
        else if (l.length() == 2)
        {
            // Bare ISO language code, e.g. "fr"
            language = l;
            country = "";
        }
        else if (l.length() == 5)
        {
            // "ll_CC" form, e.g. "en_US"
            language = l.substring(0, 2);
            country = l.substring(3);
        }
        else
        {
            // Unrecognized form: treat as unset
            language = "";
            country = "";
        }
    }

    /**
     * Get the displayable name for this language
     *
     * @return the displayable name
     */
    public String getDisplayName()
    {
        if (language.equals("other"))
        {
            return "(Other)";
        }
        else if (language.equals(""))
        {
            return "N/A";
        }
        else
        {
            // Delegate human-readable naming to the JDK locale tables
            Locale locale = new Locale(language, country);
            return locale.getDisplayName();
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
 * Database access class representing a Dublin Core metadata value.
 * It represents a value of a given <code>MetadataField</code> on an Item.
 * (The Item can have many values of the same field.) It contains the field
 * (which names the schema, element, and qualifier), the language, and a value.
 *
 * @author Martin Hald
 * @see org.dspace.content.MetadataSchema
 * @see org.dspace.content.MetadataField
 */
public class MetadataValue
{
    /** The reference to the metadata field */
    private int fieldId = 0;

    /** The primary key for the metadata value */
    private int valueId = 0;

    /** The reference to the DSpace item */
    private int itemId;

    /** The value of the field */
    public String value;

    /** The language of the field, may be <code>null</code> */
    public String language;

    /** The position of the record. */
    public int place = 1;

    /** Authority key, if any */
    public String authority = null;

    /** Authority confidence value -- see Choices class for values */
    public int confidence = 0;

    /** log4j logger */
    private static Logger log = Logger.getLogger(MetadataValue.class);

    /** The row in the table representing this type */
    private TableRow row;

    /**
     * Construct the metadata object from the matching database row.
     *
     * @param row database row to use for contents
     */
    public MetadataValue(TableRow row)
    {
        if (row != null)
        {
            fieldId = row.getIntColumn("metadata_field_id");
            valueId = row.getIntColumn("metadata_value_id");
            itemId = row.getIntColumn("item_id");
            value = row.getStringColumn("text_value");
            language = row.getStringColumn("text_lang");
            place = row.getIntColumn("place");
            authority = row.getStringColumn("authority");
            confidence = row.getIntColumn("confidence");
            this.row = row;
        }
    }

    /**
     * Default constructor.
     */
    public MetadataValue()
    {
    }

    /**
     * Constructor to create a value for a given field.
     *
     * @param field initial value for field
     */
    public MetadataValue(MetadataField field)
    {
        this.fieldId = field.getFieldID();
    }

    /**
     * Get the field ID the metadata value represents.
     *
     * @return metadata field ID
     */
    public int getFieldId()
    {
        return fieldId;
    }

    /**
     * Set the field ID that the metadata value represents.
     *
     * @param fieldId new field ID
     */
    public void setFieldId(int fieldId)
    {
        this.fieldId = fieldId;
    }

    /**
     * Get the item ID.
     *
     * @return item ID
     */
    public int getItemId()
    {
        return itemId;
    }

    /**
     * Set the item ID.
     *
     * @param itemId new item ID
     */
    public void setItemId(int itemId)
    {
        this.itemId = itemId;
    }

    /**
     * Get the language (e.g. "en").
     *
     * @return language
     */
    public String getLanguage()
    {
        return language;
    }

    /**
     * Set the language (e.g. "en").
     *
     * @param language new language
     */
    public void setLanguage(String language)
    {
        this.language = language;
    }

    /**
     * Get the place ordering.
     *
     * @return place ordering
     */
    public int getPlace()
    {
        return place;
    }

    /**
     * Set the place ordering.
     *
     * @param place new place (relative order in series of values)
     */
    public void setPlace(int place)
    {
        this.place = place;
    }

    /**
     * Get the value ID.
     *
     * @return value ID
     */
    public int getValueId()
    {
        return valueId;
    }

    /**
     * Get the metadata value.
     *
     * @return metadata value
     */
    public String getValue()
    {
        return value;
    }

    /**
     * Set the metadata value
     *
     * @param value new metadata value
     */
    public void setValue(String value)
    {
        this.value = value;
    }

    /**
     * Get the metadata authority
     *
     * @return metadata authority
     */
    public String getAuthority ()
    {
        return authority ;
    }

    /**
     * Set the metadata authority
     *
     * @param value new metadata authority
     */
    public void setAuthority (String value)
    {
        this.authority = value;
    }

    /**
     * Get the metadata confidence
     *
     * @return metadata confidence
     */
    public int getConfidence()
    {
        return confidence;
    }

    /**
     * Set the metadata confidence
     *
     * @param value new metadata confidence
     */
    public void setConfidence(int value)
    {
        this.confidence = value;
    }

    /**
     * Creates a new metadata value in the database from this object's current
     * field values, and records the newly-assigned primary key.
     *
     * @param context
     *            DSpace context object
     * @throws SQLException
     * @throws AuthorizeException
     */
    public void create(Context context) throws SQLException, AuthorizeException
    {
        // Create a table row and update it with the values
        row = DatabaseManager.row("MetadataValue");
        row.setColumn("item_id", itemId);
        row.setColumn("metadata_field_id", fieldId);
        row.setColumn("text_value", value);
        row.setColumn("text_lang", language);
        row.setColumn("place", place);
        row.setColumn("authority", authority);
        row.setColumn("confidence", confidence);
        DatabaseManager.insert(context, row);

        // Remember the new row number
        this.valueId = row.getIntColumn("metadata_value_id");
    }

    /**
     * Retrieves the metadata value from the database.
     *
     * @param context dspace context
     * @param valueId database key id of value
     * @return recalled metadata value, or null if no row has that id
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    public static MetadataValue find(Context context, int valueId)
            throws IOException, SQLException, AuthorizeException
    {
        // Grab rows from DB
        TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataValue",
                "SELECT * FROM MetadataValue where metadata_value_id= ? ",
                valueId);

        TableRow row = null;
        try
        {
            if (tri.hasNext())
            {
                row = tri.next();
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        if (row == null)
        {
            return null;
        }
        else
        {
            return new MetadataValue(row);
        }
    }

    /**
     * Retrieves the metadata values for a given field from the database.
     *
     * @param context dspace context
     * @param fieldId field whose values to look for
     * @return a collection of metadata values (empty when none match)
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    public static List<MetadataValue> findByField(Context context, int fieldId)
            throws IOException, SQLException, AuthorizeException
    {
        // Grab rows from DB
        TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataValue",
                "SELECT * FROM MetadataValue WHERE metadata_field_id= ? ",
                fieldId);

        List<MetadataValue> ret = new ArrayList<MetadataValue>();
        try
        {
            while (tri.hasNext())
            {
                // Scope the row to the loop iteration that consumes it
                TableRow row = tri.next();
                ret.add(new MetadataValue(row));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        return ret;
    }

    /**
     * Update the metadata value in the database.
     *
     * @param context dspace context
     * @throws SQLException
     * @throws AuthorizeException
     */
    public void update(Context context) throws SQLException, AuthorizeException
    {
        row.setColumn("item_id", itemId);
        row.setColumn("metadata_field_id", fieldId);
        row.setColumn("text_value", value);
        row.setColumn("text_lang", language);
        row.setColumn("place", place);
        row.setColumn("authority", authority);
        row.setColumn("confidence", confidence);
        DatabaseManager.update(context, row);

        log.info(LogManager.getHeader(context, "update_metadatavalue",
                "metadata_value_id=" + getValueId()));
    }

    /**
     * Delete the metadata field.
     *
     * @param context dspace context
     * @throws SQLException
     * @throws AuthorizeException
     */
    public void delete(Context context) throws SQLException, AuthorizeException
    {
        log.info(LogManager.getHeader(context, "delete_metadata_value",
                " metadata_value_id=" + getValueId()));
        DatabaseManager.delete(context, row);
    }

    /**
     * Return <code>true</code> if <code>other</code> is the same MetadataValue
     * as this object, <code>false</code> otherwise. Identity is determined by
     * field ID, value ID, and item ID.
     *
     * @param obj
     *            object to compare to
     *
     * @return <code>true</code> if object passed in represents the same
     *         MetadataValue as this object
     */
    @Override
    public boolean equals(Object obj)
    {
        // Reflexive fast path
        if (this == obj)
        {
            return true;
        }
        if (obj == null)
        {
            return false;
        }
        if (getClass() != obj.getClass())
        {
            return false;
        }
        final MetadataValue other = (MetadataValue) obj;
        if (this.fieldId != other.fieldId)
        {
            return false;
        }
        if (this.valueId != other.valueId)
        {
            return false;
        }
        if (this.itemId != other.itemId)
        {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode()
    {
        // Must stay consistent with equals(): same three identifying fields
        int hash = 7;
        hash = 47 * hash + this.fieldId;
        hash = 47 * hash + this.valueId;
        hash = 47 * hash + this.itemId;
        return hash;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import org.dspace.content.Bitstream;
/**
 * Wrapper class for bitstreams with Thumbnails associated with them for
 * convenience in the browse system
 *
 * @author Richard Jones
 *
 */
public class Thumbnail
{
    /** the bitstream holding the thumbnail image itself */
    private Bitstream thumbnailBitstream;

    /** the source bitstream that the thumbnail was derived from */
    private Bitstream sourceBitstream;

    /**
     * Construct a new thumbnail pairing from the two bitstreams
     *
     * @param thumb the thumbnail bitstream
     * @param original the original bitstream
     */
    public Thumbnail(Bitstream thumb, Bitstream original)
    {
        this.thumbnailBitstream = thumb;
        this.sourceBitstream = original;
    }

    /**
     * @return Returns the original.
     */
    public Bitstream getOriginal()
    {
        return sourceBitstream;
    }

    /**
     * @param original The original to set.
     */
    public void setOriginal(Bitstream original)
    {
        this.sourceBitstream = original;
    }

    /**
     * @return Returns the thumb.
     */
    public Bitstream getThumb()
    {
        return thumbnailBitstream;
    }

    /**
     * @param thumb The thumb to set.
     */
    public void setThumb(Bitstream thumb)
    {
        this.thumbnailBitstream = thumb;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
/**
 * Series and report number, as stored in relation.ispartofseries
 *
 * @author Robert Tansley
 * @version $Id: DCSeriesNumber.java 5844 2010-11-12 05:29:14Z mdiggory $
 */
public class DCSeriesNumber
{
    /** Series title; may be null */
    private String series;

    /** Report number within the series; may be null */
    private String number;

    /** Construct clean series number */
    public DCSeriesNumber()
    {
        series = null;
        number = null;
    }

    /**
     * Construct from raw DC value
     *
     * @param value
     *            value from database ("series;number", "series", or null)
     */
    public DCSeriesNumber(String value)
    {
        this();

        if (value == null)
        {
            // Leave both parts null, matching the clean constructor
            return;
        }

        int sepIndex = value.indexOf(';');
        if (sepIndex < 0)
        {
            // No separator: the whole value is the series name
            series = value;
        }
        else
        {
            // "series;number" -- split on the first semicolon only
            series = value.substring(0, sepIndex);
            number = value.substring(sepIndex + 1);
        }
    }

    /**
     * Construct from given values
     *
     * @param s
     *            the series
     * @param n
     *            the number
     */
    public DCSeriesNumber(String s, String n)
    {
        series = s;
        number = n;
    }

    /**
     * Write as raw DC value
     *
     * @return the series and number as they should be stored in the DB,
     *         or null when no series is set
     */
    public String toString()
    {
        if (series == null)
        {
            return null;
        }

        return (number == null) ? series : series + ";" + number;
    }

    /**
     * Get the series name - guaranteed non-null
     */
    public String getSeries()
    {
        return (series != null) ? series : "";
    }

    /**
     * Get the number - guaranteed non-null
     */
    public String getNumber()
    {
        return (number != null) ? number : "";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
import org.dspace.content.DSpaceObject;
/**
 * This is a simple implementation of the LicenseArgumentFormatter for a
 * DSpaceObject. The formatter options width/precision are not taken into
 * account.
 *
 * @author bollini
 *
 */
public class SimpleDSpaceObjectLicenseFormatter implements
        LicenseArgumentFormatter
{
    /**
     * Write the object's name to the formatter, or "sample [type]" when no
     * object is supplied (null means "sample data" per the interface contract).
     *
     * @param formatter the formatter to write to
     * @param flags     formatter flags (ignored)
     * @param width     requested width (ignored)
     * @param object    the DSpaceObject to format, or null for sample output
     * @param type      alias of the object type (e.g. "item", "collection")
     */
    public void formatTo(Formatter formatter, int flags, int width,
            Object object, String type)
    {
        if (object == null)
        {
            // FIXED: pass the text as an argument to a "%s" format instead of
            // using it as the format string itself -- a '%' in the type alias
            // would otherwise throw UnknownFormatConversionException.
            formatter.format("%s", "sample " + type);
        }
        else
        {
            DSpaceObject dso = (DSpaceObject) object;
            String name = dso.getName();
            if (name != null)
            {
                // FIXED: same "%s" guard -- object names may legitimately
                // contain '%' characters.
                formatter.format("%s", name);
            }
            else
            {
                // No name: emit nothing
                formatter.format("");
            }
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formattable;
import java.util.Formatter;
import org.dspace.core.PluginManager;
/**
 * Wrapper class to make formattable any argument used in the license template.
 * The formatter behavior is delegated to a specific class on "type" basis
 * using the PluginManager
 *
 * @see Formattable
 * @see LicenseArgumentFormatter
 * @author bollini
 *
 */
public class FormattableArgument implements Formattable
{
    /** Alias of the argument type (e.g. "item", "eperson"), used for plugin lookup */
    private String type;

    /** The underlying value to be formatted */
    private Object object;

    public FormattableArgument(String type, Object object)
    {
        this.type = type;
        this.object = object;
    }

    /**
     * Delegate formatting to the LicenseArgumentFormatter plugin registered
     * under this argument's type; fall back to plain string conversion when
     * no plugin is configured.
     */
    public void formatTo(Formatter formatter, int flags, int width,
            int precision)
    {
        LicenseArgumentFormatter laf = (LicenseArgumentFormatter) PluginManager
                .getNamedPlugin(LicenseArgumentFormatter.class, type);
        if (laf != null)
        {
            laf.formatTo(formatter, flags, width, object, type);
        }
        else
        {
            // FIXED: format via "%s" instead of using the value's toString()
            // as the format string -- a '%' inside the value would otherwise
            // throw, and a null object would NPE on toString(). With "%s" a
            // null renders as "null".
            formatter.format("%s", object);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
public interface LicenseArgumentFormatter
{
    /**
     * Format the object following the <code>java.util.Formatter</code> rules.
     * The object's concrete type is expected to be known to the implementer,
     * who is therefore free to cast it as appropriate. If a <code>null</code>
     * object is supplied, the implementer is expected to behave as if
     * "sample data" had been requested.
     *
     * @see Formatter
     * @param formatter
     *            the current formatter that needs to process the object
     * @param flags
     *            the flags option for the formatter
     * @param width
     *            the width option for the formatter
     * @param object
     *            the object to be formatted (may be <code>null</code>)
     * @param type
     *            the type of the object (this is an alias, not the class name! -
     *            i.e. item, collection, eperson, etc.)
     */
    void formatTo(Formatter formatter, int flags, int width, Object object,
            String type);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
 * Class representing a particular bitstream format.
 * <P>
 * Changes to the bitstream format metadata are only written to the database
 * when <code>update</code> is called.
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class BitstreamFormat
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(BitstreamFormat.class);
    /**
     * The "unknown" support level - for bitstream formats that are unknown to
     * the system
     */
    public static final int UNKNOWN = 0;
    /**
     * The "known" support level - for bitstream formats that are known to the
     * system, but not fully supported
     */
    public static final int KNOWN = 1;
    /**
     * The "supported" support level - for bitstream formats known to the system
     * and fully supported.
     */
    public static final int SUPPORTED = 2;
    /** translate support-level ID to string. MUST keep this table in sync
     * with support level definitions above (array index == support level constant).
     */
    private static final String supportLevelText[] =
    { "UNKNOWN", "KNOWN", "SUPPORTED" };
    /** Our context */
    private Context bfContext;
    /** The row in the table representing this format */
    private TableRow bfRow;
    /** File extensions for this format (loaded from the fileextension table) */
    private List<String> extensions;
/**
* Class constructor for creating a BitstreamFormat object based on the
* contents of a DB table row.
*
* @param context
* the context this object exists in
* @param row
* the corresponding row in the table
* @throws SQLException
*/
BitstreamFormat(Context context, TableRow row) throws SQLException
{
bfContext = context;
bfRow = row;
extensions = new ArrayList<String>();
TableRowIterator tri = DatabaseManager.query(context,
"SELECT * FROM fileextension WHERE bitstream_format_id= ? ",
getID());
try
{
while (tri.hasNext())
{
extensions.add(tri.next().getStringColumn("extension"));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
// Cache ourselves
context.cache(this, row.getIntColumn("bitstream_format_id"));
}
/**
* Get a bitstream format from the database.
*
* @param context
* DSpace context object
* @param id
* ID of the bitstream format
*
* @return the bitstream format, or null if the ID is invalid.
* @throws SQLException
*/
public static BitstreamFormat find(Context context, int id)
throws SQLException
{
// First check the cache
BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
BitstreamFormat.class, id);
if (fromCache != null)
{
return fromCache;
}
TableRow row = DatabaseManager.find(context, "bitstreamformatregistry",
id);
if (row == null)
{
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context,
"find_bitstream_format",
"not_found,bitstream_format_id=" + id));
}
return null;
}
// not null, return format object
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_bitstream_format",
"bitstream_format_id=" + id));
}
return new BitstreamFormat(context, row);
}
/**
* Find a bitstream format by its (unique) MIME type.
* If more than one bitstream format has the same MIME type, the
* one returned is unpredictable.
*
* @param context
* DSpace context object
* @param mimeType
* MIME type value
*
* @return the corresponding bitstream format, or <code>null</code> if
* there's no bitstream format with the given MIMEtype.
* @throws SQLException
*/
public static BitstreamFormat findByMIMEType(Context context,
String mimeType) throws SQLException
{
// NOTE: Avoid internal formats since e.g. "License" also has
// a MIMEtype of text/plain.
TableRow formatRow = DatabaseManager.querySingle(context,
"SELECT * FROM bitstreamformatregistry "+
"WHERE mimetype LIKE ? AND internal = '0' ",
mimeType);
if (formatRow == null)
{
return null;
}
return findByFinish(context, formatRow);
}
/**
* Find a bitstream format by its (unique) short description
*
* @param context
* DSpace context object
* @param desc
* the short description
*
* @return the corresponding bitstream format, or <code>null</code> if
* there's no bitstream format with the given short description
* @throws SQLException
*/
public static BitstreamFormat findByShortDescription(Context context,
String desc) throws SQLException
{
TableRow formatRow = DatabaseManager.findByUnique(context,
"bitstreamformatregistry", "short_description", desc);
if (formatRow == null)
{
return null;
}
return findByFinish(context, formatRow);
}
// shared final logic in findBy... methods;
// use context's cache for object mapped from table row.
private static BitstreamFormat findByFinish(Context context,
TableRow formatRow)
throws SQLException
{
// not null
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_bitstream",
"bitstream_format_id="
+ formatRow.getIntColumn("bitstream_format_id")));
}
// From cache?
BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
BitstreamFormat.class, formatRow
.getIntColumn("bitstream_format_id"));
if (fromCache != null)
{
return fromCache;
}
return new BitstreamFormat(context, formatRow);
}
/**
* Get the generic "unknown" bitstream format.
*
* @param context
* DSpace context object
*
* @return the "unknown" bitstream format.
* @throws SQLException
*
* @throws IllegalStateException
* if the "unknown" bitstream format couldn't be found
*/
public static BitstreamFormat findUnknown(Context context)
throws SQLException
{
BitstreamFormat bf = findByShortDescription(context, "Unknown");
if (bf == null)
{
throw new IllegalStateException(
"No `Unknown' bitstream format in registry");
}
return bf;
}
/**
* Retrieve all bitstream formats from the registry, ordered by ID
*
* @param context
* DSpace context object
*
* @return the bitstream formats.
* @throws SQLException
*/
public static BitstreamFormat[] findAll(Context context)
throws SQLException
{
List<BitstreamFormat> formats = new ArrayList<BitstreamFormat>();
TableRowIterator tri = DatabaseManager.queryTable(context, "bitstreamformatregistry",
"SELECT * FROM bitstreamformatregistry ORDER BY bitstream_format_id");
try
{
while (tri.hasNext())
{
TableRow row = tri.next();
// From cache?
BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
BitstreamFormat.class, row
.getIntColumn("bitstream_format_id"));
if (fromCache != null)
{
formats.add(fromCache);
}
else
{
formats.add(new BitstreamFormat(context, row));
}
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
// Return the formats as an array
BitstreamFormat[] formatArray = new BitstreamFormat[formats.size()];
formatArray = (BitstreamFormat[]) formats.toArray(formatArray);
return formatArray;
}
/**
* Retrieve all non-internal bitstream formats from the registry. The
* "unknown" format is not included, and the formats are ordered by support
* level (highest first) first then short description.
*
* @param context
* DSpace context object
*
* @return the bitstream formats.
* @throws SQLException
*/
public static BitstreamFormat[] findNonInternal(Context context)
throws SQLException
{
List<BitstreamFormat> formats = new ArrayList<BitstreamFormat>();
String myQuery = "SELECT * FROM bitstreamformatregistry WHERE internal='0' "
+ "AND short_description NOT LIKE 'Unknown' "
+ "ORDER BY support_level DESC, short_description";
TableRowIterator tri = DatabaseManager.queryTable(context,
"bitstreamformatregistry", myQuery);
try
{
while (tri.hasNext())
{
TableRow row = tri.next();
// From cache?
BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
BitstreamFormat.class, row
.getIntColumn("bitstream_format_id"));
if (fromCache != null)
{
formats.add(fromCache);
}
else
{
formats.add(new BitstreamFormat(context, row));
}
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
// Return the formats as an array
BitstreamFormat[] formatArray = new BitstreamFormat[formats.size()];
formatArray = (BitstreamFormat[]) formats.toArray(formatArray);
return formatArray;
}
/**
* Create a new bitstream format
*
* @param context
* DSpace context object
* @return the newly created BitstreamFormat
* @throws SQLException
* @throws AuthorizeException
*/
public static BitstreamFormat create(Context context) throws SQLException,
AuthorizeException
{
// Check authorisation - only administrators can create new formats
if (!AuthorizeManager.isAdmin(context))
{
throw new AuthorizeException(
"Only administrators can create bitstream formats");
}
// Create a table row
TableRow row = DatabaseManager.create(context,
"bitstreamformatregistry");
log.info(LogManager.getHeader(context, "create_bitstream_format",
"bitstream_format_id="
+ row.getIntColumn("bitstream_format_id")));
return new BitstreamFormat(context, row);
}
/**
* Get the internal identifier of this bitstream format
*
* @return the internal identifier
*/
public final int getID()
{
return bfRow.getIntColumn("bitstream_format_id");
}
/**
* Get a short (one or two word) description of this bitstream format
*
* @return the short description
*/
public final String getShortDescription()
{
return bfRow.getStringColumn("short_description");
}
/**
* Set the short description of the bitstream format
*
* @param s
* the new short description
*/
public final void setShortDescription(String s)
throws SQLException
{
// You can not reset the unknown's registry's name
BitstreamFormat unknown = null;
try {
unknown = findUnknown(bfContext);
} catch (IllegalStateException e) {
// No short_description='Unknown' found in bitstreamformatregistry
// table. On first load of registries this is expected because it
// hasn't been inserted yet! So, catch but ignore this runtime
// exception thrown by method findUnknown.
}
// If the exception was thrown, unknown will == null so goahead and
// load s. If not, check that the unknown's registry's name is not
// being reset.
if (unknown == null || unknown.getID() != getID()) {
bfRow.setColumn("short_description", s);
}
}
/**
* Get a description of this bitstream format, including full application or
* format name
*
* @return the description
*/
public final String getDescription()
{
return bfRow.getStringColumn("description");
}
/**
* Set the description of the bitstream format
*
* @param s
* the new description
*/
public final void setDescription(String s)
{
bfRow.setColumn("description", s);
}
/**
* Get the MIME type of this bitstream format, for example
* <code>text/plain</code>
*
* @return the MIME type
*/
public final String getMIMEType()
{
return bfRow.getStringColumn("mimetype");
}
/**
* Set the MIME type of the bitstream format
*
* @param s
* the new MIME type
*/
public final void setMIMEType(String s)
{
bfRow.setColumn("mimetype", s);
}
/**
* Get the support level for this bitstream format - one of
* <code>UNKNOWN</code>,<code>KNOWN</code> or <code>SUPPORTED</code>.
*
* @return the support level
*/
public final int getSupportLevel()
{
return bfRow.getIntColumn("support_level");
}
/**
* Get the support level text for this bitstream format - one of
* <code>UNKNOWN</code>,<code>KNOWN</code> or <code>SUPPORTED</code>.
*
* @return the support level
*/
public String getSupportLevelText() {
return supportLevelText[getSupportLevel()];
}
/**
* Set the support level for this bitstream format - one of
* <code>UNKNOWN</code>,<code>KNOWN</code> or <code>SUPPORTED</code>.
*
* @param sl
* the new support level
*/
public final void setSupportLevel(int sl)
{
// Sanity check
if ((sl < 0) || (sl > 2))
{
throw new IllegalArgumentException("Invalid support level");
}
bfRow.setColumn("support_level", sl);
}
/**
* Find out if the bitstream format is an internal format - that is, one
* that is used to store system information, rather than the content of
* items in the system
*
* @return <code>true</code> if the bitstream format is an internal type
*/
public final boolean isInternal()
{
return bfRow.getBooleanColumn("internal");
}
/**
* Set whether the bitstream format is an internal format
*
* @param b
* pass in <code>true</code> if the bitstream format is an
* internal type
*/
public final void setInternal(boolean b)
{
bfRow.setColumn("internal", b);
}
/**
* Update the bitstream format metadata
*
* @throws SQLException
* @throws AuthorizeException
*/
public void update() throws SQLException, AuthorizeException
{
// Check authorisation - only administrators can change formats
if (!AuthorizeManager.isAdmin(bfContext))
{
throw new AuthorizeException(
"Only administrators can modify bitstream formats");
}
log.info(LogManager.getHeader(bfContext, "update_bitstream_format",
"bitstream_format_id=" + getID()));
// Delete extensions
DatabaseManager.updateQuery(bfContext,
"DELETE FROM fileextension WHERE bitstream_format_id= ? ",
getID());
// Rewrite extensions
for (int i = 0; i < extensions.size(); i++)
{
String s = extensions.get(i);
TableRow r = DatabaseManager.row("fileextension");
r.setColumn("bitstream_format_id", getID());
r.setColumn("extension", s);
DatabaseManager.insert(bfContext, r);
}
DatabaseManager.update(bfContext, bfRow);
}
/**
* Delete this bitstream format. This converts the types of any bitstreams
* that may have this type to "unknown". Use this with care!
*
* @throws SQLException
* @throws AuthorizeException
*/
public void delete() throws SQLException, AuthorizeException
{
// Check authorisation - only administrators can delete formats
if (!AuthorizeManager.isAdmin(bfContext))
{
throw new AuthorizeException(
"Only administrators can delete bitstream formats");
}
// Find "unknown" type
BitstreamFormat unknown = findUnknown(bfContext);
if (unknown.getID() == getID())
{
throw new IllegalArgumentException("The Unknown bitstream format may not be deleted.");
}
// Remove from cache
bfContext.removeCached(this, getID());
// Set bitstreams with this format to "unknown"
int numberChanged = DatabaseManager.updateQuery(bfContext,
"UPDATE bitstream SET bitstream_format_id= ? " +
" WHERE bitstream_format_id= ? ",
unknown.getID(),getID());
// Delete extensions
DatabaseManager.updateQuery(bfContext,
"DELETE FROM fileextension WHERE bitstream_format_id= ? ",
getID());
// Delete this format from database
DatabaseManager.delete(bfContext, bfRow);
log.info(LogManager.getHeader(bfContext, "delete_bitstream_format",
"bitstream_format_id=" + getID() + ",bitstreams_changed="
+ numberChanged));
}
/**
* Get the filename extensions associated with this format
*
* @return the extensions
*/
public String[] getExtensions()
{
String[] exts = new String[extensions.size()];
exts = (String[]) extensions.toArray(exts);
return exts;
}
/**
* Set the filename extensions associated with this format
*
* @param exts
* String [] array of extensions
*/
public void setExtensions(String[] exts)
{
extensions = new ArrayList<String>();
for (int i = 0; i < exts.length; i++)
{
extensions.add(exts[i]);
}
}
/**
* If you know the support level string, look up the corresponding type ID
* constant.
*
* @param slevel
* String with the name of the action (must be exact match)
*
* @return the corresponding action ID, or <code>-1</code> if the action
* string is unknown
*/
public static int getSupportLevelID(String slevel)
{
for (int i = 0; i < supportLevelText.length; i++)
{
if (supportLevelText[i].equals(slevel))
{
return i;
}
}
return -1;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Enumeration;
import org.apache.log4j.Logger;
import org.dspace.content.MetadataField;
import org.dspace.core.ConfigurationManager;
/**
* Broker for metadata authority settings configured for each metadata field.
*
 * Configuration keys, per metadata field (e.g. "dc.contributor.author")
*
* # is field authority controlled (i.e. store authority, confidence values)?
* authority.controlled.<FIELD> = true
*
* # is field required to have an authority value, or may it be empty?
* # default is false.
* authority.required.<FIELD> = true | false
*
* # default value of minimum confidence level for ALL fields - must be
* # symbolic confidence level, see org.dspace.content.authority.Choices
* authority.minconfidence = uncertain
*
* # minimum confidence level for this field
* authority.minconfidence.SCHEMA.ELEMENT.QUALIFIER = SYMBOL
* e.g.
* authority.minconfidence.dc.contributor.author = accepted
*
* NOTE: There is *expected* to be a "choices" (see ChoiceAuthorityManager)
* configuration for each authority-controlled field.
*
* @see ChoiceAuthorityManager
* @see Choices
* @author Larry Stone
*/
public class MetadataAuthorityManager
{
    private static Logger log = Logger.getLogger(MetadataAuthorityManager.class);

    /** Lazily-created singleton; access only through {@link #getManager()}. */
    private static MetadataAuthorityManager cached = null;

    // map of field key to whether the field is authority-controlled
    private Map<String,Boolean> controlled = new HashMap<String,Boolean>();

    // map of field key to answer of whether field is required to be controlled
    private Map<String,Boolean> isAuthorityRequired = new HashMap<String,Boolean>();

    /**
     * map of field key to answer of which is the min acceptable confidence
     * value for a field with authority
     */
    private Map<String, Integer> minConfidence = new HashMap<String, Integer>();

    /** fallback default value unless authority.minconfidence = X is configured. */
    private int defaultMinConfidence = Choices.CF_ACCEPTED;

    private MetadataAuthorityManager()
    {
        // Scan the whole configuration for "authority.controlled.<field>"
        // keys and record the per-field settings they imply.
        Enumeration pn = ConfigurationManager.propertyNames();
        final String authPrefix = "authority.controlled.";

      property:
        while (pn.hasMoreElements())
        {
            String key = (String)pn.nextElement();
            if (key.startsWith(authPrefix))
            {
                // field is expected to be "schema.element.qualifier"
                String field = key.substring(authPrefix.length());
                int dot = field.indexOf('.');
                if (dot < 0)
                {
                    log.warn("Skipping invalid MetadataAuthority configuration property: "+key+": does not have schema.element.qualifier");
                    continue property;
                }
                String schema = field.substring(0, dot);
                String element = field.substring(dot+1);
                String qualifier = null;
                dot = element.indexOf('.');
                if (dot >= 0)
                {
                    qualifier = element.substring(dot+1);
                    element = element.substring(0, dot);
                }

                String fkey = makeFieldKey(schema, element, qualifier);
                boolean ctl = ConfigurationManager.getBooleanProperty(key, true);
                boolean req = ConfigurationManager.getBooleanProperty("authority.required."+field, false);
                controlled.put(fkey, Boolean.valueOf(ctl));
                isAuthorityRequired.put(fkey, Boolean.valueOf(req));

                // get minConfidence level for this field if any
                int mci = readConfidence("authority.minconfidence."+field);
                if (mci >= Choices.CF_UNSET)
                {
                    minConfidence.put(fkey, Integer.valueOf(mci));
                }
                log.debug("Authority Control: For schema="+schema+", elt="+element+", qual="+qualifier+", controlled="+ctl+", required="+req);
            }
        }

        // get default min confidence if any:
        int dmc = readConfidence("authority.minconfidence");
        if (dmc >= Choices.CF_UNSET)
        {
            defaultMinConfidence = dmc;
        }
    }

    /**
     * Parse a confidence keyword from configuration.
     *
     * @param key name of the configuration property to read
     * @return the confidence value, or Choices.CF_UNSET-1 (the "invalid"
     *         sentinel) when the property is absent or not a valid keyword
     */
    private int readConfidence(String key)
    {
        String mc = ConfigurationManager.getProperty(key);
        if (mc != null)
        {
            int mci = Choices.getConfidenceValue(mc.trim(), Choices.CF_UNSET-1);
            if (mci == Choices.CF_UNSET-1)
            {
                log.warn("IGNORING bad value in DSpace Configuration, key="+key+", value="+mc+", must be a valid Authority Confidence keyword.");
            }
            else
            {
                return mci;
            }
        }
        return Choices.CF_UNSET-1;
    }

    /**
     * Factory method. Synchronized so that concurrent first callers cannot
     * each construct (and publish) a separate instance -- the previous
     * unsynchronized lazy initialization was racy.
     */
    public static synchronized MetadataAuthorityManager getManager()
    {
        if (cached == null)
        {
            cached = new MetadataAuthorityManager();
        }
        return cached;
    }

    /** Predicate - is field authority-controlled? */
    public boolean isAuthorityControlled(String schema, String element, String qualifier)
    {
        return isAuthorityControlled(makeFieldKey(schema, element, qualifier));
    }

    /** Predicate - is field authority-controlled? */
    public boolean isAuthorityControlled(String fieldKey)
    {
        return controlled.containsKey(fieldKey) && controlled.get(fieldKey).booleanValue();
    }

    /** Predicate - is authority value required for field? */
    public boolean isAuthorityRequired(String schema, String element, String qualifier)
    {
        return isAuthorityRequired(makeFieldKey(schema, element, qualifier));
    }

    /** Predicate - is authority value required for field? */
    public boolean isAuthorityRequired(String fieldKey)
    {
        Boolean result = isAuthorityRequired.get(fieldKey);
        return (result == null) ? false : result.booleanValue();
    }

    /**
     * Construct a single key from the tuple of schema/element/qualifier
     * that describes a metadata field. Punt to the function we use for
     * submission UI input forms, for now.
     */
    public static String makeFieldKey(String schema, String element, String qualifier)
    {
        return MetadataField.formKey(schema, element, qualifier);
    }

    /**
     * Give the minimal level of confidence required to consider valid an authority value
     * for the given metadata.
     * @return the minimal valid level of confidence for the given metadata
     */
    public int getMinConfidence(String schema, String element, String qualifier)
    {
        Integer result = minConfidence.get(makeFieldKey(schema, element, qualifier));
        return result == null ? defaultMinConfidence : result.intValue();
    }

    /**
     * Return the list of metadata fields with authority control. The strings
     * are in the form <code>schema.element[.qualifier]</code>
     *
     * @return the list of metadata fields with authority control
     */
    public List<String> getAuthorityMetadata() {
        List<String> copy = new ArrayList<String>();
        for (String s : controlled.keySet())
        {
            copy.add(s.replaceAll("_","."));
        }
        return copy;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.io.File;
import org.apache.commons.lang.ArrayUtils;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.xml.sax.InputSource;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.SelfNamedPlugin;
/**
* ChoiceAuthority source that reads the JSPUI-style hierarchical vocabularies
* from ${dspace.dir}/config/controlled-vocabularies/*.xml and turns them into
* autocompleting authorities.
*
* Configuration:
* This MUST be configured as a self-named plugin, e.g.:
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
* org.dspace.content.authority.DSpaceControlledVocabulary
*
* It AUTOMATICALLY configures a plugin instance for each XML file in the
* controlled vocabularies directory. The name of the plugin is the basename
* of the file; e.g., "${dspace.dir}/config/controlled-vocabularies/nsi.xml"
* would generate a plugin called "nsi".
*
* Each configured plugin comes with three configuration options:
* vocabulary.plugin._plugin_.hierarchy.store = <true|false> # Store entire hierarchy along with selected value. Default: TRUE
* vocabulary.plugin._plugin_.hierarchy.suggest = <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE
* vocabulary.plugin._plugin_.delimiter = "<string>" # Delimiter to use when building hierarchy strings. Default: "::"
*
*
* @author Michael B. Klein
*
*/
public class DSpaceControlledVocabulary extends SelfNamedPlugin implements ChoiceAuthority
{
    private static Logger log = Logger.getLogger(DSpaceControlledVocabulary.class);

    /** Case-insensitive substring match on node labels; %s is the query. */
    private static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ','abcdefghijklmnopqrstuvwxyz'),'%s')]";

    /** Exact lookup of a vocabulary node by its id attribute. */
    private static String idTemplate = "//node[@id = '%s']";

    private static String pluginNames[] = null;

    private String vocabularyName = null;
    private InputSource vocabulary = null;
    private Boolean suggestHierarchy = true;
    private Boolean storeHierarchy = true;
    private String hierarchyDelimiter = "::";

    public DSpaceControlledVocabulary()
    {
        super();
    }

    /**
     * Report one plugin name per vocabulary XML file: the file's basename
     * without the ".xml" suffix.
     */
    public static String[] getPluginNames()
    {
        if (pluginNames == null)
        {
            initPluginNames();
        }

        return (String[]) ArrayUtils.clone(pluginNames);
    }

    private static synchronized void initPluginNames()
    {
        if (pluginNames == null)
        {
            class xmlFilter implements java.io.FilenameFilter
            {
                public boolean accept(File dir, String name)
                {
                    return name.endsWith(".xml");
                }
            }
            String vocabulariesPath = ConfigurationManager.getProperty("dspace.dir") + "/config/controlled-vocabularies/";
            String[] xmlFiles = (new File(vocabulariesPath)).list(new xmlFilter());
            List<String> names = new ArrayList<String>();

            // FIX: File.list() returns null when the directory is missing or
            // unreadable; the previous code then failed with an NPE. Treat
            // that case as "no vocabularies configured".
            if (xmlFiles != null)
            {
                for (String filename : xmlFiles)
                {
                    names.add((new File(filename)).getName().replace(".xml",""));
                }
            }
            pluginNames = names.toArray(new String[names.size()]);
            log.info("Got plugin names = "+Arrays.deepToString(pluginNames));
        }
    }

    /**
     * Lazily load this instance's configuration options and vocabulary
     * file on first use.
     */
    private void init()
    {
        if (vocabulary == null)
        {
            log.info("Initializing " + this.getClass().getName());
            vocabularyName = this.getPluginInstanceName();
            String vocabulariesPath = ConfigurationManager.getProperty("dspace.dir") + "/config/controlled-vocabularies/";
            String configurationPrefix = "vocabulary.plugin." + vocabularyName;
            storeHierarchy = ConfigurationManager.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy);
            suggestHierarchy = ConfigurationManager.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy);
            String configuredDelimiter = ConfigurationManager.getProperty(configurationPrefix + ".delimiter");
            if (configuredDelimiter != null)
            {
                // Strip surrounding double quotes from the configured value
                hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)","");
            }
            String filename = vocabulariesPath + vocabularyName + ".xml";
            log.info("Loading " + filename);
            vocabulary = new InputSource(filename);
        }
    }

    /**
     * Build the full hierarchy string ("parent::child::...") for a node by
     * walking up to the document root; unlabelled nodes contribute nothing.
     */
    private String buildString(Node node)
    {
        if (node.getNodeType() == Node.DOCUMENT_NODE)
        {
            return("");
        }

        String parentValue = buildString(node.getParentNode());
        Node currentLabel = node.getAttributes().getNamedItem("label");
        if (currentLabel == null)
        {
            return(parentValue);
        }

        String currentValue = currentLabel.getNodeValue();
        if (parentValue.equals(""))
        {
            return currentValue;
        }
        return(parentValue + this.hierarchyDelimiter + currentValue);
    }

    public Choices getMatches(String field, String text, int collection, int start, int limit, String locale)
    {
        init();
        log.debug("Getting matches for '" + text + "'");

        // FIX: the query text is interpolated into a single-quoted XPath
        // string literal, so an apostrophe in it broke the expression (the
        // old replaceAll("'", "'") was a no-op). Strip apostrophes so such
        // queries still run instead of silently matching nothing.
        String xpathExpression = String.format(xpathTemplate,
                text.replace("'", "").toLowerCase());
        XPath xpath = XPathFactory.newInstance().newXPath();
        Choice[] choices;
        try {
            NodeList results = (NodeList)xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
            String[] authorities = new String[results.getLength()];
            String[] values = new String[results.getLength()];
            String[] labels = new String[results.getLength()];
            for (int i=0; i<results.getLength(); i++)
            {
                Node node = results.item(i);
                String hierarchy = this.buildString(node);
                if (this.suggestHierarchy)
                {
                    labels[i] = hierarchy;
                }
                else
                {
                    labels[i] = node.getAttributes().getNamedItem("label").getNodeValue();
                }
                if (this.storeHierarchy)
                {
                    values[i] = hierarchy;
                }
                else
                {
                    values[i] = node.getAttributes().getNamedItem("label").getNodeValue();
                }
                authorities[i] = node.getAttributes().getNamedItem("id").getNodeValue();
            }

            // FIX: clamp to zero so a start offset past the end of the
            // result list cannot cause a NegativeArraySizeException.
            int resultCount = Math.max(0, Math.min(labels.length - start, limit));
            choices = new Choice[resultCount];
            for (int i=0; i<resultCount; i++)
            {
                choices[i] = new Choice(authorities[start+i],values[start+i],labels[start+i]);
            }
        } catch(XPathExpressionException e) {
            // A bad expression yields no matches rather than an error
            choices = new Choice[0];
        }
        return new Choices(choices, 0, choices.length, Choices.CF_AMBIGUOUS, false);
    }

    public Choices getBestMatch(String field, String text, int collection, String locale)
    {
        init();
        log.debug("Getting best match for '" + text + "'");
        // Ask for at most two hits so callers can tell "unique" from
        // "ambiguous".
        return getMatches(field, text, collection, 0, 2, locale);
    }

    public String getLabel(String field, String key, String locale)
    {
        init();
        String xpathExpression = String.format(idTemplate, key);
        XPath xpath = XPathFactory.newInstance().newXPath();
        try {
            Node node = (Node)xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
            return node.getAttributes().getNamedItem("label").getNodeValue();
        } catch(XPathExpressionException e) {
            return("");
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Enumeration;
import org.apache.log4j.Logger;
import org.dspace.content.MetadataField;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.PluginManager;
/**
* Broker for ChoiceAuthority plugins, and for other information configured
* about the choice aspect of authority control for a metadata field.
*
 * Configuration keys, per metadata field (e.g. "dc.contributor.author")
*
* # names the ChoiceAuthority plugin called for this field
* choices.plugin.<FIELD> = name-of-plugin
*
* # mode of UI presentation desired in submission UI:
* # "select" is dropdown menu, "lookup" is popup with selector, "suggest" is autocomplete/suggest
* choices.presentation.<FIELD> = "select" | "suggest"
*
* # is value "closed" to the set of these choices or are non-authority values permitted?
* choices.closed.<FIELD> = true | false
*
* @author Larry Stone
* @see ChoiceAuthority
*/
public final class ChoiceAuthorityManager
{
    private static Logger log = Logger.getLogger(ChoiceAuthorityManager.class);

    // lazily-created singleton; access only through getManager()
    private static ChoiceAuthorityManager cached = null;

    // map of field key to authority plugin
    private Map<String,ChoiceAuthority> controller = new HashMap<String,ChoiceAuthority>();

    // map of field key to presentation type ("select", "lookup", "suggest")
    private Map<String,String> presentation = new HashMap<String,String>();

    // map of field key to closed value (true = only listed choices allowed)
    private Map<String,Boolean> closed = new HashMap<String,Boolean>();
    private ChoiceAuthorityManager()
    {
        // Scan every configuration property once, picking out the three
        // kinds of "choices." keys and populating the per-field maps.
        Enumeration pn = ConfigurationManager.propertyNames();
        final String choicesPrefix = "choices.";
        final String choicesPlugin = "choices.plugin.";
        final String choicesPresentation = "choices.presentation.";
        final String choicesClosed = "choices.closed.";
      property:
        while (pn.hasMoreElements())
        {
            String key = (String)pn.nextElement();
            if (key.startsWith(choicesPrefix))
            {
                if (key.startsWith(choicesPlugin))
                {
                    // choices.plugin.<field> names the ChoiceAuthority plugin
                    String fkey = config2fkey(key.substring(choicesPlugin.length()));
                    if (fkey == null)
                    {
                        log.warn("Skipping invalid ChoiceAuthority configuration property: "+key+": does not have schema.element.qualifier");
                        continue property;
                    }

                    // XXX FIXME maybe add sanity check, call
                    // MetadataField.findByElement to make sure it's a real field.
                    ChoiceAuthority ma = (ChoiceAuthority)
                        PluginManager.getNamedPlugin(ChoiceAuthority.class, ConfigurationManager.getProperty(key));
                    if (ma == null)
                    {
                        // Misconfigured plugin name: skip the field entirely
                        log.warn("Skipping invalid configuration for "+key+" because named plugin not found: "+ConfigurationManager.getProperty(key));
                        continue property;
                    }
                    controller.put(fkey, ma);

                    log.debug("Choice Control: For field="+fkey+", Plugin="+ma);
                }
                else if (key.startsWith(choicesPresentation))
                {
                    // choices.presentation.<field> = select | lookup | suggest
                    String fkey = config2fkey(key.substring(choicesPresentation.length()));
                    if (fkey == null)
                    {
                        log.warn("Skipping invalid ChoiceAuthority configuration property: "+key+": does not have schema.element.qualifier");
                        continue property;
                    }
                    presentation.put(fkey, ConfigurationManager.getProperty(key));
                }
                else if (key.startsWith(choicesClosed))
                {
                    // choices.closed.<field> = true | false
                    String fkey = config2fkey(key.substring(choicesClosed.length()));
                    if (fkey == null)
                    {
                        log.warn("Skipping invalid ChoiceAuthority configuration property: "+key+": does not have schema.element.qualifier");
                        continue property;
                    }
                    closed.put(fkey, Boolean.valueOf(ConfigurationManager.getBooleanProperty(key)));
                }
                else
                {
                    // a "choices." key matching none of the known prefixes
                    log.error("Illegal configuration property: " + key);
                }
            }
        }
    }
/** Factory method */
public static ChoiceAuthorityManager getManager()
{
if (cached == null)
{
cached = new ChoiceAuthorityManager();
}
return cached;
}
// translate tail of configuration key (supposed to be schema.element.qual)
// into field key
private String config2fkey(String field)
{
// field is expected to be "schema.element.qualifier"
int dot = field.indexOf('.');
if (dot < 0)
{
return null;
}
String schema = field.substring(0, dot);
String element = field.substring(dot+1);
String qualifier = null;
dot = element.indexOf('.');
if (dot >= 0)
{
qualifier = element.substring(dot+1);
element = element.substring(0, dot);
}
return makeFieldKey(schema, element, qualifier);
}
/**
* Wrapper that calls getMatches method of the plugin corresponding to
* the metadata field defined by schema,element,qualifier.
*
* @see ChoiceAuthority#getMatches(String, String, int, int, int, String)
* @param schema schema of metadata field
* @param element element of metadata field
* @param qualifier qualifier of metadata field
* @param query user's value to match
* @param collection database ID of Collection for context (owner of Item)
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
*/
public Choices getMatches(String schema, String element, String qualifier,
String query, int collection, int start, int limit, String locale)
{
return getMatches(makeFieldKey(schema, element, qualifier), query,
collection, start, limit, locale);
}
/**
* Wrapper calls getMatches method of the plugin corresponding to
* the metadata field defined by single field key.
*
* @see ChoiceAuthority#getMatches(String, String, int, int, int, String)
* @param fieldKey single string identifying metadata field
* @param query user's value to match
* @param collection database ID of Collection for context (owner of Item)
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
*/
public Choices getMatches(String fieldKey, String query, int collection,
int start, int limit, String locale)
{
ChoiceAuthority ma = controller.get(fieldKey);
if (ma == null)
{
throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey
+ "\".");
}
return ma.getMatches(fieldKey, query, collection, start, limit, locale);
}
/**
* Wrapper that calls getBestMatch method of the plugin corresponding to
* the metadata field defined by single field key.
*
* @see ChoiceAuthority#getBestMatch(String, String, int, String)
* @param fieldKey single string identifying metadata field
* @param query user's value to match
* @param collection database ID of Collection for context (owner of Item)
* @param locale explicit localization key if available, or null
* @return a Choices object (never null) with 1 or 0 values.
*/
public Choices getBestMatch(String fieldKey, String query, int collection,
String locale)
{
ChoiceAuthority ma = controller.get(fieldKey);
if (ma == null)
{
throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey
+ "\".");
}
return ma.getBestMatch(fieldKey, query, collection, locale);
}
/**
* Wrapper that calls getLabel method of the plugin corresponding to
* the metadata field defined by schema,element,qualifier.
*/
public String getLabel(String schema, String element, String qualifier,
String authKey, String locale)
{
return getLabel(makeFieldKey(schema, element, qualifier), authKey, locale);
}
/**
* Wrapper that calls getLabel method of the plugin corresponding to
* the metadata field defined by single field key.
*/
public String getLabel(String fieldKey, String authKey, String locale)
{
ChoiceAuthority ma = controller.get(fieldKey);
if (ma == null)
{
throw new IllegalArgumentException("No choices plugin was configured for field \"" + fieldKey + "\".");
}
return ma.getLabel(fieldKey, authKey, locale);
}
/**
* Predicate, is there a Choices configuration of any kind for the
* given metadata field?
* @return true if choices are configured for this field.
*/
public boolean isChoicesConfigured(String fieldKey)
{
return controller.containsKey(fieldKey);
}
/**
* Get the presentation keyword (should be "lookup", "select" or "suggest", but this
* is an informal convention so it can be easily extended) for this field.
*
* @return configured presentation type for this field, or null if none found
*/
public String getPresentation(String fieldKey)
{
return presentation.get(fieldKey);
}
/**
* Get the configured "closed" value for this field.
*
* @return true if choices are closed for this field.
*/
public boolean isClosed(String fieldKey)
{
return closed.containsKey(fieldKey) ? closed.get(fieldKey).booleanValue() : false;
}
/**
* Construct a single key from the tuple of schema/element/qualifier
* that describes a metadata field. Punt to the function we use for
* submission UI input forms, for now.
*/
public static String makeFieldKey(String schema, String element, String qualifier)
{
return MetadataField.formKey(schema, element, qualifier);
}
/**
* Construct a single key from the "dot" notation e.g. "dc.rights"
*/
public static String makeFieldKey(String dotty)
{
return dotty.replace(".", "_");
}
/**
* Wrapper to call plugin's getVariants().
*/
public List<String> getVariants(String schema, String element, String qualifier,
String authorityKey, String language)
{
ChoiceAuthority ma = controller.get(makeFieldKey(schema, element, qualifier));
if (ma instanceof AuthorityVariantsSupport)
{
AuthorityVariantsSupport avs = (AuthorityVariantsSupport) ma;
return avs.getVariants(authorityKey, language);
}
return null;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
/**
 * Record class to hold the data describing one option, or choice, for an
 * authority-controlled metadata value.
 *
 * @author Larry Stone
 * @see Choices
 */
public class Choice
{
    /** Authority key for this value */
    public String authority = null;

    /** Label to display for this value (e.g. to present in UI menu) */
    public String label = null;

    /** The canonical text value to insert into MetadataValue's text field */
    public String value = null;

    /** Construct an empty choice; callers populate the public fields directly. */
    public Choice()
    {
    }

    /**
     * Construct a fully-populated choice.
     *
     * @param authority authority key for this value
     * @param value canonical text value
     * @param label display label for this value
     */
    public Choice(String authority, String value, String label)
    {
        this.label = label;
        this.value = value;
        this.authority = authority;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
/**
 * Plugin interface that supplies an authority control mechanism for
 * one metadata field.
 *
 * Implementations are typically instantiated as named plugins and shared;
 * NOTE(review): thread-safety expectations are not stated here -- implementors
 * should assume concurrent calls and confirm against the plugin manager.
 *
 * @author Larry Stone
 * @see ChoiceAuthorityManager
 * @see MetadataAuthorityManager
 */
public interface ChoiceAuthority
{
    /**
     * Get all values from the authority that match the profferred value.
     * Note that the offering was entered by the user and may contain
     * mixed/incorrect case, whitespace, etc so the plugin should be careful
     * to clean up user data before making comparisons.
     *
     * Value of a "Name" field will be in canonical DSpace person name format,
     * which is "Lastname, Firstname(s)", e.g. "Smith, John Q.".
     *
     * Some authorities with a small set of values may simply return the whole
     * set for any sample value, although it's a good idea to set the
     * defaultSelected index in the Choices instance to the choice, if any,
     * that matches the value.
     *
     * @param field being matched for
     * @param text user's value to match
     * @param collection database ID of Collection for context (owner of Item)
     * @param start choice at which to start, 0 is first.
     * @param limit maximum number of choices to return, 0 for no limit.
     * @param locale explicit localization key if available, or null
     * @return a Choices object (never null).
     */
    public Choices getMatches(String field, String text, int collection, int start, int limit, String locale);

    /**
     * Get the single "best" match (if any) of a value in the authority
     * to the given user value. The "confidence" element of Choices is
     * expected to be set to a meaningful value about the circumstances of
     * this match.
     *
     * This call is typically used in non-interactive metadata ingest
     * where there is no interactive agent to choose from among options.
     *
     * @param field being matched for
     * @param text user's value to match
     * @param collection database ID of Collection for context (owner of Item)
     * @param locale explicit localization key if available, or null
     * @return a Choices object (never null) with 1 or 0 values.
     */
    public Choices getBestMatch(String field, String text, int collection, String locale);

    /**
     * Get the canonical user-visible "label" (i.e. short descriptive text)
     * for a key in the authority. Can be localized given the implicit
     * or explicit locale specification.
     *
     * This may get called many times while populating a Web page so it should
     * be implemented as efficiently as possible.
     *
     * @param field being matched for
     * @param key authority key known to this authority.
     * @param locale explicit localization key if available, or null
     * @return descriptive label - should always return something, never null.
     */
    public String getLabel(String field, String key, String locale);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import org.apache.commons.httpclient.NameValuePair;
/**
 * Sample Journal-name authority based on SHERPA/RoMEO
 *
 * WARNING: This is a very crude and incomplete implementation, done mainly
 * as a proof-of-concept. Any site that actually wants to use it will
 * probably have to refine it (and give patches back to dspace.org).
 *
 * @see SHERPARoMEOProtocol
 * @author Larry Stone
 * @version $Revision $
 */
public class SHERPARoMEOJournalTitle extends SHERPARoMEOProtocol
{
    // RoMEO XML element names: one result record, its display label,
    // and the element carrying the authority key.
    private static final String RESULT = "journal";
    private static final String LABEL = "jtitle";
    private static final String AUTHORITY = "issn";

    public SHERPARoMEOJournalTitle()
    {
        super();
    }

    /**
     * Query SHERPA/RoMEO for journal titles containing the given text.
     * An empty or null query yields an empty (error) Choices result.
     */
    public Choices getMatches(String text, int collection, int start, int limit, String locale)
    {
        // Punt if there is no query text.
        if (text == null || text.trim().length() == 0)
        {
            return new Choices(true);
        }

        // Query args to add to the SHERPA/RoMEO request URL.
        NameValuePair[] args = {
            new NameValuePair("jtitle", text),
            new NameValuePair("qtype", "contains") // OR: starts, exact
        };

        Choices result = query(RESULT, LABEL, AUTHORITY, args, start, limit);
        // A null result means the query failed; report it as an error result.
        return (result == null) ? new Choices(true) : result;
    }

    @Override
    public Choices getMatches(String field, String text, int collection, int start, int limit, String locale)
    {
        // The field name is irrelevant here: this authority serves one field.
        return getMatches(text, collection, start, limit, locale);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.io.IOException;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.XMLReader;
import org.xml.sax.InputSource;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.util.EncodingUtil;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.httpclient.HttpException;
/**
 * Choice Authority based on SHERPA/RoMEO - for Publishers and Journals
 * See the subclasses SHERPARoMEOPublisher and SHERPARoMEOJournalTitle
 * for actual choice plugin implementations. This is a superclass
 * containing all the common protocol logic.
 *
 * Reads these DSpace Config properties:
 *
 *    # contact URL for server
 *    sherpa.romeo.url = http://www.sherpa.ac.uk/romeoapi11.php
 *
 * WARNING: This is a very crude and incomplete implementation, done mainly
 * as a proof-of-concept. Any site that actually wants to use it will
 * probably have to refine it (and give patches back to dspace.org).
 *
 * @see SHERPARoMEOPublisher
 * @see SHERPARoMEOJournalTitle
 * @author Larry Stone
 * @version $Revision $
 */
public abstract class SHERPARoMEOProtocol implements ChoiceAuthority
{
    private static Logger log = Logger.getLogger(SHERPARoMEOProtocol.class);

    // contact URL from configuration; loaded lazily by the first instance
    // constructed and then shared by all subclasses.
    private static String url = null;

    /**
     * Loads the SHERPA/RoMEO endpoint URL from DSpace configuration on
     * first construction.
     *
     * @throws IllegalStateException if sherpa.romeo.url is not configured
     */
    public SHERPARoMEOProtocol()
    {
        if (url == null)
        {
            url = ConfigurationManager.getProperty("sherpa.romeo.url");

            // sanity check
            if (url == null)
            {
                throw new IllegalStateException("Missing DSpace configuration keys for SHERPA/RoMEO Query");
            }
        }
    }

    // this implements the specific RoMEO API args and XML tag naming
    public abstract Choices getMatches(String text, int collection, int start, int limit, String locale);

    /**
     * Best-match is implemented as an ordinary match query limited to 2
     * results -- enough to distinguish "unique" from "ambiguous".
     */
    public Choices getBestMatch(String field, String text, int collection, String locale)
    {
        return getMatches(field, text, collection, 0, 2, locale);
    }

    // XXX FIXME just punt, returning value, never got around to
    // implementing a reverse query.
    public String getLabel(String field, String key, String locale)
    {
        return key;
    }

    // NOTE - ignore limit and start for now
    /**
     * Issue an HTTP GET against the SHERPA/RoMEO API and parse the XML
     * response into a Choices object.
     *
     * @param result name of the XML element holding one result record
     * @param label name of the XML element holding the display label
     * @param authority name of the element holding the authority key, or null
     * @param args query parameters to append to the configured URL
     * @param start index reported back in the Choices result
     * @param limit maximum results (currently ignored, see NOTE above)
     * @return a Choices object, or null on HTTP/parse failure or non-200 status
     */
    protected Choices query(String result, String label, String authority,
            NameValuePair[] args, int start, int limit)
    {
        HttpClient hc = new HttpClient();
        // "UTF8" is the HttpClient charset-name spelling for UTF-8 encoding.
        String srUrl = url + "?" + EncodingUtil.formUrlEncode(args, "UTF8");
        GetMethod get = new GetMethod(srUrl);
        log.debug("Trying SHERPA/RoMEO Query, URL="+srUrl);
        try
        {
            int status = hc.executeMethod(get);
            if (status == 200)
            {
                SAXParserFactory spf = SAXParserFactory.newInstance();
                SAXParser sp = spf.newSAXParser();
                XMLReader xr = sp.getXMLReader();
                SRHandler handler = new SRHandler(result, label, authority);

                // XXX FIXME: should turn off validation here explicitly, but
                //  it seems to be off by default.
                xr.setFeature("http://xml.org/sax/features/namespaces", true);
                xr.setContentHandler(handler);
                xr.setErrorHandler(handler);
                xr.parse(new InputSource(get.getResponseBodyAsStream()));

                // Map the hit count onto a confidence level: none, one, many.
                int confidence;
                if (handler.total == 0)
                {
                    confidence = Choices.CF_NOTFOUND;
                }
                else if (handler.total == 1)
                {
                    confidence = Choices.CF_UNCERTAIN;
                }
                else
                {
                    confidence = Choices.CF_AMBIGUOUS;
                }
                return new Choices(handler.result, start, handler.total, confidence, false);
            }
        }
        catch (HttpException e)
        {
            log.error("SHERPA/RoMEO query failed: ", e);
            return null;
        }
        catch (IOException e)
        {
            log.error("SHERPA/RoMEO query failed: ", e);
            return null;
        }
        catch (ParserConfigurationException  e)
        {
            log.warn("Failed parsing SHERPA/RoMEO result: ", e);
            return null;
        }
        catch (SAXException  e)
        {
            log.warn("Failed parsing SHERPA/RoMEO result: ", e);
            return null;
        }
        finally
        {
            // Always release the HTTP connection back to the client.
            get.releaseConnection();
        }
        // Reached only when the server answered with a non-200 status.
        return null;
    }

    // SAX handler to grab SHERPA/RoMEO (and eventually other details) from result
    private static class SRHandler
        extends DefaultHandler
    {
        // Filled in as parsing proceeds; read by query() after parse completes.
        private Choice result[] = null;
        int rindex = 0; // result index
        int total = 0;

        // name of element containing a result, e.g. <journal>
        private String resultElement = null;

        // name of element containing the label e.g. <name>
        private String labelElement = null;

        // name of element containing the authority value e.g. <issn>
        private String authorityElement = null;

        protected String textValue = null;

        public SRHandler(String result, String label, String authority)
        {
            super();
            resultElement = result;
            labelElement = label;
            authorityElement = authority;
        }

        // NOTE:  text value MAY be presented in multiple calls, even if
        //  it all one word, so be ready to splice it together.
        // BEWARE:  subclass's startElement method should call super()
        //  to null out 'value'.  (Don't you miss the method combination
        //  options of a real object system like CLOS?)
        public void characters(char[] ch, int start, int length)
            throws SAXException
        {
            String newValue = new String(ch, start, length);
            if (newValue.length() > 0)
            {
                if (textValue == null)
                {
                    textValue = newValue;
                }
                else
                {
                    textValue += newValue;
                }
            }
        }

        // if this was the FIRST "numhits" element, it's size of results:
        // NOTE(review): the handler relies on <numhits> arriving before any
        // result element -- 'result' stays null until then and the branches
        // below would NPE on an out-of-order response. Confirm against the
        // RoMEO API response format.
        public void endElement(String namespaceURI, String localName,
                                 String qName)
            throws SAXException
        {
            if (localName.equals("numhits"))
            {
                String stotal = textValue.trim();
                if (stotal.length() > 0)
                {
                    total = Integer.parseInt(stotal);
                    result = new Choice[total];
                    if (total > 0)
                    {
                        result[0] = new Choice();
                        log.debug("Got "+total+" records in results.");
                    }
                }
            }
            else if (localName.equals(resultElement))
            {
                // after start of result element, get next hit ready
                if (++rindex < result.length)
                {
                    result[rindex] = new Choice();
                }
            }
            else if (localName.equals(labelElement) && textValue != null)
            {
                // plug in label value
                // NOTE(review): no bounds check on rindex here; assumes the
                // server never sends more result records than <numhits>.
                result[rindex].value = textValue.trim();
                result[rindex].label = result[rindex].value;
            }
            else if (authorityElement != null && localName.equals(authorityElement) && textValue != null)
            {
                // plug in authority value
                result[rindex].authority = textValue.trim();
            }
            else if (localName.equals("message") && textValue != null)
            {
                // error message
                log.warn("SHERPA/RoMEO response error message: " + textValue.trim());
            }
        }

        // subclass overriding this MUST call it with super()
        public void startElement(String namespaceURI, String localName,
                                 String qName, Attributes atts)
            throws SAXException
        {
            // Reset the accumulator; characters() splices text back together.
            textValue = null;
        }

        // Escalate recoverable parse errors to hard failures so query()
        // reports them instead of returning partial results.
        public void error(SAXParseException exception)
            throws SAXException
        {
            throw new SAXException(exception);
        }

        public void fatalError(SAXParseException exception)
            throws SAXException
        {
            throw new SAXException(exception);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.XMLReader;
import org.xml.sax.InputSource;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.content.DCPersonName;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.util.EncodingUtil;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.httpclient.HttpException;
/**
 * Sample personal name authority based on Library of Congress Name Authority
 * Also serves as an example of an SRU client as authority.
 *
 * This is tuned for the data in the LC Name Authority test instance, see
 * http://alcme.oclc.org/srw/search/lcnaf
 *
 * WARNING: This is just a proof-of-concept implementation.  It would need
 * WARNING: lots of refinement to be used in production, because it is very
 * WARNING: sloppy about digging through the MARC/XML results.  No doubt
 * WARNING: it is losing a lot of valid results and information.
 * WARNING: Could also do a better job including more info (title, life dates
 * WARNING: etc) in the label instead of just the name.
 *
 * Reads these DSpace Config properties:
 *
 *    lcname.url = http://alcme.oclc.org/srw/search/lcnaf
 *
 * TODO: make # of results to ask for (and return) configurable.
 *
 * @author Larry Stone
 * @version $Revision $
 */
public class LCNameAuthority implements ChoiceAuthority
{
    private static Logger log = Logger.getLogger(LCNameAuthority.class);

    // get these from configuration; loaded lazily by the first constructor call.
    private static String url = null;

    // NS URI for SRU respones
    private static final String NS_SRU = "http://www.loc.gov/zing/srw/";

    // NS URI for MARC/XML
    private static final String NS_MX = "http://www.loc.gov/MARC21/slim";

    // constructor does static init too..
    // @throws IllegalStateException if lcname.url is not configured
    public LCNameAuthority()
    {
        if (url == null)
        {
            url = ConfigurationManager.getProperty("lcname.url");

            // sanity check
            if (url == null)
            {
                throw new IllegalStateException("Missing DSpace configuration keys for LCName Query");
            }
        }
    }

    // punt!  this is a poor implementation..
    // Best-match is just a regular query limited to 2 hits (enough to tell
    // "unique" from "ambiguous").
    public Choices getBestMatch(String field, String text, int collection, String locale)
    {
        return getMatches(field, text, collection, 0, 2, locale);
    }

    /**
     * Match a proposed value against name authority records
     * Value is assumed to be in "Lastname, Firstname" format.
     */
    public Choices getMatches(String field, String text, int collection, int start, int limit, String locale)
    {
        Choices result = queryPerson(text, start, limit);
        if (result == null)
        {
            result = new Choices(true);
        }
        return result;
    }

    // punt; supposed to get the canonical display form of a metadata authority key
    // XXX FIXME implement this with a query on the authority key, cache results
    public String getLabel(String field, String key, String locale)
    {
        return key;
    }

    /**
     * Guts of the implementation, returns a complete Choices result, or
     * null for a failure.
     *
     * NOTE(review): despite the doc above, every failure path below actually
     * returns new Choices(true) (an error result), never null -- confirm
     * which contract callers rely on.
     *
     * @param text user-entered name, parsed as "Lastname, Firstname"
     * @param start zero-based index of the first record to request
     * @param limit maximum records to request; 0 means the default of 50
     */
    private Choices queryPerson(String text, int start, int limit)
    {
        // punt if there is no query text
        if (text == null || text.trim().length() == 0)
        {
            return new Choices(true);
        }

        // 1. build CQL query
        DCPersonName pn = new DCPersonName(text);
        StringBuilder query = new StringBuilder();
        query.append("local.FirstName = \"").append(pn.getFirstNames()).
              append("\" and local.FamilyName = \"").append(pn.getLastName()).
              append("\"");

        // XXX arbitrary default limit - should be configurable?
        if (limit == 0)
        {
            limit = 50;
        }

        // Standard SRU searchRetrieve parameters; startRecord is 1-based.
        NameValuePair args[] = new NameValuePair[6];
        args[0] = new NameValuePair("operation", "searchRetrieve");
        args[1] = new NameValuePair("version", "1.1");
        args[2] = new NameValuePair("recordSchema", "info:srw/schema/1/marcxml-v1.1");
        args[3] = new NameValuePair("query", query.toString());
        args[4] = new NameValuePair("maximumRecords", String.valueOf(limit));
        args[5] = new NameValuePair("startRecord", String.valueOf(start+1));
        HttpClient hc = new HttpClient();
        // "UTF8" is the HttpClient charset-name spelling for UTF-8 encoding.
        String srUrl = url + "?" + EncodingUtil.formUrlEncode(args, "UTF8");
        GetMethod get = new GetMethod(srUrl);
        log.debug("Trying SRU query, URL="+srUrl);

        // 2. web request
        try
        {
            int status = hc.executeMethod(get);
            if (status == 200)
            {
                SAXParserFactory spf = SAXParserFactory.newInstance();
                SAXParser sp = spf.newSAXParser();
                XMLReader xr = sp.getXMLReader();
                SRUHandler handler = new SRUHandler();

                // XXX FIXME: should turn off validation here explicitly, but
                //  it seems to be off by default.
                xr.setFeature("http://xml.org/sax/features/namespaces", true);
                xr.setContentHandler(handler);
                xr.setErrorHandler(handler);
                xr.parse(new InputSource(get.getResponseBodyAsStream()));

                // this probably just means more results available..
                if (handler.hits != handler.result.size())
                {
                    log.warn("Discrepency in results, result.length=" + handler.result.size() +
                            ", yet expected results=" + handler.hits);
                }
                boolean more = handler.hits > (start + handler.result.size());

                // XXX add non-auth option; perhaps the UI should do this?
                // XXX it's really a policy matter if they allow unauth result.
                // XXX good, stop it.
                // handler.result.add(new Choice("", text, "Non-Authority: \""+text+"\""));

                // Map the hit count onto a confidence level: none, one, many.
                int confidence;
                if (handler.hits == 0)
                {
                    confidence = Choices.CF_NOTFOUND;
                }
                else if (handler.hits == 1)
                {
                    confidence = Choices.CF_UNCERTAIN;
                }
                else
                {
                    confidence = Choices.CF_AMBIGUOUS;
                }
                return new Choices(handler.result.toArray(new Choice[handler.result.size()]),
                                   start, handler.hits, confidence, more);
            }
        }
        catch (HttpException e)
        {
            log.error("SRU query failed: ", e);
            return new Choices(true);
        }
        catch (IOException e)
        {
            log.error("SRU query failed: ", e);
            return new Choices(true);
        }
        catch (ParserConfigurationException  e)
        {
            log.warn("Failed parsing SRU result: ", e);
            return new Choices(true);
        }
        catch (SAXException  e)
        {
            log.warn("Failed parsing SRU result: ", e);
            return new Choices(true);
        }
        finally
        {
            // Always release the HTTP connection back to the client.
            get.releaseConnection();
        }
        // Reached only when the server answered with a non-200 status.
        return new Choices(true);
    }

    /**
     * XXX FIXME TODO: Very sloppy MARC/XML parser.
     * This only reads subfields 010.a (for LCCN, to use as key)
     * and 100.a (for "established personal name")
     * Maybe look at Indicator on 100 too.
     * Should probably read other 100 subfields to build a more detailed label.
     */
    private static class SRUHandler
        extends DefaultHandler
    {
        // Accumulated results; read by queryPerson() after parse completes.
        private List<Choice> result = new ArrayList<Choice>();
        private int hits = -1;
        private String textValue = null;
        private String name = null;
        private String lccn = null;

        // tag/code of the most recently opened MARC datafield/subfield,
        // recorded in startElement() and consumed in endElement().
        private String lastTag = null;
        private String lastCode = null;

        // NOTE:  text value MAY be presented in multiple calls, even if
        //  it all one word, so be ready to splice it together.
        // BEWARE:  subclass's startElement method should call super()
        //  to null out 'value'.  (Don't you miss the method combination
        //  options of a real object system like CLOS?)
        public void characters(char[] ch, int start, int length)
            throws SAXException
        {
            String newValue = new String(ch, start, length);
            if (newValue.length() > 0)
            {
                if (textValue == null)
                {
                    textValue = newValue;
                }
                else
                {
                    textValue += newValue;
                }
            }
        }

        public void endElement(String namespaceURI, String localName,
                                 String qName)
            throws SAXException
        {
            if (localName.equals("numberOfRecords") &&
                namespaceURI.equals(NS_SRU))
            {
                hits = Integer.parseInt(textValue.trim());
                if (hits > 0)
                {
                    name = null;
                    lccn = null;
                    log.debug("Expecting "+hits+" records in results.");
                }
            }

            // after record get next hit ready
            else if (localName.equals("record") &&
                     namespaceURI.equals(NS_SRU))
            {
                // A record only becomes a Choice if BOTH the name (100.a)
                // and the LCCN (010.a) were seen inside it.
                if (name != null && lccn != null)
                {
                    // HACK: many LC name entries end with ',' ...trim it.
                    if (name.endsWith(","))
                    {
                        name = name.substring(0, name.length() - 1);
                    }

                    // XXX DEBUG
                    // log.debug("Got result, name="+name+", lccn="+lccn);
                    result.add(new Choice(lccn, name, name));
                }
                else
                {
                    log.warn("Got anomalous result, at least one of these null: lccn=" + lccn + ", name=" + name);
                }
                name = null;
                lccn = null;
            }
            else if (localName.equals("subfield") && namespaceURI.equals(NS_MX))
            {
                if (lastTag != null && lastCode != null)
                {
                    if (lastTag.equals("010") && lastCode.equals("a"))
                    {
                        // 010.a is lccn, "authority code"
                        lccn = textValue;
                    }
                    else if (lastTag.equals("100") && lastCode.equals("a"))
                    {
                        // 100.a is the personal name
                        name = textValue;
                    }
                    // 100.d (life dates) is appended to the name -- only
                    // effective if 100.a already arrived, i.e. relies on
                    // subfield order within the datafield.
                    if (lastTag.equals("100") && lastCode.equals("d") && (name != null))
                    {
                        name = name + " " + textValue;
                    }
                }
            }
        }

        // subclass overriding this MUST call it with super()
        public void startElement(String namespaceURI, String localName,
                                 String qName, Attributes atts)
            throws SAXException
        {
            // Reset the accumulator; characters() splices text back together.
            textValue = null;
            if (localName.equals("datafield") &&
                namespaceURI.equals(NS_MX))
            {
                lastTag = atts.getValue("tag");
                if (lastTag == null)
                {
                    log.warn("MARC datafield without tag attribute!");
                }
            }
            else if (localName.equals("subfield") &&
                namespaceURI.equals(NS_MX))
            {
                lastCode = atts.getValue("code");
                if (lastCode == null)
                {
                    log.warn("MARC subfield without code attribute!");
                }
            }
        }

        // Escalate recoverable parse errors to hard failures so queryPerson()
        // reports them instead of returning partial results.
        public void error(SAXParseException exception)
            throws SAXException
        {
            throw new SAXException(exception);
        }

        public void fatalError(SAXParseException exception)
            throws SAXException
        {
            throw new SAXException(exception);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import org.apache.commons.httpclient.NameValuePair;
/**
 * Sample Publisher name authority based on SHERPA/RoMEO
 *
 *
 * WARNING: This is a very crude and incomplete implementation, done mainly
 * as a proof-of-concept. Any site that actually wants to use it will
 * probably have to refine it (and give patches back to dspace.org).
 *
 * @see SHERPARoMEOProtocol
 * @author Larry Stone
 * @version $Revision $
 */
public class SHERPARoMEOPublisher extends SHERPARoMEOProtocol
{
    // RoMEO XML element names: one result record and its display label.
    private static final String RESULT = "publisher";
    private static final String LABEL = "name";

    // note: the publisher records have nothing we can use as authority code.
    private static final String AUTHORITY = null;

    public SHERPARoMEOPublisher()
    {
        super();
    }

    /**
     * Query SHERPA/RoMEO for publisher names matching the given text.
     * An empty or null query yields an empty (error) Choices result.
     */
    public Choices getMatches(String text, int collection, int start, int limit, String locale)
    {
        // Punt if there is no query text.
        if (text == null || text.trim().isEmpty())
        {
            return new Choices(true);
        }

        // Query args to add to the SHERPA/RoMEO request URL.
        NameValuePair[] args = {
            new NameValuePair("pub", text),
            new NameValuePair("qtype", "all") // OR: starts, exact
        };

        Choices result = query(RESULT, LABEL, AUTHORITY, args, start, limit);
        // A null result means the query failed; report it as an error result.
        return (result == null) ? new Choices(true) : result;
    }

    @Override
    public Choices getMatches(String field, String text, int collection, int start, int limit, String locale)
    {
        // The field name is irrelevant here: this authority serves one field.
        return getMatches(text, collection, start, limit, locale);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.Iterator;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.core.SelfNamedPlugin;
/**
 * ChoiceAuthority source that reads the same input-forms which drive
 * configurable submission.
 *
 * Configuration:
 *   This MUST be configured as a self-named plugin, e.g.:
 *     plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
 *        org.dspace.content.authority.DCInputAuthority
 *
 * It AUTOMATICALLY configures a plugin instance for each <value-pairs>
 * element (within <form-value-pairs>) of the input-forms.xml.  The name
 * of the instance is the "value-pairs-name" attribute, e.g.
 * the element: <value-pairs value-pairs-name="common_types" dc-term="type">
 * defines a plugin instance "common_types".
 *
 * IMPORTANT NOTE: Since these value-pairs do NOT include authority keys,
 * the choice lists derived from them do not include authority values.
 * So you should not use them as the choice source for authority-controlled
 * fields.
 */
public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
{
    private static Logger log = Logger.getLogger(DCInputAuthority.class);

    // per-instance choice data, loaded lazily by init() from the
    // value-pairs list named after this plugin instance
    private String values[] = null;
    private String labels[] = null;

    // shared reader of input-forms.xml, created once in initPluginNames()
    private static DCInputsReader dci = null;
    private static String pluginNames[] = null;

    public DCInputAuthority()
    {
        super();
    }

    /**
     * Returns the self-named plugin instance names, one per <value-pairs>
     * element in input-forms.xml. Returns a defensive copy of the cached array.
     */
    public static String[] getPluginNames()
    {
        if (pluginNames == null)
        {
            initPluginNames();
        }
        return (String[]) ArrayUtils.clone(pluginNames);
    }

    // One-time load of the DCInputsReader and the value-pairs names.
    private static synchronized void initPluginNames()
    {
        if (pluginNames == null)
        {
            try
            {
                if (dci == null)
                {
                    dci = new DCInputsReader();
                }
            }
            catch (DCInputsReaderException e)
            {
                log.error("Failed reading DCInputs initialization: ",e);
            }
            // NOTE(review): if the DCInputsReader constructor threw above,
            // 'dci' is still null here and the next line throws
            // NullPointerException -- the catch only logs, it does not abort.
            List<String> names = new ArrayList<String>();
            Iterator pi = dci.getPairsNameIterator();
            while (pi.hasNext())
            {
                names.add((String)pi.next());
            }
            pluginNames = names.toArray(new String[names.size()]);
            log.debug("Got plugin names = "+Arrays.deepToString(pluginNames));
        }
    }

    // once-only load of values and labels
    // The pairs list alternates: even indices are labels, odd indices values.
    private void init()
    {
        if (values == null)
        {
            String pname = this.getPluginInstanceName();
            List<String> pairs = dci.getPairs(pname);
            if (pairs != null)
            {
                values = new String[pairs.size()/2];
                labels = new String[pairs.size()/2];
                for (int i = 0; i < pairs.size(); i += 2)
                {
                    labels[i/2] = pairs.get(i);
                    values[i/2] = pairs.get(i+1);
                }
                log.debug("Found pairs for name="+pname);
            }
            else
            {
                log.error("Failed to find any pairs for name=" + pname, new IllegalStateException());
            }
        }
    }

    /**
     * Returns ALL configured choices regardless of the query; the choice
     * whose value equals the query (case-insensitive) becomes the
     * defaultSelected index (-1 if none). Authority key is the value itself.
     */
    public Choices getMatches(String field, String query, int collection, int start, int limit, String locale)
    {
        init();

        int dflt = -1;
        Choice v[] = new Choice[values.length];
        for (int i = 0; i < values.length; ++i)
        {
            v[i] = new Choice(values[i], values[i], labels[i]);
            if (values[i].equalsIgnoreCase(query))
            {
                dflt = i;
            }
        }
        return new Choices(v, 0, v.length, Choices.CF_AMBIGUOUS, false, dflt);
    }

    /**
     * Returns the single choice whose value matches the text
     * (case-insensitive), or CF_NOTFOUND.
     *
     * NOTE(review): the authority key used here is the array INDEX
     * (String.valueOf(i)), while getMatches() uses the value itself --
     * these two key schemes are inconsistent; getLabel() below expects
     * the index form. Confirm which scheme stored records rely on.
     */
    public Choices getBestMatch(String field, String text, int collection, String locale)
    {
        init();
        for (int i = 0; i < values.length; ++i)
        {
            if (text.equalsIgnoreCase(values[i]))
            {
                Choice v[] = new Choice[1];
                v[0] = new Choice(String.valueOf(i), values[i], labels[i]);
                return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0);
            }
        }
        return new Choices(Choices.CF_NOTFOUND);
    }

    // Interprets the key as an integer array index (see getBestMatch);
    // throws NumberFormatException for non-numeric keys.
    public String getLabel(String field, String key, String locale)
    {
        init();
        return labels[Integer.parseInt(key)];
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
 * Serializes a {@link Choices} result set as SAX events, in one of three
 * formats selected by the {@code format} argument:
 * <ul>
 *   <li>"select" -- an XHTML select/option fragment (DSpace popup);</li>
 *   <li>"ul" -- an XHTML ul/li fragment (Scriptaculous autocomplete);</li>
 *   <li>anything else -- plain Choices/Choice XML (the default).</li>
 * </ul>
 *
 * @author Larry Stone
 * @see Choice
 * @see Choices
 */
public class ChoicesXMLGenerator
{
    // use the XHTML NS, even though this is a fragment.
    private static final String NS_URI = "http://www.w3.org/1999/xhtml";
    private static final String NS_NAME = "";

    /**
     * Write the given result set to the content handler as a complete
     * document (startDocument/endDocument are emitted here).
     *
     * @param result the choices to serialize
     * @param format "select", "ul" (matched case-insensitively), or any
     *               other value for the default Choices/Choice XML
     * @param contentHandler receiver of the SAX events
     * @throws SAXException passed through from the content handler
     */
    public static void generate(Choices result, String format, ContentHandler contentHandler)
        throws SAXException
    {
        // Attributes common to the root element of every format.
        AttributesImpl resultAtts = new AttributesImpl();
        if (result.more)
        {
            resultAtts.addAttribute("", "more", "more", "boolean", "true");
        }
        if (result.isError())
        {
            resultAtts.addAttribute("", "error", "error", "boolean", "true");
        }
        resultAtts.addAttribute("", "start", "start", "int", String.valueOf(result.start));
        resultAtts.addAttribute("", "total", "total", "int", String.valueOf(result.total));

        contentHandler.startDocument();
        if (format != null && format.equalsIgnoreCase("select"))
        {
            // "select" HTML format for DSpace popup
            generateSimpleList(result, contentHandler, resultAtts, "select", "option");
        }
        else if (format != null && format.equalsIgnoreCase("ul"))
        {
            // "ul" HTML format (required by Scriptaculous autocomplete)
            generateUnorderedList(result, contentHandler, resultAtts);
        }
        else
        {
            // default is XML format, Choices/Choice
            generateSimpleList(result, contentHandler, resultAtts, "Choices", "Choice");
        }
        contentHandler.endDocument();
    }

    /**
     * Emit one element per choice, carrying authority/value/selected as
     * attributes and the label as text content.  Shared by the "select"
     * and default XML formats, which differ only in element names.
     */
    private static void generateSimpleList(Choices result, ContentHandler contentHandler,
            AttributesImpl resultAtts, String container, String element)
        throws SAXException
    {
        contentHandler.startElement(NS_URI, NS_NAME, container, resultAtts);
        for (int i = 0; i < result.values.length; ++i)
        {
            Choice mdav = result.values[i];
            AttributesImpl va = choiceAttributes(mdav, result.defaultSelected == i, true);
            contentHandler.startElement(NS_URI, NS_NAME, element, va);
            emitText(contentHandler, mdav.label);
            contentHandler.endElement(NS_URI, NS_NAME, element);
        }
        contentHandler.endElement(NS_URI, NS_NAME, container);
    }

    /**
     * Emit the "ul" format: each choice is an li holding separate label
     * and value spans (distinguished by their class attribute).
     */
    private static void generateUnorderedList(Choices result, ContentHandler contentHandler,
            AttributesImpl resultAtts)
        throws SAXException
    {
        AttributesImpl classLabel = new AttributesImpl();
        classLabel.addAttribute("", "class", "class", "string", "label");
        AttributesImpl classValue = new AttributesImpl();
        classValue.addAttribute("", "class", "class", "string", "value");
        contentHandler.startElement(NS_URI, NS_NAME, "ul", resultAtts);
        for (int i = 0; i < result.values.length; ++i)
        {
            Choice mdav = result.values[i];
            // No "value" attribute here: the value travels in a nested span.
            AttributesImpl va = choiceAttributes(mdav, result.defaultSelected == i, false);
            contentHandler.startElement(NS_URI, NS_NAME, "li", va);
            contentHandler.startElement(NS_URI, NS_NAME, "span", classLabel);
            emitText(contentHandler, mdav.label);
            contentHandler.endElement(NS_URI, NS_NAME, "span");
            contentHandler.startElement(NS_URI, NS_NAME, "span", classValue);
            emitText(contentHandler, mdav.value);
            contentHandler.endElement(NS_URI, NS_NAME, "span");
            contentHandler.endElement(NS_URI, NS_NAME, "li");
        }
        contentHandler.endElement(NS_URI, NS_NAME, "ul");
    }

    /**
     * Build the per-choice attribute set shared by all formats:
     * authority (empty string when the choice has none), optionally the
     * value, and a marker when this choice is the default selection.
     */
    private static AttributesImpl choiceAttributes(Choice mdav, boolean selected, boolean includeValue)
    {
        AttributesImpl va = new AttributesImpl();
        va.addAttribute("", "authority", "authority", "string", mdav.authority == null ? "" : mdav.authority);
        if (includeValue)
        {
            va.addAttribute("", "value", "value", "string", mdav.value);
        }
        if (selected)
        {
            va.addAttribute("", "selected", "selected", "boolean", "");
        }
        return va;
    }

    /** Send a string as SAX character data. */
    private static void emitText(ContentHandler contentHandler, String text)
        throws SAXException
    {
        contentHandler.characters(text.toCharArray(), 0, text.length());
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import org.apache.commons.lang.ArrayUtils;
/**
 * Record class to hold a set of Choices returned by an authority in response
 * to a search.
 *
 * @author Larry Stone
 * @see Choice
 */
public class Choices
{
    /** -------------- Class fields ----------------- **/

    /** Canonical values of the confidence metric. Higher is better. */

    /** This authority value has been confirmed as accurate by an
        interactive user or authoritative policy */
    public static final int CF_ACCEPTED = 600;

    /** Value is singular and valid but has not been seen and accepted
        by a human, so its provenance is uncertain. */
    public static final int CF_UNCERTAIN = 500;

    /** There are multiple matching authority values of equal validity. */
    public static final int CF_AMBIGUOUS = 400;

    /** There are no matching answers from the authority. */
    public static final int CF_NOTFOUND = 300;

    /** The authority encountered an internal failure - this preserves a
        record in the metadata of why there is no value. */
    public static final int CF_FAILED = 200;

    /** The authority recommends this submission be rejected. */
    public static final int CF_REJECTED = 100;

    /** No reasonable confidence value is available */
    public static final int CF_NOVALUE = 0;

    /** Value has not been set (DB default). */
    public static final int CF_UNSET = -1;

    /** Confidence values, kept strictly parallel to confidenceText below
        so the two arrays translate between value and symbolic name. */
    private static final int confidenceValue[] = {
        CF_UNSET,
        CF_NOVALUE,
        CF_REJECTED,
        CF_FAILED,
        CF_NOTFOUND,
        CF_AMBIGUOUS,
        CF_UNCERTAIN,
        CF_ACCEPTED,
    };

    /** Symbolic names for confidence values, parallel to confidenceValue. */
    private static final String confidenceText[] = {
        "UNSET",
        "NOVALUE",
        "REJECTED",
        "FAILED",
        "NOTFOUND",
        "AMBIGUOUS",
        "UNCERTAIN",
        "ACCEPTED"
    };

    /** -------------- Instance fields ----------------- **/

    /** The set of values returned by the authority */
    public Choice values[] = null;

    /** The confidence level that applies to all values in this result set */
    public int confidence = CF_NOVALUE;

    /** Index of start of this result wrt. all results; 0 is start of
        complete result. Note that length is implicit in values.length. */
    public int start = 0;

    /** Count of total results available */
    public int total = 0;

    /** Index of value to be selected by default, if any. -1 means none selected. */
    public int defaultSelected = -1;

    /** true when there are more values to be sent after this result. */
    public boolean more = false;

    /** -------------- Methods ----------------- **/

    /**
     * Constructor for general purpose
     */
    public Choices(Choice values[], int start, int total, int confidence, boolean more)
    {
        super();
        this.values = (Choice[]) ArrayUtils.clone(values);
        this.start = start;
        this.total = total;
        this.confidence = confidence;
        this.more = more;
    }

    /**
     * Constructor for general purpose, additionally naming the index of
     * the value to pre-select.
     */
    public Choices(Choice values[], int start, int total, int confidence, boolean more, int defaultSelected)
    {
        super();
        this.values = (Choice[]) ArrayUtils.clone(values);
        this.start = start;
        this.total = total;
        this.confidence = confidence;
        this.more = more;
        this.defaultSelected = defaultSelected;
    }

    /**
     * Constructor for error results: no values, just a confidence code.
     */
    public Choices(int confidence)
    {
        this.values = new Choice[0];
        this.confidence = confidence;
    }

    /**
     * Constructor for simple empty or error results
     */
    public Choices(boolean isError)
    {
        this.values = new Choice[0];
        this.confidence = isError ? CF_FAILED : CF_NOVALUE;
    }

    /**
     * Predicate, did this result encounter an error?
     * @return true if this Choices result encountered an error
     */
    public boolean isError()
    {
        return confidence == CF_FAILED || confidence == CF_REJECTED;
    }

    /**
     * Get the symbolic name corresponding to a confidence value, or CF_NOVALUE's
     * name if the value is unknown.
     *
     * @param cv confidence value
     * @return String with symbolic name corresponding to value (never null)
     */
    public static String getConfidenceText(int cv)
    {
        String novalue = null;
        for (int i = 0; i < confidenceValue.length; ++i)
        {
            if (confidenceValue[i] == cv)
            {
                return confidenceText[i];
            }
            else if (confidenceValue[i] == CF_NOVALUE)
            {
                // remember NOVALUE's name as the fallback for unknown values
                novalue = confidenceText[i];
            }
        }
        return novalue;
    }

    /**
     * Get the value corresponding to a symbolic name of a confidence
     * value, or CF_NOVALUE if the symbol is unknown.
     *
     * @param ct symbolic name in String
     * @return corresponding value or CF_NOVALUE if not found
     */
    public static int getConfidenceValue(String ct)
    {
        return getConfidenceValue(ct, CF_NOVALUE);
    }

    /**
     * Get the value corresponding to a symbolic name of a confidence
     * value, or the given default if the symbol is unknown. This
     * lets an application detect invalid data, e.g. in a configuration file.
     *
     * @param ct symbolic name in String
     * @param dflt the default value to return
     * @return corresponding value, or {@code dflt} if the name is not found
     */
    public static int getConfidenceValue(String ct, int dflt)
    {
        for (int i = 0; i < confidenceText.length; ++i)
        {
            if (confidenceText[i].equalsIgnoreCase(ct))
            {
                return confidenceValue[i];
            }
        }
        return dflt;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.List;
/**
 * Mix-in interface for authorities that can expose the alternate
 * (variant) forms recorded for an authority key.
 *
 * @author bollini
 */
public interface AuthorityVariantsSupport {
    /**
     * Get the variant forms of the value identified by the given
     * authority key.
     *
     * @param key authority key identifying the value
     * @param locale explicit localization to use
     *               (presumably null means the default -- confirm with implementations)
     * @return list of variant strings for the key
     */
    public List<String> getVariants(String key, String locale);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
/**
 * This is a *very* stupid test fixture for authority control, and also
 * serves as a trivial example of an authority plugin implementation.
 * The authority's vocabulary is simply the seven days of the week: the
 * key is the array index, the value is the abbreviation, and the label
 * is the full day name.
 */
public class SampleAuthority implements ChoiceAuthority
{
    /** Stored values: weekday abbreviations, indexed by key. */
    private static String values[] = {
        "sun",
        "mon",
        "tue",
        "wed",
        "thu",
        "fri",
        "sat"
    };

    /** Display labels, parallel to values. */
    private static String labels[] = {
        "Sunday",
        "Monday",
        "Tuesday",
        "Wednesday",
        "Thursday",
        "Friday",
        "Saturday"
    };

    /**
     * Always offer the whole week; if the query matches an abbreviation
     * (case-insensitively), mark that entry as the default selection.
     */
    public Choices getMatches(String field, String query, int collection, int start, int limit, String locale)
    {
        Choice[] options = new Choice[values.length];
        int selected = -1;
        for (int idx = 0; idx < options.length; idx++)
        {
            options[idx] = new Choice(String.valueOf(idx), values[idx], labels[idx]);
            if (values[idx].equalsIgnoreCase(query))
            {
                selected = idx;
            }
        }
        return new Choices(options, 0, options.length, Choices.CF_AMBIGUOUS, false, selected);
    }

    /**
     * Return the single matching day (uncertain confidence), or a
     * NOTFOUND result when the text matches no abbreviation.
     */
    public Choices getBestMatch(String field, String text, int collection, String locale)
    {
        int found = -1;
        for (int idx = 0; found < 0 && idx < values.length; idx++)
        {
            if (text.equalsIgnoreCase(values[idx]))
            {
                found = idx;
            }
        }
        if (found < 0)
        {
            return new Choices(Choices.CF_NOTFOUND);
        }
        Choice match[] = { new Choice(String.valueOf(found), values[found], labels[found]) };
        return new Choices(match, 0, match.length, Choices.CF_UNCERTAIN, false, 0);
    }

    /** Resolve a key (stringified array index) to its display label. */
    public String getLabel(String field, String key, String locale)
    {
        int idx = Integer.parseInt(key);
        return labels[idx];
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.text.DateFormatSymbols;
import java.text.SimpleDateFormat;
import java.text.ParseException;
import java.util.*;
import org.apache.log4j.Logger;
// FIXME: Not very robust - assumes dates will always be valid
/**
* Dublin Core date utility class
* <P>
* Dates in the DSpace database are held in the ISO 8601 format. They are always
* stored in UTC, converting to and from the current time zone. In practice only dates
* with a time component need to be converted.
* <P>
* <code>YYYY-MM-DDThh:mm:ss</code>
* <P>
* There are four levels of granularity, depending on how much date information
* is available: year, month, day, time.
* <P>
* Examples: <code>1994-05-03T15:30:24</code>,<code>1995-10-04</code>,
* <code>2001-10</code>,<code>1975</code>
*
* @author Robert Tansley
* @author Larry Stone
* @version $Revision: 5911 $
*/
public class DCDate
{
    /** Logger */
    private static Logger log = Logger.getLogger(DCDate.class);

    // UTC timezone
    private static final TimeZone utcZone = TimeZone.getTimeZone("UTC");

    // components of time in UTC
    private GregorianCalendar calendar = null;

    // components of time in local zone
    private GregorianCalendar localCalendar = null;

    /** How much of the date is known: just the year, down to the time of day. */
    private enum DateGran { YEAR, MONTH, DAY, TIME }

    DateGran granularity = null;

    // Full ISO 8601 is e.g. "2009-07-16T13:59:21Z"
    private final SimpleDateFormat fullIso = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");

    // without Z
    private final SimpleDateFormat fullIso2 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    // without seconds
    private final SimpleDateFormat fullIso3 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm");

    // without minutes
    private final SimpleDateFormat fullIso4 = new SimpleDateFormat("yyyy-MM-dd'T'HH");

    // Date-only ISO 8601 is e.g. "2009-07-16"
    private final SimpleDateFormat dateIso = new SimpleDateFormat("yyyy-MM-dd");

    // Year-Month-only ISO 8601 is e.g. "2009-07"
    private final SimpleDateFormat yearMonthIso = new SimpleDateFormat("yyyy-MM");

    // just year, "2009"
    private final SimpleDateFormat yearIso = new SimpleDateFormat("yyyy");

    // Shared cache of DateFormatSymbols per locale, used by getMonthName().
    // Wrapped in a synchronized view because the cache is static and
    // getMonthName() may be called concurrently; a bare HashMap is unsafe
    // under concurrent put/get.  A check-then-put race merely recomputes a
    // DateFormatSymbols, which is harmless.
    private static Map<Locale, DateFormatSymbols> dfsLocaleMap =
            Collections.synchronizedMap(new HashMap<Locale, DateFormatSymbols>());

    /**
     * Construct a date object from a Java <code>Date</code> object.
     *
     * @param date
     *            the Java <code>Date</code> object.
     */
    public DCDate(Date date)
    {
        setUTCForFormatting();
        if (date == null)
        {
            return;
        }

        // By definition a Date has a time component so always set the granularity to TIME.
        granularity = DateGran.TIME;

        // Set the local calendar.
        localCalendar = new GregorianCalendar();
        localCalendar.setTime(date);

        // Now set the UTC equivalent.
        calendar = new GregorianCalendar(utcZone);
        calendar.setTime(date);
    }

    /**
     * Construct a date object from a bunch of component parts. The date passed in is assumed to be in the current
     * time zone. Unknown values should be given as -1.
     *
     * @param yyyy
     *            the year
     * @param mm
     *            the month
     * @param dd
     *            the day
     * @param hh
     *            the hours
     * @param mn
     *            the minutes
     * @param ss
     *            the seconds
     */
    public DCDate(int yyyy, int mm, int dd, int hh, int mn, int ss)
    {
        setUTCForFormatting();

        // default values
        int lyear = 0;
        int lhours = 0;
        int lminutes = 0;
        int lseconds = 0;
        int lmonth = 1;
        int lday = 1;

        // Each supplied component widens the granularity; the checks run in
        // order so the finest-grained component supplied wins.
        if (yyyy > 0)
        {
            lyear = yyyy;
            granularity = DateGran.YEAR;
        }
        if (mm > 0)
        {
            lmonth = mm;
            granularity = DateGran.MONTH;
        }
        if (dd > 0)
        {
            lday = dd;
            granularity = DateGran.DAY;
        }
        if (hh >= 0)
        {
            lhours = hh;
            granularity = DateGran.TIME;
        }
        if (mn >= 0)
        {
            lminutes = mn;
            granularity = DateGran.TIME;
        }
        if (ss >= 0)
        {
            lseconds = ss;
            granularity = DateGran.TIME;
        }

        // Set the local calendar.
        localCalendar = new GregorianCalendar(lyear, lmonth - 1, lday,
                lhours, lminutes, lseconds);

        if (granularity == DateGran.TIME)
        {
            // Now set the UTC equivalent.
            calendar = new GregorianCalendar(utcZone);
            calendar.setTime(localCalendar.getTime());
        }
        else
        {
            // No Time component so just set the UTC date to be the same as the local Year, Month, and Day.
            calendar = new GregorianCalendar(localCalendar.get(Calendar.YEAR), localCalendar.get(Calendar.MONTH), localCalendar.get(Calendar.DAY_OF_MONTH));
        }
    }

    /**
     * Construct a date from a Dublin Core value
     *
     * @param fromDC
     *            the date string, in ISO 8601 (no timezone, always use UTC)
     */
    public DCDate(String fromDC)
    {
        setUTCForFormatting();

        // An empty date is OK
        if ((fromDC == null) || fromDC.equals(""))
        {
            return;
        }

        // default granularity
        granularity = DateGran.TIME;

        // Try the formats from most to least specific; the first one that
        // parses fixes the granularity.
        Date date = tryParse(fullIso, fromDC);
        if (date == null)
        {
            date = tryParse(fullIso2, fromDC);
        }
        if (date == null)
        {
            date = tryParse(fullIso3, fromDC);
        }
        if (date == null)
        {
            date = tryParse(fullIso4, fromDC);
        }
        if (date == null)
        {
            // Seems there is no time component to the date.
            date = tryParse(dateIso, fromDC);
            if (date != null)
            {
                granularity = DateGran.DAY;
            }
        }
        if (date == null)
        {
            date = tryParse(yearMonthIso, fromDC);
            if (date != null)
            {
                granularity = DateGran.MONTH;
            }
        }
        if (date == null)
        {
            date = tryParse(yearIso, fromDC);
            if (date != null)
            {
                granularity = DateGran.YEAR;
            }
        }

        if (date == null)
        {
            log.warn("Mangled date: " + fromDC + "  ..failed all attempts to parse as date.");
        }
        else
        {
            // Set the UTC time.
            calendar = new GregorianCalendar(utcZone);
            calendar.setTime(date);

            // Now set the local equivalent.
            if (granularity == DateGran.TIME)
            {
                localCalendar = new GregorianCalendar();
                localCalendar.setTime(date);
            }
            else
            {
                // No Time component so just set the local date to be the same as the UTC Year, Month, and Day.
                localCalendar = new GregorianCalendar(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH));
            }
        }
    }

    /**
     * Set all the formatters to use UTC. SimpleDateFormat is not thread-safe which
     * is why they are not static variables initialised once.
     */
    private void setUTCForFormatting()
    {
        fullIso.setTimeZone(utcZone);
        fullIso2.setTimeZone(utcZone);
        fullIso3.setTimeZone(utcZone);
        fullIso4.setTimeZone(utcZone);
        dateIso.setTimeZone(utcZone);
        yearMonthIso.setTimeZone(utcZone);
        yearIso.setTimeZone(utcZone);
    }

    // Attempt to parse, swallowing errors; return null for failure.
    private synchronized Date tryParse(SimpleDateFormat sdf, String source)
    {
        try
        {
            return sdf.parse(source);
        }
        catch (ParseException pe)
        {
            return null;
        }
    }

    /**
     * Get the year, adjusting for current time zone.
     *
     * @return the year, or -1 if not within this date's granularity
     */
    public int getYear()
    {
        return (!withinGranularity(DateGran.YEAR)) ? -1 : localCalendar.get(Calendar.YEAR);
    }

    /**
     * Get the month, adjusting for current time zone.
     *
     * @return the month (1-12), or -1 if not within this date's granularity
     */
    public int getMonth()
    {
        return (!withinGranularity(DateGran.MONTH)) ? -1 : localCalendar.get(Calendar.MONTH) + 1;
    }

    /**
     * Get the day, adjusting for current time zone.
     *
     * @return the day, or -1 if not within this date's granularity
     */
    public int getDay()
    {
        return (!withinGranularity(DateGran.DAY)) ? -1 : localCalendar.get(Calendar.DAY_OF_MONTH);
    }

    /**
     * Get the hour, adjusting for current time zone.
     *
     * @return the hour, or -1 if this date has no time component
     */
    public int getHour()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.HOUR_OF_DAY);
    }

    /**
     * Get the minute, adjusting for current time zone.
     *
     * @return the minute, or -1 if this date has no time component
     */
    public int getMinute()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.MINUTE);
    }

    /**
     * Get the second, adjusting for current time zone.
     *
     * @return the second, or -1 if this date has no time component
     */
    public int getSecond()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.SECOND);
    }

    /**
     * Get the year in UTC.
     *
     * @return the year, or -1 if not within this date's granularity
     */
    public int getYearUTC()
    {
        return (!withinGranularity(DateGran.YEAR)) ? -1 : calendar.get(Calendar.YEAR);
    }

    /**
     * Get the month in UTC.
     *
     * @return the month (1-12), or -1 if not within this date's granularity
     */
    public int getMonthUTC()
    {
        return (!withinGranularity(DateGran.MONTH)) ? -1 : calendar.get(Calendar.MONTH) + 1;
    }

    /**
     * Get the day in UTC.
     *
     * @return the day, or -1 if not within this date's granularity
     */
    public int getDayUTC()
    {
        return (!withinGranularity(DateGran.DAY)) ? -1 : calendar.get(Calendar.DAY_OF_MONTH);
    }

    /**
     * Get the hour in UTC.
     *
     * @return the hour, or -1 if this date has no time component
     */
    public int getHourUTC()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.HOUR_OF_DAY);
    }

    /**
     * Get the minute in UTC.
     *
     * @return the minute, or -1 if this date has no time component
     */
    public int getMinuteUTC()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.MINUTE);
    }

    /**
     * Get the second in UTC.
     *
     * @return the second, or -1 if this date has no time component
     */
    public int getSecondUTC()
    {
        return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.SECOND);
    }

    /**
     * Get the date as a string to put back in the Dublin Core. Use the UTC/GMT calendar version.
     *
     * @return The date as a string.
     */
    public String toString()
    {
        if (calendar == null)
        {
            return "null";
        }
        return toStringInternal();
    }

    // synchronized because fullIso (SimpleDateFormat) is not thread-safe
    private synchronized String toStringInternal()
    {
        if (granularity == DateGran.YEAR)
        {
            return String.format("%4d", getYearUTC());
        }
        else if (granularity == DateGran.MONTH)
        {
            return String.format("%4d-%02d", getYearUTC(), getMonthUTC());
        }
        else if (granularity == DateGran.DAY)
        {
            return String.format("%4d-%02d-%02d", getYearUTC(), getMonthUTC(), getDayUTC());
        }
        else
        {
            return fullIso.format(calendar.getTime());
        }
    }

    /**
     * Get the date as a Java Date object.
     *
     * @return a Date object, or null if this DCDate holds no date
     */
    public Date toDate()
    {
        if (calendar == null)
        {
            return null;
        }
        else
        {
            return calendar.getTime();
        }
    }

    /**
     * Format a human-readable version of the DCDate, with optional time.
     * This needs to be in DCDate because it depends on the granularity of
     * the original time.
     *
     * FIXME: This should probably be replaced with a localized DateFormat.
     *
     * @param showTime
     *            if true, display the time with the date
     * @param isLocalTime
     *            if true, adjust for local time zone, otherwise UTC
     * @param locale
     *            locale of the user
     *
     * @return String with the date in a human-readable form.
     */
    public String displayDate(boolean showTime, boolean isLocalTime, Locale locale)
    {
        if (isLocalTime)
        {
            return displayLocalDate(showTime, locale);
        }
        else
        {
            return displayUTCDate(showTime, locale);
        }
    }

    /** Human-readable form of the date in the local time zone. */
    public String displayLocalDate(boolean showTime, Locale locale)
    {
        // forcibly truncate month name to 3 chars -- XXX FIXME?
        String monthName = getMonthName(getMonth(), locale);
        if (monthName.length() > 2)
        {
            monthName = monthName.substring(0, 3);
        }

        // display date and time
        if (showTime && granularity == DateGran.TIME)
        {
            return String.format("%d-%s-%4d %02d:%02d:%02d", getDay(), monthName, getYear(), getHour(), getMinute(), getSecond());
        }
        else if (granularity == DateGran.YEAR)
        {
            return String.format("%4d", getYear());
        }
        else if (granularity == DateGran.MONTH)
        {
            return String.format("%s-%4d", monthName, getYear());
        }
        else
        {
            return String.format("%d-%s-%4d", getDay(), monthName, getYear());
        }
    }

    /** Human-readable form of the date in UTC. */
    public String displayUTCDate(boolean showTime, Locale locale)
    {
        // forcibly truncate month name to 3 chars -- XXX FIXME?
        String monthName = getMonthName(getMonthUTC(), locale);
        if (monthName.length() > 2)
        {
            monthName = monthName.substring(0, 3);
        }

        // display date and time
        if (showTime && granularity == DateGran.TIME)
        {
            return String.format("%d-%s-%4d %02d:%02d:%02d", getDayUTC(), monthName, getYearUTC(), getHourUTC(), getMinuteUTC(), getSecondUTC());
        }
        else if (granularity == DateGran.YEAR)
        {
            return String.format("%4d", getYearUTC());
        }
        else if (granularity == DateGran.MONTH)
        {
            return String.format("%s-%4d", monthName, getYearUTC());
        }
        else
        {
            return String.format("%d-%s-%4d", getDayUTC(), monthName, getYearUTC());
        }
    }

    /**
     * Test if the requested level of granularity is within that of the date.
     *
     * @param dg
     *            The requested level of granularity.
     * @return
     *            true or false.
     *
     */
    private boolean withinGranularity(DateGran dg)
    {
        if (granularity == DateGran.TIME)
        {
            if ((dg == DateGran.TIME) || (dg == DateGran.DAY) || (dg == DateGran.MONTH) || (dg == DateGran.YEAR))
            {
                return true;
            }
        }

        if (granularity == DateGran.DAY)
        {
            if ((dg == DateGran.DAY) || (dg == DateGran.MONTH) || (dg == DateGran.YEAR))
            {
                return true;
            }
        }

        if (granularity == DateGran.MONTH)
        {
            if ((dg == DateGran.MONTH) || (dg == DateGran.YEAR))
            {
                return true;
            }
        }

        if (granularity == DateGran.YEAR)
        {
            if (dg == DateGran.YEAR)
            {
                return true;
            }
        }

        return false;
    }

    /************** Some utility methods ******************/

    /**
     * Get a date representing the current instant in time.
     *
     * @return a DSpaceDate object representing the current instant.
     */
    public static DCDate getCurrent()
    {
        return (new DCDate(new Date()));
    }

    /**
     * Get a month's name for a month between 1 and 12. Any invalid month value
     * (e.g. 0 or -1) will return a value of "Unspecified".
     *
     * @param m
     *            the month number
     *
     * @param locale
     *
     * @return the month name.
     */
    public static String getMonthName(int m, Locale locale)
    {
        if ((m > 0) && (m < 13))
        {
            DateFormatSymbols dfs = dfsLocaleMap.get(locale);
            if (dfs == null)
            {
                dfs = new DateFormatSymbols(locale);
                dfsLocaleMap.put(locale, dfs);
            }
            return dfs.getMonths()[m-1];
        }
        else
        {
            return "Unspecified";
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Formatter;
import java.util.Locale;
import java.util.Map;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.license.FormattableArgument;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Utility class to manage generation and storing of the license text that the
 * submitter has to grant/granted for archiving the item
 *
 * @author bollini
 *
 */
public class LicenseUtils
{
    /**
     * Return the text of the license that the user has granted/must grant
     * before for submit the item. The license text is build using the template
     * defined for the collection if any or the wide site configuration. In the
     * license text the following substitution can be used. {0} the eperson
     * firstname<br>
     * {1} the eperson lastname<br>
     * {2} the eperson email<br>
     * {3} the current date<br>
     * {4} the collection object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {5} the item object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {6} the eperson object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {x} any addition argument supplied wrapped in the
     * LicenseArgumentFormatter based on his type (map key)
     *
     * @see LicenseArgumentFormatter
     * @param locale the locale used to format the license
     * @param collection the collection whose license template is used
     * @param item the item being licensed
     * @param eperson the submitter granting the license
     * @param additionalInfo optional extra template arguments, keyed by name; may be null
     * @return the license text obtained substituting the provided argument in
     *         the license template
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson, Map<String, Object> additionalInfo)
    {
        Formatter formatter = new Formatter(locale);

        // EPerson firstname, lastname, email and the current date are made
        // available as separate arguments to make it simpler to produce a
        // "traditional" text license; the collection, item and eperson
        // objects are also available (wrapped for pluggable formatting).
        int numArgs = 7 + (additionalInfo != null ? additionalInfo.size() : 0);
        Object[] args = new Object[numArgs];
        args[0] = eperson.getFirstName();
        args[1] = eperson.getLastName();
        args[2] = eperson.getEmail();
        args[3] = new java.util.Date();
        args[4] = new FormattableArgument("collection", collection);
        args[5] = new FormattableArgument("item", item);
        args[6] = new FormattableArgument("eperson", eperson);

        if (additionalInfo != null)
        {
            int i = 7; // Start is next index after previous args
            for (Map.Entry<String, Object> info : additionalInfo.entrySet())
            {
                args[i] = new FormattableArgument(info.getKey(), info.getValue());
                i++;
            }
        }

        String licenseTemplate = collection.getLicense();

        return formatter.format(licenseTemplate, args).toString();
    }

    /**
     * Utility method if no additional arguments has need to be supplied to the
     * license template. (i.e. call the full getLicenseText supplying
     * <code>null</code> for the additionalInfo argument)
     *
     * @param locale the locale used to format the license
     * @param collection the collection whose license template is used
     * @param item the item being licensed
     * @param eperson the submitter granting the license
     * @return the formatted license text
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson)
    {
        return getLicenseText(locale, collection, item, eperson, null);
    }

    /**
     * Store a copy of the license a user granted in the item, as a bitstream
     * named "license.txt" in the item's LICENSE bundle.
     *
     * @param context
     *            the dspace context
     * @param item
     *            the item object of the license
     * @param licenseText
     *            the license the user granted
     * @throws SQLException
     * @throws IOException
     * @throws AuthorizeException
     */
    public static void grantLicense(Context context, Item item,
            String licenseText) throws SQLException, IOException,
            AuthorizeException
    {
        // Put together text to store
        // String licenseText = "License granted by " + eperson.getFullName()
        // + " (" + eperson.getEmail() + ") on "
        // + DCDate.getCurrent().toString() + " (GMT):\n\n" + license;

        // Store text as a bitstream
        // NOTE(review): getBytes() uses the platform default charset here;
        // presumably UTF-8 is intended -- confirm before changing, since it
        // affects the bytes stored for every granted license.
        byte[] licenseBytes = licenseText.getBytes();
        ByteArrayInputStream bais = new ByteArrayInputStream(licenseBytes);
        Bitstream b = item.createSingleBitstream(bais, "LICENSE");

        // Now set the format and name of the bitstream
        b.setName("license.txt");
        b.setSource("Written by org.dspace.content.LicenseUtils");

        // Find the License format
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(context,
                "License");
        b.setFormat(bf);

        b.update();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao;
import org.dspace.core.Context;
import org.dspace.content.Bitstream;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.storage.rdbms.TableRow;
import java.sql.SQLException;
/**
 * PostgreSQL implementation of {@link ItemDAO}: bitstream lookups expressed
 * with PostgreSQL's LIMIT syntax.  All three public lookups share the same
 * query/iterate/close plumbing, factored into {@link #findBitstream}.
 */
public class ItemDAOPostgres extends ItemDAO
{
    /** Primary bitstream id of the named bundle of an item. */
    private static final String selectPrimaryBitstreamID =
        "SELECT bundle.primary_bitstream_id FROM item2bundle, bundle " +
        "WHERE item2bundle.item_id=? AND item2bundle.bundle_id=bundle.bundle_id AND bundle.name=? LIMIT 1";

    /** First bitstream id found in the named bundle of an item. */
    private static final String selectFirstBitstreamID =
        "SELECT bundle2bitstream.bitstream_id FROM item2bundle, bundle, bundle2bitstream " +
        "WHERE item2bundle.item_id=? AND item2bundle.bundle_id=bundle.bundle_id AND bundle.name=? " +
        "AND bundle.bundle_id=bundle2bitstream.bundle_id LIMIT 1";

    /** Bitstream id matching both the bundle name and the bitstream name. */
    private static final String selectNamedBitstreamID =
        "SELECT bitstream.bitstream_id FROM item2bundle, bundle, bundle2bitstream, bitstream " +
        "WHERE item2bundle.item_id=? AND item2bundle.bundle_id=bundle.bundle_id AND bundle.name=? " +
        "AND bundle.bundle_id=bundle2bitstream.bundle_id AND bundle2bitstream.bitstream_id=bitstream.bitstream_id " +
        "AND bitstream.name=?";

    ItemDAOPostgres(Context ctx)
    {
        super(ctx);
    }

    public Bitstream getPrimaryBitstream(int itemId, String bundleName) throws SQLException
    {
        return findBitstream(selectPrimaryBitstreamID, "primary_bitstream_id", itemId, bundleName);
    }

    public Bitstream getFirstBitstream(int itemId, String bundleName) throws SQLException
    {
        return findBitstream(selectFirstBitstreamID, "bitstream_id", itemId, bundleName);
    }

    public Bitstream getNamedBitstream(int itemId, String bundleName, String fileName) throws SQLException
    {
        return findBitstream(selectNamedBitstreamID, "bitstream_id", itemId, bundleName, fileName);
    }

    /**
     * Run the given query and resolve the id in {@code idColumn} of the
     * first row (if any) to a Bitstream.
     *
     * @param sql parameterized query returning bitstream ids
     * @param idColumn name of the id column in the result row
     * @param params positional query parameters
     * @return the Bitstream for the first row, or null if the query matched nothing
     * @throws SQLException passed through from the database layer
     */
    private Bitstream findBitstream(String sql, String idColumn, Object... params) throws SQLException
    {
        TableRowIterator tri = null;
        try
        {
            tri = DatabaseManager.query(context, sql, params);
            if (tri.hasNext())
            {
                TableRow row = tri.next();
                return Bitstream.find(context, row.getIntColumn(idColumn));
            }
        }
        finally
        {
            // always release the iterator, even when Bitstream.find throws
            if (tri != null)
            {
                tri.close();
            }
        }
        return null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao;
import org.dspace.core.Context;
import org.dspace.content.Bitstream;
import java.sql.SQLException;
/**
 * Abstract data-access helper for Item bitstream lookups that need
 * database-specific SQL.  Concrete instances are obtained from
 * ItemDAOFactory based on the configured database.
 */
public abstract class ItemDAO
{
    /** DSpace context used for all queries issued by this DAO. */
    protected Context context;

    protected ItemDAO(Context ctx)
    {
        context = ctx;
    }

    /**
     * Look up the primary bitstream of the named bundle of an item.
     *
     * @param itemId item database ID
     * @param bundleName name of the bundle to search
     * @return the primary Bitstream, or null if none is found
     */
    public abstract Bitstream getPrimaryBitstream(int itemId, String bundleName) throws SQLException;

    /**
     * Look up the first bitstream of the named bundle of an item.
     *
     * @param itemId item database ID
     * @param bundleName name of the bundle to search
     * @return the first Bitstream, or null if none is found
     */
    public abstract Bitstream getFirstBitstream(int itemId, String bundleName) throws SQLException;

    /**
     * Look up a bitstream by name within the named bundle of an item.
     *
     * @param itemId item database ID
     * @param bundleName name of the bundle to search
     * @param fileName name of the bitstream
     * @return the matching Bitstream, or null if none is found
     */
    public abstract Bitstream getNamedBitstream(int itemId, String bundleName, String fileName) throws SQLException;
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao;
import org.dspace.core.Context;
import org.dspace.content.Bitstream;
import java.sql.SQLException;
/**
 * Oracle implementation of {@link ItemDAO}.
 *
 * NOTE(review): none of the lookups are implemented -- every method is a
 * stub that returns null, so callers on Oracle always see "no bitstream".
 * TODO: port the PostgreSQL queries (which rely on LIMIT) to Oracle.
 */
public class ItemDAOOracle extends ItemDAO
{
    ItemDAOOracle(Context ctx)
    {
        super(ctx);
    }

    public Bitstream getPrimaryBitstream(int itemId, String bundleName) throws SQLException {
        return null; // unimplemented stub -- see class note
    }

    public Bitstream getFirstBitstream(int itemId, String bundleName) throws SQLException {
        return null; // unimplemented stub -- see class note
    }

    public Bitstream getNamedBitstream(int itemId, String bundleName, String fileName) throws SQLException {
        return null; // unimplemented stub -- see class note
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao;
import org.dspace.core.Context;
import org.dspace.core.ConfigurationManager;
/**
 * Factory that selects the database-specific {@link ItemDAO} implementation
 * based on the configured database name ({@code db.name}).
 */
public class ItemDAOFactory
{
    /**
     * Get an ItemDAO suitable for the configured database.
     *
     * @param context the DSpace context passed to the DAO
     * @return an {@link ItemDAOOracle} when db.name is "oracle" (any case),
     *         otherwise an {@link ItemDAOPostgres}
     */
    public static ItemDAO getInstance(Context context)
    {
        // Constant-first comparison: getProperty() returns null when db.name
        // is unset, which previously caused a NullPointerException here.
        // An unset db.name now falls through to the PostgreSQL default.
        if ("oracle".equalsIgnoreCase(ConfigurationManager.getProperty("db.name")))
        {
            return new ItemDAOOracle(context);
        }
        return new ItemDAOPostgres(context);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
/**
* Represents the root of the DSpace Archive.
* By default, the handle suffix "0" represents the Site, e.g. "1721.1/0"
*/
public class Site extends DSpaceObject
{
    /** "database" identifier of the site */
    public static final int SITE_ID = 0;
    // cache for Handle that is persistent ID for entire site.
    private static String handle = null;
    // Singleton instance; find() always hands back the same object.
    // NOTE(review): neither lazy init below is synchronized; benign as long
    // as recomputation is idempotent, but worth confirming for this codebase.
    private static Site theSite = null;
    /**
     * Get the type of this object, found in Constants
     *
     * @return type of the object
     */
    public int getType()
    {
        return Constants.SITE;
    }
    /**
     * Get the internal ID (database primary key) of this object
     *
     * @return internal ID of object
     */
    public int getID()
    {
        // The Site is not a real database row; its id is the fixed constant 0.
        return SITE_ID;
    }
    /**
     * Get the Handle of the object. This may return <code>null</code>
     *
     * @return Handle of the object, or <code>null</code> if it doesn't have
     *         one
     */
    public String getHandle()
    {
        return getSiteHandle();
    }
    /**
     * Static method to return site Handle without creating a Site.
     * @return handle of the Site.
     */
    public static String getSiteHandle()
    {
        if (handle == null)
        {
            // By convention the site handle is "<prefix>/0" (suffix SITE_ID).
            handle = HandleManager.getPrefix() + "/" + String.valueOf(SITE_ID);
        }
        return handle;
    }
    /**
     * Get Site object corresponding to db id (which is ignored).
     * @param context the context.
     * @param id integer database id, ignored.
     * @return Site object.
     */
    public static DSpaceObject find(Context context, int id)
        throws SQLException
    {
        // Lazily create the singleton; no database access is involved.
        if (theSite == null)
        {
            theSite = new Site();
        }
        return theSite;
    }
    // Intentionally a no-op: the Site has no backing database row to remove.
    void delete()
        throws SQLException, AuthorizeException, IOException
    {
    }
    // Intentionally a no-op: the Site has no persistent state to write.
    public void update()
        throws SQLException, AuthorizeException, IOException
    {
    }
    /** @return the configured site name ("dspace.name"). */
    public String getName()
    {
        return ConfigurationManager.getProperty("dspace.name");
    }
    /** @return the configured base URL of this DSpace instance ("dspace.url"). */
    public String getURL()
    {
        return ConfigurationManager.getProperty("dspace.url");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.io.InputStream;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.browse.BrowseException;
import org.dspace.browse.IndexBrowse;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.content.authority.Choices;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.content.authority.MetadataAuthorityManager;
import org.dspace.core.ConfigurationManager;
import org.dspace.event.Event;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import proj.oceandocs.citation.CitationManager;
/**
* Class representing an item in DSpace.
* <P>
* This class holds in memory the item Dublin Core metadata, the bundles in the
* item, and the bitstreams in those bundles. When modifying the item, if you
* modify the Dublin Core or the "in archive" flag, you must call
* <code>update</code> for the changes to be written to the database.
* Creating, adding or removing bundles or bitstreams has immediate effect in
* the database.
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision: 6107 $
*/
public class Item extends DSpaceObject {
/**
* Wild card for Dublin Core metadata qualifiers/languages
*/
public static final String ANY = "*";
/** log4j category */
private static final Logger log = Logger.getLogger(Item.class);
/** Our context */
private Context ourContext;
/** The table row corresponding to this item */
private TableRow itemRow;
/** The e-person who submitted this item */
private EPerson submitter;
/** The bundles in this item - kept in sync with DB */
private List<Bundle> bundles;
/** The Dublin Core metadata - inner class for lazy loading */
MetadataCache dublinCore = new MetadataCache();
/** Handle, if any */
private String handle;
/**
* True if the Dublin Core has changed since reading from the DB or the last
* update()
*/
private boolean dublinCoreChanged;
/**
* True if anything else was changed since last update()
* (to drive event mechanism)
*/
private boolean modified;
/**
* Construct an item with the given table row
*
* @param context
* the context this object exists in
* @param row
* the corresponding row in the table
* @throws SQLException
*/
Item(Context context, TableRow row) throws SQLException {
    ourContext = context;
    itemRow = row;
    // Fresh object: nothing changed yet; update() consults these flags to
    // decide what to persist and which events to fire.
    dublinCoreChanged = false;
    modified = false;
    clearDetails();
    // Get our Handle if any
    handle = HandleManager.findHandle(context, this);
    // Cache ourselves so later find() calls return this same instance
    context.cache(this, row.getIntColumn("item_id"));
}
/**
 * Load all MetadataValue rows for this item from the database, ordered by
 * field and then by "place" so repeated values keep their stored order.
 *
 * @return iterator over this item's metadata rows
 * @throws SQLException on database error
 */
private TableRowIterator retrieveMetadata() throws SQLException {
    return DatabaseManager.queryTable(ourContext, "MetadataValue",
            "SELECT * FROM MetadataValue WHERE item_id= ? ORDER BY metadata_field_id, place",
            itemRow.getIntColumn("item_id"));
}
/**
* Get an item from the database. The item, its Dublin Core metadata, and
* the bundle and bitstream metadata are all loaded into memory.
*
* @param context
* DSpace context object
* @param id
* Internal ID of the item
* @return the item, or null if the internal ID is invalid.
* @throws SQLException
*/
public static Item find(Context context, int id) throws SQLException {
    // Serve from the per-context cache when we already loaded this item
    Item cached = (Item) context.fromCache(Item.class, id);
    if (cached != null) {
        return cached;
    }
    TableRow row = DatabaseManager.find(context, "item", id);
    if (row != null) {
        if (log.isDebugEnabled()) {
            log.debug(LogManager.getHeader(context, "find_item", "item_id="
                    + id));
        }
        return new Item(context, row);
    }
    // No such row: log the miss and signal "not found" with null
    if (log.isDebugEnabled()) {
        log.debug(LogManager.getHeader(context, "find_item",
                "not_found,item_id=" + id));
    }
    return null;
}
/**
* Create a new item, with a new internal ID. This method is not public,
* since items need to be created as workspace items. Authorisation is the
* responsibility of the caller.
*
* @param context
* DSpace context object
* @return the newly created item
* @throws SQLException
* @throws AuthorizeException
*/
static Item create(Context context) throws SQLException, AuthorizeException {
    TableRow row = DatabaseManager.create(context, "item");
    Item i = new Item(context, row);
    // Call update to give the item a last modified date. OK this isn't
    // amazingly efficient but creates don't happen that often.
    // Authorisation is deliberately suspended around update(): this method
    // is package-private and the caller is responsible for authorisation.
    context.turnOffAuthorisationSystem();
    i.update();
    context.restoreAuthSystemState();
    // Record a CREATE event for the event system
    context.addEvent(new Event(Event.CREATE, Constants.ITEM, i.getID(), null));
    log.info(LogManager.getHeader(context, "create_item", "item_id="
            + row.getIntColumn("item_id")));
    return i;
}
/**
* Get all the items in the archive. Only items with the "in archive" flag
* set are included. The order of the list is indeterminate.
*
* @param context
* DSpace context object
* @return an iterator over the items in the archive.
* @throws SQLException
*/
public static ItemIterator findAll(Context context) throws SQLException {
    // Only archived items; withdrawn / in-progress items are excluded
    TableRowIterator rows = DatabaseManager.queryTable(context, "item",
            "SELECT * FROM item WHERE in_archive='1'");
    return new ItemIterator(context, rows);
}
/**
* Find all the items in the archive by a given submitter. The order is
* indeterminate. Only items with the "in archive" flag set are included.
*
* @param context
* DSpace context object
* @param eperson
* the submitter
* @return an iterator over the items submitted by eperson
* @throws SQLException
*/
public static ItemIterator findBySubmitter(Context context, EPerson eperson)
        throws SQLException {
    // Use a bound parameter instead of string concatenation, matching how
    // queryTable is invoked elsewhere in this class (see retrieveMetadata).
    // The id is an int so it was not injectable, but the parameterized form
    // is the consistent, safe idiom.
    String myQuery = "SELECT * FROM item WHERE in_archive='1' AND submitter_id= ? ";
    TableRowIterator rows = DatabaseManager.queryTable(context, "item",
            myQuery, eperson.getID());
    return new ItemIterator(context, rows);
}
/**
* Get the internal ID of this item. In general, this shouldn't be exposed
* to users
*
* @return the internal identifier
*/
public int getID() {
    // Primary key of the backing "item" row
    return itemRow.getIntColumn("item_id");
}
/**
* @see org.dspace.content.DSpaceObject#getHandle()
*/
public String getHandle() {
    if (handle == null) {
        try {
            handle = HandleManager.findHandle(this.ourContext, this);
        } catch (SQLException e) {
            // Best-effort lookup: keep returning null rather than propagate,
            // but log the failure instead of silently swallowing it as before.
            log.error("Error finding handle for item " + getID(), e);
        }
    }
    return handle;
}
/**
* Find out if the item is part of the main archive
*
* @return true if the item is in the main archive
*/
public boolean isArchived() {
    // Reads the in_archive flag straight from the item row
    return itemRow.getBooleanColumn("in_archive");
}
/**
* Find out if the item has been withdrawn
*
* @return true if the item has been withdrawn
*/
public boolean isWithdrawn() {
    // Reads the withdrawn flag straight from the item row
    return itemRow.getBooleanColumn("withdrawn");
}
/**
* Get the date the item was last modified, or the current date if
* last_modified is null
*
* @return the date the item was last modified, or the current date if the
* column is null.
*/
public Date getLastModified() {
    // Fall back to "now" when the column has never been populated
    Date lastModified = itemRow.getDateColumn("last_modified");
    return (lastModified != null) ? lastModified : new Date();
}
/**
* Set the "is_archived" flag. This is public and only
* <code>WorkflowItem.archive()</code> should set this.
*
* @param isArchived
* new value for the flag
*/
public void setArchived(boolean isArchived) {
    itemRow.setColumn("in_archive", isArchived);
    // Not written to the database until update() is called
    modified = true;
}
/**
* Set the owning Collection for the item
*
* @param c
* Collection
*/
public void setOwningCollection(Collection c) {
    itemRow.setColumn("owning_collection", c.getID());
    // Not written to the database until update() is called
    modified = true;
}
/**
* Get the owning Collection for the item
*
* @return Collection that is the owner of the item
* @throws SQLException
*/
public Collection getOwningCollection() throws java.sql.SQLException {
    // Resolve the stored foreign key straight into a Collection object
    int owningCollectionId = itemRow.getIntColumn("owning_collection");
    return Collection.find(ourContext, owningCollectionId);
}
// just get the collection ID for internal use
private int getOwningCollectionID() {
    // Raw foreign key; avoids loading the whole Collection object
    return itemRow.getIntColumn("owning_collection");
}
/**
* Get Dublin Core metadata for the item.
* Passing in a <code>null</code> value for <code>qualifier</code>
* or <code>lang</code> only matches Dublin Core fields where that
* qualifier or languages is actually <code>null</code>.
* Passing in <code>Item.ANY</code>
* retrieves all metadata fields with any value for the qualifier or
* language, including <code>null</code>
* <P>
* Examples:
* <P>
* Return values of the unqualified "title" field, in any language.
* Qualified title fields (e.g. "title.uniform") are NOT returned:
* <P>
* <code>item.getDC( "title", null, Item.ANY );</code>
* <P>
* Return all US English values of the "title" element, with any qualifier
* (including unqualified):
* <P>
* <code>item.getDC( "title", Item.ANY, "en_US" );</code>
* <P>
* The ordering of values of a particular element/qualifier/language
* combination is significant. When retrieving with wildcards, values of a
* particular element/qualifier/language combinations will be adjacent, but
* the overall ordering of the combinations is indeterminate.
*
* @param element
* the Dublin Core element. <code>Item.ANY</code> matches any
* element. <code>null</code> doesn't really make sense as all
* DC must have an element.
* @param qualifier
* the qualifier. <code>null</code> means unqualified, and
* <code>Item.ANY</code> means any qualifier (including
* unqualified.)
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means only
* values with no language are returned, and
* <code>Item.ANY</code> means values with any country code or
* no country code are returned.
* @return Dublin Core fields that match the parameters
*/
@Deprecated
public DCValue[] getDC(String element, String qualifier, String lang) {
    // Legacy shortcut: delegates to getMetadata() with the "dc" schema
    return getMetadata(MetadataSchema.DC_SCHEMA, element, qualifier, lang);
}
/**
* Get metadata for the item in a chosen schema.
* See <code>MetadataSchema</code> for more information about schemas.
* Passing in a <code>null</code> value for <code>qualifier</code>
* or <code>lang</code> only matches metadata fields where that
* qualifier or languages is actually <code>null</code>.
* Passing in <code>Item.ANY</code>
* retrieves all metadata fields with any value for the qualifier or
* language, including <code>null</code>
* <P>
* Examples:
* <P>
* Return values of the unqualified "title" field, in any language.
* Qualified title fields (e.g. "title.uniform") are NOT returned:
* <P>
* <code>item.getMetadata("dc", "title", null, Item.ANY );</code>
* <P>
* Return all US English values of the "title" element, with any qualifier
* (including unqualified):
* <P>
* <code>item.getMetadata("dc, "title", Item.ANY, "en_US" );</code>
* <P>
* The ordering of values of a particular element/qualifier/language
* combination is significant. When retrieving with wildcards, values of a
* particular element/qualifier/language combinations will be adjacent, but
* the overall ordering of the combinations is indeterminate.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the element name. <code>Item.ANY</code> matches any
* element. <code>null</code> doesn't really make sense as all
* metadata must have an element.
* @param qualifier
* the qualifier. <code>null</code> means unqualified, and
* <code>Item.ANY</code> means any qualifier (including
* unqualified.)
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means only
* values with no language are returned, and
* <code>Item.ANY</code> means values with any country code or
* no country code are returned.
* @return metadata fields that match the parameters
*/
public DCValue[] getMetadata(String schema, String element, String qualifier,
        String lang) {
    // Collect every cached value that matches the requested pattern
    List<DCValue> matches = new ArrayList<DCValue>();
    for (DCValue candidate : getMetadata()) {
        if (!match(schema, element, qualifier, lang, candidate)) {
            continue;
        }
        // Hand back copies so callers cannot mutate our cached metadata
        DCValue copy = new DCValue();
        copy.schema = candidate.schema;
        copy.element = candidate.element;
        copy.qualifier = candidate.qualifier;
        copy.value = candidate.value;
        copy.language = candidate.language;
        copy.authority = candidate.authority;
        copy.confidence = candidate.confidence;
        matches.add(copy);
    }
    return matches.toArray(new DCValue[matches.size()]);
}
/**
* Retrieve metadata field values from a given metadata string
* of the form <schema prefix>.<element>[.<qualifier>|.*]
*
* @param mdString
* The metadata string of the form
* <schema prefix>.<element>[.<qualifier>|.*]
*/
public DCValue[] getMetadata(String mdString) {
    StringTokenizer dcf = new StringTokenizer(mdString, ".");
    String[] tokens = {
        "", "", ""
    };
    int i = 0;
    // FIX: stop after three tokens. A string such as
    // "dc.title.alternative.extra" previously ran past the end of the
    // array and threw ArrayIndexOutOfBoundsException; extra tokens are
    // now ignored.
    while (dcf.hasMoreTokens() && i < tokens.length) {
        tokens[i] = dcf.nextToken().trim();
        i++;
    }
    String schema = tokens[0];
    String element = tokens[1];
    String qualifier = tokens[2];
    DCValue[] values;
    if ("*".equals(qualifier)) {
        // Trailing ".*" means "any qualifier"
        values = getMetadata(schema, element, Item.ANY, Item.ANY);
    } else if ("".equals(qualifier)) {
        // No qualifier part means "unqualified only"
        values = getMetadata(schema, element, null, Item.ANY);
    } else {
        values = getMetadata(schema, element, qualifier, Item.ANY);
    }
    return values;
}
/**
* Add Dublin Core metadata fields. These are appended to existing values.
* Use <code>clearDC</code> to remove values. The ordering of values
* passed in is maintained.
*
* @param element
* the Dublin Core element
* @param qualifier
* the Dublin Core qualifier, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param values
* the values to add.
*/
@Deprecated
public void addDC(String element, String qualifier, String lang,
        String[] values) {
    // Legacy shortcut: delegates to addMetadata() with the "dc" schema
    addMetadata(MetadataSchema.DC_SCHEMA, element, qualifier, lang, values);
}
/**
* Add a single Dublin Core metadata field. This is appended to existing
* values. Use <code>clearDC</code> to remove values.
*
* @param element
* the Dublin Core element
* @param qualifier
* the Dublin Core qualifier, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param value
* the value to add.
*/
@Deprecated
public void addDC(String element, String qualifier, String lang,
        String value) {
    // Legacy shortcut: delegates to addMetadata() with the "dc" schema
    addMetadata(MetadataSchema.DC_SCHEMA, element, qualifier, lang, value);
}
/**
* Add metadata fields. These are appended to existing values.
* Use <code>clearDC</code> to remove values. The ordering of values
* passed in is maintained.
* <p>
* If metadata authority control is available, try to get authority
* values. The authority confidence depends on whether authority is
* <em>required</em> or not.
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the metadata element name
* @param qualifier
* the metadata qualifier name, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param values
* the values to add.
*/
public void addMetadata(String schema, String element, String qualifier, String lang,
        String[] values) {
    MetadataAuthorityManager mam = MetadataAuthorityManager.getManager();
    String fieldKey = MetadataAuthorityManager.makeFieldKey(schema, element, qualifier);
    if (!mam.isAuthorityControlled(fieldKey)) {
        // Field is not authority controlled: store the values as-is
        addMetadata(schema, element, qualifier, lang, values, null, null);
        return;
    }
    // Authority-controlled field: look up the best authority match and
    // confidence for each value before storing
    int count = values.length;
    String[] authorities = new String[count];
    int[] confidences = new int[count];
    for (int idx = 0; idx < count; ++idx) {
        Choices best = ChoiceAuthorityManager.getManager()
                .getBestMatch(fieldKey, values[idx], getOwningCollectionID(), null);
        authorities[idx] = (best.values.length > 0) ? best.values[0].authority : null;
        confidences[idx] = best.confidence;
    }
    addMetadata(schema, element, qualifier, lang, values, authorities, confidences);
}
/**
 * Add metadata fields, with the caller (rather than configuration)
 * deciding whether the field is treated as authority controlled.
 * Values are appended to existing ones.
 */
public void addMetadata(String schema, String element, String qualifier, String lang,
        String[] values, boolean authorityControlled) {
    String fieldKey = MetadataField.formKey(schema, element, qualifier);
    if (!authorityControlled) {
        // Caller disabled authority control: store values without authorities
        addMetadata(schema, element, qualifier, lang, values, null, null, false);
        return;
    }
    // Look up the best authority match and confidence for each value
    int count = values.length;
    String[] authorities = new String[count];
    int[] confidences = new int[count];
    for (int idx = 0; idx < count; ++idx) {
        Choices best = ChoiceAuthorityManager.getManager()
                .getBestMatch(fieldKey, values[idx], getOwningCollectionID(), null);
        authorities[idx] = (best.values.length > 0) ? best.values[0].authority : null;
        confidences[idx] = best.confidence;
    }
    addMetadata(schema, element, qualifier, lang, values, authorities, confidences, authorityControlled);
}
/**
* Add metadata fields. These are appended to existing values.
* Use <code>clearDC</code> to remove values. The ordering of values
* passed in is maintained.
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the metadata element name
* @param qualifier
* the metadata qualifier name, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param values
* the values to add.
* @param authorities
* the external authority key for this value (or null)
* @param confidences
* the authority confidence (default 0)
*/
public void addMetadata(String schema, String element, String qualifier, String lang,
        String[] values, String authorities[], int confidences[]) {
    List<DCValue> dublinCore = getMetadata();
    MetadataAuthorityManager mam = MetadataAuthorityManager.getManager();
    boolean authorityControlled = mam.isAuthorityControlled(schema, element, qualifier);
    boolean authorityRequired = mam.isAuthorityRequired(schema, element, qualifier);
    String fieldName = schema + "." + element + ((qualifier == null) ? "" : "." + qualifier);
    // We will not verify that they are valid entries in the registry
    // until update() is called.
    for (int i = 0; i < values.length; i++) {
        DCValue dcv = new DCValue();
        dcv.schema = schema;
        dcv.element = element;
        dcv.qualifier = qualifier;
        dcv.language = (lang == null ? null : lang.trim());
        // Logic to set Authority and Confidence:
        //  - normalize an empty string for authority to NULL.
        //  - if authority key is present, use given confidence or NOVALUE if not given
        //  - otherwise, preserve confidence if meaningful value was given since it may document a failed authority lookup
        //  - CF_UNSET signifies no authority nor meaningful confidence.
        //  - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key
        if (authorityControlled) {
            if (authorities != null && authorities[i] != null && authorities[i].length() > 0) {
                dcv.authority = authorities[i];
                dcv.confidence = confidences == null ? Choices.CF_NOVALUE : confidences[i];
            } else {
                dcv.authority = null;
                dcv.confidence = confidences == null ? Choices.CF_UNSET : confidences[i];
            }
            // authority sanity check: if authority is required, was it supplied?
            // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so use a runtime exception
            if (authorityRequired && (dcv.authority == null || dcv.authority.length() == 0)) {
                // FIX: report values[i] -- dcv.value is not assigned until
                // later in the loop, so the old message always printed
                // null. Also corrects the "Vaue" typo.
                throw new IllegalArgumentException("The metadata field \"" + fieldName + "\" requires an authority key but none was provided. Value=\"" + values[i] + "\"");
            }
        }
        if (values[i] != null) {
            // Replace ISO control characters (except tab, LF, CR) with spaces
            String temp = values[i].trim();
            char[] dcvalue = temp.toCharArray();
            for (int charPos = 0; charPos < dcvalue.length; charPos++) {
                if (Character.isISOControl(dcvalue[charPos])
                        && !String.valueOf(dcvalue[charPos]).equals("\u0009")
                        && !String.valueOf(dcvalue[charPos]).equals("\n")
                        && !String.valueOf(dcvalue[charPos]).equals("\r")) {
                    dcvalue[charPos] = ' ';
                }
            }
            dcv.value = String.valueOf(dcvalue);
        } else {
            dcv.value = null;
        }
        dublinCore.add(dcv);
        addDetails(fieldName);
    }
    if (values.length > 0) {
        dublinCoreChanged = true;
    }
}
/**
 * Add metadata fields with explicit authority keys and confidences, with
 * the caller (not the MetadataAuthorityManager configuration) deciding
 * whether the field is authority controlled.  Values are appended to the
 * existing metadata; nothing is written until update() is called.
 *
 * NOTE(review): this largely duplicates the 7-argument addMetadata()
 * overload, minus the "authority required" sanity check, which is left
 * commented out below.
 */
public void addMetadata(String schema, String element, String qualifier, String lang,
        String[] values, String authorities[], int confidences[], boolean authorityControlled) {
    List<DCValue> dublinCore = getMetadata();
    String fieldName = schema + "." + element + ((qualifier == null) ? "" : "." + qualifier);
    // We will not verify that they are valid entries in the registry
    // until update() is called.
    for (int i = 0; i < values.length; i++) {
        DCValue dcv = new DCValue();
        dcv.schema = schema;
        dcv.element = element;
        dcv.qualifier = qualifier;
        dcv.language = (lang == null ? null : lang.trim());
        // Logic to set Authority and Confidence:
        //  - normalize an empty string for authority to NULL.
        //  - if authority key is present, use given confidence or NOVALUE if not given
        //  - otherwise, preserve confidence if meaningful value was given since it may document a failed authority lookup
        //  - CF_UNSET signifies no authority nor meaningful confidence.
        //  - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key
        if (authorityControlled) {
            if (authorities != null && authorities[i] != null && authorities[i].length() > 0) {
                dcv.authority = authorities[i];
                dcv.confidence = confidences == null ? Choices.CF_NOVALUE : confidences[i];
            } else {
                dcv.authority = null;
                dcv.confidence = confidences == null ? Choices.CF_UNSET : confidences[i];
            }
            // authority sanity check: if authority is required, was it supplied?
            // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so use a runtime exception
            // if (authorityRequired && (dcv.authority == null || dcv.authority.length() == 0))
            // {
            //     throw new IllegalArgumentException("The metadata field \"" + fieldName + "\" requires an authority key but none was provided. Vaue=\"" + dcv.value + "\"");
            // }
        }
        if (values[i] != null) {
            // Replace ISO control characters (except tab, LF, CR) with spaces
            String temp = values[i].trim();
            char[] dcvalue = temp.toCharArray();
            for (int charPos = 0; charPos < dcvalue.length; charPos++) {
                if (Character.isISOControl(dcvalue[charPos])
                        && !String.valueOf(dcvalue[charPos]).equals("\u0009")
                        && !String.valueOf(dcvalue[charPos]).equals("\n")
                        && !String.valueOf(dcvalue[charPos]).equals("\r")) {
                    dcvalue[charPos] = ' ';
                }
            }
            dcv.value = String.valueOf(dcvalue);
        } else {
            dcv.value = null;
        }
        dublinCore.add(dcv);
        addDetails(fieldName);
    }
    if (values.length > 0) {
        dublinCoreChanged = true;
    }
}
/**
* Add a single metadata field. This is appended to existing
* values. Use <code>clearDC</code> to remove values.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the metadata element name
* @param qualifier
* the metadata qualifier, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param value
* the value to add.
*/
public void addMetadata(String schema, String element, String qualifier,
        String lang, String value) {
    // Delegate to the array form with a one-element array
    addMetadata(schema, element, qualifier, lang, new String[] { value });
}
/**
* Add a single metadata field. This is appended to existing
* values. Use <code>clearDC</code> to remove values.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the metadata element name
* @param qualifier
* the metadata qualifier, or <code>null</code> for
* unqualified
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param value
* the value to add.
* @param authority
* the external authority key for this value (or null)
* @param confidence
* the authority confidence (default 0)
*/
public void addMetadata(String schema, String element, String qualifier,
        String lang, String value, String authority, int confidence) {
    // Delegate to the array form with one-element arrays
    addMetadata(schema, element, qualifier, lang,
            new String[] { value },
            new String[] { authority },
            new int[] { confidence });
}
/**
 * Add a single metadata value with an explicit authority key, confidence,
 * and caller-controlled authority flag.  Appended to existing values.
 */
public void addMetadata(String schema, String element, String qualifier,
        String lang, String value, String authority, int confidence, boolean authorityControlled) {
    // Delegate to the array form with one-element arrays
    addMetadata(schema, element, qualifier, lang,
            new String[] { value },
            new String[] { authority },
            new int[] { confidence },
            authorityControlled);
}
/**
* Clear Dublin Core metadata values. As with <code>getDC</code> above,
* passing in <code>null</code> only matches fields where the qualifier or
* language is actually <code>null</code>.<code>Item.ANY</code> will
* match any element, qualifier or language, including <code>null</code>.
* Thus, <code>item.clearDC(Item.ANY, Item.ANY, Item.ANY)</code> will
* remove all Dublin Core metadata associated with an item.
*
* @param element
* the Dublin Core element to remove, or <code>Item.ANY</code>
* @param qualifier
* the qualifier. <code>null</code> means unqualified, and
* <code>Item.ANY</code> means any qualifier (including
* unqualified.)
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means only
* values with no language are removed, and <code>Item.ANY</code>
* means values with any country code or no country code are
* removed.
*/
@Deprecated
public void clearDC(String element, String qualifier, String lang) {
    // Legacy shortcut: delegates to clearMetadata() with the "dc" schema
    clearMetadata(MetadataSchema.DC_SCHEMA, element, qualifier, lang);
}
/**
* Clear metadata values. As with <code>getDC</code> above,
* passing in <code>null</code> only matches fields where the qualifier or
* language is actually <code>null</code>.<code>Item.ANY</code> will
* match any element, qualifier or language, including <code>null</code>.
* Thus, <code>item.clearDC(Item.ANY, Item.ANY, Item.ANY)</code> will
* remove all Dublin Core metadata associated with an item.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the Dublin Core element to remove, or <code>Item.ANY</code>
* @param qualifier
* the qualifier. <code>null</code> means unqualified, and
* <code>Item.ANY</code> means any qualifier (including
* unqualified.)
* @param lang
* the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means only
* values with no language are removed, and <code>Item.ANY</code>
* means values with any country code or no country code are
* removed.
*/
public void clearMetadata(String schema, String element, String qualifier,
        String lang) {
    // Keep only the values that do NOT match the pattern being cleared
    List<DCValue> survivors = new ArrayList<DCValue>();
    for (DCValue candidate : getMetadata()) {
        if (!match(schema, element, qualifier, lang, candidate)) {
            survivors.add(candidate);
        }
    }
    // Swap in the filtered list and mark the metadata as dirty
    setMetadata(survivors);
    dublinCoreChanged = true;
}
/**
* Utility method for pattern-matching metadata elements. This
* method will return <code>true</code> if the given schema,
* element, qualifier and language match the schema, element,
* qualifier and language of the <code>DCValue</code> object passed
* in. Any or all of the element, qualifier and language passed
* in can be the <code>Item.ANY</code> wildcard.
*
* @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the element to match, or <code>Item.ANY</code>
* @param qualifier
* the qualifier to match, or <code>Item.ANY</code>
* @param language
* the language to match, or <code>Item.ANY</code>
* @param dcv
* the Dublin Core value
* @return <code>true</code> if there is a match
*/
private boolean match(String schema, String element, String qualifier,
String language, DCValue dcv) {
// We will attempt to disprove a match - if we can't we have a match
if (!element.equals(Item.ANY) && !element.equalsIgnoreCase(dcv.element)) {
// Elements do not match, no wildcard
return false;
}
if (qualifier == null) {
// Value must be unqualified
if (dcv.qualifier != null) {
// Value is qualified, so no match
return false;
}
} else if (!qualifier.equals(Item.ANY)) {
// Not a wildcard, so qualifier must match exactly
if (!qualifier.equalsIgnoreCase(dcv.qualifier)) {
return false;
}
}
if (language == null || language.equals("")) {
// Value must be null language to match
if (dcv.language != null || dcv.language.equals("")) {
// Value is qualified, so no match
return false;
}
} else if (!language.equals(Item.ANY)) {
// Not a wildcard, so language must match exactly
if (!language.equalsIgnoreCase(dcv.language)) {
return false;
}
}
if (!schema.equals(Item.ANY)) {
if (dcv.schema != null && !dcv.schema.equalsIgnoreCase(schema)) {
// The namespace doesn't match
return false;
}
}
// If we get this far, we have a match
return true;
}
/**
* Get the e-person that originally submitted this item
*
* @return the submitter
*/
public EPerson getSubmitter() throws SQLException {
if (submitter == null && !itemRow.isColumnNull("submitter_id")) {
submitter = EPerson.find(ourContext, itemRow.getIntColumn("submitter_id"));
}
return submitter;
}
/**
* Set the e-person that originally submitted this item. This is a public
* method since it is handled by the WorkspaceItem class in the ingest
* package. <code>update</code> must be called to write the change to the
* database.
*
* @param sub
* the submitter
*/
public void setSubmitter(EPerson sub) {
submitter = sub;
if (submitter != null) {
itemRow.setColumn("submitter_id", submitter.getID());
} else {
itemRow.setColumnNull("submitter_id");
}
modified = true;
}
/**
* See whether this Item is contained by a given Collection.
* @param collection
* @return true if {@code collection} contains this Item.
* @throws SQLException
*/
public boolean isIn(Collection collection) throws SQLException {
TableRow tr = DatabaseManager.querySingle(ourContext,
"SELECT COUNT(*) AS count"
+ " FROM collection2item"
+ " WHERE collection_id = ? AND item_id = ?",
collection.getID(), itemRow.getIntColumn("item_id"));
return tr.getLongColumn("count") > 0;
}
/**
* Get the collections this item is in. The order is indeterminate.
*
* @return the collections this item is in, if any.
* @throws SQLException
*/
public Collection[] getCollections() throws SQLException {
List<Collection> collections = new ArrayList<Collection>();
// Get collection table rows
TableRowIterator tri = DatabaseManager.queryTable(ourContext, "collection",
"SELECT collection.* FROM collection, collection2item WHERE "
+ "collection2item.collection_id=collection.collection_id AND "
+ "collection2item.item_id= ? ",
itemRow.getIntColumn("item_id"));
try {
while (tri.hasNext()) {
TableRow row = tri.next();
// First check the cache
Collection fromCache = (Collection) ourContext.fromCache(
Collection.class, row.getIntColumn("collection_id"));
if (fromCache != null) {
collections.add(fromCache);
} else {
collections.add(new Collection(ourContext, row));
}
}
} finally {
// close the TableRowIterator to free up resources
if (tri != null) {
tri.close();
}
}
Collection[] collectionArray = new Collection[collections.size()];
collectionArray = (Collection[]) collections.toArray(collectionArray);
return collectionArray;
}
/**
* Get the communities this item is in. Returns an unordered array of the
* communities that house the collections this item is in, including parent
* communities of the owning collections.
*
* @return the communities this item is in.
* @throws SQLException
*/
public Community[] getCommunities() throws SQLException {
List<Community> communities = new ArrayList<Community>();
// Get community table rows
TableRowIterator tri = DatabaseManager.queryTable(ourContext, "community",
"SELECT community.* FROM community, community2item "
+ "WHERE community2item.community_id=community.community_id "
+ "AND community2item.item_id= ? ",
itemRow.getIntColumn("item_id"));
try {
while (tri.hasNext()) {
TableRow row = tri.next();
// First check the cache
Community owner = (Community) ourContext.fromCache(Community.class,
row.getIntColumn("community_id"));
if (owner == null) {
owner = new Community(ourContext, row);
}
communities.add(owner);
// now add any parent communities
Community[] parents = owner.getAllParents();
communities.addAll(Arrays.asList(parents));
}
} finally {
// close the TableRowIterator to free up resources
if (tri != null) {
tri.close();
}
}
Community[] communityArray = new Community[communities.size()];
communityArray = (Community[]) communities.toArray(communityArray);
return communityArray;
}
/**
* Get the bundles in this item.
*
* @return the bundles in an unordered array
*/
public Bundle[] getBundles() throws SQLException {
if (bundles == null) {
bundles = new ArrayList<Bundle>();
// Get bundles
TableRowIterator tri = DatabaseManager.queryTable(ourContext, "bundle",
"SELECT bundle.* FROM bundle, item2bundle WHERE "
+ "item2bundle.bundle_id=bundle.bundle_id AND "
+ "item2bundle.item_id= ? ",
itemRow.getIntColumn("item_id"));
try {
while (tri.hasNext()) {
TableRow r = tri.next();
// First check the cache
Bundle fromCache = (Bundle) ourContext.fromCache(Bundle.class,
r.getIntColumn("bundle_id"));
if (fromCache != null) {
bundles.add(fromCache);
} else {
bundles.add(new Bundle(ourContext, r));
}
}
} finally {
// close the TableRowIterator to free up resources
if (tri != null) {
tri.close();
}
}
}
Bundle[] bundleArray = new Bundle[bundles.size()];
bundleArray = (Bundle[]) bundles.toArray(bundleArray);
return bundleArray;
}
/**
* Get the bundles matching a bundle name (name corresponds roughly to type)
*
* @param name
* name of bundle (ORIGINAL/TEXT/THUMBNAIL)
*
* @return the bundles in an unordered array
*/
public Bundle[] getBundles(String name) throws SQLException {
List<Bundle> matchingBundles = new ArrayList<Bundle>();
// now only keep bundles with matching names
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
if (name.equals(bunds[i].getName())) {
matchingBundles.add(bunds[i]);
}
}
Bundle[] bundleArray = new Bundle[matchingBundles.size()];
bundleArray = (Bundle[]) matchingBundles.toArray(bundleArray);
return bundleArray;
}
/**
* Create a bundle in this item, with immediate effect
*
* @param name
* bundle name (ORIGINAL/TEXT/THUMBNAIL)
* @return the newly created bundle
* @throws SQLException
* @throws AuthorizeException
*/
public Bundle createBundle(String name) throws SQLException,
AuthorizeException {
if ((name == null) || "".equals(name)) {
throw new SQLException("Bundle must be created with non-null name");
}
// Check authorisation
AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
Bundle b = Bundle.create(ourContext);
b.setName(name);
b.update();
addBundle(b);
return b;
}
/**
* Add an existing bundle to this item. This has immediate effect.
*
* @param b
* the bundle to add
* @throws SQLException
* @throws AuthorizeException
*/
public void addBundle(Bundle b) throws SQLException, AuthorizeException {
// Check authorisation
AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
log.info(LogManager.getHeader(ourContext, "add_bundle", "item_id="
+ getID() + ",bundle_id=" + b.getID()));
// Check it's not already there
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
if (b.getID() == bunds[i].getID()) {
// Bundle is already there; no change
return;
}
}
// now add authorization policies from owning item
// hmm, not very "multiple-inclusion" friendly
AuthorizeManager.inheritPolicies(ourContext, this, b);
// Add the bundle to in-memory list
bundles.add(b);
// Insert the mapping
TableRow mappingRow = DatabaseManager.row("item2bundle");
mappingRow.setColumn("item_id", getID());
mappingRow.setColumn("bundle_id", b.getID());
DatabaseManager.insert(ourContext, mappingRow);
ourContext.addEvent(new Event(Event.ADD, Constants.ITEM, getID(), Constants.BUNDLE, b.getID(), b.getName()));
}
    /**
     * Remove a bundle. This may result in the bundle being deleted, if the
     * bundle is orphaned.
     *
     * @param b
     *            the bundle to remove
     * @throws SQLException
     * @throws AuthorizeException
     * @throws IOException
     */
    public void removeBundle(Bundle b) throws SQLException, AuthorizeException,
            IOException {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
        log.info(LogManager.getHeader(ourContext, "remove_bundle", "item_id="
                + getID() + ",bundle_id=" + b.getID()));
        // Remove from internal list of bundles (matched by ID, not identity)
        Bundle[] bunds = getBundles();
        for (int i = 0; i < bunds.length; i++) {
            if (b.getID() == bunds[i].getID()) {
                // We've found the bundle to remove
                bundles.remove(bunds[i]);
                break;
            }
        }
        // Remove the item->bundle mapping row from the DB
        DatabaseManager.updateQuery(ourContext,
                "DELETE FROM item2bundle WHERE item_id= ? "
                + "AND bundle_id= ? ",
                getID(), b.getID());
        ourContext.addEvent(new Event(Event.REMOVE, Constants.ITEM, getID(), Constants.BUNDLE, b.getID(), b.getName()));
        // If no other item still references the bundle, it is orphaned
        // and must be deleted outright
        TableRowIterator tri = DatabaseManager.query(ourContext,
                "SELECT * FROM item2bundle WHERE bundle_id= ? ",
                b.getID());
        try {
            if (!tri.hasNext()) {
                // Grant the current user explicit DELETE/REMOVE rights on the
                // bundle: the implicit rights that flowed from this item were
                // severed when the mapping row was deleted above, and the user
                // who started the removal must be able to finish it, including
                // removing the bundle's bitstreams.
                AuthorizeManager.addPolicy(ourContext, b, Constants.DELETE,
                        ourContext.getCurrentUser());
                AuthorizeManager.addPolicy(ourContext, b, Constants.REMOVE,
                        ourContext.getCurrentUser());
                // The bundle is an orphan, delete it
                b.delete();
            }
        } finally {
            // close the TableRowIterator to free up resources
            if (tri != null) {
                tri.close();
            }
        }
    }
/**
* Create a single bitstream in a new bundle. Provided as a convenience
* method for the most common use.
*
* @param is
* the stream to create the new bitstream from
* @param name
* is the name of the bundle (ORIGINAL, TEXT, THUMBNAIL)
* @return Bitstream that is created
* @throws AuthorizeException
* @throws IOException
* @throws SQLException
*/
public Bitstream createSingleBitstream(InputStream is, String name)
throws AuthorizeException, IOException, SQLException {
// Authorisation is checked by methods below
// Create a bundle
Bundle bnd = createBundle(name);
Bitstream bitstream = bnd.createBitstream(is);
addBundle(bnd);
// FIXME: Create permissions for new bundle + bitstream
return bitstream;
}
    /**
     * Convenience method, calls createSingleBitstream() with name "ORIGINAL"
     *
     * @param is
     *            InputStream
     * @return created bitstream
     * @throws AuthorizeException
     * @throws IOException
     * @throws SQLException
     */
    public Bitstream createSingleBitstream(InputStream is)
            throws AuthorizeException, IOException, SQLException {
        // "ORIGINAL" is the conventional bundle name for primary content
        return createSingleBitstream(is, "ORIGINAL");
    }
/**
* Get all non-internal bitstreams in the item. This is mainly used for
* auditing for provenance messages and adding format.* DC values. The order
* is indeterminate.
*
* @return non-internal bitstreams.
*/
public Bitstream[] getNonInternalBitstreams() throws SQLException {
List<Bitstream> bitstreamList = new ArrayList<Bitstream>();
// Go through the bundles and bitstreams picking out ones which aren't
// of internal formats
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
Bitstream[] bitstreams = bunds[i].getBitstreams();
for (int j = 0; j < bitstreams.length; j++) {
if (!bitstreams[j].getFormat().isInternal()) {
// Bitstream is not of an internal format
bitstreamList.add(bitstreams[j]);
}
}
}
return bitstreamList.toArray(new Bitstream[bitstreamList.size()]);
}
/**
* Remove just the DSpace license from an item This is useful to update the
* current DSpace license, in case the user must accept the DSpace license
* again (either the item was rejected, or resumed after saving)
* <p>
* This method is used by the org.dspace.submit.step.LicenseStep class
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void removeDSpaceLicense() throws SQLException, AuthorizeException,
IOException {
// get all bundles with name "LICENSE" (these are the DSpace license
// bundles)
Bundle[] bunds = getBundles("LICENSE");
for (int i = 0; i < bunds.length; i++) {
// FIXME: probably serious troubles with Authorizations
// fix by telling system not to check authorization?
removeBundle(bunds[i]);
}
}
/**
* Remove all licenses from an item - it was rejected
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void removeLicenses() throws SQLException, AuthorizeException,
IOException {
// Find the License format
BitstreamFormat bf = BitstreamFormat.findByShortDescription(ourContext,
"License");
int licensetype = bf.getID();
// search through bundles, looking for bitstream type license
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
boolean removethisbundle = false;
Bitstream[] bits = bunds[i].getBitstreams();
for (int j = 0; j < bits.length; j++) {
BitstreamFormat bft = bits[j].getFormat();
if (bft.getID() == licensetype) {
removethisbundle = true;
}
}
// probably serious troubles with Authorizations
// fix by telling system not to check authorization?
if (removethisbundle) {
removeBundle(bunds[i]);
}
}
}
    /**
     * Update the item "in archive" flag and Dublin Core metadata in the
     * database.  Synchronises the in-memory metadata list with the
     * MetadataValue rows: existing rows that no longer match are deleted,
     * and in-memory values with no matching row are inserted.  Also assigns
     * sequence IDs to any bitstreams that do not yet have one.
     *
     * @throws SQLException
     * @throws AuthorizeException
     */
    public void update() throws SQLException, AuthorizeException {
        // Check authorisation
        // only do write authorization if user is not an editor
        if (!canEdit()) {
            AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
        }
        log.info(LogManager.getHeader(ourContext, "update_item", "item_id="
                + getID()));
        // Set sequence IDs for bitstreams in item
        int sequence = 0;
        Bundle[] bunds = getBundles();
        // find the highest current sequence number
        for (int i = 0; i < bunds.length; i++) {
            Bitstream[] streams = bunds[i].getBitstreams();
            for (int k = 0; k < streams.length; k++) {
                if (streams[k].getSequenceID() > sequence) {
                    sequence = streams[k].getSequenceID();
                }
            }
        }
        // start sequencing bitstreams without sequence IDs (< 0 means unset)
        sequence++;
        for (int i = 0; i < bunds.length; i++) {
            Bitstream[] streams = bunds[i].getBitstreams();
            for (int k = 0; k < streams.length; k++) {
                if (streams[k].getSequenceID() < 0) {
                    streams[k].setSequenceID(sequence);
                    sequence++;
                    streams[k].update();
                    modified = true;
                }
            }
        }
        // Map counting number of values for each element/qualifier.
        // Keys are Strings: "element" or "element.qualifier"
        // Values are Integers indicating number of values written for a
        // element/qualifier
        Map<String, Integer> elementCount = new HashMap<String, Integer>();
        // Redo Dublin Core if it's changed
        if (dublinCoreChanged) {
            dublinCoreChanged = false;
            // Arrays to store the working information required, indexed in
            // parallel with the in-memory metadata list
            int[] placeNum = new int[getMetadata().size()];
            boolean[] storedDC = new boolean[getMetadata().size()];
            MetadataField[] dcFields = new MetadataField[getMetadata().size()];
            // Work out the place numbers for the in memory DC
            for (int dcIdx = 0; dcIdx < getMetadata().size(); dcIdx++) {
                DCValue dcv = getMetadata().get(dcIdx);
                // Work out the place number for ordering
                int current = 0;
                // Key into map is "element" or "element.qualifier"
                String key = dcv.element + ((dcv.qualifier == null) ? "" : ("." + dcv.qualifier));
                Integer currentInteger = elementCount.get(key);
                if (currentInteger != null) {
                    current = currentInteger.intValue();
                }
                current++;
                elementCount.put(key, Integer.valueOf(current));
                // Store the calculated place number, reset the stored flag, and cache the metadatafield
                placeNum[dcIdx] = current;
                storedDC[dcIdx] = false;
                dcFields[dcIdx] = getMetadataField(dcv);
                if (dcFields[dcIdx] == null) {
                    // Bad DC field: not present in the registry, so it can
                    // never be written.  Log and throw.
                    log.warn(LogManager.getHeader(ourContext, "bad_dc",
                            "Bad DC field. schema=" + dcv.schema
                                    + ", element: \""
                                    + ((dcv.element == null) ? "null"
                                            : dcv.element)
                                    + "\" qualifier: \""
                                    + ((dcv.qualifier == null) ? "null"
                                            : dcv.qualifier)
                                    + "\" value: \""
                                    + ((dcv.value == null) ? "null"
                                            : dcv.value) + "\""));
                    throw new SQLException("bad_dublin_core "
                            + "schema=" + dcv.schema + ", "
                            + dcv.element
                            + " " + dcv.qualifier);
                }
            }
            // Now the precalculations are done, iterate through the existing metadata
            // looking for matches
            TableRowIterator tri = retrieveMetadata();
            if (tri != null) {
                try {
                    while (tri.hasNext()) {
                        TableRow tr = tri.next();
                        // Assume that we will remove this row, unless we get a match
                        boolean removeRow = true;
                        // Go through the in-memory metadata, unless we've already decided to keep this row
                        for (int dcIdx = 0; dcIdx < getMetadata().size() && removeRow; dcIdx++) {
                            // Only process if this metadata has not already been matched to something in the DB
                            if (!storedDC[dcIdx]) {
                                boolean matched = true;
                                DCValue dcv = getMetadata().get(dcIdx);
                                // Check the metadata field is the same
                                if (matched && dcFields[dcIdx].getFieldID() != tr.getIntColumn("metadata_field_id")) {
                                    matched = false;
                                }
                                // Check the place is the same
                                if (matched && placeNum[dcIdx] != tr.getIntColumn("place")) {
                                    matched = false;
                                }
                                // Check the text is the same (null-safe)
                                if (matched) {
                                    String text = tr.getStringColumn("text_value");
                                    if (dcv.value == null && text == null) {
                                        matched = true;
                                    } else if (dcv.value != null && dcv.value.equals(text)) {
                                        matched = true;
                                    } else {
                                        matched = false;
                                    }
                                }
                                // Check the language is the same (null-safe)
                                if (matched) {
                                    String lang = tr.getStringColumn("text_lang");
                                    if (dcv.language == null && lang == null) {
                                        matched = true;
                                    } else if (dcv.language != null && dcv.language.equals(lang)) {
                                        matched = true;
                                    } else {
                                        matched = false;
                                    }
                                }
                                // check that authority and confidence match
                                if (matched) {
                                    String auth = tr.getStringColumn("authority");
                                    int conf = tr.getIntColumn("confidence");
                                    if (!((dcv.authority == null && auth == null)
                                            || (dcv.authority != null && auth != null && dcv.authority.equals(auth))
                                            && dcv.confidence == conf)) {
                                        matched = false;
                                    }
                                }
                                // If the db record is identical to the in memory values
                                if (matched) {
                                    // Flag that the metadata is already in the DB
                                    storedDC[dcIdx] = true;
                                    // Flag that we are not going to remove the row
                                    removeRow = false;
                                }
                            }
                        }
                        // If after processing all the metadata values, we didn't find a match
                        // delete this row from the DB
                        if (removeRow) {
                            DatabaseManager.delete(ourContext, tr);
                            dublinCoreChanged = true;
                            modified = true;
                        }
                    }
                } finally {
                    tri.close();
                }
            }
            // Add missing in-memory DC
            for (int dcIdx = 0; dcIdx < getMetadata().size(); dcIdx++) {
                // Only write values that are not already in the db
                if (!storedDC[dcIdx]) {
                    DCValue dcv = getMetadata().get(dcIdx);
                    // Write DCValue
                    MetadataValue metadata = new MetadataValue();
                    metadata.setItemId(getID());
                    metadata.setFieldId(dcFields[dcIdx].getFieldID());
                    metadata.setValue(dcv.value);
                    metadata.setLanguage(dcv.language);
                    metadata.setPlace(placeNum[dcIdx]);
                    metadata.setAuthority(dcv.authority);
                    metadata.setConfidence(dcv.confidence);
                    metadata.create(ourContext);
                    dublinCoreChanged = true;
                    modified = true;
                }
            }
        }
        if (dublinCoreChanged || modified) {
            // Set the last modified date
            itemRow.setColumn("last_modified", new Date());
            // Make sure that withdrawn and in_archive are non-null
            if (itemRow.isColumnNull("in_archive")) {
                itemRow.setColumn("in_archive", false);
            }
            if (itemRow.isColumnNull("withdrawn")) {
                itemRow.setColumn("withdrawn", false);
            }
            DatabaseManager.update(ourContext, itemRow);
            if (dublinCoreChanged) {
                ourContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.ITEM, getID(), getDetails()));
                clearDetails();
                dublinCoreChanged = false;
            }
            ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(), null));
            modified = false;
        }
        // this.updateCitationString();
        // this.updateISSN();
        // this.updateSubjectFields();
    }
private transient MetadataField[] allMetadataFields = null;
private MetadataField getMetadataField(DCValue dcv) throws SQLException, AuthorizeException {
if (allMetadataFields == null) {
allMetadataFields = MetadataField.findAll(ourContext);
}
if (allMetadataFields != null) {
int schemaID = getMetadataSchemaID(dcv);
for (MetadataField field : allMetadataFields) {
if (field.getSchemaID() == schemaID
&& StringUtils.equals(field.getElement(), dcv.element)
&& StringUtils.equals(field.getQualifier(), dcv.qualifier)) {
return field;
}
}
}
return null;
}
private int getMetadataSchemaID(DCValue dcv) throws SQLException {
int schemaID;
MetadataSchema schema = MetadataSchema.find(ourContext, dcv.schema);
if (schema == null) {
schemaID = MetadataSchema.DC_SCHEMA_ID;
} else {
schemaID = schema.getSchemaID();
}
return schemaID;
}
/**
* Withdraw the item from the archive. It is kept in place, and the content
* and metadata are not deleted, but it is not publicly accessible.
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void withdraw() throws SQLException, AuthorizeException, IOException {
// Check permission. User either has to have REMOVE on owning collection
// or be COLLECTION_EDITOR of owning collection
AuthorizeUtil.authorizeWithdrawItem(ourContext, this);
String timestamp = DCDate.getCurrent().toString();
// Add suitable provenance - includes user, date, collections +
// bitstream checksums
EPerson e = ourContext.getCurrentUser();
// Build some provenance data while we're at it.
StringBuilder prov = new StringBuilder();
prov.append("Item withdrawn by ").append(e.getFullName()).append(" (").append(e.getEmail()).append(") on ").append(timestamp).append("\n").append("Item was in collections:\n");
Collection[] colls = getCollections();
for (int i = 0; i < colls.length; i++) {
prov.append(colls[i].getMetadata("name")).append(" (ID: ").append(colls[i].getID()).append(")\n");
}
// Set withdrawn flag. timestamp will be set; last_modified in update()
itemRow.setColumn("withdrawn", true);
// in_archive flag is now false
itemRow.setColumn("in_archive", false);
prov.append(InstallItem.getBitstreamProvenanceMessage(this));
addDC("description", "provenance", "en", prov.toString());
// Update item in DB
update();
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(), "WITHDRAW"));
// and all of our authorization policies
// FIXME: not very "multiple-inclusion" friendly
AuthorizeManager.removeAllPolicies(ourContext, this);
// Write log
log.info(LogManager.getHeader(ourContext, "withdraw_item", "user="
+ e.getEmail() + ",item_id=" + getID()));
}
/**
* Reinstate a withdrawn item
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void reinstate() throws SQLException, AuthorizeException,
IOException {
// check authorization
AuthorizeUtil.authorizeReinstateItem(ourContext, this);
String timestamp = DCDate.getCurrent().toString();
// Check permission. User must have ADD on all collections.
// Build some provenance data while we're at it.
Collection[] colls = getCollections();
// Add suitable provenance - includes user, date, collections +
// bitstream checksums
EPerson e = ourContext.getCurrentUser();
StringBuilder prov = new StringBuilder();
prov.append("Item reinstated by ").append(e.getFullName()).append(" (").append(e.getEmail()).append(") on ").append(timestamp).append("\n").append("Item was in collections:\n");
for (int i = 0; i < colls.length; i++) {
prov.append(colls[i].getMetadata("name")).append(" (ID: ").append(colls[i].getID()).append(")\n");
}
// Clear withdrawn flag
itemRow.setColumn("withdrawn", false);
// in_archive flag is now true
itemRow.setColumn("in_archive", true);
// Add suitable provenance - includes user, date, collections +
// bitstream checksums
prov.append(InstallItem.getBitstreamProvenanceMessage(this));
addDC("description", "provenance", "en", prov.toString());
// Update item in DB
update();
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(), "REINSTATE"));
// authorization policies
if (colls.length > 0) {
// FIXME: not multiple inclusion friendly - just apply access
// policies from first collection
// remove the item's policies and replace them with
// the defaults from the collection
inheritCollectionDefaultPolicies(colls[0]);
}
// Write log
log.info(LogManager.getHeader(ourContext, "reinstate_item", "user="
+ e.getEmail() + ",item_id=" + getID()));
}
    /**
     * Delete (expunge) the item. Bundles and bitstreams are also deleted if
     * they are not also included in another item. The Dublin Core metadata is
     * deleted.
     *
     * @throws SQLException
     * @throws AuthorizeException
     * @throws IOException
     */
    void delete() throws SQLException, AuthorizeException, IOException {
        // Check authorisation here. If we don't, it may happen that we remove the
        // metadata but when getting to the point of removing the bundles we get an exception
        // leaving the database in an inconsistent state
        AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
        ourContext.addEvent(new Event(Event.DELETE, Constants.ITEM, getID(), getHandle()));
        log.info(LogManager.getHeader(ourContext, "delete_item", "item_id="
                + getID()));
        // Remove from cache
        ourContext.removeCached(this, getID());
        // Remove from browse indices, if appropriate
        /** XXX FIXME
         ** Although all other Browse index updates are managed through
         ** Event consumers, removing an Item *must* be done *here* (inline)
         ** because otherwise, tables are left in an inconsistent state
         ** and the DB transaction will fail.
         ** Any fix would involve too much work on Browse code that
         ** is likely to be replaced soon anyway. --lcs, Aug 2006
         **
         ** NB Do not check to see if the item is archived - withdrawn /
         ** non-archived items may still be tracked in some browse tables
         ** for administrative purposes, and these need to be removed.
         **/
        // FIXME: there is an exception handling problem here
        try {
            // Remove from indices
            IndexBrowse ib = new IndexBrowse(ourContext);
            ib.itemRemoved(this);
        } catch (BrowseException e) {
            // Re-wrap as SQLException so callers see a single failure type;
            // original exception is preserved as the cause
            log.error("caught exception: ", e);
            throw new SQLException(e.getMessage(), e);
        }
        // Delete the Dublin Core
        removeMetadataFromDatabase();
        // Remove bundles (orphaned bundles are deleted by removeBundle)
        Bundle[] bunds = getBundles();
        for (int i = 0; i < bunds.length; i++) {
            removeBundle(bunds[i]);
        }
        // remove all of our authorization policies
        AuthorizeManager.removeAllPolicies(ourContext, this);
        // Remove any Handle
        HandleManager.unbindHandle(ourContext, this);
        // Finally remove item row
        DatabaseManager.delete(ourContext, itemRow);
    }
/**
* Remove item and all its sub-structure from the context cache.
* Useful in batch processes where a single context has a long,
* multi-item lifespan
*/
public void decache() throws SQLException {
// Remove item and it's submitter from cache
ourContext.removeCached(this, getID());
if (submitter != null) {
ourContext.removeCached(submitter, submitter.getID());
}
// Remove bundles & bitstreams from cache if they have been loaded
if (bundles != null) {
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
ourContext.removeCached(bunds[i], bunds[i].getID());
Bitstream[] bitstreams = bunds[i].getBitstreams();
for (int j = 0; j < bitstreams.length; j++) {
ourContext.removeCached(bitstreams[j], bitstreams[j].getID());
}
}
}
}
/**
* Return <code>true</code> if <code>other</code> is the same Item as
* this object, <code>false</code> otherwise
*
* @param obj
* object to compare to
* @return <code>true</code> if object passed in represents the same item
* as this object
*/
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Item other = (Item) obj;
if (this.getType() != other.getType()) {
return false;
}
if (this.getID() != other.getID()) {
return false;
}
return true;
}
@Override
public int hashCode() {
int hash = 5;
hash = 71 * hash + (this.itemRow != null ? this.itemRow.hashCode() : 0);
return hash;
}
/**
* Return true if this Collection 'owns' this item
*
* @param c
* Collection
* @return true if this Collection owns this item
*/
public boolean isOwningCollection(Collection c) {
int owner_id = itemRow.getIntColumn("owning_collection");
if (c.getID() == owner_id) {
return true;
}
// not the owner
return false;
}
    /**
     * Utility method to remove all descriptive metadata associated with the item from
     * the database (regardless of in-memory version)
     *
     * @throws SQLException
     */
    private void removeMetadataFromDatabase() throws SQLException {
        // Bulk-delete every MetadataValue row for this item; the in-memory
        // metadata list is intentionally left untouched.
        DatabaseManager.updateQuery(ourContext,
                "DELETE FROM MetadataValue WHERE item_id= ? ",
                getID());
    }
    /**
     * return type found in Constants
     *
     * @return int Constants.ITEM
     */
    public int getType() {
        // This object is always an Item, so the DSpace object type is fixed
        return Constants.ITEM;
    }
/**
* remove all of the policies for item and replace them with a new list of
* policies
*
* @param newpolicies -
* this will be all of the new policies for the item and its
* contents
* @throws SQLException
* @throws AuthorizeException
*/
public void replaceAllItemPolicies(List<ResourcePolicy> newpolicies) throws SQLException,
AuthorizeException {
// remove all our policies, add new ones
AuthorizeManager.removeAllPolicies(ourContext, this);
AuthorizeManager.addPolicies(ourContext, newpolicies, this);
}
/**
* remove all of the policies for item's bitstreams and bundles and replace
* them with a new list of policies
*
* @param newpolicies -
* this will be all of the new policies for the bundle and
* bitstream contents
* @throws SQLException
* @throws AuthorizeException
*/
public void replaceAllBitstreamPolicies(List<ResourcePolicy> newpolicies)
throws SQLException, AuthorizeException {
// remove all policies from bundles, add new ones
// Remove bundles
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
Bundle mybundle = bunds[i];
mybundle.replaceAllBitstreamPolicies(newpolicies);
}
}
/**
* remove all of the policies for item's bitstreams and bundles that belong
* to a given Group
*
* @param g
* Group referenced by policies that needs to be removed
* @throws SQLException
*/
public void removeGroupPolicies(Group g) throws SQLException {
// remove Group's policies from Item
AuthorizeManager.removeGroupPolicies(ourContext, this, g);
// remove all policies from bundles
Bundle[] bunds = getBundles();
for (int i = 0; i < bunds.length; i++) {
Bundle mybundle = bunds[i];
Bitstream[] bs = mybundle.getBitstreams();
for (int j = 0; j < bs.length; j++) {
// remove bitstream policies
AuthorizeManager.removeGroupPolicies(ourContext, bs[j], g);
}
// change bundle policies
AuthorizeManager.removeGroupPolicies(ourContext, mybundle, g);
}
}
/**
* remove all policies on an item and its contents, and replace them with
* the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to
* the collection.
*
* @param c
* Collection
* @throws java.sql.SQLException
* if an SQL error or if no default policies found. It's a bit
* draconian, but default policies must be enforced.
* @throws AuthorizeException
*/
public void inheritCollectionDefaultPolicies(Collection c)
throws java.sql.SQLException, AuthorizeException {
// remove the submit authorization policies
// and replace them with the collection's default READ policies
List<ResourcePolicy> policies = AuthorizeManager.getPoliciesActionFilter(ourContext, c, Constants.DEFAULT_ITEM_READ);
// MUST have default policies
if (policies.size() < 1) {
throw new java.sql.SQLException("Collection " + c.getID()
+ " has no default item READ policies");
}
// change the action to just READ
// just don't call update on the resourcepolicies!!!
for (ResourcePolicy rp : policies) {
rp.setAction(Constants.READ);
}
replaceAllItemPolicies(policies);
policies = AuthorizeManager.getPoliciesActionFilter(ourContext, c, Constants.DEFAULT_BITSTREAM_READ);
if (policies.size() < 1) {
throw new java.sql.SQLException("Collection " + c.getID()
+ " has no default bitstream READ policies");
}
// change the action to just READ
// just don't call update on the resourcepolicies!!!
for (ResourcePolicy rp : policies) {
rp.setAction(Constants.READ);
}
replaceAllBitstreamPolicies(policies);
log.debug(LogManager.getHeader(ourContext, "item_inheritCollectionDefaultPolicies",
"item_id=" + getID()));
}
    /**
     * Moves the item from one collection to another one.
     *
     * Convenience overload of the three-argument move that does NOT inherit
     * the destination collection's default policies.
     *
     * @param from the collection the item currently belongs to
     * @param to the destination collection
     * @throws SQLException
     * @throws AuthorizeException
     * @throws IOException
     */
    public void move(Collection from, Collection to) throws SQLException, AuthorizeException, IOException {
        // Use the normal move method, and default to not inherit permissions
        this.move(from, to, false);
    }
/**
* Moves the item from one collection to another one
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void move(Collection from, Collection to, boolean inheritDefaultPolicies) throws SQLException, AuthorizeException, IOException {
// Check authorisation on the item before that the move occur
// otherwise we will need edit permission on the "target collection" to archive our goal
// only do write authorization if user is not an editor
if (!canEdit()) {
AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
}
// Move the Item from one Collection to the other
to.addItem(this);
from.removeItem(this);
// If we are moving from the owning collection, update that too
if (isOwningCollection(from)) {
// Update the owning collection
log.info(LogManager.getHeader(ourContext, "move_item",
"item_id=" + getID() + ", from "
+ "collection_id=" + from.getID() + " to "
+ "collection_id=" + to.getID()));
setOwningCollection(to);
// If applicable, update the item policies
if (inheritDefaultPolicies) {
log.info(LogManager.getHeader(ourContext, "move_item",
"Updating item with inherited policies"));
inheritCollectionDefaultPolicies(to);
}
// Update the item
ourContext.turnOffAuthorisationSystem();
update();
ourContext.restoreAuthSystemState();
} else {
// Although we haven't actually updated anything within the item
// we'll tell the event system that it has, so that any consumers that
// care about the structure of the repository can take account of the move
// Note that updating the owning collection above will have the same effect,
// so we only do this here if the owning collection hasn't changed.
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(), null));
}
}
/**
* Check the bundle ORIGINAL to see if there are any uploaded files
*
* @return true if there is a bundle named ORIGINAL with one or more
* bitstreams inside
* @throws SQLException
*/
public boolean hasUploadedFiles() throws SQLException {
Bundle[] bundles = getBundles("ORIGINAL");
if (bundles.length == 0) {
// if no ORIGINAL bundle,
// return false that there is no file!
return false;
} else {
Bitstream[] bitstreams = bundles[0].getBitstreams();
if (bitstreams.length == 0) {
// no files in ORIGINAL bundle!
return false;
}
}
return true;
}
/**
* Get the collections this item is not in.
*
* @return the collections this item is not in, if any.
* @throws SQLException
*/
public Collection[] getCollectionsNotLinked() throws SQLException {
Collection[] allCollections = Collection.findAll(ourContext);
Collection[] linkedCollections = getCollections();
Collection[] notLinkedCollections = new Collection[allCollections.length - linkedCollections.length];
if ((allCollections.length - linkedCollections.length) == 0) {
return notLinkedCollections;
}
int i = 0;
for (Collection collection : allCollections) {
boolean alreadyLinked = false;
for (Collection linkedCommunity : linkedCollections) {
if (collection.getID() == linkedCommunity.getID()) {
alreadyLinked = true;
break;
}
}
if (!alreadyLinked) {
notLinkedCollections[i++] = collection;
}
}
return notLinkedCollections;
}
/**
* return TRUE if context's user can edit item, false otherwise
*
* @return boolean true = current user can edit item
* @throws SQLException
*/
public boolean canEdit() throws java.sql.SQLException {
// can this person write to the item?
if (AuthorizeManager.authorizeActionBoolean(ourContext, this,
Constants.WRITE)) {
return true;
}
// is this collection not yet created, and an item template is created
if (getOwningCollection() == null) {
return true;
}
// is this person an COLLECTION_EDITOR for the owning collection?
if (getOwningCollection().canEditBoolean(false)) {
return true;
}
return false;
}
public String getName() {
DCValue t[] = getMetadata("dc", "title", null, Item.ANY);
return (t.length >= 1) ? t[0].value : null;
}
    /**
     * Returns an iterator of Items possessing the passed metadata field, or only
     * those matching the passed value, if value is not Item.ANY
     *
     * @param context DSpace context object
     * @param schema metadata field schema
     * @param element metadata field element
     * @param qualifier metadata field qualifier
     * @param value field value or Item.ANY to match any value
     * @return an iterator over the items matching that metadata field/value
     * @throws SQLException, AuthorizeException, IOException
     * @throws IllegalArgumentException if the schema or field is not registered
     *
     */
    public static ItemIterator findByMetadataField(Context context,
            String schema, String element, String qualifier, String value)
            throws SQLException, AuthorizeException, IOException {
        // Resolve the schema name to its registry entry
        MetadataSchema mds = MetadataSchema.find(context, schema);
        if (mds == null) {
            throw new IllegalArgumentException("No such metadata schema: " + schema);
        }
        MetadataField mdf = MetadataField.findByElement(context, mds.getSchemaID(), element, qualifier);
        if (mdf == null) {
            throw new IllegalArgumentException(
                    "No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier);
        }
        // Only archived items are matched
        String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' "
                + "AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
        TableRowIterator rows = null;
        if (Item.ANY.equals(value)) {
            // match on field presence only
            rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
        } else {
            // additionally match the exact text value
            query += " AND metadatavalue.text_value = ?";
            rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID(), value);
        }
        // NOTE(review): rows is handed to ItemIterator, which presumably
        // closes it when exhausted — verify against ItemIterator's contract.
        return new ItemIterator(context, rows);
    }
    /**
     * Return the object against which admin rights for the given action should
     * be checked: this item, its owning collection, or the first parent
     * community, depending on the AuthorizeConfiguration switches.
     *
     * @param action one of the Constants action IDs (ADD, REMOVE, DELETE, WRITE, ...)
     * @return the DSpaceObject whose admins may perform the action, or null if
     *         no configured admin level is allowed to
     * @throws SQLException
     */
    public DSpaceObject getAdminObject(int action) throws SQLException {
        DSpaceObject adminObject = null;
        Collection collection = getOwningCollection();
        Community community = null;
        if (collection != null) {
            // Use the first parent community as the representative community
            Community[] communities = collection.getCommunities();
            if (communities != null && communities.length > 0) {
                community = communities[0];
            }
        } else {
            // is a template item? Its "owning" collection is the one that
            // references it via template_item_id
            TableRow qResult = DatabaseManager.querySingle(ourContext,
                    "SELECT collection_id FROM collection "
                            + "WHERE template_item_id = ?", getID());
            if (qResult != null) {
                collection = Collection.find(ourContext, qResult.getIntColumn("collection_id"));
                Community[] communities = collection.getCommunities();
                if (communities != null && communities.length > 0) {
                    community = communities[0];
                }
            }
        }
        switch (action) {
            case Constants.ADD:
                // ADD a cc license is less general than add a bitstream but we can't/won't
                // add complex logic here to know if the ADD action on the item is required by a cc or
                // a generic bitstream so simply we ignore it.. UI need to enforce the requirements.
                if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) {
                    adminObject = this;
                } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamCreation()) {
                    adminObject = collection;
                } else if (AuthorizeConfiguration.canCommunityAdminPerformBitstreamCreation()) {
                    adminObject = community;
                }
                break;
            case Constants.REMOVE:
                // see comments on ADD action, same things...
                if (AuthorizeConfiguration.canItemAdminPerformBitstreamDeletion()) {
                    adminObject = this;
                } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) {
                    adminObject = collection;
                } else if (AuthorizeConfiguration.canCommunityAdminPerformBitstreamDeletion()) {
                    adminObject = community;
                }
                break;
            case Constants.DELETE:
                // deleting a template item is governed by the template-item
                // switches; deleting a regular item by the item-deletion ones
                if (getOwningCollection() != null) {
                    if (AuthorizeConfiguration.canCollectionAdminPerformItemDeletion()) {
                        adminObject = collection;
                    } else if (AuthorizeConfiguration.canCommunityAdminPerformItemDeletion()) {
                        adminObject = community;
                    }
                } else {
                    if (AuthorizeConfiguration.canCollectionAdminManageTemplateItem()) {
                        adminObject = collection;
                    } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionTemplateItem()) {
                        adminObject = community;
                    }
                }
                break;
            case Constants.WRITE:
                // if it is a template item we need to check the
                // collection/community admin configuration
                if (getOwningCollection() == null) {
                    if (AuthorizeConfiguration.canCollectionAdminManageTemplateItem()) {
                        adminObject = collection;
                    } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionTemplateItem()) {
                        adminObject = community;
                    }
                } else {
                    adminObject = this;
                }
                break;
            default:
                // all other actions are checked directly against the item
                adminObject = this;
                break;
        }
        return adminObject;
    }
public DSpaceObject getParentObject() throws SQLException {
Collection ownCollection = getOwningCollection();
if (ownCollection != null) {
return ownCollection;
} else {
// is a template item?
TableRow qResult = DatabaseManager.querySingle(ourContext,
"SELECT collection_id FROM collection "
+ "WHERE template_item_id = ?", getID());
if (qResult != null) {
return Collection.find(ourContext, qResult.getIntColumn("collection_id"));
}
return null;
}
}
    /**
     * Find all the items in the archive with a given authority key value
     * in the indicated metadata field.
     *
     * @param context DSpace context object
     * @param schema metadata field schema
     * @param element metadata field element
     * @param qualifier metadata field qualifier
     * @param value the value of authority key to look for
     * @return an iterator over the items matching that authority value
     * @throws SQLException, AuthorizeException, IOException
     * @throws IllegalArgumentException if the schema or field is not registered
     */
    public static ItemIterator findByAuthorityValue(Context context,
            String schema, String element, String qualifier, String value)
            throws SQLException, AuthorizeException, IOException {
        // Resolve the schema name to its registry entry
        MetadataSchema mds = MetadataSchema.find(context, schema);
        if (mds == null) {
            throw new IllegalArgumentException("No such metadata schema: " + schema);
        }
        MetadataField mdf = MetadataField.findByElement(context, mds.getSchemaID(), element, qualifier);
        if (mdf == null) {
            throw new IllegalArgumentException("No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier);
        }
        // Match on the authority column (not text_value); archived items only
        TableRowIterator rows = DatabaseManager.queryTable(context, "item",
                "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' "
                        + "AND item.item_id = metadatavalue.item_id AND metadata_field_id = ? AND authority = ?",
                mdf.getFieldID(), value);
        return new ItemIterator(context, rows);
    }
private List<DCValue> getMetadata() {
try {
return dublinCore.get(ourContext, getID(), log);
} catch (SQLException e) {
log.error("Loading item - cannot load metadata");
}
return new ArrayList<DCValue>();
}
    /**
     * Replace the in-memory metadata and flag it dirty so the next update()
     * persists the change.
     *
     * @param metadata the new metadata value list
     */
    private void setMetadata(List<DCValue> metadata) {
        dublinCore.set(metadata);
        dublinCoreChanged = true;
    }
    /**
     * Lazy, per-item cache of metadata values. The first get() loads all
     * MetadataValue rows for the item; set() replaces the cached list.
     */
    class MetadataCache {
        // null = not yet loaded; loaded lazily by get()
        List<DCValue> metadata = null;
        /**
         * Return the cached metadata, loading it from the database on first use.
         *
         * @param c context used for registry lookups
         * @param itemId the item whose metadata to load (<= 0 loads nothing)
         * @param log logger for registry-inconsistency errors
         * @throws SQLException
         */
        List<DCValue> get(Context c, int itemId, Logger log) throws SQLException {
            if (metadata == null) {
                metadata = new ArrayList<DCValue>();
                // Get Dublin Core metadata
                TableRowIterator tri = retrieveMetadata(itemId);
                if (tri != null) {
                    try {
                        while (tri.hasNext()) {
                            TableRow resultRow = tri.next();
                            // Get the associated metadata field and schema information
                            int fieldID = resultRow.getIntColumn("metadata_field_id");
                            MetadataField field = MetadataField.find(c, fieldID);
                            if (field == null) {
                                // dangling row: field no longer in the registry
                                log.error("Loading item - cannot find metadata field " + fieldID);
                            } else {
                                MetadataSchema schema = MetadataSchema.find(c, field.getSchemaID());
                                if (schema == null) {
                                    // dangling field: schema no longer in the registry
                                    log.error("Loading item - cannot find metadata schema " + field.getSchemaID() + ", field " + fieldID);
                                } else {
                                    // Make a DCValue object
                                    DCValue dcv = new DCValue();
                                    dcv.element = field.getElement();
                                    dcv.qualifier = field.getQualifier();
                                    dcv.value = resultRow.getStringColumn("text_value");
                                    dcv.language = resultRow.getStringColumn("text_lang");
                                    //dcv.namespace = schema.getNamespace();
                                    dcv.schema = schema.getName();
                                    dcv.authority = resultRow.getStringColumn("authority");
                                    dcv.confidence = resultRow.getIntColumn("confidence");
                                    // Add it to the list
                                    metadata.add(dcv);
                                }
                            }
                        }
                    } finally {
                        // close the TableRowIterator to free up resources
                        if (tri != null) {
                            tri.close();
                        }
                    }
                }
            }
            return metadata;
        }
        /** Replace the cached list wholesale (marks nothing dirty itself). */
        void set(List<DCValue> m) {
            metadata = m;
        }
        /**
         * Query all metadata rows for the item, ordered by field then place.
         *
         * @return an open iterator (caller must close), or null for ids <= 0
         *         (e.g. an item not yet persisted)
         */
        TableRowIterator retrieveMetadata(int itemId) throws SQLException {
            if (itemId > 0) {
                return DatabaseManager.queryTable(ourContext, "MetadataValue",
                        "SELECT * FROM MetadataValue WHERE item_id= ? ORDER BY metadata_field_id, place",
                        itemId);
            }
            return null;
        }
    }
// Edited by Dimitri Surinx START
public static int[] latestAdditionsId(Context context) throws SQLException {
String query = "SELECT item.item_id FROM item,handle WHERE item.item_id = handle.resource_id AND handle.resource_type_id = 2 AND in_archive AND NOT withdrawn ORDER BY handle.handle_id DESC ";
PreparedStatement statement = context.getDBConnection().prepareStatement(query);
int[] ids = new int[5];
ResultSet rs = statement.executeQuery();
for (int i = 0; i < 5 && rs.next(); i++) {
ids[i] = rs.getInt("item_id");
}
return ids;
}
public static int returnId(Context context, String element, String qualifier) throws SQLException {
String query = null;
PreparedStatement statement = null;
if (qualifier.equals("")) {
query = "SELECT metadata_field_id FROM metadatafieldregistry where metadatafieldregistry.element = ?";
statement = context.getDBConnection().prepareStatement(query);
statement.setString(1, element);
} else {
query = "SELECT metadata_field_id FROM metadatafieldregistry where metadatafieldregistry.element = ? AND metadatafieldregistry.qualifier = ?";
statement = context.getDBConnection().prepareStatement(query);
statement.setString(1, element);
statement.setString(2, qualifier);
}
ResultSet rs = statement.executeQuery();
if (rs.next()) {
return rs.getInt("metadata_field_id");
} else {
return 0;
}
}
    /**
     * Convenience overload: resolve the metadata field id from element/qualifier
     * first, then delegate.
     *
     * @param context DSpace context supplying the DB connection
     * @param id the item id
     * @param element metadata element name
     * @param qualifier metadata qualifier, or "" for element-only
     * @throws SQLException
     */
    public static List<String> latestAdditionsText(Context context, int id, String element, String qualifier) throws SQLException {
        int regId = returnId(context, element, qualifier);
        return latestAdditionsText(context, id, regId);
    }
    /**
     * Convenience overload with the default limit of 3 values.
     *
     * @throws SQLException
     */
    public static List<String> latestAdditionsText(Context context, int id, int regId) throws SQLException {
        return latestAdditionsText(context, id, regId, 3);
    }
public static List<String> latestAdditionsText(Context context, int id, int regId, int amount) throws SQLException {
String query = "SELECT text_value FROM item item,metadatavalue where item.item_id = metadatavalue.item_id AND item.item_id = ? AND metadata_field_id = ? ORDER BY last_modified DESC";
PreparedStatement statement = context.getDBConnection().prepareStatement(query);
List<String> titles = new ArrayList<String>();
String citation = "";
statement.setInt(1, id);
statement.setInt(2, regId);
int i = 1;
ResultSet rs = statement.executeQuery();
for (i = 0; (i < amount || (amount == 0)) && rs.next(); i++) {
if (i == 0) {
titles.clear();
}
titles.add(rs.getString("text_value"));
}
return titles;
}
public static String getHandleMod(Context context, int regId) throws SQLException {
String query = "SELECT handle FROM handle where resource_type_id = 2 AND resource_id = ? ";
PreparedStatement statement = context.getDBConnection().prepareStatement(query);
String title = "";
statement.setInt(1, regId);
ResultSet rs = statement.executeQuery();
while (rs.next()) {
title = rs.getString("handle");
}
return title;
}
// Edited by Dimitri Surinx STOP
public boolean updateCitationString() {
try {
CitationManager cm = new CitationManager();
String cit = cm.updateCitationString(this);
this.update();
return true;
} catch (Exception e) {
log.error("Caught exception in submission step: ", e);
return false;
}
}
    /**
     * Mirror the authority key of the first dc.bibliographicCitation.title
     * value into dc.identifier.issn, replacing any existing issn values.
     * No-op when the item has no bibliographicCitation.title.
     *
     * @return true on success (including the no-op case), false when any
     *         exception occurred (logged; deliberate best-effort)
     */
    public boolean updateISSN() {
        try {
            DCValue[] titles = this.getMetadata(MetadataSchema.DC_SCHEMA, "bibliographicCitation", "title", Item.ANY);
            if (titles.length > 0) {
                // NOTE(review): the authority of the citation title is assumed
                // to hold the ISSN — confirm against the submission workflow
                String issn = titles[0].authority;
                this.clearMetadata(MetadataSchema.DC_SCHEMA, "identifier", "issn", Item.ANY);
                if (issn != null && !"".equals(issn)) {
                    this.addMetadata(MetadataSchema.DC_SCHEMA, "identifier", "issn", Item.ANY, issn);
                }
                this.update();
            }
            return true;
        } catch (Exception e) {
            log.error(Item.class.getName() + ": ", e);
            return false;
        }
    }
    /**
     * For each configured source field (dc.needURI), collect the distinct
     * authority keys of its values and rewrite the corresponding target field
     * (dc.mapTo entry at the same index) with URI values built as
     * prefix (dc.URIs entry) + authority key.
     *
     * The three comma-separated configuration lists are aligned by index;
     * processing is truncated to the shortest list. Best-effort: any
     * exception is logged and swallowed.
     */
    public void updateSubjectFields() {
        try {
            // aligned, comma-separated configuration lists
            String[] dcToBeMapped = ConfigurationManager.getProperty("dc.needURI").split(",");
            String[] dcMapTo = ConfigurationManager.getProperty("dc.mapURI").split(",");
            String[] dcURIs = ConfigurationManager.getProperty("dc.URIs").split(",");
            // only process indices present in all three lists
            int len = dcToBeMapped.length;
            if (dcMapTo.length < len) {
                len = dcMapTo.length;
            }
            if (dcURIs.length < len) {
                len = dcURIs.length;
            }
            ArrayList<String> authorityDone = new ArrayList<String>();
            DCValue[] dcv;
            String[] qual;
            for (int i = 0; i < len; i++) {
                authorityDone.clear();
                dcv = getMetadata(dcToBeMapped[i]);
                if (dcv != null) {
                    // collect distinct, non-empty authority keys of the source field
                    for (int j = 0; j < dcv.length; j++) {
                        if ((dcv[j].authority != null) && (!"".equals(dcv[j].authority)) && !authorityDone.contains(dcv[j].authority)) {
                            authorityDone.add(dcv[j].authority);
                        }
                    }
                    qual = splitFieldName(dcMapTo[i]);
                    // only rewrite when the target parses as schema.element[.qualifier]
                    if (qual.length >= 2) {
                        //clear old uri first
                        this.clearMetadata(qual[0], qual[1], qual.length == 3 ? qual[2] : null, ANY);
                        this.update();
                        for (String authority : authorityDone) {
                            this.addMetadata(qual[0], qual[1], qual.length == 3 ? qual[2] : null, ANY, dcURIs[i]+authority);
                        }
                        this.update();
                    }
                }
            }
        } catch (Exception e) {
            log.error(Item.class.getName() + ": ", e);
        }
    }
private String[] splitFieldName(String fieldname) {
String[] parts;
parts = fieldname.split("\\.");
if (parts.length == 2) {
String[] result = new String[2];
result[0] = parts[0];
result[1] = parts[1];
return result;
} else if (parts.length == 3) {
String[] result = new String[3];
result[0] = parts[0];
result[1] = parts[1];
result[2] = parts[2];
return result;
} else {
return new String[0];
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import org.dspace.content.authority.Choices;
/**
* Simple data structure-like class representing a Dublin Core value. It has an
* element, qualifier, value and language.
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision: 5844 $
*/
@Deprecated
public class DCValue {
    /** The DC element */
    public String element;
    /** The DC qualifier, or <code>null</code> if unqualified */
    public String qualifier;
    /** The value of the field */
    public String value;
    /** The language of the field, may be <code>null</code> */
    public String language;
    /** The schema name of the metadata element */
    public String schema;
    /** Authority control key */
    public String authority = null;
    /** Authority control confidence */
    public int confidence = Choices.CF_UNSET;
    /**
     * Populate schema, element and (when present) qualifier by splitting a
     * full field name such as "dc.title.alternative" on the given delimiter.
     * Does nothing when name or delimiter is null or empty, or when the name
     * yields fewer than two parts.
     *
     * @param name the full field name
     * @param delimiter the literal separator between the parts
     */
    public void setQuals(String name, String delimiter) {
        if ((name != null) && (!"".equals(name)) && (delimiter != null) && (!"".equals(delimiter))) {
            // Quote the delimiter so regex metacharacters ('.', '|', '+', ...)
            // are treated literally by String.split. The original only
            // special-cased "." and silently misparsed other metacharacter
            // delimiters.
            String[] parts = name.split(java.util.regex.Pattern.quote(delimiter));
            if (parts.length >= 2) {
                this.schema = parts[0];
                this.element = parts[1];
            }
            if (parts.length == 3) {
                this.qualifier = parts[2];
            }
        }
    }
    /**
     * Reassemble the full field name: schema + delimiter + element, plus
     * delimiter + qualifier when a non-empty qualifier is set.
     *
     * @param delimiter the separator to place between the parts
     * @return the joined field name
     */
    public String getFullName(String delimiter) {
        return this.schema + delimiter + this.element + ((this.qualifier == null || "".equals(this.qualifier)) ? "" : (delimiter + this.qualifier));
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
/**
* An exception that gets thrown when a metadata field cannot be created or
* saved due to an existing field with an identical element and qualifier.
*
* @author Martin Hald
*/
public class NonUniqueMetadataException extends Exception
{
    // Exception implements Serializable; declare a stable serialVersionUID
    // rather than relying on the compiler-generated one.
    private static final long serialVersionUID = 1L;

    /**
     * Create an exception with no detail message
     */
    public NonUniqueMetadataException()
    {
        super();
    }

    /**
     * Create an exception with only a message
     *
     * @param message a description of the element/qualifier collision
     */
    public NonUniqueMetadataException(String message)
    {
        super(message);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Class representing an item in the process of being submitted by a user
*
* @author Robert Tansley
* @version $Revision: 6137 $
*/
public class WorkspaceItem implements InProgressSubmission
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(WorkspaceItem.class);
    /** The (unarchived) item this workspace object pertains to */
    private Item item;
    /** The DSpace context this workspace item was loaded in */
    private Context ourContext;
    /** The table row corresponding to this workspace item */
    private TableRow wiRow;
    /** The collection the item is being submitted to */
    private Collection collection;
    /**
     * Construct a workspace item corresponding to the given database row.
     * The wrapped item and target collection are loaded eagerly, and the
     * new object is registered in the context cache.
     *
     * @param context
     *            the context this object exists in
     * @param row
     *            the database row
     */
    WorkspaceItem(Context context, TableRow row) throws SQLException
    {
        ourContext = context;
        wiRow = row;
        item = Item.find(context, wiRow.getIntColumn("item_id"));
        collection = Collection.find(context, wiRow
                .getIntColumn("collection_id"));
        // Cache ourselves
        context.cache(this, row.getIntColumn("workspace_item_id"));
    }
/**
* Get a workspace item from the database. The item, collection and
* submitter are loaded into memory.
*
* @param context
* DSpace context object
* @param id
* ID of the workspace item
*
* @return the workspace item, or null if the ID is invalid.
*/
public static WorkspaceItem find(Context context, int id)
throws SQLException
{
// First check the cache
WorkspaceItem fromCache = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, id);
if (fromCache != null)
{
return fromCache;
}
TableRow row = DatabaseManager.find(context, "workspaceitem", id);
if (row == null)
{
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_workspace_item",
"not_found,workspace_item_id=" + id));
}
return null;
}
else
{
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_workspace_item",
"workspace_item_id=" + id));
}
return new WorkspaceItem(context, row);
}
}
/**
* Create a new workspace item, with a new ID. An Item is also created. The
* submitter is the current user in the context.
*
* @param c
* DSpace context object
* @param coll
* Collection being submitted to
* @param template
* if <code>true</code>, the workspace item starts as a copy
* of the collection's template item
*
* @return the newly created workspace item
*/
public static WorkspaceItem create(Context c, Collection coll,
boolean template) throws AuthorizeException, SQLException,
IOException
{
// Check the user has permission to ADD to the collection
AuthorizeManager.authorizeAction(c, coll, Constants.ADD);
// Create an item
Item i = Item.create(c);
i.setSubmitter(c.getCurrentUser());
// Now create the policies for the submitter and workflow
// users to modify item and contents
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
Group step1group = coll.getWorkflowGroup(1);
Group step2group = coll.getWorkflowGroup(2);
Group step3group = coll.getWorkflowGroup(3);
EPerson e = c.getCurrentUser();
// read permission
AuthorizeManager.addPolicy(c, i, Constants.READ, e);
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step1group);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step2group);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step3group);
}
// write permission
AuthorizeManager.addPolicy(c, i, Constants.WRITE, e);
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step1group);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step2group);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step3group);
}
// add permission
AuthorizeManager.addPolicy(c, i, Constants.ADD, e);
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step1group);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step2group);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step3group);
}
// remove contents permission
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e);
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step1group);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step2group);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step3group);
}
// Copy template if appropriate
Item templateItem = coll.getTemplateItem();
if (template && (templateItem != null))
{
DCValue[] md = templateItem.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (int n = 0; n < md.length; n++)
{
i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
md[n].value);
}
}
i.update();
// Create the workspace item row
TableRow row = DatabaseManager.row("workspaceitem");
row.setColumn("item_id", i.getID());
row.setColumn("collection_id", coll.getID());
log.info(LogManager.getHeader(c, "create_workspace_item",
"workspace_item_id=" + row.getIntColumn("workspace_item_id")
+ "item_id=" + i.getID() + "collection_id="
+ coll.getID()));
DatabaseManager.insert(c, row);
WorkspaceItem wi = new WorkspaceItem(c, row);
return wi;
}
    /**
     * Get all workspace items for a particular e-person. These are ordered by
     * workspace item ID, since this should likely keep them in the order in
     * which they were created.
     *
     * @param context
     *            the context object
     * @param ep
     *            the eperson
     *
     * @return the corresponding workspace items
     */
    public static WorkspaceItem[] findByEPerson(Context context, EPerson ep)
            throws SQLException
    {
        List<WorkspaceItem> wsItems = new ArrayList<WorkspaceItem>();
        // join through item to filter on the submitter
        TableRowIterator tri = DatabaseManager.queryTable(context, "workspaceitem",
                "SELECT workspaceitem.* FROM workspaceitem, item WHERE " +
                "workspaceitem.item_id=item.item_id AND " +
                "item.submitter_id= ? " +
                "ORDER BY workspaceitem.workspace_item_id",
                ep.getID());
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                // Check the cache; reuse an existing instance when present
                WorkspaceItem wi = (WorkspaceItem) context.fromCache(
                        WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
                if (wi == null)
                {
                    wi = new WorkspaceItem(context, row);
                }
                wsItems.add(wi);
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        return wsItems.toArray(new WorkspaceItem[wsItems.size()]);
    }
    /**
     * Get all workspace items for a particular collection.
     *
     * @param context
     *            the context object
     * @param c
     *            the collection
     *
     * @return the corresponding workspace items
     */
    public static WorkspaceItem[] findByCollection(Context context, Collection c)
            throws SQLException
    {
        List<WorkspaceItem> wsItems = new ArrayList<WorkspaceItem>();
        TableRowIterator tri = DatabaseManager.queryTable(context, "workspaceitem",
                "SELECT workspaceitem.* FROM workspaceitem WHERE " +
                "workspaceitem.collection_id= ? ",
                c.getID());
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                // Check the cache; reuse an existing instance when present
                WorkspaceItem wi = (WorkspaceItem) context.fromCache(
                        WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
                // not in cache? turn row into workspaceitem
                if (wi == null)
                {
                    wi = new WorkspaceItem(context, row);
                }
                wsItems.add(wi);
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        return wsItems.toArray(new WorkspaceItem[wsItems.size()]);
    }
/**
* Check to see if a particular item is currently still in a user's Workspace.
* If so, its WorkspaceItem is returned. If not, null is returned
*
* @param context
* the context object
* @param i
* the item
*
* @return workflow item corresponding to the item, or null
*/
public static WorkspaceItem findByItem(Context context, Item i)
throws SQLException
{
// Look for the unique workspaceitem entry where 'item_id' references this item
TableRow row = DatabaseManager.findByUnique(context, "workspaceitem", "item_id", i.getID());
if (row == null)
{
return null;
}
else
{
return new WorkspaceItem(context, row);
}
}
    /**
     * Get all workspace items in the whole system
     *
     * @param context the context object
     *
     * @return all workspace items, ordered by item id
     */
    public static WorkspaceItem[] findAll(Context context)
            throws SQLException
    {
        List<WorkspaceItem> wsItems = new ArrayList<WorkspaceItem>();
        String query = "SELECT * FROM workspaceitem ORDER BY item_id";
        TableRowIterator tri = DatabaseManager.queryTable(context,
                "workspaceitem",
                query);
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                // Check the cache; reuse an existing instance when present
                WorkspaceItem wi = (WorkspaceItem) context.fromCache(
                        WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
                // not in cache? turn row into workspaceitem
                if (wi == null)
                {
                    wi = new WorkspaceItem(context, row);
                }
                wsItems.add(wi);
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        return wsItems.toArray(new WorkspaceItem[wsItems.size()]);
    }
    /**
     * Get the internal ID of this workspace item
     *
     * @return the internal identifier
     */
    public int getID()
    {
        return wiRow.getIntColumn("workspace_item_id");
    }
    /**
     * Get the value of the stage reached column
     *
     * @return the value of the stage reached column
     */
    public int getStageReached()
    {
        return wiRow.getIntColumn("stage_reached");
    }
    /**
     * Set the value of the stage reached column.
     * Only changes the in-memory row; call update() to persist.
     *
     * @param v
     *            the value of the stage reached column
     */
    public void setStageReached(int v)
    {
        wiRow.setColumn("stage_reached", v);
    }
    /**
     * Get the value of the page reached column (which represents the page
     * reached within a stage/step)
     *
     * @return the value of the page reached column
     */
    public int getPageReached()
    {
        return wiRow.getIntColumn("page_reached");
    }
    /**
     * Set the value of the page reached column (which represents the page
     * reached within a stage/step).
     * Only changes the in-memory row; call update() to persist.
     *
     * @param v
     *            the value of the page reached column
     */
    public void setPageReached(int v)
    {
        wiRow.setColumn("page_reached", v);
    }
    /**
     * Update the workspace item, including the unarchived item.
     */
    public void update() throws SQLException, AuthorizeException, IOException
    {
        // Authorisation is checked by the item.update() method below
        log.info(LogManager.getHeader(ourContext, "update_workspace_item",
                "workspace_item_id=" + getID()));
        // Update the item
        item.update();
        // Update ourselves
        DatabaseManager.update(ourContext, wiRow);
    }
/**
* Decide if this WorkspaceItem is equal to another
*
* @param o The other workspace item to compare to
* @return If they are equal or not
*/
public boolean equals(Object o) {
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
final WorkspaceItem that = (WorkspaceItem)o;
if (this.getID() != that.getID())
{
return false;
}
return true;
}
    /**
     * Hash code consistent with {@link #equals(Object)}: derived solely from
     * the workspace item ID.
     */
    public int hashCode()
    {
        return new HashCodeBuilder().append(getID()).toHashCode();
    }
    /**
     * Delete the workspace item. The entry in workspaceitem, the unarchived
     * item and its contents are all removed (multiple inclusion
     * notwithstanding.)
     */
    public void deleteAll() throws SQLException, AuthorizeException,
            IOException
    {
        /*
         * Authorisation is a special case. The submitter won't have REMOVE
         * permission on the collection, so our policy is this: Only the
         * original submitter or an administrator can delete a workspace item.
         */
        if (!AuthorizeManager.isAdmin(ourContext)
                && ((ourContext.getCurrentUser() == null) || (ourContext
                        .getCurrentUser().getID() != item.getSubmitter()
                        .getID())))
        {
            // Not an admin, not the submitter
            throw new AuthorizeException("Must be an administrator or the "
                    + "original submitter to delete a workspace item");
        }
        log.info(LogManager.getHeader(ourContext, "delete_workspace_item",
                "workspace_item_id=" + getID() + "item_id=" + item.getID()
                        + "collection_id=" + collection.getID()));
        //deleteSubmitPermissions();
        // Remove from cache
        ourContext.removeCached(this, getID());
        // Deletion order matters because of foreign keys:
        // Need to delete the epersongroup2workspaceitem row first since it refers
        // to workspaceitem ID
        deleteEpersonGroup2WorkspaceItem();
        // Need to delete the workspaceitem row first since it refers
        // to item ID
        DatabaseManager.delete(ourContext, wiRow);
        // Delete item
        item.delete();
    }
private void deleteEpersonGroup2WorkspaceItem() throws SQLException
{
String removeSQL="DELETE FROM epersongroup2workspaceitem WHERE workspace_item_id = ?";
DatabaseManager.updateQuery(ourContext, removeSQL,getID());
}
    public void deleteWrapper() throws SQLException, AuthorizeException,
            IOException
    {
        // Check authorisation. We check permissions on the enclosed item.
        AuthorizeManager.authorizeAction(ourContext, item, Constants.WRITE);

        log.info(LogManager.getHeader(ourContext, "delete_workspace_item",
                "workspace_item_id=" + getID() + "item_id=" + item.getID()
                        + "collection_id=" + collection.getID()));

        // deleteSubmitPermissions();

        // Remove this wrapper from the context cache
        ourContext.removeCached(this, getID());

        // Delete only the workspaceitem row (it refers to item ID);
        // unlike deleteAll(), the enclosed item is left intact.
        DatabaseManager.delete(ourContext, wiRow);
    }
// InProgressSubmission methods
public Item getItem()
{
return item;
}
public Collection getCollection()
{
return collection;
}
public EPerson getSubmitter() throws SQLException
{
return item.getSubmitter();
}
public boolean hasMultipleFiles()
{
return wiRow.getBooleanColumn("multiple_files");
}
public void setMultipleFiles(boolean b)
{
wiRow.setColumn("multiple_files", b);
}
public boolean hasMultipleTitles()
{
return wiRow.getBooleanColumn("multiple_titles");
}
public void setMultipleTitles(boolean b)
{
wiRow.setColumn("multiple_titles", b);
}
public boolean isPublishedBefore()
{
return wiRow.getBooleanColumn("published_before");
}
public void setPublishedBefore(boolean b)
{
wiRow.setColumn("published_before", b);
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* This class handles the recognition of bitstream formats, using the format
* registry in the database. For the moment, the format identifier simply uses
* file extensions stored in the "BitstreamFormatIdentifier" table. This
* probably isn't a particularly satisfactory long-term solution.
*
* @author Robert Tansley
* @version $Revision: 5844 $
*/
public class FormatIdentifier
{
    /**
     * Attempt to identify the format of a particular bitstream by looking up
     * its lowercased file extension in the format registry. If the format
     * cannot be determined, null is returned.
     *
     * @param context
     *            DSpace context object
     * @param bitstream
     *            the bitstream to identify the format of
     *
     * @return a format from the bitstream format registry, or null
     */
    public static BitstreamFormat guessFormat(Context context,
            Bitstream bitstream) throws SQLException
    {
        String filename = bitstream.getName();

        // Nothing to go on without a filename
        if (filename == null)
        {
            return null;
        }

        filename = filename.toLowerCase();

        // Take everything after the last dot as the extension; a name with
        // no dot is used whole
        int dotAt = filename.lastIndexOf('.');
        String extension = (dotAt == -1) ? filename
                : filename.substring(dotAt + 1);

        // A trailing dot yields an empty extension: format unknown
        if (extension.equals(""))
        {
            return null;
        }

        // Look the extension up in the fileextension table and join to the
        // format registry
        TableRowIterator tri = DatabaseManager.query(context,
                "SELECT bitstreamformatregistry.* FROM bitstreamformatregistry, " +
                "fileextension WHERE fileextension.extension LIKE ? " +
                "AND bitstreamformatregistry.bitstream_format_id=" +
                "fileextension.bitstream_format_id",
                extension);

        try
        {
            // Use the first matching registry entry, if any
            return tri.hasNext() ? new BitstreamFormat(context, tri.next())
                    : null;
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.io.InputStream;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.MissingResourceException;
import org.apache.log4j.Logger;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.browse.BrowseException;
import org.dspace.browse.IndexBrowse;
import org.dspace.browse.ItemCounter;
import org.dspace.browse.ItemCountException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.workflow.WorkflowItem;
/**
* Class representing a collection.
* <P>
* The collection's metadata (name, introductory text etc), workflow groups, and
* default group of submitters are loaded into memory. Changes to metadata are
* not written to the database until <code>update</code> is called. If you
* create or remove a workflow group, the change is only reflected in the
* database after calling <code>update</code>. The default group of
* submitters is slightly different - creating or removing this has instant
* effect.
*
* @author Robert Tansley
* @version $Revision: 5844 $
*/
public class Collection extends DSpaceObject
{
    /** log4j category */
    private static Logger log = Logger.getLogger(Collection.class);

    /** Our context */
    private Context ourContext;

    /** The table row corresponding to this collection */
    private TableRow collectionRow;

    /** The logo bitstream, or null if the collection has no logo */
    private Bitstream logo;

    /** The item template, or null if none has been created */
    private Item template;

    /** Our Handle */
    private String handle;

    /** Flag set when data is modified, for events */
    private boolean modified;

    /** Flag set when metadata is modified, for events */
    private boolean modifiedMetadata;

    /**
     * Groups corresponding to workflow steps - NOTE the steps are numbered
     * from one, so workflowGroup[0] corresponds to workflow_step_1.
     */
    private Group[] workflowGroup;

    /** The default group of submitters */
    private Group submitters;

    /** The default group of administrators */
    private Group admins;
/**
* Construct a collection with the given table row
*
* @param context
* the context this object exists in
* @param row
* the corresponding row in the table
* @throws SQLException
*/
    Collection(Context context, TableRow row) throws SQLException
    {
        ourContext = context;
        collectionRow = row;

        // Get the logo bitstream (a null column means no logo)
        if (collectionRow.isColumnNull("logo_bitstream_id"))
        {
            logo = null;
        }
        else
        {
            logo = Bitstream.find(ourContext, collectionRow
                    .getIntColumn("logo_bitstream_id"));
        }

        // Get the template item (a null column means no template)
        if (collectionRow.isColumnNull("template_item_id"))
        {
            template = null;
        }
        else
        {
            template = Item.find(ourContext, collectionRow
                    .getIntColumn("template_item_id"));
        }

        // Get the relevant groups; workflowGroup[0] maps to workflow_step_1
        workflowGroup = new Group[3];

        workflowGroup[0] = groupFromColumn("workflow_step_1");
        workflowGroup[1] = groupFromColumn("workflow_step_2");
        workflowGroup[2] = groupFromColumn("workflow_step_3");

        submitters = groupFromColumn("submitter");
        admins = groupFromColumn("admin");

        // Get our Handle if any
        handle = HandleManager.findHandle(context, this);

        // Cache ourselves so later find() calls return this same instance
        context.cache(this, row.getIntColumn("collection_id"));

        modified = false;
        modifiedMetadata = false;
        clearDetails();
    }
/**
* Get a collection from the database. Loads in the metadata
*
* @param context
* DSpace context object
* @param id
* ID of the collection
*
* @return the collection, or null if the ID is invalid.
* @throws SQLException
*/
public static Collection find(Context context, int id) throws SQLException
{
// First check the cache
Collection fromCache = (Collection) context.fromCache(Collection.class,
id);
if (fromCache != null)
{
return fromCache;
}
TableRow row = DatabaseManager.find(context, "collection", id);
if (row == null)
{
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_collection",
"not_found,collection_id=" + id));
}
return null;
}
// not null, return Collection
if (log.isDebugEnabled())
{
log.debug(LogManager.getHeader(context, "find_collection",
"collection_id=" + id));
}
return new Collection(context, row);
}
/**
* Create a new collection, with a new ID. This method is not public, and
* does not check authorisation.
*
* @param context
* DSpace context object
*
* @return the newly created collection
* @throws SQLException
* @throws AuthorizeException
*/
    static Collection create(Context context) throws SQLException,
            AuthorizeException
    {
        // Delegate with no pre-determined handle: a fresh one will be minted
        return create(context, null);
    }
/**
* Create a new collection, with a new ID. This method is not public, and
* does not check authorisation.
*
* @param context
* DSpace context object
*
* @param handle the pre-determined Handle to assign to the new community
* @return the newly created collection
* @throws SQLException
* @throws AuthorizeException
*/
static Collection create(Context context, String handle) throws SQLException,
AuthorizeException
{
TableRow row = DatabaseManager.create(context, "collection");
Collection c = new Collection(context, row);
try
{
c.handle = (handle == null) ?
HandleManager.createHandle(context, c) :
HandleManager.createHandle(context, c, handle);
}
catch(IllegalStateException ie)
{
//If an IllegalStateException is thrown, then an existing object is already using this handle
//Remove the collection we just created -- as it is incomplete
try
{
if(c!=null)
{
c.delete();
}
} catch(Exception e) { }
//pass exception on up the chain
throw ie;
}
// create the default authorization policy for collections
// of 'anonymous' READ
Group anonymousGroup = Group.find(context, 0);
ResourcePolicy myPolicy = ResourcePolicy.create(context);
myPolicy.setResource(c);
myPolicy.setAction(Constants.READ);
myPolicy.setGroup(anonymousGroup);
myPolicy.update();
// now create the default policies for submitted items
myPolicy = ResourcePolicy.create(context);
myPolicy.setResource(c);
myPolicy.setAction(Constants.DEFAULT_ITEM_READ);
myPolicy.setGroup(anonymousGroup);
myPolicy.update();
myPolicy = ResourcePolicy.create(context);
myPolicy.setResource(c);
myPolicy.setAction(Constants.DEFAULT_BITSTREAM_READ);
myPolicy.setGroup(anonymousGroup);
myPolicy.update();
context.addEvent(new Event(Event.CREATE, Constants.COLLECTION, c.getID(), c.handle));
log.info(LogManager.getHeader(context, "create_collection",
"collection_id=" + row.getIntColumn("collection_id"))
+ ",handle=" + c.handle);
return c;
}
/**
* Get all collections in the system. These are alphabetically sorted by
* collection name.
*
* @param context
* DSpace context object
*
* @return the collections in the system
* @throws SQLException
*/
public static Collection[] findAll(Context context) throws SQLException
{
TableRowIterator tri = DatabaseManager.queryTable(context, "collection",
"SELECT * FROM collection ORDER BY name");
List<Collection> collections = new ArrayList<Collection>();
try
{
while (tri.hasNext())
{
TableRow row = tri.next();
// First check the cache
Collection fromCache = (Collection) context.fromCache(
Collection.class, row.getIntColumn("collection_id"));
if (fromCache != null)
{
collections.add(fromCache);
}
else
{
collections.add(new Collection(context, row));
}
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
{
tri.close();
}
}
Collection[] collectionArray = new Collection[collections.size()];
collectionArray = (Collection[]) collections.toArray(collectionArray);
return collectionArray;
}
/**
* Get the in_archive items in this collection. The order is indeterminate.
*
* @return an iterator over the items in the collection.
* @throws SQLException
*/
public ItemIterator getItems() throws SQLException
{
String myQuery = "SELECT item.* FROM item, collection2item WHERE "
+ "item.item_id=collection2item.item_id AND "
+ "collection2item.collection_id= ? "
+ "AND item.in_archive='1'";
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
myQuery,getID());
return new ItemIterator(ourContext, rows);
}
/**
* Get all the items in this collection. The order is indeterminate.
*
* @return an iterator over the items in the collection.
* @throws SQLException
*/
public ItemIterator getAllItems() throws SQLException
{
String myQuery = "SELECT item.* FROM item, collection2item WHERE "
+ "item.item_id=collection2item.item_id AND "
+ "collection2item.collection_id= ? ";
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
myQuery,getID());
return new ItemIterator(ourContext, rows);
}
/**
* Get the internal ID of this collection
*
* @return the internal identifier
*/
public int getID()
{
return collectionRow.getIntColumn("collection_id");
}
/**
* @see org.dspace.content.DSpaceObject#getHandle()
*/
public String getHandle()
{
if(handle == null) {
try {
handle = HandleManager.findHandle(this.ourContext, this);
} catch (SQLException e) {
// TODO Auto-generated catch block
//e.printStackTrace();
}
}
return handle;
}
/**
* Get the value of a metadata field
*
* @param field
* the name of the metadata field to get
*
* @return the value of the metadata field
*
* @exception IllegalArgumentException
* if the requested metadata field doesn't exist
*/
public String getMetadata(String field)
{
String metadata = collectionRow.getStringColumn(field);
return (metadata == null) ? "" : metadata;
}
/**
* Set a metadata value
*
* @param field
* the name of the metadata field to get
* @param value
* value to set the field to
*
* @exception IllegalArgumentException
* if the requested metadata field doesn't exist
* @exception MissingResourceException
*/
public void setMetadata(String field, String value) throws MissingResourceException
{
if ((field.trim()).equals("name")
&& (value == null || value.trim().equals("")))
{
try
{
value = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled");
}
catch (MissingResourceException e)
{
value = "Untitled";
}
}
/*
* Set metadata field to null if null
* and trim strings to eliminate excess
* whitespace.
*/
if(value == null)
{
collectionRow.setColumnNull(field);
}
else
{
collectionRow.setColumn(field, value.trim());
}
modifiedMetadata = true;
addDetails(field);
}
    /** @return the collection's name metadata value (never null, may be "") */
    public String getName()
    {
        return getMetadata("name");
    }

    /**
     * Get the logo for the collection. <code>null</code> is returned if the
     * collection does not have a logo.
     *
     * @return the logo of the collection, or <code>null</code>
     */
    public Bitstream getLogo()
    {
        return logo;
    }
/**
* Give the collection a logo. Passing in <code>null</code> removes any
* existing logo. You will need to set the format of the new logo bitstream
* before it will work, for example to "JPEG". Note that
* <code>update(/code> will need to be called for the change to take
* effect. Setting a logo and not calling <code>update</code> later may
* result in a previous logo lying around as an "orphaned" bitstream.
*
* @param is the stream to use as the new logo
*
* @return the new logo bitstream, or <code>null</code> if there is no
* logo (<code>null</code> was passed in)
* @throws AuthorizeException
* @throws IOException
* @throws SQLException
*/
    public Bitstream setLogo(InputStream is) throws AuthorizeException,
            IOException, SQLException
    {
        // Check authorisation:
        // - removing the logo (is == null) is allowed with DELETE rights
        // - otherwise the caller must pass the canEdit check
        if (!((is == null) && AuthorizeManager.authorizeActionBoolean(
                ourContext, this, Constants.DELETE)))
        {
            canEdit(true);
        }

        // First, delete any existing logo bitstream
        if (!collectionRow.isColumnNull("logo_bitstream_id"))
        {
            logo.delete();
        }

        if (is == null)
        {
            // Passing null removes the logo altogether
            collectionRow.setColumnNull("logo_bitstream_id");
            logo = null;

            log.info(LogManager.getHeader(ourContext, "remove_logo",
                    "collection_id=" + getID()));
        }
        else
        {
            Bitstream newLogo = Bitstream.create(ourContext, is);
            collectionRow.setColumn("logo_bitstream_id", newLogo.getID());
            logo = newLogo;

            // now create policy for logo bitstream
            // to match our READ policy
            List<ResourcePolicy> policies = AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.READ);
            AuthorizeManager.addPolicies(ourContext, policies, newLogo);

            log.info(LogManager.getHeader(ourContext, "set_logo",
                    "collection_id=" + getID() + "logo_bitstream_id="
                            + newLogo.getID()));
        }

        // Caller must invoke update() for the change to be persisted
        modified = true;
        return logo;
    }
/**
* Create a workflow group for the given step if one does not already exist.
* Returns either the newly created group or the previously existing one.
* Note that while the new group is created in the database, the association
* between the group and the collection is not written until
* <code>update</code> is called.
*
* @param step
* the step (1-3) of the workflow to create or get the group for
*
* @return the workflow group associated with this collection
* @throws SQLException
* @throws AuthorizeException
*/
    public Group createWorkflowGroup(int step) throws SQLException,
            AuthorizeException
    {
        // Check authorisation - Must be an Admin to create Workflow Group
        AuthorizeUtil.authorizeManageWorkflowsGroup(ourContext, this);

        if (workflowGroup[step - 1] == null)
        {
            // turn off authorization so that Collection Admins can create
            // Collection Workflow Groups
            ourContext.turnOffAuthorisationSystem();
            Group g = Group.create(ourContext);
            ourContext.restoreAuthSystemState();

            g.setName("COLLECTION_" + getID() + "_WORKFLOW_STEP_" + step);
            g.update();
            setWorkflowGroup(step, g);

            // Workflow participants get ADD rights on the collection
            AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, g);
        }

        return workflowGroup[step - 1];
    }
/**
* Set the workflow group corresponding to a particular workflow step.
* <code>null</code> can be passed in if there should be no associated
* group for that workflow step; any existing group is NOT deleted.
*
* @param step
* the workflow step (1-3)
* @param g
* the new workflow group, or <code>null</code>
*/
public void setWorkflowGroup(int step, Group g)
{
workflowGroup[step - 1] = g;
if (g == null)
{
collectionRow.setColumnNull("workflow_step_" + step);
}
else
{
collectionRow.setColumn("workflow_step_" + step, g.getID());
}
modified = true;
}
/**
* Get the the workflow group corresponding to a particular workflow step.
* This returns <code>null</code> if there is no group associated with
* this collection for the given step.
*
* @param step
* the workflow step (1-3)
*
* @return the group of reviewers or <code>null</code>
*/
public Group getWorkflowGroup(int step)
{
return workflowGroup[step - 1];
}
/**
* Create a default submitters group if one does not already exist. Returns
* either the newly created group or the previously existing one. Note that
* other groups may also be allowed to submit to this collection by the
* authorization system.
*
* @return the default group of submitters associated with this collection
* @throws SQLException
* @throws AuthorizeException
*/
    public Group createSubmitters() throws SQLException, AuthorizeException
    {
        // Check authorisation - Must be an Admin to create Submitters Group
        AuthorizeUtil.authorizeManageSubmittersGroup(ourContext, this);

        if (submitters == null)
        {
            //turn off authorization so that Collection Admins can create Collection Submitters
            ourContext.turnOffAuthorisationSystem();
            submitters = Group.create(ourContext);
            ourContext.restoreAuthSystemState();

            submitters.setName("COLLECTION_" + getID() + "_SUBMIT");
            submitters.update();
        }

        // register this as the submitter group; note this (and the ADD policy
        // below) runs even when the group already existed
        collectionRow.setColumn("submitter", submitters.getID());

        // Submitters need ADD permission on the collection to submit items
        AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, submitters);

        modified = true;
        return submitters;
    }
/**
* Remove the submitters group, if no group has already been created
* then return without error. This will merely dereference the current
* submitters group from the collection so that it may be deleted
* without violating database constraints.
*/
    public void removeSubmitters() throws SQLException, AuthorizeException
    {
        // Check authorisation - Must be an Admin to delete Submitters Group
        AuthorizeUtil.authorizeManageSubmittersGroup(ourContext, this);

        // just return if there is no submitters group.
        if (submitters == null)
        {
            return;
        }

        // Remove the link to the collection table; the group itself is NOT
        // deleted here, only dereferenced so it can be deleted separately
        // without violating database constraints.
        collectionRow.setColumnNull("submitter");
        submitters = null;
        modified = true;
    }
/**
* Get the default group of submitters, if there is one. Note that the
* authorization system may allow others to submit to the collection, so
* this is not necessarily a definitive list of potential submitters.
* <P>
* The default group of submitters for collection 100 is the one called
* <code>collection_100_submit</code>.
*
* @return the default group of submitters, or <code>null</code> if there
* is no default group.
*/
    public Group getSubmitters()
    {
        // May be null if no default submitter group has been created
        return submitters;
    }
/**
* Create a default administrators group if one does not already exist.
* Returns either the newly created group or the previously existing one.
* Note that other groups may also be administrators.
*
* @return the default group of editors associated with this collection
* @throws SQLException
* @throws AuthorizeException
*/
    public Group createAdministrators() throws SQLException, AuthorizeException
    {
        // Check authorisation - Must be an Admin to create more Admins
        AuthorizeUtil.authorizeManageAdminGroup(ourContext, this);

        if (admins == null)
        {
            //turn off authorization so that Community Admins can create Collection Admins
            ourContext.turnOffAuthorisationSystem();
            admins = Group.create(ourContext);
            ourContext.restoreAuthSystemState();

            admins.setName("COLLECTION_" + getID() + "_ADMIN");
            admins.update();
        }

        // Grant ADMIN on the collection; note this (and the column write
        // below) runs even when the group already existed
        AuthorizeManager.addPolicy(ourContext, this,
                Constants.ADMIN, admins);

        // register this as the admin group
        collectionRow.setColumn("admin", admins.getID());

        modified = true;
        return admins;
    }
/**
* Remove the administrators group, if no group has already been created
* then return without error. This will merely dereference the current
* administrators group from the collection so that it may be deleted
* without violating database constraints.
*/
    public void removeAdministrators() throws SQLException, AuthorizeException
    {
        // Check authorisation - Must be an Admin of the parent community to delete Admin Group
        AuthorizeUtil.authorizeRemoveAdminGroup(ourContext, this);

        // just return if there is no administrative group.
        if (admins == null)
        {
            return;
        }

        // Remove the link to the collection table; the group itself is NOT
        // deleted here, only dereferenced so it can be deleted separately
        // without violating database constraints.
        collectionRow.setColumnNull("admin");
        admins = null;
        modified = true;
    }
/**
* Get the default group of administrators, if there is one. Note that the
* authorization system may allow others to be administrators for the
* collection.
* <P>
* The default group of administrators for collection 100 is the one called
* <code>collection_100_admin</code>.
*
* @return group of administrators, or <code>null</code> if there is no
* default group.
*/
    public Group getAdministrators()
    {
        // May be null if no default administrators group has been created
        return admins;
    }
/**
* Get the license that users must grant before submitting to this
* collection. If the collection does not have a specific license, the
* site-wide default is returned.
*
* @return the license for this collection
*/
public String getLicense()
{
String license = getMetadata("license");
if (license == null || license.trim().equals(""))
{
// Fallback to site-wide default
license = ConfigurationManager.getDefaultSubmissionLicense();
}
return license;
}
/**
* Get the license that users must grant before submitting to this
* collection.
*
* @return the license for this collection
*/
public String getLicenseCollection()
{
return getMetadata("license");
}
/**
* Find out if the collection has a custom license
*
* @return <code>true</code> if the collection has a custom license
*/
public boolean hasCustomLicense()
{
String license = getMetadata("license");
return !( license == null || license.trim().equals("") );
}
/**
* Set the license for this collection. Passing in <code>null</code> means
* that the site-wide default will be used.
*
* @param license
* the license, or <code>null</code>
*/
    public void setLicense(String license)
    {
        // Stored as collection metadata; null means the site-wide default
        // will be used at submission time
        setMetadata("license",license);
    }
/**
* Get the template item for this collection. <code>null</code> is
* returned if the collection does not have a template. Submission
* mechanisms may copy this template to provide a convenient starting point
* for a submission.
*
* @return the item template, or <code>null</code>
*/
    public Item getTemplateItem() throws SQLException
    {
        // null when the collection has no template item
        return template;
    }
/**
* Create an empty template item for this collection. If one already exists,
* no action is taken. Caution: Make sure you call <code>update</code> on
* the collection after doing this, or the item will have been created but
* the collection record will not refer to it.
*
* @throws SQLException
* @throws AuthorizeException
*/
public void createTemplateItem() throws SQLException, AuthorizeException
{
// Check authorisation
AuthorizeUtil.authorizeManageTemplateItem(ourContext, this);
if (template == null)
{
template = Item.create(ourContext);
collectionRow.setColumn("template_item_id", template.getID());
log.info(LogManager.getHeader(ourContext, "create_template_item",
"collection_id=" + getID() + ",template_item_id="
+ template.getID()));
}
modified = true;
}
/**
* Remove the template item for this collection, if there is one. Note that
* since this has to remove the old template item ID from the collection
* record in the database, the colletion record will be changed, including
* any other changes made; in other words, this method does an
* <code>update</code>.
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
    public void removeTemplateItem() throws SQLException, AuthorizeException,
            IOException
    {
        // Check authorisation
        AuthorizeUtil.authorizeManageTemplateItem(ourContext, this);

        // Clear the reference and persist immediately (this method performs
        // an update of the collection row, unlike most setters here)
        collectionRow.setColumnNull("template_item_id");
        DatabaseManager.update(ourContext, collectionRow);

        if (template != null)
        {
            log.info(LogManager.getHeader(ourContext, "remove_template_item",
                    "collection_id=" + getID() + ",template_item_id="
                            + template.getID()));
            // temporarily turn off the auth system: the permission was already
            // checked at the top of the method, and re-checking would fail now
            // that the collection-to-item relation has been broken
            ourContext.turnOffAuthorisationSystem();
            template.delete();
            ourContext.restoreAuthSystemState();
            template = null;
        }

        ourContext.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, getID(), "remove_template_item"));
    }
/**
* Add an item to the collection. This simply adds a relationship between
* the item and the collection - it does nothing like set an issue date,
* remove a personal workspace item etc. This has instant effect;
* <code>update</code> need not be called.
*
* @param item
* item to add
* @throws SQLException
* @throws AuthorizeException
*/
public void addItem(Item item) throws SQLException, AuthorizeException
{
// Check authorisation
AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);
log.info(LogManager.getHeader(ourContext, "add_item", "collection_id="
+ getID() + ",item_id=" + item.getID()));
// Create mapping
TableRow row = DatabaseManager.row("collection2item");
row.setColumn("collection_id", getID());
row.setColumn("item_id", item.getID());
DatabaseManager.insert(ourContext, row);
ourContext.addEvent(new Event(Event.ADD, Constants.COLLECTION, getID(), Constants.ITEM, item.getID(), item.getHandle()));
}
/**
* Remove an item. If the item is then orphaned, it is deleted.
*
* @param item
* item to remove
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
    public void removeItem(Item item) throws SQLException, AuthorizeException,
            IOException
    {
        // Check authorisation
        AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);

        // will the item become an orphan (mapped to no other collection)?
        TableRow row = DatabaseManager.querySingle(ourContext,
                "SELECT COUNT(DISTINCT collection_id) AS num FROM collection2item WHERE item_id= ? ",
                item.getID());

        // Defer the FK constraint so the item can be deleted before its
        // mapping row is removed below
        DatabaseManager.setConstraintDeferred(ourContext, "coll2item_item_fk");

        if (row.getLongColumn("num") == 1)
        {
            // Orphan; delete it
            item.delete();
        }

        log.info(LogManager.getHeader(ourContext, "remove_item",
                "collection_id=" + getID() + ",item_id=" + item.getID()));

        DatabaseManager.updateQuery(ourContext,
                "DELETE FROM collection2item WHERE collection_id= ? "+
                "AND item_id= ? ",
                getID(), item.getID());

        // Re-enable immediate FK checking now that the mapping row is gone
        DatabaseManager.setConstraintImmediate(ourContext, "coll2item_item_fk");

        ourContext.addEvent(new Event(Event.REMOVE, Constants.COLLECTION, getID(), Constants.ITEM, item.getID(), item.getHandle()));
    }
/**
* Update the collection metadata (including logo, and workflow groups) to
* the database. Inserts if this is a new collection.
*
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
*/
    public void update() throws SQLException, IOException, AuthorizeException
    {
        // Check authorisation (WRITE/ADD on a parent community also suffices)
        canEdit(true);

        log.info(LogManager.getHeader(ourContext, "update_collection",
                "collection_id=" + getID()));

        DatabaseManager.update(ourContext, collectionRow);

        // Fire events only for changes made since the last update
        if (modified)
        {
            ourContext.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, getID(), null));
            modified = false;
        }
        if (modifiedMetadata)
        {
            ourContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.COLLECTION, getID(), getDetails()));
            modifiedMetadata = false;
            clearDetails();
        }
    }
public boolean canEditBoolean() throws java.sql.SQLException
{
return canEditBoolean(true);
}
public boolean canEditBoolean(boolean useInheritance) throws java.sql.SQLException
{
try
{
canEdit(useInheritance);
return true;
}
catch (AuthorizeException e)
{
return false;
}
}
public void canEdit() throws AuthorizeException, SQLException
{
canEdit(true);
}
public void canEdit(boolean useInheritance) throws AuthorizeException, SQLException
{
Community[] parents = getCommunities();
for (int i = 0; i < parents.length; i++)
{
if (AuthorizeManager.authorizeActionBoolean(ourContext, parents[i],
Constants.WRITE, useInheritance))
{
return;
}
if (AuthorizeManager.authorizeActionBoolean(ourContext, parents[i],
Constants.ADD, useInheritance))
{
return;
}
}
AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE, useInheritance);
}
/**
 * Delete the collection, including the metadata and logo. Items that are
 * then orphans are deleted. Groups associated with this collection
 * (workflow participants and submitters) are NOT deleted.
 *
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if removal of contained objects is not allowed
 * @throws IOException if browse-index maintenance or bitstream removal fails
 */
void delete() throws SQLException, AuthorizeException, IOException
{
    log.info(LogManager.getHeader(ourContext, "delete_collection",
            "collection_id=" + getID()));

    ourContext.addEvent(new Event(Event.DELETE, Constants.COLLECTION, getID(), getHandle()));

    // Remove from cache
    ourContext.removeCached(this, getID());

    // remove subscriptions - hmm, should this be in Subscription.java?
    DatabaseManager.updateQuery(ourContext,
            "DELETE FROM subscription WHERE collection_id= ? ",
            getID());

    // Remove Template Item
    removeTemplateItem();

    // Remove items
    ItemIterator items = getAllItems();
    try
    {
        // One browse-index helper serves the whole loop; previously a new
        // IndexBrowse was constructed per item, which was loop-invariant work.
        IndexBrowse ib = new IndexBrowse(ourContext);
        while (items.hasNext())
        {
            Item item = items.next();
            if (item.isOwningCollection(this))
            {
                // the collection to be deleted is the owning collection,
                // thus remove the item from all collections it belongs to
                Collection[] collections = item.getCollections();
                for (int i = 0; i < collections.length; i++)
                {
                    // notify Browse of removing item.
                    ib.itemRemoved(item);
                    collections[i].removeItem(item);
                }
            }
            else
            {
                // the item was only mapped to this collection, so just
                // remove the mapping and re-index the item.
                ib.indexItem(item);
                removeItem(item);
            }
        }
    }
    catch (BrowseException e)
    {
        log.error("caught exception: ", e);
        throw new IOException(e.getMessage(), e);
    }
    finally
    {
        if (items != null)
        {
            items.close();
        }
    }

    // Delete bitstream logo
    setLogo(null);

    // Remove all authorization policies
    AuthorizeManager.removeAllPolicies(ourContext, this);

    // Remove any WorkflowItems
    WorkflowItem[] wfarray = WorkflowItem.findByCollection(ourContext, this);
    for (int x = 0; x < wfarray.length; x++)
    {
        // remove the workflowitem first, then the item
        Item myItem = wfarray[x].getItem();
        wfarray[x].deleteWrapper();
        myItem.delete();
    }

    // Remove any WorkspaceItems
    WorkspaceItem[] wsarray = WorkspaceItem.findByCollection(ourContext, this);
    for (int x = 0; x < wsarray.length; x++)
    {
        wsarray[x].deleteAll();
    }

    // get rid of the content count cache if it exists
    try
    {
        ItemCounter ic = new ItemCounter(ourContext);
        ic.remove(this);
    }
    catch (ItemCountException e)
    {
        // FIXME: upside down exception handling due to lack of good
        // exception framework
        throw new IllegalStateException(e.getMessage(), e);
    }

    // Remove any Handle
    HandleManager.unbindHandle(ourContext, this);

    // Delete collection row
    DatabaseManager.delete(ourContext, collectionRow);

    // Remove the three workflow step groups - must happen after deleting
    // the collection row (foreign key constraints).
    for (int step = 1; step <= 3; step++)
    {
        Group g = getWorkflowGroup(step);
        if (g != null)
        {
            g.delete();
        }
    }

    // Remove default administrators group
    Group admins = getAdministrators();
    if (admins != null)
    {
        admins.delete();
    }

    // Remove default submitters group
    Group submitters = getSubmitters();
    if (submitters != null)
    {
        submitters.delete();
    }
}
/**
 * Get the communities this collection appears in, including every ancestor
 * community of those direct parents.
 *
 * @return array of <code>Community</code> objects
 * @throws SQLException if a database error occurs
 */
public Community[] getCommunities() throws SQLException
{
    List<Community> communities = new ArrayList<Community>();

    TableRowIterator tri = DatabaseManager.queryTable(ourContext, "community",
            "SELECT community.* FROM community, community2collection WHERE " +
            "community.community_id=community2collection.community_id " +
            "AND community2collection.collection_id= ? ",
            getID());
    try
    {
        while (tri.hasNext())
        {
            TableRow row = tri.next();

            // Prefer a cached instance for this community id, if any.
            Community owner = (Community) ourContext.fromCache(Community.class,
                    row.getIntColumn("community_id"));
            if (owner == null)
            {
                owner = new Community(ourContext, row);
            }
            communities.add(owner);

            // Include every ancestor community as well.
            communities.addAll(Arrays.asList(owner.getAllParents()));
        }
    }
    finally
    {
        // Release the iterator's database resources.
        if (tri != null)
        {
            tri.close();
        }
    }

    return communities.toArray(new Community[communities.size()]);
}
/**
 * Return <code>true</code> if <code>other</code> is the same Collection
 * as this object, <code>false</code> otherwise. Two collections are the
 * same when they are of exactly the same class and share an ID.
 *
 * @param other
 *            object to compare to
 *
 * @return <code>true</code> if object passed in represents the same
 *         collection as this object
 */
@Override
public boolean equals(Object other)
{
    // Guard against null and subclass/superclass mismatches.
    if (other == null || getClass() != other.getClass())
    {
        return false;
    }
    return getID() == ((Collection) other).getID();
}
/**
 * Hash code consistent with {@link #equals(Object)}. equals() compares
 * collections by ID, so the hash must be derived from the ID as well: the
 * previous implementation hashed the backing TableRow, which could give
 * two equal Collection instances (same ID, different row objects)
 * different hash codes, violating the equals/hashCode contract.
 */
@Override
public int hashCode()
{
    int hash = 7;
    hash = 89 * hash + this.getID();
    return hash;
}
/**
 * Utility method for reading in a group from a group ID in a column. If the
 * column is null, null is returned.
 *
 * @param col
 *            the column name to read
 * @return the group referred to by that column, or null
 * @throws SQLException if a database error occurs
 */
private Group groupFromColumn(String col) throws SQLException
{
    // A null column means "no group configured" rather than an error.
    return collectionRow.isColumnNull(col)
            ? null
            : Group.find(ourContext, collectionRow.getIntColumn(col));
}
/**
 * Return the DSpace object type of this object, as found in Constants.
 *
 * @return int Constants.COLLECTION, identifying this object as a collection
 */
public int getType()
{
    return Constants.COLLECTION;
}
/**
 * return an array of collections that user has a given permission on
 * (useful for trimming 'select to collection' list) or figuring out which
 * collections a person is an editor for.
 *
 * @param context the DSpace context
 * @param comm
 *            (optional) restrict search to a community, else null
 * @param actionID
 *            ID of the action (from Constants) to check for
 *
 * @return Collection [] of collections with matching permissions
 * @throws SQLException if a database error occurs
 */
public static Collection[] findAuthorized(Context context, Community comm,
        int actionID) throws java.sql.SQLException
{
    // Candidate set: either the community's collections, or all collections.
    Collection[] candidates = (comm != null)
            ? comm.getCollections()
            : Collection.findAll(context);

    // Keep only those the current user is authorized to act on.
    List<Collection> authorized = new ArrayList<Collection>();
    for (Collection candidate : candidates)
    {
        if (AuthorizeManager.authorizeActionBoolean(context, candidate, actionID))
        {
            authorized.add(candidate);
        }
    }

    return authorized.toArray(new Collection[authorized.size()]);
}
/**
 * counts items in this collection
 *
 * @return total number of in-archive, non-withdrawn items in this
 *         collection
 * @throws SQLException if a database error occurs
 */
public int countItems()
        throws SQLException
{
    int itemcount = 0;
    PreparedStatement statement = null;
    ResultSet rs = null;

    try
    {
        String query = "SELECT count(*) FROM collection2item, item WHERE "
                + "collection2item.collection_id = ? "
                + "AND collection2item.item_id = item.item_id "
                + "AND in_archive ='1' AND item.withdrawn='0' ";

        statement = ourContext.getDBConnection().prepareStatement(query);
        statement.setInt(1, getID());

        rs = statement.executeQuery();
        // executeQuery() never returns null; guard on next() instead so the
        // count is only read when a result row is actually present (the old
        // code ignored next()'s return value behind a dead null-check).
        if (rs.next())
        {
            itemcount = rs.getInt(1);
        }
    }
    finally
    {
        // Close JDBC resources; close failures are ignored so they cannot
        // mask an exception thrown by the query itself.
        if (rs != null)
        {
            try { rs.close(); } catch (SQLException sqle) { }
        }
        if (statement != null)
        {
            try { statement.close(); } catch (SQLException sqle) { }
        }
    }

    return itemcount;
}
/**
 * Return the object (this collection or its first parent community) whose
 * administrators may perform the given action, according to
 * AuthorizeConfiguration.
 *
 * @param action the action (from Constants) being attempted
 * @return the object whose admins govern the action, or null if neither
 *         collection nor community admins are permitted
 * @throws SQLException if a database error occurs
 */
public DSpaceObject getAdminObject(int action) throws SQLException
{
    Community[] communities = getCommunities();
    Community community = (communities != null && communities.length > 0)
            ? communities[0]
            : null;

    switch (action)
    {
    case Constants.REMOVE:
        if (AuthorizeConfiguration.canCollectionAdminPerformItemDeletion())
        {
            return this;
        }
        if (AuthorizeConfiguration.canCommunityAdminPerformItemDeletion())
        {
            return community;
        }
        return null;
    case Constants.DELETE:
        if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion())
        {
            return community;
        }
        return null;
    default:
        // All other actions are administered at the collection level.
        return this;
    }
}
/**
 * Return this collection's parent object: the first community it appears
 * in, or null when it has no (non-null) parent community.
 *
 * @return the parent community, or null
 * @throws SQLException if a database error occurs
 */
@Override
public DSpaceObject getParentObject() throws SQLException
{
    Community[] parents = this.getCommunities();
    boolean hasParent = parents != null && parents.length > 0 && parents[0] != null;
    return hasParent ? parents[0] : null;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.storage.rdbms.TableRow;
/**
 * Class to handle WorkspaceItems which are being supervised. It extends the
 * WorkspaceItem class and adds the methods required to be a Supervised Item.
 *
 * @author Richard Jones
 * @version $Revision: 5844 $
 */
public class SupervisedItem extends WorkspaceItem
{
    /**
     * Construct a supervised item out of the given row
     *
     * @param context the context this object exists in
     * @param row the database row
     */
    SupervisedItem(Context context, TableRow row)
        throws SQLException
    {
        // construct a new workspace item
        super(context, row);
    }

    /**
     * Get all workspace items which are being supervised
     *
     * @param context the context this object exists in
     *
     * @return array of SupervisedItems
     */
    public static SupervisedItem[] getAll(Context context)
        throws SQLException
    {
        // The following query pulls out distinct workspace items which have
        // entries in the supervisory linking database. We use DISTINCT to
        // prevent multiple instances of the item in the case that it is
        // supervised by more than one group
        String query = "SELECT DISTINCT workspaceitem.* " +
                       "FROM workspaceitem, epersongroup2workspaceitem " +
                       "WHERE workspaceitem.workspace_item_id = " +
                       "epersongroup2workspaceitem.workspace_item_id " +
                       "ORDER BY workspaceitem.workspace_item_id";

        return readSupervisedItems(context,
                DatabaseManager.queryTable(context, "workspaceitem", query));
    }

    /**
     * Gets all the groups that are supervising a particular workspace item
     *
     * @param c the context this object exists in
     * @param wi the ID of the workspace item
     *
     * @return the supervising groups in an array
     */
    public Group[] getSupervisorGroups(Context c, int wi)
        throws SQLException
    {
        List<Group> groupList = new ArrayList<Group>();
        String query = "SELECT epersongroup.* " +
                       "FROM epersongroup, epersongroup2workspaceitem " +
                       "WHERE epersongroup2workspaceitem.workspace_item_id" +
                       " = ? " +
                       " AND epersongroup2workspaceitem.eperson_group_id =" +
                       " epersongroup.eperson_group_id " +
                       "ORDER BY epersongroup.name";

        TableRowIterator tri = DatabaseManager.queryTable(c, "epersongroup", query, wi);

        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                Group group = Group.find(c, row.getIntColumn("eperson_group_id"));
                groupList.add(group);
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        return groupList.toArray(new Group[groupList.size()]);
    }

    /**
     * Gets all the groups that are supervising this workspace item
     *
     * @return the supervising groups in an array
     */
    public Group[] getSupervisorGroups()
        throws SQLException
    {
        // Delegate to the parameterised variant (resolving the old FIXME
        // about the duplicated query). A fresh Context is needed because
        // this object's own context is not reachable here; the previous
        // implementation never released it, leaking a database connection.
        // abort() in finally frees the connection -- this is read-only work,
        // so there is nothing to commit.
        Context ourContext = new Context();
        try
        {
            return getSupervisorGroups(ourContext, this.getID());
        }
        finally
        {
            ourContext.abort();
        }
    }

    /**
     * Get items being supervised by given EPerson
     *
     * @param ep the eperson who's items to supervise we want
     * @param context the dspace context
     *
     * @return the items eperson is supervising in an array
     */
    public static SupervisedItem[] findbyEPerson(Context context, EPerson ep)
        throws SQLException
    {
        String query = "SELECT DISTINCT workspaceitem.* " +
                       "FROM workspaceitem, epersongroup2workspaceitem, " +
                       "epersongroup2eperson " +
                       "WHERE workspaceitem.workspace_item_id = " +
                       "epersongroup2workspaceitem.workspace_item_id " +
                       "AND epersongroup2workspaceitem.eperson_group_id =" +
                       " epersongroup2eperson.eperson_group_id " +
                       "AND epersongroup2eperson.eperson_id= ? " +
                       " ORDER BY workspaceitem.workspace_item_id";

        return readSupervisedItems(context,
                DatabaseManager.queryTable(context, "workspaceitem", query, ep.getID()));
    }

    /**
     * Drain a workspaceitem row iterator into SupervisedItem instances,
     * always closing the iterator. Shared by getAll() and findbyEPerson(),
     * which previously duplicated this loop.
     */
    private static SupervisedItem[] readSupervisedItems(Context context,
            TableRowIterator tri)
        throws SQLException
    {
        List<SupervisedItem> sItems = new ArrayList<SupervisedItem>();
        try
        {
            while (tri.hasNext())
            {
                sItems.add(new SupervisedItem(context, tri.next()));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
        return sItems.toArray(new SupervisedItem[sItems.size()]);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
 * Specialized iterator for DSpace Items. This iterator is used for loading
 * items into memory one by one, since in many cases, it would not make sense to
 * load a set of items into memory all at once. For example, loading in an
 * entire community or site's worth of items wouldn't make sense.
 *
 * Note that this class is not a real Iterator, as it does not implement
 * the Iterator interface
 *
 * @author Robert Tansley
 * @author Richard Jones
 * @version $Revision: 5844 $
 */
public class ItemIterator
{
    /*
     * This class wraps either a TableRowIterator (row mode) or an iterator
     * over item ids (id mode); exactly one of the two fields is non-null.
     */

    /** Our context */
    private Context ourContext;

    /** The table row iterator of Item rows (null in id mode) */
    private TableRowIterator itemRows;

    /** a real iterator which works over the item ids when present */
    private Iterator<Integer> iditr;

    /**
     * Construct an item iterator using a set of TableRow objects from
     * the item table
     *
     * @param context
     *            our context
     * @param rows
     *            the rows that correspond to the Items to be iterated over
     */
    public ItemIterator(Context context, TableRowIterator rows)
    {
        ourContext = context;
        itemRows = rows;
    }

    /**
     * Construct an item iterator using an array list of item ids
     *
     * @param context
     *            our context
     * @param iids
     *            the array list to be iterated over
     */
    public ItemIterator(Context context, List<Integer> iids)
    {
        ourContext = context;
        iditr = iids.iterator();
    }

    /**
     * Find out if there are any more items to iterate over
     *
     * @return <code>true</code> if there are more items
     * @throws SQLException if a database error occurs
     */
    public boolean hasNext() throws SQLException
    {
        if (iditr != null)
        {
            return iditr.hasNext();
        }
        return itemRows != null && itemRows.hasNext();
    }

    /**
     * Get the next item in the iterator. Returns <code>null</code> if there
     * are no more items.
     *
     * @return the next item, or <code>null</code>
     * @throws SQLException if a database error occurs
     */
    public Item next() throws SQLException
    {
        if (iditr != null)
        {
            return nextByID();
        }
        if (itemRows != null)
        {
            return nextByRow();
        }
        return null;
    }

    /**
     * Instantiate the next item from its id, preferring a cached instance
     * from the context over a fresh database load.
     *
     * @return the next item instantiated from the id, or null if exhausted
     * @throws SQLException if a database error occurs
     */
    private Item nextByID()
        throws SQLException
    {
        if (!iditr.hasNext())
        {
            return null;
        }

        int id = iditr.next().intValue();

        Item cached = (Item) ourContext.fromCache(Item.class, id);
        return (cached != null) ? cached : Item.find(ourContext, id);
    }

    /**
     * return the id of the next item.
     *
     * @return the next id or -1 if none
     */
    public int nextID()
        throws SQLException
    {
        if (iditr != null)
        {
            return nextByIDID();
        }
        if (itemRows != null)
        {
            return nextByRowID();
        }
        return -1;
    }

    /**
     * Return the next id from the id iterator. (The awkward historical name
     * means: the ID of the next item, in id-iteration mode.)
     *
     * @return the item id, or -1 if none
     */
    private int nextByIDID()
    {
        return iditr.hasNext() ? iditr.next().intValue() : -1;
    }

    /**
     * Return the next item's id when iterating over table rows.
     *
     * @return the item id, or -1 if none
     */
    private int nextByRowID()
        throws SQLException
    {
        if (!itemRows.hasNext())
        {
            return -1;
        }
        return itemRows.next().getIntColumn("item_id");
    }

    /**
     * Instantiate the next item from the next table row, preferring a
     * cached instance from the context.
     *
     * @return the item or null if none
     * @throws SQLException if a database error occurs
     */
    private Item nextByRow()
        throws SQLException
    {
        if (!itemRows.hasNext())
        {
            return null;
        }

        TableRow row = itemRows.next();

        Item cached = (Item) ourContext.fromCache(Item.class,
                row.getIntColumn("item_id"));
        return (cached != null) ? cached : new Item(ourContext, row);
    }

    /**
     * Dispose of this Iterator, and it's underlying resources
     */
    public void close()
    {
        // Only row mode holds database resources; id mode needs no cleanup.
        if (itemRows != null)
        {
            itemRows.close();
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
/**
 * DSpace person name utility class
 * <P>
 * Person names in the Dublin Core value table in the DSpace database are stored
 * in the following simple format:
 * <P>
 * <code>Lastname, First name(s)</code>
 * <P>
 * <em>FIXME: No policy for dealing with "van"/"van der" and "Jr."</em>
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class DCPersonName
{
    /** The person's last name */
    private String lastName;

    /** The person's first name(s) */
    private String firstNames;

    /** Construct a blank name (both components null, representing no one) */
    public DCPersonName()
    {
        lastName = null;
        firstNames = null;
    }

    /**
     * Construct a name from a raw DC value in "Last, First" form.
     *
     * @param rawValue
     *            the value entry from the database; null or empty yields a
     *            blank name
     */
    public DCPersonName(String rawValue)
    {
        // Null by default (representing noone)
        lastName = null;
        firstNames = null;

        // Check we've actually been passed a name
        if ((rawValue != null) && !rawValue.equals(""))
        {
            // Extract the last name and first name components
            int commaIndex = rawValue.indexOf(',');

            // Just in case there's no comma, assume whole thing is
            // last name
            if (commaIndex == -1)
            {
                commaIndex = rawValue.length();
            }

            lastName = rawValue.substring(0, commaIndex).trim();

            // Just in case the first name is blank
            if (rawValue.length() > (commaIndex + 1))
            {
                firstNames = rawValue.substring(commaIndex + 1).trim();
            }
            else
            {
                // Since we have a name, we don't want to
                // leave the first name as null
                firstNames = "";
            }
        }
    }

    /**
     * Construct a name from a last name and first name
     *
     * @param lastNameIn
     *            the last name
     * @param firstNamesIn
     *            the first names
     */
    public DCPersonName(String lastNameIn, String firstNamesIn)
    {
        lastName = lastNameIn;
        firstNames = firstNamesIn;
    }

    /**
     * Return a string for writing the name to the database
     *
     * @return the name, suitable for putting in the database
     */
    @Override
    public String toString()
    {
        // StringBuilder replaces the previous StringBuffer: the buffer is
        // purely method-local, so the synchronized variant bought nothing.
        StringBuilder out = new StringBuilder();

        if (lastName != null)
        {
            out.append(lastName);

            if ((firstNames != null) && !firstNames.equals(""))
            {
                out.append(", ").append(firstNames);
            }
        }

        return (out.toString());
    }

    /**
     * Get the first name(s). Guaranteed non-null.
     *
     * @return the first name(s), or an empty string if none
     */
    public String getFirstNames()
    {
        return ((firstNames == null) ? "" : firstNames);
    }

    /**
     * Get the last name. Guaranteed non-null.
     *
     * @return the last name, or an empty string if none
     */
    public String getLastName()
    {
        return ((lastName == null) ? "" : lastName);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.event.Event;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Class representing bundles of bitstreams stored in the DSpace system
* <P>
* The corresponding Bitstream objects are loaded into memory. At present, there
* is no metadata associated with bundles - they are simple containers. Thus,
* the <code>update</code> method doesn't do much yet. Creating, adding or
* removing bitstreams has instant effect in the database.
*
* @author Robert Tansley
* @version $Revision: 5844 $
*/
public class Bundle extends DSpaceObject
{
/** log4j logger */
private static Logger log = Logger.getLogger(Bundle.class);
/** Our context */
private Context ourContext;
/** The table row corresponding to this bundle */
private TableRow bundleRow;
/** The bitstreams in this bundle */
private List<Bitstream> bitstreams;
/** Flag set when data is modified, for events */
private boolean modified;
/** Flag set when metadata is modified, for events */
private boolean modifiedMetadata;
/**
 * Construct a bundle object with the given table row, eagerly loading its
 * bitstreams (ordered per the webui.bitstream.order.* configuration) and
 * caching the new object in the context.
 *
 * @param context
 *            the context this object exists in
 * @param row
 *            the corresponding row in the table
 */
Bundle(Context context, TableRow row) throws SQLException
{
    ourContext = context;
    bundleRow = row;
    bitstreams = new ArrayList<Bitstream>();

    // Bitstream ordering is configurable; default is sequence_id ascending.
    String orderField = ConfigurationManager.getProperty("webui.bitstream.order.field");
    String orderDirection = ConfigurationManager.getProperty("webui.bitstream.order.direction");
    if (orderField == null)
    {
        orderField = "sequence_id";
    }
    if (orderDirection == null)
    {
        orderDirection = "ASC";
    }

    String query = "SELECT bitstream.* FROM bitstream, bundle2bitstream WHERE"
            + " bundle2bitstream.bitstream_id=bitstream.bitstream_id AND"
            + " bundle2bitstream.bundle_id= ?"
            + " ORDER BY bitstream." + orderField + " " + orderDirection;

    // Get bitstreams, preferring cached instances where available.
    TableRowIterator tri = DatabaseManager.queryTable(
            ourContext, "bitstream",
            query,
            bundleRow.getIntColumn("bundle_id"));
    try
    {
        while (tri.hasNext())
        {
            TableRow r = (TableRow) tri.next();
            Bitstream fromCache = (Bitstream) context.fromCache(
                    Bitstream.class, r.getIntColumn("bitstream_id"));
            bitstreams.add(fromCache != null ? fromCache : new Bitstream(ourContext, r));
        }
    }
    finally
    {
        // close the TableRowIterator to free up resources
        if (tri != null)
        {
            tri.close();
        }
    }

    // Cache ourselves
    context.cache(this, row.getIntColumn("bundle_id"));

    modified = false;
    modifiedMetadata = false;
}
/**
 * Get a bundle from the database. The bundle and bitstream metadata are all
 * loaded into memory.
 *
 * @param context
 *            DSpace context object
 * @param id
 *            ID of the bundle
 *
 * @return the bundle, or null if the ID is invalid.
 */
public static Bundle find(Context context, int id) throws SQLException
{
    // First check the cache
    Bundle fromCache = (Bundle) context.fromCache(Bundle.class, id);
    if (fromCache != null)
    {
        return fromCache;
    }

    TableRow row = DatabaseManager.find(context, "bundle", id);
    if (row == null)
    {
        if (log.isDebugEnabled())
        {
            log.debug(LogManager.getHeader(context, "find_bundle",
                    "not_found,bundle_id=" + id));
        }
        return null;
    }

    if (log.isDebugEnabled())
    {
        log.debug(LogManager.getHeader(context, "find_bundle",
                "bundle_id=" + id));
    }
    return new Bundle(context, row);
}
/**
 * Create a new bundle, with a new ID. This method is not public, since
 * bundles need to be created within the context of an item. For this
 * reason, authorisation is also not checked; that is the responsibility of
 * the caller.
 *
 * @param context
 *            DSpace context object
 *
 * @return the newly created bundle
 */
static Bundle create(Context context) throws SQLException
{
    // Create a table row
    TableRow row = DatabaseManager.create(context, "bundle");
    int bundleId = row.getIntColumn("bundle_id");

    log.info(LogManager.getHeader(context, "create_bundle", "bundle_id="
            + bundleId));

    context.addEvent(new Event(Event.CREATE, Constants.BUNDLE, bundleId, null));

    return new Bundle(context, row);
}
/**
 * Get the internal identifier of this bundle
 *
 * @return the internal identifier (the bundle_id column of the backing row)
 */
public int getID()
{
    return bundleRow.getIntColumn("bundle_id");
}
/**
 * Get the name of the bundle. Read directly from the backing database row.
 *
 * @return name of the bundle (ORIGINAL, TEXT, THUMBNAIL) or NULL if not set
 */
public String getName()
{
    return bundleRow.getStringColumn("name");
}
/**
 * Set the name of the bundle. Not persisted until {@code update()} is
 * called.
 *
 * @param name
 *            string name of the bundle (ORIGINAL, TEXT, THUMBNAIL) are the
 *            values currently used
 */
public void setName(String name)
{
    bundleRow.setColumn("name", name);
    // Flag the change so update() fires a MODIFY_METADATA event.
    modifiedMetadata = true;
}
/**
 * Get the primary bitstream ID of the bundle
 *
 * @return primary bitstream ID or -1 if not set
 */
public int getPrimaryBitstreamID()
{
    return bundleRow.getIntColumn("primary_bitstream_id");
}
/**
 * Set the primary bitstream ID of the bundle. Not persisted until
 * {@code update()} is called.
 *
 * @param bitstreamID
 *            int ID of primary bitstream (e.g. index html file)
 */
public void setPrimaryBitstreamID(int bitstreamID)
{
    bundleRow.setColumn("primary_bitstream_id", bitstreamID);
    // Flag the change so update() fires a MODIFY event.
    modified = true;
}
/**
 * Unset the primary bitstream ID of the bundle. Not persisted until
 * {@code update()} is called.
 */
public void unsetPrimaryBitstreamID()
{
    bundleRow.setColumnNull("primary_bitstream_id");
    // Flag the change so update() fires a MODIFY event, consistent with
    // setPrimaryBitstreamID(); previously clearing the field was never
    // reported as a modification.
    modified = true;
}
/**
 * Get this bundle's Handle.
 *
 * @return always null -- Handles are not assigned to bundles
 */
public String getHandle()
{
    // No Handles for bundles
    return null;
}
/**
 * Look up a bitstream in this bundle by name. Operates on the in-memory
 * bitstream list; no database access is performed.
 *
 * @param name
 *            name of the bitstream you're looking for
 *
 * @return the first bitstream with a matching name, or null if not found
 */
public Bitstream getBitstreamByName(String name)
{
    // Generic for-each replaces the previous raw-typed Iterator loop and
    // its unchecked cast; the field is already a List<Bitstream>.
    for (Bitstream b : bitstreams)
    {
        if (name.equals(b.getName()))
        {
            return b;
        }
    }
    return null;
}
/**
 * Get the bitstreams in this bundle
 *
 * @return the bitstreams, in configured order
 */
public Bitstream[] getBitstreams()
{
    // Return a fresh array so callers cannot mutate the internal list.
    return bitstreams.toArray(new Bitstream[bitstreams.size()]);
}
/**
 * Get the items this bundle appears in
 *
 * @return array of <code>Item</code> s this bundle appears in
 */
public Item[] getItems() throws SQLException
{
    List<Item> items = new ArrayList<Item>();

    // Get items
    TableRowIterator tri = DatabaseManager.queryTable(
            ourContext, "item",
            "SELECT item.* FROM item, item2bundle WHERE " +
            "item2bundle.item_id=item.item_id AND " +
            "item2bundle.bundle_id= ? ",
            bundleRow.getIntColumn("bundle_id"));
    try
    {
        while (tri.hasNext())
        {
            TableRow r = (TableRow) tri.next();

            // Prefer a cached Item instance when one exists.
            Item fromCache = (Item) ourContext.fromCache(Item.class,
                    r.getIntColumn("item_id"));
            items.add(fromCache != null ? fromCache : new Item(ourContext, r));
        }
    }
    finally
    {
        // Release the iterator's database resources.
        if (tri != null)
        {
            tri.close();
        }
    }

    return items.toArray(new Item[items.size()]);
}
/**
 * Create a new bitstream in this bundle.
 *
 * @param is
 *            the stream to read the new bitstream from
 *
 * @return the newly created bitstream
 * @throws AuthorizeException if the current user lacks ADD on this bundle
 * @throws IOException if the stream cannot be read or stored
 * @throws SQLException if a database error occurs
 */
public Bitstream createBitstream(InputStream is) throws AuthorizeException,
        IOException, SQLException
{
    // Check authorisation
    AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);

    Bitstream b = Bitstream.create(ourContext, is);

    // FIXME: Set permissions for bitstream
    addBitstream(b);

    return b;
}
/**
 * Create a new bitstream in this bundle. This method is for registering
 * bitstreams whose data already exists in an assetstore.
 *
 * @param assetstore corresponds to an assetstore in dspace.cfg
 * @param bitstreamPath the path and filename relative to the assetstore
 * @return the newly created bitstream
 * @throws AuthorizeException if the current user lacks ADD on this bundle
 * @throws IOException if the file cannot be registered
 * @throws SQLException if a database error occurs
 */
public Bitstream registerBitstream(int assetstore, String bitstreamPath)
        throws AuthorizeException, IOException, SQLException
{
    // check authorisation
    AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);

    Bitstream b = Bitstream.register(ourContext, assetstore, bitstreamPath);

    // FIXME: Set permissions for bitstream
    addBitstream(b);

    return b;
}
/**
 * Add an existing bitstream to this bundle. A no-op if the bitstream is
 * already in the bundle. The bundle's policies are copied onto the
 * bitstream and the mapping row is written to the database immediately.
 *
 * @param b
 *            the bitstream to add
 * @throws SQLException if a database error occurs
 * @throws AuthorizeException if the current user lacks ADD on this bundle
 */
public void addBitstream(Bitstream b) throws SQLException,
        AuthorizeException
{
    // Check authorisation
    AuthorizeManager.authorizeAction(ourContext, this, Constants.ADD);

    log.info(LogManager.getHeader(ourContext, "add_bitstream", "bundle_id="
            + getID() + ",bitstream_id=" + b.getID()));

    // First check that the bitstream isn't already in the list
    // (generic for-each replaces the previous raw-typed index loop and cast)
    for (Bitstream existing : bitstreams)
    {
        if (b.getID() == existing.getID())
        {
            // Bitstream is already there; no change
            return;
        }
    }

    // Add the bitstream object
    bitstreams.add(b);

    ourContext.addEvent(new Event(Event.ADD, Constants.BUNDLE, getID(), Constants.BITSTREAM, b.getID(), String.valueOf(b.getSequenceID())));

    // copy authorization policies from bundle to bitstream
    // FIXME: multiple inclusion is affected by this...
    AuthorizeManager.inheritPolicies(ourContext, this, b);

    // Add the mapping row to the database
    TableRow mappingRow = DatabaseManager.row("bundle2bitstream");
    mappingRow.setColumn("bundle_id", getID());
    mappingRow.setColumn("bitstream_id", b.getID());
    DatabaseManager.insert(ourContext, mappingRow);
}
/**
 * Remove a bitstream from this bundle - the bitstream is only deleted if
 * this was the last reference to it
 * <p>
 * If the bitstream in question is the primary bitstream recorded for the
 * bundle the primary bitstream field is unset in order to free the
 * bitstream from the foreign key constraint so that the
 * <code>cleanup</code> process can run normally.
 *
 * @param b
 *            the bitstream to remove
 * @throws AuthorizeException if the current user lacks REMOVE on this bundle
 * @throws SQLException if a database error occurs
 * @throws IOException if deleting the orphaned bitstream's data fails
 */
public void removeBitstream(Bitstream b) throws AuthorizeException,
        SQLException, IOException
{
    // Check authorisation
    AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);

    log.info(LogManager.getHeader(ourContext, "remove_bitstream",
            "bundle_id=" + getID() + ",bitstream_id=" + b.getID()));

    // Remove from internal list of bitstreams; matches by ID, so every
    // in-memory entry with the same ID is dropped.
    ListIterator li = bitstreams.listIterator();

    while (li.hasNext())
    {
        Bitstream existing = (Bitstream) li.next();

        if (b.getID() == existing.getID())
        {
            // We've found the bitstream to remove
            li.remove();
        }
    }

    ourContext.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, getID(), Constants.BITSTREAM, b.getID(), String.valueOf(b.getSequenceID())));

    // In the event that the bitstream to remove is actually
    // the primary bitstream, be sure to unset the primary
    // bitstream.
    if (b.getID() == getPrimaryBitstreamID())
    {
        unsetPrimaryBitstreamID();
    }

    // Delete the mapping row
    DatabaseManager.updateQuery(ourContext,
            "DELETE FROM bundle2bitstream WHERE bundle_id= ? "+
            "AND bitstream_id= ? ",
            getID(), b.getID());

    // If the bitstream is orphaned, it's removed
    TableRowIterator tri = DatabaseManager.query(ourContext,
            "SELECT * FROM bundle2bitstream WHERE bitstream_id= ? ",
            b.getID());

    try
    {
        // No remaining mapping rows means no bundle references it any more.
        if (!tri.hasNext())
        {
            // The bitstream is an orphan, delete it
            b.delete();
        }
    }
    finally
    {
        // close the TableRowIterator to free up resources
        if (tri != null)
        {
            tri.close();
        }
    }
}
/**
 * Persist any outstanding changes to this bundle's row, firing a
 * MODIFY and/or MODIFY_METADATA event when the corresponding dirty
 * flag is set (flags are reset after the event is queued).
 */
public void update() throws SQLException, AuthorizeException
{
    // Check authorisation
    //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
    log.info(LogManager.getHeader(ourContext, "update_bundle", "bundle_id="
            + getID()));
    if (modified)
    {
        // Object-level change: queue a MODIFY event and clear the flag
        ourContext.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, getID(), null));
        modified = false;
    }
    if (modifiedMetadata)
    {
        // Metadata-level change: queue a MODIFY_METADATA event and clear the flag
        ourContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.BUNDLE, getID(), null));
        modifiedMetadata = false;
    }
    // Write the row regardless of which flags were set
    DatabaseManager.update(ourContext, bundleRow);
}
/**
 * Delete this bundle. Every contained bitstream is removed first, which
 * may cascade to deleting bitstreams left orphaned by this bundle's
 * removal. Finally the bundle's policies and database row are removed.
 */
void delete() throws SQLException, AuthorizeException, IOException
{
    log.info(LogManager.getHeader(ourContext, "delete_bundle", "bundle_id="
            + getID()));
    ourContext.addEvent(new Event(Event.DELETE, Constants.BUNDLE, getID(), getName()));
    // Evict ourselves from the context cache
    ourContext.removeCached(this, getID());
    // Detach (and possibly delete) every contained bitstream
    for (Bitstream bitstream : getBitstreams())
    {
        removeBitstream(bitstream);
    }
    // Drop all authorization policies attached to this bundle
    AuthorizeManager.removeAllPolicies(ourContext, this);
    // Finally remove our own database row
    DatabaseManager.delete(ourContext, bundleRow);
}
/**
 * Get this object's DSpace type.
 *
 * @return always {@code Constants.BUNDLE}
 */
public int getType()
{
    return Constants.BUNDLE;
}
/**
 * Remove all policies on the bundle and its contents, replacing them
 * with the collection's DEFAULT_BITSTREAM_READ policies (downgraded to
 * plain READ).
 *
 * @param c
 *            Collection supplying the default policies
 * @throws java.sql.SQLException
 *             if an SQL error occurs or if no default policies are
 *             found. It's a bit draconian, but default policies must be
 *             enforced.
 * @throws AuthorizeException
 */
public void inheritCollectionDefaultPolicies(Collection c)
        throws java.sql.SQLException, AuthorizeException
{
    List<ResourcePolicy> policies = AuthorizeManager.getPoliciesActionFilter(ourContext, c,
            Constants.DEFAULT_BITSTREAM_READ);
    // A collection with no default bitstream READ policies is a hard error
    if (policies.isEmpty())
    {
        throw new java.sql.SQLException("Collection " + c.getID()
                + " has no default bitstream READ policies");
    }
    // Downgrade each DEFAULT_BITSTREAM_READ policy to plain READ.
    // Deliberately no update() on the policies themselves!
    for (ResourcePolicy rp : policies)
    {
        rp.setAction(Constants.READ);
    }
    replaceAllBitstreamPolicies(policies);
}
/**
 * Remove all policies from this bundle and each of its bitstreams and
 * replace them with the supplied list.
 *
 * @param newpolicies -
 *            the complete new set of policies for the bundle and its
 *            bitstream contents
 * @throws SQLException
 * @throws AuthorizeException
 */
public void replaceAllBitstreamPolicies(List<ResourcePolicy> newpolicies)
        throws SQLException, AuthorizeException
{
    // Replace policies on each contained bitstream first
    if (bitstreams != null)
    {
        for (Bitstream bitstream : bitstreams)
        {
            AuthorizeManager.removeAllPolicies(ourContext, bitstream);
            AuthorizeManager.addPolicies(ourContext, newpolicies, bitstream);
        }
    }
    // Then replace the bundle's own policies
    AuthorizeManager.removeAllPolicies(ourContext, this);
    AuthorizeManager.addPolicies(ourContext, newpolicies, this);
}
/**
 * Get the resource policies attached directly to this bundle.
 *
 * @return the bundle's own policies (not those of its bitstreams)
 */
public List<ResourcePolicy> getBundlePolicies() throws SQLException
{
    return AuthorizeManager.getPolicies(ourContext, this);
}
/**
 * Collect the policies of every bitstream in this bundle.
 *
 * @return concatenation of each contained bitstream's policies; empty
 *         list if the bundle holds no bitstreams
 */
public List<ResourcePolicy> getBitstreamPolicies() throws SQLException
{
    List<ResourcePolicy> result = new ArrayList<ResourcePolicy>();
    if (bitstreams != null)
    {
        for (Bitstream bitstream : bitstreams)
        {
            result.addAll(AuthorizeManager.getPolicies(ourContext, bitstream));
        }
    }
    return result;
}
/**
 * Work out which object an admin-check for the given action should be
 * delegated to, based on the AuthorizeConfiguration settings.
 *
 * @param action an action constant from {@code Constants}
 * @return the item, owning collection, or owning community that admin
 *         rights should be checked against; {@code null} when the
 *         configured delegate is unavailable; this bundle itself for
 *         actions other than ADD/REMOVE
 */
public DSpaceObject getAdminObject(int action) throws SQLException
{
    // Resolve the owning item / collection / community chain, if any.
    Item item = null;
    Collection collection = null;
    Community community = null;
    Item[] items = getItems();
    if (items != null && items.length > 0)
    {
        item = items[0];
        collection = item.getOwningCollection();
        if (collection != null)
        {
            Community[] communities = collection.getCommunities();
            if (communities != null && communities.length > 0)
            {
                community = communities[0];
            }
        }
    }
    DSpaceObject adminObject = null;
    switch (action)
    {
        case Constants.REMOVE:
            // Removing a bitstream: delegate per configuration
            if (AuthorizeConfiguration.canItemAdminPerformBitstreamDeletion())
            {
                adminObject = item;
            }
            else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion())
            {
                adminObject = collection;
            }
            else if (AuthorizeConfiguration.canCommunityAdminPerformBitstreamDeletion())
            {
                adminObject = community;
            }
            break;
        case Constants.ADD:
            // Adding a bitstream: delegate per configuration
            if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation())
            {
                adminObject = item;
            }
            else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamCreation())
            {
                adminObject = collection;
            }
            else if (AuthorizeConfiguration.canCommunityAdminPerformBitstreamCreation())
            {
                adminObject = community;
            }
            break;
        default:
            // Any other action is checked against the bundle itself
            adminObject = this;
            break;
    }
    return adminObject;
}
/**
 * Get the parent object of this bundle: the first item containing it.
 *
 * @return the first owning item, or {@code null} when the bundle is not
 *         attached to any item
 */
public DSpaceObject getParentObject() throws SQLException
{
    Item[] items = getItems();
    boolean hasOwner = items != null && items.length > 0 && items[0] != null;
    return hasOwner ? items[0] : null;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
/**
 * Something went wrong inside the crosswalk, not necessarily caused by
 * the input or state (although it could be an incorrectly handled pathological
 * case). Most likely caused by a configuration problem. It deserves its own
 * exception because many crosswalks are configuration-driven (e.g. the XSLT
 * crosswalks) so configuration errors are likely to be common enough that
 * they ought to be easy to identify and debug.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 */
public class CrosswalkInternalException extends CrosswalkException
{
    /**
     * @param message detail message describing the internal failure
     */
    public CrosswalkInternalException(String message)
    {
        super(message);
    }

    /**
     * @param message detail message describing the internal failure
     * @param cause underlying exception that triggered this one
     */
    public CrosswalkInternalException(String message, Throwable cause)
    {
        super(message, cause);
    }

    /**
     * @param cause underlying exception that triggered this one
     */
    public CrosswalkInternalException(Throwable cause)
    {
        super(cause);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.File;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.SelfNamedPlugin;
import org.jdom.Namespace;
import org.jdom.transform.XSLTransformException;
import org.jdom.transform.XSLTransformer;
/**
 * Configurable XSLT-driven Crosswalk
 * <p>
 * This is the superclass of the XSLT dissemination and submission crosswalks.
 * These classes let you create many different crosswalks between
 * DSpace internal data and any XML without changing any code, just
 * XSL transformation (XSLT) stylesheets.
 * Each configured stylesheet appears as a new plugin name, although they all
 * share the same plugin implementation class.
 * <p>
 * The XML transformation must produce (for submission) or expect (for
 * dissemination) a document in DIM - DSpace Intermediate Metadata format.
 * See <a href="http://wiki.dspace.org/DspaceIntermediateMetadata">
 * http://wiki.dspace.org/DspaceIntermediateMetadata</a> for details.
 * <h3>Configuration</h3>
 * Prepare your DSpace configuration as follows:
 * <p>
 * A submission crosswalk is described by a
 * configuration key like
 * <pre> crosswalk.submission.<i>PluginName</i>.stylesheet = <i>path</i></pre>
 * The <em>alias</em> names the Plugin name,
 * and the <em>path</em> value is the pathname (relative to <code><em>dspace.dir</em>/config</code>)
 * of the crosswalk stylesheet, e.g. <code>"mycrosswalk.xslt"</code>
 * <p>
 * For example, this configures a crosswalk named "LOM" using a stylesheet
 * in <code>config/crosswalks/d-lom.xsl</code> under the DSpace "home" directory:
 * <pre> crosswalk.submission.LOM.stylesheet = crosswalks/d-lom.xsl</pre>
 * <p>
 * A dissemination crosswalk is described by a
 * configuration key like
 * <pre> crosswalk.dissemination.<i>PluginName</i>.stylesheet = <i>path</i></pre>
 * The <em>alias</em> names the Plugin name,
 * and the <em>path</em> value is the pathname (relative to <code><em>dspace.dir</em>/config</code>)
 * of the crosswalk stylesheet, e.g. <code>"mycrosswalk.xslt"</code>
 * <p>
 * You can have two names point to the same crosswalk,
 * just add two configuration entries with the same path, e.g.
 * <pre>
 * crosswalk.submission.MyFormat.stylesheet = crosswalks/myformat.xslt
 * crosswalk.submission.almost_DC.stylesheet = crosswalks/myformat.xslt
 * </pre>
 * <p>
 * NOTE: This plugin will automatically reload any XSL stylesheet that
 * was modified since it was last loaded. This lets you edit and test
 * stylesheets without restarting DSpace.
 * <p>
 * You must use the <code>PluginManager</code> to instantiate an
 * XSLT crosswalk plugin, e.g.
 * <pre> IngestionCrosswalk xwalk = PluginManager.getPlugin(IngestionCrosswalk.class, "LOM");</pre>
 * <p>
 * Since there is significant overhead in reading the properties file to
 * configure the crosswalk, and a crosswalk instance may be used any number
 * of times, we recommend caching one instance of the crosswalk for each
 * alias and simply reusing those instances. The <code>PluginManager</code>
 * does this automatically.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 */
public abstract class XSLTCrosswalk extends SelfNamedPlugin
{
    /** log4j category */
    private static Logger log = Logger.getLogger(XSLTCrosswalk.class);
    /**
     * DSpace XML Namespace in JDOM form.
     */
    public static final Namespace DIM_NS =
        Namespace.getNamespace("dim", "http://www.dspace.org/xmlns/dspace/dim");
    /** Prefix for all lines in the config file for XSLT plugins. */
    protected static final String CONFIG_PREFIX = "crosswalk.";
    /** Suffix of the configuration key naming a stylesheet path. */
    private static final String CONFIG_STYLESHEET = ".stylesheet";
    /**
     * Derive the list of plugin names from DSpace configuration entries
     * for crosswalks. The <em>direction</em> parameter should be either
     * "dissemination" or "submission", so it looks for keys like
     * <code>crosswalk.submission.{NAME}.stylesheet</code>
     *
     * @param direction "dissemination" or "submission"
     * @return plugin names extracted from matching configuration keys
     */
    protected static String[] makeAliases(String direction)
    {
        String prefix = CONFIG_PREFIX+direction+".";
        String suffix = CONFIG_STYLESHEET;
        List<String> aliasList = new ArrayList<String>();
        // NOTE(review): unchecked cast -- assumes propertyNames() yields only
        // String keys; confirm against ConfigurationManager's contract.
        Enumeration<String> pe = (Enumeration<String>)ConfigurationManager.propertyNames();
        log.debug("XSLTCrosswalk: Looking for config prefix = "+prefix);
        while (pe.hasMoreElements())
        {
            String key = pe.nextElement();
            // The plugin name is whatever sits between prefix and suffix
            if (key.startsWith(prefix) && key.endsWith(suffix))
            {
                log.debug("Getting XSLT plugin name from config line: "+key);
                aliasList.add(key.substring(prefix.length(), key.length()-suffix.length()));
            }
        }
        return aliasList.toArray(new String[aliasList.size()]);
    }
    // Cached transformer and the stylesheet file it was built from;
    // transformerLastModified drives the reload-on-change check below.
    private XSLTransformer transformer = null;
    private File transformerFile = null;
    private long transformerLastModified = 0;
    /**
     * Initialize the Transformation stylesheet from configured stylesheet file.
     * The stylesheet is lazily resolved from configuration on first call and
     * reloaded whenever the file's modification time advances.
     * NOTE(review): this lazy-init/reload sequence is not synchronized --
     * presumably instances are not shared across threads; confirm.
     *
     * @param direction the direction of xwalk, either "submission" or
     *   "dissemination"
     * @return transformer or null if there was error initializing.
     */
    protected XSLTransformer getTransformer(String direction)
    {
        if (transformerFile == null)
        {
            // First call: resolve the stylesheet path from configuration
            String myAlias = getPluginInstanceName();
            if (myAlias == null)
            {
                log.error("Must use PluginManager to instantiate XSLTCrosswalk so the class knows its name.");
                return null;
            }
            String cmPropName = CONFIG_PREFIX+direction+"."+myAlias+CONFIG_STYLESHEET;
            String fname = ConfigurationManager.getProperty(cmPropName);
            if (fname == null)
            {
                log.error("Missing configuration filename for XSLT-based crosswalk: no "+
                          "value for property = "+cmPropName);
                return null;
            }
            else
            {
                // Path is relative to <dspace.dir>/config
                String parent = ConfigurationManager.getProperty("dspace.dir") +
                    File.separator + "config" + File.separator;
                transformerFile = new File(parent, fname);
            }
        }
        // load if first time, or reload if stylesheet changed:
        if (transformer == null ||
            transformerFile.lastModified() > transformerLastModified)
        {
            try
            {
                log.debug((transformer == null ? "Loading " : "Reloading")+
                          getPluginInstanceName()+" XSLT stylesheet from "+transformerFile.toString());
                transformer = new XSLTransformer(transformerFile);
                transformerLastModified = transformerFile.lastModified();
            }
            catch (XSLTransformException e)
            {
                // Initialization failure is logged and null is returned;
                // a stale transformer (if any) remains cached.
                log.error("Failed to initialize XSLTCrosswalk("+getPluginInstanceName()+"):"+e.toString());
            }
        }
        return transformer;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.jdom.Element;
import org.jdom.Namespace;
/**
 * DIM ingestion crosswalk
 * <p>
 * Processes metadata encoded in DSpace Intermediate Format, without the overhead of XSLT processing.
 *
 * @author Alexey Maslov
 * @version $Revision: 1 $
 */
public class DIMIngestionCrosswalk
    implements IngestionCrosswalk
{
    /** DIM namespace (no prefix) used to locate <field> children. */
    private static final Namespace DIM_NS = Namespace.getNamespace("http://www.dspace.org/xmlns/dspace/dim");

    /**
     * Ingest a list of metadata elements. Handles three shapes: a single
     * <dim> root, loose <field> elements that still know their parent, or
     * an arbitrary list that gets wrapped before element-based ingestion.
     *
     * @param context DSpace context
     * @param dso target object; must be an Item
     * @param metadata elements to ingest; an empty or null list is a no-op
     */
    public void ingest(Context context, DSpaceObject dso, List<Element> metadata) throws CrosswalkException, IOException, SQLException, AuthorizeException {
        // Robustness fix: metadata.get(0) previously threw
        // IndexOutOfBoundsException on an empty list; treat it as a no-op.
        if (metadata == null || metadata.isEmpty()) {
            return;
        }
        Element first = metadata.get(0);
        if (first.getName().equals("dim") && metadata.size() == 1) {
            ingest(context,dso,first);
        }
        else if (first.getName().equals("field") && first.getParentElement() != null) {
            ingest(context,dso,first.getParentElement());
        }
        else {
            // Wrap loose elements so the element-based ingest can walk them
            Element wrapper = new Element("wrap", metadata.get(0).getNamespace());
            wrapper.addContent(metadata);
            ingest(context,dso,wrapper);
        }
    }

    /**
     * Ingest a DIM document rooted at the given element: each <field> child
     * becomes one metadata value on the target Item.
     *
     * @param context DSpace context
     * @param dso target object; must be an Item
     * @param root root element whose DIM <field> children are read; null is tolerated
     * @throws CrosswalkObjectNotSupported when dso is not an Item
     */
    public void ingest(Context context, DSpaceObject dso, Element root) throws CrosswalkException, IOException, SQLException, AuthorizeException {
        if (dso.getType() != Constants.ITEM)
        {
            throw new CrosswalkObjectNotSupported("DIMIngestionCrosswalk can only crosswalk an Item.");
        }
        Item item = (Item)dso;
        if (root == null) {
            System.err.println("The element received by ingest was null");
            return;
        }
        List<Element> metadata = root.getChildren("field",DIM_NS);
        for (Element field : metadata) {
            // Schema/element/qualifier/lang come straight from the attributes
            item.addMetadata(field.getAttributeValue("mdschema"), field.getAttributeValue("element"), field.getAttributeValue("qualifier"),
                field.getAttributeValue("lang"), field.getText());
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.ArrayUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.SelfNamedPlugin;
import org.jdom.Element;
import org.jdom.Namespace;
/**
 * Disseminator for Simple Dublin Core metadata in XML format.
 * Logic stolen from OAIDCCrosswalk. This is mainly intended
 * as a proof-of-concept, to use crosswalk plugins in the OAI-PMH
 * server.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 */
public class SimpleDCDisseminationCrosswalk extends SelfNamedPlugin
    implements DisseminationCrosswalk
{
    // namespaces of interest.
    // XXX FIXME: may also want http://www.openarchives.org/OAI/2.0/oai_dc/ for OAI
    private static final Namespace DC_NS =
        Namespace.getNamespace("dc", "http://purl.org/dc/elements/1.1/");
    // simple DC schema for OAI
    private static final String DC_XSD =
        "http://dublincore.org/schemas/xmls/simpledc20021212.xsd";
    //"http://www.openarchives.org/OAI/2.0/oai_dc.xsd";
    private static final String schemaLocation =
        DC_NS.getURI()+" "+DC_XSD;
    private static final Namespace namespaces[] =
        { DC_NS, XSI_NS };
    private static final String aliases[] = { "SimpleDC", "DC" };

    /** Plugin names under which this crosswalk is registered. */
    public static String[] getPluginNames()
    {
        return (String[]) ArrayUtils.clone(aliases);
    }

    /** Disseminate as a single <simpledc> root wrapping the field elements. */
    public Element disseminateElement(DSpaceObject dso)
        throws CrosswalkException,
               IOException, SQLException, AuthorizeException
    {
        Element root = new Element("simpledc", DC_NS);
        root.setAttribute("schemaLocation", schemaLocation, XSI_NS);
        root.addContent(disseminateListInternal(dso, false));
        return root;
    }

    /**
     * Returns object's metadata as XML elements.
     * Simple-minded copying of elements: convert contributor.author to
     * "creator" but otherwise just grab element name without qualifier.
     */
    public List<Element> disseminateList(DSpaceObject dso)
        throws CrosswalkException,
               IOException, SQLException, AuthorizeException
    {
        return disseminateListInternal(dso, true);
    }

    /**
     * Shared worker: map each DC value of the item to a simple DC element,
     * skipping description.provenance and renaming contributor.author.
     */
    public List<Element> disseminateListInternal(DSpaceObject dso, boolean addSchema)
        throws CrosswalkException,
               IOException, SQLException, AuthorizeException
    {
        if (dso.getType() != Constants.ITEM)
        {
            throw new CrosswalkObjectNotSupported("SimpleDCDisseminationCrosswalk can only crosswalk an Item.");
        }
        Item item = (Item) dso;
        DCValue[] dcValues = item.getDC(Item.ANY, Item.ANY, Item.ANY);
        List<Element> result = new ArrayList<Element>(dcValues.length);
        for (DCValue dc : dcValues)
        {
            // Never expose description.provenance
            boolean isProvenance = dc.element.equals("description")
                    && dc.qualifier != null && dc.qualifier.equals("provenance");
            if (isProvenance)
            {
                continue;
            }
            // contributor.author is exposed under the simple DC name "creator"
            boolean isAuthor = dc.element.equals("contributor")
                    && dc.qualifier != null && dc.qualifier.equals("author");
            Element field = new Element(isAuthor ? "creator" : dc.element, DC_NS);
            field.addContent(dc.value);
            if (addSchema)
            {
                field.setAttribute("schemaLocation", schemaLocation, XSI_NS);
            }
            result.add(field);
        }
        return result;
    }

    /** Namespaces used by this crosswalk's output (defensive copy). */
    public Namespace[] getNamespaces()
    {
        return (Namespace[]) ArrayUtils.clone(namespaces);
    }

    /** Schema location string for the simple DC schema. */
    public String getSchemaLocation()
    {
        return schemaLocation;
    }

    /** Only Items carry DC metadata, so only Items can be disseminated. */
    public boolean canDisseminate(DSpaceObject dso)
    {
        return dso.getType() == Constants.ITEM;
    }

    /** List form is the preferred representation for this crosswalk. */
    public boolean preferList()
    {
        return true;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.DSpaceObject;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageUtils;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.jdom.Element;
import org.jdom.Namespace;
/**
* METSRights Ingestion & Dissemination Crosswalk
* <p>
* Translate between DSpace internal policies (i.e. permissions) and the
* METSRights metadata schema
* (see <a href="http://www.loc.gov/standards/rights/METSRights.xsd">
* http://www.loc.gov/standards/rights/METSRights.xsd</a> for details).
* <p>
* Examples of METSRights usage available from:
* <a href="http://www.loc.gov/standards/rights/">
* http://www.loc.gov/standards/rights/</a>
* <p>
* This Crosswalk provides a way to export DSpace permissions into a standard
* format, and then re-import or restore them into a DSpace instance.
*
* @author Tim Donohue
* @version $Revision: 2108 $
*/
public class METSRightsCrosswalk
implements IngestionCrosswalk, DisseminationCrosswalk
{
/** log4j category */
private static Logger log = Logger.getLogger(METSRightsCrosswalk.class);
private static final Namespace METSRights_NS =
Namespace.getNamespace("rights", "http://cosimo.stanford.edu/sdr/metsrights/");
// XML schemaLocation fragment for this crosswalk, from config.
private String schemaLocation =
METSRights_NS.getURI()+" http://cosimo.stanford.edu/sdr/metsrights.xsd";
private static final Namespace namespaces[] = { METSRights_NS };
private static final Map<Integer,String> otherTypesMapping = new HashMap<Integer,String>();
static
{
//Mapping of DSpace Policy Actions to METSRights PermissionType values
// (These are the values stored in the @OTHERPERMITTYPE attribute in METSRights)
// NOTE: READ, WRITE, DELETE are not included here as they map directly to existing METSRights PermissionTypes
otherTypesMapping.put(Constants.ADD, "ADD CONTENTS");
otherTypesMapping.put(Constants.REMOVE, "REMOVE CONTENTS");
otherTypesMapping.put(Constants.ADMIN, "ADMIN");
otherTypesMapping.put(Constants.DEFAULT_BITSTREAM_READ, "READ FILE CONTENTS");
otherTypesMapping.put(Constants.DEFAULT_ITEM_READ, "READ ITEM CONTENTS");
}
// Value of METSRights <Context> @CONTEXTCLASS attribute to use for DSpace Groups
private static final String GROUP_CONTEXTCLASS = "MANAGED GRP";
// Value of METSRights <Context> @CONTEXTCLASS attribute to use for DSpace EPeople
private static final String PERSON_CONTEXTCLASS = "ACADEMIC USER";
// Value of METSRights <Context> @CONTEXTCLASS attribute to use for "Anonymous" DSpace Group
private static final String ANONYMOUS_CONTEXTCLASS = "GENERAL PUBLIC";
// Value of METSRights <Context> @CONTEXTCLASS attribute to use for "Administrator" DSpace Group
private static final String ADMIN_CONTEXTCLASS = "REPOSITORY MGR";
// Value of METSRights <UserName> @USERTYPE attribute to use for DSpace Groups
private static final String GROUP_USERTYPE = "GROUP";
// Value of METSRights <UserName> @USERTYPE attribute to use for DSpace Groups
private static final String PERSON_USERTYPE = "INDIVIDUAL";
/*----------- Dissemination functions -------------------*/
/** @return the METSRights namespace array (defensive copy). */
@Override
public Namespace[] getNamespaces()
{
    Namespace[] copy = (Namespace[]) ArrayUtils.clone(namespaces);
    return copy;
}
/** @return the configured METSRights schemaLocation fragment. */
@Override
public String getSchemaLocation()
{
    return schemaLocation;
}
/** Every DSpace object type except SITE can be disseminated. */
@Override
public boolean canDisseminate(DSpaceObject dso)
{
    boolean isSite = dso.getType() == Constants.SITE;
    return !isSite;
}
/**
 * Actually Disseminate into METSRights schema. This method locates all DSpace
 * policies (permissions) for the provided object, and translates them into
 * METSRights PermissionTypes.
 *
 * @param dso DSpace Object whose policies are exported; null yields null
 * @return XML Element corresponding to the new <RightsDeclarationMD> translation
 * @throws CrosswalkException
 * @throws IOException
 * @throws SQLException
 * @throws AuthorizeException
 */
@Override
public Element disseminateElement(DSpaceObject dso)
    throws CrosswalkException,
           IOException, SQLException, AuthorizeException
{
    if(dso==null)
    {
        return null;
    }
    // we don't have a way to provide METSRights for a SITE object
    else if(dso.getType() == Constants.SITE)
    {
        throw new CrosswalkObjectNotSupported("The METSRightsCrosswalk cannot crosswalk a SITE object");
    }
    //Root element: RightsDeclarationMD
    // All DSpace content is just under LICENSE -- no other rights can be claimed
    Element rightsMD = new Element("RightsDeclarationMD", METSRights_NS);
    rightsMD.setAttribute("RIGHTSCATEGORY", "LICENSED");
    //Three sections to METSRights:
    // * RightsDeclaration - general rights statement
    // * RightsHolder - info about who owns rights
    // * Context - info about specific permissions granted
    // We're just crosswalking DSpace policies to "Context" permissions by default
    // It's too difficult to make statements about who owns the rights and
    // what those rights are -- too many types of content can be stored in DSpace
    //Get all policies on this DSpace Object
    // NOTE(review): this Context is only complete()d on the success path; an
    // exception thrown below leaves it open -- consider try/finally. TODO confirm.
    Context context = new Context();
    List<ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dso);
    //For each DSpace policy
    for(ResourcePolicy policy : policies)
    {
        // DSpace Policies can either reference a Group or an Individual, but not both!
        Group group = policy.getGroup();
        EPerson person = policy.getEPerson();
        // Create our <Context> node for this policy
        Element rightsContext = new Element("Context", METSRights_NS);
        //First, handle Group-based policies
        // For Group policies we need to setup a
        // <Context CONTEXTCLASS='[group-type]'><UserName USERTYPE='GROUP'>[group-name]</UserName>...
        if(group != null)
        {
            //Default all DSpace groups to have "MANAGED GRP" as the type
            String contextClass=GROUP_CONTEXTCLASS;
            if(group.getID()==0) //DSpace Anonymous Group = 'GENERAL PUBLIC' type
            {
                contextClass = ANONYMOUS_CONTEXTCLASS;
            }
            else if(group.getID()==1) //DSpace Administrator Group = 'REPOSITORY MGR' type
            {
                contextClass = ADMIN_CONTEXTCLASS;
            }
            rightsContext.setAttribute("CONTEXTCLASS", contextClass);
            //If this is a "MANAGED GRP", then create a <UserName> child
            //to specify the group Name, and set @USERTYPE='GROUP'
            if(contextClass.equals(GROUP_CONTEXTCLASS))
            {
                try
                {
                    //Translate the Group name for export. This ensures that groups with Internal IDs in their names
                    // (e.g. COLLECTION_1_ADMIN) are properly translated using the corresponding Handle or external identifier.
                    String exportGroupName = PackageUtils.translateGroupNameForExport(context, group.getName());
                    //If translated group name is returned as "null", this means the Group name
                    // had an Internal Collection/Community ID embedded, which could not be
                    // translated properly to a Handle. We will NOT export these groups,
                    // as they could cause conflicts or data integrity problems if they are
                    // imported into another DSpace system.
                    if(exportGroupName!=null && !exportGroupName.isEmpty())
                    {
                        //Create <UserName USERTYPE='GROUP'> element. Add the Group's name to that element
                        Element rightsUser = new Element("UserName", METSRights_NS);
                        rightsUser.setAttribute("USERTYPE",GROUP_USERTYPE);
                        rightsUser.addContent(exportGroupName);
                        rightsContext.addContent(rightsUser);
                    }
                    else
                        //Skip over this Group, as we couldn't translate it for export.
                        //The Group seems to refer to a Community or Collection which no longer exists
                        continue;
                }
                catch(PackageException pe)
                {
                    //A PackageException will only be thrown if translateGroupNameForExport() fails
                    //We'll just wrap it as a CrosswalkException and throw it upwards
                    throw new CrosswalkException(pe);
                }
            }
            rightsMD.addContent(rightsContext);
        }//end if group
        //Next, handle User-based policies
        // For User policies we need to setup a
        // <Context CONTEXTCLASS='ACADEMIC USER'><UserName USERTYPE='INDIVIDUAL'>[group-name]</UserName>...
        else if(person!=null)
        {
            // All EPeople are considered 'Academic Users'
            rightsContext.setAttribute("CONTEXTCLASS", PERSON_CONTEXTCLASS);
            //Create a <UserName> node corresponding to person's email, set @USERTYPE='INDIVIDUAL'
            Element rightsUser = new Element("UserName", METSRights_NS);
            rightsUser.setAttribute("USERTYPE",PERSON_USERTYPE);
            rightsUser.addContent(person.getEmail());
            rightsContext.addContent(rightsUser);
            rightsMD.addContent(rightsContext);
        }//end if person
        else
            // NOTE(review): unbraced else -- only the log statement belongs to it.
            // The two statements below still run for such policies, but their
            // rightsContext is never attached to rightsMD, so the output is
            // effectively omitted as the message says. Confirm intended.
            log.error("Policy " + String.valueOf(policy.getID())
                    + " is neither user nor group! Omitted from package.")
;
        //Translate the DSpace ResourcePolicy into a <Permissions> element
        Element rightsPerm = translatePermissions(policy);
        rightsContext.addContent(rightsPerm);
    }//end for each policy
    context.complete();
    return rightsMD;
}
/** Wrap the single disseminated root element in a one-item list. */
@Override
public List<Element> disseminateList(DSpaceObject dso)
    throws CrosswalkException,
           IOException, SQLException, AuthorizeException
{
    Element single = disseminateElement(dso);
    List<Element> wrapped = new ArrayList<Element>(1);
    wrapped.add(single);
    return wrapped;
}
/** METSRights is disseminated as a single root element, not a list. */
@Override
public boolean preferList()
{
    return false;
}
/**
 * Translates a DSpace ResourcePolicy's permissions into a METSRights
 * <code>Permissions</code> element. Returns the created
 * <code>Permissions</code> element. This element may be empty if
 * there was an issue translating the ResourcePolicy.
 *
 * @param policy The DSpace ResourcePolicy
 * @return the Element representing the METSRIghts <code>Permissions</code> or null.
 */
private Element translatePermissions(ResourcePolicy policy)
{
    //Create our <Permissions> node to store all permissions in this context
    Element rightsPerm = new Element("Permissions", METSRights_NS);
    int action = policy.getAction();
    // Map the DSpace action onto METSRights attribute sets. Workflow-related
    // actions fall through to the default and enable no rights at all.
    switch (action)
    {
        case Constants.READ:
        case Constants.DEFAULT_BITSTREAM_READ:
        case Constants.DEFAULT_ITEM_READ:
            // READ family: may discover and display, never modify or delete
            rightsPerm.setAttribute("DISCOVER", "true");
            rightsPerm.setAttribute("DISPLAY", "true");
            rightsPerm.setAttribute("MODIFY", "false");
            rightsPerm.setAttribute("DELETE", "false");
            break;
        case Constants.WRITE:
        case Constants.ADD:
            // WRITE family: may modify, but not delete
            rightsPerm.setAttribute("DISCOVER", "true");
            rightsPerm.setAttribute("DISPLAY", "true");
            rightsPerm.setAttribute("MODIFY", "true");
            rightsPerm.setAttribute("DELETE", "false");
            break;
        case Constants.DELETE:
        case Constants.REMOVE:
            // DELETE family: may both modify and delete
            // (NOTE: Although Constants.DELETE is marked as "obsolete", it is still used in dspace-api)
            rightsPerm.setAttribute("DISCOVER", "true");
            rightsPerm.setAttribute("DISPLAY", "true");
            rightsPerm.setAttribute("MODIFY", "true");
            rightsPerm.setAttribute("DELETE", "true");
            break;
        case Constants.ADMIN:
            // ADMIN: full permissions
            rightsPerm.setAttribute("DISCOVER", "true");
            rightsPerm.setAttribute("DISPLAY", "true");
            rightsPerm.setAttribute("COPY", "true");
            rightsPerm.setAttribute("DUPLICATE", "true");
            rightsPerm.setAttribute("MODIFY", "true");
            rightsPerm.setAttribute("DELETE", "true");
            rightsPerm.setAttribute("PRINT", "true");
            break;
        default:
            //Unknown action -- don't enable any rights by default
            //NOTE: ALL WORKFLOW RELATED ACTIONS ARE NOT INCLUDED IN METSRIGHTS
            //DSpace API no longer assigns nor checks any of the following 'action' types:
            // * Constants.WORKFLOW_STEP_1
            // * Constants.WORKFLOW_STEP_2
            // * Constants.WORKFLOW_STEP_3
            // * Constants.WORKFLOW_ABORT
            break;
    }
    //Also add in OTHER permissionTypes, as necessary (see 'otherTypesMapping' above)
    // (These OTHER permissionTypes are used to tell apart similar DSpace permissions during Ingestion)
    if(otherTypesMapping.containsKey(action))
    {
        //if found in our 'otherTypesMapping', enable @OTHER attribute and add in the appropriate value to @OTHERPERMITTYPE attribute
        rightsPerm.setAttribute("OTHER", "true");
        rightsPerm.setAttribute("OTHERPERMITTYPE", otherTypesMapping.get(action));
    }
    return rightsPerm;
}
/*----------- Ingestion functions -------------------*/
/**
 * Ingest a whole METSRights XML document, starting at the specified root.
 *
 * @param context DSpace context
 * @param dso target object
 * @param root document root; must be a RightsDeclarationMD element
 * @throws MetadataValidationException when the root element is wrong
 * @throws CrosswalkException
 * @throws IOException
 * @throws SQLException
 * @throws AuthorizeException
 */
@Override
public void ingest(Context context, DSpaceObject dso, Element root)
    throws CrosswalkException, IOException, SQLException, AuthorizeException
{
    boolean validRoot = root.getName().equals("RightsDeclarationMD");
    if (!validRoot)
    {
        throw new MetadataValidationException("Wrong root element for METSRights: " + root.toString());
    }
    // Delegate to the list-based ingest for the wrapper's children
    ingest(context, dso, root.getChildren());
}
/**
 * Ingest a List of XML elements
 * <P>
 * This method creates new DSpace Policies based on the parsed
 * METSRights XML contents. These Policies assign permissions
 * to DSpace Groups or EPeople.
 * <P>
 * NOTE: This crosswalk will NOT create missing DSpace Groups or EPeople.
 * Therefore, it is recommended to use this METSRightsCrosswalk in
 * conjunction with another Crosswalk which can create/restore missing
 * Groups or EPeople (e.g. RoleCrosswalk).
 *
 * @param context DSpace context
 * @param dso the DSpace Object whose policies are being restored
 * @param ml list of METSRights XML elements to ingest
 * @throws CrosswalkException if the target object or XML contents cannot be crosswalked
 * @throws IOException I/O failure in services this calls
 * @throws SQLException database failure in services this calls
 * @throws AuthorizeException current user not authorized for this operation
 * @see RoleCrosswalk
 */
@Override
public void ingest(Context context, DSpaceObject dso, List<Element> ml)
    throws CrosswalkException, IOException, SQLException, AuthorizeException
{
    // we cannot crosswalk METSRights to a SITE object
    if (dso.getType() == Constants.SITE)
    {
        throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
    }

    // First, clear all existing Policies on this DSpace Object,
    // as we don't want them to conflict with policies we will be adding
    if (!ml.isEmpty())
    {
        AuthorizeManager.removeAllPolicies(context, dso);
    }

    // Loop through each Element in the List
    for (Element element : ml)
    {
        // if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
        if (element.getName().equals("RightsDeclarationMD"))
        {
            ingest(context, dso, element.getChildren());
        }
        // "Context" section (where permissions are stored)
        else if (element.getName().equals("Context"))
        {
            // get what class of context this is
            String contextClass = element.getAttributeValue("CONTEXTCLASS");
            // also get reference to the <Permissions> element
            Element permsElement = element.getChild("Permissions", METSRights_NS);

            // Dispatch on the context class; each helper resolves the
            // Group/EPerson and assigns the parsed permissions to it.
            if (ANONYMOUS_CONTEXTCLASS.equals(contextClass))
            {
                ingestAnonymousPermissions(context, dso, permsElement);
            }
            else if (ADMIN_CONTEXTCLASS.equals(contextClass))
            {
                ingestAdminPermissions(context, dso, permsElement);
            }
            else if (GROUP_CONTEXTCLASS.equals(contextClass))
            {
                ingestGroupPermissions(context, dso, element, permsElement);
            }
            else if (PERSON_CONTEXTCLASS.equals(contextClass))
            {
                ingestPersonPermissions(context, dso, element, permsElement);
            }
            else
            {
                log.error("Unrecognized CONTEXTCLASS: " + contextClass);
            }
        } // end if "Context" element
    } // end for each element
}

/**
 * Assigns the parsed permissions to the DSpace Anonymous group (ID=0).
 *
 * @param context DSpace context
 * @param dso the DSpace Object being restored
 * @param permsElement the METSRights <code>Permissions</code> element
 * @throws CrosswalkInternalException if the Anonymous group is missing from the database
 */
private void ingestAnonymousPermissions(Context context, DSpaceObject dso, Element permsElement)
    throws CrosswalkInternalException, SQLException, AuthorizeException
{
    // get DSpace Anonymous group, ID=0
    Group anonGroup = Group.find(context, 0);
    if (anonGroup == null)
    {
        throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
    }
    assignPermissions(context, dso, anonGroup, permsElement);
}

/**
 * Assigns the parsed permissions to the DSpace Administrator group (ID=1).
 *
 * @param context DSpace context
 * @param dso the DSpace Object being restored
 * @param permsElement the METSRights <code>Permissions</code> element
 * @throws CrosswalkInternalException if the Administrator group is missing from the database
 */
private void ingestAdminPermissions(Context context, DSpaceObject dso, Element permsElement)
    throws CrosswalkInternalException, SQLException, AuthorizeException
{
    // get DSpace Administrator group, ID=1
    Group adminGroup = Group.find(context, 1);
    if (adminGroup == null)
    {
        throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
    }
    assignPermissions(context, dso, adminGroup, permsElement);
}

/**
 * Assigns the parsed permissions to a named DSpace Group.
 * The group name is read from the <code>UserName</code> child of the
 * <code>Context</code> element and must refer to a pre-existing Group.
 *
 * @param context DSpace context
 * @param dso the DSpace Object being restored
 * @param element the METSRights <code>Context</code> element
 * @param permsElement the METSRights <code>Permissions</code> element
 * @throws CrosswalkException if the Group is missing, or if the group name cannot be translated
 */
private void ingestGroupPermissions(Context context, DSpaceObject dso, Element element, Element permsElement)
    throws CrosswalkException, SQLException, AuthorizeException
{
    try
    {
        // we need to find the name of DSpace group it pertains to:
        // the text within the <UserName> child element is the group's name
        String groupName = element.getChildTextTrim("UserName", METSRights_NS);
        // Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
        // from its external format (e.g. COLLECTION_<handle>_ADMIN)
        groupName = PackageUtils.translateGroupNameForImport(context, groupName);
        // Check if this group exists in DSpace already;
        // if not found, throw an error -- user should restore group from the SITE AIP
        Group group = Group.findByName(context, groupName);
        if (group == null)
        {
            throw new CrosswalkInternalException("Cannot restore Group permissions on object ("
                + "type=" + Constants.typeText[dso.getType()] + ", "
                + "handle=" + dso.getHandle() + ", "
                + "ID=" + dso.getID()
                + "). The Group named '" + groupName + "' is missing from DSpace. "
                + "Please restore this group using the SITE AIP, or recreate it.");
        }
        // assign permissions to group on this object
        assignPermissions(context, dso, group, permsElement);
    }
    catch (PackageException pe)
    {
        // A PackageException will only be thrown if translateGroupNameForImport() fails.
        // We'll just wrap it as a CrosswalkException and throw it upwards
        throw new CrosswalkException(pe);
    }
}

/**
 * Assigns the parsed permissions to a single DSpace EPerson.
 * The person is looked up by the email address (or, failing that, netid)
 * found in the <code>UserName</code> child of the <code>Context</code>
 * element, and must already exist in DSpace.
 *
 * @param context DSpace context
 * @param dso the DSpace Object being restored
 * @param element the METSRights <code>Context</code> element
 * @param permsElement the METSRights <code>Permissions</code> element
 * @throws CrosswalkInternalException if the EPerson is missing from DSpace
 */
private void ingestPersonPermissions(Context context, DSpaceObject dso, Element element, Element permsElement)
    throws CrosswalkInternalException, SQLException, AuthorizeException
{
    // we need to find the person it pertains to:
    // the text within the <UserName> child element is the person's email address
    String personEmail = element.getChildTextTrim("UserName", METSRights_NS);
    // Check if this person exists in DSpace already
    EPerson person = EPerson.findByEmail(context, personEmail);
    // If cannot find by email, try by netID
    // (though METSRights should contain email if it was exported by DSpace)
    if (person == null)
    {
        person = EPerson.findByNetid(context, personEmail);
    }
    // if not found, throw an error -- user should restore person from the SITE AIP
    if (person == null)
    {
        throw new CrosswalkInternalException("Cannot restore Person permissions on object ("
            + "type=" + Constants.typeText[dso.getType()] + ", "
            + "handle=" + dso.getHandle() + ", "
            + "ID=" + dso.getID()
            + "). The Person with email/netid '" + personEmail + "' is missing from DSpace. "
            + "Please restore this Person object using the SITE AIP, or recreate it.");
    }
    // assign permissions to person on this object
    assignPermissions(context, dso, person, permsElement);
}
/**
 * Parses the 'permsElement' (corresponding to a <code>Permissions</code>
 * element), and assigns those permissions to the specified Group
 * on the specified DSpace Object.
 * <P>
 * If the permissions cannot be parsed into a valid DSpace action,
 * a warning is logged and NO policy is created for this Group.
 *
 * @param context DSpace context object
 * @param dso The DSpace Object
 * @param group The DSpace Group
 * @param permsElement The METSRights <code>Permissions</code> element
 * @throws SQLException database failure while adding the policy
 * @throws AuthorizeException current user not authorized to add the policy
 */
private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
    throws SQLException, AuthorizeException
{
    // first, parse our permissions to determine which action we are allowing in DSpace
    int actionID = parsePermissions(permsElement);

    // If action ID is less than base READ permissions (value=0),
    // then something must've gone wrong in the parsing
    if (actionID < Constants.READ)
    {
        log.warn("Unable to properly restore all access permissions on object ("
            + "type=" + Constants.typeText[dso.getType()] + ", "
            + "handle=" + dso.getHandle() + ", "
            + "ID=" + dso.getID()
            + ") for group '" + group.getName() + "'.");
        // Don't create a policy with an invalid (negative) action ID.
        // (Previously the invalid ID was still passed to addPolicy().)
        return;
    }

    // Otherwise, add the appropriate group policy for this object
    AuthorizeManager.addPolicy(context, dso, actionID, group);
}
/**
 * Parses the 'permsElement' (corresponding to a <code>Permissions</code>
 * element), and assigns those permissions to the specified EPerson
 * on the specified DSpace Object.
 * <P>
 * If the permissions cannot be parsed into a valid DSpace action,
 * a warning is logged and NO policy is created for this EPerson.
 *
 * @param context DSpace context object
 * @param dso The DSpace Object
 * @param person The DSpace EPerson
 * @param permsElement The METSRights <code>Permissions</code> element
 * @throws SQLException database failure while adding the policy
 * @throws AuthorizeException current user not authorized to add the policy
 */
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
    throws SQLException, AuthorizeException
{
    // first, parse our permissions to determine which action we are allowing in DSpace
    int actionID = parsePermissions(permsElement);

    // If action ID is less than base READ permissions (value=0),
    // then something must've gone wrong in the parsing
    if (actionID < Constants.READ)
    {
        log.warn("Unable to properly restore all access permissions on object ("
            + "type=" + Constants.typeText[dso.getType()] + ", "
            + "handle=" + dso.getHandle() + ", "
            + "ID=" + dso.getID()
            + ") for person '" + person.getEmail() + "'.");
        // Don't create a policy with an invalid (negative) action ID.
        // (Previously the invalid ID was still passed to addPolicy().)
        return;
    }

    // Otherwise, add the appropriate EPerson policy for this object
    AuthorizeManager.addPolicy(context, dso, actionID, person);
}
/**
 * Parses the 'permsElement' (corresponding to a <code>Permissions</code>
 * element) to find the corresponding DSpace permission type. This
 * DSpace permission type must be one of the Action IDs specified in
 * <code>org.dspace.core.Constants</code>
 * <P>
 * Returns -1 if failed to parse permissions.
 *
 * @param permsElement The METSRights <code>Permissions</code> element
 * @return A DSpace Action ID from <code>org.dspace.core.Constants</code>, or -1 on failure
 */
private int parsePermissions(Element permsElement)
{
    // First, check if the @OTHERPERMITTYPE attribute is specified
    String otherPermitType = permsElement.getAttributeValue("OTHERPERMITTYPE");

    // if @OTHERPERMITTYPE attribute exists, it will map directly to a DSpace Action type
    if (otherPermitType != null && !otherPermitType.isEmpty())
    {
        // Search 'otherTypesMapping' in a single pass for the Action ID this
        // value maps to (avoids the redundant containsValue() pre-scan).
        for (int actionType : otherTypesMapping.keySet())
        {
            // if found, this is the Action ID corresponding to this permission
            if (otherTypesMapping.get(actionType).equals(otherPermitType))
            {
                return actionType;
            }
        }
        // no mapping found -- warn and fall through to the failure return below
        log.warn("Unrecognized @OTHERPERMITTYPE attribute value ("
            + otherPermitType
            + ") found in METSRights section of METS Manifest.");
    }
    else // Otherwise, a closer analysis of all Permission element attributes is necessary
    {
        boolean discoverPermit = Boolean.parseBoolean(permsElement.getAttributeValue("DISCOVER"));
        boolean displayPermit = Boolean.parseBoolean(permsElement.getAttributeValue("DISPLAY"));
        boolean modifyPermit = Boolean.parseBoolean(permsElement.getAttributeValue("MODIFY"));
        boolean deletePermit = Boolean.parseBoolean(permsElement.getAttributeValue("DELETE"));
        boolean otherPermit = Boolean.parseBoolean(permsElement.getAttributeValue("OTHER"));

        // if DELETE='true', this must refer to the DELETE action type
        // (note REMOVE & ADMIN action types have @OTHERPERMITTYPE values specified)
        if (deletePermit && !otherPermit)
        {
            return Constants.DELETE;
        }
        // if MODIFY='true', this must refer to the WRITE action type
        // (note ADD action type has an @OTHERPERMITTYPE value specified)
        else if (modifyPermit && !otherPermit)
        {
            return Constants.WRITE;
        }
        // DISCOVER + DISPLAY alone must refer to the READ action type
        else if (discoverPermit && displayPermit && !otherPermit)
        {
            return Constants.READ;
        }
    }

    // if we got here, we failed to parse out proper permissions
    // return -1 to signify failure (as 0 = READ permissions)
    return -1;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
 * A class implementing this interface can crosswalk metadata directly
 * from a stream (assumed to be in a specific format) to the object.
 * <p>
 * Stream-oriented crosswalks are intended to be used for metadata
 * formats which are either (a) not XML-based, or (b) too bulky for the
 * DOM-ish in-memory model developed for the METS and IMSCP packagers.
 * The METS packagers (all subclasses of AbstractMETSDisseminator / AbstractMETSIngester)
 * are equipped to call these crosswalks as well as the XML-based ones,
 * just refer to the desired crosswalk by its plugin name.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 */
public interface StreamIngestionCrosswalk
{
    /**
     * Execute crosswalk on the given object, taking input from the stream.
     *
     * @param context the DSpace context
     * @param dso the DSpace Object whose metadata is being ingested.
     * @param in input stream containing the metadata.
     * @param MIMEType the MIME type of the stream's contents, so the
     *                 implementation can tell what format it is being fed
     *                 (e.g. "text/xml").
     *
     * @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
     * @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
     * @throws IOException I/O failure in services this calls
     * @throws SQLException Database failure in services this calls
     * @throws AuthorizeException current user not authorized for this operation.
     */
    public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
        throws CrosswalkException, IOException, SQLException, AuthorizeException;
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.