code stringlengths 3 1.18M | language stringclasses 1
value |
|---|---|
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.Context;
/**
* Command-line tool for making changes to DSpace database when updating from
* version 1.1/1.1.1 to 1.2.
* <P>
* The changes are:
* <ul>
* <li>Setting owning collection field for items
* <li>Reorganising content bitstreams into one bundle named ORIGINAL, license
* bitstreams into a bundle named LICENSE
* <li>Setting the sequence_id numbers in the bitstream table. This happens as
* item.update() is called on every item.
* <li>If a (newly-reorganised) 'ORIGINAL' bundle contains a text/html
* bitstream, that bitstream is set to the primary bitstream for HTML support.
* </ul>
*/
public class Upgrade11To12
{
    /**
     * Command-line entry point. Walks every collection to set missing
     * owningCollection fields, then walks every item merging its single-file
     * bundles into one ORIGINAL bundle (plus a LICENSE bundle for the
     * license bitstream). Items that already have named or multi-bitstream
     * bundles are assumed to be upgraded already and are skipped.
     *
     * @param argv command-line arguments (unused)
     * @throws Exception on any database or authorization failure; the
     *         context is simply abandoned in that case
     */
    public static void main(String[] argv) throws Exception
    {
        Context c = new Context();

        // we are superuser!
        c.setIgnoreAuthorization(true);

        ItemIterator ii = null;

        // first set owning collections
        Collection[] collections = Collection.findAll(c);

        System.out.println("Setting item owningCollection fields in database");

        for (int q = 0; q < collections.length; q++)
        {
            ii = collections[q].getItems();

            while (ii.hasNext())
            {
                Item myItem = ii.next();

                // set it if it's not already set
                if (myItem.getOwningCollection() == null)
                {
                    myItem.setOwningCollection(collections[q]);
                    myItem.update();
                    System.out.println("Set owner of item " + myItem.getID()
                            + " to collection " + collections[q].getID());
                }
            }
        }

        // commit pending transactions before continuing
        c.commit();

        // now combine some bundles
        ii = Item.findAll(c);

        while (ii.hasNext())
        {
            boolean skipItem = false;
            Item myItem = ii.next();

            int licenseBundleIndex = -1; // array index of license bundle (we'll
                                         // skip this one often)
            int primaryBundleIndex = -1; // array index of our primary bundle
                                         // (all bitstreams assemble here)

            System.out.println("Processing item #: " + myItem.getID());

            Bundle[] myBundles = myItem.getBundles();

            // look for bundles with multiple bitstreams
            // (if any found, we'll skip this item)
            for (int i = 0; i < myBundles.length; i++)
            {
                // skip if bundle is already named
                if (myBundles[i].getName() != null)
                {
                    System.out
                            .println("Skipping this item - named bundles already found");
                    skipItem = true;

                    break;
                }

                Bitstream[] bitstreams = myBundles[i].getBitstreams();

                // skip this item if we already have bundles combined in this
                // item
                if (bitstreams.length > 1)
                {
                    System.out
                            .println("Skipping this item - compound bundles already found");
                    skipItem = true;

                    break;
                }

                // BUGFIX: an empty bundle used to crash the tool with an
                // ArrayIndexOutOfBoundsException at bitstreams[0]; just leave
                // such bundles alone and move on.
                if (bitstreams.length == 0)
                {
                    continue;
                }

                // is this the license? check the format
                BitstreamFormat bf = bitstreams[0].getFormat();

                if ("License".equals(bf.getShortDescription()))
                {
                    System.out.println("Found license!");

                    if (licenseBundleIndex == -1)
                    {
                        licenseBundleIndex = i;
                        System.out.println("License bundle set to: " + i);
                    }
                    else
                    {
                        System.out
                                .println("ERROR - multiple license bundles in item - skipping");
                        skipItem = true;

                        break;
                    }
                }
                else
                {
                    // not a license, if primary isn't set yet, set it
                    if (primaryBundleIndex == -1)
                    {
                        primaryBundleIndex = i;
                        System.out.println("Primary bundle set to: " + i);
                    }
                }
            }

            if (!skipItem)
            {
                // name the primary and license bundles
                if (primaryBundleIndex != -1)
                {
                    myBundles[primaryBundleIndex].setName("ORIGINAL");
                    myBundles[primaryBundleIndex].update();
                }

                if (licenseBundleIndex != -1)
                {
                    myBundles[licenseBundleIndex].setName("LICENSE");
                    myBundles[licenseBundleIndex].update();
                }

                for (int i = 0; i < myBundles.length; i++)
                {
                    Bitstream[] bitstreams = myBundles[i].getBitstreams();

                    // now we can safely assume no bundles with multiple
                    // bitstreams
                    if (bitstreams.length > 0 && (i != primaryBundleIndex)
                            && (i != licenseBundleIndex))
                    {
                        // defensive guard: should be unreachable (any
                        // non-license bundle with a bitstream would have set
                        // primaryBundleIndex above), but indexing with -1
                        // would otherwise throw
                        if (primaryBundleIndex == -1)
                        {
                            System.out.println("No primary bundle in item "
                                    + myItem.getID() + " - leaving bundle " + i
                                    + " alone");

                            continue;
                        }

                        // only option left is a bitstream to be combined
                        // with the primary bundle, removing the
                        // now-redundant bundle
                        myBundles[primaryBundleIndex]
                                .addBitstream(bitstreams[0]);
                        myItem.removeBundle(myBundles[i]);
                        System.out.println("Bitstream from bundle " + i
                                + " moved to primary bundle");

                        // flag if HTML bitstream
                        if ("text/html".equals(bitstreams[0].getFormat()
                                .getMIMEType()))
                        {
                            // BUGFIX: actually record the primary bitstream;
                            // previously only the message was printed
                            myBundles[primaryBundleIndex]
                                    .setPrimaryBitstreamID(bitstreams[0]
                                            .getID());
                            System.out
                                    .println("Set primary bitstream to HTML file in item #"
                                            + myItem.getID()
                                            + " for HTML support.");
                        }
                    }
                }

                // BUGFIX: update the item so the sequence IDs documented in
                // the class javadoc are actually assigned for every item
                myItem.update();
            }
        }

        c.complete();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
/**
* Class representing a particular Dublin Core metadata type, with various
* utility methods. In general, only used for manipulating the registry of
* Dublin Core types in the system, so most users will not need this.
*
* <p>
* The DCType implementation has been deprecated, please use MetadataManager,
* MetadataSchema and MetadataField instead. For backward compatibility the this
* implementation has been updated to transparently call the new classes.
* </p>
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision: 5844 $
* @deprecated
*/
public class DCType
{
    /** DSpace context used for all registry operations. */
    private Context context;

    /** The metadata field this deprecated type delegates to. */
    private MetadataField delegate = new MetadataField();

    /**
     * Wrap an existing metadata field as a DCType.
     *
     * @param context the DSpace context
     * @param field the metadata field to delegate to
     * @deprecated
     */
    public DCType(Context context, MetadataField field)
    {
        this.context = context;
        this.delegate = field;
    }

    /**
     * Default constructor; wraps a brand-new, unsaved metadata field.
     *
     * @param context the DSpace context
     * @deprecated
     */
    public DCType(Context context)
    {
        this.context = context;
    }

    /**
     * Convenience lookup of the element and qualifier for a given type ID.
     *
     * @param context
     *            context, in case DC types need to be read in from the DB
     * @param id
     *            the DC type ID
     * @return a two-String array: index 0 is the element, index 1 the
     *         qualifier; both entries are null when the ID is unknown
     * @deprecated
     */
    public static String[] quickFind(Context context, int id)
            throws SQLException
    {
        MetadataField found = MetadataField.find(context, id);
        String[] pair = new String[2];

        if (found != null)
        {
            pair[0] = found.getElement();
            pair[1] = found.getQualifier();
        }

        return pair;
    }

    /**
     * Look up a metadata field by ID and wrap it as a DCType.
     *
     * @param context
     *            DSpace context object
     * @param id
     *            ID of the Dublin Core type
     * @return the wrapped metadata field (the wrapped field is null when the
     *         ID is invalid)
     * @deprecated
     */
    public static DCType find(Context context, int id) throws SQLException
    {
        return new DCType(context, MetadataField.find(context, id));
    }

    /**
     * Look up a Dublin Core type by element and (optional) qualifier.
     *
     * @param context
     *            the DSpace context to use
     * @param element
     *            the element to find
     * @param qualifier
     *            the qualifier, or <code>null</code> for an unqualified type
     * @return the matching Dublin Core type, or <code>null</code> when the
     *         registry has no such entry
     * @throws AuthorizeException
     * @deprecated
     */
    public static DCType findByElement(Context context, String element,
            String qualifier) throws SQLException, AuthorizeException
    {
        MetadataField match = MetadataField.findByElement(context,
                MetadataSchema.DC_SCHEMA_ID, element, qualifier);

        return (match == null) ? null : new DCType(context, match);
    }

    /**
     * Retrieve every Dublin Core type in the registry.
     *
     * @return an array of all Dublin Core types
     * @deprecated
     */
    public static DCType[] findAll(Context context) throws SQLException
    {
        MetadataField[] fields = MetadataField.findAll(context);
        DCType[] types = new DCType[fields.length];

        for (int i = 0; i < fields.length; i++)
        {
            types[i] = new DCType(context, fields[i]);
        }

        return types;
    }

    /**
     * Create a new Dublin Core type in the DC schema.
     *
     * @param context
     *            DSpace context object
     * @return the newly created DCType
     * @throws NonUniqueMetadataException
     * @throws IOException
     * @deprecated
     */
    public static DCType create(Context context) throws SQLException,
            AuthorizeException, IOException, NonUniqueMetadataException
    {
        MetadataField created = new MetadataField();
        created.setSchemaID(MetadataSchema.DC_SCHEMA_ID);
        created.create(context);

        return new DCType(context, created);
    }

    /**
     * Delete this DC type. Fails with an <code>SQLException</code>
     * (referential integrity violation) if any DC values of this type still
     * exist in the database - those must be updated first.
     *
     * @deprecated
     */
    public void delete() throws SQLException, AuthorizeException
    {
        delegate.delete(context);
    }

    /**
     * Get the internal identifier of this metadata field.
     *
     * @return the internal identifier
     */
    public int getID()
    {
        return delegate.getFieldID();
    }

    /**
     * Get the DC element.
     *
     * @return the element
     */
    public String getElement()
    {
        return delegate.getElement();
    }

    /**
     * Set the DC element.
     *
     * @param s
     *            the new element
     */
    public void setElement(String s)
    {
        delegate.setElement(s);
    }

    /**
     * Get the DC qualifier, if any.
     *
     * @return the DC qualifier, or <code>null</code> for an unqualified
     *         element
     */
    public String getQualifier()
    {
        return delegate.getQualifier();
    }

    /**
     * Set the DC qualifier.
     *
     * @param s
     *            the DC qualifier, or <code>null</code> for an unqualified
     *            element
     */
    public void setQualifier(String s)
    {
        delegate.setQualifier(s);
    }

    /**
     * Get the scope note - information about the DC type and its use.
     *
     * @return the scope note
     */
    public String getScopeNote()
    {
        return delegate.getScopeNote();
    }

    /**
     * Set the scope note.
     *
     * @param s
     *            the new scope note
     */
    public void setScopeNote(String s)
    {
        delegate.setScopeNote(s);
    }

    /**
     * Persist any changes to the Dublin Core registry.
     *
     * @throws IOException
     * @throws NonUniqueMetadataException
     * @deprecated
     */
    public void update() throws SQLException, AuthorizeException,
            NonUniqueMetadataException, IOException
    {
        delegate.update(context);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
* @author Richard Jones
*
* An exception to report any problems with registry imports
*/
public class RegistryImportException extends Exception
{
    // Exception is Serializable; declare an explicit serialVersionUID so the
    // serialized form stays stable across compiler versions.
    private static final long serialVersionUID = 1L;

    /**
     * Create an empty registry import exception.
     */
    public RegistryImportException()
    {
        super();
    }

    /**
     * Create an exception with only a message.
     *
     * @param message the detail message
     */
    public RegistryImportException(String message)
    {
        super(message);
    }

    /**
     * Create an exception with an inner exception and a message.
     *
     * @param message the detail message
     * @param e the underlying cause
     */
    public RegistryImportException(String message, Throwable e)
    {
        super(message, e);
    }

    /**
     * Create an exception with an inner exception.
     *
     * @param e the underlying cause
     */
    public RegistryImportException(Throwable e)
    {
        super(e);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
/**
* A command-line tool for setting/removing community/sub-community
* relationships. Takes community DB Id or handle arguments as inputs.
*
* @author rrodgers
* @version $Revision: 5844 $
*/
public class CommunityFiliator
{
    /**
     * Command-line entry point. Parses the options, resolves the parent and
     * child community identifiers (database ID or handle), and either
     * establishes or removes the parent/child relationship.
     *
     * @param argv command-line arguments (see -h output)
     * @throws Exception on unexpected failures not handled below
     */
    public static void main(String[] argv) throws Exception
    {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("s", "set", false, "set a parent/child relationship");
        options.addOption("r", "remove", false,
                "remove a parent/child relationship");
        options.addOption("p", "parent", true,
                "parent community (handle or database ID)");
        options.addOption("c", "child", true,
                "child community (handle or databaseID)");
        options.addOption("h", "help", false, "help");

        CommandLine line = parser.parse(options, argv);

        String command = null; // set or remove
        String parentID = null;
        String childID = null;

        if (line.hasOption('h'))
        {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("CommunityFiliator\n", options);
            System.out
                    .println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID");
            System.out
                    .println("remove a relationship: CommunityFiliator -r -p parentID -c childID");

            System.exit(0);
        }

        if (line.hasOption('s'))
        {
            command = "set";
        }

        if (line.hasOption('r'))
        {
            command = "remove";
        }

        if (line.hasOption('p')) // parent
        {
            parentID = line.getOptionValue('p');
        }

        if (line.hasOption('c')) // child
        {
            childID = line.getOptionValue('c');
        }

        // now validate
        // must have a command set
        if (command == null)
        {
            System.out
                    .println("Error - must run with either set or remove (run with -h flag for details)");
            System.exit(1);
        }

        if ("set".equals(command) || "remove".equals(command))
        {
            if (parentID == null)
            {
                System.out.println("Error - a parentID must be specified (run with -h flag for details)");
                System.exit(1);
            }

            if (childID == null)
            {
                System.out.println("Error - a childID must be specified (run with -h flag for details)");
                System.exit(1);
            }
        }

        CommunityFiliator filiator = new CommunityFiliator();
        Context c = new Context();

        // we are superuser!
        c.setIgnoreAuthorization(true);

        try
        {
            // validate and resolve the parent and child IDs into communities
            Community parent = filiator.resolveCommunity(c, parentID);
            Community child = filiator.resolveCommunity(c, childID);

            if (parent == null)
            {
                System.out.println("Error, parent community cannot be found: "
                        + parentID);
                System.exit(1);
            }

            if (child == null)
            {
                System.out.println("Error, child community cannot be found: "
                        + childID);
                System.exit(1);
            }

            if ("set".equals(command))
            {
                filiator.filiate(c, parent, child);
            }
            else
            {
                filiator.defiliate(c, parent, child);
            }
        }
        catch (SQLException sqlE)
        {
            System.out.println("Error - SQL exception: " + sqlE.toString());
        }
        catch (AuthorizeException authE)
        {
            System.out.println("Error - Authorize exception: "
                    + authE.toString());
        }
        catch (IOException ioE)
        {
            System.out.println("Error - IO exception: " + ioE.toString());
        }
    }

    /**
     * Make child a sub-community of parent. Exits the JVM with status 1 when
     * the filiation would be invalid (child already has a parent, or the
     * relationship would be circular).
     *
     * @param c the DSpace context
     * @param parent the community to become the parent
     * @param child the community to become the child
     */
    public void filiate(Context c, Community parent, Community child)
            throws SQLException, AuthorizeException, IOException
    {
        // check that a valid filiation would be established
        // first test - proposed child must currently be an orphan (i.e.
        // top-level)
        Community childDad = child.getParentCommunity();

        if (childDad != null)
        {
            System.out.println("Error, child community: " + child.getID()
                    + " already a child of: " + childDad.getID());
            System.exit(1);
        }

        // second test - circularity: parent's parents can't include proposed
        // child
        Community[] parentDads = parent.getAllParents();

        for (int i = 0; i < parentDads.length; i++)
        {
            if (parentDads[i].getID() == child.getID())
            {
                System.out
                        .println("Error, circular parentage - child is parent of parent");
                System.exit(1);
            }
        }

        // everything's OK
        parent.addSubcommunity(child);

        // complete the pending transaction
        c.complete();
        System.out.println("Filiation complete. Community: '" + parent.getID()
                + "' is parent of community: '" + child.getID() + "'");
    }

    /**
     * Remove the parent/child mapping between the two communities; the child
     * becomes a top-level community. Exits the JVM with status 1 when child
     * is not actually a child of parent.
     *
     * @param c the DSpace context
     * @param parent the current parent community
     * @param child the child community to detach
     */
    public void defiliate(Context c, Community parent, Community child)
            throws SQLException, AuthorizeException, IOException
    {
        // verify that child is indeed a child of parent
        Community[] parentKids = parent.getSubcommunities();
        boolean isChild = false;

        for (int i = 0; i < parentKids.length; i++)
        {
            if (parentKids[i].getID() == child.getID())
            {
                isChild = true;

                break;
            }
        }

        if (!isChild)
        {
            System.out
                    .println("Error, child community not a child of parent community");
            System.exit(1);
        }

        // OK remove the mappings - but leave the community, which will become
        // top-level
        DatabaseManager.updateQuery(c,
                "DELETE FROM community2community WHERE parent_comm_id= ? "+
                "AND child_comm_id= ? ", parent.getID(), child.getID());

        // complete the pending transaction
        c.complete();
        System.out.println("Defiliation complete. Community: '" + child.getID()
                + "' is no longer a child of community: '" + parent.getID()
                + "'");
    }

    /**
     * Resolve a community identifier, which may be a handle (contains '/')
     * or a plain database ID.
     *
     * @param c the DSpace context
     * @param communityID handle or database ID string
     * @return the community, or null when the identifier does not resolve to
     *         a community (including malformed database IDs)
     */
    private Community resolveCommunity(Context c, String communityID)
            throws SQLException
    {
        Community community = null;

        if (communityID.indexOf('/') != -1)
        {
            // has a / so it must be a handle
            community = (Community) HandleManager.resolveToObject(c,
                    communityID);

            // ensure it's a community
            if ((community == null)
                    || (community.getType() != Constants.COMMUNITY))
            {
                community = null;
            }
        }
        else
        {
            try
            {
                community = Community.find(c, Integer.parseInt(communityID));
            }
            catch (NumberFormatException nfe)
            {
                // BUGFIX: a non-numeric, non-handle ID used to escape as an
                // uncaught NumberFormatException; report "not found" instead
                community = null;
            }
        }

        return community;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Element;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* This class deals with importing community and collection structures from
* an XML file.
*
* The XML file structure needs to be:
*
* <import_structure>
* <community>
* <name>....</name>
* <community>...</community>
* <collection>
* <name>....</name>
* </collection>
* </community>
* </import_structure>
*
* it can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system
* documentation for more details
*
* @author Richard Jones
*
*/
public class StructBuilder
{
    /** The output XML document, which will contain the imported structure
     *  annotated with the handle assigned to each community and collection. */
    private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));

    /** Map of source XML element names to collection metadata field names. */
    private static Map<String, String> collectionMap = new HashMap<String, String>();

    /** Map of source XML element names to community metadata field names. */
    private static Map<String, String> communityMap = new HashMap<String, String>();

    /**
     * Main method to be run from the command line to import a structure into
     * DSpace.
     *
     * This is of the form:
     *
     * StructBuilder -f [xml source] -e [administrator email] -o [output file]
     *
     * The output file will contain the same structure as the source XML
     * document, with the handle of each imported object added as an
     * attribute.
     */
    public static void main(String[] argv)
            throws Exception
    {
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("f", "file", true, "file");
        options.addOption("e", "eperson", true, "eperson");
        options.addOption("o", "output", true, "output");

        CommandLine line = parser.parse(options, argv);

        String file = null;
        String eperson = null;
        String output = null;

        if (line.hasOption('f'))
        {
            file = line.getOptionValue('f');
        }

        if (line.hasOption('e'))
        {
            eperson = line.getOptionValue('e');
        }

        if (line.hasOption('o'))
        {
            output = line.getOptionValue('o');
        }

        if (output == null || eperson == null || file == null)
        {
            usage();
            // BUGFIX: exit non-zero so calling scripts can detect the error
            System.exit(1);
        }

        // create a context
        Context context = new Context();

        // set the context
        // BUGFIX: check the eperson exists instead of failing later with an NPE
        EPerson user = EPerson.findByEmail(context, eperson);
        if (user == null)
        {
            System.out.println("Error, eperson cannot be found: " + eperson);
            System.exit(1);
        }
        context.setCurrentUser(user);

        // load the XML
        Document document = loadXML(file);

        // run the preliminary validation, to be sure that the XML document
        // is properly structured
        validate(document);

        // load the mappings into the member variable hashmaps
        communityMap.put("name", "name");
        communityMap.put("description", "short_description");
        communityMap.put("intro", "introductory_text");
        communityMap.put("copyright", "copyright_text");
        communityMap.put("sidebar", "side_bar_text");

        collectionMap.put("name", "name");
        collectionMap.put("description", "short_description");
        collectionMap.put("intro", "introductory_text");
        collectionMap.put("copyright", "copyright_text");
        collectionMap.put("sidebar", "side_bar_text");
        collectionMap.put("license", "license");
        collectionMap.put("provenance", "provenance_description");

        // get the top level community list
        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");

        // run the import starting with the top level communities
        Element[] elements = handleCommunities(context, first, null);

        // generate the output
        Element root = xmlOutput.getRootElement();
        for (int i = 0; i < elements.length; i++)
        {
            root.addContent(elements[i]);
        }

        // finally write the string into the output file
        // BUGFIX: close the writer in a finally block so it is not leaked
        // when write() throws
        BufferedWriter out = null;
        try
        {
            out = new BufferedWriter(new FileWriter(output));
            out.write(new XMLOutputter().outputString(xmlOutput));
        }
        catch (IOException e)
        {
            System.out.println("Unable to write to output file " + output);
            System.exit(1);
        }
        finally
        {
            if (out != null)
            {
                try
                {
                    out.close();
                }
                catch (IOException e)
                {
                    // nothing more we can do on close failure
                }
            }
        }

        context.complete();
    }

    /**
     * Output the usage information.
     */
    private static void usage()
    {
        System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
        System.out.println("Communities will be created from the top level, and a map of communities to handles will be returned in the output file");
        return;
    }

    /**
     * Validate the XML document. Returns normally when the document is
     * valid; otherwise prints the accumulated errors and terminates the JVM
     * without touching the DSpace instance.
     *
     * @param document the XML document object
     * @throws TransformerException
     */
    private static void validate(org.w3c.dom.Document document)
            throws TransformerException
    {
        StringBuffer err = new StringBuffer();
        boolean trip = false;

        err.append("The following errors were encountered parsing the source XML\n");
        err.append("No changes have been made to the DSpace instance\n\n");

        NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
        if (first.getLength() == 0)
        {
            err.append("-There are no top level communities in the source document");
            System.out.println(err.toString());
            // BUGFIX: exit non-zero on validation failure
            System.exit(1);
        }

        String errs = validateCommunities(first, 1);
        if (errs != null)
        {
            err.append(errs);
            trip = true;
        }

        if (trip)
        {
            System.out.println(err.toString());
            // BUGFIX: exit non-zero on validation failure
            System.exit(1);
        }
    }

    /**
     * Validate the communities section of the XML document, recursing into
     * sub-communities and collections.
     *
     * @param communities the NodeList of communities to validate
     * @param level the level in the XML document that we are at, for the
     *              purposes of error reporting
     *
     * @return the errors to be reported by the calling method, or null if
     *         there were none
     */
    private static String validateCommunities(NodeList communities, int level)
            throws TransformerException
    {
        StringBuffer err = new StringBuffer();
        boolean trip = false;
        String errs = null;

        for (int i = 0; i < communities.getLength(); i++)
        {
            Node n = communities.item(i);
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1)
            {
                String pos = Integer.toString(i + 1);
                err.append("-The level " + level + " community in position " + pos);
                err.append(" does not contain exactly one name field\n");
                trip = true;
            }

            // validate sub communities
            NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
            String comErrs = validateCommunities(subCommunities, level + 1);
            if (comErrs != null)
            {
                err.append(comErrs);
                trip = true;
            }

            // validate collections
            NodeList collections = XPathAPI.selectNodeList(n, "collection");
            String colErrs = validateCollections(collections, level + 1);
            if (colErrs != null)
            {
                err.append(colErrs);
                trip = true;
            }
        }

        if (trip)
        {
            errs = err.toString();
        }

        return errs;
    }

    /**
     * Validate the collection section of the XML document.
     *
     * @param collections a NodeList of collections to validate
     * @param level the level in the XML document, for error reporting
     *
     * @return the errors to be reported by the calling method, or null if
     *         there were none
     */
    private static String validateCollections(NodeList collections, int level)
            throws TransformerException
    {
        StringBuffer err = new StringBuffer();
        boolean trip = false;
        String errs = null;

        for (int i = 0; i < collections.getLength(); i++)
        {
            Node n = collections.item(i);
            NodeList name = XPathAPI.selectNodeList(n, "name");
            if (name.getLength() != 1)
            {
                String pos = Integer.toString(i + 1);
                err.append("-The level " + level + " collection in position " + pos);
                err.append(" does not contain exactly one name field\n");
                trip = true;
            }
        }

        if (trip)
        {
            errs = err.toString();
        }

        return errs;
    }

    /**
     * Load the XML document from file.
     *
     * @param filename
     *            the filename to load from
     *
     * @return the DOM representation of the XML file
     */
    private static org.w3c.dom.Document loadXML(String filename)
            throws IOException, ParserConfigurationException, SAXException
    {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder();

        org.w3c.dom.Document document = builder.parse(new File(filename));

        return document;
    }

    /**
     * Return the String value of a Node: the trimmed text of its first child
     * when that child is a text node, otherwise the node's own value.
     *
     * @param node the node from which we want to extract the string value
     *
     * @return the string value of the node
     */
    public static String getStringValue(Node node)
    {
        String value = node.getNodeValue();

        if (node.hasChildNodes())
        {
            Node first = node.getFirstChild();

            if (first.getNodeType() == Node.TEXT_NODE)
            {
                return first.getNodeValue().trim();
            }
        }

        return value;
    }

    /**
     * Append a child element with the given tag and text to the parent, but
     * only when the value is non-null. Used to echo imported metadata into
     * the output document.
     *
     * @param parent the element to append to
     * @param tag the tag name of the new child element
     * @param value the text content, or null to skip
     */
    private static void addChildIfPresent(Element parent, String tag, String value)
    {
        if (value != null)
        {
            Element child = new Element(tag);
            child.setText(value);
            parent.addContent(child);
        }
    }

    /**
     * Take a node list of communities and build the structure from them,
     * delegating to the relevant methods in this class for sub-communities
     * and collections.
     *
     * @param context the context of the request
     * @param communities a nodelist of communities to create along with
     *                    their sub-structures
     * @param parent the parent community of the nodelist of communities to
     *               create, or null for top-level communities
     *
     * @return an element array containing additional information regarding
     *         the created communities (e.g. the handles they were assigned)
     */
    private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
            throws TransformerException, SQLException, Exception
    {
        Element[] elements = new Element[communities.getLength()];

        for (int i = 0; i < communities.getLength(); i++)
        {
            Community community;
            Element element = new Element("community");

            // create the community or sub community
            if (parent != null)
            {
                community = parent.createSubcommunity();
            }
            else
            {
                community = Community.create(null, context);
            }

            // default the short description to be an empty string
            community.setMetadata("short_description", " ");

            // now update the metadata
            Node tn = communities.item(i);
            for (Map.Entry<String, String> entry : communityMap.entrySet())
            {
                NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
                if (nl.getLength() == 1)
                {
                    community.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
                }
            }

            // FIXME: at the moment, if the community already exists by name
            // then this will throw a PSQLException on a duplicate key
            // violation.
            // Ideally we'd skip this row and continue to create sub
            // communities and so forth where they don't exist, but it's
            // proving difficult to isolate the community that already exists
            // without hitting the database directly.
            community.update();

            // build the element with the handle that identifies the new
            // community, along with the imported metadata. This is kept
            // separate from the create process above in case we want to move
            // it or make it switchable later.
            element.setAttribute("identifier", community.getHandle());

            Element nameElement = new Element("name");
            nameElement.setText(community.getMetadata("name"));
            element.addContent(nameElement);

            addChildIfPresent(element, "description", community.getMetadata("short_description"));
            addChildIfPresent(element, "intro", community.getMetadata("introductory_text"));
            addChildIfPresent(element, "copyright", community.getMetadata("copyright_text"));
            addChildIfPresent(element, "sidebar", community.getMetadata("side_bar_text"));

            // handle sub communities
            NodeList subCommunities = XPathAPI.selectNodeList(tn, "community");
            Element[] subCommunityElements = handleCommunities(context, subCommunities, community);

            // handle collections
            NodeList collections = XPathAPI.selectNodeList(tn, "collection");
            Element[] collectionElements = handleCollections(context, collections, community);

            int j;
            for (j = 0; j < subCommunityElements.length; j++)
            {
                element.addContent(subCommunityElements[j]);
            }
            for (j = 0; j < collectionElements.length; j++)
            {
                element.addContent(collectionElements[j]);
            }

            elements[i] = element;
        }

        return elements;
    }

    /**
     * Take a node list of collections and create the structure from them.
     *
     * @param context the context of the request
     * @param collections the node list of collections to be created
     * @param parent the parent community to whom the collections belong
     *
     * @return an Element array containing additional information about the
     *         created collections (e.g. the handle)
     */
    private static Element[] handleCollections(Context context, NodeList collections, Community parent)
            throws TransformerException, SQLException, AuthorizeException, IOException, Exception
    {
        Element[] elements = new Element[collections.getLength()];

        for (int i = 0; i < collections.getLength(); i++)
        {
            Element element = new Element("collection");
            Collection collection = parent.createCollection();

            // default the short description to the empty string
            collection.setMetadata("short_description", " ");

            // import the rest of the metadata
            Node tn = collections.item(i);
            for (Map.Entry<String, String> entry : collectionMap.entrySet())
            {
                NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
                if (nl.getLength() == 1)
                {
                    collection.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
                }
            }

            collection.update();

            // record the new collection's handle and echo its metadata
            element.setAttribute("identifier", collection.getHandle());

            Element nameElement = new Element("name");
            nameElement.setText(collection.getMetadata("name"));
            element.addContent(nameElement);

            addChildIfPresent(element, "description", collection.getMetadata("short_description"));
            addChildIfPresent(element, "intro", collection.getMetadata("introductory_text"));
            addChildIfPresent(element, "copyright", collection.getMetadata("copyright_text"));
            addChildIfPresent(element, "sidebar", collection.getMetadata("side_bar_text"));
            addChildIfPresent(element, "license", collection.getMetadata("license"));
            addChildIfPresent(element, "provenance", collection.getMetadata("provenance_description"));

            elements[i] = element;
        }

        return elements;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* @author Richard Jones
*
* This class provides the tools that registry importers might need to
* use. Basically some utility methods. And actually, although it says
* I am the author, really I ripped these methods off from other
* classes
*/
public class RegistryImporter
{
    /**
     * Load in the XML from file.
     *
     * NOTE(review): the DocumentBuilderFactory is used with default settings,
     * so DTDs and external entities are resolved. That is acceptable for
     * trusted local registry files, but confirm this method is never fed
     * untrusted input.
     *
     * @param filename
     *            the filename to load from
     *
     * @return the DOM representation of the XML file
     */
    public static Document loadXML(String filename)
        throws IOException, ParserConfigurationException, SAXException
    {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder();

        Document document = builder.parse(new File(filename));

        return document;
    }

    /**
     * Get the CDATA of a particular element. For example, if the XML document
     * contains:
     * <P>
     * <code>
     * &lt;foo&gt;&lt;mimetype&gt;application/pdf&lt;/mimetype&gt;&lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>mimetype</code> will
     * return <code>application/pdf</code>.
     * </P>
     * Returns null if the child element is missing or empty.
     *
     * @param parentElement
     *            the element, whose child element you want the CDATA from
     * @param childName
     *            the name of the element you want the CDATA from
     *
     * @return the CDATA as a <code>String</code>, or null if absent
     */
    public static String getElementData(Node parentElement, String childName)
        throws TransformerException
    {
        // Grab the child node
        Node childNode = XPathAPI.selectSingleNode(parentElement, childName);

        if (childNode == null)
        {
            // No child node, so no values
            return null;
        }

        // Get the #text node holding the element's character data
        Node dataNode = childNode.getFirstChild();

        if (dataNode == null)
        {
            // Element exists but is empty
            return null;
        }

        // Get the data, trimming surrounding whitespace
        String value = dataNode.getNodeValue().trim();

        return value;
    }

    /**
     * Get repeated CDATA for a particular element. For example, if the XML
     * document contains:
     * <P>
     * <code>
     * &lt;foo&gt;
     *   &lt;bar&gt;val1&lt;/bar&gt;
     *   &lt;bar&gt;val2&lt;/bar&gt;
     * &lt;/foo&gt;
     * </code>
     * passing this the <code>foo</code> node and <code>bar</code> will
     * return <code>val1</code> and <code>val2</code>.
     * </P>
     * An empty child element yields a null entry in the returned array
     * (previously this threw a NullPointerException).
     *
     * @param parentElement
     *            the element, whose child element you want the CDATA from
     * @param childName
     *            the name of the element you want the CDATA from
     *
     * @return the CDATA values as a <code>String[]</code>
     */
    public static String[] getRepeatedElementData(Node parentElement,
            String childName) throws TransformerException
    {
        // Grab all matching child nodes
        NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);

        String[] data = new String[childNodes.getLength()];

        for (int i = 0; i < childNodes.getLength(); i++)
        {
            // Get the #text node; an empty element has no text child, so
            // guard against NPE and record null, mirroring getElementData()
            Node dataNode = childNodes.item(i).getFirstChild();
            data[i] = (dataNode == null) ? null : dataNode.getNodeValue().trim();
        }

        return data;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
* Implement this interface to participate in the stackable
* authentication mechanism. See the <code>AuthenticationManager</code>
* class for details about configuring authentication handlers.
* <p>
* Each <em>authentication method</em> provides a way to map
* "credentials" supplied by the client into a DSpace e-person.
* "Authentication" is when the credentials are compared against some
* sort of registry or other test of authenticity.
* <p>
* The DSpace instance may configure many authentication methods, in a
* "stack". The same credentials are passed to each method in turn
* until one accepts them, so each method need only attempt to interpret
* and validate the credentials and fail gracefully if they are not
* appropriate for it. The next method in the stack is then called.
*
* @see AuthenticationManager
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public interface AuthenticationMethod {

    /**
     * Symbolic return values for the authenticate() method.
     * Lower values are "more favorable" outcomes (see AuthenticationManager).
     */

    /** Authenticated OK, EPerson has been set. */
    public static final int SUCCESS = 1;

    /** User exists, but credentials (<em>e.g.</em> passwd) don't match. */
    public static final int BAD_CREDENTIALS = 2;

    /** Not allowed to login this way without X.509 certificate. */
    public static final int CERT_REQUIRED = 3;

    /** User not found using this method. */
    public static final int NO_SUCH_USER = 4;

    /** User or password is not appropriate for this method. */
    public static final int BAD_ARGS = 5;

    /**
     * Predicate, whether to allow a new EPerson to be created.
     * The answer determines whether a new user is created when
     * the credentials describe a valid entity but there is no
     * corresponding EPerson in DSpace yet.
     * The EPerson is only created if authentication succeeds.
     *
     * @param context
     *            DSpace context
     * @param request
     *            HTTP request, in case it's needed. May be null.
     * @param username
     *            Username, if available. May be null.
     * @return true if a new ePerson should be created.
     */
    public boolean canSelfRegister(Context context,
                                   HttpServletRequest request,
                                   String username)
        throws SQLException;

    /**
     * Initialize a new EPerson record for a self-registered new user.
     * Set any data in the EPerson that is specific to this authentication
     * method.
     *
     * @param context
     *            DSpace context
     * @param request
     *            HTTP request, in case it's needed. May be null.
     * @param eperson
     *            newly created EPerson record - email + information from the
     *            registration form will have been filled out.
     */
    public void initEPerson(Context context,
                            HttpServletRequest request,
                            EPerson eperson)
        throws SQLException;

    /**
     * Should (or can) we allow the user to change their password.
     * Note that this means the password stored in the EPerson record, so if
     * <em>any</em> method in the stack returns true, the user is
     * allowed to change it.
     *
     * @param context
     *            DSpace context
     * @param request
     *            HTTP request, in case it's needed. May be null.
     * @param username
     *            Username, if available. May be null.
     * @return true if this method allows the user to change the ePerson password.
     */
    public boolean allowSetPassword(Context context,
                                    HttpServletRequest request,
                                    String username)
        throws SQLException;

    /**
     * Predicate, is this an implicit authentication method.
     * An implicit method gets credentials from the environment (such as
     * an HTTP request or even Java system properties) rather than the
     * explicit username and password. For example, a method that reads
     * the X.509 certificates in an HTTPS request is implicit.
     *
     * @return true if this method uses implicit authentication.
     */
    public boolean isImplicit();

    /**
     * Get list of extra groups that user implicitly belongs to. Note that this
     * method will be invoked regardless of the authentication status of the
     * user (logged-in or not) e.g. a group that depends on the client
     * network-address.
     * <p>
     * It might make sense to implement this method by itself in a separate
     * authentication method that just adds special groups, if the code doesn't
     * belong with any existing auth method. The stackable authentication system
     * was designed expressly to separate functions into "stacked" methods to
     * keep your site-specific code modular and tidy.
     *
     * @param context
     *            A valid DSpace context.
     *
     * @param request
     *            The request that started this operation, or null if not
     *            applicable.
     *
     * @return array of EPerson-group IDs, possibly 0-length, but never
     *         <code>null</code>.
     */
    public int[] getSpecialGroups(Context context, HttpServletRequest request)
        throws SQLException;

    /**
     * Authenticate the given or implicit credentials.
     * This is the heart of the authentication method: test the
     * credentials for authenticity, and if accepted, attempt to match
     * (or optionally, create) an <code>EPerson</code>. If an <code>EPerson</code> is found it is
     * set in the <code>Context</code> that was passed.
     *
     * @param context
     *            DSpace context, will be modified (ePerson set) upon success.
     *
     * @param username
     *            Username (or email address) when method is explicit. Use null for
     *            implicit method.
     *
     * @param password
     *            Password for explicit auth, or null for implicit method.
     *
     * @param realm
     *            Realm is an extra parameter used by some authentication methods, leave null if
     *            not applicable.
     *
     * @param request
     *            The HTTP request that started this operation, or null if not applicable.
     *
     * @return One of:
     *         SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
     *         <p>Meaning:
     *         <br>SUCCESS - authenticated OK.
     *         <br>BAD_CREDENTIALS - user exists, but credentials (e.g. passwd) don't match
     *         <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
     *         <br>NO_SUCH_USER - user not found using this method.
     *         <br>BAD_ARGS - user/pw not appropriate for this method
     */
    public int authenticate(Context context,
                            String username,
                            String password,
                            String realm,
                            HttpServletRequest request)
        throws SQLException;

    /**
     * Get login page to which to redirect.
     * Returns URL (as string) to which to redirect to obtain
     * credentials (either password prompt or e.g. HTTPS port for client
     * cert.); null means no redirect.
     *
     * @param context
     *            DSpace context, will be modified (ePerson set) upon success.
     *
     * @param request
     *            The HTTP request that started this operation, or null if not applicable.
     *
     * @param response
     *            The HTTP response from the servlet method.
     *
     * @return fully-qualified URL or null
     */
    public String loginPageURL(Context context,
                               HttpServletRequest request,
                               HttpServletResponse response);

    /**
     * Get title of login page to which to redirect.
     * Returns a <i>message key</i> that gets translated into the title
     * or label for "login page" (or null, if not implemented). This
     * title may be used to identify the link to the login page in a
     * selection menu, when there are multiple ways to login.
     *
     * @param context
     *            DSpace context, will be modified (ePerson set) upon success.
     *
     * @return title text.
     */
    public String loginPageTitle(Context context);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* A stackable authentication method
* based on the DSpace internal "EPerson" database.
* See the <code>AuthenticationMethod</code> interface for more details.
* <p>
* The <em>username</em> is the E-Person's email address,
* and the <em>password</em> (given to the <code>authenticate()</code>
* method) must match the EPerson password.
* <p>
* This is the default method for a new DSpace configuration.
* If you are implementing a new "explicit" authentication method,
* use this class as a model.
* <p>
* You can use this (or another explict) method in the stack to
* implement HTTP Basic Authentication for servlets, by passing the
* Basic Auth username and password to the <code>AuthenticationManager</code>.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class PasswordAuthentication
    implements AuthenticationMethod {

    /** log4j category */
    private static Logger log = Logger.getLogger(PasswordAuthentication.class);

    /**
     * Look to see if this email address is allowed to register.
     * <p>
     * The configuration key authentication.password.domain.valid is examined
     * in dspace.cfg to see what domains are valid.
     * <p>
     * Example - aber.ac.uk domains : @aber.ac.uk
     * Example - MIT domain and all .ac.uk domains: @mit.edu, .ac.uk
     */
    public boolean canSelfRegister(Context context,
                                   HttpServletRequest request,
                                   String email)
        throws SQLException
    {
        // Is there anything set in authentication.password.domain.valid?
        String domains = ConfigurationManager.getProperty("authentication.password.domain.valid");
        if ((domains == null) || (domains.trim().equals("")))
        {
            // No conditions set, so must be able to self register
            return true;
        }

        // Iterate through all configured domains; a case-insensitive suffix
        // match on the email address admits the user
        String[] options = domains.trim().split(",");
        email = email.trim().toLowerCase();

        for (int i = 0; i < options.length; i++)
        {
            String check = options[i].trim().toLowerCase();
            if (email.endsWith(check))
            {
                // A match, so we can register this user
                return true;
            }
        }

        // No match
        return false;
    }

    /**
     * Nothing extra to initialize.
     */
    public void initEPerson(Context context, HttpServletRequest request,
            EPerson eperson)
        throws SQLException
    {
    }

    /**
     * We always allow the user to change their password.
     */
    public boolean allowSetPassword(Context context,
                                    HttpServletRequest request,
                                    String username)
        throws SQLException
    {
        return true;
    }

    /**
     * This is an explicit method, since it needs username and password
     * from some source.
     * @return false
     */
    public boolean isImplicit()
    {
        return false;
    }

    /**
     * Add authenticated users to the group defined in dspace.cfg by
     * the password.login.specialgroup key.
     * <p>
     * Only users with a non-empty stored password hash (i.e. password users,
     * not anonymous sessions or certificate-only users) are added.
     */
    public int[] getSpecialGroups(Context context, HttpServletRequest request)
    {
        // Explicit null checks replace the previous pattern of letting a
        // NullPointerException escape into a blanket catch (Exception),
        // which also silently swallowed database errors.
        try
        {
            EPerson user = context.getCurrentUser();
            String passwordHash = (user == null) ? null : user.getMetadata("password");

            if (passwordHash != null && !passwordHash.equals(""))
            {
                String groupName = ConfigurationManager.getProperty("password.login.specialgroup");
                if ((groupName != null) && (!groupName.trim().equals("")))
                {
                    Group specialGroup = Group.findByName(context, groupName);
                    if (specialGroup == null)
                    {
                        // Oops - the group isn't there.
                        log.warn(LogManager.getHeader(context,
                                "password_specialgroup",
                                "Group defined in password.login.specialgroup does not exist"));
                        return new int[0];
                    }
                    else
                    {
                        return new int[] { specialGroup.getID() };
                    }
                }
            }
        }
        catch (SQLException e)
        {
            // Log instead of silently swallowing the database error
            log.warn(LogManager.getHeader(context, "password_specialgroup",
                    "error looking up special group"), e);
        }
        return new int[0];
    }

    /**
     * Check credentials: username must match the email address of an
     * EPerson record, and that EPerson must be allowed to login.
     * Password must match its password. Also checks for EPerson that
     * is only allowed to login via an implicit method
     * and returns <code>CERT_REQUIRED</code> if that is the case.
     *
     * @param context
     *            DSpace context, will be modified (EPerson set) upon success.
     *
     * @param username
     *            Username (or email address) when method is explicit. Use null for
     *            implicit method.
     *
     * @param password
     *            Password for explicit auth, or null for implicit method.
     *
     * @param realm
     *            Realm is an extra parameter used by some authentication methods, leave null if
     *            not applicable.
     *
     * @param request
     *            The HTTP request that started this operation, or null if not applicable.
     *
     * @return One of:
     *         SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
     *         <p>Meaning:
     *         <br>SUCCESS - authenticated OK.
     *         <br>BAD_CREDENTIALS - user exists, but password doesn't match
     *         <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
     *         <br>NO_SUCH_USER - no EPerson with matching email address.
     *         <br>BAD_ARGS - missing username, or user matched but cannot login.
     */
    public int authenticate(Context context,
                            String username,
                            String password,
                            String realm,
                            HttpServletRequest request)
        throws SQLException
    {
        if (username != null && password != null)
        {
            EPerson eperson = null;
            log.info(LogManager.getHeader(context, "authenticate", "attempting password auth of user="+username));
            try
            {
                // Emails are stored lower-case, so normalise before lookup
                eperson = EPerson.findByEmail(context, username.toLowerCase());
            }
            catch (AuthorizeException e)
            {
                log.trace("Failed to authorize looking up EPerson", e);
            }

            if (eperson == null)
            {
                // lookup failed.
                return NO_SUCH_USER;
            }
            else if (!eperson.canLogIn())
            {
                // cannot login this way
                return BAD_ARGS;
            }
            else if (eperson.getRequireCertificate())
            {
                // this user can only login with x.509 certificate
                log.warn(LogManager.getHeader(context, "authenticate", "rejecting PasswordAuthentication because "+username+" requires certificate."));
                return CERT_REQUIRED;
            }
            else if (eperson.checkPassword(password))
            {
                // login is ok if password matches:
                context.setCurrentUser(eperson);
                log.info(LogManager.getHeader(context, "authenticate", "type=PasswordAuthentication"));
                return SUCCESS;
            }
            else
            {
                return BAD_CREDENTIALS;
            }
        }

        // BAD_ARGS always defers to the next authentication method.
        // It means this method cannot use the given credentials.
        else
        {
            return BAD_ARGS;
        }
    }

    /**
     * Returns URL of password-login servlet.
     *
     * @param context
     *            DSpace context, will be modified (EPerson set) upon success.
     *
     * @param request
     *            The HTTP request that started this operation, or null if not applicable.
     *
     * @param response
     *            The HTTP response from the servlet method.
     *
     * @return fully-qualified URL
     */
    public String loginPageURL(Context context,
                               HttpServletRequest request,
                               HttpServletResponse response)
    {
        return response.encodeRedirectURL(request.getContextPath() +
                "/password-login");
    }

    /**
     * Returns message key for title of the "login" page, to use
     * in a menu showing the choice of multiple login methods.
     *
     * @param context
     *            DSpace context, will be modified (EPerson set) upon success.
     *
     * @return Message key to look up in i18n message catalog.
     */
    public String loginPageTitle(Context context)
    {
        return "org.dspace.eperson.PasswordAuthentication.title";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Adds users to special groups based on IP address. Configuration parameter
* form is:
* <P>
* {@code authentication.ip.<GROUPNAME> = <IPRANGE>[, <IPRANGE> ...]}
* <P>
* e.g. {@code authentication.ip.MIT = 18., 192.25.0.0/255.255.0.0}
* <P>
* Negative matches can be included by prepending the range with a '-'. For example if you want
* to include all of a class B network except for users of a contained class c network, you could use:
* <P>
* 111.222,-111.222.333.
* <p>
* For supported IP ranges see {@link org.dspace.authenticate.IPMatcher}.
*
* @version $Revision: 5844 $
* @author Robert Tansley
*/
public class IPAuthentication implements AuthenticationMethod
{
    /** Our logger */
    private static Logger log = Logger.getLogger(IPAuthentication.class);

    /**
     * Whether to look for x-forwarded headers for logging IP addresses.
     * Lazily initialised on first use; the unsynchronised write is benign
     * because the computed value is always the same.
     */
    private static Boolean useProxies;

    /** All the IP matchers */
    private List<IPMatcher> ipMatchers;

    /** All the negative IP matchers */
    private List<IPMatcher> ipNegativeMatchers;

    /**
     * Maps IPMatchers to group names when we don't know group DB ID yet. When
     * the DB ID is known, the IPMatcher is moved to ipMatcherGroupIDs and then
     * points to the DB ID.
     */
    private Map<IPMatcher, String> ipMatcherGroupNames;

    /** Maps IPMatchers to group IDs (Integers) where we know the group DB ID */
    private Map<IPMatcher, Integer> ipMatcherGroupIDs;

    /**
     * Initialize an IP authenticator, reading in the configuration. Note this
     * will never fail if the configuration is bad -- a warning will be logged.
     */
    public IPAuthentication()
    {
        ipMatchers = new ArrayList<IPMatcher>();
        ipNegativeMatchers = new ArrayList<IPMatcher>();
        ipMatcherGroupIDs = new HashMap<IPMatcher, Integer>();
        ipMatcherGroupNames = new HashMap<IPMatcher, String>();

        // Scan configuration for authentication.ip.<GROUPNAME> entries
        Enumeration e = ConfigurationManager.propertyNames();

        while (e.hasMoreElements())
        {
            String propName = (String) e.nextElement();
            if (propName.startsWith("authentication.ip."))
            {
                String[] nameParts = propName.split("\\.");

                if (nameParts.length == 3)
                {
                    addMatchers(nameParts[2], ConfigurationManager
                            .getProperty(propName));
                }
                else
                {
                    log.warn("Malformed configuration property name: "
                            + propName);
                }
            }
        }
    }

    /**
     * Add matchers for the given comma-delimited IP ranges and group.
     * A range prefixed with '-' becomes a negative matcher.
     *
     * @param groupName
     *            name of group
     * @param ipRanges
     *            IP ranges
     */
    private void addMatchers(String groupName, String ipRanges)
    {
        String[] ranges = ipRanges.split("\\s*,\\s*");

        for (String entry : ranges)
        {
            try
            {
                IPMatcher ipm;
                if (entry.startsWith("-"))
                {
                    ipm = new IPMatcher(entry.substring(1));
                    ipNegativeMatchers.add(ipm);
                }
                else
                {
                    ipm = new IPMatcher(entry);
                    ipMatchers.add(ipm);
                }
                ipMatcherGroupNames.put(ipm, groupName);

                if (log.isDebugEnabled())
                {
                    log.debug("Configured " + entry + " for special group "
                            + groupName);
                }
            }
            catch (IPMatcherException ipme)
            {
                // Never fail on bad configuration; just warn
                log.warn("Malformed IP range specified for group " + groupName,
                        ipme);
            }
        }
    }

    /** IP authentication never creates new EPersons. */
    public boolean canSelfRegister(Context context, HttpServletRequest request,
            String username) throws SQLException
    {
        return false;
    }

    /** Nothing to initialize for a new EPerson. */
    public void initEPerson(Context context, HttpServletRequest request,
            EPerson eperson) throws SQLException
    {
    }

    /** IP authentication has no password to set. */
    public boolean allowSetPassword(Context context,
            HttpServletRequest request, String username) throws SQLException
    {
        return false;
    }

    /** Credentials come from the request environment, so this is implicit. */
    public boolean isImplicit()
    {
        return true;
    }

    /**
     * Resolve the database ID of the group associated with the given matcher,
     * caching the ID after the first successful lookup so subsequent requests
     * avoid the database round trip. Extracted to remove the duplicated logic
     * that previously existed in both the positive and negative matcher loops.
     *
     * @return the group ID, or null if no group is configured or the
     *         configured group does not exist (a warning is logged)
     */
    private Integer resolveGroupID(Context context, IPMatcher ipm)
            throws SQLException
    {
        // Do we already know the group ID?
        Integer g = ipMatcherGroupIDs.get(ipm);
        if (g != null)
        {
            return g;
        }

        // See if we have a group name to look up
        String groupName = ipMatcherGroupNames.get(ipm);
        if (groupName == null)
        {
            return null;
        }

        Group group = Group.findByName(context, groupName);
        if (group == null)
        {
            log.warn(LogManager.getHeader(context, "configuration_error",
                    "unknown_group=" + groupName));
            return null;
        }

        // Cache the ID so we won't have to do the lookup again
        Integer id = Integer.valueOf(group.getID());
        ipMatcherGroupIDs.put(ipm, id);
        ipMatcherGroupNames.remove(ipm);
        return id;
    }

    public int[] getSpecialGroups(Context context, HttpServletRequest request)
            throws SQLException
    {
        if (request == null)
        {
            return new int[0];
        }
        List<Integer> groupIDs = new ArrayList<Integer>();

        // Get the user's IP address
        String addr = request.getRemoteAddr();
        if (useProxies == null) {
            useProxies = ConfigurationManager.getBooleanProperty("useProxies", false);
        }
        if (useProxies && request.getHeader("X-Forwarded-For") != null)
        {
            /* This header is a comma delimited list */
            // NOTE(review): this condition tests the whole header rather than
            // the individual entry (xfip), and addr is overwritten on each
            // iteration so the LAST entry wins — confirm this is the intent.
            // Behavior preserved as-is here.
            for (String xfip : request.getHeader("X-Forwarded-For").split(","))
            {
                if (!request.getHeader("X-Forwarded-For").contains(addr))
                {
                    addr = xfip.trim();
                }
            }
        }

        // Collect the groups for all positive matches...
        for (IPMatcher ipm : ipMatchers)
        {
            try
            {
                if (ipm.match(addr))
                {
                    Integer g = resolveGroupID(context, ipm);
                    if (g != null)
                    {
                        groupIDs.add(g);
                    }
                }
            }
            catch (IPMatcherException ipme)
            {
                log.warn(LogManager.getHeader(context, "configuration_error",
                        "bad_ip=" + addr), ipme);
            }
        }

        // ... then remove any negative matches
        for (IPMatcher ipm : ipNegativeMatchers)
        {
            try
            {
                if (ipm.match(addr))
                {
                    Integer g = resolveGroupID(context, ipm);
                    if (g != null)
                    {
                        groupIDs.remove(g);
                    }
                }
            }
            catch (IPMatcherException ipme)
            {
                log.warn(LogManager.getHeader(context, "configuration_error",
                        "bad_ip=" + addr), ipme);
            }
        }

        // Unbox into the int[] the interface requires
        int[] results = new int[groupIDs.size()];
        for (int i = 0; i < groupIDs.size(); i++)
        {
            results[i] = (groupIDs.get(i)).intValue();
        }

        if (log.isDebugEnabled())
        {
            // Local buffer needs no synchronisation, so StringBuilder suffices
            StringBuilder gsb = new StringBuilder();

            for (int i = 0; i < results.length; i++)
            {
                if (i > 0)
                {
                    gsb.append(",");
                }
                gsb.append(results[i]);
            }

            log.debug(LogManager.getHeader(context, "authenticated",
                    "special_groups=" + gsb.toString()));
        }

        return results;
    }

    /** IP authentication cannot process explicit credentials. */
    public int authenticate(Context context, String username, String password,
            String realm, HttpServletRequest request) throws SQLException
    {
        return BAD_ARGS;
    }

    /** No login page; authentication is implicit. */
    public String loginPageURL(Context context, HttpServletRequest request,
            HttpServletResponse response)
    {
        return null;
    }

    /** No login page title; authentication is implicit. */
    public String loginPageTitle(Context context)
    {
        return null;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import javax.servlet.http.HttpServletRequest;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.eperson.EPerson;
/**
* Access point for the stackable authentication methods.
* <p>
* This class initializes the "stack" from the DSpace configuration,
* and then invokes methods in the appropriate order on behalf of clients.
* <p>
* See the AuthenticationMethod interface for details about what each
* function does.
* <p>
* <b>Configuration</b><br>
* The stack of authentication methods is defined by one property in the DSpace configuration:
* <pre>
* plugin.sequence.org.dspace.eperson.AuthenticationMethod = <em>a list of method class names</em>
* <em>e.g.</em>
* plugin.sequence.org.dspace.eperson.AuthenticationMethod = \
* org.dspace.eperson.X509Authentication, \
* org.dspace.eperson.PasswordAuthentication
* </pre>
* <p>
* The "stack" is always traversed in order, with the methods
* specified first (in the configuration) thus getting highest priority.
*
* @see AuthenticationMethod
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class AuthenticationManager
{
    /**
     * List of authentication methods, highest precedence first.
     * Populated once at class-load time from the plugin sequence configured
     * for AuthenticationMethod (see the class javadoc for the property name).
     */
    private static AuthenticationMethod methodStack[] =
        (AuthenticationMethod[])PluginManager.getPluginSequence(AuthenticationMethod.class);
/**
* Test credentials for authenticity.
* Apply the given credentials to each authenticate() method in
* the stack. Returns upon the first <code>SUCCESS</code>, or otherwise
* returns the most favorable outcome from one of the methods.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param username
* Username (or email address) when method is explicit. Use null for
* implicit method.
*
* @param password
* Password for explicit auth, or null for implicit method.
*
* @param realm
* Realm is an extra parameter used by some authentication methods, leave null if
* not applicable.
*
* @param request
* The HTTP request that started this operation, or null if not applicable.
*
* @return One of:
* SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
* <p>Meaning:
* <br>SUCCESS - authenticated OK.
* <br>BAD_CREDENTIALS - user exists, but credenitals (e.g. passwd) don't match
* <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
* <br>NO_SUCH_USER - user not found using this method.
* <br>BAD_ARGS - user/pw not appropriate for this method
*/
public static int authenticate(Context context,
String username,
String password,
String realm,
HttpServletRequest request)
{
return authenticateInternal(context, username, password, realm,
request, false);
}
/**
* Test credentials for authenticity, using only Implicit methods.
* Just like <code>authenticate()</code>, except it only invokes the
* <em>implicit</em> authentication methods the stack.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param username
* Username (or email address) when method is explicit. Use null for
* implicit method.
*
* @param password
* Password for explicit auth, or null for implicit method.
*
* @param realm
* Realm is an extra parameter used by some authentication methods, leave null if
* not applicable.
*
* @param request
* The HTTP request that started this operation, or null if not applicable.
*
* @return One of:
* SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
* <p>Meaning:
* <br>SUCCESS - authenticated OK.
* <br>BAD_CREDENTIALS - user exists, but credenitals (e.g. passwd) don't match
* <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
* <br>NO_SUCH_USER - user not found using this method.
* <br>BAD_ARGS - user/pw not appropriate for this method
*/
public static int authenticateImplicit(Context context,
String username,
String password,
String realm,
HttpServletRequest request)
{
return authenticateInternal(context, username, password, realm,
request, true);
}
private static int authenticateInternal(Context context,
String username,
String password,
String realm,
HttpServletRequest request,
boolean implicitOnly)
{
// better is lowest, so start with the highest.
int bestRet = AuthenticationMethod.BAD_ARGS;
// return on first success, otherwise "best" outcome.
for (int i = 0; i < methodStack.length; ++i)
{
if (!implicitOnly || methodStack[i].isImplicit())
{
int ret = 0;
try
{
ret = methodStack[i].authenticate(context, username, password, realm, request);
}
catch (SQLException e)
{
ret = AuthenticationMethod.NO_SUCH_USER;
}
if (ret == AuthenticationMethod.SUCCESS)
{
return ret;
}
if (ret < bestRet)
{
bestRet = ret;
}
}
}
return bestRet;
}
/**
* Predicate, can a new EPerson be created.
* Invokes <code>canSelfRegister()</code> of every authentication
* method in the stack, and returns true if any of them is true.
*
* @param context
* DSpace context
* @param request
* HTTP request, in case it's needed. Can be null.
* @param username
* Username, if available. Can be null.
* @return true if new ePerson should be created.
*/
public static boolean canSelfRegister(Context context,
HttpServletRequest request,
String username)
throws SQLException
{
for (int i = 0; i < methodStack.length; ++i)
{
if (methodStack[i].canSelfRegister(context, request, username))
{
return true;
}
}
return false;
}
/**
* Predicate, can user set EPerson password.
* Returns true if the <code>allowSetPassword()</code> method of any
* member of the stack returns true.
*
* @param context
* DSpace context
* @param request
* HTTP request, in case it's needed. Can be null.
* @param username
* Username, if available. Can be null.
* @return true if this method allows user to change ePerson password.
*/
public static boolean allowSetPassword(Context context,
HttpServletRequest request,
String username)
throws SQLException
{
for (int i = 0; i < methodStack.length; ++i)
{
if (methodStack[i].allowSetPassword(context, request, username))
{
return true;
}
}
return false;
}
/**
* Initialize a new e-person record for a self-registered new user.
* Give every authentication method in the stack a chance to
* initialize the new ePerson by calling its <code>initEperson()</code>
*
* @param context
* DSpace context
* @param request
* HTTP request, in case it's needed. Can be null.
* @param eperson
* newly created EPerson record - email + information from the
* registration form will have been filled out.
*/
public static void initEPerson(Context context,
HttpServletRequest request,
EPerson eperson)
throws SQLException
{
for (AuthenticationMethod method : methodStack)
{
method.initEPerson(context, request, eperson);
}
}
/**
* Get list of extra groups that user implicitly belongs to.
* Returns accumulation of groups of all the <code>getSpecialGroups()</code>
* methods in the stack.
*
* @param context
* A valid DSpace context.
*
* @param request
* The request that started this operation, or null if not applicable.
*
* @return Returns IDs of any groups the user authenticated by this
* request is in implicitly -- checks for e.g. network-address dependent
* groups.
*/
public static int[] getSpecialGroups(Context context,
HttpServletRequest request)
throws SQLException
{
List<int[]> gll = new ArrayList<int[]>();
int totalLen = 0;
for (int i = 0; i < methodStack.length; ++i)
{
int gl[] = methodStack[i].getSpecialGroups(context, request);
if (gl.length > 0)
{
gll.add(gl);
totalLen += gl.length;
}
}
// Maybe this is over-optimized but it's called on every
// request, and most sites will only have 0 or 1 auth methods
// actually returning groups, so it pays..
if (totalLen == 0)
{
return new int[0];
}
else if (gll.size() == 1)
{
return gll.get(0);
}
else
{
// Have to do it this painful way since list.toArray() doesn't
// work on int[]. stupid Java ints aren't first-class objects.
int result[] = new int[totalLen];
int k = 0;
for (int i = 0; i < gll.size(); ++i)
{
int gl[] = gll.get(i);
for (int aGl : gl)
{
result[k++] = aGl;
}
}
return result;
}
}
/**
* Get stack of authentication methods.
* Return an <code>Iterator</code> that steps through each configured
* authentication method, in order of precedence.
*
* @return Iterator object.
*/
public static Iterator<AuthenticationMethod> authenticationMethodIterator()
{
return Arrays.asList(methodStack).iterator();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.Principal;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.StringTokenizer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.dspace.authenticate.AuthenticationMethod;
import org.dspace.authenticate.AuthenticationManager;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Implicit authentication method that gets credentials from the X.509 client
* certificate supplied by the HTTPS client when connecting to this server. The
* email address in that certificate is taken as the authenticated user name
* with no further checking, so be sure your HTTP server (e.g. Tomcat) is
* configured correctly to accept only client certificates it can validate.
* <p>
* See the <code>AuthenticationMethod</code> interface for more details.
* <p>
* <b>Configuration:</b>
*
* <pre>
* authentication.x509.keystore.path =
* <em>
* path to Java keystore file
* </em>
* authentication.x509.keystore.password =
* <em>
* password to access the keystore
* </em>
* authentication.x509.ca.cert =
* <em>
* path to certificate file for CA whose client certs to accept.
* </em>
* authentication.x509.autoregister =
* <em>
* "true" if E-Person is created automatically for unknown new users.
* </em>
* authentication.x509.groups =
* <em>
* comma-delimited list of special groups to add user to if authenticated.
* </em>
* authentication.x509.emaildomain =
* <em>
* email address domain (after the 'at' symbol) to match before allowing
* membership in special groups.
* </em>
* </pre>
*
* Only one of the "<code>keystore.path</code>" or "<code>ca.cert</code>"
* options is required. If you supply a keystore, then all of the "trusted"
* certificates in the keystore represent CAs whose client certificates will be
* accepted. The <code>ca.cert</code> option only allows a single CA to be
* named.
* <p>
* You can configure <em>both</em> a keystore and a CA cert, and both will be
* used.
* <p>
* The <code>autoregister</code> configuration parameter determines what the
* <code>canSelfRegister()</code> method returns. It also allows an EPerson
* record to be created automatically when the presented certificate is
* acceptable but there is no corresponding EPerson.
*
* @author Larry Stone
* @version $Revision: 5844 $
*/
public class X509Authentication implements AuthenticationMethod
{
/** log4j category */
private static Logger log = Logger.getLogger(X509Authentication.class);
/** public key of CA to check client certs against. */
private static PublicKey caPublicKey = null;
/** key store for CA certs if we use that */
private static KeyStore caCertKeyStore = null;
private static String loginPageTitle = null;
private static String loginPageURL = null;
    /**
     * Initialization: Set caPublicKey and/or keystore. This loads the
     * information needed to check if a client cert presented is valid and
     * acceptable. Failures are logged but not fatal: the class still loads,
     * leaving the corresponding trust anchor null (so that verification path
     * is simply unavailable in isValid()).
     */
    static
    {
        /*
         * allow identification of alternative entry points for certificate
         * authentication when selected by the user rather than implicitly.
         */
        loginPageTitle = ConfigurationManager
                .getProperty("authentication.x509.chooser.title.key");
        loginPageURL = ConfigurationManager
                .getProperty("authentication.x509.chooser.uri");
        String keystorePath = ConfigurationManager
                .getProperty("authentication.x509.keystore.path");
        String keystorePassword = ConfigurationManager
                .getProperty("authentication.x509.keystore.password");
        String caCertPath = ConfigurationManager
                .getProperty("authentication.x509.ca.cert");
        // backward-compatible kludge
        if (caCertPath == null)
        {
            caCertPath = ConfigurationManager.getProperty("webui.cert.ca");
        }
        // First look for keystore full of trusted certs.
        if (keystorePath != null)
        {
            FileInputStream fis = null;
            if (keystorePassword == null)
            {
                keystorePassword = "";
            }
            try
            {
                KeyStore ks = KeyStore.getInstance("JKS");
                fis = new FileInputStream(keystorePath);
                ks.load(fis, keystorePassword.toCharArray());
                // Only publish the keystore once it loaded successfully.
                caCertKeyStore = ks;
            }
            catch (IOException e)
            {
                log
                        .error("X509Authentication: Failed to load CA keystore, file="
                                + keystorePath + ", error=" + e.toString());
            }
            catch (GeneralSecurityException e)
            {
                log
                        .error("X509Authentication: Failed to extract CA keystore, file="
                                + keystorePath + ", error=" + e.toString());
            }
            finally
            {
                if (fis != null)
                {
                    try
                    {
                        fis.close();
                    }
                    catch (IOException ioe)
                    {
                        // ignored: best-effort close of the keystore stream
                    }
                }
            }
        }
        // Second, try getting public key out of CA cert, if that's configured.
        if (caCertPath != null)
        {
            InputStream is = null;
            FileInputStream fis = null;
            try
            {
                fis = new FileInputStream(caCertPath);
                is = new BufferedInputStream(fis);
                X509Certificate cert = (X509Certificate) CertificateFactory
                        .getInstance("X.509").generateCertificate(is);
                if (cert != null)
                {
                    caPublicKey = cert.getPublicKey();
                }
            }
            catch (IOException e)
            {
                log.error("X509Authentication: Failed to load CA cert, file="
                        + caCertPath + ", error=" + e.toString());
            }
            catch (CertificateException e)
            {
                log
                        .error("X509Authentication: Failed to extract CA cert, file="
                                + caCertPath + ", error=" + e.toString());
            }
            finally
            {
                if (is != null)
                {
                    try
                    {
                        is.close();
                    }
                    catch (IOException ioe)
                    {
                        // ignored: best-effort close
                    }
                }
                if (fis != null)
                {
                    try
                    {
                        fis.close();
                    }
                    catch (IOException ioe)
                    {
                        // ignored: best-effort close
                    }
                }
            }
        }
    }
/**
* Return the email address from <code>certificate</code>, or null if an
* email address cannot be found in the certificate.
* <p>
* Note that the certificate parsing has only been tested with certificates
* granted by the MIT Certification Authority, and may not work elsewhere.
*
* @param certificate -
* An X509 certificate object
* @return - The email address found in certificate, or null if an email
* address cannot be found in the certificate.
*/
private static String getEmail(X509Certificate certificate)
throws SQLException
{
Principal principal = certificate.getSubjectDN();
if (principal == null)
{
return null;
}
String dn = principal.getName();
if (dn == null)
{
return null;
}
StringTokenizer tokenizer = new StringTokenizer(dn, ",");
String token = null;
while (tokenizer.hasMoreTokens())
{
int len = "emailaddress=".length();
token = (String) tokenizer.nextToken();
if (token.toLowerCase().startsWith("emailaddress="))
{
// Make sure the token actually contains something
if (token.length() <= len)
{
return null;
}
return token.substring(len).toLowerCase();
}
}
return null;
}
    /**
     * Check whether a client certificate is currently valid and signed by a
     * trusted CA. Trust anchors are the static <code>caPublicKey</code>
     * and/or <code>caCertKeyStore</code> loaded at class initialization.
     * (The previous javadoc described a KEY parameter that this method does
     * not take.)
     *
     * @param context -
     *            DSpace context, used for log messages only
     * @param certificate -
     *            An X509 certificate object
     * @return - True if the certificate is within its validity window and its
     *         signature verifies against a configured trust anchor, false
     *         otherwise.
     */
    private static boolean isValid(Context context, X509Certificate certificate)
    {
        if (certificate == null)
        {
            return false;
        }
        // This checks that current time is within cert's validity window:
        try
        {
            certificate.checkValidity();
        }
        catch (CertificateException e)
        {
            log.info(LogManager.getHeader(context, "authentication",
                    "X.509 Certificate is EXPIRED or PREMATURE: "
                            + e.toString()));
            return false;
        }
        // Try CA public key, if available.
        if (caPublicKey != null)
        {
            try
            {
                certificate.verify(caPublicKey);
                return true;
            }
            catch (GeneralSecurityException e)
            {
                // Fall through to the keystore check below.
                log.info(LogManager.getHeader(context, "authentication",
                        "X.509 Certificate FAILED SIGNATURE check: "
                                + e.toString()));
            }
        }
        // Try it with keystore, if available.
        if (caCertKeyStore != null)
        {
            try
            {
                Enumeration ke = caCertKeyStore.aliases();
                while (ke.hasMoreElements())
                {
                    String alias = (String) ke.nextElement();
                    if (caCertKeyStore.isCertificateEntry(alias))
                    {
                        Certificate ca = caCertKeyStore.getCertificate(alias);
                        try
                        {
                            certificate.verify(ca.getPublicKey());
                            return true;
                        }
                        catch (CertificateException ce)
                        {
                            // Not signed by this trusted cert; try next alias.
                            // NOTE(review): verify() signals a key mismatch
                            // with SignatureException, which is NOT a
                            // CertificateException -- it escapes to the outer
                            // catch and aborts the alias loop. Confirm this
                            // is intended.
                        }
                    }
                }
                log
                        .info(LogManager
                                .getHeader(context, "authentication",
                                        "Keystore method FAILED SIGNATURE check on client cert."));
            }
            catch (GeneralSecurityException e)
            {
                log.info(LogManager.getHeader(context, "authentication",
                        "X.509 Certificate FAILED SIGNATURE check: "
                                + e.toString()));
            }
        }
        return false;
    }
/**
* Predicate, can new user automatically create EPerson. Checks
* configuration value. You'll probably want this to be true to take
* advantage of a Web certificate infrastructure with many more users than
* are already known by DSpace.
*/
public boolean canSelfRegister(Context context, HttpServletRequest request,
String username) throws SQLException
{
return ConfigurationManager
.getBooleanProperty("authentication.x509.autoregister");
}
    /**
     * Nothing extra to initialize.
     */
    public void initEPerson(Context context, HttpServletRequest request,
            EPerson eperson) throws SQLException
    {
        // Intentionally a no-op: X.509 authentication needs no extra
        // EPerson fields beyond what the caller already set.
    }
    /**
     * We don't use EPerson password so there is no reason to change it.
     */
    public boolean allowSetPassword(Context context,
            HttpServletRequest request, String username) throws SQLException
    {
        // Certificate-based login never consults the EPerson password.
        return false;
    }
    /**
     * Returns true, this is an implicit method: credentials come from the
     * request's X.509 certificate attribute, not from a login form.
     */
    public boolean isImplicit()
    {
        return true;
    }
/**
* Returns a list of group names that the user should be added to upon
* successful authentication, configured in dspace.cfg.
*
* @return List<String> of special groups configured for this authenticator
*/
private List<String> getX509Groups()
{
List<String> groupNames = new ArrayList<String>();
String x509GroupConfig = null;
x509GroupConfig = ConfigurationManager
.getProperty("authentication.x509.groups");
if (null != x509GroupConfig && !x509GroupConfig.equals(""))
{
String[] groups = x509GroupConfig.split("\\s*,\\s*");
for (int i = 0; i < groups.length; i++)
{
groupNames.add(groups[i].trim());
}
}
return groupNames;
}
/**
* Checks for configured email domain required to grant special groups
* membership. If no email domain is configured to verify, special group
* membership is simply granted.
*
* @param request -
* The current request object
* @param email -
* The email address from the x509 certificate
*/
private void setSpecialGroupsFlag(HttpServletRequest request, String email)
{
String emailDomain = null;
emailDomain = (String) request
.getAttribute("authentication.x509.emaildomain");
HttpSession session = request.getSession(true);
if (null != emailDomain && !emailDomain.equals(""))
{
if (email.substring(email.length() - emailDomain.length()).equals(
emailDomain))
{
session.setAttribute("x509Auth", Boolean.TRUE);
}
}
else
{
// No configured email domain to verify. Just flag
// as authenticated so special groups are granted.
session.setAttribute("x509Auth", Boolean.TRUE);
}
}
/**
* Return special groups configured in dspace.cfg for X509 certificate
* authentication.
*
* @param context
* @param request
* object potentially containing the cert
*
* @return An int array of group IDs
*
*/
public int[] getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException
{
if (request == null)
{
return new int[0];
}
Boolean authenticated = false;
HttpSession session = request.getSession(false);
authenticated = (Boolean) session.getAttribute("x509Auth");
authenticated = (null == authenticated) ? false : authenticated;
if (authenticated)
{
List<String> groupNames = getX509Groups();
List<Integer> groupIDs = new ArrayList<Integer>();
if (groupNames != null)
{
for (String groupName : groupNames)
{
if (groupName != null)
{
Group group = Group.findByName(context, groupName);
if (group != null)
{
groupIDs.add(Integer.valueOf(group.getID()));
}
else
{
log.warn(LogManager.getHeader(context,
"configuration_error", "unknown_group="
+ groupName));
}
}
}
}
int[] results = new int[groupIDs.size()];
for (int i = 0; i < groupIDs.size(); i++)
{
results[i] = (groupIDs.get(i)).intValue();
}
if (log.isDebugEnabled())
{
StringBuffer gsb = new StringBuffer();
for (int i = 0; i < results.length; i++)
{
if (i > 0)
{
gsb.append(",");
}
gsb.append(results[i]);
}
log.debug(LogManager.getHeader(context, "authenticated",
"special_groups=" + gsb.toString()));
}
return results;
}
return new int[0];
}
    /**
     * X509 certificate authentication. The client certificate is obtained from
     * the <code>ServletRequest</code> object.
     * <ul>
     * <li>If the certificate is valid, and corresponds to an existing EPerson,
     * and the user is allowed to login, return success.</li>
     * <li>If the user is matched but is not allowed to login, it fails.</li>
     * <li>If the certificate is valid, but there is no corresponding EPerson,
     * the <code>"authentication.x509.autoregister"</code> configuration
     * parameter is checked (via <code>canSelfRegister()</code>)
     * <ul>
     * <li>If it's true, a new EPerson record is created for the certificate,
     * and the result is success.</li>
     * <li>If it's false, return that the user was unknown.</li>
     * </ul>
     * </li>
     * </ul>
     *
     * @return One of: SUCCESS, BAD_CREDENTIALS, NO_SUCH_USER, BAD_ARGS
     */
    public int authenticate(Context context, String username, String password,
            String realm, HttpServletRequest request) throws SQLException
    {
        // Obtain the certificate from the request, if any
        X509Certificate[] certs = null;
        if (request != null)
        {
            certs = (X509Certificate[]) request
                    .getAttribute("javax.servlet.request.X509Certificate");
        }
        if ((certs == null) || (certs.length == 0))
        {
            return BAD_ARGS;
        }
        else
        {
            // We have a cert -- check it and get username from it.
            // Only certs[0] (the first certificate in the array) is examined.
            try
            {
                if (!isValid(context, certs[0]))
                {
                    log
                            .warn(LogManager
                                    .getHeader(context, "authenticate",
                                            "type=x509certificate, status=BAD_CREDENTIALS (not valid)"));
                    return BAD_CREDENTIALS;
                }
                // And it's valid - try and get an e-person
                String email = getEmail(certs[0]);
                EPerson eperson = null;
                if (email != null)
                {
                    eperson = EPerson.findByEmail(context, email);
                }
                if (eperson == null)
                {
                    // Cert is valid, but no record.
                    if (email != null
                            && canSelfRegister(context, request, null))
                    {
                        // Register the new user automatically
                        log.info(LogManager.getHeader(context, "autoregister",
                                "from=x.509, email=" + email));
                        // TEMPORARILY turn off authorisation
                        // NOTE(review): if create()/update() below throws
                        // AuthorizeException, setIgnoreAuthorization(false)
                        // is skipped and the context stays privileged --
                        // confirm whether a finally block is warranted.
                        context.setIgnoreAuthorization(true);
                        eperson = EPerson.create(context);
                        eperson.setEmail(email);
                        eperson.setCanLogIn(true);
                        AuthenticationManager.initEPerson(context, request,
                                eperson);
                        eperson.update();
                        context.commit();
                        context.setIgnoreAuthorization(false);
                        context.setCurrentUser(eperson);
                        setSpecialGroupsFlag(request, email);
                        return SUCCESS;
                    }
                    else
                    {
                        // No auto-registration for valid certs
                        log
                                .warn(LogManager
                                        .getHeader(context, "authenticate",
                                                "type=cert_but_no_record, cannot auto-register"));
                        return NO_SUCH_USER;
                    }
                }
                // make sure this is a login account
                else if (!eperson.canLogIn())
                {
                    log.warn(LogManager.getHeader(context, "authenticate",
                            "type=x509certificate, email=" + email
                                    + ", canLogIn=false, rejecting."));
                    return BAD_ARGS;
                }
                else
                {
                    log.info(LogManager.getHeader(context, "login",
                            "type=x509certificate"));
                    context.setCurrentUser(eperson);
                    setSpecialGroupsFlag(request, email);
                    return SUCCESS;
                }
            }
            catch (AuthorizeException ce)
            {
                // Logged and mapped to BAD_ARGS below.
                log.warn(LogManager.getHeader(context, "authorize_exception",
                        ""), ce);
            }
            return BAD_ARGS;
        }
    }
    /**
     * Returns the URL of the entry point for explicitly choosing certificate
     * login, as configured by <code>authentication.x509.chooser.uri</code>
     * (may be null when not configured). The previous javadoc incorrectly
     * described this as the password-login servlet.
     *
     * @param context
     *            DSpace context, will be modified (EPerson set) upon success.
     *
     * @param request
     *            The HTTP request that started this operation, or null if not
     *            applicable.
     *
     * @param response
     *            The HTTP response from the servlet method.
     *
     * @return fully-qualified URL
     */
    public String loginPageURL(Context context, HttpServletRequest request,
            HttpServletResponse response)
    {
        // Static value loaded once from configuration in the class initializer.
        return loginPageURL;
    }
    /**
     * Returns message key for title of the "login" page, to use in a menu
     * showing the choice of multiple login methods. Loaded once from
     * <code>authentication.x509.chooser.title.key</code>.
     *
     * @param context
     *            DSpace context, will be modified (EPerson set) upon success.
     *
     * @return Message key to look up in i18n message catalog.
     */
    public String loginPageTitle(Context context)
    {
        return loginPageTitle;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
/**
* Quickly tests whether a given IPv4 4-byte address matches an IP range. An
* {@code IPMatcher} is initialized with a particular IP range specification.
* Calls to {@link IPMatcher#match(String) match} method will then quickly
* determine whether a given IP falls within that range.
* <p>
 * Supported range specifications are:
* <p>
* <ul>
* <li>Full IP address, e.g. {@code 12.34.56.78}</li>
* <li>Partial IP address, e.g. {@code 12.34} (which matches any IP starting
* {@code 12.34})</li>
* <li>Network/netmask, e.g. {@code 18.25.0.0/255.255.0.0}</li>
* <li>CIDR slash notation, e.g. {@code 18.25.0.0/16}</li>
* </ul>
*
* @version $Revision: 5844 $
* @author Robert Tansley
*/
public class IPMatcher
{
    /** Network to match */
    private byte[] network;

    /** Network mask */
    private byte[] netmask;

    /**
     * Construct an IPMatcher that will test for the given IP specification
     *
     * @param ipSpec
     *            IP specification (full or partial IP address,
     *            network/netmask, network/cidr)
     * @throws IPMatcherException
     *             if there is an error parsing the specification (i.e. it is
     *             somehow malformed)
     */
    public IPMatcher(String ipSpec) throws IPMatcherException
    {
        // Boil all specs down to network + mask
        network = new byte[4];
        netmask = new byte[] { -1, -1, -1, -1 };

        // Allow partial IP
        boolean mustHave4 = false;

        String ipPart = ipSpec;
        String[] parts = ipSpec.split("/");

        switch (parts.length)
        {
        case 2:
            // Some kind of slash notation -- we'll need a full network IP
            ipPart = parts[0];
            mustHave4 = true;

            String[] maskParts = parts[1].split("\\.");
            if (maskParts.length == 1)
            {
                // CIDR slash notation
                int x;
                try
                {
                    x = Integer.parseInt(maskParts[0]);
                }
                catch (NumberFormatException nfe)
                {
                    throw new IPMatcherException(
                            "Malformed IP range specification " + ipSpec, nfe);
                }

                if (x < 0 || x > 32)
                {
                    // Was thrown without a message; include context.
                    throw new IPMatcherException(
                            "CIDR prefix length out of range in " + ipSpec);
                }

                // Expand the prefix length into a four-byte mask. The /0 case
                // must be special-cased: in Java, -1 << 32 is -1 (shift counts
                // are taken mod 32), which made "/0" match only the exact IP
                // instead of everything.
                int fullMask = (x == 0) ? 0 : -1 << (32 - x);
                netmask[0] = (byte) ((fullMask & 0xFF000000) >>> 24);
                netmask[1] = (byte) ((fullMask & 0x00FF0000) >>> 16);
                netmask[2] = (byte) ((fullMask & 0x0000FF00) >>> 8);
                netmask[3] = (byte) (fullMask & 0x000000FF);
            }
            else
            {
                // full subnet specified
                ipToBytes(parts[1], netmask, true);
            }
            // Intentional fall-through: the network part of a slash
            // specification is parsed by the "case 1" code below.

        case 1:
            // Get IP
            int partCount = ipToBytes(ipPart, network, mustHave4);

            // If partial IP, set mask for remaining bytes
            for (int i = 3; i >= partCount; i--)
            {
                netmask[i] = 0;
            }
            break;

        default:
            throw new IPMatcherException("Malformed IP range specification "
                    + ipSpec);
        }
    }

    /**
     * Fill out a given four-byte array with the IP address specified in the
     * given String
     *
     * @param ip
     *            IP address as a dot-delimited String
     * @param bytes
     *            4-byte array to fill out
     * @param mustHave4
     *            if true, will require that the given IP string specify all
     *            four bytes
     * @return the number of actual IP bytes found in the given IP address
     *         String
     * @throws IPMatcherException
     *             if there is a problem parsing the IP string -- e.g. number
     *             outside of range 0-255, too many numbers, less than 4 numbers
     *             if {@code mustHave4} is true
     */
    private static int ipToBytes(String ip, byte[] bytes, boolean mustHave4)
            throws IPMatcherException
    {
        String[] parts = ip.split("\\.");

        if (parts.length > 4 || mustHave4 && parts.length != 4)
        {
            throw new IPMatcherException("Malformed IP specification " + ip);
        }

        try
        {
            for (int i = 0; i < parts.length; i++)
            {
                int p = Integer.parseInt(parts[i]);
                if (p < 0 || p > 255)
                {
                    throw new IPMatcherException("Malformed IP specification "
                            + ip);
                }
                // A narrowing cast keeps the low 8 bits, which is exactly the
                // two's-complement value the old (p < 128 ? p : p - 256)
                // expression computed.
                bytes[i] = (byte) p;
            }
        }
        catch (NumberFormatException nfe)
        {
            throw new IPMatcherException("Malformed IP specification " + ip,
                    nfe);
        }

        return parts.length;
    }

    /**
     * Determine whether the given full IP falls within the range this
     * {@code IPMatcher} was initialized with.
     *
     * @param ipIn
     *            IP address as dot-delimited String
     * @return {@code true} if the IP matches the range of this
     *         {@code IPMatcher}; {@code false} otherwise
     * @throws IPMatcherException
     *             if the IP passed in cannot be parsed correctly (i.e. is
     *             malformed)
     */
    public boolean match(String ipIn) throws IPMatcherException
    {
        byte[] bytes = new byte[4];

        ipToBytes(ipIn, bytes, true);

        // The candidate matches when every masked byte equals the masked
        // network byte.
        for (int i = 0; i < 4; i++)
        {
            if ((bytes[i] & netmask[i]) != (network[i] & netmask[i]))
            {
                return false;
            }
        }
        return true;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
/**
* Thrown when there is a problem parsing an IP matcher specification.
*
* @version $Revision: 5844 $
* @author Robert Tansley
*/
public class IPMatcherException extends Exception
{
    /** Construct with neither detail message nor cause. */
    public IPMatcherException()
    {
        super();
    }
    /**
     * Construct with a detail message describing the parse problem.
     *
     * @param message description of the malformed specification
     */
    public IPMatcherException(String message)
    {
        super(message);
    }
    /**
     * Construct wrapping an underlying cause (e.g. NumberFormatException).
     *
     * @param cause the exception that triggered this one
     */
    public IPMatcherException(Throwable cause)
    {
        super(cause);
    }
    /**
     * Construct with both a detail message and an underlying cause.
     *
     * @param message description of the malformed specification
     * @param cause the exception that triggered this one
     */
    public IPMatcherException(String message, Throwable cause)
    {
        super(message, cause);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import java.util.Collection;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.LogManager;
import org.dspace.authenticate.AuthenticationManager;
import org.dspace.authenticate.AuthenticationMethod;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Shibboleth authentication for DSpace, tested on Shibboleth 1.3.x and
* Shibboleth 2.x. Read <a href=
* "https://mams.melcoe.mq.edu.au/zope/mams/pubs/Installation/dspace15/view"
* >Shib DSpace 1.5</a> for installation procedure. Read dspace.cfg for details
* on options available.
*
* @author <a href="mailto:bliong@melcoe.mq.edu.au">Bruc Liong, MELCOE</a>
* @author <a href="mailto:kli@melcoe.mq.edu.au">Xiang Kevin Li, MELCOE</a>
* @version $Revision: 5844 $
*/
public class ShibAuthentication implements AuthenticationMethod
{
/** log4j category */
private static Logger log = Logger.getLogger(ShibAuthentication.class);
    /**
     * Shibboleth authentication: resolve the user's email address from the
     * configured request headers (or Tomcat's REMOTE_USER), look up or
     * auto-register the matching EPerson, and mark the session as
     * Shibboleth-authenticated.
     *
     * @return SUCCESS, NO_SUCH_USER, or BAD_ARGS (no request / no email)
     */
    public int authenticate(Context context, String username, String password,
            String realm, HttpServletRequest request) throws SQLException
    {
        if (request == null)
        {
            return BAD_ARGS;
        }
        log.info("Shibboleth login started...");
        // Dump all incoming headers at debug level for troubleshooting.
        java.util.Enumeration names = request.getHeaderNames();
        String name;
        while (names.hasMoreElements())
        {
            name = names.nextElement().toString();
            log.debug("header:" + name + "=" + request.getHeader(name));
        }
        boolean isUsingTomcatUser = ConfigurationManager.getBooleanProperty("authentication.shib.email-use-tomcat-remote-user");
        String emailHeader = ConfigurationManager.getProperty("authentication.shib.email-header");
        String fnameHeader = ConfigurationManager.getProperty("authentication.shib.firstname-header");
        String lnameHeader = ConfigurationManager.getProperty("authentication.shib.lastname-header");
        String email = null;
        String fname = null;
        String lname = null;
        if (emailHeader != null)
        {
            // try to grab email from the header
            email = request.getHeader(emailHeader);
            // fail, try lower case
            if (email == null)
            {
                email = request.getHeader(emailHeader.toLowerCase());
            }
        }
        // try to pull the "REMOTE_USER" info instead of the header
        if (email == null && isUsingTomcatUser)
        {
            email = request.getRemoteUser();
            log.info("RemoteUser identified as: " + email);
        }
        // No email address, perhaps the eperson has been setup, better check it
        if (email == null)
        {
            EPerson p = context.getCurrentUser();
            if (p != null)
            {
                email = p.getEmail();
            }
        }
        // Without an email address there is nothing to match on -- reject.
        if (email == null)
        {
            log
                    .error("No email is given, you're denied access by Shib, please release email address");
            return AuthenticationMethod.BAD_ARGS;
        }
        email = email.toLowerCase();
        if (fnameHeader != null)
        {
            // try to grab name from the header
            fname = request.getHeader(fnameHeader);
            // fail, try lower case
            if (fname == null)
            {
                fname = request.getHeader(fnameHeader.toLowerCase());
            }
        }
        if (lnameHeader != null)
        {
            // try to grab name from the header
            lname = request.getHeader(lnameHeader);
            // fail, try lower case
            if (lname == null)
            {
                lname = request.getHeader(lnameHeader.toLowerCase());
            }
        }
        // future version can offer auto-update feature, this needs testing
        // before inclusion to core code
        EPerson eperson = null;
        try
        {
            eperson = EPerson.findByEmail(context, email);
            context.setCurrentUser(eperson);
        }
        catch (AuthorizeException e)
        {
            // Lookup failure is treated as "not found" and may trigger
            // auto-registration below.
            log.warn("Fail to locate user with email:" + email, e);
            eperson = null;
        }
        // auto create user if needed
        if (eperson == null
                && ConfigurationManager
                        .getBooleanProperty("authentication.shib.autoregister"))
        {
            log.info(LogManager.getHeader(context, "autoregister", "email="
                    + email));
            // TEMPORARILY turn off authorisation
            context.setIgnoreAuthorization(true);
            try
            {
                eperson = EPerson.create(context);
                eperson.setEmail(email);
                if (fname != null)
                {
                    eperson.setFirstName(fname);
                }
                if (lname != null)
                {
                    eperson.setLastName(lname);
                }
                eperson.setCanLogIn(true);
                AuthenticationManager.initEPerson(context, request, eperson);
                eperson.update();
                context.commit();
                context.setCurrentUser(eperson);
            }
            catch (AuthorizeException e)
            {
                log.warn("Fail to authorize user with email:" + email, e);
                eperson = null;
            }
            finally
            {
                // Authorization is always restored, even on failure.
                context.setIgnoreAuthorization(false);
            }
        }
        if (eperson == null)
        {
            return AuthenticationMethod.NO_SUCH_USER;
        }
        else
        {
            // the person exists, just return ok
            context.setCurrentUser(eperson);
            // This session flag gates getSpecialGroups() below.
            request.getSession().setAttribute("shib.authenticated",
                    Boolean.TRUE);
        }
        return AuthenticationMethod.SUCCESS;
    }
/**
* Grab the special groups to be automatically provisioned for the current
* user. Currently the mapping for the groups is done one-to-one, future
* version can consider the usage of regex for such mapping.
*/
public int[] getSpecialGroups(Context context, HttpServletRequest request)
{
// no user logged in or user not logged from shibboleth
if (request == null || context.getCurrentUser() == null
|| request.getSession().getAttribute("shib.authenticated") == null)
{
return new int[0];
}
if (request.getSession().getAttribute("shib.specialgroup") != null)
{
return (int[]) request.getSession().getAttribute(
"shib.specialgroup");
}
java.util.Set groups = new java.util.HashSet();
String roleHeader = ConfigurationManager
.getProperty("authentication.shib.role-header");
boolean roleHeader_ignoreScope = ConfigurationManager
.getBooleanProperty("authentication.shib.role-header.ignore-scope");
if (roleHeader == null || roleHeader.trim().length() == 0)
{
roleHeader = "Shib-EP-UnscopedAffiliation";
} // fall back to default
String affiliations = request.getHeader(roleHeader);
// try again with all lower case...maybe has better luck
if (affiliations == null)
{
affiliations = request.getHeader(roleHeader.toLowerCase());
}
// default role when fully authN but not releasing any roles?
String defaultRoles = ConfigurationManager
.getProperty("authentication.shib.default-roles");
if (affiliations == null && defaultRoles != null)
{
affiliations = defaultRoles;
}
if (affiliations != null)
{
java.util.StringTokenizer st = new java.util.StringTokenizer(
affiliations, ";,");
// do the mapping here
while (st.hasMoreTokens())
{
String affiliation = st.nextToken().trim();
// strip scope if present and roleHeader_ignoreScope
if (roleHeader_ignoreScope)
{
int index = affiliation.indexOf('@');
if (index != -1)
{
affiliation = affiliation.substring(0, index);
}
}
// perform mapping here if necessary
String groupLabels = ConfigurationManager
.getProperty("authentication.shib.role." + affiliation);
if (groupLabels == null || groupLabels.trim().length() == 0)
{
groupLabels = ConfigurationManager
.getProperty("authentication.shib.role."
+ affiliation.toLowerCase());
}
// revert back to original entry when no mapping is provided
if (groupLabels == null)
{
groupLabels = affiliation;
}
String[] labels = groupLabels.split(",");
for (int i = 0; i < labels.length; i++)
{
addGroup(groups, context, labels[i].trim());
}
}
}
int ids[] = new int[groups.size()];
java.util.Iterator it = groups.iterator();
for (int i = 0; it.hasNext(); i++)
{
ids[i] = ((Integer) it.next()).intValue();
}
// store the special group, if already transformed from headers
// since subsequent header may not have the values anymore
if (ids.length != 0)
{
request.getSession().setAttribute("shib.specialgroup", ids);
}
return ids;
}
/** Find dspaceGroup in DSpace database, if found, include it into groups */
    private void addGroup(Collection groups, Context context, String dspaceGroup)
    {
        // Resolve the named DSpace group and add its integer ID to 'groups'.
        // NOTE(review): 'groups' is a raw Collection; callers pass a
        // HashSet of Integer group IDs - confirm before tightening the type.
        try
        {
            Group g = Group.findByName(context, dspaceGroup);
            if (g == null)
            {
                // oops - no group defined
                // Group missing from the database: warn the admin and fall back
                // to group ID 0 (presumably the Anonymous group - TODO confirm).
                log.warn(LogManager.getHeader(context, dspaceGroup
                        + " group is not found!! Admin needs to create one!",
                        "requiredGroup=" + dspaceGroup));
                groups.add(Integer.valueOf(0));
            }
            else
            {
                groups.add(Integer.valueOf(g.getID()));
            }
            log.info("Mapping group: " + dspaceGroup + " to groupID: "
                    + (g == null ? 0 : g.getID()));
        }
        catch (SQLException e)
        {
            // Database failure is logged but not propagated: a missing mapping
            // must not abort the whole special-groups computation.
            log.error("Mapping group:" + dspaceGroup + " failed with error", e);
        }
    }
/**
* Indicate whether or not a particular self-registering user can set
* themselves a password in the profile info form.
*
* @param context
* DSpace context
* @param request
* HTTP request, in case anything in that is used to decide
* @param email
* e-mail address of user attempting to register
*
*/
    public boolean allowSetPassword(Context context,
            HttpServletRequest request, String email) throws SQLException
    {
        // don't use password at all
        // Shibboleth credentials live in the IdP, so DSpace never stores
        // or updates a local password for these accounts.
        return false;
    }
/**
* Predicate, is this an implicit authentication method. An implicit method
* gets credentials from the environment (such as an HTTP request or even
* Java system properties) rather than the explicit username and password.
* For example, a method that reads the X.509 certificates in an HTTPS
* request is implicit.
*
* @return true if this method uses implicit authentication.
*/
    public boolean isImplicit()
    {
        // Implicit: credentials arrive in the HTTP request environment
        // (Shibboleth headers), not via a DSpace login form.
        return true;
    }
/**
* Indicate whether or not a particular user can self-register, based on
* e-mail address.
*
* @param context
* DSpace context
* @param request
* HTTP request, in case anything in that is used to decide
* @param username
* e-mail address of user attempting to register
*
*/
    public boolean canSelfRegister(Context context, HttpServletRequest request,
            String username) throws SQLException
    {
        // Anyone the IdP authenticates may be auto-registered as an EPerson.
        return true;
    }
/**
* Initialise a new e-person record for a self-registered new user.
*
* @param context
* DSpace context
* @param request
* HTTP request, in case it's needed
* @param eperson
* newly created EPerson record - email + information from the
* registration form will have been filled out.
*
*/
    public void initEPerson(Context context, HttpServletRequest request,
            EPerson eperson) throws SQLException
    {
        // Intentionally empty: all account fields are filled elsewhere from
        // the Shibboleth attribute headers.
    }
/**
* Get login page to which to redirect. Returns URL (as string) to which to
* redirect to obtain credentials (either password prompt or e.g. HTTPS port
* for client cert.); null means no redirect.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param request
* The HTTP request that started this operation, or null if not
* applicable.
*
* @param response
* The HTTP response from the servlet method.
*
* @return fully-qualified URL or null
*/
public String loginPageURL(Context context, HttpServletRequest request,
HttpServletResponse response)
{
return response.encodeRedirectURL(request.getContextPath()
+ "/shibboleth-login");
}
/**
* Get title of login page to which to redirect. Returns a <i>message
* key</i> that gets translated into the title or label for "login page" (or
* null, if not implemented) This title may be used to identify the link to
* the login page in a selection menu, when there are multiple ways to
* login.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @return title text.
*/
    public String loginPageTitle(Context context)
    {
        // i18n message key resolved by the UI, not a literal title.
        return "org.dspace.authenticate.ShibAuthentication.title";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Hashtable;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* This LDAP authentication method is more complex than the simple 'LDAPAuthentication'
* in that it allows authentication against structured hierarchical LDAP trees of
* users. An initial bind is required using a user name and password in order to
* search the tree and find the DN of the user. A second bind is then required to
* check the credentials of the user by binding directly to their DN.
*
* @author Stuart Lewis, Chris Yates, Alex Barbieri, Flavio Botelho, Reuben Pasquini
* @version $Revision: 5844 $
*/
public class LDAPHierarchicalAuthentication
implements AuthenticationMethod {
/** log4j category */
private static Logger log = Logger.getLogger(LDAPHierarchicalAuthentication.class);
/**
* Let a real auth method return true if it wants.
*/
    public boolean canSelfRegister(Context context,
                                   HttpServletRequest request,
                                   String username)
        throws SQLException
    {
        // Looks to see if webui.ldap.autoregister is set or not
        // (false when the property is missing, per getBooleanProperty).
        return ConfigurationManager.getBooleanProperty("webui.ldap.autoregister");
    }
/**
* Nothing here, initialization is done when auto-registering.
*/
    public void initEPerson(Context context, HttpServletRequest request,
                            EPerson eperson)
        throws SQLException
    {
        // Intentionally empty: initialization happens in authenticate()
        // when the account is auto-registered.
        // XXX should we try to initialize netid based on email addr,
        // XXX for eperson created by some other method??
    }
/**
* Cannot change LDAP password through dspace, right?
*/
    public boolean allowSetPassword(Context context,
                                    HttpServletRequest request,
                                    String username)
        throws SQLException
    {
        // Passwords are owned by the LDAP directory, not by DSpace.
        // XXX is this right?
        return false;
    }
/*
* This is an explicit method.
*/
    public boolean isImplicit()
    {
        // Explicit method: the user types a netid and password into a form.
        return false;
    }
/*
* Add authenticated users to the group defined in dspace.cfg by
* the ldap.login.specialgroup key.
*/
    public int[] getSpecialGroups(Context context, HttpServletRequest request)
    {
        // Prevents anonymous users from being added to this group, and the second check
        // ensures they are LDAP users
        try
        {
            // Throws NPE for anonymous sessions (getCurrentUser() == null) or
            // accounts without a netid; the broad catch below absorbs that.
            if (!context.getCurrentUser().getNetid().equals(""))
            {
                String groupName = ConfigurationManager.getProperty("ldap.login.specialgroup");
                if ((groupName != null) && (!groupName.trim().equals("")))
                {
                    Group ldapGroup = Group.findByName(context, groupName);
                    if (ldapGroup == null)
                    {
                        // Oops - the group isn't there.
                        log.warn(LogManager.getHeader(context,
                                "ldap_specialgroup",
                                "Group defined in ldap.login.specialgroup does not exist"));
                        return new int[0];
                    } else
                    {
                        return new int[] { ldapGroup.getID() };
                    }
                }
            }
        }
        catch (Exception npe) {
            // The user is not an LDAP user, so we don't need to worry about them
            // (deliberately also swallows the SQLException from findByName).
        }
        return new int[0];
    }
/*
* Authenticate the given credentials.
* This is the heart of the authentication method: test the
* credentials for authenticity, and if accepted, attempt to match
* (or optionally, create) an <code>EPerson</code>. If an <code>EPerson</code> is found it is
* set in the <code>Context</code> that was passed.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param username
* Username (or email address) when method is explicit. Use null for
* implicit method.
*
* @param password
* Password for explicit auth, or null for implicit method.
*
* @param realm
* Realm is an extra parameter used by some authentication methods, leave null if
* not applicable.
*
* @param request
* The HTTP request that started this operation, or null if not applicable.
*
* @return One of:
* SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
* <p>Meaning:
* <br>SUCCESS - authenticated OK.
* <br>BAD_CREDENTIALS - user exists, but credentials (e.g. passwd) don't match
* <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
* <br>NO_SUCH_USER - user not found using this method.
* <br>BAD_ARGS - user/pw not appropriate for this method
*/
    public int authenticate(Context context,
                            String netid,
                            String password,
                            String realm,
                            HttpServletRequest request)
        throws SQLException
    {
        // Authenticate against the hierarchical LDAP tree in two binds:
        // first find the user's DN (admin or anonymous search bind), then
        // re-bind as that DN with the supplied password. On success an
        // EPerson is located by netid, matched by e-mail, or auto-registered.
        log.info(LogManager.getHeader(context, "auth", "attempting trivial auth of user="+netid));

        // Skip out when no netid or password is given.
        if (netid == null || password == null)
        {
            return BAD_ARGS;
        }

        // Locate the eperson
        EPerson eperson = null;
        try
        {
            eperson = EPerson.findByNetid(context, netid.toLowerCase());
        }
        catch (SQLException e)
        {
            // Deliberately swallowed: eperson stays null and we fall through
            // to the auto-registration branch below.
        }
        SpeakerToLDAP ldap = new SpeakerToLDAP(log);

        // Get the DN of the user
        String adminUser = ConfigurationManager.getProperty("ldap.search.user");
        String adminPassword = ConfigurationManager.getProperty("ldap.search.password");
        String dn = ldap.getDNOfUser(adminUser, adminPassword, context, netid);

        // Check a DN was found
        if ((dn == null) || (dn.trim().equals("")))
        {
            log.info(LogManager
                .getHeader(context, "failed_login", "no DN found for user " + netid));
            return BAD_CREDENTIALS;
        }

        // if they entered a netid that matches an eperson
        if (eperson != null)
        {
            // e-mail address corresponds to active account
            if (eperson.getRequireCertificate())
            {
                return CERT_REQUIRED;
            }
            else if (!eperson.canLogIn())
            {
                return BAD_ARGS;
            }

            // Second bind: verify the password against the resolved DN.
            if (ldap.ldapAuthenticate(dn, password, context))
            {
                context.setCurrentUser(eperson);
                log.info(LogManager
                        .getHeader(context, "authenticate", "type=ldap"));
                return SUCCESS;
            }
            else
            {
                return BAD_CREDENTIALS;
            }
        }

        // the user does not already exist so try and authenticate them
        // with ldap and create an eperson for them
        else
        {
            if (ldap.ldapAuthenticate(dn, password, context))
            {
                // Register the new user automatically
                log.info(LogManager.getHeader(context,
                        "autoregister", "netid=" + netid));

                // Auto-registration requires an e-mail attribute from LDAP;
                // without one we fall through and return BAD_ARGS.
                if ((ldap.ldapEmail!=null)&&(!ldap.ldapEmail.equals("")))
                {
                    try
                    {
                        eperson = EPerson.findByEmail(context, ldap.ldapEmail);
                        if (eperson!=null)
                        {
                            // Existing e-mail account: attach the netid to it
                            // rather than creating a duplicate EPerson.
                            log.info(LogManager.getHeader(context,
                                    "type=ldap-login", "type=ldap_but_already_email"));
                            context.setIgnoreAuthorization(true);
                            eperson.setNetid(netid.toLowerCase());
                            eperson.update();
                            context.commit();
                            context.setIgnoreAuthorization(false);
                            context.setCurrentUser(eperson);
                            return SUCCESS;
                        }
                        else
                        {
                            if (canSelfRegister(context, request, netid))
                            {
                                // TEMPORARILY turn off authorisation
                                try
                                {
                                    context.setIgnoreAuthorization(true);
                                    eperson = EPerson.create(context);
                                    // Always true here (guarded above); kept as-is.
                                    if ((ldap.ldapEmail != null) && (!ldap.ldapEmail.equals("")))
                                    {
                                        eperson.setEmail(ldap.ldapEmail);
                                    }
                                    else
                                    {
                                        eperson.setEmail(netid + ConfigurationManager.getProperty("ldap.netid_email_domain"));
                                    }
                                    if ((ldap.ldapGivenName!=null) && (!ldap.ldapGivenName.equals("")))
                                    {
                                        eperson.setFirstName(ldap.ldapGivenName);
                                    }
                                    if ((ldap.ldapSurname!=null) && (!ldap.ldapSurname.equals("")))
                                    {
                                        eperson.setLastName(ldap.ldapSurname);
                                    }
                                    if ((ldap.ldapPhone!=null)&&(!ldap.ldapPhone.equals("")))
                                    {
                                        eperson.setMetadata("phone", ldap.ldapPhone);
                                    }
                                    eperson.setNetid(netid.toLowerCase());
                                    eperson.setCanLogIn(true);
                                    AuthenticationManager.initEPerson(context, request, eperson);
                                    eperson.update();
                                    context.commit();
                                    context.setCurrentUser(eperson);
                                }
                                catch (AuthorizeException e)
                                {
                                    return NO_SUCH_USER;
                                }
                                finally
                                {
                                    context.setIgnoreAuthorization(false);
                                }
                                log.info(LogManager.getHeader(context, "authenticate",
                                        "type=ldap-login, created ePerson"));
                                return SUCCESS;
                            }
                            else
                            {
                                // No auto-registration for valid certs
                                log.info(LogManager.getHeader(context,
                                        "failed_login", "type=ldap_but_no_record"));
                                return NO_SUCH_USER;
                            }
                        }
                    }
                    catch (AuthorizeException e)
                    {
                        eperson = null;
                    }
                    finally
                    {
                        context.setIgnoreAuthorization(false);
                    }
                }
            }
        }
        return BAD_ARGS;
    }
/**
* Internal class to manage LDAP query and results, mainly
* because there are multiple values to return.
*/
private static class SpeakerToLDAP {
private Logger log = null;
protected String ldapEmail = null;
protected String ldapGivenName = null;
protected String ldapSurname = null;
protected String ldapPhone = null;
/** LDAP settings */
String ldap_provider_url = ConfigurationManager.getProperty("ldap.provider_url");
String ldap_id_field = ConfigurationManager.getProperty("ldap.id_field");
String ldap_search_context = ConfigurationManager.getProperty("ldap.search_context");
String ldap_object_context = ConfigurationManager.getProperty("ldap.object_context");
String ldap_search_scope = ConfigurationManager.getProperty("ldap.search_scope");
String ldap_email_field = ConfigurationManager.getProperty("ldap.email_field");
String ldap_givenname_field = ConfigurationManager.getProperty("ldap.givenname_field");
String ldap_surname_field = ConfigurationManager.getProperty("ldap.surname_field");
String ldap_phone_field = ConfigurationManager.getProperty("ldap.phone_field");
SpeakerToLDAP(Logger thelog)
{
log = thelog;
}
protected String getDNOfUser(String adminUser, String adminPassword, Context context, String netid)
{
// The resultant DN
String resultDN;
// The search scope to use (default to 0)
int ldap_search_scope_value = 0;
try
{
ldap_search_scope_value = Integer.parseInt(ldap_search_scope.trim());
}
catch (NumberFormatException e)
{
// Log the error if it has been set but is invalid
if (ldap_search_scope != null)
{
log.warn(LogManager.getHeader(context,
"ldap_authentication", "invalid search scope: " + ldap_search_scope));
}
}
// Set up environment for creating initial context
Hashtable env = new Hashtable(11);
env.put(javax.naming.Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
env.put(javax.naming.Context.PROVIDER_URL, ldap_provider_url);
if ((adminUser != null) && (!adminUser.trim().equals("")) &&
(adminPassword != null) && (!adminPassword.trim().equals("")))
{
// Use admin credencials for search// Authenticate
env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "simple");
env.put(javax.naming.Context.SECURITY_PRINCIPAL, adminUser);
env.put(javax.naming.Context.SECURITY_CREDENTIALS, adminPassword);
}
else
{
// Use anonymous authentication
env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "none");
}
DirContext ctx = null;
try
{
// Create initial context
ctx = new InitialDirContext(env);
Attributes matchAttrs = new BasicAttributes(true);
matchAttrs.put(new BasicAttribute(ldap_id_field, netid));
// look up attributes
try
{
SearchControls ctrls = new SearchControls();
ctrls.setSearchScope(ldap_search_scope_value);
NamingEnumeration<SearchResult> answer = ctx.search(
ldap_provider_url + ldap_search_context,
"(&({0}={1}))", new Object[] { ldap_id_field,
netid }, ctrls);
while (answer.hasMoreElements()) {
SearchResult sr = answer.next();
if (StringUtils.isEmpty(ldap_search_context)) {
resultDN = sr.getName();
} else {
resultDN = (sr.getName() + "," + ldap_search_context);
}
String attlist[] = {ldap_email_field, ldap_givenname_field,
ldap_surname_field, ldap_phone_field};
Attributes atts = sr.getAttributes();
Attribute att;
if (attlist[0] != null) {
att = atts.get(attlist[0]);
if (att != null)
{
ldapEmail = (String) att.get();
}
}
if (attlist[1] != null) {
att = atts.get(attlist[1]);
if (att != null)
{
ldapGivenName = (String) att.get();
}
}
if (attlist[2] != null) {
att = atts.get(attlist[2]);
if (att != null)
{
ldapSurname = (String) att.get();
}
}
if (attlist[3] != null) {
att = atts.get(attlist[3]);
if (att != null)
{
ldapPhone = (String) att.get();
}
}
if (answer.hasMoreElements()) {
// Oh dear - more than one match
// Ambiguous user, can't continue
} else {
log.debug(LogManager.getHeader(context, "got DN", resultDN));
return resultDN;
}
}
}
catch (NamingException e)
{
// if the lookup fails go ahead and create a new record for them because the authentication
// succeeded
log.warn(LogManager.getHeader(context,
"ldap_attribute_lookup", "type=failed_search "
+ e));
}
}
catch (NamingException e)
{
log.warn(LogManager.getHeader(context,
"ldap_authentication", "type=failed_auth " + e));
}
finally
{
// Close the context when we're done
try
{
if (ctx != null)
{
ctx.close();
}
}
catch (NamingException e)
{
}
}
// No DN match found
return null;
}
/**
* contact the ldap server and attempt to authenticate
*/
protected boolean ldapAuthenticate(String netid, String password,
Context context) {
if (!password.equals("")) {
// Set up environment for creating initial context
Hashtable<String, String> env = new Hashtable<String, String>();
env.put(javax.naming.Context.INITIAL_CONTEXT_FACTORY,
"com.sun.jndi.ldap.LdapCtxFactory");
env.put(javax.naming.Context.PROVIDER_URL, ldap_provider_url);
// Authenticate
env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "Simple");
env.put(javax.naming.Context.SECURITY_PRINCIPAL, netid);
env.put(javax.naming.Context.SECURITY_CREDENTIALS, password);
env.put(javax.naming.Context.AUTHORITATIVE, "true");
env.put(javax.naming.Context.REFERRAL, "follow");
DirContext ctx = null;
try {
// Try to bind
ctx = new InitialDirContext(env);
} catch (NamingException e) {
log.warn(LogManager.getHeader(context,
"ldap_authentication", "type=failed_auth " + e));
return false;
} finally {
// Close the context when we're done
try {
if (ctx != null)
{
ctx.close();
}
} catch (NamingException e) {
}
}
} else {
return false;
}
return true;
}
}
/*
* Returns URL to which to redirect to obtain credentials (either password
* prompt or e.g. HTTPS port for client cert.); null means no redirect.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param request
* The HTTP request that started this operation, or null if not applicable.
*
* @param response
* The HTTP response from the servlet method.
*
* @return fully-qualified URL
*/
public String loginPageURL(Context context,
HttpServletRequest request,
HttpServletResponse response)
{
return response.encodeRedirectURL(request.getContextPath() +
"/ldap-login");
}
/**
* Returns message key for title of the "login" page, to use
* in a menu showing the choice of multiple login methods.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @return Message key to look up in i18n message catalog.
*/
    public String loginPageTitle(Context context)
    {
        // i18n message key resolved by the UI, not a literal title.
        return "org.dspace.eperson.LDAPAuthentication.title";
    }
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Hashtable;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchResult;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Authentication module to authenticate against a flat LDAP tree where
* all users are in the same unit.
*
* @author Larry Stone, Stuart Lewis
* @version $Revision: 5844 $
*/
public class LDAPAuthentication
implements AuthenticationMethod {
/** log4j category */
private static Logger log = Logger.getLogger(LDAPAuthentication.class);
/**
* Let a real auth method return true if it wants.
*/
    public boolean canSelfRegister(Context context,
                                   HttpServletRequest request,
                                   String username)
        throws SQLException
    {
        // XXX might also want to check that username exists in LDAP.
        // Auto-registration is controlled purely by configuration
        // (false when the property is missing, per getBooleanProperty).
        return ConfigurationManager.getBooleanProperty("webui.ldap.autoregister");
    }
/**
* Nothing here, initialization is done when auto-registering.
*/
    public void initEPerson(Context context, HttpServletRequest request,
                            EPerson eperson)
        throws SQLException
    {
        // Intentionally empty: initialization happens in authenticate()
        // when the account is auto-registered.
        // XXX should we try to initialize netid based on email addr,
        // XXX for eperson created by some other method??
    }
/**
* Cannot change LDAP password through dspace, right?
*/
    public boolean allowSetPassword(Context context,
                                    HttpServletRequest request,
                                    String username)
        throws SQLException
    {
        // Passwords are owned by the LDAP directory, not by DSpace.
        // XXX is this right?
        return false;
    }
/*
* This is an explicit method.
*/
    public boolean isImplicit()
    {
        // Explicit method: the user types a netid and password into a form.
        return false;
    }
/*
* Add authenticated users to the group defined in dspace.cfg by
* the ldap.login.specialgroup key.
*/
    public int[] getSpecialGroups(Context context, HttpServletRequest request)
    {
        // Prevents anonymous users from being added to this group, and the second check
        // ensures they are LDAP users
        try
        {
            // Throws NPE for anonymous sessions (getCurrentUser() == null) or
            // accounts without a netid; the broad catch below absorbs that.
            if (!context.getCurrentUser().getNetid().equals(""))
            {
                String groupName = ConfigurationManager.getProperty("ldap.login.specialgroup");
                if ((groupName != null) && (!groupName.trim().equals("")))
                {
                    Group ldapGroup = Group.findByName(context, groupName);
                    if (ldapGroup == null)
                    {
                        // Oops - the group isn't there.
                        log.warn(LogManager.getHeader(context,
                                "ldap_specialgroup",
                                "Group defined in ldap.login.specialgroup does not exist"));
                        return new int[0];
                    } else
                    {
                        return new int[] { ldapGroup.getID() };
                    }
                }
            }
        }
        catch (Exception npe) {
            // The user is not an LDAP user, so we don't need to worry about them
            // (deliberately also swallows the SQLException from findByName).
        }
        return new int[0];
    }
/*
* MIT policy on certs and groups, so always short-circuit.
*
* @return One of:
* SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
*/
    public int authenticate(Context context,
                            String netid,
                            String password,
                            String realm,
                            HttpServletRequest request)
        throws SQLException
    {
        // Authenticate against a flat LDAP tree: bind directly as
        // "<id_field>=<netid>,<object_context>" with the supplied password.
        // On success an EPerson is located by netid, matched by e-mail,
        // or auto-registered.
        log.info(LogManager.getHeader(context, "auth", "attempting trivial auth of user="+netid));

        // Skip out when no netid or password is given.
        if (netid == null || password == null)
        {
            return BAD_ARGS;
        }

        // Locate the eperson
        EPerson eperson = null;
        try
        {
            eperson = EPerson.findByNetid(context, netid.toLowerCase());
        }
        catch (SQLException e)
        {
            // Deliberately swallowed: eperson stays null and we fall through
            // to the auto-registration branch below.
        }
        SpeakerToLDAP ldap = new SpeakerToLDAP(log);

        // if they entered a netid that matches an eperson
        if (eperson != null)
        {
            // e-mail address corresponds to active account
            if (eperson.getRequireCertificate())
            {
                return CERT_REQUIRED;
            }
            else if (!eperson.canLogIn())
            {
                return BAD_ARGS;
            }

            if (ldap.ldapAuthenticate(netid, password, context))
            {
                // Re-fetch in case the record changed since the lookup above.
                eperson = EPerson.findByNetid(context, netid.toLowerCase());
                context.setCurrentUser(eperson);
                log.info(LogManager.getHeader(context, "authenticate", "type=ldap"));
                return SUCCESS;
            }
            else
            {
                return BAD_CREDENTIALS;
            }
        }

        // the user does not already exist so try and authenticate them
        // with ldap and create an eperson for them
        else
        {
            if (ldap.ldapAuthenticate(netid, password, context))
            {
                // Register the new user automatically
                log.info(LogManager.getHeader(context,
                        "autoregister", "netid=" + netid));

                // Auto-registration requires an e-mail attribute from LDAP;
                // without one we fall through and return BAD_ARGS.
                if ((ldap.ldapEmail!=null)&&(!ldap.ldapEmail.equals("")))
                {
                    try
                    {
                        eperson = EPerson.findByEmail(context, ldap.ldapEmail);
                        if (eperson!=null)
                        {
                            // Existing e-mail account: attach the netid to it
                            // rather than creating a duplicate EPerson.
                            log.info(LogManager.getHeader(context,
                                    "type=ldap-login", "type=ldap_but_already_email"));
                            context.setIgnoreAuthorization(true);
                            eperson.setNetid(netid.toLowerCase());
                            eperson.update();
                            context.commit();
                            context.setIgnoreAuthorization(false);
                            context.setCurrentUser(eperson);
                            return SUCCESS;
                        }
                        else
                        {
                            if (canSelfRegister(context, request, netid))
                            {
                                // TEMPORARILY turn off authorisation
                                try
                                {
                                    context.setIgnoreAuthorization(true);
                                    eperson = EPerson.create(context);
                                    // Always true here (guarded above); kept as-is.
                                    if ((ldap.ldapEmail!=null)&&(!ldap.ldapEmail.equals("")))
                                    {
                                        eperson.setEmail(ldap.ldapEmail);
                                    }
                                    else
                                    {
                                        // NOTE(review): stores the bare netid as the
                                        // e-mail; the hierarchical variant appends
                                        // ldap.netid_email_domain here - confirm intent.
                                        eperson.setEmail(netid);
                                    }
                                    if ((ldap.ldapGivenName!=null)&&(!ldap.ldapGivenName.equals("")))
                                    {
                                        eperson.setFirstName(ldap.ldapGivenName);
                                    }
                                    if ((ldap.ldapSurname!=null)&&(!ldap.ldapSurname.equals("")))
                                    {
                                        eperson.setLastName(ldap.ldapSurname);
                                    }
                                    if ((ldap.ldapPhone!=null)&&(!ldap.ldapPhone.equals("")))
                                    {
                                        eperson.setMetadata("phone", ldap.ldapPhone);
                                    }
                                    eperson.setNetid(netid.toLowerCase());
                                    eperson.setCanLogIn(true);
                                    AuthenticationManager.initEPerson(context, request, eperson);
                                    eperson.update();
                                    context.commit();
                                    context.setCurrentUser(eperson);
                                }
                                catch (AuthorizeException e)
                                {
                                    return NO_SUCH_USER;
                                }
                                finally
                                {
                                    context.setIgnoreAuthorization(false);
                                }
                                log.info(LogManager.getHeader(context, "authenticate",
                                        "type=ldap-login, created ePerson"));
                                return SUCCESS;
                            }
                            else
                            {
                                // No auto-registration for valid certs
                                log.info(LogManager.getHeader(context,
                                        "failed_login", "type=ldap_but_no_record"));
                                return NO_SUCH_USER;
                            }
                        }
                    }
                    catch (AuthorizeException e)
                    {
                        eperson = null;
                    }
                    finally
                    {
                        context.setIgnoreAuthorization(false);
                    }
                }
            }
        }
        return BAD_ARGS;
    }
/**
* Internal class to manage LDAP query and results, mainly
* because there are multiple values to return.
*/
    private static class SpeakerToLDAP {

        private Logger log = null;

        /** ldap email result */
        protected String ldapEmail = null;

        /** ldap name result */
        protected String ldapGivenName = null;
        protected String ldapSurname = null;
        protected String ldapPhone = null;

        SpeakerToLDAP(Logger thelog)
        {
            log = thelog;
        }

        /**
         * contact the ldap server and attempt to authenticate
         *
         * Binds as "<id_field>=<netid>,<object_context>" with the supplied
         * password; on a successful bind it also searches for the entry and
         * caches e-mail / given name / surname / phone in the fields above.
         *
         * NOTE(review): the principal DN is built by plain string
         * concatenation of the netid - verify upstream input validation,
         * since special characters in a DN need escaping (RFC 4514).
         */
        protected boolean ldapAuthenticate(String netid, String password, Context context)
        {
            // An empty password would become an anonymous bind, which many
            // servers accept - reject it outright (else-branch below).
            if (!password.equals(""))
            {
                String ldap_provider_url = ConfigurationManager.getProperty("ldap.provider_url");
                String ldap_id_field = ConfigurationManager.getProperty("ldap.id_field");
                String ldap_search_context = ConfigurationManager.getProperty("ldap.search_context");
                String ldap_object_context = ConfigurationManager.getProperty("ldap.object_context");

                // Set up environment for creating initial context
                Hashtable env = new Hashtable(11);
                env.put(javax.naming.Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
                env.put(javax.naming.Context.PROVIDER_URL, ldap_provider_url);

                // Authenticate
                env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "simple");
                env.put(javax.naming.Context.SECURITY_PRINCIPAL, ldap_id_field+"="+netid+","+ldap_object_context);
                env.put(javax.naming.Context.SECURITY_CREDENTIALS, password);

                DirContext ctx = null;
                try
                {
                    // Create initial context: the bind itself is the
                    // credential check - a bad password throws NamingException.
                    ctx = new InitialDirContext(env);

                    String ldap_email_field = ConfigurationManager.getProperty("ldap.email_field");
                    String ldap_givenname_field = ConfigurationManager.getProperty("ldap.givenname_field");
                    String ldap_surname_field = ConfigurationManager.getProperty("ldap.surname_field");
                    String ldap_phone_field = ConfigurationManager.getProperty("ldap.phone_field");

                    Attributes matchAttrs = new BasicAttributes(true);
                    matchAttrs.put(new BasicAttribute(ldap_id_field, netid));

                    // Any of the configured attribute names may be null/unset.
                    String attlist[] = {ldap_email_field, ldap_givenname_field, ldap_surname_field, ldap_phone_field};

                    // look up attributes
                    try
                    {
                        NamingEnumeration answer = ctx.search(ldap_search_context, matchAttrs, attlist);
                        while(answer.hasMore()) {
                            SearchResult sr = (SearchResult)answer.next();
                            Attributes atts = sr.getAttributes();
                            Attribute att;

                            if (attlist[0]!=null)
                            {
                                att = atts.get(attlist[0]);
                                if (att != null)
                                {
                                    ldapEmail = (String) att.get();
                                }
                            }

                            if (attlist[1]!=null)
                            {
                                att = atts.get(attlist[1]);
                                if (att != null)
                                {
                                    ldapGivenName = (String) att.get();
                                }
                            }

                            if (attlist[2]!=null)
                            {
                                att = atts.get(attlist[2]);
                                if (att != null)
                                {
                                    ldapSurname = (String) att.get();
                                }
                            }

                            if (attlist[3]!=null)
                            {
                                att = atts.get(attlist[3]);
                                if (att != null)
                                {
                                    ldapPhone = (String) att.get();
                                }
                            }
                        }
                    }
                    catch (NamingException e)
                    {
                        // if the lookup fails go ahead and create a new record for them because the authentication
                        // succeeded
                        log.warn(LogManager.getHeader(context,
                                "ldap_attribute_lookup", "type=failed_search "+e));
                        // Deliberate: the bind already proved the credentials.
                        return true;
                    }
                }
                catch (NamingException e)
                {
                    log.warn(LogManager.getHeader(context,
                            "ldap_authentication", "type=failed_auth "+e));
                    return false;
                }
                finally
                {
                    // Close the context when we're done
                    try
                    {
                        if (ctx != null)
                        {
                            ctx.close();
                        }
                    }
                    catch (NamingException e)
                    {
                        // nothing sensible to do if close fails
                    }
                }
            }
            else
            {
                return false;
            }
            return true;
        }
    }
/*
* Returns URL to which to redirect to obtain credentials (either password
* prompt or e.g. HTTPS port for client cert.); null means no redirect.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @param request
* The HTTP request that started this operation, or null if not applicable.
*
* @param response
* The HTTP response from the servlet method.
*
* @return fully-qualified URL
*/
public String loginPageURL(Context context,
HttpServletRequest request,
HttpServletResponse response)
{
return response.encodeRedirectURL(request.getContextPath() +
"/ldap-login");
}
/**
* Returns message key for title of the "login" page, to use
* in a menu showing the choice of multiple login methods.
*
* @param context
* DSpace context, will be modified (ePerson set) upon success.
*
* @return Message key to look up in i18n message catalog.
*/
    public String loginPageTitle(Context context)
    {
        // i18n message key resolved by the UI, not a literal title.
        return "org.dspace.eperson.LDAPAuthentication.title";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.submission;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.xml.sax.SAXException;
import org.w3c.dom.*;
import javax.xml.parsers.*;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.log4j.Logger;
import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.core.ConfigurationManager;
/**
* Submission form generator for DSpace. Reads and parses the installation
* form definitions file, input-forms.xml, from the configuration directory.
* A forms definition details the page and field layout of the metadata
* collection pages used by the submission process. Each forms definition
* starts with a unique name that gets associated with that form set.
*
* The file also specifies which collections use which form sets. At a
* minimum, the definitions file must define a default mapping from the
* placeholder collection #0 to the distinguished form 'default'. Any
* collections that use a custom form set are listed paired with the name
* of the form set they use.
*
* The definitions file also may contain sets of value pairs. Each value pair
* will contain one string that the user reads, and a paired string that will
* supply the value stored in the database if its sibling display value gets
* selected from a choice list.
*
* @author Brian S. Hughes
* @version $Revision: 4365 $
*/
public class DCInputsReaderExt
{
/**
* The ID of the default collection. Will never be the ID of a named
* collection
*/
public static final String DEFAULT_COLLECTION = "default";
/** Name of the extended form definition XML file (AgriOceanDSpace project) */
static final String FORM_DEF_FILE = "input-forms-extended.xml";
/** Keyname for storing dropdown value-pair set name */
static final String PAIR_TYPE_NAME = "value-pairs-name";
/** log4j logger */
private static Logger log = Logger.getLogger(DCInputsReaderExt.class);
/** The fully qualified pathname of the form definition XML file */
private String defsFile = ConfigurationManager.getProperty("dspace.dir")
+ File.separator + "config" + File.separator + FORM_DEF_FILE;
/**
* Reference to the types to forms map, computed from the forms
* definition file. One form can be used for different types, but not vice versa.
* (form name, type name)
*/
private HashMap<String, String> type2Forms = null;
/**
* Reference to the collections to types map, computed from the forms
* definition file
* (handle, (type, type ...))
*/
private HashMap<String, List<String>> col2Types = null;
/**
* Reference to the forms definitions map, computed from the forms
* definition file
*/
private HashMap<String, DCInputSetExt> formDefns = null;
/**
* Reference to the value-pairs map, computed from the forms defition file
*/
private HashMap valuePairs = null; // Holds display/storage pairs
/**
* Parse an XML encoded submission forms template file, and create a hashmap
* containing all the form information. This hashmap will contain four top
* level structures: a map between collections and document types, a map
* between document types and forms ,the definition for each page of each
* form, and lists of pairs of values that populate selection boxes.
*/
public DCInputsReaderExt()
throws DCInputsReaderException
{
type2Forms = new HashMap<String, String>();
col2Types = new HashMap<String, List<String>>();
formDefns = new HashMap<String, DCInputSetExt>();
valuePairs = new HashMap();
buildInputs(defsFile);
}
public DCInputsReaderExt(String fileName)
throws DCInputsReaderException
{
type2Forms = new HashMap<String, String>();
col2Types = new HashMap<String, List<String>>();
formDefns = new HashMap<String, DCInputSetExt>();
valuePairs = new HashMap();
buildInputs(fileName);
}
private void buildInputs(String fileName) throws DCInputsReaderException
{
String uri = "file:" + new File(fileName).getAbsolutePath();
try
{
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
DocumentBuilder db = factory.newDocumentBuilder();
db.setErrorHandler(new XMLReadErrorHandler(log, uri));
Document doc = db.parse(uri);
XPathFactory xfactory = XPathFactory.newInstance();
XPath xPath = xfactory.newXPath();
processColMap((Node) xPath.evaluate("/input-forms/collection-type", doc, XPathConstants.NODE));
processTypeMap((Node) xPath.evaluate("/input-forms/type-form", doc, XPathConstants.NODE));
processValuePairs((Node) xPath.evaluate("/input-forms/form-value-pairs", doc, XPathConstants.NODE));
processForms(doc);
} catch (FactoryConfigurationError fe)
{
throw new DCInputsReaderException("Cannot create Submission form parser", fe);
} catch (Exception e)
{
throw new DCInputsReaderException("Error creating submission forms: " + e);
}
}
/**
* Process the form-definitions section of the XML file. Each element is
* formed thusly: <form name="..." baseForm="...">...pages...</form> Each pages
* subsection is formed: <page number="#"> ...groups... </page> Each group
* is formed from field rows and each field row is formed from field. Each
* field is formed from: dc-element, dc-qualifier, label, hint,
* input-type name, required text, and repeatable flag.
*/
private void processForms(Document doc)
throws XPathExpressionException, SAXException, DCInputsReaderException
{
XPathFactory xfactory = XPathFactory.newInstance();
XPath xPath = xfactory.newXPath();
NodeList forms = (NodeList) xPath.evaluate("/input-forms/form-definitions/form", doc, XPathConstants.NODESET);
for (int i = 0; i < forms.getLength(); i++)
{
processDefinition(forms.item(i), doc);
}
}
private void processDefinition(Node e, Document doc)
throws SAXException, XPathExpressionException, DCInputsReaderException
{
if (e != null)
{
String formName = getAttribute(e, "name");
String baseForm = getAttribute(e, "baseForm");
if (formName == null)
{
throw new SAXException("form element has no name attribute");
}
/* if base form defined we check is it processed already,
* if not - call processDefinition for base form first (recursion).
*/
if (baseForm != null && !formDefns.containsKey(baseForm))
{
XPathFactory xfactory = XPathFactory.newInstance();
XPath xPath = xfactory.newXPath();
Node baseFormNode = (Node) xPath.evaluate("/input-forms/form-definitions/form[@name='" + baseForm + "']", doc, XPathConstants.NODE);
if (baseFormNode != null)
{
processDefinition(baseFormNode, doc);
} else
{
throw new DCInputsReaderException("base form definition is missing. baseForm='" + baseForm + "'");
}
/* Now we have baseForm processed and can merge input pages/groups
* on the fly
*/
}
String formHint = null;
HashMap<Integer, List<DCInputGroup>> pages = new HashMap<Integer, List<DCInputGroup>>();
if (baseForm != null)
{
if (formDefns.containsKey(baseForm))
{
pages = formDefns.get(baseForm).copyAllPages();
} else
{
throw new DCInputsReaderException("base form definition is missing. baseForm='" + baseForm + "'");
}
}
NodeList pl = e.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++)
{
Node npg = pl.item(j);
//get form hint if provided
if (npg.getNodeName().equals("hint"))
{
formHint = getValue(npg);
}
// process each page definition
if (npg.getNodeName().equals("page"))
{
String pgNum = getAttribute(npg, "number");
int pgNumInt = -1;
if (pgNum == null)
{
throw new SAXException("Form " + formName + " has no identified pages");
} else
{
try
{
pgNumInt = Integer.parseInt(pgNum);
} catch (Exception ex)
{
throw new SAXException("Form " + formName + " has no non integer page number " + pgNum);
}
}
/* Check if page with number pgNumInt not in pages then add */
if (!pages.containsKey(pgNumInt))
{
pages.put(pgNumInt, new ArrayList<DCInputGroup>());
}
// process each fields group on the given page
NodeList gl = npg.getChildNodes();
int lengl = gl.getLength();
for (int g = 0; g < lengl; g++)
{
Node ng = gl.item(g); // get fieldgroup node
if (ng.getNodeName().equals("fieldgroup"))
{
String groupName = getAttribute(ng, "name");
// go inside <fieldgroup> element
DCInputGroup group = new DCInputGroup(groupName);
NodeList frl = ng.getChildNodes();
int lenfrl = frl.getLength();
for (int f = 0; f < lenfrl; f++)
{
Node nfr = frl.item(f);
if (nfr.getNodeName().equals("label"))
{
group.setLabel(getValue(nfr));
}
if (nfr.getNodeName().equals("hint"))
{
group.setHint(getValue(nfr));
}
if (nfr.getNodeName().equals("fieldrow"))
{
ArrayList<DCInput> row = new ArrayList<DCInput>();
// process each row of fields, at last
NodeList fl = nfr.getChildNodes();
int lenfl = fl.getLength();
for (int l = 0; l < lenfl; l++)
{
Node nfld = fl.item(l);
if (nfld.getNodeName().equals("field"))
{
// process each field definition
DCInput curField = processField(formName, pgNum, nfld);
row.add(curField);
}
}
group.setRow(row);
}
}
/*We have to check if group with
* groupName already present then replace it with just created one
* else just add
*/
if (getDuplicateGroup(pages.get(pgNumInt), group) != null)
{
pages.get(pgNumInt).remove(group);
}
pages.get(pgNumInt).add(group);
}
}
}
}
// sanity check number of pages
if (pages.size() < 1)
{
throw new DCInputsReaderException("Form " + formName + " has no pages");
}
DCInputSetExt form = new DCInputSetExt(formName, formHint, baseForm, pages);
formDefns.put(formName, form);
}
}
/**
* Process the collection-type section of the XML file.
* Each element looks like:
* <collection handle="default">
<type name="Article - published" />
* ...
* </collection>
* Extract the collection handle and type name, put name in hashmap list keyed
* by the collection handle.
* @see DCInputsReader#col2Types
*/
private void processColMap(Node e)
throws SAXException
{
NodeList nl = e.getChildNodes(); //<collection handle="...">
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
String nodeName = nl.item(i).getNodeName();
if (nodeName.equals("collection"))
{
String colhandle = getAttribute(nl.item(i), "handle");
if (colhandle == null)
{
throw new SAXException("collection element is missing handle attribute");
}
NodeList cols = nl.item(i).getChildNodes(); //<type name="..." />
int colsnum = cols.getLength();
for (int j = 0; j < colsnum; j++)
{
Node nd = cols.item(j);
if (nd.getNodeName().equals("type"))
{
String typename = getAttribute(nd, "name");
if (typename == null)
{
throw new SAXException("type element is missing name attribute");
}
if (col2Types.containsKey(colhandle))
{
col2Types.get(colhandle).add(typename);
} else
{
ArrayList<String> types = new ArrayList<String>();
types.add(typename);
col2Types.put(colhandle, types);
}
}
}
}
}
}
/**
* Process the type-form section of the XML file.
* Each element looks like:
* <type name="type name" form="form definition name"/>
* Extract the type name and form name, put form name in hashmap keyed
* by the type name.
*/
private void processTypeMap(Node e)
throws SAXException
{
NodeList nl = e.getChildNodes(); //<type name="" form=""/>
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
if (nl.item(i).getNodeName().equals("type"))
{
String name = getAttribute(nl.item(i), "name");
String form = getAttribute(nl.item(i), "form");
if (name == null)
{
throw new SAXException("type element is missing name attribute");
}
if (form == null)
{
throw new SAXException("type element is missing form attribute");
}
if (type2Forms.containsKey(name))
{
type2Forms.remove(name);
type2Forms.put(name, form);
} else
{
type2Forms.put(name, form);
}
}
}
}
/**
* Process parts of a field
* At the end, make sure that input-types 'qualdrop_value' and
* 'twobox' are marked repeatable. Complain if dc-element, label,
* or input-type are missing.
*/
private DCInput processField(String formName, String page, Node n)
throws SAXException
{
HashMap field = new HashMap();
Integer inputSize = 0;
NodeList nl = n.getChildNodes();
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
Node nd = nl.item(i);
if (!isEmptyTextNode(nd))
{
String tagName = nd.getNodeName();
String value = getValue(nd);
field.put(tagName, value);
if (tagName.equals("input-type"))
{
try
{
String size = getAttribute(nd, "size");
inputSize = size != null ? Integer.parseInt(size) : 0;
} catch (NumberFormatException e)
{
inputSize = 0;
log.info("invalid value of size attribute = "
+ getAttribute(nd, "size") + " of <input-type> "
+ "element in the " + FORM_DEF_FILE + "\n" + e.getMessage());
}
field.put("size", inputSize.toString());
if (value.equals("dropdown")
|| value.equals("qualdrop_value")
|| value.equals("list"))
{
String pairTypeName = getAttribute(nd, PAIR_TYPE_NAME);
if (pairTypeName == null)
{
throw new SAXException("Form " + formName + ", field "
+ field.get("dc-element")
+ "." + field.get("dc-qualifier")
+ " has no name attribute");
} else if (!valuePairs.containsKey(pairTypeName))
{
throw new SAXException("Form " + formName + ", field "
+ field.get("dc-element")
+ "." + field.get("dc-qualifier")
+ " has wrong name attribute. "
+ "Such value-pairs is not defined.");
} else
{
field.put(PAIR_TYPE_NAME, pairTypeName);
}
}
} else if (tagName.equals("vocabulary"))
{
String closedVocabularyString = getAttribute(nd, "closed");
field.put("closedVocabulary", closedVocabularyString);
} else if (tagName.equals("ask-language"))
{
String flag = getValue(nd);
if (flag.toLowerCase().equals("true") || flag.toLowerCase().equals("yes"))
{
field.put("asklang", "true");
} else
{
field.put("asklang", "false");
}
} else if (tagName.equals("authority"))
{
String suffix = getValue(nd);
String presentation = getAttribute(nd, "presentation");
String limit = getAttribute(nd, "limit");
String editable = getAttribute(nd, "editable");
String closed = getAttribute(nd, "closed");
field.put("authority", "true");
field.put("aclosed", closed);
field.put("aeditable", editable);
field.put("choices", limit);
field.put("authURL", suffix);
field.put("presentation", presentation);
}
}
}
String missing = null;
if (field.get("dc-element") == null)
{
missing = "dc-element";
}
if (field.get("label") == null)
{
missing = "label";
}
if (field.get("input-type") == null)
{
missing = "input-type";
}
if (missing != null)
{
String msg = "Required field " + missing + " missing on page " + page + " of form " + formName;
throw new SAXException(msg);
}
String type = (String) field.get("input-type");
if (type.equals("twobox") || type.equals("qualdrop_value"))
{
String rpt = (String) field.get("repeatable");
if ((rpt == null)
|| ((!rpt.equalsIgnoreCase("yes"))
&& (!rpt.equalsIgnoreCase("true"))))
{
String msg = "The field \'" + field.get("label") + "\' must be repeatable";
throw new SAXException(msg);
}
}
return new DCInput(field, valuePairs);
}
/**
* Check that this is the only field with the name dc-element.dc-qualifier
* If there is a duplicate, return an error message, else return null;
*/
// private boolean checkForDups() {
// boolean err = true;
// for (String formName : formDefns.keySet()) {
// DCInputSetExt form = formDefns.get(formName);
// if (hasDuplicate(form.getAllFieldsQual())) {
// log.error("Duplicate field detected in form " + formName + ": ");
// for (DCInput dd : (Collection<DCInput>) getDuplicate(form.getAllFields())) {
// log.error(dd.getFullQualName() + ",");
// }
// log.error("\n");
// err = false;
// }
// }
// return err;
// }
/*
* Returns list of submission types defined for given collection.
If no specific rules defined for given collection, list of submission types
for default collection will be returned.
*/
public List<String> getTypesListforCollection(String collectionHandle)
{
if (col2Types.containsKey(collectionHandle))
{
return col2Types.get(collectionHandle);
} else
{
return col2Types.get(DEFAULT_COLLECTION);
}
}
/**
* Returns the set of DC inputs used for a particular collection, or the
* default set if no inputs defined for the collection
*
* @param collectionHandle
* collection's unique Handle
* @return DC input set extended
* @throws DCInputsReaderException
* if no default set defined
* @see DCInputSet
*/
public DCInputSetExt getInputs(String collectionHandle, String documentType)
throws DCInputsReaderException
{
String ch = (collectionHandle != null && !collectionHandle.equals("")) ? collectionHandle : DEFAULT_COLLECTION;
if (documentType != null && !"".equals(documentType))
{
String formName;
if (getTypesListforCollection(ch).contains(documentType))
{
formName = type2Forms.get(documentType);
} else
{
if (!getTypesListforCollection(ch).isEmpty())
{
documentType = getTypesListforCollection(ch).get(0);
formName = type2Forms.get(documentType);
} else
{
throw new DCInputsReaderException(documentType + " is not allowed for collection "
+ collectionHandle + " check [dspace]/config/input-forms.xml");
}
}
if (formName == null)
{
if (type2Forms.size() > 0)
{
formName = (String) type2Forms.values().toArray()[0];
} else
{
throw new DCInputsReaderException("No form designated as default");
}
}
if (formDefns.containsKey(formName))
{
return formDefns.get(formName);
} else
{
throw new DCInputsReaderException("Form definition is missing for " + formName);
}
} else
{
return null;
}
}
public DCInputSetExt getInputs(String documentType)
throws DCInputsReaderException
{
String formName = type2Forms.get(documentType);
if (formName == null)
{
throw new DCInputsReaderException("No form designated as default");
}
if (formDefns.containsKey(formName))
{
return formDefns.get(formName);
} else
{
throw new DCInputsReaderException("Form definition is missing for " + formName);
}
}
/**
* Return the number of pages the inputs span for a desginated collection
* @param collectionHandle collection's unique Handle
* @return number of pages of input
* @throws DCInputsReaderException if no default set defined
*/
public int getNumberInputPages(String collectionHandle, String docType)
throws DCInputsReaderException
{
if (getInputs(collectionHandle, docType) != null)
{
return getInputs(collectionHandle, docType).getNumberPages();
} else
{
return 0;
}
}
public Iterator getPairsNameIterator()
{
return valuePairs.keySet().iterator();
}
public List getPairs(String name)
{
return (List) valuePairs.get(name);
}
/**
* Process the form-value-pairs section of the XML file.
* Each element is formed thusly:
* <value-pairs name="..." dc-term="...">
* <pair>
* <display>displayed name-</display>
* <storage>stored name</storage>
* </pair>
* For each value-pairs element, create a new vector, and extract all
* the pairs contained within it. Put the display and storage values,
* respectively, in the next slots in the vector. Store the vector
* in the passed in hashmap.
*/
private void processValuePairs(Node e)
throws SAXException
{
NodeList nl = e.getChildNodes();
if (nl != null)
{
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
Node nd = nl.item(i);
String tagName = nd.getNodeName();
// process each value-pairs set
if (tagName.equals("value-pairs"))
{
String pairsName = getAttribute(nd, PAIR_TYPE_NAME);
String dcTerm = getAttribute(nd, "dc-term");
if (pairsName == null)
{
String errString =
"Missing name attribute for value-pairs for DC term " + dcTerm;
throw new SAXException(errString);
}
ArrayList pairs = new ArrayList();
valuePairs.put(pairsName, pairs);
NodeList cl = nd.getChildNodes();
int lench = cl.getLength();
for (int j = 0; j < lench; j++)
{
Node nch = cl.item(j);
String display = null;
String storage = null;
if (nch.getNodeName().equals("pair"))
{
NodeList pl = nch.getChildNodes();
int plen = pl.getLength();
for (int k = 0; k < plen; k++)
{
Node vn = pl.item(k);
String vName = vn.getNodeName();
if (vName.equals("displayed-value"))
{
display = getValue(vn);
} else if (vName.equals("stored-value"))
{
storage = getValue(vn);
if (storage == null)
{
storage = "";
}
} // ignore any children that aren't 'display' or 'storage'
}
pairs.add(display);
pairs.add(storage);
} // ignore any children that aren't a 'pair'
}
} // ignore any children that aren't a 'value-pair'
}
}
}
private Node getElement(Node nd)
{
NodeList nl = nd.getChildNodes();
if (nl != null)
{
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
Node n = nl.item(i);
if (n.getNodeType() == Node.ELEMENT_NODE)
{
return n;
}
}
}
return null;
}
private boolean isEmptyTextNode(Node nd)
{
boolean isEmpty = false;
if (nd.getNodeType() == Node.TEXT_NODE)
{
String text = nd.getNodeValue().trim();
if (text.length() == 0)
{
isEmpty = true;
}
}
return isEmpty;
}
/**
* Returns the value of the node's attribute named <name>
*/
private String getAttribute(Node e, String name)
{
NamedNodeMap attrs = e.getAttributes();
if (attrs != null)
{
int len = attrs.getLength();
if (len > 0)
{
int i;
for (i = 0; i < len; i++)
{
Node attr = attrs.item(i);
if (name.equals(attr.getNodeName()))
{
return attr.getNodeValue().trim();
}
}
}
}
//no such attribute
return null;
}
/**
* Returns the value found in the Text node (if any) in the
* node list that's passed in.
*/
private String getValue(Node nd)
{
NodeList nl = nd.getChildNodes();
if (nl != null)
{
int len = nl.getLength();
for (int i = 0; i < len; i++)
{
Node n = nl.item(i);
short type = n.getNodeType();
if (type == Node.TEXT_NODE)
{
return n.getNodeValue().trim();
}
}
}
// Didn't find a text node
return null;
}
private static <T> List getDuplicate(Collection<T> list)
{
final List<T> duplicatedObjects = new ArrayList<T>();
Set<T> set = new HashSet<T>()
{
@Override
public boolean add(T e)
{
if (contains(e))
{
duplicatedObjects.add(e);
}
return super.add(e);
}
};
for (T t : list)
{
set.add(t);
}
return duplicatedObjects;
}
private static <T> boolean hasDuplicate(Collection<T> list)
{
if (getDuplicate(list).isEmpty())
{
return false;
}
return true;
}
private DCInputGroup getDuplicateGroup(List<DCInputGroup> groups, DCInputGroup g)
{
if (!"".equals(g.getName()))
{
for (DCInputGroup group : groups)
{
if (group.getName().equals(g.getName()))
{
return group;
}
}
return null;
} else
{
return null;
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.submission;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
/**
*
* @author Denys Slipetskyy <d.slipetskiy@gmail.com>
*/
/**
 * SAX error handler that routes parser diagnostics to a caller-supplied log4j
 * logger, tagging each message with the name of the file being parsed.
 * None of the callbacks rethrow, so parsing is not aborted here.
 * NOTE(review): returning normally from fatalError() leaves it to the parser
 * itself to abort — confirm this best-effort behavior is intended.
 */
public class XMLReadErrorHandler extends DefaultHandler
{
    // FIX: both fields were given dead initial values (a fresh Logger and "")
    // that the only constructor unconditionally overwrote; made them final
    // and assigned once instead.
    private final Logger log;
    final String filename;
    /**
     * @param logger destination for all parse diagnostics
     * @param parsedFilename name or URI of the document being parsed,
     *            used to prefix every log message
     */
    public XMLReadErrorHandler(Logger logger, String parsedFilename)
    {
        log = logger;
        filename = parsedFilename;
    }
    @Override
    public void warning(SAXParseException e) throws SAXException {
        log.warn("Parsing " + filename, e);
        printInfo(e);
    }
    @Override
    public void error(SAXParseException e) throws SAXException {
        log.error("Parsing " + filename, e);
        printInfo(e);
    }
    @Override
    public void fatalError(SAXParseException e) throws SAXException {
        log.fatal("Parsing " + filename, e);
        printInfo(e);
    }
    /** Log the location (line/column) and message of the parse problem. */
    private void printInfo(SAXParseException e) {
        log.info("   Line number: "+e.getLineNumber());
        log.info("   Column number: "+e.getColumnNumber());
        log.info("   Message: "+e.getMessage());
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.submission;
//~--- non-JDK imports --------------------------------------------------------
import org.dspace.app.util.DCInput;
//~--- JDK imports ------------------------------------------------------------
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This class represents group of DCInput fields grouped in rows.
* @author Denys Slipetskyy
* @version $Revision: 1 $
* @since 1.6
* @see DCInput
* @see DCIputSet
* @see DCInputsReader
*/
public class DCInputGroup {
    /** user hint displayed with the group */
    private String hint = null;
    /** Label of the group */
    private String label = null;
    /**
     * Name of the group - it is like ID of the group, but not necessary unique.
     * In situation then some group on the same page of basic form have the same
     * name, it will be replaced completely by this group.
     * @see [dspace]/config/input-forms-extended.xml
     */
    private String name = null;
    /**
     * Rows of fields. Each group can contain zero or more rows. Row in this
     * case is a combination of DCInput fields placed in one row on the web page.
     * key - integer number of the row (contiguous, 0-based; see setRow),
     * value - List of DCInput objects.
     * It is logical organization for UI only.
     * @see [dspace]/config/input-forms-extended.xml
     */
    private HashMap<Integer, List<DCInput>> rows;
    // =============== Methods =======================================
    /**
     * Creates an empty group. A null name is normalized to "" so that
     * name-based duplicate detection never dereferences null.
     *
     * @param name group name (may be null)
     */
    public DCInputGroup(String name) {
        if (name == null) {
            this.name = "";
        } else {
            this.name = name;
        }
        rows = new HashMap<Integer, List<DCInput>>();
    }
    /**
     * @return the hint for the group
     */
    public String getHint() {
        return hint;
    }
    /**
     * @param hint the hint for the group (null is stored as "")
     */
    public void setHint(String hint) {
        this.hint = (hint == null)
                ? ""
                : hint;
    }
    /**
     * @return the group label
     */
    public String getLabel() {
        return label;
    }
    /**
     * @param label the group label (null is stored as "")
     */
    public void setLabel(String label) {
        this.label = (label == null)
                ? ""
                : label;
    }
    /**
     * @return the group name
     */
    public String getName() {
        return name;
    }
    /**
     * @param name the group name
     */
    public void setName(String name) {
        this.name = name;
    }
    /** @return number of field rows in this group */
    public int getRowsCount() {
        return rows.size();
    }
    /**
     * @return the rows of the group, keyed by 0-based row index
     */
    public HashMap<Integer, List<DCInput>> getRows() {
        return rows;
    }
    /** @return all DCInput fields of the group, flattened across rows */
    public List<DCInput> getAllInputs() {
        List<DCInput> result = new ArrayList<DCInput>();
        for (Map.Entry row : rows.entrySet()) {
            result.addAll((List<DCInput>) row.getValue());
        }
        return result;
    }
    /**
     * @param rows the rows of DCInput fields to set
     */
    public void setRows(HashMap<Integer, List<DCInput>> rows) {
        this.rows = rows;
    }
    /**
     * Appends the given rows after this group's existing rows. Assumes this
     * group's row keys are contiguous 0..size-1 (they are, when rows were
     * added via setRow).
     *
     * @param rows rows to append; only their values are used, in entry order
     */
    public void mergeRows(HashMap<Integer, List<DCInput>> rows) {
        int i = this.rows.size();
        for (Map.Entry row : rows.entrySet()) {
            this.rows.put(i, (List<DCInput>) row.getValue());
            ++i;
        }
    }
    /** @return true when the group contains at least one row */
    public boolean hasRows() {
        // FIX: previously returned rows.isEmpty(), i.e. the exact opposite of
        // what the method name promises
        return !rows.isEmpty();
    }
    /**
     * Returns a List of DCInput objects for row with specified index. If index
     * is out of range returns null.
     * @param index of required row
     * @return List of DCInput objects
     */
    public List<DCInput> getRow(int index) {
        return (index < rows.size())
                ? rows.get(index)
                : null;
    }
    /**
     * Puts a new row into the map at the next free index
     * @param newRow - List of DCInput objects
     */
    public void setRow(List<DCInput> newRow) {
        rows.put(rows.size(), newRow);
    }
}
//~ Formatted by Jindent --- http://www.jindent.com
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.submission;
//~--- non-JDK imports --------------------------------------------------------
import org.apache.log4j.Logger;
import org.dspace.app.util.DCInput;
//~--- JDK imports ------------------------------------------------------------
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Class representing all DC inputs required for a submission, organized into pages
* field groups and rows.
*
* @author Denys Slipetskyy based on Brian S. Hughes, based on work by Jenny Toves, OCLC
* @see DCInputSetExt
* @see DCInput
* @version $Revision: 3734 $
*/
public class DCInputSetExt {
    /** name of the base (parent) form of this one. Not required. */
    private String baseForm = null;
    /** form hint for the users to be displayed on UI JSP. */
    private String formHint = null;
    /** name of the input set */
    private String formName = null;
    /**
     * the inputs ordered by page and group
     * key - page number (unique)
     * value - List of DCInputGroup
     */
    private HashMap<Integer, List<DCInputGroup>> inputPages = null;
    /**
     * Constructor.
     *
     * @param formName name of the form this set was built from
     * @param formHint optional hint shown to users on the UI (may be null)
     * @param baseForm name of the parent form, or null when there is none
     * @param pages map of page number to the page's list of DCInputGroup;
     *            entries are copied into a fresh map (the lists themselves
     *            are shared, not copied)
     */
    public DCInputSetExt(String formName, String formHint, String baseForm, HashMap pages) {
        this.formName = formName;
        this.formHint = formHint;
        this.baseForm = baseForm;
        inputPages = new HashMap<Integer, List<DCInputGroup>>();
        for (Map.Entry page : ((HashMap<Integer, List<DCInputGroup>>) pages).entrySet()) {
            inputPages.put((Integer) page.getKey(), (List<DCInputGroup>) page.getValue());
        }
    }
    /**
     * Return the name of the form that defines this input set
     *
     * @return formName the name of the form
     */
    public String getFormName() {
        return formName;
    }
    /** @return the hint configured for this form, or null when none */
    public String getFormHint() {
        return this.formHint;
    }
    /** @return the name of the base (parent) form, or null when none */
    public String getBaseForm() {
        return this.baseForm;
    }
    /**
     * Return the number of pages in this input set
     *
     * @return number of pages
     */
    public int getNumberPages() {
        return inputPages.size();
    }
    /**
     * Get all the groups for a page from the form definition.
     *
     * @param pageNum desired page within set
     * @return the page's displayable groups, or null for an unknown page number
     */
    public List<DCInputGroup> getPage(Integer pageNum) {
        if (inputPages.containsKey(pageNum)) {
            return inputPages.get(pageNum);
        } else {
            return null;
        }
    }
    /**
     * Creates a copy of inputPages: a new map with a new list for each page.
     * NOTE(review): despite the original "deep copy" claim, this is shallow
     * one level down — the DCInputGroup instances themselves are shared with
     * this form, not cloned. DCInputsReaderExt relies on the per-page lists
     * being independent so inherited groups can be replaced.
     */
    public HashMap copyAllPages() {
        HashMap<Integer, List<DCInputGroup>> result = new HashMap<Integer, List<DCInputGroup>>();
        for (Integer pNum : inputPages.keySet()) {
            result.put(pNum, new ArrayList(inputPages.get(pNum)));
        }
        return result;
    }
    /**
     * Get all the input fields for a page from the form definition.
     * NOTE(review): assumes pageNum exists — getPage returns null for an
     * unknown page, which would NPE here; confirm callers always pass valid
     * page numbers.
     *
     * @param pageNum desired page within set
     * @return the page's DCInput fields, flattened across all groups and rows
     */
    public List<DCInput> getPageInputs(Integer pageNum) {
        List<DCInput> result = new ArrayList<DCInput>();
        for (DCInputGroup group : getPage(pageNum)) {
            result.addAll(group.getAllInputs());
        }
        return result;
    }
    /**
     * Get form's pages.
     *
     * @return all form's pages (the live internal map, not a copy)
     */
    public HashMap<Integer, List<DCInputGroup>> getPages() {
        return inputPages;
    }
    /** @return true when the set defines a page with the given number */
    private boolean hasPage(int pageNum) {
        return inputPages.containsKey(pageNum);
    }
    /**
     * Get list of all fields from all pages of the form
     *
     * @return <CODE>List<DCInput></CODE>
     */
    public List<DCInput> getAllFields() {
        List<DCInput> result = new ArrayList<DCInput>();
        // iterate through all pages
        for (Map.Entry page : inputPages.entrySet()) {
            // iterate through fields groups on a given page
            for (DCInputGroup group : (List<DCInputGroup>) page.getValue()) {
                // iterate through rows in a given group
                for (Map.Entry row : group.getRows().entrySet()) {
                    result.addAll((List<DCInput>) row.getValue());
                }
            }
        }
        return result;
    }
    /**
     * Get list of all fields names
     *
     * @return <CODE>List<String></CODE>
     */
    public List<String> getAllFieldsQual() {
        List<String> result = new ArrayList<String>();
        // iterate through all pages
        for (Map.Entry page : inputPages.entrySet()) {
            // iterate through fields groups on a given page
            for (DCInputGroup group : (List<DCInputGroup>) page.getValue()) {
                // iterate through rows in a given group
                for (Map.Entry row : group.getRows().entrySet()) {
                    for (DCInput field : (List<DCInput>) row.getValue()) {
                        result.add(field.getFullQualName());
                    }
                }
            }
        }
        return result;
    }
    /**
     * Does this set of inputs include an alternate title field?
     *
     * @return true if the current set has an alternate title field
     */
    public boolean isDefinedMultTitles() {
        return isFieldPresent("title.alternative");
    }
    /**
     * Does this set of inputs include the previously published fields?
     *
     * @return true if the current set has all the prev. published fields
     */
    public boolean isDefinedPubBefore() {
        return (isFieldPresent("date.issued") && isFieldPresent("identifier.citation")
                && isFieldPresent("publisher.null"));
    }
    /**
     * Does the current input set define the named field?
     * Scan through every field in every page of the input set
     *
     * @param fieldName name in "element.qualifier" form (unqualified fields
     *            match as "element.null", see isDefinedPubBefore)
     * @return true if the current set has the named field
     */
    public boolean isFieldPresent(String fieldName) {
        for (DCInput field : this.getAllFields()) {
            String fullName = field.getElement() + "." + field.getQualifier();
            if (fullName.equals(fieldName)) {
                return true;
            }
        }
        return false;
    }
    /*
     * For DEBUGing purposes - put structure of the form to the log
     */
    public void logInputFields() {
        // log4j logger
        Logger log = Logger.getLogger(DCInputSetExt.class);
        log.info("**********");
        log.info("Form name = " + formName);
        log.info("Base form name = " + baseForm);
        log.info("Total fields = " + this.getAllFields().size());
        log.info("");
        for (Map.Entry page : inputPages.entrySet()) {
            log.info("page = " + page.getKey().toString());
            for (DCInputGroup grp : (List<DCInputGroup>) page.getValue()) {
                log.info("group = " + grp.getName());
                for (int j = 0; j < grp.getRowsCount(); j++) {
                    log.info("row = " + j);
                    log.info("----------");
                    List<DCInput> row = grp.getRow(j);
                    for (DCInput field : row) {
                        log.info(field.getFullQualName());
                    }
                    log.info("---------");
                }
            }
        }
        log.info("**********");
    }
}
//~ Formatted by Jindent --- http://www.jindent.com
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.authority;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.dspace.core.ConfigurationManager;
import java.util.ArrayList;
import org.apache.log4j.Logger;
import org.dspace.content.authority.Choice;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
*
* @author Denys Slipetskyy
* @version 1
*/
public class AuthorityManager {
    /** log4j logger */
    private static Logger log = Logger.getLogger(AuthorityManager.class);
    /**
     * SQL query configured for this authority (property "sql.&lt;authority&gt;").
     * FIX: was a static field assigned in the constructor, so concurrently
     * created managers for different authorities clobbered each other's query;
     * now per-instance. (The unused driver/url/username/password statics were
     * removed as well.)
     */
    private String sql = null;
    Context ctx;
    /**
     * @param authority authority name; selects the configured SQL query
     * @param context DSpace context used for database access
     */
    public AuthorityManager(String authority, Context context) {
        sql = ConfigurationManager.getProperty("sql." + authority);
        ctx = context;
    }
    /**
     * Run the configured query with the user's input and build the list of
     * matching Choices for autocompletion.
     *
     * @param query user-typed fragment, bound as the query's SQL parameter
     * @return matching choices; empty on SQL error (the error is logged)
     */
    public ArrayList<Choice> getAutocompleteSet(String query) {
        ArrayList<Choice> v = new ArrayList<Choice>();
        try {
            TableRowIterator tri = DatabaseManager.query(ctx, sql, query);
            try {
                TableRow tr;
                while (tri.hasNext()) {
                    tr = tri.next();
                    v.add(new Choice(tr.getStringColumn("authority"), tr.getStringColumn("value"), ""));
                }
            } finally {
                // FIX: release the underlying statement/result set
                tri.close();
            }
        } catch (SQLException ex) {
            log.error("AuthorityManager SQL error: " + ex.getLocalizedMessage());
        }
        // FIX: was "finally { return v; }", which silently swallowed any
        // unexpected exception thrown inside the try block
        return v;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.utils;
import java.util.List;
import org.jdom.Attribute;
import org.jdom.Element;
/**
*
* @author Denys Slipetskyy
*/
public class utilsXML
{
    /**
     * Recursively merges {@code tag} (with its subtree) into {@code newParent}.
     * A tag whose qualified name equals {@code limitTagName}, or which has no
     * children, is copied verbatim; otherwise children are merged into every
     * sibling of {@code newParent} that matches by name and attributes.
     */
    public static void mergeXMLTrees(Element newParent, Element tag, String limitTagName)
    {
        boolean copyVerbatim = tag.getQualifiedName().equals(limitTagName)
                || tag.getChildren().isEmpty();
        if (copyVerbatim)
        {
            newParent.getChildren().add(tag.clone());
            return;
        }
        List<Element> siblings = newParent.getChildren();
        if (siblings.isEmpty() || !haveTag(siblings, tag))
        {
            // No matching sibling yet: adopt the incoming subtree as-is.
            newParent.getChildren().add(tag.clone());
            return;
        }
        // Merge each child of the incoming tag into every matching sibling.
        for (int idx = 0; idx < siblings.size(); idx++)
        {
            Element candidate = siblings.get(idx);
            if (!equalTag(candidate, tag))
            {
                continue;
            }
            for (Element child : (List<Element>) tag.getChildren())
            {
                mergeXMLTrees(candidate, child, limitTagName);
            }
        }
    }

    /**
     * Two elements are considered equal when their qualified names match and
     * they carry identical attribute sets (an attribute missing on {@code b}
     * is compared as the empty string).
     */
    public static boolean equalTag(Element a, Element b)
    {
        if (!a.getQualifiedName().equals(b.getQualifiedName()))
        {
            return false;
        }
        if (a.getAttributes().size() != b.getAttributes().size())
        {
            return false;
        }
        for (Attribute attr : (List<Attribute>) a.getAttributes())
        {
            Attribute other = b.getAttribute(attr.getName(), attr.getNamespace());
            String otherValue = (other != null) ? other.getValue() : "";
            if (!attr.getValue().equals(otherValue))
            {
                return false;
            }
        }
        return true;
    }

    /** @return whether {@code list} already holds an element equal to {@code test} */
    public static boolean haveTag(List<Element> list, Element test)
    {
        for (Element candidate : list)
        {
            if (equalTag(candidate, test))
            {
                return true;
            }
        }
        return false;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.citation;
import java.util.ArrayList;
/**
*
* @author Denys SLIPETSKYY
*/
public class CitationTemplate {

    /** Document type this template belongs to (a dc.type value). */
    public String type;

    /** Template name within its type (e.g. "default"). */
    public String name;

    /** Metadata field that must hold a value for a citation to be generated. */
    public String definingField;

    /** Ordered template segments; a segment may contain one $schema.field$ placeholder. */
    public ArrayList<String> template = new ArrayList<String>();

    /** Creates a template with an empty segment list. */
    public CitationTemplate()
    {
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.citation;
import java.util.HashMap;
/**
*
* @author Denys SLIPETSKYY
*/
public class CitationTemplatesCollection {

    /** Templates keyed by their name. */
    private HashMap<String, CitationTemplate> templates;

    /** Creates an empty collection. */
    public CitationTemplatesCollection() {
        templates = new HashMap<String, CitationTemplate>();
    }

    /**
     * Looks up a template by name.
     *
     * @return the template, or {@code null} when no template has that name
     */
    public CitationTemplate getTemplateByName(String name) {
        // HashMap.get already yields null for unknown keys.
        return templates.get(name);
    }

    /**
     * Registers a template under {@code name} unless that name is taken.
     *
     * @return {@code true} when added; {@code false} when the name was already
     *         registered (the existing entry is kept)
     */
    public boolean addTemplate(String name, CitationTemplate template) {
        if (templates.containsKey(name)) {
            return false;
        }
        templates.put(name, template);
        return true;
    }

    /** @return number of registered templates */
    public int getTemplatesCount() {
        return templates.size();
    }
}
//~ Formatted by Jindent --- http://www.jindent.com
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package proj.oceandocs.citation;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;
import java.util.regex.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.core.ConfigurationManager;
/**
*
* @author Denys SLIPETSKYY, Dirk Leinders
*/
public class CitationManager {

    /** Citation template collections grouped by document type (dc.type value). */
    private HashMap<String, CitationTemplatesCollection> types;

    /** Parsed citation-templates.xml DOM document. */
    private org.w3c.dom.Document document;

    /**
     * Matches one {@code $schema.field$} placeholder inside a template
     * segment.  Compiled once instead of on every call.
     * NOTE(review): the inner dot is unescaped and therefore matches any
     * character; kept byte-identical so matching behaviour for existing
     * templates does not change.
     */
    private static final Pattern PLACEHOLDER =
            Pattern.compile("\\$(\\w+.\\w+)\\$", Pattern.CASE_INSENSITIVE);

    public CitationManager()
    {
        types = new HashMap<String, CitationTemplatesCollection>();
    }

    /**
     * Parses the given template file and populates {@link #types}.
     *
     * @param filename absolute path to citation-templates.xml
     * @return true when the file exists and parsed cleanly; false otherwise
     *         (parse errors are logged)
     */
    private boolean LoadTemplates(String filename)
    {
        try
        {
            if (!(new File(filename)).exists())
            {
                return false;
            }
            this.document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(filename);
            this.visitDocument();
            return true;
        } catch (SAXException ex)
        {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
            return false;
        } catch (IOException ex)
        {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
            return false;
        } catch (ParserConfigurationException ex)
        {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
            return false;
        }
    }

    /** Validates the root element and dispatches to visitElement_citation. */
    private void visitDocument()
    {
        org.w3c.dom.Element element = document.getDocumentElement();
        // Null-check BEFORE dereferencing (normalize() used to run first).
        if ((element != null) && element.getTagName().equals("citation"))
        {
            element.normalize();
            visitElement_citation(element);
        } else
        {
            System.out.println("Wrong XML format: root elemnt <citation> is missing");
        }
    }

    /** Registers the templates of every {@code <type name="...">} child. */
    private void visitElement_citation(org.w3c.dom.Element element)
    {
        org.w3c.dom.NodeList nodes = element.getChildNodes();
        String typeName;
        for (int i = 0; i < nodes.getLength(); i++)
        {
            org.w3c.dom.Node node = nodes.item(i);
            if (node.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE)
            {
                continue;
            }
            org.w3c.dom.Element nodeElement = (org.w3c.dom.Element) node;
            if (nodeElement.getTagName().equals("type"))
            {
                typeName = nodeElement.getAttribute("name");
                types.put(typeName, visitElement_type(nodeElement, typeName));
            }
        }
    }

    /**
     * Parses one {@code <type>} element into a collection of templates.  Each
     * {@code <template>} child contributes its {@code name} and
     * {@code defining-field} attributes plus the text content of its
     * {@code <g>} children as ordered template segments.
     *
     * @return the populated collection, or null when the type held no templates
     */
    private CitationTemplatesCollection visitElement_type(org.w3c.dom.Element element, String typeName)
    {
        CitationTemplatesCollection tmpCol = new CitationTemplatesCollection();
        org.w3c.dom.NodeList nodes = element.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++)
        {
            org.w3c.dom.Node node = nodes.item(i);
            if (node.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE)
            {
                continue;
            }
            org.w3c.dom.Element nodeElement = (org.w3c.dom.Element) node;
            if (!nodeElement.getTagName().equals("template"))
            {
                continue;
            }
            CitationTemplate tmpTMPL = new CitationTemplate();
            org.w3c.dom.NamedNodeMap attrs = nodeElement.getAttributes();
            for (int j = 0; j < attrs.getLength(); j++)
            {
                org.w3c.dom.Attr attr = (org.w3c.dom.Attr) attrs.item(j);
                if (attr.getName().equals("name"))
                {
                    tmpTMPL.name = attr.getValue();
                }
                if (attr.getName().equals("defining-field"))
                {
                    tmpTMPL.definingField = attr.getValue();
                }
            }
            // Collect the text content of every <g> grandchild as one segment.
            org.w3c.dom.NodeList g_nodes = nodeElement.getChildNodes();
            for (int gg = 0; gg < g_nodes.getLength(); gg++)
            {
                org.w3c.dom.Node g_template = g_nodes.item(gg);
                if (g_template.getNodeType() != org.w3c.dom.Node.ELEMENT_NODE)
                {
                    continue;
                }
                org.w3c.dom.Element g_element = (org.w3c.dom.Element) g_template;
                if (!g_element.getTagName().equals("g"))
                {
                    continue;
                }
                org.w3c.dom.NodeList g_nodes2 = g_template.getChildNodes();
                for (int gg2 = 0; gg2 < g_nodes2.getLength(); gg2++)
                {
                    org.w3c.dom.Node g_template2 = g_nodes2.item(gg2);
                    if (g_template2.getNodeType() == org.w3c.dom.Node.TEXT_NODE)
                    {
                        tmpTMPL.template.add(g_template2.getNodeValue());
                    }
                }
            }
            tmpTMPL.type = typeName;
            tmpCol.addTemplate(tmpTMPL.name, tmpTMPL);
        }
        return (tmpCol.getTemplatesCount() > 0) ? tmpCol : null;
    }

    /**
     * Looks up a template for a document type.
     *
     * @param type document type (dc.type value)
     * @param name template name; the empty string selects "default"
     * @return the template, or null when the type or name is unknown
     */
    private CitationTemplate getTemplate(String type, String name)
    {
        CitationTemplatesCollection tmpCol = this.types.get(type);
        // Guard: visitElement_citation may have stored null for an empty type;
        // the old containsKey/get pair then NPE'd on getTemplateByName.
        if (tmpCol == null)
        {
            return null;
        }
        return tmpCol.getTemplateByName(name.isEmpty() ? "default" : name);
    }

    /** @return all templates for the given type, or null when the type is unknown */
    private CitationTemplatesCollection getAllTemplatesForType(String type)
    {
        // HashMap.get already returns null for unknown types.
        return this.types.get(type);
    }

    /** Shortcut for {@code fillQuals(type, "default")}. */
    private HashSet fillQuals(String type)
    {
        return fillQuals(type, "default");
    }

    /**
     * Extracts the set of {@code schema.field} names referenced by the
     * placeholders of the named template.
     *
     * @return the (possibly empty) set of field names
     */
    private HashSet fillQuals(String type, String name)
    {
        CitationTemplate tmpTMPL = this.getTemplate(type, name);
        HashSet h = new HashSet();
        if (tmpTMPL != null)
        {
            // now find what metadata fields are in template and put them in a HashSet
            for (int i = 0; i < tmpTMPL.template.size(); i++)
            {
                Matcher m = PLACEHOLDER.matcher(tmpTMPL.template.get(i));
                while (m.find())
                {
                    h.add(m.group(1));
                }
            }
        }
        return h;
    }

    /** Shortcut for {@code compileCitation(type, map, "default")}. */
    private String compileCitation(String type, HashMap map)
    {
        return compileCitation(type, map, "default");
    }

    /**
     * Renders the citation from the given metadata values.  A template
     * segment containing a placeholder is emitted only when the referenced
     * field has a value, so missing fields drop their surrounding formatting
     * as well.
     *
     * @param type document type selecting the template collection
     * @param map  field name -> DCValue (or DCValue[] for authors), as built
     *             by getBibliographicValues
     * @param name template name
     * @return the citation, or "" when no template applies or the defining
     *         field has no value
     */
    private String compileCitation(String type, HashMap map, String name)
    {
        String citation = "";
        CitationTemplate tmpTMPL = this.getTemplate(type, name);
        // Guard before dereferencing: getTemplate may return null (the old
        // code read tmpTMPL.definingField before its own null check).
        if (tmpTMPL == null)
        {
            return citation;
        }
        // do not set identifier.citation when definingField has no value
        // (the map may hold an explicit null for valueless fields)
        Object definingValue = map.get(tmpTMPL.definingField);
        if (definingValue == null || definingValue.equals(""))
        {
            return citation;
        }
        DCValue dcv = null;
        DCValue[] authors = null;
        for (int i = 0; i < tmpTMPL.template.size(); i++)
        {
            Matcher m = PLACEHOLDER.matcher(tmpTMPL.template.get(i));
            StringBuffer sb = new StringBuffer();
            while (m.find())
            {
                /* if metadata field is null - must skip it with related formatting.
                 * Also in the case of authors names we need put them all (we will
                 * have DCValue[] instead of just DCValue) in the map in that case
                 */
                if ("bibliographicCitation.authors".equalsIgnoreCase(m.group(1)))
                {
                    authors = (DCValue[]) map.get(m.group(1));
                    String allAuthorsNames = "";
                    // Guard: the authors entry is null when the item has none.
                    if (authors != null)
                    {
                        for (int a = 0; a < authors.length; a++)
                        {
                            if (a > 0)
                            {
                                allAuthorsNames += "; ";
                            }
                            allAuthorsNames += authors[a].value != null ? (authors[a].value) : "";
                        }
                    }
                    if (!allAuthorsNames.equals(""))
                    {
                        // quoteReplacement: values may legally contain '$' or '\'
                        m.appendReplacement(sb, Matcher.quoteReplacement(allAuthorsNames));
                        m.appendTail(sb);
                        citation += sb.toString();
                    }
                } else
                {
                    dcv = (DCValue) map.get(m.group(1));
                    if ((dcv != null) && (dcv.value != null))
                    {
                        m.appendReplacement(sb, Matcher.quoteReplacement(dcv.value));
                        m.appendTail(sb);
                        citation += sb.toString();
                    }
                }
            }
        }
        return citation;
    }

    /**
     * Rebuilds the dc.identifier.citation field of an item from the citation
     * template matching its dc.type, and saves the item.
     *
     * @param item the item to update
     * @return the generated citation (empty when no type/template applies)
     * @throws SQLException passed through from item.update()
     * @throws AuthorizeException passed through from item.update()
     */
    public String updateCitationString(Item item) throws SQLException, AuthorizeException
    {
        String citation = "";
        DCValue doctypes[] = item.getMetadata(MetadataSchema.DC_SCHEMA, "type", null, Item.ANY);
        String type = "";
        HashSet quals;
        HashMap values;
        if (doctypes.length > 0)
        {
            type = doctypes[0].value;
            if (LoadTemplates(ConfigurationManager.getProperty("dspace.dir")
                    + File.separator + "config" + File.separator + "citation-templates.xml"))
            {
                if (this.types.size() > 0)
                {
                    // find element.qualifier used in template
                    quals = fillQuals(type);
                    values = getBibliographicValues(item, quals);
                    citation = compileCitation(type, values);
                    if (!citation.equals(""))
                    {
                        item.clearMetadata(MetadataSchema.DC_SCHEMA, "identifier", "citation", Item.ANY);
                        item.addMetadata(MetadataSchema.DC_SCHEMA, "identifier", "citation", this.getDefiningFieldLanguage(item, type), citation);
                    }
                    item.update();
                }
            }
        }
        return citation;
    }

    /**
     * Returns the language of the item's defining-field value for the default
     * template of {@code type}, or null when the field is absent.
     */
    private String getDefiningFieldLanguage(Item item, String type)
    {
        CitationTemplate tmpl = this.getTemplate(type, "");
        if (tmpl == null)
        {
            return null;
        }
        HashSet singletonDefiningField = new HashSet();
        singletonDefiningField.add(tmpl.definingField);
        HashMap result = this.getBibliographicValues(item, singletonDefiningField);
        DCValue dcv = (DCValue) result.get(tmpl.definingField);
        // Guard: the defining field may carry no value (was an unguarded NPE).
        return (dcv != null) ? dcv.language : null;
    }

    /**
     * Fetches metadata values for every {@code schema.field} name in quals.
     * Authors ({@code bibliographicCitation.authors}) map to the full
     * DCValue[]; every other field maps to its first DCValue.  Fields with
     * no values map to null.
     *
     * @author Walter Brebels
     * @author Denys Slipetskyy
     * @see proj.oceandocs.CitationManager
     */
    private HashMap getBibliographicValues(Item item, HashSet quals)
    {
        HashMap result = new HashMap();
        Iterator qit = quals.iterator();
        while (qit.hasNext())
        {
            String qual = (String) qit.next();
            DCValue[] v = item.getMetadata("dc." + qual);
            if (qual.equalsIgnoreCase("bibliographicCitation.authors"))
            {
                result.put(qual, v.length > 0 ? v : null);
            } else
            {
                result.put(qual, v.length > 0 ? v[0] : null);
            }
        }
        return result;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.browse;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import mockit.Mock;
import mockit.MockClass;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* Mocks some methods of BrowseCreateDAOOracle to enable compatibility with H2
* @author pvillega
*/
@MockClass(realClass=BrowseCreateDAOOracle.class)
public class MockBrowseCreateDAOOracle
{
/** log4j category */
private static Logger log = Logger.getLogger(MockBrowseCreateDAOOracle.class);
/**
* internal copy of the current DSpace context (including the database
* connection)
*/
protected Context internalContext;
/** Database specific set of utils used when prepping the database */
protected BrowseDAOUtils utils;
    /**
     * Constructor
     */
    @Mock
    public void $init(Context ctx)
    {
        // Intentionally empty: no state is taken from the mocked constructor;
        // each mocked call builds its own Context via checkContext() instead.
    }
protected void cleanContext()
{
try
{
if(internalContext != null && internalContext.isValid())
{
internalContext.complete();
}
}
catch (SQLException ex)
{
log.error("SQL Exception cleaning Mock BrowseCreateDAOOracle",ex);
}
}
    /**
     * Due to how the classloader works, we need to create custom context every time
     * this mock is called.
     */
    private void checkContext()
    {
        try
        {
            // Reuse the context only while it is still valid; otherwise open a new one.
            if(internalContext == null || !internalContext.isValid())
            {
                internalContext = new Context();
            }
            // obtain the relevant Utils for this class
            // (refreshed every call so it always matches the current context)
            utils = BrowseDAOFactory.getUtils(internalContext);
        }
        catch (SQLException ex)
        {
            log.error("SQL Exception checkContext BrowseCreateDAOOracle",ex);
        }
        catch (BrowseException ex)
        {
            log.error("Browse Exception checkContext BrowseCreateDAOOracle",ex);
        }
    }
/*
* (non-Javadoc)
*
* @see org.dspace.browse.BrowseCreateDAO#createCollectionView(java.lang.String,
* java.lang.String, boolean)
*/
@Mock
public String createCollectionView(String table, String view, boolean execute) throws BrowseException
{
try
{
String createColView = "CREATE VIEW " + view + " AS " +
"SELECT Collection2Item.collection_id, " + table + ".* " +
"FROM " + table + ", Collection2Item " +
"WHERE " + table + ".item_id = Collection2Item.item_id";
if (execute)
{
DatabaseManager.updateQuery(internalContext, createColView);
}
return createColView + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createCommunityView(java.lang.String, java.lang.String, boolean)
*/
@Mock
public String createCommunityView(String table, String view, boolean execute) throws BrowseException
{
try
{
String createComView = "CREATE VIEW " + view + " AS " +
"SELECT Communities2Item.community_id, " + table + ".* " +
"FROM " + table + ", Communities2Item " +
"WHERE " + table + ".item_id = Communities2Item.item_id";
if (execute)
{
DatabaseManager.updateQuery(internalContext, createComView);
}
return createComView + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createDatabaseIndices(java.lang.String, boolean)
*/
@Mock
public String[] createDatabaseIndices(String table, List<Integer> sortCols, boolean value, boolean execute) throws BrowseException
{
try
{
checkContext();
ArrayList<String> array = new ArrayList<String>();
array.add("CREATE INDEX " + table + "_item_id_idx ON " + table + "(item_id)");
if (value)
array.add("CREATE INDEX " + table + "_value_idx ON " + table + "(sort_value)");
for (Integer i : sortCols)
{
array.add("CREATE INDEX " + table + "_s" + i + "_idx ON " + table + "(sort_" + i + ")");
}
if (execute)
{
for (String query : array)
{
DatabaseManager.updateQuery(internalContext, query);
}
}
String[] arr = new String[array.size()];
return array.toArray(arr);
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createDatabaseIndices(java.lang.String, boolean)
*/
@Mock
public String[] createMapIndices(String disTable, String mapTable, boolean execute) throws BrowseException
{
try
{
checkContext();
String[] arr = new String[5];
arr[0] = "CREATE INDEX " + disTable + "_svalue_idx ON " + disTable + "(sort_value)";
arr[1] = "CREATE INDEX " + disTable + "_value_idx ON " + disTable + "(value)";
arr[2] = "CREATE INDEX " + disTable + "_uvalue_idx ON " + disTable + "(value)";
arr[3] = "CREATE INDEX " + mapTable + "_item_id_idx ON " + mapTable + "(item_id)";
arr[4] = "CREATE INDEX " + mapTable + "_dist_idx ON " + mapTable + "(distinct_id)";
if (execute)
{
for (String query : arr)
{
DatabaseManager.updateQuery(internalContext, query);
}
}
return arr;
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createDistinctMap(java.lang.String, java.lang.String, boolean)
*/
@Mock
public String createDistinctMap(String table, String map, boolean execute) throws BrowseException
{
try
{
checkContext();
String create = "CREATE TABLE " + map + " (" +
"map_id NUMBER PRIMARY KEY, " +
"item_id NUMBER REFERENCES item(item_id), " +
"distinct_id NUMBER REFERENCES " + table + "(id)" +
")";
if (execute)
{
DatabaseManager.updateQuery(internalContext, create);
}
return create + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
    /**
     * Reconciles the rows of {@code table} for one item against the wanted set
     * of distinct-value IDs: rows whose distinct_id is still wanted are kept,
     * rows no longer wanted are deleted, and wanted IDs without a row are
     * inserted.  All outcomes are reported via the returned results object.
     *
     * @param table       map table to reconcile
     * @param itemID      item whose mappings are reconciled
     * @param distinctIDs distinct-value IDs the item should be mapped to
     * @return summary of added / retained / removed distinct IDs
     * @throws BrowseException wrapping any SQLException
     */
    @Mock
    public MappingResults updateDistinctMappings(String table, int itemID, Set<Integer> distinctIDs) throws BrowseException
    {
        BrowseMappingResults results = new BrowseMappingResults();
        try
        {
            checkContext();
            Set<Integer> addDistinctIDs = null;
            // Find all existing mappings for this item
            TableRowIterator tri = DatabaseManager.queryTable(internalContext, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID);
            if (tri != null)
            {
                // Work on a copy so the caller's set is not mutated.
                // NOTE(review): assumes distinctIDs is a HashSet -- TODO confirm.
                addDistinctIDs = (Set<Integer>)((HashSet<Integer>)distinctIDs).clone();
                try
                {
                    while (tri.hasNext())
                    {
                        TableRow tr = tri.next();
                        // Check the item mappings to see if it contains this mapping
                        boolean itemIsMapped = false;
                        int trDistinctID = tr.getIntColumn("distinct_id");
                        if (distinctIDs.contains(trDistinctID))
                        {
                            // Found this mapping
                            results.addRetainedDistinctId(trDistinctID);
                            // Flag it, and remove (-1) from the item mappings
                            itemIsMapped = true;
                            addDistinctIDs.remove(trDistinctID);
                        }
                        // The item is no longer mapped to this community, so remove the database record
                        if (!itemIsMapped)
                        {
                            results.addRemovedDistinctId(trDistinctID);
                            DatabaseManager.delete(internalContext, tr);
                        }
                    }
                }
                finally
                {
                    tri.close();
                }
            }
            else
            {
                // No existing rows at all: every wanted ID must be inserted.
                addDistinctIDs = distinctIDs;
            }
            // Any remaining mappings need to be added to the database
            for (int distinctID : addDistinctIDs)
            {
                if (distinctID > -1)
                {
                    TableRow row = DatabaseManager.row(table);
                    row.setColumn("item_id", itemID);
                    row.setColumn("distinct_id", distinctID);
                    DatabaseManager.insert(internalContext, row);
                    results.addAddedDistinctId(distinctID);
                }
            }
        }
        catch (SQLException e)
        {
            log.error("caught exception: ", e);
            String msg = "problem updating distinct mappings: table=" + table + ",item-id=" + itemID;
            throw new BrowseException(msg, e);
        }
        finally
        {
            cleanContext();
        }
        return results;
    }
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createDistinctTable(java.lang.String, boolean)
*/
@Mock
public String createDistinctTable(String table, boolean execute) throws BrowseException
{
try
{
String create = "CREATE TABLE " + table + " (" +
"id INTEGER PRIMARY KEY, " +
"authority VARCHAR2(100), " +
"value " + getValueColumnDefinition() + ", " +
"sort_value " + getSortColumnDefinition() +
")";
if (execute)
{
checkContext();
DatabaseManager.updateQuery(internalContext, create);
}
return create + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
@Mock
public String createPrimaryTable(String table, List<Integer> sortCols, boolean execute) throws BrowseException
{
try
{
StringBuilder sb = new StringBuilder();
Iterator itr = sortCols.iterator();
while (itr.hasNext())
{
Integer no = (Integer) itr.next();
sb.append(", sort_");
sb.append(no.toString());
sb.append(getSortColumnDefinition());
}
String createTable = "CREATE TABLE " + table + " (" +
"id INTEGER PRIMARY KEY," +
"item_id INTEGER REFERENCES item(item_id)" +
sb.toString() +
")";
if (execute)
{
checkContext();
DatabaseManager.updateQuery(internalContext, createTable);
}
return createTable;
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#createSequence(java.lang.String, boolean)
*/
@Mock
public String createSequence(String sequence, boolean execute) throws BrowseException
{
try
{
String create = "CREATE SEQUENCE " + sequence;
if (execute)
{
checkContext();
DatabaseManager.updateQuery(internalContext, create);
}
return create + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#deleteByItemID(java.lang.String, int)
*/
@Mock
public void deleteByItemID(String table, int itemID) throws BrowseException
{
try
{
checkContext();
Object[] params = { new Integer(itemID) };
String dquery = "DELETE FROM " + table + " WHERE item_id=?";
DatabaseManager.updateQuery(internalContext, dquery, params);
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#deleteCommunityMappings(java.lang.String, int)
*/
@Mock
public void deleteCommunityMappings(int itemID)
throws BrowseException
{
try
{
checkContext();
Object[] params = { new Integer(itemID) };
String dquery = "DELETE FROM Communities2Item WHERE item_id = ?";
DatabaseManager.updateQuery(internalContext, dquery, params);
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#dropIndexAndRelated(java.lang.String, boolean)
*/
@Mock
public String dropIndexAndRelated(String table, boolean execute) throws BrowseException
{
try
{
checkContext();
String dropper = "DROP TABLE " + table + " CASCADE CONSTRAINTS";
if (execute)
{
DatabaseManager.updateQuery(internalContext, dropper);
}
return dropper + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#dropSequence(java.lang.String, boolean)
*/
@Mock
public String dropSequence(String sequence, boolean execute) throws BrowseException
{
try
{
checkContext();
String dropSeq = "DROP SEQUENCE " + sequence;
if (execute)
{
DatabaseManager.updateQuery(internalContext, dropSeq);
}
return dropSeq + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#dropView(java.lang.String, boolean)
*/
@Mock
public String dropView(String view, boolean execute)
throws BrowseException
{
if (view != null && !"".equals(view))
{
try
{
checkContext();
String dropView = "DROP VIEW " + view + " CASCADE CONSTRAINTS";
if (execute)
{
DatabaseManager.updateQuery(internalContext, dropView);
}
return dropView + ";";
}
catch (SQLException e)
{
log.error("caught exception: ", e);
// We can't guarantee a test for existence, or force Oracle
// not to complain if it isn't there, so we just catch the exception
// and pretend nothing is wrong
}
finally
{
cleanContext();
}
}
return "";
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#getDistinctID(java.lang.String, java.lang.String, java.lang.String)
*/
@Mock
public int getDistinctID(String table, String value, String authority, String sortValue) throws BrowseException
{
TableRowIterator tri = null;
if (log.isDebugEnabled())
{
log.debug("getDistinctID: table=" + table + ",value=" + value + ",authority=" + authority + ",sortValue=" + sortValue);
}
try
{
checkContext();
Object[] params;
String select = "SELECT id FROM " + table;
if (ConfigurationManager.getBooleanProperty("webui.browse.metadata.case-insensitive", false))
{
if (isValueColumnClob())
select = select + " WHERE TO_CHAR(value)=?";
else
select = select + " WHERE value=?";
}
else
{
if (isValueColumnClob())
select = select + " WHERE TO_CHAR(value)=?";
else
select = select + " WHERE value=?";
}
if (authority != null)
{
select += " AND authority = ?";
params = new Object[]{ value, authority };
}
else
{
select += " AND authority IS NULL";
params = new Object[]{ value };
}
tri = DatabaseManager.query(internalContext, select, params);
int distinctID = -1;
if (!tri.hasNext())
{
distinctID = insertDistinctRecord(table, value, authority, sortValue);
}
else
{
distinctID = tri.next().getIntColumn("id");
}
if (log.isDebugEnabled())
{
log.debug("getDistinctID: return=" + distinctID);
}
return distinctID;
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
if (tri != null)
tri.close();
cleanContext();
}
}
    /* (non-Javadoc)
     * @see org.dspace.browse.BrowseCreateDAO#updateCommunityMappings(int)
     */
    /**
     * Reconciles Communities2Item rows for one item: keeps rows for
     * communities the item still belongs to, deletes stale rows, and inserts
     * rows for newly-joined communities.  Duplicate community IDs in the
     * source list are collapsed via the -1 sentinel.
     *
     * @param itemID item whose community mappings are reconciled
     * @throws BrowseException wrapping any SQLException
     */
    @Mock
    public void updateCommunityMappings(int itemID) throws BrowseException
    {
        try
        {
            // Get all the communities for this item
            int[] commID = getAllCommunityIDs(itemID);
            // Remove (set to -1) any duplicate communities
            for (int i = 0; i < commID.length; i++)
            {
                if (!isFirstOccurrence(commID, i))
                    commID[i] = -1;
            }
            checkContext();
            // Find all existing mappings for this item
            TableRowIterator tri = DatabaseManager.queryTable(internalContext, "Communities2Item", "SELECT * FROM Communities2Item WHERE item_id=?", itemID);
            if (tri != null)
            {
                try
                {
                    while (tri.hasNext())
                    {
                        TableRow tr = tri.next();
                        // Check the item mappings to see if it contains this community mapping
                        boolean itemIsMapped = false;
                        int trCommID = tr.getIntColumn("community_id");
                        for (int i = 0; i < commID.length; i++)
                        {
                            // Found this community
                            if (commID[i] == trCommID)
                            {
                                // Flag it, and remove (-1) from the item mappings
                                itemIsMapped = true;
                                commID[i] = -1;
                            }
                        }
                        // The item is no longer mapped to this community, so remove the database record
                        if (!itemIsMapped)
                            DatabaseManager.delete(internalContext, tr);
                    }
                }
                finally
                {
                    tri.close();
                }
            }
            // Any remaining mappings need to be added to the database
            for (int i = 0; i < commID.length; i++)
            {
                if (commID[i] > -1)
                {
                    TableRow row = DatabaseManager.row("Communities2Item");
                    row.setColumn("item_id", itemID);
                    row.setColumn("community_id", commID[i]);
                    DatabaseManager.insert(internalContext, row);
                }
            }
        }
        catch (SQLException e)
        {
            log.error("caught exception: ", e);
            throw new BrowseException(e);
        }
        finally
        {
            cleanContext();
        }
    }
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#insertDistinctRecord(java.lang.String, java.lang.String, java.lang.String)
*/
@Mock
public int insertDistinctRecord(String table, String value, String authority, String sortValue) throws BrowseException
{
if (log.isDebugEnabled())
{
log.debug("insertDistinctRecord: table=" + table + ",value=" + value+ ",sortValue=" + sortValue);
}
try
{
checkContext();
TableRow dr = DatabaseManager.row(table);
dr.setColumn("value", utils.truncateValue(value));
dr.setColumn("sort_value", utils.truncateSortValue(sortValue));
if (authority != null)
{
dr.setColumn("authority", utils.truncateValue(authority,100));
}
DatabaseManager.insert(internalContext, dr);
int distinctID = dr.getIntColumn("id");
if (log.isDebugEnabled())
{
log.debug("insertDistinctRecord: return=" + distinctID);
}
return distinctID;
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
@Mock
public void insertIndex(String table, int itemID, Map sortCols)
throws BrowseException
{
try
{
checkContext();
// create us a row in the index
TableRow row = DatabaseManager.row(table);
// set the primary information for the index
row.setColumn("item_id", itemID);
// now set the columns for the other sort values
Iterator itra = sortCols.keySet().iterator();
while (itra.hasNext())
{
Integer key = (Integer) itra.next();
String nValue = (String) sortCols.get(key);
row.setColumn("sort_" + key.toString(), utils.truncateSortValue(nValue));
}
DatabaseManager.insert(internalContext, row);
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
}
/* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#updateIndex(java.lang.String, int, java.util.Map)
*/
@Mock
public boolean updateIndex(String table, int itemID, Map sortCols)
throws BrowseException
{
try
{
checkContext();
boolean rowUpdated = false;
TableRow row = DatabaseManager.findByUnique(internalContext, table, "item_id", itemID);
// If the item does not exist in the table, return that it couldn't be found
if (row == null)
return false;
// Iterate through all the sort values
Iterator itra = sortCols.keySet().iterator();
while (itra.hasNext())
{
Integer key = (Integer) itra.next();
// Generate the appropriate column name
String column = "sort_" + key.toString();
// Create the value that will be written in to the column
String newValue = utils.truncateSortValue( (String) sortCols.get(key) );
// Check the column exists - if it doesn't, something has gone seriously wrong
if (!row.hasColumn(column))
throw new BrowseException("Column '" + column + "' does not exist in table " + table);
// Get the existing value from the column
String oldValue = row.getStringColumn(column);
// If the new value differs from the old value, update the column and flag that the row has changed
if (oldValue != null && !oldValue.equals(newValue))
{
row.setColumn(column, newValue);
rowUpdated = true;
}
else if (newValue != null && !newValue.equals(oldValue))
{
row.setColumn(column, newValue);
rowUpdated = true;
}
}
// We've updated the row, so save it back to the database
if (rowUpdated)
DatabaseManager.update(internalContext, row);
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
cleanContext();
}
// Return that the original record was found
return true;
}
@Mock
public List<Integer> deleteMappingsByItemID(String mapTable, int itemID) throws BrowseException
{
System.out.println("Map: " + mapTable + ", item: " + itemID);
List<Integer> distinctIds = new ArrayList<Integer>();
TableRowIterator tri = null;
try
{
checkContext();
try
{
tri = DatabaseManager.queryTable(internalContext, mapTable, "SELECT * FROM " + mapTable + " WHERE item_id=?", itemID);
if (tri != null)
{
while (tri.hasNext())
{
TableRow tr = tri.next();
distinctIds.add(tr.getIntColumn("distinct_id"));
DatabaseManager.delete(internalContext, tr);
}
}
}
catch (SQLException e)
{
log.error("caught exception: ", e);
throw new BrowseException(e);
}
finally
{
if (tri != null)
{
tri.close();
}
}
}
finally
{
cleanContext();
}
return distinctIds;
}
/* (non-Javadoc)
 * @see org.dspace.browse.BrowseCreateDAO#pruneDistinct(java.lang.String, java.lang.String)
 */
@Mock
public void pruneDistinct(String table, String map, List<Integer> distinctIds) throws BrowseException
{
    try
    {
        checkContext();
        // Remove rows from the distinct table that no longer have any
        // mapping-table rows pointing at them.
        StringBuilder sql = new StringBuilder();
        sql.append("DELETE FROM ").append(table).append(" WHERE NOT EXISTS (SELECT 1 FROM ");
        sql.append(map).append(" WHERE ").append(map).append(".distinct_id = ").append(table).append(".id)");
        boolean restrictToIds = (distinctIds != null && distinctIds.size() > 0);
        if (!restrictToIds)
        {
            // No candidate ids supplied: prune the whole table in one go
            DatabaseManager.updateQuery(internalContext, sql.toString());
        }
        else
        {
            // Only test the supplied candidate ids, one execution per id
            sql.append(" AND ").append(table).append(".id=?");
            PreparedStatement pruner = null;
            try
            {
                pruner = internalContext.getDBConnection().prepareStatement(sql.toString());
                for (Integer candidate : distinctIds)
                {
                    pruner.setInt(1, candidate);
                    pruner.execute();
                    pruner.clearParameters();
                }
            }
            finally
            {
                if (pruner != null)
                {
                    pruner.close();
                }
            }
        }
    }
    catch (SQLException e)
    {
        log.error("caught exception: ", e);
        throw new BrowseException(e);
    }
    finally
    {
        cleanContext();
    }
}
/* (non-Javadoc)
 * @see org.dspace.browse.BrowseCreateDAO#pruneExcess(java.lang.String, java.lang.String)
 */
@Mock
public void pruneExcess(String table, boolean withdrawn) throws BrowseException
{
    try
    {
        checkContext();
        // Withdrawn-item tables keep withdrawn items; live tables keep
        // archived, non-withdrawn items. Everything else is deleted.
        String itemState = withdrawn
                ? "item.withdrawn = 1"
                : "item.in_archive = 1 AND item.withdrawn = 0";
        StringBuilder sql = new StringBuilder();
        sql.append("DELETE FROM ").append(table)
           .append(" WHERE NOT EXISTS (SELECT 1 FROM item WHERE item.item_id=")
           .append(table).append(".item_id AND ")
           .append(itemState).append(")");
        DatabaseManager.updateQuery(internalContext, sql.toString());
    }
    catch (SQLException e)
    {
        log.error("caught exception: ", e);
        throw new BrowseException(e);
    }
    finally
    {
        cleanContext();
    }
}
/**
 * Delete mapping rows whose item is no longer in the expected state
 * (withdrawn for withdrawn indexes, archived otherwise), optionally
 * restricted to the supplied candidate distinct ids.
 *
 * @param map the mapping table to prune
 * @param withdrawn whether this index tracks withdrawn items
 * @param distinctIds optional candidate distinct ids to restrict the prune
 * @throws BrowseException wrapping any SQLException
 */
@Mock
public void pruneMapExcess(String map, boolean withdrawn, List<Integer> distinctIds) throws BrowseException
{
    try
    {
        checkContext();
        String itemState = withdrawn
                ? "item.withdrawn = 1"
                : "item.in_archive = 1 AND item.withdrawn = 0";
        StringBuilder sql = new StringBuilder();
        sql.append("DELETE FROM ").append(map)
           .append(" WHERE NOT EXISTS (SELECT 1 FROM item WHERE item.item_id=")
           .append(map).append(".item_id AND ")
           .append(itemState).append(")");
        if (distinctIds == null || distinctIds.size() == 0)
        {
            // No candidates supplied: prune across the whole mapping table
            DatabaseManager.updateQuery(internalContext, sql.toString());
        }
        else
        {
            // Restrict to the candidate distinct ids, one execution each
            sql.append(" AND ").append(map).append(".distinct_id=?");
            PreparedStatement pruner = null;
            try
            {
                pruner = internalContext.getDBConnection().prepareStatement(sql.toString());
                for (Integer candidate : distinctIds)
                {
                    pruner.setInt(1, candidate);
                    pruner.execute();
                    pruner.clearParameters();
                }
            }
            finally
            {
                if (pruner != null)
                {
                    pruner.close();
                }
            }
        }
    }
    catch (SQLException e)
    {
        log.error("caught exception: ", e);
        throw new BrowseException(e);
    }
    finally
    {
        cleanContext();
    }
}
/* (non-Javadoc)
 * @see org.dspace.browse.BrowseCreateDAO#testTableExistence(java.lang.String)
 */
@Mock
public boolean testTableExistence(String table) throws BrowseException
{
    // Probing a missing table can kill the db connection, so the check
    // runs inside a throwaway private context that is always aborted.
    Context probe = null;
    try
    {
        probe = new Context();
        DatabaseManager.query(probe, "SELECT * FROM " + table + " WHERE ROWNUM=1");
        return true;
    }
    catch (SQLException e)
    {
        // Query failed: treat the table as non-existent
        return false;
    }
    finally
    {
        if (probe != null && probe.isValid())
        {
            probe.abort();
        }
    }
}
/**
 * Get the definition of the value column: a CLOB when the required size
 * falls outside the VARCHAR2 range (1..4000 bytes), otherwise a VARCHAR2
 * of that size.
 *
 * @return the SQL column-type fragment
 */
@Mock
private String getValueColumnDefinition()
{
    if (getValueColumnMaxBytes() >= 1 && getValueColumnMaxBytes() <= 4000)
    {
        return " VARCHAR2(" + getValueColumnMaxBytes() + ") ";
    }
    return " CLOB ";
}
/**
 * Get the definition of the sort_value column - always a VARCHAR2
 * (required for ordering).
 *
 * @return the SQL column-type fragment
 */
@Mock
private String getSortColumnDefinition()
{
    StringBuilder definition = new StringBuilder();
    definition.append(" VARCHAR2(").append(getSortColumnMaxBytes()).append(") ");
    return definition.toString();
}
/**
 * Get the size in bytes of the value columns.
 *
 * As the size is configured in chars, double the number of bytes
 * (to account for UTF-8). Out-of-range configurations are clamped to the
 * 4000-byte VARCHAR2 maximum.
 *
 * @return the column size in bytes
 */
@Mock
private int getValueColumnMaxBytes()
{
    checkContext();
    try
    {
        int chars = utils.getValueColumnMaxChars();
        if (chars > 2000 || chars < 1)
        {
            return 4000;
        }
        return chars * 2;
    }
    finally
    {
        // BUGFIX: the original skipped cleanContext() on the clamped
        // (early-return) path; release the context on every exit, matching
        // the checkContext/cleanContext pairing used throughout this class.
        cleanContext();
    }
}
/**
 * Get the size in bytes of the sort columns.
 * MUST return a value between 1 and 4000.
 *
 * As the size is configured in chars, double the number of bytes
 * (to account for UTF-8). Out-of-range configurations are clamped to 4000.
 *
 * @return the column size in bytes
 */
@Mock
private int getSortColumnMaxBytes()
{
    checkContext();
    try
    {
        int chars = utils.getSortColumnMaxChars();
        if (chars > 2000 || chars < 1)
        {
            return 4000;
        }
        return chars * 2;
    }
    finally
    {
        // BUGFIX: the original skipped cleanContext() on the clamped
        // (early-return) path; release the context on every exit.
        cleanContext();
    }
}
/**
 * If getValueColumnDefinition() is returning a CLOB definition,
 * then this must return true.
 *
 * @return true when the value column is defined as a CLOB
 */
@Mock
private boolean isValueColumnClob()
{
    // BUGFIX: mirror the full CLOB condition used by
    // getValueColumnDefinition() (size < 1 OR size > 4000). The original
    // only tested the "< 1" half and so could disagree with the DDL,
    // breaking this method's documented contract.
    int bytes = getValueColumnMaxBytes();
    return bytes < 1 || bytes > 4000;
}
/**
 * Perform a database query to get all the communities that this item
 * belongs to, including all mapped communities and their ancestors.
 *
 * This is done here instead of using the Item api, because for reindexing
 * we may not have Item objects, and in any case this is *much* faster.
 *
 * @param itemId the item to look up
 * @return every community id reachable from the item (ids may repeat)
 * @throws SQLException
 */
@Mock
private int[] getAllCommunityIDs(int itemId) throws SQLException
{
    List<Integer> found = new ArrayList<Integer>();
    TableRowIterator rows = null;
    try
    {
        checkContext();
        rows = DatabaseManager.queryTable(internalContext, "Community2Item",
                "SELECT * FROM Community2Item WHERE item_id=?", itemId);
        while (rows.hasNext())
        {
            int community = rows.next().getIntColumn("community_id");
            found.add(community);
            // Climb the hierarchy, collecting every ancestor community
            for (Integer parent = getParentCommunityID(community);
                    parent != null;
                    parent = getParentCommunityID(parent))
            {
                found.add(parent);
            }
        }
    }
    finally
    {
        if (rows != null)
        {
            rows.close();
        }
        cleanContext();
    }
    // Unbox by hand: List.toArray would yield Integer[], not the int[]
    // the callers require.
    int[] result = new int[found.size()];
    for (int i = 0; i < result.length; i++)
    {
        result[i] = found.get(i);
    }
    return result;
}
/**
 * Get the id of the parent community. Returns Integer, as null is used to
 * signify that there are no parents (ie. top-level).
 *
 * @param commId the child community id
 * @return the parent community id, or null for a top-level community
 * @throws SQLException
 */
@Mock
private Integer getParentCommunityID(int commId) throws SQLException
{
    TableRowIterator rows = null;
    try
    {
        checkContext();
        rows = DatabaseManager.queryTable(internalContext, "Community2Community",
                "SELECT * FROM Community2Community WHERE child_comm_id=?", commId);
        if (!rows.hasNext())
        {
            return null;
        }
        return rows.next().getIntColumn("parent_comm_id");
    }
    finally
    {
        if (rows != null)
        {
            rows.close();
        }
    }
}
/**
 * Check to see if the integer at pos is the first occurrence of that value
 * in the array.
 *
 * @param ids the array of ids
 * @param pos the position to test
 * @return true when ids[pos] does not appear earlier in the array;
 *         false for an out-of-range pos or a repeated value
 */
@Mock
private boolean isFirstOccurrence(int[] ids, int pos)
{
    if (pos < 0 || pos >= ids.length)
    {
        return false;
    }
    for (int earlier = 0; earlier < pos; earlier++)
    {
        if (ids[earlier] == ids[pos])
        {
            return false;
        }
    }
    return true;
}
/**
 * Simple holder for the distinct-value ids touched while updating a browse
 * mapping: ids newly added, ids retained, and ids removed.
 */
private static class BrowseMappingResults implements MappingResults
{
    private List<Integer> addedDistinctIds = new ArrayList<Integer>();
    private List<Integer> retainedDistinctIds = new ArrayList<Integer>();
    private List<Integer> removedDistinctIds = new ArrayList<Integer>();
    private void addAddedDistinctId(int id)
    {
        addedDistinctIds.add(id);
    }
    private void addRetainedDistinctId(int id)
    {
        retainedDistinctIds.add(id);
    }
    private void addRemovedDistinctId(int id)
    {
        removedDistinctIds.add(id);
    }
    // CONSISTENCY FIX: all three getters now return unmodifiable views.
    // Previously only getRemovedDistinctIds() was wrapped, leaving the
    // other two internal lists exposed to accidental mutation by callers.
    public List<Integer> getAddedDistinctIds()
    {
        return Collections.unmodifiableList(addedDistinctIds);
    }
    public List<Integer> getRetainedDistinctIds()
    {
        return Collections.unmodifiableList(retainedDistinctIds);
    }
    public List<Integer> getRemovedDistinctIds()
    {
        return Collections.unmodifiableList(removedDistinctIds);
    }
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import mockit.Mock;
import mockit.MockClass;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.net.URL;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.dbcp.ConnectionFactory;
import org.apache.commons.dbcp.DriverManagerConnectionFactory;
import org.apache.commons.dbcp.PoolableConnectionFactory;
import org.apache.commons.dbcp.PoolingDriver;
import org.apache.commons.pool.ObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.apache.commons.pool.impl.GenericKeyedObjectPoolFactory;
import org.apache.commons.pool.impl.GenericObjectPool;
import org.apache.log4j.Logger;
import org.apache.log4j.Level;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
/**
* Mocks a DatabaseManager so unit tests can be run without a real DB connection
* The code is basically the same as the original DatabaseManager but it
* establishes a connection to an in-memory database.
*
* @author pvillega
*/
@MockClass(realClass = DatabaseManager.class)
public class MockDatabaseManager
{
/** log4j category */
private static Logger log = Logger.getLogger(DatabaseManager.class);
/** True if initialization has been done */
private static boolean initialized = false;
/** Cache of generated INSERT statement text, keyed by table name (see insert()) */
private static Map<String, String> insertSQL = new HashMap<String, String>();
/** Database flavour flags, set once in the static block below */
private static boolean isOracle = false;
private static boolean isPostgres = false;
// Detect the configured database flavour from dspace.cfg; anything that
// is not "oracle" is treated as postgres (the default).
static
{
if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
{
isOracle = true;
isPostgres = false;
}
else
{
isOracle = false;
isPostgres = true;
}
}
/** Name to use for the pool */
private static String poolName = "dspacepool";
/**
 * This regular expression is used to perform sanity checks
 * on database names (i.e. tables and columns).
 *
 * Regular expressions can be slow to solve this in the future we should
 * probably create a system where we don't pass in column and table names to these low
 * level database methods. This approach is highly exploitable for injection
 * type attacks because we are unable to determine where the input came from. Instead
 * we could pass in static integer constants which are then mapped to their sql name.
 */
private static final Pattern DB_SAFE_NAME = Pattern.compile("^[a-zA-Z_1-9.]+$");
/**
 * A map of database column information. The key is the table name, a
 * String; the value is an array of ColumnInfo objects.
 */
private static Map<String, Map<String, ColumnInfo>> info = new HashMap<String, Map<String, ColumnInfo>>();
/**
 * It allows us to print information on the pool, for debugging purposes
 */
private static ObjectPool connectionPool;
/**
 * Constructor. Mocked as a no-op so instantiating the real
 * DatabaseManager performs no work during tests.
 */
@Mock
public void $init()
{
// intentionally empty
}
/**
 * Static initializer. Mocked as a no-op so the real class's static
 * setup never runs; this mock performs its own flavour detection in
 * its own static block.
 */
@Mock
public void $clinit()
{
// intentionally empty
}
/**
 * Set the constraint check to deferred (commit time).
 *
 * The in-memory database used for testing has no per-constraint deferral,
 * so this mock disables referential integrity globally; constraintName is
 * accepted for API compatibility but ignored.
 *
 * @param context
 *            The context object
 * @param constraintName
 *            the constraint name to deferred (ignored by this mock)
 * @throws SQLException
 */
@Mock
public static void setConstraintDeferred(Context context,
        String constraintName) throws SQLException
{
    Statement statement = null;
    try
    {
        statement = context.getDBConnection().createStatement();
        statement.execute("SET REFERENTIAL_INTEGRITY FALSE");
    }
    finally
    {
        // FIX: close exactly once, here; the original also closed inside
        // the try block, making that first close redundant.
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // nothing useful can be done if close fails
            }
        }
    }
}
/**
 * Set the constraint check to immediate (every query).
 *
 * The in-memory database used for testing has no per-constraint control,
 * so this mock re-enables referential integrity globally; constraintName
 * is accepted for API compatibility but ignored.
 *
 * @param context
 *            The context object
 * @param constraintName
 *            the constraint name to check immediately after every query
 *            (ignored by this mock)
 * @throws SQLException
 */
@Mock
public static void setConstraintImmediate(Context context,
        String constraintName) throws SQLException
{
    Statement statement = null;
    try
    {
        statement = context.getDBConnection().createStatement();
        statement.execute("SET REFERENTIAL_INTEGRITY TRUE");
    }
    finally
    {
        // FIX: close exactly once, here; the original also closed inside
        // the try block, making that first close redundant.
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // nothing useful can be done if close fails
            }
        }
    }
}
/**
 * Return an iterator with the results of the query. The table parameter
 * indicates the type of result. If table is null, the column names are read
 * from the ResultSetMetaData.
 *
 * @param context
 *            The context object
 * @param table
 *            The name of the table which results
 * @param query
 *            The SQL query
 * @param parameters
 *            A set of SQL parameters to be included in query. The order of
 *            the parameters must correspond to the order of their reference
 *            within the query.
 * @return A TableRowIterator with the results of the query
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRowIterator queryTable(Context context, String table, String query, Object... parameters ) throws SQLException
{
    if (log.isDebugEnabled())
    {
        StringBuilder sb = new StringBuilder("Running query \"").append(query).append("\" with parameters: ");
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // BUGFIX: append the parameter directly; the original called
            // toString() on it, throwing NullPointerException while merely
            // logging a null parameter.
            sb.append(parameters[i]);
        }
        log.debug(sb.toString());
    }
    PreparedStatement statement = context.getDBConnection().prepareStatement(query);
    try
    {
        loadParameters(statement, parameters);
        TableRowIterator retTRI = new TableRowIterator(statement.executeQuery(), canonicalize(table));
        retTRI.setStatement(statement);
        return retTRI;
    }
    catch (SQLException sqle)
    {
        // Don't leak the statement if execution failed (statement cannot
        // be null here, so no null-check is needed).
        try
        {
            statement.close();
        }
        catch (SQLException s)
        {
            // ignore secondary failure during cleanup
        }
        throw sqle;
    }
}
/**
 * Return an iterator with the results of the query.
 *
 * @param context
 *            The context object
 * @param query
 *            The SQL query
 * @param parameters
 *            A set of SQL parameters to be included in query. The order of
 *            the parameters must correspond to the order of their reference
 *            within the query.
 * @return A TableRowIterator with the results of the query
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRowIterator query(Context context, String query,
        Object... parameters) throws SQLException
{
    if (log.isDebugEnabled())
    {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // BUGFIX: null-safe append (was parameters[i].toString(),
            // which threw NullPointerException for null parameters).
            sb.append(parameters[i]);
        }
        log.debug("Running query \"" + query + "\" with parameters: " + sb.toString());
    }
    PreparedStatement statement = context.getDBConnection().prepareStatement(query);
    try
    {
        loadParameters(statement,parameters);
        TableRowIterator retTRI = new TableRowIterator(statement.executeQuery());
        retTRI.setStatement(statement);
        return retTRI;
    }
    catch (SQLException sqle)
    {
        // Statement cannot be null here; just make sure it is released.
        try { statement.close(); } catch (SQLException s) { }
        throw sqle;
    }
}
/**
 * Return the single row result to this query, or null if no result. If more
 * than one row results, only the first is returned.
 *
 * @param context
 *            Current DSpace context
 * @param query
 *            The SQL query
 * @param parameters
 *            A set of SQL parameters to be included in query. The order of
 *            the parameters must correspond to the order of their reference
 *            within the query.
 * @return A TableRow object, or null if no result
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRow querySingle(Context context, String query,
        Object... parameters) throws SQLException
{
    TableRowIterator iterator = null;
    try
    {
        iterator = query(context, query, parameters);
        if (iterator.hasNext())
        {
            return iterator.next();
        }
        return null;
    }
    finally
    {
        if (iterator != null)
        {
            iterator.close();
        }
    }
}
/**
 * Return the single row result to this query, or null if no result. If more
 * than one row results, only the first is returned.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The name of the table which results
 * @param query
 *            The SQL query
 * @param parameters
 *            A set of SQL parameters to be included in query. The order of
 *            the parameters must correspond to the order of their reference
 *            within the query.
 * @return A TableRow object, or null if no result
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRow querySingleTable(Context context, String table,
        String query, Object... parameters) throws SQLException
{
    TableRowIterator iterator = queryTable(context, canonicalize(table), query, parameters);
    try
    {
        return iterator.hasNext() ? iterator.next() : null;
    }
    finally
    {
        if (iterator != null)
        {
            iterator.close();
        }
    }
}
/**
 * Execute an update, insert or delete query. Returns the number of rows
 * affected by the query.
 *
 * @param context
 *            Current DSpace context
 * @param query
 *            The SQL query to execute
 * @param parameters
 *            A set of SQL parameters to be included in query. The order of
 *            the parameters must correspond to the order of their reference
 *            within the query.
 * @return The number of rows affected by the query.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static int updateQuery(Context context, String query, Object... parameters) throws SQLException
{
    PreparedStatement statement = null;
    if (log.isDebugEnabled())
    {
        StringBuilder sb = new StringBuilder("Running query \"").append(query).append("\" with parameters: ");
        for (int i = 0; i < parameters.length; i++)
        {
            if (i > 0)
            {
                sb.append(",");
            }
            // BUGFIX: null-safe append (was parameters[i].toString(),
            // which threw NullPointerException for null parameters).
            sb.append(parameters[i]);
        }
        log.debug(sb.toString());
    }
    try
    {
        statement = context.getDBConnection().prepareStatement(query);
        loadParameters(statement,parameters);
        return statement.executeUpdate();
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // nothing useful can be done if close fails
            }
        }
    }
}
/**
 * Create a new row in the given table, and assigns a unique id.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The RDBMS table in which to create the new row
 * @return The newly created row
 */
@Mock
public static TableRow create(Context context, String table)
        throws SQLException
{
    TableRow newRow = new TableRow(canonicalize(table), getColumnNames(table));
    insert(context, newRow);
    return newRow;
}
/**
 * Find a table row by its primary key. Returns the row, or null if no row
 * with that primary key value exists.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The table in which to find the row
 * @param id
 *            The primary key value
 * @return The row resulting from the query, or null if no row with that
 *         primary key value exists.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRow find(Context context, String table, int id)
        throws SQLException
{
    String ctable = canonicalize(table);
    // Integer.valueOf reuses cached boxes and avoids the deprecated
    // new Integer(int) constructor used by the original.
    return findByUnique(context, ctable, getPrimaryKeyColumn(ctable),
            Integer.valueOf(id));
}
/**
 * Find a table row by a unique value. Returns the row, or null if no row
 * with that primary key value exists. If multiple rows with the value
 * exist, one is returned.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The table to use to find the object
 * @param column
 *            The name of the unique column
 * @param value
 *            The value of the unique column
 * @return The row resulting from the query, or null if no row with that
 *         value exists.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static TableRow findByUnique(Context context, String table,
        String column, Object value) throws SQLException
{
    String ctable = canonicalize(table);
    // Guard against SQL injection through the identifier arguments
    if (!DB_SAFE_NAME.matcher(ctable).matches())
    {
        throw new SQLException("Unable to execute select query because table name ("+ctable+") contains non alphanumeric characters.");
    }
    if (!DB_SAFE_NAME.matcher(column).matches())
    {
        throw new SQLException("Unable to execute select query because column name ("+column+") contains non alphanumeric characters.");
    }
    String sql = "select * from " + ctable + " where " + column + " = ? ";
    return querySingleTable(context, ctable, sql, value);
}
/**
 * Delete a table row via its primary key. Returns the number of rows
 * deleted.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The table to delete from
 * @param id
 *            The primary key value
 * @return The number of rows deleted
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static int delete(Context context, String table, int id)
        throws SQLException
{
    String ctable = canonicalize(table);
    // Integer.valueOf reuses cached boxes and avoids the deprecated
    // new Integer(int) constructor used by the original.
    return deleteByValue(context, ctable, getPrimaryKeyColumn(ctable),
            Integer.valueOf(id));
}
/**
 * Delete all table rows with the given value. Returns the number of rows
 * deleted.
 *
 * @param context
 *            Current DSpace context
 * @param table
 *            The table to delete from
 * @param column
 *            The name of the column
 * @param value
 *            The value of the column
 * @return The number of rows deleted
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static int deleteByValue(Context context, String table,
        String column, Object value) throws SQLException
{
    String ctable = canonicalize(table);
    // Guard against SQL injection through the identifier arguments
    if (!DB_SAFE_NAME.matcher(ctable).matches())
    {
        throw new SQLException("Unable to execute delete query because table name ("+ctable+") contains non alphanumeric characters.");
    }
    if (!DB_SAFE_NAME.matcher(column).matches())
    {
        throw new SQLException("Unable to execute delete query because column name ("+column+") contains non alphanumeric characters.");
    }
    String sql = "delete from " + ctable + " where " + column + " = ? ";
    return updateQuery(context, sql, value);
}
/**
 * Obtain an RDBMS connection.
 *
 * @return A new database connection.
 * @exception SQLException
 *                If a database error occurs, or a connection cannot be
 *                obtained.
 */
@Mock
public static Connection getConnection() throws SQLException
{
initialize();
// Debug scaffolding: capture the first few stack frames so the log line
// below (commented out) can show who is requesting connections.
//we need to find who creates so many connections
Throwable t = new Throwable();
StackTraceElement[] elements = t.getStackTrace();
String callers = "";
for(int i = 0; i < Math.min(elements.length,4); i++)
{
callers += " > "+elements[i].getClassName()+":"+elements[i].getMethodName();
}
//uncomment to see the information on callers
//log.info(callers+" ("+connectionPool.getNumActive()+" "+connectionPool.getNumIdle()+")");
// Hand out a pooled connection through the commons-dbcp PoolingDriver
return DriverManager
.getConnection("jdbc:apache:commons:dbcp:" + poolName);
}
/**
 * Release resources associated with this connection.
 *
 * @param c
 *            The connection to release
 */
@Mock
public static void freeConnection(Connection c)
{
// Debug scaffolding: capture the first few stack frames so the log line
// below (commented out) can show who is releasing connections.
//we check who frees the connection
Throwable t = new Throwable();
StackTraceElement[] elements = t.getStackTrace();
String callers = "";
for(int i = 0; i < Math.min(elements.length,4); i++)
{
callers += " > "+elements[i].getClassName()+":"+elements[i].getMethodName();
}
//uncomment to see the information on callers
//log.info(callers+" ("+connectionPool.getNumActive()+" "+connectionPool.getNumIdle()+")");
try
{
if (c != null)
{
// Closing a pooled connection returns it to the pool
c.close();
}
}
catch (SQLException e)
{
// Best-effort release: log and carry on
log.warn(e.getMessage());
}
}
/**
 * Create a table row object that can be passed into the insert method, not
 * commonly used unless the table has a referential integrity constraint.
 *
 * @param table
 *            The RDBMS table in which to create the new row
 * @return The newly created row
 * @throws SQLException
 */
@Mock
public static TableRow row(String table) throws SQLException
{
    String canonical = canonicalize(table);
    return new TableRow(canonical, getColumnNames(table));
}
/**
 * Insert a table row into the RDBMS.
 *
 * The row is first assigned a fresh primary key from the table's sequence,
 * then inserted with a per-table INSERT statement that is cached in
 * insertSQL for reuse.
 *
 * @param context
 *            Current DSpace context
 * @param row
 *            The row to insert
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static void insert(Context context, TableRow row) throws SQLException
{
    int newID = -1;
    String table = row.getTable();
    Statement statement = null;
    ResultSet rs = null;
    try
    {
        // Get an ID (primary key) for this row by reading the table's
        // sequence in the in-memory test database
        String myQuery = "SELECT NEXTVAL('" + table + "_seq') AS result";
        statement = context.getDBConnection().createStatement();
        rs = statement.executeQuery(myQuery);
        rs.next();
        newID = rs.getInt(1);
    }
    finally
    {
        if (rs != null)
        {
            try { rs.close(); } catch (SQLException sqle) { }
        }
        if (statement != null)
        {
            try { statement.close(); } catch (SQLException sqle) { }
        }
    }
    if (newID < 0)
    {
        throw new SQLException("Unable to retrieve sequence ID");
    }
    // Set the ID in the table row object
    row.setColumn(getPrimaryKeyColumn(table), newID);
    // FIX: renamed from "info" so the local no longer shadows the static
    // column-info cache field of the same name.
    Collection<ColumnInfo> columns = getColumnInfo(table);
    String sql = insertSQL.get(table);
    if (sql == null)
    {
        // First insert into this table: build and cache the statement text
        StringBuilder sqlBuilder = new StringBuilder().append("INSERT INTO ").append(table).append(" ( ");
        boolean firstColumn = true;
        for (ColumnInfo col : columns)
        {
            if (firstColumn)
            {
                sqlBuilder.append(col.getName());
                firstColumn = false;
            }
            else
            {
                sqlBuilder.append(",").append(col.getName());
            }
        }
        sqlBuilder.append(") VALUES ( ");
        // One "?" placeholder per column
        for (int i = 0; i < columns.size(); i++)
        {
            sqlBuilder.append(i == 0 ? "?" : ",?");
        }
        sqlBuilder.append(")");
        sql = sqlBuilder.toString();
        insertSQL.put(table, sql);
    }
    // FIX: sql is already a String; the original's sql.toString() was a
    // redundant no-op call.
    execute(context.getDBConnection(), sql, columns, row);
}
/**
 * Update changes to the RDBMS. Note that if the update fails, the values in
 * the row will NOT be reverted.
 *
 * @param context
 *            Current DSpace context
 * @param row
 *            The row to update
 * @return The number of rows affected (1 or 0)
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static int update(Context context, TableRow row) throws SQLException
{
    String table = row.getTable();
    StringBuilder sql = new StringBuilder("update ").append(table).append(" set ");
    List<ColumnInfo> changed = new ArrayList<ColumnInfo>();
    ColumnInfo pk = getPrimaryKeyColumnInfo(table);
    String separator = "";
    for (ColumnInfo col : getColumnInfo(table))
    {
        // Skip the primary key and any column that has not been modified
        if (col.isPrimaryKey() || !row.hasColumnChanged(col.getName()))
        {
            continue;
        }
        sql.append(separator).append(col.getName()).append(" = ?");
        changed.add(col);
        separator = ", ";
    }
    // Nothing changed: report success without touching the database
    if (changed.isEmpty())
    {
        return 1;
    }
    sql.append(" where ").append(pk.getName()).append(" = ?");
    changed.add(pk);
    return executeUpdate(context.getDBConnection(), sql.toString(), changed, row);
}
/**
 * Delete row from the RDBMS.
 *
 * @param context
 *            Current DSpace context
 * @param row
 *            The row to delete
 * @return The number of rows affected (1 or 0)
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static int delete(Context context, TableRow row) throws SQLException
{
    String pkName = getPrimaryKeyColumn(row);
    if (row.isColumnNull(pkName))
    {
        throw new IllegalArgumentException("Primary key value is null");
    }
    return delete(context, row.getTable(), row.getIntColumn(pkName));
}
/**
 * Return metadata about a table.
 *
 * @param table
 *            The name of the table
 * @return the table's ColumnInfo objects, or null if the table is unknown
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static Collection<ColumnInfo> getColumnInfo(String table) throws SQLException
{
    Map<String, ColumnInfo> byName = getColumnInfoInternal(table);
    if (byName == null)
    {
        return null;
    }
    return byName.values();
}
/**
 * Return info about column in table.
 *
 * @param table
 *            The name of the table
 * @param column
 *            The name of the column
 * @return Information about the column, or null if unknown
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static ColumnInfo getColumnInfo(String table, String column)
        throws SQLException
{
    // Local renamed so it does not shadow the static "info" cache
    Map<String, ColumnInfo> byName = getColumnInfoInternal(table);
    return (byName != null) ? byName.get(column) : null;
}
/**
 * Return the names of all the columns of the given table.
 *
 * @param table
 *            The name of the table
 * @return The names of all the columns of the given table, as a List. Each
 *         element of the list is a String.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static List<String> getColumnNames(String table) throws SQLException
{
    List<String> names = new ArrayList<String>();
    for (ColumnInfo col : getColumnInfo(table))
    {
        names.add(col.getName());
    }
    return names;
}
/**
 * Return the names of all the columns of the ResultSet.
 *
 * @param meta
 *            The ResultSetMetaData
 * @return The names of all the columns of the given table, as a List. Each
 *         element of the list is a String.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static List<String> getColumnNames(ResultSetMetaData meta) throws SQLException
{
    int count = meta.getColumnCount();
    List<String> labels = new ArrayList<String>(count);
    // JDBC column indexes are 1-based
    for (int col = 1; col <= count; col++)
    {
        labels.add(meta.getColumnLabel(col));
    }
    return labels;
}
/**
 * Return the canonical name for a table.
 *
 * @param table
 *            The name of the table.
 * @return The canonical name of the table, or null for a null input.
 */
@Mock
static String canonicalize(String table)
{
    if (table == null)
    {
        return null;
    }
    // Oracle expects upper-case table names; the default (postgres)
    // wants lower-case. NOTE(review): case conversion uses the default
    // locale, as the original did - confirm identifiers are always ASCII.
    return isOracle ? table.toUpperCase() : table.toLowerCase();
}
////////////////////////////////////////
// SQL loading methods
////////////////////////////////////////
/**
 * Load SQL into the RDBMS.
 *
 * @param sql
 *            The SQL to load.
 * @throws SQLException
 *             If a database error occurs
 */
@Mock
public static void loadSql(String sql) throws SQLException
{
    try
    {
        loadSql(new StringReader(sql));
    }
    catch (IOException ioe)
    {
        // FIX: a StringReader should never raise IOException, but the
        // original swallowed it silently; surface it in the log instead.
        log.warn("Unexpected IOException while loading SQL from a string", ioe);
    }
}
/**
 * Load SQL from a reader into the RDBMS.
 *
 * Reads the script line by line, strips "--" comments, accumulates text
 * until a terminating ";" is seen (with a simple heuristic to skip
 * semicolons inside single-quoted strings), and executes each statement,
 * downgrading the well-known benign errors to debug level.
 *
 * @param r
 *            The Reader from which to read the SQL.
 * @throws SQLException
 *             If a database error occurs
 * @throws IOException
 *             If an error occurs obtaining data from the reader
 */
@Mock
public static void loadSql(Reader r) throws SQLException, IOException
{
    BufferedReader reader = new BufferedReader(r);
    StringBuffer sql = new StringBuffer();
    String SQL = null;
    String line = null;
    Connection connection = null;
    Statement statement = null;
    try
    {
        connection = getConnection();
        connection.setAutoCommit(true);
        statement = connection.createStatement();
        boolean inquote = false;
        while ((line = reader.readLine()) != null)
        {
            // Look for comments
            int commentStart = line.indexOf("--");
            String input = (commentStart != -1) ? line.substring(0,
                    commentStart) : line;
            // Empty line, skip
            if (input.trim().equals(""))
            {
                continue;
            }
            // Put it on the SQL buffer
            sql.append(input.replace(';', ' ')); // remove all semicolons
            // from sql file!
            // Add a space
            sql.append(" ");
            // More to come?
            // Look for quotes
            int index = 0;
            int count = 0;
            int inputlen = input.length();
            while ((index = input.indexOf("'", count)) != -1)
            {
                // Flip the value of inquote
                inquote = !inquote;
                // Move the index
                count = index + 1;
                // Make sure we do not exceed the string length
                if (count >= inputlen)
                {
                    break;
                }
            }
            // If we are in a quote, keep going
            // Note that this is STILL a simple heuristic that is not
            // guaranteed to be correct
            if (inquote)
            {
                continue;
            }
            int endMarker = input.indexOf(";", index);
            if (endMarker == -1)
            {
                continue;
            }
            if (log.isDebugEnabled())
            {
                log.debug("Running database query \"" + sql + "\"");
            }
            SQL = sql.toString();
            try
            {
                // Use execute, not executeQuery (which expects results) or
                // executeUpdate. FIX: the boolean result is not needed, so
                // the original's unused local was removed.
                statement.execute(SQL);
            }
            catch (SQLWarning sqlw)
            {
                if (log.isDebugEnabled())
                {
                    log.debug("Got SQL Warning: " + sqlw, sqlw);
                }
            }
            catch (SQLException sqle)
            {
                String msg = "Got SQL Exception: " + sqle;
                String sqlmessage = sqle.getMessage();
                // These are Postgres-isms:
                // There's no easy way to check if a table exists before
                // creating it, so we always drop tables, then create them
                boolean isDrop = ((SQL != null) && (sqlmessage != null)
                        && (SQL.toUpperCase().startsWith("DROP")) && (sqlmessage
                        .indexOf("does not exist") != -1));
                // Creating a view causes a bogus warning
                boolean isNoResults = ((SQL != null)
                        && (sqlmessage != null)
                        && ((SQL.toUpperCase().startsWith("CREATE VIEW")) || (SQL
                        .toUpperCase()
                        .startsWith("CREATE FUNCTION"))) && (sqlmessage
                        .indexOf("No results were returned") != -1));
                // If the messages are bogus, give them a low priority
                if (isDrop || isNoResults)
                {
                    if (log.isDebugEnabled())
                    {
                        log.debug(msg, sqle);
                    }
                }
                // Otherwise, we need to know!
                else
                {
                    if (log.isEnabledFor(Level.WARN))
                    {
                        log.warn(msg, sqle);
                    }
                }
            }
            // Reset SQL buffer
            sql = new StringBuffer();
            SQL = null;
        }
    }
    finally
    {
        // FIX: close the statement before its connection (the original
        // closed the connection first, which can make the subsequent
        // statement close fail); release both in all cases.
        if (statement != null)
        {
            statement.close();
        }
        if (connection != null)
        {
            connection.close();
        }
    }
}
////////////////////////////////////////
// Helper methods
////////////////////////////////////////
/**
 * Convert the current row of a ResultSet into a TableRow, letting the
 * full version derive the column names from the ResultSet metadata or
 * the table definition.
 *
 * @param results
 *            A ResultSet positioned on the row to convert
 * @param table
 *            The name of the table the row belongs to
 * @return A TableRow object with the data from the ResultSet
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static TableRow process(ResultSet results, String table)
        throws SQLException
{
    // No explicit column-name list: the overload will work them out
    List<String> noColumnNames = null;
    return process(results, table, noColumnNames);
}
/**
 * Convert the current row in a ResultSet into a TableRow object.
 *
 * @param results
 *            A ResultSet to process
 * @param table
 *            The name of the table
 * @param pColumnNames
 *            The name of the columns in this resultset; if null, names
 *            are derived from the metadata (or the table definition)
 * @return A TableRow object with the data from the ResultSet
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException
{
    String dbName = ConfigurationManager.getProperty("db.name");
    ResultSetMetaData meta = results.getMetaData();
    // JDBC columns are 1-based; loop runs for i in [1, columnCount]
    int columns = meta.getColumnCount() + 1;
    // If we haven't been passed the column names try to generate them from
    // the metadata / table
    List<String> columnNames = pColumnNames != null ? pColumnNames :
            ((table == null) ? getColumnNames(meta) : getColumnNames(table));
    TableRow row = new TableRow(canonicalize(table), columnNames);
    // Process the columns in order
    // (This ensures maximum backwards compatibility with
    // old JDBC drivers)
    for (int i = 1; i < columns; i++)
    {
        String name = meta.getColumnName(i);
        int jdbctype = meta.getColumnType(i);
        if (jdbctype == Types.BIT || jdbctype == Types.BOOLEAN)
        {
            row.setColumn(name, results.getBoolean(i));
        }
        else if ((jdbctype == Types.INTEGER) || (jdbctype == Types.NUMERIC)
                || (jdbctype == Types.DECIMAL))
        {
            // If we are using oracle
            if ("oracle".equals(dbName))
            {
                // Test the value from the record set. If it can be
                // represented using an int, do so. Otherwise store it as
                // a long.
                // FIX: the original only checked the upper bound, so a
                // value below Integer.MIN_VALUE was silently truncated by
                // the (int) cast.
                long longValue = results.getLong(i);
                if (longValue >= (long) Integer.MIN_VALUE
                        && longValue <= (long) Integer.MAX_VALUE)
                {
                    row.setColumn(name, (int) longValue);
                }
                else
                {
                    row.setColumn(name, longValue);
                }
            }
            else
            {
                row.setColumn(name, results.getInt(i));
            }
        }
        else if (jdbctype == Types.BIGINT)
        {
            row.setColumn(name, results.getLong(i));
        }
        else if (jdbctype == Types.DOUBLE)
        {
            row.setColumn(name, results.getDouble(i));
        }
        else if (jdbctype == Types.CLOB && "oracle".equals(dbName))
        {
            // Support CLOBs in place of TEXT columns in Oracle
            row.setColumn(name, results.getString(i));
        }
        else if (jdbctype == Types.VARCHAR)
        {
            // getString is used directly; the old getBytes/new String
            // round-trip caused issues with H2 and was removed
            row.setColumn(name, results.getString(i));
        }
        else if (jdbctype == Types.DATE)
        {
            row.setColumn(name, results.getDate(i));
        }
        else if (jdbctype == Types.TIME)
        {
            row.setColumn(name, results.getTime(i));
        }
        else if (jdbctype == Types.TIMESTAMP)
        {
            row.setColumn(name, results.getTimestamp(i));
        }
        else
        {
            throw new IllegalArgumentException("Unsupported JDBC type: "
                    + jdbctype + " (" + name + ")");
        }
        if (results.wasNull())
        {
            row.setColumnNull(name);
        }
    }
    // Now that we've prepped the TableRow, reset the flags so that we can
    // detect which columns have changed
    row.resetChanged();
    return row;
}
/**
 * Return the name of the primary key column. We assume there's only one
 * primary key per table; if there are more, only the first one will be
 * returned.
 *
 * @param row
 *            The TableRow to return the primary key for.
 * @return The name of the primary key column, or null if the row has no
 *         primary key.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
public static String getPrimaryKeyColumn(TableRow row) throws SQLException
{
    // Delegate to the table-name based lookup
    String table = row.getTable();
    return getPrimaryKeyColumn(table);
}
/**
 * Return the name of the primary key column in the given table. We assume
 * there's only one primary key per table; if there are more, only the first
 * one will be returned.
 *
 * @param table
 *            The name of the RDBMS table
 * @return The name of the primary key column, or null if the table has no
 *         primary key.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
protected static String getPrimaryKeyColumn(String table)
        throws SQLException
{
    ColumnInfo primaryKey = getPrimaryKeyColumnInfo(table);
    if (primaryKey == null)
    {
        return null;
    }
    return primaryKey.getName();
}
/**
 * Return column information for the primary key column, or null if the
 * table has no primary key. We assume there's only one primary key per
 * table; if there are more, only the first one will be returned.
 *
 * @param table
 *            The name of the RDBMS table
 * @return A ColumnInfo object, or null if the table has no primary key.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
static ColumnInfo getPrimaryKeyColumnInfo(String table) throws SQLException
{
    // Scan the table's columns and return the first one flagged as a
    // primary key
    for (ColumnInfo candidate : getColumnInfo(canonicalize(table)))
    {
        if (candidate.isPrimaryKey())
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Execute SQL as a PreparedStatement on Connection. Bind parameters in
 * columns to the values in the table row before executing. The statement
 * is always closed afterwards; the connection is left open for the caller.
 *
 * Note: this method returns nothing; use executeUpdate() when the affected
 * row count is needed (an earlier comment here wrongly documented a return
 * value).
 *
 * @param connection
 *            The SQL connection
 * @param sql
 *            The query to execute
 * @param columns
 *            The columns to bind
 * @param row
 *            The row supplying the bound values
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
private static void execute(Connection connection, String sql, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    PreparedStatement statement = null;
    if (log.isDebugEnabled())
    {
        log.debug("Running query \"" + sql + "\"");
    }
    try
    {
        statement = connection.prepareStatement(sql);
        loadParameters(statement, columns, row);
        statement.execute();
    }
    finally
    {
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException sqle)
            {
                // intentionally ignored: a failure to close the statement
                // must not mask an exception from the execution itself
            }
        }
    }
}
/**
 * Execute SQL as a PreparedStatement on Connection, binding parameters in
 * columns to the values in the table row, and return the affected row
 * count. The statement is always closed afterwards.
 *
 * @param connection
 *            The SQL connection
 * @param sql
 *            The query to execute
 * @param columns
 *            The columns to bind
 * @param row
 *            The row supplying the bound values
 * @return The number of rows affected by the query.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
private static int executeUpdate(Connection connection, String sql, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    if (log.isDebugEnabled())
    {
        log.debug("Running query \"" + sql + "\"");
    }
    PreparedStatement prepared = null;
    try
    {
        prepared = connection.prepareStatement(sql);
        loadParameters(prepared, columns, row);
        return prepared.executeUpdate();
    }
    finally
    {
        if (prepared != null)
        {
            try
            {
                prepared.close();
            }
            catch (SQLException ignored)
            {
                // deliberately ignored: closing must not mask the result
            }
        }
    }
}
/**
 * Return metadata about a table, caching the answer per (canonicalized)
 * table name in the file-level "info" map.
 *
 * @param table
 *            The name of the table
 * @return A map of column name to ColumnInfo for the table.
 * @exception SQLException
 *                If a database error occurs
 */
@Mock
private static Map<String, ColumnInfo> getColumnInfoInternal(String table) throws SQLException
{
    String ctable = canonicalize(table);
    Map<String, ColumnInfo> cached = info.get(ctable);
    if (cached == null)
    {
        // Cache miss: query the database and remember the answer
        cached = retrieveColumnInfo(ctable);
        info.put(ctable, cached);
    }
    return cached;
}
/**
 * Read metadata about a table from the database.
 *
 * @param table
 *            The RDBMS table, optionally prefixed with a catalog name
 *            ("catalog.table").
 * @return A map of information about the columns. The key is the name of
 *         the column, a String; the value is a ColumnInfo object.
 * @exception SQLException
 *                If there is a problem retrieving information from the
 *                RDBMS.
 */
@Mock
private static Map<String, ColumnInfo> retrieveColumnInfo(String table) throws SQLException
{
    Connection connection = null;
    ResultSet pkcolumns = null;
    ResultSet columns = null;
    try
    {
        String schema = ConfigurationManager.getProperty("db.schema");
        String catalog = null;
        // Support "catalog.table" names by splitting on the first dot
        int dotIndex = table.indexOf(".");
        if (dotIndex > 0)
        {
            catalog = table.substring(0, dotIndex);
            table = table.substring(dotIndex + 1, table.length());
            log.warn("catalog: " + catalog);
            log.warn("table: " + table);
        }
        connection = getConnection();
        DatabaseMetaData metadata = connection.getMetaData();
        Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
        // H2 reports no (or an unknown) max table name length, so the
        // name-truncation performed for other databases is skipped here
        // First gather the names of the primary key columns
        pkcolumns = metadata.getPrimaryKeys(catalog, schema, table);
        Set<String> pks = new HashSet<String>();
        while (pkcolumns.next())
        {
            // Column 4 of getPrimaryKeys() is COLUMN_NAME
            pks.add(pkcolumns.getString(4));
        }
        columns = metadata.getColumns(catalog, schema, table, null);
        while (columns.next())
        {
            // Column 4 is COLUMN_NAME, column 5 is DATA_TYPE
            String column = columns.getString(4);
            ColumnInfo cinfo = new ColumnInfo();
            cinfo.setName(column);
            cinfo.setType((int) columns.getShort(5));
            if (pks.contains(column))
            {
                cinfo.setIsPrimaryKey(true);
            }
            results.put(column, cinfo);
        }
        return results;
    }
    finally
    {
        // Close everything; swallow close failures so they cannot mask an
        // earlier exception
        if (pkcolumns != null)
        {
            try { pkcolumns.close(); } catch (SQLException sqle) { }
        }
        if (columns != null)
        {
            try { columns.close(); } catch (SQLException sqle) { }
        }
        if (connection != null)
        {
            try { connection.close(); } catch (SQLException sqle) { }
        }
    }
}
/**
 * Provide a means for a (web) application to cleanly terminate the
 * connection pool.
 *
 * @throws SQLException
 *             if the DBCP pooling driver cannot be obtained or the named
 *             pool cannot be closed
 */
@Mock
public static synchronized void shutdown() throws SQLException
{
    if (initialized)
    {
        // Mark as uninitialized first so a failed close does not leave the
        // manager claiming to be usable
        initialized = false;
        // Get the registered DBCP pooling driver and close the named pool
        PoolingDriver driver = (PoolingDriver) DriverManager.getDriver("jdbc:apache:commons:dbcp:");
        if (driver != null)
        {
            driver.closePool(poolName);
        }
    }
}
/**
 * Initialize the DatabaseManager: register the JDBC and DBCP drivers, set
 * up the connection pool, and preload the in-memory database with the
 * contents of database_schema.sql found on the classpath.
 *
 * @exception SQLException
 *                if the pool cannot be created or the schema cannot be
 *                loaded (non-SQL failures are wrapped, with their cause
 *                preserved)
 */
@Mock
private static synchronized void initialize() throws SQLException
{
    if (initialized)
    {
        return;
    }
    try
    {
        // Register basic JDBC driver
        Class.forName(ConfigurationManager.getProperty("db.driver"));
        // Register the DBCP driver
        Class.forName("org.apache.commons.dbcp.PoolingDriver");
        // Read pool configuration parameter or use defaults
        // Note we check to see if property is null; getIntProperty returns
        // '0' if the property is not set OR if it is actually set to zero.
        // But 0 is a valid option...
        int maxConnections = ConfigurationManager
                .getIntProperty("db.maxconnections");
        if (ConfigurationManager.getProperty("db.maxconnections") == null)
        {
            maxConnections = 30;
        }
        int maxWait = ConfigurationManager.getIntProperty("db.maxwait");
        if (ConfigurationManager.getProperty("db.maxwait") == null)
        {
            maxWait = 5000;
        }
        int maxIdle = ConfigurationManager.getIntProperty("db.maxidle");
        if (ConfigurationManager.getProperty("db.maxidle") == null)
        {
            maxIdle = -1;
        }
        boolean useStatementPool = ConfigurationManager.getBooleanProperty("db.statementpool", true);
        // Create object pool
        connectionPool = new GenericObjectPool(null, // PoolableObjectFactory - set below
                maxConnections, // max connections
                GenericObjectPool.WHEN_EXHAUSTED_BLOCK,
                maxWait, // block at most maxWait ms (default 5 seconds)
                maxIdle, // max idle connections (-1 = unlimited)
                true, // validate when we borrow connections from pool
                false // don't bother validating returned connections
        );
        // ConnectionFactory the pool will use to create connections.
        ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(
                ConfigurationManager.getProperty("db.url"),
                ConfigurationManager.getProperty("db.username"),
                ConfigurationManager.getProperty("db.password"));
        //
        // Now we'll create the PoolableConnectionFactory, which wraps
        // the "real" Connections created by the ConnectionFactory with
        // the classes that implement the pooling functionality.
        //
        String validationQuery = "SELECT 1";
        // Oracle has a slightly different validation query
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            validationQuery = "SELECT 1 FROM DUAL";
        }
        GenericKeyedObjectPoolFactory statementFactory = null;
        if (useStatementPool)
        {
            // The statement Pool is used to pool prepared statements.
            GenericKeyedObjectPool.Config statementFactoryConfig = new GenericKeyedObjectPool.Config();
            // Just grow the pool size when needed.
            //
            // This means we will never block when attempting to
            // create a query. The problem is unclosed statements,
            // they can never be reused. So if we place a maximum
            // cap on them, then we might reach a condition where
            // a page can only be viewed X number of times. The
            // downside of GROW_WHEN_EXHAUSTED is that this may
            // allow a memory leak to exist. Both options are bad,
            // but I'd prefer a memory leak over a failure.
            //
            // Perhaps this decision should be derived from config parameters?
            statementFactoryConfig.whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
            statementFactory = new GenericKeyedObjectPoolFactory(null, statementFactoryConfig);
        }
        PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(
                connectionFactory, connectionPool, statementFactory,
                validationQuery, // validation query
                false, // read only is not default for now
                false); // Autocommit defaults to none
        // Obtain a poolName from the config, default is "dspacepool"
        if (ConfigurationManager.getProperty("db.poolname") != null)
        {
            poolName = ConfigurationManager.getProperty("db.poolname");
        }
        //
        // Finally, we get the PoolingDriver itself and register our pool
        // with it.
        //
        PoolingDriver driver = (PoolingDriver) DriverManager.getDriver("jdbc:apache:commons:dbcp:");
        if (driver != null)
        {
            driver.registerPool(poolName, connectionPool);
        }
        // Preload the contents of the database schema.
        // FIX: the reader is now closed in a finally block so a read
        // failure cannot leak the file handle.
        URL fileSchema = MockDatabaseManager.class.getClassLoader().getResource("database_schema.sql");
        StringBuilder sb = new StringBuilder();
        BufferedReader br = null;
        try
        {
            br = new BufferedReader(new FileReader(new File(fileSchema.getPath())));
            String s;
            while ((s = br.readLine()) != null)
            {
                // we skip white lines and comments
                if (!"".equals(s.trim()) && !s.trim().startsWith("--"))
                {
                    sb.append(s);
                }
            }
        }
        finally
        {
            if (br != null)
            {
                try { br.close(); } catch (IOException ioe) { }
            }
        }
        // we use ";" as a delimiter for each request. This assumes no
        // triggers nor other calls besides CREATE TABLE, CREATE SEQUENCE
        // and INSERT exist in the file
        String[] stmts = sb.toString().split(";");
        // establish the connection using the pool.
        // FIX: statement and connection are now closed in a finally block;
        // the original leaked both when a statement failed, and never
        // closed the Statement at all.
        Connection con = null;
        Statement st = null;
        try
        {
            con = DriverManager.getConnection("jdbc:apache:commons:dbcp:" + poolName);
            st = con.createStatement();
            for (int i = 0; i < stmts.length; i++)
            {
                // we ensure that there are no spaces before or after the
                // request string in order to not execute empty statements
                if (!stmts[i].trim().equals(""))
                {
                    st.executeUpdate(stmts[i]);
                    log.debug("Loading into database: " + stmts[i]);
                }
            }
            // commit changes
            con.commit();
        }
        finally
        {
            if (st != null)
            {
                try { st.close(); } catch (SQLException sqle) { }
            }
            if (con != null)
            {
                try { con.close(); } catch (SQLException sqle) { }
            }
        }
        initialized = true;
    }
    catch (SQLException se)
    {
        // Simply throw up SQLExceptions
        throw se;
    }
    catch (Exception e)
    {
        // Need to be able to catch other exceptions. Pretend they are
        // SQLExceptions, but keep the original exception as the cause so
        // the stack trace is not lost (the original only kept toString()).
        log.warn("Exception initializing DB pool", e);
        SQLException sqle = new SQLException(e.toString());
        sqle.initCause(e);
        throw sqle;
    }
}
/**
 * Iterate over the given parameters and add them to the given prepared
 * statement. Only a select number of datatypes are supported by the JDBC
 * driver.
 *
 * @param statement
 *            The unparameterized statement.
 * @param parameters
 *            The parameters to be set on the statement; null entries are
 *            rejected with an SQLException.
 */
@Mock
protected static void loadParameters(PreparedStatement statement, Object[] parameters)
    throws SQLException{
    statement.clearParameters();
    for (int i = 0; i < parameters.length; i++)
    {
        Object parameter = parameters[i];
        // JDBC parameter indices are 1-based
        int idx = i + 1;
        if (parameter == null)
        {
            throw new SQLException("Attempting to insert null value into SQL query.");
        }
        // Dispatch on the runtime type of the parameter
        if (parameter instanceof String)
        {
            statement.setString(idx, (String) parameter);
        }
        else if (parameter instanceof Integer)
        {
            statement.setInt(idx, ((Integer) parameter).intValue());
        }
        else if (parameter instanceof Double)
        {
            statement.setDouble(idx, ((Double) parameter).doubleValue());
        }
        else if (parameter instanceof Float)
        {
            statement.setFloat(idx, ((Float) parameter).floatValue());
        }
        else if (parameter instanceof Short)
        {
            statement.setShort(idx, ((Short) parameter).shortValue());
        }
        else if (parameter instanceof Long)
        {
            statement.setLong(idx, ((Long) parameter).longValue());
        }
        else if (parameter instanceof Date)
        {
            statement.setDate(idx, (Date) parameter);
        }
        else if (parameter instanceof Time)
        {
            statement.setTime(idx, (Time) parameter);
        }
        else if (parameter instanceof Timestamp)
        {
            statement.setTimestamp(idx, (Timestamp) parameter);
        }
        else
        {
            throw new SQLException("Attempting to insert unknown datatype ("+parameter.getClass().getName()+") into SQL statement.");
        }
    }
}
/**
 * Bind each column of the table row to the corresponding positional
 * parameter of the prepared statement, converting by JDBC type. Null
 * columns are bound with setNull.
 *
 * @param statement
 *            The statement whose parameters are to be set.
 * @param columns
 *            Column metadata, in parameter order.
 * @param row
 *            The row supplying the values.
 */
@Mock
private static void loadParameters(PreparedStatement statement, Collection<ColumnInfo> columns, TableRow row) throws SQLException
{
    int paramIndex = 0;
    for (ColumnInfo info : columns)
    {
        paramIndex++;
        String column = info.getName();
        int jdbctype = info.getType();
        if (row.isColumnNull(column))
        {
            statement.setNull(paramIndex, jdbctype);
            continue;
        }
        if (jdbctype == Types.BIT || jdbctype == Types.BOOLEAN)
        {
            statement.setBoolean(paramIndex, row.getBooleanColumn(column));
        }
        else if (jdbctype == Types.INTEGER)
        {
            // Oracle stores integers as NUMBER, so bind them as longs there
            if (isOracle)
            {
                statement.setLong(paramIndex, row.getLongColumn(column));
            }
            else
            {
                statement.setInt(paramIndex, row.getIntColumn(column));
            }
        }
        else if (jdbctype == Types.NUMERIC || jdbctype == Types.DECIMAL
                || jdbctype == Types.BIGINT)
        {
            // FIXME NUMERIC/DECIMAL should be BigDecimal if TableRow
            // supported that
            statement.setLong(paramIndex, row.getLongColumn(column));
        }
        else if (jdbctype == Types.CLOB)
        {
            if (isOracle)
            {
                // Support CLOBs in place of TEXT columns in Oracle
                statement.setString(paramIndex, row.getStringColumn(column));
            }
            else
            {
                throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
            }
        }
        else if (jdbctype == Types.VARCHAR)
        {
            statement.setString(paramIndex, row.getStringColumn(column));
        }
        else if (jdbctype == Types.DATE)
        {
            statement.setDate(paramIndex, new java.sql.Date(row.getDateColumn(column).getTime()));
        }
        else if (jdbctype == Types.TIME)
        {
            statement.setTime(paramIndex, new Time(row.getDateColumn(column).getTime()));
        }
        else if (jdbctype == Types.TIMESTAMP)
        {
            statement.setTimestamp(paramIndex, new Timestamp(row.getDateColumn(column).getTime()));
        }
        else
        {
            throw new IllegalArgumentException("Unsupported JDBC type: " + jdbctype);
        }
    }
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
/**
 * StatisticsLogging Consumer for SolrLogger which captures Create, Update
 * and Delete Events on DSpace Objects.
 *
 * All usage-events will be updated to capture changes to e.g.
 * the owning collection
 *
 * @author kevinvandevelde at atmire.com
 * @author ben at atmire.com
 */
public class StatisticsLoggingConsumer implements Consumer
{

    // Solr delete-queries gathered during consume() and flushed in end()
    private Set<String> toRemoveQueries = null;

    public void initialize() throws Exception
    {
    }

    // TODO: checkout whether moving of collections, communities and bitstreams works
    // TODO: use async threaded consumer as this might require some processing time
    // TODO: we might be able to improve the performance: changing the collection will trigger 4 update commands
    /**
     * Process a single content event and keep the Solr statistics core in
     * sync: deletes queue a removal query, item metadata modifications
     * re-index the stored metadata fields, and item mapping/unmapping to a
     * collection updates the owningColl/owningComm fields.
     */
    public void consume(Context ctx, Event event) throws Exception
    {
        if (toRemoveQueries == null)
        {
            toRemoveQueries = new HashSet<String>();
        }

        int dsoId = event.getSubjectID();
        int dsoType = event.getSubjectType();
        int eventType = event.getEventType();

        // Check if we are deleting something
        if (eventType == Event.DELETE)
        {
            // First make sure we delete everything for this dso
            String query = "id:" + dsoId + " AND type:" + dsoType;
            toRemoveQueries.add(query);
        }
        else if (eventType == Event.MODIFY && dsoType == Constants.ITEM)
        {
            // We have a modified item check for a withdraw/reinstate
        }
        else if (eventType == Event.MODIFY_METADATA
                && event.getSubjectType() == Constants.ITEM)
        {
            Item item = Item.find(ctx, event.getSubjectID());

            String updateQuery = "id:" + item.getID() + " AND type:"
                    + item.getType();
            Map<String, List<String>> indexedValues = SolrLogger.queryField(
                    updateQuery, null, null);

            // Get all the metadata
            Map<String, String> metadataStorageInfo = SolrLogger.getMetadataStorageInfo();
            List<String> storageFieldList = new ArrayList<String>();
            List<List<Object>> storageValuesList = new ArrayList<List<Object>>();

            for (Map.Entry<String, String> entry : metadataStorageInfo.entrySet())
            {
                String[] metadataFieldInfo = entry.getValue().split("\\.");

                List<Object> values = new ArrayList<Object>();
                List<Object> valuesLow = new ArrayList<Object>();
                for (int i = 0; i < item.getMetadata(metadataFieldInfo[0],
                        metadataFieldInfo[1], metadataFieldInfo[2], Item.ANY).length; i++)
                {
                    values.add(item.getMetadata(metadataFieldInfo[0],
                            metadataFieldInfo[1], metadataFieldInfo[2],
                            Item.ANY)[i].value);

                    valuesLow.add(item.getMetadata(metadataFieldInfo[0],
                            metadataFieldInfo[1], metadataFieldInfo[2],
                            Item.ANY)[i].value.toLowerCase());
                }

                List<String> indexedVals = indexedValues.get(entry.getKey());

                // FIX: guard against fields that have never been indexed --
                // indexedVals is null in that case and the original code
                // threw a NullPointerException on indexedVals.size(). A
                // missing field always counts as needing an update.
                boolean update = true;
                if (indexedVals != null
                        && values.size() == indexedVals.size()
                        && values.containsAll(indexedVals))
                {
                    update = false;
                }

                if (update)
                {
                    storageFieldList.add(entry.getKey());
                    storageFieldList.add(entry.getKey() + "_search");
                    storageValuesList.add(values);
                    storageValuesList.add(valuesLow);
                }
            }

            SolrLogger.update(updateQuery, "replace", storageFieldList,
                    storageValuesList);

        }
        if (eventType == Event.ADD && dsoType == Constants.COLLECTION
                && event.getObject(ctx) instanceof Item)
        {
            // We are mapping a new item make sure that the owning collection is
            // updated
            Item newItem = (Item) event.getObject(ctx);
            String updateQuery = "id: " + newItem.getID() + " AND type:"
                    + newItem.getType();

            List<String> fieldNames = new ArrayList<String>();
            List<List<Object>> valuesList = new ArrayList<List<Object>>();
            fieldNames.add("owningColl");
            fieldNames.add("owningComm");

            List<Object> valsList = new ArrayList<Object>();
            valsList.add(dsoId);
            valuesList.add(valsList);

            valsList = new ArrayList<Object>();
            valsList.addAll(findOwningCommunities(ctx, dsoId));
            valuesList.add(valsList);

            // Now make sure we also update the communities
            SolrLogger.update(updateQuery, "addOne", fieldNames, valuesList);
        }
        else if (eventType == Event.REMOVE && dsoType == Constants.COLLECTION
                && event.getObject(ctx) instanceof Item)
        {
            // Unmapping items
            Item newItem = (Item) event.getObject(ctx);
            String updateQuery = "id: " + newItem.getID() + " AND type:"
                    + newItem.getType();

            List<String> fieldNames = new ArrayList<String>();
            List<List<Object>> valuesList = new ArrayList<List<Object>>();
            fieldNames.add("owningColl");
            fieldNames.add("owningComm");

            List<Object> valsList = new ArrayList<Object>();
            valsList.add(dsoId);
            valuesList.add(valsList);

            valsList = new ArrayList<Object>();
            valsList.addAll(findOwningCommunities(ctx, dsoId));
            valuesList.add(valsList);

            SolrLogger.update(updateQuery, "remOne", fieldNames, valuesList);
        }
    }

    /**
     * Find the ids of all communities that (transitively) own the given
     * collection.
     */
    private List<Object> findOwningCommunities(Context context, int collId)
            throws SQLException
    {
        Collection coll = Collection.find(context, collId);

        List<Object> owningComms = new ArrayList<Object>();
        // FIX: hoist getCommunities() out of the loop -- the original
        // called it twice per iteration
        Community[] communities = coll.getCommunities();
        for (int i = 0; i < communities.length; i++)
        {
            findComms(communities[i], owningComms);
        }

        return owningComms;
    }

    /**
     * Walk up the community hierarchy, adding each community id to
     * parentComms (without duplicates).
     */
    private void findComms(Community comm, List<Object> parentComms)
            throws SQLException
    {
        if (comm == null)
        {
            return;
        }
        if (!parentComms.contains(comm.getID()))
        {
            parentComms.add(comm.getID());
        }
        findComms(comm.getParentCommunity(), parentComms);
    }

    /**
     * Flush all queued delete queries to Solr, then clear the queue.
     */
    public void end(Context ctx) throws Exception
    {
        if (toRemoveQueries != null)
        {
            for (String query : toRemoveQueries)
            {
                SolrLogger.removeIndex(query);
            }
        }
        // clean out toRemoveQueries
        toRemoveQueries = null;
    }

    public void finish(Context ctx) throws Exception
    {
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import org.apache.log4j.Logger;
import org.dspace.eperson.EPerson;
import org.dspace.services.model.Event;
import org.dspace.usage.AbstractUsageEventListener;
import org.dspace.usage.UsageEvent;
/**
 * Simple SolrLoggerUsageEvent facade to separate Solr specific
 * logging implementation from DSpace.
 *
 * @author mdiggory
 *
 */
public class SolrLoggerUsageEventListener extends AbstractUsageEventListener {

    private static Logger log = Logger.getLogger(SolrLoggerUsageEventListener.class);

    /**
     * Forward usage events to the Solr logger; non-usage events are
     * ignored. Any failure is logged and swallowed so that statistics
     * recording can never break the request that triggered it.
     */
    public void receiveEvent(Event event) {

        if(event instanceof UsageEvent)
        {
            try{
                UsageEvent ue = (UsageEvent)event;
                // The context may be null; in that case no current user is
                // recorded with the event
                EPerson currentUser = ue.getContext() == null ? null : ue.getContext().getCurrentUser();
                SolrLogger.post(ue.getObject(), ue.getRequest(), currentUser);
            }
            catch(Exception e)
            {
                // FIX: pass the throwable as well so the stack trace is
                // logged instead of just the (possibly empty) message
                log.error(e.getMessage(), e);
            }
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.Ostermiller.util.ExcelCSVPrinter;
import org.apache.commons.lang.ArrayUtils;
/**
 * A labelled two-dimensional matrix of float statistics, with support for
 * per-label attributes, row/column flipping, value formatting and CSV
 * export.
 *
 * @author kevinvandevelde at atmire.com
 * Date: 21-jan-2009
 * Time: 13:44:48
 *
 */
public class Dataset {

    private int nbRows;
    private int nbCols;
    /* The labels shown in our columns */
    private List<String> colLabels;
    /* The labels shown in our rows */
    private List<String> rowLabels;
    private String colTitle;
    private String rowTitle;
    /* The attributes for the colls */
    private List<Map<String, String>> colLabelsAttrs;
    /* The attributes for the rows */
    private List<Map<String, String>> rowLabelsAttrs;
    /* The data in a matrix */
    private float[][] matrix;
    /* The format in which we format our floats */
    private String format = "0";

    /** Create an all-zero dataset with default "Row n"/"Column n" labels. */
    public Dataset(int rows, int cols){
        matrix = new float[rows][cols];
        nbRows = rows;
        nbCols = cols;
        initColumnLabels(cols);
        initRowLabels(rows);
    }

    /**
     * Create a dataset from an existing matrix.
     * NOTE(review): assumes a rectangular matrix — the column count is
     * taken from the first row only.
     */
    public Dataset(float[][] matrix){
        // FIX: copy row by row. ArrayUtils.clone() only copied the outer
        // array, so the row arrays were still shared with the caller and
        // could be mutated behind the dataset's back.
        this.matrix = copyMatrix(matrix);
        nbRows = matrix.length;
        if(0 < matrix.length && 0 < matrix[0].length)
        {
            nbCols = matrix[0].length;
        }
        initColumnLabels(nbCols);
        initRowLabels(nbRows);
    }

    /** Copy a matrix row by row so no float[] is shared with the source. */
    private static float[][] copyMatrix(float[][] source) {
        if (source == null) {
            return null;
        }
        float[][] copy = new float[source.length][];
        for (int i = 0; i < source.length; i++) {
            copy[i] = (source[i] == null) ? null : (float[]) source[i].clone();
        }
        return copy;
    }

    /** Fill rowLabels with "Row 1".."Row n" and create empty attribute maps. */
    private void initRowLabels(int rows) {
        rowLabels = new ArrayList<String>(rows);
        rowLabelsAttrs = new ArrayList<Map<String, String>>();
        for (int i = 0; i < rows; i++) {
            rowLabels.add("Row " + (i+1));
            rowLabelsAttrs.add(new HashMap<String, String>());
        }
    }

    /** Fill colLabels with "Column 1".."Column n" and create empty attribute maps. */
    private void initColumnLabels(int nbCols) {
        colLabels = new ArrayList<String>(nbCols);
        colLabelsAttrs = new ArrayList<Map<String, String>>();
        for (int i = 0; i < nbCols; i++) {
            colLabels.add("Column " + (i+1));
            colLabelsAttrs.add(new HashMap<String, String>());
        }
    }

    public void setColLabel(int n, String label){
        colLabels.set(n, label);
    }

    public void setRowLabel(int n, String label){
        rowLabels.set(n, label);
    }

    public String getRowTitle() {
        return rowTitle;
    }

    public String getColTitle() {
        return colTitle;
    }

    public void setColTitle(String colTitle) {
        this.colTitle = colTitle;
    }

    public void setRowTitle(String rowTitle) {
        this.rowTitle = rowTitle;
    }

    /** Set a single attribute on the row label at the given position. */
    public void setRowLabelAttr(int pos, String attrName, String attr){
        Map<String, String> attrs = rowLabelsAttrs.get(pos);
        attrs.put(attrName, attr);
        rowLabelsAttrs.set(pos, attrs);
    }

    /** Replace all attributes of the row label at the given position. */
    public void setRowLabelAttr(int pos, Map<String, String> attrMap){
        rowLabelsAttrs.set(pos, attrMap);
    }

    /** Set a single attribute on the column label at the given position. */
    public void setColLabelAttr(int pos, String attrName, String attr){
        Map<String, String> attrs = colLabelsAttrs.get(pos);
        attrs.put(attrName, attr);
        colLabelsAttrs.set(pos, attrs);
    }

    /** Replace all attributes of the column label at the given position. */
    public void setColLabelAttr(int pos, Map<String, String> attrMap) {
        colLabelsAttrs.set(pos, attrMap);
    }

    public List<Map<String, String>> getColLabelsAttrs() {
        return colLabelsAttrs;
    }

    public List<Map<String, String>> getRowLabelsAttrs() {
        return rowLabelsAttrs;
    }

    public List<String> getColLabels() {
        return colLabels;
    }

    public List<String> getRowLabels() {
        return rowLabels;
    }

    /** @return a defensive copy of the data; mutating it cannot affect this dataset. */
    public float[][] getMatrix() {
        // FIX: deep copy -- the previous shallow clone still exposed the
        // internal row arrays to the caller
        return copyMatrix(matrix);
    }

    public int getNbRows() {
        return nbRows;
    }

    public int getNbCols() {
        return nbCols;
    }

    public String getFormat() {
        return format;
    }

    /** Set the DecimalFormat pattern used for formatting and parsing values. */
    public void setFormat(String format) {
        this.format = format;
    }

    /** @return the matrix with every value rendered through the format pattern. */
    public String[][] getMatrixFormatted(){
        DecimalFormat decimalFormat = new DecimalFormat(format);

        if (matrix.length == 0) {
            return new String[0][0];
        } else {
            String[][] strMatrix = new String[matrix.length][matrix[0].length];
            for (int i = 0; i < matrix.length; i++) {
                for (int j = 0; j < matrix[i].length; j++) {
                    strMatrix[i][j] = decimalFormat.format(matrix[i][j]);
                }
            }
            return strMatrix;
        }
    }

    public void addValueToMatrix(int row, int coll, float value) {
        matrix[row][coll] = value;
    }

    /** Parse the value with the current format pattern and store it. */
    public void addValueToMatrix(int row, int coll, String value) throws ParseException {
        DecimalFormat decimalFormat = new DecimalFormat(format);
        Number number = decimalFormat.parse(value);
        matrix[row][coll] = number.floatValue();
    }

    /**
     * Returns false if this dataset only contains zero's.
     */
    public boolean containsNonZeroValues(){
        if (matrix != null) {
            for (float[] vector : matrix) {
                for (float v : vector) {
                    if (v != 0)
                    {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /** Transpose the matrix and swap all row/column labels, attributes, sizes and titles. */
    public void flipRowCols(){
        //Lets make sure we at least have something to flip
        if(0 < matrix.length && 0 < matrix[0].length){
            //Flip the data first
            float[][] newMatrix = new float[matrix[0].length][matrix.length];
            for (int i = 0; i < matrix.length; i++) {
                for (int j = 0; j < matrix[i].length; j++) {
                    newMatrix[j][i] = matrix[i][j];
                }
            }
            //Flip the rows & column labels
            List<String> backup = colLabels;
            colLabels = rowLabels;
            rowLabels = backup;
            //Also flip the links
            List<Map<String, String>> backList = colLabelsAttrs;
            colLabelsAttrs = rowLabelsAttrs;
            rowLabelsAttrs = backList;

            matrix = newMatrix;
        }
        //Also flip these sizes
        int backUp = nbRows;
        nbRows = nbCols;
        nbCols = backUp;
        //Also flip the title's
        String backup = rowTitle;
        rowTitle = colTitle;
        colTitle = backup;
    }

    /** Export the dataset as a semicolon-delimited, always-quoted CSV stream. */
    public ByteArrayOutputStream exportAsCSV() throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ExcelCSVPrinter ecsvp = new ExcelCSVPrinter(baos);
        ecsvp.changeDelimiter(';');
        ecsvp.setAlwaysQuote(true);
        //Generate the header row: an empty corner cell, then the column labels
        List<String> colLabels = getColLabels();
        ecsvp.write("");
        for (String colLabel : colLabels) {
            ecsvp.write(colLabel);
        }
        ecsvp.writeln();
        List<String> rowLabels = getRowLabels();
        String[][] matrix = getMatrixFormatted();
        for (int i = 0; i < rowLabels.size(); i++) {
            String rowLabel = rowLabels.get(i);
            ecsvp.write(rowLabel);
            for (int j = 0; j < matrix[i].length; j++) {
                ecsvp.write(matrix[i][j]);
            }
            ecsvp.writeln();
        }

        ecsvp.flush();
        ecsvp.close();

        return baos;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.statistics.SolrLogger;
import org.dspace.statistics.content.filter.StatisticsFilter;
/**
* Class that will hold the data needed to show
* statistics in the browse and search pages.
*
 * @author kevinvandevelde at atmire.com (kevin at atmire.com)
* Date: 20-mei-2009
* Time: 16:44:29
*/
public class StatisticsBSAdapter {

    /** Whether the number of item page views should be shown. */
    private boolean displayItemViews;
    /** Whether the number of bitstream downloads should be shown. */
    private boolean displayBitstreamViews;
    /** Whether the combined item + bitstream total should be shown. */
    private boolean displayTotalViews;
    /** Extra filters ANDed onto every visit query. */
    private List<StatisticsFilter> filters;

    /** visitType is ITEM */
    public static final int ITEM_VISITS = 0;
    /** visitType is BITSTREAM */
    public static final int BITSTREAM_VISITS = 1;
    /** visitType is TOTAL */
    public static final int TOTAL_VISITS = 2;

    public StatisticsBSAdapter() {
        displayItemViews = false;
        displayBitstreamViews = false;
        filters = new ArrayList<StatisticsFilter>();
    }

    /**
     * Returns the number of visits for the item.
     * Depending on the visitType it can either be item, bitstream, total, ...
     *
     * @param visitType the type of visits we want, from the item, bitstream, total
     * @param item the item from which we need our visits
     * @return the number of visits, or -1 when visitType is not one of the constants
     * @throws SolrServerException if the underlying Solr query fails
     */
    public long getNumberOfVisits(int visitType, Item item) throws SolrServerException {
        switch (visitType){
            case ITEM_VISITS:
                return SolrLogger.queryTotal("type: " + Constants.ITEM + " AND id: " + item.getID(), resolveFilterQueries()).getCount();
            case BITSTREAM_VISITS:
                return SolrLogger.queryTotal("type: " + Constants.BITSTREAM + " AND owningItem: " + item.getID(), resolveFilterQueries()).getCount();
            case TOTAL_VISITS:
                return getNumberOfVisits(ITEM_VISITS, item) + getNumberOfVisits(BITSTREAM_VISITS, item);
        }
        return -1;
    }

    /**
     * Joins all configured filter queries with " AND " so that each filter
     * restricts the result.
     * <p>
     * Bug fix: the previous separator condition
     * ({@code i != 0 && i != filters.size() - 1}) never placed " AND "
     * between the first two filters, producing malformed queries such as
     * "f0f1" for two filters and "f0f1 AND f2" for three.
     *
     * @return the combined filter query; empty string when no filters are set
     */
    private String resolveFilterQueries(){
        StringBuilder out = new StringBuilder();
        for (int i = 0; i < filters.size(); i++) {
            if (i > 0)
            {
                // Separator goes between every adjacent pair of filters.
                out.append(" AND ");
            }
            out.append(filters.get(i).toQuery());
        }
        return out.toString();
    }

    ///////////////////////
    // GETTERS & SETTERS //
    ///////////////////////
    public boolean isDisplayTotalViews() {
        return displayTotalViews;
    }

    public void setDisplayTotalViews(boolean displayTotalViews) {
        this.displayTotalViews = displayTotalViews;
    }

    public boolean isDisplayItemViews() {
        return displayItemViews;
    }

    public void setDisplayItemViews(boolean displayItemViews) {
        this.displayItemViews = displayItemViews;
    }

    public boolean isDisplayBitstreamViews() {
        return displayBitstreamViews;
    }

    public void setDisplayBitstreamViews(boolean displayBitstreamViews) {
        this.displayBitstreamViews = displayBitstreamViews;
    }

    public List<StatisticsFilter> getFilters() {
        return filters;
    }

    public void addFilter(StatisticsFilter filter){
        this.filters.add(filter);
    }

    public void setFilters(List<StatisticsFilter> filters) {
        this.filters = filters;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import java.util.Calendar;
import java.util.Date;
/**
* Represents a date facet for filtering.
*
* @author kevinvandevelde at atmire.com
* Date: 23-dec-2008
* Time: 9:44:57
*
*/
public class DatasetTimeGenerator extends DatasetGenerator {

    // Generator discriminator; always "time" unless overridden via setType.
    private String type = "time";
    // Interval granularity: "hour", "day", "month" or "year".
    private String dateType;
    // Relative interval start, e.g. "-6" (six dateType units in the past).
    private String startDate;
    // Relative interval end, e.g. "+1" (includes the current unit).
    private String endDate;
    // Optional absolute interval bounds (defensively copied on set/get).
    private Date actualStartDate;
    private Date actualEndDate;

    //TODO: process includetotal

    /** Creates a generator with no interval configured. */
    public DatasetTimeGenerator() {
    }

    /**
     * Sets the date interval.
     * For example if you wish to see the data from today to six months ago give
     * the following parameters:
     * datatype = "month"
     * start = "-6"
     * end = "+1" // the +1 indicates this month also
     *
     * @param dateType type can be days, months, years
     * @param start the start of the interval
     * @param end the end of the interval
     */
    public void setDateInterval(String dateType, String start, String end){
        this.startDate = start;
        this.endDate = end;
        this.dateType = dateType;
    }

    /**
     * Sets the date interval from two absolute dates, converting them into
     * the relative "-n"/"+n" string form stored in startDate/endDate.
     *
     * @param dateType granularity: "hour", "day", "month" or "year"
     * @param start interval start; must not be after {@code end}
     * @param end interval end
     * @throws IllegalArgumentException when {@code end} precedes {@code start}
     */
    public void setDateInterval(String dateType, Date start, Date end)
            throws IllegalArgumentException
    {
        // Defensive copies so callers cannot mutate our state afterwards.
        actualStartDate = (start == null ? null : new Date(start.getTime()));
        actualEndDate = (end == null ? null : new Date(end.getTime()));
        this.dateType = dateType;
        //Check if end comes before start
        Calendar startCal1 = Calendar.getInstance();
        Calendar endCal1 = Calendar.getInstance();
        if (startCal1 == null || endCal1 == null)
        {
            throw new IllegalStateException("Unable to create calendar instances");
        }
        startCal1.setTime(start);
        endCal1.setTime(end);
        if(endCal1.before(startCal1))
        {
            throw new IllegalArgumentException();
        }
        // TODO: ensure future dates are tested. Although we normally do not
        // have visits from the future.
        //Depending on our dateType check if we need to use days/months/years.
        int type = -1;
        if("year".equalsIgnoreCase(dateType)){
            type = Calendar.YEAR;
        }else
        if("month".equalsIgnoreCase(dateType)){
            type = Calendar.MONTH;
        }else
        if("day".equalsIgnoreCase(dateType)){
            type = Calendar.DATE;
        }else
        if("hour".equalsIgnoreCase(dateType)){
            type = Calendar.HOUR;
        }
        // NOTE(review): if dateType matches none of the four values above,
        // type remains -1 and the getTimeDifference calls below will pass an
        // invalid Calendar field — confirm callers only use the supported values.
        int difStart = getTimeDifference(start, Calendar.getInstance().getTime(), type);
        int difEnd = getTimeDifference(end, Calendar.getInstance().getTime(), type);
        // System.out.println(difStart + " " + difEnd);
        boolean endPos = false;
        if(difEnd == 0){
            //Includes the current
            difEnd = 1;
            endPos = true;
        }else
        if(0 < difEnd)
        {
            endPos = true;
        }
        else{
            difEnd++;
        }
        startDate = "" + difStart;
        //We need +1 so we can count the current month/year/...
        endDate = (endPos ? "+" : "") + difEnd;
    }

    public String getStartDate() {
        return startDate;
    }

    public void setStartDate(String startDate) {
        this.startDate = startDate;
    }

    public String getEndDate() {
        return endDate;
    }

    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }

    /** @return the configured granularity in upper case, e.g. "MONTH". */
    public String getDateType() {
        return dateType.toUpperCase();
    }

    /** @return a defensive copy of the absolute start date, or null. */
    public Date getActualStartDate() {
        return actualStartDate == null ? null : new Date(actualStartDate.getTime());
    }

    public void setActualStartDate(Date actualStartDate) {
        this.actualStartDate = (actualStartDate == null ? null : new Date(actualStartDate.getTime()));
    }

    /** @return a defensive copy of the absolute end date, or null. */
    public Date getActualEndDate() {
        return actualEndDate == null ? null : new Date(actualEndDate.getTime());
    }

    public void setActualEndDate(Date actualEndDate) {
        this.actualEndDate = (actualEndDate == null ? null : new Date(actualEndDate.getTime()));
    }

    public void setDateType(String dateType) {
        this.dateType = dateType;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    /** Get the difference between two Dates in terms of a given interval.
     * That is: if you specify the difference in months, you get back the
     * number of months between the dates.
     *
     * @param date1 the first date
     * @param date2 the other date
     * @param type Calendar.HOUR or .DATE or .MONTH
     * @return number of {@code type} intervals between {@code date1} and
     * {@code date2}
     */
    private int getTimeDifference(Date date1, Date date2, int type){
        // Sign of the result: negative when date1 precedes date2.
        int toAdd;
        int elapsed = 0;
        //We need calendar objects to compare
        Calendar cal1, cal2;
        cal1 = Calendar.getInstance();
        cal2 = Calendar.getInstance();
        cal1.setTime(date1);
        cal2.setTime(date2);
        // Zero every field finer than the requested granularity so only
        // whole intervals are counted.
        cal1.clear(Calendar.MILLISECOND);
        cal2.clear(Calendar.MILLISECOND);
        cal1.clear(Calendar.SECOND);
        cal2.clear(Calendar.SECOND);
        cal1.clear(Calendar.MINUTE);
        cal2.clear(Calendar.MINUTE);
        if(type != Calendar.HOUR){
            cal1.clear(Calendar.HOUR);
            cal2.clear(Calendar.HOUR);
            cal1.clear(Calendar.HOUR_OF_DAY);
            cal2.clear(Calendar.HOUR_OF_DAY);
            //yet i know calendar just won't clear his hours
            cal1.set(Calendar.HOUR_OF_DAY, 0);
            cal2.set(Calendar.HOUR_OF_DAY, 0);
        }
        if(type != Calendar.DATE){
            cal1.set(Calendar.DATE, 1);
            cal2.set(Calendar.DATE, 1);
        }
        if(type != Calendar.MONTH){
            cal1.clear(Calendar.MONTH);
            cal2.clear(Calendar.MONTH);
        }
        //Switch em if needed, remembering the sign of the difference
        if(cal1.after(cal2) || cal1.equals(cal2)){
            Calendar backup = cal1;
            cal1 = cal2;
            cal2 = backup;
            toAdd = 1;
        }else
        {
            toAdd = -1;
        }
        /*if(type != Calendar.YEAR){
            cal1.clear(Calendar.YEAR);
            cal2.clear(Calendar.YEAR);
        }
        */
        // Step the earlier calendar forward one interval at a time until it
        // reaches the later one, counting the steps.
        while(cal1.before(cal2)){
            cal1.add(type, 1);
            elapsed += toAdd;
        }
        return elapsed;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import org.dspace.core.Constants;
/**
* Describes the displayed representation of the statistics on a DSpaceObject
* and its children.
* @author TODO
*/
/**
 * Describes the displayed representation of the statistics on a DSpaceObject
 * and its children.
 */
public class DSORepresentation {

    /** The kind of DSpaceObject rendered (e.g. Constants.ITEM). */
    private Integer type;

    /** Upper bound on the number of children shown; null means no limit. */
    private Integer max;

    /** True to list each child separately, false to sum them together. */
    private Boolean separate;

    /** Maximum length at which child names are displayed. */
    private Integer nameLength;

    /** Creates a representation that defaults to showing items. */
    public DSORepresentation() {
        setType(Constants.ITEM);
    }

    /**
     * Creates a fully specified representation.
     *
     * @param type     object type, e.g. Constants.COLLECTION
     * @param max      maximum number of children to display
     * @param separate true if children's statistics are distinct; false if summed
     */
    public DSORepresentation(Integer type, Integer max, Boolean separate) {
        this.type = type;
        this.max = max;
        this.separate = separate;
    }

    public final Integer getType() {
        return type;
    }

    /**
     * @param type object type, e.g. Constants.COLLECTION
     */
    public final void setType(Integer type) {
        this.type = type;
    }

    /** @return the configured maximum, or -1 when none was set. */
    public final Integer getMax() {
        if (max == null) {
            return -1;
        }
        return max;
    }

    /**
     * @param max maximum number of children to display
     */
    public final void setMax(Integer max) {
        this.max = max;
    }

    public final Integer getNameLength() {
        return nameLength;
    }

    public final void setNameLength(Integer nameLength) {
        this.nameLength = nameLength;
    }

    /** @return true only when separation was explicitly enabled. */
    public final Boolean getSeparate() {
        return separate != null && separate.booleanValue();
    }

    /**
     * @param separate true for distinct child statistics; false to sum them
     */
    public final void setSeparate(Boolean separate) {
        this.separate = separate;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
/**
* Represents a single facet for filtering.
* Can be one of the axes in a table.
*
* @author kevinvandevelde at atmire.com
* Date: 23-dec-2008
* Time: 9:39:37
*
*/
/**
 * Base class for a single facet used to filter statistics; a generator can
 * serve as one of the axes in a table.
 */
public abstract class DatasetGenerator {

    /** Axis role of this generator: either CATEGORY or SERIE. */
    protected int datasetType;

    /** Whether a totals row/column should be produced for this facet. */
    protected boolean includeTotal = false;

    /** @return the axis role (CATEGORY or SERIE). */
    public int getDatasetType() {
        return datasetType;
    }

    /** @param datasetType the axis role (CATEGORY or SERIE) to assign. */
    public void setDatasetType(int datasetType) {
        this.datasetType = datasetType;
    }

    /** @return true when a total should be included. */
    public boolean isIncludeTotal() {
        return includeTotal;
    }

    /** @param includeTotal whether a total should be included. */
    public void setIncludeTotal(boolean includeTotal) {
        this.includeTotal = includeTotal;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.content.filter.StatisticsFilter;
/**
* Encapsulates all data to render the statistics
*
* @author kevinvandevelde at atmire.com
* Date: 23-dec-2008
* Time: 9:27:09
*
*/
/**
 * Encapsulates all data needed to render statistics, delegating query state
 * to a wrapped StatisticsData factory.
 */
public abstract class StatisticsDisplay {

    /** Identifier used to reference this display on the page. */
    private String id;
    /** Query factory holding the data behind this display. */
    private StatisticsData statisticsData;
    /** Title rendered above the statistics. */
    private String title;
    /** css information used to position the display object in a html page**/
    private List<String> css;

    protected StatisticsDisplay(StatisticsData statisticsData){
        this.statisticsData = statisticsData;
    }

    /** @return the render type identifier of this display. */
    public abstract String getType();

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    /** @return the generators configured on the underlying query factory. */
    public List<DatasetGenerator> getDatasetGenerators() {
        return statisticsData.getDatasetGenerators();
    }

    public void addDatasetGenerator(DatasetGenerator set){
        statisticsData.addDatasetGenerator(set);
    }

    public void addFilter(StatisticsFilter filter){
        statisticsData.addFilters(filter);
    }

    public List<StatisticsFilter> getFilters(){
        return statisticsData.getFilters();
    }

    public Dataset getDataset() {
        return statisticsData.getDataset();
    }

    public void setDataset(Dataset dataset) {
        statisticsData.setDataset(dataset);
    }

    /** Runs the underlying query and returns its result matrix. */
    public Dataset getDataset(Context context) throws SQLException, SolrServerException, IOException, ParseException {
        return statisticsData.createDataset(context);
    }

    /**
     * Records a css rule for this display; null styles are ignored and
     * surrounding whitespace is trimmed.
     */
    public void addCss(String style){
        if (style == null) {
            return;
        }
        if (css == null)
        {
            css = new ArrayList<String>();
        }
        css.add(style.trim());
    }

    /**
     * @return every recorded css rule concatenated, each terminated with a
     *         semicolon; empty string when no rules were added.
     */
    public String getCss() {
        if (css == null) {
            return "";
        }
        StringBuilder joined = new StringBuilder();
        for (int i = 0; i < css.size(); i++) {
            String rule = css.get(i);
            joined.append(rule);
            if (!rule.endsWith(";"))
            {
                joined.append(";");
            }
        }
        return joined.toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
/**
* Represents a simple string facet for filtering.
* Doesn't offer any special interaction.
*
* @author kevinvandevelde at atmire.com
* Date: 23-dec-2008
* Time: 12:44:27
*
*/
/**
 * A plain string facet (for example "country") with no special interaction.
 */
public class DatasetTypeGenerator extends DatasetGenerator {

    /** The facet field this generator represents (EXAMPLE: country). */
    private String type;

    /** Upper bound on the number of values shown. */
    private int max;

    /** Creates a generator with no facet configured yet. */
    public DatasetTypeGenerator() {
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public int getMax() {
        return max;
    }

    public void setMax(int max) {
        this.max = max;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
/**
* Encapsulates all data to render the statistics as a list
*
* @author kevinvandevelde at atmire.com
* Date: 23-dec-2008
* Time: 12:38:58
*
*/
/**
 * Renders statistics as a list.
 */
public class StatisticsListing extends StatisticsDisplay {

    /** @param statisticsData the query factory backing this listing. */
    public StatisticsListing(StatisticsData statisticsData) {
        super(statisticsData);
    }

    /** @return the render type identifier, always "listing". */
    @Override
    public String getType() {
        return "listing";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.content.filter.StatisticsFilter;
import org.dspace.core.Context;
import org.apache.solr.client.solrj.SolrServerException;
import java.util.List;
import java.util.ArrayList;
import java.sql.SQLException;
import java.io.IOException;
import java.text.ParseException;
/**
* Abstract "factory" for statistical queries.
* @author kevinvandevelde at atmire.com
* Date: 23-feb-2009
* Time: 12:37:04
*/
/**
 * Abstract "factory" for statistical queries: accumulates facet generators
 * and filters, then produces a Dataset on demand.
 */
public abstract class StatisticsData {

    /** Cached query result, if one has been set or computed. */
    private Dataset dataset;
    /** Facets (generators) configured for this query. */
    private List<DatasetGenerator> datasetgenerators;
    /** Filters restricting this query. */
    private List<StatisticsFilter> filters;

    /** Construct a blank query factory. */
    protected StatisticsData() {
        datasetgenerators = new ArrayList<DatasetGenerator>(2);
        filters = new ArrayList<StatisticsFilter>();
    }

    /** Wrap an existing Dataset in an unconfigured query factory. */
    protected StatisticsData(Dataset dataset) {
        this();
        this.dataset = dataset;
    }

    /** Augment the list of facets (generators). */
    public void addDatasetGenerator(DatasetGenerator set){
        datasetgenerators.add(set);
    }

    /** Augment the list of filters. */
    public void addFilters(StatisticsFilter filter){
        filters.add(filter);
    }

    /** Return the current list of generators. */
    public List<DatasetGenerator> getDatasetGenerators() {
        return datasetgenerators;
    }

    /** Return the current list of filters. */
    public List<StatisticsFilter> getFilters() {
        return filters;
    }

    /** Return the existing query result if there is one. */
    public Dataset getDataset() {
        return dataset;
    }

    /** Jam an existing query result in. */
    public void setDataset(Dataset dataset) {
        this.dataset = dataset;
    }

    /** Run the accumulated query and return its results. */
    public abstract Dataset createDataset(Context context) throws SQLException,
            SolrServerException, IOException, ParseException;
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Build queries to the statistical subsystem and create UI elements from the
* results. The underlying SOLR engine uses a text-based query language. The
* classes here map a structure of constraint objects into that language.
* <p>
* {@link org.dspace.statistics.content.StatisticsDataVisits} is somewhat like a
* factory for statistical queries. An instance is customized with
* DatasetGenerator instances to specify interesting facets of the data and with
 * filters to constrain which usage events are counted. The "factory methods" then produce arrays of values
* meeting the factory's criteria, either raw or formatted for presentation.
* <p>
* DatasetGenerator subclasses are available for constraining the results to a
* given DSpaceObject, object type, and time interval.
* <p>
* A StatisticsDataVisits object can be wrapped in a
* {@link org.dspace.statistics.content.StatisticsDisplay}
* subclass to format its content as a list or a table.
*/
package org.dspace.statistics.content;
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import org.dspace.content.*;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.ObjectCount;
import org.dspace.statistics.SolrLogger;
import org.dspace.statistics.content.filter.StatisticsFilter;
import org.dspace.statistics.content.filter.StatisticsSolrDateFilter;
import org.dspace.statistics.util.LocationUtils;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.handle.HandleManager;
import org.dspace.app.util.Util;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.util.ClientUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.sql.SQLException;
import java.text.ParseException;
import java.io.UnsupportedEncodingException;
/**
* Query factory associated with a DSpaceObject.
* Encapsulates the raw data, independent of rendering.
* <p>
* To use:
* <ol>
* <li>Instantiate, passing a reference to the interesting DSO.</li>
* <li>Add a {@link DatasetDSpaceObjectGenerator} for the appropriate object type.</li>
* <li>Add other generators as required to get the statistic you want.</li>
* <li>Add {@link org.dspace.statistics.content.filter filters} as required.</li>
* <li>{@link #createDataset(Context)} will run the query and return a result matrix.
* Subsequent calls skip the query and return the same matrix.</li>
* </ol>
*
* @author kevinvandevelde at atmire.com
* Date: 23-feb-2009
* Time: 12:25:20
*/
public class StatisticsDataVisits extends StatisticsData
{
/** Current DSpaceObject for which to generate the statistics. */
private DSpaceObject currentDso;

/** Construct a completely uninitialized query. */
public StatisticsDataVisits()
{
}

/**
 * Construct an empty query concerning a given DSpaceObject.
 *
 * @param dso the object whose visit statistics are wanted
 */
public StatisticsDataVisits(DSpaceObject dso)
{
    super();
    this.currentDso = dso;
}

/**
 * Construct an unconfigured query around a given DSO and Dataset.
 *
 * @param currentDso the object whose visit statistics are wanted
 * @param dataset an existing result matrix to wrap
 */
public StatisticsDataVisits(DSpaceObject currentDso, Dataset dataset)
{
    super(dataset);
    this.currentDso = currentDso;
}

/**
 * Construct an unconfigured query around a given Dataset (no DSO bound).
 *
 * @param dataset an existing result matrix to wrap
 */
public StatisticsDataVisits(Dataset dataset)
{
    super(dataset);
}
/**
 * Runs the accumulated query against Solr and returns the result matrix.
 * The result is effectively cached: when a Dataset has already been set,
 * it is returned immediately without querying.
 *
 * @param context DSpace context used to resolve object names for labels
 * @return a Dataset with one axis per configured generator; never null
 * @throws SQLException when resolving DSpace objects fails
 * @throws SolrServerException when a Solr query fails
 * @throws ParseException when date handling fails
 */
public Dataset createDataset(Context context) throws SQLException,
        SolrServerException, ParseException
{
    //Check if we already have one.
    //If we do then give it back.
    if(getDataset() != null)
    {
        return getDataset();
    }
    ///////////////////////////
    // 1. DETERMINE OUR AXIS //
    ///////////////////////////
    // Translate each configured generator into one or more DatasetQuery objects.
    ArrayList<DatasetQuery> datasetQueries = new ArrayList<DatasetQuery>();
    for (int i = 0; i < getDatasetGenerators().size(); i++) {
        DatasetGenerator dataSet = getDatasetGenerators().get(i);
        processAxis(dataSet, datasetQueries);
    }
    //Now lets determine our values.
    //First check if we have a date facet & if so find it.
    // At most the first two generators are inspected for a time axis.
    DatasetTimeGenerator dateFacet = null;
    if (getDatasetGenerators().get(0) instanceof DatasetTimeGenerator
            || (1 < getDatasetGenerators().size() && getDatasetGenerators()
                    .get(1) instanceof DatasetTimeGenerator))
    {
        if(getDatasetGenerators().get(0) instanceof DatasetTimeGenerator)
        {
            dateFacet = (DatasetTimeGenerator) getDatasetGenerators().get(0);
        }
        else
        {
            dateFacet = (DatasetTimeGenerator) getDatasetGenerators().get(1);
        }
    }
    /////////////////////////
    // 2. DETERMINE VALUES //
    /////////////////////////
    boolean showTotal = false;
    //Check if we need our total
    if ((getDatasetGenerators().get(0) != null && getDatasetGenerators()
            .get(0).isIncludeTotal())
            || (1 < getDatasetGenerators().size()
                    && getDatasetGenerators().get(1) != null && getDatasetGenerators()
                    .get(1).isIncludeTotal()))
    {
        showTotal = true;
    }
    // A time axis contributes a date filter: absolute dates take precedence
    // over the relative "-n"/"+n" string form.
    if (dateFacet != null && dateFacet.getActualStartDate() != null
            && dateFacet.getActualEndDate() != null)
    {
        StatisticsSolrDateFilter dateFilter = new StatisticsSolrDateFilter();
        dateFilter.setStartDate(dateFacet.getActualStartDate());
        dateFilter.setEndDate(dateFacet.getActualEndDate());
        dateFilter.setTypeStr(dateFacet.getDateType());
        addFilters(dateFilter);
    }
    else if (dateFacet != null && dateFacet.getStartDate() != null
            && dateFacet.getEndDate() != null)
    {
        StatisticsSolrDateFilter dateFilter = new StatisticsSolrDateFilter();
        dateFilter.setStartStr(dateFacet.getStartDate());
        dateFilter.setEndStr(dateFacet.getEndDate());
        dateFilter.setTypeStr(dateFacet.getDateType());
        addFilters(dateFilter);
    }
    //Determine our filterQuery: all filters ANDed together, each in parens.
    String filterQuery = null;
    for (int i = 0; i < getFilters().size(); i++) {
        if(filterQuery == null)
        {
            filterQuery = "";
        }
        StatisticsFilter filter = getFilters().get(i);
        filterQuery += "(" + filter.toQuery() + ")";
        if(i != (getFilters().size() -1))
        {
            filterQuery += " AND ";
        }
    }
    // System.out.println("FILTERQUERY: " + filterQuery);
    //We determine our values on the queries resolved above
    Dataset dataset = null;
    //Run over our queries.
    //First how many queries do we have ?
    if(dateFacet != null){
        //So do all the queries and THEN do the date facet
        for (int i = 0; i < datasetQueries.size(); i++) {
            DatasetQuery dataSetQuery = datasetQueries.get(i);
            if(dataSetQuery.getQueries().size() != 1){
                //TODO: do this
            }else{
                String query = dataSetQuery.getQueries().get(0).getQuery();
                if(dataSetQuery.getMax() == -1){
                    //We are asking from our current query all the visits faceted by date
                    ObjectCount[] results = SolrLogger.queryFacetDate(query, filterQuery, dataSetQuery.getMax(), dateFacet.getDateType(), dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal);
                    dataset = new Dataset(1, results.length);
                    //Now that we have our results put em in a matrix
                    for(int j = 0; j < results.length; j++){
                        dataset.setColLabel(j, results[j].getValue());
                        dataset.addValueToMatrix(0, j, results[j].getCount());
                    }
                    //TODO: change this !
                    //Now add the column label
                    dataset.setRowLabel(0, getResultName(dataSetQuery.getName(), dataSetQuery, context));
                    dataset.setRowLabelAttr(0, getAttributes(dataSetQuery.getName(), dataSetQuery, context));
                }else{
                    //We need to get the max objects and the next part of the query on them (next part beeing the datasettimequery
                    ObjectCount[] maxObjectCounts = SolrLogger.queryFacetField(query, filterQuery, dataSetQuery.getFacetField(), dataSetQuery.getMax(), false, null);
                    for (int j = 0; j < maxObjectCounts.length; j++) {
                        ObjectCount firstCount = maxObjectCounts[j];
                        // Narrow the query to this single facet value, then facet by date.
                        String newQuery = dataSetQuery.getFacetField() + ": " + ClientUtils.escapeQueryChars(firstCount.getValue()) + " AND " + query;
                        ObjectCount[] maxDateFacetCounts = SolrLogger.queryFacetDate(newQuery, filterQuery, dataSetQuery.getMax(), dateFacet.getDateType(), dateFacet.getStartDate(), dateFacet.getEndDate(), showTotal);
                        //Make sure we have a dataSet
                        if(dataset == null)
                        {
                            dataset = new Dataset(maxObjectCounts.length, maxDateFacetCounts.length);
                        }
                        //TODO: this is a very dirty fix change this ! ! ! ! ! !
                        dataset.setRowLabel(j, getResultName(firstCount.getValue(), dataSetQuery, context));
                        dataset.setRowLabelAttr(j, getAttributes(firstCount.getValue(), dataSetQuery, context));
                        for (int k = 0; k < maxDateFacetCounts.length; k++) {
                            ObjectCount objectCount = maxDateFacetCounts[k];
                            //No need to add this many times
                            if(j == 0)
                            {
                                dataset.setColLabel(k, objectCount.getValue());
                            }
                            dataset.addValueToMatrix(j, k, objectCount.getCount());
                        }
                    }
                    // Keep the time axis on the expected side of the table.
                    if(dataset != null && !(getDatasetGenerators().get(0) instanceof DatasetTimeGenerator)){
                        dataset.flipRowCols();
                    }
                }
            }
        }
    }else{
        //We do NOT have a date facet so just do queries after each other
        /*
        for (int i = 0; i < datasetQueries.size(); i++) {
            DatasetQuery datasetQuery = datasetQueries.get(i);
            if(datasetQuery.getQueries().size() != 1){
                //TODO: do this
            }else{
                String query = datasetQuery.getQueries().get(0);
                //Loop over the queries & do em
                // ObjectCount[] topCounts = SolrLogger.queryFacetField(query, );
            }
        }
        */
        DatasetQuery firsDataset = datasetQueries.get(0);
        //Do the first query
        ObjectCount[] topCounts1 = null;
        // if(firsDataset.getQueries().size() == 1){
        topCounts1 = queryFacetField(firsDataset, firsDataset.getQueries().get(0).getQuery(), filterQuery);
        // }else{
        // TODO: do this
        // }
        //Check if we have more queries that need to be done
        if(datasetQueries.size() == 2){
            DatasetQuery secondDataSet = datasetQueries.get(1);
            //Now do the second one
            ObjectCount[] topCounts2 = queryFacetField(secondDataSet, secondDataSet.getQueries().get(0).getQuery(), filterQuery);
            //Now that have results for both of them lets do x.y queries
            List<String> facetQueries = new ArrayList<String>();
            for (ObjectCount count2 : topCounts2) {
                String facetQuery = secondDataSet.getFacetField() + ":" + ClientUtils.escapeQueryChars(count2.getValue());
                //Check if we also have a type present (if so this should be put into the query
                if ("id".equals(secondDataSet.getFacetField()) && secondDataSet.getQueries().get(0).getDsoType() != -1)
                {
                    facetQuery += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                }
                facetQueries.add(facetQuery);
            }
            for (int i = 0; i < topCounts1.length; i++){
                ObjectCount count1 = topCounts1[i];
                // NOTE(review): currentResult is never used below — candidate for removal.
                ObjectCount[] currentResult = new ObjectCount[topCounts2.length];
                //Make sure we have a dataSet
                if(dataset == null)
                {
                    dataset = new Dataset(topCounts2.length, topCounts1.length);
                }
                dataset.setColLabel(i, getResultName(count1.getValue(), firsDataset, context));
                dataset.setColLabelAttr(i, getAttributes(count1.getValue(), firsDataset, context));
                String query = firsDataset.getFacetField() + ":" + ClientUtils.escapeQueryChars(count1.getValue());
                //Check if we also have a type present (if so this should be put into the query
                if("id".equals(firsDataset.getFacetField()) && firsDataset.getQueries().get(0).getDsoType() != -1)
                {
                    query += " AND type:" + firsDataset.getQueries().get(0).getDsoType();
                }
                Map<String, Integer> facetResult = SolrLogger.queryFacetQuery(query, filterQuery, facetQueries);
                //TODO: the show total
                //No need to add this many times
                //TODO: replace this with the value to be displayed
                for (int j = 0; j < topCounts2.length; j++) {
                    ObjectCount count2 = topCounts2[j];
                    if(i == 0) {
                        dataset.setRowLabel(j, getResultName(count2.getValue(), secondDataSet, context));
                        dataset.setRowLabelAttr(j, getAttributes(count2.getValue(), secondDataSet, context));
                    }
                    //Get our value: the facet-result key is the same string as the query
                    String facetQuery = secondDataSet.getFacetField() + ":" + ClientUtils.escapeQueryChars(count2.getValue());
                    //Check if we also have a type present (if so this should be put into the query
                    if ("id".equals(secondDataSet.getFacetField()) && secondDataSet.getQueries().get(0).getDsoType() != -1)
                    {
                        facetQuery += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                    }
                    //We got our query so now get the value
                    dataset.addValueToMatrix(j, i, facetResult.get(facetQuery));
                }
                /*
                for (int j = 0; j < topCounts2.length; j++) {
                    ObjectCount count2 = topCounts2[j];
                    String query = firsDataset.getFacetField() + ":" + count1.getValue();
                    //Check if we also have a type present (if so this should be put into the query
                    if("id".equals(firsDataset.getFacetField()) && firsDataset.getQueries().get(0).getDsoType() != -1)
                        query += " AND type:" + firsDataset.getQueries().get(0).getDsoType();
                    query += " AND " + secondDataSet.getFacetField() + ":" + count2.getValue();
                    //Check if we also have a type present (if so this should be put into the query
                    if("id".equals(secondDataSet.getFacetField()) && secondDataSet.getQueries().get(0).getDsoType() != -1)
                        query += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                    long count = SolrLogger.queryFacetQuery(query, filterQuery);
                    //TODO: the show total
                    //No need to add this many times
                    //TODO: replace this with the value to be displayed
                    if(i == 0) {
                        dataset.setRowLabel(j, getResultName(count2.getValue(), secondDataSet, context));
                        dataset.setRowLabelAttr(j, getAttributes(count2.getValue(), secondDataSet, context));
                    }
                    dataset.addValueToMatrix(j, i, count);
                }
                */
            }
            // System.out.println("BOTH");
        } else{
            //Make sure we have a dataSet
            dataset = new Dataset(1, topCounts1.length);
            for (int i = 0; i < topCounts1.length; i++) {
                ObjectCount count = topCounts1[i];
                dataset.setColLabel(i, getResultName(count.getValue(), firsDataset, context));
                dataset.setColLabelAttr(i, getAttributes(count.getValue(), firsDataset, context));
                dataset.addValueToMatrix(0, i, count.getCount());
            }
        }
    }
    if(dataset != null){
        dataset.setRowTitle("Dataset 1");
        dataset.setColTitle("Dataset 2");
    }else
    {
        dataset = new Dataset(0, 0);
    }
    return dataset;
}
/**
 * Translates one configured generator into DatasetQuery objects and appends
 * them to {@code queries}. DSpaceObject generators yield one query per
 * DSORepresentation (faceted on "id"); type generators yield a single query
 * faceted on the generator's type string. Other generator kinds are ignored.
 *
 * @param datasetGenerator the axis to translate
 * @param queries accumulator that receives the produced queries
 * @throws SQLException when resolving DSpace objects fails
 */
private void processAxis(DatasetGenerator datasetGenerator, List<DatasetQuery> queries) throws SQLException {
    if(datasetGenerator instanceof DatasetDSpaceObjectGenerator){
        DatasetDSpaceObjectGenerator dspaceObjAxis = (DatasetDSpaceObjectGenerator) datasetGenerator;
        //Get the types involved
        List<DSORepresentation> dsoRepresentations = dspaceObjAxis.getDsoRepresentations();
        for (int i = 0; i < dsoRepresentations.size(); i++){
            DatasetQuery datasetQuery = new DatasetQuery();
            Integer dsoType = dsoRepresentations.get(i).getType();
            boolean seperate = dsoRepresentations.get(i).getSeparate();
            Integer dsoLength = dsoRepresentations.get(i).getNameLength();
            //Check if our type is our current object
            if(currentDso != null && dsoType == currentDso.getType()){
                // The representation targets the bound DSO itself.
                Query query = new Query();
                query.setDso(currentDso.getID(), currentDso.getType(), dsoLength);
                datasetQuery.addQuery(query);
            }else{
                //TODO: only do this for bitstreams from an item
                Query query = new Query();
                if(currentDso != null && seperate && dsoType == Constants.BITSTREAM){
                    //CURRENTLY THIS IS ONLY POSSIBLE FOR AN ITEM ! ! ! ! ! ! !
                    //We need to get the separate bitstreams from our item and make a query for each of them
                    Item item = (Item) currentDso;
                    for (int j = 0; j < item.getBundles().length; j++) {
                        Bundle bundle = item.getBundles()[j];
                        for (int k = 0; k < bundle.getBitstreams().length; k++) {
                            Bitstream bitstream = bundle.getBitstreams()[k];
                            if(!bitstream.getFormat().isInternal()){
                                //Add a separate query for each bitstream
                                // NOTE(review): setDso is called repeatedly on the SAME
                                // Query object, so each call appears to overwrite the
                                // previous bitstream rather than adding one query per
                                // bitstream as the comment claims — confirm against the
                                // Query API.
                                query.setDso(bitstream.getID(), bitstream.getType(), dsoLength);
                            }
                        }
                    }
                } else {
                    //We have something else than our current object.
                    //So we need some kind of children from it, so put this in our query
                    query.setOwningDso(currentDso);
                    query.setDsoLength(dsoLength);
                    // Human-readable axis name derived from the child type.
                    String title = "";
                    switch(dsoType){
                        case Constants.BITSTREAM:
                            title = "Files";
                            break;
                        case Constants.ITEM:
                            title = "Items";
                            break;
                        case Constants.COLLECTION:
                            title = "Collections";
                            break;
                        case Constants.COMMUNITY:
                            title = "Communities";
                            break;
                    }
                    datasetQuery.setName(title);
                    //Put the type in so we only get the children of the type specified
                    query.setDsoType(dsoType);
                }
                datasetQuery.addQuery(query);
            }
            datasetQuery.setFacetField("id");
            datasetQuery.setMax(dsoRepresentations.get(i).getMax());
            queries.add(datasetQuery);
        }
    }else
    if(datasetGenerator instanceof DatasetTypeGenerator){
        DatasetTypeGenerator typeAxis = (DatasetTypeGenerator) datasetGenerator;
        DatasetQuery datasetQuery = new DatasetQuery();
        //First make sure our query is in order
        Query query = new Query();
        if(currentDso != null)
        {
            query.setDso(currentDso.getID(), currentDso.getType());
        }
        datasetQuery.addQuery(query);
        //Then add the rest: facet on the generator's type string (e.g. "country")
        datasetQuery.setMax(typeAxis.getMax());
        datasetQuery.setFacetField(typeAxis.getType());
        datasetQuery.setName(typeAxis.getType());
        queries.add(datasetQuery);
    }
}
/**
* Gets the name of the DSO (example for collection: ((Collection) dso).getname();
* @return the name of the given DSO
*/
private String getResultName(String value, DatasetQuery datasetQuery,
Context context) throws SQLException
{
if("continent".equals(datasetQuery.getName())){
value = LocationUtils.getContinentName(value);
}else
if("countryCode".equals(datasetQuery.getName())){
value = LocationUtils.getCountryName(value);
}else{
Query query = datasetQuery.getQueries().get(0);
//TODO: CHANGE & THROW AWAY THIS ENTIRE METHOD
//Check if int
int dsoId;
int dsoLength = query.getDsoLength();
try {
dsoId = Integer.parseInt(value);
}catch(Exception e){
dsoId = -1;
}
if(dsoId == -1 && query.getDsoId() != -1 && value == null)
{
dsoId = query.getDsoId();
}
if(dsoId != -1 && query.dsoType != -1){
DSpaceObject dso = DSpaceObject.find(context, query.getDsoType(), dsoId);
if(dso != null){
switch(dso.getType()){
case Constants.BITSTREAM:
Bitstream bit = (Bitstream) dso;
return bit.getName();
case Constants.ITEM:
Item item = (Item) dso;
String name = "untitled";
DCValue[] vals = item.getMetadata("dc", "title", null, Item.ANY);
if(vals != null && 0 < vals.length)
{
name = vals[0].value;
}
if(dsoLength != -1 && name.length() > dsoLength){
//Cut it off at the first space
int firstSpace = name.indexOf(' ', dsoLength);
if(firstSpace != -1){
name = name.substring(0, firstSpace) + " ...";
}
}
return name;
case Constants.COLLECTION:
Collection coll = (Collection) dso;
name = coll.getName();
if(dsoLength != -1 && name.length() > dsoLength){
//Cut it off at the first space
int firstSpace = name.indexOf(' ', dsoLength);
if(firstSpace != -1){
name = name.substring(0, firstSpace) + " ...";
}
}
return name;
case Constants.COMMUNITY:
Community comm = (Community) dso;
name = comm.getName();
if(dsoLength != -1 && name.length() > dsoLength){
//Cut it off at the first space
int firstSpace = name.indexOf(' ', dsoLength);
if(firstSpace != -1){
name = name.substring(0, firstSpace) + " ...";
}
}
return name;
}
}
}
}
return value;
}
    /**
     * Builds render attributes (currently only a "url" link) for a facet value
     * that refers to a DSpace object.
     *
     * @param value the raw facet value, expected to be a DSO id as a string
     * @param datasetQuery the query the value originated from; its first Query
     *        supplies the object type and a fallback id
     * @param context DSpace context used for the object lookup
     * @return a map containing a "url" entry when the object could be
     *         resolved, otherwise an empty map
     * @throws SQLException if the object lookup fails
     */
    private Map<String, String> getAttributes(String value,
            DatasetQuery datasetQuery, Context context) throws SQLException
    {
        HashMap<String, String> attrs = new HashMap<String, String>();
        Query query = datasetQuery.getQueries().get(0);
        //TODO: CHANGE & THROW AWAY THIS ENTIRE METHOD
        //Check if int
        int dsoId;
        try {
            dsoId = Integer.parseInt(value);
        }catch(Exception e){
            // Not a numeric id; the fallback below may still supply one.
            dsoId = -1;
        }
        if(dsoId == -1 && query.getDsoId() != -1 && value == null)
        {
            // No parsable value: fall back to the id configured on the query.
            dsoId = query.getDsoId();
        }
        if(dsoId != -1 && query.dsoType != -1){
            DSpaceObject dso = DSpaceObject.find(context, query.getDsoType(), dsoId);
            if(dso != null){
                switch(dso.getType()){
                    case Constants.BITSTREAM:
                        Bitstream bit = (Bitstream) dso;
                        //Get our owning item (first item of the first bundle)
                        Item owningItem = null;
                        Bundle[] bunds = bit.getBundles();
                        if(0 < bunds.length && 0 < bunds[0].getItems().length)
                        {
                            owningItem = bunds[0].getItems()[0];
                        }
                        // If possible reference this bitstream via a handle, however this may
                        // be null if a handle has not yet been assigned. In this case reference the
                        // item by its internal id. In the last case where the bitstream is not associated
                        // with an item (such as a community logo) then reference the bitstreamID directly.
                        String identifier = null;
                        if (owningItem != null && owningItem.getHandle() != null)
                        {
                            identifier = "handle/" + owningItem.getHandle();
                        }
                        else if (owningItem != null)
                        {
                            identifier = "item/" + owningItem.getID();
                        }
                        else
                        {
                            identifier = "id/" + bit.getID();
                        }
                        String url = ConfigurationManager.getProperty("dspace.url") + "/bitstream/"+identifier+"/";
                        // If we can put the pretty name of the bitstream on the end of the URL
                        try
                        {
                            if (bit.getName() != null)
                            {
                                url += Util.encodeBitstreamName(bit.getName(), "UTF-8");
                            }
                        }
                        catch (UnsupportedEncodingException uee)
                        {
                            // Just ignore it: we don't have to have a pretty
                            // name on the end of the URL because the sequence id will
                            // locate it. However it means that links in this file might
                            // not work....
                        }
                        // The sequence id makes the link unambiguous even without the name.
                        url += "?sequence="+bit.getSequenceID();
                        attrs.put("url", url);
                        break;
                    case Constants.ITEM:
                        Item item = (Item) dso;
                        attrs.put("url", HandleManager.resolveToURL(context, item.getHandle()));
                        break;
                    case Constants.COLLECTION:
                        Collection coll = (Collection) dso;
                        attrs.put("url", HandleManager.resolveToURL(context, coll.getHandle()));
                        break;
                    case Constants.COMMUNITY:
                        Community comm = (Community) dso;
                        attrs.put("url", HandleManager.resolveToURL(context, comm.getHandle()));
                        break;
                }
            }
        }
        return attrs;
    }
private ObjectCount[] queryFacetField(DatasetQuery dataset, String query,
String filterQuery) throws SolrServerException
{
String facetType = dataset.getFacetField() == null ? "id" : dataset
.getFacetField();
return SolrLogger.queryFacetField(query, filterQuery, facetType,
dataset.getMax(), false, null);
}
public static class DatasetQuery {
private String name;
private int max;
private String facetField;
private List<Query> queries;
public DatasetQuery() {
queries = new ArrayList<Query>();
}
public int getMax() {
return max;
}
public void setMax(int max) {
this.max = max;
}
public void addQuery(Query q){
queries.add(q);
}
public List<Query> getQueries() {
return queries;
}
public String getFacetField() {
return facetField;
}
public void setFacetField(String facetField) {
this.facetField = facetField;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
public class Query {
private int dsoType;
private int dsoId;
private int dsoLength;
private DSpaceObject owningDso;
public Query() {
dsoId = -1;
dsoType = -1;
dsoLength = -1;
owningDso = null;
}
public void setOwningDso(DSpaceObject owningDso) {
this.owningDso = owningDso;
}
public void setDso(int dsoId, int dsoType){
this.dsoId = dsoId;
this.dsoType = dsoType;
}
public void setDso(int dsoId, int dsoType, int length){
this.dsoId = dsoId;
this.dsoType = dsoType;
this.dsoLength = length;
}
public void setDsoType(int dsoType) {
this.dsoType = dsoType;
}
public int getDsoLength() {
return dsoLength;
}
public void setDsoLength(int dsoLength) {
this.dsoLength = dsoLength;
}
public int getDsoId() {
return dsoId;
}
public int getDsoType(){
return dsoType;
}
public String getQueryResultName(){
//TODO: This has got to be done differently in case we have a string query.
//This is just a temporary solution so we can get on with our work.
return dsoType + ":" + dsoId;
}
public String getQuery() {
//Time to construct our query
String query = "";
//Check (& add if needed) the dsoType
if(dsoType != -1)
{
query += "type: " + dsoType;
}
//Check (& add if needed) the dsoId
if(dsoId != -1)
{
query += (query.equals("") ? "" : " AND ") + " id:" + dsoId;
}
if(owningDso != null && currentDso != null){
query += (query.equals("") ? "" : " AND " );
String owningStr = "";
switch(currentDso.getType()){
case Constants.ITEM:
owningStr = "owningItem";
break;
case Constants.COLLECTION:
owningStr = "owningColl";
break;
case Constants.COMMUNITY:
owningStr = "owningComm";
break;
}
owningStr += ":" + currentDso.getID();
query += owningStr;
}
if(query.equals(""))
{
query = "*:*";
}
return query;
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import java.util.ArrayList;
import java.util.List;
/**
 * Represents a dspace object based facet for filtering: collects the
 * child-object representations that should be shown for a DSpace object.
 *
 * @author kevinvandevelde at atmire.com
 * Date: 23-dec-2008
 * Time: 11:38:20
 *
 */
public class DatasetDSpaceObjectGenerator extends DatasetGenerator {

    /** The children of our dspaceobject to be shown. */
    private final List<DSORepresentation> dsoRepresentations = new ArrayList<DSORepresentation>();

    public DatasetDSpaceObjectGenerator() {
    }

    /** Registers a pre-built child representation. */
    public void addDsoChild(DSORepresentation representation){
        dsoRepresentations.add(representation);
    }

    /** Convenience overload: builds a representation and registers it. */
    public void addDsoChild(int type, int max, boolean seperate, int nameLength){
        DSORepresentation child = new DSORepresentation(type, max, seperate);
        child.setNameLength(nameLength);
        dsoRepresentations.add(child);
    }

    public List<DSORepresentation> getDsoRepresentations() {
        return dsoRepresentations;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content.filter;
import org.dspace.statistics.SolrLogger;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Calendar;
/**
 * Encapsulate a range of dates for Solr query filtering.
 * The range may be given either as concrete Date objects or as relative
 * string offsets (e.g. "-3", "+1") combined with a granularity type.
 * Created by IntelliJ IDEA.
 * User: kevinvandevelde
 * Date: 13-mrt-2009
 * Time: 13:14:14
 */
public class StatisticsSolrDateFilter implements StatisticsFilter {
    // Resolved range bounds; computed lazily from the string forms by toQuery().
    private Date startDate;
    private Date endDate;
    // Relative offsets such as "-3" or "+1", interpreted in units of typeStr.
    private String startStr;
    private String endStr;
    // Range granularity: "day", "month" or "year" (case-insensitive).
    private String typeStr;

    public StatisticsSolrDateFilter() {
    }

    /** Set the start date as a string expression.
     * Must be paired with {@link #setEndStr(String)}.
     */
    public void setStartStr(String startStr) {
        this.startStr = startStr;
    }

    /** Set the end date as a string expression.
     * Must be paired with {@link #setStartStr(String)}.
     */
    public void setEndStr(String endStr) {
        this.endStr = endStr;
    }

    /** Set the range granularity: DAY, MONTH, or YEAR. */
    public void setTypeStr(String typeStr) {
        this.typeStr = typeStr;
    }

    /** Set the start date as a Date object.
     * Must be paired with {@link #setEndDate(Date)}.
     */
    public void setStartDate(Date startDate) {
        // Defensive copy: Date is mutable.
        this.startDate = (startDate == null ? null : new Date(startDate.getTime()));
    }

    /** Set the end date as a Date object.
     * Must be paired with {@link #setStartDate(Date)}.
     */
    public void setEndDate(Date endDate) {
        this.endDate = (endDate == null ? null : new Date(endDate.getTime()));
    }

    /** Convert the date range to a filter expression.
     * NOTE: when constructed from strings, the concrete dates are resolved
     * and cached in startDate/endDate on the first call, so later setter
     * changes to the string form are ignored.
     * NOTE(review): when using the string form, typeStr must have been set,
     * otherwise this throws a NullPointerException — confirm all callers.
     * @return Solr date filter expression
     */
    public String toQuery() {
        if(startDate == null || endDate == null){
            // We have got strings instead of dates so calculate our dates out
            // of these strings
            Calendar startCal = Calendar.getInstance();
            // Zero the time-of-day so the range starts at midnight.
            startCal.clear(Calendar.MILLISECOND);
            startCal.clear(Calendar.SECOND);
            startCal.clear(Calendar.MINUTE);
            startCal.set(Calendar.HOUR_OF_DAY, 0);
            int dateType = -1;
            if(typeStr.equalsIgnoreCase("day")) {
                dateType = Calendar.DATE;
            } else if(typeStr.equalsIgnoreCase("month")) {
                // Month ranges start on the first of the month.
                dateType = Calendar.MONTH;
                startCal.set(Calendar.DATE, 1);
            } else if(typeStr.equalsIgnoreCase("year")) {
                // Year ranges start on the first of January.
                startCal.clear(Calendar.MONTH);
                startCal.set(Calendar.DATE, 1);
                dateType = Calendar.YEAR;
            } else
            {
                // Unknown granularity: produce no filter at all.
                return "";
            }
            Calendar endCal = (Calendar) startCal.clone();
            if (startDate == null)
            {
                // Integer.parseInt cannot digest a leading '+', so strip it.
                if(startStr.startsWith("+"))
                {
                    startStr = startStr.substring(startStr.indexOf('+') + 1);
                }
                startCal.add(dateType, Integer.parseInt(startStr));
                startDate = startCal.getTime();
            }
            if (endDate == null)
            {
                if(endStr.startsWith("+"))
                {
                    endStr = endStr.substring(endStr.indexOf('+') + 1);
                }
                endCal.add(dateType, Integer.parseInt(endStr));
                endDate = endCal.getTime();
            }
        }
        //Parse the dates
        SimpleDateFormat formatter = new SimpleDateFormat(SolrLogger.DATE_FORMAT_8601);
        String startDateParsed = formatter.format(startDate);
        String endDateParsed = formatter.format(endDate);
        //Create our string
        return "time:[" + startDateParsed + " TO " + endDateParsed + "]";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content.filter;
/**
 * A wrapper for some kind of Solr filter expression.
 *
 * @author kevinvandevelde at atmire.com
 * Date: 12-mrt-2009
 * Time: 10:36:03
 */
public interface StatisticsFilter {

    /**
     * Convert this filter's configuration to a query string fragment.
     *
     * @return the Solr filter-query fragment this filter represents
     */
    String toQuery();
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
/**
 * Encapsulates all data to render the statistics as a table.
 *
 * @author kevinvandevelde at atmire.com
 * Date: 23-dec-2008
 * Time: 9:27:52
 *
 */
public class StatisticsTable extends StatisticsDisplay {

    public StatisticsTable(StatisticsData statisticsData){
        super(statisticsData);
    }

    /** @return the render-type discriminator for this display: "table". */
    @Override
    public String getType() {
        return "table";
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import com.maxmind.geoip.Location;
import com.maxmind.geoip.LookupService;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.MapSolrParams;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager;
import org.dspace.eperson.EPerson;
import org.dspace.statistics.util.DnsLookup;
import org.dspace.statistics.util.LocationUtils;
import org.dspace.statistics.util.SpiderDetector;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* Static holder for a HttpSolrClient connection pool to issue
* usage logging events to Solr from DSpace libraries, and some static query
* composers.
*
* @author ben at atmire.com
* @author kevinvandevelde at atmire.com
* @author mdiggory at atmire.com
*/
public class SolrLogger
{
    private static Logger log = Logger.getLogger(SolrLogger.class);

    // Shared Solr connection; stays null when solr.log.server is not configured
    // or the smoke-test query below fails.
    private static final CommonsHttpSolrServer solr;

    public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";

    public static final String DATE_FORMAT_DCDATE = "yyyy-MM-dd'T'HH:mm:ss'Z'";

    // GeoIP lookup service; stays null when solr.dbfile is absent or unreadable.
    private static final LookupService locationService;

    private static final boolean useProxies;

    // Maps a Solr field name to the metadata field stored under it, read from
    // the solr.metadata.item.N configuration entries (format "field:dc.x.y").
    private static Map<String, String> metadataStorageInfo;

    static
    {
        log.info("solr.spidersfile:" + ConfigurationManager.getProperty("solr.spidersfile"));
        log.info("solr.log.server:" + ConfigurationManager.getProperty("solr.log.server"));
        log.info("solr.dbfile:" + ConfigurationManager.getProperty("solr.dbfile"));
        CommonsHttpSolrServer server = null;
        if (ConfigurationManager.getProperty("solr.log.server") != null)
        {
            try
            {
                server = new CommonsHttpSolrServer(ConfigurationManager.getProperty("solr.log.server"));
                // Smoke-test the connection so a broken configuration is
                // detected (and logged) at class-load time.
                SolrQuery solrQuery = new SolrQuery()
                        .setQuery("type:2 AND id:1");
                server.query(solrQuery);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        solr = server;
        // Read in the file so we don't have to do it all the time
        //spiderIps = SpiderDetector.getSpiderIpAddresses();
        LookupService service = null;
        // Get the db file for the location
        String dbfile = ConfigurationManager.getProperty("solr.dbfile");
        if (dbfile != null)
        {
            try
            {
                service = new LookupService(dbfile,
                        LookupService.GEOIP_STANDARD);
            }
            catch (IOException e)
            {
                // GeoIP database unreadable: location lookups stay disabled.
                e.printStackTrace();
            }
        }
        else
        {
            // System.out.println("NO SOLR DB FILE !");
        }
        locationService = service;
        if ("true".equals(ConfigurationManager.getProperty("useProxies")))
        {
            useProxies = true;
        }
        else
        {
            useProxies = false;
        }
        log.info("useProxies=" + useProxies);
        metadataStorageInfo = new HashMap<String, String>();
        int count = 1;
        String metadataVal;
        // Entries look like: solr.metadata.item.N = <solrField>:<dc.element.qualifier>
        while ((metadataVal = ConfigurationManager.getProperty("solr.metadata.item." + count)) != null)
        {
            String storeVal = metadataVal.split(":")[0];
            String metadataField = metadataVal.split(":")[1];
            metadataStorageInfo.put(storeVal, metadataField);
            log.info("solr.metadata.item." + count + "=" + metadataVal);
            count++;
        }
    }
    /**
     * Store a usage event into Solr.
     * Silently does nothing when either the Solr connection or the GeoIP
     * service failed to initialise, or when the request comes from a spider
     * and bot logging is disabled.
     *
     * NOTE(review): the isBot and userAgent fields are only added inside the
     * valid-GeoIP-location branch below — presumably unintended; confirm.
     *
     * @param dspaceObject the object used.
     * @param request the current request context.
     * @param currentUser the current session's user.
     */
    public static void post(DSpaceObject dspaceObject, HttpServletRequest request,
            EPerson currentUser)
    {
        // Without a Solr connection and a GeoIP database there is nothing to log.
        if (solr == null || locationService == null)
        {
            return;
        }
        boolean isSpiderBot = SpiderDetector.isSpider(request);
        try
        {
            if(isSpiderBot &&
                    !ConfigurationManager.getBooleanProperty("solr.statistics.logBots",true))
            {
                return;
            }
            SolrInputDocument doc1 = new SolrInputDocument();
            // Save our basic info that we already have
            String ip = request.getRemoteAddr();
            if(isUseProxies() && request.getHeader("X-Forwarded-For") != null)
            {
                /* This header is a comma delimited list */
                for(String xfip : request.getHeader("X-Forwarded-For").split(","))
                {
                    /* proxy itself will sometime populate this header with the same value in
                       remote address. ordering in spec is vague, we'll just take the last
                       not equal to the proxy
                    */
                    // NOTE(review): this tests whether the whole header contains
                    // the current ip, rather than comparing xfip against the
                    // proxy address — looks buggy; confirm intended behaviour.
                    if(!request.getHeader("X-Forwarded-For").contains(ip))
                    {
                        ip = xfip.trim();
                    }
                }
            }
            doc1.addField("ip", ip);
            doc1.addField("id", dspaceObject.getID());
            doc1.addField("type", dspaceObject.getType());
            // Save the current time
            doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601));
            if (currentUser != null)
            {
                doc1.addField("epersonid", currentUser.getID());
            }
            try
            {
                // Reverse DNS is best effort; failure only loses the dns field.
                String dns = DnsLookup.reverseDns(ip);
                doc1.addField("dns", dns.toLowerCase());
            }
            catch (Exception e)
            {
                log.error("Failed DNS Lookup for IP:" + ip);
                log.debug(e.getMessage(),e);
            }
            // Save the location information if valid, save the event without
            // location information if not valid
            Location location = locationService.getLocation(ip);
            if (location != null
                    && !("--".equals(location.countryCode)
                    && location.latitude == -180 && location.longitude == -180))
            {
                try
                {
                    doc1.addField("continent", LocationUtils
                            .getContinentCode(location.countryCode));
                }
                catch (Exception e)
                {
                    System.out
                            .println("COUNTRY ERROR: " + location.countryCode);
                }
                doc1.addField("countryCode", location.countryCode);
                doc1.addField("city", location.city);
                doc1.addField("latitude", location.latitude);
                doc1.addField("longitude", location.longitude);
                doc1.addField("isBot",isSpiderBot);
                if(request.getHeader("User-Agent") != null)
                {
                    doc1.addField("userAgent", request.getHeader("User-Agent"));
                }
            }
            if (dspaceObject instanceof Item)
            {
                Item item = (Item) dspaceObject;
                // Store the metadata configured via solr.metadata.item.N
                for (Object storedField : metadataStorageInfo.keySet())
                {
                    String dcField = metadataStorageInfo
                            .get(storedField);
                    DCValue[] vals = item.getMetadata(dcField.split("\\.")[0],
                            dcField.split("\\.")[1], dcField.split("\\.")[2],
                            Item.ANY);
                    for (DCValue val1 : vals)
                    {
                        String val = val1.value;
                        doc1.addField(String.valueOf(storedField), val);
                        // Lower-cased copy for case-insensitive searching.
                        doc1.addField(storedField + "_search", val
                                .toLowerCase());
                    }
                }
            }
            storeParents(doc1, dspaceObject);
            solr.add(doc1);
            //commits are executed automatically using the solr autocommit
            // solr.commit(false, false);
        }
        catch (RuntimeException re)
        {
            throw re;
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }
    }
    /**
     * @return the configured Solr-field-to-metadata-field mapping.
     * NOTE(review): this exposes the internal mutable map directly, so callers
     * can alter what gets logged — consider returning an unmodifiable view.
     */
    public static Map<String, String> getMetadataStorageInfo()
    {
        return metadataStorageInfo;
    }
/**
* Method just used to log the parents.
* <ul>
* <li>Community log: owning comms.</li>
* <li>Collection log: owning comms & their comms.</li>
* <li>Item log: owning colls/comms.</li>
* <li>Bitstream log: owning item/colls/comms.</li>
* </ul>
*
* @param doc1
* the current SolrInputDocument
* @param dso
* the current dspace object we want to log
* @throws java.sql.SQLException
* ignore it
*/
public static void storeParents(SolrInputDocument doc1, DSpaceObject dso)
throws SQLException
{
if (dso instanceof Community)
{
Community comm = (Community) dso;
while (comm != null && comm.getParentCommunity() != null)
{
comm = comm.getParentCommunity();
doc1.addField("owningComm", comm.getID());
}
}
else if (dso instanceof Collection)
{
Collection coll = (Collection) dso;
for (int i = 0; i < coll.getCommunities().length; i++)
{
Community community = coll.getCommunities()[i];
doc1.addField("owningComm", community.getID());
storeParents(doc1, community);
}
}
else if (dso instanceof Item)
{
Item item = (Item) dso;
for (int i = 0; i < item.getCollections().length; i++)
{
Collection collection = item.getCollections()[i];
doc1.addField("owningColl", collection.getID());
storeParents(doc1, collection);
}
}
else if (dso instanceof Bitstream)
{
Bitstream bitstream = (Bitstream) dso;
for (int i = 0; i < bitstream.getBundles().length; i++)
{
Bundle bundle = bitstream.getBundles()[i];
for (int j = 0; j < bundle.getItems().length; j++)
{
Item item = bundle.getItems()[j];
doc1.addField("owningItem", item.getID());
storeParents(doc1, item);
}
}
}
}
    /** @return true when X-Forwarded-For headers should be trusted for client IPs. */
    public static boolean isUseProxies()
    {
        return useProxies;
    }

    /**
     * Delete data from the index, as described by a query.
     *
     * @param query description of the records to be deleted.
     * @throws IOException
     * @throws SolrServerException
     */
    public static void removeIndex(String query) throws IOException,
            SolrServerException
    {
        solr.deleteByQuery(query);
        solr.commit();
    }
public static Map<String, List<String>> queryField(String query,
List oldFieldVals, String field)
{
Map<String, List<String>> currentValsStored = new HashMap<String, List<String>>();
try
{
// Get one document (since all the metadata for all the values
// should be the same just get the first one we find
Map<String, String> params = new HashMap<String, String>();
params.put("q", query);
params.put("rows", "1");
MapSolrParams solrParams = new MapSolrParams(params);
QueryResponse response = solr.query(solrParams);
// Make sure we at least got a document
if (response.getResults().getNumFound() == 0)
{
return currentValsStored;
}
// We have at least one document good
SolrDocument document = response.getResults().get(0);
for (Object storedField : metadataStorageInfo.keySet())
{
// For each of these fields that are stored we are to create a
// list of the values it holds now
java.util.Collection collection = document
.getFieldValues((String) storedField);
List<String> storedVals = new ArrayList<String>();
storedVals.addAll(collection);
// Now add it to our hashmap
currentValsStored.put((String) storedField, storedVals);
}
// System.out.println("HERE");
// Get the info we need
}
catch (SolrServerException e)
{
e.printStackTrace();
}
return currentValsStored;
}
public static class ResultProcessor
{
public void execute(String query) throws SolrServerException, IOException {
Map<String, String> params = new HashMap<String, String>();
params.put("q", query);
params.put("rows", "10");
MapSolrParams solrParams = new MapSolrParams(params);
QueryResponse response = solr.query(solrParams);
long numbFound = response.getResults().getNumFound();
// process the first batch
process(response.getResults());
// Run over the rest
for (int i = 10; i < numbFound; i += 10)
{
params.put("start", String.valueOf(i));
solrParams = new MapSolrParams(params);
response = solr.query(solrParams);
process(response.getResults());
}
}
public void commit() throws IOException, SolrServerException {
solr.commit();
}
/**
* Override to manage pages of documents
* @param docs
*/
public void process(List<SolrDocument> docs) throws IOException, SolrServerException {
for(SolrDocument doc : docs){
process(doc);
}
}
/**
* Override to manage individual documents
* @param doc
*/
public void process(SolrDocument doc) throws IOException, SolrServerException {
}
}
    /**
     * Flags every record whose IP matches a known spider address as a bot
     * (isBot:true) by re-adding the altered documents.
     * NOTE(review): execute() pages through results while process() re-adds
     * altered documents; depending on commit timing pages may shift under the
     * iteration — confirm this is acceptable.
     */
    public static void markRobotsByIP()
    {
        for(String ip : SpiderDetector.getSpiderIpAddresses()){
            try {
                /* Result Process to alter record to be identified as a bot */
                ResultProcessor processor = new ResultProcessor(){
                    public void process(SolrDocument doc) throws IOException, SolrServerException {
                        doc.removeFields("isBot");
                        doc.addField("isBot", true);
                        SolrInputDocument newInput = ClientUtils.toSolrInputDocument(doc);
                        solr.add(newInput);
                        log.info("Marked " + doc.getFieldValue("ip") + " as bot");
                    }
                };
                /* query for ip, exclude results previously set as bots. */
                processor.execute("ip:"+ip+ "* AND -isBot:true");
                solr.commit();
            } catch (Exception e) {
                log.error(e.getMessage(),e);
            }
        }
    }

    /**
     * Flags every record with the given user agent as a bot (isBot:true).
     * @param agent the user agent string to match
     */
    public static void markRobotByUserAgent(String agent){
        try {
            /* Result Process to alter record to be identified as a bot */
            ResultProcessor processor = new ResultProcessor(){
                public void process(SolrDocument doc) throws IOException, SolrServerException {
                    doc.removeFields("isBot");
                    doc.addField("isBot", true);
                    SolrInputDocument newInput = ClientUtils.toSolrInputDocument(doc);
                    solr.add(newInput);
                }
            };
            /* query for ip, exclude results previously set as bots. */
            processor.execute("userAgent:"+agent+ " AND -isBot:true");
            solr.commit();
        } catch (Exception e) {
            log.error(e.getMessage(),e);
        }
    }
    /** Permanently removes every record previously flagged with isBot:true. */
    public static void deleteRobotsByIsBotFlag()
    {
        try {
            solr.deleteByQuery("isBot:true");
        } catch (Exception e) {
            log.error(e.getMessage(),e);
        }
    }

    /**
     * Removes all records whose IP starts with the given prefix.
     * @param ip the IP address (or prefix) to purge
     */
    public static void deleteIP(String ip)
    {
        try {
            solr.deleteByQuery("ip:"+ip + "*");
        } catch (Exception e) {
            log.error(e.getMessage(),e);
        }
    }

    /** Removes all records originating from known spider IP addresses. */
    public static void deleteRobotsByIP()
    {
        for(String ip : SpiderDetector.getSpiderIpAddresses()){
            deleteIP(ip);
        }
    }
/*
* //TODO: below are not used public static void
* update(String query, boolean addField, String fieldName, Object
* fieldValue, Object oldFieldValue) throws SolrServerException, IOException
* { List<Object> vals = new ArrayList<Object>(); vals.add(fieldValue);
* List<Object> oldvals = new ArrayList<Object>(); oldvals.add(fieldValue);
* update(query, addField, fieldName, vals, oldvals); }
*/
    /**
     * Rewrites every document matching {@code query}, applying a per-field
     * action. Solr (in this version) has no in-place update, so the matching
     * documents are fetched, deleted, modified in memory, and re-added.
     *
     * NOTE(review): between deleteByQuery and the re-add there is a window in
     * which a failure would lose the affected records — confirm acceptable.
     *
     * @param query selects the documents to modify
     * @param action one of "addOne", "replace" or "remOne"
     * @param fieldNames the fields to modify, parallel to fieldValuesList
     * @param fieldValuesList per-field values to add, replace with, or remove
     */
    public static void update(String query, String action,
            List<String> fieldNames, List<List<Object>> fieldValuesList)
            throws SolrServerException, IOException
    {
        // Since there is NO update
        // We need to get our documents
        // QueryResponse queryResponse = solr.query()//query(query, null, -1,
        // null, null, null);
        final List<SolrDocument> docsToUpdate = new ArrayList<SolrDocument>();
        ResultProcessor processor = new ResultProcessor(){
            public void process(List<SolrDocument> docs) throws IOException, SolrServerException {
                docsToUpdate.addAll(docs);
            }
        };
        processor.execute(query);
        // We have all the docs delete the ones we don't need
        solr.deleteByQuery(query);
        // Add the new (updated ones)
        for (int i = 0; i < docsToUpdate.size(); i++)
        {
            SolrDocument solrDocument = docsToUpdate.get(i);
            // Now loop over our fieldname actions
            for (int j = 0; j < fieldNames.size(); j++)
            {
                String fieldName = fieldNames.get(j);
                List<Object> fieldValues = fieldValuesList.get(j);
                if (action.equals("addOne") || action.equals("replace"))
                {
                    if (action.equals("replace"))
                    {
                        solrDocument.removeFields(fieldName);
                    }
                    for (Object fieldValue : fieldValues)
                    {
                        solrDocument.addField(fieldName, fieldValue);
                    }
                }
                else if (action.equals("remOne"))
                {
                    // Remove the field
                    java.util.Collection<Object> values = solrDocument
                            .getFieldValues(fieldName);
                    solrDocument.removeFields(fieldName);
                    for (Object value : values)
                    {
                        // Keep all the values besides the one we need to remove
                        if (!fieldValues.contains((value)))
                        {
                            solrDocument.addField(fieldName, value);
                        }
                    }
                }
            }
            SolrInputDocument newInput = ClientUtils
                    .toSolrInputDocument(solrDocument);
            solr.add(newInput);
        }
        solr.commit();
        // System.out.println("SolrLogger.update(\""+query+"\"):"+(new
        // Date().getTime() - start)+"ms,"+numbFound+"records");
    }
    /**
     * Convenience wrapper: runs {@code query} with only a maximum row count,
     * delegating to the full query method with all other options unset.
     */
    public static void query(String query, int max) throws SolrServerException
    {
        query(query, null, null, max, null, null, null, null);
    }
    /**
     * Query used to get values grouped by the given facet field.
     *
     * @param query
     *            the query to be used
     * @param filterQuery
     *            optional filter query narrowing the results, may be null
     * @param facetField
     *            the facet field on which to group our values
     * @param max
     *            the max number of values given back (in case of 10 the top 10
     *            will be given)
     * @param showTotal
     *            a boolean determining whether the total amount should be given
     *            back as the last element of the array
     * @param facetQueries
     *            optional additional facet queries to run, may be null
     * @return an array containing our results
     * @throws SolrServerException
     *             ...
     */
    public static ObjectCount[] queryFacetField(String query,
            String filterQuery, String facetField, int max, boolean showTotal,
            List<String> facetQueries) throws SolrServerException
    {
        QueryResponse queryResponse = query(query, filterQuery, facetField,
                max, null, null, null, facetQueries);
        // No connection / failed query: report no results rather than crash.
        if (queryResponse == null)
        {
            return new ObjectCount[0];
        }
        FacetField field = queryResponse.getFacetField(facetField);
        // At least make sure we have one value
        if (0 < field.getValueCount())
        {
            // Create an array for our result
            ObjectCount[] result = new ObjectCount[field.getValueCount()
                    + (showTotal ? 1 : 0)];
            // Run over our results & store them
            for (int i = 0; i < field.getValues().size(); i++)
            {
                FacetField.Count fieldCount = field.getValues().get(i);
                result[i] = new ObjectCount();
                result[i].setCount(fieldCount.getCount());
                result[i].setValue(fieldCount.getName());
            }
            if (showTotal)
            {
                result[result.length - 1] = new ObjectCount();
                result[result.length - 1].setCount(queryResponse.getResults()
                        .getNumFound());
                result[result.length - 1].setValue("total");
            }
            return result;
        }
        else
        {
            // Return an empty array cause we got no data
            return new ObjectCount[0];
        }
    }
/**
 * Query used to get values grouped by the date.
 *
 * @param query
 *            the query to be used
 * @param filterQuery
 *            an optional filter query to restrict the results
 * @param max
 *            the max number of values given back (in case of 10 the top 10
 *            will be given)
 * @param dateType
 *            the type to be used (example: DAY, MONTH, YEAR)
 * @param dateStart
 *            the start date Format:(-3, -2, ..) the date is calculated
 *            relatively on today
 * @param dateEnd
 *            the end date stop Format (-2, +1, ..) the date is calculated
 *            relatively on today
 * @param showTotal
 *            a boolean determining whether the total amount should be given
 *            back as the last element of the array
 * @return an array containing our results (empty when Solr is unavailable
 *         or no date facet was returned)
 * @throws SolrServerException
 *             if the underlying Solr query fails
 */
public static ObjectCount[] queryFacetDate(String query,
        String filterQuery, int max, String dateType, String dateStart,
        String dateEnd, boolean showTotal) throws SolrServerException
{
    QueryResponse queryResponse = query(query, filterQuery, null, max,
            dateType, dateStart, dateEnd, null);
    if (queryResponse == null)
    {
        return new ObjectCount[0];
    }

    // The "time" date facet may be absent (null) or carry no values when
    // nothing matched; guard against that instead of crashing with an NPE
    // (this fixes the crash noted by the old TODO comment here).
    FacetField dateFacet = queryResponse.getFacetDate("time");
    if (dateFacet == null || dateFacet.getValues() == null)
    {
        return new ObjectCount[0];
    }

    // Create an array for our result (one extra slot for the total)
    ObjectCount[] result = new ObjectCount[dateFacet.getValueCount()
            + (showTotal ? 1 : 0)];
    // Run over our datefacet & store all the values
    for (int i = 0; i < dateFacet.getValues().size(); i++)
    {
        FacetField.Count dateCount = dateFacet.getValues().get(i);
        result[i] = new ObjectCount();
        result[i].setCount(dateCount.getCount());
        result[i].setValue(getDateView(dateCount.getName(), dateType));
    }
    if (showTotal)
    {
        result[result.length - 1] = new ObjectCount();
        result[result.length - 1].setCount(queryResponse.getResults()
                .getNumFound());
        // TODO: Make sure that this total is gotten out of the msgs.xml
        result[result.length - 1].setValue("total");
    }
    return result;
}
/**
 * Run a set of facet queries against the statistics core and return the
 * count for each facet query string.
 *
 * @param query the main query to be used
 * @param filterQuery an optional filter query to restrict the results
 * @param facetQueries the facet query strings to count
 * @return a map from facet query string to its count; empty when Solr is
 *         unavailable (mirrors the other query helpers instead of throwing
 *         a NullPointerException)
 * @throws SolrServerException if the underlying Solr query fails
 */
public static Map<String, Integer> queryFacetQuery(String query,
        String filterQuery, List<String> facetQueries)
        throws SolrServerException
{
    QueryResponse response = query(query, filterQuery, null, 1, null, null,
            null, facetQueries);
    if (response == null)
    {
        // query() returns null when the solr server is not configured
        return java.util.Collections.emptyMap();
    }
    return response.getFacetQuery();
}
/**
 * Count the total number of documents matching the given query.
 *
 * @param query the query to be used
 * @param filterQuery an optional filter query to restrict the results
 * @return an {@link ObjectCount} holding the total; a zero count is
 *         returned when Solr is unavailable (instead of the previous
 *         NullPointerException on the null response)
 * @throws SolrServerException if the underlying Solr query fails
 */
public static ObjectCount queryTotal(String query, String filterQuery)
        throws SolrServerException
{
    ObjectCount objCount = new ObjectCount();
    QueryResponse queryResponse = query(query, filterQuery, null, -1, null,
            null, null, null);
    if (queryResponse == null)
    {
        // query() returns null when the solr server is not configured;
        // report zero rather than dereferencing the null response
        return objCount;
    }
    objCount.setCount(queryResponse.getResults().getNumFound());
    return objCount;
}
/**
 * Render a Solr facet date string into a human-readable label for the
 * requested granularity ("DAY" -> dd-MM-yyyy, "MONTH" -> MMMM yyyy,
 * "YEAR" -> yyyy).
 *
 * @param name the raw facet value; returned unchanged when it does not
 *            look like a date (including null)
 * @param type the granularity ("DAY", "MONTH", "YEAR"); anything else
 *            falls back to the day format
 * @return the formatted date label, or the original value when it could
 *         not be parsed
 */
private static String getDateView(String name, String type)
{
    // Only rewrite values that look like a date (yyyy-MM...). Anything
    // else is passed through untouched.
    if (name != null && name.matches("^[0-9]{4}\\-[0-9]{2}.*"))
    {
        // Parse using the ISO-8601 format first
        Date date = null;
        try
        {
            SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT_8601);
            date = format.parse(name);
        }
        catch (ParseException e)
        {
            try
            {
                // We should use the dcdate (the dcdate is used when
                // generating random data)
                SimpleDateFormat format = new SimpleDateFormat(
                        DATE_FORMAT_DCDATE);
                date = format.parse(name);
            }
            catch (ParseException e1)
            {
                // Both formats failed; the original value is returned below
                e1.printStackTrace();
            }
        }

        // Day format is the default, so an explicit "DAY" branch is not
        // needed (the original code had a redundant one)
        String dateformatString = "dd-MM-yyyy";
        if ("MONTH".equals(type))
        {
            dateformatString = "MMMM yyyy";
        }
        else if ("YEAR".equals(type))
        {
            dateformatString = "yyyy";
        }
        SimpleDateFormat simpleFormat = new SimpleDateFormat(
                dateformatString);
        if (date != null)
        {
            name = simpleFormat.format(date);
        }
    }
    return name;
}
/**
 * Build and execute a statistics query against the Solr core.
 *
 * @param query the main Solr query
 * @param filterQuery an optional extra filter query
 * @param facetField an optional field to facet on
 * @param max the facet limit, or -1 for no explicit limit
 * @param dateType date facet granularity (e.g. DAY, MONTH, YEAR), or null
 *            to disable date faceting
 * @param dateStart relative start offset for the date facet (e.g. "-3")
 * @param dateEnd relative end offset for the date facet (e.g. "+1")
 * @param facetQueries optional list of facet query strings
 * @return the Solr response, or null when the solr server is not configured
 * @throws SolrServerException if the query fails
 */
private static QueryResponse query(String query, String filterQuery,
        String facetField, int max, String dateType, String dateStart,
        String dateEnd, List<String> facetQueries)
        throws SolrServerException
{
    if (solr == null)
    {
        return null;
    }

    SolrQuery solrQuery = new SolrQuery().setRows(0).setQuery(query)
            .setFacetMinCount(1);

    // Enable date faceting on the "time" field when a granularity is given
    if (dateType != null)
    {
        solrQuery.setParam("facet.date", "time");
        // EXAMPLE: NOW/MONTH-" + nbMonths + "MONTHS
        solrQuery.setParam("facet.date.start",
                "NOW/" + dateType + dateStart + dateType + "S");
        // EXAMPLE: NOW/MONTH+1MONTH
        solrQuery.setParam("facet.date.end",
                "NOW/" + dateType + dateEnd + dateType);
        solrQuery.setParam("facet.date.gap", "+1" + dateType);
        solrQuery.setFacet(true);
    }

    if (facetQueries != null)
    {
        for (String facetQuery : facetQueries)
        {
            solrQuery.addFacetQuery(facetQuery);
        }
        if (!facetQueries.isEmpty())
        {
            solrQuery.setFacet(true);
        }
    }

    if (facetField != null)
    {
        solrQuery.addFacetField(facetField);
    }

    // Apply the facet limit ("top x") if one was requested
    if (max != -1)
    {
        solrQuery.setFacetLimit(max);
    }

    // Filters are used instead of extra query clauses to improve
    // performance and to keep the search result ordering unaffected.
    // Optionally filter by the legacy spider IP list (may get too long to
    // properly filter all IP's)
    if (ConfigurationManager.getBooleanProperty("solr.statistics.query.filter.spiderIp", false))
    {
        solrQuery.addFilterQuery(getIgnoreSpiderIPs());
    }
    // Optionally filter by the isBot field; may be overridden in the
    // future to allow views on stats that include bots.
    if (ConfigurationManager.getBooleanProperty("solr.statistics.query.filter.isBot", true))
    {
        solrQuery.addFilterQuery("-isBot:true");
    }
    if (filterQuery != null)
    {
        solrQuery.addFilterQuery(filterQuery);
    }

    QueryResponse response;
    try
    {
        response = solr.query(solrQuery);
    }
    catch (SolrServerException e)
    {
        System.err.println("Error using query " + query);
        throw e;
    }
    return response;
}
/** String of IP and Ranges in IPTable as a Solr Query (cached). */
private static String filterQuery = null;

/**
 * Returns a filter-query string excluding all the IP addresses that
 * should be ignored (known spiders). The string is built once and cached.
 * <p>
 * Bug fix: the original code inverted the {@code first} test and emitted
 * " AND " only before the FIRST clause, yielding a query that starts with
 * a dangling AND and has no conjunction between the remaining clauses.
 * The clauses are now joined with AND between entries.
 *
 * @return a Solr filter query excluding all spider IP addresses
 */
public static String getIgnoreSpiderIPs() {
    if (filterQuery == null) {
        StringBuilder query = new StringBuilder();
        boolean first = true;
        for (String ip : SpiderDetector.getSpiderIpAddresses()) {
            if (!first) {
                query.append(" AND ");
            }
            first = false;

            query.append("NOT(ip: ").append(ip).append(")");
        }
        filterQuery = query.toString();
    }
    return filterQuery;
}
/**
 * Maintenance to keep a SOLR index efficient.
 * Note: This might take a long time.
 */
public static void optimizeSOLR() {
    try {
        long startTime = System.currentTimeMillis();
        System.out.println("SOLR Optimize -- Process Started:" + startTime);
        solr.optimize();
        long endTime = System.currentTimeMillis();
        System.out.println("SOLR Optimize -- Process Finished:" + endTime);
        System.out.println("SOLR Optimize -- Total time taken:" + (endTime - startTime) + " (ms).");
    } catch (SolrServerException sse) {
        // Report and swallow: optimize is best-effort maintenance
        System.err.println(sse.getMessage());
    } catch (IOException ioe) {
        System.err.println(ioe.getMessage());
    }
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
/**
 * Data structure for returning results from statistics searches.
 * Holds a single (value, count) pair, e.g. a facet value and the number
 * of hits recorded for it.
 *
 * @author mdiggory at atmire.com
 * @author ben at atmire.com
 * @author kevinvandevelde at atmire.com
 */
public class ObjectCount {

    /** Number of hits recorded for this value (defaults to 0). */
    private long count;

    /** The value this count belongs to, e.g. a facet name or date label. */
    private String value;

    public ObjectCount() {
        // Nothing to initialise: count defaults to 0 and value to null.
    }

    public void setCount(long count) {
        this.count = count;
    }

    public long getCount() {
        return count;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getValue() {
        return value;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * A sparse v4 IPTable implementation that uses nested HashMaps
 * to optimize IP Address matching over ranges of IP Addresses.
 *
 * @author mdiggory at atmire.com
 */
public class IPTable {

    /*
     * A lookup tree for IP Addresses and SubnetRanges: the first three
     * octets are successive map keys; the innermost set holds either the
     * allowed fourth octets or "*" to match the whole /24 subnet.
     */
    private Map<String, Map<String, Map<String, Set<String>>>> map =
            new HashMap<String, Map<String, Map<String, Set<String>>>>();

    /**
     * Add an address to the table. Can be a full v4 IP ("1.2.3.4"), a
     * subnet given as the first three octets ("1.2.3"), or a range
     * ("1.2.3.4 - 1.2.3.8") whose endpoints lie in the same /24 subnet.
     *
     * @param ip the address, subnet or range to add
     * @throws IPFormatException if the argument is not a valid v4 address,
     *             subnet or single-subnet range (malformed octets are now
     *             reported through this exception instead of escaping as an
     *             unchecked NumberFormatException)
     */
    public void add(String ip) throws IPFormatException {

        String[] start;

        String[] end;

        String[] range = ip.split("-");

        if (range.length >= 2) {
            // Range form: both endpoints must be full IPv4 addresses
            // (any CIDR suffix on an endpoint is stripped)
            start = range[0].trim().split("/")[0].split("\\.");
            end = range[1].trim().split("/")[0].split("\\.");

            if (start.length != 4 || end.length != 4)
            {
                throw new IPFormatException(ip + " - Ranges need to be full IPv4 Addresses");
            }

            if (!(start[0].equals(end[0]) && start[1].equals(end[1]) && start[2].equals(end[2]))) {
                throw new IPFormatException(ip + " - Ranges can only be across the last subnet x.y.z.0 - x.y.z.254");
            }

        } else {
            //need to ignore CIDR notation for the moment.
            //ip = ip.split("\\/")[0];

            String[] subnets = ip.split("\\.");

            if (subnets.length < 3) {
                throw new IPFormatException(ip + " - require at least three subnet places (255.255.255.0)");
            }
            start = subnets;
            end = subnets;
        }

        if (start.length >= 3) {
            // Walk/create the three levels of the lookup tree
            Map<String, Map<String, Set<String>>> first = map.get(start[0]);

            if (first == null) {
                first = new HashMap<String, Map<String, Set<String>>>();
                map.put(start[0], first);
            }

            Map<String, Set<String>> second = first.get(start[1]);

            if (second == null) {
                second = new HashMap<String, Set<String>>();
                first.put(start[1], second);
            }

            Set<String> third = second.get(start[2]);

            if (third == null) {
                third = new HashSet<String>();
                second.put(start[2], third);
            }

            //now populate fourth place (* or value 0-254);
            if (start.length == 3) {
                third.add("*");
            }

            // Once the whole subnet is matched, individual hosts are redundant
            if (third.contains("*")) {
                return;
            }

            if (start.length >= 4) {
                int s;
                int e;
                try {
                    s = Integer.parseInt(start[3]);
                    e = Integer.parseInt(end[3]);
                } catch (NumberFormatException nfe) {
                    // Surface malformed last octets (e.g. unstripped CIDR
                    // suffixes like "0/24") as the documented exception type
                    throw new IPFormatException(ip + " - last subnet place is not a number");
                }
                for (int i = s; i <= e; i++) {
                    third.add(String.valueOf(i));
                }
            }
        }
    }

    /** Check whether a given address is contained in this netblock.
     *
     * @param ip the address to be tested (must be a full v4 address)
     * @return true if {@code ip} is within this table's limits
     * @throws IPFormatException if {@code ip} is not a single full address
     */
    public boolean contains(String ip) throws IPFormatException {

        String[] subnets = ip.split("\\.");

        if (subnets.length != 4)
        {
            throw new IPFormatException("needs to be single IP Address");
        }

        Map<String, Map<String, Set<String>>> first = map.get(subnets[0]);

        if (first == null)
        {
            return false;
        }

        Map<String, Set<String>> second = first.get(subnets[1]);

        if (second == null)
        {
            return false;
        }

        Set<String> third = second.get(subnets[2]);

        if (third == null)
        {
            return false;
        }

        return third.contains(subnets[3]) || third.contains("*");
    }

    /** Convert to a Set. Whole subnets appear as "x.y.z", single hosts as
     * "x.y.z.w".
     *
     * @return this table's content as a Set
     */
    public Set<String> toSet() {
        HashSet<String> set = new HashSet<String>();

        for (Map.Entry<String, Map<String, Map<String, Set<String>>>> first : map.entrySet()) {
            String firstString = first.getKey();
            Map<String, Map<String, Set<String>>> secondMap = first.getValue();

            for (Map.Entry<String, Map<String, Set<String>>> second : secondMap.entrySet()) {
                String secondString = second.getKey();
                Map<String, Set<String>> thirdMap = second.getValue();

                for (Map.Entry<String, Set<String>> third : thirdMap.entrySet()) {
                    String thirdString = third.getKey();
                    Set<String> fourthSet = third.getValue();

                    if (fourthSet.contains("*")) {
                        set.add(firstString + "." + secondString + "." + thirdString);
                    } else {
                        for (String fourth : fourthSet) {
                            set.add(firstString + "." + secondString + "." + thirdString + "." + fourth);
                        }
                    }
                }
            }
        }

        return set;
    }

    /**
     * Exception Class to deal with IPFormat errors.
     */
    public static class IPFormatException extends Exception {
        public IPFormatException(String s) {
            super(s);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.apache.tools.ant.taskdefs.Get;
import org.dspace.core.ConfigurationManager;
import org.dspace.statistics.SolrLogger;
import java.io.*;
import java.net.URL;
/**
 * Command-line client for Solr statistics maintenance tasks: updating the
 * spider IP files from the internet, marking or deleting robot entries in
 * the Solr index, and optimizing the index.
 *
 * @author Stuart Lewis
 */
public class StatisticsClient
{
    private static final Logger log = Logger.getLogger(StatisticsClient.class);

    /**
     * Print the help message
     *
     * @param options The command line options the user gave
     * @param exitCode the system exit code to use
     */
    private static void printHelp(Options options, int exitCode)
    {
        // print the help message
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp("StatisticsClient\n", options);
        System.exit(exitCode);
    }

    /**
     * Main method to run the statistics importer.
     *
     * @param args The command line arguments
     * @throws Exception If something goes wrong
     */
    public static void main(String[] args) throws Exception
    {
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("u", "update-spider-files", false,
                          "Update Spider IP Files from internet into " +
                          ConfigurationManager.getProperty("dspace.dir") + "/config/spiders");

        options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr");
        options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag");
        options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address");
        options.addOption("o", "optimize", false, "Run maintenance on the SOLR index");
        options.addOption("h", "help", false, "help");

        CommandLine line = parser.parse(options, args);

        // Did the user ask to see the help? (printHelp calls System.exit)
        if (line.hasOption('h'))
        {
            printHelp(options, 0);
        }

        // The task options are mutually exclusive: only the first one found
        // (in the order below) is executed; with no task option the help is
        // printed instead.
        if(line.hasOption("u"))
        {
            StatisticsClient.updateSpiderFiles();
        }
        else if (line.hasOption('m'))
        {
            SolrLogger.markRobotsByIP();
        }
        else if(line.hasOption('f'))
        {
            SolrLogger.deleteRobotsByIsBotFlag();
        }
        else if(line.hasOption('i'))
        {
            SolrLogger.deleteRobotsByIP();
        }
        else if(line.hasOption('o'))
        {
            SolrLogger.optimizeSOLR();
        }
        else
        {
            printHelp(options, 0);
        }
    }

    /**
     * Method to update Spiders in config directory.
     * Downloads each URL listed in the comma-separated configuration
     * property "solr.spiderips.urls" into {dspace.dir}/config/spiders,
     * using the remote timestamp to skip unchanged files.
     */
    private static void updateSpiderFiles()
    {
        try
        {
            System.out.println("Downloading latest spider IP addresses:");

            // Get the list URLs to download from
            String urls = ConfigurationManager.getProperty("solr.spiderips.urls");
            if ((urls == null) || ("".equals(urls)))
            {
                System.err.println(" - Missing setting from dspace.cfg: solr.spiderips.urls");
                // NOTE(review): exits with status 0 on a missing setting;
                // a non-zero exit code may be more appropriate — confirm
                // before changing, scripts may depend on this.
                System.exit(0);
            }

            // Get the location of spiders directory, creating it if needed
            File spiders = new File(ConfigurationManager.getProperty("dspace.dir"),"config/spiders");

            if (!spiders.exists() && !spiders.mkdirs())
            {
                log.error("Unable to create spiders directory");
            }

            String[] values = urls.split(",");
            for (String value : values)
            {
                value = value.trim();
                System.out.println(" Downloading: " + value);

                URL url = new URL(value);

                // Use Ant's Get task: the destination file name is derived
                // from the URL host and path ("/" replaced with "-"), and
                // the download is skipped when the local copy is current
                Get get = new Get();
                get.setDest(new File(spiders, url.getHost() + url.getPath().replace("/","-")));
                get.setSrc(url);
                get.setUseTimestamp(true);
                get.execute();
            }
        } catch (Exception e)
        {
            System.err.println(" - Error: " + e.getMessage());
            e.printStackTrace();
            System.exit(1);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import java.util.Arrays;
import java.util.List;
import org.dspace.core.I18nUtil;
/**
* Mapping between Country codes, English Country names,
* Continent Codes, and English Continent names
*
* @author kevinvandevelde at atmire.com
* @author ben at atmire.com
*/
public class LocationUtils {
// TODO: put lists below in a file ? Although this will not get changed often if it does we need to adjust code.....
private static String[] countryNames = new String[]{
"Afghanistan, Islamic Republic of",
"Åland Islands",
"Albania",
"Algeria",
"American Samoa",
"Andorra",
"Angola",
"Anguilla",
"Antarctica",
"Antigua and Barbuda",
"Argentina",
"Armenia",
"Aruba",
"Australia",
"Austria",
"Azerbaijan",
"Bahamas",
"Bahrain",
"Bangladesh",
"Barbados",
"Belarus",
"Belgium",
"Belize",
"Benin",
"Bermuda",
"Bhutan",
"Bolivia",
"Bosnia and Herzegovina",
"Botswana",
"Bouvet Island (Bouvetoya)",
"Brazil",
"British Indian Ocean Territory (Chagos Archipelago)",
"British Virgin Islands",
"Brunei Darussalam",
"Bulgaria",
"Burkina Faso",
"Burundi",
"Cambodia",
"Cameroon",
"Canada",
"Cape Verde",
"Cayman Islands",
"Central African Republic",
"Chad",
"Chile",
"China",
"Christmas Island",
"Cocos (Keeling) Islands",
"Colombia",
"Comoros",
"Congo",
"Congo",
"Cook Islands",
"Costa Rica",
"Cote d'Ivoire",
"Croatia",
"Cuba",
"Cyprus",
"Czech Republic",
"Denmark",
"Djibouti",
"Dominica",
"Dominican Republic",
"Ecuador",
"Egypt",
"El Salvador",
"Equatorial Guinea",
"Eritrea",
"Estonia",
"Ethiopia",
"Faroe Islands",
"Falkland Islands (Malvinas)",
"Fiji",
"Finland",
"France",
"French Guiana",
"French Polynesia",
"French Southern Territories",
"Gabon",
"Gambia",
"Georgia",
"Germany",
"Ghana",
"Gibraltar",
"Greece",
"Greenland",
"Grenada",
"Guadeloupe",
"Guam",
"Guatemala",
"Guernsey",
"Guinea",
"Guinea-Bissau",
"Guyana",
"Haiti",
"Heard Island and McDonald Islands",
"Holy See (Vatican City State)",
"Honduras",
"Hong Kong",
"Hungary",
"Iceland",
"India",
"Indonesia",
"Iran",
"Iraq",
"Ireland",
"Isle of Man",
"Israel",
"Italy",
"Jamaica",
"Japan",
"Jersey",
"Jordan",
"Kazakhstan",
"Kenya",
"Kiribati",
"Korea",
"Korea",
"Kuwait",
"Kyrgyz Republic",
"Lao People's Democratic Republic",
"Latvia",
"Lebanon",
"Lesotho",
"Liberia",
"Libyan Arab Jamahiriya",
"Liechtenstein",
"Lithuania",
"Luxembourg",
"Macao, Special Administrative Region of China",
"Macedonia",
"Madagascar",
"Malawi",
"Malaysia",
"Maldives",
"Mali",
"Malta",
"Marshall Islands",
"Martinique",
"Mauritania",
"Mauritius",
"Mayotte",
"Mexico",
"Micronesia",
"Moldova",
"Monaco",
"Mongolia",
"Montenegro",
"Montserrat",
"Morocco",
"Mozambique",
"Myanmar",
"Namibia",
"Nauru",
"Nepal",
"Netherlands Antilles",
"Netherlands",
"New Caledonia",
"New Zealand",
"Nicaragua",
"Niger",
"Nigeria",
"Niue",
"Norfolk Island",
"Northern Mariana Islands",
"Norway",
"Oman",
"Pakistan",
"Palau",
"Palestinian Territory",
"Panama",
"Papua New Guinea",
"Paraguay",
"Peru",
"Philippines",
"Pitcairn Islands",
"Poland",
"Portugal",
"Puerto Rico",
"Qatar",
"Reunion",
"Romania",
"Russian Federation",
"Rwanda",
"Saint Barthelemy",
"Saint Helena",
"Saint Kitts and Nevis",
"Saint Lucia",
"Saint Martin",
"Saint Pierre and Miquelon",
"Saint Vincent and the Grenadines",
"Samoa",
"San Marino",
"Sao Tome and Principe",
"Saudi Arabia",
"Senegal",
"Serbia",
"Seychelles",
"Sierra Leone",
"Singapore",
"Slovakia (Slovak Republic)",
"Slovenia",
"Solomon Islands",
"Somalia",
"South Africa",
"South Georgia and the South Sandwich Islands",
"Spain",
"Sri Lanka",
"Sudan",
"Suriname",
"Svalbard & Jan Mayen Islands",
"Swaziland",
"Sweden",
"Switzerland",
"Syrian Arab Republic",
"Taiwan",
"Tajikistan",
"Tanzania",
"Thailand",
"Timor-Leste",
"Togo",
"Tokelau",
"Tonga",
"Trinidad and Tobago",
"Tunisia",
"Turkey",
"Turkmenistan",
"Turks and Caicos Islands",
"Tuvalu",
"Uganda",
"Ukraine",
"United Arab Emirates",
"United Kingdom",
"United States of America",
"United States Minor Outlying Islands",
"United States Virgin Islands",
"Uruguay",
"Uzbekistan",
"Vanuatu",
"Venezuela",
"Vietnam",
"Wallis and Futuna",
"Western Sahara",
"Yemen",
"Zambia",
"Zimbabwe"
};
private static String[] countryCodes = new String[]{
"AF",
"AX",
"AL",
"DZ",
"AS",
"AD",
"AO",
"AI",
"AQ",
"AG",
"AR",
"AM",
"AW",
"AU",
"AT",
"AZ",
"BS",
"BH",
"BD",
"BB",
"BY",
"BE",
"BZ",
"BJ",
"BM",
"BT",
"BO",
"BA",
"BW",
"BV",
"BR",
"IO",
"VG",
"BN",
"BG",
"BF",
"BI",
"KH",
"CM",
"CA",
"CV",
"KY",
"CF",
"TD",
"CL",
"CN",
"CX",
"CC",
"CO",
"KM",
"CD",
"CG",
"CK",
"CR",
"CI",
"HR",
"CU",
"CY",
"CZ",
"DK",
"DJ",
"DM",
"DO",
"EC",
"EG",
"SV",
"GQ",
"ER",
"EE",
"ET",
"FO",
"FK",
"FJ",
"FI",
"FR",
"GF",
"PF",
"TF",
"GA",
"GM",
"GE",
"DE",
"GH",
"GI",
"GR",
"GL",
"GD",
"GP",
"GU",
"GT",
"GG",
"GN",
"GW",
"GY",
"HT",
"HM",
"VA",
"HN",
"HK",
"HU",
"IS",
"IN",
"ID",
"IR",
"IQ",
"IE",
"IM",
"IL",
"IT",
"JM",
"JP",
"JE",
"JO",
"KZ",
"KE",
"KI",
"KP",
"KR",
"KW",
"KG",
"LA",
"LV",
"LB",
"LS",
"LR",
"LY",
"LI",
"LT",
"LU",
"MO",
"MK",
"MG",
"MW",
"MY",
"MV",
"ML",
"MT",
"MH",
"MQ",
"MR",
"MU",
"YT",
"MX",
"FM",
"MD",
"MC",
"MN",
"ME",
"MS",
"MA",
"MZ",
"MM",
"NA",
"NR",
"NP",
"AN",
"NL",
"NC",
"NZ",
"NI",
"NE",
"NG",
"NU",
"NF",
"MP",
"NO",
"OM",
"PK",
"PW",
"PS",
"PA",
"PG",
"PY",
"PE",
"PH",
"PN",
"PL",
"PT",
"PR",
"QA",
"RE",
"RO",
"RU",
"RW",
"BL",
"SH",
"KN",
"LC",
"MF",
"PM",
"VC",
"WS",
"SM",
"ST",
"SA",
"SN",
"RS",
"SC",
"SL",
"SG",
"SK",
"SI",
"SB",
"SO",
"ZA",
"GS",
"ES",
"LK",
"SD",
"SR",
"SJ",
"SZ",
"SE",
"CH",
"SY",
"TW",
"TJ",
"TZ",
"TH",
"TL",
"TG",
"TK",
"TO",
"TT",
"TN",
"TR",
"TM",
"TC",
"TV",
"UG",
"UA",
"AE",
"GB",
"US",
"UM",
"VI",
"UY",
"UZ",
"VU",
"VE",
"VN",
"WF",
"EH",
"YE",
"ZM",
"ZW"
};
private static String[] continentCodes = new String[]{
"AS",
"EU",
"EU",
"AF",
"OC",
"EU",
"AF",
"NA",
"AN",
"NA",
"SA",
"AS",
"NA",
"OC",
"EU",
"AS",
"NA",
"AS",
"AS",
"NA",
"EU",
"EU",
"NA",
"AF",
"NA",
"AS",
"SA",
"EU",
"AF",
"AN",
"SA",
"AS",
"NA",
"AS",
"EU",
"AF",
"AF",
"AS",
"AF",
"NA",
"AF",
"NA",
"AF",
"AF",
"SA",
"AS",
"AS",
"AS",
"SA",
"AF",
"AF",
"AF",
"OC",
"NA",
"AF",
"EU",
"NA",
"AS",
"EU",
"EU",
"AF",
"NA",
"NA",
"SA",
"AF",
"NA",
"AF",
"AF",
"EU",
"AF",
"EU",
"SA",
"OC",
"EU",
"EU",
"SA",
"OC",
"AN",
"AF",
"AF",
"AS",
"EU",
"AF",
"EU",
"EU",
"NA",
"NA",
"NA",
"OC",
"NA",
"EU",
"AF",
"AF",
"SA",
"NA",
"AN",
"EU",
"NA",
"AS",
"EU",
"EU",
"AS",
"AS",
"AS",
"AS",
"EU",
"EU",
"AS",
"EU",
"NA",
"AS",
"EU",
"AS",
"AS",
"AF",
"OC",
"AS",
"AS",
"AS",
"AS",
"AS",
"EU",
"AS",
"AF",
"AF",
"AF",
"EU",
"EU",
"EU",
"AS",
"EU",
"AF",
"AF",
"AS",
"AS",
"AF",
"EU",
"OC",
"NA",
"AF",
"AF",
"AF",
"NA",
"OC",
"EU",
"EU",
"AS",
"EU",
"NA",
"AF",
"AF",
"AS",
"AF",
"OC",
"AS",
"NA",
"EU",
"OC",
"OC",
"NA",
"AF",
"AF",
"OC",
"OC",
"OC",
"EU",
"AS",
"AS",
"OC",
"AS",
"NA",
"OC",
"SA",
"SA",
"AS",
"OC",
"EU",
"EU",
"NA",
"AS",
"AF",
"EU",
"EU",
"AF",
"NA",
"AF",
"NA",
"NA",
"NA",
"NA",
"NA",
"OC",
"EU",
"AF",
"AS",
"AF",
"EU",
"AF",
"AF",
"AS",
"EU",
"EU",
"OC",
"AF",
"AF",
"AN",
"EU",
"AS",
"AF",
"SA",
"EU",
"AF",
"EU",
"EU",
"AS",
"AS",
"AS",
"AF",
"AS",
"AS",
"AF",
"OC",
"OC",
"NA",
"AF",
"AS",
"AS",
"NA",
"OC",
"AF",
"EU",
"AS",
"EU",
"NA",
"OC",
"NA",
"SA",
"AS",
"OC",
"SA",
"AS",
"OC",
"AF",
"AS",
"AF",
"AF",
};
private static List<String> continentCodeList;
private static List<String> countryCodeList;
private static List<String> countryNameList;
static
{
if(countryCodeList == null)
{
countryCodeList = Arrays.asList(countryCodes);
}
if(continentCodeList == null)
{
continentCodeList = Arrays.asList(continentCodes);
}
if(countryNameList == null)
{
countryNameList = Arrays.asList(countryNames);
}
}
private static String[][] continentCodeToName = new String[][]{
{"NA", "North America"},
{"SA", "South America"},
{"AN", "Antarctica"},
{"AF", "Africa"},
{"EU", "Europe"},
{"AS", "Asia"},
{"OC", "Oceania"}};
/**
 * Look up the English country name for an ISO 3166-1 alpha-2 country code.
 *
 * @param countryCode two-letter country code; may be null or empty
 * @return the country name, or the localized "unknown country" message
 *         when the code is null, empty, or not in the table
 */
public static String getCountryName(String countryCode){
    // Guard against null before calling length() (the original threw a
    // NullPointerException on null input)
    if (countryCode != null && countryCode.length() > 0 && countryCodeList.contains(countryCode)) {
        int index = countryCodeList.indexOf(countryCode);
        // The list element is already a String; no toString() needed
        return countryNameList.get(index);
    } else {
        return I18nUtil.getMessage("org.dspace.statistics.util.LocationUtils.unknown-country");
    }
}
/**
 * Look up the continent code for an ISO 3166-1 alpha-2 country code.
 *
 * @param countryCode two-letter country code; may be null or empty
 * @return the two-letter continent code, or the localized "unknown
 *         continent" message when the code is null, empty, or not in the
 *         table
 */
public static String getContinentCode(String countryCode){
    // Guard against null before calling length() (the original threw a
    // NullPointerException on null input)
    if(countryCode != null && countryCode.length() > 0 && countryCodeList.contains(countryCode)) {
        int index = countryCodeList.indexOf(countryCode);
        // The list element is already a String; no toString() needed
        return continentCodeList.get(index);
    } else {
        return I18nUtil.getMessage("org.dspace.statistics.util.LocationUtils.unknown-continent");
    }
}
/**
 * Map a two-letter continent code (e.g. "EU") to its English name.
 *
 * @param continentCode the continent code to look up
 * @return the continent name, or the code itself when no mapping exists
 */
public static String getContinentName(String continentCode){
    for (int i = 0; i < continentCodeToName.length; i++) {
        String[] mapping = continentCodeToName[i];
        if (mapping[0].equals(continentCode))
        {
            return mapping[1];
        }
    }
    // No mapping found: fall back to the raw code
    return continentCode;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.app.statistics.LogAnalyser;
import org.dspace.app.statistics.LogLine;
import org.dspace.content.*;
import org.dspace.handle.HandleManager;
import org.dspace.core.Context;
import java.io.*;
import java.sql.SQLException;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.text.SimpleDateFormat;
import java.text.ParsePosition;
/**
* A utility class to convert the classic dspace.log (as generated
* by log4j) files into an intermediate format for ingestion into
* the new solr stats.
*
* @author Stuart Lewis
*/
public class ClassicDSpaceLogConverter {
private Logger log = Logger.getLogger(ClassicDSpaceLogConverter.class);
/** A DSpace context */
private Context context;
/** Whether or not to provide verbose output */
private boolean verbose = false;
/** Whether to include actions logged by org.dspace.usage.LoggerUsageEventListener */
private boolean newEvents = false;
/** A regular expression for extracting the IP address from a log line */
private Pattern ipaddrPattern = Pattern.compile("ip_addr=(\\d*\\.\\d*\\.\\d*\\.\\d*):");
/** Date format (in) from the log line */
private SimpleDateFormat dateFormatIn = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
/** Date format out (for solr) */
private SimpleDateFormat dateFormatOut = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
/** Date format (in) from the log line for the UID */
private SimpleDateFormat dateFormatInUID = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
/** Date format out (for uid) */
private SimpleDateFormat dateFormatOutUID = new SimpleDateFormat("yyyyMMddHHmmssSSS");
/**
 * Create an instance of the converter utility.
 *
 * @param c The context
 * @param v Whether or not to provide verbose output
 * @param nE Whether to include actions logged by org.dspace.usage.LoggerUsageEventListener
 */
public ClassicDSpaceLogConverter(Context c, boolean v, boolean nE)
{
    this.context = c;
    this.verbose = v;
    this.newEvents = nE;
}
/**
 * Convert a classic log file into the intermediate statistics format.
 * Each qualifying INFO line becomes a CSV record
 * "uid,action,id,date,user,ip"; blank results and immediately repeated
 * records are skipped.
 *
 * @param in The filename to read from
 * @param out The filename to write to
 * @return The number of events written to the output file
 */
public int convert(String in, String out)
{
    // 'counter' counts events written; 'lines' counts every input line read
    int counter = 0;
    int lines = 0;

    // Say what we're going to do
    System.out.println(" About to convert '" + in + "' to '" + out + "'");

    // Setup the regular expressions for the log file
    LogAnalyser.setRegex(in);

    // Open the file and read it line by line
    BufferedReader input = null;
    Writer output = null;
    try {
        String line;
        LogLine lline;
        String lout;
        String id;
        String handle;
        String ip;
        String date;
        DSpaceObject dso;
        String uid;
        // The last record written, used to suppress consecutive duplicates
        String lastLine = "";

        input = new BufferedReader(new FileReader(new File(in)));
        output = new BufferedWriter(new FileWriter(new File(out)));

        while ((line = input.readLine()) != null)
        {
            // Read in the line and convert it to a LogLine
            lines++;
            if (verbose)
            {
                System.out.println(" - IN: " + line);
            }
            lline = LogAnalyser.getLogLine(line);

            // Get rid of any lines that aren't INFO
            if ((lline == null) || (!lline.isLevel("INFO")))
            {
                if (verbose)
                {
                    System.out.println(" - IGNORED!");
                }
                continue;
            }

            // Get the IP address of the user
            Matcher matcher = ipaddrPattern.matcher(line);
            if (matcher.find())
            {
                ip = matcher.group(1);
            }
            else
            {
                ip = "unknown";
            }

            // Get and format the date
            // We can use lline.getDate() as this strips the time element
            date = dateFormatOut.format(
                    dateFormatIn.parse(line.substring(0, line.indexOf(',')),
                            new ParsePosition(0)));

            // Generate a UID for the log line
            // - based on the date/time
            uid = dateFormatOutUID.format(
                    dateFormatInUID.parse(line.substring(0, line.indexOf(' ', line.indexOf(' ') + 1)),
                            new ParsePosition(0)));

            try
            {
                // What sort of view is it?
                // (ignore lines from org.dspace.usage.LoggerUsageEventListener which is 1.6 code)
                if ((lline.getAction().equals("view_bitstream")) &&
                    (!lline.getParams().contains("invalid_bitstream_id")) &&
                    (!lline.getParams().contains("withdrawn")) &&
                    ((!line.contains("org.dspace.usage.LoggerUsageEventListener")) || newEvents))
                {
                    id = lline.getParams().substring(13);
                }
                else if ((lline.getAction().equals("view_item")) &&
                         ((!line.contains("org.dspace.usage.LoggerUsageEventListener")) || newEvents))
                {
                    // Item views log a handle; resolve it to the item id
                    handle = lline.getParams().substring(7);
                    dso = HandleManager.resolveToObject(context, handle);
                    id = "" + dso.getID();
                }
                else if ((lline.getAction().equals("view_collection")) &&
                         ((!line.contains("org.dspace.usage.LoggerUsageEventListener")) || newEvents))
                {
                    id = lline.getParams().substring(14);
                }
                else if ((lline.getAction().equals("view_community")) &&
                         ((!line.contains("org.dspace.usage.LoggerUsageEventListener")) || newEvents))
                {
                    id = lline.getParams().substring(13);
                }
                else
                {
                    // Not a view event we record
                    continue;
                }

                // Construct the output record
                lout = uid + "," +
                       lline.getAction() + "," +
                       id + "," +
                       date + "," +
                       lline.getUser() + "," +
                       ip + "\n";
            }
            catch (Exception e)
            {
                // Malformed or unresolvable line: report (when verbose) and skip
                if (verbose)
                {
                    System.out.println(" - IN: " + line);
                }
                if (verbose)
                {
                    System.err.println("Error with log line! " + e.getMessage());
                }
                continue;
            }

            if ((verbose) && (!"".equals(lout)))
            {
                System.out.println(" - IN: " + line);
                System.out.println(" - OUT: " + lout);
            }

            // Write the output line, skipping blanks and consecutive duplicates
            if ((!"".equals(lout)) && (!lout.equals(lastLine)))
            {
                output.write(lout);
                counter++;
                lastLine = lout;
            }
        }
    }
    catch (IOException e)
    {
        log.error("File access problem", e);
    }
    finally
    {
        // Clean up the input and output streams. Guard against null: if
        // opening either stream failed above, the references are still
        // null and the original code threw a NullPointerException here.
        if (input != null)
        {
            try { input.close(); } catch (IOException e) { log.error(e.getMessage(), e); }
        }
        if (output != null)
        {
            try { output.flush(); } catch (IOException e) { log.error(e.getMessage(), e); }
            try { output.close(); } catch (IOException e) { log.error(e.getMessage(), e); }
        }
    }

    // Tell the user what we have done
    System.out.println(" Read " + lines + " lines and recorded " + counter + " events");
    return counter;
}
/**
 * Print the usage message and terminate the JVM.
 *
 * @param options The command line options the user gave
 * @param exitCode the system exit code to use
 */
private static void printHelp(Options options, int exitCode)
{
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("ClassicDSpaceLogConverter\n", options);
    System.out.println("\n\tClassicDSpaceLogConverter -i infilename -o outfilename -v (for verbose output)");
    System.exit(exitCode);
}
/**
 * Main method to execute the converter.
 *
 * @param args CLI args
 */
public static void main(String[] args)
{
    // Set up the command-line options
    CommandLineParser parser = new PosixParser();
    Options options = new Options();

    options.addOption("i", "in", true, "source file");
    options.addOption("o", "out", true, "destination directory");
    options.addOption("m", "multiple",false, "treat the input file as having a wildcard ending");
    options.addOption("n", "newformat",false, "process new format log lines (1.6+)");
    options.addOption("v", "verbose", false, "display verbose output (useful for debugging)");
    options.addOption("h", "help", false, "help");

    // Parse the command line arguments
    CommandLine line;
    try
    {
        line = parser.parse(options, args);
    }
    catch (ParseException pe)
    {
        System.err.println("Error parsing command line arguments: " + pe.getMessage());
        System.exit(1);
        return;
    }

    // Did the user ask to see the help? (printHelp calls System.exit)
    if (line.hasOption('h'))
    {
        printHelp(options, 0);
    }

    // Check we have an input and output file
    if ((!line.hasOption('i')) && (!line.hasOption('o')))
    {
        System.err.println("-i and -o input and output file names are required");
        printHelp(options, 1);
    }
    else if (!line.hasOption('i'))
    {
        System.err.println("-i input file name is required");
        printHelp(options, 1);
    }
    if (!line.hasOption('o'))
    {
        System.err.println("-o output file names is required");
        printHelp(options, 1);
    }

    // Whether or not to include event created by org.dspace.usage.LoggerUsageEventListener
    boolean newEvents = line.hasOption('n');

    // Create a DSpace context with authorisation switched off
    Context context = null;
    try
    {
        context = new Context();
        context.turnOffAuthorisationSystem();
    }
    catch (SQLException sqle)
    {
        System.err.println("Unable to create DSpace context: " + sqle.getMessage());
        System.exit(1);
    }

    // Create a copy of the converter
    ClassicDSpaceLogConverter converter = new ClassicDSpaceLogConverter(context,
                                                                        line.hasOption('v'),
                                                                        newEvents);

    // Set up the log analyser
    try
    {
        LogAnalyser.readConfig();
    }
    catch (IOException ioe)
    {
        System.err.println("Unable to read config file: " + LogAnalyser.getConfigFile());
        System.exit(1);
    }

    // Are we converting multiple files?
    if (line.hasOption('m'))
    {
        // Convert every file whose name starts with the given sample name
        // NOTE(review): assumes the -i argument has a parent directory;
        // a bare filename would make getParentFile() return null — confirm
        final File sample = new File(line.getOptionValue('i'));
        File dir = sample.getParentFile();
        FilenameFilter filter = new FilenameFilter()
        {
            public boolean accept(File dir, String name)
            {
                return name.startsWith(sample.getName());
            }
        };
        String[] children = dir.list(filter);
        // File.list() returns null when the directory cannot be read;
        // the original code threw a NullPointerException in that case
        if (children != null)
        {
            for (String in : children)
            {
                System.out.println(in);

                String out = line.getOptionValue('o') +
                             (dir.getAbsolutePath() +
                              System.getProperty("file.separator") + in).substring(line.getOptionValue('i').length());
                converter.convert(dir.getAbsolutePath() + System.getProperty("file.separator") + in, out);
            }
        }
        else
        {
            System.err.println("Unable to list files in " + dir.getAbsolutePath());
        }
    }
    else
    {
        // Just convert the one file
        converter.convert(line.getOptionValue('i'), line.getOptionValue('o'));
    }

    // Clean everything up
    context.restoreAuthSystemState();
    context.abort();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import java.io.*;
import java.util.HashSet;
import java.util.Set;
/**
* Commandline utility to create a file of spider addresses from an Apache
* log file.
*
* @author Mark Diggory (mdiggory at atmire.com)
* @author kevinvandevelde at atmire.com
* @author ben at atmire.com
*/
public class ApacheLogRobotsProcessor {

    /**
     * Creates a file containing spider IP addresses based on an Apache
     * logfile, by collecting the client addresses that requested robots.txt.
     * Addresses already present in the output file are preserved and merged
     * with the newly found ones.
     *
     * @param args -l (--logfile) the Apache log file to scan,
     *             -s (--spiderfile) the spider IP file to create or update
     * @throws Exception if the command line cannot be parsed or on I/O error
     */
    public static void main(String[] args) throws Exception {
        // create an Options object and populate it
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("l", "logfile", true, "type: Input log file");
        options.addOption("s", "spiderfile", true, "type: Spider IP file");

        CommandLine line = parser.parse(options, args);

        String logFileLoc;
        String spiderIpPath;
        if (line.hasOption("l"))
        {
            logFileLoc = line.getOptionValue("l");
        }
        else {
            System.out.println("We need our log file");
            return;
        }
        if (line.hasOption("s"))
        {
            spiderIpPath = line.getOptionValue("s");
        }
        else {
            System.out.println("We need a spider IP output file");
            return;
        }

        File spiderIpFile = new File(spiderIpPath);

        // Seed the set with the IPs already present in the output file, if any.
        // The Set takes care of de-duplication.
        Set<String> logSpiders;
        if (spiderIpFile.exists())
        {
            logSpiders = SpiderDetector.readIpAddresses(spiderIpFile);
        }
        else
        {
            logSpiders = new HashSet<String>();
        }

        // Scan the log file line by line; the finally block guarantees the
        // reader is closed even if an I/O error occurs mid-file (previously
        // the handle leaked on error).
        BufferedReader in = new BufferedReader(new FileReader(logFileLoc));
        try {
            String logLine;
            while ((logLine = in.readLine()) != null) {
                // Currently we only check whether robots.txt appears in the line
                if (logLine.contains("robots.txt")) {
                    // A robots.txt request indicates a (well-behaved) bot.
                    // Common log format lines look like "<ip> - ...", so the
                    // client address is everything before the first '-'.
                    int dash = logLine.indexOf('-');
                    if (dash > 0) {
                        String ip = logLine.substring(0, dash).trim();
                        logSpiders.add(ip);
                    }
                    // Lines without a '-' are malformed; skip them instead of
                    // throwing StringIndexOutOfBoundsException as before.
                }
            }
        } finally {
            in.close();
        }

        // Rewrite the spider file with the merged set of addresses
        BufferedWriter output = new BufferedWriter(new FileWriter(spiderIpFile));
        try {
            for (String ip : logSpiders) {
                System.out.println("Adding new ip: " + ip);
                // Write each IP on a separate line
                output.write(ip + "\n");
            }
            output.flush();
        } finally {
            output.close();
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.statistics.SolrLogger;
import javax.servlet.http.HttpServletRequest;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
/**
* SpiderDetector is used to find IP's that are spiders...
* In future someone may add UserAgents and Host Domains
* to the detection criteria here.
*
* @author kevinvandevelde at atmire.com
* @author ben at atmire.com
* @author Mark Diggory (mdiggory at atmire.com)
*/
public class SpiderDetector {

    private static final Logger log = Logger.getLogger(SpiderDetector.class);

    /**
     * Sparse hash table structure holding the known spider IP address ranges.
     * Lazily populated, once, from the files in [dspace.dir]/config/spiders.
     */
    private static IPTable table = null;

    /**
     * Utility method which reads the IP addresses out of a file and returns
     * them in a Set. Lines starting with '#' are comments; non-empty lines
     * whose first character is not a digit are assumed to be hostnames and
     * are currently ignored (hostname support may be added later).
     *
     * @param spiderIpFile the location of our spider file
     * @return a set of the IP addresses found (empty if the file is missing)
     * @throws IOException if the file exists but cannot be read
     */
    public static Set<String> readIpAddresses(File spiderIpFile) throws IOException {
        Set<String> ips = new HashSet<String>();

        if (!spiderIpFile.exists() || !spiderIpFile.isFile())
        {
            return ips;
        }

        // Read our file & get all the IPs; close the reader even when a read
        // fails part-way through (previously the handle leaked on error)
        BufferedReader in = new BufferedReader(new FileReader(spiderIpFile));
        try {
            String line;
            while ((line = in.readLine()) != null) {
                if (!line.startsWith("#")) {
                    line = line.trim();

                    if (!line.equals("") && !Character.isDigit(line.charAt(0))) {
                        // is a hostname
                        // add this functionality later...
                    } else if (!line.equals("")) {
                        // is a full v4 ip (v6 not handled yet)
                        ips.add(line);
                    }
                } else {
                    // ua.add(line.replaceFirst("#","").replaceFirst("UA","").trim());
                    // ... add this functionality later
                }
            }
        } finally {
            in.close();
        }

        return ips;
    }

    /**
     * Get a Set representing all the loaded spider addresses.
     *
     * @return the set of spider IP addresses
     */
    public static Set<String> getSpiderIpAddresses() {
        loadSpiderIpAddresses();
        return table.toSet();
    }

    /*
     * Private loader to populate the table from files. Synchronized so that
     * concurrent web requests (this is reached from isSpider(request)) cannot
     * race on the lazy initialization of the shared static table.
     */
    private static synchronized void loadSpiderIpAddresses() {
        if (table == null) {
            table = new IPTable();

            String filePath = ConfigurationManager.getProperty("dspace.dir");
            try {
                File spidersDir = new File(filePath, "config/spiders");

                if (spidersDir.exists() && spidersDir.isDirectory()) {
                    for (File file : spidersDir.listFiles()) {
                        for (String ip : readIpAddresses(file)) {
                            table.add(ip);
                        }
                        log.info("Loaded Spider IP file: " + file);
                    }
                } else {
                    log.info("No spider file loaded");
                }
            }
            catch (Exception e) {
                log.error("Error Loading Spiders:" + e.getMessage(), e);
            }
        }
    }

    /**
     * Static service method for testing requests against the loaded spider
     * address files. When proxy support is enabled, every address in the
     * X-Forwarded-For header is checked before falling back to the remote
     * address of the request.
     * <p/>
     * In the future this may be extended to support User Agent and
     * domain name detection.
     *
     * @param request the incoming request
     * @return true|false if the request was detected to be from a spider
     */
    public static boolean isSpider(HttpServletRequest request) {
        if (SolrLogger.isUseProxies() && request.getHeader("X-Forwarded-For") != null) {
            /* This header is a comma delimited list */
            for (String xfip : request.getHeader("X-Forwarded-For").split(",")) {
                if (isSpider(xfip))
                {
                    return true;
                }
            }
        }

        return isSpider(request.getRemoteAddr());
    }

    /**
     * Check whether an individual IP address is a known spider.
     *
     * @param ip the address to check
     * @return true if the address matches the spider table
     */
    public static boolean isSpider(String ip) {
        if (table == null) {
            SpiderDetector.loadSpiderIpAddresses();
        }

        try {
            if (table.contains(ip)) {
                return true;
            }
        } catch (Exception e) {
            // A lookup failure (e.g. malformed address) is treated as non-spider
            return false;
        }

        return false;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.commons.cli.*;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Bitstream;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.eperson.EPerson;
import org.dspace.statistics.SolrLogger;
import java.util.Date;
import java.util.Map;
import java.text.SimpleDateFormat;
import com.maxmind.geoip.LookupService;
import com.maxmind.geoip.Location;
/**
* Test class to generate random statistics data.
* Used for load testing of searches. Inputs are slow
* due to inefficient randomizer.
*
* @author kevinvandevelde at atmire.com
* @author ben at atmire.com
*/
public class StatisticsDataGenerator {
/**
 * Command line entry point: generates a number of random usage events and
 * stores them directly in the statistics solr core for load testing.
 * <p>
 * NOTE(review): the existing core contents are wiped first (deleteByQuery
 * on "*:*") — do not run this against a production statistics core.
 *
 * @param args command line arguments; -n (number of events) and the
 *             community/collection/item/bitstream/eperson id ranges
 *             (-a/-b, -c/-d, -f/-g, -h/-i, -j/-k) are required, while
 *             -s/-e (date range) default to 01/01/2006 .. now
 * @throws Exception on any database, solr or parsing failure
 */
public static void main(String[] args) throws Exception {
    CommandLineParser parser = new PosixParser();

    Options options = new Options();
    options.addOption("n", "nrlogs", true,
            "type: nr of logs to be generated");
    options.addOption("s", "startDate", true,
            "type: the start date from which we start generating our logs");
    options.addOption("e", "endDate", true,
            "type: the end date from which we start generating our logs");
    options.addOption("a", "cms", true, "The starting id of our community");
    options.addOption("b", "cme", true, "The end id of our community");
    options.addOption("c", "cls", true,
            "The starting id of our collection");
    options.addOption("d", "cle", true, "The end if of our collection");
    options.addOption("f", "is", true, "The starting id of our item");
    options.addOption("g", "ie", true, "The end id of our item");
    options.addOption("h", "bs", true, "The starting id of our bitstream");
    options.addOption("i", "be", true, "The end id of our bitstream");
    options.addOption("j", "ps", true, "The starting id of our epersons");
    options.addOption("k", "pe", true, "The end id of our epersons");

    CommandLine line = parser.parse(options, args);

    int nrLogs;
    long startDate;
    long endDate;
    long commStartId;
    long commEndId;
    long collStartId;
    long collEndId;
    long itemStartId;
    long itemEndId;
    long bitStartId;
    long bitEndId;
    long epersonStartId;
    long epersonEndId;

    // -n is mandatory: how many events to create
    if (line.hasOption("n"))
    {
        nrLogs = Integer.parseInt(line.getOptionValue("n"));
    }
    else {
        System.out
                .println("We need to know how many logs we need to create");
        return;
    }
    // Date range defaults to 01/01/2006 .. now
    if (line.hasOption("s")) {
        startDate = getDateInMiliseconds(line.getOptionValue("s"));
    } else
    {
        startDate = getDateInMiliseconds("01/01/2006");
    }
    if (line.hasOption("e")) {
        endDate = getDateInMiliseconds(line.getOptionValue("e"));
    } else
    {
        endDate = new Date().getTime();
    }
    // Every id-range option is required; exit silently when one is missing
    if (line.hasOption("a"))
    {
        commStartId = Long.parseLong(line.getOptionValue("a"));
    }
    else
    {
        return;
    }
    if (line.hasOption("b"))
    {
        commEndId = Long.parseLong(line.getOptionValue("b"));
    }
    else
    {
        return;
    }
    if (line.hasOption("c"))
    {
        collStartId = Long.parseLong(line.getOptionValue("c"));
    }
    else
    {
        return;
    }
    if (line.hasOption("d"))
    {
        collEndId = Long.parseLong(line.getOptionValue("d"));
    }
    else
    {
        return;
    }
    if (line.hasOption("f"))
    {
        itemStartId = Long.parseLong(line.getOptionValue("f"));
    }
    else
    {
        return;
    }
    if (line.hasOption("g"))
    {
        itemEndId = Long.parseLong(line.getOptionValue("g"));
    }
    else
    {
        return;
    }
    if (line.hasOption("h"))
    {
        bitStartId = Long.parseLong(line.getOptionValue("h"));
    }
    else
    {
        return;
    }
    if (line.hasOption("i"))
    {
        bitEndId = Long.parseLong(line.getOptionValue("i"));
    }
    else
    {
        return;
    }
    if (line.hasOption("j"))
    {
        epersonStartId = Long.parseLong(line.getOptionValue("j"));
    }
    else
    {
        return;
    }
    if (line.hasOption("k"))
    {
        epersonEndId = Long.parseLong(line.getOptionValue("k"));
    }
    else
    {
        return;
    }

    // Get the max id range (used to steer the "find a valid id" loop below)
    long maxIdTotal = Math.max(commEndId, collEndId);
    maxIdTotal = Math.max(maxIdTotal, itemEndId);
    maxIdTotal = Math.max(maxIdTotal, bitEndId);

    // We got 3/4 chance the person visiting the dso is not logged in:
    // ids drawn beyond the real eperson range resolve to no eperson below
    epersonEndId *= 4;

    // We got all our parameters now get the rest
    Context context = new Context();

    // Find our solr server; NOTE: this wipes all existing statistics data
    CommonsHttpSolrServer solr = new CommonsHttpSolrServer(
            ConfigurationManager.getProperty("solr.log.server"));
    solr.deleteByQuery("*:*");
    solr.commit();

    Map<String, String> metadataStorageInfo = SolrLogger.getMetadataStorageInfo();

    String prevIp = null;
    String dbfile = ConfigurationManager.getProperty("solr.dbfile");
    LookupService cl = new LookupService(dbfile,
            LookupService.GEOIP_STANDARD);
    int countryErrors = 0;
    for (int i = 0; i < nrLogs; i++) {
        String ip = "";
        Date time;
        String continent;
        String countryCode;
        float longitude;
        float latitude;
        String city;

        // 1. Generate an ip for our user
        StringBuilder ipBuilder = new StringBuilder();
        for (int j = 0; j < 4; j++) {
            ipBuilder.append(getRandomNumberInRange(0, 254));
            if (j != 3)
            {
                ipBuilder.append(".");
            }
        }
        ip = ipBuilder.toString();

        // 2. Depending on our ip get all the location info
        Location location;
        try {
            location = cl.getLocation(ip);
        } catch (Exception e) {
            location = null;
        }

        if (location == null) {
            // If we haven't got a prev ip this is pretty useless so move on
            // to the next one
            if (prevIp == null)
            {
                continue;
            }
            // Fall back to the previous (known-resolvable) ip
            ip = prevIp;
            location = cl.getLocation(ip);
        }
        city = location.city;
        countryCode = location.countryCode;
        longitude = location.longitude;
        latitude = location.latitude;
        try {
            continent = LocationUtils.getContinentCode(countryCode);
        } catch (Exception e) {
            // We could get an error if our country == Europa; this doesn't
            // matter for generating statistics so ignore it
            System.out.println("COUNTRY ERROR: " + countryCode);
            countryErrors++;
            continue;
        }

        // 3. Generate a date that the object was visited
        time = new Date(getRandomNumberInRange(startDate, endDate));

        // 4. Get our dspaceobject we are supposed to be working on.
        // We got mostly item views so lets say we got 1/2 chance that we
        // got an item view.
        // What type have we got (PS: I know we haven't got 5 as a dso type
        // we can log but it is used so our item gets more traffic)
        int type = (int) getRandomNumberInRange(0, 8);
        if (type == Constants.BUNDLE || type >= 5)
        {
            type = Constants.ITEM;
        }

        int dsoId = -1;
        // Now we need to find a valid id for the chosen type
        switch (type) {
        case Constants.COMMUNITY:
            dsoId = (int) getRandomNumberInRange(commStartId, commEndId);
            break;
        case Constants.COLLECTION:
            dsoId = (int) getRandomNumberInRange(collStartId, collEndId);
            break;
        case Constants.ITEM:
            dsoId = (int) getRandomNumberInRange(itemStartId, itemEndId);
            break;
        case Constants.BITSTREAM:
            dsoId = (int) getRandomNumberInRange(bitStartId, bitEndId);
            break;
        }
        // Now find our dso
        DSpaceObject dso = DSpaceObject.find(context, type, dsoId);
        if (dso instanceof Bitstream) {
            Bitstream bit = (Bitstream) dso;
            // internal bitstreams (e.g. licenses) are not viewable content
            if (bit.getFormat().isInternal()) {
                dso = null;
            }
        }
        // Make sure we got a dso: walk ids up (or down once we hit the top
        // of the range) until one resolves
        boolean substract = false;
        while (dso == null) {
            // If our dsoId gets higher than our maxIdTotal we need to lower
            // it to find a valid id
            if (dsoId == maxIdTotal)
            {
                substract = true;
            }
            if (substract)
            {
                dsoId--;
            }
            else
            {
                dsoId++;
            }
            dso = DSpaceObject.find(context, type, dsoId);
            if (dso instanceof Bitstream) {
                Bitstream bit = (Bitstream) dso;
                if (bit.getFormat().isInternal()) {
                    dso = null;
                }
            }
        }

        // Find the person who is visiting us
        int epersonId = (int) getRandomNumberInRange(epersonStartId, epersonEndId);
        EPerson eperson = EPerson.find(context, epersonId);
        if (eperson == null)
        {
            // Anonymous visit (ids beyond the real eperson range land here)
            epersonId = -1;
        }

        // Resolve the dns
        String dns = null;
        try {
            dns = DnsLookup.reverseDns(ip);
        } catch (Exception e) {
        }

        System.out.println(ip);
        System.out.println(dns);

        // Save it in our server
        SolrInputDocument doc1 = new SolrInputDocument();
        doc1.addField("ip", ip);
        doc1.addField("type", dso.getType());
        doc1.addField("id", dso.getID());
        doc1.addField("time", DateFormatUtils.format(time,
                SolrLogger.DATE_FORMAT_8601));
        doc1.addField("continent", continent);
        doc1.addField("countryCode", countryCode);
        doc1.addField("city", city);
        doc1.addField("latitude", latitude);
        doc1.addField("longitude", longitude);
        if (epersonId > 0)
        {
            doc1.addField("epersonid", epersonId);
        }
        if (dns != null)
        {
            doc1.addField("dns", dns.toLowerCase());
        }

        if (dso instanceof Item) {
            Item item = (Item) dso;
            // Store the configured item metadata fields alongside the event
            for (Map.Entry<String, String> entry : metadataStorageInfo.entrySet())
            {
                String dcField = entry.getValue();

                DCValue[] vals = item.getMetadata(dcField.split("\\.")[0],
                        dcField.split("\\.")[1], dcField.split("\\.")[2],
                        Item.ANY);
                for (DCValue val1 : vals) {
                    String val = val1.value;
                    doc1.addField(entry.getKey(), val);
                    doc1.addField(entry.getKey() + "_search", val.toLowerCase());
                }
            }
        }

        SolrLogger.storeParents(doc1, dso);
        solr.add(doc1);

        // Make sure we have a previous ip
        prevIp = ip;
    }
    System.out.println("Nr of countryErrors: " + countryErrors);
    // Commit at the end because it takes a while
    solr.commit();
}
/**
 * Returns a pseudo-random long drawn uniformly from the inclusive range
 * [min, max].
 *
 * @param min the random number must be greater than or equal to this
 * @param max the random number must be smaller than or equal to this
 * @return a random long in [min, max]
 */
private static long getRandomNumberInRange(long min, long max) {
    long span = (max - min) + 1;
    return min + (long) (Math.random() * span);
}
/**
 * Converts a dd/MM/yyyy date string to its epoch-millisecond value.
 *
 * @param dateString the string containing our date
 * @return the number of milliseconds represented by the given date string
 * @throws java.text.ParseException if the string is not a valid dd/MM/yyyy date
 */
private static long getDateInMiliseconds(String dateString)
        throws java.text.ParseException {
    // Parsed in the JVM's default time zone, matching the original behaviour
    return new SimpleDateFormat("dd/MM/yyyy").parse(dateString).getTime();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.apache.commons.cli.*;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.eperson.EPerson;
import org.dspace.statistics.SolrLogger;
import java.sql.SQLException;
import java.text.*;
import java.io.*;
import java.util.*;
import com.maxmind.geoip.LookupService;
import com.maxmind.geoip.Location;
/**
* Class to load intermediate statistics files into solr
*
* @author Stuart Lewis
*/
public class StatisticsImporter
{
    private static final Logger log = Logger.getLogger(StatisticsImporter.class);

    /** Date format for the timestamps in the intermediate files (and solr).
     *  NOTE(review): SimpleDateFormat is not thread-safe; fine for this
     *  single-threaded command-line tool, but do not share across threads. */
    private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    /** Solr server connection */
    private static CommonsHttpSolrServer solr;

    /** GEOIP lookup service */
    private static LookupService geoipLookup;

    /** Metadata storage information */
    private static Map<String, String> metadataStorageInfo;

    /** Whether to skip the DNS reverse lookup or not */
    private static boolean skipReverseDNS = false;

    /** Local item IDs (populated only when useLocal is set) */
    private List<Integer> localItems;

    /** Local collection IDs */
    private List<Integer> localCollections;

    /** Local community IDs */
    private List<Integer> localCommunities;

    /** Local bitstream IDs */
    private List<Integer> localBitstreams;

    /** Whether or not to replace item IDs with local values (for testing) */
    private boolean useLocal;
/**
 * Constructor. Optionally loads local identifiers to substitute for the
 * foreign ones when importing someone else's log files.
 *
 * @param local Whether to use local data
 */
public StatisticsImporter(boolean local)
{
    useLocal = local;
    if (!local)
    {
        return;
    }

    // Build the lists of local community/collection/item/bitstream IDs
    try
    {
        Context c = new Context();

        System.out.print("Loading local communities... ");
        localCommunities = new ArrayList<Integer>();
        for (Community comm : Community.findAll(c))
        {
            localCommunities.add(comm.getID());
        }
        System.out.println("Found " + localCommunities.size());

        System.out.print("Loading local collections... ");
        localCollections = new ArrayList<Integer>();
        for (Collection coll : Collection.findAll(c))
        {
            localCollections.add(coll.getID());
        }
        System.out.println("Found " + localCollections.size());

        System.out.print("Loading local items... ");
        localItems = new ArrayList<Integer>();
        ItemIterator items = Item.findAll(c);
        while (items.hasNext())
        {
            localItems.add(items.next().getID());
        }
        System.out.println("Found " + localItems.size());

        System.out.print("Loading local bitstreams... ");
        localBitstreams = new ArrayList<Integer>();
        for (Bitstream bitstream : Bitstream.findAll(c))
        {
            // Only bitstreams that have a name are included
            if (bitstream.getName() != null)
            {
                localBitstreams.add(bitstream.getID());
            }
        }
        System.out.println("Found " + localBitstreams.size());
    }
    catch (Exception e)
    {
        System.err.println("Error retrieving items from DSpace database:");
        e.printStackTrace();
        System.exit(1);
    }
}
/**
 * Loads the lines of an intermediate statistics file into solr.
 * Each line is a comma-separated record: uuid, action, id, date, user, ip.
 * Search engine traffic (recognised by reverse DNS) is skipped; everything
 * else becomes one solr document.
 *
 * @param filename The filename of the file to load
 * @param context The DSpace Context
 * @param verbose Whether to display verbose output
 */
private void load(String filename, Context context, boolean verbose)
{
    // Print out the filename for confirmation
    System.out.println("Processing file: " + filename);

    // Outcome counters: each line is presumed an error until it is either
    // successfully added to solr or identified as search engine noise
    int counter = 0;
    int errors = 0;
    int searchengines = 0;

    try
    {
        BufferedReader input = new BufferedReader(new FileReader(new File(filename)));
        try
        {
            String line;
            // String uuid;
            String action;
            String id;
            Date date;
            String user;
            String ip;

            String continent = "";
            String country = "";
            String countryCode = "";
            float longitude = 0f;
            float latitude = 0f;
            String city = "";
            String dns;

            // Cache of reverse DNS lookups (they are slow); LRU, bounded
            DNSCache dnsCache = new DNSCache(2500, 0.75f, 2500);
            Object fromCache;
            Random rand = new Random();

            while ((line = input.readLine()) != null)
            {
                // Tokenise the line
                String data = "";
                counter++;
                errors++;
                if (verbose)
                {
                    System.out.println("Line:" + line);
                }
                String[] parts = line.split(",");
                // uuid = parts[0];
                action = parts[1];
                id = parts[2];
                date = dateFormat.parse(parts[3]);
                user = parts[4];
                ip = parts[5];

                // Resolve the dns (if applicable) to get rid of search engine
                // bots early on in the processing chain
                dns = "";
                if (!skipReverseDNS)
                {
                    // Is the IP address in the cache?
                    fromCache = dnsCache.get(ip);
                    if (fromCache != null)
                    {
                        dns = (String)fromCache;
                    }
                    else
                    {
                        try
                        {
                            dns = DnsLookup.reverseDns(ip);
                            dnsCache.put(ip, dns);
                        } catch (Exception e)
                        {
                            // Lookup failed; treat as unresolved
                            dns = "";
                        }
                    }
                }

                data += ("ip addr = " + ip);
                data += (", dns name = " + dns);
                // Known search engine crawlers are skipped (and counted)
                if ((dns.endsWith(".googlebot.com.")) ||
                    (dns.endsWith(".crawl.yahoo.net.")) ||
                    (dns.endsWith(".search.msn.com.")))
                {
                    if (verbose)
                    {
                        System.out.println(data + ", IGNORE (search engine)");
                    }
                    errors--;
                    searchengines++;
                    continue;
                }

                // Get the geo information for the user
                Location location;
                try {
                    location = geoipLookup.getLocation(ip);
                    city = location.city;
                    country = location.countryName;
                    countryCode = location.countryCode;
                    longitude = location.longitude;
                    latitude = location.latitude;
                    if (verbose) {
                        data += (", country = " + country);
                        data += (", city = " + city);
                        System.out.println(data);
                    }
                    try {
                        continent = LocationUtils.getContinentCode(countryCode);
                    } catch (Exception e) {
                        if (verbose)
                        {
                            System.out.println("Unknown country code: " + countryCode);
                        }
                        continue;
                    }
                } catch (Exception e) {
                    // No problem - just can't look them up
                }

                // Now find our dso; when useLocal is set, substitute a random
                // local id of the same type for the foreign one
                int type = 0;
                if ("view_bitstream".equals(action))
                {
                    type = Constants.BITSTREAM;
                    if (useLocal)
                    {
                        id = "" + localBitstreams.get(rand.nextInt(localBitstreams.size()));
                    }
                }
                else if ("view_item".equals(action))
                {
                    type = Constants.ITEM;
                    if (useLocal)
                    {
                        id = "" + localItems.get(rand.nextInt(localItems.size()));
                    }
                }
                else if ("view_collection".equals(action))
                {
                    type = Constants.COLLECTION;
                    if (useLocal)
                    {
                        id = "" + localCollections.get(rand.nextInt(localCollections.size()));
                    }
                }
                else if ("view_community".equals(action))
                {
                    type = Constants.COMMUNITY;
                    if (useLocal)
                    {
                        id = "" + localCommunities.get(rand.nextInt(localCommunities.size()));
                    }
                }

                DSpaceObject dso = DSpaceObject.find(context, type, Integer.parseInt(id));
                if (dso == null)
                {
                    if (verbose)
                    {
                        System.err.println(" - DSO with ID '" + id + "' is no longer in the system");
                    }
                    continue;
                }

                // Get the eperson details
                EPerson eperson = EPerson.findByEmail(context, user);
                int epersonId = 0;
                if (eperson != null)
                {
                    // FIX: previously the result of getID() was discarded,
                    // so the epersonid field was never written to solr
                    epersonId = eperson.getID();
                }

                // Save it in our server
                SolrInputDocument sid = new SolrInputDocument();
                sid.addField("ip", ip);
                sid.addField("type", dso.getType());
                sid.addField("id", dso.getID());
                sid.addField("time", DateFormatUtils.format(date, SolrLogger.DATE_FORMAT_8601));
                sid.addField("continent", continent);
                sid.addField("country", country);
                sid.addField("countryCode", countryCode);
                sid.addField("city", city);
                sid.addField("latitude", latitude);
                sid.addField("longitude", longitude);
                if (epersonId > 0)
                {
                    sid.addField("epersonid", epersonId);
                }
                if (dns != null)
                {
                    sid.addField("dns", dns.toLowerCase());
                }

                if (dso instanceof Item) {
                    Item item = (Item) dso;
                    // Store the configured item metadata alongside the event
                    for (String storedField : metadataStorageInfo.keySet()) {
                        String dcField = metadataStorageInfo.get(storedField);

                        DCValue[] vals = item.getMetadata(dcField.split("\\.")[0],
                                dcField.split("\\.")[1], dcField.split("\\.")[2],
                                Item.ANY);
                        for (DCValue val1 : vals) {
                            String val = val1.value;
                            sid.addField(String.valueOf(storedField), val);
                            sid.addField(String.valueOf(storedField + "_search"),
                                    val.toLowerCase());
                        }
                    }
                }

                SolrLogger.storeParents(sid, dso);
                solr.add(sid);
                errors--;
            }
        }
        finally
        {
            // FIX: the reader was previously never closed
            input.close();
        }
    }
    catch (RuntimeException re)
    {
        throw re;
    }
    catch (Exception e)
    {
        System.err.println(e.getMessage());
        log.error(e.getMessage(), e);
    }

    // Print the summary and commit the accumulated documents
    DecimalFormat percentage = new DecimalFormat("##.###");
    int committed = counter - errors - searchengines;
    System.out.println("Processed " + counter + " log lines");
    if (counter > 0)
    {
        Double committedpercentage = 100d * committed / counter;
        System.out.println(" - " + committed + " entries added to solr: " + percentage.format(committedpercentage) + "%");
        Double errorpercentage = 100d * errors / counter;
        System.out.println(" - " + errors + " errors: " + percentage.format(errorpercentage) + "%");
        Double sepercentage = 100d * searchengines / counter;
        System.out.println(" - " + searchengines + " search engine activity skipped: " + percentage.format(sepercentage) + "%");
        System.out.print("About to commit data to solr...");

        // Commit at the end because it takes a while
        try
        {
            solr.commit();
        }
        catch (SolrServerException sse)
        {
            System.err.println("Error committing statistics to solr server!");
            sse.printStackTrace();
            System.exit(1);
        }
        catch (IOException ioe)
        {
            System.err.println("Error writing to solr server!");
            ioe.printStackTrace();
            System.exit(1);
        }
    }
    System.out.println(" done!");
}
/**
 * Print the help message and terminate the JVM.
 *
 * @param options The command line options the user gave
 * @param exitCode the system exit code to use
 */
private static void printHelp(Options options, int exitCode)
{
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("StatisticsImporter\n", options);
    System.exit(exitCode);
}
/**
 * Main method to run the statistics importer.
 *
 * @param args The command line arguments
 * @throws Exception If something goes wrong
 */
public static void main(String[] args) throws Exception
{
    // Define and parse the command line options
    CommandLineParser parser = new PosixParser();

    Options options = new Options();
    options.addOption("i", "in", true, "the inpout file");
    options.addOption("l", "local", false, "developers tool - map external log file to local handles");
    options.addOption("m", "multiple", false, "treat the input file as having a wildcard ending");
    options.addOption("s", "skipdns", false, "skip performing reverse DNS lookups on IP addresses");
    options.addOption("v", "verbose", false, "display verbose output (useful for debugging)");
    options.addOption("h", "help", false, "help");

    CommandLine cmd = parser.parse(options, args);

    // Did the user ask to see the help?
    if (cmd.hasOption('h'))
    {
        printHelp(options, 0);
    }

    // An input file is mandatory
    if (!cmd.hasOption('i'))
    {
        System.err.println("You must specify an input file using the -i flag");
        printHelp(options, 1);
    }

    if (cmd.hasOption('s'))
    {
        skipReverseDNS = true;
    }

    // Whether or not to convert handles to handles used in a local system
    // (useful if using someone else's log file for testing)
    boolean local = cmd.hasOption('l');

    // We got all our parameters now get the rest
    Context context = new Context();

    // Verbose option
    boolean verbose = cmd.hasOption('v');

    // Find our solr server
    String sserver = ConfigurationManager.getProperty("solr.log.server");
    if (verbose)
    {
        System.out.println("Writing to solr server at: " + sserver);
    }
    solr = new CommonsHttpSolrServer(sserver);

    metadataStorageInfo = SolrLogger.getMetadataStorageInfo();
    String dbfile = ConfigurationManager.getProperty("solr.dbfile");
    geoipLookup = new LookupService(dbfile, LookupService.GEOIP_STANDARD);

    StatisticsImporter importer = new StatisticsImporter(local);
    if (cmd.hasOption('m'))
    {
        // Wildcard mode: process every file in the same directory whose name
        // starts with the given file's name
        final File sample = new File(cmd.getOptionValue('i'));
        File dir = sample.getParentFile();
        FilenameFilter filter = new FilenameFilter()
        {
            public boolean accept(File dir, String name)
            {
                return name.startsWith(sample.getName());
            }
        };
        for (String in : dir.list(filter))
        {
            System.out.println(in);
            importer.load(dir.getAbsolutePath() + System.getProperty("file.separator") + in, context, verbose);
        }
    }
    else
    {
        // Just convert the one file
        importer.load(cmd.getOptionValue('i'), context, verbose);
    }
}
/**
 * Inner class to hold a cache of reverse lookups of IP addresses.
 * Backed by an access-ordered LinkedHashMap, so the least recently used
 * entry is evicted once the cache fills up (an LRU cache).
 *
 * @param <K> key type (IP address strings in practice)
 * @param <V> value type (resolved host names in practice)
 */
static class DNSCache<K,V> extends LinkedHashMap<K,V>
{
    // Eviction threshold. NOTE(review): eviction fires when size() >=
    // maxCapacity, i.e. one entry earlier than the conventional
    // size() > maxCapacity idiom — presumably intentional; confirm.
    private int maxCapacity;

    public DNSCache(int initialCapacity, float loadFactor, int maxCapacity)
    {
        // 'true' selects access order, which is what makes this an LRU cache
        super(initialCapacity, loadFactor, true);
        this.maxCapacity = maxCapacity;
    }

    @Override
    protected boolean removeEldestEntry(java.util.Map.Entry<K,V> eldest)
    {
        return size() >= this.maxCapacity;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.util;
import org.dspace.core.ConfigurationManager;
import org.xbill.DNS.*;
import java.io.IOException;
/**
* XBill DNS resolver to retrieve hostnames for client IP addresses.
*
* @author kevinvandevelde at atmire.com
* @author ben at atmire.com
*/
public class DnsLookup {

    /**
     * Resolves an IP address to a host name via a reverse (PTR) DNS query.
     * When the lookup yields no answer, the original IP address string is
     * returned unchanged.
     *
     * @param hostIp the client IP address to resolve
     * @return the PTR record data, or hostIp when there is no answer
     * @throws IOException if the DNS query cannot be sent
     */
    public static String reverseDns(String hostIp) throws IOException {
        Resolver resolver = new ExtendedResolver();

        // Resolver timeout in milliseconds; configurable, defaults to 200 ms
        int timeout = ConfigurationManager.getIntProperty("solr.resolver.timeout", 200);
        resolver.setTimeout(0, timeout);

        // Build a PTR query for the in-addr.arpa name of the address
        Name name = ReverseMap.fromAddress(hostIp);
        Record question = Record.newRecord(name, Type.PTR, DClass.IN);
        Message response = resolver.send(Message.newQuery(question));

        Record[] answers = response.getSectionArray(Section.ANSWER);
        return answers.length == 0 ? hostIp : answers[0].rdataToString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.util.Properties;
import java.sql.SQLException;
import org.dspace.app.util.Util;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.search.HarvestedItemInfo;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* An OAICat Crosswalk implementation that extracts
* DSpace items into typed RDF format.
*
* @author Richard Rodgers
* @version $Revision: 5845 $
*/
public class RDFCrosswalk extends Crosswalk
{
// base URL for thumbnails
private String baseUrl = null;
// hostname for rdf URI
private String hostName = null;
public RDFCrosswalk(Properties properties)
{
    super(
        "http://www.openarchives.org/OAI/2.0/rdf/ http://www.openarchives.org/OAI/2.0/rdf.xsd");

    // Cache the configuration values used when building the RDF output
    hostName = ConfigurationManager.getProperty("dspace.hostname");
    baseUrl = ConfigurationManager.getProperty("dspace.url");
}
public boolean isAvailableFor(Object nativeItem)
{
    // So far this crosswalk is only implemented for harvested item records
    return nativeItem instanceof HarvestedItemInfo;
}
public String createMetadata(Object nativeItem)
throws CannotDisseminateFormatException
{
HarvestedItemInfo itemInfo = (HarvestedItemInfo)nativeItem;
Item item = itemInfo.item;
// Get all the DC
DCValue[] allDC = item.getDC(Item.ANY, Item.ANY, Item.ANY);
StringBuffer metadata = new StringBuffer();
/*
metadata
.append(
"<oai_dc:dc xmlns:oai_dc=\"http://www.openarchives.org/OAI/2.0/oai_dc/\" ")
.append("xmlns:dc=\"http://purl.org/dc/elements/1.1/\" ")
.append(
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" ")
.append(
"xsi:schemaLocation=\"http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd\">");
*/
metadata
.append(
"<rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" " )
.append("xmlns:ow=\"http://www.ontoweb.org/ontology/1#\" " )
.append("xmlns:dc=\"http://purl.org/dc/elements/1.1/\" " )
.append("xmlns:ds=\"http://dspace.org/ds/elements/1.1/\" " )
.append(
"xsi:schemaLocation=\"http://www.w3.org/1999/02/22-rdf-syntax-ns# http://www.openarchives.org/OAI/2.0/rdf.xsd\">");
// construct URI for item
metadata.append("<ow:Publication rdf:about=\"oai:")
.append(hostName)
.append(":")
.append(item.getHandle())
.append("\">");
for (int i = 0; i < allDC.length; i++)
{
if (screened(allDC[i]))
{
String element = allDC[i].element;
// contributor.author exposed as 'creator'
if (allDC[i].element.equals("contributor")
&& (allDC[i].qualifier != null)
&& allDC[i].qualifier.equals("author"))
{
element = "creator";
}
// Escape XML chars <, > and &
String value = allDC[i].value;
// Check for null values
if (value == null)
{
value = "";
}
// First do &'s - need to be careful not to replace the
// & in "&" again!
int c = -1;
while ((c = value.indexOf("&", c + 1)) > -1)
{
value = value.substring(0, c) + "&"
+ value.substring(c + 1);
}
while ((c = value.indexOf("<")) > -1)
{
value = value.substring(0, c) + "<"
+ value.substring(c + 1);
}
while ((c = value.indexOf(">")) > -1)
{
value = value.substring(0, c) + ">"
+ value.substring(c + 1);
}
metadata.append("<dc:").append(element).append(">")
.append(value)
.append("</dc:").append(element).append(">");
}
}
// add extended info - collection, communities, and thumbnail URLs
Collection[] colls = null;
Community[] comms = null;
Bundle[] origBundles = null;
Bundle[] thumbBundles = null;
try
{
colls = item.getCollections();
comms = item.getCommunities();
origBundles = item.getBundles("ORIGINAL");
thumbBundles = item.getBundles("THUMBNAIL");
}
catch(SQLException sqlE)
{
}
// all parent communities map to DC source
for (int i = 0; i < comms.length; i++)
{
metadata.append("<dc:source>")
.append(comms[i].getMetadata("name"))
.append("</dc:source>");
}
// as do collections
for (int j = 0; j < colls.length; j++)
{
metadata.append("<dc:source>")
.append(colls[j].getMetadata("name"))
.append("</dc:source>");
}
if (origBundles.length > 0)
{
Bitstream[] bitstreams = origBundles[0].getBitstreams();
// add a URL for each original that has a thumbnail
for (int j = 0; j < bitstreams.length; j++)
{
String tName = bitstreams[j].getName() + ".jpg";
Bitstream tb = null;
if (thumbBundles.length > 0)
{
tb = thumbBundles[0].getBitstreamByName(tName);
}
if (tb != null)
{
String thumbUrl = null;
try
{
thumbUrl = baseUrl + "/retrieve/" + tb.getID() + "/" +
Util.encodeBitstreamName(tb.getName(),
Constants.DEFAULT_ENCODING);
}
catch(Exception e)
{
}
metadata.append("<dc:coverage>")
.append(thumbUrl)
.append("</dc:coverage>");
}
}
}
//metadata.append("</oai_ds:ds>");
metadata.append("</ow:Publication>");
metadata.append("</rdf:RDF>");
return metadata.toString();
}
/*
* Exclude Item DC elements unsuitable for harvest
*/
private boolean screened(DCValue dcValue)
{
// description.providence
if (isQualified(dcValue, "description", "provenance"))
{
return false;
}
// format.extent
if (isQualified(dcValue, "format", "extent"))
{
return false;
}
// date.available is algorithmically identical to date.accessioned
// suppress one
if (isQualified(dcValue, "date", "accessioned"))
{
return false;
}
return true;
}
private boolean isQualified(DCValue dcValue, String elName, String qualName)
{
return (dcValue.element.equals(elName) &&
dcValue.qualifier != null &&
dcValue.qualifier.equals(qualName));
}
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Properties;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.app.didl.UUIDFactory;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.search.HarvestedItemInfo;
import org.dspace.storage.bitstore.BitstreamStorageManager;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
import ORG.oclc.oai.server.verb.ServerVerb;
/**
* DSpace Item DIDL crosswalk.
*
* Development of this code was part of the aDORe repository project
* by the Research Library of the Los Alamos National Laboratory.
*
* @author Henry Jerez
* @author Los Alamos National Laboratory
*/
public class DIDLCrosswalk extends Crosswalk
{
    private static final Logger log = Logger.getLogger(DIDLCrosswalk.class);

    /** default value if no oai.didl.maxresponse property is defined */
    public static final int MAXRESPONSE_INLINE_BITSTREAM = 0;

    /** another crosswalk that will be used to generate the metadata section */
    private Crosswalk metadataCrosswalk;

    public DIDLCrosswalk(Properties properties)
    {
        super("urn:mpeg:mpeg21:2002:02-DIDL-NS http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-21_schema_files/did/didl.xsd ");

        // FIXME this should be injected from the configuration...
        // but it is better than duplicate the OAIDCCrosswalk code!
        metadataCrosswalk = new OAIDCCrosswalk(properties);
    }

    /**
     * @param nativeItem the object to be disseminated
     * @return always true - we have DC for everything
     */
    public boolean isAvailableFor(Object nativeItem)
    {
        return true;
    }

    /**
     * Build the DIDL record for an item: a DIDLInfo header, the item's
     * identifier and metadata (delegated to OAIDCCrosswalk), then one
     * didl:Component per non-internal bitstream in the ORIGINAL bundles.
     * Bitstreams up to "oai.didl.maxresponse" bytes are inlined base64;
     * larger ones are included by reference URL.
     *
     * @param nativeItem a HarvestedItemInfo wrapping the item
     * @return the serialized didl:DIDL document
     */
    public String createMetadata(Object nativeItem)
        throws CannotDisseminateFormatException
    {
        Item item = ((HarvestedItemInfo) nativeItem).item;
        StringBuffer metadata = new StringBuffer();
        String itemhandle = item.getHandle();

        // size threshold for inlining bitstream content (bytes)
        String strMaxSize = ConfigurationManager.getProperty("oai.didl.maxresponse");
        int maxsize = MAXRESPONSE_INLINE_BITSTREAM;
        if (strMaxSize != null)
        {
            maxsize = Integer.parseInt(strMaxSize);
        }

        String currdate = ServerVerb.createResponseDate(new Date());

        metadata.append("<didl:DIDL ")
                .append(" xmlns:didl=\"urn:mpeg:mpeg21:2002:02-DIDL-NS\"  ")
                .append(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" ")
                .append("xsi:schemaLocation=\"urn:mpeg:mpeg21:2002:02-DIDL-NS http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-21_schema_files/did/didl.xsd \">")
                .append("<didl:DIDLInfo>")
                .append("<dcterms:created xmlns:dcterms=\"http://purl.org/dc/terms/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://purl.org/dc/terms/ \">")
                .append(currdate)
                .append("</dcterms:created> </didl:DIDLInfo>")
                .append("<didl:Item id=\"")
                .append("uuid-" + UUIDFactory.generateUUID().toString() + "\">");

        // item identifier (the Handle) as a DII Identifier
        metadata.append("<didl:Descriptor>")
                .append("<didl:Statement mimeType=\"application/xml; charset=utf-8\">")
                .append("<dii:Identifier xmlns:dii=\"urn:mpeg:mpeg21:2002:01-DII-NS\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"urn:mpeg:mpeg21:2002:01-DII-NS http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-21_schema_files/dii/dii.xsd\">").append("urn:hdl:").append(itemhandle)
                .append("</dii:Identifier>")
                .append("</didl:Statement>")
                .append("</didl:Descriptor>");

        metadata.append("<didl:Descriptor>")
                .append("<didl:Statement mimeType=\"application/xml; charset=utf-8\">");

        // delegate the metadata section to another crosswalk
        metadata.append(metadataCrosswalk.createMetadata(nativeItem));
        metadata
                .append("</didl:Statement>")
                .append("</didl:Descriptor>");

        // one didl:Component per non-internal bitstream in the ORIGINAL bundles
        try
        {
            Bundle[] bundles = item.getBundles("ORIGINAL");

            if (bundles.length != 0)
            {
                /* cycle bundles */
                for (int i = 0; i < bundles.length; i++)
                {
                    Bitstream[] bitstreams = bundles[i].getBitstreams();

                    /* cycle bitstreams */
                    for (int k = 0; k < bitstreams.length; k++)
                    {
                        // Skip internal types
                        if (!bitstreams[k].getFormat().isInternal())
                        {
                            metadata.append("<didl:Component id=" + "\"uuid-" + UUIDFactory.generateUUID().toString() + "\">");

                            if (bitstreams[k].getSize() > maxsize)
                            {
                                // too big to inline: include by reference
                                metadata.append("<didl:Resource ref=\"" + ConfigurationManager.getProperty("dspace.url") + "/bitstream/" + itemhandle + "/" + bitstreams[k].getSequenceID() + "/" + bitstreams[k].getName());
                                metadata.append("\" mimeType=\"");
                                metadata.append(bitstreams[k].getFormat().getMIMEType());
                                metadata.append("\">");
                                metadata.append("</didl:Resource>");
                            }
                            else
                            {
                                try
                                {
                                    metadata.append("<didl:Resource mimeType=\"");
                                    metadata.append(bitstreams[k].getFormat().getMIMEType());
                                    metadata.append("\" encoding=\"base64\">");

                                    /*
                                     * Assume that size of in-line bitstreams will always be
                                     * smaller than MAXINT bytes
                                     */
                                    int intSize = (int) bitstreams[k].getSize();

                                    byte[] buffer = new byte[intSize];

                                    // NOTE(review): if retrieve/read throws, this
                                    // Context is never completed - confirm whether
                                    // that leaks a DB connection.
                                    Context contextl = new Context();
                                    InputStream is = BitstreamStorageManager.retrieve(contextl, bitstreams[k].getID());
                                    BufferedInputStream bis = new BufferedInputStream(is);
                                    try
                                    {
                                        // Fill the whole buffer: a single read()
                                        // may return fewer bytes than requested,
                                        // which previously truncated the inlined
                                        // content (remainder was left as zeros).
                                        int off = 0;
                                        while (off < intSize)
                                        {
                                            int read = bis.read(buffer, off, intSize - off);
                                            if (read == -1)
                                            {
                                                break;
                                            }
                                            off += read;
                                        }
                                    }
                                    finally
                                    {
                                        if (bis != null)
                                        {
                                            try
                                            {
                                                bis.close();
                                            }
                                            catch (IOException ioe)
                                            {
                                                // best-effort close
                                            }
                                        }

                                        if (is != null)
                                        {
                                            try
                                            {
                                                is.close();
                                            }
                                            catch (IOException ioe)
                                            {
                                                // best-effort close
                                            }
                                        }
                                    }
                                    contextl.complete();

                                    String encoding = new String(Base64.encodeBase64(buffer), "ASCII");
                                    metadata.append(encoding);
                                }
                                catch (Exception ex)
                                {
                                    // inlining failed: fall back to a reference URL
                                    log.error("Error creating resource didl", ex);
                                    metadata.append("<didl:Resource ref=\"")
                                            .append(ConfigurationManager.getProperty("dspace.url"))
                                            .append("/bitstream/")
                                            .append(itemhandle).append("/").append(bitstreams[k].getSequenceID())
                                            .append("/").append(bitstreams[k].getName());
                                    metadata.append("\" mimeType=\"");
                                    metadata.append(bitstreams[k].getFormat().getMIMEType());
                                    metadata.append("\">");
                                }
                                metadata.append("</didl:Resource>");
                            }
                            metadata.append("</didl:Component>");
                        }
                        /* end bitstream cycle */
                    }
                    /* end bundle cycle */
                }
            }
        }
        catch (SQLException sqle)
        {
            System.err.println("Caught exception:" + sqle.getCause());
            log.error("Database error", sqle);
        }

        metadata.append("</didl:Item>")
                .append("</didl:DIDL>");

        return metadata.toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.dspace.content.DCDate;
import org.dspace.search.HarvestedItemInfo;
import ORG.oclc.oai.server.catalog.RecordFactory;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* Implementation of the OAICat RecordFactory base class for DSpace items.
*
* @author Robert Tansley
* @version $Revision: 5845 $
*/
public class DSpaceRecordFactory extends RecordFactory
{
    /**
     * Construct the record factory. The OAICat properties are handed straight
     * to the superclass and not used here.
     */
    public DSpaceRecordFactory(Properties properties)
    {
        super(properties);
    }

    public String fromOAIIdentifier(String identifier)
    {
        // The local identifier and the OAI identifier are one and the same
        // (the Handle), so no translation is needed in either direction.
        return identifier;
    }

    public String quickCreate(Object nativeItem, String schemaURL,
            String metadataPrefix) throws IllegalArgumentException,
            CannotDisseminateFormatException
    {
        // Quick record creation is not supported
        return null;
    }

    public String getOAIIdentifier(Object nativeItem)
    {
        // Prefix the item's Handle with the repository's OAI id prefix
        HarvestedItemInfo info = (HarvestedItemInfo) nativeItem;
        return DSpaceOAICatalog.OAI_ID_PREFIX + info.handle;
    }

    public String getDatestamp(Object nativeItem)
    {
        // Render the harvest datestamp in ISO8601 via DCDate
        Date stamp = ((HarvestedItemInfo) nativeItem).datestamp;
        return new DCDate(stamp).toString();
    }

    public Iterator getSetSpecs(Object nativeItem)
    {
        HarvestedItemInfo info = (HarvestedItemInfo) nativeItem;
        List<String> specs = new LinkedList<String>();

        // Each owning collection's DB Handle "123.456/789" becomes the
        // OAI-friendly set spec "hdl_123.456_789"
        for (Iterator<String> handles = info.collectionHandles.iterator();
                handles.hasNext();)
        {
            String spec = "hdl_" + handles.next();
            specs.add(spec.replace('/', '_'));
        }

        return specs.iterator();
    }

    public boolean isDeleted(Object nativeItem)
    {
        // Withdrawn items are reported as deleted records
        return ((HarvestedItemInfo) nativeItem).withdrawn;
    }

    public Iterator getAbouts(Object nativeItem)
    {
        // Nothing in the about section for now
        return new LinkedList().iterator();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.app.util.MetadataExposure;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.IConverter;
import org.dspace.search.HarvestedItemInfo;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.PluginManager;
import org.dspace.core.LogManager;
import org.apache.log4j.Logger;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* OAI_DC Crosswalk implementation based on oaidc.properties file. All metadata
* included in the oaidc.properties file will be mapped on a valid oai_dc
* element, invalid oai_dc element will be not used. It is possible specify for
* any metadata a converter {@link org.dspace.content.crosswalk.IConverter}
* to manipulate the metadata value before that it will be dissemite in OAI_DC.
*
* @author Robert Tansley
* @author Andrea Bollini
* @version $Revision: 5845 $
*/
public class OAIDCCrosswalk extends Crosswalk
{
    // Pattern containing all the characters we want to filter out / replace
    // converting a String to xml
    private static final Pattern invalidXmlPattern = Pattern
            .compile("([^\\t\\n\\r\\u0020-\\ud7ff\\ue000-\\ufffd\\u10000-\\u10ffff]+|[&<>])");

    // Pattern to extract the converter name, if any, from a configured
    // metadata entry such as "dc.title(myConverter)"
    private static final Pattern converterPattern = Pattern.compile(".*\\((.*)\\)");

    // The valid unqualified oai_dc element names; configured metadata mapped
    // to any other element is ignored
    private static final String[] oaidcElement = new String[] { "title",
            "creator", "subject", "description", "publisher", "contributor",
            "date", "type", "format", "identifier", "source", "language",
            "relation", "coverage", "rights" };

    /** Location of config file */
    private static final String configFilePath = ConfigurationManager
            .getProperty("dspace.dir")
            + File.separator
            + "config"
            + File.separator
            + "crosswalks"
            + File.separator + "oaidc.properties";

    /** log4j logger */
    private static Logger log = Logger.getLogger(OAIDCCrosswalk.class);

    // Maps an oai_dc element name to the set of DSpace metadata field entries
    // (optionally carrying a "(converter)" suffix) exposed under it
    private static final Map<String, Set<String>> config = new HashMap<String, Set<String>>();

    static
    {
        // Read in configuration
        Properties crosswalkProps = new Properties();
        FileInputStream fis = null;
        try
        {
            fis = new FileInputStream(configFilePath);
            crosswalkProps.load(fis);
        }
        catch (IOException e)
        {
            throw new IllegalArgumentException(
                    "Wrong configuration for OAI_DC", e);
        }
        finally
        {
            if (fis != null)
            {
                try
                {
                    fis.close();
                }
                catch (IOException ioe)
                {
                    log.error(ioe);
                }
            }
        }

        Set<Object> keySet = crosswalkProps.keySet();
        if (keySet != null)
        {
            // invert the properties: key = DSpace metadata field,
            // value = oai_dc element it should be exposed as
            for (Object key : keySet)
            {
                String oaielement = crosswalkProps.getProperty((String) key);
                if (oaielement != null && !oaielement.trim().equals(""))
                {
                    Set<String> tmp = config.get(oaielement);
                    if (tmp == null)
                    {
                        tmp = new HashSet<String>();
                    }
                    tmp.add((String) key);
                    config.put(oaielement, tmp);
                }
            }
        }
        else
        {
            throw new IllegalArgumentException(
                    "Configurazione errata per l'uscita OAI_DC");
        }
    }

    public OAIDCCrosswalk(Properties properties)
    {
        super("http://www.openarchives.org/OAI/2.0/oai_dc/ "
                + "http://www.openarchives.org/OAI/2.0/oai_dc.xsd");
    }

    /**
     * @param nativeItem the object to be disseminated
     * @return always true - we have DC for everything
     */
    public boolean isAvailableFor(Object nativeItem)
    {
        return true;
    }

    /**
     * Build the oai_dc record for an item, mapping configured metadata fields
     * onto oai_dc elements, applying any configured converter plugin, hiding
     * fields configured as hidden, and sanitising values for XML.
     *
     * @param nativeItem a HarvestedItemInfo wrapping the item
     * @return the serialized oai_dc:dc document
     */
    public String createMetadata(Object nativeItem)
        throws CannotDisseminateFormatException
    {
        Item item = ((HarvestedItemInfo) nativeItem).item;

        StringBuffer metadata = new StringBuffer();

        metadata
                .append(
                        "<oai_dc:dc xmlns:oai_dc=\"http://www.openarchives.org/OAI/2.0/oai_dc/\" ")
                .append("xmlns:dc=\"http://purl.org/dc/elements/1.1/\" ")
                .append(
                        "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" ")
                .append(
                        "xsi:schemaLocation=\"http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd\">");

        for (String element : oaidcElement)
        {
            Set<String> itemMetadata = config.get(element);
            if (itemMetadata != null && itemMetadata.size() > 0)
            {
                for (String mdString : itemMetadata)
                {
                    // look for a "(converter)" suffix on the configured field
                    String converterName = null;
                    IConverter converter = null;

                    Matcher converterMatcher = converterPattern.matcher(mdString);
                    if (converterMatcher.matches())
                    {
                        converterName = converterMatcher.group(1);
                        converter = (IConverter) PluginManager.getNamedPlugin(
                                IConverter.class, converterName);
                        if (converter == null)
                        {
                            log.warn(LogManager.getHeader(null,
                                    "createMetadata",
                                    "no converter plugin found with name "
                                            + converterName + " for metadata "
                                            + mdString));
                        }
                    }

                    DCValue[] dcValues;
                    if (converterName != null)
                    {
                        // strip the "(converter)" suffix to get the field name
                        dcValues = item.getMetadata(mdString.replaceAll("\\("
                                + converterName + "\\)", ""));
                    }
                    else
                    {
                        dcValues = item.getMetadata(mdString);
                    }

                    try
                    {
                        for (DCValue dcValue : dcValues)
                        {
                            if (!MetadataExposure.isHidden(((HarvestedItemInfo) nativeItem).context,
                                    dcValue.schema, dcValue.element, dcValue.qualifier))
                            {
                                String value;
                                if (converter != null)
                                {
                                    value = converter.makeConversion(dcValue.value);
                                }
                                else
                                {
                                    value = dcValue.value;
                                }

                                // Entity-escape &, < and > and replace all
                                // other invalid XML characters with ' '
                                if (value != null)
                                {
                                    StringBuffer valueBuf = new StringBuffer(value
                                            .length());
                                    Matcher xmlMatcher = invalidXmlPattern
                                            .matcher(value.trim());
                                    while (xmlMatcher.find())
                                    {
                                        String group = xmlMatcher.group();

                                        // group is either a single &, < or >
                                        // (which must be entity-escaped for
                                        // xml) or a run of invalid characters
                                        // (replaced with a space). The old
                                        // code replaced each character with
                                        // itself, emitting malformed XML.
                                        if (group.equals("&"))
                                        {
                                            xmlMatcher.appendReplacement(valueBuf,
                                                    "&amp;");
                                        }
                                        else if (group.equals("<"))
                                        {
                                            xmlMatcher.appendReplacement(valueBuf,
                                                    "&lt;");
                                        }
                                        else if (group.equals(">"))
                                        {
                                            xmlMatcher.appendReplacement(valueBuf,
                                                    "&gt;");
                                        }
                                        else
                                        {
                                            xmlMatcher.appendReplacement(valueBuf, " ");
                                        }
                                    }

                                    // add bit of the string after the final match
                                    xmlMatcher.appendTail(valueBuf);

                                    metadata.append("<dc:").append(element).append(">")
                                            .append(valueBuf.toString())
                                            .append("</dc:").append(element)
                                            .append(">");
                                }
                            }
                        }
                    }
                    catch (SQLException e)
                    {
                        // Stack loss as exception does not support cause
                        throw new CannotDisseminateFormatException(e.toString());
                    }
                }
            }
        }
        metadata.append("</oai_dc:dc>");

        return metadata.toString();
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import javax.servlet.http.HttpServlet;
import org.dspace.core.ConfigurationManager;
/**
* Simple servlet to load in DSpace and log4j configurations. Should always be
* started up before other servlets (use <loadOnStartup>)
*
* This class holds code to be removed in the next version of the DSpace XMLUI,
* it is now managed by a Shared Context Listener inthe dspace-api project.
*
* It is deprecated, rather than removed to maintain backward compatibility for
* local DSpace 1.5.x customized overlays.
*
* TODO: Remove in trunk
*
* @deprecated Use Servlet Context Listener provided in dspace-api (remove in >
* 1.5.x)
*
* @author Robert Tansley
* @version $Revision: 5845 $
*/
public class LoadDSpaceOAIConfig extends HttpServlet
{
    /**
     * Load the DSpace configuration (and thereby log4j) unless another
     * component - normally the shared context listener in dspace-api -
     * has already done so.
     */
    public void init()
    {
        // Nothing to do if configuration is already in place
        if (ConfigurationManager.isConfigured())
        {
            return;
        }

        // Locate the config file via the servlet context and load it
        String config = getServletContext().getInitParameter("dspace-config");
        ConfigurationManager.loadConfig(config);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.sql.SQLException;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Utils;
import org.dspace.search.HarvestedItemInfo;
import org.dspace.content.*;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* A Crosswalk implementation that extracts qualified Dublin Core from
* DSpace items into the uketd_dc format.
*
* It supports the writing of UKETD_DC metadata
* in a METS document and to make the schema URIs available for
* inclusion in such a METS document. For this reason, the writing
* of the metadata itself has been separated from the writing
* of the schemas.
* This version places the writing of the header and metadata
* in its own method called by createMetadata so the headers are
* included in the UKETD_METS that also uses those methods.
* This allows the writeMetadata method to remain unchanged,
* with no header information included. It is therefore consistent with
* other DSpace crosswalks.
*
* @author Paul Needham (Cranfield University)
* @author Jon Bell & Stuart Lewis (Aberystwyth University)
*/
public class UKETDDCCrosswalk extends Crosswalk
{
// Pattern containing all the characters we want to filter out / replace
// converting a String to xml
private static final Pattern invalidXmlPattern =
Pattern.compile("([^\\t\\n\\r\\u0020-\\ud7ff\\ue000-\\ufffd\\u10000-\\u10ffff]+|[&<>])");
// String constants for metadata schemas...
/** Used to open the metadata in a OAI-PMH record. */
private String uketdIn = "<uketd_dc:uketddc";
/** The identifier for the uketd namespace. */
private String uketdNs = "uketd_dc";
/** The URI of the uketd namespace. */
private String uketdUri = "http://naca.central.cranfield.ac.uk/ethos-oai/2.0/";
/** The identifier for the namespace of the DC used in the UKETD_DC metadata set. */
private String dcNs = "dc";
/** The URI of the DC namespace. */
private String dcUri = "http://purl.org/dc/elements/1.1/";
/** The identifier for the namespace of the qualified DC terms used in UKETD_DC. */
private String dcTermsNs = "dcterms";
/** The URI of the DC terms namespace. */
private String dcTermsUri = "http://purl.org/dc/terms/";
/** Identifier of the UKETD terms namespace.*/
private String uketdTermsNs = "uketdterms";
/** The URI of the uketd terms namespace. */
private String uketdTermsUri = "http://naca.central.cranfield.ac.uk/ethos-oai/terms/";
/** The xsi string (identifier and URI) used for UKETD records.*/
private String xsi = "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"";
/** The xsi schema location tag, used in UKETD records. */
private String schemaLoc = "xsi:schemaLocation";
/** The URI of the uketd location namespace. */
private static String uketdSchemaLocNs = "http://naca.central.cranfield.ac.uk/ethos-oai/2.0/";
/** The URI of the uketd location. */
private static String uketdSchemaLocUri = "http://naca.central.cranfield.ac.uk/ethos-oai/2.0/uketd_dc.xsd";
/**
 * UKETDDCCrosswalk constructor. The schema-location string passed to the
 * OAICat superclass is built from the static UKETD schema constants.
 *
 * @param properties Not used
 */
public UKETDDCCrosswalk(Properties properties)
{
    super(uketdSchemaLocNs + " " + uketdSchemaLocUri);
}
/**
 * Returns the identifier for the UKETD namespace.
 *
 * @return uketdNs
 */
public String getUketdNs ()
{
    return uketdNs;
}

/**
 * Returns the URI of the UKETD namespace.
 *
 * @return uketdUri
 */
public String getUketdUri ()
{
    return uketdUri;
}

/**
 * Returns the identifier for the Dublin Core namespace.
 *
 * @return dcNs
 */
public String getDcNs ()
{
    return dcNs;
}

/**
 * Returns the URI of the Dublin Core namespace.
 *
 * @return dcUri
 */
public String getDcUri ()
{
    return dcUri;
}

/**
 * Returns the identifier for the DC terms (qualifiers) namespace.
 *
 * @return dcTermsNs
 */
public String getDcTermsNs ()
{
    return dcTermsNs;
}

/**
 * Returns the URI of the DC terms namespace.
 *
 * @return dcTermsUri
 */
public String getDcTermsUri ()
{
    return dcTermsUri;
}

/**
 * Returns the identifier for the UKETD terms namespace.
 *
 * @return uketdTermsNs
 */
public String getUketdTermsNs ()
{
    return uketdTermsNs;
}

/**
 * Returns the URI of the UKETD terms namespace.
 *
 * @return uketdTermsUri
 */
public String getUketdTermsUri ()
{
    return uketdTermsUri;
}

/**
 * Returns the identifier (namespace URI) for the UKETD schema location.
 *
 * @return uketdSchemaLocNs
 */
public String getUketdSchemaLocNs ()
{
    return uketdSchemaLocNs;
}

/**
 * Returns the URI of the UKETD schema location (the .xsd document).
 *
 * @return uketdSchemaLocUri
 */
public String getUketdSchemaLocUri ()
{
    return uketdSchemaLocUri;
}
/**
 * Shows what items UKETD_DC OAI-PMH is available for.
 * This is every item in the repository.
 *
 * @param nativeItem the object to be disseminated (unused)
 * @return a boolean (true)
 */
public boolean isAvailableFor(Object nativeItem)
{
    // We have DC for everything
    return true;
}
/**
 * Creates the complete OAI-PMH record for the UKETD_DC metadata prefix:
 * the item's metadata wrapped in the namespace declarations and schema
 * location.
 *
 * @param nativeItem a HarvestedItemInfo wrapping the item to disseminate
 * @return The OAI-PMH xml
 */
public String createMetadata (Object nativeItem)
    throws CannotDisseminateFormatException
{
    // Unwrap the DSpace item and emit the schema-headed record
    return writeMetadataWithSchema(((HarvestedItemInfo) nativeItem).item);
}
/**
 * Write the item's metadata, headed by the schema namespace details.
 * Separated from createMetadata so that UKETD_METS can call it with an
 * Item rather than a nativeItem Object.
 *
 * @param item The org.dspace.content.Item
 * @return a String, the item's metadata in UKETD_DC format.
 */
public String writeMetadataWithSchema (Item item)
{
    StringBuffer record = new StringBuffer();

    // Root element carrying every namespace declaration used by UKETD_DC,
    // plus the xsi schema location
    record.append(uketdIn).append(" ")
          .append("xmlns:").append(uketdNs).append("=\"").append(uketdUri).append("\" ")
          .append("xmlns:").append(dcNs).append("=\"").append(dcUri).append("\" ")
          .append("xmlns:").append(dcTermsNs).append("=\"").append(dcTermsUri).append("\" ")
          .append("xmlns:").append(uketdTermsNs).append("=\"").append(uketdTermsUri).append("\" ")
          .append(xsi).append(" ")
          .append(schemaLoc).append("=\"").append(uketdSchemaLocNs).append(" ")
          .append(uketdSchemaLocUri).append("\">\n");

    // Body: the item's converted metadata, then the closing tag
    record.append(writeMetadata(item));
    record.append("</uketd_dc:uketddc>\n");

    return record.toString();
}
/**
* Writes the UKETD_DC metadata for the specified item.
* It simply gets hold of the Dublin Core for an Item
* and converts it to UKEDT_DC, including the splitting
* of the Dublin Core publisher and type fields.
* The metadata is identical to that returned by
* the original version's create metadata method,
* without the schema information.
* This method does no checking of the correctness of the
* metadata format, nor does it throw any exception.
*
* @param item a org.dspace.content.Item
* @return a String, the item's metadata in UKETD_DC xml.
*/
public String writeMetadata(Item item)
{
// The string we are constructing
StringBuffer metadata = new StringBuffer();
// Get all the DC
DCValue[] allDC = item.getMetadata(MetadataSchema.DC_SCHEMA, Item.ANY, Item.ANY, Item.ANY);
// Get the handle of the item
String itemhandle = item.getHandle();
for (int i = 0; i < allDC.length; i++)
{
// Get the element, qualifier and value
String element = allDC[i].element;
String qualifier = allDC[i].qualifier;
String value = Utils.addEntities(allDC[i].value);
// title
if (allDC[i].element.equals("title"))
{
if (allDC[i].qualifier != null) {
if (allDC[i].qualifier.equals("alternative"))
{
// title.alternative exposed as 'dcterms:alternative'
this.makeDCTermsElement(qualifier, null, value, metadata);
}
} else
{
this.makeDCElement(element, null, value, metadata);
}
}
// contributor
if (allDC[i].element.equals("contributor"))
{
if (allDC[i].qualifier != null) {
if (allDC[i].qualifier.equals("author"))
{
this.makeDCElement("creator", null, value, metadata);
} else if ((allDC[i].qualifier.equals("advisor")) ||
(allDC[i].qualifier.equals("sponsor")))
{
// contributor.qualifier exposed as 'uketdterms:qualifier'
this.makeUKDCTermsElement(qualifier, null, value, metadata);
} else if (allDC[i].qualifier.equals("funder"))
{
// contributor.qualifier exposed as 'uketdterms:qualifier'
this.makeUKDCTermsElement("sponsor", null, value, metadata);
} else
{
// contributor.qualifier exposed as 'dcterms:qualifier'
this.makeDCTermsElement(qualifier, null, value, metadata);
}
} else {
this.makeDCElement(element, null, value, metadata);
}
}
// subject
if (allDC[i].element.equals("subject"))
{
if (allDC[i].qualifier != null) {
boolean ddc = allDC[i].qualifier.equals("ddc");
boolean lcc = allDC[i].qualifier.equals("lcc");
boolean lcsh = allDC[i].qualifier.equals("lcsh");
boolean mesh = allDC[i].qualifier.equals("mesh");
boolean udc = allDC[i].qualifier.equals("udc");
if (ddc || lcc || lcsh || mesh || udc)
{
// subject.qualifier exposed as 'dc:element xsi:type="dcterms:qualifier"'
qualifier = qualifier.toUpperCase();
this.makeDCElement(element, qualifier, value, metadata);
} else
{
this.makeDCElement(element, null, value, metadata);
}
} else
{
this.makeDCElement(element, null, value, metadata);
}
}
// description
if (allDC[i].element.equals("description"))
{
if (allDC[i].qualifier != null)
{
if (allDC[i].qualifier.equals("abstract"))
{
// e.g. description.abstract exposed as 'dcterms:abstract'
this.makeDCTermsElement(qualifier, null, value, metadata);
} else if (allDC[i].qualifier.equals("sponsorship"))
{
// description.sponsorship exposed as 'uketdterms:sponsor"'
this.makeUKDCTermsElement("sponsor", null, value, metadata);
}
} else {
this.makeDCElement(element, null, value, metadata);
}
}
// publisher
if (allDC[i].element.equals("publisher"))
{
if (allDC[i].qualifier != null) {
if ((allDC[i].qualifier.equals("department")) ||
(allDC[i].qualifier.equals("commercial")))
{
this.makeUKDCTermsElement(qualifier, null, value, metadata);
}
} else {
String[] pubParts = value.split("(?<!(&[0-9a-zA-Z#]{2,4}));");
this.makeUKDCTermsElement("institution", null,
pubParts[0], metadata);
StringBuffer dept = new StringBuffer();
if ((pubParts.length > 1) && (pubParts[1] != null)) {
dept.append(pubParts[1] + ";");
}
if ((pubParts.length > 2) && (pubParts[2] != null)) {
dept.append(" " + pubParts[2]);
}
if (dept.length() > 0) {
this.makeUKDCTermsElement("department", null,
dept.toString(), metadata);
}
}
}
// date
if (allDC[i].element.equals("date"))
{
if (allDC[i].qualifier != null)
{
if (allDC[i].qualifier.equals("issued"))
{
this.makeDCTermsElement(qualifier, null, value, metadata);
} else
{
this.makeDCElement(element, null, value, metadata);
}
} else
{
this.makeDCElement(element, null, value, metadata);
}
}
// type
if (allDC[i].element.equals("type"))
{
if (allDC[i].qualifier != null)
{
if ((allDC[i].qualifier.equals("qualificationlevel")) ||
(allDC[i].qualifier.equals("qualificationname")))
{
this.makeUKDCTermsElement(qualifier, null, value, metadata);
}
} else {
String[] Typepart = value.split("[;]");
this.makeDCElement(element, null, Typepart[0], metadata);
if ((Typepart.length > 1) && (Typepart[1] != null))
{
this.makeUKDCTermsElement("qualificationlevel", null,
Typepart[1], metadata);
} if ((Typepart.length > 2) && (Typepart[2] != null))
{
this.makeUKDCTermsElement("qualificationname", null,
Typepart[2], metadata);
}
}
}
// language
if (allDC[i].element.equals("language"))
{
if (allDC[i].qualifier != null) {
if (allDC[i].qualifier.equals("iso"))
{
// language.iso exposed as 'dc:element xsi:type="dcterms:qualifier"'
this.makeDCElement(element, "ISO639-2", value, metadata);
} else
{
this.makeDCElement(element, null, value, metadata);
}
} else
{
this.makeDCElement(element, null, value, metadata);
}
}
// relation
if (allDC[i].element.equals("relation"))
{
if (allDC[i].qualifier != null) {
if (allDC[i].qualifier.equals("hasversion"))
{
// relation.hasversion exposed as 'dcterms:qualifier'
this.makeDCElement("hasVersion", null, value, metadata);
} else if ((allDC[i].qualifier.equals("references")) ||
(allDC[i].qualifier.equals("requires")))
{
// relation.references exposed as 'dcterms:qualifier'
this.makeDCTermsElement(qualifier, null, value, metadata);
} else
{
this.makeDCElement(element, null, value, metadata);
}
} else
{
this.makeDCElement(element, null, value, metadata);
}
}
// format
if (allDC[i].element.equals("format"))
{
if (allDC[i].qualifier != null)
{
if (allDC[i].qualifier.equals("extent"))
{
// format exposed as 'dcterms:qualifier'
this.makeDCTermsElement(qualifier, null, value, metadata);
} else if (allDC[i].qualifier.equals("mimetype"))
{
this.makeDCElement(element, "IMT", value, metadata);
}
} else
{
// format exposed as 'dc:element'
this.makeDCElement(element, null, value, metadata);
}
}
// identifier
if (allDC[i].element.equals("identifier"))
{
if (allDC[i].qualifier != null)
{
if (allDC[i].qualifier.equals("uri"))
{
this.makeDCTermsElement("isReferencedBy", "URI", value, metadata);
} else if (allDC[i].qualifier.equals("citation"))
{
this.makeDCTermsElement("hasVersion", null, value, metadata);
} else if (allDC[i].qualifier.equals("grantnumber"))
{
this.makeUKDCTermsElement(qualifier, null, value, metadata);
}
} else
{
// identifier exposed as 'dc:element'
this.makeDCElement(element, null, value, metadata);
}
}
// rights
if (allDC[i].element.equals("rights"))
{
if (allDC[i].qualifier != null)
{
if ((allDC[i].qualifier.equals("embargodate")) ||
(allDC[i].qualifier.equals("embargoreason")))
{
this.makeUKDCTermsElement(qualifier, null, value, metadata);
} else
{
// rights exposed as 'dc:element'
this.makeDCElement(element, null, value, metadata);
}
} else
{
// rights exposed as 'dc:element'
this.makeDCElement(element, null, value, metadata);
}
}
}
// Generate bitstream URIs
Bundle[] bundles = {};
try
{
bundles = item.getBundles("ORIGINAL");
String url;
if (bundles.length > 0)
{
// Itterate through each bundle
for (int i = 0; i < bundles.length; i++)
{
// Itterate through each bitstream
Bitstream[] bitstreams = bundles[i].getBitstreams();
for (int k = 0; k < bitstreams.length ; k++)
{
// Skip internal types
if (!bitstreams[k].getFormat().isInternal())
{
url = ConfigurationManager.getProperty("dspace.url") +
"/bitstream/" + itemhandle + "/" +
bitstreams[k].getSequenceID() + "/" +
bitstreams[k].getName();
this.makeDCElement("identifier", "URI", url, metadata);
this.makeUKDCTermsElement("checksum",
bitstreams[k].getChecksumAlgorithm(),
bitstreams[k].getChecksum(), metadata);
}
}
}
}
} catch (SQLException sqle)
{
// Nothing we can do
}
// Return the metadata - all done!
return metadata.toString();
}
/**
 * Append a Dublin Core element (namespace prefix "dc") to the buffer.
 * Qualified values are typed against the "dcterms" schema via xsi:type.
 *
 * @param element   the element name
 * @param qualifier the qualifier name, or null for an unqualified element
 * @param value     the value of the element (may be null)
 * @param buffer    the buffer the element is appended to
 * @return the same buffer, with the new element appended
 */
private StringBuffer makeDCElement(String element, String qualifier,
                                   String value, StringBuffer buffer)
{
    return makeTermsElement(element, qualifier, value, buffer, "dc", "dcterms");
}
/**
 * Append a DCterms element (namespace prefix "dcterms") to the buffer.
 * Qualified values are also typed against the "dcterms" schema.
 *
 * @param element   the element name
 * @param qualifier the qualifier name, or null for an unqualified element
 * @param value     the value of the element (may be null)
 * @param buffer    the buffer the element is appended to
 * @return the same buffer, with the new element appended
 */
private StringBuffer makeDCTermsElement(String element, String qualifier,
                                        String value, StringBuffer buffer)
{
    return makeTermsElement(element, qualifier, value, buffer,
                            "dcterms", "dcterms");
}
/**
 * Append a UKETD DC element (namespace prefix "uketdterms") to the buffer.
 * Qualified values are typed against the "uketdterms" schema.
 *
 * @param element   the element name
 * @param qualifier the qualifier name, or null for an unqualified element
 * @param value     the value of the element (may be null)
 * @param buffer    the buffer the element is appended to
 * @return the same buffer, with the new element appended
 */
private StringBuffer makeUKDCTermsElement(String element, String qualifier,
                                          String value, StringBuffer buffer)
{
    return makeTermsElement(element, qualifier, value, buffer,
                            "uketdterms", "uketdterms");
}
/**
 * Append a metadata element to the buffer, escaping the value for XML.
 *
 * The value is trimmed and scanned with invalidXmlPattern: the XML
 * metacharacters &, &lt; and &gt; are converted to character entities,
 * and any other matched (invalid) character is replaced with a space.
 * A null value produces an empty element.
 *
 * Fix: the previous replacement strings were the literal characters
 * themselves ("&" for "&", etc.), so the advertised escaping was a no-op
 * and produced malformed XML for values containing metacharacters.
 *
 * @param element   the element name
 * @param qualifier the qualifier name (or null for no xsi:type attribute)
 * @param value     the value of the element (may be null)
 * @param buffer    the buffer to add the element to
 * @param namespace the namespace prefix used for the element tag
 * @param terms     the namespace prefix used in the xsi:type attribute
 * @return the buffer with the new element appended
 */
private StringBuffer makeTermsElement(String element, String qualifier,
                                      String value, StringBuffer buffer,
                                      String namespace, String terms)
{
    // Escape XML chars <, > and &
    // Also replace all invalid characters with ' '
    if (value != null)
    {
        StringBuffer valueBuf = new StringBuffer(value.length());
        Matcher xmlMatcher = invalidXmlPattern.matcher(value.trim());
        while (xmlMatcher.find())
        {
            String group = xmlMatcher.group();

            // group will either contain a character that we need to encode
            // for xml (ie. <, > or &), or it will be an invalid character;
            // test the contents and replace appropriately
            if ("&".equals(group))
            {
                xmlMatcher.appendReplacement(valueBuf, "&amp;");
            }
            else if ("<".equals(group))
            {
                xmlMatcher.appendReplacement(valueBuf, "&lt;");
            }
            else if (">".equals(group))
            {
                xmlMatcher.appendReplacement(valueBuf, "&gt;");
            }
            else
            {
                xmlMatcher.appendReplacement(valueBuf, " ");
            }
        }

        // add the bit of the string after the final match
        xmlMatcher.appendTail(valueBuf);

        if (qualifier == null)
        {
            buffer.append("<").append(namespace).append(":").append(element)
                  .append(">").append(valueBuf.toString())
                  .append("</").append(namespace).append(":").append(element)
                  .append(">\n");
        }
        else
        {
            buffer.append("<").append(namespace).append(":").append(element)
                  .append(" xsi:type=\"").append(terms).append(":")
                  .append(qualifier).append("\">").append(valueBuf.toString())
                  .append("</").append(namespace).append(":").append(element)
                  .append(">\n");
        }
    }
    else
    {
        buffer.append("<").append(namespace).append(":").append(element).append(" />\n");
    }

    // Return the updated buffer
    return buffer;
}
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.DisseminationCrosswalk;
import org.dspace.core.PluginManager;
import org.dspace.search.HarvestedItemInfo;
import org.jdom.output.XMLOutputter;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* An OAICat Crosswalk implementation that calls, in turn, on
* <code>DisseminationCrosswalk</code> plugins.
* It is configured so its "OAI schema label" matches the name of a
* <code>DisseminationCrosswalk</code> plugin. This class
* will then recognize its name and invoke the correct crosswalk
* to produce the results it sends out.
* <p>
* <b>Configuration:</b>
* In the OAICat configuration file (e.g. <code>oaicat.properties</code>
* add a line like this for each plugin crosswalk you wish to provide:
* <pre>
* Crosswalks.<cite>Plugin-Name</cite>=org.dspace.app.oai.PluginCrosswalk
*
* e.g.
*
* Crosswalks.DC=org.dspace.app.oai.PluginCrosswalk
* </pre>
* This creates an OAI metadata prefix "DC" which is implemented
* by the dissemination crosswalk plugin that answers to the name "DC".
* It, in turn, could be found in the DSpace configuration in a line like:
* <pre>
* plugin.named.org.dspace.content.crosswalk.DisseminationCrosswalk = \
* org.dspace.content.crosswalk.SimpleDCDisseminationCrosswalk = DC
* </pre>
*
* <p>
* Note that all OAI crosswalks are instances of this same class, since
* the instance gets bound to a specific <code>DisseminationCrosswalk</code>
* when it is created.
* <p>
* WARNING: This requires at the OAICAT java library version 1.5.38.
* It does NOT work with some older versions.
*
* @author Larry Stone
* @version $Revision: 5845 $
*/
public class PluginCrosswalk extends Crosswalk
{
    /** log4j category */
    private static Logger log = Logger.getLogger(PluginCrosswalk.class);

    /**
     * The DisseminationCrosswalk plugin this OAI crosswalk delegates to;
     * null when no plugin answers to the configured schema label.
     */
    private DisseminationCrosswalk xwalk = null;

    /** Schema label from the config property, preserved for diagnostics. */
    private String schemaLabel = null;

    /** Shared serializer; compact ("ugly") output is all OAI needs. */
    private static XMLOutputter outputUgly = new XMLOutputter();

    /**
     * Prepare a "schema location" string for the oaicat Crosswalk
     * class's initialization. This is a string consisting of the
     * namespace URI, a space, and the schema URL, for the XML
     * element to be included in the OAI report. This is not documented
     * in oaicat's manuals so we mention it here.
     *
     * Since this gets called by the constructor (via super(...)), we can't
     * initialize the xwalk field here, so the plugin instance it looks up
     * gets thrown away and fetched again by the constructor.
     *
     * @param schemaLabel name of the DisseminationCrosswalk plugin
     * @return "{namespace-URI} {schema-URL}", or an error marker string when
     *         the plugin is missing or provides no schema location
     */
    private static String makeSchemaLocation(String schemaLabel)
    {
        DisseminationCrosswalk xwalk = (DisseminationCrosswalk)
            PluginManager.getNamedPlugin(DisseminationCrosswalk.class,
                                         schemaLabel);
        if (xwalk != null)
        {
            String schemaLoc = xwalk.getSchemaLocation();

            // initialize the oaicat Crosswalk with a "schemalocation" string,
            // which is "{namespace-URI} {schema-URL}" (space separated)
            if (schemaLoc != null)
            {
                log.debug("Initialized schemaLabel="+schemaLabel+" with schemaLocation = \""+schemaLoc+"\"");
                return schemaLoc;
            }
            log.error("makeSchemaLocation: crosswalk cannot provide schemaLocation, label="+schemaLabel);
            return "Error No-schemaLocation-for-"+schemaLabel;
        }
        log.error("No crosswalk found, makeSchemaLocation giving up, label="+schemaLabel);
        return "Error No-crosswalk-for-"+schemaLabel;
    }

    /**
     * Constructor; called by
     * ORG.oclc.oai.server.crosswalk.Crosswalks, which tries first with
     * args (String schemaLabel, Properties properties). This is
     * convenient since it lets us use that label to initialize this
     * instance of the plugin with the DisseminationCrosswalk crosswalk
     * corresponding to that schemaLabel, instead of creating a subclass
     * for each one.
     * <p>
     * WARNING: This requires at the OAICAT java library version 1.5.37.
     * It does NOT work with some older versions.
     */
    public PluginCrosswalk(String schemaLabel, Properties properties)
    {
        super(makeSchemaLocation(schemaLabel));

        xwalk = (DisseminationCrosswalk) PluginManager.getNamedPlugin(
                    DisseminationCrosswalk.class, schemaLabel);
        this.schemaLabel = schemaLabel;
    }

    /**
     * @return true if this dissemination is available for the item; false
     *         when no plugin was found for the configured label (previously
     *         this case raised a NullPointerException).
     */
    @Override
    public boolean isAvailableFor(Object nativeItem)
    {
        if (xwalk == null)
        {
            // Misconfigured label: makeSchemaLocation already logged it.
            return false;
        }
        Item item = ((HarvestedItemInfo) nativeItem).item;
        return xwalk.canDisseminate(item);
    }

    /**
     * Do the crosswalk. Returns serialized XML in a string.
     */
    @Override
    public String createMetadata(Object nativeItem)
        throws CannotDisseminateFormatException
    {
        // Guard against a missing plugin for this schema label
        if (xwalk == null)
        {
            throw new CannotDisseminateFormatException(schemaLabel);
        }

        Item item = ((HarvestedItemInfo) nativeItem).item;
        try
        {
            log.debug("OAI plugin, schema="+schemaLabel+", preferList="+String.valueOf(xwalk.preferList()));
            if (xwalk.preferList())
            {
                return outputUgly.outputString(xwalk.disseminateList(item));
            }
            else
            {
                return outputUgly.outputString(xwalk.disseminateElement(item));
            }
        }
        catch (Exception e)
        {
            // Log the message AND the full stack trace: the OAI exception
            // type cannot carry a cause, so the trace would otherwise be lost
            log.error(this.getClass().getName()+
                    ": hiding exception in CannotDisseminateFormatException:"+
                    e.toString(), e);
            throw new CannotDisseminateFormatException(schemaLabel);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.io.ByteArrayOutputStream;
import java.util.Properties;
import org.dspace.app.mets.METSExport;
import org.dspace.search.HarvestedItemInfo;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
/**
* OAICat crosswalk to allow METS to be harvested.
*
* No security or privacy measures in place.
*
* @author Li XiaoYu (Rita)
* @author Robert Tansley
*/
public class METSCrosswalk extends Crosswalk
{
public METSCrosswalk(Properties properties)
{
super(
"http://www.loc.gov/METS/ http://www.loc.gov/standards/mets/mets.xsd");
}
public boolean isAvailableFor(Object nativeItem)
{
// We have METS for everything
return true;
}
public String createMetadata(Object nativeItem)
throws CannotDisseminateFormatException
{
HarvestedItemInfo hii = (HarvestedItemInfo) nativeItem;
try
{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
METSExport.writeMETS(hii.context, hii.item, baos, true);
// FIXME: Nasty hack to remove <?xml...?> header that METS toolkit
// puts there. Hopefully the METS toolkit itself can be updated
// to fix this
String fullXML = baos.toString("UTF-8");
String head = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\n";
int pos = fullXML.indexOf(head);
if (pos != -1)
{
fullXML = fullXML.substring(pos + head.length());
}
return fullXML;
}
catch (Exception e)
{
e.printStackTrace();
return null;
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.oai;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.handle.HandleManager;
import org.dspace.search.Harvest;
import org.dspace.search.HarvestedItemInfo;
import org.dspace.eperson.Group;
import ORG.oclc.oai.server.catalog.AbstractCatalog;
import ORG.oclc.oai.server.verb.BadArgumentException;
import ORG.oclc.oai.server.verb.BadResumptionTokenException;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
import ORG.oclc.oai.server.verb.IdDoesNotExistException;
import ORG.oclc.oai.server.verb.NoItemsMatchException;
import ORG.oclc.oai.server.verb.NoMetadataFormatsException;
import ORG.oclc.oai.server.verb.NoSetHierarchyException;
import ORG.oclc.oai.server.verb.OAIInternalServerError;
/**
* This is class extends OAICat's AbstractCatalog base class to allow metadata
* harvesting of the metadata in DSpace via OAI-PMH 2.0.
*
* FIXME: Some CNRI Handle-specific stuff in here. Anyone wanting to use
* something else will need to update this code too. Sorry about that.
*
* @author Robert Tansley
* @version $Revision: 5845 $
*/
public class DSpaceOAICatalog extends AbstractCatalog
{
/** log4j logger */
private static Logger log = Logger.getLogger(DSpaceOAICatalog.class);
/**
 * Prefix that all our OAI identifiers have: "oai:" followed by the
 * configured dspace.hostname and ":". The remainder of a valid
 * identifier is the raw item handle.
 */
public static final String OAI_ID_PREFIX = "oai:" + ConfigurationManager.getProperty("dspace.hostname") + ":";
/**
 * Maximum number of records returned by one request; read from the
 * oai.response.max-records configuration property, defaulting to 100.
 */
private final int MAX_RECORDS = ConfigurationManager.getIntProperty("oai.response.max-records", 100);
/**
 * Constructor required by OAICat's AbstractCatalog contract; this
 * catalog takes all its settings from the DSpace configuration instead.
 *
 * @param properties OAICat configuration properties (unused)
 */
public DSpaceOAICatalog(Properties properties)
{
    // Don't need to do anything
}
/**
 * Retrieve a list of schemaLocation values associated with the specified
 * identifier.
 *
 * @param identifier
 *            the OAI identifier
 * @return a Vector containing schemaLocation Strings
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 * @exception IdDoesNotExistException
 *                the specified identifier can't be found
 * @exception NoMetadataFormatsException
 *                the specified identifier was found but the item is flagged
 *                as deleted and thus no schemaLocations (i.e.
 *                metadataFormats) can be produced.
 */
public Vector getSchemaLocations(String identifier)
        throws OAIInternalServerError, IdDoesNotExistException,
        NoMetadataFormatsException
{
    log.info(LogManager.getHeader(null, "oai_request",
            "verb=getSchemaLocations,identifier="
                    + ((identifier == null) ? "null" : identifier)));

    HarvestedItemInfo info = null;
    Context context = null;

    // Look the item up in the database
    try
    {
        context = new Context();

        // Valid identifiers all carry the "oai:hostname:" prefix; strip
        // it off to obtain the raw handle
        if (identifier != null && identifier.startsWith(OAI_ID_PREFIX))
        {
            String handle = identifier.substring(OAI_ID_PREFIX.length());
            info = Harvest.getSingle(context, handle, false);
        }
    }
    catch (SQLException se)
    {
        // Log the error
        log.warn(LogManager.getHeader(context, "database_error", ""), se);

        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(se.toString());
    }
    finally
    {
        if (context != null)
        {
            context.abort();
        }
    }

    if (info == null)
    {
        throw new IdDoesNotExistException(identifier);
    }
    if (info.withdrawn)
    {
        // Withdrawn items expose no metadata formats
        throw new NoMetadataFormatsException();
    }
    return getRecordFactory().getSchemaLocations(info);
}
/**
 * Retrieve a list of identifiers that satisfy the specified criteria
 *
 * @param from
 *            beginning date using the proper granularity
 * @param until
 *            ending date using the proper granularity
 * @param set
 *            the set name or null if no such limit is requested
 * @param metadataPrefix
 *            the OAI metadataPrefix or null if no such limit is requested
 * @return a Map object containing entries for "headers" and "identifiers"
 *         Iterators (both containing Strings) as well as an optional
 *         "resumptionMap" Map. Supplying both headers and identifiers lets
 *         AbstractCatalog.listRecords() avoid re-parsing identifiers out of
 *         the XML headers; see listRecords() below, which is overridden for
 *         the same efficiency reason.
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 * @exception NoSetHierarchyException
 *                the repository doesn't support sets.
 * @exception CannotDisseminateFormatException
 *                the metadata format specified is not supported by your
 *                repository.
 */
public Map listIdentifiers(String from, String until, String set,
        String metadataPrefix) throws OAIInternalServerError,
        NoSetHierarchyException, NoItemsMatchException,
        CannotDisseminateFormatException, BadArgumentException
{
    log.info(LogManager.getHeader(null, "oai_request",
            "verb=listIdentifiers,from="
                    + ((from == null) ? "null" : from)
                    + ",until="
                    + ((until == null) ? "null" : until)
                    + ",set="
                    + ((set == null) ? "null" : set)
                    + ",metadataPrefix="
                    + ((metadataPrefix == null) ? "null" : metadataPrefix)));

    // We can produce oai_dc and simple DC for all items, so just return IDs
    Context context = null;

    // Accumulate results here
    List<String> headers = new LinkedList<String>();
    List<String> identifiers = new LinkedList<String>();

    try
    {
        context = new Context();

        // Get the relevant OAIItemInfo objects to make headers
        DSpaceObject scope = resolveSet(context, set);
        boolean includeAll = ConfigurationManager.getBooleanProperty("harvest.includerestricted.oai", true);

        // Warning: In large repositories, setting harvest.includerestricted.oai to false may cause
        // performance problems as all items will need to have their authorization permissions checked,
        // but because we haven't implemented resumption tokens in ListIdentifiers, ALL items will
        // need checking whenever a ListIdentifiers request is made.
        List<HarvestedItemInfo> itemInfos = Harvest.harvest(context, scope,
                from, until, 0, 0, // everything for now
                !includeAll, true, true, includeAll);

        // No Item objects, but we need to know collections they're in and
        // withdrawn items
        if (itemInfos.isEmpty())
        {
            log.info(LogManager.getHeader(null, "oai_error",
                    "no_items_match"));
            throw new NoItemsMatchException();
        }

        // Build up lists of headers and identifiers
        for (HarvestedItemInfo itemInfo : itemInfos)
        {
            String[] header = getRecordFactory().createHeader(itemInfo);
            headers.add(header[0]);
            identifiers.add(header[1]);
        }
    }
    catch (SQLException se)
    {
        // Log the error
        log.warn(LogManager.getHeader(context, "database_error", ""), se);

        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(se.toString());
    }
    catch (ParseException pe)
    {
        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(pe.toString());
    }
    finally
    {
        if (context != null)
        {
            context.abort();
        }
    }

    // Package the results in the form the caller expects
    Map<String, Iterator<String>> results = new HashMap<String, Iterator<String>>();
    results.put("headers", headers.iterator());
    results.put("identifiers", identifiers.iterator());

    return results;
}
/**
 * Retrieve the next set of identifiers associated with the resumptionToken
 *
 * @param resumptionToken
 *            implementation-dependent format taken from the previous
 *            listIdentifiers() Map result.
 * @return a Map object containing entries for "headers" and "identifiers"
 *         Iterators (both containing Strings) as well as an optional
 *         "resumptionMap" Map.
 * @exception BadResumptionTokenException
 *                the value of the resumptionToken is invalid or expired.
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 */
public Map listIdentifiers(String resumptionToken)
        throws BadResumptionTokenException, OAIInternalServerError
{
    // Resumption tokens not yet supported for ListIdentifiers: the
    // four-argument listIdentifiers() always returns the full result set,
    // so any token presented here is by definition invalid
    throw new BadResumptionTokenException();
}
/**
 * Retrieve the specified metadata for the specified identifier
 *
 * @param identifier
 *            the OAI identifier
 * @param metadataPrefix
 *            the OAI metadataPrefix
 * @return the <record/>portion of the XML response.
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 * @exception CannotDisseminateFormatException
 *                the metadataPrefix is not supported by the item.
 * @exception IdDoesNotExistException
 *                the identifier wasn't found
 */
public String getRecord(String identifier, String metadataPrefix)
        throws OAIInternalServerError, CannotDisseminateFormatException,
        IdDoesNotExistException
{
    log.info(LogManager.getHeader(null, "oai_request",
            "verb=getRecord,identifier="
                    + ((identifier == null) ? "null" : identifier)
                    + ",metadataPrefix="
                    + ((metadataPrefix == null) ? "null" : metadataPrefix)));

    Context context = null;
    String record = null;
    HarvestedItemInfo itemInfo = null;

    try
    {
        // Valid IDs start with oai:hostname: - strip that prefix off to
        // get the raw handle before looking the item up
        if (identifier != null && identifier.startsWith(OAI_ID_PREFIX))
        {
            context = new Context();
            String handle = identifier.substring(OAI_ID_PREFIX.length());
            itemInfo = Harvest.getSingle(context, handle, true);
        }

        if (itemInfo == null)
        {
            log.info(LogManager.getHeader(null, "oai_error",
                    "id_does_not_exist"));
            throw new IdDoesNotExistException(identifier);
        }

        boolean includeAll = ConfigurationManager.getBooleanProperty("harvest.includerestricted.oai", true);
        if (!includeAll)
        {
            // Restricted items are only exposed when the group with ID 0
            // holds READ permission on the item
            boolean authorized = false;
            Group[] authorizedGroups = AuthorizeManager.getAuthorizedGroups(context, itemInfo.item, Constants.READ);
            for (Group group : authorizedGroups)
            {
                if (group.getID() == 0)
                {
                    authorized = true;
                    break;
                }
            }

            if (!authorized)
            {
                log.info(LogManager.getHeader(null, "oai_error",
                        "id_not_accessible"));
                throw new IdDoesNotExistException(identifier);
            }
        }

        String schemaURL = getCrosswalks().getSchemaURL(metadataPrefix);
        if (schemaURL == null)
        {
            log.info(LogManager.getHeader(null, "oai_error",
                    "cannot_disseminate_format"));
            throw new CannotDisseminateFormatException(metadataPrefix);
        }

        record = getRecordFactory().create(itemInfo, schemaURL,
                metadataPrefix);
    }
    catch (SQLException se)
    {
        // Log the error
        log.warn(LogManager.getHeader(context, "database_error", ""), se);

        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(se.toString());
    }
    finally
    {
        if (context != null)
        {
            context.abort();
        }
    }

    return record;
}
/**
 * Retrieve a list of records that satisfy the specified criteria. Note,
 * though, that unlike the other OAI verb type methods implemented here,
 * both of the listRecords methods are already implemented in
 * AbstractCatalog rather than abstracted. This override exists because the
 * default implementation builds its response one GetRecord call at a time;
 * constructing the whole response in one pass (via doRecordHarvest) is
 * considerably more efficient.
 *
 * @param from
 *            beginning date using the proper granularity
 * @param until
 *            ending date using the proper granularity
 * @param set
 *            the set name or null if no such limit is requested
 * @param metadataPrefix
 *            the OAI metadataPrefix or null if no such limit is requested
 * @return a Map object containing entries for a "records" Iterator object
 *         (containing XML <record/>Strings) and an optional
 *         "resumptionMap" Map.
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 * @exception NoSetHierarchyException
 *                The repository doesn't support sets.
 * @exception CannotDisseminateFormatException
 *                the metadataPrefix isn't supported by the item.
 */
public Map listRecords(String from, String until, String set,
        String metadataPrefix) throws OAIInternalServerError,
        NoSetHierarchyException, CannotDisseminateFormatException,
        NoItemsMatchException, BadArgumentException
{
    log.info(LogManager.getHeader(null, "oai_request",
            "verb=listRecords,from="
                    + ((from == null) ? "null" : from)
                    + ",until="
                    + ((until == null) ? "null" : until)
                    + ",set="
                    + ((set == null) ? "null" : set)
                    + ",metadataPrefix="
                    + ((metadataPrefix == null) ? "null" : metadataPrefix)));

    Map results = doRecordHarvest(from, until, set, metadataPrefix, 0);

    // A null map means the metadata prefix was not recognised
    if (results == null)
    {
        log.info(LogManager.getHeader(null, "oai_error",
                "cannot_disseminate_format"));
        throw new CannotDisseminateFormatException(metadataPrefix);
    }

    // Zero matching records is reported to the client as an error
    Iterator records = (Iterator) results.get("records");
    if ((records == null) || !records.hasNext())
    {
        log.info(LogManager.getHeader(null, "oai_error", "no_items_match"));
        throw new NoItemsMatchException();
    }

    return results;
}
/**
 * Retrieve the next set of records associated with the resumptionToken
 *
 * @param resumptionToken
 *            implementation-dependent format taken from the previous
 *            listRecords() Map result.
 * @return a Map object containing entries for "headers" and "identifiers"
 *         Iterators (both containing Strings) as well as an optional
 *         "resumptionMap" Map.
 * @exception OAIInternalServerError
 *                signals an http status code 500 problem
 * @exception BadResumptionTokenException
 *                the value of the resumptionToken argument is invalid or
 *                expired.
 */
public Map listRecords(String resumptionToken)
        throws BadResumptionTokenException, OAIInternalServerError
{
    log.info(LogManager.getHeader(null, "oai_request",
            "verb=listRecords,resumptionToken=" + resumptionToken));

    /*
     * FIXME: This may return zero records if the previous harvest returned
     * a number of records that's an exact multiple of MAX_RECORDS. I hope
     * that's OK.
     */
    Object[] params = decodeResumptionToken(resumptionToken);
    Integer offset = (Integer) params[4];

    Map results = null;

    /*
     * We catch BadArgumentExceptions here, because doRecordHarvest() throws
     * BadArgumentExcpetions when the set spec is bad. set spec bad == bad
     * resumption token.
     */
    try
    {
        results = doRecordHarvest((String) params[0], (String) params[1],
                (String) params[2], (String) params[3], offset.intValue());
    }
    catch (BadArgumentException bae)
    {
        results = null;
    }

    // A null result signals a problem, i.e. a bad resumption token
    if (results == null)
    {
        log.info(LogManager.getHeader(null, "oai_error",
                "bad_resumption_token"));
        throw new BadResumptionTokenException();
    }

    return results;
}
/**
 * Method to do the actual harvest of records
 *
 * @param from
 *            OAI 'from' parameter
 * @param until
 *            OAI 'until' parameter
 * @param set
 *            OAI 'set' parameter
 * @param metadataPrefix
 *            OAI 'metadataPrefix' parameter
 * @param offset
 *            where to start this harvest
 *
 * @return the Map for listRecords to return, or null if the metadataPrefix
 *         is invalid
 * @throws OAIInternalServerError
 *             on database or date-parsing errors
 * @throws BadArgumentException
 *             if the set spec cannot be resolved
 */
private Map doRecordHarvest(String from, String until, String set,
        String metadataPrefix, int offset) throws OAIInternalServerError,
        BadArgumentException
{
    Context context = null;
    String schemaURL = getCrosswalks().getSchemaURL(metadataPrefix);
    // Parameterized (was a raw HashMap): values are an Iterator of record
    // XML strings plus an optional resumption Map, hence Object
    Map<String, Object> results = new HashMap<String, Object>();

    // Unknown metadata prefix: signalled to the caller with a null return
    if (schemaURL == null)
    {
        return null;
    }

    // List to put results in
    List<String> records = new LinkedList<String>();

    try
    {
        context = new Context();

        // Get the relevant HarvestedItemInfo objects to make headers
        DSpaceObject scope = resolveSet(context, set);
        boolean includeAll = ConfigurationManager.getBooleanProperty("harvest.includerestricted.oai", true);
        List<HarvestedItemInfo> itemInfos = Harvest.harvest(context, scope, from, until,
                offset, MAX_RECORDS, // Limit amount returned from one
                // request
                true, true, true, includeAll); // Need items, containers + withdrawals

        // Build list of XML records from item info objects
        for (HarvestedItemInfo itemInfo : itemInfos)
        {
            try
            {
                String recordXML = getRecordFactory().create(itemInfo, schemaURL, metadataPrefix);
                records.add(recordXML);
            }
            catch (CannotDisseminateFormatException cdfe)
            {
                /*
                 * FIXME: I've a feeling a
                 * "CannotDisseminateFormatException" should be discarded
                 * here - it's OK if some records in the requested date
                 * range don't have the requested metadata format available.
                 * I'll just log it for now.
                 */
                if (log.isDebugEnabled())
                {
                    log.debug(LogManager.getHeader(context, "oai_warning",
                            "Couldn't disseminate " + metadataPrefix
                                    + " for " + itemInfo.handle));
                }
            }
        }

        // Put results in form needed to return
        results.put("records", records.iterator());

        log.info(LogManager.getHeader(context, "oai_harvest", "results=" + records.size()));

        // If we have MAX_RECORDS records, we need to provide a resumption
        // token
        if (records.size() >= MAX_RECORDS)
        {
            String resumptionToken = makeResumptionToken(from, until, set,
                    metadataPrefix, offset + MAX_RECORDS);

            if (log.isDebugEnabled())
            {
                log.debug(LogManager
                        .getHeader(context, "made_resumption_token",
                                "token=" + resumptionToken));
            }

            results.put("resumptionMap", getResumptionMap(resumptionToken));
            //results.put("resumptionToken", resumptionToken);
        }
    }
    catch (SQLException se)
    {
        // Log the error
        log.warn(LogManager.getHeader(context, "database_error", ""), se);

        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(se.toString());
    }
    catch (ParseException pe)
    {
        // Stack trace loss as OAI Exception does not support cause
        throw new OAIInternalServerError(pe.toString());
    }
    finally
    {
        if (context != null)
        {
            context.abort();
        }
    }

    return results;
}
/**
* Retrieve a list of sets that satisfy the specified criteria
*
* @return a Map object containing "sets" Iterator object (contains
* <setSpec/>XML Strings) as well as an optional resumptionMap Map.
* @exception NoSetHierarchyException
* signals an http status code 400 problem
* @exception OAIInternalServerError
* signals an http status code 500 problem
*/
public Map listSets() throws NoSetHierarchyException,
OAIInternalServerError
{
log.info(LogManager.getHeader(null, "oai_request", "verb=listSets"));
Context context = null;
// List to put results in
List<String> sets = new LinkedList<String>();
try
{
context = new Context();
Collection[] allCols = Collection.findAll(context);
StringBuffer spec = null;
for (int i = 0; i < allCols.length; i++)
{
spec = new StringBuffer("<set><setSpec>hdl_");
spec.append(allCols[i].getHandle().replace('/', '_'));
spec.append("</setSpec>");
String collName = allCols[i].getMetadata("name");
if(collName != null)
{
spec.append("<setName>");
spec.append(Utils.addEntities(collName));
spec.append("</setName>");
}
else
{
spec.append("<setName />");
// Warn that there is an error of a null set name
log.info(LogManager.getHeader(null, "oai_error",
"null_set_name_for_set_id_" + allCols[i].getHandle()));
}
spec.append("</set>");
sets.add(spec.toString());
}
Community[] allComs = Community.findAll(context);
for (int i = 0; i < allComs.length; i++)
{
spec = new StringBuffer("<set><setSpec>hdl_");
spec.append(allComs[i].getHandle().replace('/', '_'));
spec.append("</setSpec>");
String commName = allComs[i].getMetadata("name");
if(commName != null)
{
spec.append("<setName>");
spec.append(Utils.addEntities(commName));
spec.append("</setName>");
}
else
{
spec.append("<setName />");
// Warn that there is an error of a null set name
log.info(LogManager.getHeader(null, "oai_error",
"null_set_name_for_set_id_" + allComs[i].getHandle()));
}
spec.append("</set>");
sets.add(spec.toString());
}
}
catch (SQLException se)
{
// Log the error
log.warn(LogManager.getHeader(context, "database_error", ""), se);
// Stack trace loss as OAI Exception does not support cause
throw new OAIInternalServerError(se.toString());
}
finally
{
if (context != null)
{
context.abort();
}
}
// Put results in form needed to return
Map<String, Iterator<String>> results = new HashMap<String, Iterator<String>>();
results.put("sets", sets.iterator());
return results;
}
    /**
     * Retrieve the next set of sets associated with the resumptionToken
     *
     * @param resumptionToken
     *            implementation-dependent format taken from the previous
     *            listSets() Map result.
     * @return a Map object containing "sets" Iterator object (contains
     *         <setSpec/>XML Strings) as well as an optional resumptionMap Map.
     * @exception BadResumptionTokenException
     *                the value of the resumptionToken is invalid or expired.
     * @exception OAIInternalServerError
     *                signals an http status code 500 problem
     */
    public Map listSets(String resumptionToken)
            throws BadResumptionTokenException, OAIInternalServerError
    {
        // Resumption tokens are not supported for sets: listSets() never
        // issues one, so any token presented here is by definition invalid.
        throw new BadResumptionTokenException();
    }
    /**
     * Close the repository. This is a no-op: each request method opens and
     * aborts its own Context, so there is no long-lived state to release.
     */
    public void close()
    {
    }
// ******************************************
// Internal DSpace utility methods below here
// ******************************************
/**
* Get the community or collection signified by a set spec
*
* @param context
* DSpace context object
* @param set
* OAI set spec
* @return the corresponding community or collection, or null if no set
* provided
*/
private DSpaceObject resolveSet(Context context, String set)
throws SQLException, BadArgumentException
{
if (set == null)
{
return null;
}
DSpaceObject o = null;
/*
* set specs are in form hdl_123.456_789 corresponding to
* hdl:123.456/789
*/
if (set.startsWith("hdl_"))
{
// Looks OK so far... turn second _ into /
String handle = set.substring(4).replace('_', '/');
o = HandleManager.resolveToObject(context, handle);
}
// If it corresponds to a collection or a community, that's the set we
// want
if ((o != null) &&
((o instanceof Collection) || (o instanceof Community)))
{
return o;
}
// Handle is either non-existent, or corresponds to a non-collection
// Either way, a bad set spec, ergo a bad argument
throw new BadArgumentException();
}
/**
* Create a resumption token. The relevant parameters for the harvest are
* put in a
*
* @param from
* OAI 'from' parameter
* @param until
* OAI 'until' parameter
* @param set
* OAI 'set' parameter
* @param prefix
* OAI 'metadataPrefix' parameter
* @param offset
* where to start the next harvest
*
* @return the appropriate resumption token
*/
private String makeResumptionToken(String from, String until, String set,
String prefix, int offset)
{
StringBuffer token = new StringBuffer();
if (from != null)
{
token.append(from);
}
token.append("/");
if (until != null)
{
token.append(until);
}
token.append("/");
if (set != null)
{
token.append(set);
}
token.append("/");
if (prefix != null)
{
token.append(prefix);
}
token.append("/");
token.append(String.valueOf(offset));
return (token.toString());
}
    /**
     * Get the information out of a resumption token. The token format is
     * the one produced by makeResumptionToken(): five slash-separated
     * fields, {@code from/until/set/prefix/offset}, where an empty field
     * decodes to null.
     *
     * @param token
     *            the resumption token
     * @return a 5-long array of Objects; 4 Strings (from, until, set, prefix)
     *         and an Integer (the offset)
     * @throws BadResumptionTokenException
     *             if the token has fewer than five fields or the offset is
     *             not a valid integer
     */
    private Object[] decodeResumptionToken(String token)
            throws BadResumptionTokenException
    {
        Object[] obj = new Object[5];
        // returnDelims=true so that an empty field shows up as a bare "/"
        // token rather than being silently skipped
        StringTokenizer st = new StringTokenizer(token, "/", true);
        try
        {
            // Extract from, until, set, prefix
            for (int i = 0; i < 4; i++)
            {
                if (!st.hasMoreTokens())
                {
                    throw new BadResumptionTokenException();
                }
                String s = st.nextToken();
                // If this value is a delimiter /, we have no value for this
                // part
                // of the resumption token.
                if (s.equals("/"))
                {
                    obj[i] = null;
                }
                else
                {
                    obj[i] = s;
                    // Skip the delimiter (throws NoSuchElementException if
                    // the token ends prematurely; converted below)
                    st.nextToken();
                }
                log.debug("is: " + (String) obj[i]);
            }
            // Fifth field is the mandatory numeric offset
            if (!st.hasMoreTokens())
            {
                throw new BadResumptionTokenException();
            }
            obj[4] = Integer.valueOf(st.nextToken());
        }
        catch (NumberFormatException nfe)
        {
            // Stack trace loss as OAI Exception does not support cause
            throw new BadResumptionTokenException();
        }
        catch (NoSuchElementException nsee)
        {
            // Stack trace loss as OAI Exception does not support cause
            throw new BadResumptionTokenException();
        }
        return obj;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.didl;
import java.security.SecureRandom;
import java.util.Random;
/**
* Factory class for generating UUID version 4. All what this class does is
* creating UUID version 4 objects using crypto-quality random numbers.
*
* Development of this code was part of the aDORe repository project by the
* Research Library of the Los Alamos National Laboratory.
*
* This code is based on the implementation of UUID version 4 (the one that
* uses random/pseudo-random numbers by Ashraf Amrou of the Old Dominion University
* (Aug 14, 2003)
*
**/
public final class UUIDFactory
{
/** Random number generator */
private Random rand = null;
/** an instance */
private static UUIDFactory generator = new UUIDFactory();
/** private constructor (Singleton class) */
private UUIDFactory()
{
// crypto-quality random number generator
rand = new SecureRandom();
}
/**
*
* Customers of this class call this method to generete new UUID objects
*
* @return a new UUID object
*
**/
public static synchronized UUID generateUUID()
{
return new UUID(generator.rand.nextLong(),generator.rand.nextLong());
}
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.didl;
import java.io.Serializable;
/**
* This class implements UUID version 4. The values for the various fields are
* crypto random values set by the factory class UUIDFactory
*
* Development of this code was part of the aDORe repository project by the
* Research Library of the Los Alamos National Laboratory.
*
* This code is based on the implementation of UUID version 4 (the one that
* uses random/pseudo-random numbers by Ashraf Amrou of the Old Dominion University
* (Aug 14, 2003)
**/
public final class UUID implements Serializable
{
private long hi;
private long lo;
/**
* Construct a Version 4 UUID object from another UUID object
*
* @param uuid
* the UUID to use as a base for the new UUID
**/
public UUID(UUID uuid)
{
this.hi = uuid.hi;
this.lo = uuid.lo;
}
/**
* Construct a Version 4 UUID object form the two given long values.
* These values are (pseudo)random numbers (best if crypto quality)
*
* @param _hi
* first long value
*
* @param _lo
* second long value
*
**/
public UUID(long _hi, long _lo)
{
this.hi = _hi;
this.lo = _lo;
// IETF variant (10)b
lo &= 0x3FFFFFFFFFFFFFFFL; lo |= 0x8000000000000000L;
// set multicast bit (so that it there is no chance it will clash
// with other UUIDs generated based on real IEEE 802 addresses)
lo |= 0x0000800000000000L;
// version 4 (100)b: the one based on random/pseudo-random numbers
hi &= 0xFFFFFFFFFFFF0FFFL; hi |= 0x0000000000004000L;
}
/**
* Compare UUID objects
*
* @param obj
* the object to compare this UUID against
*
* @return true or false
**/
public boolean equals(Object obj)
{
if(this == obj) // comparing to myself
{
return true;
}
if(obj instanceof UUID)
{
UUID uuid = (UUID)obj;
return (hi == uuid.hi && lo == uuid.lo);
}
return false;
}
/**
* Generate a hash for the UUID
*
* @return hash code for the UUID
*
**/
public int hashCode()
{
return Long.valueOf(hi ^ lo).hashCode();
}
/**
* Obtain a string representation of the UUID object
*
* @return the string representation of this UUID
*
**/
public String toString()
{
return (/**"urn:uuid:" + **/
hexDigits(hi >> 32, 4) // time_low: 4 hexOctet (8 hex digits)
+ "-" +
hexDigits(hi >> 16, 2) // time_mid: 2 hexOctet (4 hex digits)
+ "-" +
hexDigits(hi, 2) // time_high_and_version: 2 hexOctet (4 hex digits)
+ "-" +
hexDigits(lo >> 48, 2) // clock_seq_and_reserved: 1 hexOctet (2 hex digits) & clock_seq_low: 1 hexOctet (2 hex digits)
+ "-" +
hexDigits(lo, 6)); // node: 6 hexOctet (12 hex digits)
}
/**
* Obtain the Hex value of a given number of least significant octets
* from a long value as a String
*
* @param lVal
* the long value to retrieve octets from
*
* @param nHexOctets
* number of hex octets to return
*
* @return hex value of least significant octets as a string
*
**/
private static String hexDigits(long lVal, int nHexOctets) {
long tmp = 1L << (nHexOctets * 2 * 4); // e.g., if nHexOctets is 2, tmp = (1 0000 0000 0000 0000)b & tmp - 1 = (1111 1111 1111 1111)b
long result = lVal & (tmp - 1); // get ride of the uneeded most significant bits
result = tmp | result; // make sure the digit at position (nDigits + 1) equals 1 (to preserve leading zeroes)
return Long.toHexString(result).substring(1); // getride ot the digit at position nDigits + 1
}
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.core.Utils;
import org.dspace.eperson.EPerson;
import org.dspace.license.CreativeCommons;
import org.jdom.Element;
import org.jdom.Namespace;
/**
 * This defines the behavior of DSpace "resources" in the WebDAV interface; it
 * maps DAV operations onto DSpace objects.
 */
class DAVItem extends DAVDSpaceObject
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVItem.class);
    /** The item. */
    private Item item = null;
    /** The Constant submitterProperty. */
    private static final Element submitterProperty = new Element("submitter",
            DAV.NS_DSPACE);
    /** The Constant getlastmodifiedProperty. */
    private static final Element getlastmodifiedProperty = new Element(
            "getlastmodified", DAV.NS_DAV);
    /** The Constant licenseProperty. */
    private static final Element licenseProperty = new Element("license",
            DAV.NS_DSPACE);
    /** The Constant cc_license_textProperty. */
    private static final Element cc_license_textProperty = new Element(
            "cc_license_text", DAV.NS_DSPACE);
    /** The Constant cc_license_rdfProperty. */
    private static final Element cc_license_rdfProperty = new Element(
            "cc_license_rdf", DAV.NS_DSPACE);
    /** The Constant cc_license_urlProperty. */
    private static final Element cc_license_urlProperty = new Element(
            "cc_license_url", DAV.NS_DSPACE);
    /** The Constant owning_collectionProperty. */
    private static final Element owning_collectionProperty = new Element(
            "owning_collection", DAV.NS_DSPACE);
    /** The Constant withdrawnProperty. */
    private static final Element withdrawnProperty = new Element("withdrawn",
            DAV.NS_DSPACE);
    /** All properties: the common ones plus the Item-specific set above. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);
    static
    {
        allProps.add(submitterProperty);
        allProps.add(getlastmodifiedProperty);
        allProps.add(licenseProperty);
        allProps.add(cc_license_textProperty);
        allProps.add(cc_license_rdfProperty);
        allProps.add(cc_license_urlProperty);
        allProps.add(owning_collectionProperty);
        allProps.add(handleProperty);
        allProps.add(withdrawnProperty);
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        return allProps;
    }
    /**
     * Instantiates a new DAV item.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     * @param item the item
     */
    protected DAVItem(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[], Item item)
    {
        super(context, request, response, pathElt, item);
        this.type = TYPE_ITEM;
        this.item = item;
    }
    /**
     * Match Item URIs that identify the item by a database ID. Handle URIs are
     * matched by DAVDSpaceObject.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     *
     * @return the DAV resource
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException
    {
        int id = -1;
        String bsElt = null;
        try
        {
            // The "/item_db_" element in last or next-to-last element
            if (pathElt[pathElt.length - 1].startsWith("item_db_"))
            {
                id = Integer.parseInt(pathElt[pathElt.length - 1].substring(8));
            }
            else if (pathElt[pathElt.length - 1].startsWith("bitstream_")
                    && pathElt.length > 1
                    && pathElt[pathElt.length - 2].startsWith("item_db_"))
            {
                id = Integer.parseInt(pathElt[pathElt.length - 2].substring(8));
                bsElt = pathElt[pathElt.length - 1];
            }
            if (id >= 0)
            {
                Item item = Item.find(context, id);
                if (item == null)
                {
                    throw new DAVStatusException(
                            HttpServletResponse.SC_NOT_FOUND, "Item with ID="
                                    + String.valueOf(id) + " not found.");
                }
                if (bsElt != null)
                {
                    Bitstream bs = DAVBitstream.findBitstream(context, item,
                            bsElt);
                    if (bs == null)
                    {
                        throw new DAVStatusException(
                                HttpServletResponse.SC_NOT_FOUND,
                                "Bitstream not found.");
                    }
                    return new DAVBitstream(context, request, response,
                            pathElt, item, bs);
                }
                else
                {
                    return new DAVItem(context, request, response, pathElt,
                            item);
                }
            }
            return null;
        }
        catch (NumberFormatException ne)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Error parsing number in request URI.", ne);
        }
    }
    /**
     * Return pathname element in db-id format.
     *
     * @param dbid the dbid
     *
     * @return the path elt
     */
    protected static String getPathElt(int dbid)
    {
        return "item_db_" + String.valueOf(dbid);
    }
    /**
     * Return pathname element to this Item. Use db-id format if no handle,
     * otherwise the DSpace object format.
     *
     * @param item the item
     *
     * @return the path elt
     */
    protected static String getPathElt(Item item)
    {
        String handle = item.getHandle();
        if (handle == null)
        {
            return getPathElt(item.getID());
        }
        else
        {
            return DAVDSpaceObject.getPathElt(item);
        }
    }
    /**
     * Return this resource's children. Item's children are its bitstreams.
     *
     * @return the DAV resource[]
     *
     * @throws SQLException the SQL exception
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        // Check for overall read permission on Item
        if (!AuthorizeManager.authorizeActionBoolean(this.context, this.item,
                Constants.READ))
        {
            return new DAVResource[0];
        }
        // Typed list instead of raw Vector: no unchecked warnings, and no
        // synchronization overhead for this method-local accumulator.
        List<DAVResource> result = new ArrayList<DAVResource>();
        Bundle[] bundles = this.item.getBundles();
        for (Bundle element : bundles)
        {
            // check read permission on this Bundle
            if (!AuthorizeManager.authorizeActionBoolean(this.context, element,
                    Constants.READ))
            {
                continue;
            }
            Bitstream[] bitstreams = element.getBitstreams();
            for (Bitstream element0 : bitstreams)
            {
                String ext[] = element0.getFormat().getExtensions();
                result.add(new DAVBitstream(this.context, this.request, this.response,
                        makeChildPath(DAVBitstream.getPathElt(element0
                                .getSequenceID(), ext.length < 1 ? null
                                : ext[0])), this.item, element0));
            }
        }
        return result.toArray(new DAVResource[result.size()]);
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVDSpaceObject#propfindInternal(org.jdom.Element)
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        /*
         * FIXME: This implements permission check that really belongs in
         * business logic. Although communities and collections don't check for
         * read auth, Item may contain sensitive data and should always check
         * for READ permission. Exception: allow "withdrawn" property to be
         * checked regardless of authorization since all permissions are removed
         * when item is withdrawn.
         */
        if (!elementsEqualIsh(property, withdrawnProperty))
        {
            AuthorizeManager.authorizeAction(this.context, this.item, Constants.READ);
        }
        if (elementsEqualIsh(property, withdrawnProperty))
        {
            value = String.valueOf(this.item.isWithdrawn());
        }
        else if (elementsEqualIsh(property, displaynameProperty))
        {
            // displayname - title or handle.
            DCValue titleDc[] = this.item.getDC("title", Item.ANY, Item.ANY);
            value = titleDc.length > 0 ? titleDc[0].value : this.item.getHandle();
        }
        else if (elementsEqualIsh(property, handleProperty))
        {
            value = canonicalizeHandle(this.item.getHandle());
        }
        else if (elementsEqualIsh(property, submitterProperty))
        {
            EPerson ep = this.item.getSubmitter();
            if (ep != null)
            {
                value = hrefToEPerson(ep);
            }
        }
        else if (elementsEqualIsh(property, owning_collectionProperty))
        {
            Collection owner = this.item.getOwningCollection();
            if (owner != null)
            {
                value = canonicalizeHandle(owner.getHandle());
            }
        }
        else if (elementsEqualIsh(property, getlastmodifiedProperty))
        {
            value = DAV.applyHttpDateFormat(this.item.getLastModified());
        }
        else if (elementsEqualIsh(property, licenseProperty))
        {
            value = getLicenseAsString();
        }
        else if (elementsEqualIsh(property, cc_license_textProperty))
        {
            value = CreativeCommons.getLicenseText(this.item);
        }
        else if (elementsEqualIsh(property, cc_license_rdfProperty))
        {
            value = CreativeCommons.getLicenseRDF(this.item);
        }
        else if (elementsEqualIsh(property, cc_license_urlProperty))
        {
            value = CreativeCommons.getLicenseURL(this.item);
        }
        else
        {
            return super.propfindInternal(property);
        }
        // value was set up by "if" clause:
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }
    /**
     * Get the license for this Item as String. License is the first bitstream
     * named "license.txt" in a LICENSE bundle, apparently?
     * <p>
     * FIXME: is this correct? there's no counterexample..
     *
     * @return license string, or null if none found.
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    private String getLicenseAsString() throws SQLException,
            AuthorizeException, IOException
    {
        Bundle lb[] = this.item.getBundles(Constants.LICENSE_BUNDLE_NAME);
        for (Bundle element : lb)
        {
            Bitstream lbs = element.getBitstreamByName("license.txt");
            if (lbs != null)
            {
                ByteArrayOutputStream baos = new ByteArrayOutputStream(
                        (int) lbs.getSize());
                Utils.copy(lbs.retrieve(), baos);
                return baos.toString();
            }
        }
        return null;
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        // Don't need any authorization checks since the Item layer
        // checks authorization for write and delete.
        Namespace ns = prop.getNamespace();
        String propName = prop.getName();
        // "submitter" is the only Item-specific mutable property, rest are
        // live and unchangeable.
        if (ns != null && ns.equals(DAV.NS_DSPACE)
                && propName.equals("submitter"))
        {
            if (action == DAV.PROPPATCH_REMOVE)
            {
                throw new DAVStatusException(DAV.SC_CONFLICT,
                        "The submitter property cannot be removed.");
            }
            String newName = prop.getText();
            EPerson ep = EPerson.findByEmail(this.context, newName);
            if (ep == null)
            {
                throw new DAVStatusException(DAV.SC_CONFLICT,
                        "Cannot set submitter, no EPerson found for email address: "
                                + newName);
            }
            this.item.setSubmitter(ep);
            this.item.update();
            return HttpServletResponse.SC_OK;
        }
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }
    /**
     * GET implementation returns the contents of the Item as a package. The
     * query arg "package" must be specified.
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    @Override
    protected void get() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        // Check for overall read permission on Item, because nothing else will
        AuthorizeManager.authorizeAction(this.context, this.item, Constants.READ);
        String packageType = this.request.getParameter("package");
        if (packageType == null)
        {
            packageType = "default";
        }
        PackageDisseminator dip = (PackageDisseminator) PluginManager
                .getNamedPlugin(PackageDisseminator.class, packageType);
        if (dip == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Cannot find a disseminate plugin for package="
                            + packageType);
        }
        else
        {
            File tempFile = null;
            try
            {
                // Create a temporary file to disseminate into
                String tempDirectory = ConfigurationManager.getProperty("upload.temp.dir");
                tempFile = File.createTempFile("DAVItemGet" + this.item.hashCode(), null, new File(tempDirectory));
                tempFile.deleteOnExit();
                // Disseminate item to temporary file
                PackageParameters pparams = PackageParameters.create(this.request);
                this.response.setContentType(dip.getMIMEType(pparams));
                dip.disseminate(this.context, this.item, pparams, tempFile);
                // Copy temporary file contents to response stream
                FileInputStream fileIn = null;
                try
                {
                    fileIn = new FileInputStream(tempFile);
                    Utils.copy(fileIn, this.response.getOutputStream());
                }
                finally
                {
                    if (fileIn != null)
                    {
                        fileIn.close();
                    }
                }
            }
            catch (CrosswalkException pe)
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        "Failed in crosswalk of metadata: " + pe.toString(), pe);
            }
            catch (PackageException pe)
            {
                pe.log(log);
                throw new DAVStatusException(
                        HttpServletResponse.SC_INTERNAL_SERVER_ERROR, pe
                                .toString(), pe);
            }
            finally
            {
                // Reclaim the temporary file now rather than leaving it on
                // disk until JVM exit (deleteOnExit above stays as a safety
                // net in case delete() fails).
                if (tempFile != null && !tempFile.delete())
                {
                    log.warn("Could not delete temporary file: " + tempFile);
                }
            }
        }
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#put()
     */
    @Override
    protected void put() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "PUT is not implemented for Item.");
    }
    /**
     * COPY is implemented by an "add", which is the closest we can get in
     * DSpace semantics.
     *
     * @param destination the destination
     * @param depth the depth
     * @param overwrite the overwrite
     * @param keepProperties the keep properties
     *
     * @return the int
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    @Override
    protected int copyInternal(DAVResource destination, int depth,
            boolean overwrite, boolean keepProperties)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        return addItemToCollection(this.context, this.item, destination, overwrite);
    }
    /**
     * Do the work of "copy" method; this code is shared with e.g.
     * DAVInProgressSubmission.
     *
     * @param context the context
     * @param item the item
     * @param destination the destination
     * @param overwrite the overwrite
     *
     * @return HTTP status code.
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    protected static int addItemToCollection(Context context, Item item,
            DAVResource destination, boolean overwrite)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        // sanity checks
        if (!(destination instanceof DAVCollection))
        {
            throw new DAVStatusException(
                    HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                    "COPY of Item is only allowed when destination is a DSpace Collection.");
        }
        // access check
        AuthorizeManager.authorizeAction(context, item, Constants.READ);
        // make sure item doesn't belong to this collection
        Collection destColl = ((DAVCollection) destination).getCollection();
        log.debug("COPY from=" + item.toString() + " (" + item.getHandle()
                + "), to=" + destColl.toString() + " (" + destColl.getHandle()
                + ")");
        // check if it's already a member
        Collection refs[] = item.getCollections();
        for (Collection element : refs)
        {
            if (destColl.equals(element))
            {
                log.debug("COPY - item @ " + item.getHandle()
                        + " is already a member of collection @ "
                        + destColl.getHandle());
                if (overwrite)
                {
                    return DAV.SC_NO_CONTENT;
                }
                else
                {
                    throw new DAVStatusException(DAV.SC_CONFLICT,
                            "This Item is already a member of collection handle="
                                    + destColl.getHandle());
                }
            }
        }
        destColl.addItem(item);
        return DAV.SC_NO_CONTENT;
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#deleteInternal()
     */
    @Override
    protected int deleteInternal() throws DAVStatusException, SQLException,
            AuthorizeException, IOException
    {
        // DELETE maps to withdraw, not expunge: the item stays in the
        // archive but is no longer visible.
        this.item.withdraw();
        return HttpServletResponse.SC_OK; // HTTP OK
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
     */
    @Override
    protected int mkcolInternal(String waste) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "MKCOL method not allowed for Item.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
import org.jdom.Element;
/**
* Reflect the contents of a WorkflowItem object, which is mainly an in-progress
* submission wrapper around an Item. Allow the client to read and set the slots
* in the workflow item and explore its child Item.
* <p>
* WorkflowItem resources are reached through the Workflow resource, typically
* with a path of /workflow_pool/wfi_db_{id}
* <p>
*
* @author Larry Stone
* @see DAVInProgressSubmission
* @see DAVWorkflow
*/
class DAVWorkflowItem extends DAVInProgressSubmission
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVWorkflowItem.class);
    /** Property element for the workflow item's current owner (dspace:owner). */
    private static final Element ownerProperty = new Element("owner",
            DAV.NS_DSPACE);
    /** All properties exposed by this resource: the inherited in-progress
     * properties plus owner and state (added in the static initializer). */
    private static List<Element> allProps = new ArrayList<Element>(inProgressProps);
    static
    {
        allProps.add(ownerProperty);
        allProps.add(stateProperty);
    }
    /**
     * Instantiates a new DAV workflow item resource wrapping the given
     * in-progress submission; all state handling is delegated to the
     * superclass.
     *
     * @param context the DSpace context
     * @param request the HTTP request being serviced
     * @param response the HTTP response under construction
     * @param pathElt the path elements of the request URI
     * @param wi the workflow item (in-progress submission) to expose
     */
    protected DAVWorkflowItem(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[],
            InProgressSubmission wi)
    {
        super(context, request, response, pathElt, wi);
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        // Shared static list built once in the static initializer; callers
        // must treat it as read-only.
        return allProps;
    }
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#typeValue()
*/
@Override
protected Element typeValue()
{
return new Element("workflow-item", DAV.NS_DSPACE);
}
// format the final path element for one of these
/**
* Gets the path elt.
*
* @param dbid the dbid
*
* @return the path elt
*/
protected static String getPathElt(int dbid)
{
return "wfi_db_" + String.valueOf(dbid);
}
    /**
     * Gets the path element naming the given workflow item, derived from its
     * database ID.
     *
     * @param wfi the workflow item
     *
     * @return the path element, e.g. "wfi_db_42"
     */
    protected static String getPathElt(WorkflowItem wfi)
    {
        return getPathElt(wfi.getID());
    }
/**
* Match resource URI.
*
* @param context the context
* @param request the request
* @param response the response
* @param pathElt the path elt
*
* @return the DAV resource
*
* @throws DAVStatusException the DAV status exception
* @throws SQLException the SQL exception
*/
protected static DAVResource matchResourceURI(Context context,
HttpServletRequest request, HttpServletResponse response,
String pathElt[]) throws DAVStatusException, SQLException
{
try
{
// Match "/workflow/wfi_db_<id>" URI
if (pathElt.length >= 2 && pathElt[0].startsWith("workflow_")
&& pathElt[1].startsWith("wfi_db_"))
{
// match /workflow/wfi_db_<id>/item_db_<id> ...
// should be an Item (or Bitstream) URI, child of this
// WorkflowItem.
if (pathElt.length >= 3)
{
DAVResource result = DAVItem.matchResourceURI(context,
request, response, pathElt);
if (result == null)
{
throw new DAVStatusException(
HttpServletResponse.SC_NOT_FOUND,
"Invalid resource path.");
}
else
{
return result;
}
}
// get this WFI
int id = Integer.parseInt(pathElt[1].substring(7));
InProgressSubmission ips = WorkflowItem.find(context, id);
if (ips == null)
{
log.warn("invalid WorkflowItem DB ID in DAV URI, " + "id="
+ pathElt[1]);
throw new DAVStatusException(
HttpServletResponse.SC_NOT_FOUND, "Not found: "
+ pathElt[1] + " does not exist.");
}
else
{
return new DAVWorkflowItem(context, request, response,
pathElt, ips);
}
}
return null;
}
catch (NumberFormatException ne)
{
throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
"Error parsing number in request URI.", ne);
}
}
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVInProgressSubmission#propfindInternal(org.jdom.Element)
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        // Give the superclass first crack at the common in-progress
        // properties; a non-null result means it handled the request.
        Element result = super.propfindInternal(property);
        if (result != null)
        {
            return result;
        }
        String value = null;
        // displayname - title or handle.
        if (elementsEqualIsh(property, displaynameProperty))
        {
            value = getPathElt(this.inProgressItem.getID());
        }
        else if (elementsEqualIsh(property, ownerProperty))
        {
            EPerson ep = ((WorkflowItem) this.inProgressItem).getOwner();
            if (ep != null)
            {
                value = hrefToEPerson(ep);
            }
        }
        else if (elementsEqualIsh(property, stateProperty))
        {
            value = WorkflowManager.getWorkflowText(((WorkflowItem) this.inProgressItem).getState());
        }
        else
        {
            // NOTE(review): super.propfindInternal() was already invoked at
            // the top of this method and returned null to get here, so this
            // second call likely returns null as well — meaning unmatched
            // properties yield null instead of the 404 thrown below. Confirm
            // intent against DAVInProgressSubmission.propfindInternal().
            return super.propfindInternal(property);
        }
        // value was set up by "if" clause:
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVInProgressSubmission#proppatchInternal(int, org.jdom.Element)
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        // Superclass handles properties shared by all in-progress
        // submissions; SC_OK means it consumed the change.
        if (super.proppatchInternal(action, prop) == HttpServletResponse.SC_OK)
        {
            return HttpServletResponse.SC_OK;
        }
        else if (elementsEqualIsh(prop, stateProperty))
        {
            if (action == DAV.PROPPATCH_REMOVE)
            {
                throw new DAVStatusException(DAV.SC_CONFLICT,
                        "The state property cannot be removed.");
            }
            // The new value is either a workflow "verb" (abort, reject,
            // advance, claim, unclaim) or a symbolic state name.
            String key = prop.getTextTrim();
            int newState;
            if (key.equalsIgnoreCase("abort"))
            {
                WorkflowManager.abort(this.context, (WorkflowItem) this.inProgressItem,
                        this.inProgressItem.getSubmitter());
            }
            else if (key.equalsIgnoreCase("reject"))
            {
                // Record who rejected it in the rejection message.
                EPerson cu = this.context.getCurrentUser();
                String who = cu == null ? "nobody" : cu.getFullName();
                WorkflowManager.reject(this.context, (WorkflowItem) this.inProgressItem,
                        this.inProgressItem.getSubmitter(), "Rejected by " + who
                                + ", via WebDAV Network Interface");
            }
            else if (key.equalsIgnoreCase("advance"))
            {
                WorkflowManager.advance(this.context, (WorkflowItem) this.inProgressItem,
                        this.context.getCurrentUser());
            }
            else if (key.equalsIgnoreCase("claim"))
            {
                WorkflowManager.claim(this.context, (WorkflowItem) this.inProgressItem,
                        this.context.getCurrentUser());
            }
            else if (key.equalsIgnoreCase("unclaim"))
            {
                WorkflowManager.unclaim(this.context, (WorkflowItem) this.inProgressItem,
                        this.context.getCurrentUser());
            }
            else
            {
                // Not a verb: interpret the value as a state name and set
                // the state directly.
                newState = WorkflowManager.getWorkflowID(key);
                if (newState >= 0)
                {
                    ((WorkflowItem) this.inProgressItem).setState(newState);
                }
                else
                {
                    throw new DAVStatusException(DAV.SC_CONFLICT,
                            "Unrecognized verb or state-name in value for state property.");
                }
            }
            // NOTE(review): update() also runs after verbs such as "abort"
            // that may take the item out of the workflow - confirm this is
            // still valid in that case.
            this.inProgressItem.update();
            return HttpServletResponse.SC_OK;
        }
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Element;
/**
 * The "eperson" resource is a collection (in the DAV sense) of all E-people in
 * DSpace.
 * <p>
 * Its children are all the relevant <code>EPerson</code> objects. These
 * resources cannot be altered.
 *
 * @author Larry Stone
 * @see DAVEPersonEPerson
 */
class DAVEPerson extends DAVResource
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVEPerson.class);

    /** Properties of this resource: just the common property set. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#typeValue()
     */
    @Override
    protected Element typeValue()
    {
        return new Element("eperson-collection", DAV.NS_DSPACE);
    }

    /**
     * Construct the "/eperson" collection resource.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     */
    protected DAVEPerson(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[])
    {
        super(context, request, response, pathElt);
        this.type = TYPE_OTHER;
    }

    /**
     * Build the resource path of an EPerson under this collection.
     *
     * @param ep the eperson
     *
     * @return the path, e.g. "eperson/ep_db_42"
     */
    protected static String getPath(EPerson ep)
    {
        String leaf = DAVEPersonEPerson.getPathElt(ep);
        return "eperson/" + leaf;
    }

    /**
     * Match the URIs this subclass understands and return the corresponding
     * resource.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     *
     * @return the DAV resource, or null when the URI is not ours
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException,
            AuthorizeException
    {
        // Anything not rooted at "/eperson" belongs to someone else.
        if (pathElt.length == 0 || !pathElt[0].equals("eperson"))
        {
            return null;
        }
        // A longer path names an individual e-person; a bare "/eperson"
        // is this collection itself.
        return pathElt.length > 1 ? DAVEPersonEPerson.matchResourceURI(
                context, request, response, pathElt) : new DAVEPerson(
                context, request, response, pathElt);
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        return allProps;
    }

    /**
     * If authenticated user is an Administrator, show all epersons, otherwise
     * just show current user (if there is one).
     *
     * @return array of children, all the EPerson records on site.
     *
     * @throws SQLException the SQL exception
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        if (!AuthorizeManager.isAdmin(this.context))
        {
            // Non-administrators see at most their own record.
            EPerson self = this.context.getCurrentUser();
            if (self == null)
            {
                return new DAVResource[0];
            }
            return new DAVResource[] { new DAVEPersonEPerson(this.context,
                    this.request, this.response,
                    makeChildPath(DAVEPersonEPerson.getPathElt(self.getID())),
                    self) };
        }
        // Administrator: one child per EPerson on the site.
        EPerson all[] = EPerson.findAll(this.context, EPerson.ID);
        log.debug("EPerson children(), got " + String.valueOf(all.length)
                + " e-people.");
        DAVResource result[] = new DAVResource[all.length];
        for (int i = 0; i < all.length; ++i)
        {
            EPerson ep = all[i];
            result[i] = new DAVEPersonEPerson(this.context, this.request,
                    this.response, makeChildPath(DAVEPersonEPerson
                            .getPathElt(ep.getID())), ep);
        }
        return result;
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#propfindInternal(org.jdom.Element)
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        // displayname is the only property answered here; delegate the rest.
        if (!elementsEqualIsh(property, displaynameProperty))
        {
            return commonPropfindInternal(property, true);
        }
        // displayname - the first path element, i.e. "eperson".
        String value = this.pathElt[0];
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        // No property of this collection may be modified.
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#get()
     */
    @Override
    protected void get() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "GET method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#put()
     */
    @Override
    protected void put() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "PUT method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#copyInternal(org.dspace.app.dav.DAVResource, int, boolean, boolean)
     */
    @Override
    protected int copyInternal(DAVResource destination, int depth,
            boolean overwrite, boolean keepProperties)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "COPY method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#deleteInternal()
     */
    @Override
    protected int deleteInternal() throws DAVStatusException, SQLException,
            AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "DELETE method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
     */
    @Override
    protected int mkcolInternal(String waste) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "MKCOL method not allowed for eperson.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

import org.jdom.Namespace;
/**
 * Container for common constants and other shared static bits. It is only
 * abstract to signify that it never gets instantiated.
 */
abstract class DAV
{
    /** Internal value for depth of infinity in PROPFIND, etc. */
    protected static final int DAV_INFINITY = -1;

    /** WebDAV extension HTTP status codes not found in HttpServletResponse. */
    protected static final int SC_MULTISTATUS = 207;

    /** The Constant SC_NO_CONTENT. */
    protected static final int SC_NO_CONTENT = 204;

    /** The Constant SC_CONFLICT. */
    protected static final int SC_CONFLICT = 409;

    /** The Constant SC_LOCKED. */
    protected static final int SC_LOCKED = 423;

    /** The Constant SC_FAILED_DEPENDENCY. */
    protected static final int SC_FAILED_DEPENDENCY = 424;

    /** The Constant SC_INSUFFICIENT_STORAGE. */
    protected static final int SC_INSUFFICIENT_STORAGE = 507;

    /** The Constant SC_UNPROCESSABLE_ENTITY. */
    protected static final int SC_UNPROCESSABLE_ENTITY = 422;

    /** Namespaces of interest. DAV: is WebDAV's namespace URI. */
    protected static final Namespace NS_DAV = Namespace.getNamespace("DAV:");

    /** DSpace XML namespace (for everything at the moment). */
    protected static final Namespace NS_DSPACE = Namespace.getNamespace(
            "dspace", "http://www.dspace.org/xmlns/dspace");

    /** PROPFIND operation types - our internal convention. */
    protected static final int PROPFIND_PROP = 1;

    /** The Constant PROPFIND_PROPNAME. */
    protected static final int PROPFIND_PROPNAME = 2;

    /** The Constant PROPFIND_ALLPROP. */
    protected static final int PROPFIND_ALLPROP = 3;

    /** PROPPATCH operation types - internal convention dictated by XML names:. */
    protected static final int PROPPATCH_SET = 1;

    /** The Constant PROPPATCH_REMOVE. */
    protected static final int PROPPATCH_REMOVE = 2;

    /**
     * Render a date in the RFC 1123 "HTTP-date" format used in HTTP/WebDAV
     * headers, e.g. "Thu, 01 Jan 1970 00:00:00 GMT".
     *
     * @param thisDate the date to format
     *
     * @return the formatted date, expressed in GMT
     */
    protected static String applyHttpDateFormat(Date thisDate)
    {
        // HTTP dates are defined to be in GMT with English day/month names.
        // The previous version formatted in the JVM's default time zone and
        // locale while appending a literal "GMT", producing incorrect
        // headers on non-GMT or non-English hosts.
        SimpleDateFormat fmt = new SimpleDateFormat(
                "EEE, dd MMM yyyy HH:mm:ss 'GMT'", Locale.US);
        fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
        return fmt.format(thisDate);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Element;
/**
 * Give read-only access to the contents of an EPerson object, through PROPFIND.
 * This resource is provided mainly so other resources (e.g. Item, WorkflowItem)
 * can refer to e-people as resources.
 * <p>
 *
 * @author Larry Stone
 * @see DAVEPerson
 */
class DAVEPersonEPerson extends DAVResource
{
    /** The EPerson this resource exposes. */
    private EPerson eperson = null;

    /**
     * Instantiates a new DAVEPersonEPerson resource.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     * @param ep the EPerson this resource exposes
     */
    protected DAVEPersonEPerson(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[], EPerson ep)
    {
        super(context, request, response, pathElt);
        this.type = TYPE_OTHER;
        this.eperson = ep;
    }

    /** The Constant emailProperty. */
    private static final Element emailProperty = new Element("email",
            DAV.NS_DSPACE);

    /** The Constant first_nameProperty. */
    private static final Element first_nameProperty = new Element("first_name",
            DAV.NS_DSPACE);

    /** The Constant last_nameProperty. */
    private static final Element last_nameProperty = new Element("last_name",
            DAV.NS_DSPACE);

    /** The Constant handleProperty. */
    private static final Element handleProperty = new Element("handle",
            DAV.NS_DSPACE);

    /** The Constant require_certificateProperty. */
    private static final Element require_certificateProperty = new Element(
            "require_certificate", DAV.NS_DSPACE);

    /** The Constant self_registeredProperty. */
    private static final Element self_registeredProperty = new Element(
            "self_registered", DAV.NS_DSPACE);

    /** The Constant can_loginProperty. */
    private static final Element can_loginProperty = new Element("can_login",
            DAV.NS_DSPACE);

    /** All properties visible on an eperson resource. */
    private static List<Element> allProps = new ArrayList<Element>();
    static
    {
        allProps.add(displaynameProperty);
        allProps.add(typeProperty);
        allProps.add(resourcetypeProperty);
        allProps.add(current_user_privilege_setProperty);
        allProps.add(emailProperty);
        allProps.add(first_nameProperty);
        allProps.add(last_nameProperty);
        allProps.add(handleProperty);
        allProps.add(require_certificateProperty);
        allProps.add(self_registeredProperty);
        allProps.add(can_loginProperty);
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        return allProps;
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#typeValue()
     */
    @Override
    protected Element typeValue()
    {
        return new Element("eperson", DAV.NS_DSPACE);
    }

    // format the final path element for one of these
    /**
     * Gets the path elt.
     *
     * @param dbid the eperson's database ID
     *
     * @return the path elt, e.g. "ep_db_42"
     */
    protected static String getPathElt(int dbid)
    {
        return "ep_db_" + String.valueOf(dbid);
    }

    // format the final path element for one of these
    /**
     * Gets the path elt.
     *
     * @param ep the eperson
     *
     * @return the path elt
     */
    protected static String getPathElt(EPerson ep)
    {
        return getPathElt(ep.getID());
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#children()
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        // an eperson is a leaf resource; it never has DAV children
        return new DAVResource[0];
    }

    /**
     * Match resource URI.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     *
     * @return the DAV resource, or null when the URI is not ours
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException,
            AuthorizeException
    {
        try
        {
            // Match "/eperson/ep_db_<id>" URI
            // -or- "/eperson/<email-addr>"
            // -or- "/eperson/current" - magic path to _current_ user (if any)
            // FIX: test the array length BEFORE dereferencing pathElt[0];
            // the original order threw ArrayIndexOutOfBoundsException on an
            // empty path. (length > 1 implies the array is non-empty.)
            if (pathElt.length > 1 && pathElt[0].equals("eperson"))
            {
                EPerson ep = null;
                if (pathElt.length > 2)
                {
                    throw new DAVStatusException(
                            HttpServletResponse.SC_NOT_FOUND,
                            "Invalid eperson resource path.");
                }
                if (pathElt[1].startsWith("ep_db_"))
                {
                    // database-ID form; skip the "ep_db_" prefix (6 chars)
                    int id = Integer.parseInt(pathElt[1].substring(6));
                    ep = EPerson.find(context, id);
                }
                else if (pathElt[1].equalsIgnoreCase("current"))
                {
                    ep = context.getCurrentUser();
                }
                else
                {
                    // anything else is treated as an e-mail address
                    ep = EPerson.findByEmail(context, pathElt[1]);
                }
                if (ep == null)
                {
                    throw new DAVStatusException(
                            HttpServletResponse.SC_NOT_FOUND,
                            "EPerson not found: " + pathElt[1]);
                }
                return new DAVEPersonEPerson(context, request, response,
                        pathElt, ep);
            }
            return null;
        }
        catch (NumberFormatException ne)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Error parsing number in request URI.", ne);
        }
    }

    // Authorization test - must be either admin or same as this user
    /**
     * Admin or self.
     *
     * @param context the context
     *
     * @return true, if the current user is an administrator or is the very
     *         EPerson this resource wraps
     *
     * @throws SQLException the SQL exception
     */
    private boolean adminOrSelf(Context context) throws SQLException
    {
        if (AuthorizeManager.isAdmin(context))
        {
            return true;
        }
        EPerson self = context.getCurrentUser();
        return self != null && this.eperson != null
                && self.getID() == this.eperson.getID();
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#propfindInternal(org.jdom.Element)
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        // displayname - the person's full name.
        if (elementsEqualIsh(property, displaynameProperty))
        {
            value = this.eperson.getFullName();
        }
        else if (elementsEqualIsh(property, emailProperty))
        {
            value = this.eperson.getEmail();
        }
        else if (elementsEqualIsh(property, first_nameProperty))
        {
            value = this.eperson.getFirstName();
        }
        else if (elementsEqualIsh(property, last_nameProperty))
        {
            value = this.eperson.getLastName();
        }
        // The remaining properties are restricted to administrators or the
        // person themselves.
        else if (elementsEqualIsh(property, handleProperty))
        {
            value = canonicalizeHandle(this.eperson.getHandle());
            if (!adminOrSelf(this.context))
            {
                throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                        "Not authorized to read this property.");
            }
        }
        else if (elementsEqualIsh(property, require_certificateProperty))
        {
            value = String.valueOf(this.eperson.getRequireCertificate());
            if (!adminOrSelf(this.context))
            {
                throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                        "Not authorized to read this property.");
            }
        }
        else if (elementsEqualIsh(property, self_registeredProperty))
        {
            value = String.valueOf(this.eperson.getSelfRegistered());
            if (!adminOrSelf(this.context))
            {
                throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                        "Not authorized to read this property.");
            }
        }
        else if (elementsEqualIsh(property, can_loginProperty))
        {
            value = String.valueOf(this.eperson.canLogIn());
            if (!adminOrSelf(this.context))
            {
                throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                        "Not authorized to read this property.");
            }
        }
        else
        {
            return commonPropfindInternal(property, false);
        }
        // value was set up by "if" clause; null means "no value present".
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        // Properties are read-only; still report 403 rather than 409 to an
        // unauthorized caller.
        if (!adminOrSelf(this.context))
        {
            throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                    "No authorization to read this EPerson.");
        }
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#get()
     */
    @Override
    protected void get() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "GET method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#put()
     */
    @Override
    protected void put() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "PUT method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#copyInternal(org.dspace.app.dav.DAVResource, int, boolean, boolean)
     */
    @Override
    protected int copyInternal(DAVResource destination, int depth,
            boolean overwrite, boolean keepProperties)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "COPY method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#deleteInternal()
     */
    @Override
    protected int deleteInternal() throws DAVStatusException, SQLException,
            AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "DELETE method not implemented for eperson.");
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
     */
    @Override
    protected int mkcolInternal(String waste) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "MKCOL method not allowed for eperson.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.jdom.Element;
import org.jdom.Namespace;
/**
* This defines the behavior of DSpace "resources" in the WebDAV interface; it
* maps DAV operations onto DSpace object.
*/
class DAVBitstream extends DAVDSpaceObject
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVBitstream.class);

    /** The Item through which this bitstream was reached; stays null when the
     *  bitstream was addressed directly (the "retrieve_<dbid>" URI form). */
    private Item item = null;

    /** The Bitstream this resource wraps. */
    private Bitstream bitstream = null;

    /** The Constant BITSTREAM_INLINE_THRESHOLD.
     * The longest bitstream that should be rendered "inline" (base64)
     * see makeXmlBitstream
     */
    private static final int BITSTREAM_INLINE_THRESHOLD = 2000;

    /** The Constant getcontentlengthProperty. */
    private static final Element getcontentlengthProperty = new Element(
            "getcontentlength", DAV.NS_DAV);

    /** The Constant getcontenttypeProperty. */
    private static final Element getcontenttypeProperty = new Element(
            "getcontenttype", DAV.NS_DAV);

    /** The Constant sourceProperty. */
    private static final Element sourceProperty = new Element("source",
            DAV.NS_DSPACE);

    /** The Constant descriptionProperty. */
    private static final Element descriptionProperty = new Element(
            "description", DAV.NS_DSPACE);

    /** The Constant formatProperty. */
    private static final Element formatProperty = new Element("format",
            DAV.NS_DSPACE);

    /** The Constant format_descriptionProperty. */
    private static final Element format_descriptionProperty = new Element(
            "format_description", DAV.NS_DSPACE);

    /** The Constant checksumProperty. */
    private static final Element checksumProperty = new Element("checksum",
            DAV.NS_DSPACE);

    /** The Constant checksum_algorithmProperty. */
    private static final Element checksum_algorithmProperty = new Element(
            "checksum_algorithm", DAV.NS_DSPACE);

    /** The Constant sequence_idProperty. */
    private static final Element sequence_idProperty = new Element(
            "sequence_id", DAV.NS_DSPACE);

    /** The Constant bundleProperty. */
    private static final Element bundleProperty = new Element("bundle",
            DAV.NS_DSPACE);

    /** All properties of a bitstream: the common set plus the ones above. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);
    static
    {
        allProps.add(getcontentlengthProperty);
        allProps.add(getcontenttypeProperty);
        allProps.add(sourceProperty);
        allProps.add(descriptionProperty);
        allProps.add(formatProperty);
        allProps.add(format_descriptionProperty);
        allProps.add(checksumProperty);
        allProps.add(checksum_algorithmProperty);
        allProps.add(sequence_idProperty);
        allProps.add(bundleProperty);
        allProps.add(handleProperty);
    }
    /**
     * Instantiates a new DAV bitstream addressed directly by database ID.
     * This gets called by matchResourceURI, for the /retrieve_<dbid> URI
     * format; no owning Item is recorded (this.item stays null).
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     * @param bitstream the bitstream
     */
    protected DAVBitstream(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[], Bitstream bitstream)
    {
        super(context, request, response, pathElt, bitstream);
        this.bitstream = bitstream;
        this.type = TYPE_BITSTREAM;
    }

    /**
     * Instantiates a new DAV bitstream reached through its owning Item.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     * @param item the item through which the bitstream was reached
     * @param bitstream the bitstream
     */
    protected DAVBitstream(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[], Item item,
            Bitstream bitstream)
    {
        super(context, request, response, pathElt, bitstream);
        this.bitstream = bitstream;
        this.type = TYPE_BITSTREAM;
        this.item = item;
    }
/**
* Make bitstream path element with filename extension, if given.
*
* @param sid the sid
* @param ext the ext
*
* @return bitstream path element
*/
protected static String getPathElt(int sid, String ext)
{
return "bitstream_" + String.valueOf(sid)
+ (ext == null ? "" : "." + ext);
}
/**
* Attempt to locate Bitstream object from URI. pathElt is
* "bitstream_{sid}.ext" or "retrieve_{db-id}.ext"
*
* @param context the context
* @param item the item
* @param pathElt the path elt
*
* @return the bitstream found (any errors throw an exception)
*
* @throws SQLException the SQL exception
* @throws DAVStatusException the DAV status exception
*/
protected static Bitstream findBitstream(Context context, Item item,
String pathElt) throws SQLException, DAVStatusException
{
try
{
// get rid of extension, if any, e.g. ".pdf"
int dot = pathElt.indexOf('.');
String strId = (dot >= 0) ? pathElt.substring(0, dot) : pathElt;
Bitstream result = null;
if (strId.startsWith("bitstream_"))
{
strId = strId.substring(10);
result = getBitstreamBySequenceID(item, Integer.parseInt(strId));
}
else if (strId.startsWith("retrieve_"))
{
strId = strId.substring(9);
result = Bitstream.find(context, Integer.parseInt(strId));
}
else
{
throw new DAVStatusException(
HttpServletResponse.SC_BAD_REQUEST,
"Unrecognized bitstream URI format.");
}
if (result == null)
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
"No bitstream at this sequence ID: " + pathElt);
}
return result;
}
catch (NumberFormatException nfe)
{
throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
"Invalid Bitstream Sequence ID in URI: " + pathElt, nfe);
}
}
/**
* Find bitstream with matching sequence id.
*
* @param item the item
* @param sid the sid
*
* @return bitstream, or null if none found.
*
* @throws SQLException the SQL exception
*/
protected static Bitstream getBitstreamBySequenceID(Item item, int sid)
throws SQLException
{
Bundle[] bundles = item.getBundles();
for (Bundle element : bundles)
{
Bitstream[] bitstreams = element.getBitstreams();
for (Bitstream element0 : bitstreams)
{
if (sid == element0.getSequenceID())
{
return element0;
}
}
}
return null;
}
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        // Static set built above: common properties plus bitstream-specific.
        return allProps;
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#children()
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        // A bitstream is a leaf resource; it never has DAV children.
        return new DAVResource[0];
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVDSpaceObject#propfindInternal(org.jdom.Element)
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        /*
         * FIXME: This implements permission check that really belongs in
         * business logic. Although communities and collections don't check for
         * read auth, Bitstream may contain sensitive data and should always
         * check for READ permission.
         */
        // Enforced before ANY property is disclosed.
        AuthorizeManager.authorizeAction(this.context, this.bitstream, Constants.READ);
        // displayname - bitstream name, or a synthesized default when unset.
        if (elementsEqualIsh(property, displaynameProperty))
        {
            value = this.bitstream.getName();
            if (value == null)
            {
                value = makeDisplayname();
            }
        }
        else if (elementsEqualIsh(property, getcontentlengthProperty))
        {
            value = String.valueOf(this.bitstream.getSize());
        }
        else if (elementsEqualIsh(property, getcontenttypeProperty))
        {
            value = this.bitstream.getFormat().getMIMEType();
        }
        else if (elementsEqualIsh(property, sourceProperty))
        {
            value = this.bitstream.getSource();
        }
        else if (elementsEqualIsh(property, descriptionProperty))
        {
            value = this.bitstream.getDescription();
        }
        else if (elementsEqualIsh(property, formatProperty))
        {
            // format - the short description of the bitstream's format
            BitstreamFormat bsf = this.bitstream.getFormat();
            value = bsf == null ? null : bsf.getShortDescription();
        }
        else if (elementsEqualIsh(property, format_descriptionProperty))
        {
            value = this.bitstream.getFormatDescription();
        }
        else if (elementsEqualIsh(property, checksumProperty))
        {
            value = this.bitstream.getChecksum();
        }
        else if (elementsEqualIsh(property, checksum_algorithmProperty))
        {
            value = this.bitstream.getChecksumAlgorithm();
        }
        else if (elementsEqualIsh(property, sequence_idProperty))
        {
            // negative sequence ID means "not assigned yet" -> 404 below
            int sid = this.bitstream.getSequenceID();
            if (sid >= 0)
            {
                value = String.valueOf(sid);
            }
        }
        else if (elementsEqualIsh(property, bundleProperty))
        {
            // bundle - name of the first bundle containing this bitstream
            Bundle bn[] = this.bitstream.getBundles();
            if (bn != null && bn.length > 0)
            {
                value = bn[0].getName();
            }
        }
        else
        {
            return super.propfindInternal(property);
        }
        // value was set up by "if" clause; null means "no value present".
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }
    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        Namespace ns = prop.getNamespace();
        String propName = prop.getName();
        // true when the property lives in the DSpace namespace
        boolean nsDspace = ns != null && ns.equals(DAV.NS_DSPACE);
        // PROPPATCH_REMOVE is modelled as setting the field to null
        String newValue = (action == DAV.PROPPATCH_REMOVE) ? null : prop
                .getText();
        // displayname - arbitrary string
        if (elementsEqualIsh(prop, displaynameProperty))
        {
            this.bitstream.setName(newValue);
        }
        else if (nsDspace && propName.equals("description"))
        {
            this.bitstream.setDescription(newValue);
        }
        else if (nsDspace && propName.equals("source"))
        {
            this.bitstream.setSource(newValue);
        }
        else if (nsDspace && propName.equals("format_description"))
        {
            this.bitstream.setUserFormatDescription(newValue);
        }
        else if (nsDspace && propName.equals("format"))
        {
            if (action == DAV.PROPPATCH_REMOVE)
            {
                throw new DAVStatusException(DAV.SC_CONFLICT,
                        "The format property cannot be removed.");
            }
            // Look up the new format by its short description.
            BitstreamFormat bsf = BitstreamFormat.findByShortDescription(
                    this.context, newValue);
            if (bsf == null)
            {
                throw new DAVStatusException(DAV.SC_CONFLICT,
                        "Cannot set format, no such Bitstream Format: "
                                + newValue);
            }
            this.bitstream.setFormat(bsf);
        }
        else
        {
            throw new DAVStatusException(DAV.SC_CONFLICT, "The "
                    + prop.getName() + " property cannot be changed.");
        }
        // this assumes we got through an IF clause and changed something:
        this.bitstream.update();
        return HttpServletResponse.SC_OK;
    }
/**
 * GET: stream the bitstream's raw contents to the HTTP response, setting
 * Content-Type from the bitstream's format and Content-Length from its
 * stored size.
 *
 * Fix: the input stream returned by {@code bitstream.retrieve()} is now
 * closed in a finally block, so it is not leaked when the copy to the
 * response fails partway through.
 *
 * @throws DAVStatusException SC_NOT_FOUND when this resource has no
 *         underlying bitstream
 */
@Override
protected void get() throws SQLException, AuthorizeException,
    IOException, DAVStatusException
{
    if (this.bitstream == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                "Bitstream not found, URI=\"" + hrefURL() + "\"");
    }
    else
    {
        // only item-attached bitstreams are logged with a handle
        if (this.item != null)
        {
            log.info(LogManager.getHeader(this.context, "DAV GET Bitstream",
                "item handle=" + this.item.getHandle() + ", bitstream_id="
                    + this.bitstream.getID()));
        }
        // Set the response MIME type
        this.response.setContentType(this.bitstream.getFormat().getMIMEType());
        // Response length
        this.response.setHeader("Content-Length", String.valueOf(this.bitstream
            .getSize()));
        // Pipe the bits; always release the asset-store stream, even if
        // the client disconnects and the copy throws.
        InputStream is = this.bitstream.retrieve();
        try
        {
            Utils.bufferedCopy(is, this.response.getOutputStream());
        }
        finally
        {
            is.close();
        }
        this.response.getOutputStream().flush();
    }
}
/**
 * PUT is not supported on a Bitstream resource.
 *
 * @throws DAVStatusException always, with 501 Not Implemented.
 */
@Override
protected void put() throws SQLException, AuthorizeException,
IOException, DAVStatusException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"PUT is not implemented for Bitstream (yet?).");
}
/**
 * COPY is not supported on a Bitstream resource.
 *
 * @throws DAVStatusException always, with 501 Not Implemented.
 */
@Override
protected int copyInternal(DAVResource destination, int depth,
boolean overwrite, boolean keepProperties)
throws DAVStatusException, SQLException, AuthorizeException,
IOException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"COPY method not implemented.");
}
/**
 * Build the default value for the displayname property: the last path
 * element of this resource's canonical URI. Loose bitstreams (no owning
 * Item, e.g. logos) are named by database ID, item bitstreams by
 * sequence number, with the format's first file extension appended.
 *
 * NOTE(review): the extension is appended with no "." separator
 * (e.g. "bitstream_3pdf") -- preserved as-is here, but worth confirming
 * this is intentional.
 *
 * @return name string
 */
private String makeDisplayname()
{
    StringBuilder name = new StringBuilder();
    if (this.item == null)
    {
        // loose bitstream: only identifiable by database ID
        name.append("retrieve_").append(this.bitstream.getID());
    }
    else
    {
        // bitstream inside an item: keyed by its sequence number
        name.append("bitstream_").append(this.bitstream.getSequenceID());
    }
    String extensions[] = this.bitstream.getFormat().getExtensions();
    if (extensions.length > 0)
    {
        name.append(extensions[0]);
    }
    return name.toString();
}
/**
 * Match the URIs this subclass understands and return the corresponding
 * resource, or null when the path is not ours.
 *
 * Only the /retrieve_&lt;DbId&gt; form is recognized here. NOTE: this is
 * an evil kludge to get raw bitstreams by database ID, required to
 * implement the link form of the &lt;dspace:bitstream&gt; element in
 * properties -- the "logo" of a Community or Collection is a loose
 * bitstream not attached to any Item, so a direct DB-ID reference is the
 * only way to address it.
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 *
 * @return the DAV resource, or null if the URI is not recognized
 *
 * @throws DAVStatusException the DAV status exception
 * @throws SQLException the SQL exception
 */
protected static DAVResource matchResourceURI(Context context,
    HttpServletRequest request, HttpServletResponse response,
    String pathElt[]) throws DAVStatusException, SQLException
{
    // anything that is not a retrieve_ URI belongs to another matcher
    if (!pathElt[0].startsWith("retrieve_"))
    {
        return null;
    }
    Bitstream bs = findBitstream(context, null, pathElt[0]);
    return new DAVBitstream(context, request, response, pathElt, bs);
}
/**
 * Returns an XML representation of a bitstream -- either inline content or
 * a link reference. The XML looks like:
 *
 * <pre>
 * <dspace:bitstream>
 * <dspace:link href="url-to-bitstream">
 * </dspace:bitstream>
 * ...or...
 * <dspace:bitstream>
 * <dspace:content contenttype="image/gif" contentlength="299" contentencoding="base64">
 * ...text of base64..
 * </dspace:content>
 * </dspace:bitstream>
 * NOTE: contentlength is the DECODED length of the content.
 * </pre>
 *
 * Used by the "logo" property on collections and communities.
 *
 * @param bitstream the bitstream
 * @param resource the resource
 *
 * @return the element
 *
 * @throws AuthorizeException the authorize exception
 * @throws SQLException the SQL exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
protected static Element makeXmlBitstream(Bitstream bitstream,
DAVResource resource) throws AuthorizeException, SQLException,
IOException
{
Element b = new Element("bitstream", DAV.NS_DSPACE);
long length = bitstream.getSize();
BitstreamFormat bf = bitstream.getFormat();
// Large bitstreams are represented by a retrieve_<id> link rather than
// inlined, to keep property responses bounded in size.
if (length > BITSTREAM_INLINE_THRESHOLD)
{
Element e = new Element("link", DAV.NS_DSPACE);
e.setAttribute("href", resource.hrefPrefix() + "retrieve_"
+ String.valueOf(bitstream.getID()));
b.addContent(e);
}
else
{
Element e = new Element("content", DAV.NS_DSPACE);
if (bf != null)
{
e.setAttribute("contenttype", bf.getMIMEType());
}
// contentlength is the DECODED length, not the base64 length
e.setAttribute("contentlength", String.valueOf(length));
e.setAttribute("contentencoding", "base64");
b.addContent(e);
// write encoding of bitstream contents
// NOTE(review): the (int) length cast assumes
// BITSTREAM_INLINE_THRESHOLD < Integer.MAX_VALUE -- confirm.
ByteArrayOutputStream baos = new ByteArrayOutputStream((int) length);
Utils.copy(bitstream.retrieve(), baos);
e.setText(new String(Base64.encodeBase64(baos.toByteArray())));
}
return b;
}
/**
 * Extract bitstream content from the XML representation, i.e.
 *
 * <pre>
 * <dspace:bitstream>
 * <dspace:content contenttype="image/gif"
 * contentlength="299"
 * contentencoding="base64">
 * ...text of base64..
 * </dspace:content>
 * </dspace:bitstream>
 * </pre>
 *
 * In the above format, contenttype and contentencoding attributes of
 * content are REQUIRED.
 *
 * @param context the context
 * @param xb the xb
 *
 * @return inputstream of the contents of the data, or null on error.
 */
protected static InputStream getXmlBitstreamContent(Context context,
    Element xb)
{
    Element content = xb.getChild("content", DAV.NS_DSPACE);
    if (content == null)
    {
        return null;
    }
    // base64 is the only encoding understood; a missing or different
    // contentencoding attribute yields null.
    String encoding = content.getAttributeValue("contentencoding");
    if (!"base64".equals(encoding))
    {
        return null;
    }
    byte decoded[] = Base64.decodeBase64(content.getText().getBytes());
    return new ByteArrayInputStream(decoded);
}
/**
 * Get the content-type from an XML-encoded bitstream.
 *
 * @param context required for reading the DB.
 * @param xb XML bitstream representation in JDOM.
 *
 * @return First BitstreamFormat matching content-type string, or null if
 * none.
 *
 * @throws SQLException the SQL exception
 */
protected static BitstreamFormat getXmlBitstreamFormat(Context context,
    Element xb) throws SQLException
{
    Element content = xb.getChild("content", DAV.NS_DSPACE);
    if (content == null)
    {
        return null;
    }
    String contentType = content.getAttributeValue("contenttype");
    if (contentType == null)
    {
        return null;
    }
    // linear scan of the format registry; first MIME-type match wins
    for (BitstreamFormat candidate : BitstreamFormat.findAll(context))
    {
        if (contentType.equals(candidate.getMIMEType()))
        {
            return candidate;
        }
    }
    return null;
}
/**
 * DELETE is not supported on a Bitstream resource.
 *
 * @throws DAVStatusException always, with 501 Not Implemented.
 */
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
AuthorizeException, IOException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"DELETE method not implemented for BitStream.");
}
/**
 * MKCOL is not allowed: a Bitstream cannot contain child resources.
 *
 * @throws DAVStatusException always, with 405 Method Not Allowed.
 */
@Override
protected int mkcolInternal(String waste) throws DAVStatusException,
SQLException, AuthorizeException, IOException
{
throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
"MKCOL method not allowed for BitStream.");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
import org.jdom.Element;
/**
 * The "workflow_*" resources are collections (in the DAV sense) of all the
 * current user's <code>WorkflowItem</code>s. Each one is nothing more than a
 * read-only collection to list the items.
 * <p>
 * The <code>workflow_own</code> resource lists items <em>owned</em> by the
 * current user, while the <code>workflow_pool</code> lists items in the
 * workflow pool which may be acquired by the current user.
 * <p>
 * Its children are all the relevant <code>WorkflowItem</code>s. These
 * resources cannot be altered.
 * <p>
 *
 * @author Larry Stone
 * @see DAVWorkflowItem
 */
class DAVWorkflow extends DAVResource
{
    /** log4j category. */
    private static final Logger log = Logger.getLogger(DAVWorkflow.class);

    /** Properties visible on this resource: just the common set. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);

    /**
     * Value of the DAV resourcetype: a dspace:workflow element.
     *
     * @return type element
     */
    @Override
    protected Element typeValue()
    {
        return new Element("workflow", DAV.NS_DSPACE);
    }

    /**
     * Instantiates a new DAV workflow.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     */
    protected DAVWorkflow(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[])
    {
        super(context, request, response, pathElt);
        this.type = TYPE_OTHER;
    }

    /**
     * Gets the URI path to a workflow item.
     *
     * NOTE(review): always uses the "workflow_pool" prefix, even for items
     * a user may own -- confirm this is the intended canonical path.
     *
     * @param wfi the wfi
     *
     * @return URI path to this object.
     */
    protected static String getPath(WorkflowItem wfi)
    {
        return "workflow_pool/" + DAVWorkflowItem.getPathElt(wfi);
    }

    /**
     * Match the URIs this subclass understands and return the corresponding
     * resource. Recognizes "workflow_own" and "workflow_pool" as collection
     * resources; longer paths are delegated to DAVWorkflowItem.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     *
     * @return the DAV resource, or null if the path is not a workflow URI
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException
    {
        // The "/workflow" URI:
        if (pathElt.length > 0 && pathElt[0].startsWith("workflow_"))
        {
            if (pathElt.length > 1)
            {
                return DAVWorkflowItem.matchResourceURI(context, request,
                        response, pathElt);
            }
            else if (pathElt[0].equals("workflow_own")
                    || pathElt[0].equals("workflow_pool"))
            {
                return new DAVWorkflow(context, request, response, pathElt);
            }
            else
            {
                throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                        "Unrecognized URI path element: " + pathElt[0]);
            }
        }
        return null;
    }

    /* (non-Javadoc)
     * @see org.dspace.app.dav.DAVResource#getAllProperties()
     */
    @Override
    protected List<Element> getAllProperties()
    {
        return allProps;
    }

    /**
     * List the current user's workflow items as child resources: owned
     * tasks for "workflow_own", pooled tasks for "workflow_pool". Returns
     * an empty array when there is no authenticated user.
     *
     * @return child resources
     *
     * @throws SQLException the SQL exception
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        EPerson ep = this.context.getCurrentUser();
        if (ep != null)
        {
            List<WorkflowItem> wi = null;
            if (this.pathElt[0].equals("workflow_own"))
            {
                wi = WorkflowManager.getOwnedTasks(this.context, ep);
            }
            else if (this.pathElt[0].equals("workflow_pool"))
            {
                wi = WorkflowManager.getPooledTasks(this.context, ep);
            }
            if (wi != null)
            {
                log.debug("children(): Got " + String.valueOf(wi.size())
                        + " Workflow Items.");
                DAVResource result[] = new DAVResource[wi.size()];
                int i = 0;
                // generic for-each replaces the former raw-typed
                // ListIterator and its unchecked cast
                for (WorkflowItem wfi : wi)
                {
                    result[i++] = new DAVWorkflowItem(this.context, this.request,
                            this.response, makeChildPath(DAVWorkflowItem
                                    .getPathElt(wfi.getID())), wfi);
                }
                return result;
            }
        }
        return new DAVResource[0];
    }

    /**
     * PROPFIND: displayname is the resource's own path element
     * ("workflow_own" or "workflow_pool"); all other properties fall
     * through to the common handler.
     *
     * @param property the requested property
     *
     * @return property element with value
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        // displayname - title or handle.
        if (elementsEqualIsh(property, displaynameProperty))
        {
            value = this.pathElt[0];
        }
        else
        {
            return commonPropfindInternal(property, true);
        }
        // value was set up by "if" clause:
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }

    /**
     * PROPPATCH: this resource is read-only; all properties are immutable.
     *
     * @throws DAVStatusException always, with SC_CONFLICT.
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }

    /**
     * GET is not supported for workflow collections.
     */
    @Override
    protected void get() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "GET method not implemented for workflow.");
    }

    /**
     * PUT is not supported for workflow collections.
     */
    @Override
    protected void put() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "PUT method not implemented for workflow.");
    }

    /**
     * COPY is not supported for workflow collections.
     */
    @Override
    protected int copyInternal(DAVResource destination, int depth,
            boolean overwrite, boolean keepProperties)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "COPY method not implemented for workflow.");
    }

    /**
     * DELETE is not supported for workflow collections.
     */
    @Override
    protected int deleteInternal() throws DAVStatusException, SQLException,
            AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "DELETE method not implemented for Workflow.");
    }

    /**
     * MKCOL is not allowed for workflow collections.
     */
    @Override
    protected int mkcolInternal(String waste) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "MKCOL method not allowed for Workflow.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
/**
 * Encapsulate the components of an HTTP status response, namely an integer code
 * and a text message. This lets a method pass a complete HTTP status up its
 * chain of callers via the exception mechanism.
 *
 * @author Larry Stone
 */
public class DAVStatusException extends Exception
{
    /** Declared so serialized form is stable; Exception is Serializable. */
    private static final long serialVersionUID = 1L;

    /** The HTTP status code, e.g. 404. Immutable after construction. */
    private final int status;

    /** The status message alone (the superclass message also carries the code). */
    private final String message;

    /**
     * Instantiates a new DAV status exception.
     *
     * @param status the HTTP status code
     * @param msg the status message
     */
    protected DAVStatusException(int status, String msg)
    {
        // superclass message is "<code> - <msg>"; getMessage() below
        // overrides it to return just msg
        super(String.valueOf(status) + " - " + msg);
        this.status = status;
        this.message = msg;
    }

    /**
     * Instantiates a new DAV status exception with an underlying cause.
     *
     * @param status the HTTP status code
     * @param msg the status message
     * @param cause the cause, preserved for the stack trace
     */
    protected DAVStatusException(int status, String msg, Throwable cause)
    {
        super(String.valueOf(status) + " - " + msg, cause);
        this.status = status;
        this.message = msg;
    }

    /**
     * Returns an HTTP-format status line.
     *
     * @return string representing HTTP status line (w/o trailing newline)
     */
    protected String getStatusLine()
    {
        return "HTTP/1.1 " + String.valueOf(this.status) + " " + this.message;
    }

    /**
     * Return the status code.
     *
     * @return status code set in this exception.
     */
    protected int getStatus()
    {
        return this.status;
    }

    /**
     * Return the status message.
     *
     * @return status message set in this exception.
     */
    @Override
    public String getMessage()
    {
        return this.message;
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Element;
/**
 * Superclass of the two kinds of resources that implement DSpace
 * InProgressSubmission objects, namely WorkspaceItem and WorkflowItem. It
 * coalesces their common code.
 * <p>
 *
 * @author Larry Stone
 * @see DAVWorkspaceItem
 * @see DAVWorkflowItem
 */
abstract class DAVInProgressSubmission extends DAVResource
{
/** DSpace object which this resource represents. */
protected InProgressSubmission inProgressItem = null;
/** Property dspace:collection -- handle of the target collection (read-only). */
protected static final Element collectionProperty = new Element(
"collection", DAV.NS_DSPACE);
/** Property dspace:submitter -- href of the submitting EPerson (read-only). */
protected static final Element submitterProperty = new Element("submitter",
DAV.NS_DSPACE);
/** Property dspace:has_multiple_files -- writable boolean submission flag. */
protected static final Element has_multiple_filesProperty = new Element(
"has_multiple_files", DAV.NS_DSPACE);
/** Property dspace:has_multiple_titles -- writable boolean submission flag. */
protected static final Element has_multiple_titlesProperty = new Element(
"has_multiple_titles", DAV.NS_DSPACE);
/** Property dspace:is_published_before -- writable boolean submission flag. */
protected static final Element is_published_beforeProperty = new Element(
"is_published_before", DAV.NS_DSPACE);
/** State of in-progress item. Defined here so workflow items and workspace items can both get at it for PROPPATCH; but don't allow it in the list of common properties. since it's write-only for DAVWorkspaceItem. */
protected static final Element stateProperty = new Element("state",
DAV.NS_DSPACE);
/** Commonly visible properties. */
protected static List<Element> inProgressProps = new ArrayList<Element>(9);
// populate the shared property list once, at class-load time
static
{
inProgressProps.add(current_user_privilege_setProperty);
inProgressProps.add(displaynameProperty);
inProgressProps.add(resourcetypeProperty);
inProgressProps.add(typeProperty);
inProgressProps.add(collectionProperty);
inProgressProps.add(submitterProperty);
inProgressProps.add(has_multiple_filesProperty);
inProgressProps.add(has_multiple_titlesProperty);
inProgressProps.add(is_published_beforeProperty);
}
/**
 * Instantiates a new DAV in progress submission.
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 * @param wi the InProgressSubmission this resource wraps
 */
protected DAVInProgressSubmission(Context context,
HttpServletRequest request, HttpServletResponse response,
String pathElt[], InProgressSubmission wi)
{
super(context, request, response, pathElt);
this.inProgressItem = wi;
this.type = TYPE_OTHER;
}
/**
 * The only child is the Item this wraps:.
 *
 * @return the DAV resource[]
 *
 * @throws SQLException the SQL exception
 */
@Override
protected DAVResource[] children() throws SQLException
{
DAVResource result[] = new DAVResource[1];
Item item = this.inProgressItem.getItem();
result[0] = new DAVItem(this.context, this.request, this.response,
makeChildPath(DAVItem.getPathElt(item.getID())), item);
return result;
}
/**
 * Typically overridden by subclass which then calls back via "super";.
 *
 * @param property the property
 *
 * @return null if nothing matched so subclass has a chance.
 *
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws DAVStatusException the DAV status exception
 */
@Override
protected Element propfindInternal(Element property) throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
String value = null;
if (elementsEqualIsh(property, collectionProperty))
{
value = canonicalizeHandle(this.inProgressItem.getCollection()
.getHandle());
}
else if (elementsEqualIsh(property, submitterProperty))
{
// submitter may be null; value then stays null and 404s below
EPerson ep = this.inProgressItem.getSubmitter();
if (ep != null)
{
value = hrefToEPerson(ep);
}
}
else if (elementsEqualIsh(property, has_multiple_filesProperty))
{
value = String.valueOf(this.inProgressItem.hasMultipleFiles());
}
else if (elementsEqualIsh(property, has_multiple_titlesProperty))
{
value = String.valueOf(this.inProgressItem.hasMultipleTitles());
}
else if (elementsEqualIsh(property, is_published_beforeProperty))
{
value = String.valueOf(this.inProgressItem.isPublishedBefore());
}
else if (elementsEqualIsh(property, current_user_privilege_setProperty))
{
// if we see a WFI/WSI, we are the owner and have all privs:
Element c = (Element) current_user_privilege_setProperty.clone();
addPrivilege(c, new Element("all", DAV.NS_DAV));
return c;
}
else
{
return commonPropfindInternal(property, true);
}
// value was set up by "if" clause:
if (value == null)
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
"Not found.");
}
Element p = new Element(property.getName(), property.getNamespace());
p.setText(filterForXML(value));
return p;
}
/**
 * Parses the boolean.
 *
 * @param in the string to parse; only "true"/"false" (any case) accepted
 *
 * @return true, if successful
 *
 * @throws DAVStatusException SC_CONFLICT on any other value
 */
static boolean parseBoolean(String in) throws DAVStatusException
{
in = in.trim();
if (in.equalsIgnoreCase("true"))
{
return true;
}
else if (in.equalsIgnoreCase("false"))
{
return false;
}
throw new DAVStatusException(DAV.SC_CONFLICT,
"Unacceptable value for boolean: " + in);
}
/**
 * Since this is in a superclass, subclass must call it first and return if
 * it answers SC_OK. Otherwise, subclass gets a chance to set a property.
 *
 * @param action the action
 * @param prop the prop
 *
 * @return HTTP status - SC_OK means it set something, SC_NOT_FOUND if no
 * property was matched.
 *
 * @throws DAVStatusException when property cannot be changed.
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
protected int proppatchInternal(int action, Element prop)
throws SQLException, AuthorizeException, IOException,
DAVStatusException
{
// these properties are structural/read-only; reject up front
if (elementsEqualIsh(prop, displaynameProperty)
|| elementsEqualIsh(prop, resourcetypeProperty)
|| elementsEqualIsh(prop, typeProperty)
|| elementsEqualIsh(prop, collectionProperty)
|| elementsEqualIsh(prop, submitterProperty))
{
throw new DAVStatusException(DAV.SC_CONFLICT, "The "
+ prop.getName() + " property cannot be changed.");
}
// REMOVE action maps to null; SET uses the element's text
String newValue = (action == DAV.PROPPATCH_REMOVE) ? null : prop
.getText();
if (elementsEqualIsh(prop, has_multiple_filesProperty))
{
this.inProgressItem.setMultipleFiles(parseBoolean(newValue));
}
else if (elementsEqualIsh(prop, has_multiple_titlesProperty))
{
this.inProgressItem.setMultipleTitles(parseBoolean(newValue));
}
else if (elementsEqualIsh(prop, is_published_beforeProperty))
{
this.inProgressItem.setPublishedBefore(parseBoolean(newValue));
}
else
{
// let the subclass try this property
return HttpServletResponse.SC_NOT_FOUND;
}
this.inProgressItem.update();
return HttpServletResponse.SC_OK;
}
/**
 * GET is not supported on an in-progress submission.
 */
@Override
protected void get() throws SQLException, AuthorizeException,
IOException, DAVStatusException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"GET method not implemented for workspace.");
}
/**
 * PUT is not supported on an in-progress submission.
 */
@Override
protected void put() throws SQLException, AuthorizeException,
IOException, DAVStatusException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"PUT method not implemented for workspace.");
}
/**
 * Implement a "copy" into a collection by adding the child Item to the
 * Collection -- this isn't strictly what they asked for, but it *is* what
 * they want, so DWIM here.
 *
 * @param destination the destination
 * @param depth the depth
 * @param overwrite the overwrite
 * @param keepProperties the keep properties
 *
 * @return the int
 *
 * @throws DAVStatusException the DAV status exception
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
protected int copyInternal(DAVResource destination, int depth,
boolean overwrite, boolean keepProperties)
throws DAVStatusException, SQLException, AuthorizeException,
IOException
{
return DAVItem.addItemToCollection(this.context, this.inProgressItem.getItem(),
destination, overwrite);
}
/**
 * DELETE is not supported on an in-progress submission.
 */
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
AuthorizeException, IOException
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
"DELETE method not implemented for InProgressSubmission.");
}
/**
 * MKCOL is not allowed on an in-progress submission.
 */
@Override
protected int mkcolInternal(String waste) throws DAVStatusException,
SQLException, AuthorizeException, IOException
{
throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
"MKCOL method not allowed for InProgressSubmission.");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.Context;
import org.jdom.Element;
/**
* This defines the behavior of DSpace "resources" in the WebDAV interface; it
* maps DAV operations onto DSpace object.s
*/
class DAVCommunity extends DAVDSpaceObject
{
/** The community. */
private Community community = null;
/** The Constant short_descriptionProperty. */
private static final Element short_descriptionProperty = new Element(
"short_description", DAV.NS_DSPACE);
/** The Constant introductory_textProperty. */
private static final Element introductory_textProperty = new Element(
"introductory_text", DAV.NS_DSPACE);
/** The Constant side_bar_textProperty. */
private static final Element side_bar_textProperty = new Element(
"side_bar_text", DAV.NS_DSPACE);
/** The Constant copyright_textProperty. */
private static final Element copyright_textProperty = new Element(
"copyright_text", DAV.NS_DSPACE);
/** The Constant logoProperty. */
private static final Element logoProperty = new Element("logo",
DAV.NS_DSPACE);
/** The all props. */
private static List<Element> allProps = new ArrayList<Element>(commonProps);
static
{
allProps.add(logoProperty);
allProps.add(short_descriptionProperty);
allProps.add(introductory_textProperty);
allProps.add(side_bar_textProperty);
allProps.add(copyright_textProperty);
allProps.add(handleProperty);
}
/**
* Instantiates a new DAV community.
*
* @param context the context
* @param request the request
* @param response the response
* @param pathElt the path elt
* @param community the community
*/
protected DAVCommunity(Context context, HttpServletRequest request,
HttpServletResponse response, String pathElt[], Community community)
{
super(context, request, response, pathElt, community);
this.community = community;
this.type = TYPE_COMMUNITY;
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#getAllProperties()
*/
@Override
protected List<Element> getAllProperties()
{
return allProps;
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#children()
*/
@Override
protected DAVResource[] children() throws SQLException
{
Vector result = new Vector();
Community subs[] = this.community.getSubcommunities();
for (Community element : subs)
{
result.add(new DAVCommunity(this.context, this.request, this.response,
makeChildPath(element), element));
}
Collection colls[] = this.community.getCollections();
for (Collection element : colls)
{
result.add(new DAVCollection(this.context, this.request, this.response,
makeChildPath(element), element));
}
return (DAVResource[]) result.toArray(new DAVResource[result.size()]);
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVDSpaceObject#propfindInternal(org.jdom.Element)
*/
@Override
protected Element propfindInternal(Element property) throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
String value = null;
// displayname - title or handle.
if (elementsEqualIsh(property, displaynameProperty))
{
value = getObjectMetadata("name");
if (value == null)
{
value = this.community.getHandle();
}
}
// special case, value is XML, not string:
else if (elementsEqualIsh(property, logoProperty))
{
Bitstream lbs = this.community.getLogo();
if (lbs != null)
{
Element le = DAVBitstream.makeXmlBitstream(lbs, this);
if (le != null)
{
Element p = new Element("logo", DAV.NS_DSPACE);
p.addContent(le);
return p;
}
}
}
else if (elementsEqualIsh(property, handleProperty))
{
value = canonicalizeHandle(this.community.getHandle());
}
else if (elementsEqualIsh(property, short_descriptionProperty))
{
value = getObjectMetadata("short_description");
}
else if (elementsEqualIsh(property, introductory_textProperty))
{
value = getObjectMetadata("introductory_text");
}
else if (elementsEqualIsh(property, side_bar_textProperty))
{
value = getObjectMetadata("side_bar_text");
}
else if (elementsEqualIsh(property, copyright_textProperty))
{
value = getObjectMetadata("copyright_text");
}
else
{
return super.propfindInternal(property);
}
// value was set up by "if" clause:
if (value == null)
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
"Not found.");
}
Element p = new Element(property.getName(), property.getNamespace());
p.setText(filterForXML(value));
return p;
}
// syntactic sugar around getting community metadata values:
/**
* Gets the object metadata.
*
* @param mdname the mdname
*
* @return the object metadata
*/
private String getObjectMetadata(String mdname)
{
try
{
return this.community.getMetadata(mdname);
}
catch (IllegalArgumentException e)
{
return null;
}
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
*/
@Override
protected int proppatchInternal(int action, Element prop)
throws SQLException, AuthorizeException, IOException,
DAVStatusException
{
String newValue = (action == DAV.PROPPATCH_REMOVE) ? null : prop
.getText();
// these are "metadata" values..
if (elementsEqualIsh(prop, short_descriptionProperty)
|| elementsEqualIsh(prop, introductory_textProperty)
|| elementsEqualIsh(prop, side_bar_textProperty)
|| elementsEqualIsh(prop, copyright_textProperty))
{
this.community.setMetadata(prop.getName(), newValue);
}
else if (elementsEqualIsh(prop, displaynameProperty))
{
this.community.setMetadata("name", newValue);
}
else if (elementsEqualIsh(prop, logoProperty))
{
if (action == DAV.PROPPATCH_REMOVE)
{
this.community.setLogo(null);
}
else
{
Element bs = prop.getChild("bitstream", DAV.NS_DSPACE);
if (bs != null)
{
InputStream bis = DAVBitstream.getXmlBitstreamContent(
this.context, bs);
BitstreamFormat bsf = DAVBitstream.getXmlBitstreamFormat(
this.context, bs);
if (bis == null || bsf == null)
{
throw new DAVStatusException(DAV.SC_CONFLICT,
"Unacceptable value for logo property.");
}
Bitstream nbs = this.community.setLogo(bis);
nbs.setFormat(bsf);
nbs.update();
}
else
{
throw new DAVStatusException(DAV.SC_CONFLICT,
"No <bitstream> element value found for logo property.");
}
}
}
else
{
throw new DAVStatusException(DAV.SC_CONFLICT, "The "
+ prop.getName() + " property cannot be changed.");
}
this.community.update();
return HttpServletResponse.SC_OK;
}
/**
 * GET is not supported on a Community resource.
 *
 * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
 * @see org.dspace.app.dav.DAVResource#get()
 */
@Override
protected void get() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    // a Community has no retrievable body of its own
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "GET method not implemented for Community.");
}
/**
 * PUT is not supported on a Community resource.
 *
 * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
 * @see org.dspace.app.dav.DAVResource#put()
 */
@Override
protected void put() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    // nothing can be uploaded directly into a Community
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "PUT method not implemented for Community.");
}
/**
 * COPY is not supported on a Community resource.
 *
 * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
 * @see org.dspace.app.dav.DAVResource#copyInternal(org.dspace.app.dav.DAVResource, int, boolean, boolean)
 */
@Override
protected int copyInternal(DAVResource destination, int depth,
        boolean overwrite, boolean keepProperties)
    throws DAVStatusException, SQLException, AuthorizeException,
        IOException
{
    // Communities cannot be duplicated through WebDAV
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "COPY method not implemented for Community.");
}
/**
 * Delete this community: first detach it from its parent community
 * (when it has one), then remove the community itself.
 *
 * @return HttpServletResponse.SC_OK on success
 * @see org.dspace.app.dav.DAVResource#deleteInternal()
 */
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
        AuthorizeException, IOException
{
    // detach from the parent before deleting, if we are a subcommunity
    Community parent = this.community.getParentCommunity();
    if (parent != null)
    {
        parent.removeSubcommunity(this.community);
    }
    this.community.delete();
    return HttpServletResponse.SC_OK;
}
/**
 * MKCOL creates a new Collection inside this community, using the
 * requested name as the collection's display name.
 *
 * @param name display name for the new collection
 *
 * @return HttpServletResponse.SC_OK on success
 * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
 */
@Override
protected int mkcolInternal(String name) throws DAVStatusException,
        SQLException, AuthorizeException, IOException
{
    // create, name, and persist the child collection
    Collection child = this.community.createCollection();
    child.setMetadata("name", name);
    child.update();
    return HttpServletResponse.SC_OK;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.core.Utils;
import org.dspace.handle.HandleManager;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
import org.jdom.Element;
/**
* This defines the behavior of DSpace "resources" in the WebDAV interface; it
* maps DAV operations onto DSpace object.s
*/
class DAVCollection extends DAVDSpaceObject
{
/** log4j category. */
private static Logger log = Logger.getLogger(DAVCollection.class);
/** The DSpace Collection this resource wraps. */
private Collection collection = null;
/** Directory for temporary upload files, from the "upload.temp.dir" config key. */
private static String tempDirectory = ConfigurationManager
.getProperty("upload.temp.dir");
/** Property element template for the collection's short_description metadata. */
private static final Element short_descriptionProperty = new Element(
"short_description", DAV.NS_DSPACE);
/** Property element template for the collection's introductory_text metadata. */
private static final Element introductory_textProperty = new Element(
"introductory_text", DAV.NS_DSPACE);
/** Property element template for the collection's side_bar_text metadata. */
private static final Element side_bar_textProperty = new Element(
"side_bar_text", DAV.NS_DSPACE);
/** Property element template for the collection's copyright_text metadata. */
private static final Element copyright_textProperty = new Element(
"copyright_text", DAV.NS_DSPACE);
/** Property element template for the collection's provenance_description metadata. */
private static final Element provenance_descriptionProperty = new Element(
"provenance_description", DAV.NS_DSPACE);
/** Property element template for the collection's default submission license. */
private static final Element default_licenseProperty = new Element(
"default_license", DAV.NS_DSPACE);
/** Property element template for the collection's logo bitstream. */
private static final Element logoProperty = new Element("logo",
DAV.NS_DSPACE);
/** All properties exposed by a Collection resource: the common set plus the collection-specific ones added below. */
private static List<Element> allProps = new ArrayList<Element>(commonProps);
static
{
allProps.add(logoProperty);
allProps.add(short_descriptionProperty);
allProps.add(introductory_textProperty);
allProps.add(side_bar_textProperty);
allProps.add(copyright_textProperty);
allProps.add(default_licenseProperty);
allProps.add(provenance_descriptionProperty);
allProps.add(handleProperty);
}
/**
 * Construct a WebDAV resource wrapping a DSpace Collection.
 *
 * @param context the DSpace context for this request
 * @param request the servlet request being served
 * @param response the servlet response being built
 * @param pathElt the URI path elements addressing this resource
 * @param collection the Collection this resource represents
 */
protected DAVCollection(Context context, HttpServletRequest request,
        HttpServletResponse response, String pathElt[],
        Collection collection)
{
    super(context, request, response, pathElt, collection);
    this.type = TYPE_COLLECTION;
    this.collection = collection;
}
/**
 * Report every property a Collection resource supports.
 *
 * @return the shared, statically-built property list
 * @see org.dspace.app.dav.DAVResource#getAllProperties()
 */
@Override
protected List<Element> getAllProperties()
{
    // the list is built once in the static initializer and shared
    return DAVCollection.allProps;
}
/**
 * Enumerate the children of this collection: one DAVItem resource per
 * Item the collection contains.
 *
 * @return array of child resources (possibly empty)
 *
 * @throws SQLException passed through from the database layer
 * @see org.dspace.app.dav.DAVResource#children()
 */
@Override
protected DAVResource[] children() throws SQLException
{
    // typed List instead of the raw legacy Vector the original used
    List<DAVResource> result = new ArrayList<DAVResource>();
    ItemIterator ii = this.collection.getItems();
    try
    {
        while (ii.hasNext())
        {
            Item item = ii.next();
            result.add(new DAVItem(this.context, this.request,
                    this.response, makeChildPath(item), item));
        }
    }
    finally
    {
        // always release the iterator's database resources
        if (ii != null)
        {
            ii.close();
        }
    }
    return result.toArray(new DAVResource[result.size()]);
}
/**
 * Accessor for the wrapped Collection.
 *
 * @return the DSpace Collection object represented by this resource.
 */
protected Collection getCollection()
{
    return collection;
}
/**
 * Resolve one PROPFIND property for this collection.  Most properties
 * map straight onto collection metadata fields; displayname falls back
 * to the handle, logo is rendered as an inline bitstream element, and
 * anything unrecognized is delegated to the superclass.
 *
 * @param property the requested property element
 *
 * @return a property element carrying the value
 *
 * @throws DAVStatusException SC_NOT_FOUND when the property has no value
 * @see org.dspace.app.dav.DAVDSpaceObject#propfindInternal(org.jdom.Element)
 */
@Override
protected Element propfindInternal(Element property) throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
String value = null;
// displayname - title or handle.
if (elementsEqualIsh(property, displaynameProperty))
{
value = getObjectMetadata("name");
if (value == null)
{
// unnamed collection: fall back to its handle
value = this.collection.getHandle();
}
}
else if (elementsEqualIsh(property, handleProperty))
{
value = canonicalizeHandle(this.collection.getHandle());
}
else if (elementsEqualIsh(property, logoProperty))
{
// logo is returned as a nested <bitstream> element, not text,
// so this branch returns directly instead of setting 'value'
Bitstream lbs = this.collection.getLogo();
if (lbs != null)
{
Element le = DAVBitstream.makeXmlBitstream(lbs, this);
if (le != null)
{
Element p = new Element("logo", DAV.NS_DSPACE);
p.addContent(le);
return p;
}
}
}
else if (elementsEqualIsh(property, short_descriptionProperty))
{
value = getObjectMetadata("short_description");
}
else if (elementsEqualIsh(property, introductory_textProperty))
{
value = getObjectMetadata("introductory_text");
}
else if (elementsEqualIsh(property, side_bar_textProperty))
{
value = getObjectMetadata("side_bar_text");
}
else if (elementsEqualIsh(property, copyright_textProperty))
{
value = getObjectMetadata("copyright_text");
}
else if (elementsEqualIsh(property, default_licenseProperty))
{
// only report a license when this collection overrides the default
value = this.collection.hasCustomLicense() ? this.collection.getLicense()
: null;
}
else if (elementsEqualIsh(property, provenance_descriptionProperty))
{
value = getObjectMetadata("provenance_description");
}
else
{
// not a collection-specific property; let the superclass try
return super.propfindInternal(property);
}
// value was set up by "if" clause:
if (value == null)
{
throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
"Not found.");
}
Element p = new Element(property.getName(), property.getNamespace());
p.setText(filterForXML(value));
return p;
}
// syntactic sugar around getting collection metadata values:
/**
 * Look up a named metadata field on the underlying collection.
 *
 * @param mdname name of the metadata field to read
 *
 * @return the field's value, or null when the name is not a
 *         recognized collection metadata field
 */
private String getObjectMetadata(String mdname)
{
    String result;
    try
    {
        result = this.collection.getMetadata(mdname);
    }
    catch (IllegalArgumentException e)
    {
        // unknown field name -- report "absent" rather than failing
        result = null;
    }
    return result;
}
/**
 * Apply one PROPPATCH action (set or remove) to a property of this
 * collection, then persist the change via update().  Writable
 * properties are the descriptive metadata texts, the display name, the
 * default license, and the logo; anything else is rejected.
 *
 * @param action a DAV.PROPPATCH_* code; PROPPATCH_REMOVE clears the value
 * @param prop the property element; its text content is the new value
 *
 * @return HttpServletResponse.SC_OK on success
 *
 * @throws DAVStatusException SC_CONFLICT when the property is not
 *           writable or the supplied logo value is unusable
 * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
 */
@Override
protected int proppatchInternal(int action, Element prop)
throws SQLException, AuthorizeException, IOException,
DAVStatusException
{
// REMOVE clears the value; SET takes the element's text content
String newValue = (action == DAV.PROPPATCH_REMOVE) ? null : prop
.getText();
// these are "metadata" values..
if (elementsEqualIsh(prop, short_descriptionProperty)
|| elementsEqualIsh(prop, introductory_textProperty)
|| elementsEqualIsh(prop, side_bar_textProperty)
|| elementsEqualIsh(prop, copyright_textProperty)
|| elementsEqualIsh(prop, provenance_descriptionProperty))
{
// property name matches the collection metadata field name directly
this.collection.setMetadata(prop.getName(), newValue);
}
else if (elementsEqualIsh(prop, displaynameProperty))
{
// DAV displayname maps onto the collection's "name" field
this.collection.setMetadata("name", newValue);
}
else if (elementsEqualIsh(prop, default_licenseProperty))
{
this.collection.setLicense(newValue);
}
else if (elementsEqualIsh(prop, logoProperty))
{
if (action == DAV.PROPPATCH_REMOVE)
{
this.collection.setLogo(null);
}
else
{
// new logo arrives as an inline <bitstream> child element
Element bs = prop.getChild("bitstream", DAV.NS_DSPACE);
if (bs != null)
{
InputStream bis = DAVBitstream.getXmlBitstreamContent(
this.context, bs);
BitstreamFormat bsf = DAVBitstream.getXmlBitstreamFormat(
this.context, bs);
if (bis == null || bsf == null)
{
throw new DAVStatusException(DAV.SC_CONFLICT,
"Unacceptable value for logo property.");
}
Bitstream nbs = this.collection.setLogo(bis);
nbs.setFormat(bsf);
nbs.update();
}
else
{
throw new DAVStatusException(DAV.SC_CONFLICT,
"No <bitstream> element value found for logo property.");
}
}
}
else
{
// all remaining properties are read-only
throw new DAVStatusException(DAV.SC_CONFLICT, "The "
+ prop.getName() + " property cannot be changed.");
}
// persist whichever change was made above
this.collection.update();
return HttpServletResponse.SC_OK;
}
/**
 * GET is not supported on a Collection resource.
 *
 * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
 * @see org.dspace.app.dav.DAVResource#get()
 */
@Override
protected void get() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    // a Collection has no retrievable body of its own
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "GET not implemented for Collection.");
}
/**
 * Wrapped input stream that hits end-of-file after reading a counted number
 * of bytes, even if its client stream appears to have more data. This fixes
 * a problem in the Servlet container's input stream which will try to read
 * past the end of the request body.
 * <p>
 * BUG FIX: the previous version guarded the byte-count update with
 * {@code if (this.count > 0)} in the array read() overloads.  Since the
 * count starts at 0 that guard was never true, so array reads were never
 * counted and the length limit was never enforced; it could also add a -1
 * EOF result into the count.  The guard now tests the read result instead,
 * and array reads are clamped to the remaining byte budget.
 */
private static class CountedInputStream extends FilterInputStream
{
    /** Number of bytes successfully read so far. */
    private long count = 0;

    /** Total bytes the caller is allowed to read; EOF is reported beyond this. */
    private long length = -1;

    /**
     * Instantiates a new counted input stream.
     *
     * @param is the underlying stream to wrap
     * @param length maximum number of bytes to deliver before reporting EOF
     */
    protected CountedInputStream(InputStream is, long length)
    {
        super(is);
        this.length = length;
    }

    /* (non-Javadoc)
     * @see java.io.FilterInputStream#read()
     */
    @Override
    public int read() throws IOException
    {
        if (this.count >= this.length)
        {
            return -1;
        }
        int result = super.read();
        // only count a byte that was actually delivered, not EOF
        if (result >= 0)
        {
            this.count++;
        }
        return result;
    }

    /* (non-Javadoc)
     * @see java.io.FilterInputStream#read(byte[])
     */
    @Override
    public int read(byte[] b) throws IOException
    {
        // funnel through the 3-arg overload so counting and clamping
        // happen in exactly one place
        return read(b, 0, b.length);
    }

    /* (non-Javadoc)
     * @see java.io.FilterInputStream#read(byte[], int, int)
     */
    @Override
    public int read(byte[] b, int off, int len) throws IOException
    {
        long remaining = this.length - this.count;
        if (remaining <= 0)
        {
            return -1;
        }
        // never ask the underlying stream for more than our budget
        if (len > remaining)
        {
            len = (int) remaining;
        }
        int result = super.read(b, off, len);
        // result is -1 at EOF; only add real byte counts
        if (result > 0)
        {
            this.count += result;
        }
        return result;
    }

    /* (non-Javadoc)
     * @see java.io.FilterInputStream#skip(long)
     */
    @Override
    public long skip(long n) throws IOException
    {
        long result = super.skip(n);
        if (result > 0)
        {
            this.count += result;
        }
        return result;
    }
}
/**
 * PUT ingests a package as a new Item. Package type (must match pluggable
 * packager name) is in either (a) "package" query arg in URI (b)
 * content-type request header
 * <p>
 * The package body is spooled to a temporary file (closing both streams
 * even on failure -- the previous version leaked them when the copy
 * threw), ingested through the configured PackageIngester with workflow
 * enabled, and the new resource's location is returned in the Location
 * response header.
 *
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws DAVStatusException the DAV status exception
 */
@Override
protected void put() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    try
    {
        // package type: "package" query arg, else the Content-Type header
        String packageType = this.request.getParameter("package");
        if (packageType == null)
        {
            packageType = this.request.getContentType();
        }
        if (packageType == null)
        {
            throw new DAVStatusException(
                    HttpServletResponse.SC_BAD_REQUEST,
                    "Cannot determine package type, need content-type header or package param");
        }
        PackageIngester sip = (PackageIngester) PluginManager
                .getNamedPlugin(PackageIngester.class, packageType);
        if (sip == null)
        {
            throw new DAVStatusException(
                    HttpServletResponse.SC_BAD_REQUEST,
                    "Cannot find importer for package type: " + packageType);
        }
        /*
         * Ugh. Servlet container doesn't get end-of-file right on input
         * stream so we have to count it, when possible.
         */
        int contentLength = this.request.getIntHeader("Content-Length");
        InputStream pis = this.request.getInputStream();
        if (contentLength >= 0)
        {
            pis = new CountedInputStream(pis, contentLength);
            log.debug("put: Using CountedInputStream, length="
                    + String.valueOf(contentLength));
        }
        // Write to temporary file (so that SIP ingester can process it)
        File tempDir = new File(tempDirectory);
        File tempFile = File.createTempFile("davUpload" + pis.hashCode(), null, tempDir);
        // schedule for deletion now, so the file is cleaned up even if
        // a later step throws
        tempFile.deleteOnExit();
        log.debug("Storing temporary file at " + tempFile.getCanonicalPath());
        FileOutputStream fos = new FileOutputStream(tempFile);
        try
        {
            Utils.copy(pis, fos);
        }
        finally
        {
            // close both streams even when the copy fails
            fos.close();
            pis.close();
        }
        // Initialize parameters to packager
        PackageParameters params = PackageParameters.create(this.request);
        // Force package ingester to respect Collection workflows (i.e. start workflow automatically as needed)
        params.setWorkflowEnabled(true);
        // ingest from the temp file to create the new DSpaceObject
        DSpaceObject ingestedDso = sip.ingest(this.context, this.collection, tempFile,
                params, null);
        Item item = (Item) ingestedDso;
        // get the new workflowitem (if it exists):
        // if we found a WorkflowItem, then it is still in-process;
        // if we didn't find one, the item is already in the archive
        WorkflowItem wfi = WorkflowItem.findByItem(this.context, item);
        int state;
        if (wfi != null)
        {
            state = wfi.getState();
        }
        else
        {
            state = WorkflowManager.WFSTATE_ARCHIVE;
        }
        // get new item's location: if workflow completed, then look
        // for handle (but be ready for disappointment); otherwise,
        // return the workflow item's resource.
        String location = null;
        if (state == WorkflowManager.WFSTATE_ARCHIVE)
        {
            // Item is already in the archive
            String handle = HandleManager.findHandle(this.context, item);
            String end = (handle != null) ? DAVDSpaceObject
                    .getPathElt(handle) : DAVItem.getPathElt(item);
            DAVItem newItem = new DAVItem(this.context, this.request,
                    this.response, makeChildPath(end), item);
            location = newItem.hrefURL();
        }
        else if (state == WorkflowManager.WFSTATE_SUBMIT
                || state == WorkflowManager.WFSTATE_STEP1POOL)
        {
            // Item is still in-process in the workflow
            location = hrefPrefix() + DAVWorkflow.getPath(wfi);
        }
        else
        {
            throw new DAVStatusException(
                    HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "Workflow object in unexpected state, state="
                            + String.valueOf(state) + ", aborting PUT.");
        }
        this.context.commit();
        log.info("Created new Item, location=" + location);
        this.response.setHeader("Location", location);
        this.response.setStatus(HttpServletResponse.SC_CREATED);
    }
    catch (PackageException pe)
    {
        pe.log(log);
        throw new DAVStatusException(
                HttpServletResponse.SC_INTERNAL_SERVER_ERROR, pe.toString(), pe);
    }
    catch (CrosswalkException ie)
    {
        // include the root cause (when present) in the reported reason
        String reason = "";
        if (ie.getCause() != null)
        {
            reason = ", Reason: " + ie.getCause().toString();
        }
        log.error(ie.toString() + reason);
        throw new DAVStatusException(
                HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ie.toString()
                        + reason, ie);
    }
}
/**
 * COPY is not supported on a Collection resource.
 *
 * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
 * @see org.dspace.app.dav.DAVResource#copyInternal(org.dspace.app.dav.DAVResource, int, boolean, boolean)
 */
@Override
protected int copyInternal(DAVResource destination, int depth,
        boolean overwrite, boolean keepProperties)
    throws DAVStatusException, SQLException, AuthorizeException,
        IOException
{
    // Collections cannot be duplicated through WebDAV
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "COPY method not implemented for Collection.");
}
/**
 * "Delete" this collection by detaching it from every community that
 * owns it.
 * <p>
 * NOTE(review): the direct {@code collection.delete()} call is
 * intentionally(?) commented out below -- presumably
 * {@code Community.removeCollection()} deletes the collection once it
 * is orphaned.  Confirm that side effect before relying on this method
 * to fully remove the collection.
 *
 * @return HttpServletResponse.SC_OK
 * @see org.dspace.app.dav.DAVResource#deleteInternal()
 */
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
AuthorizeException, IOException
{
Community[] ca = this.collection.getCommunities();
if (ca != null)
{
// detach from every owning community
for (Community element : ca)
{
element.removeCollection(this.collection);
}
}
// collection.delete();
return HttpServletResponse.SC_OK; // HTTP OK
}
/**
 * MKCOL is not allowed on a Collection resource; collections hold
 * Items, not further collections.
 *
 * @param waste ignored
 *
 * @throws DAVStatusException always, with SC_METHOD_NOT_ALLOWED
 * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
 */
@Override
protected int mkcolInternal(String waste) throws DAVStatusException,
        SQLException, AuthorizeException, IOException
{
    throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
            "MKCOL method not allowed for Collection.");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.axis.MessageContext;
import org.apache.axis.transport.http.AxisServlet;
import org.apache.log4j.Logger;
import org.dspace.authenticate.AuthenticationManager;
import org.dspace.authenticate.AuthenticationMethod;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Document;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
/**
* Servlet implementing SOAP services of DSpace Lightweight Network Interface
* <P>
* This is implemented as a subclass of the AxisServlet that processes SOAP
* requests, so it can pick out the requests it handles and pass on the rest to
* the Axis Engine.
* <p>
* Note that it also handles WebDAV GET and PUT requests, so the SOAP client can
* use the same URL as a SOAP endpoint and WebDAV resource root.
*
* @author Larry Stone
* @version $Revision: 5845 $
*/
public class LNISoapServlet extends AxisServlet
{
/** log4j category. */
private static Logger log = Logger.getLogger(LNISoapServlet.class);
/** Shared pretty-printing XML serializer used to render result documents. */
private static XMLOutputter outputPretty = new XMLOutputter(Format
.getPrettyFormat());
// state of this transaction
// NOTE(review): a servlet instance is shared across request threads, so
// keeping the current request/response in mutable instance fields is not
// thread-safe -- concurrent SOAP calls could observe each other's state.
// Confirm the deployment serializes requests, or refactor to locals.
/** The request currently being processed (set by prologue()). */
private HttpServletRequest request = null;
/** The response currently being processed (set by prologue()). */
private HttpServletResponse response = null;
/**
 * Pass a GET request directly to the WebDAV implementation. It handles
 * authentication, so no prologue() call is needed here.
 *
 * @param request the request
 * @param response the response
 *
 * @throws ServletException the servlet exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
// delegate wholesale to the WebDAV servlet's dispatcher
DAVServlet.serviceInternal("GET", request, response);
}
/**
 * Pass a PUT request directly to the WebDAV implementation. It handles
 * authentication, so no prologue() call is needed here.
 *
 * @param request the request
 * @param response the response
 *
 * @throws ServletException the servlet exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
public void doPut(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
// delegate wholesale to the WebDAV servlet's dispatcher
DAVServlet.serviceInternal("PUT", request, response);
}
/**
 * Authenticate and return the filled-in DSpace context.  This is the
 * prologue to all SOAP calls: it pulls credentials off the Axis
 * MessageContext, tries the auth-cookie shortcut first, and otherwise
 * runs the full AuthenticationManager flow.  Every failure path aborts
 * the context and throws LNIRemoteException (an unchecked-by-signature
 * subclass of IOException per java.rmi.RemoteException).
 *
 * @return an authenticated DSpace context
 *
 * @throws SQLException the SQL exception
 * @throws IOException Signals that an I/O exception has occurred
 *           (including LNIRemoteException on authentication failure).
 */
private Context prologue() throws SQLException, IOException
{
MessageContext mc = MessageContext.getCurrentContext();
String username = null, password = null;
// credentials arrive URL-encoded on the Axis message context
if (mc.getUsername() != null)
{
username = DAVServlet.decodeFromURL(mc.getUsername());
}
if (mc.getPassword() != null)
{
password = DAVServlet.decodeFromURL(mc.getPassword());
}
/***********************************************************************
 * ** XXX TEMPORARY *** Instrumentation to explore the guts of Axis at
 * runtime, *** leave this commented-out. java.util.Iterator pi =
 * mc.getPropertyNames(); while (pi.hasNext()) log.debug("SOAP: request
 * has property named \""+((String)pi.next())+"\""); log.debug("SOAP:
 * getSOAPActionURI = \""+mc.getSOAPActionURI()+"\""); log.debug("SOAP:
 * property(servletEndpointContext) = =
 * \""+mc.getProperty("servletEndpointContext").toString()+"\"");
 * log.debug("SOAP: property(realpath) = =
 * \""+mc.getProperty("realpath").toString()+"\""); log.debug("SOAP:
 * property(transport.http.servletLocation) = =
 * \""+mc.getProperty("transport.http.servletLocation").toString()+"\"");
 * *** end TEMPORARY INSTRUMENTATION
 **********************************************************************/
// stash the underlying servlet request/response for later use
this.request = (HttpServletRequest) mc
.getProperty("transport.http.servletRequest");
this.response = (HttpServletResponse) mc
.getProperty("transport.http.servletResponse");
Context context = new Context();
// try cookie shortcut
if (DAVServlet.getAuthFromCookie(context, this.request))
{
// refresh the cookie (without forcing a new one)
DAVServlet.putAuthCookie(context, this.request, this.response, false);
log.debug("SOAP service " + this.getClass().getName()
+ " authenticated with cookie.");
return context;
}
// full authentication against the configured stack
int status = AuthenticationManager.authenticate(context, username,
password, null, this.request);
if (status == AuthenticationMethod.SUCCESS)
{
EPerson cu = context.getCurrentUser();
log.debug("SOAP service " + this.getClass().getName()
+ " authenticated as " + cu.getEmail() + " ("
+ cu.getFirstName() + " " + cu.getLastName() + ")");
// issue a fresh auth cookie for subsequent calls
DAVServlet.putAuthCookie(context, this.request, this.response, true);
return context;
}
else if (status == AuthenticationMethod.BAD_CREDENTIALS)
{
context.abort();
throw new LNIRemoteException(
"Authentication failed: Bad Credentials.");
}
else if (status == AuthenticationMethod.CERT_REQUIRED)
{
context.abort();
throw new LNIRemoteException(
"Authentication failed: This user may only login with X.509 certificate.");
}
else if (status == AuthenticationMethod.NO_SUCH_USER)
{
context.abort();
throw new LNIRemoteException("Authentication failed: No such user.");
}
else
{
context.abort();
/** AuthenticationMethod.BAD_ARGS and etc * */
throw new LNIRemoteException(
"Authentication failed: Cannot authenticate.");
}
}
/**
 * SOAP PROPFIND: run a WebDAV PROPFIND against the resource at the
 * given URI and return the multistatus result document as a string.
 *
 * @param uri resource path (leading "/" tolerated)
 * @param doc the PROPFIND request body (XML document as a string)
 * @param depth the PROPFIND depth (0, 1, or infinity)
 * @param types comma-separated resource-type names to include, or null
 *          for all types
 *
 * @return pretty-printed XML multistatus document
 *
 * @throws LNIRemoteException wrapping any underlying failure
 */
public String propfind(String uri, String doc, int depth, String types)
throws LNIRemoteException
{
// break up path into elements.
if (uri.startsWith("/"))
{
uri = uri.substring(1);
}
String pathElt[] = uri.split("/");
Context context = null;
try
{
context = prologue();
// return properties only for resources of these types, comma-sep
// list
String aTypes[] = (types == null) ? null : types.split(",");
int typeMask = DAVResource.typesToMask(aTypes);
DAVResource resource = DAVResource.findResource(context, null,
null, pathElt);
if (resource == null)
{
throw new LNIRemoteException("Resource not found.");
}
else
{
// NOTE(review): doc.getBytes() and baos.toString() below use the
// platform default charset; non-ASCII XML may be corrupted on
// non-UTF-8 platforms -- confirm and consider an explicit charset.
Document outdoc = resource.propfindDriver(depth,
new ByteArrayInputStream(doc.getBytes()), typeMask);
if (outdoc == null)
{
// this should never happen, it should throw an error
// before returning null
throw new LNIRemoteException(
"propfind failed, no document returned.");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
outputPretty.output(outdoc, baos);
context.complete();
return baos.toString();
}
}
catch (IOException ie)
{
throw new LNIRemoteException("Exception executing PROPFIND", ie);
}
catch (SQLException e)
{
throw new LNIRemoteException("Failure accessing database", e);
}
catch (DAVStatusException e)
{
throw new LNIRemoteException("PROPFIND request failed: "
+ e.getStatusLine(), e);
}
catch (AuthorizeException e)
{
throw new LNIRemoteException(
"You are not authorized for the requested operation.", e);
}
finally
{
// roll back anything left open if complete() was never reached
if (context != null && context.isValid())
{
context.abort();
}
}
}
/**
 * SOAP PROPPATCH: run a WebDAV PROPPATCH against the resource at the
 * given URI and return the multistatus result document as a string.
 *
 * @param uri resource path (leading "/" tolerated)
 * @param doc the PROPPATCH request body (XML document as a string)
 *
 * @return pretty-printed XML multistatus document
 *
 * @throws LNIRemoteException wrapping any underlying failure
 */
public String proppatch(String uri, String doc) throws LNIRemoteException
{
// break up path into elements.
if (uri.startsWith("/"))
{
uri = uri.substring(1);
}
String pathElt[] = uri.split("/");
Context context = null;
try
{
context = prologue();
DAVResource resource = DAVResource.findResource(context, null,
null, pathElt);
if (resource == null)
{
throw new LNIRemoteException("Resource not found.");
}
else
{
// NOTE(review): getBytes()/toString() use the platform default
// charset here, as in propfind() -- confirm for non-ASCII XML.
Document outdoc = resource
.proppatchDriver(new ByteArrayInputStream(doc
.getBytes()));
if (outdoc == null)
{
// this should never happen, it should throw an error
// before returning null
throw new LNIRemoteException(
"proppatch failed, no document returned.");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
outputPretty.output(outdoc, baos);
context.complete();
return baos.toString();
}
}
catch (IOException ie)
{
throw new LNIRemoteException("Exception executing PROPPATCH", ie);
}
catch (SQLException e)
{
throw new LNIRemoteException("Failure accessing database", e);
}
catch (DAVStatusException e)
{
throw new LNIRemoteException("PROPPATCH request failed: "
+ e.getStatusLine(), e);
}
catch (AuthorizeException e)
{
throw new LNIRemoteException(
"You are not authorized for the requested operation.", e);
}
finally
{
// roll back anything left open if complete() was never reached
if (context != null && context.isValid())
{
context.abort();
}
}
}
/** Fixed path elements used to build the DAVLookup helper resource. */
private static String lookupPathElt[] = { "lookup", "handle" };
/**
 * Return "absolute" DAV URI for the given handle (and optional bitstream
 * persistent identifier). Always returns a valid URI; if resource is not
 * found it throws an exception.
 *
 * @param handle the handle, optionally prefixed with "hdl:"
 * @param bitstreamPid the bitstream pid, or null
 *
 * @return the resource's DAV URI
 *
 * @throws LNIRemoteException if the resource cannot be found or a
 *           lower-level failure occurs
 */
public String lookup(String handle, String bitstreamPid)
throws LNIRemoteException
{
Context context = null;
try
{
context = prologue();
// trim leading scheme if any:
if (handle.startsWith("hdl:"))
{
handle = handle.substring(4);
}
DAVLookup resource = new DAVLookup(context, this.request, this.response,
lookupPathElt);
String result = resource.makeURI(handle, bitstreamPid);
if (result == null)
{
throw new LNIRemoteException("Resource not found.");
}
context.complete();
return result;
}
catch (IOException ie)
{
throw new LNIRemoteException("Exception executing LOOKUP", ie);
}
catch (SQLException e)
{
throw new LNIRemoteException("Failure accessing database", e);
}
finally
{
// roll back anything left open if complete() was never reached
if (context != null && context.isValid())
{
context.abort();
}
}
}
/**
 * SOAP COPY: run a WebDAV COPY of the source resource to the given
 * destination URI.
 *
 * @param source source resource path (leading "/" tolerated)
 * @param destination destination URI for the copy
 * @param depth the copy depth
 * @param overwrite whether an existing destination may be overwritten
 * @param keepProperties whether to preserve properties on the copy
 *
 * @return the HTTP status code produced by the copy
 *
 * @throws LNIRemoteException wrapping any underlying failure
 */
public int copy(String source, String destination, int depth,
boolean overwrite, boolean keepProperties)
throws LNIRemoteException
{
// break up path into elements.
if (source.startsWith("/"))
{
source = source.substring(1);
}
String pathElt[] = source.split("/");
Context context = null;
try
{
context = prologue();
DAVResource resource = DAVResource.findResource(context, null,
null, pathElt);
if (resource == null)
{
throw new LNIRemoteException("Resource not found.");
}
int status = resource.copyDriver(destination, depth, overwrite,
keepProperties);
context.complete();
return status;
}
catch (IOException ie)
{
throw new LNIRemoteException("IOException while executing COPY", ie);
}
catch (SQLException e)
{
throw new LNIRemoteException("Failure accessing database", e);
}
catch (DAVStatusException e)
{
throw new LNIRemoteException("COPY request failed: "
+ e.getStatusLine(), e);
}
catch (AuthorizeException e)
{
throw new LNIRemoteException(
"You are not authorized for the requested operation.", e);
}
finally
{
// roll back anything left open if complete() was never reached
if (context != null && context.isValid())
{
context.abort();
}
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.workflow.WorkflowManager;
import org.jdom.Element;
/**
* Reflect the contents of a WorkspaceItem object, which is mainly an
* in-progress submission wrapper around an Item. Allow the client to read and
* set the slots in the workspace item and explore its child Item.
* <p>
* WorkspaceItem resources are reached through the Workspace resource, typically
* with a path of /workspace/wsi_db_{id}
* <p>
*
* @author Larry Stone
* @see DAVInProgressSubmission
* @see DAVWorkspace
*/
class DAVWorkspaceItem extends DAVInProgressSubmission
{
/** log4j category. */
private static Logger log = Logger.getLogger(DAVWorkspaceItem.class);
/** Property element template for the workspace item's stage_reached slot. */
private static final Element stage_reachedProperty = new Element(
"stage_reached", DAV.NS_DSPACE);
/** All properties of a workspace item: the in-progress set plus stage_reached. */
private static List<Element> allProps = new ArrayList<Element>(inProgressProps);
static
{
allProps.add(stage_reachedProperty);
}
/**
 * Instantiates a new DAV workspace item wrapping the given in-progress
 * submission; all state is handled by the superclass.
 *
 * @param context the DSpace context for this request
 * @param request the servlet request being served
 * @param response the servlet response being built
 * @param pathElt the URI path elements addressing this resource
 * @param wi the in-progress submission (WorkspaceItem) to wrap
 */
protected DAVWorkspaceItem(Context context, HttpServletRequest request,
HttpServletResponse response, String pathElt[],
InProgressSubmission wi)
{
super(context, request, response, pathElt, wi);
}
/**
 * Report every property a workspace-item resource supports.
 *
 * @return the shared, statically-built property list
 * @see org.dspace.app.dav.DAVResource#getAllProperties()
 */
@Override
protected List<Element> getAllProperties()
{
    // the list is built once in the static initializer and shared
    return DAVWorkspaceItem.allProps;
}
/**
 * Identify this resource's type in PROPFIND responses.
 *
 * @return a fresh dspace:workspace-item type element
 * @see org.dspace.app.dav.DAVResource#typeValue()
 */
@Override
protected Element typeValue()
{
    Element typeElement = new Element("workspace-item", DAV.NS_DSPACE);
    return typeElement;
}
// format the final path element for one of these
/**
 * Build the final URI path element naming a workspace item.
 *
 * @param dbid database ID of the workspace item
 *
 * @return path element of the form "wsi_db_&lt;id&gt;"
 */
protected static String getPathElt(int dbid)
{
    // string concatenation performs the int-to-string conversion
    return "wsi_db_" + dbid;
}
/**
 * Match resource URI: recognize "/workspace/wsi_db_&lt;id&gt;" paths and
 * build the corresponding DAVWorkspaceItem resource.  Longer paths are
 * delegated to DAVItem matching (a child Item or Bitstream of this
 * workspace item).
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 *
 * @return the matched DAV resource, or null when the path is not a
 *         workspace-item URI at all
 *
 * @throws DAVStatusException SC_NOT_FOUND for a workspace-item path that
 *           does not resolve; SC_BAD_REQUEST for a malformed ID
 * @throws SQLException the SQL exception
 */
protected static DAVResource matchResourceURI(Context context,
HttpServletRequest request, HttpServletResponse response,
String pathElt[]) throws DAVStatusException, SQLException
{
try
{
// Match "/workspace/wsi_db_<id>" URI
if (pathElt.length >= 2 && pathElt[0].equals("workspace")
&& pathElt[1].startsWith("wsi_db_"))
{
// match /workspace/wsi_db_<id>/item_db_<id> ...
// should be an Item (or Bitstream) URI, child of this
// WorkspaceItem.
if (pathElt.length >= 3)
{
DAVResource result = DAVItem.matchResourceURI(context,
request, response, pathElt);
if (result == null)
{
throw new DAVStatusException(
HttpServletResponse.SC_NOT_FOUND,
"Invalid resource path.");
}
else
{
return result;
}
}
// get this WSI -- substring(7) strips the "wsi_db_" prefix
int id = Integer.parseInt(pathElt[1].substring(7));
InProgressSubmission ips = WorkspaceItem.find(context, id);
if (ips == null)
{
log.warn("invalid WorkspaceItem DB ID in DAV URI, " + "id="
+ pathElt[1]);
throw new DAVStatusException(
HttpServletResponse.SC_NOT_FOUND, "Not found: "
+ pathElt[1] + " does not exist.");
}
else
{
return new DAVWorkspaceItem(context, request, response,
pathElt, ips);
}
}
// not a workspace-item path; let other matchers try
return null;
}
catch (NumberFormatException ne)
{
throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
"Error parsing number in request URI.", ne);
}
}
/* (non-Javadoc)
 * Answers PROPFIND for the properties this subclass adds (displayname,
 * stage_reached); everything else is delegated to the superclass.
 * @see org.dspace.app.dav.DAVInProgressSubmission#propfindInternal(org.jdom.Element)
 */
@Override
protected Element propfindInternal(Element property) throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    String value = null;
    // displayname - title or handle.
    if (elementsEqualIsh(property, displaynameProperty))
    {
        value = getPathElt(this.inProgressItem.getID());
    }
    else if (elementsEqualIsh(property, stage_reachedProperty))
    {
        value = String.valueOf(((WorkspaceItem) this.inProgressItem)
                .getStageReached());
    }
    else
    {
        return super.propfindInternal(property);
    }
    // value was set up by "if" clause:
    if (value == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                "Not found.");
    }
    Element p = new Element(property.getName(), property.getNamespace());
    p.setText(filterForXML(value));
    return p;
}
/* (non-Javadoc)
 * Handles PROPPATCH for the writable properties of a workspace item:
 * "state" (starts the workflow) and "stage_reached" (arbitrary integer).
 * Anything not handled by the superclass or here is rejected with 409.
 * @see org.dspace.app.dav.DAVInProgressSubmission#proppatchInternal(int, org.jdom.Element)
 */
@Override
protected int proppatchInternal(int action, Element prop)
        throws SQLException, AuthorizeException, IOException,
        DAVStatusException
{
    // give the superclass first crack at common properties
    if (super.proppatchInternal(action, prop) == HttpServletResponse.SC_OK)
    {
        return HttpServletResponse.SC_OK;
    }
    else if (elementsEqualIsh(prop, stateProperty))
    {
        if (action == DAV.PROPPATCH_REMOVE)
        {
            throw new DAVStatusException(DAV.SC_CONFLICT,
                    "The state property cannot be removed.");
        }
        String key = prop.getTextTrim();
        if (key.equalsIgnoreCase("start"))
        {
            WorkflowManager.start(this.context, (WorkspaceItem) this.inProgressItem);
        }
        else if (key.equalsIgnoreCase("start_without_notify"))
        {
            WorkflowManager.startWithoutNotify(this.context,
                    (WorkspaceItem) this.inProgressItem);
        }
        // NOTE(review): an unrecognized state value falls through silently
        // and still returns SC_OK below — confirm this is intentional.
    }
    // setStageReached - arbitrary number.
    else if (elementsEqualIsh(prop, stage_reachedProperty))
    {
        if (action == DAV.PROPPATCH_REMOVE)
        {
            throw new DAVStatusException(DAV.SC_CONFLICT,
                    "The stage property cannot be removed.");
        }
        try
        {
            ((WorkspaceItem) this.inProgressItem).setStageReached(Integer
                    .parseInt(prop.getTextTrim()));
        }
        catch (NumberFormatException ne)
        {
            throw new DAVStatusException(
                    HttpServletResponse.SC_BAD_REQUEST,
                    "Error parsing number in property value.", ne);
        }
    }
    else
    {
        throw new DAVStatusException(DAV.SC_CONFLICT, "The "
                + prop.getName() + " property cannot be changed.");
    }
    // persist any change made above
    this.inProgressItem.update();
    return HttpServletResponse.SC_OK;
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.net.URLDecoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.StringTokenizer;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.authenticate.AuthenticationManager;
import org.dspace.authenticate.AuthenticationMethod;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.eperson.EPerson;
/**
 * Servlet implementing WebDAV server for DSpace.
 * <P>
 *
 * @author Larry Stone
 * @version $Revision: 5845 $
 */
public class DAVServlet extends HttpServlet
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVServlet.class);
    /** Names of DAV HTTP extension methods. */
    private static final String METHOD_PROPFIND = "PROPFIND";
    /** The Constant METHOD_PROPPATCH. */
    private static final String METHOD_PROPPATCH = "PROPPATCH";
    /** The Constant METHOD_MKCOL. */
    private static final String METHOD_MKCOL = "MKCOL";
    /** The Constant METHOD_COPY. */
    private static final String METHOD_COPY = "COPY";
    /** The Constant METHOD_MOVE. */
    private static final String METHOD_MOVE = "MOVE";
    /** The Constant METHOD_DELETE. */
    private static final String METHOD_DELETE = "DELETE";
    /** Method names of standard HTTP methods; we have to override HttpServlet fields, because they are private (ugh). */
    private static final String METHOD_GET = "GET";
    /** The Constant METHOD_PUT. */
    private static final String METHOD_PUT = "PUT";
    /** The Constant METHOD_OPTIONS. */
    private static final String METHOD_OPTIONS = "OPTIONS";
    /** Switch to allow anonymous (unauthenticated) access to DAV resources. If true, client doesn't have to authenticate, false they do. */
    private static boolean allowAnonymousAccess = ConfigurationManager
            .getBooleanProperty("dav.access.anonymous");
    /** Guess at longest status text the servlet container will tolerate; Tomcat 5.0 handles this, but dies on longer messages. */
    private static final int HTTP_STATUS_MESSAGE_MAX = 1000;
    /** A random secret to embed in cookies, generated fresh at every startup:. */
    private static final String cookieSecret = Utils.generateHexKey();
    // name of our HTTP cookie.
    /** The Constant COOKIE_NAME. */
    private static final String COOKIE_NAME = "DSpaceDavAuth";
    // sell-by time (shelf life) for cookies, in milliseconds: 1/2 hour
    /** The Constant COOKIE_SELL_BY. */
    private static final long COOKIE_SELL_BY = 30 * 60 * 1000;
// 'C' is for cookie..
/**
 * Fetch this servlet's authentication cookie from the request, if present.
 *
 * @param request the request
 *
 * @return the DSpace DAV auth cookie, or null when the request has none
 */
private static Cookie gimmeCookie(HttpServletRequest request)
{
    Cookie[] jar = request.getCookies();
    if (jar == null)
    {
        return null;
    }
    for (int i = 0; i < jar.length; ++i)
    {
        if (COOKIE_NAME.equals(jar[i].getName()))
        {
            return jar[i];
        }
    }
    return null;
}
/**
 * Get Session Cookie.
 * <p>
 * DAVServlet rolls its own session cookie because the Servlet container's
 * session <em>cannot</em> be constrained to use ONLY cookies and NOT
 * URL-rewriting, and the latter would break the DAV protocol so we cannot
 * use it. Since we really only need to cache the authenticated EPerson (an
 * integer ID) anyway, it's easy enough so simply stuff that into a cookie.
 * <p>
 * Cookie format is: <br>
 * {timestamp}!{epersonID}!{client-IP}!{MAC} <br>
 * where timestamp and eperson are integers; client IP is dotted IP
 * notation, and MAC is the hex MD5 of the preceding fields plus the
 * "cookieSecret" string. The MAC ensures that the cookie was issued by this
 * servlet.
 * <p>
 * Look for authentication cookie and try to get a previously-authenticated
 * EPerson from it if found. Also check the timestamp to be sure the cookie
 * isn't "stale".
 * <p>
 * NOTE This is also used by the SOAP servlet.
 * <p>
 *
 * @param context -
 * set user in this context
 * @param request -
 * HTTP request.
 *
 * @return true when a fresh cookie yields a valid eperson.
 *
 * @throws SQLException the SQL exception
 */
protected static boolean getAuthFromCookie(Context context,
        HttpServletRequest request) throws SQLException
{
    Cookie cookie = gimmeCookie(request);
    if (cookie == null)
    {
        return false;
    }
    // split into the four "!"-separated crumbs described above
    String crumb[] = cookie.getValue().split("\\!");
    if (crumb.length != 4)
    {
        log
                .warn("Got invalid cookie value = \"" + cookie.getValue()
                        + "\"");
        return false;
    }
    long timestamp = 0;
    int epersonID = 0;
    try
    {
        timestamp = Long.parseLong(crumb[0]);
        epersonID = Integer.parseInt(crumb[1]);
    }
    catch (NumberFormatException e)
    {
        log.warn("Error groveling cookie, " + e.toString());
        return false;
    }
    // check freshness: reject future timestamps and expired cookies
    long now = new Date().getTime();
    if (timestamp > now || (now - timestamp) > COOKIE_SELL_BY)
    {
        log.warn("Cookie is stale or has weird time, value = \""
                + cookie.getValue() + "\"");
        return false;
    }
    // check IP address: cookie is bound to the client that received it
    if (!crumb[2].equals(request.getRemoteAddr()))
    {
        log.warn("Cookie fails IP Addr test, value = \""
                + cookie.getValue() + "\"");
        return false;
    }
    // check MAC: MD5 over the first three crumbs plus the server secret
    String mac = Utils.getMD5(crumb[0] + "!" + crumb[1] + "!" + crumb[2]
            + "!" + cookieSecret);
    if (!mac.equals(crumb[3]))
    {
        log.warn("Cookie fails MAC test, value = \"" + cookie.getValue()
                + "\"");
        return false;
    }
    // looks like the browser reguritated a good one:
    EPerson cuser = EPerson.find(context, epersonID);
    if (cuser != null)
    {
        context.setCurrentUser(cuser);
        log.debug("Got authenticated user from cookie, id=" + crumb[1]);
        return true;
    }
    return false;
}
/**
 * Set a new cookie -- only bother if there is no existing cookie or it's at
 * least halfway stale, so you're not churning it.. When force is true,
 * always set a fresh cookie. (e.g. after mac failure upon server restart,
 * etc)
 * <p>
 *
 * @param context -
 * get user from context
 * @param request the request
 * @param response the response
 * @param force the force
 */
protected static void putAuthCookie(Context context,
        HttpServletRequest request, HttpServletResponse response,
        boolean force)
{
    Cookie cookie = gimmeCookie(request);
    long now = new Date().getTime();
    if (!force && cookie != null)
    {
        String crumb[] = cookie.getValue().split("\\!");
        if (crumb.length == 4)
        {
            long timestamp = -1;
            try
            {
                timestamp = Long.parseLong(crumb[0]);
            }
            catch (NumberFormatException e)
            {
                // unparseable timestamp: treat as stale, fall through
            }
            // check freshness - skip setting cookie if old one isn't stale
            if (timestamp > 0 && (now - timestamp) < (COOKIE_SELL_BY / 2))
            {
                return;
            }
        }
    }
    // no cookie can be issued without an authenticated user to cache
    EPerson user = context.getCurrentUser();
    if (user == null)
    {
        return;
    }
    // value = {timestamp}!{epersonID}!{client-IP}! ; MAC appended below
    String value = String.valueOf(now) + "!" + String.valueOf(user.getID())
            + "!" + request.getRemoteAddr() + "!";
    String mac = Utils.getMD5(value + cookieSecret);
    cookie = new Cookie(COOKIE_NAME, value + mac);
    cookie.setPath(request.getContextPath());
    response.addCookie(cookie);
    log.debug("Setting new cookie, value = \"" + value + mac + "\"");
}
/**
 * Get authenticated user for this service. Returns null upon failure, with
 * the implication that an error repsonse has already been "sent", so caller
 * should not set anything else in servlet response.
 *
 * @param request the request
 * @param response the response
 * @param username the username (may be null; then taken from Basic auth header)
 * @param password the password (may be null; then taken from Basic auth header)
 *
 * @return the context, or null when authentication failed and an error
 *         response was sent
 *
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws SQLException the SQL exception
 */
private static Context authenticate(HttpServletRequest request,
        HttpServletResponse response, String username, String password)
        throws IOException, SQLException
{
    Context context = new Context();
    // fast path: a valid session cookie carries the authenticated EPerson
    if (getAuthFromCookie(context, request))
    {
        putAuthCookie(context, request, response, false);
        return context;
    }
    // get username/password from Basic auth header if avail:
    String cred = request.getHeader("Authorization");
    if (cred != null && username == null && password == null)
    {
        log.info(LogManager.getHeader(context, "got creds", "Authorize: "
                + cred));
        StringTokenizer ct = new StringTokenizer(cred);
        // format: Basic {username:password in base64}
        if (ct.nextToken().equalsIgnoreCase("Basic"))
        {
            String crud = ct.nextToken();
            String dcrud = new String(Base64.decodeBase64(crud.getBytes()));
            int colon = dcrud.indexOf(':');
            if (colon > 0)
            {
                username = decodeFromURL(dcrud.substring(0, colon));
                password = decodeFromURL(dcrud.substring(colon + 1));
                log
                        .info(LogManager.getHeader(context, "auth",
                                "Got username=\"" + username
                                        + "\" out of \"" + crud + "\"."));
            }
        }
    }
    if (AuthenticationManager.authenticate(context, username, password,
            null, request) == AuthenticationMethod.SUCCESS)
    {
        // on SUCCESS the authenticator has set the current user in context
        log.info(LogManager.getHeader(context, "auth",
                "Authentication returned SUCCESS, eperson="
                        + context.getCurrentUser().getEmail()));
    }
    else
    {
        if (username == null)
        {
            log.info(LogManager.getHeader(context, "auth",
                    "No credentials, so sending WWW-Authenticate header."));
        }
        else
        {
            log.warn(LogManager.getHeader(context, "auth",
                    "Authentication FAILED, cred=" + cred));
        }
        // ...EXCEPT if dav.access.anonymous is true in config:
        if (!allowAnonymousAccess)
        {
            if (response != null)
            {
                response.setHeader("WWW-Authenticate",
                        "Basic realm=\"dspace\"");
                response.sendError(HttpServletResponse.SC_UNAUTHORIZED);
            }
            return null;
        }
    }
    // Set any special groups - invoke the authentication mgr.
    int[] groupIDs = AuthenticationManager.getSpecialGroups(context,
            request);
    for (int element : groupIDs)
    {
        context.setSpecialGroup(element);
        log.debug("Adding Special Group id=" + String.valueOf(element));
    }
    // force a fresh cookie since we just (re)authenticated
    putAuthCookie(context, request, response, true);
    return context;
}
/**
 * Return portion of URI path relevant to the DAV resource. We go through
 * the extra pain of chopping up getRequestURI() because it is NOT
 * URL-decoded by the Servlet container, while unfortunately getPathInfo()
 * IS pre-decoded, leaving a redundant "/" (and who knows what else) in the
 * handle. Since the "handle" may not even be a CNRI Handle, we don't want
 * to assume it even has a "/" (escaped or not).
 * <p>
 * Finally, search for doubled-up '/' separators and coalesce them.
 *
 * @param request the request
 *
 * @return String of undecoded path NOT starting with '/'.
 */
private static String getDavResourcePath(HttpServletRequest request)
{
    String path = request.getRequestURI();
    // strip context path and servlet path prefixes to leave resource path
    String ppath = path.substring(request.getContextPath().length());
    String scriptName = request.getServletPath();
    if (ppath.startsWith(scriptName))
    {
        ppath = ppath.substring(scriptName.length());
    }
    // turn all double '/' ("//") in URI into single '/'.
    // Scan right-to-left so each deletion cannot disturb the (lower)
    // indexes still to be visited. StringBuilder replaces the original
    // StringBuffer: the buffer is method-local, so no synchronization
    // is needed.
    StringBuilder sb = new StringBuilder(ppath);
    int i = ppath.length() - 2;
    if (i > 0)
    {
        while ((i = ppath.lastIndexOf("//", i)) > -1)
        {
            sb.deleteCharAt(i + 1);
            --i;
        }
    }
    // remove leading '/'
    if (sb.length() > 0 && sb.charAt(0) == '/')
    {
        sb.deleteCharAt(0);
    }
    ppath = sb.toString();
    log.debug("Got DAV URI: PATH_INFO=\"" + ppath + "\"");
    return ppath;
}
/**
 * override service() to add DAV methods.
 *
 * @param request the request
 * @param response the response
 *
 * @throws ServletException the servlet exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
protected void service(HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException
{
    String method = request.getMethod();
    // no authentication needed for OPTIONS
    if (method.equals(METHOD_OPTIONS))
    {
        doOptions(request, response);
    }
    else if (!serviceInternal(method, request, response))
    {
        // not a DAV method: let HttpServlet dispatch doGet/doPost/etc.
        super.service(request, response);
    }
}
/**
 * truncate string to max length for HTTP status message.
 *
 * @param msg the msg
 *
 * @return the message, cut to HTTP_STATUS_MESSAGE_MAX chars with a
 *         truncation notice appended when it was too long
 */
private static String truncateForStatus(String msg)
{
    if (msg.length() <= HTTP_STATUS_MESSAGE_MAX)
    {
        return msg;
    }
    return msg.substring(0, HTTP_STATUS_MESSAGE_MAX)
            + "... [Message truncated, see logs for details.]";
}
/**
 * Pass this request along to the appropriate resource and method. Includes
 * authentication, where needed. Return true if we handle this request,
 * false otherwise. True means response has been "sent", false not.
 *
 * @param method the method
 * @param request the request
 * @param response the response
 *
 * @return true, if service internal
 * @throws IOException Signals that an I/O exception has occurred.
 */
protected static boolean serviceInternal(String method,
        HttpServletRequest request, HttpServletResponse response)
        throws IOException
{
    // Fake new DAV methods not understood by the Apache Servlet base class
    // (returns HTTP/500 when it sees unrecognised method)
    // The way it is faked is by submitting "delete=true" (for a delete) or
    // "mkcol=true" (for a mk-collection) in the PUT URL's query parameters.
    // NOTE: getQueryString() returns null when the URI carries no query
    // string, so it must be null-checked first -- a plain PUT previously
    // triggered a NullPointerException here.
    String queryString = request.getQueryString();
    if (method.equals(METHOD_PUT) && queryString != null
            && queryString.indexOf("delete=true") >= 0)
    {
        method = METHOD_DELETE;
    }
    // (only reached as PUT when delete=true was absent, so delete wins
    // if both flags are ever supplied)
    if (method.equals(METHOD_PUT) && queryString != null
            && queryString.indexOf("mkcol=true") >= 0)
    {
        method = METHOD_MKCOL;
    }
    // if not a DAV method (i.e. POST), defer to superclass.
    if (!(method.equals(METHOD_PROPFIND) || method.equals(METHOD_PROPPATCH)
            || method.equals(METHOD_MKCOL) || method.equals(METHOD_COPY)
            || method.equals(METHOD_MOVE) || method.equals(METHOD_DELETE)
            || method.equals(METHOD_GET) || method.equals(METHOD_PUT)))
    {
        return false;
    }
    // set all incoming encoding to UTF-8
    request.setCharacterEncoding("UTF-8");
    String pathElt[] = getDavResourcePath(request).split("/");
    Context context = null;
    try
    {
        // this sends a response on failure, unless it throws.
        context = authenticate(request, response, null, null);
        if (context == null)
        {
            return true;
        }
        // Note: findResource sends error response if it fails.
        DAVResource resource = DAVResource.findResource(context, request,
                response, pathElt);
        if (resource != null)
        {
            if (method.equals(METHOD_PROPFIND))
            {
                resource.propfind();
            }
            else if (method.equals(METHOD_PROPPATCH))
            {
                resource.proppatch();
            }
            else if (method.equals(METHOD_COPY))
            {
                resource.copy();
            }
            else if (method.equals(METHOD_DELETE))
            {
                resource.delete();
            }
            else if (method.equals(METHOD_MKCOL))
            {
                resource.mkcol();
            }
            else if (method.equals(METHOD_GET))
            {
                resource.get();
            }
            else if (method.equals(METHOD_PUT))
            {
                resource.put();
            }
            else
            {
                // METHOD_MOVE passes the filter above but has no dispatch
                // case, so it lands here as 501 Not Implemented.
                response.sendError(HttpServletResponse.SC_NOT_IMPLEMENTED);
            }
            // commit and release the context; null it out so the finally
            // clause does not abort the completed transaction.
            context.complete();
            context = null;
        }
    }
    catch (SQLException e)
    {
        log.error(e.toString(), e);
        response
                .sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        truncateForStatus("Database access error: "
                                + e.toString()));
    }
    catch (AuthorizeException e)
    {
        // expected failure mode: log quietly unless debugging
        if (log.isDebugEnabled())
        {
            log.debug(e.toString(), e);
        }
        else
        {
            log.info(e.toString());
        }
        response.sendError(HttpServletResponse.SC_FORBIDDEN,
                truncateForStatus("Access denied: " + e.toString()));
    }
    catch (DAVStatusException e)
    {
        log.error(e.toString(), e);
        response.sendError(e.getStatus(), truncateForStatus(e
                .getMessage()));
    }
    catch (IOException e)
    {
        log.error(e.toString(), e);
        response
                .sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        truncateForStatus("IO Error: "
                                + e.toString()));
    }
    catch (Exception e)
    {
        // catch-all so one failing resource cannot crash the servlet.
        // (Previously mislabelled "IO Error" -- a copy-paste of the
        // IOException handler above.)
        log.error(e.toString(), e);
        response
                .sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        truncateForStatus("Internal error: "
                                + e.toString()));
    }
    finally
    {
        // Abort the context if it's still valid
        if (context != null && context.isValid())
        {
            context.abort();
        }
    }
    return true;
}
/**
 * Handler for HTTP OPTIONS method. Same for all resources under the WebDAV
 * root. Add DAV methods so client knows we handle DAV.
 *
 * @param request the request
 * @param response the response
 *
 * @throws ServletException the servlet exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
@Override
protected void doOptions(HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException
{
    // we only support minimal DAV (compliance class 1, no locking)
    response.addHeader("DAV", "1");
    response.addHeader("Allow",
            "GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, "
                    + "PROPFIND, PROPPATCH, MKCOL, COPY, MOVE");
}
/**
* Sugar-coating for URLDecoder.decode, used all over.
*
* @param in the in
*
* @return the string
*/
protected static String decodeFromURL(String in)
{
try
{
return URLDecoder.decode(in, "UTF-8");
}
catch (java.io.UnsupportedEncodingException e)
{
return "";
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Community;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.jdom.Element;
/**
 * Model the DSpace Site as a resource. The URI is the top level "collection".
 * Only an Administrator can modify properties.
 */
class DAVSite extends DAVResource
{
    /** The Constant news_topProperty. */
    private static final Element news_topProperty = new Element("news_top",
            DAV.NS_DSPACE);
    /** The Constant news_sideProperty. */
    private static final Element news_sideProperty = new Element("news_side",
            DAV.NS_DSPACE);
    /** The Constant default_licenseProperty. */
    private static final Element default_licenseProperty = new Element(
            "default_license", DAV.NS_DSPACE);
    /** All site properties: the common resource properties plus the
     *  news and default-license elements registered below. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);
    static
    {
        allProps.add(news_topProperty);
        allProps.add(news_sideProperty);
        allProps.add(default_licenseProperty);
    }
/* (non-Javadoc)
 * @see org.dspace.app.dav.DAVResource#typeValue()
 */
@Override
protected Element typeValue()
{
    // DAV resource-type marker identifying this as the Site.
    Element type = new Element("site", DAV.NS_DSPACE);
    return type;
}
/**
 * Instantiates a new DAV site resource.
 *
 * @param context the DSpace context
 * @param request the HTTP request
 * @param response the HTTP response
 * @param pathElt the path elements (empty for the site root)
 */
protected DAVSite(Context context, HttpServletRequest request,
        HttpServletResponse response, String pathElt[])
{
    super(context, request, response, pathElt);
    this.type = TYPE_SITE;
}
/**
 * Match resource URI: only the empty path (the DAV root) denotes the Site.
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 *
 * @return a DAVSite when the path is empty, otherwise null
 *
 * @throws DAVStatusException the DAV status exception
 * @throws SQLException the SQL exception
 */
protected static DAVResource matchResourceURI(Context context,
        HttpServletRequest request, HttpServletResponse response,
        String pathElt[]) throws DAVStatusException, SQLException
{
    boolean isRoot = pathElt.length == 0 || pathElt[0].length() == 0;
    if (!isRoot)
    {
        return null;
    }
    return new DAVSite(context, request, response, new String[0]);
}
/* (non-Javadoc)
 * @see org.dspace.app.dav.DAVResource#getAllProperties()
 */
@Override
protected List<Element> getAllProperties()
{
    // shared static list, built once in the class initializer.
    return allProps;
}
/* (non-Javadoc)
 * The Site's DAV children are the top-level communities.
 * @see org.dspace.app.dav.DAVResource#children()
 */
@Override
protected DAVResource[] children() throws SQLException
{
    Community[] tops = Community.findAllTop(this.context);
    DAVResource[] kids = new DAVResource[tops.length];
    for (int n = 0; n < tops.length; ++n)
    {
        kids[n] = new DAVCommunity(this.context, this.request,
                this.response, makeChildPath(tops[n]), tops[n]);
    }
    return kids;
}
/* (non-Javadoc)
 * Answers PROPFIND for the site-level properties (displayname, news_top,
 * news_side, default_license); everything else is handled by the common
 * property logic.
 * @see org.dspace.app.dav.DAVResource#propfindInternal(org.jdom.Element)
 */
@Override
protected Element propfindInternal(Element property) throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    String value = null;
    // displayname - title or handle.
    if (elementsEqualIsh(property, displaynameProperty))
    {
        value = ConfigurationManager.getProperty("dspace.name");
    }
    else if (elementsEqualIsh(property, news_topProperty))
    {
        value = ConfigurationManager.readNewsFile("news-top.html");
    }
    else if (elementsEqualIsh(property, news_sideProperty))
    {
        value = ConfigurationManager.readNewsFile("news-side.html");
    }
    else if (elementsEqualIsh(property, default_licenseProperty))
    {
        value = ConfigurationManager.getDefaultSubmissionLicense();
    }
    else
    {
        return commonPropfindInternal(property, true);
    }
    // value was set up by "if" clause:
    if (value == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                "Not found.");
    }
    Element p = new Element(property.getName(), property.getNamespace());
    p.setText(filterForXML(value));
    return p;
}
/* (non-Javadoc)
 * Handles PROPPATCH on site properties. Only the two news files are
 * writable (admin only); displayname is read-only (config-driven), and
 * everything else -- including default_license -- is rejected with 409.
 * @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
 */
@Override
protected int proppatchInternal(int action, Element prop)
        throws SQLException, AuthorizeException, IOException,
        DAVStatusException
{
    // REMOVE means "clear the file": write a null value
    String newValue = (action == DAV.PROPPATCH_REMOVE) ? null : prop
            .getText();
    if (elementsEqualIsh(prop, news_topProperty))
    {
        if (!AuthorizeManager.isAdmin(this.context))
        {
            throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                    "Not authorized to modify this property.");
        }
        ConfigurationManager.writeNewsFile("news-top.html", newValue);
    }
    else if (elementsEqualIsh(prop, news_sideProperty))
    {
        if (!AuthorizeManager.isAdmin(this.context))
        {
            throw new DAVStatusException(HttpServletResponse.SC_FORBIDDEN,
                    "Not authorized to modify this property.");
        }
        ConfigurationManager.writeNewsFile("news-side.html", newValue);
    }
    else if (elementsEqualIsh(prop, displaynameProperty))
    {
        throw new DAVStatusException(
                DAV.SC_CONFLICT,
                "The site name can only be changed through the DSpace Configuration, \"dspace.name\" property.");
    }
    else
    {
        throw new DAVStatusException(DAV.SC_CONFLICT, "The "
                + prop.getName() + " property cannot be changed.");
    }
    return HttpServletResponse.SC_OK;
}
/* (non-Javadoc)
 * The Site has no content body, so GET is not supported.
 * @see org.dspace.app.dav.DAVResource#get()
 */
@Override
protected void get() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "GET is not implemented for Site.");
}
/* (non-Javadoc)
 * The Site has no content body, so PUT is not supported.
 * @see org.dspace.app.dav.DAVResource#put()
 */
@Override
protected void put() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "PUT is not implemented for Site.");
}
/* (non-Javadoc)
 * The Site cannot be copied.
 * @see org.dspace.app.dav.DAVResource#copyInternal(org.dspace.app.dav.DAVResource, int, boolean, boolean)
 */
@Override
protected int copyInternal(DAVResource destination, int depth,
        boolean overwrite, boolean keepProperties)
        throws DAVStatusException, SQLException, AuthorizeException,
        IOException
{
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "COPY method not implemented.");
}
/* (non-Javadoc)
 * The Site cannot be deleted.
 * @see org.dspace.app.dav.DAVResource#deleteInternal()
 */
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
        AuthorizeException, IOException
{
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "DELETE method not implemented for Site.");
}
/* (non-Javadoc)
 * Collections cannot be created directly under the Site via DAV.
 * @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
 */
@Override
protected int mkcolInternal(String waste) throws DAVStatusException,
        SQLException, AuthorizeException, IOException
{
    throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
            "MKCOL method not allowed for Site.");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.jdom.Element;
/**
 * The Lookup resource translates a DSpace persistent object identifier (i.e. an
 * Item or Bitstream Handle) into a DAV resource URI for the LNI. It accepts two
 * simple, flexible formats: one for Item handles and one for a bitstream within
 * an Item.
 * <p>
 * Any GET, PUT, PROPFIND, etc response gets a "Temporarily Moved" status and
 * the DAV URL in the "Location:" header of the response.
 * <p>
 * The "lookup" URI format:
 *
 * <pre>
 * {prefix}/lookup/handle/{hdl-prefix}/{hdl-suffix} ... item Handle
 * e.g.
 * {prefix}/lookup/handle/1234.56/99 ... item Handle
 * {prefix}/lookup/handle/1234.56%2f99 ... item Handle
 * {prefix}/lookup/bitstream-handle/{seq-id}/{hdl-prefix}/{hdl-suffix}
 * e.g.
 * {prefix}/lookup/bitstream-handle/13/1234.56/99 ... bitstream Handle
 * {prefix}/lookup/bitstream-handle/13/1234.56%2f99 ... bitstream Handle
 * </pre>
 */
class DAVLookup extends DAVResource
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVLookup.class);
/**
 * Instantiates a new DAV lookup resource.
 *
 * @param context the DSpace context
 * @param request the HTTP request
 * @param response the HTTP response
 * @param pathElt the path elements ("lookup", then the handle parts)
 */
protected DAVLookup(Context context, HttpServletRequest request,
        HttpServletResponse response, String pathElt[])
{
    super(context, request, response, pathElt);
}
// empty property list, this class doesn't implement propfind.
/** The Constant allProps. */
private static final List<Element> allProps = new ArrayList<Element>();
/* (non-Javadoc)
 * @see org.dspace.app.dav.DAVResource#getAllProperties()
 */
@Override
protected List<Element> getAllProperties()
{
    return allProps;
}
/**
 * Match the URIs this subclass understands and return the corresponding
 * resource. Only URIs whose first path element is "lookup" are claimed.
 *
 * @param context the context
 * @param request the request
 * @param response the response
 * @param pathElt the path elt
 *
 * @return a DAVLookup resource if we can parse this URI, or null.
 *
 * @throws DAVStatusException the DAV status exception
 * @throws SQLException the SQL exception
 */
protected static DAVResource matchResourceURI(Context context,
        HttpServletRequest request, HttpServletResponse response,
        String pathElt[]) throws DAVStatusException, SQLException
{
    boolean isLookup = pathElt[0].equals("lookup");
    return isLookup
            ? new DAVLookup(context, request, response, pathElt)
            : null;
}
/**
 * Send a redirect (302) response to client with DAV URL of the resource for
 * this handle and/or bitstream. Puts URL in the <code>Location:</code>
 * header.
 *
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws SQLException the SQL exception
 * @throws DAVStatusException when the URI cannot be parsed or the handle
 *         or bitstream cannot be resolved
 */
private void doRedirect() throws IOException, SQLException,
        DAVStatusException
{
    DSpaceObject dso = null;
    String bsPid = null;
    /*
     * FIXME: (maybe?) NOTE: This is currently hard-wired to accomodate the
     * syntax of Handles, with "prefix/suffix" separated by the slash --
     * that means the Handle probably takes up multiple path elements,
     * unless the client escaped the '/'. This code *might* need adjusting
     * if we allow other kinds of persistent identifiers for DSpace objects.
     */
    // hdlStart = index of first path element belonging to the handle
    int hdlStart = -1;
    if (this.pathElt.length > 2 && this.pathElt[1].equals("handle"))
    {
        hdlStart = 2;
    }
    else if (this.pathElt.length > 3 && this.pathElt[1].equals("bitstream-handle"))
    {
        // bitstream form carries the sequence ID before the handle
        bsPid = this.pathElt[2];
        hdlStart = 3;
    }
    else
    {
        throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                "Unrecognized 'lookup' request format.");
    }
    String prefix = decodeHandle(this.pathElt[hdlStart]);
    String handle = null;
    // if "prefix" contains a slash, then it's the whole handle:
    if (prefix.indexOf('/') >= 0)
    {
        handle = prefix;
        log.debug("Lookup: resolving escaped handle \"" + handle + "\"");
    }
    else if (this.pathElt.length >= hdlStart + 2)
    {
        // handle was split over several path elements; rejoin with '/'
        StringBuffer hdl = new StringBuffer(prefix);
        for (int i = hdlStart + 1; i < this.pathElt.length; ++i)
        {
            hdl.append("/");
            hdl.append(this.pathElt[i]);
        }
        handle = hdl.toString();
        log.debug("Lookup: resolving multielement handle \"" + handle
                + "\"");
    }
    else
    {
        throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                "Incomplete handle in lookup request.");
    }
    // did handle lookup fail?
    dso = HandleManager.resolveToObject(this.context, handle);
    if (dso == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                "Cannot resolve handle \"" + handle + "\"");
    }
    // bitstream must exist too
    String location = makeLocation(dso, bsPid);
    if (location == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                "Bitstream \"" + bsPid + "\" does not exist in \"" + handle
                        + "\"");
    }
    // add query string -- unnecessary, but it helps naive clients that
    // use GET with "package" query arg to download an Item.
    String qs = this.request.getQueryString();
    if (qs != null)
    {
        location += "?" + qs;
    }
    log.debug("Lookup returning redirect to: " + location);
    this.response.setHeader("Location", location);
    this.response.sendError(HttpServletResponse.SC_MOVED_TEMPORARILY,
            "These are not the droids you are looking for.");
}
/**
* Create URI as string for a given handle and optional bitstream. URI is
* relative to top of DAV hierarchy, but starts with '/'.
*
* @param handle handle of a DSpace object (Item, Collection, etc)
* @param bsPid bitstream persistent identifier.
*
* @return "absolute" URI from top of DAV hierarchy
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
*/
protected String makeURI(String handle, String bsPid) throws IOException,
        SQLException
{
    // Resolve the handle; a DAV URI only exists for a resolvable object.
    DSpaceObject resolved = HandleManager.resolveToObject(this.context, handle);
    return (resolved == null) ? null : makeURI(resolved, bsPid);
}
/**
* Create URI as string for a given handle and optional bitstream. URI is
* relative to top of DAV hierarchy, but starts with '/'.
*
* @param dso a DSpace object (Item, Collection, etc)
* @param bsPid bitstream persistent identifier.
*
* @return "absolute" URI from top of DAV hierarchy
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
*/
private String makeURI(DSpaceObject dso, String bsPid) throws IOException,
        SQLException
{
    // When a bitstream sequence ID was supplied, verify it names a real
    // bitstream on an Item before building any URI.
    if (bsPid != null)
    {
        if (dso.getType() != Constants.ITEM)
        {
            log.warn("Non-Item with Bitstream Sequence ID in DAV Lookup.");
            return null;
        }
        try
        {
            int pid = Integer.parseInt(bsPid);
            if (DAVBitstream.getBitstreamBySequenceID((Item) dso, pid) == null)
            {
                log.warn("Bitstream Sequence ID Not Found in DAV Lookup: \""
                        + bsPid + "\"");
                return null;
            }
        }
        catch (NumberFormatException nfe)
        {
            log.warn("Invalid Bitstream Sequence ID in DAV Lookup: \""
                    + bsPid + "\"");
            return null;
        }
    }
    // "/dso_<handle>" optionally followed by "/bitstream_<seqId>".
    String base = "/" + DAVDSpaceObject.getPathElt(dso);
    return (bsPid == null) ? base : base + "/bitstream_" + bsPid;
}
// returns fully-qualified URL or null upon error.
/**
* Make location.
*
* @param dso the dso
* @param bsPid the bs pid
*
* @return the string
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
*/
private String makeLocation(DSpaceObject dso, String bsPid)
        throws IOException, SQLException
{
    // Build the hierarchy-relative URI first; bail out if it cannot exist.
    String uri = makeURI(dso, bsPid);
    if (uri == null)
    {
        return null;
    }
    // hrefPrefix() ends with '/' and uri starts with one; drop the duplicate.
    return hrefPrefix() + uri.substring(1);
}
/**
* placeholder that does nothing since propfind() is overridden.
*
* @param property the property
*
* @return the element
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
@Override
protected Element propfindInternal(Element property) throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    // Placeholder: never reached, because propfind() is overridden to
    // redirect instead of enumerating properties.
    return null;
}
/**
* Override propfind() to make sure it always returns a redirect.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
@Override
protected void propfind() throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    // A lookup resource has no properties of its own; even PROPFIND is
    // answered with a redirect to the canonical resource URL.
    this.doRedirect();
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#proppatchInternal(int, org.jdom.Element)
*/
@Override
protected int proppatchInternal(int mode, Element prop)
        throws SQLException, AuthorizeException, IOException,
        DAVStatusException
{
    // Lookup resources are synthetic and carry no writable properties.
    return HttpServletResponse.SC_METHOD_NOT_ALLOWED;
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#get()
*/
@Override
protected void get() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    // GET is answered with a redirect to the canonical resource URL.
    this.doRedirect();
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#put()
*/
@Override
protected void put() throws SQLException, AuthorizeException,
        IOException, DAVStatusException
{
    // PUT is also redirected; clients must PUT to the real resource URL.
    this.doRedirect();
}
/**
* Reject copy. Client should get resource URL first.
*
* @param destination the destination
* @param depth the depth
* @param overwrite the overwrite
* @param keepProperties the keep properties
*
* @return the int
*
* @throws DAVStatusException the DAV status exception
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
*/
@Override
protected int copyInternal(DAVResource destination, int depth,
        boolean overwrite, boolean keepProperties)
        throws DAVStatusException, SQLException, AuthorizeException,
        IOException
{
    // COPY cannot be redirected meaningfully; the client should resolve
    // the lookup to a concrete resource URL first and COPY that.
    throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
            "COPY method not allowed on lookup resource.");
}
/**
* This should never get called.
*
* @return the DAV resource[]
*
* @throws SQLException the SQL exception
*/
@Override
protected DAVResource[] children() throws SQLException
{
    // Should never be called: lookup resources always redirect before any
    // hierarchy traversal can occur.
    return new DAVResource[0];
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#typeValue()
*/
@Override
protected Element typeValue()
{
    // No dspace:type value; this resource exists only to issue redirects.
    return null;
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#deleteInternal()
*/
@Override
protected int deleteInternal() throws DAVStatusException, SQLException,
        AuthorizeException, IOException
{
    // DELETE through a lookup URI is not supported.
    throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
            "DELETE method not implemented for Lookup.");
}
/* (non-Javadoc)
* @see org.dspace.app.dav.DAVResource#mkcolInternal(java.lang.String)
*/
@Override
protected int mkcolInternal(String waste) throws DAVStatusException,
        SQLException, AuthorizeException, IOException
{
    // MKCOL makes no sense on a redirect-only resource.
    throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
            "MKCOL method not allowed for Lookup.");
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.jdom.Element;
/**
* Superclass for resources representing DSpace Objects like Item, Collection
* etc. Defines the behavior of DSpace "resources" in the WebDAV interface; maps
* DAV operations onto DSpace object.
*/
abstract class DAVDSpaceObject extends DAVResource
{
    /** Object of this resource, set by subclass' initializer. */
    protected DSpaceObject dso = null;

    /** Prototype of DAV Property "handle". */
    protected static final Element handleProperty = new Element("handle",
            DAV.NS_DSPACE);

    /**
     * Special character used to separate handle prefix from suffix in DAV
     * resource URIs - substitute this for the '/' normally used in Handle
     * syntax since the '/' causes all sorts of problems for broken DAV
     * clients.
     */
    private static final char handleSeparator = '$';

    /**
     * Instantiates a new DAV DSpace-object resource.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elements of this resource's URI
     * @param dso the DSpace object this resource represents
     */
    protected DAVDSpaceObject(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[], DSpaceObject dso)
    {
        super(context, request, response, pathElt);
        this.dso = dso;
    }

    /**
     * Make URI path element for a DSpaceObject.
     *
     * @param dso the DSpaceObject, which needs to have a valid Handle.
     *
     * @return path element string or null if no handle.
     */
    protected static String getPathElt(DSpaceObject dso)
    {
        String handle = dso.getHandle();
        if (handle == null)
        {
            return null;
        }
        return getPathElt(handle);
    }

    /**
     * Make URI path element for a DSpaceObject.
     *
     * @param handle handle of a DSpaceObject.
     *
     * @return path element string.
     */
    protected static String getPathElt(String handle)
    {
        // Substitute the separator for the first '/' of the handle.
        // Guard is defensive: if handleSeparator were ever redefined to be
        // '/', the substitution would be pointless.
        if (handleSeparator != '/')
        {
            handle = handle.replaceFirst("/", String.valueOf(handleSeparator));
        }
        return "dso_" + encodeHandle(handle);
    }

    /**
     * Match the URIs this subclass understands and return the corresponding
     * resource. Since the "dso_" format can lead to several different resource
     * types, handle it here.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elements of the request URI
     *
     * @return the DAV resource, or null if the URI is not a "dso_" path
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException,
            AuthorizeException
    {
        // Match /dso_<handle>{...} .. look for last "dso_" element
        if (pathElt[0].startsWith("dso_"))
        {
            int i = 1;
            for (; i < pathElt.length && pathElt[i].startsWith("dso_"); ++i)
            {
                // empty
            }
            --i;
            String handle = decodeHandle(pathElt[i].substring(4));

            // Replace substituted handle separator char with '/' to
            // get back a normal handle: (inverse of getPathElt() above)
            int sepIndex = handle.indexOf(handleSeparator);
            if (sepIndex >= 0)
            {
                char hc[] = handle.toCharArray();
                hc[sepIndex] = '/';
                handle = String.copyValueOf(hc);
            }

            DSpaceObject dso = HandleManager.resolveToObject(context, handle);
            if (dso == null)
            {
                throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                        "Cannot resolve handle \"" + handle + "\"");
            }
            else if (dso.getType() == Constants.ITEM)
            {
                // An Item may be followed by exactly one "bitstream_" element.
                if (i + 1 < pathElt.length)
                {
                    if (pathElt[i + 1].startsWith("bitstream_"))
                    {
                        Bitstream bs = DAVBitstream.findBitstream(context,
                                (Item) dso, pathElt[i + 1]);
                        if (bs == null)
                        {
                            throw new DAVStatusException(
                                    HttpServletResponse.SC_NOT_FOUND,
                                    "Bitstream \"" + pathElt[i + 1]
                                            + "\" not found in item: "
                                            + pathElt[i]);
                        }
                        return new DAVBitstream(context, request, response,
                                pathElt, (Item) dso, bs);
                    }
                    else
                    {
                        throw new DAVStatusException(
                                HttpServletResponse.SC_NOT_FOUND,
                                "Illegal resource path, \""
                                        + pathElt[i + 1]
                                        + "\" is not a Bitstream identifier for item: "
                                        + pathElt[i]);
                    }
                }
                else
                {
                    return new DAVItem(context, request, response, pathElt,
                            (Item) dso);
                }
            }
            else if (dso.getType() == Constants.COLLECTION)
            {
                return new DAVCollection(context, request, response, pathElt,
                        (Collection) dso);
            }
            else if (dso.getType() == Constants.COMMUNITY)
            {
                return new DAVCommunity(context, request, response, pathElt,
                        (Community) dso);
            }
            else
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Unrecognized DSpace object type for handle=" + handle);
            }
        }
        return null;
    }

    /**
     * Interposed between subclass and common props, take care of shared props
     * like privileges, handle, dspace:type.
     *
     * @param property the property
     *
     * @return the element
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        if (elementsEqualIsh(property, handleProperty))
        {
            value = canonicalizeHandle(this.dso.getHandle());
        }
        else if (elementsEqualIsh(property, current_user_privilege_setProperty))
        {
            Element c = (Element) current_user_privilege_setProperty.clone();
            // if we're an admin we have all privs everywhere.
            if (AuthorizeManager.isAdmin(this.context))
            {
                addPrivilege(c, new Element("all", DAV.NS_DAV));
            }
            else
            {
                // Translate each DSpace action this user may perform on the
                // object into a DAV privilege element.
                for (int i = 0; i < Constants.actionText.length; ++i)
                {
                    if (AuthorizeManager
                            .authorizeActionBoolean(this.context, this.dso, i))
                    {
                        Element priv = actionToPrivilege(i);
                        if (priv != null)
                        {
                            addPrivilege(c, priv);
                        }
                    }
                }
            }
            return c;
        }
        else
        {
            return commonPropfindInternal(property,
                    this.dso.getType() != Constants.BITSTREAM);
        }
        // value was set up by "if" clause:
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }

    /**
     * Return value of DSpace type property. It is conveniently the same as our
     * internal type name.
     *
     * @return the element
     */
    @Override
    protected Element typeValue()
    {
        // NOTE(review): default-locale toLowerCase(); typeText values are
        // ASCII so this is safe unless the constants change.
        return new Element(Constants.typeText[this.dso.getType()].toLowerCase(),
                DAV.NS_DSPACE);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.jdom.Element;
/**
* The "workspace" resource is a collection (in the DAV sense) of all the
* current user's <code>WorkspaceItem</code>s. It is nothing more than a
* read-only collection to list these.
* <p>
* Its children are all the relevant <code>WorkspaceItem</code>s. It cannot
* be altered.
* <p>
*
* @author Larry Stone
* @see DAVWorkspaceItem
*/
class DAVWorkspace extends DAVResource
{
    /** log4j category. */
    private static Logger log = Logger.getLogger(DAVWorkspace.class);

    /** Properties advertised by a workspace: just the common DAV set. */
    private static List<Element> allProps = new ArrayList<Element>(commonProps);

    /**
     * Return the dspace:type property value: an empty "workspace" element in
     * the DSpace namespace.
     *
     * @return the type element
     */
    @Override
    protected Element typeValue()
    {
        return new Element("workspace", DAV.NS_DSPACE);
    }

    /**
     * Instantiates a new DAV workspace.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     */
    protected DAVWorkspace(Context context, HttpServletRequest request,
            HttpServletResponse response, String pathElt[])
    {
        super(context, request, response, pathElt);
        // "workspace" is not a DSpace object type, so it takes the
        // catch-all type code used by the PROPFIND type filter.
        this.type = TYPE_OTHER;
    }

    /**
     * Match the URIs this subclass understands and return the corresponding
     * resource.
     *
     * @param context the context
     * @param request the request
     * @param response the response
     * @param pathElt the path elt
     *
     * @return the DAV resource, or null if the URI is not under "/workspace"
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     */
    protected static DAVResource matchResourceURI(Context context,
            HttpServletRequest request, HttpServletResponse response,
            String pathElt[]) throws DAVStatusException, SQLException
    {
        // The "/workspace" URI:
        if (pathElt.length > 0 && pathElt[0].equals("workspace"))
        {
            if (pathElt.length > 1)
            {
                // Deeper paths name an individual workspace item.
                return DAVWorkspaceItem.matchResourceURI(context, request,
                        response, pathElt);
            }
            else
            {
                return new DAVWorkspace(context, request, response, pathElt);
            }
        }
        return null;
    }

    /**
     * Gets all properties this resource reports to a "propname" request.
     *
     * @return list of property prototype elements
     */
    @Override
    protected List<Element> getAllProperties()
    {
        return allProps;
    }

    /**
     * Children are the current user's WorkspaceItems; an anonymous session
     * (no current user) has none.
     *
     * @return array of DAVWorkspaceItem children, possibly empty
     *
     * @throws SQLException the SQL exception
     */
    @Override
    protected DAVResource[] children() throws SQLException
    {
        EPerson ep = this.context.getCurrentUser();
        if (ep != null)
        {
            WorkspaceItem wi[] = WorkspaceItem.findByEPerson(this.context, ep);
            log.debug("children(): Got " + String.valueOf(wi.length)
                    + " Workspace Items.");
            DAVResource result[] = new DAVResource[wi.length];
            for (int i = 0; i < wi.length; ++i)
            {
                result[i] = new DAVWorkspaceItem(this.context, this.request, this.response,
                        makeChildPath(DAVWorkspaceItem
                                .getPathElt(wi[i].getID())), wi[i]);
            }
            return result;
        }
        else
        {
            return new DAVResource[0];
        }
    }

    /**
     * Answer PROPFIND for a single property; only displayname is specific to
     * this resource, everything else is delegated to the common handler.
     *
     * @param property the requested property element
     *
     * @return the property element with its value filled in
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    @Override
    protected Element propfindInternal(Element property) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        String value = null;
        // displayname - title or handle.
        if (elementsEqualIsh(property, displaynameProperty))
        {
            value = "workspace";
        }
        else
        {
            return commonPropfindInternal(property, true);
        }
        // value was set up by "if" clause:
        if (value == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_NOT_FOUND,
                    "Not found.");
        }
        Element p = new Element(property.getName(), property.getNamespace());
        p.setText(filterForXML(value));
        return p;
    }

    /**
     * Workspace properties are read-only; any PROPPATCH is rejected.
     *
     * @param action SET or REMOVE (ignored)
     * @param prop the property the client tried to change
     *
     * @return never returns normally
     *
     * @throws DAVStatusException always, with SC_CONFLICT
     */
    @Override
    protected int proppatchInternal(int action, Element prop)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        throw new DAVStatusException(DAV.SC_CONFLICT, "The " + prop.getName()
                + " property cannot be changed.");
    }

    /**
     * GET is not supported on the workspace collection itself.
     *
     * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
     */
    @Override
    protected void get() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "GET method not implemented for workspace.");
    }

    /**
     * PUT is not supported on the workspace collection itself.
     *
     * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
     */
    @Override
    protected void put() throws SQLException, AuthorizeException,
            IOException, DAVStatusException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "PUT method not implemented for workspace.");
    }

    /**
     * COPY is not supported on the workspace collection itself.
     *
     * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
     */
    @Override
    protected int copyInternal(DAVResource destination, int depth,
            boolean overwrite, boolean keepProperties)
            throws DAVStatusException, SQLException, AuthorizeException,
            IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "COPY method not implemented for workspace.");
    }

    /**
     * DELETE is not supported on the workspace collection itself.
     *
     * @throws DAVStatusException always, with SC_NOT_IMPLEMENTED
     */
    @Override
    protected int deleteInternal() throws DAVStatusException, SQLException,
            AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_NOT_IMPLEMENTED,
                "DELETE method not implemented for Workspace.");
    }

    /**
     * MKCOL is not allowed on the workspace collection itself.
     *
     * @throws DAVStatusException always, with SC_METHOD_NOT_ALLOWED
     */
    @Override
    protected int mkcolInternal(String waste) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        throw new DAVStatusException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
                "MKCOL method not allowed for Workspace.");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.DSpaceObject;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.JDOMParseException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
/**
* Superclass for all DSpace "resources" in the WebDAV interface. Maps DAV
* operations onto DSpace objects. An instance is created for one HTTP request
* and discarded thereafter.
* <p>
* This class has the high-level driver for each DAV (or SOAP) method. It calls
* on subclasses for implementation details, e.g. how to get properties in and
* out of each type of resource, PUT, COPY, etc.
* <p>
* All of the interpretation (and generation) of DAV Resource URIs should be in
* this class, in methods such as <code>findResource()</code>,
* <code>hrefURL()</code> etc, calling on the <code>matchResource()</code>
* methods in some subclasses for help. NOTE: <code>matchResource</code>
* should be an abstract method but it cannot be since it is necessarily static.
* <p>
* DAVResource instances are generally only created by the
* <code>findResource()</code> method and the <code>children()</code>
* methods in resource subclasses.
*/
abstract class DAVResource
{
/** log4j category. */
private static Logger log = Logger.getLogger(DAVResource.class);
/** The path elt. */
protected String pathElt[] = null;
/** The request. */
protected HttpServletRequest request = null;
/** The response. */
protected HttpServletResponse response = null;
/** The context. */
protected Context context = null;
/** Optional limit of resources traversed in a PROPFIND - initialized from PROPFIND_LIMIT_CONFIG in config properties. Names maximum number of HREF's in PROPFIND result or 0 for unlimited. */
private static final String PROPFIND_LIMIT_CONFIG = "dav.propfind.limit";
/** The propfind resource limit. */
private static int propfindResourceLimit = ConfigurationManager
.getIntProperty(PROPFIND_LIMIT_CONFIG);
/** Resource type (i.e. DSpace object type) masks to implement type filter in PROPFIND. */
protected static final int TYPE_OTHER = 0x20; // 100000
/** The Constant TYPE_SITE. */
protected static final int TYPE_SITE = 0x10; // 10000
/** The Constant TYPE_COMMUNITY. */
protected static final int TYPE_COMMUNITY = 0x08; // 01000
/** The Constant TYPE_COLLECTION. */
protected static final int TYPE_COLLECTION = 0x04; // 00100
/** The Constant TYPE_ITEM. */
protected static final int TYPE_ITEM = 0x02; // 00010
/** The Constant TYPE_BITSTREAM. */
protected static final int TYPE_BITSTREAM = 0x01; // 00001
/** The Constant TYPE_ALL. */
protected static final int TYPE_ALL = 0x1F; // 11111
/** Type code, set by constructor in each subclass. */
protected int type = 0;
/** The output raw. */
private static XMLOutputter outputRaw = new XMLOutputter();
/** The output pretty. */
private static XMLOutputter outputPretty = new XMLOutputter(Format
.getPrettyFormat());
// enable dumping XML for last PROPFIND or PROPPATCH transaction.
/** The Constant debugXML. */
private static final boolean debugXML = ConfigurationManager
.getBooleanProperty("dav.debug.xml", false);
/** DAV Properties common to all resources:. */
protected static final Element displaynameProperty = new Element(
"displayname", DAV.NS_DAV);
/** The Constant resourcetypeProperty. */
protected static final Element resourcetypeProperty = new Element(
"resourcetype", DAV.NS_DAV);
/** The Constant typeProperty. */
protected static final Element typeProperty = new Element("type",
DAV.NS_DSPACE);
/** The Constant current_user_privilege_setProperty. */
protected static final Element current_user_privilege_setProperty = new Element(
"current-user-privilege-set", DAV.NS_DAV);
/** The common props. */
protected static List<Element> commonProps = new ArrayList<Element>();
static
{
commonProps.add(displaynameProperty);
commonProps.add(resourcetypeProperty);
commonProps.add(typeProperty);
commonProps.add(current_user_privilege_setProperty);
}
/**
* Instantiates a new DAV resource.
*
* @param context the context
* @param request the request
* @param response the response
* @param pathElt the path elt
*/
protected DAVResource(Context context, HttpServletRequest request,
        HttpServletResponse response, String pathElt[])
{
    this.context = context;
    this.request = request;
    this.response = response;
    // Defensive copy, so later changes to the caller's array cannot
    // affect this resource's path.
    this.pathElt = (String[]) ArrayUtils.clone(pathElt);
}
/*----------------- Abstracts -----------------------*/
/**
* Returns an array of <code>DAVResource</code>'s considered the direct
* children of this resource. Array can be empty but MUST NOT be null.
*
* @return array of immediate children, or empty array if none.
*
* @throws SQLException the SQL exception
*/
protected abstract DAVResource[] children() throws SQLException;
/**
* Execute a PROPFIND method request on this Resource, and insert the
* results into the <code>multistatus</code> XML element. This may be one
* of many <code>propfind()</code> calls that build up the result of a
* PROPFIND on a whole hierarchy. The depth option of the PROPFIND method is
* managed by the driver that calls this, so it is only responsible for its
* own resource.
*
* @param property the property
*
* @return the element
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract Element propfindInternal(Element property)
throws SQLException, AuthorizeException, IOException,
DAVStatusException;
/**
* Gets the all properties.
*
* @return list of all properties that resource wants known to a "propname"
* request.
*/
protected abstract List<Element> getAllProperties();
/**
* Execute a PROPPATCH method request on this Resource, and insert the
* results into the <code>multistatus</code> XML element.
*
* @param action either SET or REMOVE, taken from PROPERTYUPDATE element.
* @param prop the PROP element from the request structure.
*
* @return HTTP extended status code, e.g. 200 for success.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract int proppatchInternal(int action, Element prop)
throws SQLException, AuthorizeException, IOException,
DAVStatusException;
/**
* Output the GET method results for the resource to the response, should
* include content-type and length headers.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract void get() throws SQLException, AuthorizeException,
IOException, DAVStatusException;
/**
* Create a new resource out of the contents of this request. For example,
* add a new Item under an existing Collection. Might also replace a
* Bitstream (or install a new version).
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract void put() throws SQLException, AuthorizeException,
IOException, DAVStatusException;
/**
* Create a copy of a DAV resource under a different DAV "collection"
* resource, within the limits of DSpace semantics. Use this to install an
* Item in an additional Collection, for example. NOTE: copyInternal SHOULD
* only return success status codes; throw a DAVStatusException to indicate
* an error so the descriptive text can also be included.
*
* @param destination the destination
* @param depth the depth
* @param overwrite the overwrite
* @param keepProperties the keep properties
*
* @return HTTP status code (201 or 204 for success)
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract int copyInternal(DAVResource destination, int depth,
boolean overwrite, boolean keepProperties)
throws DAVStatusException, SQLException, AuthorizeException,
IOException;
/**
* Return value of dspace:type property, e.g. an empty element with the tag
* "dspace:item".
*
* @return JDOM Element to put in PROPFIND result as value of "dspace:type"
*/
protected abstract Element typeValue();
/**
* Execute a DELETE method request on this Resource. Inserts nothing into
* the <code>multistatus</code> XML element.
*
* @return HTTP extended status code, e.g. 200 for success.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract int deleteInternal() throws SQLException,
AuthorizeException, IOException, DAVStatusException;
/**
* Execute a MKCOL method request on this Resource. Inserts nothing into the
* <code>multistatus</code> XML element. Only makes sense to do a MKCOL
* operation on Communities and Collections.
*
* @param name the name
*
* @return HTTP extended status code, e.g. 200 for success.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected abstract int mkcolInternal(String name) throws SQLException,
AuthorizeException, IOException, DAVStatusException;
/*----------------- Interpreting Resource URIs -----------------------*/
/**
* Get resource named by a DAV URI. The URI can be relative (from the Site)
* or absolute if it starts with the WebDAV server's prefix.
*
* @param uri the uri
*
* @return resource object or null upon error.
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
* @throws DAVStatusException the DAV status exception
* @throws AuthorizeException the authorize exception
*/
protected DAVResource uriToResource(String uri) throws IOException,
        SQLException, DAVStatusException, AuthorizeException
{
    // Strip the absolute server prefix when present, then any leading
    // slash, leaving a path relative to the DAV root.
    String path = uri;
    String prefix = hrefPrefix();
    if (path.startsWith(prefix))
    {
        path = path.substring(prefix.length());
    }
    if (path.startsWith("/"))
    {
        path = path.substring(1);
    }
    return findResource(this.context, this.request, this.response, path.split("/"));
}
/**
* Creates a resource object corresponding to the path.
*
* @param context the context
* @param request the request
* @param response the response
* @param pathElt the path elt
*
* @return resource, or null if path cannot be interpreted. Sends HTTP
* status in the event of failure.
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
* @throws DAVStatusException the DAV status exception
* @throws AuthorizeException the authorize exception
*/
protected static DAVResource findResource(Context context,
        HttpServletRequest request, HttpServletResponse response,
        String pathElt[]) throws IOException, SQLException,
        DAVStatusException, AuthorizeException
{
    // Consult each resource family in turn; the first match wins.  The
    // order matters: specific URI schemes are tried before the generic
    // "dso_" handler.
    DAVResource found;
    if ((found = DAVSite.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVLookup.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVWorkspace.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVWorkflow.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVEPerson.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVItem.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVBitstream.matchResourceURI(context, request, response, pathElt)) != null
            || (found = DAVDSpaceObject.matchResourceURI(context, request, response, pathElt)) != null)
    {
        return found;
    }
    throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST, "Unrecognized DSpace resource URI");
}
/*----------------- Generating Resource URIs -----------------------*/
/**
* Construct URL prefix up to the resource path, for making up "href" values
* in e.g. PROPFIND responses.
*
* @return start of resource URI ending with a '/'.
*/
protected String hrefPrefix()
{
// Note: contextPath and servletPath include leading slashes but no
// trailing..
if (this.request == null)
{
return "/";
}
else
{
return this.request.getScheme() + "://" + this.request.getServerName() + ":"
+ String.valueOf(this.request.getServerPort())
+ this.request.getContextPath() + this.request.getServletPath() + "/";
}
}
/**
* Construct the whole absolute URL to resource - prefix plus path elements.
*
* @return the string
*/
protected String hrefURL()
{
StringBuffer result = new StringBuffer(hrefPrefix());
for (int i = 0; i < this.pathElt.length; ++i)
{
if (i + 1 < this.pathElt.length)
{
result.append(this.pathElt[i]).append("/");
}
else
{
result.append(this.pathElt[i]);
}
}
return result.toString();
}
/**
* Href to E person.
*
* @param ep the ep
*
* @return fully-qualfied URL to resource of given EPerson.
*/
protected String hrefToEPerson(EPerson ep)
{
return hrefPrefix() + DAVEPerson.getPath(ep);
}
/**
* Construct path to child DSpaceObject.
*
* @param child the child
*
* @return the string[]
*/
protected String[] makeChildPath(DSpaceObject child)
{
String bpath[] = makeChildPathInternal();
bpath[this.pathElt.length] = DAVDSpaceObject.getPathElt(child);
return bpath;
}
/**
* Construct path to child with last element predetermined.
*
* @param lastElt the last elt
*
* @return the string[]
*/
protected String[] makeChildPath(String lastElt)
{
String bpath[] = makeChildPathInternal();
bpath[this.pathElt.length] = lastElt;
return bpath;
}
// actaul work of building a child path.
/**
* Make child path internal.
*
* @return the string[]
*/
private String[] makeChildPathInternal()
{
String bpath[] = new String[this.pathElt.length + 1];
for (int k = 0; k < this.pathElt.length; ++k)
{
bpath[k] = this.pathElt[k];
}
return bpath;
}
/*----------------- Handling DAV Requests ------------------*/
/**
* PROPFIND method service: Collect parameters and launch the recursive
* generic propfind driver which in turn calls resource methods.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected void propfind() throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
// set all incoming encoding to UTF-8
this.request.setCharacterEncoding("UTF-8");
/*
* FIXME:(?) this is technically wrong, wrt. WebDAV protocol, but it's
* more efficient; default depth should be DAV.DAV_INFINITY; we cheat
* and make it 0.
*/
int depth = 0;
String sdepth = this.request.getHeader("Depth");
if (sdepth != null)
{
sdepth = sdepth.trim();
try
{
if (sdepth.equalsIgnoreCase("infinity"))
{
depth = DAV.DAV_INFINITY;
}
else
{
depth = Integer.parseInt(sdepth);
}
}
catch (NumberFormatException nfe)
{
throw new DAVStatusException(
HttpServletResponse.SC_BAD_REQUEST,
"Bad Depth header: " + sdepth, nfe);
}
}
// get object-type mask from request query args, e.g.
// type=ITEM,type=BITSTREAM ...
String types[] = this.request.getParameterValues("type");
int typeMask = typesToMask(types);
Document outdoc = propfindDriver(depth, this.request.getInputStream(),
typeMask);
if (outdoc != null)
{
this.response.setStatus(DAV.SC_MULTISTATUS);
this.response.setContentType("text/xml");
outputRaw.output(outdoc, this.response.getOutputStream());
if (debugXML)
{
log.debug("PROPFIND response = "
+ outputPretty.outputString(outdoc));
}
}
}
    /**
     * Inner logic for propfind; shared with the SOAP servlet.  Parses the
     * (optional) PROPFIND request body to decide which properties are
     * wanted, then runs the recursive crawler to build the multistatus
     * response document.
     *
     * @param depth crawl depth: 0, a positive count, or DAV.DAV_INFINITY
     * @param pfDoc stream with the PROPFIND request body (may be empty)
     * @param typeMask bitmask of resource types to include in the response
     *
     * @return multistatus response document
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    protected Document propfindDriver(int depth, InputStream pfDoc, int typeMask)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        // When there is no document, type defaults to <allprop>.
        int pfType = DAV.PROPFIND_ALLPROP;
        List<Element> pfProps = null;
        try
        {
            SAXBuilder builder = new SAXBuilder();
            Document reqdoc = builder.build(pfDoc);
            Element propfind = reqdoc.getRootElement();
            if (!propfind.getName().equals("propfind")
                    || !propfind.getNamespace().equals(DAV.NS_DAV))
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Bad Root Element, must be propfind in DAV:");
            }
            // Propfind child is be ONE of: allprop | propname | prop
            // if "prop", also get list of type names.
            List<Element> pfChild = propfind.getChildren();
            if (pfChild.size() > 0)
            {
                Element child0 = pfChild.get(0);
                String rawType = child0.getName();
                if (rawType.equalsIgnoreCase("prop"))
                {
                    pfType = DAV.PROPFIND_PROP;
                    pfProps = child0.getChildren();
                }
                else if (rawType.equalsIgnoreCase("allprop"))
                {
                    pfType = DAV.PROPFIND_ALLPROP;
                }
                else if (rawType.equalsIgnoreCase("propname"))
                {
                    pfType = DAV.PROPFIND_PROPNAME;
                }
                else
                {
                    // unrecognized child is logged but tolerated; request
                    // falls back to the <allprop> default set above.
                    log.warn(LogManager.getHeader(this.context, "propfind",
                            "Unknown TYPE child of <propfind>, named: \""
                                    + rawType + "\""));
                }
            }
            if (debugXML)
            {
                log.debug("PROPFIND request = "
                        + outputPretty.outputString(reqdoc));
            }
        }
        catch (JDOMParseException je)
        {
            // if there is no document we get error at line -1, so let it pass.
            if (je.getLineNumber() >= 0)
            {
                log.error(LogManager.getHeader(this.context, "propfind", je
                        .toString()));
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Could not parse request document: " + je.toString(), je);
            }
        }
        catch (JDOMException je)
        {
            log.error(LogManager.getHeader(this.context, "propfind", je.toString()));
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Could not parse request document: " + je.toString(), je);
        }
        // At this point, pfProps, pfType and URI define the whole request.
        // Construct response XML
        Element multistatus = new Element("multistatus", DAV.NS_DAV);
        Document outdoc = new Document(multistatus);
        propfindCrawler(multistatus, pfType, pfProps, depth, typeMask, 0);
        return outdoc;
    }
    // Recursive propfind driver: each call to
    // resource.propfindInternal accumulates <response>s in multistatus.
    /**
     * Propfind crawler: emit a &lt;response&gt; for this resource when it
     * passes the type filter, then recurse over children while depth
     * remains.  Properties are bucketed into OK / Not Found / Forbidden
     * propstat groups.
     *
     * @param multistatus the multistatus element accumulating responses
     * @param pfType one of DAV.PROPFIND_PROP / _ALLPROP / _PROPNAME
     * @param pfProps requested properties (used only for PROPFIND_PROP)
     * @param depth remaining crawl depth, or DAV.DAV_INFINITY
     * @param typeMask bitmask of resource types to report
     * @param count running count of resources visited so far
     *
     * @return updated count of resources visited
     *
     * @throws DAVStatusException the DAV status exception
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    private int propfindCrawler(Element multistatus, int pfType, List<Element> pfProps,
            int depth, int typeMask, int count) throws DAVStatusException,
            SQLException, AuthorizeException, IOException
    {
        if ((this.type & typeMask) != 0 || this.type == TYPE_OTHER)
        {
            // check the count of resources visited; guards against
            // runaway crawls when a limit is configured.
            ++count;
            if (propfindResourceLimit > 0 && count > propfindResourceLimit)
            {
                throw new DAVStatusException(HttpServletResponse.SC_CONFLICT,
                        "PROPFIND request exceeded server's limit on number of resources to query.");
            }
            String uri = hrefURL();
            log.debug("PROPFIND returning (count=" + String.valueOf(count)
                    + ") href=" + uri);
            List<Element> notFound = new ArrayList<Element>();
            List<Element> forbidden = new ArrayList<Element>();
            Element responseElt = new Element("response", DAV.NS_DAV);
            multistatus.addContent(responseElt);
            Element href = new Element("href", DAV.NS_DAV);
            href.setText(uri);
            responseElt.addContent(href);
            // just get the names
            if (pfType == DAV.PROPFIND_PROPNAME)
            {
                responseElt.addContent(makePropstat(
                        copyElementList(getAllProperties()),
                        HttpServletResponse.SC_OK, "OK"));
            }
            else
            {
                List<Element> success = new LinkedList<Element>();
                List<Element> props = (pfType == DAV.PROPFIND_ALLPROP) ? getAllProperties()
                        : pfProps;
                ListIterator pi = props.listIterator();
                while (pi.hasNext())
                {
                    Element property = (Element) pi.next();
                    try
                    {
                        Element value = propfindInternal(property);
                        if (value != null)
                        {
                            success.add(value);
                        }
                        else
                        {
                            // null from propfindInternal means "no value";
                            // report the property as Not Found.
                            notFound.add((Element) property.clone());
                        }
                    }
                    catch (DAVStatusException se)
                    {
                        if (se.getStatus() == HttpServletResponse.SC_NOT_FOUND)
                        {
                            notFound.add((Element) property.clone());
                        }
                        else
                        {
                            // other statuses get their own propstat entry
                            responseElt.addContent(makePropstat(
                                    (Element) property.clone(), se.getStatus(),
                                    se.getMessage()));
                        }
                    }
                    catch (AuthorizeException ae)
                    {
                        forbidden.add((Element) property.clone());
                    }
                }
                if (success.size() > 0)
                {
                    responseElt.addContent(makePropstat(success,
                            HttpServletResponse.SC_OK, "OK"));
                }
                if (notFound.size() > 0)
                {
                    responseElt.addContent(makePropstat(notFound,
                            HttpServletResponse.SC_NOT_FOUND, "Not found"));
                }
                if (forbidden.size() > 0)
                {
                    responseElt
                            .addContent(makePropstat(forbidden,
                                    HttpServletResponse.SC_FORBIDDEN,
                                    "Not authorized."));
                }
            }
        }
        // recurse on children; filter on types first.
        // child types are all lower bits than this type, so (type - 1) is
        // bit-mask of all the possible child types. Skip recursion if
        // all child types would be masked out anyway.
        if (depth != 0 && ((this.type - 1) & typeMask) != 0)
        {
            DAVResource[] kids = children();
            for (DAVResource element : kids)
            {
                count = element.propfindCrawler(multistatus, pfType, pfProps,
                        depth == DAV.DAV_INFINITY ? depth : depth - 1,
                        typeMask, count);
            }
        }
        return count;
    }
/**
* Service routine for PROPPATCH method on a resource: Take apart the
* PROPERTYUPDATE request document, call resource's proppatchInternal() for
* each property, and accumulate the response document.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected void proppatch() throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
// set all incoming encoding to UTF-8
this.request.setCharacterEncoding("UTF-8");
Document outdoc = proppatchDriver(this.request.getInputStream());
this.response.setStatus(DAV.SC_MULTISTATUS);
this.response.setContentType("text/xml");
outputRaw.output(outdoc, this.response.getOutputStream());
if (debugXML)
{
log.debug("PROPPATCH response = "
+ outputPretty.outputString(outdoc));
}
}
    /**
     * Inner logic for proppatch; shared with the SOAP servlet.  Parses the
     * PROPERTYUPDATE document and applies each SET/REMOVE property in
     * order.  The whole request is all-or-nothing: the first failing
     * property aborts the DB transaction and every remaining (and every
     * previously successful) property is reported as Failed Dependency.
     *
     * @param docStream stream with the PROPERTYUPDATE request body
     *
     * @return multistatus response document
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    protected Document proppatchDriver(InputStream docStream)
            throws SQLException, AuthorizeException, IOException,
            DAVStatusException
    {
        Document reqdoc = null;
        try
        {
            SAXBuilder builder = new SAXBuilder();
            reqdoc = builder.build(docStream);
        }
        catch (JDOMParseException je)
        {
            // if there is no document we get error at line -1, so let it pass.
            if (je.getLineNumber() >= 0)
            {
                log.error(LogManager.getHeader(this.context, "proppatch", je
                        .toString()));
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Could not parse PROPERTYUPDATE request document: "
                                + je.toString(), je);
            }
        }
        catch (JDOMException je)
        {
            log
                    .error(LogManager.getHeader(this.context, "proppatch", je
                            .toString()));
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Could not parse PROPERTYUPDATE request document: "
                            + je.toString(), je);
        }
        if (reqdoc == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Failed to parse any valid PROPERTYUPDATE document in request.");
        }
        Element pupdate = reqdoc.getRootElement();
        if (!pupdate.getName().equals("propertyupdate")
                || !pupdate.getNamespace().equals(DAV.NS_DAV))
        {
            log.warn(LogManager.getHeader(this.context, "proppatch",
                    "Got bad root element, XML=" + pupdate.toString()));
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Bad Root Element, must be propertyupdate in DAV:");
        }
        Element multistatus = new Element("multistatus", DAV.NS_DAV);
        Element msResponse = new Element("response", DAV.NS_DAV);
        multistatus.addContent(msResponse);
        Element href = new Element("href", DAV.NS_DAV);
        msResponse.addContent(href);
        href.addContent(hrefURL());
        // result status and accumulation:
        boolean failing = false;
        List<Element> failedDep = new LinkedList<Element>();
        List<Element> success = new LinkedList<Element>();
        // process the SET and REMOVE elements under PROPERTYUPDATE:
        ListIterator ci = pupdate.getChildren().listIterator();
        while (ci.hasNext())
        {
            int action = -1;
            Element e = (Element) ci.next();
            if (e.getName().equals("set")
                    && e.getNamespace().equals(DAV.NS_DAV))
            {
                action = DAV.PROPPATCH_SET;
            }
            else if (e.getName().equals("remove")
                    && e.getNamespace().equals(DAV.NS_DAV))
            {
                action = DAV.PROPPATCH_REMOVE;
            }
            else
            {
                // unknown children are logged and skipped, not fatal
                log.warn(LogManager.getHeader(this.context, "proppatch",
                        "Got unrecognized PROPERTYUPDATE element:"
                                + e.toString()));
                continue;
            }
            // PROP elements under SET or REMOVE
            ListIterator pi = e.getChildren().listIterator();
            while (pi.hasNext())
            {
                Element p = (Element) pi.next();
                if (p.getName().equals("prop")
                        && p.getNamespace().equals(DAV.NS_DAV))
                {
                    ListIterator propi = p.getChildren().listIterator();
                    while (propi.hasNext())
                    {
                        Element thisprop = (Element) propi.next();
                        // if this PROPPATCH request is failing, just
                        // accumulate properties for Failed Dependency status
                        if (failing)
                        {
                            failedDep.add(new Element(thisprop.getName(),
                                    thisprop.getNamespace()));
                        }
                        else
                        {
                            int status = 0;
                            String statusMsg = null;
                            try
                            {
                                status = proppatchCommonInternal(action,
                                        thisprop);
                            }
                            catch (DAVStatusException se)
                            {
                                status = se.getStatus();
                                statusMsg = se.getMessage();
                            }
                            catch (AuthorizeException ae)
                            {
                                status = HttpServletResponse.SC_FORBIDDEN;
                                statusMsg = "Permission denied.";
                            }
                            if (status == HttpServletResponse.SC_OK)
                            {
                                success.add(new Element(thisprop.getName(),
                                        thisprop.getNamespace()));
                                log.debug("proppatch SET/REMOVE OK, action="
                                        + String.valueOf(action) + ", prop="
                                        + thisprop.toString());
                            }
                            else
                            {
                                // first failure flips the whole request
                                // into Failed Dependency mode
                                failing = true;
                                msResponse.addContent(makePropstat(thisprop,
                                        status, statusMsg));
                                log
                                        .debug("proppatch SET/REMOVE FAILED with status="
                                                + String.valueOf(status)
                                                + ", on prop="
                                                + thisprop.toString());
                            }
                        }
                    }
                }
                else
                {
                    log.warn(LogManager.getHeader(this.context, "proppatch",
                            "No PROP element where expected, found:"
                                    + p.toString()));
                }
            }
        }
        // add success and failure propstat elements to response
        if (failing)
        {
            // earlier successes are retroactively reported as failed
            // dependencies and the transaction is rolled back.
            failedDep.addAll(success);
            if (failedDep.size() > 0)
            {
                msResponse.addContent(makePropstat(failedDep,
                        DAV.SC_FAILED_DEPENDENCY,
                        "Failed because another property failed."));
            }
            this.context.abort();
        }
        else
        {
            if (success.size() > 0)
            {
                msResponse.addContent(makePropstat(success,
                        HttpServletResponse.SC_OK, "OK"));
            }
            this.context.complete();
        }
        return new Document(multistatus);
    }
// call proppatchInternal after taking care of "common" properties
/**
* Proppatch common internal.
*
* @param action the action
* @param prop the prop
*
* @return the int
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
private int proppatchCommonInternal(int action, Element prop)
throws SQLException, AuthorizeException, IOException,
DAVStatusException
{
if (elementsEqualIsh(prop, resourcetypeProperty)
|| elementsEqualIsh(prop, typeProperty)
|| elementsEqualIsh(prop, current_user_privilege_setProperty))
{
throw new DAVStatusException(DAV.SC_CONFLICT, "The "
+ prop.getName() + " property cannot be changed.");
}
else
{
return proppatchInternal(action, prop);
}
}
/**
* Returns value of one of the common properties. Returns null when property
* isn't one of the common ones. Throws exception if property isn't
* available (i.e. 404). <br>
* NOTE: This MUST be called from the subclass's propfindInternal() method
* so it be passed the content object as a DSpaceObject.
* <p>
* Although the displayname is common to all, it is computed differently by
* each subclass so it's implemented there.
*
* @param property the property
* @param isCollection the is collection
*
* @return the element
*
* @throws DAVStatusException the DAV status exception
* @throws SQLException the SQL exception
*/
protected Element commonPropfindInternal(Element property,
boolean isCollection) throws DAVStatusException, SQLException
{
// DSpace object type -- also a special case, see typeValue method.
if (elementsEqualIsh(property, typeProperty))
{
Element p = (Element) typeProperty.clone();
p.addContent(typeValue());
return p;
}
// resourcetype -- special case, sub-element: collection or nothing
else if (elementsEqualIsh(property, resourcetypeProperty))
{
Element p = (Element) resourcetypeProperty.clone();
if (isCollection)
{
p.addContent(new Element("collection", DAV.NS_DAV));
}
return p;
}
// value is dspace:action element with allowable actions.
else if (elementsEqualIsh(property, current_user_privilege_setProperty))
{
Element c = (Element) current_user_privilege_setProperty.clone();
// if we're an admin we have all privs everywhere.
if (AuthorizeManager.isAdmin(this.context))
{
addPrivilege(c, new Element("all", DAV.NS_DAV));
}
else
{
addPrivilege(c, new Element("read", DAV.NS_DAV));
}
return c;
}
else
{
return null;
}
}
    /**
     * Service routine for COPY HTTP request.  Collects the Destination,
     * Depth and Overwrite headers plus the optional propertybehavior
     * request document, then delegates to copyDriver().
     *
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws DAVStatusException the DAV status exception
     */
    protected void copy() throws IOException, SQLException, AuthorizeException,
            DAVStatusException
    {
        // Destination arg from header
        String destination = this.request.getHeader("Destination");
        if (destination == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Missing the required request header \"Destination\"");
        }
        // Fix a common misfeature in clients: they will append
        // the final pathname element of the "source" URI to the
        // "destination" URI, which is misleading in our URI scheme so
        // we have to strip it off:
        try
        {
            String srcPath = (new URI(this.request.getRequestURI())).getPath();
            String destPath = (new URI(destination)).getPath();
            int slash = srcPath.lastIndexOf('/');
            if (slash > -1)
            {
                // lastElt includes the leading '/'
                String lastElt = srcPath.substring(slash);
                if (destPath.endsWith(lastElt))
                {
                    // the "+ 1" keeps the trailing '/' on the destination
                    destination = destination.substring(0, destination.length()
                            - lastElt.length() + 1);
                }
            }
            log.debug("Copy dest. URI repair: srcPath=" + srcPath
                    + ", destPath=" + destPath + ", final dest=" + destination);
        }
        catch (URISyntaxException e)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Illegal URI syntax in value of \"Destination\" header: "
                            + destination, e);
        }
        // Depth arg from header; defaults to infinity per WebDAV spec.
        int depth = DAV.DAV_INFINITY;
        String sdepth = this.request.getHeader("Depth");
        if (sdepth != null)
        {
            sdepth = sdepth.trim();
            try
            {
                if (sdepth.equalsIgnoreCase("infinity"))
                {
                    depth = DAV.DAV_INFINITY;
                }
                else
                {
                    depth = Integer.parseInt(sdepth);
                }
            }
            catch (NumberFormatException nfe)
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Illegal value in Depth request header: " + sdepth, nfe);
            }
        }
        // overwrite header logic; anything other than "T" means false.
        boolean overwrite = false;
        String soverwrite = this.request.getHeader("Overwrite");
        if (soverwrite != null && soverwrite.trim().equalsIgnoreCase("T"))
        {
            overwrite = true;
        }
        // keepProperties - extract from XML doc in request, if any..
        boolean keepProperties = false;
        Document reqdoc = null;
        try
        {
            SAXBuilder builder = new SAXBuilder();
            reqdoc = builder.build(this.request.getInputStream());
        }
        catch (JDOMParseException je)
        {
            // if there is no document we get error at line -1, so let it pass.
            if (je.getLineNumber() >= 0)
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Error parsing XML document in COPY request.", je);
            }
        }
        catch (JDOMException je)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Error parsing XML document in COPY request: "
                            + je.toString(), je);
        }
        if (reqdoc != null)
        {
            Element propertybehavior = reqdoc.getRootElement();
            Namespace ns = propertybehavior.getNamespace();
            if (!(ns != null && ns.equals(DAV.NS_DAV) && propertybehavior
                    .getName().equals("propertybehavior")))
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Illegal XML document in COPY request, root= "
                                + propertybehavior.toString());
            }
            // FIXME: (?) Punt on parsing exact list of properties to
            // "keepalive" since we don't implement it anyway.
            if (propertybehavior.getChild("keepalive", DAV.NS_DAV) != null)
            {
                keepProperties = true;
            }
            else if (propertybehavior.getChild("omit", DAV.NS_DAV) == null)
            {
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Illegal propertybehavior document in COPY request, no omit or keepalive child.");
            }
        }
        int result = copyDriver(destination, depth, overwrite, keepProperties);
        // any 2xx result from the driver is passed through as-is
        if (result >= 200 && result < 300)
        {
            this.response.setStatus(result);
        }
        else
        {
            throw new DAVStatusException(result, "COPY Failed.");
        }
    }
/**
* "copy" driver gets parameters from request and calls resource's method.
* This is shared with SOAP servelet.
*
* @param destination the destination
* @param depth the depth
* @param overwrite the overwrite
* @param keepProperties the keep properties
*
* @return HTTP success status code (201 or 204 for success), Does not
* return errors, but throws exception.
*
* @throws IOException Signals that an I/O exception has occurred.
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws DAVStatusException the DAV status exception
*/
protected int copyDriver(String destination, int depth, boolean overwrite,
boolean keepProperties) throws IOException, SQLException,
AuthorizeException, DAVStatusException
{
DAVResource destResource = uriToResource(destination);
if (destResource == null)
{
throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
"Destination is not a legal DAV resource: " + destination);
}
log.debug("Executing COPY method, depth=" + String.valueOf(depth)
+ ", overwrite=" + String.valueOf(overwrite)
+ ", keepProperties=" + String.valueOf(keepProperties)
+ ", destination=\"" + destination + "\"");
int result = copyInternal(destResource, depth, overwrite,
keepProperties);
this.context.commit();
return result;
}
/**
* Make bitmask out resource-type keywords. Used by DAV and SOAP interfaces.
*
* @param types the types
*
* @return binary mask of types to allow
*
* @throws DAVStatusException if an unrecognized type keyword is given.
*/
protected static int typesToMask(String types[]) throws DAVStatusException
{
int typeMask = 0;
if (types == null || types.length == 0)
{
typeMask = TYPE_ALL;
}
else
{
for (String element : types)
{
String key = element.trim();
if (key.equalsIgnoreCase("SITE"))
{
typeMask |= TYPE_SITE;
}
else if (key.equalsIgnoreCase("COMMUNITY"))
{
typeMask |= TYPE_COMMUNITY;
}
else if (key.equalsIgnoreCase("COLLECTION"))
{
typeMask |= TYPE_COLLECTION;
}
else if (key.equalsIgnoreCase("ITEM"))
{
typeMask |= TYPE_ITEM;
}
else if (key.equalsIgnoreCase("BITSTREAM"))
{
typeMask |= TYPE_BITSTREAM;
}
else
{
throw new DAVStatusException(
HttpServletResponse.SC_BAD_REQUEST,
"Unrecognized type keyword: " + key);
}
}
}
return typeMask;
}
/**
* Predicate, test if name and namespace of Elements match.
*
* @param a the a
* @param b the b
*
* @return true if elements have same name and namespace.
*/
protected boolean elementsEqualIsh(Element a, Element b)
{
Namespace nsa = a.getNamespace();
Namespace nsb = b.getNamespace();
return a.getName().equals(b.getName())
&& (nsa == nsb || (nsa != null && nsb != null && nsa
.equals(nsb)));
}
// assemble a PROPSTAT element for response document.
/**
* Make propstat.
*
* @param property the property
* @param status the status
* @param message the message
*
* @return the element
*/
private Element makePropstat(Element property, int status, String message)
{
Element ps = makePropstatInternal(status, message);
Element p = new Element("prop", DAV.NS_DAV);
Element pp = new Element(property.getName(), property.getNamespace());
p.addContent(pp);
ps.addContent(0, p);
return ps;
}
/**
* Make propstat.
*
* @param properties the properties
* @param status the status
* @param message the message
*
* @return the element
*/
private static Element makePropstat(List<Element> properties, int status,
String message)
{
Element ps = makePropstatInternal(status, message);
Element p = new Element("prop", DAV.NS_DAV);
p.addContent(properties);
ps.addContent(0, p);
return ps;
}
/**
* Make propstat internal.
*
* @param status the status
* @param message the message
*
* @return the element
*/
private static Element makePropstatInternal(int status, String message)
{
Element ps = new Element("propstat", DAV.NS_DAV);
Element s = new Element("status", DAV.NS_DAV);
s.addContent("HTTP/1.1 " + String.valueOf(status) + " " + message);
ps.addContent(s);
return ps;
}
/*----------------- Utility Functions -----------------------*/
/**
* Translate DSpace authorization "action" code into a WebDAV ACL privilege
* element; make the most obvious mappings for ones that can be represented
* accurately and allocate DSpace-namespace elements for the rest.
*
* @param action the action
*
* @return DAV privilege element, or null when there is no mapping.
*/
protected static Element actionToPrivilege(int action)
{
if (action == Constants.ADD)
{
return new Element("bind", DAV.NS_DAV);
}
else if (action == Constants.COLLECTION_ADMIN)
{
return new Element("collection_admin", DAV.NS_DSPACE);
}
else if (action == Constants.DEFAULT_BITSTREAM_READ)
{
return new Element("default_bitstream_read", DAV.NS_DSPACE);
}
else if (action == Constants.DEFAULT_ITEM_READ)
{
return new Element("default_item_read", DAV.NS_DSPACE);
}
else if (action == Constants.DELETE || action == Constants.REMOVE)
{
return new Element("unbind", DAV.NS_DAV);
}
else if (action == Constants.READ)
{
return new Element("read", DAV.NS_DAV);
}
else if (action == Constants.WORKFLOW_ABORT)
{
return new Element("workflow_abort", DAV.NS_DSPACE);
}
else if (action == Constants.WORKFLOW_STEP_1)
{
return new Element("workflow_step_1", DAV.NS_DSPACE);
}
else if (action == Constants.WORKFLOW_STEP_2)
{
return new Element("workflow_step_2", DAV.NS_DSPACE);
}
else if (action == Constants.WORKFLOW_STEP_3)
{
return new Element("workflow_step_3", DAV.NS_DSPACE);
}
else if (action == Constants.WRITE)
{
return new Element("write", DAV.NS_DAV);
}
else
{
return null;
}
}
/**
* Add a privilege element to property like current-user-privilege-set,
* wrapped in <privilege> first.
*
* @param prop the prop
* @param thePriv the the priv
*/
protected static void addPrivilege(Element prop, Element thePriv)
{
Element priv = new Element("privilege", DAV.NS_DAV);
priv.addContent(thePriv);
prop.addContent(priv);
}
// make a "deep" copy of list of empty elements so we can
// add "all props" list to a propstat with impunity..
/**
* Copy element list.
*
* @param el the el
*
* @return the list
*/
private static List<Element> copyElementList(List<Element> el)
{
List<Element> result = new ArrayList<Element>(el.size());
for (Element e : el)
{
result.add((Element)e.clone());
}
return result;
}
/**
* Get canonical form of persistent identifier (Handle), but allow it to be
* null. Canonical form is an URN or perhaps URL. For CNRI Handle System
* handles, it is "hdl:" followed by handle.
*
* @param handle object handle in bare form, e.g. "12345/xyz".
*
* @return canonical form of handle, or null if arg was null.
*/
protected static String canonicalizeHandle(String handle)
{
if (handle != null)
{
if (handle.startsWith("hdl:"))
{
return handle;
}
else
{
return "hdl:" + handle;
}
}
else
{
return null;
}
}
/**
* Utility to filter out characters illegal XML characters when putting
* something of random provenance into TEXT element.
* <p>
* See <a href="http://www.w3.org/TR/2004/REC-xml-20040204/#charsets">
* http://www.w3.org/TR/2004/REC-xml-20040204/#charsets</a> for rules,
* essentially, anything above 0x20 and 0x09 (\t, HT), 0x0a (\n, NL), 0x0d
* (\r, CR).
* <p>
* FIXME: for now, just replace all control chars with '?' Maybe someday
* attempt to do something more meaningful, once it's clear what that would
* be.
*
* @param in the in
*
* @return the string
*/
protected static String filterForXML(String in)
{
final Pattern illegals = Pattern
.compile("[\\x00-\\x08\\x0b\\x0c\\x0e-\\x1f]");
Matcher m = illegals.matcher(in);
if (m.find())
{
return m.replaceAll("?");
}
else
{
return in;
}
}
/**
* Mostly sugar around catching the UnsupportedEncodingException.
*
* @param pathFrag the path frag
*
* @return URL-decoded version of an encoded handle
*/
protected static String decodeHandle(String pathFrag)
{
try
{
String handle = URLDecoder.decode(pathFrag, "UTF-8");
// XXX KLUDGE: WebDAV client cadaver double-encodes the %2f,
// into "%252f" so look for a leftover %25 (== '%').
if (pathFrag.indexOf("%25") >= 0)
{
handle = URLDecoder.decode(handle, "UTF-8");
}
return handle;
}
catch (java.io.UnsupportedEncodingException e)
{
return "";
}
}
/**
* Mostly sugar around catching the UnsupportedEncodingException.
*
* @param handle the handle
*
* @return URL-encoded version of handle
*/
protected static String encodeHandle(String handle)
{
try
{
return java.net.URLEncoder.encode(handle, "UTF-8");
}
catch (java.io.UnsupportedEncodingException ue)
{
// highly unlikely.
}
return "";
}
/**
* Service routine for DELETE method on a resource:.
*
* @throws SQLException the SQL exception
* @throws AuthorizeException the authorize exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws DAVStatusException the DAV status exception
*/
protected void delete() throws SQLException,
AuthorizeException, IOException, DAVStatusException
{
// set all incoming encoding to UTF-8
this.request.setCharacterEncoding("UTF-8");
Document outdoc = deleteDriver(this.request.getInputStream());
this.response.setStatus(DAV.SC_MULTISTATUS);
this.response.setContentType("text/xml");
outputRaw.output(outdoc, this.response.getOutputStream());
if (debugXML)
{
log.debug("DELETE response = " + outputPretty.outputString(outdoc));
}
}
    /**
     * Inner logic for delete. Shared with SOAP servlet(??)  Requires a
     * parseable request document whose root element is named "delete";
     * on success returns a one-response multistatus document.
     *
     * @param docStream stream with the DELETE request body
     *
     * @return multistatus response document
     *
     * @throws SQLException the SQL exception
     * @throws AuthorizeException the authorize exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws DAVStatusException the DAV status exception
     */
    protected Document deleteDriver(InputStream docStream) throws SQLException,
            AuthorizeException, IOException, DAVStatusException
    {
        Document reqdoc = null;
        try
        {
            SAXBuilder builder = new SAXBuilder();
            reqdoc = builder.build(docStream);
        }
        catch (JDOMParseException je)
        {
            // if there is no document we get error at line -1, so let it pass.
            if (je.getLineNumber() >= 0)
            {
                log.error(LogManager
                        .getHeader(this.context, "delete", je.toString()));
                throw new DAVStatusException(
                        HttpServletResponse.SC_BAD_REQUEST,
                        "Could not parse DELETE request document: "
                                + je.toString(), je);
            }
        }
        catch (JDOMException je)
        {
            log.error(LogManager.getHeader(this.context, "delete", je.toString()));
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Could not parse DELETE request document: " + je.toString(), je);
        }
        if (reqdoc == null)
        {
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Failed to parse any valid DELETE document in request.");
        }
        Element pupdate = reqdoc.getRootElement();
        // NOTE(review): unlike proppatchDriver, the root element's
        // namespace is not checked here -- confirm that is intentional.
        if (!pupdate.getName().equals("delete"))
        {
            log.warn(LogManager.getHeader(this.context, "delete",
                    "Got bad root element, XML=" + pupdate.toString()));
            throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                    "Bad Root Element, must be delete");
        }
        deleteInternal();
        Element multistatus = new Element("multistatus", DAV.NS_DAV);
        Element msResponse = new Element("response", DAV.NS_DAV);
        multistatus.addContent(msResponse);
        Element href = new Element("href", DAV.NS_DAV);
        msResponse.addContent(href);
        href.addContent(hrefURL());
        return new Document(multistatus);
    }
/**
 * Service routine for the MKCOL method on a resource: reads the request
 * body, performs the collection creation via the shared driver, and writes
 * a multistatus reply back to the client.
 *
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws DAVStatusException the DAV status exception
 */
protected void mkcol() throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    // All incoming request data is interpreted as UTF-8.
    this.request.setCharacterEncoding("UTF-8");

    // Delegate parsing and the actual MKCOL work to the shared driver.
    Document result = mkcolDriver(this.request.getInputStream());

    // Send the multistatus XML document back to the client.
    this.response.setStatus(DAV.SC_MULTISTATUS);
    this.response.setContentType("text/xml");
    outputRaw.output(result, this.response.getOutputStream());
    if (debugXML)
    {
        log.debug("MKCOL response = " + outputPretty.outputString(result));
    }
}
/**
 * Inner logic for mkcol. Shared with SOAP servlet(??)
 *
 * Parses the request body, which must be a {@code <mkcol>} document whose
 * text content names the node to create, then delegates creation to
 * mkcolInternal() and returns a DAV multistatus document.
 *
 * @param docStream the doc stream
 *
 * @return the multistatus response document
 *
 * @throws SQLException the SQL exception
 * @throws AuthorizeException the authorize exception
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws DAVStatusException if the request body cannot be parsed, or it is
 *             not a mkcol document with a non-empty node name
 */
protected Document mkcolDriver(InputStream docStream) throws SQLException,
        AuthorizeException, IOException, DAVStatusException
{
    Document reqdoc = null;
    try
    {
        SAXBuilder builder = new SAXBuilder();
        reqdoc = builder.build(docStream);
    }
    catch (JDOMParseException je)
    {
        // if there is no document we get error at line -1, so let it pass.
        if (je.getLineNumber() >= 0)
        {
            log
                    .error(LogManager.getHeader(this.context, "mkcol", je
                            .toString()));
            throw new DAVStatusException(
                    HttpServletResponse.SC_BAD_REQUEST,
                    "Could not parse MKCOL request document: "
                            + je.toString(), je);
        }
    }
    catch (JDOMException je)
    {
        log.error(LogManager.getHeader(this.context, "mkcol", je.toString()));
        throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                "Could not parse MKCOL request document: " + je.toString(), je);
    }
    if (reqdoc == null)
    {
        throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                "Failed to parse any valid MKCOL document in request.");
    }
    Element pupdate = reqdoc.getRootElement();
    // NOTE: JDOM's Element.getValue() never returns null (it returns ""
    // for an empty element), so the old "== null" guard could never fire;
    // also reject an empty or all-whitespace node name.
    String newNodeName = pupdate.getValue();
    if (!"mkcol".equals(pupdate.getName()) || newNodeName == null
            || newNodeName.trim().length() == 0)
    {
        log.warn(LogManager.getHeader(this.context, "mkcol",
                "Got bad root element, XML=" + pupdate.toString()));
        throw new DAVStatusException(HttpServletResponse.SC_BAD_REQUEST,
                "Bad Root Element, must be mkcol");
    }
    mkcolInternal(newNodeName);
    Element multistatus = new Element("multistatus", DAV.NS_DAV);
    Element msResponse = new Element("response", DAV.NS_DAV);
    multistatus.addContent(msResponse);
    Element href = new Element("href", DAV.NS_DAV);
    msResponse.addContent(href);
    href.addContent(hrefURL());
    return new Document(multistatus);
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
import javax.servlet.http.HttpServlet;
import org.dspace.core.ConfigurationManager;
/**
* Simple servlet to load in DSpace and log4j configurations. Should always be
* started up before other servlets (use <loadOnStartup>)
*
* This class holds code to be removed in the next version of the DSpace XMLUI,
* it is now managed by a Shared Context Listener inthe dspace-api project.
*
* It is deprecated, rather than removed to maintain backward compatibility for
* local DSpace 1.5.x customized overlays.
*
* TODO: Remove in trunk
*
* @deprecated Use Servlet Context Listener provided in dspace-api (remove in >
* 1.5.x)
* @author Robert Tansley
* @version $Revision: 5845 $
*/
public class LoadDSpaceLNIConfig extends HttpServlet
{
public void init()
{
if(!ConfigurationManager.isConfigured())
{
// Get config parameter
String config = getServletContext().getInitParameter("dspace-config");
// Load in DSpace config
ConfigurationManager.loadConfig(config);
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav;
/**
 * Container for all exceptions thrown by DSpace LNI SOAP methods.
 * <p>
 * Wraps lower-level failures in a {@link java.rmi.RemoteException} so they
 * can be reported across the SOAP boundary to LNI clients.
 */
public class LNIRemoteException extends java.rmi.RemoteException
{
    // RemoteException is Serializable; declare an explicit version so the
    // serialized form stays stable across recompilation.
    private static final long serialVersionUID = 1L;

    /**
     * Instantiates a new LNI remote exception with no detail message.
     */
    protected LNIRemoteException()
    {
        super();
    }

    /**
     * Instantiates a new LNI remote exception.
     *
     * @param message the detail message
     */
    protected LNIRemoteException(String message)
    {
        super(message);
    }

    /**
     * Instantiates a new LNI remote exception.
     *
     * @param message the detail message
     * @param thrown the underlying cause
     */
    protected LNIRemoteException(String message, Throwable thrown)
    {
        super(message, thrown);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.dav.client;
import java.net.MalformedURLException;
import java.net.URL;
/**
* Convenience and Utility functions for DSpace Lightweight Network Interface
* clients. This class should be included in the JAR prepared for Java-based
* clients of the LNI, along with the generated SOAP code.
*
* @author Larry Stone
* @version $Revision: 5845 $
*/
public class LNIClientUtils
{
/** Depth of infinity in SOAP propfind(). */
public static final int INFINITY = -1;
/**
* Make a URL to access LNI WebDAV resource, given a LNI SOAP "endpoint" URL
* and a relative WebDAV URI. Since the LNI SOAP interface does not include
* any means to submit or disseminate DSpace Items, it relies on the WebDAV
* LNI's GET and PUT methods. To make a WebDAV request, the client must
* first construct a WebDAV URL; this function makes that step much more
* convenient.
* <p>
* This is the inverse of makeLNIURI.
* <p>
* Since the actual Web servlet supporting the LNI SOAP interface can also
* respond to WebDAV GET and PUT methods, there is a straightforward way to
* construct the URL:
* <p>
* 1. Remove the last pathname element from the SOAP "endpoint" URL,
* including the '/' (slash) separator character.
* <p>
* 2. Append the "relative URI" returned e.g. by the SOAP
* <code>lookup()</code> function to this URL.
* <p>
* 3. Add the packager specification and other query arguments, e.g. by
* appending "?packager=METS"
*
* @param endpoint full URL of LNI Soap endpoint, as used with SOAP.
* @param davURI relative URI of DAV resource, as returned by lookup(). assumed
* to start with "/". May be null.
* @param packager name of packager to use to retrieve/submit Item, or null.
*
* @return new URL.
*
* @throws MalformedURLException if endpoint is unacceptable URL, or the resulting construct
* is not an acceptable URL.
*/
public static URL makeDAVURL(String endpoint, String davURI, String packager)
throws MalformedURLException
{
/* chop off last path element */
int s = endpoint.lastIndexOf('/');
if (s < 0)
{
throw new MalformedURLException(
"Illegal LNI SOAP endpoint, no path separators (/) found: "
+ endpoint);
}
/* paste up URL.. */
String result = endpoint.substring(0, s);
if (davURI != null)
{
result += davURI;
if (packager != null)
{
result += "?package=" + packager;
}
}
return new URL(result);
}
/**
* Make a URL to access LNI WebDAV resource, given a LNI SOAP "endpoint" URL
* and a relative WebDAV URI. This version takes only two arguments, leaving
* out the packager option.
* <p>
* This is the inverse of makeLNIURI.
*
* @param endpoint full URL of LNI Soap endpoint, as used with SOAP.
* @param davURI relative URI of DAV resource, as returned by lookup(). assumed
* to start with "/". May be null.
*
* @return new URL
*
* @throws MalformedURLException if endpoint is unacceptable URL, or the resulting construct
* is not an acceptable URL.
*/
public static URL makeDAVURL(String endpoint, String davURI)
throws MalformedURLException
{
return makeDAVURL(endpoint, davURI, null);
}
/**
* Translates a WebDAV URL, such as would be returned by the PUT method,
* into a resource URI relative to the DAV root which can be passed to the
* SOAP methods.
* <p>
* This is the inverse of makeDAVURL.
*
* @param endpoint full URL of LNI Soap endpoint, as used with SOAP.
* @param davurl the davurl
*
* @return the string
*
* @throws MalformedURLException the malformed URL exception
*/
public static String makeLNIURI(String endpoint, String davurl)
throws MalformedURLException
{
URL emptyUrl = makeDAVURL(endpoint, null, null);
URL url = new URL(davurl);
return url.getPath().substring(emptyUrl.getPath().length());
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;
import java.util.List;
import java.util.ListIterator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.app.dav.client.LNIClientUtils;
import org.dspace.app.dav.client.LNISoapServlet;
import org.dspace.app.dav.client.LNISoapServletServiceLocator;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.JDOMParseException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
/**
 * Very simple test program for DSpace Lightweight Network Interface (LNI).
 * <p>
 * This will test and demonstrate the LNI's SOAP API and some WebDAV operations.
 * It was written as a simple "smoke" test for the LNI to exercise basic
 * features in a simple way, and to serve as a coding example.
 *
 * Example: (SOAP endpoint is http://mydspace.edu/dspace-lni/DSpaceLNI )
 *
 * /dspace/bin/dsrun LNISmokeTest \ -e
 * http://user:passwd@mydspace.edu/dspace-lni/DSpaceLNI \ -f 123.45/67
 *
 * @author Larry Stone
 * @version $Revision: 5845 $
 */
public class LNISmokeTest
{
    private static final Logger log = Logger.getLogger(LNISmokeTest.class);

    /**
     * The Constant NS_DAV.
     * namespace for DAV XML objects
     */
    private static final Namespace NS_DAV = Namespace.getNamespace("DAV:");

    /** Pretty-printing XML serializer used for diagnostic output. */
    private static XMLOutputter outputPretty = new XMLOutputter(Format
            .getPrettyFormat());

    // XML expressions for propfind calls (below)

    /** Propfind body requesting all properties. */
    private static final String allProp = "<propfind xmlns=\"DAV:\"><allprop /></propfind>";

    /** Propfind body requesting only property names. */
    private static final String nameProp = "<propfind xmlns=\"DAV:\"><propname /></propfind>";

    /** Propfind body requesting displayname and the DSpace type property. */
    private static final String someProp = "<propfind xmlns=\"DAV:\"><prop>"
            + "<displayname/>"
            + "<dspace:type xmlns:dspace=\"http://www.dspace.org/xmlns/dspace\" />"
            + "</prop></propfind>";

    /** Prefix of a propfind body naming one specific property. */
    private static final String specificPropPrefix = "<propfind xmlns=\"DAV:\""
            + " xmlns:dspace=\"http://www.dspace.org/xmlns/dspace\">"
            + " <prop><";

    /** Suffix of a propfind body naming one specific property. */
    private static final String specificPropSuffix = "/></prop></propfind>";

    /**
     * usage. prints usage info to System.out & dies.
     *
     * @param options the options
     * @param status the process exit status
     * @param msg optional message printed before the usage summary, or null
     */
    private static void usage(Options options, int status, String msg)
    {
        HelpFormatter hf = new HelpFormatter();
        if (msg != null)
        {
            System.out.println(msg + "\n");
        }
        hf.printHelp("LNISmokeTest\n" + " -e SOAP-endpoint-URL\n"
                + " [ -s collection-handle -P package -i path ] |\n"
                + " [ -d item-handle -P package -o path ] |\n"
                + " [ -f handle [ -N propertyName ] ] |\n"
                + " [ -r handle [ -N propertyName ] ] |\n"
                + " [ -n handle ] |\n"
                + " [ -p handle -N propertyName -V newvalue ] |\n"
                + " [ -d item-handle -C collection-handle ]\n", options,
                false);
        System.exit(status);
    }

    /**
     * Execute command line. See Usage string for options and arguments.
     *
     * @param argv the command-line arguments
     *
     * @throws Exception the exception
     */
    public static void main(String[] argv) throws Exception
    {
        Options options = new Options();
        OptionGroup func = new OptionGroup();
        func.addOption(new Option("c", "copy", true,
                "copy <Item> to -C <Collection>"));
        func.addOption(new Option("s", "submit", true,
                "submit <collection> -P <packager> -i <file>"));
        func.addOption(new Option("d", "disseminate", true,
                "disseminate <item> -P <packager> -o <file>"));
        func.addOption(new Option("f", "propfind", true,
                "propfind of all properties or -N <propname>"));
        func.addOption(new Option("r", "rpropfind", true,
                "recursive propfind, only collections"));
        func.addOption(new Option("n", "names", true,
                "list all property names on resource"));
        func.addOption(new Option("p", "proppatch", true,
                "set property: <handle> -N <property> -V <newvalue>"));
        func.setRequired(true);
        options.addOptionGroup(func);
        options.addOption("h", "help", false, "show help message");
        options
                .addOption("e", "endpoint", true,
                        "SOAP endpoint URL (REQUIRED)");
        options.addOption("P", "packager", true,
                "Packager to use to import/export a package.");
        options.addOption("C", "collection", true,
                "Target collection of -c copy");
        options
                .addOption("o", "output", true,
                        "file to create for new package");
        options.addOption("i", "input", true,
                "file containing package to submit");
        options.addOption("N", "name", true, "name of property to query/set");
        options.addOption("V", "value", true,
                "new value for property being set");
        try
        {
            CommandLine line = (new PosixParser()).parse(options, argv);
            if (line.hasOption("h"))
            {
                usage(options, 0, null);
            }

            // get SOAP client connection, using the endpoint URL
            String endpoint = line.getOptionValue("e");
            if (endpoint == null)
            {
                usage(options, 2, "Missing the required -e endpoint argument");
            }
            LNISoapServletServiceLocator loc = new LNISoapServletServiceLocator();
            LNISoapServlet lni = loc.getDSpaceLNI(new URL(endpoint));

            // propfind - with optional single-property Name
            if (line.hasOption("f"))
            {
                String pfXml = (line.hasOption("N")) ? specificPropPrefix
                        + line.getOptionValue("N") + specificPropSuffix
                        : allProp;
                doPropfind(lni, line.getOptionValue("f"), pfXml, 0, null);
            }
            // recursive propfind limited to collection, community objects
            else if (line.hasOption("r"))
            {
                doPropfind(lni, line.getOptionValue("r"), someProp, -1,
                        "collection,community");
            }
            else if (line.hasOption("n"))
            {
                doPropfind(lni, line.getOptionValue("n"), nameProp, 0, null);
            }
            else if (line.hasOption("p"))
            {
                if (line.hasOption("N") && line.hasOption("V"))
                {
                    doProppatch(lni, line.getOptionValue("p"), line
                            .getOptionValue("N"), line.getOptionValue("V"));
                }
                else
                {
                    usage(options, 13,
                            "Missing required args: -N <name> -V <value>n");
                }
            }
            // submit a package
            else if (line.hasOption("s"))
            {
                if (line.hasOption("P") && line.hasOption("i"))
                {
                    doPut(lni, line.getOptionValue("s"), line
                            .getOptionValue("P"), line.getOptionValue("i"),
                            endpoint);
                }
                else
                {
                    usage(options, 13,
                            "Missing required args after -s: -P <packager> -i <file>");
                }
            }
            // Disseminate (GET) item as package
            else if (line.hasOption("d"))
            {
                if (line.hasOption("P") && line.hasOption("o"))
                {
                    doGet(lni, line.getOptionValue("d"), line
                            .getOptionValue("P"), line.getOptionValue("o"),
                            endpoint);
                }
                else
                {
                    usage(options, 13,
                            "Missing required args after -d: -P <packager> -o <file>");
                }
            }
            // copy from src to dst
            else if (line.hasOption("c"))
            {
                if (line.hasOption("C"))
                {
                    doCopy(lni, line.getOptionValue("c"), line
                            .getOptionValue("C"));
                }
                else
                {
                    usage(options, 13,
                            "Missing required args after -c: -C <collection>\n");
                }
            }
            else
            {
                usage(options, 14, "Missing command option.\n");
            }
        }
        catch (ParseException pe)
        {
            usage(options, 1, "Error in arguments: " + pe.toString());
        }
        catch (java.rmi.RemoteException de)
        {
            System.out.println("ERROR, got RemoteException, message="
                    + de.getMessage());
            de.printStackTrace();
            die(1, " Exception class=" + de.getClass().getName());
        }
    }

    /**
     * Die. Prints the message to stderr and exits the JVM.
     *
     * @param exit the exit status
     * @param msg the message
     */
    private static void die(int exit, String msg)
    {
        System.err.println(msg);
        System.exit(exit);
    }

    // Like LNI lookup(), but dies on error so we don't have to check result.
    // Also interprets the special format "handle bitstream" (with space
    // separating the words) as the ``handle'' of a bitstream -- a combination
    // of Item handle and bitstream sequence-ID.
    // On success it returns a DAV resource URI, relative to the
    // root of the DAV resource hierarchy, e.g. "/dso_12345.678$123"
    /**
     * Do lookup.
     *
     * @param lni the lni
     * @param handle the handle
     * @param bitstream the bitstream
     *
     * @return the relative DAV resource URI
     *
     * @throws RemoteException the remote exception
     */
    private static String doLookup(LNISoapServlet lni, String handle,
            String bitstream) throws java.rmi.RemoteException
    {
        // hack: if "handle" starts with '/' and there is no bitstream
        // assume it is URI passed in and just return that.
        if (handle.startsWith("/") && bitstream == null)
        {
            return handle;
        }

        // hack: parse "handle bitstream" syntax of handle.
        if (handle.indexOf(' ') >= 0 && bitstream == null)
        {
            String[] h = handle.split("\\s+");
            handle = h[0];
            bitstream = h[1];
        }
        String uri = lni.lookup(handle, bitstream);
        if (uri == null)
        {
            die(2, "ERROR, got null from lookup(\"" + handle + "\")");
        }
        System.out.println("DEBUG: lookup returns: \"" + uri + "\"");
        return uri;
    }

    /**
     * Do propfind. Issues the propfind and pretty-prints each returned
     * resource, propstat status, and property value.
     *
     * @param lni the lni
     * @param handle the handle
     * @param pf the propfind request body
     * @param depth the depth
     * @param types the types
     *
     * @throws RemoteException the remote exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    private static void doPropfind(LNISoapServlet lni, String handle,
            String pf, int depth, String types)
            throws java.rmi.RemoteException, java.io.IOException
    {
        String uri = doLookup(lni, handle, null);
        String result = lni.propfind(uri, pf, depth, types);
        try
        {
            SAXBuilder builder = new SAXBuilder();
            Document msDoc = builder.build(new java.io.StringReader(result));
            Element ms = msDoc.getRootElement();
            ListIterator ri = ms.getChildren("response", NS_DAV).listIterator();
            while (ri.hasNext())
            {
                Element resp = (Element) ri.next();
                String href = resp.getChildText("href", NS_DAV);
                System.out.println("Resource = " + href);
                ListIterator pi = resp.getChildren("propstat", NS_DAV)
                        .listIterator();
                while (pi.hasNext())
                {
                    Element ps = (Element) pi.next();
                    String status = ps.getChildText("status", NS_DAV);
                    if (status.indexOf("200") >= 0)
                    {
                        System.out
                                .println(" === PROPERTIES Successfully returned:");
                    }
                    else
                    {
                        System.out.println(" === PROPERTIES with Status="
                                + status);
                    }

                    // print properties and values
                    Element prop = ps.getChild("prop", NS_DAV);
                    ListIterator ppi = prop.getChildren().listIterator();
                    while (ppi.hasNext())
                    {
                        Element e = (Element) ppi.next();
                        String value = e.getTextTrim();
                        if (value.equals(""))
                        {
                            List kids = e.getChildren();
                            if (kids.size() > 0)
                            {
                                value = outputPretty.outputString(kids);
                            }
                            if (value.indexOf('\n') >= 0)
                            {
                                value = "\n" + value;
                            }
                        }
                        else
                        {
                            value = "\"" + value + "\"";
                        }
                        String equals = value.equals("") ? "" : " = ";
                        System.out.println("    " + e.getQualifiedName()
                                + equals + value);
                    }
                }
            }
        }
        catch (JDOMParseException je)
        {
            je.printStackTrace();
            die(3, "ERROR: " + je.toString());
        }
        catch (JDOMException je)
        {
            je.printStackTrace();
            die(4, "ERROR: " + je.toString());
        }
    }

    /**
     * Do proppatch. Builds a DAV propertyupdate document (set when val is
     * non-empty, remove otherwise) and sends it.
     *
     * @param lni the lni
     * @param handle the handle
     * @param prop the property name
     * @param val the new value; empty string means remove the property
     *
     * @throws RemoteException the remote exception
     */
    private static void doProppatch(LNISoapServlet lni, String handle,
            String prop, String val) throws java.rmi.RemoteException
    {
        String uri = doLookup(lni, handle, null);
        String action = (val.length() > 0) ? "set" : "remove";
        String pupdate = "<propertyupdate xmlns=\"DAV:\" xmlns:dspace=\"http://www.dspace.org/xmlns/dspace\">"
                + "<"
                + action
                + "><prop><"
                + prop
                + ">"
                + val
                + "</"
                + prop
                + "></prop></" + action + "></propertyupdate>";
        System.err.println("DEBUG: sending: " + pupdate);
        String result = lni.proppatch(uri, pupdate);
        System.err.println("RESULT: " + result);
    }

    // "copy" src item into dst collection, both are handles.
    /**
     * Do copy.
     *
     * @param lni the lni
     * @param src the source handle
     * @param dst the destination handle
     *
     * @throws RemoteException the remote exception
     */
    private static void doCopy(LNISoapServlet lni, String src, String dst)
            throws java.rmi.RemoteException
    {
        String srcUri = doLookup(lni, src, null);
        String dstUri = doLookup(lni, dst, null);
        int status = lni.copy(srcUri, dstUri, -1, false, true);
        System.err.println("Copy status = " + String.valueOf(status));
    }

    /**
     * Implement WebDAV PUT http request.
     *
     * This might be simpler with a real HTTP client library, but
     * java.net.HttpURLConnection is part of the standard SDK and it
     * demonstrates the concepts.
     *
     * @param lni the lni
     * @param collHandle the coll handle
     * @param packager the packager
     * @param source the source
     * @param endpoint the endpoint
     *
     * @throws RemoteException the remote exception
     * @throws ProtocolException the protocol exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws FileNotFoundException the file not found exception
     */
    private static void doPut(LNISoapServlet lni, String collHandle,
            String packager, String source, String endpoint)
            throws java.rmi.RemoteException, ProtocolException, IOException,
            FileNotFoundException
    {
        // assemble URL from chopped endpoint-URL and relative URI
        String collURI = doLookup(lni, collHandle, null);
        URL url = LNIClientUtils.makeDAVURL(endpoint, collURI, packager);
        System.err.println("DEBUG: PUT file=" + source + " to URL="
                + url.toString());

        // connect with PUT method, then copy file over.
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        fixBasicAuth(url, conn);
        conn.connect();
        InputStream in = null;
        OutputStream out = null;
        try {
            in = new FileInputStream(source);
            out = conn.getOutputStream();
            copyStream(in, out);
        } finally {
            if (in != null)
            {
                try
                {
                    in.close();
                }
                catch (IOException e)
                {
                    log.error("Unable to close input stream", e);
                }
            }
            if (out != null)
            {
                try
                {
                    out.close();
                }
                catch (IOException e)
                {
                    log.error("Unable to close output stream", e);
                }
            }
        }
        int status = conn.getResponseCode();
        if (status < 200 || status >= 300)
        {
            die(status, "HTTP error, status=" + String.valueOf(status)
                    + ", message=" + conn.getResponseMessage());
        }

        // diagnostics, and get resulting new item's location if avail.
        System.err.println("DEBUG: sent " + source);
        System.err.println("RESULT: Status="
                + String.valueOf(conn.getResponseCode()) + " "
                + conn.getResponseMessage());
        String loc = conn.getHeaderField("Location");
        System.err.println("RESULT: Location="
                + ((loc == null) ? "NULL!" : loc));
    }

    /**
     * Get an item with WebDAV GET http request.
     *
     * @param lni the lni
     * @param itemHandle the item handle
     * @param packager the packager
     * @param output the output
     * @param endpoint the endpoint
     *
     * @throws RemoteException the remote exception
     * @throws ProtocolException the protocol exception
     * @throws IOException Signals that an I/O exception has occurred.
     * @throws FileNotFoundException the file not found exception
     */
    private static void doGet(LNISoapServlet lni, String itemHandle,
            String packager, String output, String endpoint)
            throws java.rmi.RemoteException, ProtocolException, IOException,
            FileNotFoundException
    {
        // assemble URL from chopped endpoint-URL and relative URI
        String itemURI = doLookup(lni, itemHandle, null);
        URL url = LNIClientUtils.makeDAVURL(endpoint, itemURI, packager);
        System.err.println("DEBUG: GET from URL: " + url.toString());

        // connect with GET method, then copy file over.
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setDoInput(true);
        fixBasicAuth(url, conn);
        conn.connect();
        int status = conn.getResponseCode();
        if (status < 200 || status >= 300)
        {
            die(status, "HTTP error, status=" + String.valueOf(status)
                    + ", message=" + conn.getResponseMessage());
        }
        // FIX: close the streams in a finally block (as doPut does) so they
        // are not leaked when copyStream throws mid-transfer.
        InputStream in = null;
        OutputStream out = null;
        try {
            in = conn.getInputStream();
            out = new FileOutputStream(output);
            copyStream(in, out);
        } finally {
            if (in != null)
            {
                try
                {
                    in.close();
                }
                catch (IOException e)
                {
                    log.error("Unable to close input stream", e);
                }
            }
            if (out != null)
            {
                try
                {
                    out.close();
                }
                catch (IOException e)
                {
                    log.error("Unable to close output stream", e);
                }
            }
        }
        System.err.println("DEBUG: Created local file " + output);
        System.err.println("RESULT: Status="
                + String.valueOf(conn.getResponseCode()) + " "
                + conn.getResponseMessage());
    }

    //
    /**
     * Fix basic auth.
     *
     * Set up HTTP basic authentication based on user/password in URL.
     * The HttpURLConnection class should do this itself!
     *
     * @param url the url
     * @param conn the conn
     */
    private static void fixBasicAuth(URL url, HttpURLConnection conn)
    {
        String userinfo = url.getUserInfo();
        if (userinfo != null)
        {
            String cui = new String(Base64.encodeBase64(userinfo.getBytes()));
            conn.addRequestProperty("Authorization", "Basic " + cui);
            System.err.println("DEBUG: Sending Basic auth=" + cui);
        }
    }

    /**
     * Copy stream. copy from one stream to another
     *
     * @param input the input
     * @param output the output
     *
     * @throws IOException Signals that an I/O exception has occurred.
     */
    private static void copyStream(final InputStream input,
            final OutputStream output) throws IOException
    {
        final int BUFFER_SIZE = 1024 * 4;
        final byte[] buffer = new byte[BUFFER_SIZE];
        while (true)
        {
            final int count = input.read(buffer, 0, BUFFER_SIZE);
            if (-1 == count)
            {
                break;
            }
            output.write(buffer, 0, count);
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import java.util.List;
/**
 * Search interface that discovery uses to search in solr.
 *
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 */
public interface SearchService {

    /**
     * Execute a raw Solr query against the discovery index.
     *
     * @param query the prepared Solr query
     * @return the Solr response for the query
     * @throws SearchServiceException if the search could not be performed
     */
    QueryResponse search(SolrQuery query) throws SearchServiceException;

    /**
     * Search for DSpace objects matching a query string.
     * <p>
     * NOTE(review): offset/max presumably paginate the result window and
     * filterquery entries are additional Solr filter queries — confirm
     * against the implementation.
     *
     * @param context the DSpace context
     * @param query the query string
     * @param offset index of the first result to return
     * @param max maximum number of results to return
     * @param filterquery zero or more additional filter queries
     * @return the matching DSpace objects
     */
    List<DSpaceObject> search(Context context, String query, int offset, int max, String... filterquery);

    /**
     * Search for DSpace objects matching a query string, with an explicit
     * sort field and direction.
     *
     * @param context the DSpace context
     * @param query the query string
     * @param orderfield field to sort results on
     * @param ascending true for ascending sort order, false for descending
     * @param offset index of the first result to return
     * @param max maximum number of results to return
     * @param filterquery zero or more additional filter queries
     * @return the matching DSpace objects
     */
    List<DSpaceObject> search(Context context, String query, String orderfield, boolean ascending, int offset, int max, String... filterquery);
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.log4j.Logger;
import org.apache.commons.cli.*;
import org.dspace.core.Context;
import org.dspace.utils.DSpace;
import java.io.IOException;
import java.sql.SQLException;
/**
 * Class used to reindex dspace communities/collections/items into discovery
 *
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 */
public class IndexClient {

    private static final Logger log = Logger.getLogger(IndexClient.class);

    /**
     * When invoked as a command-line tool, creates, updates, or removes
     * content from the whole discovery index.
     *
     * @param args the command-line arguments; run with -h for a summary, or
     *            pass nothing to update/clean the existing index
     * @throws java.io.IOException if index I/O fails
     * @throws java.sql.SQLException if a database read fails
     * @throws SearchServiceException if the search backend fails
     */
    public static void main(String[] args) throws SQLException, IOException, SearchServiceException {
        Context context = new Context();
        context.setIgnoreAuthorization(true);
        String usage = "org.dspace.discovery.IndexClient [-cbhf[r <item handle>]] or nothing to update/clean an existing index.";
        Options options = new Options();
        HelpFormatter formatter = new HelpFormatter();
        CommandLine line = null;
        options
                .addOption(OptionBuilder
                        .withArgName("item handle")
                        .hasArg(true)
                        .withDescription(
                                "remove an Item, Collection or Community from index based on its handle")
                        .create("r"));
        options
                .addOption(OptionBuilder
                        .isRequired(false)
                        .withDescription(
                                "clean existing index removing any documents that no longer exist in the db")
                        .create("c"));
        options.addOption(OptionBuilder.isRequired(false).withDescription(
                "(re)build index, wiping out current one if it exists").create(
                "b"));
        options
                .addOption(OptionBuilder
                        .isRequired(false)
                        .withDescription(
                                "if updating existing index, force each handle to be reindexed even if uptodate")
                        .create("f"));
        options.addOption(OptionBuilder.isRequired(false).withDescription(
                "print this help message").create("h"));
        options.addOption(OptionBuilder.isRequired(false).withDescription(
                "optimize search solr core").create("o"));
        try {
            line = new PosixParser().parse(options, args);
        } catch (Exception e) {
            // automatically generate the help statement
            formatter.printHelp(usage, e.getMessage(), options, "");
            System.exit(1);
        }
        if (line.hasOption("h")) {
            // asking for help is not an error: print the summary, exit 0
            formatter.printHelp(usage, options);
            System.exit(0);
        }

        /** Acquire from dspace-services in future */
        /**
         * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
         */
        DSpace dspace = new DSpace();
        IndexingService indexer = dspace.getServiceManager().getServiceByName(IndexingService.class.getName(),IndexingService.class);
        if (line.hasOption("r")) {
            log.info("Removing " + line.getOptionValue("r") + " from Index");
            indexer.unIndexContent(context, line.getOptionValue("r"));
        } else if (line.hasOption("c")) {
            log.info("Cleaning Index");
            indexer.cleanIndex(line.hasOption("f"));
        } else if (line.hasOption("b")) {
            log.info("(Re)building index from scratch.");
            indexer.createIndex(context);
        } else if (line.hasOption("o")) {
            log.info("Optimizing search core.");
            indexer.optimize();
        } else {
            log.info("Updating and Cleaning Index");
            indexer.cleanIndex(line.hasOption("f"));
            indexer.updateIndex(context, line.hasOption("f"));
        }
        log.info("Done with indexing");
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.commons.collections.ExtendedProperties;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.dspace.content.DSpaceObject;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.*;
/**
* Util methods used by discovery
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class SearchUtils {

    private static final Logger log = Logger.getLogger(SearchUtils.class);

    /** Combined configuration: dspace.cfg overlaid with dspace-solr-search.cfg. */
    private static ExtendedProperties props = null;

    /** Facet configurations per scope, keyed by the suffix of "solr.facets.&lt;scope&gt;". */
    private static Map<String, SolrFacetConfig[]> solrFacets = new HashMap<String, SolrFacetConfig[]>();

    /** Every configured facet field name (raw, before _filter/.year mangling). */
    private static List<String> allFacets = new ArrayList<String>();

    private static List<String> searchFilters = new ArrayList<String>();

    private static List<String> sortFields = new ArrayList<String>();

    /** Lazily populated by getDateIndexableFields(). */
    private static List<String> dateIndexableFields = new ArrayList<String>();

    /** Separator between the lowercased and original-case value in *_filter fields. */
    public static final String FILTER_SEPARATOR = "|||";

    static {
        log.debug("loading configuration");
        // Start from the main DSpace configuration...
        props = ExtendedProperties
                .convertProperties(ConfigurationManager.getProperties());
        try {
            // ...then overlay the discovery configuration, preferring the file
            // under [dspace]/config and falling back to the classpath defaults.
            File config = new File(props.getProperty("dspace.dir") + "/config/dspace-solr-search.cfg");
            if (config.exists()) {
                props.combine(new ExtendedProperties(config.getAbsolutePath()));
            } else {
                InputStream is = null;
                try {
                    is = SolrServiceImpl.class.getResourceAsStream("dspace-solr-search.cfg");
                    ExtendedProperties defaults = new ExtendedProperties();
                    defaults.load(is);
                    props.combine(defaults);
                } finally {
                    if (is != null) {
                        is.close();
                    }
                }
            }
            log.debug("combined configuration");
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }
        try {
            // Parse every "solr.facets.<scope>" key into SolrFacetConfig entries.
            Iterator allPropsIt = props.getKeys();
            while (allPropsIt.hasNext()) {
                String propName = String.valueOf(allPropsIt.next());
                if (propName.startsWith("solr.facets.")) {
                    String[] propVals = props.getStringArray(propName);
                    log.info("loading scope, " + propName);
                    allFacets.addAll(Arrays.asList(propVals));
                    List<SolrFacetConfig> facets = new ArrayList<SolrFacetConfig>();
                    for (String propVal : propVals) {
                        if (propVal.endsWith("_dt") || propVal.endsWith(".year")) {
                            // Date facets are indexed under "<field>.year"
                            facets.add(new SolrFacetConfig(propVal.replace("_dt", ".year"), true));
                            log.info("value, " + propVal);
                        } else {
                            // Regular facets are indexed under "<field>_filter"
                            facets.add(new SolrFacetConfig(propVal + "_filter", false));
                            log.info("value, " + propVal);
                        }
                    }
                    //All the values are split into date & facetfields, so now store em
                    solrFacets.put(propName.replace("solr.facets.", ""), facets.toArray(new SolrFacetConfig[facets.size()]));
                    log.info("solrFacets size: " + solrFacets.size());
                }
            }
            String[] filterFieldsProps = SearchUtils.getConfig().getStringArray("solr.search.filters");
            if (filterFieldsProps != null) {
                searchFilters.addAll(Arrays.asList(filterFieldsProps));
            }
            String[] sortFieldProps = SearchUtils.getConfig().getStringArray("solr.search.sort");
            if (sortFieldProps != null) {
                sortFields.addAll(Arrays.asList(sortFieldProps));
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    /** @return the combined discovery configuration. */
    public static ExtendedProperties getConfig() {
        return props;
    }

    /** @return the facet configurations for the given scope, or null if none. */
    public static SolrFacetConfig[] getFacetsForType(String type) {
        return solrFacets.get(type);
    }

    public static List<String> getAllFacets() {
        return allFacets;
    }

    public static List<String> getSearchFilters() {
        return searchFilters;
    }

    public static List<String> getSortFields() {
        return sortFields;
    }

    /**
     * Resolve the DSpaceObject a Solr document describes, preferring the
     * explicit type/id pair and falling back to the handle.
     */
    public static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {
        Integer type = (Integer) doc.getFirstValue("search.resourcetype");
        Integer id = (Integer) doc.getFirstValue("search.resourceid");
        String handle = (String) doc.getFirstValue("handle");
        if (type != null && id != null) {
            return DSpaceObject.find(context, type, id);
        } else if (handle != null) {
            return HandleManager.resolveToObject(context, handle);
        }
        return null;
    }

    /**
     * Build the default filter queries: the global "solr.default.filterQuery"
     * values plus, when a scope is given, "solr.<scope>.default.filterQuery".
     */
    public static String[] getDefaultFilters(String scope) {
        List<String> result = new ArrayList<String>();
        // Check (and add) any default filters which may be configured
        String defaultFilters = getConfig().getString("solr.default.filterQuery");
        if (defaultFilters != null)
        {
            result.addAll(Arrays.asList(defaultFilters.split(",")));
        }
        if (scope != null) {
            String scopeDefaultFilters = SearchUtils.getConfig().getString("solr." + scope + ".default.filterQuery");
            if (scopeDefaultFilters != null)
            {
                result.addAll(Arrays.asList(scopeDefaultFilters.split(",")));
            }
        }
        return result.toArray(new String[result.size()]);
    }

    /**
     * @return the configured date-indexable metadata fields.
     */
    public static List<String> getDateIndexableFields() {
        // FIX: populate the static list only once.  The previous version
        // re-appended the configured fields on every call, so the shared list
        // accumulated duplicates each time an item was indexed.
        if (dateIndexableFields.isEmpty()) {
            String[] dateFieldsProps = SearchUtils.getConfig().getStringArray("solr.index.type.date");
            if (dateFieldsProps != null) {
                for (String dateField : dateFieldsProps) {
                    dateIndexableFields.add(dateField.trim());
                }
            }
        }
        return dateIndexableFields;
    }

    /**
     * Extract the display (original-case) half of a filter query value of the
     * form "&lt;lowercase&gt;SEPARATOR&lt;original-case&gt;".
     */
    public static String getFilterQueryDisplay(String filterQuery){
        String separator = SearchUtils.getConfig().getString("solr.facets.split.char", SearchUtils.FILTER_SEPARATOR);
        //Escape any regex chars
        separator = java.util.regex.Pattern.quote(separator);
        String[] fqParts = filterQuery.split(separator);
        // Use a StringBuilder instead of repeated String concatenation.
        StringBuilder result = new StringBuilder();
        int start = fqParts.length / 2;
        for(int i = start; i < fqParts.length; i++){
            result.append(fqParts[i]);
        }
        return result.toString();
    }

    /** Immutable description of one configured facet field. */
    public static class SolrFacetConfig {

        private final String facetField;
        private final boolean isDate;

        public SolrFacetConfig(String facetField, boolean date) {
            this.facetField = facetField;
            this.isDate = date;
        }

        public String getFacetField() {
            return facetField;
        }

        public boolean isDate() {
            return isDate;
        }
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.log4j.Logger;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
import org.dspace.utils.DSpace;
import java.util.HashSet;
import java.util.Set;
/**
* Class for updating search indices in discovery from content events.
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class IndexEventConsumer implements Consumer {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(IndexEventConsumer.class);

    // collect Items, Collections, Communities that need indexing
    private Set<DSpaceObject> objectsToUpdate = null;

    // handles to delete since IDs are not useful by now.
    private Set<String> handlesToDelete = null;

    DSpace dspace = new DSpace();

    IndexingService indexer = dspace.getServiceManager().getServiceByName(IndexingService.class.getName(),IndexingService.class);

    public void initialize() throws Exception {
    }

    /**
     * Consume a content event -- just build the sets of objects to add (new) to
     * the index, update, and delete.
     *
     * @param ctx   DSpace context
     * @param event Content event
     */
    public void consume(Context ctx, Event event) throws Exception {

        if (objectsToUpdate == null) {
            objectsToUpdate = new HashSet<DSpaceObject>();
            handlesToDelete = new HashSet<String>();
        }

        int st = event.getSubjectType();
        if (!(st == Constants.ITEM || st == Constants.BUNDLE
                || st == Constants.COLLECTION || st == Constants.COMMUNITY)) {
            log.warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: "
                    + event.toString());
            return;
        }

        DSpaceObject subject = event.getSubject(ctx);
        DSpaceObject object = event.getObject(ctx);

        // If event subject is a Bundle and event was Add or Remove,
        // transform the event to be a Modify on the owning Item.
        // It could be a new bitstream in the TEXT bundle which
        // would change the index.
        int et = event.getEventType();
        if (st == Constants.BUNDLE) {
            if ((et == Event.ADD || et == Event.REMOVE) && subject != null
                    && ((Bundle) subject).getName().equals("TEXT")) {
                // FIX: guard against an orphaned bundle.  The previous code
                // dereferenced getItems()[0] unconditionally, which throws
                // ArrayIndexOutOfBoundsException for a bundle with no owner.
                DSpaceObject[] owningItems = ((Bundle) subject).getItems();
                if (owningItems.length == 0) {
                    log.warn("Bundle without owning Item, skipping event: " + event.toString());
                    return;
                }
                st = Constants.ITEM;
                et = Event.MODIFY;
                subject = owningItems[0];
                if (log.isDebugEnabled())
                {
                    log.debug("Transforming Bundle event into MODIFY of Item "
                            + subject.getHandle());
                }
            } else
            {
                return;
            }
        }

        switch (et) {
        case Event.CREATE:
        case Event.MODIFY:
        case Event.MODIFY_METADATA:
            if (subject == null)
            {
                log.warn(event.getEventTypeAsString() + " event, could not get object for "
                        + event.getSubjectTypeAsString() + " id="
                        + String.valueOf(event.getSubjectID())
                        + ", perhaps it has been deleted.");
            }
            else {
                log.debug("consume() adding event to update queue: " + event.toString());
                objectsToUpdate.add(subject);
            }
            break;

        case Event.REMOVE:
        case Event.ADD:
            if (object == null)
            {
                log.warn(event.getEventTypeAsString() + " event, could not get object for "
                        + event.getObjectTypeAsString() + " id="
                        + String.valueOf(event.getObjectID())
                        + ", perhaps it has been deleted.");
            }
            else {
                log.debug("consume() adding event to update queue: " + event.toString());
                objectsToUpdate.add(object);
            }
            break;

        case Event.DELETE:
            // Only the handle (stored in the event detail) survives deletion.
            String detail = event.getDetail();
            if (detail == null)
            {
                log.warn("got null detail on DELETE event, skipping it.");
            }
            else {
                log.debug("consume() adding event to delete queue: " + event.toString());
                handlesToDelete.add(detail);
            }
            break;

        default:
            log.warn("IndexConsumer should not have been given a event of type="
                    + event.getEventTypeAsString()
                    + " on subject="
                    + event.getSubjectTypeAsString());
            break;
        }
    }

    /**
     * Process sets of objects to add, update, and delete in index. Correct for
     * interactions between the sets -- e.g. objects which were deleted do not
     * need to be added or updated, new objects don't also need an update, etc.
     */
    public void end(Context ctx) throws Exception {

        if (objectsToUpdate != null && handlesToDelete != null) {

            // update the changed objects, skipping any also queued for deletion
            for (DSpaceObject iu : objectsToUpdate) {
                /* we let all types through here and
                 * allow the search DSIndexer to make
                 * decisions on indexing and/or removal
                 */
                String hdl = iu.getHandle();
                if (hdl != null && !handlesToDelete.contains(hdl)) {
                    try {
                        indexer.indexContent(ctx, iu, true);
                        log.debug("Indexed "
                                + Constants.typeText[iu.getType()]
                                + ", id=" + String.valueOf(iu.getID())
                                + ", handle=" + hdl);
                    }
                    catch (Exception e) {
                        log.error("Failed while indexing object: ", e);
                    }
                }
            }

            for (String hdl : handlesToDelete) {
                try {
                    indexer.unIndexContent(ctx, hdl, true);
                    if (log.isDebugEnabled())
                    {
                        log.debug("UN-Indexed Item, handle=" + hdl);
                    }
                }
                catch (Exception e) {
                    log.error("Failed while UN-indexing object: " + hdl, e);
                }
            }
        }

        // "free" the resources
        objectsToUpdate = null;
        handlesToDelete = null;
    }

    public void finish(Context ctx) throws Exception {
        // No-op
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.commons.collections.ExtendedProperties;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.handle.HandleManager;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.stereotype.Service;
import java.io.*;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* SolrIndexer contains the methods that index Items and their metadata,
* collections, communities, etc. It is meant to either be invoked from the
* command line (see dspace/bin/index-all) or via the indexContent() methods
* within DSpace.
* <p/>
* The Administrator can choose to run SolrIndexer in a cron that repeats
* regularly, a failed attempt to index from the UI will be "caught" up on in
* that cron.
*
* The SolrServiceImple is registered as a Service in the ServiceManager via
* A spring configuration file located under
* classpath://spring/spring-dspace-applicationContext.xml
*
* Its configuration is Autowired by the ApplicationContext
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
@Service
public class SolrServiceImpl implements SearchService, IndexingService {
private static final Logger log = Logger.getLogger(SolrServiceImpl.class);

// Name of the Solr field recording when a document was last (re)indexed.
private static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";

/**
 * Non-static CommonsHttpSolrServer for processing indexing events;
 * lazily initialised by getSolr().
 */
private CommonsHttpSolrServer solr = null;

/**
 * Non-static singleton instance of the DSpace ConfigurationService,
 * injected by Spring via the setter below.
 */
private ConfigurationService configurationService;

/**
 * Spring-injected setter for the ConfigurationService (required).
 *
 * @param configurationService the DSpace configuration service
 */
@Autowired
@Required
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
/**
 * Lazily initialise and return the SolrServer used for indexing and search.
 * The URL is taken from the ConfigurationService ("solr.search.server"),
 * falling back to the deprecated discovery configuration and finally to
 * http://localhost:8080/solr/search.
 *
 * FIX: the method is now synchronized (the lazy init was racy), and the
 * server is only cached after the sanity-check query succeeds -- previously
 * a server whose ping failed was still cached in the field.
 *
 * @return the shared CommonsHttpSolrServer instance
 * @throws java.net.MalformedURLException if the configured URL is invalid
 * @throws org.apache.solr.client.solrj.SolrServerException if the sanity query fails
 */
protected synchronized CommonsHttpSolrServer getSolr() throws java.net.MalformedURLException, org.apache.solr.client.solrj.SolrServerException
{
    if (solr == null)
    {
        String solrService = configurationService.getProperty("solr.search.server");

        /*
         * @deprecated need to remove this in favor of looking up above.
         */
        if (solrService == null)
        {
            solrService = SearchUtils.getConfig().getString("solr.search.server","http://localhost:8080/solr/search");
        }

        log.debug("Solr URL: " + solrService);
        CommonsHttpSolrServer server = new CommonsHttpSolrServer(solrService);
        server.setBaseURL(solrService);

        // Sanity-check the connection with a cheap query before publishing it.
        SolrQuery solrQuery = new SolrQuery()
                .setQuery("search.resourcetype:2 AND search.resourceid:1");
        server.query(solrQuery);

        solr = server;
    }
    return solr;
}
/**
 * Index (or re-index) a DSpace object if it is stale, delegating to the
 * three-argument overload with force = false.
 *
 * @param context Users Context
 * @param dso     DSpace Object (Item, Collection or Community)
 * @throws SQLException passed through from the underlying indexing call
 */
public void indexContent(Context context, DSpaceObject dso)
throws SQLException {
indexContent(context, dso, false);
}
/**
 * If the handle for the "dso" already exists in the index, and the "dso"
 * has a lastModified timestamp that is newer than the document in the index
 * then it is updated, otherwise a new document is added.
 *
 * @param context Users Context
 * @param dso     DSpace Object (Item, Collection or Community)
 * @param force   Force update even if not stale.
 * @throws SQLException if resolving the object's handle fails
 */
public void indexContent(Context context, DSpaceObject dso,
        boolean force) throws SQLException {

    String handle = dso.getHandle();

    if (handle == null) {
        handle = HandleManager.findHandle(context, dso);
    }

    try {
        switch (dso.getType()) {
        case Constants.ITEM:
            Item item = (Item) dso;
            if (item.isArchived() && !item.isWithdrawn()) {
                /*
                 * The item is live in the repository: (re)index it when
                 * forced, or when the indexed copy is stale.  Checking
                 * "force" first avoids a needless Solr query (the old code
                 * always called requiresIndexing, and also re-cast dso
                 * redundantly instead of using the "item" variable).
                 */
                if (force || requiresIndexing(handle, item.getLastModified())) {
                    unIndexContent(context, handle);
                    buildDocument(context, item);
                }
            } else {
                /*
                 * Make sure the item is not in the index if it is not in
                 * archive. TODO: Someday DSIndexer should block withdrawn
                 * content on search/retrieval and allow admins the ability
                 * to still search for withdrawn Items.
                 */
                unIndexContent(context, handle);
                log.info("Removed Item: " + handle + " from Index");
            }
            break;

        case Constants.COLLECTION:
            buildDocument(context, (Collection) dso);
            log.info("Wrote Collection: " + handle + " to Index");
            break;

        case Constants.COMMUNITY:
            buildDocument(context, (Community) dso);
            log.info("Wrote Community: " + handle + " to Index");
            break;

        default:
            log.error("Only Items, Collections and Communities can be Indexed");
        }
    } catch (Exception e) {
        // NOTE(review): every indexing failure is swallowed and logged so a
        // broken Solr cannot break content operations -- confirm this is the
        // intended contract before tightening it.
        log.error(e.getMessage(), e);
    }
}
/**
 * unIndex removes an Item, Collection, or Community; only works if the
 * DSpaceObject has a handle (uses the handle for its unique ID).
 *
 * @param context the DSpace context
 * @param dso DSpace Object, can be Community, Item, or Collection
 * @throws SQLException
 * @throws IOException
 */
public void unIndexContent(Context context, DSpaceObject dso)
throws SQLException, IOException {
try {
unIndexContent(context, dso.getHandle());
} catch (Exception exception) {
// Failures are logged and mailed to the site admin rather than
// propagated to the caller.
log.error(exception.getMessage(), exception);
emailException(exception);
}
}
/**
 * Unindex a document in the Solr index without an immediate commit.
 *
 * @param context the dspace context
 * @param handle the handle of the object to be deleted
 * @throws IOException
 * @throws SQLException
 */
public void unIndexContent(Context context, String handle) throws IOException, SQLException {
unIndexContent(context, handle, false);
}
/**
 * Unindex a document in the Solr index (the original javadoc said "Lucene",
 * but this implementation deletes from Solr by handle).
 *
 * @param context the dspace context
 * @param handle the handle of the object to be deleted
 * @param commit when true, commit the deletion immediately
 * @throws SQLException
 * @throws IOException
 */
public void unIndexContent(Context context, String handle, boolean commit)
throws SQLException, IOException {
try {
getSolr().deleteById(handle);
if(commit)
{
getSolr().commit();
}
} catch (SolrServerException e) {
// NOTE(review): Solr failures are logged and swallowed, so a failed
// delete is silent to callers -- confirm this is intentional.
log.error(e.getMessage(), e);
}
}
/**
 * reIndexContent re-indexes a DSpace object.  The delegated indexContent
 * call itself removes any stale document before re-adding it, so no
 * explicit removal happens here.
 *
 * @param context context object
 * @param dso object to re-index
 */
public void reIndexContent(Context context, DSpaceObject dso)
throws SQLException, IOException {
try {
indexContent(context, dso);
} catch (Exception exception) {
// Failures are logged and mailed to the site admin, not propagated.
log.error(exception.getMessage(), exception);
emailException(exception);
}
}
/**
 * Create the full index from scratch by force-reindexing all content
 * (stale documents are replaced as each object is indexed).
 *
 * @param c context to use
 */
public void createIndex(Context c) throws SQLException, IOException {
/* Reindex all content preemptively. */
updateIndex(c, true);
}
/**
 * Iterates over all Items, Collections and Communities and updates them in
 * the index when stale (force = false). Uses decaching to control memory
 * footprint.
 *
 * @param context the DSpace context
 */
public void updateIndex(Context context) {
updateIndex(context, false);
}
/**
 * Iterates over all Items, Collections and Communities and updates them in
 * the index. Uses decaching to control memory footprint. Uses indexContent
 * and its staleness check to decide whether each object needs rewriting.
 * <p/>
 * At first it may appear counterintuitive to have an IndexWriter/Reader
 * opened and closed on each DSO. But this allows the UI processes to step
 * in and attain a lock and write to the index even if other processes/jvms
 * are running a reindex.
 *
 * @param context the DSpace context
 * @param force   reindex even when the indexed copy is not stale
 */
public void updateIndex(Context context, boolean force) {
    try {
        // Items first; close the iterator even if indexing fails part-way.
        ItemIterator items = null;
        try {
            for (items = Item.findAll(context); items.hasNext();) {
                // FIX: ItemIterator.next() already returns Item; the old
                // (Item) cast was redundant.
                Item item = items.next();
                indexContent(context, item, force);
                item.decache(); // keep the context cache from growing unbounded
            }
        } finally {
            if (items != null)
            {
                items.close();
            }
        }

        Collection[] collections = Collection.findAll(context);
        for (int i = 0; i < collections.length; i++) {
            indexContent(context, collections[i], force);
            context.removeCached(collections[i], collections[i].getID());
        }

        Community[] communities = Community.findAll(context);
        for (int i = 0; i < communities.length; i++) {
            indexContent(context, communities[i], force);
            context.removeCached(communities[i], communities[i].getID());
        }

        // Make everything above visible to searchers in a single commit.
        getSolr().commit();
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
}
/**
 * Iterates over documents in the Solr index and verifies each handle still
 * resolves to a live object in the database; documents whose handle no
 * longer resolves are removed from the index.
 *
 * @param force when true, wipe the entire index instead of verifying per document
 * @throws IOException
 * @throws SQLException
 * @throws SearchServiceException wrapping any underlying failure
 */
public void cleanIndex(boolean force) throws IOException,
SQLException, SearchServiceException {
// Work in a throwaway context with authorisation off; it is aborted in
// the finally block since nothing here writes to the database.
Context context = new Context();
context.turnOffAuthorisationSystem();
try
{
if (force) {
getSolr().deleteByQuery("*:*");
} else {
SolrQuery query = new SolrQuery();
query.setQuery("*:*");
// NOTE(review): setRows() is never called, so Solr's default row
// limit (10) appears to cap how many documents get examined per
// invocation -- confirm whether paging is handled elsewhere.
QueryResponse rsp = getSolr().query(query);
SolrDocumentList docs = rsp.getResults();
Iterator iter = docs.iterator();
while (iter.hasNext()) {
SolrDocument doc = (SolrDocument) iter.next();
String handle = (String) doc.getFieldValue("handle");
DSpaceObject o = HandleManager.resolveToObject(context, handle);
if (o == null) {
// Handle no longer resolves: the object is gone, purge it.
log.info("Deleting: " + handle);
/*
* Use IndexWriter to delete, its easier to manage
* write.lock
*/
unIndexContent(context, handle);
} else {
// Object still exists; decache it to bound memory use.
context.removeCached(o, o.getID());
log.debug("Keeping: " + handle);
}
}
}
} catch(Exception e){
throw new SearchServiceException(e.getMessage(), e);
} finally
{
context.abort();
}
}
/**
 * Maintenance to keep a SOLR index efficient.
 * Note: This might take a long time.
 */
public void optimize() {
    try {
        long startedAt = System.currentTimeMillis();
        System.out.println("SOLR Search Optimize -- Process Started:" + startedAt);

        getSolr().optimize();

        long finishedAt = System.currentTimeMillis();
        System.out.println("SOLR Search Optimize -- Process Finished:" + finishedAt);
        System.out.println("SOLR Search Optimize -- Total time taken:" + (finishedAt - startedAt) + " (ms).");
    } catch (SolrServerException sse) {
        System.err.println(sse.getMessage());
    } catch (IOException ioe) {
        System.err.println(ioe.getMessage());
    }
}
// //////////////////////////////////
// Private
// //////////////////////////////////
/**
 * Email the configured alert recipient ("alert.recipient") with the stack
 * trace of an indexing failure.  Best-effort: failures to send are only
 * logged as warnings.
 *
 * @param exception the failure to report; may be null ("No exception")
 */
private void emailException(Exception exception) {
// Also email an alert, system admin may need to check for stale lock
try {
String recipient = ConfigurationManager
.getProperty("alert.recipient");
if (recipient != null) {
Email email = ConfigurationManager
.getEmail(I18nUtil.getEmailFilename(
Locale.getDefault(), "internal_error"));
email.addRecipient(recipient);
email.addArgument(ConfigurationManager
.getProperty("dspace.url"));
email.addArgument(new Date());
// Render the stack trace into the email body.
String stackTrace;
if (exception != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
exception.printStackTrace(pw);
pw.flush();
stackTrace = sw.toString();
} else {
stackTrace = "No exception";
}
email.addArgument(stackTrace);
email.send();
}
} catch (Exception e) {
// Not much we can do here!
log.warn("Unable to send email alert", e);
}
}
/**
 * Checks the lastModified timestamp in the database against the timestamp
 * stored in the index to determine whether the indexed document is stale.
 *
 * @param handle       handle of the object being checked
 * @param lastModified the object's last-modified time in the database
 * @return true when the object is missing from the index or the indexed
 *         copy is older than lastModified
 * @throws SQLException
 * @throws IOException
 * @throws SearchServiceException when the Solr query fails
 */
private boolean requiresIndexing(String handle, Date lastModified)
        throws SQLException, IOException, SearchServiceException {

    boolean reindexItem = false;
    boolean inIndex = false;

    SolrQuery query = new SolrQuery();
    query.setQuery("handle:" + handle);

    QueryResponse rsp;
    try {
        rsp = getSolr().query(query);
    } catch (SolrServerException e) {
        throw new SearchServiceException(e.getMessage(), e);
    }

    for (SolrDocument doc : rsp.getResults()) {
        inIndex = true;
        Object value = doc.getFieldValue(LAST_INDEXED_FIELD);
        // FIX: the old code also tested "lastIndexed == null" inside this
        // branch, which is unreachable after a successful instanceof.
        if (value instanceof Date)
        {
            Date lastIndexed = (Date) value;
            if (lastIndexed.before(lastModified)) {
                reindexItem = true;
            }
        }
    }

    return reindexItem || !inIndex;
}
/**
 * Build the location tokens for an Item: "m&lt;communityID&gt;" for each owning
 * community and "l&lt;collectionID&gt;" for each owning collection.
 *
 * @param c      users context (unused here; kept for signature stability)
 * @param myitem the item whose locations are collected
 * @return the list of location tokens
 * @throws SQLException
 */
private List<String> getItemLocations(Context c, Item myitem)
        throws SQLException {
    List<String> locations = new Vector<String>();

    // Communities are prefixed with "m", collections with "l".
    for (Community community : myitem.getCommunities())
    {
        locations.add("m" + community.getID());
    }
    for (Collection collection : myitem.getCollections())
    {
        locations.add("l" + collection.getID());
    }

    return locations;
}
/**
 * Build the location tokens ("m&lt;communityID&gt;") for the communities that
 * own the given Collection.
 *
 * @param c      users context (unused here; kept for signature stability)
 * @param target the collection whose owning communities are collected
 * @return the list of location tokens
 * @throws SQLException
 */
private List<String> getCollectionLocations(Context c,
        Collection target) throws SQLException {
    List<String> locations = new Vector<String>();

    for (Community community : target.getCommunities())
    {
        locations.add("m" + community.getID());
    }

    return locations;
}
/**
 * Write the document to the Solr index under the appropriate handle.
 *
 * @param doc the document to add
 * @throws IOException
 */
private void writeDocument(SolrInputDocument doc) throws IOException {
try {
getSolr().add(doc);
} catch (SolrServerException e) {
// NOTE(review): Solr failures are logged and swallowed, so a failed
// add is silent to callers -- confirm this is intentional.
log.error(e.getMessage(), e);
}
}
/**
 * Build and write the index document for a DSpace Community.
 *
 * @param context   Users Context
 * @param community Community to be indexed
 * @throws SQLException
 * @throws IOException
 */
private void buildDocument(Context context, Community community)
        throws SQLException, IOException {
    // Communities sit at the top of the hierarchy, hence no location list.
    SolrInputDocument document = buildDocument(Constants.COMMUNITY,
            community.getID(), community.getHandle(), null);

    String name = community.getMetadata("name");
    if (name != null) {
        document.addField("name", name);
    }

    writeDocument(document);
}
/**
 * Build and write the index document for a DSpace Collection.
 *
 * @param context    Users Context
 * @param collection Collection to be indexed
 * @throws SQLException
 * @throws IOException
 */
private void buildDocument(Context context, Collection collection)
        throws SQLException, IOException {
    // Owning-community tokens so the collection is findable by location.
    List<String> locations = getCollectionLocations(context, collection);

    SolrInputDocument document = buildDocument(Constants.COLLECTION,
            collection.getID(), collection.getHandle(), locations);

    String name = collection.getMetadata("name");
    if (name != null) {
        document.addField("name", name);
    }

    writeDocument(document);
}
/**
 * Build a Solr document for a DSpace Item and write it to the index:
 * all metadata fields (with date/filter/sort/autocomplete variants),
 * a grouping field, and the full text of any TEXT-bundle bitstreams.
 *
 * @param context Users Context
 * @param item The DSpace Item to be indexed
 * @throws SQLException
 * @throws IOException
 */
private void buildDocument(Context context, Item item)
throws SQLException, IOException {
String handle = item.getHandle();
if (handle == null) {
handle = HandleManager.findHandle(context, item);
}
// get the location string (for searching by collection & community)
List<String> locations = getItemLocations(context, item);
SolrInputDocument doc = buildDocument(Constants.ITEM, item.getID(), handle,
locations);
log.debug("Building Item: " + handle);
//Keep a list of our sort values which we added, sort values can only be added once
List<String> sortFieldsAdded = new ArrayList<String>();
// --- 1. Metadata fields ---------------------------------------------
try {
DCValue[] mydc = item.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (int i = 0; i < mydc.length; i++) {
DCValue meta = mydc[i];
// "schema.element" before the qualifier is appended below
String field = meta.schema + "." + meta.element;
String unqualifiedField = field;
String value = meta.value;
if(value == null)
{
continue;
}
if(meta.qualifier != null && !meta.qualifier.trim().equals("")) {
field += "." + meta.qualifier;
}
//We are not indexing provenance, this is useless
if(field.equals("dc.description.provenance"))
{
continue;
}
//Add the field to all for autocomplete so our autocomplete works for all fields
doc.addField("all_ac", value);
List<String> dateIndexableFields = SearchUtils.getDateIndexableFields();
if (dateIndexableFields.contains(field) || dateIndexableFields.contains(unqualifiedField + "." + Item.ANY))
{
// Date field: store ISO value plus a ".year" facet variant;
// unparsable dates are skipped (toDate returns null).
try{
Date date = toDate(value);
//Check if we have a date, invalid dates can not be added
if(date != null){
value = DateFormatUtils.formatUTC(date, "yyyy-MM-dd'T'HH:mm:ss'Z'");
doc.addField(field + ".year", DateFormatUtils.formatUTC(date, "yyyy"));
doc.addField(field + "_dt", value);
if(SearchUtils.getSortFields().contains(field + "_dt") && !sortFieldsAdded.contains(field)){
//Also add a sort field
doc.addField(field + "_dt_sort", value);
sortFieldsAdded.add(field);
}
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
continue;
}
if(SearchUtils.getSearchFilters().contains(field) || SearchUtils.getSearchFilters().contains(unqualifiedField + "." + Item.ANY)){
//Add a dynamic fields for autocomplete in search
doc.addField(field + "_ac", value);
}
if(SearchUtils.getAllFacets().contains(field) || SearchUtils.getAllFacets().contains(unqualifiedField + "." + Item.ANY)){
//Add a special filter
//We use a separator to split up the lowercase and regular case, this is needed to get our filters in regular case
//Solr has issues with facet prefix and cases
String separator = SearchUtils.getConfig().getString("solr.facets.split.char", SearchUtils.FILTER_SEPARATOR);
doc.addField(field + "_filter", value.toLowerCase() + separator + value);
}
if(SearchUtils.getSortFields().contains(field) && !sortFieldsAdded.contains(field)){
//Only add sort value once
doc.addField(field + "_sort", value);
sortFieldsAdded.add(field);
}
doc.addField(field, value.toLowerCase());
if(meta.language != null && !meta.language.trim().equals("")) {
// Language-qualified copy keeps the original case.
String langField = field + "." + meta.language;
doc.addField(langField, value);
}
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
log.debug("  Added Metadata");
// --- 2. Grouping field ----------------------------------------------
try {
DCValue[] values = item.getMetadata("dc.relation.ispartof");
if(values != null && values.length > 0 && values[0] != null && values[0].value != null)
{
// group on parent
String handlePrefix = ConfigurationManager.getProperty("handle.canonical.prefix");
if (handlePrefix == null || handlePrefix.length() == 0)
{
handlePrefix = "http://hdl.handle.net/";
}
doc.addField("publication_grp",values[0].value.replaceFirst(handlePrefix,"") );
}
else
{
// group on self
doc.addField("publication_grp", item.getHandle());
}
} catch (Exception e){
log.error(e.getMessage(),e);
}
log.debug("  Added Grouping");
// --- 3. Full text from TEXT-bundle bitstreams -----------------------
Vector<InputStreamReader> readers = new Vector<InputStreamReader>();
try {
// now get full text of any bitstreams in the TEXT bundle
// trundle through the bundles
Bundle[] myBundles = item.getBundles();
for (int i = 0; i < myBundles.length; i++) {
if ((myBundles[i].getName() != null)
&& myBundles[i].getName().equals("TEXT")) {
// a-ha! grab the text out of the bitstreams
Bitstream[] myBitstreams = myBundles[i].getBitstreams();
for (int j = 0; j < myBitstreams.length; j++) {
try {
// NOTE(review): no charset is specified, so the platform
// default is used to decode the bitstream -- confirm the
// extracted text is always written in that encoding.
InputStreamReader is = new InputStreamReader(
myBitstreams[j].retrieve()); // get input
readers.add(is);
// Add each InputStream to the Indexed Document
doc.addField("fulltext", IOUtils.toString(is));
log.debug("  Added BitStream: "
+ myBitstreams[j].getStoreNumber() + "	"
+ myBitstreams[j].getSequenceID() + "   "
+ myBitstreams[j].getName());
} catch (Exception e) {
// this will never happen, but compiler is now
// happy.
log.trace(e.getMessage(), e);
}
}
}
}
} catch (RuntimeException e) {
log.error(e.getMessage(), e);
}
// write the index and close the inputstreamreaders
try {
writeDocument(doc);
log.info("Wrote Item: " + handle + " to Index");
} catch (RuntimeException e) {
log.error("Error while writing item to discovery index: " + handle + " message:"+ e.getMessage(), e);
} finally {
Iterator<InputStreamReader> itr = readers.iterator();
while (itr.hasNext()) {
InputStreamReader reader = itr.next();
if (reader != null) {
reader.close();
}
}
log.debug("closed " + readers.size() + " readers");
}
}
/**
 * Create a Solr document with all the shared fields initialized.
 *
 * @param type      Type of DSpace Object (Constants.ITEM/COLLECTION/COMMUNITY)
 * @param id        database id of the object
 * @param handle    handle of the object, may be null
 * @param locations location tokens ("m&lt;id&gt;"/"l&lt;id&gt;"), may be null
 * @return the initialised document
 */
private SolrInputDocument buildDocument(int type, int id, String handle,
        List<String> locations) {
    SolrInputDocument document = new SolrInputDocument();

    // Record when this document was indexed so staleness can be checked
    // later (not tokenized, but it is indexed).
    document.addField(LAST_INDEXED_FIELD, new Date());

    // New fields to weaken the dependence on handles, and allow for faster
    // list display.
    document.addField("search.resourcetype", Integer.toString(type));
    document.addField("search.resourceid", Integer.toString(id));

    // Want to be able to search for handle, so use keyword
    // (not tokenized, but it is indexed).
    if (handle != null) {
        document.addField("handle", handle);
    }

    if (locations != null) {
        for (String location : locations) {
            document.addField("location", location);
            // The prefix says which kind of container the token names.
            String containerId = location.substring(1);
            if (location.startsWith("m")) {
                document.addField("location.comm", containerId);
            } else {
                document.addField("location.coll", containerId);
            }
        }
    }

    return document;
}
/**
 * Helper function to retrieve a date using a best guess of the potential
 * date encodings on a field, chosen by the string's length.
 *
 * @param t the raw date string
 * @return the parsed Date, or null if no candidate format matched
 */
public static Date toDate(String t) {
    String[] patterns;

    // Choose the likely date formats based on string length.
    switch (t.length()) {
    case 4:
        patterns = new String[]{"yyyy"};
        break;
    case 6:
        patterns = new String[]{"yyyyMM"};
        break;
    case 7:
        patterns = new String[]{"yyyy-MM"};
        break;
    case 8:
        patterns = new String[]{"yyyyMMdd", "yyyy MMM"};
        break;
    case 10:
        patterns = new String[]{"yyyy-MM-dd"};
        break;
    case 11:
        patterns = new String[]{"yyyy MMM dd"};
        break;
    case 20:
        patterns = new String[]{"yyyy-MM-dd'T'HH:mm:ss'Z'"};
        break;
    default:
        patterns = new String[]{"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"};
        break;
    }

    // Try each candidate strictly (non-lenient, UTC); first match wins.
    for (String pattern : patterns) {
        SimpleDateFormat format = new SimpleDateFormat(pattern);
        try {
            format.setCalendar(Calendar
                    .getInstance(TimeZone.getTimeZone("UTC")));
            format.setLenient(false);
            return format.parse(t);
        } catch (ParseException pe) {
            log.error("Unable to parse date format", pe);
        }
    }

    return null;
}
    /**
     * Resolves a "location.comm"/"location.coll" facet value (a community or
     * collection database id) to the object's display name.  Any other field,
     * or an id that no longer resolves to an object, returns the raw value.
     *
     * @param context DSpace context used for the database lookup
     * @param field   the facet field name the value came from
     * @param value   the facet value (a numeric id for location fields)
     * @return the community/collection name, or the original value
     * @throws SQLException if the database lookup fails
     */
    public static String locationToName(Context context, String field, String value) throws SQLException {
        if("location.comm".equals(field) || "location.coll".equals(field)){
            // The field name determines whether the id is a community or a collection.
            int type = field.equals("location.comm") ? Constants.COMMUNITY : Constants.COLLECTION;
            DSpaceObject commColl = DSpaceObject.find(context, type, Integer.parseInt(value));
            if(commColl != null)
            {
                return commColl.getName();
            }
        }
        // Non-location field, or stale id: fall back to the raw value.
        return value;
    }
    //******** SearchService implementation
    /**
     * Executes the given query against the Solr index.
     *
     * @param query the prepared Solr query
     * @return the raw Solr response
     * @throws SearchServiceException wrapping any failure from the Solr client
     */
    public QueryResponse search(SolrQuery query) throws SearchServiceException {
        try {
            return getSolr().query(query);
        } catch (Exception e) {
            throw new org.dspace.discovery.SearchServiceException(e.getMessage(),e);
        }
    }
    /**
     * Simple means to return the search result as an InputStream.
     *
     * NOTE(review): the GetMethod is never released, so the underlying HTTP
     * connection appears to stay checked out until the returned stream is
     * fully consumed/garbage-collected — confirm callers close the stream
     * promptly, or that the method should be released explicitly.
     *
     * @param query the query to execute against Solr
     * @return the raw HTTP response body from the Solr server
     * @throws SearchServiceException on Solr-side failures
     * @throws java.io.IOException on transport errors
     */
    public java.io.InputStream searchAsInputStream(SolrQuery query) throws SearchServiceException, java.io.IOException {
        try {
            // Reuse the Solr server's HttpClient and host URL for a raw GET.
            org.apache.commons.httpclient.methods.GetMethod method =
                new org.apache.commons.httpclient.methods.GetMethod(getSolr().getHttpClient().getHostConfiguration().getHostURL() + "");
            method.setQueryString(query.toString());
            getSolr().getHttpClient().executeMethod(method);
            return method.getResponseBodyAsStream();
        } catch (org.apache.solr.client.solrj.SolrServerException e) {
            throw new SearchServiceException(e.getMessage(), e);
        }
    }
    /**
     * Convenience overload of
     * {@link #search(Context, String, String, boolean, int, int, String...)}
     * that applies no explicit sort field.
     */
    public List<DSpaceObject> search(Context context, String query, int offset, int max, String... filterquery) {
        return search(context, query, null, true, offset, max, filterquery);
    }
public List<DSpaceObject> search(Context context, String query, String orderfield, boolean ascending, int offset, int max, String... filterquery) {
try {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(query);
solrQuery.setFields("search.resourceid", "search.resourcetype");
solrQuery.setStart(offset);
solrQuery.setRows(max);
if (orderfield != null) {
solrQuery.setSortField(orderfield, ascending ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc);
}
if (filterquery != null) {
solrQuery.addFilterQuery(filterquery);
}
QueryResponse rsp = getSolr().query(solrQuery);
SolrDocumentList docs = rsp.getResults();
Iterator iter = docs.iterator();
List<DSpaceObject> result = new ArrayList<DSpaceObject>();
while (iter.hasNext()) {
SolrDocument doc = (SolrDocument) iter.next();
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
if (o != null) {
result.add(o);
}
}
return result;
} catch (Exception e) {
// Any acception that we get ignore it.
// We do NOT want any crashed to shown by the user
e.printStackTrace();
return new ArrayList<DSpaceObject>(0);
}
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import java.io.IOException;
import java.sql.SQLException;
/**
 * Interface used for indexing DSpace objects into discovery (Solr).
 *
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 */
public interface IndexingService {

    /** Index the given object. */
    void indexContent(Context context, DSpaceObject dso)
            throws SQLException;

    /** Index the given object; {@code force} presumably forces a reindex even when up to date — confirm with implementation. */
    void indexContent(Context context, DSpaceObject dso,
                      boolean force) throws SQLException;

    /** Remove the given object from the index. */
    void unIndexContent(Context context, DSpaceObject dso)
            throws SQLException, IOException;

    /** Remove the object identified by this handle from the index. */
    void unIndexContent(Context context, String handle)
            throws SQLException, IOException;

    /** Remove by handle; {@code commit} presumably commits the change immediately — confirm with implementation. */
    void unIndexContent(Context context, String handle, boolean commit)
            throws SQLException, IOException;

    /** Re-index the given object. */
    void reIndexContent(Context context, DSpaceObject dso)
            throws SQLException, IOException;

    /** Build the index for all content. */
    void createIndex(Context context) throws SQLException, IOException;

    /** Bring the index up to date. */
    void updateIndex(Context context);

    /** Bring the index up to date; {@code force} presumably reindexes everything. */
    void updateIndex(Context context, boolean force);

    /** Clean stale entries out of the index. */
    void cleanIndex(boolean force) throws IOException,
            SQLException, SearchServiceException;

    /** Optimize the underlying index storage. */
    void optimize();
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
/**
 * Exception used by discovery when Solr-related failures occur.
 *
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 */
public class SearchServiceException extends Exception {

    // Exception is Serializable; declare an explicit serial version so the
    // serialized form does not silently change between compiler versions.
    private static final long serialVersionUID = 1L;

    /** Constructs an exception with neither message nor cause. */
    public SearchServiceException() {
    }

    /** Constructs an exception with the given detail message. */
    public SearchServiceException(String s) {
        super(s);
    }

    /** Constructs an exception with the given detail message and cause. */
    public SearchServiceException(String s, Throwable throwable) {
        super(s, throwable);
    }

    /** Constructs an exception wrapping the given cause. */
    public SearchServiceException(Throwable throwable) {
        super(throwable);
    }
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.discovery;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrDocument;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.Body;
import org.dspace.app.xmlui.wing.element.Division;
import org.dspace.app.xmlui.wing.element.ReferenceSet;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.sql.SQLException;
/**
* Transformer that displays the recently submitted items on the dspace home page
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class SiteRecentSubmissions extends AbstractFiltersTransformer {
private static final Logger log = Logger.getLogger(SiteRecentSubmissions.class);
private static final Message T_head_recent_submissions =
message("xmlui.ArtifactBrowser.SiteViewer.head_recent_submissions");
    /**
     * Display the recently submitted items block on the repository home page.
     * A failed search is logged and results in an empty (but still rendered)
     * recent-submissions list.
     */
    public void addBody(Body body) throws SAXException, WingException,
            UIException, SQLException, IOException, AuthorizeException {
        try {
            // Site-wide view: no scoping object.
            performSearch(null);
        } catch (SearchServiceException e) {
            log.error(e.getMessage(), e);
        }
        Division home = body.addDivision("site-home", "primary repository");
        Division lastSubmittedDiv = home
                .addDivision("site-recent-submission", "secondary recent-submission");
        lastSubmittedDiv.setHead(T_head_recent_submissions);
        ReferenceSet lastSubmitted = lastSubmittedDiv.addReferenceSet(
                "site-last-submitted", ReferenceSet.TYPE_SUMMARY_LIST,
                null, "recent-submissions");
        if (queryResults != null) {
            for (SolrDocument doc : queryResults.getResults()) {
                DSpaceObject obj = SearchUtils.findDSpaceObject(context, doc);
                // Skip hits that no longer resolve to an object.
                if(obj != null)
                {
                    lastSubmitted.addReference(obj);
                }
            }
        }
    }
    /** @return the filter-configuration view name used for the site home page. */
    public String getView()
    {
        return "site";
    }
    /**
     * Query Solr for the most recently submitted items, e.g.:
     * facet.limit=11&wt=javabin&rows=5&sort=dateaccessioned+asc&facet=true&facet.mincount=1&q=search.resourcetype:2&version=1
     *
     * The response is cached in queryResults for the life of this component,
     * so repeated calls within one request are no-ops.
     *
     * @param object unused; the site-wide view has no scoping object
     */
    @Override
    public void performSearch(DSpaceObject object) throws SearchServiceException, UIException {
        if(queryResults != null)
        {
            return; // queryResults;
        }
        queryArgs = prepareDefaultFilters(getView());
        // Restrict results to items only.
        queryArgs.setQuery("search.resourcetype:" + Constants.ITEM);
        // Number of recent submissions to show; configurable, defaults to 5.
        queryArgs.setRows(SearchUtils.getConfig().getInt("solr.recent-submissions.size", 5));
        String sortField = SearchUtils.getConfig().getString("recent.submissions.sort-option");
        if(sortField != null){
            // Sort direction is fixed to descending (newest first),
            // regardless of configuration; only the field is configurable.
            queryArgs.setSortField(
                    sortField,
                    SolrQuery.ORDER.desc
            );
        }
        SearchService service = getSearchService();
        queryResults = service.search(queryArgs);
    }
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.discovery;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrDocument;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.LogManager;
import org.dspace.discovery.SearchUtils;
import org.xml.sax.SAXException;
import org.dspace.discovery.SearchServiceException;
/**
* Displays related items to the currently viewable item
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class RelatedItems extends AbstractFiltersTransformer
{
private static final Logger log = Logger.getLogger(RelatedItems.class);
/**
* Display a single item
*/
public void addBody(Body body) throws SAXException, WingException,
UIException, SQLException, IOException, AuthorizeException
{
DSpaceObject dspaceObject = HandleUtil.obtainHandle(objectModel);
if (!(dspaceObject instanceof Item))
{
return;
}
Item item = (Item) dspaceObject;
try {
performSearch(item);
} catch (SearchServiceException e) {
log.error(e.getMessage(),e);
}
// Build the collection viewer division.
if (this.queryResults != null) {
NamedList nList = this.queryResults.getResponse();
SimpleOrderedMap<SolrDocumentList> mlt = (SimpleOrderedMap<SolrDocumentList>)nList.get("moreLikeThis");
//home.addPara(nList.toString());
if(mlt != null && 0 < mlt.size())
{
//TODO: also make sure if an item is unresolved we do not end up with an empty referenceset !
List<DSpaceObject> dsos = new ArrayList<DSpaceObject>();
for(Map.Entry<String,SolrDocumentList> entry : mlt)
{
//org.dspace.app.xmlui.wing.element.List mltList = mltDiv.addList(key);
//mltList.setHead(key);
for(SolrDocument doc : entry.getValue())
{
try{
dsos.add(SearchUtils.findDSpaceObject(context, doc));
}catch(Exception e){
log.error(LogManager.getHeader(context, "Error while resolving related item doc to dso", "Main item: " + item.getID()));
}
//mltList.addItem().addContent(doc.toString());
}
}
if(0 < dsos.size()){
Division home = body.addDivision("test", "secondary related");
String name = "Related Items";
//if (name == null || name.length() == 0)
// home.setHead(T_untitled);
//else
home.setHead(name);
Division mltDiv = home.addDivision("item-related", "secondary related");
mltDiv.setHead("Items By Author:");
ReferenceSet set = mltDiv.addReferenceSet(
"item-related-items", ReferenceSet.TYPE_SUMMARY_LIST,
null, "related-items");
for (DSpaceObject dso : dsos) {
set.addReference(dso);
}
}
}
}
}
@Override
public void performSearch(DSpaceObject dso) throws SearchServiceException {
if(queryResults != null)
{
return;
}
this.queryArgs = prepareDefaultFilters(getView());
this.queryArgs.setRows(1);
this.queryArgs.add("fl","dc.contributor,dc.contributor.author,handle");
this.queryArgs.add("mlt","true");
this.queryArgs.add("mlt.fl","dc.contributor,dc.contributor.author,handle");
this.queryArgs.add("mlt.mindf","1");
this.queryArgs.add("mlt.mintf","1");
this.queryArgs.setQuery("handle:" + dso.getHandle());
this.queryArgs.setRows(1);
queryResults = getSearchService().search(queryArgs);
}
public String getView()
{
return "item";
}
/**
* Recycle
*/
public void recycle() {
this.queryArgs = null;
this.queryResults = null;
super.recycle();
}
}
| Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.discovery;
import java.io.IOException;
import java.io.Serializable;
import java.sql.SQLException;
import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.util.HashUtil;
import org.apache.excalibur.source.SourceValidity;
import org.apache.log4j.Logger;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.utils.DSpaceValidity;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.Body;
import org.dspace.app.xmlui.wing.element.Division;
import org.dspace.app.xmlui.wing.element.PageMeta;
import org.dspace.app.xmlui.wing.element.Para;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.core.LogManager;
import org.xml.sax.SAXException;
/**
* Renders the search box for a community
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class CommunitySearch extends AbstractDSpaceTransformer implements CacheableProcessingComponent
{
private static final Logger log = Logger.getLogger(CommunitySearch.class);
/** Language Strings */
private static final Message T_full_text_search =
message("xmlui.ArtifactBrowser.CommunityViewer.full_text_search");
private static final Message T_go =
message("xmlui.general.go");
public static final Message T_untitled =
message("xmlui.general.untitled");
private static final Message T_head_sub_collections =
message("xmlui.ArtifactBrowser.CommunityViewer.head_sub_collections");
/** Cached validity object */
private SourceValidity validity;
/**
* Generate the unique caching key.
* This key must be unique inside the space of this component.
*/
public Serializable getKey() {
try {
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
if (dso == null)
{
return "0"; // no item, something is wrong
}
return HashUtil.hash(dso.getHandle());
}
catch (SQLException sqle)
{
// Ignore all errors and just return that the component is not cachable.
return "0";
}
}
    /**
     * Generate the cache validity object.
     *
     * This validity object includes the community being viewed, all
     * sub-communities (one level deep), and all sub-collections.
     */
    public SourceValidity getValidity()
    {
        // Only build the validity once; it is cached on this component.
        if (this.validity == null)
        {
            Community community = null;
            try {
                DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
                if (dso == null)
                {
                    return null;
                }
                if (!(dso instanceof Community))
                {
                    return null;
                }
                community = (Community) dso;
                // Local variable intentionally shadows the cached field until
                // the validity is complete.
                DSpaceValidity validity = new DSpaceValidity();
                validity.add(community);
                Community[] subCommunities = community.getSubcommunities();
                Collection[] collections = community.getCollections();
                // Sub communities
                for (Community subCommunity : subCommunities)
                {
                    validity.add(subCommunity);
                }
                // Sub collections
                for (Collection collection : collections)
                {
                    validity.add(collection);
                }
                this.validity = validity.complete();
            }
            catch (Exception e)
            {
                // Ignore all errors and invalidate the cache.
            }
            // Logged only on the first (cache-building) pass.
            log.info(LogManager.getHeader(context, "view_community", "community_id=" + (community == null ? "" : community.getID())));
        }
        return this.validity;
    }
    /**
     * Add the community's title and trail links to the page's metadata.
     *
     * Intentionally left empty: this component contributes no page metadata.
     */
    public void addPageMeta(PageMeta pageMeta) throws SAXException,
            WingException, UIException, SQLException, IOException,
            AuthorizeException
    {
    }
    /**
     * Display the search box for a single community (and reference any
     * sub-communities or collections).
     */
    public void addBody(Body body) throws SAXException, WingException,
            UIException, SQLException, IOException, AuthorizeException
    {
        DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
        if (!(dso instanceof Community))
        {
            return;
        }
        // Set up the major variables
        Community community = (Community) dso;
        // Build the community viewer division.
        Division home = body.addDivision("community-home", "primary repository community");
        String name = community.getMetadata("name");
        if (name == null || name.length() == 0)
        {
            home.setHead(T_untitled);
        }
        else
        {
            home.setHead(name);
        }
        // The search / browse box.
        {
            Division search = home.addDivision("community-search-browse",
                    "secondary search-browse");
            // Search query form posting to this community's discovery page.
            Division query = search.addInteractiveDivision("community-search",
                    contextPath + "/handle/" + community.getHandle() + "/discover",
                    Division.METHOD_POST, "secondary search");
            Para para = query.addPara("search-query", null);
            para.addContent(T_full_text_search);
            para.addContent(" ");
            para.addText("query");
            para.addContent(" ");
            para.addButton("submit").setValue(T_go);
            //query.addPara().addXref(contextPath + "/handle/" + community.getHandle() + "/advanced-search", T_advanced_search_link);
            // Browse by list
            // Division browseDiv = search.addDivision("community-browse","secondary browse");
            // List browse = browseDiv.addList("community-browse", List.TYPE_SIMPLE,
            //     "community-browse");
            // browse.setHead(T_head_browse);
            // String url = contextPath + "/handle/" + community.getHandle();
            // browse.addItemXref(url + "/browse?type=title",T_browse_titles);
            // browse.addItemXref(url + "/browse?type=author",T_browse_authors);
            // browse.addItemXref(url + "/browse?type=dateissued",T_browse_dates);
        }
    }
    /**
     * Recycle: clear the cached validity object so this pooled component
     * starts fresh on the next request.
     */
    public void recycle()
    {
        // Clear out our item's cache.
        this.validity = null;
        super.recycle();
    }
} | Java |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.discovery;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.DSpaceValidity;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.utils.RequestUtils;
import org.dspace.app.xmlui.wing.element.*;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Community;
import org.dspace.content.Collection;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.util.HashUtil;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.excalibur.source.SourceValidity;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.FacetParams;
import org.apache.log4j.Logger;
import org.dspace.discovery.*;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
import org.xml.sax.SAXException;
import java.io.Serializable;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import java.text.SimpleDateFormat;
import java.text.DateFormat;
import java.net.URLEncoder;
import java.util.List;
/**
* Dynamic browse by page (not used @ the moment)
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class BrowseFacet extends AbstractDSpaceTransformer implements CacheableProcessingComponent {
private static final Logger log = Logger.getLogger(BrowseFacet.class);
private static final Message T_dspace_home = message("xmlui.general.dspace_home");
/**
* The cache of recently submitted items
*/
protected QueryResponse queryResults;
/**
* Cached validity object
*/
protected SourceValidity validity;
/**
* Cached query arguments
*/
protected SolrQuery queryArgs;
private int DEFAULT_PAGE_SIZE = 10;
public static final String OFFSET = "offset";
public static final String FACET_FIELD = "field";
private ConfigurationService config = null;
private SearchService searchService = null;
    /**
     * Wires up the configuration service and the search service from the
     * DSpace service manager at component construction time.
     */
    public BrowseFacet() {
        DSpace dspace = new DSpace();
        config = dspace.getConfigurationService();
        searchService = dspace.getServiceManager().getServiceByName(SearchService.class.getName(),SearchService.class);
    }
/**
* Generate the unique caching key.
* This key must be unique inside the space of this component.
*/
public Serializable getKey() {
try {
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
if (dso == null)
{
return "0";
}
return HashUtil.hash(dso.getHandle());
}
catch (SQLException sqle) {
// Ignore all errors and just return that the component is not
// cachable.
return "0";
}
}
/**
* Generate the cache validity object.
* <p/>
* The validity object will include the collection being viewed and
* all recently submitted items. This does not include the community / collection
* hierarch, when this changes they will not be reflected in the cache.
*/
public SourceValidity getValidity() {
if (this.validity == null) {
try {
DSpaceValidity validity = new DSpaceValidity();
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
if (dso != null) {
// Add the actual collection;
validity.add(dso);
}
// add reciently submitted items, serialize solr query contents.
QueryResponse response = getQueryResponse(dso);
validity.add("numFound:" + response.getResults().getNumFound());
for (SolrDocument doc : response.getResults()) {
validity.add(doc.toString());
}
for (SolrDocument doc : response.getResults()) {
validity.add(doc.toString());
}
for (FacetField field : response.getFacetFields()) {
validity.add(field.getName());
for (FacetField.Count count : field.getValues()) {
validity.add(count.getName() + count.getCount());
}
}
this.validity = validity.complete();
}
catch (Exception e) {
// Just ignore all errors and return an invalid cache.
}
//TODO: dependent on tags as well :)
}
return this.validity;
}
/**
* Get the recently submitted items for the given community or collection.
*
* @param scope The collection.
*/
protected QueryResponse getQueryResponse(DSpaceObject scope) {
Request request = ObjectModelHelper.getRequest(objectModel);
if (queryResults != null)
{
return queryResults;
}
queryArgs = new SolrQuery();
//Make sure we add our default filters
queryArgs.addFilterQuery(SearchUtils.getDefaultFilters("browse"));
queryArgs.setQuery("search.resourcetype: " + Constants.ITEM + ((request.getParameter("query") != null && !"".equals(request.getParameter("query"))) ? " AND (" + request.getParameter("query") + ")" : ""));
// queryArgs.setQuery("search.resourcetype:" + Constants.ITEM);
queryArgs.setRows(0);
// TODO: change this !
queryArgs.setSortField(
ConfigurationManager.getProperty("recent.submissions.sort-option"),
SolrQuery.ORDER.asc
);
queryArgs.addFilterQuery(getParameterFacetQueries());
//Set the default limit to 11
//query.setFacetLimit(11);
queryArgs.setFacetMinCount(1);
//sort
//TODO: why this kind of sorting ? Should the sort not be on how many times the value appears like we do in the filter by sidebar ?
queryArgs.setFacetSort(config.getPropertyAsType("solr.browse.sort","lex"));
queryArgs.setFacet(true);
int offset = RequestUtils.getIntParameter(request, OFFSET);
if (offset == -1)
{
offset = 0;
}
queryArgs.setParam(FacetParams.FACET_OFFSET, String.valueOf(offset));
//We add +1 so we can use the extra one to make sure that we need to show the next page
queryArgs.setParam(FacetParams.FACET_LIMIT, String.valueOf(DEFAULT_PAGE_SIZE + 1));
if (scope != null) /* top level search / community */ {
if (scope instanceof Community) {
queryArgs.setFilterQueries("location:m" + scope.getID());
} else if (scope instanceof Collection) {
queryArgs.setFilterQueries("location:l" + scope.getID());
}
}
String facetField = request.getParameter(FACET_FIELD);
boolean isDate = false;
if(facetField.endsWith("_dt")){
facetField = facetField.split("_")[0];
isDate = true;
}
if (isDate) {
queryArgs.setParam(FacetParams.FACET_DATE,new String[]{facetField});
queryArgs.setParam(FacetParams.FACET_DATE_GAP,"+1YEAR");
Date lowestDate = getLowestDateValue(queryArgs.getQuery(), facetField, queryArgs.getFilterQueries());
int thisYear = Calendar.getInstance().get(Calendar.YEAR);
DateFormat formatter = new SimpleDateFormat("yyyy");
int maxEndYear = Integer.parseInt(formatter.format(lowestDate));
//Since we have a date, we need to find the last year
String startDate = "NOW/YEAR-" + SearchUtils.getConfig().getString("solr.date.gap", "10") + "YEARS";
String endDate = "NOW";
int startYear = thisYear - (offset + DEFAULT_PAGE_SIZE);
// We shouldn't go lower then our max bottom year
// Make sure to substract one so the bottom year is also counted !
if(startYear < maxEndYear)
{
startYear = maxEndYear - 1;
}
if(0 < offset){
//Say that we have an offset of 10 years
//we need to go back 10 years (2010 - (2010 - 10))
//(add one to compensate for the NOW in the start)
int endYear = thisYear - offset + 1;
endDate = "NOW/YEAR-" + (thisYear - endYear) + "YEARS";
//Add one to the startyear to get one more result
//When we select NOW, the current year is also used (so auto+1)
}
startDate = "NOW/YEAR-" + (thisYear - startYear) + "YEARS";
queryArgs.setParam(FacetParams.FACET_DATE_START, startDate);
queryArgs.setParam(FacetParams.FACET_DATE_END, endDate);
System.out.println(startDate);
System.out.println(endDate);
} else {
queryArgs.addFacetField(new String[]{facetField});
}
try {
queryResults = searchService.search(queryArgs);
} catch (SearchServiceException e) {
log.error(e.getMessage(), e);
}
return queryResults;
}
    /**
     * Retrieves the lowest date value in the given field.
     *
     * @param query a solr query
     * @param dateField the field for which we want to retrieve our date
     * @param filterquery the filterqueries
     * @return the lowest date found, or null when nothing matched or the
     *         query failed
     */
    private Date getLowestDateValue(String query, String dateField, String... filterquery){

        try {
            SolrQuery solrQuery = new SolrQuery();
            solrQuery.setQuery(query);
            solrQuery.setFields(dateField);
            // One row, sorted ascending: the first hit carries the minimum date.
            solrQuery.setRows(1);
            solrQuery.setSortField(dateField, SolrQuery.ORDER.asc);
            solrQuery.setFilterQueries(filterquery);

            QueryResponse rsp = searchService.search(solrQuery);
            if(0 < rsp.getResults().getNumFound()){
                return (Date) rsp.getResults().get(0).getFieldValue(dateField);
            }
        }catch (Exception e){
            // Best-effort helper: log and fall through to null on any failure.
            log.error("Unable to get lowest date", e);
        }
        return null;
    }
    /**
     * Add a page title and trail links, both derived from the facet field
     * currently being browsed.
     */
    public void addPageMeta(PageMeta pageMeta) throws SAXException, WingException, SQLException, IOException, AuthorizeException {
        Request request = ObjectModelHelper.getRequest(objectModel);
        String facetField = request.getParameter(FACET_FIELD);

        pageMeta.addMetadata("title").addContent(message("xmlui.ArtifactBrowser.AbstractSearch.type_" + facetField + "_browse"));
        pageMeta.addTrailLink(contextPath + "/", T_dspace_home);

        DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
        if ((dso instanceof Collection) || (dso instanceof Community)) {
            // Browsing inside a scope: include the community/collection trail.
            HandleUtil.buildHandleTrail(dso, pageMeta, contextPath);
        }
        pageMeta.addTrail().addContent(message("xmlui.ArtifactBrowser.AbstractSearch.type_" + facetField + "_browse"));
    }
    /**
     * Render the browse-by-facet results: one table of facet values for the
     * field named by the "field" request parameter, with simple pagination.
     * Values already selected as filters are rendered as plain text; the
     * others as links that add the corresponding filter query.
     */
    @Override
    public void addBody(Body body) throws SAXException, WingException, UIException, SQLException, IOException, AuthorizeException {
        Request request = ObjectModelHelper.getRequest(objectModel);
        DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
        // Set up the major variables
        //Collection collection = (Collection) dso;
        // Build the collection viewer division.
        //Make sure we get our results
        queryResults = getQueryResponse(dso);
        if (this.queryResults != null) {
            java.util.List<FacetField> facetFields = this.queryResults.getFacetFields();
            if (facetFields == null)
            {
                facetFields = new ArrayList<FacetField>();
            }
            // Date facets come back separately; merge them in.
            facetFields.addAll(this.queryResults.getFacetDates());
            if (facetFields.size() > 0) {
                // Only one facet field is requested, so only the first is shown.
                FacetField field = facetFields.get(0);
                java.util.List<FacetField.Count> values = field.getValues();
                if(field.getGap() != null){
                    //We are dealing with dates so flip em, top date comes first
                    Collections.reverse(values);
                }
                if (values != null && 0 < values.size()) {
                    Division results = body.addDivision("browse-by-" + field + "-results", "primary");
                    results.setHead(message("xmlui.ArtifactBrowser.AbstractSearch.type_" + request.getParameter(FACET_FIELD) + "_browse"));
                    // Find our faceting offset
                    int offSet = 0;
                    try {
                        offSet = Integer.parseInt(queryArgs.get(FacetParams.FACET_OFFSET));
                    } catch (NumberFormatException e) {
                        //Ignore: a missing/bad offset just means page one.
                    }
                    //Only show the nextpageurl if we have at least one result following our current results
                    String nextPageUrl = null;
                    // The query asked for one value beyond the page size; if
                    // we got it, there is a next page.
                    if (values.size() == (DEFAULT_PAGE_SIZE + 1))
                    {
                        nextPageUrl = getNextPageURL(request);
                    }
                    results.setSimplePagination((int) queryResults.getResults().getNumFound(), offSet + 1,
                            (offSet + (values.size() - 1)), getPreviousPageURL(request), nextPageUrl);
                    Table singleTable = results.addTable("browse-by-" + field + "-results", (int) (queryResults.getResults().getNumFound() + 1), 1);
                    List<String> filterQueries = new ArrayList<String>();
                    if(request.getParameterValues("fq") != null)
                    {
                        filterQueries = Arrays.asList(request.getParameterValues("fq"));
                    }
                    for (int i = 0; i < values.size(); i++) {
                        FacetField.Count value = values.get(i);
                        String displayedValue = value.getName();
                        String filterQuery = value.getAsFilterQuery();
                        if (field.getName().equals("location.comm") || field.getName().equals("location.coll")) {
                            //We have a community/collection, resolve it to a dspaceObject
                            displayedValue = SolrServiceImpl.locationToName(context, field.getName(), displayedValue);
                        }
                        if(field.getGap() != null){
                            //We have a date get the year so we can display it
                            DateFormat simpleDateformat = new SimpleDateFormat("yyyy");
                            displayedValue = simpleDateformat.format(SolrServiceImpl.toDate(displayedValue));
                            filterQuery = ClientUtils.escapeQueryChars(value.getFacetField().getName()) + ":" + displayedValue + "*";
                        }
                        Cell cell = singleTable.addRow().addCell();
                        //No use in selecting the same filter twice
                        if(filterQueries.contains(filterQuery)){
                            cell.addContent(displayedValue + " (" + value.getCount() + ")");
                        } else {
                            // Link to the discovery page with this value added
                            // as an (URL-encoded) filter query.
                            cell.addXref(
                                    contextPath + (dso == null ? "" : "/handle/" + dso.getHandle()) +
                                            "/discover?" +
                                            "&fq=" +
                                            URLEncoder.encode(filterQuery, "UTF-8") +
                                            (request.getQueryString() != null ? "&" + request.getQueryString() : ""),
                                    displayedValue + " (" + value.getCount() + ")"
                            );
                        }
                    }
                }
            }
        }
        //DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
        /*
        if (dso != null)
        {
            if (dso instanceof Collection)
            {
                browseContext.addItem().addXref(contextPath + "/discovery/?q=search.resourcetype%3A2+AND+location%3Al" + dso.getID(), T_head_this_collection );
            }
            if (dso instanceof Community)
            {
                browseContext.addItem().addXref(contextPath + "/discovery/?q=search.resourcetype%3A2+AND+location%3Am" + dso.getID(), T_head_this_community );
            }
        }
        browseGlobal.addItem().addXref(contextPath + "/discovery/?q=search.resourcetype%3A2", T_head_all_of_dspace );
        */
    }
/**
 * Builds the URL for the next page of facet values.
 * <p>
 * Carries over the facet field, advances the Solr facet offset by one
 * page, and re-applies any active filter queries.
 *
 * @param request the current Cocoon request, used to read the facet field
 * @return the next-page URL (the caller only links here when more than
 *         DEFAULT_PAGE_SIZE values were returned)
 */
private String getNextPageURL(Request request) {
    Map<String, String> parameters = new HashMap<String, String>();
    parameters.put(FACET_FIELD, request.getParameter(FACET_FIELD));
    if (queryArgs.get(FacetParams.FACET_OFFSET) != null)
    {
        // Advance the offset by one page size for the next slice of values.
        parameters.put(OFFSET, String.valueOf(Integer.parseInt(queryArgs.get(FacetParams.FACET_OFFSET)) + DEFAULT_PAGE_SIZE));
    }
    // Re-apply the active filter queries. Encode each value (as is done when
    // the facet links themselves are rendered) so characters such as '&',
    // '+' or ':' inside a filter query do not corrupt the URL.
    StringBuilder urlBuilder = new StringBuilder(generateURL("browse-discovery", parameters));
    String[] fqs = getParameterFacetQueries();
    if (fqs != null) {
        for (String fq : fqs) {
            try {
                urlBuilder.append("&fq=").append(URLEncoder.encode(fq, "UTF-8"));
            } catch (java.io.UnsupportedEncodingException e) {
                // UTF-8 is guaranteed to be supported by the JVM; if this
                // ever triggers, fall back to the raw value.
                urlBuilder.append("&fq=").append(fq);
            }
        }
    }
    return urlBuilder.toString();
}
/**
 * Builds the URL for the previous page of facet values, or {@code null}
 * when the current page is the first one (offset 0).
 * <p>
 * Carries over the facet field, steps the Solr facet offset back by one
 * page (clamped at zero so it can never go negative), and re-applies any
 * active filter queries.
 *
 * @param request the current Cocoon request, used to read the facet field
 * @return the previous-page URL, or {@code null} when already on page one
 */
private String getPreviousPageURL(Request request) {
    //If our offset should be 0 then we shouldn't be able to view a previous page url
    if ("0".equals(queryArgs.get(FacetParams.FACET_OFFSET)))
    {
        return null;
    }
    Map<String, String> parameters = new HashMap<String, String>();
    parameters.put(FACET_FIELD, request.getParameter(FACET_FIELD));
    if (queryArgs.get(FacetParams.FACET_OFFSET) != null)
    {
        // Step back one page size; clamp at 0 so an odd offset (smaller
        // than a full page) cannot produce a negative Solr facet offset.
        parameters.put(OFFSET, String.valueOf(Math.max(0, Integer.parseInt(queryArgs.get(FacetParams.FACET_OFFSET)) - DEFAULT_PAGE_SIZE)));
    }
    // Re-apply the active filter queries. Encode each value (as is done when
    // the facet links themselves are rendered) so characters such as '&',
    // '+' or ':' inside a filter query do not corrupt the URL.
    StringBuilder urlBuilder = new StringBuilder(generateURL("browse-discovery", parameters));
    String[] fqs = getParameterFacetQueries();
    if (fqs != null) {
        for (String fq : fqs) {
            try {
                urlBuilder.append("&fq=").append(URLEncoder.encode(fq, "UTF-8"));
            } catch (java.io.UnsupportedEncodingException e) {
                // UTF-8 is guaranteed to be supported by the JVM; if this
                // ever triggers, fall back to the raw value.
                urlBuilder.append("&fq=").append(fq);
            }
        }
    }
    return urlBuilder.toString();
}
/**
 * Releases per-request state so this pooled Cocoon component can be
 * safely reused for a later request.
 */
public void recycle() {
    // Drop references from the previous request before the component is
    // returned to the pool; the two assignments are order-independent.
    this.validity = null;
    this.queryResults = null;
    super.recycle();
}
/**
 * Returns the {@code fq} (filter query) request parameters.
 *
 * @return the filter query values from the request, or an empty array when
 *         none were supplied, so callers never have to null-check
 */
public String[] getParameterFacetQueries() {
    Request request = ObjectModelHelper.getRequest(objectModel);
    // Read the parameter once instead of twice (the original called
    // getParameterValues both for the null check and for the return).
    String[] fqs = request.getParameterValues("fq");
    return fqs != null ? fqs : new String[0];
}
}