code
stringlengths
3
1.18M
language
stringclasses
1 value
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;

import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.handle.HandleManager;

import com.sun.syndication.feed.module.DCModule;
import com.sun.syndication.feed.module.DCModuleImpl;
import com.sun.syndication.feed.module.Module;
import com.sun.syndication.feed.synd.SyndContent;
import com.sun.syndication.feed.synd.SyndContentImpl;
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndEntryImpl;
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.feed.synd.SyndFeedImpl;
import com.sun.syndication.feed.synd.SyndImage;
import com.sun.syndication.feed.synd.SyndImageImpl;
import com.sun.syndication.feed.synd.SyndPerson;
import com.sun.syndication.feed.synd.SyndPersonImpl;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput;

/**
 * Invoke ROME library to assemble a generic model of a syndication
 * for the given list of Items and scope. Consults configuration for the
 * metadata bindings to feed elements. Uses ROME's output drivers to
 * return any of the implemented formats, e.g. RSS 1.0, RSS 2.0, ATOM 1.0.
 *
 * The feed generator and OpenSearch call on this class so feed contents are
 * uniform for both.
 *
 * @author Larry Stone
 */
public class SyndicationFeed
{
    private static final Logger log = Logger.getLogger(SyndicationFeed.class);

    /** i18n key values */
    public static final String MSG_UNTITLED = "notitle";
    public static final String MSG_LOGO_TITLE = "logo.title";
    public static final String MSG_FEED_TITLE = "feed.title";
    public static final String MSG_FEED_DESCRIPTION = "general-feed.description";
    public static final String MSG_METADATA = "metadata.";
    public static final String MSG_UITYPE = "ui.type";

    // UI keywords
    public static final String UITYPE_XMLUI = "xmlui";
    public static final String UITYPE_JSPUI = "jspui";

    // default DC fields for entry
    private static String defaultTitleField = "dc.title";
    private static String defaultAuthorField = "dc.contributor.author";
    private static String defaultDateField = "dc.date.issued";
    private static String defaultDescriptionFields =
        "dc.description.abstract, dc.description, dc.title.alternative, dc.title";

    // metadata field for Item title in entry:
    private static String titleField =
        getDefaultedConfiguration("webui.feed.item.title", defaultTitleField);

    // metadata field for Item publication date in entry:
    private static String dateField =
        getDefaultedConfiguration("webui.feed.item.date", defaultDateField);

    // metadata fields rendered into the entry description, in order:
    private static String descriptionFields[] =
        getDefaultedConfiguration("webui.feed.item.description",
                defaultDescriptionFields).split("\\s*,\\s*");

    // metadata field for Item author(s) in entry:
    private static String authorField =
        getDefaultedConfiguration("webui.feed.item.author", defaultAuthorField);

    // metadata field for Item dc:creator field in entry's DCModule (no default)
    private static String dcCreatorField =
        ConfigurationManager.getProperty("webui.feed.item.dc.creator");

    // metadata field for Item dc:date field in entry's DCModule (no default)
    private static String dcDateField =
        ConfigurationManager.getProperty("webui.feed.item.dc.date");

    // metadata field for Item dc:description field in entry's DCModule (no default)
    private static String dcDescriptionField =
        ConfigurationManager.getProperty("webui.feed.item.dc.description");

    // -------- Instance variables:

    // the feed object we are building
    private SyndFeed feed = null;

    // memory of UI that called us, "xmlui" or "jspui"
    // affects Bitstream retrieval URL and I18N keys
    private String uiType = null;

    // cached base URL of the repository, computed lazily by resolveURL(request, null).
    // NOTE(review): not synchronized; instances are assumed request-scoped.
    private String baseURL = null;

    /**
     * Constructor.
     * @param ui either "xmlui" or "jspui"
     */
    public SyndicationFeed(String ui)
    {
        feed = new SyndFeedImpl();
        uiType = ui;
    }

    /**
     * Returns list of metadata selectors used to compose the description element.
     *
     * @return selector list - format 'schema.element[.qualifier]'
     */
    public static String[] getDescriptionSelectors()
    {
        // clone so callers cannot mutate the shared configuration array
        return (String[]) ArrayUtils.clone(descriptionFields);
    }

    /**
     * Fills in the feed and entry-level metadata from DSpace objects.
     *
     * @param request current servlet request (may be null; then dspace.url is used)
     * @param dso scope object (Community or Collection), or null for whole site
     * @param items the items to render as feed entries (non-Item entries skipped)
     * @param labels localized strings keyed by the MSG_* constants
     */
    public void populate(HttpServletRequest request, DSpaceObject dso,
                         DSpaceObject items[], Map<String, String> labels)
    {
        String logoURL = null;
        String objectURL = null;
        String defaultTitle = null;

        // dso is null for the whole site, or a search without scope
        if (dso == null)
        {
            defaultTitle = ConfigurationManager.getProperty("dspace.name");
            feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
            objectURL = resolveURL(request, null);
            logoURL = ConfigurationManager.getProperty("webui.feed.logo.url");
        }
        else
        {
            Bitstream logo = null;
            if (dso.getType() == Constants.COLLECTION)
            {
                Collection col = (Collection) dso;
                defaultTitle = col.getMetadata("name");
                feed.setDescription(col.getMetadata("short_description"));
                logo = col.getLogo();
            }
            else if (dso.getType() == Constants.COMMUNITY)
            {
                Community comm = (Community) dso;
                defaultTitle = comm.getMetadata("name");
                feed.setDescription(comm.getMetadata("short_description"));
                logo = comm.getLogo();
            }
            objectURL = resolveURL(request, dso);
            if (logo != null)
            {
                logoURL = urlOfBitstream(request, logo);
            }
        }

        // prefer an explicitly supplied feed title, else the scope's own name
        // (single lookup instead of containsKey + localize, same result for
        // the non-null values these label maps carry)
        String feedTitle = labels.get(MSG_FEED_TITLE);
        feed.setTitle(feedTitle != null ? feedTitle : defaultTitle);
        feed.setLink(objectURL);
        feed.setPublishedDate(new Date());
        feed.setUri(objectURL);

        // add logo if we found one:
        if (logoURL != null)
        {
            // we use the path to the logo for this, the logo itself cannot
            // be contained in the rdf. Not all RSS-viewers show this logo.
            SyndImage image = new SyndImageImpl();
            image.setLink(objectURL);
            image.setTitle(localize(labels, MSG_LOGO_TITLE));
            image.setUrl(logoURL);
            feed.setImage(image);
        }

        // add entries for items
        if (items != null)
        {
            List<SyndEntry> entries = new ArrayList<SyndEntry>();
            for (DSpaceObject itemDSO : items)
            {
                if (itemDSO.getType() != Constants.ITEM)
                {
                    continue;
                }
                Item item = (Item) itemDSO;
                boolean hasDate = false;
                SyndEntry entry = new SyndEntryImpl();
                entries.add(entry);

                String entryURL = resolveURL(request, item);
                entry.setLink(entryURL);
                entry.setUri(entryURL);

                String title = getOneDC(item, titleField);
                entry.setTitle(title == null ? localize(labels, MSG_UNTITLED) : title);

                // "published" date -- should be dc.date.issued
                String pubDate = getOneDC(item, dateField);
                if (pubDate != null)
                {
                    entry.setPublishedDate((new DCDate(pubDate)).toDate());
                    hasDate = true;
                }
                // date of last change to Item
                entry.setUpdatedDate(item.getLastModified());

                // assemble description from the configured field list.
                // StringBuilder: no cross-thread sharing, so the synchronized
                // StringBuffer buys nothing here.
                StringBuilder db = new StringBuilder();
                for (String df : descriptionFields)
                {
                    // Special Case: "(date)" in field name means render as date
                    // (index must be > 0: a bare "(date)" is not a field name)
                    boolean isDate = df.indexOf("(date)") > 0;
                    if (isDate)
                    {
                        df = df.replaceAll("\\(date\\)", "");
                    }

                    DCValue dcv[] = item.getMetadata(df);
                    if (dcv.length > 0)
                    {
                        String fieldLabel = labels.get(MSG_METADATA + df);
                        if (fieldLabel != null && fieldLabel.length() > 0)
                        {
                            db.append(fieldLabel).append(": ");
                        }
                        boolean first = true;
                        for (DCValue v : dcv)
                        {
                            if (first)
                            {
                                first = false;
                            }
                            else
                            {
                                db.append("; ");
                            }
                            db.append(isDate ? new DCDate(v.value).toString() : v.value);
                        }
                        db.append("\n");
                    }
                }
                if (db.length() > 0)
                {
                    SyndContent desc = new SyndContentImpl();
                    desc.setType("text/plain");
                    desc.setValue(db.toString());
                    entry.setDescription(desc);
                }

                // This gets the authors into an ATOM feed
                DCValue authors[] = item.getMetadata(authorField);
                if (authors.length > 0)
                {
                    List<SyndPerson> creators = new ArrayList<SyndPerson>();
                    for (DCValue author : authors)
                    {
                        SyndPerson sp = new SyndPersonImpl();
                        sp.setName(author.value);
                        creators.add(sp);
                    }
                    entry.setAuthors(creators);
                }

                // only add DC module if any DC fields are configured
                if (dcCreatorField != null || dcDateField != null
                        || dcDescriptionField != null)
                {
                    DCModule dc = new DCModuleImpl();
                    if (dcCreatorField != null)
                    {
                        DCValue dcAuthors[] = item.getMetadata(dcCreatorField);
                        if (dcAuthors.length > 0)
                        {
                            List<String> creators = new ArrayList<String>();
                            for (DCValue author : dcAuthors)
                            {
                                creators.add(author.value);
                            }
                            dc.setCreators(creators);
                        }
                    }
                    // only supply dc:date when the entry has no published date yet
                    if (dcDateField != null && !hasDate)
                    {
                        DCValue v[] = item.getMetadata(dcDateField);
                        if (v.length > 0)
                        {
                            dc.setDate((new DCDate(v[0].value)).toDate());
                        }
                    }
                    if (dcDescriptionField != null)
                    {
                        DCValue v[] = item.getMetadata(dcDescriptionField);
                        if (v.length > 0)
                        {
                            StringBuilder descs = new StringBuilder();
                            for (DCValue d : v)
                            {
                                if (descs.length() > 0)
                                {
                                    descs.append("\n\n");
                                }
                                descs.append(d.value);
                            }
                            dc.setDescription(descs.toString());
                        }
                    }
                    entry.getModules().add(dc);
                }
            }
            feed.setEntries(entries);
        }
    }

    /**
     * Sets the feed type for XML delivery, e.g. "rss_1.0", "atom_1.0"
     * Must match one of ROME's configured generators, see rome.properties
     * (currently rss_1.0, rss_2.0, atom_1.0, atom_0.3)
     */
    public void setType(String feedType)
    {
        feed.setFeedType(feedType);
        // XXX FIXME: workaround ROME 1.0 bug, it puts invalid image element in rss1.0
        if ("rss_1.0".equals(feedType))
        {
            feed.setImage(null);
        }
    }

    /**
     * @return the feed we built as DOM Document
     */
    public Document outputW3CDom() throws FeedException
    {
        try
        {
            SyndFeedOutput feedWriter = new SyndFeedOutput();
            return feedWriter.outputW3CDom(feed);
        }
        catch (FeedException e)
        {
            log.error(e);
            throw e;
        }
    }

    /**
     * @return the feed we built as serialized XML string
     */
    public String outputString() throws FeedException
    {
        SyndFeedOutput feedWriter = new SyndFeedOutput();
        return feedWriter.outputString(feed);
    }

    /**
     * send the output to designated Writer
     */
    public void output(java.io.Writer writer) throws FeedException, IOException
    {
        SyndFeedOutput feedWriter = new SyndFeedOutput();
        feedWriter.output(feed, writer);
    }

    /**
     * Add a ROME plugin module (e.g. for OpenSearch) at the feed level.
     */
    public void addModule(Module m)
    {
        feed.getModules().add(m);
    }

    // utility to get config property with default value when not set.
    private static String getDefaultedConfiguration(String key, String dfl)
    {
        String result = ConfigurationManager.getProperty(key);
        return (result == null) ? dfl : result;
    }

    // returns absolute URL to download content of bitstream (which might not belong to any Item)
    private String urlOfBitstream(HttpServletRequest request, Bitstream logo)
    {
        String name = logo.getName();
        return resolveURL(request, null)
                + (uiType.equalsIgnoreCase(UITYPE_XMLUI) ? "/bitstream/id/" : "/retrieve/")
                + logo.getID() + "/" + (name == null ? "" : name);
    }

    /**
     * Return a url to the DSpace object, either use the official
     * handle for the item or build a url based upon the current server.
     *
     * If the dspaceobject is null then a local url to the repository is generated.
     *
     * @param request current request, may be null (then dspace.url config is used)
     * @param dso The object to reference, null if to the repository.
     * @return absolute URL for the object or repository root
     */
    private String resolveURL(HttpServletRequest request, DSpaceObject dso)
    {
        // If no object given then just link to the whole repository,
        // since no official handle exists so we have to use local resolution.
        if (dso == null)
        {
            if (baseURL == null)
            {
                if (request == null)
                {
                    baseURL = ConfigurationManager.getProperty("dspace.url");
                }
                else
                {
                    baseURL = (request.isSecure()) ? "https://" : "http://";
                    baseURL += ConfigurationManager.getProperty("dspace.hostname");
                    baseURL += ":" + request.getServerPort();
                    baseURL += request.getContextPath();
                }
            }
            return baseURL;
        }
        // return a link to handle in repository
        else if (ConfigurationManager.getBooleanProperty("webui.feed.localresolve"))
        {
            return resolveURL(request, null) + "/handle/" + dso.getHandle();
        }
        // link to the Handle server or other persistent URL source
        else
        {
            return HandleManager.getCanonicalForm(dso.getHandle());
        }
    }

    // retrieve text for localization key, or mark untranslated.
    // Single map lookup instead of containsKey()+get().
    private String localize(Map<String, String> labels, String s)
    {
        String value = labels.get(s);
        return (value != null) ? value : ("Untranslated:" + s);
    }

    // spoonful of syntactic sugar when we only need first value
    private String getOneDC(Item item, String field)
    {
        DCValue dcv[] = item.getMetadata(field);
        return (dcv.length > 0) ? dcv[0].value : null;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.util.Map;
import java.io.Serializable;

/**
 * Class representing configuration for a single step within an Item Submission
 * Process. In other words, this is a single step in the SubmissionConfig class.
 * This class represents the structure of a single 'step' node in the
 * item-submission.xml configuration file.
 *
 * Note: Implements Serializable as it will be saved to the current session during submission.
 * Please ensure that nothing is added to this class that isn't also serializable
 *
 * @see org.dspace.app.util.SubmissionConfigReader
 * @see org.dspace.app.util.SubmissionConfig
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class SubmissionStepConfig implements Serializable
{
    /*
     * The identifier for the Select Collection step
     */
    public static final String SELECT_COLLECTION_STEP = "collection";

    /*
     * The identifier for the Completion step
     */
    public static final String COMPLETE_STEP = "complete";

    /**
     * the id for this step ('id' only exists if this step is defined in the
     * <step-definitions> section)
     */
    private String id = null;

    /** the heading for this step */
    private String heading = null;

    /** the name of the java processing class for this step */
    private String processingClassName = null;

    /** whether or not this step is editable during workflow (default=true) */
    private boolean workflowEditable = true;

    /**
     * The full name of the JSP-UI binding class for this step. This field is
     * ONLY used by the JSP-UI.
     **/
    private String jspBindingClassName = null;

    /**
     * The full name of the Manakin XML-UI Transformer class which will generate
     * the necessary DRI for displaying this class in Manakin. This field is
     * ONLY used by the Manakin XML-UI.
     */
    private String xmlBindingClassName = null;

    /** The number of this step in the current SubmissionConfig */
    private int number = -1;

    /**
     * Class constructor for creating an empty SubmissionStepConfig object
     */
    public SubmissionStepConfig()
    {
        // nothing to initialize; all fields keep their declared defaults
    }

    /**
     * Class constructor for creating a SubmissionStepConfig object based on the
     * contents of a HashMap initialized by the SubmissionConfig object.
     *
     * @param stepMap
     *            the HashMap containing all required information about this
     *            step
     */
    public SubmissionStepConfig(Map<String, String> stepMap)
    {
        // copy the simple string properties straight out of the map
        id = stepMap.get("id");
        heading = stepMap.get("heading");
        processingClassName = stepMap.get("processing-class");
        jspBindingClassName = stepMap.get("jspui-binding");
        xmlBindingClassName = stepMap.get("xmlui-binding");

        // workflow-editable defaults to true; only a non-empty value overrides it
        String editableFlag = stepMap.get("workflow-editable");
        if (editableFlag != null && editableFlag.length() > 0)
        {
            workflowEditable = Boolean.parseBoolean(editableFlag);
        }
    }

    /**
     * Get the ID for this step. An ID is only defined if the step exists in the
     * <step-definitions> section. This ID field is used to reference special
     * steps (like the required step with id="collection")
     *
     * @return the step ID
     */
    public String getId()
    {
        return id;
    }

    /**
     * Get the heading for this step. This can either be a property from
     * Messages.properties, or the actual heading text. If this "heading"
     * contains a period(.) it is assumed to reference Messages.properties.
     *
     * @return the heading
     */
    public String getHeading()
    {
        return heading;
    }

    /**
     * Get the class which handles all processing for this step.
     * <p>
     * This class must extend the org.dspace.submit.AbstractProcessingStep class,
     * and provide processing for BOTH the JSP-UI and XML-UI
     *
     * @return the class's full class path (e.g.
     *         "org.dspace.submit.step.MySampleStep")
     */
    public String getProcessingClassName()
    {
        return processingClassName;
    }

    /**
     * Retrieve the full class name of the Manakin Transformer which will
     * generate this step's DRI, for display in Manakin XML-UI.
     * <P>
     * This class must extend the
     * org.dspace.app.xmlui.aspect.submission.StepTransformer class.
     * <P>
     * This property is only used by the Manakin XML-UI, and therefore is not
     * relevant if you are using the JSP-UI.
     *
     * @return the full java class name of the Transformer to use for this step
     */
    public String getXMLUIClassName()
    {
        return xmlBindingClassName;
    }

    /**
     * Retrieve the full class name of the JSP-UI "binding" class which will
     * initialize and call the necessary JSPs for display in the JSP-UI
     * <P>
     * This class must extend the
     * org.dspace.app.webui.submit.JSPStep class.
     * <P>
     * This property is only used by the JSP-UI, and therefore is not
     * relevant if you are using the XML-UI (aka. Manakin).
     *
     * @return the full java class name of the JSPStep to use for this step
     */
    public String getJSPUIClassName()
    {
        return jspBindingClassName;
    }

    /**
     * Get the number of this step in the current Submission process config.
     * Step numbers start with #0 (although step #0 is ALWAYS the special
     * "select collection" step)
     *
     * @return the number of this step in the current SubmissionConfig
     */
    public int getStepNumber()
    {
        return number;
    }

    /**
     * Sets the number of this step in the current Submission process config.
     * Step numbers start with #0 (although step #0 is ALWAYS the special
     * "select collection" step)
     *
     * @param stepNum
     *            the step number.
     */
    protected void setStepNumber(int stepNum)
    {
        this.number = stepNum;
    }

    /**
     * Whether or not this step is editable during workflow processing. If
     * "true", then this step will appear in the "Edit Metadata" stage of the
     * workflow process.
     *
     * @return if step is editable in a workflow process
     */
    public boolean isWorkflowEditable()
    {
        return workflowEditable;
    }

    /**
     * Whether or not this step is visible within the Progress Bar. A step is
     * only visible if it has been assigned a Heading, otherwise it's invisible
     *
     * @return if step is visible within the progress bar
     */
    public boolean isVisible()
    {
        // visible exactly when a non-empty heading was configured
        return heading != null && heading.length() > 0;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.io.File;
import java.util.*;
import org.xml.sax.SAXException;
import org.w3c.dom.*;
import javax.xml.parsers.*;

import org.dspace.content.MetadataSchema;
import org.dspace.core.ConfigurationManager;

/**
 * Submission form generator for DSpace. Reads and parses the installation
 * form definitions file, input-forms.xml, from the configuration directory.
 * A forms definition details the page and field layout of the metadata
 * collection pages used by the submission process. Each forms definition
 * starts with a unique name that gets associated with that form set.
 *
 * The file also specifies which collections use which form sets. At a
 * minimum, the definitions file must define a default mapping from the
 * placeholder collection #0 to the distinguished form 'default'. Any
 * collections that use a custom form set are listed paired with the name
 * of the form set they use.
 *
 * The definitions file also may contain sets of value pairs. Each value pair
 * will contain one string that the user reads, and a paired string that will
 * supply the value stored in the database if its sibling display value gets
 * selected from a choice list.
 *
 * @author Brian S. Hughes
 * @version $Revision: 5844 $
 */
public class DCInputsReader
{
    /**
     * The ID of the default collection. Will never be the ID of a named
     * collection
     */
    public static final String DEFAULT_COLLECTION = "default";

    /** Name of the form definition XML file */
    static final String FORM_DEF_FILE = "input-forms.xml";

    /** Keyname for storing dropdown value-pair set name */
    static final String PAIR_TYPE_NAME = "value-pairs-name";

    /** The fully qualified pathname of the form definition XML file */
    private String defsFile = ConfigurationManager.getProperty("dspace.dir")
            + File.separator + "config" + File.separator + FORM_DEF_FILE;

    /**
     * Reference to the collections to forms map, computed from the forms
     * definition file
     */
    private Map<String, String> whichForms = null;

    /**
     * Reference to the forms definitions map, computed from the forms
     * definition file
     */
    private Map<String, List<List<Map<String, String>>>> formDefns = null;

    /**
     * Reference to the value-pairs map, computed from the forms definition file
     */
    private Map<String, List<String>> valuePairs = null; // Holds display/storage pairs

    /**
     * Mini-cache of last DCInputSet requested. If submissions are not typically
     * form-interleaved, there will be a modest win.
     */
    private DCInputSet lastInputSet = null;

    /**
     * Parse an XML encoded submission forms template file, and create a hashmap
     * containing all the form information. This hashmap will contain three top
     * level structures: a map between collections and forms, the definition for
     * each page of each form, and lists of pairs of values that populate
     * selection boxes.
     */
    public DCInputsReader()
            throws DCInputsReaderException
    {
        buildInputs(defsFile);
    }

    /**
     * As above, but parse the file at the given path instead of the
     * configured default location.
     *
     * @param fileName full path of the form definition file to parse
     */
    public DCInputsReader(String fileName)
            throws DCInputsReaderException
    {
        buildInputs(fileName);
    }

    /**
     * Parse the given file and populate whichForms / formDefns / valuePairs.
     *
     * @throws DCInputsReaderException wrapping any parse or validation failure
     */
    private void buildInputs(String fileName)
            throws DCInputsReaderException
    {
        whichForms = new HashMap<String, String>();
        formDefns = new HashMap<String, List<List<Map<String, String>>>>();
        valuePairs = new HashMap<String, List<String>>();

        String uri = "file:" + new File(fileName).getAbsolutePath();

        try
        {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setValidating(false);
            factory.setIgnoringComments(true);
            factory.setIgnoringElementContentWhitespace(true);
            // harden against XXE: the forms file never needs external entities
            factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
            factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);

            DocumentBuilder db = factory.newDocumentBuilder();
            Document doc = db.parse(uri);
            doNodes(doc);
            checkValues();
        }
        catch (FactoryConfigurationError fe)
        {
            throw new DCInputsReaderException("Cannot create Submission form parser", fe);
        }
        catch (DCInputsReaderException e)
        {
            // already our own exception type (from doNodes/checkValues):
            // rethrow as-is instead of double-wrapping and mangling the message
            throw e;
        }
        catch (Exception e)
        {
            // preserve the cause instead of flattening it into the message only
            throw new DCInputsReaderException("Error creating submission forms: " + e, e);
        }
    }

    /** @return iterator over the names of all configured value-pair sets */
    public Iterator<String> getPairsNameIterator()
    {
        return valuePairs.keySet().iterator();
    }

    /**
     * @param name value-pairs set name
     * @return flat list of alternating display/storage strings, or null if unknown
     */
    public List<String> getPairs(String name)
    {
        return valuePairs.get(name);
    }

    /**
     * Returns the set of DC inputs used for a particular collection, or the
     * default set if no inputs defined for the collection
     *
     * @param collectionHandle
     *            collection's unique Handle
     * @return DC input set
     * @throws DCInputsReaderException
     *             if no default set defined
     */
    public DCInputSet getInputs(String collectionHandle)
            throws DCInputsReaderException
    {
        String formName = whichForms.get(collectionHandle);
        if (formName == null)
        {
            formName = whichForms.get(DEFAULT_COLLECTION);
        }
        if (formName == null)
        {
            throw new DCInputsReaderException("No form designated as default");
        }
        // check mini-cache, and return if match
        if (lastInputSet != null && lastInputSet.getFormName().equals(formName))
        {
            return lastInputSet;
        }
        // cache miss - construct new DCInputSet
        List<List<Map<String, String>>> pages = formDefns.get(formName);
        if (pages == null)
        {
            throw new DCInputsReaderException("Missing the " + formName + " form");
        }
        lastInputSet = new DCInputSet(formName, pages, valuePairs);
        return lastInputSet;
    }

    /**
     * Return the number of pages the inputs span for a desginated collection
     * @param collectionHandle collection's unique Handle
     * @return number of pages of input
     * @throws DCInputsReaderException if no default set defined
     */
    public int getNumberInputPages(String collectionHandle)
            throws DCInputsReaderException
    {
        return getInputs(collectionHandle).getNumberPages();
    }

    /**
     * Process the top level child nodes in the passed top-level node. These
     * should correspond to the collection-form maps, the form definitions, and
     * the display/storage word pairs.
     */
    private void doNodes(Node n)
            throws SAXException, DCInputsReaderException
    {
        if (n == null)
        {
            return;
        }
        Node e = getElement(n);
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        boolean foundMap = false;
        boolean foundDefs = false;
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            if ((nd == null) || isEmptyTextNode(nd))
            {
                continue;
            }
            String tagName = nd.getNodeName();
            if (tagName.equals("form-map"))
            {
                processMap(nd);
                foundMap = true;
            }
            else if (tagName.equals("form-definitions"))
            {
                processDefinition(nd);
                foundDefs = true;
            }
            else if (tagName.equals("form-value-pairs"))
            {
                processValuePairs(nd);
            }
            // Ignore unknown nodes
        }
        if (!foundMap)
        {
            throw new DCInputsReaderException("No collection to form map found");
        }
        if (!foundDefs)
        {
            throw new DCInputsReaderException("No form definition found");
        }
    }

    /**
     * Process the form-map section of the XML file.
     * Each element looks like:
     *   <name-map collection-handle="hdl" form-name="name" />
     * Extract the collection handle and form name, put name in hashmap keyed
     * by the collection handle.
     */
    private void processMap(Node e)
            throws SAXException
    {
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            if (nd.getNodeName().equals("name-map"))
            {
                String id = getAttribute(nd, "collection-handle");
                String value = getAttribute(nd, "form-name");
                String content = getValue(nd);
                if (id == null)
                {
                    throw new SAXException("name-map element is missing collection-handle attribute");
                }
                if (value == null)
                {
                    throw new SAXException("name-map element is missing form-name attribute");
                }
                if (content != null && content.length() > 0)
                {
                    throw new SAXException("name-map element has content, it should be empty.");
                }
                whichForms.put(id, value);
            } // ignore any child node that isn't a "name-map"
        }
    }

    /**
     * Process the form-definitions section of the XML file. Each element is
     * formed thusly: <form name="formname">...pages...</form> Each pages
     * subsection is formed: <page number="#"> ...fields... </page> Each field
     * is formed from: dc-element, dc-qualifier, label, hint, input-type name,
     * required text, and repeatable flag.
     */
    private void processDefinition(Node e)
            throws SAXException, DCInputsReaderException
    {
        int numForms = 0;
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            // process each form definition
            if (nd.getNodeName().equals("form"))
            {
                numForms++;
                String formName = getAttribute(nd, "name");
                if (formName == null)
                {
                    throw new SAXException("form element has no name attribute");
                }
                List<List<Map<String, String>>> pages =
                        new ArrayList<List<Map<String, String>>>(); // the form contains pages
                formDefns.put(formName, pages);
                NodeList pl = nd.getChildNodes();
                int lenpg = pl.getLength();
                for (int j = 0; j < lenpg; j++)
                {
                    Node npg = pl.item(j);
                    // process each page definition
                    if (npg.getNodeName().equals("page"))
                    {
                        String pgNum = getAttribute(npg, "number");
                        if (pgNum == null)
                        {
                            throw new SAXException("Form " + formName + " has no identified pages");
                        }
                        List<Map<String, String>> page = new ArrayList<Map<String, String>>();
                        pages.add(page);
                        NodeList flds = npg.getChildNodes();
                        int lenflds = flds.getLength();
                        for (int k = 0; k < lenflds; k++)
                        {
                            Node nfld = flds.item(k);
                            if (nfld.getNodeName().equals("field"))
                            {
                                // process each field definition
                                Map<String, String> field = new HashMap<String, String>();
                                page.add(field);
                                processPageParts(formName, pgNum, nfld, field);
                                // detect a duplicate schema.element.qualifier in this form
                                String error = checkForDups(formName, field, pages);
                                if (error != null)
                                {
                                    throw new SAXException(error);
                                }
                            }
                        }
                    } // ignore any child that is not a 'page'
                }
                // sanity check number of pages
                if (pages.size() < 1)
                {
                    throw new DCInputsReaderException("Form " + formName + " has no pages");
                }
            }
        }
        if (numForms == 0)
        {
            throw new DCInputsReaderException("No form definition found");
        }
    }

    /**
     * Process parts of a field
     * At the end, make sure that input-types 'qualdrop_value' and
     * 'twobox' are marked repeatable. Complain if dc-element, label,
     * or input-type are missing.
     */
    private void processPageParts(String formName, String page, Node n, Map<String, String> field)
            throws SAXException
    {
        NodeList nl = n.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            if (!isEmptyTextNode(nd))
            {
                String tagName = nd.getNodeName();
                String value = getValue(nd);
                field.put(tagName, value);
                if (tagName.equals("input-type"))
                {
                    // choice-style inputs must name their value-pairs set
                    if (value.equals("dropdown")
                            || value.equals("qualdrop_value")
                            || value.equals("list"))
                    {
                        String pairTypeName = getAttribute(nd, PAIR_TYPE_NAME);
                        if (pairTypeName == null)
                        {
                            throw new SAXException("Form " + formName + ", field "
                                    + field.get("dc-element") + "." + field.get("dc-qualifier")
                                    + " has no name attribute");
                        }
                        else
                        {
                            field.put(PAIR_TYPE_NAME, pairTypeName);
                        }
                    }
                }
                else if (tagName.equals("vocabulary"))
                {
                    String closedVocabularyString = getAttribute(nd, "closed");
                    field.put("closedVocabulary", closedVocabularyString);
                }
            }
        }
        // check that all required fields are present
        String missing = null;
        if (field.get("dc-element") == null)
        {
            missing = "dc-element";
        }
        if (field.get("label") == null)
        {
            missing = "label";
        }
        if (field.get("input-type") == null)
        {
            missing = "input-type";
        }
        if (missing != null)
        {
            String msg = "Required field " + missing + " missing on page " + page + " of form " + formName;
            throw new SAXException(msg);
        }
        String type = field.get("input-type");
        if (type.equals("twobox") || type.equals("qualdrop_value"))
        {
            String rpt = field.get("repeatable");
            if ((rpt == null)
                    || ((!rpt.equalsIgnoreCase("yes")) && (!rpt.equalsIgnoreCase("true"))))
            {
                String msg = "The field \'" + field.get("label") + "\' must be repeatable";
                throw new SAXException(msg);
            }
        }
    }

    /**
     * Check that this is the only field with the name dc-element.dc-qualifier
     * If there is a duplicate, return an error message, else return null;
     */
    private String checkForDups(String formName, Map<String, String> field,
            List<List<Map<String, String>>> pages)
    {
        int matches = 0;
        String err = null;
        String schema = field.get("dc-schema");
        String elem = field.get("dc-element");
        String qual = field.get("dc-qualifier");
        if ((schema == null) || (schema.equals("")))
        {
            schema = MetadataSchema.DC_SCHEMA;
        }
        String schemaTest;

        for (int i = 0; i < pages.size(); i++)
        {
            List<Map<String, String>> pg = pages.get(i);
            for (int j = 0; j < pg.size(); j++)
            {
                Map<String, String> fld = pg.get(j);
                if ((fld.get("dc-schema") == null) || ((fld.get("dc-schema")).equals("")))
                {
                    schemaTest = MetadataSchema.DC_SCHEMA;
                }
                else
                {
                    schemaTest = fld.get("dc-schema");
                }
                // Are the schema and element the same? If so, check the qualifier
                if (((fld.get("dc-element")).equals(elem)) && (schemaTest.equals(schema)))
                {
                    String ql = fld.get("dc-qualifier");
                    if (qual != null)
                    {
                        if ((ql != null) && ql.equals(qual))
                        {
                            matches++;
                        }
                    }
                    else if (ql == null)
                    {
                        matches++;
                    }
                }
            }
        }
        // matches includes the field itself, so >1 means a true duplicate
        if (matches > 1)
        {
            err = "Duplicate field " + schema + "." + elem + "." + qual
                    + " detected in form " + formName;
        }
        return err;
    }

    /**
     * Process the form-value-pairs section of the XML file.
     * Each element is formed thusly:
     *   <value-pairs name="..." dc-term="...">
     *     <pair>
     *       <display>displayed name-</display>
     *       <storage>stored name</storage>
     *     </pair>
     * For each value-pairs element, create a new vector, and extract all
     * the pairs contained within it. Put the display and storage values,
     * respectively, in the next slots in the vector. Store the vector
     * in the passed in hashmap.
     */
    private void processValuePairs(Node e)
            throws SAXException
    {
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            String tagName = nd.getNodeName();
            // process each value-pairs set
            if (tagName.equals("value-pairs"))
            {
                String pairsName = getAttribute(nd, PAIR_TYPE_NAME);
                String dcTerm = getAttribute(nd, "dc-term");
                if (pairsName == null)
                {
                    String errString = "Missing name attribute for value-pairs for DC term " + dcTerm;
                    throw new SAXException(errString);
                }
                List<String> pairs = new ArrayList<String>();
                valuePairs.put(pairsName, pairs);
                NodeList cl = nd.getChildNodes();
                int lench = cl.getLength();
                for (int j = 0; j < lench; j++)
                {
                    Node nch = cl.item(j);
                    String display = null;
                    String storage = null;
                    if (nch.getNodeName().equals("pair"))
                    {
                        NodeList pl = nch.getChildNodes();
                        int plen = pl.getLength();
                        for (int k = 0; k < plen; k++)
                        {
                            Node vn = pl.item(k);
                            String vName = vn.getNodeName();
                            if (vName.equals("displayed-value"))
                            {
                                display = getValue(vn);
                            }
                            else if (vName.equals("stored-value"))
                            {
                                storage = getValue(vn);
                                if (storage == null)
                                {
                                    // an empty <stored-value> stores the empty string
                                    storage = "";
                                }
                            } // ignore any children that aren't 'display' or 'storage'
                        }
                        pairs.add(display);
                        pairs.add(storage);
                    } // ignore any children that aren't a 'pair'
                }
            } // ignore any children that aren't a 'value-pair'
        }
    }

    /**
     * Check that all referenced value-pairs are present
     * and field is consistent
     *
     * Throws DCInputsReaderException if detects a missing value-pair.
     */
    private void checkValues()
            throws DCInputsReaderException
    {
        // Step through every field of every page of every form
        Iterator<String> ki = formDefns.keySet().iterator();
        while (ki.hasNext())
        {
            String idName = ki.next();
            List<List<Map<String, String>>> pages = formDefns.get(idName);
            for (int i = 0; i < pages.size(); i++)
            {
                List<Map<String, String>> page = pages.get(i);
                for (int j = 0; j < page.size(); j++)
                {
                    Map<String, String> fld = page.get(j);
                    // verify reference in certain input types
                    String type = fld.get("input-type");
                    if (type.equals("dropdown")
                            || type.equals("qualdrop_value")
                            || type.equals("list"))
                    {
                        String pairsName = fld.get(PAIR_TYPE_NAME);
                        List<String> v = valuePairs.get(pairsName);
                        if (v == null)
                        {
                            String errString = "Cannot find value pairs for " + pairsName;
                            throw new DCInputsReaderException(errString);
                        }
                    }
                    // if visibility restricted, make sure field is not required
                    String visibility = fld.get("visibility");
                    if (visibility != null && visibility.length() > 0)
                    {
                        String required = fld.get("required");
                        if (required != null && required.length() > 0)
                        {
                            String errString = "Field '" + fld.get("label")
                                    + "' is required but invisible";
                            throw new DCInputsReaderException(errString);
                        }
                    }
                }
            }
        }
    }

    // returns the first ELEMENT child of the given node, or null if none
    private Node getElement(Node nd)
    {
        NodeList nl = nd.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node n = nl.item(i);
            if (n.getNodeType() == Node.ELEMENT_NODE)
            {
                return n;
            }
        }
        return null;
    }

    // true if the node is a TEXT node containing only whitespace
    private boolean isEmptyTextNode(Node nd)
    {
        boolean isEmpty = false;
        if (nd.getNodeType() == Node.TEXT_NODE)
        {
            String text = nd.getNodeValue().trim();
            if (text.length() == 0)
            {
                isEmpty = true;
            }
        }
        return isEmpty;
    }

    /**
     * Returns the value of the node's attribute named <name>
     */
    private String getAttribute(Node e, String name)
    {
        NamedNodeMap attrs = e.getAttributes();
        int len = attrs.getLength();
        if (len > 0)
        {
            int i;
            for (i = 0; i < len; i++)
            {
                Node attr = attrs.item(i);
                if (name.equals(attr.getNodeName()))
                {
                    return attr.getNodeValue().trim();
                }
            }
        }
        // no such attribute
        return null;
    }

    /**
     * Returns the value found in the Text node (if any) in the
     * node list that's passed in.
     */
    private String getValue(Node nd)
    {
        NodeList nl = nd.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node n = nl.item(i);
            short type = n.getNodeType();
            if (type == Node.TEXT_NODE)
            {
                return n.getNodeValue().trim();
            }
        }
        // Didn't find a text node
        return null;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.io.File;
import java.util.*;

import javax.servlet.ServletException;

import org.xml.sax.SAXException;
import org.w3c.dom.*;
import javax.xml.parsers.*;

import org.apache.log4j.Logger;

import org.dspace.core.ConfigurationManager;

/**
 * Item Submission configuration generator for DSpace. Reads and parses the
 * installed submission process configuration file, item-submission.xml, from
 * the configuration directory. This submission process definition details the
 * ordering of the steps (and number of steps) that occur during the Item
 * Submission Process. There may be multiple Item Submission processes defined,
 * where each definition is assigned a unique name.
 *
 * The file also specifies which collections use which Item Submission process.
 * At a minimum, the definitions file must define a default mapping from the
 * placeholder collection # to the distinguished submission process 'default'.
 * Any collections that use a custom submission process are listed paired with
 * the name of the item submission process they use.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 *
 * @author Tim Donohue based on DCInputsReader by Brian S. Hughes
 * @version $Revision: 5844 $
 */
public class SubmissionConfigReader
{
    /**
     * The ID of the default collection. Will never be the ID of a named
     * collection
     */
    public static final String DEFAULT_COLLECTION = "default";

    /** Prefix of the item submission definition XML file */
    // NOTE(review): the "-ext" prefix means the file actually read is
    // 'item-submission-ext.xml', while every error message in this class
    // refers to 'item-submission.xml' -- confirm this divergence is a
    // deliberate local customization.
    static final String SUBMIT_DEF_FILE_PREFIX = "item-submission-ext";

    /** Suffix of the item submission definition XML file */
    static final String SUBMIT_DEF_FILE_SUFFIX = ".xml";

    /** log4j logger */
    private static Logger log = Logger.getLogger(SubmissionConfigReader.class);

    /**
     * The fully qualified pathname of the directory containing the Item
     * Submission Configuration file
     */
    private String configDir = ConfigurationManager.getProperty("dspace.dir")
            + File.separator + "config" + File.separator;

    /**
     * Hashmap which stores which submission process configuration is used by
     * which collection, computed from the item submission config file
     * (specifically, the 'submission-map' tag)
     */
    private Map<String, String> collectionToSubmissionConfig = null;

    /**
     * Reference to the global submission step definitions defined in the
     * "step-definitions" section
     */
    private Map<String, Map<String, String>> stepDefns = null;

    /**
     * Reference to the item submission definitions defined in the
     * "submission-definitions" section
     */
    private Map<String, List<Map<String, String>>> submitDefns = null;

    /**
     * Mini-cache of last SubmissionConfig object requested (so that we don't
     * always reload from scratch)
     */
    private SubmissionConfig lastSubmissionConfig = null;

    /**
     * Load Submission Configuration from the
     * item-submission.xml configuration file
     *
     * @throws ServletException if the configuration file cannot be parsed
     */
    public SubmissionConfigReader() throws ServletException
    {
        buildInputs(configDir + SUBMIT_DEF_FILE_PREFIX + SUBMIT_DEF_FILE_SUFFIX);
    }

    /**
     * Parse an XML encoded item submission configuration file.
     * <P>
     * Creates two main hashmaps:
     * <ul>
     * <li>Hashmap of Collection to Submission definition mappings -
     * defines which Submission process a particular collection uses
     * <li>Hashmap of all Submission definitions. List of all valid
     * Submission Processes by name.
* </ul> */ private void buildInputs(String fileName) throws ServletException { collectionToSubmissionConfig = new HashMap<String, String>(); submitDefns = new HashMap<String, List<Map<String, String>>>(); String uri = "file:" + new File(fileName).getAbsolutePath(); try { DocumentBuilderFactory factory = DocumentBuilderFactory .newInstance(); factory.setValidating(false); factory.setIgnoringComments(true); factory.setIgnoringElementContentWhitespace(true); DocumentBuilder db = factory.newDocumentBuilder(); Document doc = db.parse(uri); doNodes(doc); } catch (FactoryConfigurationError fe) { throw new ServletException( "Cannot create Item Submission Configuration parser", fe); } catch (Exception e) { throw new ServletException( "Error creating Item Submission Configuration: " + e); } } /** * Returns the Item Submission process config used for a particular * collection, or the default if none is defined for the collection * * @param collectionHandle * collection's unique Handle * @param isWorkflow * whether or not we are loading the submission process for a * workflow * @return the SubmissionConfig representing the item submission config * * @throws ServletException * if no default submission process configuration defined */ public SubmissionConfig getSubmissionConfig(String collectionHandle, boolean isWorkflow) throws ServletException { // get the name of the submission process config for this collection String submitName = collectionToSubmissionConfig .get(collectionHandle); if (submitName == null) { submitName = collectionToSubmissionConfig .get(DEFAULT_COLLECTION); } if (submitName == null) { throw new ServletException( "No item submission process configuration designated as 'default' in 'submission-map' section of 'item-submission.xml'."); } log.debug("Loading submission process config named '" + submitName + "'"); // check mini-cache, and return if match if (lastSubmissionConfig != null && lastSubmissionConfig.getSubmissionName().equals(submitName) && 
lastSubmissionConfig.isWorkflow() == isWorkflow) { log.debug("Found submission process config '" + submitName + "' in cache."); return lastSubmissionConfig; } // cache miss - construct new SubmissionConfig List<Map<String, String>> steps = submitDefns.get(submitName); if (steps == null) { throw new ServletException( "Missing the Item Submission process config '" + submitName + "' (or unable to load) from 'item-submission.xml'."); } log.debug("Submission process config '" + submitName + "' not in cache. Reloading from scratch."); lastSubmissionConfig = new SubmissionConfig(submitName, steps, isWorkflow); log.debug("Submission process config has " + lastSubmissionConfig.getNumberOfSteps() + " steps listed."); return lastSubmissionConfig; } /** * Returns a particular global step definition based on its ID. * <P> * Global step definitions are those defined in the <step-definitions> * section of the configuration file. * * @param stepID * step's identifier * * @return the SubmissionStepConfig representing the step * * @throws ServletException * if no default submission process configuration defined */ public SubmissionStepConfig getStepConfig(String stepID) throws ServletException { // We should already have the step definitions loaded if (stepDefns != null) { // retreive step info Map<String, String> stepInfo = stepDefns.get(stepID); if (stepInfo != null) { return new SubmissionStepConfig(stepInfo); } } return null; } /** * Process the top level child nodes in the passed top-level node. These * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. 
*/ private void doNodes(Node n) throws SAXException, ServletException { if (n == null) { return; } Node e = getElement(n); NodeList nl = e.getChildNodes(); int len = nl.getLength(); boolean foundMap = false; boolean foundStepDefs = false; boolean foundSubmitDefs = false; for (int i = 0; i < len; i++) { Node nd = nl.item(i); if ((nd == null) || isEmptyTextNode(nd)) { continue; } String tagName = nd.getNodeName(); if (tagName.equals("submission-map")) { processMap(nd); foundMap = true; } else if (tagName.equals("step-definitions")) { processStepDefinition(nd); foundStepDefs = true; } else if (tagName.equals("submission-definitions")) { processSubmissionDefinition(nd); foundSubmitDefs = true; } // Ignore unknown nodes } if (!foundMap) { throw new ServletException( "No collection to item submission map ('submission-map') found in 'item-submission.xml'"); } if (!foundStepDefs) { throw new ServletException("No 'step-definitions' section found in 'item-submission.xml'"); } if (!foundSubmitDefs) { throw new ServletException( "No 'submission-definitions' section found in 'item-submission.xml'"); } } /** * Process the submission-map section of the XML file. Each element looks * like: <name-map collection-handle="hdl" submission-name="name" /> Extract * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. 
*/ private void processMap(Node e) throws SAXException { NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); if (id == null) { throw new SAXException( "name-map element is missing collection-handle attribute in 'item-submission.xml'"); } if (value == null) { throw new SAXException( "name-map element is missing submission-name attribute in 'item-submission.xml'"); } if (content != null && content.length() > 0) { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } collectionToSubmissionConfig.put(id, value); } // ignore any child node that isn't a "name-map" } } /** * Process the "step-definition" section of the XML file. Each element is * formed thusly: <step id="unique-id"> ...step_fields... </step> The valid * step_fields are: heading, processing-servlet. * <P> * Extract the step information (from the step_fields) and place in a * HashMap whose key is the step's unique id. 
*/
    private void processStepDefinition(Node e) throws SAXException,
            ServletException
    {
        stepDefns = new HashMap<String, Map<String, String>>();

        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);
            // process each step definition
            if (nd.getNodeName().equals("step"))
            {
                // each global step must carry a unique, non-null 'id'
                String stepID = getAttribute(nd, "id");
                if (stepID == null)
                {
                    throw new SAXException(
                            "step element has no 'id' attribute in 'item-submission.xml', which is required in the 'step-definitions' section");
                }
                else if (stepDefns.containsKey(stepID))
                {
                    throw new SAXException(
                            "There are two step elements with the id '" + stepID
                                    + "' in 'item-submission.xml'");
                }

                Map<String, String> stepInfo = processStepChildNodes("step-definition", nd);

                stepDefns.put(stepID, stepInfo);
            }
            // ignore any child that is not a 'step'
        }

        // Sanity check number of step definitions
        if (stepDefns.size() < 1)
        {
            throw new ServletException(
                    "step-definition section has no steps! A step with id='collection' is required in 'item-submission.xml'!");
        }

        // Sanity check to see that the required "collection" step is defined
        if (!stepDefns.containsKey(SubmissionStepConfig.SELECT_COLLECTION_STEP))
        {
            throw new ServletException(
                    "The step-definition section is REQUIRED to have a step with id='"
                            + SubmissionStepConfig.SELECT_COLLECTION_STEP
                            + "' in 'item-submission.xml'! This step is used to ensure that a new item submission is assigned to a collection.");
        }

        // Sanity check to see that the required "complete" step is defined
        if (!stepDefns.containsKey(SubmissionStepConfig.COMPLETE_STEP))
        {
            throw new ServletException(
                    "The step-definition section is REQUIRED to have a step with id='"
                            + SubmissionStepConfig.COMPLETE_STEP
                            + "' in 'item-submission.xml'! This step is used to perform all processing necessary at the completion of the submission (e.g. starting workflow).");
        }
    }

    /**
     * Process the "submission-definition" section of the XML file.
Each element
     * is formed thusly: <submission-process name="submitName">...steps...</submit-process>
     * Each step subsection is formed: <step> ...step_fields... </step> (with
     * optional "id" attribute, to reference a step from the <step-definition>
     * section). The valid step_fields are: heading, class-name.
     * <P>
     * Extract the submission-process name and steps and place in a HashMap
     * whose key is the submission-process's unique name.
     */
    private void processSubmissionDefinition(Node e) throws SAXException,
            ServletException
    {
        int numSubmitProcesses = 0;
        List<String> submitNames = new ArrayList<String>();

        // find all child nodes of the 'submission-definition' node and loop
        // through
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        for (int i = 0; i < len; i++)
        {
            Node nd = nl.item(i);

            // process each 'submission-process' node
            if (nd.getNodeName().equals("submission-process"))
            {
                numSubmitProcesses++;
                // each process needs a unique, non-null name
                String submitName = getAttribute(nd, "name");
                if (submitName == null)
                {
                    throw new SAXException(
                            "'submission-process' element has no 'name' attribute in 'item-submission.xml'");
                }
                else if (submitNames.contains(submitName))
                {
                    throw new SAXException(
                            "There are two 'submission-process' elements with the name '"
                                    + submitName + "' in 'item-submission.xml'.");
                }
                submitNames.add(submitName);

                // the 'submission-process' definition contains steps
                List<Map<String, String>> steps = new ArrayList<Map<String, String>>();
                submitDefns.put(submitName, steps);

                // loop through all the 'step' nodes of the 'submission-process'
                NodeList pl = nd.getChildNodes();
                int lenStep = pl.getLength();
                for (int j = 0; j < lenStep; j++)
                {
                    Node nStep = pl.item(j);

                    // process each 'step' definition
                    if (nStep.getNodeName().equals("step"))
                    {
                        // check for an 'id' attribute
                        String stepID = getAttribute(nStep, "id");

                        Map<String, String> stepInfo;

                        // if this step has an id, load its information from the
                        // step-definition section (it must exist there)
                        if ((stepID != null) && (stepID.length() > 0))
                        {
                            if (stepDefns.containsKey(stepID))
                            {
                                // load the step information from the
                                // step-definition
                                stepInfo = stepDefns.get(stepID);
                            }
                            else
                            {
                                throw new SAXException(
                                        "The Submission process config named "
                                                + submitName
                                                + " contains a step with id="
                                                + stepID
                                                + ". There is no step with this 'id' defined in the 'step-definition' section of 'item-submission.xml'.");
                            }

                            // Ignore all children of a step element with an
                            // "id"
                        }
                        else
                        {
                            // get information about step from its children
                            // nodes
                            stepInfo = processStepChildNodes(
                                    "submission-process", nStep);
                        }

                        steps.add(stepInfo);

                    } // ignore any child that is not a 'step'
                }

                // sanity check number of steps
                if (steps.size() < 1)
                {
                    throw new ServletException(
                            "Item Submission process config named "
                                    + submitName
                                    + " has no steps defined in 'item-submission.xml'");
                }

                // ALL Item Submission processes MUST BEGIN with selecting a
                // Collection. So, automatically insert in the "collection" step
                // (from the 'step-definition' section)
                // Note: we already did a sanity check that this "collection"
                // step exists.
                steps.add(0, stepDefns
                        .get(SubmissionStepConfig.SELECT_COLLECTION_STEP));

                // ALL Item Submission processes MUST END with the
                // "Complete" processing step.
                // So, automatically append in the "complete" step
                // (from the 'step-definition' section)
                // Note: we already did a sanity check that this "complete"
                // step exists.
                steps.add(stepDefns
                        .get(SubmissionStepConfig.COMPLETE_STEP));
            }
        }

        if (numSubmitProcesses == 0)
        {
            throw new ServletException(
                    "No 'submission-process' elements/definitions found in 'item-submission.xml'");
        }
    }

    /**
     * Process the children of the "step" tag of the XML file. Returns a HashMap
     * of all the fields under that "step" tag, where the key is the field name,
     * and the value is the field value.
*
     */
    private Map<String, String> processStepChildNodes(String configSection,
            Node nStep) throws SAXException, ServletException
    {
        // Collect every non-empty child element as a (tag -> text) entry.
        Map<String, String> stepInfo = new HashMap<String, String>();

        NodeList fieldNodes = nStep.getChildNodes();
        int fieldCount = fieldNodes.getLength();
        for (int k = 0; k < fieldCount; k++)
        {
            Node fieldNode = fieldNodes.item(k);
            if (!isEmptyTextNode(fieldNode))
            {
                stepInfo.put(fieldNode.getNodeName(), getValue(fieldNode));
            }
        }

        // check for ID attribute & save to step info
        String stepID = getAttribute(nStep, "id");
        if (stepID != null && stepID.length() > 0)
        {
            stepInfo.put("id", stepID);
        }

        // look for REQUIRED 'step' information
        String missing = null;
        if (stepInfo.get("processing-class") == null)
        {
            missing = "'processing-class'";
        }
        if (missing != null)
        {
            String msg = "Required field " + missing
                    + " missing in a 'step' in the " + configSection
                    + " of the item submission configuration file ('item-submission.xml')";
            throw new SAXException(msg);
        }

        return stepInfo;
    }

    /** Returns the first child of the given node that is an element, or null. */
    private Node getElement(Node nd)
    {
        NodeList nl = nd.getChildNodes();
        for (int i = 0, len = nl.getLength(); i < len; i++)
        {
            Node n = nl.item(i);
            if (n.getNodeType() == Node.ELEMENT_NODE)
            {
                return n;
            }
        }
        return null;
    }

    /** True if the node is a text node containing only whitespace. */
    private boolean isEmptyTextNode(Node nd)
    {
        return nd.getNodeType() == Node.TEXT_NODE
                && nd.getNodeValue().trim().length() == 0;
    }

    /**
     * Returns the value of the node's attribute named <name>, or null when
     * no such attribute exists.
     */
    private String getAttribute(Node e, String name)
    {
        NamedNodeMap attrs = e.getAttributes();
        for (int i = 0, len = attrs.getLength(); i < len; i++)
        {
            Node attr = attrs.item(i);
            if (name.equals(attr.getNodeName()))
            {
                return attr.getNodeValue().trim();
            }
        }
        // no such attribute
        return null;
    }

    /**
     * Returns the value found in the first Text node (if any) among the
     * children of the node passed in, or null when there is none.
     */
    private String getValue(Node nd)
    {
        NodeList nl = nd.getChildNodes();
        for (int i = 0, len = nl.getLength(); i < len; i++)
        {
            Node n = nl.item(i);
            if (n.getNodeType() == Node.TEXT_NODE)
            {
                return n.getNodeValue().trim();
            }
        }
        // Didn't find a text node
        return null;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.net.URLEncoder;
import java.io.UnsupportedEncodingException;

import org.w3c.dom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.output.DOMOutputter;
import org.jdom.output.XMLOutputter;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.ConfigurationManager;
import org.dspace.search.QueryResults;

import com.sun.syndication.feed.module.opensearch.OpenSearchModule;
import com.sun.syndication.feed.module.opensearch.entity.OSQuery;
import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl;
import com.sun.syndication.io.FeedException;

/**
 * Utility Class with static methods for producing OpenSearch-compliant search results,
 * and the OpenSearch description document.
 * <p>
 * OpenSearch is a specification for describing and advertising search-engines
 * and their result formats. Commonly, RSS and Atom formats are used, which
 * the current implementation supports, as is HTML (used directly in browsers).
 * NB: this is baseline OpenSearch, no extensions currently supported.
 * </p>
 * <p>
 * The value of the "scope" parameter should either be absent (which means no
 * scope restriction), or the handle of a community or collection.
 * </p>
 *
 * @author Richard Rodgers
 */
public class OpenSearch
{
    private static final Logger log = Logger.getLogger(OpenSearch.class);

    /** are open search queries enabled? */
    private static boolean enabled = false;

    /** supported results formats */
    private static List<String> formats = null;

    /** Namespace used for the OpenSearch description document */
    private static final String osNs = "http://a9.com/-/spec/opensearch/1.1/";

    /** base search UI URL */
    private static String uiUrl = null;

    /** base search service URL */
    private static String svcUrl = null;

    static
    {
        enabled = ConfigurationManager.getBooleanProperty("websvc.opensearch.enable");
        svcUrl = ConfigurationManager.getProperty("dspace.url") + "/" +
                 ConfigurationManager.getProperty("websvc.opensearch.svccontext");
        uiUrl = ConfigurationManager.getProperty("dspace.url") + "/" +
                ConfigurationManager.getProperty("websvc.opensearch.uicontext");
        // read rest of config info if enabled
        formats = new ArrayList<String>();
        if (enabled)
        {
            String fmtsStr = ConfigurationManager.getProperty("websvc.opensearch.formats");
            if (fmtsStr != null)
            {
                for (String fmt : fmtsStr.split(","))
                {
                    formats.add(fmt);
                }
            }
        }
    }

    /**
     * Returns list of supported formats
     *
     * @return list of format names - 'rss', 'atom' or 'html'
     */
    public static List<String> getFormats()
    {
        // NOTE(review): this exposes the internal mutable list directly;
        // callers could modify it. Confirm no caller relies on that before
        // wrapping in Collections.unmodifiableList.
        return formats;
    }

    /**
     * Returns a mime-type associated with passed format
     *
     * @param format the results document format (rss, atom, html)
     * @return content-type mime-type
     */
    public static String getContentType(String format)
    {
        return "html".equals(format) ? "text/html" :
               "application/" + format + "+xml; charset=UTF-8";
    }

    /**
     * Returns the OpenSearch service document appropriate for given scope
     *
     * @param scope - null for entire repository, or handle of community or collection
     * @return document the service document
     * @throws IOException if the JDOM document cannot be converted to W3C DOM
     */
    public static Document getDescriptionDoc(String scope) throws IOException
    {
        return jDomToW3(getServiceDocument(scope));
    }

    /**
     * Returns OpenSearch Service Document as a string
     *
     * @param scope - null for entire repository, or handle of community or collection
     * @return service document as a string
     */
    public static String getDescription(String scope)
    {
        return new XMLOutputter().outputString(getServiceDocument(scope));
    }

    /**
     * Returns a formatted set of search results as a string
     *
     * @param format results format - html, rss or atom
     * @param query - the search query
     * @param qResults - the query results to be formatted
     * @param scope - search scope, null or community/collection handle
     * @param results the retrieved DSpace objects satisfying search
     * @param labels labels to apply - format specific
     * @return formatted search results
     * @throws IOException if the feed cannot be generated
     */
    public static String getResultsString(String format, String query, QueryResults qResults,
                                          DSpaceObject scope, DSpaceObject[] results,
                                          Map<String, String> labels) throws IOException
    {
        try
        {
            return getResults(format, query, qResults, scope, results, labels).outputString();
        }
        catch (FeedException e)
        {
            log.error(e.toString(), e);
            throw new IOException("Unable to generate feed", e);
        }
    }

    /**
     * Returns a formatted set of search results as a document
     *
     * @param format results format - html, rss or atom
     * @param query - the search query
     * @param qResults - the query results to be formatted
     * @param scope - search scope, null or community/collection handle
     * @param results the retrieved DSpace objects satisfying search
     * @param labels labels to apply - format specific
     * @return formatted search results
     * @throws IOException if the feed cannot be generated
     */
    public static Document getResultsDoc(String format, String query, QueryResults qResults,
                                         DSpaceObject scope, DSpaceObject[] results,
                                         Map<String, String> labels) throws IOException
    {
        try
        {
            return getResults(format, query, qResults, scope, results, labels).outputW3CDom();
        }
        catch (FeedException e)
        {
            log.error(e.toString(), e);
            throw new IOException("Unable to generate feed", e);
        }
    }

    /** Builds the syndication feed (with OpenSearch markup) for a result set. */
    private static SyndicationFeed getResults(String format, String query, QueryResults qResults,
                                              DSpaceObject scope, DSpaceObject[] results,
                                              Map<String, String> labels)
    {
        // Encode results in requested format (ROME feed-type identifiers)
        if ("rss".equals(format))
        {
            format = "rss_2.0";
        }
        else if ("atom".equals(format))
        {
            format = "atom_1.0";
        }

        SyndicationFeed feed = new SyndicationFeed(labels.get(SyndicationFeed.MSG_UITYPE));
        feed.populate(null, scope, results, labels);
        feed.setType(format);
        feed.addModule(openSearchMarkup(query, qResults));
        return feed;
    }

    /*
     * Generates the OpenSearch elements which are added to the RSS or Atom feeds as foreign markup
     * wrapped in a module
     *
     * @param query the search query
     * @param qRes the search results
     * @return module
     */
    private static OpenSearchModule openSearchMarkup(String query, QueryResults qRes)
    {
        OpenSearchModule osMod = new OpenSearchModuleImpl();
        osMod.setTotalResults(qRes.getHitCount());
        osMod.setStartIndex(qRes.getStart());
        osMod.setItemsPerPage(qRes.getPageSize());
        OSQuery osq = new OSQuery();
        osq.setRole("request");
        try
        {
            osq.setSearchTerms(URLEncoder.encode(query, "UTF-8"));
        }
        catch (UnsupportedEncodingException e)
        {
            // Cannot happen in practice: UTF-8 support is mandated by the
            // Java platform specification.
            log.error(e);
        }
        // NOTE(review): assumes qRes.getPageSize() > 0 -- a zero page size
        // would throw ArithmeticException here; confirm callers guarantee it.
        osq.setStartPage(1 + (qRes.getStart() / qRes.getPageSize()));
        osMod.addQuery(osq);
        return osMod;
    }

    /**
     * Returns as a document the OpenSearch service document
     *
     * @param scope - null for the entire repository, or a collection/community handle
     * @return Service Document
     */
    private static org.jdom.Document getServiceDocument(String scope)
    {
        Namespace ns = Namespace.getNamespace(osNs);
        Element root = new Element("OpenSearchDescription", ns);
        root.addContent(new Element("ShortName", ns).setText(ConfigurationManager.getProperty("websvc.opensearch.shortname")));
        root.addContent(new Element("LongName", ns).setText(ConfigurationManager.getProperty("websvc.opensearch.longname")));
        root.addContent(new Element("Description", ns).setText(ConfigurationManager.getProperty("websvc.opensearch.description")));
        root.addContent(new Element("InputEncoding", ns).setText("UTF-8"));
        root.addContent(new Element("OutputEncoding", ns).setText("UTF-8"));
        // optional elements
        String sample = ConfigurationManager.getProperty("websvc.opensearch.samplequery");
        if (sample != null && sample.length() > 0)
        {
            Element sq = new Element("Query", ns).setAttribute("role", "example");
            root.addContent(sq.setAttribute("searchTerms", sample));
        }
        String tags = ConfigurationManager.getProperty("websvc.opensearch.tags");
        if (tags != null && tags.length() > 0)
        {
            root.addContent(new Element("Tags", ns).setText(tags));
        }
        String contact = ConfigurationManager.getProperty("mail.admin");
        if (contact != null && contact.length() > 0)
        {
            root.addContent(new Element("Contact", ns).setText(contact));
        }
        String faviconUrl = ConfigurationManager.getProperty("websvc.opensearch.faviconurl");
        if (faviconUrl != null && faviconUrl.length() > 0)
        {
            String dim = String.valueOf(16);
            String type = faviconUrl.endsWith("ico") ? "image/vnd.microsoft.icon" : "image/png";
            Element fav = new Element("Image", ns).setAttribute("height", dim).setAttribute("width", dim).
                          setAttribute("type", type).setText(faviconUrl);
            root.addContent(fav);
        }
        // service URLs - one <Url> template per supported format
        for (String format : formats)
        {
            Element url = new Element("Url", ns).setAttribute("type", getContentType(format));
            // FIX(idiom): StringBuilder instead of StringBuffer -- the buffer
            // is method-local, so the synchronized StringBuffer was unneeded.
            StringBuilder template = new StringBuilder();
            if ("html".equals(format))
            {
                template.append(uiUrl);
            }
            else
            {
                template.append(svcUrl);
            }
            template.append("?query={searchTerms}");
            if (!"html".equals(format))
            {
                template.append("&start={startIndex?}&rpp={count?}&format=");
                template.append(format);
            }
            if (scope != null)
            {
                template.append("&scope=");
                template.append(scope);
            }
            url.setAttribute("template", template.toString());
            root.addContent(url);
        }
        return new org.jdom.Document(root);
    }

    /**
     * Converts a JDOM document to a W3C one
     * @param jdomDoc the JDOM document to convert
     * @return W3C Document object
     * @throws IOException if JDOM output fails
     */
    private static Document jDomToW3(org.jdom.Document jdomDoc) throws IOException
    {
        DOMOutputter domOut = new DOMOutputter();
        try
        {
            return domOut.output(jdomDoc);
        }
        catch (JDOMException jde)
        {
            throw new IOException("JDOM output exception", jde);
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.io.Serializable;

import org.apache.log4j.Logger;

/**
 * Class representing a single Item Submission config definition, organized into
 * steps. This class represents the structure of a single 'submission-process'
 * node in the item-submission.xml configuration file.
 *
 * Note: Implements Serializable as it will be saved to the current session during submission.
 * Please ensure that nothing is added to this class that isn't also serializable
 *
 * @see org.dspace.app.util.SubmissionConfigReader
 * @see org.dspace.app.util.SubmissionStepConfig
 *
 * @author Tim Donohue, based on DCInputSet by Brian S. Hughes
 * @version $Revision: 5844 $
 */
public class SubmissionConfig implements Serializable
{
    /** name of the item submission process */
    private String submissionName = null;

    /** the configuration classes for the steps in this submission process */
    private SubmissionStepConfig[] submissionSteps = null;

    /** whether or not this submission process is being used in a workflow */
    private boolean isWorkflow = false;

    /** log4j logger */
    private static Logger log = Logger.getLogger(SubmissionConfig.class);

    /**
     * Constructs a new Submission Configuration object, based on the XML
     * configuration file (item-submission.xml)
     *
     * @param submissionName
     *            the submission process name
     * @param steps
     *            the listing of step information used to build
     *            SubmissionStepConfig objects for this submission process
     * @param isWorkflowProcess
     *            whether this submission process is being used in a workflow or
     *            not. If it is a workflow process this may limit the steps that
     *            are available for editing.
     */
    public SubmissionConfig(String submissionName,
            List<Map<String, String>> steps, boolean isWorkflowProcess)
    {
        this.submissionName = submissionName;
        this.isWorkflow = isWorkflowProcess;

        // Build up the retained steps in order.
        List<SubmissionStepConfig> stepConfigs = new ArrayList<SubmissionStepConfig>();

        for (Map<String, String> stepInfo : steps)
        {
            SubmissionStepConfig step = new SubmissionStepConfig(stepInfo);

            // A workflow process only keeps steps flagged as editable in a
            // workflow; a plain submission keeps every step.
            if (!this.isWorkflow || step.isWorkflowEditable())
            {
                // set the number of the step (starts at 0) and add it
                step.setStepNumber(stepConfigs.size());
                stepConfigs.add(step);

                log.debug("Added step '" + step.getProcessingClassName()
                        + "' as step #" + step.getStepNumber()
                        + " of submission process " + submissionName);
            }
        }

        // Freeze the retained steps into the backing array of
        // SubmissionStepConfig objects.
        submissionSteps = stepConfigs
                .toArray(new SubmissionStepConfig[stepConfigs.size()]);
    }

    /**
     * Return the name of the item submission process definition
     *
     * @return the name of the submission process
     */
    public String getSubmissionName()
    {
        return submissionName;
    }

    /**
     * Return the number of steps in this submission process
     *
     * @return number of steps
     */
    public int getNumberOfSteps()
    {
        return submissionSteps.length;
    }

    /**
     * Return whether or not this submission process is being used in a
     * workflow.
     *
     * @return true, if it's a workflow process. false, otherwise.
     */
    public boolean isWorkflow()
    {
        return isWorkflow;
    }

    /**
     * Retrieve a particular Step configuration in this Item Submission Process
     * configuration. The first step is numbered "0" (although step #0 is the
     * implied "select collection" step).
     * <p>
     * If you want to retrieve the step after the "select collection" step, you
     * should retrieve step #1.
     *
     * If the specified step isn't found, null is returned.
     *
     * @param stepNum
     *            desired step to retrieve
     *
     * @return the SubmissionStepConfig object for the step, or null when out
     *         of range
     */
    public SubmissionStepConfig getStep(int stepNum)
    {
        // Equivalent bounds check written positively.
        if (stepNum >= 0 && stepNum < submissionSteps.length)
        {
            return submissionSteps[stepNum];
        }
        return null;
    }

    /**
     * Returns whether or not there are more steps which follow the specified
     * "stepNum". For example, if you specify stepNum=4, then this method checks
     * to see if there is a Step #5. The first step is numbered "0".
     *
     * @param stepNum
     *            the current step.
     *
     * @return true, if a step at "stepNum+1" exists. false, otherwise.
     */
    public boolean hasMoreSteps(int stepNum)
    {
        return getStep(stepNum + 1) != null;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.util; import org.dspace.core.ConfigurationManager; import org.dspace.storage.rdbms.DatabaseManager; import org.apache.log4j.Logger; import javax.servlet.ServletContextListener; import javax.servlet.ServletContextEvent; import java.beans.Introspector; import java.net.URL; import java.net.URLConnection; import java.sql.Driver; import java.sql.DriverManager; import java.util.Enumeration; /** * Class to initialize / cleanup resources used by DSpace when the web application * is started or stopped */ public class DSpaceContextListener implements ServletContextListener { private static Logger log = Logger.getLogger(DSpaceContextListener.class); /** * The DSpace config parameter, this is where the path to the DSpace * configuration file can be obtained */ public static final String DSPACE_CONFIG_PARAMETER = "dspace-config"; /** * Initialize any resources required by the application * @param event */ public void contextInitialized(ServletContextEvent event) { // On Windows, URL caches can cause problems, particularly with undeployment // So, here we attempt to disable them if we detect that we are running on Windows try { String osName = System.getProperty("os.name"); if (osName != null && osName.toLowerCase().contains("windows")) { URL url = new URL("http://localhost/"); URLConnection urlConn = url.openConnection(); urlConn.setDefaultUseCaches(false); } } // Any errors thrown in disabling the caches aren't significant to // the normal execution of the application, so we ignore them catch (RuntimeException e) { log.error(e.getMessage(), e); } catch (Exception e) { log.error(e.getMessage(), e); } // Paths to the various config files String dspaceConfig = null; /** * Stage 1 * * Locate the dspace config */ // first check the local per webapp 
parameter, then check the global parameter. dspaceConfig = event.getServletContext().getInitParameter(DSPACE_CONFIG_PARAMETER); // Finally, if no config parameter found throw an error if (dspaceConfig == null || "".equals(dspaceConfig)) { throw new IllegalStateException( "\n\nDSpace has failed to initialize. This has occurred because it was unable to determine \n" + "where the dspace.cfg file is located. The path to the configuration file should be stored \n" + "in a context variable, '"+DSPACE_CONFIG_PARAMETER+"', in the global context. \n" + "No context variable was found in either location.\n\n"); } /** * Stage 2 * * Load the dspace config. Also may load log4j configuration. * (Please rely on ConfigurationManager or Log4j to configure logging) * */ try { ConfigurationManager.loadConfig(dspaceConfig); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException( "\n\nDSpace has failed to initialize, during stage 2. Error while attempting to read the \n" + "DSpace configuration file (Path: '"+dspaceConfig+"'). \n" + "This has likely occurred because either the file does not exist, or it's permissions \n" + "are set incorrectly, or the path to the configuration file is incorrect. 
The path to \n" + "the DSpace configuration file is stored in a context variable, 'dspace-config', in \n" + "either the local servlet or global context.\n\n",e); } } /** * Clean up resources used by the application when stopped * * @param event */ public void contextDestroyed(ServletContextEvent event) { try { // Remove the database pool DatabaseManager.shutdown(); // Clean out the introspector Introspector.flushCaches(); // Remove any drivers registered by this classloader for (Enumeration e = DriverManager.getDrivers(); e.hasMoreElements();) { Driver driver = (Driver) e.nextElement(); if (driver.getClass().getClassLoader() == getClass().getClassLoader()) { DriverManager.deregisterDriver(driver); } } } catch (RuntimeException e) { log.error("Failed to cleanup ClassLoader for webapp", e); } catch (Exception e) { log.error("Failed to cleanup ClassLoader for webapp", e); } } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.util; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * Class representing all DC inputs required for a submission, organized into pages * * @author Brian S. Hughes, based on work by Jenny Toves, OCLC * @version $Revision: 5844 $ */ public class DCInputSet { /** name of the input set */ private String formName = null; /** the inputs ordered by page and row position */ private DCInput[][] inputPages = null; /** constructor */ public DCInputSet(String formName, List<List<Map<String, String>>> pages, Map<String, List<String>> listMap) { this.formName = formName; inputPages = new DCInput[pages.size()][]; for ( int i = 0; i < inputPages.length; i++ ) { List<Map<String, String>> page = pages.get(i); inputPages[i] = new DCInput[page.size()]; for ( int j = 0; j < inputPages[i].length; j++ ) { inputPages[i][j] = new DCInput(page.get(j), listMap); } } } /** * Return the name of the form that defines this input set * @return formName the name of the form */ public String getFormName() { return formName; } /** * Return the number of pages in this input set * @return number of pages */ public int getNumberPages() { return inputPages.length; } /** * Get all the rows for a page from the form definition * * @param pageNum desired page within set * @param addTitleAlternative flag to add the additional title row * @param addPublishedBefore flag to add the additional published info * * @return an array containing the page's displayable rows */ public DCInput[] getPageRows(int pageNum, boolean addTitleAlternative, boolean addPublishedBefore) { List<DCInput> filteredInputs = new ArrayList<DCInput>(); if ( pageNum < inputPages.length ) { for (int i = 0; i < inputPages[pageNum].length; i++ ) { DCInput input = 
inputPages[pageNum][i]; if (doField(input, addTitleAlternative, addPublishedBefore)) { filteredInputs.add(input); } } } // Convert list into an array DCInput[] inputArray = new DCInput[filteredInputs.size()]; return filteredInputs.toArray(inputArray); } /** * Does this set of inputs include an alternate title field? * * @return true if the current set has an alternate title field */ public boolean isDefinedMultTitles() { return isFieldPresent("title.alternative"); } /** * Does this set of inputs include the previously published fields? * * @return true if the current set has all the prev. published fields */ public boolean isDefinedPubBefore() { return ( isFieldPresent("date.issued") && isFieldPresent("identifier.citation") && isFieldPresent("publisher.null") ); } /** * Does the current input set define the named field? * Scan through every field in every page of the input set * * @return true if the current set has the named field */ public boolean isFieldPresent(String fieldName) { for (int i = 0; i < inputPages.length; i++) { DCInput[] pageInputs = inputPages[i]; for (int row = 0; row < pageInputs.length; row++) { String fullName = pageInputs[row].getElement() + "." + pageInputs[row].getQualifier(); if (fullName.equals(fieldName)) { return true; } } } return false; } private static boolean doField(DCInput dcf, boolean addTitleAlternative, boolean addPublishedBefore) { String rowName = dcf.getElement() + "." + dcf.getQualifier(); if ( rowName.equals("title.alternative") && ! addTitleAlternative ) { return false; } if (rowName.equals("date.issued") && ! addPublishedBefore ) { return false; } if (rowName.equals("publisher.null") && ! addPublishedBefore ) { return false; } if (rowName.equals("identifier.citation") && ! addPublishedBefore ) { return false; } return true; } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.util; import java.io.File; import java.io.IOException; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import org.apache.commons.lang.time.DateUtils; import org.apache.log4j.FileAppender; import org.apache.log4j.helpers.LogLog; import org.apache.log4j.spi.LoggingEvent; /** * Special log appender for log4j. Adds the current date (ie. year-mon) to * the end of the file name, so that rolling on to the next log is simply * a case of starting a new one - no renaming of old logs. * * This is advisable if you are using Windows, and have multiple applications * (ie. dspace, dspace-oai, dspace-sword) that all want to write to the same log file, * as each would otherwise try to rename the old files during rollover. * * An example log4j.properties (one log per month, retains three months of logs) * * log4j.rootCategory=INFO, A1 * log4j.appender.A1=org.dspace.app.util.DailyFileAppender * log4j.appender.A1.File=@@log.dir@@/dspace.log * log4j.appender.A1.DatePattern=yyyy-MM * log4j.appender.A1.MaxLogs=3 * log4j.appender.A1.layout=org.apache.log4j.PatternLayout * log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n * */ public class DailyFileAppender extends FileAppender { /** * The fixed date pattern to be used if one is not specified. */ private static final String DATE_PATTERN = "yyyy-MM-dd"; /** * The folder under which daily folders are created. This can be a absolute path * or relative path also. * e.g. JavaLogs/CPRILog or F:/LogFiles/CPRILog */ private String mstrFileName; /** * Used internally and contains the name of the date derived from current system date. 
*/ private Date mstrDate = new Date(System.currentTimeMillis()); /** * Holds the user specified DatePattern, */ private String mstrDatePattern = DATE_PATTERN; private boolean mMonthOnly = false; /** * The date formatter object used for parsing the user specified DatePattern. */ private SimpleDateFormat mobjSDF; private boolean mWithHostName = false; private int mMaxLogs = 0; /** * Default constructor. This is required as the appender class is dynamically * loaded. */ public DailyFileAppender() { super(); } /* (non-Javadoc) * @see org.apache.log4j.FileAppender#activateOptions() */ public void activateOptions() { setFileName(); cleanupOldFiles(); super.activateOptions(); } /*------------------------------------------------------------------------------ * Getters *----------------------------------------------------------------------------*/ public String getDatePattern() { return this.mstrDatePattern; } public String getFile() { return this.mstrFileName; } public boolean getWithHost() { return mWithHostName; } public int getMaxLogs() { return mMaxLogs; } /*------------------------------------------------------------------------------ * Setters *----------------------------------------------------------------------------*/ public void setDatePattern(String pstrPattern) { this.mstrDatePattern = checkPattern(pstrPattern); if (mstrDatePattern.contains("dd") || mstrDatePattern.contains("DD")) { mMonthOnly = false; } else { mMonthOnly = true; } } public void setFile(String file) { // Trim spaces from both ends. The users probably does not want // trailing spaces in file names. 
String val = file.trim(); mstrFileName = val; } public void setWithHost(boolean wh) { mWithHostName = wh; } public void setMaxLogs(int ml) { mMaxLogs = ml; } /*------------------------------------------------------------------------------ * Methods *----------------------------------------------------------------------------*/ /* (non-Javadoc) * @see org.apache.log4j.WriterAppender#subAppend(org.apache.log4j.spi.LoggingEvent) */ protected void subAppend(LoggingEvent pobjEvent) { Date dtNow = new Date(System.currentTimeMillis()); boolean rollover = false; if (mMonthOnly) { Calendar now = Calendar.getInstance(); Calendar cur = Calendar.getInstance(); now.setTime(dtNow); cur.setTime(mstrDate); rollover = !(now.get(Calendar.YEAR) == cur.get(Calendar.YEAR) && now.get(Calendar.MONTH) == cur.get(Calendar.MONTH)); } else { rollover = !(DateUtils.isSameDay(dtNow, mstrDate)); } if (rollover) { try { rollOver(dtNow); } catch (IOException IOEx) { LogLog.error("rollOver() failed!", IOEx); } } super.subAppend(pobjEvent); } /*------------------------------------------------------------------------------ * Helpers *----------------------------------------------------------------------------*/ /** * The helper function to validate the DatePattern. * @param pstrPattern The DatePattern to be validated. 
* @return The validated date pattern or defautlt DATE_PATTERN */ private String checkPattern(String pstrPattern) { String strRet = null; SimpleDateFormat objFmt = new SimpleDateFormat(DATE_PATTERN); try { this.mobjSDF = new SimpleDateFormat(pstrPattern); strRet = pstrPattern; } catch (NullPointerException NPExIgnore) { LogLog.error("Invalid DatePattern " + pstrPattern, NPExIgnore); this.mobjSDF = objFmt; strRet = DATE_PATTERN; } catch (IllegalArgumentException IlArgExIgnore) { LogLog.error("Invalid DatePattern " + pstrPattern, IlArgExIgnore); this.mobjSDF = objFmt; strRet = DATE_PATTERN; } finally { objFmt = null; } return strRet; } /** * This function is responsible for performing the actual file rollover. * @param pstrName The name of the new folder based on current system date. * @throws IOException */ private static boolean deletingFiles = false; private void cleanupOldFiles() { // If we need to delete log files if (mMaxLogs > 0 && !deletingFiles) { deletingFiles = true; // Determine the final file extension with the hostname String hostFileExt = null; try { hostFileExt = "." 
+ java.net.InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { LogLog.error("Unable to retrieve host name"); } try { // Array to hold the logs we are going to keep File[] logsToKeep = new File[mMaxLogs]; // Get a 'master' file handle, and the parent directory from it File logMaster = new File(mstrFileName); File logDir = logMaster.getParentFile(); if (logDir.isDirectory()) { // Iterate all the files in that directory File[] logArr = logDir.listFiles(); for (File curLog : logArr) { LogLog.debug("Comparing '" + curLog.getAbsolutePath() + "' to '" + mstrFileName + "'"); String name = curLog.getAbsolutePath(); // First, see if we are not using hostname, or the log file ends with this host if (!mWithHostName || (hostFileExt != null && name.endsWith(hostFileExt))) { // Check that the file is indeed one we want (contains the master file name) if (name.contains(mstrFileName)) { // Iterate through the array of logs we are keeping for (int i = 0; curLog != null && i < logsToKeep.length; i++) { // Have we exhausted the 'to keep' array? if (logsToKeep[i] == null) { // Empty space, retain this log file logsToKeep[i] = curLog; curLog = null; } // If the 'kept' file is older than the current one else if (logsToKeep[i].getName().compareTo(curLog.getName()) < 0) { // Replace tested entry with current file File temp = logsToKeep[i]; logsToKeep[i] = curLog; curLog = temp; } } // If we have a 'current' entry at this point, it's a log we don't want if (curLog != null) { LogLog.debug("Deleting log " + curLog.getName()); if (curLog.delete()) { LogLog.error("Unable to delete log file"); } } } } } } } catch (Exception e) { // Don't worry about exceptions } finally { deletingFiles = false; } } } private void rollOver(Date dtNow) throws IOException { mstrDate = dtNow; setFileName(); this.setFile(fileName, true, bufferedIO, bufferSize); cleanupOldFiles(); } private void setFileName() { fileName = mstrFileName + "." 
+ mobjSDF.format(mstrDate); if (mWithHostName) { try { fileName += "." + java.net.InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { LogLog.error("Unable to retrieve host name"); } } } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.util; import java.sql.SQLException; import org.dspace.content.*; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Properties; import org.apache.log4j.Logger; import org.dspace.core.ConfigurationManager; import java.io.File; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Set; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.handle.HandleManager; import org.jdom.Element; /** * * @author Sands Fish * */ @SuppressWarnings("deprecation") public class GoogleMetadata { private final static Logger log = Logger.getLogger(GoogleMetadata.class); private static final String GOOGLE_PREFIX = "google."; private Item item; private String itemURL; // Configuration keys and fields private static Map<String, String> configuredFields = new HashMap<String, String>(); // Google field names (e.g. 
citation_fieldname) and formatted metadata // values private Map<String, String> metadataMappings = new HashMap<String, String>(); public static final String TITLE = "citation_title"; public static final String JOURNAL_TITLE = "citation_journal_title"; public static final String PUBLISHER = "citation_publisher"; public static final String AUTHORS = "citation_authors"; public static final String DATE = "citation_date"; public static final String VOLUME = "citation_volume"; public static final String ISSUE = "citation_issue"; public static final String FIRSTPAGE = "citation_firstpage"; public static final String LASTPAGE = "citation_lastpage"; public static final String DOI = "citation_doi"; public static final String PMID = "citation_pmid"; public static final String ABSTRACT = "citation_abstract_html_url"; public static final String FULLTEXT = "citation_fulltext_html_url"; public static final String PDF = "citation_pdf_url"; public static final String ISSN = "citation_issn"; public static final String ISBN = "citation_isbn"; public static final String LANGUAGE = "citation_language"; public static final String KEYWORDS = "citation_keywords"; public static final String CONFERENCE = "citation_conference"; public static final String DISSERTATION_ID = "identifiers.dissertation"; public static final String DISSERTATION_NAME = "citation_dissertation_name"; public static final String DISSERTATION_INSTITUTION = "citation_dissertation_institution"; public static final String PATENT_ID = "identifiers.patent"; public static final String PATENT_NUMBER = "citation_patent_number"; public static final String PATENT_COUNTRY = "citation_patent_country"; public static final String TECH_REPORT_ID = "identifiers.technical_report"; public static final String TECH_REPORT_NUMBER = "citation_technical_report_number"; public static final String TECH_REPORT_INSTITUTION = "citation_technical_report_institution"; private static final int SINGLE = 0; private static final int MULTI = 1; private 
static final int ALL_FIELDS_IN_OPTION = 2; // Load configured fields from google-metadata.properties static { File loadedFile = null; URL url = null; InputStream is = null; String googleConfigFile = ConfigurationManager .getProperty("google-metadata.config"); log.info("Using [" + googleConfigFile + "] for Google Metadata configuration"); loadedFile = new File(googleConfigFile); try { url = loadedFile.toURL(); } catch (MalformedURLException mux) { log.error("Can't find Google Metadata configuration file: " + googleConfigFile, mux); } Properties properties = new Properties(); try { is = url.openStream(); properties.load(is); } catch (IOException iox) { log.error("Could not read Google Metadata configuration file: " + googleConfigFile, iox); } Enumeration propertyNames = properties.propertyNames(); while (propertyNames.hasMoreElements()) { String key = ((String) propertyNames.nextElement()).trim(); if (key.startsWith(GOOGLE_PREFIX)) { String name = key.substring(GOOGLE_PREFIX.length()); String field = properties.getProperty(key); if (null != name && !name.equals("") && null != field && !field.equals("")) { configuredFields.put(name.trim(), field.trim()); } } } if (log.isDebugEnabled()) { logConfiguration(); } } /** * Dump Metadata field mapping to log * */ public static void logConfiguration() { log.debug("Google Metadata Configuration Mapping:"); for (String name : configuredFields.keySet()) { log.debug(" " + name + " => " + configuredFields.get(name)); } } /** * Wrap the item, parse all configured fields and generate metadata field * values. * * @param item * - The item being viewed to extract metadata from */ public GoogleMetadata(Context context, Item item) throws SQLException { // Hold onto the item in case we need to refresh a stale parse this.item = item; itemURL = HandleManager.resolveToURL(context, item.getHandle()); parseItem(); } /** * Add a single metadata value to the Google field, defaulting to the * first-encountered instance of the field for this Item. 
 *
 * @param fieldName the Google field name (key into configuredFields) to populate
 * @return true if a value was resolved and mapped, false otherwise
 */
private boolean addSingleField(String fieldName)
{
    String config = configuredFields.get(fieldName);

    if (null == config || config.equals(""))
    {
        return false;
    }

    if (log.isDebugEnabled())
    {
        log.debug("Processing " + fieldName);
    }

    // "$handle" is a variable substitution: map the item's handle URL.
    if (config.equals("$handle"))
    {
        if (null != itemURL && !itemURL.equals(""))
        {
            metadataMappings.put(fieldName, itemURL);
            return true;
        }
        else
        {
            return false;
        }
    }

    // "$simple-pdf" maps the direct-download URL of a lone PDF bitstream.
    if (config.equals("$simple-pdf"))
    {
        String pdf_url = getPDFSimpleUrl(item);
        if(pdf_url.length() > 0)
        {
            metadataMappings.put(fieldName, pdf_url);
            return true;
        }
        else
        {
            return false;
        }
    }

    // Otherwise resolve the configured metadata field against the item.
    DCValue v = resolveMetadataField(config);

    if (null != v && (null != v.value) && !v.value.trim().equals(""))
    {
        metadataMappings.put(fieldName, v.value);
        return true;
    }
    else
    {
        // No values found
        return false;
    }
}

/**
 * A singular version of resolveMetadata to return only one field value
 * instead of an aggregate.
 *
 * @param configFilter the configured filter expression to resolve
 * @return The first configured match of metadata field for the item.
 */
private DCValue resolveMetadataField(String configFilter)
{
    ArrayList<DCValue> fields = resolveMetadata(configFilter, SINGLE);
    if (null != fields && fields.size() > 0)
    {
        return fields.get(0);
    }

    return null;
}

/**
 * A plural version of resolveMetadata for aggregate fields.
 *
 * @param configFilter the configured filter expression to resolve
 * @return Aggregate of all matching metadata fields configured in the first
 *         option field-set to return any number of filter matches.
 */
private ArrayList<DCValue> resolveMetadataFields(String configFilter)
{
    ArrayList<DCValue> fields = resolveMetadata(configFilter, MULTI);
    if (null != fields && fields.size() > 0)
    {
        return fields;
    }

    return null;
}

/**
 * Aggregate an array of DCValues present on the current item that pass the
 * configuration filter.
 *
 * @param configFilter filter expression: "|"-separated option field-sets,
 *                     each a comma-separated list of schema.element[.qualifier]
 *                     names (wildcards allowed); the first option with any
 *                     match wins
 * @param returnType   SINGLE, MULTI or ALL_FIELDS_IN_OPTION
 * @return Array of configuration -> item-field matches, or null when nothing
 *         matches or the filter is invalid
 */
private ArrayList<DCValue> resolveMetadata(String configFilter, int returnType)
{
    if (null == configFilter || configFilter.trim().equals("")
            || !configFilter.contains("."))
    {
        log.error("The configuration string [" + configFilter
                + "] is invalid.");
        return null;
    }
    else
    {
        configFilter = configFilter.trim();
    }
    ArrayList<ArrayList<String>> parsedOptions = new ArrayList<ArrayList<String>>();
    parsedOptions = parseOptions(configFilter);

    if (log.isDebugEnabled())
    {
        log
                .debug("Resolved Fields For This Item Per Configuration Filter:");
        for (int i = 0; i < parsedOptions.size(); i++)
        {
            ArrayList<String> optionFields = parsedOptions.get(i);

            log.debug("Option " + (i + 1) + ":");
            for (String f : optionFields)
            {
                log.debug("{" + f + "}");
            }
        }
    }

    // Iterate through each configured option's field-set until
    // we have a match.
    for (ArrayList<String> optionFields : parsedOptions)
    {
        // NOTE(review): optionMatches is never incremented anywhere, so the
        // ALL_FIELDS_IN_OPTION comparison below can only succeed when
        // resolvedFields is empty — which that branch never reaches. The
        // branch appears unreachable; confirm intent before relying on it.
        int optionMatches = 0;
        String[] components;
        DCValue[] values;
        ArrayList<DCValue> resolvedFields = new ArrayList<DCValue>();

        for (String field : optionFields)
        {
            components = parseComponents(field);
            values = item.getMetadata(components[0], components[1],
                    components[2], Item.ANY);

            if (values.length > 0)
            {
                for (DCValue v : values)
                {
                    resolvedFields.add(v);

                    // SINGLE: stop at the first value found.
                    if (returnType == SINGLE)
                    {
                        if (!resolvedFields.isEmpty())
                        {
                            if (log.isDebugEnabled())
                            {
                                log
                                        .debug("Resolved Field Value For This Item:");
                                for (DCValue r : resolvedFields)
                                {
                                    log.debug("{" + r.value + "}");
                                }
                            }
                            return resolvedFields;
                        }
                    }
                }
            }
        }

        // If the item had any of the fields contained in this option,
        // return them, otherwise move on to the next option's field-set.
        if (!resolvedFields.isEmpty())
        {
            if (log.isDebugEnabled())
            {
                log.debug("Resolved Field Values For This Item:");
                for (DCValue v : resolvedFields)
                {
                    log.debug("{" + v.value + "}");
                }
            }

            // Check to see if this is a full option match
            if (ALL_FIELDS_IN_OPTION == returnType)
            {
                if (resolvedFields.size() == optionMatches)
                {
                    return resolvedFields;
                }
                // Otherwise, if there are any matches for the option,
                // return them.
            }
            else if (MULTI == returnType)
            {
                return resolvedFields;
            }
        }
    }
    return null;
}

/**
 * Parse first-match path of metadata field-group options for the given
 * configuration.
 *
 * @param configFilter "|"-separated list of option field-sets
 * @return one list of expanded field names per option, in order
 */
private ArrayList<ArrayList<String>> parseOptions(String configFilter)
{
    ArrayList<String> options = new ArrayList<String>();
    ArrayList<ArrayList<String>> parsedOptions = new ArrayList<ArrayList<String>>();

    if (null == configFilter || configFilter.equals(""))
    {
        return null;
    }

    if (configFilter.contains("|"))
    {
        String[] configOptions = configFilter.split("\\|");

        for (String option : configOptions)
        {
            options.add(option.trim());
        }
    }
    else
    {
        options = new ArrayList<String>();
        options.add(configFilter);
    }

    // Parse first-match path options. The first option (field-set)
    // to match fields present in the item is used.
    ArrayList<String> parsedFields;

    // Parse the fields for each field-set in order.
    for (String option : options)
    {
        ArrayList<String> fields;
        parsedFields = new ArrayList<String>();

        if (option.contains(","))
        {
            fields = parseFields(option);
        }
        else
        {
            fields = new ArrayList<String>();
            fields.add(option);
        }

        // Parse field list for this field-set, expanding any wildcards.
        for (String field : fields)
        {
            if (field.contains("*"))
            {
                ArrayList<String> wc = parseWildcard(field);
                for (String wcField : wc)
                {
                    if (!parsedFields.contains(wcField))
                    {
                        parsedFields.add(wcField);
                    }
                }
            }
            else
            {
                if (!parsedFields.contains(field))
                {
                    parsedFields.add(field);
                }
            }
        }

        parsedOptions.add(parsedFields);
    }

    if (null != parsedOptions)
    {
        return parsedOptions;
    }
    else
    {
        return null;
    }
}

/**
 * Build a Vector of fields that can be added to when expanding wildcards.
 *
 * @param configString
 *            - Value of one metadata field configuration
 * @return A vector of raw field configurations.
 */
private ArrayList<String> parseFields(String configString)
{
    ArrayList<String> fields = new ArrayList<String>();

    for (String field : configString.split("\\,"))
    {
        fields.add(field.trim());
    }

    return fields;
}

/**
 * Pull apart an individual field structure.
 *
 * @param field
 *            The configured field for one metadata field map
 * @return Schema, Element, Qualifier of metadata field
 */
private String[] parseComponents(String field)
{
    int index = 0;
    String[] components = new String[3];

    // NOTE(review): a field with more than 3 dot-separated parts would
    // overflow this 3-element array; inputs are assumed well-formed.
    for (String c : field.split("\\."))
    {
        components[index] = c.trim();
        index++;
    }
    return components;
}

/**
 * Expand any wildcard characters to an array of all matching fields for
 * this item. No order consistency is implied.
 *
 * @param field
 *            The field identifier containing a wildcard character.
 * @return Expanded field list.
 */
private ArrayList<String> parseWildcard(String field)
{
    if (!field.contains("*"))
    {
        return null;
    }
    else
    {
        String[] components = parseComponents(field);

        for (int i = 0; i < components.length; i++)
        {
            if (components[i].trim().equals("*"))
            {
                components[i] = Item.ANY;
            }
        }

        DCValue[] allMD = item.getMetadata(components[0], components[1],
                components[2], Item.ANY);

        ArrayList<String> expandedDC = new ArrayList<String>();
        for (DCValue v : allMD)
        {
            // De-dup multiple occurances of field names in item
            if (!expandedDC.contains(buildFieldName(v)))
            {
                expandedDC.add(buildFieldName(v));
            }
        }

        if (log.isDebugEnabled())
        {
            log.debug("Field Names From Expanded Wildcard \"" + field
                    + "\"");
            for (String v : expandedDC)
            {
                log.debug("   " + v);
            }
        }

        return expandedDC;
    }
}

/**
 * Construct metadata field name out of DCValue components
 *
 * @param v
 *            The DCValue to construct a name for.
 * @return The complete metadata field name.
 */
private String buildFieldName(DCValue v)
{
    StringBuilder name = new StringBuilder();

    name.append(v.schema + "." + v.element);

    if (null != v.qualifier)
    {
        name.append("." + v.qualifier);
    }

    return name.toString();
}

/**
 * Using metadata field mappings contained in the loaded configuration,
 * parse through configured metadata fields, building valid Google metadata
 * value strings. Field names & values contained in metadataMappings.
 * Called from the constructor.
 */
private void parseItem()
{
    // TITLE
    addSingleField(TITLE);

    // AUTHORS (multi)
    addAggregateValues(AUTHORS, ";");

    // DATE
    addSingleField(DATE);

    // ISSN
    addSingleField(ISSN);

    // ISBN
    addSingleField(ISBN);

    // VOLUME
    addSingleField(VOLUME);

    // ISSUE
    addSingleField(ISSUE);

    // FIRSTPAGE
    addSingleField(FIRSTPAGE);

    // LASTPAGE
    addSingleField(LASTPAGE);

    // DOI
    addSingleField(DOI);

    // PMID
    addSingleField(PMID);

    // ABSTRACT_HTML_URL ('$handle' variable substitution if present)
    addSingleField(ABSTRACT);

    // FULLTEXT_HTML_URL ('$handle' variable substitution if present)
    addSingleField(FULLTEXT);

    // PDF_URL ('$handle' variable substitution if present)
    addSingleField(PDF);

    // LANGUAGE
    addSingleField(LANGUAGE);

    // KEYWORDS (multi)
    addAggregateValues(KEYWORDS, ";");

    // CONFERENCE
    addSingleField(CONFERENCE);

    // Dissertations
    if (itemIsDissertation())
    {
        if(log.isDebugEnabled())
        {
            log.debug("ITEM TYPE: DISSERTATION");
        }
        addSingleField(DISSERTATION_NAME);
        addSingleField(DISSERTATION_INSTITUTION);
    }

    // Patents
    if (itemIsPatent())
    {
        if(log.isDebugEnabled())
        {
            log.debug("ITEM TYPE: PATENT");
        }
        addSingleField(PATENT_NUMBER);

        // Use config value for patent country. Should be a literal.
        String countryConfig = configuredFields.get(PATENT_COUNTRY);
        if (null != countryConfig && !countryConfig.trim().equals(""))
        {
            metadataMappings.put(PATENT_COUNTRY, countryConfig.trim());
        }

        addSingleField(PUBLISHER);
    }

    // Tech Reports
    if (itemIsTechReport())
    {
        if(log.isDebugEnabled())
        {
            log.debug("ITEM TYPE: TECH REPORT");
        }
        addSingleField(TECH_REPORT_NUMBER);
        addSingleField(TECH_REPORT_INSTITUTION);
    }

    // PUBLISHER was already mapped for patents above; dissertations and
    // tech reports intentionally omit it here.
    if(!itemIsDissertation() && !itemIsTechReport())
    {
        // PUBLISHER
        addSingleField(PUBLISHER);
    }
}

/**
 * Fetch all metadata mappings.
 *
 * Usage: GoogleMetadata gmd = new GoogleMetadata(item);
 * for(Entry&lt;String, String&gt; mapping : gmd.getMappings()) { ... }
 *
 * @return Iterable of metadata fields mapped to Google-formatted values
 *         (a defensive copy; mutating it does not affect this object)
 */
public Set<Entry<String, String>> getMappings()
{
    return new HashSet<Entry<String, String>>(metadataMappings.entrySet());
}

/**
 * Produce meta elements that can easily be put into the head.
 *
 * @return one JDOM &lt;meta name content&gt; element per mapped field
 */
public List<Element> disseminateList()
{
    List<Element> metas = new ArrayList<Element>();

    for (Entry<String, String> m : getMappings())
    {
        Element e = new Element("meta");
        e.setNamespace(null);
        e.setAttribute("name", m.getKey());
        e.setAttribute("content", m.getValue());
        metas.add(e);
    }
    return metas;
}

// Getters for individual metadata fields...

/**
 * @return the citation_title
 */
public String getTitle()
{
    return metadataMappings.get(TITLE);
}

/**
 * @return the citation_journal_title
 */
public String getJournalTitle()
{
    return metadataMappings.get(JOURNAL_TITLE);
}

/**
 * @return the citation_publisher
 */
public String getPublisher()
{
    return metadataMappings.get(PUBLISHER);
}

/**
 * @return the citation_authors
 */
public String getAuthors()
{
    return metadataMappings.get(AUTHORS);
}

/**
 * @return the citation_date
 */
public String getDate()
{
    return metadataMappings.get(DATE);
}

/**
 * @return the citation_volume
 */
public String getVolume()
{
    return metadataMappings.get(VOLUME);
}

/**
 * @return the citation_issue
 */
public String getIssue()
{
    return metadataMappings.get(ISSUE);
}

/**
 * @return the citation_firstpage
 */
public String getFirstpage()
{
    return metadataMappings.get(FIRSTPAGE);
}

/**
 * @return the citation_lastpage
 */
public String getLastpage()
{
    return metadataMappings.get(LASTPAGE);
}

/**
 * @return the citation_doi
 */
public String getDOI()
{
    return metadataMappings.get(DOI);
}

/**
 * @return the citation_pmid
 */
public String getPmid()
{
    return metadataMappings.get(PMID);
}

/**
 * @return the citation_abstract_html_url
 */
public String getAbstractHTMLURL()
{
    return metadataMappings.get(ABSTRACT);
}

/**
 * @return the citation_fulltext_html_url
 */
public String getFulltextHTMLURL()
{
    return metadataMappings.get(FULLTEXT);
}

/**
 * @return the citation_pdf_url
 */
public String getPDFURL()
{
    return metadataMappings.get(PDF);
}

/**
 * @return the citation_issn
 */
public String getISSN()
{
    return metadataMappings.get(ISSN);
}

/**
 * @return the citation_isbn
 */
public String getISBN()
{
    return metadataMappings.get(ISBN);
}

/**
 * @return the citation_language
 */
public String getLanguage()
{
    return metadataMappings.get(LANGUAGE);
}

/**
 * @return the citation_keywords
 */
public String getKeywords()
{
    return metadataMappings.get(KEYWORDS);
}

/**
 * @return the citation_conference
 */
public String getConference()
{
    return metadataMappings.get(CONFERENCE);
}

/**
 * @return the citation_dissertation_name
 */
public String getDissertationName()
{
    return metadataMappings.get(DISSERTATION_NAME);
}

/**
 * @return the citation_dissertation_institution
 */
public String getDissertationInstitution()
{
    return metadataMappings.get(DISSERTATION_INSTITUTION);
}

/**
 * @return the citation_patent_number
 */
public String getPatentNumber()
{
    return metadataMappings.get(PATENT_NUMBER);
}

/**
 * @return the citation_patent_country
 */
public String getPatentCountry()
{
    return metadataMappings.get(PATENT_COUNTRY);
}

/**
 * @return the citation_technical_report_number
 */
public String getTechnicalReportNumber()
{
    return metadataMappings.get(TECH_REPORT_NUMBER);
}

/**
 * @return the citation_technical_report_institution
 */
public String getTechnicalReportInstitution()
{
    return metadataMappings.get(TECH_REPORT_INSTITUTION);
}

/**
 * Gets the URL to a PDF using a very basic strategy by assuming that the PDF
 * is in the default content bundle, and that the item only has one public bitstream
 * and it is a PDF.
 *
 * @param item the item whose ORIGINAL bundle is inspected
 * @return URL that the PDF can be directly downloaded from, or "" when the
 *         item does not match the single-PDF assumption or lookup fails
 */
private String getPDFSimpleUrl(Item item)
{
    try
    {
        Bundle[] contentBundles = item.getBundles("ORIGINAL");
        if (contentBundles.length > 0)
        {
            Bitstream[] bitstreams = contentBundles[0].getBitstreams();
            // Only map when there is exactly one bitstream and it is a PDF.
            if (bitstreams.length == 1)
            {
                if (bitstreams[0].getFormat().getMIMEType().equals("application/pdf"))
                {
                    StringBuilder path = new StringBuilder();
                    path.append(ConfigurationManager.getProperty("dspace.url"));

                    if (item.getHandle() != null)
                    {
                        path.append("/bitstream/");
                        path.append(item.getHandle());
                        path.append("/");
                        path.append(bitstreams[0].getSequenceID());
                    }
                    else
                    {
                        // No handle yet: fall back to the internal ID form.
                        path.append("/retrieve/");
                        path.append(bitstreams[0].getID());
                    }

                    path.append("/");
                    path.append(Util.encodeBitstreamName(bitstreams[0].getName(), Constants.DEFAULT_ENCODING));
                    return path.toString();
                }
            }
        }
    }
    catch (UnsupportedEncodingException ex)
    {
        log.debug(ex.getMessage());
    }
    catch (SQLException ex)
    {
        log.debug(ex.getMessage());
    }

    return "";
}

/**
 * Aggregate all values of the configured field for FIELD into one
 * delimiter-separated mapping value.
 *
 * @param FIELD
 *            to aggregate all values of in a matching option
 * @param delim
 *            to delimit field values with (a space is appended after it)
 */
private void addAggregateValues(String FIELD, String delim)
{
    String authorConfig = configuredFields.get(FIELD);
    ArrayList<DCValue> fields = resolveMetadataFields(authorConfig);

    if (null != fields && !fields.isEmpty())
    {
        StringBuilder fieldMetadata = new StringBuilder();
        int count = 0;

        for (DCValue field : fields)
        {
            fieldMetadata.append(field.value);
            // Append the delimiter after every value except the last.
            if (count < fields.size() - 1)
            {
                fieldMetadata.append(delim + " ");
                count++;
            }
        }
        metadataMappings.put(FIELD, fieldMetadata.toString());
    }
}

/**
 * Determine, based on config values, if this item is a dissertation.
 *
 * @return boolean
 */
private boolean itemIsDissertation()
{
    String dConfig = configuredFields.get(DISSERTATION_ID);
    if (null == dConfig || dConfig.trim().equals(""))
    {
        return false;
    }
    else
    {
        return identifyItemType(dConfig);
    }
}

/**
 * Determine, based on config values, if this item is a patent.
* * @return boolean */ private boolean itemIsPatent() { String dConfig = configuredFields.get(PATENT_ID); if (null == dConfig || dConfig.trim().equals("")) { return false; } else { return identifyItemType(dConfig); } } /** * Determine, based on config values, if this item is a tech report. * * @return boolean */ private boolean itemIsTechReport() { String dConfig = configuredFields.get(TECH_REPORT_ID); if (null == dConfig || dConfig.trim().equals("")) { return false; } else { return identifyItemType(dConfig); } } /** * Identifies if this item matches a particular configuration of fields and * values for those fields to identify the type based on a type- cataloging * metadata practice. * * @param dConfig * @return */ private boolean identifyItemType(String dConfig) { // FIXME: Shouldn't have to parse identifiers for every identification. ArrayList<ArrayList<String>> options = parseOptions(dConfig); HashMap<String, ArrayList<String>> mdPairs = new HashMap<String, ArrayList<String>>(); // Parse field/value pairs from field identifier string for (ArrayList<String> option : options) { String pair = option.get(0); String[] parsedPair = pair.split("\\:"); if (2 == parsedPair.length) { // If we've encountered this field before, add the value to the // list if (mdPairs.containsKey(parsedPair[0].trim())) { mdPairs.get(parsedPair[0].trim()).add(parsedPair[1]); if(log.isDebugEnabled()) { log.debug("Registering Type Identifier: " + parsedPair[0] + " => " + parsedPair[1]); } } else { // Otherwise, add it as the first occurance of this field ArrayList<String> newField = new ArrayList<String>(); newField.add(parsedPair[1].trim()); mdPairs.put(parsedPair[0].trim(), newField); if (log.isDebugEnabled()) { log.debug("Registering Type Identifier: " + parsedPair[0] + " => " + parsedPair[1]); } } } else { log.error("Malformed field identifier name/value pair"); } } // Build config string without values, only field names StringBuilder sb = new StringBuilder(); for (String value : 
mdPairs.keySet()) { sb.append(value + " | "); } // Check resolved/present metadata fields against configured values ArrayList<DCValue> presentMD = resolveMetadataFields(sb.toString()); if (null != presentMD && presentMD.size() != 0) { for (DCValue v : presentMD) { String fieldName = buildFieldName(v); if (mdPairs.containsKey(fieldName)) { for (String configValue : mdPairs.get(fieldName)) { if (configValue.equals(v.value)) { return true; } } } } } return false; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.InProgressSubmission;
import org.dspace.submit.AbstractProcessingStep;
import org.dspace.workflow.WorkflowItem;

/**
 * Information about an item being editing with the submission UI
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class SubmissionInfo
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(SubmissionInfo.class);

    /** The item which is being submitted */
    private InProgressSubmission submissionItem = null;

    /**
     * The Submission process config, which holds all info about the submission
     * process that this item is going through (including all steps, etc)
     */
    private SubmissionConfig submissionConfig = null;

    /**
     * Handle of the collection where this item is being submitted
     */
    private String collectionHandle = null;

    /***************************************************************************
     * Holds all information used to build the Progress Bar in a key,value set.
     * Keys are the number of the step, followed by the number of the page
     * within the step (e.g. "2.1" = The first page of Step 2) (e.g. "5.2" = The
     * second page of Step 5) Values are the Headings to display for each step
     * (e.g. "Describe")
     **************************************************************************/
    private Map<String, String> progressBar = null;

    /** The element or element_qualifier to show more input boxes for */
    private String moreBoxesFor;

    /** The element or element_qualifier to scroll to initially using anchor */
    private String jumpToField;

    /** If non-empty, form-relative indices of missing fields */
    private List<String> missingFields;

    /** Specific bundle we're dealing with */
    private Bundle bundle;

    /** Specific bitstream we're dealing with */
    private Bitstream bitstream;

    /** Reader for submission process configuration file */
    private static SubmissionConfigReader submissionConfigReader;

    /**
     * Default Constructor - PRIVATE
     * <p>
     * Create a SubmissionInfo object using the load() method!
     */
    private SubmissionInfo()
    {
    }

    /**
     * Loads all known submission information based on the given in progress
     * submission and request object.
     * <P>
     * If subItem is null, then just loads the default submission information
     * for a new submission.
     *
     * @param request
     *            The HTTP Servlet Request object
     * @param subItem
     *            The in-progress submission we are loading information for
     *
     * @return a SubmissionInfo object
     *
     * @throws ServletException
     *             if an error occurs
     */
    public static SubmissionInfo load(HttpServletRequest request,
            InProgressSubmission subItem) throws ServletException
    {
        boolean forceReload = false;

        SubmissionInfo subInfo = new SubmissionInfo();

        // load SubmissionConfigReader only the first time
        // or if we're using a different UI now.
        if (submissionConfigReader == null)
        {
            submissionConfigReader = new SubmissionConfigReader();
            forceReload = true;
        }

        // save the item which is going through the submission process
        subInfo.setSubmissionItem(subItem);

        // Only if the submission item is created can we set its collection
        String collectionHandle = SubmissionConfigReader.DEFAULT_COLLECTION;
        if (subItem != null)
        {
            collectionHandle = subItem.getCollection().getHandle();
        }

        // save this collection handle to this submission info object
        subInfo.setCollectionHandle(collectionHandle);

        // load Submission Process config for this item's collection
        // (Note: this also loads the Progress Bar info, since it is
        // dependent on the Submission config)
        loadSubmissionConfig(request, subInfo, forceReload);

        return subInfo;
    }

    /**
     * Is this submission in the workflow process?
     *
     * @return true if the current submission is in the workflow process
     */
    public boolean isInWorkflow()
    {
        return ((this.submissionItem != null)
                && this.submissionItem instanceof WorkflowItem);
    }

    /**
     * Return the current in progress submission
     *
     * @return the InProgressSubmission object representing the current
     *         submission
     */
    public InProgressSubmission getSubmissionItem()
    {
        return this.submissionItem;
    }

    /**
     * Updates the current in progress submission item
     *
     * @param subItem
     *            the new InProgressSubmission object
     */
    public void setSubmissionItem(InProgressSubmission subItem)
    {
        this.submissionItem = subItem;
    }

    /**
     * Return the current submission process config (which includes all steps
     * which need to be completed for the submission to be successful)
     *
     * @return the SubmissionConfig object, which contains info on all the
     *         steps in the current submission process
     */
    public SubmissionConfig getSubmissionConfig()
    {
        return this.submissionConfig;
    }

    /**
     * Causes the SubmissionConfig to be completely reloaded from the XML
     * configuration file (item-submission.xml).
     * <P>
     * Note: This also reloads the progress bar info, since the progress bar
     * depends entirely on the submission process (and its steps).
     *
     * @param request
     *            The HTTP Servlet Request object
     *
     * @throws ServletException
     *             if an error occurs
     */
    public void reloadSubmissionConfig(HttpServletRequest request)
            throws ServletException
    {
        // Only if the submission item is created can we set its collection
        String collectionHandle = SubmissionConfigReader.DEFAULT_COLLECTION;
        if (this.submissionItem != null)
        {
            collectionHandle = submissionItem.getCollection().getHandle();
        }
        this.setCollectionHandle(collectionHandle);

        // force a reload of the submission process configuration
        loadSubmissionConfig(request, this, true);
    }

    /**
     * Returns a particular global step definition based on its ID.
     * <P>
     * Global step definitions are those defined in the &lt;step-definitions&gt;
     * section of the configuration file.
     *
     * @param stepID
     *            step's identifier
     *
     * @return the SubmissionStepConfig representing the step
     *
     * @throws ServletException
     *             if no default submission process configuration defined
     */
    public SubmissionStepConfig getStepConfig(String stepID)
            throws ServletException
    {
        return submissionConfigReader.getStepConfig(stepID);
    }

    /**
     * Return text information suitable for logging.
     * <p>
     * This method is used by several of the Step classes to log major events
     * during the submission process (e.g. when license agreement was accepted,
     * when item was submitted, when it was available in DSpace, etc.)
     *
     * @return the type and ID of the submission, bundle and/or bitstream for
     *         logging
     */
    public String getSubmissionLogInfo()
    {
        String info = "";

        if (isInWorkflow())
        {
            info = info + "workflow_id=" + getSubmissionItem().getID();
        }
        else
        {
            // FIX: was "workspace_item_id" with no '=', which produced
            // malformed key=value log output (e.g. "workspace_item_id42").
            info = info + "workspace_item_id=" + getSubmissionItem().getID();
        }

        if (getBundle() != null)
        {
            info = info + ",bundle_id=" + getBundle().getID();
        }

        if (getBitstream() != null)
        {
            info = info + ",bitstream_id=" + getBitstream().getID();
        }

        return info;
    }

    /**
     * Gets the handle of the collection to which this item is being submitted
     *
     * @return the collection handle
     */
    public String getCollectionHandle()
    {
        return this.collectionHandle;
    }

    /**
     * Sets the handle of the collection to which this item is being submitted
     *
     * @param handle
     *            the new collection handle
     */
    public void setCollectionHandle(String handle)
    {
        this.collectionHandle = handle;
    }

    /**
     * Return the information used to build the progress bar (this includes all
     * the steps in this submission, as well as the ordering and names of the
     * steps).
     * <p>
     * Returns a Hashmap, with the following specifics:
     * <p>
     * Keys are the number of the step, followed by the number of the page
     * within the step
     * <p>
     * (e.g. "2.1" = The first page of Step 2)
     * <p>
     * (e.g. "5.2" = The second page of Step 5)
     * <P>
     * Values are the Headings to display for each step (e.g. "Describe")
     *
     * @return a Hashmap of Progress Bar information.
     */
    public Map<String, String> getProgressBarInfo()
    {
        return this.progressBar;
    }

    /**
     * Return the current bitstream we're working with (This is used during
     * upload processes, or user interfaces that are dealing with bitstreams)
     *
     * @return the Bitstream object for the bitstream
     */
    public Bitstream getBitstream()
    {
        return this.bitstream;
    }

    /**
     * Sets the current bitstream we're working with (This is used during
     * upload processes, or user interfaces that are dealing with bitstreams)
     *
     * @param bits
     *            the bitstream
     */
    public void setBitstream(Bitstream bits)
    {
        this.bitstream = bits;
    }

    /**
     * Return the current bundle we're working with (This is used during upload
     * processes, or user interfaces that are dealing with bundles/bitstreams)
     *
     * @return the Bundle object for the bundle
     */
    public Bundle getBundle()
    {
        return this.bundle;
    }

    /**
     * Sets the current bundle we're working with (This is used during upload
     * processes, or user interfaces that are dealing with bundles/bitstreams)
     *
     * @param bund
     *            the bundle
     */
    public void setBundle(Bundle bund)
    {
        this.bundle = bund;
    }

    /**
     * Return form related indices of the required fields which were not filled
     * out by the user.
     *
     * @return a List of empty fields which are required
     */
    public List<String> getMissingFields()
    {
        return this.missingFields;
    }

    /**
     * Sets the form related indices of the required fields which were not
     * filled out by the user.
     *
     * @param missing
     *            the List of empty fields which are required
     */
    public void setMissingFields(List<String> missing)
    {
        this.missingFields = missing;
    }

    /**
     * Return metadata field which user has requested more input boxes be
     * displayed (by pressing "Add More" on one of the "Describe" pages)
     *
     * @return the String name of the field element
     */
    public String getMoreBoxesFor()
    {
        return this.moreBoxesFor;
    }

    /**
     * Sets the metadata field which user has requested more input boxes be
     * displayed (by pressing "Add More" on one of the "Describe" pages)
     *
     * @param fieldname
     *            the name of the field element on the page
     */
    public void setMoreBoxesFor(String fieldname)
    {
        this.moreBoxesFor = fieldname;
    }

    /**
     * Return metadata field which JSP should "jump to" (i.e. set focus on)
     * when the JSP next loads. This is used during the Describe step.
     *
     * @return the String name of the field element
     */
    public String getJumpToField()
    {
        return this.jumpToField;
    }

    /**
     * Sets metadata field which JSP should "jump to" (i.e. set focus on) when
     * the JSP next loads. This is used during the Describe step.
     *
     * @param fieldname
     *            the name of the field on the page
     */
    public void setJumpToField(String fieldname)
    {
        this.jumpToField = fieldname;
    }

    /**
     * Load necessary information to build the Progress Bar for the Item
     * Submission Progress.
     *
     * This information is returned in the form of a HashMap (which is then
     * stored as a part of the SubmissionInfo). The HashMap takes the following
     * form:
     *
     * Keys - the number of the step, followed by the number of the page within
     * the step (e.g. "2.1" = The first page of Step 2) (e.g. "5.2" = The
     * second page of Step 5)
     *
     * Values - the headings to display for each step (e.g. "Describe",
     * "Verify")
     *
     * @param request
     *            The HTTP Servlet Request object
     * @param subInfo
     *            the SubmissionInfo object we are loading into
     * @param forceReload
     *            If true, this method reloads from scratch (and overwrites
     *            cached progress bar info)
     */
    private static void loadProgressBar(HttpServletRequest request,
            SubmissionInfo subInfo, boolean forceReload)
    {
        Map<String, String> progressBarInfo = null;

        log.debug("Loading Progress Bar Info");

        if (!forceReload)
        {
            // first, attempt to load from cache
            progressBarInfo = loadProgressBarFromCache(request.getSession());
        }

        if (progressBarInfo != null && log.isDebugEnabled())
        {
            log.debug("Found Progress Bar Info in cache: "
                    + progressBarInfo.size()
                    + " pages to display in progress bar");
        }
        // if unable to load from cache, must load from scratch
        else
        {
            progressBarInfo = new LinkedHashMap<String, String>();

            // loop through all steps
            for (int i = 0; i < subInfo.submissionConfig.getNumberOfSteps(); i++)
            {
                // get the current step info
                SubmissionStepConfig currentStep = subInfo.submissionConfig
                        .getStep(i);
                String stepNumber = Integer.toString(currentStep
                        .getStepNumber());
                String stepHeading = currentStep.getHeading();

                // as long as this step is visible, include it in
                // the Progress Bar
                if (currentStep.isVisible())
                {
                    // default to just one page in this step
                    int numPages = 1;

                    try
                    {
                        // load the processing class for this step
                        ClassLoader loader = subInfo.getClass()
                                .getClassLoader();
                        Class<AbstractProcessingStep> stepClass =
                                (Class<AbstractProcessingStep>) loader
                                        .loadClass(currentStep.getProcessingClassName());

                        // call the "getNumberOfPages()" method of the class
                        // to get it's number of pages
                        AbstractProcessingStep step = stepClass.newInstance();

                        // get number of pages from servlet
                        numPages = step.getNumberOfPages(request, subInfo);
                    }
                    catch (Exception e)
                    {
                        log.error(
                                "Error loading progress bar information from Step Class '"
                                        + currentStep.getProcessingClassName()
                                        + "' Error:", e);
                    }

                    // save each of the step's pages to the progress bar
                    for (int j = 1; j <= numPages; j++)
                    {
                        String pageNumber = Integer.toString(j);

                        // store ("stepNumber.pageNumber", Heading) for each
                        // page in the step
                        progressBarInfo.put(stepNumber + "." + pageNumber,
                                stepHeading);
                    } // end for each page
                }
            } // end for each step

            log.debug("Loaded Progress Bar Info from scratch: "
                    + progressBarInfo.size()
                    + " pages to display in progress bar");

            // cache this new progress bar
            saveProgressBarToCache(request.getSession(), progressBarInfo);
        } // end if null

        // save progressBarInfo to submission Info
        subInfo.progressBar = progressBarInfo;
    }

    /**
     * Saves all progress bar information into session cache. This saves us
     * from having to reload this same progress bar over and over again.
     *
     * @param session
     *            The HTTP Session object
     * @param progressBarInfo
     *            The progress bar info to cache
     */
    private static void saveProgressBarToCache(HttpSession session,
            Map<String, String> progressBarInfo)
    {
        // cache progress bar info to Session
        session.setAttribute("submission.progressbar", progressBarInfo);
    }

    /**
     * Attempts to retrieve progress bar information (for a particular
     * collection) from session cache.
     *
     * If the progress bar info cannot be found, returns null
     *
     * @param session
     *            The HTTP Session object
     *
     * @return progressBarInfo HashMap (if found), or null (if not)
     */
    private static Map<String, String> loadProgressBarFromCache(
            HttpSession session)
    {
        return (Map<String, String>) session
                .getAttribute("submission.progressbar");
    }

    /**
     * Loads SubmissionConfig object for the given submission info object. If
     * a SubmissionConfig object cannot be loaded, a Servlet Error is thrown.
     * <p>
     * This method just loads this SubmissionConfig object internally, so that
     * it is available via a call to "getSubmissionConfig()"
     *
     * @param request
     *            The HTTP Servlet Request object
     * @param subInfo
     *            the SubmissionInfo object we are loading into
     * @param forceReload
     *            If true, this method reloads from scratch (and overwrites
     *            cached SubmissionConfig)
     */
    private static void loadSubmissionConfig(HttpServletRequest request,
            SubmissionInfo subInfo, boolean forceReload)
            throws ServletException
    {
        log.debug("Loading Submission Config information");

        if (!forceReload)
        {
            // first, try to load from cache
            subInfo.submissionConfig = loadSubmissionConfigFromCache(request
                    .getSession(), subInfo.getCollectionHandle(), subInfo
                    .isInWorkflow());
        }

        if (subInfo.submissionConfig == null || forceReload)
        {
            // reload the proper Submission process config
            // (by reading the XML config file)
            subInfo.submissionConfig = submissionConfigReader
                    .getSubmissionConfig(subInfo.getCollectionHandle(), subInfo
                            .isInWorkflow());

            // cache this new submission process configuration
            saveSubmissionConfigToCache(request.getSession(),
                    subInfo.submissionConfig, subInfo.getCollectionHandle(),
                    subInfo.isInWorkflow());

            // also must force reload Progress Bar info,
            // since it's based on the Submission config
            loadProgressBar(request, subInfo, true);
        }
        else
        {
            log.debug("Found Submission Config in session cache!");

            // try and reload progress bar from cache
            loadProgressBar(request, subInfo, false);
        }
    }

    /**
     * Saves SubmissionConfig object into session cache. This saves us from
     * having to reload this object during every "Step".
     *
     * @param session
     *            The HTTP Session object
     * @param subConfig
     *            The SubmissionConfig to cache
     * @param collectionHandle
     *            The Collection handle this SubmissionConfig corresponds to
     * @param isWorkflow
     *            Whether this SubmissionConfig corresponds to a workflow
     */
    private static void saveSubmissionConfigToCache(HttpSession session,
            SubmissionConfig subConfig, String collectionHandle,
            boolean isWorkflow)
    {
        // cache the submission process config
        // and the collection it corresponds to
        session.setAttribute("submission.config", subConfig);
        session.setAttribute("submission.config.collection", collectionHandle);
        session.setAttribute("submission.config.isWorkflow", Boolean
                .valueOf(isWorkflow));
    }

    /**
     * Loads SubmissionConfig object from session cache for the given
     * Collection. If a SubmissionConfig object cannot be found, null is
     * returned.
     *
     * @param session
     *            The HTTP Session object
     * @param collectionHandle
     *            The Collection handle of the SubmissionConfig to load
     * @param isWorkflow
     *            whether or not we loading the Submission process for a
     *            workflow item
     *
     * @return The cached SubmissionConfig for this collection
     */
    private static SubmissionConfig loadSubmissionConfigFromCache(
            HttpSession session, String collectionHandle, boolean isWorkflow)
    {
        // attempt to load submission process config
        // from cache for the current collection
        String cachedHandle = (String) session
                .getAttribute("submission.config.collection");

        Boolean cachedIsWorkflow = (Boolean) session
                .getAttribute("submission.config.isWorkflow");

        // only load from cache if the collection handle and
        // workflow item status both match!
        // (null-check added: the isWorkflow attribute may be absent, which
        // previously caused a NullPointerException on unboxing)
        if (collectionHandle.equals(cachedHandle)
                && cachedIsWorkflow != null
                && isWorkflow == cachedIsWorkflow.booleanValue())
        {
            return (SubmissionConfig) session.getAttribute("submission.config");
        }
        else
        {
            return null;
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.util.List;
import java.util.Map;

import org.dspace.content.MetadataSchema;

/**
 * Class representing a line in an input form.
 *
 * @author Brian S. Hughes, based on work by Jenny Toves, OCLC
 * @version
 */
public class DCInput
{
    /** the DC element name */
    private String dcElement = null;

    /** the DC qualifier, if any */
    private String dcQualifier = null;

    /** the DC namespace schema */
    private String dcSchema = null;

    /** a label describing input */
    private String label = null;

    /** the input type */
    private String inputType = null;

    /** is input required? */
    private boolean required = false;

    /** if required, text to display when missing */
    private String warning = null;

    /** is input repeatable? */
    private boolean repeatable = false;

    /** 'hint' text to display */
    private String hint = null;

    /** if input list-controlled, name of list */
    private String valueListName = null;

    /** if input list-controlled, the list itself (display/stored pairs) */
    private List<String> valueList = null;

    /** if non-null, visibility scope restriction */
    private String visibility = null;

    /** if non-null, readonly out of the visibility scope */
    private String readOnly = null;

    /** the name of the controlled vocabulary to use */
    private String vocabulary = null;

    /** is the entry closed to vocabulary terms? */
    private boolean closedVocabulary = false;

    /** size of the input field (characters) */
    private int size = 0;

    /** show choice of language */
    private boolean askLang = false;

    /**
     * authority control parameters: now we can specify authority control for
     * each field on different submission forms independently
     */
    private boolean authority = false;

    private boolean closed = false;

    private boolean editable = true;

    private int choisesLimit = 0;

    private String authorityURLsuffix = "";

    private AuthorityPresentation presentation = AuthorityPresentation.SUGGEST;

    public static enum AuthorityPresentation {
        SUGGEST, LOOKUP
    };

    /**
     * The scope of the input sets, this restricts hidden metadata fields from
     * view during workflow processing.
     */
    public static final String WORKFLOW_SCOPE = "workflow";

    /**
     * The scope of the input sets, this restricts hidden metadata fields from
     * view by the end user during submission.
     */
    public static final String SUBMISSION_SCOPE = "submit";

    /**
     * Class constructor for creating a DCInput object based on the contents of
     * a HashMap
     *
     * @param fieldMap
     *            attribute name to value map parsed from the form definition
     * @param listMap
     *            value-pairs-name to display/stored value list map
     */
    public DCInput(Map<String, String> fieldMap,
            Map<String, List<String>> listMap)
    {
        dcElement = fieldMap.get("dc-element");
        dcQualifier = fieldMap.get("dc-qualifier");

        // Default the schema to dublin core
        dcSchema = fieldMap.get("dc-schema");
        if (dcSchema == null)
        {
            dcSchema = MetadataSchema.DC_SCHEMA;
        }

        String repStr = fieldMap.get("repeatable");
        repeatable = "true".equalsIgnoreCase(repStr)
                || "yes".equalsIgnoreCase(repStr);
        label = fieldMap.get("label");
        inputType = fieldMap.get("input-type");
        // these types are list-controlled
        if ("dropdown".equals(inputType) || "qualdrop_value".equals(inputType)
                || "list".equals(inputType))
        {
            valueListName = fieldMap.get("value-pairs-name");
            valueList = listMap.get(valueListName);
        }
        hint = fieldMap.get("hint");
        warning = fieldMap.get("required");
        required = (warning != null && warning.length() > 0);
        visibility = fieldMap.get("visibility");
        readOnly = fieldMap.get("readonly");
        vocabulary = fieldMap.get("vocabulary");
        String closedVocabularyStr = fieldMap.get("closedVocabulary");
        closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr)
                || "yes".equalsIgnoreCase(closedVocabularyStr);

        // NOTE(review): unlike the other flags, asklang only honors "true",
        // not "yes" — preserved as-is since forms may depend on it.
        if (fieldMap.containsKey("asklang"))
        {
            if ("true".equals(fieldMap.get("asklang")))
            {
                askLang = true;
            }
        }

        if (fieldMap.containsKey("size"))
        {
            try
            {
                size = Integer.parseInt(fieldMap.get("size"));
            }
            catch (NumberFormatException e)
            {
                // malformed size attribute falls back to "unspecified"
                size = 0;
            }
        }
        else
        {
            size = 0;
        }

        if ("true".equals(fieldMap.get("authority"))
                || "yes".equals(fieldMap.get("authority")))
        {
            this.authority = true;
        }
        if ("true".equals(fieldMap.get("aclosed"))
                || "yes".equals(fieldMap.get("aclosed")))
        {
            this.closed = true;
        }
        if ("false".equals(fieldMap.get("aeditable"))
                || "off".equals(fieldMap.get("aeditable")))
        {
            this.editable = false;
        }
        try
        {
            // Integer.parseInt(null) also throws NumberFormatException,
            // so a missing "choises" attribute defaults to 0 here.
            this.choisesLimit = Integer.parseInt(fieldMap.get("choises"));
        }
        catch (NumberFormatException e)
        {
            this.choisesLimit = 0;
        }

        // if("lookup".equals(fieldMap.get("presentation")))
        // this.presentation = AuthorityPresentation.LOOKUP;
        if (fieldMap.containsKey("authURL"))
        {
            this.authorityURLsuffix = fieldMap.get("authURL");
        }
    }

    /**
     * Is this DCInput for display in the given scope? The scope should be
     * either "workflow" or "submit", as per the input forms definition. If the
     * internal visibility is set to "null" then this will always return true.
     *
     * @param scope
     *            String identifying the scope that this input's visibility
     *            should be tested for
     *
     * @return whether the input should be displayed or not
     */
    public boolean isVisible(String scope)
    {
        return (visibility == null || visibility.equals(scope));
    }

    /**
     * Is this DCInput for display in readonly mode in the given scope? If the
     * scope differ from which in visibility field then we use the out
     * attribute of the visibility element. Possible values are: hidden
     * (default) and readonly. If the DCInput is visible in the scope then this
     * methods must return false
     *
     * @param scope
     *            String identifying the scope that this input's readonly
     *            visibility should be tested for
     *
     * @return whether the input should be displayed in a readonly way or
     *         fully hidden
     */
    public boolean isReadOnly(String scope)
    {
        if (isVisible(scope))
        {
            return false;
        }
        else
        {
            return readOnly != null && readOnly.equalsIgnoreCase("readonly");
        }
    }

    /**
     * Get the repeatable flag for this row
     *
     * @return the repeatable flag
     */
    public boolean isRepeatable()
    {
        return repeatable;
    }

    /**
     * Alternate way of calling isRepeatable()
     *
     * @return the repeatable flag
     */
    public boolean getRepeatable()
    {
        return isRepeatable();
    }

    /**
     * Get the input type for this row
     *
     * @return the input type
     */
    public String getInputType()
    {
        return inputType;
    }

    /**
     * Get the DC element for this form row.
     *
     * @return the DC element
     */
    public String getElement()
    {
        return dcElement;
    }

    /**
     * Get the DC namespace prefix for this form row.
     *
     * @return the DC namespace prefix
     */
    public String getSchema()
    {
        return dcSchema;
    }

    /**
     * Get the warning string for a missing required field, formatted for an
     * HTML table.
     *
     * @return the string prompt if required field was ignored
     */
    public String getWarning()
    {
        return warning;
    }

    /**
     * Is there a required string for this form row?
     *
     * @return true if a required string is set
     */
    public boolean isRequired()
    {
        return required;
    }

    /**
     * Get the DC qualifier for this form row.
     *
     * @return the DC qualifier
     */
    public String getQualifier()
    {
        return dcQualifier;
    }

    /**
     * Get the hint for this form row, formatted for an HTML table
     *
     * @return the hints
     */
    public String getHints()
    {
        return hint;
    }

    /**
     * Get the label for this form row.
     *
     * @return the label
     */
    public String getLabel()
    {
        return label;
    }

    /**
     * Get the name of the pairs type
     *
     * @return the pairs type name
     */
    public String getPairsType()
    {
        return valueListName;
    }

    /**
     * Get the list of display/stored value pairs (alternating: display at
     * even indices, stored at odd indices), or null if not list-controlled.
     *
     * @return the value-pair list
     */
    public List<String> getPairs()
    {
        return valueList;
    }

    /**
     * Get the name of the controlled vocabulary that is associated with this
     * field
     *
     * @return the name of associated the vocabulary
     */
    public String getVocabulary()
    {
        return vocabulary;
    }

    /**
     * Set the name of the controlled vocabulary that is associated with this
     * field
     *
     * @param vocabulary
     *            the name of the vocabulary
     */
    public void setVocabulary(String vocabulary)
    {
        this.vocabulary = vocabulary;
    }

    /**
     * Gets the display string that corresponds to the passed storage string
     * in a particular display-storage pair set.
     *
     * @param pairTypeName
     *            Name of display-storage pair set to search (currently
     *            unused; the instance's own value list is searched)
     * @param storedString
     *            the string that gets stored
     *
     * @return the displayed string whose selection causes storageString to be
     *         stored, null if no match
     */
    public String getDisplayString(String pairTypeName, String storedString)
    {
        if (valueList != null && storedString != null)
        {
            // guard i + 1 so a malformed odd-length pair list cannot throw
            for (int i = 0; i + 1 < valueList.size(); i += 2)
            {
                if (storedString.equals(valueList.get(i + 1)))
                {
                    return valueList.get(i);
                }
            }
        }
        return null;
    }

    /**
     * Gets the stored string that corresponds to the passed display string in
     * a particular display-storage pair set.
     *
     * @param pairTypeName
     *            Name of display-storage pair set to search (currently
     *            unused; the instance's own value list is searched)
     * @param displayedString
     *            the string that gets displayed
     *
     * @return the string that gets stored when displayString gets selected,
     *         null if no match
     */
    public String getStoredString(String pairTypeName, String displayedString)
    {
        if (valueList != null && displayedString != null)
        {
            // guard i + 1 so a malformed odd-length pair list cannot throw
            for (int i = 0; i + 1 < valueList.size(); i += 2)
            {
                if (displayedString.equals(valueList.get(i)))
                {
                    return valueList.get(i + 1);
                }
            }
        }
        return null;
    }

    /**
     * The closed attribute of the vocabulary tag for this field as set in
     * input-forms.xml
     *
     * <code>
     * <field>
     *     .....
     *     <vocabulary closed="true">nsrc</vocabulary>
     * </field>
     * </code>
     *
     * @return the closedVocabulary flags: true if the entry should be
     *         restricted only to vocabulary terms, false otherwise
     */
    public boolean isClosedVocabulary()
    {
        return closedVocabulary;
    }

    /**
     * Gets the desired size for input element (box) in user interface.
     *
     * @return size the desired size of the input element
     */
    public int getSize()
    {
        return this.size;
    }

    /**
     * Gets the flag value - to request user input for language attribute for
     * the field.
     */
    public boolean getAskLanguage()
    {
        return askLang;
    }

    /**
     * Returns fully qualified name of the field.
     *
     * @return String full name of the field schema.element.qualifier
     */
    public String getFullQualName()
    {
        String result = "";
        result += getSchema() == null ? "" : getSchema();
        result += getElement() == null ? "" : ("." + getElement());
        result += getQualifier() == null ? "" : ("." + getQualifier());
        return result;
    }

    /** @return whether authority control is enabled for this field */
    public boolean isAuthority()
    {
        return this.authority;
    }

    /**
     * Enable authority control for this field with the given parameters.
     * (Name spelling "onAutority" kept for caller compatibility.)
     */
    public void onAutority(boolean isclosed, boolean iseditable,
            AuthorityPresentation presentation, int limit, String URL)
    {
        this.authority = true;
        this.authorityURLsuffix = URL;
        this.closed = isclosed;
        this.editable = iseditable;
        this.choisesLimit = limit;
        this.presentation = presentation;
    }

    /** Disable authority control for this field. */
    public void offAuthority()
    {
        this.authority = false;
    }

    /** @return suffix appended to the authority lookup URL */
    public String getAuthorityURLsuffix()
    {
        return authorityURLsuffix;
    }

    /** @return maximum number of authority choices (0 = unlimited/unset) */
    public int getChoisesLimit()
    {
        return choisesLimit;
    }

    /** @return whether the authority value set is closed */
    public boolean isAuthorityClosed()
    {
        return closed;
    }

    /** @return whether the authority-controlled value remains editable */
    public boolean isAuthorityEditable()
    {
        return editable;
    }

    /** @return how authority choices are presented (SUGGEST or LOOKUP) */
    public AuthorityPresentation getPresentation()
    {
        return presentation;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.util; /** * This is a superclass for exceptions representing a failure when * importing or exporting a package. E.g., unacceptable package format * or contents. Implementations should throw one of the more specific * exceptions. This class is intended for declarations and catch clauses. * * @author Larry Stone * @version $Revision: 3761 $ */ public class DCInputsReaderException extends Exception { /** * No-args constructor. */ public DCInputsReaderException() { super(); } /** * Constructor for a given message. * @param message diagnostic message. */ public DCInputsReaderException(String message) { super(message); } /** * Constructor for a given cause. * @param cause throwable that caused this exception */ public DCInputsReaderException(Throwable cause) { super(cause); } /** * Constructor to create a new exception wrapping it around another exception. * @param message diagnostic message. * @param cause throwable that caused this exception */ public DCInputsReaderException(String message, Throwable cause) { super(message, cause); } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.util;

import java.sql.SQLException;

import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;

/**
 * This class is an addition to the AuthorizeManager that performs
 * authorization checks for non-CRUD actions (managing policies, groups,
 * CC licenses, withdraw/reinstate, etc.).
 *
 * Each method either returns normally (the current user is authorized) or
 * throws an AuthorizeException. Which delegated-administration role is
 * sufficient for each action is driven by AuthorizeConfiguration flags.
 *
 * @author bollini
 *
 */
public class AuthorizeUtil
{
    /**
     * Is allowed manage (create, remove, edit) bitstream's policies in the
     * current context?
     *
     * @param context
     *            the DSpace Context Object
     * @param bitstream
     *            the bitstream that the policy refer to
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             manage the bitstream's policies
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageBitstreamPolicy(Context context,
            Bitstream bitstream) throws AuthorizeException, SQLException
    {
        // NOTE(review): assumes the bitstream belongs to at least one bundle;
        // an orphan bitstream would raise ArrayIndexOutOfBoundsException here.
        Bundle bundle = bitstream.getBundles()[0];
        authorizeManageBundlePolicy(context, bundle);
    }

    /**
     * Is allowed manage (create, remove, edit) bundle's policies in the
     * current context?
     *
     * @param context
     *            the DSpace Context Object
     * @param bundle
     *            the bundle that the policy refer to
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             manage the bundle's policies
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageBundlePolicy(Context context,
            Bundle bundle) throws AuthorizeException, SQLException
    {
        // NOTE(review): assumes the bundle belongs to at least one item.
        Item item = bundle.getItems()[0];
        authorizeManageItemPolicy(context, item);
    }

    /**
     * Is allowed manage (create, remove, edit) item's policies in the
     * current context?
     *
     * The check cascades: item admin, then owning-collection admin, then
     * owning-community admin, falling back to requiring a system admin.
     *
     * @param context
     *            the DSpace Context Object
     * @param item
     *            the item that the policy refer to
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             manage the item's policies
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageItemPolicy(Context context, Item item)
            throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canItemAdminManagePolicies())
        {
            AuthorizeManager.authorizeAction(context, item, Constants.ADMIN);
        }
        else if (AuthorizeConfiguration.canCollectionAdminManageItemPolicies())
        {
            AuthorizeManager.authorizeAction(context, item
                    .getOwningCollection(), Constants.ADMIN);
        }
        else if (AuthorizeConfiguration.canCommunityAdminManageItemPolicies())
        {
            // NOTE(review): only the first parent community is checked.
            AuthorizeManager
                    .authorizeAction(context, item.getOwningCollection()
                            .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage item policies");
        }
    }

    /**
     * Is allowed manage (create, remove, edit) collection's policies in the
     * current context?
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection that the policy refer to
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             manage the collection's policies
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageCollectionPolicy(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCollectionAdminManagePolicies())
        {
            AuthorizeManager.authorizeAction(context, collection,
                    Constants.ADMIN);
        }
        else if (AuthorizeConfiguration
                .canCommunityAdminManageCollectionPolicies())
        {
            AuthorizeManager.authorizeAction(context, collection
                    .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage collection policies");
        }
    }

    /**
     * Is allowed manage (create, remove, edit) community's policies in the
     * current context?
     *
     * @param context
     *            the DSpace Context Object
     * @param community
     *            the community that the policy refer to
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             manage the community's policies
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageCommunityPolicy(Context context,
            Community community) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCommunityAdminManagePolicies())
        {
            AuthorizeManager.authorizeAction(context, community,
                    Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage community policies");
        }
    }

    /**
     * Throw an AuthorizeException if the current user is not a System Admin.
     *
     * @param context
     *            the DSpace Context Object
     * @throws AuthorizeException
     *             if the current user is not a System Admin
     * @throws SQLException
     *             if a db error occur
     */
    public static void requireAdminRole(Context context)
            throws AuthorizeException, SQLException
    {
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to perform this action");
        }
    }

    /**
     * Is the current user allowed to manage (add, remove, replace) the item's
     * CC License?
     *
     * First tries the plain ADD+REMOVE authorizations on the item; only if
     * those fail does it fall back to the configured delegated-admin roles.
     *
     * @param context
     *            the DSpace Context Object
     * @param item
     *            the item that the CC License refer to
     * @throws AuthorizeException
     *             if the current user is not allowed to
     *             manage the item's CC License
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageCCLicense(Context context, Item item)
            throws AuthorizeException, SQLException
    {
        try
        {
            AuthorizeManager.authorizeAction(context, item, Constants.ADD);
            AuthorizeManager.authorizeAction(context, item, Constants.REMOVE);
        }
        catch (AuthorizeException authex)
        {
            if (AuthorizeConfiguration.canItemAdminManageCCLicense())
            {
                AuthorizeManager
                        .authorizeAction(context, item, Constants.ADMIN);
            }
            else if (AuthorizeConfiguration.canCollectionAdminManageCCLicense())
            {
                AuthorizeManager.authorizeAction(context, item
                        .getParentObject(), Constants.ADMIN);
            }
            else if (AuthorizeConfiguration.canCommunityAdminManageCCLicense())
            {
                AuthorizeManager.authorizeAction(context, item
                        .getParentObject().getParentObject(), Constants.ADMIN);
            }
            else
            {
                requireAdminRole(context);
            }
        }
    }

    /**
     * Is the current user allowed to manage (create, remove, edit) the
     * collection's template item?
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection
     * @throws AuthorizeException
     *             if the current user is not allowed to manage the collection's
     *             template item
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageTemplateItem(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        boolean isAuthorized = collection.canEditBoolean(false);

        if (!isAuthorized
                && AuthorizeConfiguration
                        .canCollectionAdminManageTemplateItem())
        {
            AuthorizeManager.authorizeAction(context, collection,
                    Constants.ADMIN);
        }
        else if (!isAuthorized
                && AuthorizeConfiguration
                        .canCommunityAdminManageCollectionTemplateItem())
        {
            // Unlike the other methods, this one guards against a collection
            // with no parent community before indexing [0].
            Community[] communities = collection.getCommunities();
            Community parent = communities != null && communities.length > 0
                    ? communities[0] : null;
            AuthorizeManager.authorizeAction(context, parent, Constants.ADMIN);
        }
        else if (!isAuthorized && !AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "You are not authorized to create a template item for the collection");
        }
    }

    /**
     * Can the current user manage (create, remove, edit) the submitters group of
     * the collection?
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection
     * @throws AuthorizeException
     *             if the current user is not allowed to manage the collection's
     *             submitters group
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageSubmittersGroup(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCollectionAdminManageSubmitters())
        {
            AuthorizeManager.authorizeAction(context, collection,
                    Constants.ADMIN);
        }
        else if (AuthorizeConfiguration
                .canCommunityAdminManageCollectionSubmitters())
        {
            AuthorizeManager.authorizeAction(context, collection
                    .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage collection submitters");
        }
    }

    /**
     * Can the current user manage (create, remove, edit) the workflow groups of
     * the collection?
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection
     * @throws AuthorizeException
     *             if the current user is not allowed to manage the collection's
     *             workflow groups
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageWorkflowsGroup(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCollectionAdminManageWorkflows())
        {
            AuthorizeManager.authorizeAction(context, collection,
                    Constants.ADMIN);
        }
        else if (AuthorizeConfiguration
                .canCommunityAdminManageCollectionWorkflows())
        {
            AuthorizeManager.authorizeAction(context, collection
                    .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage collection workflow");
        }
    }

    /**
     * Can the current user create/edit the admins group of the collection?
     * please note that the remove action need a separate check
     *
     * @see #authorizeRemoveAdminGroup(Context, Collection)
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection
     * @throws AuthorizeException
     *             if the current user is not allowed to create/edit the
     *             collection's admins group
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageAdminGroup(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup())
        {
            AuthorizeManager.authorizeAction(context, collection,
                    Constants.ADMIN);
        }
        else if (AuthorizeConfiguration
                .canCommunityAdminManageCollectionAdminGroup())
        {
            AuthorizeManager.authorizeAction(context, collection
                    .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage collection admin");
        }
    }

    /**
     * Can the current user remove the admins group of the collection?
     * please note that the create/edit actions need separate check
     *
     * @see #authorizeManageAdminGroup(Context, Collection)
     *
     * @param context
     *            the DSpace Context Object
     * @param collection
     *            the collection
     * @throws AuthorizeException
     *             if the current user is not allowed to remove the
     *             collection's admins group
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeRemoveAdminGroup(Context context,
            Collection collection) throws AuthorizeException, SQLException
    {
        Community[] parentCommunities = collection.getCommunities();
        if (AuthorizeConfiguration
                .canCommunityAdminManageCollectionAdminGroup()
                && parentCommunities != null && parentCommunities.length > 0)
        {
            AuthorizeManager.authorizeAction(context, collection
                    .getCommunities()[0], Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin can remove the admin group of a collection");
        }
    }

    /**
     * Can the current user create/edit the admins group of the community?
     * please note that the remove action need a separate check
     *
     * @see #authorizeRemoveAdminGroup(Context, Collection)
     *
     * @param context
     *            the DSpace Context Object
     * @param community
     *            the community
     * @throws AuthorizeException
     *             if the current user is not allowed to create/edit the
     *             community's admins group
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManageAdminGroup(Context context,
            Community community) throws AuthorizeException, SQLException
    {
        if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup())
        {
            AuthorizeManager.authorizeAction(context, community,
                    Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin are allowed to manage community admin");
        }
    }

    /**
     * Can the current user remove the admins group of the community?
     * please note that the create/edit actions need separate check
     *
     * @see #authorizeManageAdminGroup(Context, Community)
     *
     * @param context
     *            the DSpace Context Object
     * @param community
     *            the community
     * @throws AuthorizeException
     *             if the current user is not allowed to remove the
     *             collection's admins group
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeRemoveAdminGroup(Context context,
            Community community) throws SQLException, AuthorizeException
    {
        // A top-level community (no parent) can only have its admin group
        // removed by a system admin.
        Community parentCommunity = community.getParentCommunity();
        if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()
                && parentCommunity != null)
        {
            AuthorizeManager.authorizeAction(context, parentCommunity,
                    Constants.ADMIN);
        }
        else if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "Only system admin can remove the admin group of the community");
        }
    }

    /**
     * Can the current user remove or edit the supplied policy?
     *
     * Dispatches to the type-specific check that matches the policy's
     * resource type; unknown types require a system admin.
     *
     * @param c
     *            the DSpace Context Object
     * @param rp
     *            a resource policy
     * @throws AuthorizeException
     *             if the current context (current user) is not allowed to
     *             remove/edit the policy
     * @throws SQLException
     *             if a db error occur
     */
    public static void authorizeManagePolicy(Context c, ResourcePolicy rp)
            throws SQLException, AuthorizeException
    {
        switch (rp.getResourceType())
        {
        case Constants.BITSTREAM:
            authorizeManageBitstreamPolicy(c, Bitstream.find(c, rp
                    .getResourceID()));
            break;
        case Constants.BUNDLE:
            authorizeManageBundlePolicy(c, Bundle.find(c, rp.getResourceID()));
            break;

        case Constants.ITEM:
            authorizeManageItemPolicy(c, Item.find(c, rp.getResourceID()));
            break;
        case Constants.COLLECTION:
            authorizeManageCollectionPolicy(c, Collection.find(c, rp
                    .getResourceID()));
            break;
        case Constants.COMMUNITY:
            authorizeManageCommunityPolicy(c, Community.find(c, rp
                    .getResourceID()));
            break;

        default:
            requireAdminRole(c);
            break;
        }
    }

    /**
     * Can the current user withdraw the item?
     *
     * @param context
     *            the DSpace Context Object
     * @param item
     *            the item
     * @throws SQLException
     *             if a db error occur
     * @throws AuthorizeException
     *             if the current user is not allowed to perform the item
     *             withdraw
     */
    public static void authorizeWithdrawItem(Context context, Item item)
            throws SQLException, AuthorizeException
    {
        boolean authorized = false;
        if (AuthorizeConfiguration.canCollectionAdminPerformItemWithdrawn())
        {
            authorized = AuthorizeManager.authorizeActionBoolean(context, item
                    .getOwningCollection(), Constants.ADMIN);
        }
        else if (AuthorizeConfiguration
                .canCommunityAdminPerformItemWithdrawn())
        {
            authorized = AuthorizeManager
                    .authorizeActionBoolean(context, item.getOwningCollection()
                            .getCommunities()[0], Constants.ADMIN);
        }

        if (!authorized)
        {
            // Fall back to a direct REMOVE authorization on the owning
            // collection (final false = do not use inherited admin rights).
            authorized = AuthorizeManager.authorizeActionBoolean(context, item
                    .getOwningCollection(), Constants.REMOVE, false);
        }

        // authorized
        if (!authorized)
        {
            throw new AuthorizeException(
                    "To withdraw item must be COLLECTION_ADMIN or have REMOVE authorization on owning Collection");
        }
    }

    /**
     * Can the current user reinstate the item?
     *
     * The ADD authorization is checked against every collection the item
     * belongs to; any failing collection aborts with an AuthorizeException.
     *
     * @param context
     *            the DSpace Context Object
     * @param item
     *            the item
     * @throws SQLException
     *             if a db error occur
     * @throws AuthorizeException
     *             if the current user is not allowed to perform the item
     *             reinstatement
     */
    public static void authorizeReinstateItem(Context context, Item item)
            throws SQLException, AuthorizeException
    {
        Collection[] colls = item.getCollections();

        for (int i = 0; i < colls.length; i++)
        {
            if (!AuthorizeConfiguration
                    .canCollectionAdminPerformItemReinstatiate())
            {
                if (AuthorizeConfiguration
                        .canCommunityAdminPerformItemReinstatiate()
                        && AuthorizeManager.authorizeActionBoolean(context,
                                colls[i].getCommunities()[0], Constants.ADMIN))
                {
                    // authorized
                }
                else
                {
                    AuthorizeManager.authorizeAction(context, colls[i],
                            Constants.ADD, false);
                }
            }
            else
            {
                AuthorizeManager.authorizeAction(context, colls[i],
                        Constants.ADD);
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.license;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.sql.SQLException;
import java.util.Properties;

import javax.xml.transform.Templates;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.Context;

/**
 * Cleanup class for CC Licenses, corrects XML formating errors by replacing
 * the license_rdf bitstream.
 *
 * @author mdiggory
 */
public class LicenseCleanup
{
    private static final Logger log = Logger.getLogger(LicenseCleanup.class);

    // XSLT used to normalize the license RDF; loaded once per classloader.
    protected static final Templates templates;

    static
    {
        try
        {
            templates = TransformerFactory.newInstance().newTemplates(
                    new StreamSource(CreativeCommons.class
                            .getResourceAsStream("LicenseCleanup.xsl")));
        }
        catch (TransformerConfigurationException e)
        {
            log.error(e.getMessage(), e);
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    /**
     * Command-line entry point: walks every Item in the repository and repairs
     * its CC license RDF, checkpointing progress to "license.processed" so a
     * restart skips items that were already handled.
     *
     * @param args command line arguments (unused)
     * @throws SQLException if a database error occurs
     * @throws IOException if the checkpoint file cannot be read or written
     * @throws AuthorizeException if replacing a bitstream is not authorized
     */
    public static void main(String[] args) throws SQLException,
            AuthorizeException, IOException
    {
        Context ctx = new Context();
        ctx.setIgnoreAuthorization(true);
        ItemIterator iter = Item.findAll(ctx);

        Properties props = new Properties();
        File processed = new File("license.processed");

        if (processed.exists())
        {
            // FIX: the original leaked this FileInputStream; close it in a
            // finally block.
            FileInputStream in = new FileInputStream(processed);
            try
            {
                props.load(in);
            }
            finally
            {
                in.close();
            }
        }

        int i = 0;
        try
        {
            while (iter.hasNext())
            {
                // Checkpoint every 100 items so a crash can resume cheaply.
                if (i == 100)
                {
                    saveProgress(props, processed);
                    i = 0;
                }
                Item item = (Item) iter.next();
                log.info("checking: " + item.getID());
                if (!props.containsKey("I" + item.getID()))
                {
                    handleItem(item);
                    log.info("processed: " + item.getID());
                }
                item.decache();
                props.put("I" + item.getID(), "done");
                i++;
            }
        }
        finally
        {
            // Always persist progress, even on failure.
            saveProgress(props, processed);
        }
    }

    /**
     * Persist the progress checkpoint, always closing the output stream
     * (FIX: the original never closed the FileOutputStream).
     *
     * @param props the processed-item checkpoint
     * @param processed the checkpoint file
     * @throws IOException if the file cannot be written
     */
    private static void saveProgress(Properties props, File processed)
            throws IOException
    {
        FileOutputStream out = new FileOutputStream(processed);
        try
        {
            props.store(out,
                    "processed license files, remove to restart processing from scratch");
        }
        finally
        {
            out.close();
        }
    }

    /**
     * Process Item, correcting CC-License if encountered.
     *
     * Replaces the "license_rdf" bitstream in the CC-LICENSE bundle with a
     * cleaned-up copy produced by the LicenseCleanup.xsl transform.
     *
     * @param item the item to inspect
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if bitstream replacement is not authorized
     * @throws IOException if the bitstream cannot be read or written
     */
    protected static void handleItem(Item item) throws SQLException,
            AuthorizeException, IOException
    {
        Bundle[] bundles = item.getBundles("CC-LICENSE");

        if (bundles == null || bundles.length == 0)
        {
            return;
        }

        Bundle bundle = bundles[0];

        Bitstream bitstream = bundle.getBitstreamByName("license_rdf");

        // NOTE(review): platform default charset is used for both decode and
        // encode below; assumed consistent with how the RDF was stored.
        String license_rdf = new String(copy(bitstream));

        /* quickly fix xml by ripping out offensive parts */
        license_rdf = license_rdf.replaceFirst("<license", "");
        license_rdf = license_rdf.replaceFirst("</license>", "");

        StringWriter result = new StringWriter();

        try
        {
            templates.newTransformer().transform(
                    new StreamSource(new ByteArrayInputStream(license_rdf
                            .getBytes())), new StreamResult(result));
        }
        catch (TransformerException e)
        {
            throw new IllegalStateException(e.getMessage(), e);
        }

        StringBuffer buffer = result.getBuffer();

        // Replace the old bitstream with the cleaned copy, preserving all
        // descriptive metadata.
        Bitstream newBitstream = bundle
                .createBitstream(new ByteArrayInputStream(buffer.toString()
                        .getBytes()));
        newBitstream.setName(bitstream.getName());
        newBitstream.setDescription(bitstream.getDescription());
        newBitstream.setFormat(bitstream.getFormat());
        newBitstream.setSource(bitstream.getSource());
        newBitstream.setUserFormatDescription(bitstream
                .getUserFormatDescription());
        newBitstream.update();

        bundle.removeBitstream(bitstream);

        bundle.update();
    }

    static final int BUFF_SIZE = 100000;

    // NOTE(review): this buffer is shared across all callers; the
    // synchronized block below serializes reads, so concurrent copies are
    // safe but not parallel.
    static final byte[] buffer = new byte[BUFF_SIZE];

    /**
     * Fast stream copy routine
     *
     * @param b the bitstream to read fully
     * @return the bitstream contents as a byte array
     * @throws IOException if reading fails
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if the bitstream may not be retrieved
     */
    public static byte[] copy(Bitstream b) throws IOException, SQLException,
            AuthorizeException
    {
        InputStream in = null;
        ByteArrayOutputStream out = null;
        try
        {
            in = b.retrieve();
            out = new ByteArrayOutputStream();
            while (true)
            {
                synchronized (buffer)
                {
                    int amountRead = in.read(buffer);
                    if (amountRead == -1)
                    {
                        break;
                    }
                    out.write(buffer, 0, amountRead);
                }
            }
        }
        finally
        {
            if (in != null)
            {
                in.close();
            }
            if (out != null)
            {
                out.close();
            }
        }
        return out.toByteArray();
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.license;

import java.io.InputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.sql.SQLException;

import javax.xml.transform.Templates;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Utils;

/**
 * Utility methods for attaching, retrieving and removing Creative Commons
 * license information (URL, text and RDF) stored as bitstreams in an item's
 * CC-LICENSE bundle.
 */
public class CreativeCommons
{
    /**
     * The Bundle Name
     */
    public static final String CC_BUNDLE_NAME = "CC-LICENSE";

    private static final String CC_BS_SOURCE = "org.dspace.license.CreativeCommons";

    /**
     * Some BitStream Names (BSN)
     */
    private static final String BSN_LICENSE_URL = "license_url";

    private static final String BSN_LICENSE_TEXT = "license_text";

    private static final String BSN_LICENSE_RDF = "license_rdf";

    protected static final Templates templates;

    private static boolean enabled_p;

    static
    {
        // we only check the property once
        enabled_p = ConfigurationManager
                .getBooleanProperty("webui.submit.enable-cc");

        if (enabled_p)
        {
            // if defined, set a proxy server for http requests to Creative
            // Commons site
            String proxyHost = ConfigurationManager
                    .getProperty("http.proxy.host");
            String proxyPort = ConfigurationManager
                    .getProperty("http.proxy.port");

            if ((proxyHost != null) && (proxyPort != null))
            {
                System.setProperty("http.proxyHost", proxyHost);
                System.setProperty("http.proxyPort", proxyPort);
            }
        }

        try
        {
            templates = TransformerFactory.newInstance().newTemplates(
                    new StreamSource(CreativeCommons.class
                            .getResourceAsStream("CreativeCommons.xsl")));
        }
        catch (TransformerConfigurationException e)
        {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    /**
     * Simple accessor for enabling of CC
     */
    public static boolean isEnabled()
    {
        return enabled_p;
    }

    // create the CC bundle if it doesn't exist
    // If it does, remove it and create a new one.
    private static Bundle getCcBundle(Item item) throws SQLException,
            AuthorizeException, IOException
    {
        Bundle[] bundles = item.getBundles(CC_BUNDLE_NAME);

        if ((bundles.length > 0) && (bundles[0] != null))
        {
            item.removeBundle(bundles[0]);
        }
        return item.createBundle(CC_BUNDLE_NAME);
    }

    /**
     * This is a bit of the "do-the-right-thing" method for CC stuff in an item:
     * fetches the license text and RDF for the given URL and stores all three
     * representations (URL, text, RDF) as bitstreams in a fresh CC bundle.
     *
     * @param context the DSpace context
     * @param item the item to attach the license to
     * @param cc_license_url the Creative Commons license URL
     */
    public static void setLicense(Context context, Item item,
            String cc_license_url) throws SQLException, IOException,
            AuthorizeException
    {
        Bundle bundle = getCcBundle(item);

        // get some more information
        String license_text = fetchLicenseText(cc_license_url);
        String license_rdf = fetchLicenseRDF(cc_license_url);

        // set the formats
        BitstreamFormat bs_url_format = BitstreamFormat.findByShortDescription(
                context, "License");
        BitstreamFormat bs_text_format = BitstreamFormat.findByShortDescription(
                context, "CC License");
        BitstreamFormat bs_rdf_format = BitstreamFormat.findByShortDescription(
                context, "RDF XML");

        // set the URL bitstream
        setBitstreamFromBytes(item, bundle, BSN_LICENSE_URL, bs_url_format,
                cc_license_url.getBytes());

        // set the license text bitstream
        setBitstreamFromBytes(item, bundle, BSN_LICENSE_TEXT, bs_text_format,
                license_text.getBytes());

        // set the RDF bitstream
        setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format,
                license_rdf.getBytes());
    }

    /**
     * Store a caller-supplied license stream in a fresh CC bundle, picking the
     * bitstream name and format from the supplied MIME type (text/xml and
     * text/rdf are stored as RDF; anything else as plain license text).
     *
     * @param context the DSpace context
     * @param item the item to attach the license to
     * @param licenseStm stream with the license content
     * @param mimeType MIME type of the stream, may be null
     */
    public static void setLicense(Context context, Item item,
            InputStream licenseStm, String mimeType) throws SQLException,
            IOException, AuthorizeException
    {
        Bundle bundle = getCcBundle(item);

        // set the format
        BitstreamFormat bs_format;
        if ("text/xml".equalsIgnoreCase(mimeType))
        {
            bs_format = BitstreamFormat.findByShortDescription(context,
                    "CC License");
        }
        else if ("text/rdf".equalsIgnoreCase(mimeType))
        {
            bs_format = BitstreamFormat.findByShortDescription(context,
                    "RDF XML");
        }
        else
        {
            bs_format = BitstreamFormat.findByShortDescription(context,
                    "License");
        }

        Bitstream bs = bundle.createBitstream(licenseStm);
        bs.setSource(CC_BS_SOURCE);
        bs.setName((mimeType != null
                && (mimeType.equalsIgnoreCase("text/xml")
                        || mimeType.equalsIgnoreCase("text/rdf")))
                ? BSN_LICENSE_RDF : BSN_LICENSE_TEXT);
        bs.setFormat(bs_format);
        bs.update();
    }

    /**
     * Remove any CC license bundle from the item (no-op if none exists).
     */
    public static void removeLicense(Context context, Item item)
            throws SQLException, IOException, AuthorizeException
    {
        // remove CC license bundle if one exists
        Bundle[] bundles = item.getBundles(CC_BUNDLE_NAME);

        if ((bundles.length > 0) && (bundles[0] != null))
        {
            item.removeBundle(bundles[0]);
        }
    }

    /**
     * Does the item carry a complete CC license (URL, text AND RDF)?
     *
     * @return true only if the CC bundle exists and all three bitstreams are
     *         present and readable; authorization failures count as "no".
     */
    public static boolean hasLicense(Context context, Item item)
            throws SQLException, IOException
    {
        // try to find CC license bundle
        Bundle[] bundles = item.getBundles(CC_BUNDLE_NAME);

        if (bundles.length == 0)
        {
            return false;
        }

        // verify it has correct contents
        try
        {
            if ((getLicenseURL(item) == null) || (getLicenseText(item) == null)
                    || (getLicenseRDF(item) == null))
            {
                return false;
            }
        }
        catch (AuthorizeException ae)
        {
            return false;
        }
        return true;
    }

    public static String getLicenseURL(Item item) throws SQLException,
            IOException, AuthorizeException
    {
        return getStringFromBitstream(item, BSN_LICENSE_URL);
    }

    public static String getLicenseText(Item item) throws SQLException,
            IOException, AuthorizeException
    {
        return getStringFromBitstream(item, BSN_LICENSE_TEXT);
    }

    public static String getLicenseRDF(Item item) throws SQLException,
            IOException, AuthorizeException
    {
        return getStringFromBitstream(item, BSN_LICENSE_RDF);
    }

    /**
     * Get Creative Commons license RDF, returning Bitstream object.
     *
     * @return bitstream or null.
     */
    public static Bitstream getLicenseRdfBitstream(Item item)
            throws SQLException, IOException, AuthorizeException
    {
        return getBitstream(item, BSN_LICENSE_RDF);
    }

    /**
     * Get Creative Commons license Text, returning Bitstream object.
     *
     * @return bitstream or null.
     */
    public static Bitstream getLicenseTextBitstream(Item item)
            throws SQLException, IOException, AuthorizeException
    {
        return getBitstream(item, BSN_LICENSE_TEXT);
    }

    /**
     * Get a few license-specific properties. We expect these to be cached at
     * least per server run.
     */
    public static String fetchLicenseText(String license_url)
    {
        String text_url = license_url;
        byte[] urlBytes = fetchURL(text_url);

        return (urlBytes != null) ? new String(urlBytes) : "";
    }

    public static String fetchLicenseRDF(String license_url)
    {
        StringWriter result = new StringWriter();

        try
        {
            templates.newTransformer().transform(
                    new StreamSource(license_url + "rdf"),
                    new StreamResult(result));
        }
        catch (TransformerException e)
        {
            throw new IllegalStateException(e.getMessage(), e);
        }

        return result.getBuffer().toString();
    }

    // The following two helper methods assume that the CC
    // bitstreams are short and easily expressed as byte arrays in RAM

    /**
     * This helper method takes some bytes and stores them as a bitstream for an
     * item, under the CC bundle, with the given bitstream name
     */
    private static void setBitstreamFromBytes(Item item, Bundle bundle,
            String bitstream_name, BitstreamFormat format, byte[] bytes)
            throws SQLException, IOException, AuthorizeException
    {
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        Bitstream bs = bundle.createBitstream(bais);

        bs.setName(bitstream_name);
        bs.setSource(CC_BS_SOURCE);

        bs.setFormat(format);

        // commit everything
        bs.update();
    }

    /**
     * This helper method wraps a String around a byte array returned from the
     * bitstream method further down
     */
    private static String getStringFromBitstream(Item item,
            String bitstream_name) throws SQLException, IOException,
            AuthorizeException
    {
        byte[] bytes = getBytesFromBitstream(item, bitstream_name);

        if (bytes == null)
        {
            return null;
        }

        return new String(bytes);
    }

    /**
     * This helper method retrieves the bytes of a bitstream for an item under
     * the CC bundle, with the given bitstream name
     */
    private static Bitstream getBitstream(Item item, String bitstream_name)
            throws SQLException, IOException, AuthorizeException
    {
        Bundle cc_bundle = null;

        // look for the CC bundle
        try
        {
            Bundle[] bundles = item.getBundles(CC_BUNDLE_NAME);

            if ((bundles != null) && (bundles.length > 0))
            {
                cc_bundle = bundles[0];
            }
            else
            {
                return null;
            }
        }
        catch (Exception exc)
        {
            // this exception catching is a bit generic,
            // but basically it happens if there is no CC bundle
            return null;
        }

        return cc_bundle.getBitstreamByName(bitstream_name);
    }

    private static byte[] getBytesFromBitstream(Item item,
            String bitstream_name) throws SQLException, IOException,
            AuthorizeException
    {
        Bitstream bs = getBitstream(item, bitstream_name);

        // no such bitstream
        if (bs == null)
        {
            return null;
        }

        // create a ByteArrayOutputStream
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Utils.copy(bs.retrieve(), baos);

        return baos.toByteArray();
    }

    /**
     * Fetch the contents of a URL.
     *
     * FIX: the original allocated a byte array sized by
     * URLConnection.getContentLength(), which returns -1 when the server does
     * not report a length (NegativeArraySizeException) and silently truncated
     * longer responses; it also never closed the input stream. This version
     * streams the whole body into a growable buffer and always closes the
     * stream.
     *
     * @param url_string the URL to fetch
     * @return the response body, or null if the URL is malformed or an I/O
     *         error occurs (unchanged error contract)
     */
    private static byte[] fetchURL(String url_string)
    {
        try
        {
            URL url = new URL(url_string);
            URLConnection connection = url.openConnection();

            InputStream in = connection.getInputStream();
            try
            {
                ByteArrayOutputStream body = new ByteArrayOutputStream();
                byte[] chunk = new byte[4096];
                int len;

                // read until EOF regardless of the reported content length
                while ((len = in.read(chunk)) != -1)
                {
                    body.write(chunk, 0, len);
                }
                return body.toByteArray();
            }
            finally
            {
                in.close();
            }
        }
        catch (MalformedURLException e)
        {
            return null;
        }
        catch (IOException e)
        {
            return null;
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.handle;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

/**
 * Interface to the <a href="http://www.handle.net" target=_new>CNRI Handle
 * System </a>.
 *
 * <p>
 * Currently, this class simply maps handles to local facilities; handles which
 * are owned by other sites (including other DSpaces) are treated as
 * non-existent.
 * </p>
 *
 * @author Peter Breton
 * @version $Revision: 5844 $
 */
public class HandleManager
{
    /** log4j category */
    private static Logger log = Logger.getLogger(HandleManager.class);

    /** Prefix registered to no one */
    static final String EXAMPLE_PREFIX = "123456789";

    /** Private Constructor */
    private HandleManager()
    {
    }

    /**
     * Return the local URL for handle, or null if handle cannot be found.
     *
     * The returned URL is a (non-handle-based) location where a dissemination
     * of the object referred to by handle can be obtained.
     *
     * @param context
     *            DSpace context
     * @param handle
     *            The handle
     * @return The local URL
     * @exception SQLException
     *                If a database error occurs
     */
    public static String resolveToURL(Context context, String handle)
            throws SQLException
    {
        TableRow dbhandle = findHandleInternal(context, handle);

        if (dbhandle == null)
        {
            return null;
        }

        String url = ConfigurationManager.getProperty("dspace.url")
                + "/handle/" + handle;

        if (log.isDebugEnabled())
        {
            log.debug("Resolved " + handle + " to " + url);
        }

        return url;
    }

    /**
     * Transforms handle into the canonical form <em>hdl:handle</em>.
     *
     * No attempt is made to verify that handle is in fact valid.
     *
     * @param handle
     *            The handle
     * @return The canonical form
     */
    public static String getCanonicalForm(String handle)
    {
        // Let the admin define a new prefix, if not then we'll use the
        // CNRI default. This allows the admin to use "hdl:" if they want to or
        // use a locally branded prefix handle.myuni.edu.
        String handlePrefix = ConfigurationManager.getProperty("handle.canonical.prefix");
        if (handlePrefix == null || handlePrefix.length() == 0)
        {
            handlePrefix = "http://hdl.handle.net/";
        }

        return handlePrefix + handle;
    }

    /**
     * Returns displayable string of the handle's 'temporary' URL
     * <em>http://hdl.handle.net/handle</em>.
     *
     * No attempt is made to verify that handle is in fact valid.
     *
     * @param handle The handle
     * @return The canonical form
     */
    // public static String getURLForm(String handle)
    // {
    //     return "http://hdl.handle.net/" + handle;
    // }

    /**
     * Creates a new handle in the database.
     *
     * @param context
     *            DSpace context
     * @param dso
     *            The DSpaceObject to create a handle for
     * @return The newly created handle
     * @exception SQLException
     *                If a database error occurs
     */
    public static String createHandle(Context context, DSpaceObject dso)
            throws SQLException
    {
        TableRow handle = DatabaseManager.create(context, "Handle");
        String handleId = createId(handle.getIntColumn("handle_id"));

        handle.setColumn("handle", handleId);
        handle.setColumn("resource_type_id", dso.getType());
        handle.setColumn("resource_id", dso.getID());
        DatabaseManager.update(context, handle);

        if (log.isDebugEnabled())
        {
            log.debug("Created new handle for "
                    + Constants.typeText[dso.getType()] + " (ID=" + dso.getID()
                    + ") " + handleId);
        }

        return handleId;
    }

    /**
     * Creates a handle entry, but with a handle supplied by the caller (new
     * Handle not generated)
     *
     * @param context
     *            DSpace context
     * @param dso
     *            DSpaceObject
     * @param suppliedHandle
     *            existing handle value
     * @return the Handle
     * @throws IllegalStateException if specified handle is already in use by another object
     */
    public static String createHandle(Context context, DSpaceObject dso,
            String suppliedHandle) throws SQLException, IllegalStateException
    {
        // Check if the supplied handle is already in use -- cannot use the same handle twice
        TableRow handle = findHandleInternal(context, suppliedHandle);

        if (handle != null && !handle.isColumnNull("resource_id"))
        {
            // Check if this handle is already linked up to this specified DSpace Object
            if (handle.getIntColumn("resource_id") == dso.getID()
                    && handle.getIntColumn("resource_type_id") == dso.getType())
            {
                // This handle already links to this DSpace Object -- so, there's nothing else we need to do
                return suppliedHandle;
            }
            else
            {
                // handle found in DB table & already in use by another existing resource
                throw new IllegalStateException(
                        "Attempted to create a handle which is already in use: "
                                + suppliedHandle);
            }
        }
        else if (handle != null && !handle.isColumnNull("resource_type_id"))
        {
            // If there is a 'resource_type_id' (but 'resource_id' is empty), then the object using
            // this handle was previously unbound (see unbindHandle() method) -- likely because object was deleted
            int previousType = handle.getIntColumn("resource_type_id");

            // Since we are restoring an object to a pre-existing handle, double check we are restoring the same *type* of object
            // (e.g. we will not allow an Item to be restored to a handle previously used by a Collection)
            if (previousType != dso.getType())
            {
                throw new IllegalStateException(
                        "Attempted to reuse a handle previously used by a "
                                + Constants.typeText[previousType]
                                + " for a new "
                                + Constants.typeText[dso.getType()]);
            }
        }
        else if (handle == null) // if handle not found, create it
        {
            // handle not found in DB table -- create a new table entry
            handle = DatabaseManager.create(context, "Handle");
            handle.setColumn("handle", suppliedHandle);
        }

        handle.setColumn("resource_type_id", dso.getType());
        handle.setColumn("resource_id", dso.getID());
        DatabaseManager.update(context, handle);

        if (log.isDebugEnabled())
        {
            log.debug("Created new handle for "
                    + Constants.typeText[dso.getType()] + " (ID=" + dso.getID()
                    + ") " + suppliedHandle);
        }

        return suppliedHandle;
    }

    /**
     * Removes binding of Handle to a DSpace object, while leaving the
     * Handle in the table so it doesn't get reallocated. The AIP
     * implementation also needs it there for foreign key references.
     *
     * @param context DSpace context
     * @param dso DSpaceObject whose Handle to unbind.
     */
    public static void unbindHandle(Context context, DSpaceObject dso)
            throws SQLException
    {
        TableRow row = getHandleInternal(context, dso.getType(), dso.getID());
        if (row != null)
        {
            // Only set the "resource_id" column to null when unbinding a handle.
            // We want to keep around the "resource_type_id" value, so that we
            // can verify during a restore whether the same *type* of resource
            // is reusing this handle!
            row.setColumnNull("resource_id");
            DatabaseManager.update(context, row);

            if (log.isDebugEnabled())
            {
                log.debug("Unbound Handle " + row.getStringColumn("handle")
                        + " from object " + Constants.typeText[dso.getType()]
                        + " id=" + dso.getID());
            }
        }
        else
        {
            log.warn("Cannot find Handle entry to unbind for object "
                    + Constants.typeText[dso.getType()] + " id=" + dso.getID());
        }
    }

    /**
     * Return the object which handle maps to, or null. This is the object
     * itself, not a URL which points to it.
     *
     * @param context
     *            DSpace context
     * @param handle
     *            The handle to resolve
     * @return The object which handle maps to, or null if handle is not mapped
     *         to any object.
     * @exception IllegalStateException
     *                If handle was found but is not bound to an object
     * @exception SQLException
     *                If a database error occurs
     */
    public static DSpaceObject resolveToObject(Context context, String handle)
            throws IllegalStateException, SQLException
    {
        TableRow dbhandle = findHandleInternal(context, handle);

        if (dbhandle == null)
        {
            // If this is the Site-wide Handle, return Site object
            if (handle.equals(Site.getSiteHandle()))
            {
                return Site.find(context, 0);
            }

            // Otherwise, return null (i.e. handle not found in DB)
            return null;
        }

        // check if handle was allocated previously, but is currently not
        // associated with a DSpaceObject
        // (this may occur when 'unbindHandle()' is called for an obj that was removed)
        if ((dbhandle.isColumnNull("resource_type_id"))
                || (dbhandle.isColumnNull("resource_id")))
        {
            // if handle has been unbound, just return null (as this will result in a PageNotFound)
            return null;
        }

        // What are we looking at here?
        int handletypeid = dbhandle.getIntColumn("resource_type_id");
        int resourceID = dbhandle.getIntColumn("resource_id");

        if (handletypeid == Constants.ITEM)
        {
            Item item = Item.find(context, resourceID);

            if (log.isDebugEnabled())
            {
                log.debug("Resolved handle " + handle + " to item "
                        + ((item == null) ? (-1) : item.getID()));
            }

            return item;
        }
        else if (handletypeid == Constants.COLLECTION)
        {
            Collection collection = Collection.find(context, resourceID);

            if (log.isDebugEnabled())
            {
                log.debug("Resolved handle " + handle + " to collection "
                        + ((collection == null) ? (-1) : collection.getID()));
            }

            return collection;
        }
        else if (handletypeid == Constants.COMMUNITY)
        {
            Community community = Community.find(context, resourceID);

            if (log.isDebugEnabled())
            {
                log.debug("Resolved handle " + handle + " to community "
                        + ((community == null) ? (-1) : community.getID()));
            }

            return community;
        }

        throw new IllegalStateException("Unsupported Handle Type "
                + Constants.typeText[handletypeid]);
    }

    /**
     * Return the handle for an Object, or null if the Object has no handle.
     *
     * @param context
     *            DSpace context
     * @param dso
     *            The object to obtain a handle for
     * @return The handle for object, or null if the object has no handle.
     * @exception SQLException
     *                If a database error occurs
     */
    public static String findHandle(Context context, DSpaceObject dso)
            throws SQLException
    {
        TableRow row = getHandleInternal(context, dso.getType(), dso.getID());
        if (row == null)
        {
            // The Site object has a well-known handle even without a DB row
            if (dso.getType() == Constants.SITE)
            {
                return Site.getSiteHandle();
            }
            else
            {
                return null;
            }
        }
        else
        {
            return row.getStringColumn("handle");
        }
    }

    /**
     * Return all the handles which start with prefix.
     *
     * @param context
     *            DSpace context
     * @param prefix
     *            The handle prefix
     * @return A list of the handles starting with prefix. The list is
     *         guaranteed to be non-null. Each element of the list is a String.
     * @exception SQLException
     *                If a database error occurs
     */
    static List<String> getHandlesForPrefix(Context context, String prefix)
            throws SQLException
    {
        String sql = "SELECT handle FROM handle WHERE handle LIKE ? ";
        TableRowIterator iterator = DatabaseManager.queryTable(context, null,
                sql, prefix + "%");
        List<String> results = new ArrayList<String>();

        try
        {
            // queryTable may return null; the original null-checked only in
            // the finally block but dereferenced the iterator here first
            while (iterator != null && iterator.hasNext())
            {
                TableRow row = (TableRow) iterator.next();
                results.add(row.getStringColumn("handle"));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (iterator != null)
            {
                iterator.close();
            }
        }

        return results;
    }

    /**
     * Get the configured Handle prefix string, or a default
     * @return configured prefix or "123456789"
     */
    public static String getPrefix()
    {
        String prefix = ConfigurationManager.getProperty("handle.prefix");
        if (null == prefix)
        {
            prefix = EXAMPLE_PREFIX; // XXX no good way to exit cleanly
            log.error("handle.prefix is not configured; using " + prefix);
        }
        return prefix;
    }

    ////////////////////////////////////////
    // Internal methods
    ////////////////////////////////////////

    /**
     * Return the handle for an Object, or null if the Object has no handle.
     *
     * @param context
     *            DSpace context
     * @param type
     *            The type of object
     * @param id
     *            The id of object
     * @return The handle for object, or null if the object has no handle.
     * @exception SQLException
     *                If a database error occurs
     */
    private static TableRow getHandleInternal(Context context, int type, int id)
            throws SQLException
    {
        String sql = "SELECT * FROM Handle WHERE resource_type_id = ? "
                + "AND resource_id = ?";

        return DatabaseManager.querySingleTable(context, "Handle", sql, type, id);
    }

    /**
     * Find the database row corresponding to handle.
     *
     * @param context
     *            DSpace context
     * @param handle
     *            The handle to resolve
     * @return The database row corresponding to the handle
     * @exception SQLException
     *                If a database error occurs
     */
    private static TableRow findHandleInternal(Context context, String handle)
            throws SQLException
    {
        if (handle == null)
        {
            throw new IllegalArgumentException("Handle is null");
        }

        return DatabaseManager
                .findByUnique(context, "Handle", "handle", handle);
    }

    /**
     * Create a new handle id. The implementation uses the PK of the RDBMS
     * Handle table.
     *
     * @return A new handle id
     * @exception SQLException
     *                If a database error occurs
     */
    private static String createId(int id) throws SQLException
    {
        String handlePrefix = getPrefix();

        // unsynchronized StringBuilder suffices for a method-local buffer
        return new StringBuilder().append(handlePrefix).append(
                handlePrefix.endsWith("/") ? "" : "/").append(id).toString();
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.handle;

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.search.DSIndexer;
import org.dspace.browse.IndexBrowse;

/**
 * A script to update the handle values in the database. This is typically used
 * when moving from a test machine (handle = 123456789) to a production service.
 *
 * @author Stuart Lewis
 */
public class UpdateHandlePrefix
{
    public static void main(String[] args) throws Exception
    {
        // There should be two parameters
        if (args.length < 2)
        {
            System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
        }
        else
        {
            // Confirm with the user that this is what they want to do
            String oldH = args[0];
            String newH = args[1];

            BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
            Context context = new Context();
            System.out.println("If you continue, all handles in your repository with prefix " +
                               oldH + " will be updated to have handle prefix " + newH + "\n");

            // Use a bind parameter rather than concatenating the prefix into
            // the SQL text, so a prefix containing quote characters cannot
            // break the statement. Match "<prefix>/..." so the count agrees
            // with the UPDATE below (the old code counted "<prefix>%", which
            // also matched longer prefixes sharing the same leading digits).
            String sql = "SELECT count(*) as count FROM handle WHERE handle LIKE ?";
            TableRow row = DatabaseManager.querySingle(context, sql, oldH + "/%");
            long count = row.getLongColumn("count");
            System.out.println(count + " items will be updated.\n");

            System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");
            String choiceString = input.readLine();

            // readLine() returns null at end-of-stream; treat that as "no"
            if (choiceString != null && choiceString.equalsIgnoreCase("y"))
            {
                // Make the changes
                System.out.print("Updating handle table... ");
                sql = "update handle set handle = ? || '/' || handle_id " +
                      "where handle like ?";
                int updated = DatabaseManager.updateQuery(context, sql, newH, oldH + "/%");
                System.out.println(updated + " items updated");

                System.out.print("Updating metadatavalues table... ");
                // NOTE(review): this rewrites every metadata value that looks
                // like a handle URL (not only those under the old prefix) and
                // sets the value to NULL when the subquery finds no matching
                // handle row -- confirm this is the intended behaviour.
                sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
                      "handle FROM handle WHERE handle.resource_id=item_id AND " +
                      "handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
                updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
                System.out.println(updated + " metadata values updated");

                // Commit the changes
                context.complete();

                System.out.print("Re-creating browse and search indexes... ");

                // Reinitialise the browse system
                IndexBrowse.main(new String[] {"-i"});

                // Rebuild the search index; failures here are non-fatal
                try
                {
                    DSIndexer.main(new String[0]);
                }
                catch (Exception e)
                {
                    // Not a lot we can do
                    System.out.println("Error re-indexing:");
                    e.printStackTrace();
                    System.out.println("\nPlease manually run [dspace]/bin/index-all");
                }

                // All done
                System.out.println("\nHandles successfully updated.");
            }
            else
            {
                System.out.println("No changes have been made to your data.");
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.handle;

import java.sql.SQLException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import net.handle.hdllib.Encoder;
import net.handle.hdllib.HandleException;
import net.handle.hdllib.HandleStorage;
import net.handle.hdllib.HandleValue;
import net.handle.hdllib.ScanCallback;
import net.handle.hdllib.Util;
import net.handle.util.StreamTable;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;

/**
 * Extension to the CNRI Handle Server that translates requests to resolve
 * handles into DSpace API calls. The implementation simply stubs out most of
 * the methods, and delegates the rest to the
 * {@link org.dspace.handle.HandleManager}. This only provides some of the
 * functionality (namely, the resolving of handles to URLs) of the CNRI
 * HandleStorage interface.
 *
 * <p>
 * This class is intended to be embedded in the CNRI Handle Server. It conforms
 * to the HandleStorage interface that was delivered with Handle Server version
 * 5.2.0.
 * </p>
 *
 * @author Peter Breton
 * @version $Revision: 5844 $
 */
public class HandlePlugin implements HandleStorage
{
    /** log4j category */
    private static Logger log = Logger.getLogger(HandlePlugin.class);

    /**
     * Constructor
     */
    public HandlePlugin()
    {
    }

    ////////////////////////////////////////
    // Non-Resolving methods -- unimplemented
    ////////////////////////////////////////

    /**
     * HandleStorage interface method - not implemented.
     */
    public void init(StreamTable st) throws Exception
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called init (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void setHaveNA(byte[] theHandle, boolean haveit)
            throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called setHaveNA (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void createHandle(byte[] theHandle, HandleValue[] values)
            throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called createHandle (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public boolean deleteHandle(byte[] theHandle) throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called deleteHandle (not implemented)");
        }

        return false;
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void updateValue(byte[] theHandle, HandleValue[] values)
            throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called updateValue (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void deleteAllRecords() throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called deleteAllRecords (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void checkpointDatabase() throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called checkpointDatabase (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void shutdown()
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called shutdown (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void scanHandles(ScanCallback callback) throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called scanHandles (not implemented)");
        }
    }

    /**
     * HandleStorage interface method - not implemented.
     */
    public void scanNAs(ScanCallback callback) throws HandleException
    {
        // Not implemented
        if (log.isInfoEnabled())
        {
            log.info("Called scanNAs (not implemented)");
        }
    }

    ////////////////////////////////////////
    // Resolving methods
    ////////////////////////////////////////

    /**
     * Return the raw values for this handle. This implementation returns a
     * single URL value.
     *
     * @param theHandle
     *            byte array representation of handle
     * @param indexList
     *            ignored
     * @param typeList
     *            ignored
     * @return A byte array with the raw data for this handle. Currently, this
     *         consists of a single URL value.
     * @exception HandleException
     *                If an error occurs while calling the Handle API.
     */
    public byte[][] getRawHandleValues(byte[] theHandle, int[] indexList,
            byte[][] typeList) throws HandleException
    {
        if (log.isInfoEnabled())
        {
            log.info("Called getRawHandleValues");
        }

        Context context = null;

        try
        {
            if (theHandle == null)
            {
                throw new HandleException(HandleException.INTERNAL_ERROR);
            }

            String handle = Util.decodeString(theHandle);

            context = new Context();

            String url = HandleManager.resolveToURL(context, handle);

            if (url == null)
            {
                throw new HandleException(HandleException.HANDLE_DOES_NOT_EXIST);
            }

            HandleValue value = new HandleValue();

            value.setIndex(100);
            value.setType(Util.encodeString("URL"));
            value.setData(Util.encodeString(url));
            value.setTTLType((byte) 0);
            value.setTTL(100);
            value.setTimestamp(100);
            value.setReferences(null);
            value.setAdminCanRead(true);
            value.setAdminCanWrite(false);
            value.setAnyoneCanRead(true);
            value.setAnyoneCanWrite(false);

            List<HandleValue> values = new LinkedList<HandleValue>();

            values.add(value);

            byte[][] rawValues = new byte[values.size()][];

            for (int i = 0; i < values.size(); i++)
            {
                HandleValue hvalue = values.get(i);

                rawValues[i] = new byte[Encoder.calcStorageSize(hvalue)];
                Encoder.encodeHandleValue(rawValues[i], 0, hvalue);
            }

            return rawValues;
        }
        catch (HandleException he)
        {
            throw he;
        }
        catch (Exception e)
        {
            if (log.isDebugEnabled())
            {
                log.debug("Exception in getRawHandleValues", e);
            }

            // Stack loss as exception does not support cause
            throw new HandleException(HandleException.INTERNAL_ERROR);
        }
        finally
        {
            if (context != null)
            {
                try
                {
                    context.complete();
                }
                catch (SQLException sqle)
                {
                    // Cannot propagate past the HandleStorage interface;
                    // at least log it instead of swallowing silently.
                    log.error("Failed to complete DSpace context", sqle);
                }
            }
        }
    }

    /**
     * Return true if we have this handle in storage.
     *
     * @param theHandle
     *            byte array representation of handle
     * @return True if we have this handle in storage
     * @exception HandleException
     *                If an error occurs while calling the Handle API.
     */
    public boolean haveNA(byte[] theHandle) throws HandleException
    {
        if (log.isInfoEnabled())
        {
            log.info("Called haveNA");
        }

        /*
         * Naming authority Handles are in the form: 0.NA/1721.1234
         *
         * 0.NA is basically the naming authority for naming authorities. For
         * this simple implementation, we will just check that the prefix
         * configured in dspace.cfg is the one in the request, returning true if
         * this is the case, false otherwise.
         *
         * FIXME: For more complex Handle situations, this will need enhancing.
         */

        // This parameter allows the dspace handle server to be capable of having multiple
        // name authorities assigned to it. So long as the handle table the alternative prefixes
        // defined the dspace will answer for those handles prefixes. This is not ideal and only
        // works if the dspace instances assumes control over all the items in a prefix, but it
        // does allow the admin to merge together two previously separate dspace instances each
        // with their own prefixes and have the one instance handle both prefixes. In this case
        // all new handle would be given a unified prefix but all old handles would still be
        // resolvable.
        if (ConfigurationManager.getBooleanProperty("handle.plugin.checknameauthority", true))
        {
            // First, construct a string representing the naming authority Handle
            // we'd expect.
            String expected = "0.NA/" + HandleManager.getPrefix();

            // Which authority does the request pertain to?
            String received = Util.decodeString(theHandle);

            // Return true if they match
            return expected.equals(received);
        }
        else
        {
            return true;
        }
    }

    /**
     * Return all handles in local storage which start with the naming authority
     * handle.
     *
     * @param theNAHandle
     *            byte array representation of naming authority handle
     * @return All handles in local storage which start with the naming
     *         authority handle.
     * @exception HandleException
     *                If an error occurs while calling the Handle API.
     */
    public Enumeration getHandlesForNA(byte[] theNAHandle)
            throws HandleException
    {
        String naHandle = Util.decodeString(theNAHandle);

        if (log.isInfoEnabled())
        {
            log.info("Called getHandlesForNA for NA " + naHandle);
        }

        Context context = null;

        try
        {
            context = new Context();

            List<String> handles = HandleManager.getHandlesForPrefix(context, naHandle);
            List<byte[]> results = new LinkedList<byte[]>();

            for (String handle : handles)
            {
                // Transforms to byte array
                results.add(Util.encodeString(handle));
            }

            return Collections.enumeration(results);
        }
        catch (SQLException sqle)
        {
            if (log.isDebugEnabled())
            {
                log.debug("Exception in getHandlesForNA", sqle);
            }

            // Stack loss as exception does not support cause
            throw new HandleException(HandleException.INTERNAL_ERROR);
        }
        finally
        {
            if (context != null)
            {
                try
                {
                    context.complete();
                }
                catch (SQLException sqle)
                {
                    // Cannot propagate past the HandleStorage interface;
                    // at least log it instead of swallowing silently.
                    log.error("Failed to complete DSpace context", sqle);
                }
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.harvest;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

/**
 * Wraps a row of the harvested_item table: the OAI-PMH provenance
 * (oai_id, last_harvested) recorded for a locally harvested Item.
 *
 * @author Alexey Maslov
 */
public class HarvestedItem
{
    private Context context;

    private TableRow harvestRow;

    HarvestedItem(Context c, TableRow row)
    {
        context = c;
        harvestRow = row;
    }

    /**
     * Probe the database to verify the harvested_item table exists; the
     * result set is discarded.
     */
    public static void exists(Context c) throws SQLException
    {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_item",
                "SELECT COUNT(*) FROM harvested_item");

        // close the iterator; the original leaked the underlying statement
        if (tri != null)
        {
            tri.close();
        }
    }

    /**
     * Find the harvest parameters corresponding to the specified DSpace item
     * @return a HarvestedItem object corresponding to this item, null if not found.
     */
    public static HarvestedItem find(Context c, int item_id) throws SQLException
    {
        TableRow row = DatabaseManager.findByUnique(c, "harvested_item", "item_id", item_id);

        if (row == null)
        {
            return null;
        }

        return new HarvestedItem(c, row);
    }

    /*
     * select foo.item_id from (select item.item_id, item.owning_collection from item join
     * item2bundle on item.item_id=item2bundle.item_id where item2bundle.bundle_id=22) as foo
     * join collection on foo.owning_collection=collection.collection_id
     * where collection.collection_id=5;
     */

    /**
     * Retrieve a DSpace Item that corresponds to this particular combination of owning collection and OAI ID.
     * @param context
     * @param itemOaiID the string used by the OAI-PMH provider to identify the item
     * @param collectionID id of the local collection that the item should be found in
     * @return DSpace Item or null if no item was found
     */
    public static Item getItemByOAIId(Context context, String itemOaiID, int collectionID)
            throws SQLException
    {
        /*
         * FYI: This method has to be scoped to a collection. Otherwise, we could have collisions as more
         * than one collection might be importing the same item. That is OAI_ID's might be unique to the
         * provider but not to the harvester.
         */
        Item resolvedItem = null;
        TableRowIterator tri = null;
        final String selectItemFromOaiId = "SELECT dsi.item_id FROM " +
                "(SELECT item.item_id, item.owning_collection FROM item JOIN harvested_item ON item.item_id=harvested_item.item_id WHERE harvested_item.oai_id=?) " +
                "dsi JOIN collection ON dsi.owning_collection=collection.collection_id WHERE collection.collection_id=?";

        try
        {
            tri = DatabaseManager.query(context, selectItemFromOaiId, itemOaiID, collectionID);

            if (tri.hasNext())
            {
                TableRow row = tri.next();
                int itemID = row.getIntColumn("item_id");
                resolvedItem = Item.find(context, itemID);
            }
            else
            {
                return null;
            }
        }
        finally
        {
            if (tri != null)
            {
                tri.close();
            }
        }

        return resolvedItem;
    }

    /**
     * Create a new harvested item row for a specified item id.
     * @return a new HarvestedItem object
     */
    public static HarvestedItem create(Context c, int itemId, String itemOAIid)
            throws SQLException
    {
        TableRow row = DatabaseManager.row("harvested_item");
        row.setColumn("item_id", itemId);
        row.setColumn("oai_id", itemOAIid);
        DatabaseManager.insert(c, row);

        return new HarvestedItem(c, row);
    }

    /**
     * Get the local item id associated with this harvest record.
     */
    public String getItemID()
    {
        // NOTE(review): item_id is an integer column; reading it with
        // getStringColumn() preserves the original behaviour but looks
        // suspect -- confirm whether String.valueOf(getIntColumn("item_id"))
        // was intended.
        String itemId = harvestRow.getStringColumn("item_id");
        return itemId;
    }

    /**
     * Get the oai_id associated with this item
     */
    public String getOaiID()
    {
        String oai_id = harvestRow.getStringColumn("oai_id");
        return oai_id;
    }

    /**
     * Set the oai_id associated with this item
     */
    public void setOaiID(String itemOaiID)
    {
        harvestRow.setColumn("oai_id", itemOaiID);
    }

    /**
     * Record the time this item was last harvested; a null argument means
     * "now".
     */
    public void setHarvestDate(Date date)
    {
        if (date == null)
        {
            date = new Date();
        }
        harvestRow.setColumn("last_harvested", date);
    }

    /**
     * Get the time this item was last harvested.
     */
    public Date getHarvestDate()
    {
        return harvestRow.getDateColumn("last_harvested");
    }

    /** Delete this harvest record from the database. */
    public void delete() throws SQLException
    {
        DatabaseManager.delete(context, harvestRow);
    }

    /** Persist any pending changes to this harvest record. */
    public void update() throws SQLException, IOException, AuthorizeException
    {
        DatabaseManager.update(context, harvestRow);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.harvest; import org.dspace.authorize.AuthorizeException; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRowIterator; import org.dspace.storage.rdbms.TableRow; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; /** * @author Alexey Maslov */ public class HarvestedCollection { private Context context; private TableRow harvestRow; public static final int TYPE_NONE = 0; public static final int TYPE_DMD = 1; public static final int TYPE_DMDREF = 2; public static final int TYPE_FULL = 3; public static final int STATUS_READY = 0; public static final int STATUS_BUSY = 1; public static final int STATUS_QUEUED = 2; public static final int STATUS_OAI_ERROR = 3; public static final int STATUS_UNKNOWN_ERROR = -1; /* * collection_id | integer | not null harvest_type | integer | oai_source | text | oai_set_id | text | harvest_message | text | metadata_config_id | text | harvest_status | integer | harvest_start_time | timestamp with time zone | */ // TODO: make sure this guy knows to lock people out if the status is not zero. // i.e. someone editing a collection's setting from the admin menu should have // to stop an ongoing harvest before they can edit the settings. 
HarvestedCollection(Context c, TableRow row) { context = c; harvestRow = row; } public static void exists(Context c) throws SQLException { DatabaseManager.queryTable(c, "harvested_collection", "SELECT COUNT(*) FROM harvested_collection"); } /** * Find the harvest settings corresponding to this collection * @return a HarvestInstance object corresponding to this collection's settings, null if not found. */ public static HarvestedCollection find(Context c, int collectionId) throws SQLException { TableRow row = DatabaseManager.findByUnique(c, "harvested_collection", "collection_id", collectionId); if (row == null) { return null; } return new HarvestedCollection(c, row); } /** * Create a new harvest instance row for a specified collection. * @return a new HarvestInstance object */ public static HarvestedCollection create(Context c, int collectionId) throws SQLException { TableRow row = DatabaseManager.row("harvested_collection"); row.setColumn("collection_id", collectionId); row.setColumn("harvest_type", 0); DatabaseManager.insert(c, row); return new HarvestedCollection(c, row); } /** Returns whether the specified collection is harvestable, i.e. whether its harvesting * options are set up correctly. This is distinct from "ready", since this collection may * be in process of being harvested. */ public static boolean isHarvestable(Context c, int collectionId) throws SQLException { HarvestedCollection hc = HarvestedCollection.find(c, collectionId); if (hc != null && hc.getHarvestType() > 0 && hc.getOaiSource() != null && hc.getOaiSetId() != null && hc.getHarvestStatus() != HarvestedCollection.STATUS_UNKNOWN_ERROR) { return true; } return false; } /** Returns whether this harvest instance is actually harvestable, i.e. whether its settings * options are set up correctly. This is distinct from "ready", since this collection may * be in process of being harvested. 
*/ public boolean isHarvestable() throws SQLException { if (this.getHarvestType() > 0 && this.getOaiSource() != null && this.getOaiSetId() != null && this.getHarvestStatus() != HarvestedCollection.STATUS_UNKNOWN_ERROR) { return true; } return false; } /** Returns whether the specified collection is ready for immediate harvest. */ public static boolean isReady(Context c, int collectionId) throws SQLException { HarvestedCollection hc = HarvestedCollection.find(c, collectionId); return hc.isReady(); } public boolean isReady() throws SQLException { if (this.isHarvestable() && (this.getHarvestStatus() == HarvestedCollection.STATUS_READY || this.getHarvestStatus() == HarvestedCollection.STATUS_OAI_ERROR)) { return true; } return false; } /** Find all collections that are set up for harvesting * * return: list of collection id's * @throws SQLException */ public static List<Integer> findAll(Context c) throws SQLException { TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", "SELECT * FROM harvested_collection"); List<Integer> collectionIds = new ArrayList<Integer>(); while (tri.hasNext()) { TableRow row = tri.next(); collectionIds.add(row.getIntColumn("collection_id")); } return collectionIds; } /** Find all collections that are ready for harvesting * * return: list of collection id's * @throws SQLException */ public static List<Integer> findReady(Context c) throws SQLException { int harvestInterval = ConfigurationManager.getIntProperty("harvester.harvestFrequency"); if (harvestInterval == 0) { harvestInterval = 720; } int expirationInterval = ConfigurationManager.getIntProperty("harvester.threadTimeout"); if (expirationInterval == 0) { expirationInterval = 24; } Date startTime; Date expirationTime; Calendar calendar = Calendar.getInstance(); calendar.setTime(new Date()); calendar.add(Calendar.MINUTE, -1 * harvestInterval); startTime = calendar.getTime(); calendar.setTime(startTime); calendar.add(Calendar.HOUR, -2 * expirationInterval); 
expirationTime = calendar.getTime(); /* Select all collections whose last_harvest is before our start time, whose harvest_type *is not* 0 and whose status *is* 0 (available) or 3 (OAI Error). */ TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", "SELECT * FROM harvested_collection WHERE (last_harvested < ? or last_harvested is null) and harvest_type > ? and (harvest_status = ? or harvest_status = ? or (harvest_status=? and harvest_start_time < ?)) ORDER BY last_harvested", new java.sql.Timestamp(startTime.getTime()), 0, HarvestedCollection.STATUS_READY, HarvestedCollection.STATUS_OAI_ERROR, HarvestedCollection.STATUS_BUSY, new java.sql.Timestamp(expirationTime.getTime())); List<Integer> collectionIds = new ArrayList<Integer>(); while (tri.hasNext()) { TableRow row = tri.next(); collectionIds.add(row.getIntColumn("collection_id")); } return collectionIds; } /** * Find all collections with the specified status flag * @param c * @param status see HarvestInstance.STATUS_... * @return * @throws SQLException */ public static List<Integer> findByStatus(Context c, int status) throws SQLException { TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", "SELECT * FROM harvested_collection WHERE harvest_status = ?", status); List<Integer> collectionIds = new ArrayList<Integer>(); while (tri.hasNext()) { TableRow row = tri.next(); collectionIds.add(row.getIntColumn("collection_id")); } return collectionIds; } /** Find the collection that was harvested the longest time ago. * @throws SQLException */ public static Integer findOldestHarvest (Context c) throws SQLException { String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested asc limit 1"; if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? 
and rownum <= 1 order by last_harvested asc"; } TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", query, 0, 0); TableRow row = tri.next(); if (row != null) { return row.getIntColumn("collection_id"); } else { return -1; } } /** Find the collection that was harvested most recently. * @throws SQLException */ public static Integer findNewestHarvest (Context c) throws SQLException { String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested desc limit 1"; if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? and rownum <= 1 order by last_harvested desc"; } TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", query , 0, 0); TableRow row = tri.next(); if (row != null) { return row.getIntColumn("collection_id"); } else { return -1; } } /** * A function to set all harvesting-related parameters at once */ public void setHarvestParams(int type, String oaiSource, String oaiSetId, String mdConfigId) { setHarvestType(type); setOaiSource(oaiSource); setOaiSetId(oaiSetId); setHarvestMetadataConfig(mdConfigId); } /* Setters for the appropriate harvesting-related columns */ public void setHarvestType(int type) { harvestRow.setColumn("harvest_type",type); } /** * Sets the current status of the collection. * * @param status a HarvestInstance.STATUS_... 
constant */ public void setHarvestStatus(int status) { harvestRow.setColumn("harvest_status",status); } public void setOaiSource(String oaiSource) { if (oaiSource == null || oaiSource.length() == 0) { harvestRow.setColumnNull("oai_source"); } else { harvestRow.setColumn("oai_source",oaiSource); } } public void setOaiSetId(String oaiSetId) { if (oaiSetId == null || oaiSetId.length() == 0) { harvestRow.setColumnNull("oai_set_id"); } else { harvestRow.setColumn("oai_set_id",oaiSetId); } } public void setHarvestMetadataConfig(String mdConfigId) { if (mdConfigId == null || mdConfigId.length() == 0) { harvestRow.setColumnNull("metadata_config_id"); } else { harvestRow.setColumn("metadata_config_id",mdConfigId); } } public void setHarvestResult(Date date, String message) { if (date == null) { harvestRow.setColumnNull("last_harvested"); } else { harvestRow.setColumn("last_harvested", date); } if (message == null || message.length() == 0) { harvestRow.setColumnNull("harvest_message"); } else { harvestRow.setColumn("harvest_message", message); } } public void setHarvestMessage(String message) { if (message == null || message.length() == 0) { harvestRow.setColumnNull("harvest_message"); } else { harvestRow.setColumn("harvest_message", message); } } public void setHarvestStartTime(Date date) { if (date == null) { harvestRow.setColumnNull("harvest_start_time"); } else { harvestRow.setColumn("harvest_start_time", date); } } /* Getting for the appropriate harvesting-related columns */ public int getCollectionId() { return harvestRow.getIntColumn("collection_id"); } public int getHarvestType() { return harvestRow.getIntColumn("harvest_type"); } public int getHarvestStatus() { return harvestRow.getIntColumn("harvest_status"); } public String getOaiSource() { return harvestRow.getStringColumn("oai_source"); } public String getOaiSetId() { return harvestRow.getStringColumn("oai_set_id"); } public String getHarvestMetadataConfig() { return 
harvestRow.getStringColumn("metadata_config_id"); } public String getHarvestMessage() { return harvestRow.getStringColumn("harvest_message"); } public Date getHarvestDate() { return harvestRow.getDateColumn("last_harvested"); } public Date getHarvestStartTime() { return harvestRow.getDateColumn("harvest_start_time"); } public void delete() throws SQLException { DatabaseManager.delete(context, harvestRow); } public void update() throws SQLException, IOException, AuthorizeException { DatabaseManager.update(context, harvestRow); } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.harvest; import org.apache.log4j.Logger; import org.dspace.core.*; import org.dspace.event.Consumer; import org.dspace.event.Event; /** * Class for handling cleanup of harvest settings for collections and items * * * @version $Revision: 3705 $ * * @author Stuart Lewis * @author Alexey Maslov */ public class HarvestConsumer implements Consumer { /** log4j logger */ private static Logger log = Logger.getLogger(HarvestConsumer.class); /** * Initialise the consumer * * @throws Exception */ public void initialize() throws Exception { } /** * Consume the event * * @param context * @param event * @throws Exception */ public void consume(Context context, Event event) throws Exception { int st = event.getSubjectType(); int et = event.getEventType(); int id = event.getSubjectID(); switch (st) { case Constants.ITEM: if (et == Event.DELETE) { HarvestedItem hi = HarvestedItem.find(context, id); if (hi != null) { log.debug("Deleted item '" + id + "', also deleting associated harvested_item '" + hi.getOaiID() + "'."); hi.delete(); hi.update(); } else { log.debug("Deleted item '" + id + "' and the associated harvested_item."); } } break; case Constants.COLLECTION: if (et == Event.DELETE) { HarvestedCollection hc = HarvestedCollection.find(context, id); if (hc != null) { log.debug("Deleted collection '" + id + "', also deleting associated harvested_collection '" + hc.getOaiSource() + ":" + hc.getOaiSetId() + "'."); hc.delete(); hc.update(); } else { log.debug("Deleted collection '" + id + "' and the associated harvested_collection."); } } default: log.warn("consume() got unrecognized event: " + event.toString()); } } /** * Handle the end of the event * * @param ctx * @throws Exception */ public void end(Context ctx) throws Exception { } /** * Finish the 
event * * @param ctx */ public void finish(Context ctx) { } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.harvest;

import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.ConnectException;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.Stack;
import java.util.TimeZone;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.DCDate;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.InstallItem;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.IngestionCrosswalk;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.PluginManager;
import org.dspace.core.Utils;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.Namespace;
import org.jdom.input.DOMBuilder;
import org.jdom.output.XMLOutputter;
import org.xml.sax.SAXException;

import ORG.oclc.oai.harvester2.verb.GetRecord;
import ORG.oclc.oai.harvester2.verb.Identify;
import ORG.oclc.oai.harvester2.verb.ListMetadataFormats;
import ORG.oclc.oai.harvester2.verb.ListRecords;
import ORG.oclc.oai.harvester2.verb.ListSets;

/**
 * This class handles OAI harvesting of externally located records into this repository.
 *
 * @author Alexey Maslov
 */
public class OAIHarvester
{
    /* The main harvesting thread */
    private static HarvestScheduler harvester;
    private static Thread mainHarvestThread;

    /** log4j category */
    private static Logger log = Logger.getLogger(OAIHarvester.class);

    // XML namespaces used when parsing OAI-PMH responses and ORE documents.
    private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom");
    private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/");
    private static final Namespace OAI_NS = Namespace.getNamespace("http://www.openarchives.org/OAI/2.0/");

    // Error keys reported back to the admin UI / status column.
    public static final String OAI_ADDRESS_ERROR = "invalidAddress";
    public static final String OAI_SET_ERROR = "noSuchSet";
    public static final String OAI_DMD_ERROR = "metadataNotSupported";
    public static final String OAI_ORE_ERROR = "oreNotSupported";

    // The collection this harvester instance is dealing with
    Collection targetCollection;
    // Harvest settings/state row for that collection.
    HarvestedCollection harvestRow;
    // our context
    Context ourContext;

    // Namespace used by the ORE serialization format
    // Set in dspace.cfg as harvester.oai.oreSerializationFormat.{ORESerialKey} = {ORESerialNS}
    private Namespace ORESerialNS;
    private String ORESerialKey;

    // Namespace of the descriptive metadata that should be harvested in addition to the ORE
    // Set in dspace.cfg as harvester.oai.metadataformats.{MetadataKey} = {MetadataNS},{Display Name}
    private Namespace metadataNS;
    private String metadataKey;

    // DOMbuilder class for the DOM -> JDOM conversions
    private static DOMBuilder db = new DOMBuilder();

    // The point at which this thread
    // should terminate itself

    /* Initialize the harvester with a collection object */
    public OAIHarvester(Context c, DSpaceObject dso, HarvestedCollection hc) throws HarvestingException, SQLException
    {
        // Only collections can be harvest targets.
        if (dso.getType() != Constants.COLLECTION)
        {
            throw new HarvestingException("OAIHarvester can only harvest collections");
        }

        ourContext = c;
        targetCollection = (Collection)dso;

        harvestRow = hc;
        if (harvestRow == null || !harvestRow.isHarvestable())
        {
            throw new HarvestingException("Provided collection is not set up for harvesting");
        }

        // Set the ORE options
        Namespace ORESerializationNamespace = OAIHarvester.getORENamespace();

        if (ORESerializationNamespace == null)
        {
            log.error("No ORE serialization namespace declared; see dspace.cfg option \"harvester.oai.oreSerializationFormat.{ORESerialKey} = {ORESerialNS}\"");
            throw new HarvestingException("No ORE serialization namespace specified");
        }
        else
        {
            // Note: only the URI is kept for ORESerialNS (the prefix is
            // stored separately in ORESerialKey).
            ORESerialNS = Namespace.getNamespace(ORESerializationNamespace.getURI());
            ORESerialKey = ORESerializationNamespace.getPrefix();
        }

        // Set the metadata options
        metadataKey = harvestRow.getHarvestMetadataConfig();
        metadataNS = OAIHarvester.getDMDNamespace(metadataKey);

        if (metadataNS == null)
        {
            log.error("No matching metadata namespace found for \"" + metadataKey + "\", see dspace.cfg option \"harvester.oai.metadataformats.{MetadataKey} = {MetadataNS},{Display Name}\"");
            throw new HarvestingException("Metadata declaration not found");
        }
    }

    /**
     * Search the configuration options and find the ORE serialization string.
     * @return Namespace of the supported ORE format; when no
     *         "harvester.oai.oreSerializationFormat.*" option is configured,
     *         the implementation falls back to an Atom-based "ore" namespace
     *         rather than returning null.
*/ private static Namespace getORENamespace() { String ORESerializationString = null; String ORESeialKey = null; String oreString = "harvester.oai.oreSerializationFormat."; Enumeration pe = ConfigurationManager.propertyNames(); while (pe.hasMoreElements()) { String key = (String)pe.nextElement(); if (key.startsWith(oreString)) { ORESeialKey = key.substring(oreString.length()); ORESerializationString = ConfigurationManager.getProperty(key); return Namespace.getNamespace(ORESeialKey, ORESerializationString); } } // Fallback if the configuration option is not present return Namespace.getNamespace("ore", ATOM_NS.getURI()); } /** * Cycle through the options and find the metadata namespace matching the provided key. * @param metadataKey * @return Namespace of the designated metadata format. Returns null of not found. */ private static Namespace getDMDNamespace(String metadataKey) { String metadataString = null; String metaString = "harvester.oai.metadataformats."; Enumeration pe = ConfigurationManager.propertyNames(); while (pe.hasMoreElements()) { String key = (String)pe.nextElement(); if (key.startsWith(metaString) && key.substring(metaString.length()).equals((metadataKey))) { metadataString = ConfigurationManager.getProperty(key); String namespacePiece; if (metadataString.indexOf(',') != -1) { namespacePiece = metadataString.substring(0, metadataString.indexOf(',')); } else { namespacePiece = metadataString; } return Namespace.getNamespace(namespacePiece); } } return null; } /** * Performs a harvest cycle on this collection. This will query the remote OAI-PMH provider, check for updates since last * harvest, and ingest the returned items. 
     */
    public void runHarvest() throws SQLException, IOException, AuthorizeException
    {
        // figure out the relevant parameters
        String oaiSource = harvestRow.getOaiSource();
        String oaiSetId = harvestRow.getOaiSetId();
        //If we have all selected then make sure that we do not include a set filter
        if("all".equals(oaiSetId))
        {
            oaiSetId = null;
        }

        // Harvest window: from the last successful harvest (if any) to "now".
        Date lastHarvestDate = harvestRow.getHarvestDate();
        String fromDate = null;
        if (lastHarvestDate != null)
        {
            fromDate = processDate(harvestRow.getHarvestDate());
        }

        Date startTime = new Date();
        String toDate = processDate(startTime,0);

        String dateGranularity;

        try
        {
            // obtain the desired descriptive metadata format and verify that the OAI server actually provides it
            // do the same thing for ORE, which should be encoded in Atom and carry its namespace
            String descMDPrefix = null;
            String OREPrefix;
            try
            {
                // Trim the from/to dates to the granularity the provider supports.
                dateGranularity = oaiGetDateGranularity(oaiSource);
                if (fromDate != null)
                {
                    fromDate = fromDate.substring(0, dateGranularity.length());
                }
                toDate = toDate.substring(0, dateGranularity.length());

                descMDPrefix = oaiResolveNamespaceToPrefix(oaiSource, metadataNS.getURI());
                OREPrefix = oaiResolveNamespaceToPrefix(oaiSource, ORESerialNS.getURI());
            }
            catch (FileNotFoundException fe)
            {
                log.error("The OAI server did not respond.");
                throw new HarvestingException("The OAI server did not respond.", fe);
            }
            catch (ConnectException fe)
            {
                log.error("The OAI server did not respond.");
                throw new HarvestingException("The OAI server did not respond.", fe);
            }
            if (descMDPrefix == null)
            {
                log.error("The OAI server does not support this metadata format");
                throw new HarvestingException("The OAI server does not support this metadata format: " + metadataNS.getURI());
            }
            // ORE support is only required when harvesting more than bare metadata.
            if (OREPrefix == null && harvestRow.getHarvestType() != HarvestedCollection.TYPE_DMD)
            {
                throw new HarvestingException("The OAI server does not support ORE dissemination in the configured serialization format: " + ORESerialNS.getURI());
            }

            Document oaiResponse = null;
            Element root = null;
            String resumptionToken;

            // set the status indicating the collection is currently being processed
            harvestRow.setHarvestStatus(HarvestedCollection.STATUS_BUSY);
            harvestRow.setHarvestMessage("Collection is currently being harvested");
            harvestRow.setHarvestStartTime(startTime);
            harvestRow.update();
            ourContext.commit();

            // expiration timer starts
            int expirationInterval = ConfigurationManager.getIntProperty("harvester.threadTimeout");
            if (expirationInterval == 0)
            {
                expirationInterval = 24;
            }
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(startTime);
            calendar.add(Calendar.HOUR, expirationInterval);
            Date expirationTime = calendar.getTime();

            // main loop to keep requesting more objects until we're done
            List<Element> records;
            Set<String> errorSet = new HashSet<String>();

            ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix);
            log.debug("Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " + oaiSetId + " " + descMDPrefix);
            if (listRecords != null)
            {
                log.info("HTTP Request: " + listRecords.getRequestURL());
            }
            while (listRecords != null)
            {
                records = new ArrayList<Element>();
                oaiResponse = db.build(listRecords.getDocument());

                // Surface any OAI-level errors from the response.
                if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0)
                {
                    for (int i=0; i<listRecords.getErrors().getLength(); i++)
                    {
                        String errorCode = listRecords.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent();
                        errorSet.add(errorCode);
                    }
                    if (errorSet.contains("noRecordsMatch"))
                    {
                        // "noRecordsMatch" simply means nothing changed since
                        // the last harvest — a successful, empty run.
                        log.info("noRecordsMatch: OAI server did not contain any updates");
                        harvestRow.setHarvestResult(new Date(), "OAI server did not contain any updates");
                        harvestRow.setHarvestStatus(HarvestedCollection.STATUS_READY);
                        harvestRow.update();
                        return;
                    }
                    else
                    {
                        throw new HarvestingException(errorSet.toString());
                    }
                }
                else
                {
                    root = oaiResponse.getRootElement();
                    records.addAll(root.getChild("ListRecords", OAI_NS).getChildren("record", OAI_NS));
                }

                // Process the obtained records
                if (records != null && records.size()>0)
                {
                    log.info("Found " + records.size() + " records to process");
                    for (Element record : records)
                    {
                        // check for STOP interrupt from the scheduler
                        if (HarvestScheduler.interrupt == HarvestScheduler.HARVESTER_INTERRUPT_STOP)
                        {
                            throw new HarvestingException("Harvest process for " + targetCollection.getID() + " interrupted by stopping the scheduler.");
                        }
                        // check for timeout
                        if (expirationTime.before(new Date()))
                        {
                            throw new HarvestingException("runHarvest method timed out for collection " + targetCollection.getID());
                        }

                        processRecord(record,OREPrefix);
                        ourContext.commit();
                    }
                }

                // keep going if there are more records to process
                resumptionToken = listRecords.getResumptionToken();
                if (resumptionToken == null || resumptionToken.length() == 0)
                {
                    listRecords = null;
                }
                else
                {
                    listRecords = new ListRecords(oaiSource, resumptionToken);
                }
                targetCollection.update();
                ourContext.commit();
            }
        }
        catch (HarvestingException hex)
        {
            log.error("Harvesting error occured while processing an OAI record: " + hex.getMessage());
            harvestRow.setHarvestMessage("Error occured while processing an OAI record");

            // if the last status is also an error, alert the admin
            // NOTE(review): getHarvestMessage() is read after the message was
            // just set above, so this condition looks like it always matches
            // — confirm whether the pre-existing message was intended here.
            if (harvestRow.getHarvestMessage().contains("Error"))
            {
                alertAdmin(HarvestedCollection.STATUS_OAI_ERROR, hex);
            }
            harvestRow.setHarvestStatus(HarvestedCollection.STATUS_OAI_ERROR);
            return;
        }
        catch (Exception ex)
        {
            harvestRow.setHarvestMessage("Unknown error occured while generating an OAI response");
            harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
            alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex);
            log.error("Error occured while generating an OAI response: " + ex.getMessage() + " " + ex.getCause());
            ex.printStackTrace();
            return;
        }
        finally
        {
            // Persist whatever state we reached and restore authorization,
            // whether the harvest succeeded or failed.
            harvestRow.update();
            targetCollection.update();
            ourContext.commit();
            ourContext.restoreAuthSystemState();
        }

        // If we got to this point, it means the harvest was completely successful
        Date finishTime = new Date();
        long timeTaken = finishTime.getTime() - startTime.getTime();
        harvestRow.setHarvestResult(startTime, "Harvest from " + oaiSource + " sucessful");
        harvestRow.setHarvestStatus(HarvestedCollection.STATUS_READY);
        log.info("Harvest from " + oaiSource + " sucessful. The process took " + timeTaken + " milliseconds.");
        harvestRow.update();
        ourContext.commit();
    }

    /**
     * Process an individual PMH record, making (or updating) a corresponding DSpace Item.
     * @param record a JDOM Element containing the actual PMH record with descriptive metadata.
     * @param OREPrefix the metadataprefix value used by the remote PMH server to disseminate ORE. Only used for collections set up to harvest content.
     */
    private void processRecord(Element record, String OREPrefix) throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException, ParserConfigurationException, SAXException, TransformerException
    {
        WorkspaceItem wi = null;
        Date timeStart = new Date();

        // grab the oai identifier
        String itemOaiID = record.getChild("header", OAI_NS).getChild("identifier", OAI_NS).getText();
        Element header = record.getChild("header",OAI_NS);

        // look up the item corresponding to the OAI identifier
        Item item = HarvestedItem.getItemByOAIId(ourContext, itemOaiID, targetCollection.getID());

        // Make sure the item hasn't been deleted in the mean time
        if (header.getAttribute("status") != null && header.getAttribute("status").getValue().equals("deleted"))
        {
            log.info("Item " + itemOaiID + " has been marked as deleted on the OAI server.");
            if (item != null)
            {
                targetCollection.removeItem(item);
            }
            ourContext.restoreAuthSystemState();
            return;
        }

        // If we are only harvesting descriptive metadata, the record should already contain all we need
        List<Element> descMD = record.getChild("metadata", OAI_NS).getChildren();
        IngestionCrosswalk MDxwalk = (IngestionCrosswalk)PluginManager.getNamedPlugin(IngestionCrosswalk.class, this.metadataKey);

        // Otherwise, obtain the ORE ReM and initiate the ORE crosswalk
        IngestionCrosswalk ORExwalk = null;
        Element oreREM = null;
        if (harvestRow.getHarvestType() > 1)
        {
            oreREM = getMDrecord(harvestRow.getOaiSource(), itemOaiID, OREPrefix).get(0);
            ORExwalk = (IngestionCrosswalk)PluginManager.getNamedPlugin(IngestionCrosswalk.class, this.ORESerialKey);
        }

        // Ignore authorization
        ourContext.turnOffAuthorisationSystem();

        HarvestedItem hi;

        if (item != null) // found an item so we modify
        {
            log.debug("Item " + item.getHandle() + " was found locally. Using it to harvest " + itemOaiID + ".");

            // FIXME: check for null pointer if for some odd reason we don't have a matching hi
            hi = HarvestedItem.find(ourContext, item.getID());

            // Compare last-harvest on the item versus the last time the item was updated on the OAI provider side
            // If ours is more recent, forgo this item, since it's probably a left-over from a previous harvesting attempt
            Date OAIDatestamp = Utils.parseISO8601Date(header.getChildText("datestamp", OAI_NS));
            Date itemLastHarvest = hi.getHarvestDate();
            if (itemLastHarvest != null && OAIDatestamp.before(itemLastHarvest))
            {
                log.info("Item " + item.getHandle() + " was harvested more recently than the last update time reporetd by the OAI server; skipping.");
                return;
            }

            // Otherwise, clear and re-import the metadata and bitstreams
            item.clearMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
            if (descMD.size() == 1)
            {
                MDxwalk.ingest(ourContext, item, descMD.get(0));
            }
            else
            {
                MDxwalk.ingest(ourContext, item, descMD);
            }

            // Import the actual bitstreams
            if (harvestRow.getHarvestType() == 3)
            {
                log.info("Running ORE ingest on: " + item.getHandle());

                Bundle[] allBundles = item.getBundles();
                for (Bundle bundle : allBundles)
                {
                    item.removeBundle(bundle);
                }
                ORExwalk.ingest(ourContext, item, oreREM);
            }

            scrubMetadata(item);
        }
        else
            // NOTE: did not find, so we create (presumably, there will never be a case where an item already
            // exists in a harvest collection but does not have an OAI_id)
        {
            wi = WorkspaceItem.create(ourContext, targetCollection, false);
            item = wi.getItem();

            hi = HarvestedItem.create(ourContext, item.getID(), itemOaiID);
            //item.setOaiID(itemOaiID);

            if (descMD.size() == 1)
            {
                MDxwalk.ingest(ourContext, item, descMD.get(0));
            }
            else
            {
                MDxwalk.ingest(ourContext, item, descMD);
            }

            if (harvestRow.getHarvestType() == 3)
            {
                ORExwalk.ingest(ourContext, item, oreREM);
            }

            // see if we can do something about the wonky metadata
            scrubMetadata(item);

            // see if a handle can be extracted for the item
            String handle = extractHandle(item);

            if (handle != null)
            {
                DSpaceObject dso = HandleManager.resolveToObject(ourContext, handle);
                if (dso != null)
                {
                    throw new HarvestingException("Handle collision: attempted to re-assign handle '" + handle + "' to an incoming harvested item '" + hi.getOaiID() + "'.");
                }
            }

            try
            {
                item = InstallItem.installItem(ourContext, wi, handle);
                //item = InstallItem.installItem(ourContext, wi);
            }
            // clean up the workspace item if something goes wrong before
            catch(SQLException se)
            {
                wi.deleteWrapper();
                throw se;
            }
            catch(IOException ioe)
            {
                wi.deleteWrapper();
                throw ioe;
            }
            catch(AuthorizeException ae)
            {
                wi.deleteWrapper();
                throw ae;
            }
        }

        // Now create the special ORE bundle and drop the ORE document in it
        if (harvestRow.getHarvestType() == 2 || harvestRow.getHarvestType() == 3)
        {
            Bundle OREBundle = item.createBundle("ORE");

            XMLOutputter outputter = new XMLOutputter();
            String OREString = outputter.outputString(oreREM);
            ByteArrayInputStream OREStream = new ByteArrayInputStream(OREString.getBytes());

            Bitstream OREBitstream = OREBundle.createBitstream(OREStream);
            OREBitstream.setName("ORE.xml");

            BitstreamFormat bf = FormatIdentifier.guessFormat(ourContext, OREBitstream);
            OREBitstream.setFormat(bf);
            OREBitstream.update();

            OREBundle.addBitstream(OREBitstream);
            OREBundle.update();
        }

        //item.setHarvestDate(new Date());
        hi.setHarvestDate(new Date());

        // Add provenance that this item was harvested via OAI
        String provenanceMsg = "Item created via OAI harvest from source: " + this.harvestRow.getOaiSource() + " on " + new DCDate(hi.getHarvestDate()) + " (GMT).  Item's OAI Record identifier: " + hi.getOaiID();
        item.addMetadata("dc", "description", "provenance", "en", provenanceMsg);

        item.update();
        hi.update();

        long timeTaken = new Date().getTime() - timeStart.getTime();
        log.info("Item " + item.getHandle() + "(" + item.getID() + ")" + " has been ingested. The whole process took: " + timeTaken + " ms. ");

        // Un-ignore authorization
        ourContext.restoreAuthSystemState();
    }

    /**
     * Scan an item's metadata, looking for the value "identifier.*". If it meets the parameters that identify it as valid handle
     * as set in dspace.cfg (harvester.acceptedHandleServer and harvester.rejectedHandlePrefix), use that handle instead of
     * minting a new one.
     * @param item a newly created, but not yet installed, DSpace Item
     * @return null or the handle to be used.
     */
    private String extractHandle(Item item)
    {
        String acceptedHandleServersString = ConfigurationManager.getProperty("harvester.acceptedHandleServer");
        if (acceptedHandleServersString == null)
        {
            acceptedHandleServersString = "hdl.handle.net";
        }

        String rejectedHandlePrefixString = ConfigurationManager.getProperty("harvester.rejectedHandlePrefix");
        if (rejectedHandlePrefixString == null)
        {
            rejectedHandlePrefixString = "123456789";
        }

        DCValue[] values = item.getMetadata("dc", "identifier", Item.ANY, Item.ANY);

        if (values.length > 0 && !acceptedHandleServersString.equals(""))
        {
            String[] acceptedHandleServers = acceptedHandleServersString.split(",");
            String[] rejectedHandlePrefixes = rejectedHandlePrefixString.split(",");

            for (DCValue value : values)
            {
                //     0   1       2         3   4
                //   http://hdl.handle.net/1234/12
                String[] urlPieces = value.value.split("/");
                if (urlPieces.length != 5)
                {
                    continue;
                }

                for (String server : acceptedHandleServers)
                {
                    if (urlPieces[2].equals(server))
                    {
                        // NOTE(review): with more than one rejected prefix
                        // configured, this returns as soon as the identifier
                        // differs from ANY single prefix — i.e. a prefix in
                        // the rejected list can still be accepted. Confirm
                        // whether this should check against ALL prefixes.
                        for (String prefix : rejectedHandlePrefixes)
                        {
                            if (!urlPieces[3].equals(prefix))
                            {
                                return urlPieces[3] + "/" + urlPieces[4];
                            }
                        }
                    }
                }
            }
        }

        return null;
    }

    /**
     * Scans an item's newly ingested metadata for
elements not defined in this DSpace instance. It then takes action based * on a configurable parameter (fail, ignore, add). * @param item a DSpace item recently pushed through an ingestion crosswalk but prior to update/installation */ private void scrubMetadata(Item item) throws SQLException, HarvestingException, AuthorizeException, IOException { // The two options, with three possibilities each: add, ignore, fail String schemaChoice = ConfigurationManager.getProperty("harvester.unknownSchema"); if (schemaChoice == null) { schemaChoice = "fail"; } String fieldChoice = ConfigurationManager.getProperty("harvester.unknownField"); if (fieldChoice == null) { fieldChoice = "fail"; } List<String> clearList = new ArrayList<String>(); DCValue[] values = item.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY); for (DCValue value : values) { // Verify that the schema exists MetadataSchema mdSchema = MetadataSchema.find(ourContext, value.schema); if (mdSchema == null && !clearList.contains(value.schema)) { // add a new schema, giving it a namespace of "unknown". Possibly a very bad idea. if (schemaChoice.equals("add")) { mdSchema = new MetadataSchema(value.schema,String.valueOf(new Date().getTime())); try { mdSchema.create(ourContext); mdSchema.setName(value.schema); mdSchema.setNamespace("unknown"+mdSchema.getSchemaID()); mdSchema.update(ourContext); } catch (NonUniqueMetadataException e) { // This case should not be possible e.printStackTrace(); } clearList.add(value.schema); } // ignore the offending schema, quietly dropping all of it's metadata elements before they clog our gears else if (schemaChoice.equals("ignore")) { item.clearMetadata(value.schema, Item.ANY, Item.ANY, Item.ANY); continue; } // otherwise, go ahead and generate the error else { throw new HarvestingException("The '" + value.schema + "' schema has not been defined in this DSpace instance. 
"); } } if (mdSchema != null) { // Verify that the element exists; this part is reachable only if the metadata schema is valid MetadataField mdField = MetadataField.findByElement(ourContext, mdSchema.getSchemaID(), value.element, value.qualifier); if (mdField == null) { if (fieldChoice.equals("add")) { mdField = new MetadataField(mdSchema, value.element, value.qualifier, null); try { mdField.create(ourContext); mdField.update(ourContext); } catch (NonUniqueMetadataException e) { // This case should also not be possible e.printStackTrace(); } } else if (fieldChoice.equals("ignore")) { item.clearMetadata(value.schema, value.element, value.qualifier, Item.ANY); } else { throw new HarvestingException("The '" + value.element + "." + value.qualifier + "' element has not been defined in this DSpace instance. "); } } } } return; } /** * Process a date, converting it to RFC3339 format, setting the timezone to UTC and subtracting time padding * from the config file. * @param date source Date * @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone */ private String processDate(Date date) { Integer timePad = ConfigurationManager.getIntProperty("harvester.timePadding"); if (timePad == 0) { timePad = 120; } return processDate(date, timePad); } /** * Process a date, converting it to RFC3339 format, setting the timezone to UTC and subtracting time padding * from the config file. 
* @param date source Date * @param secondsPad number of seconds to subtract from the date * @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone */ private String processDate(Date date, int secondsPad) { SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); Calendar calendar = Calendar.getInstance(); calendar.setTime(date); calendar.add(Calendar.SECOND, -1*secondsPad); date = calendar.getTime(); return formatter.format(date); } /** * Query OAI-PMH server for the granularity of its datestamps. * @throws TransformerException * @throws SAXException * @throws ParserConfigurationException * @throws IOException */ private String oaiGetDateGranularity(String oaiSource) throws IOException, ParserConfigurationException, SAXException, TransformerException { Identify iden = new Identify(oaiSource); return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent(); } /** * Query the OAI-PMH server for its mapping of the supplied namespace and metadata prefix. * For example for a typical OAI-PMH server a query "http://www.openarchives.org/OAI/2.0/oai_dc/" would return "oai_dc". 
* @param oaiSource the address of the OAI-PMH provider * @param MDNamespace the namespace that we are trying to resove to the metadataPrefix * @return metadataPrefix the OAI-PMH provider has assigned to the supplied namespace */ public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace) throws IOException, ParserConfigurationException, SAXException, TransformerException, ConnectException { String metaPrefix = null; // Query the OAI server for the metadata ListMetadataFormats lmf = new ListMetadataFormats(oaiSource); if (lmf != null) { Document lmfResponse = db.build(lmf.getDocument()); List<Element> mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS).getChildren("metadataFormat", OAI_NS); for (Element mdFormat : mdFormats) { if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) { metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS); break; } } } return metaPrefix; } /** * Generate and send an email to the administrator. Prompted by errors encountered during harvesting. 
* @param status the current status of the collection, usually HarvestedCollection.STATUS_OAI_ERROR or HarvestedCollection.STATUS_UNKNOWN_ERROR * @param ex the Exception that prompted this action */ private void alertAdmin(int status, Exception ex) { try { String recipient = ConfigurationManager.getProperty("alert.recipient"); if (recipient != null) { Email email = ConfigurationManager.getEmail(I18nUtil.getEmailFilename(Locale.getDefault(), "harvesting_error")); email.addRecipient(recipient); email.addArgument(targetCollection.getID()); email.addArgument(new Date()); email.addArgument(status); String stackTrace; if (ex != null) { email.addArgument(ex.getMessage()); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); ex.printStackTrace(pw); pw.flush(); stackTrace = sw.toString(); } else { stackTrace = "No exception"; } email.addArgument(stackTrace); email.send(); } } catch (Exception e) { log.warn("Unable to send email alert", e); } } /** * Query the OAI-PMH provider for a specific metadata record. * @param oaiSource the address of the OAI-PMH provider * @param itemOaiId the OAI identifier of the target item * @param metadataPrefix the OAI metadataPrefix of the desired metadata * @return list of JDOM elements corresponding to the metadata entries in the located record. 
*/ private List<Element> getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix) throws IOException, ParserConfigurationException, SAXException, TransformerException, HarvestingException { GetRecord getRecord = new GetRecord(oaiSource,itemOaiId,metadataPrefix); Set<String> errorSet = new HashSet<String>(); // If the metadata is not available for this item, can the whole thing if (getRecord != null && getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { for (int i=0; i<getRecord.getErrors().getLength(); i++) { String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent(); errorSet.add(errorCode); } throw new HarvestingException("OAI server returned the following errors during getDescMD execution: " + errorSet.toString()); } Document record = db.build(getRecord.getDocument()); Element root = record.getRootElement(); return root.getChild("GetRecord",OAI_NS).getChild("record", OAI_NS).getChild("metadata",OAI_NS).getChildren(); } /** * Verify OAI settings for the current collection * @return list of errors encountered during verification. Empty list indicates a "success" condition. */ public List<String> verifyOAIharvester() { String oaiSource = harvestRow.getOaiSource(); String oaiSetId = harvestRow.getOaiSetId(); String metaPrefix = harvestRow.getHarvestMetadataConfig(); return verifyOAIharvester(oaiSource, oaiSetId, metaPrefix, true); } /** * Verify the existance of an OAI server with the specified set and supporting the provided metadata formats. * @param oaiSource the address of the OAI-PMH provider * @param oaiSetId * @param metaPrefix * @param testORE whether the method should also check the PMH provider for ORE support * @return list of errors encountered during verification. Empty list indicates a "success" condition. 
*/ public static List<String> verifyOAIharvester(String oaiSource, String oaiSetId, String metaPrefix, boolean testORE) { List<String> errorSet = new ArrayList<String>(); // First, make sure the metadata we need is supported by the target server Namespace ORE_NS = OAIHarvester.getORENamespace(); String OREOAIPrefix = null; Namespace DMD_NS = OAIHarvester.getDMDNamespace(metaPrefix); String DMDOAIPrefix = null; try { Identify idenTest = new Identify(oaiSource); } catch (Exception ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached."); return errorSet; } try { OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, ORE_NS.getURI()); DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI()); } catch (Exception ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI did not respond to ListMetadataFormats query (" + ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; " + DMD_NS.getPrefix() + ":" + DMDOAIPrefix + ")"); return errorSet; } if (testORE && OREOAIPrefix == null) { errorSet.add(OAI_ORE_ERROR + ": The OAI server does not support ORE dissemination"); } if (DMDOAIPrefix == null) { errorSet.add(OAI_DMD_ERROR + ": The OAI server does not support dissemination in this format"); } // Now scan the sets and make sure the one supplied is in the list boolean foundSet = false; try { //If we do not want to harvest from one set, then skip this. 
if(!"all".equals(oaiSetId)){ ListSets ls = new ListSets(oaiSource); // The only error we can really get here is "noSetHierarchy" if (ls.getErrors() != null && ls.getErrors().getLength() > 0) { for (int i=0; i<ls.getErrors().getLength(); i++) { String errorCode = ls.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent(); errorSet.add(errorCode); } } else { // Drilling down to /OAI-PMH/ListSets/set Document reply = db.build(ls.getDocument()); Element root = reply.getRootElement(); List<Element> sets= root.getChild("ListSets",OAI_NS).getChildren("set",OAI_NS); for (Element set : sets) { String setSpec = set.getChildText("setSpec", OAI_NS); if (setSpec.equals(oaiSetId)) { foundSet = true; break; } } if (!foundSet) { errorSet.add(OAI_SET_ERROR + ": The OAI server does not have a set with the specified setSpec"); } } } } catch (RuntimeException re) { throw re; } catch (Exception e) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached"); return errorSet; } return errorSet; } /** * Start harvest scheduler. */ public static synchronized void startNewScheduler() throws SQLException, AuthorizeException { Context c = new Context(); HarvestedCollection.exists(c); c.complete(); if (mainHarvestThread != null && harvester != null) { stopScheduler(); } harvester = new HarvestScheduler(); HarvestScheduler.interrupt = HarvestScheduler.HARVESTER_INTERRUPT_NONE; mainHarvestThread = new Thread(harvester); mainHarvestThread.start(); } /** * Stop an active harvest scheduler. */ public static synchronized void stopScheduler() throws SQLException, AuthorizeException { synchronized(HarvestScheduler.lock) { HarvestScheduler.interrupt = HarvestScheduler.HARVESTER_INTERRUPT_STOP; HarvestScheduler.lock.notify(); } mainHarvestThread = null; harvester = null; } /** * Pause an active harvest scheduler. 
*/ public static void pauseScheduler() throws SQLException, AuthorizeException { synchronized(HarvestScheduler.lock) { HarvestScheduler.interrupt = HarvestScheduler.HARVESTER_INTERRUPT_PAUSE; HarvestScheduler.lock.notify(); } } /** * Resume a paused harvest scheduler. */ public static void resumeScheduler() throws SQLException, AuthorizeException { HarvestScheduler.interrupt = HarvestScheduler.HARVESTER_INTERRUPT_RESUME; } public static void resetScheduler() throws SQLException, AuthorizeException, IOException { Context context = new Context(); List<Integer> cids = HarvestedCollection.findAll(context); for (Integer cid : cids) { HarvestedCollection hc = HarvestedCollection.find(context, cid); hc.setHarvestStartTime(null); hc.setHarvestStatus(HarvestedCollection.STATUS_READY); hc.update(); } context.commit(); } /** * Exception class specifically assigned to recoverable errors that occur during harvesting. Throughout the harvest process, various exceptions * are caught and turned into a HarvestingException. Uncaught exceptions are irrecoverable errors. * @author alexey */ public static class HarvestingException extends Exception { public HarvestingException() { super(); } public HarvestingException(String message, Throwable t) { super(message, t); } public HarvestingException(String message) { super(message); } public HarvestingException(Throwable t) { super(t); } } /** * The class responsible for scheduling harvesting cycles are regular intervals. 
* @author alexey
     */
    public static class HarvestScheduler implements Runnable
    {
        private EPerson harvestAdmin;
        private Context mainContext;

        /** Monitor used to wake the scheduler out of its between-cycle sleep. */
        public static final Object lock = new Object();

        /** Harvest jobs queued up and waiting to be started. */
        private static Stack<HarvestThread> harvestThreads;

        /** Maximum number of harvest threads allowed to run at once. */
        private static Integer maxActiveThreads;

        /** Number of harvest threads currently running. */
        protected static volatile Integer activeThreads = 0;

        public static final int HARVESTER_STATUS_RUNNING = 1;
        public static final int HARVESTER_STATUS_SLEEPING = 2;
        public static final int HARVESTER_STATUS_PAUSED = 3;
        public static final int HARVESTER_STATUS_STOPPED = 4;

        public static final int HARVESTER_INTERRUPT_NONE = 0;
        public static final int HARVESTER_INTERRUPT_PAUSE = 1;
        public static final int HARVESTER_INTERRUPT_STOP = 2;
        public static final int HARVESTER_INTERRUPT_RESUME = 3;
        public static final int HARVESTER_INTERRUPT_INSERT_THREAD = 4;
        public static final int HARVESTER_INTERRUPT_KILL_THREAD = 5;

        private static int status = HARVESTER_STATUS_STOPPED;
        private static int interrupt = HARVESTER_INTERRUPT_NONE;
        private static Integer interruptValue = 0;

        private static long minHeartbeat;
        private static long maxHeartbeat;

        public static boolean hasStatus(int statusToCheck)
        {
            return status == statusToCheck;
        }

        public static synchronized void setInterrupt(int newInterrupt)
        {
            interrupt = newInterrupt;
        }

        public static synchronized void setInterrupt(int newInterrupt, int newInterruptValue)
        {
            interrupt = newInterrupt;
            interruptValue = newInterruptValue;
        }

        /** Human-readable description of the scheduler's current state. */
        public static String getStatus()
        {
            switch (status)
            {
            case HARVESTER_STATUS_RUNNING:
                switch (interrupt)
                {
                case HARVESTER_INTERRUPT_PAUSE:
                    return("The scheduler is finishing active harvests before pausing. ");
                case HARVESTER_INTERRUPT_STOP:
                    return("The scheduler is shutting down. ");
                }
                return("The scheduler is actively harvesting collections. ");
            case HARVESTER_STATUS_SLEEPING:
                return("The scheduler is waiting for collections to harvest. ");
            case HARVESTER_STATUS_PAUSED:
                return("The scheduler is paused. ");
            default:
                return("Automatic harvesting is not active. ");
            }
        }

        public HarvestScheduler() throws SQLException, AuthorizeException
        {
            mainContext = new Context();
            String harvestAdminParam = ConfigurationManager.getProperty("harvester.eperson");
            harvestAdmin = null;
            if (harvestAdminParam != null && harvestAdminParam.length() > 0)
            {
                harvestAdmin = EPerson.findByEmail(mainContext, harvestAdminParam);
            }

            harvestThreads = new Stack<HarvestThread>();

            // getIntProperty returns 0 for unset properties, so each setting has a default
            maxActiveThreads = ConfigurationManager.getIntProperty("harvester.maxThreads");
            if (maxActiveThreads == 0)
            {
                maxActiveThreads = 3;
            }
            minHeartbeat = ConfigurationManager.getIntProperty("harvester.minHeartbeat") * 1000;
            if (minHeartbeat == 0)
            {
                minHeartbeat = 30000;
            }
            maxHeartbeat = ConfigurationManager.getIntProperty("harvester.maxHeartbeat") * 1000;
            if (maxHeartbeat == 0)
            {
                maxHeartbeat = 3600000;
            }
        }

        public void run()
        {
            scheduleLoop();
        }

        private void scheduleLoop()
        {
            long i = 0;
            while (true)
            {
                try
                {
                    synchronized (HarvestScheduler.class)
                    {
                        switch (interrupt)
                        {
                        case HARVESTER_INTERRUPT_NONE:
                            break;
                        case HARVESTER_INTERRUPT_INSERT_THREAD:
                            interrupt = HARVESTER_INTERRUPT_NONE;
                            addThread(interruptValue);
                            interruptValue = 0;
                            break;
                        case HARVESTER_INTERRUPT_PAUSE:
                            interrupt = HARVESTER_INTERRUPT_NONE;
                            status = HARVESTER_STATUS_PAUSED;
                            // BUGFIX: this case previously fell through into the STOP case,
                            // so a pause request shut the scheduler down instead of pausing it
                            break;
                        case HARVESTER_INTERRUPT_STOP:
                            interrupt = HARVESTER_INTERRUPT_NONE;
                            status = HARVESTER_STATUS_STOPPED;
                            return;
                        }
                    }

                    if (status == HARVESTER_STATUS_PAUSED)
                    {
                        // Poll until we are either resumed or stopped
                        while (interrupt != HARVESTER_INTERRUPT_RESUME && interrupt != HARVESTER_INTERRUPT_STOP)
                        {
                            Thread.sleep(1000);
                        }

                        // BUGFIX: the stop/resume test was inverted — a resume request used to
                        // terminate the scheduler while a stop request kept it running
                        if (interrupt == HARVESTER_INTERRUPT_STOP)
                        {
                            interrupt = HARVESTER_INTERRUPT_NONE;
                            status = HARVESTER_STATUS_STOPPED;
                            return;
                        }
                        // resumed: clear the interrupt and fall through into the normal cycle
                        interrupt = HARVESTER_INTERRUPT_NONE;
                    }

                    status = HARVESTER_STATUS_RUNNING;

                    // Stage #1: if something is ready for harvest, push it onto the ready stack, mark it as "queued"
                    mainContext = new Context();
                    List<Integer> cids = HarvestedCollection.findReady(mainContext);
                    log.info("Collections ready for immediate harvest: " + cids.toString());

                    for (Integer cid : cids)
                    {
                        addThread(cid);
                    }

                    // Stage #2: start up all the threads currently in the queue up to the maximum number
                    while (!harvestThreads.isEmpty())
                    {
                        synchronized (HarvestScheduler.class)
                        {
                            activeThreads++;
                        }
                        Thread activeThread = new Thread(harvestThreads.pop());
                        activeThread.start();
                        log.info("Thread started: " + activeThread.toString());

                        /* Wait while the number of threads running is greater than or equal to max */
                        while (activeThreads >= maxActiveThreads)
                        {
                            /* Wait a second */
                            Thread.sleep(1000);
                        }
                    }

                    // Finally, wait for the last few remaining threads to finish
                    // TODO: this step might be unnecessary. Theoretically a single very long harvest process
                    // could then lock out all the other ones from starting on their next iteration.
                    // FIXME: also, this might lead to a situation when a single thread getting stuck without
                    // throwing an exception would shut down the whole scheduler
                    while (activeThreads != 0)
                    {
                        /* Wait a second */
                        Thread.sleep(1000);
                    }

                    // Commit everything
                    try
                    {
                        mainContext.commit();
                        mainContext.complete();
                        log.info("Done with iteration " + i);
                    }
                    catch (SQLException e)
                    {
                        e.printStackTrace();
                        mainContext.abort();
                    }
                }
                catch (Exception e)
                {
                    log.error("Exception on iteration: " + i);
                    e.printStackTrace();
                }

                // Stage #3: figure out how long until the next iteration and wait
                try
                {
                    Context tempContext = new Context();
                    int nextCollectionId = HarvestedCollection.findOldestHarvest(tempContext);
                    HarvestedCollection hc = HarvestedCollection.find(tempContext, nextCollectionId);

                    int harvestInterval = ConfigurationManager.getIntProperty("harvester.harvestFrequency");
                    if (harvestInterval == 0)
                    {
                        harvestInterval = 720;
                    }

                    Date nextTime;
                    long nextHarvest = 0;
                    if (hc != null)
                    {
                        Calendar calendar = Calendar.getInstance();
                        calendar.setTime(hc.getHarvestDate());
                        calendar.add(Calendar.MINUTE, harvestInterval);
                        nextTime = calendar.getTime();
                        nextHarvest = nextTime.getTime() - new Date().getTime();
                    }

                    // Sleep until the next collection is due, clamped to [minHeartbeat, maxHeartbeat]
                    long upperBound = Math.min(nextHarvest, maxHeartbeat);
                    long delay = Math.max(upperBound, minHeartbeat) + 1000;

                    tempContext.complete();

                    status = HARVESTER_STATUS_SLEEPING;
                    synchronized (lock)
                    {
                        lock.wait(delay);
                    }
                }
                catch (InterruptedException ie)
                {
                    log.warn("Interrupt: " + ie.getMessage());
                }
                catch (SQLException e)
                {
                    e.printStackTrace();
                }

                i++;
            }
        }

        /**
         * Adds a thread to the ready stack. Can also be called externally to queue up a collection
         * for harvesting before it is "due" for another cycle. This allows starting a harvest process
         * from the UI that still "plays nice" with these thread mechanics instead of making an
         * asynchronous call to runHarvest().
         */
        public static void addThread(int collectionID) throws SQLException, IOException, AuthorizeException
        {
            log.debug("****** Entered the addThread method. Active threads: " + harvestThreads.toString());
            Context subContext = new Context();
            //subContext.setCurrentUser(harvestAdmin);

            HarvestedCollection hc = HarvestedCollection.find(subContext, collectionID);
            hc.setHarvestStatus(HarvestedCollection.STATUS_QUEUED);
            hc.update();
            subContext.commit();

            HarvestThread ht = new HarvestThread(subContext, hc);
            harvestThreads.push(ht);
            log.debug("****** Queued up a thread. Active threads: " + harvestThreads.toString());
            log.info("Thread queued up: " + ht.toString());
        }
    }

    /**
     * A harvester thread used to execute a single harvest cycle on a collection
     * @author alexey
     */
    private static class HarvestThread extends Thread
    {
        Context context;
        HarvestedCollection hc;

        HarvestThread(Context context, HarvestedCollection hc) throws SQLException
        {
            this.context = context;
            this.hc = hc;
        }

        public void run()
        {
            log.info("Thread for collection " + hc.getCollectionId() + " starts.");
            runHarvest();
        }

        private void runHarvest()
        {
            Collection dso = null;
            try
            {
                dso = Collection.find(context, hc.getCollectionId());
                OAIHarvester harvester = new OAIHarvester(context, dso, hc);
                harvester.runHarvest();
            }
            catch (RuntimeException e)
            {
                log.error("Runtime exception in thread: " + this.toString());
                log.error(e.getMessage() + " " + e.getCause());
                hc.setHarvestMessage("Runtime error occured while generating an OAI response");
                hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
            }
            catch (Exception ex)
            {
                log.error("General exception in thread: " + this.toString());
                log.error(ex.getMessage() + " " + ex.getCause());
                hc.setHarvestMessage("Error occured while generating an OAI response");
                hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
            }
            finally
            {
                // Whatever happened, persist the collection's status and release the context
                try
                {
                    hc.update();
                    context.restoreAuthSystemState();
                    context.complete();
                }
                catch (RuntimeException e)
                {
                    log.error("Unexpected exception while recovering from a harvesting error: " + e.getMessage(), e);
                    context.abort();
                }
                catch (Exception e)
                {
                    log.error("Unexpected exception while recovering from a harvesting error: " + e.getMessage(), e);
                    context.abort();
                }

                synchronized (HarvestScheduler.class)
                {
                    HarvestScheduler.activeThreads--;
                }
            }

            log.info("Thread for collection " + hc.getCollectionId() + " completes.");
        }
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.submit; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.dspace.app.util.SubmissionInfo; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; /** * Abstract processing class for DSpace Submission Steps. This defines the base * methods which are required for any Step processing class. * <P> * This abstract class defines the base methods which are used by both the * Manakin XML UI and the JSP UI to perform submission step processing. * <P> * This includes the following methods: * <ul> * <li>doProcessing() method - called to perform any step processing</li> * <li>getErrorFields() method - called to determine the fields which errored * out during processing</li> * <li>getErrorMessage() method - called to determine any error message * returned after processing</li> * </ul> * <P> * If you are using the JSP UI (with the SubmissionController servlet) you * should extend the org.dspace.submit.SubmissionStep class, which defines * additional methods used to maintain the context of the submission within a * JSP environment! 
 *
 * @see org.dspace.app.webui.submit.JSPStepManager
 * @see org.dspace.app.webui.servlet.SubmissionController
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public abstract class AbstractProcessingStep
{
    /***************************************************************************
     * Constant - Name of the "<-Previous" button
     **************************************************************************/
    public static final String PREVIOUS_BUTTON = "submit_prev";

    /***************************************************************************
     * Constant - Name of the "Next->" button
     **************************************************************************/
    public static final String NEXT_BUTTON = "submit_next";

    /***************************************************************************
     * Constant - Name of the "Cancel/Save" button
     **************************************************************************/
    public static final String CANCEL_BUTTON = "submit_cancel";

    /***************************************************************************
     * Constant - Prefix of all buttons in the Progress Bar
     **************************************************************************/
    public static final String PROGRESS_BAR_PREFIX = "submit_jump_";

    /***************************************************************************
     * Flag which specifies that the LAST PAGE of a step has been reached. This
     * flag is used when a Workflow Item is rejected (and returned to the
     * workspace) to specify that the LAST PAGE of the LAST STEP has already
     * been reached
     **************************************************************************/
    public static final int LAST_PAGE_REACHED = Integer.MAX_VALUE;

    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     **************************************************************************/
    public static final int STATUS_COMPLETE = 0;

    // NOTE(review): the meaning of this flag is not evident from this file —
    // presumably signals that the submission's document type changed; confirm against callers
    public static final int NEW_DOC_TYPE = 1;

    /**
     * Maps each status/error flag to a textual, human understandable message
     */
    private Map<Integer, String> errorMessages = null;

    // Request attribute key under which the list of errored field names is stored
    private static final String ERROR_FIELDS_ATTRIBUTE = "dspace.submit.error_fields";

    /**
     * Do any processing of the information input by the user, and/or perform
     * step processing (if no user interaction required)
     * <P>
     * It is this method's job to save any data to the underlying database, as
     * necessary, and return error messages (if any) which can then be processed
     * by the doPostProcessing() method.
     * <P>
     * NOTE: If this step is a non-interactive step (i.e. requires no UI), then
     * it should perform *all* of its processing in this method!
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public abstract int doProcessing(Context context,
            HttpServletRequest request, HttpServletResponse response,
            SubmissionInfo subInfo) throws ServletException, IOException,
            SQLException, AuthorizeException;

    /**
     * Return a list of all UI fields which had errors that occurred during the
     * step processing.
This list is for usage in generating the appropriate * error message(s) in the UI. * <P> * The list of fields which had errors should be set by the AbstractProcessingStep's * doProcessing() method, so that it can be accessed later by whatever UI is * generated. * * @param request * current servlet request object * @return List of error fields (as Strings) */ public static final List<String> getErrorFields(HttpServletRequest request) { return (List<String>) request.getAttribute(ERROR_FIELDS_ATTRIBUTE); } /** * Sets the list of all UI fields which had errors that occurred during the * step processing. This list is for usage in generating the appropriate * error message(s) in the UI. * <P> * The list of fields which had errors should be set by the AbstractProcessingStep's * doProcessing() method, so that it can be accessed later by whatever UI is * generated. * * @param request * current servlet request object * @param errorFields * List of all fields (as Strings) which had errors */ private static final void setErrorFields(HttpServletRequest request, List<String> errorFields) { if(errorFields==null) { request.removeAttribute(ERROR_FIELDS_ATTRIBUTE); } else { request.setAttribute(ERROR_FIELDS_ATTRIBUTE, errorFields); } } /** * Add a single UI field to the list of all error fields (which can * later be retrieved using getErrorFields()) * <P> * The list of fields which had errors should be set by the AbstractProcessingStep's * doProcessing() method, so that it can be accessed later by whatever UI is * generated. 
* * @param fieldName * the name of the field which had an error */ protected static final void addErrorField(HttpServletRequest request, String fieldName) { //get current list List<String> errorFields = getErrorFields(request); if (errorFields == null) { errorFields = new ArrayList<String>(); } //add this field errorFields.add(fieldName); //save updated list setErrorFields(request, errorFields); } /** * Clears the list of all fields that errored out during the previous step's * processing. * * @param request * current servlet request object * */ protected static final void clearErrorFields(HttpServletRequest request) { //get current list List<String> errorFields = getErrorFields(request); if (errorFields != null) { setErrorFields(request, null); } } /** * Return the text of an error message based on the passed in error flag. * These error messages are used for non-interactive steps (so that they can * log something more specific than just an error flag) * <P> * Since each step can define its own error messages and flags, this method * depends on all the error messages being initialized by using the * "addErrorMessage()" method within the constructor for the step class! * * @param errorFlag * The error flag defined in this step which represents an error * message. * @return String which contains the text of the error message, or null if * error message not found */ public final String getErrorMessage(int errorFlag) { if (this.errorMessages == null || this.errorMessages.size() == 0) { return null; } else { return this.errorMessages.get(Integer.valueOf(errorFlag)); } } /** * Add an error message to the internal map for this step. * <P> * This method associates a specific error message with an error flag * defined in this step. * <P> * This is extremely useful to define the error message which will be logged * for a non-interactive step. 
*
     * @param errorFlag
     *            the status value indicating the type of error
     * @param errorMessage
     *            text of the message to be added
     */
    protected final void addErrorMessage(int errorFlag, String errorMessage)
    {
        if (this.errorMessages == null)
        {
            // Lazily initialize, since many steps never register any messages
            this.errorMessages = new HashMap<Integer, String>();
        }

        errorMessages.put(Integer.valueOf(errorFlag), errorMessage);
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This method
     * is used by the SubmissionController to build the progress bar.
     * <P>
     * This method may just return 1 for most steps (since most steps consist of
     * a single page). But, it should return a number greater than 1 for any
     * "step" which spans across a number of HTML pages. For example, the
     * configurable "Describe" step (configured using input-forms.xml) overrides
     * this method to return the number of pages that are defined by its
     * configuration file.
     * <P>
     * Steps which are non-interactive (i.e. they do not display an interface to
     * the user) should return a value of 1, so that they are only processed
     * once!
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public abstract int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException;

    /**
     * Find out which page a user is currently viewing
     *
     * @param request
     *            HTTP request
     *
     * @return current page
     */
    public static final int getCurrentPage(HttpServletRequest request)
    {
        int pageNum = -1;

        // try to retrieve cached page from request attribute
        Integer currentPage = (Integer) request.getAttribute("submission.page");

        if (currentPage == null)
        {
            // try and get it as a 'page' parameter
            String val = request.getParameter("page");

            try
            {
                pageNum = Integer.parseInt(val.trim());
            }
            catch (Exception e)
            {
                // Problem with parameter (missing, blank, or non-numeric)
                pageNum = -1;
            }

            // if couldn't find page in request parameter, default to page #1,
            // since there are no other options
            if (pageNum < 0)
            {
                pageNum = 1;
            }

            // cache in the request attribute (both branches of the original
            // if/else did exactly this, so they were merged)
            setCurrentPage(request, pageNum);
        }
        else
        {
            pageNum = currentPage.intValue();
        }

        return pageNum;
    }

    /**
     * Set which page a user is currently viewing
     *
     * @param request
     *            HTTP request
     * @param pageNumber
     *            new current page
     */
    public static final void setCurrentPage(HttpServletRequest request,
            int pageNumber)
    {
        // set info to request
        request.setAttribute("submission.page", Integer.valueOf(pageNumber));
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Enumeration;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;
import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.ConfigurationManager;
import org.dspace.submit.AbstractProcessingStep;

/**
 * Upload step for DSpace. Processes the actual upload of files for an item
 * being submitted into DSpace.
 * <P>
 * This class performs all the behind-the-scenes processing that this
 * particular step requires. This class's methods are utilized by both the
 * JSP-UI and the Manakin XML-UI.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class UploadStep extends AbstractProcessingStep
{
    /** Button to upload a file */
    public static final String SUBMIT_UPLOAD_BUTTON = "submit_upload";

    /** Button to skip uploading a file */
    public static final String SUBMIT_SKIP_BUTTON = "submit_skip";

    /** Button to submit more files */
    public static final String SUBMIT_MORE_BUTTON = "submit_more";

    /** Button to cancel editing of file info */
    public static final String CANCEL_EDIT_BUTTON = "submit_edit_cancel";

    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    // integrity error occurred
    public static final int STATUS_INTEGRITY_ERROR = 1;

    // error in uploading file
    public static final int STATUS_UPLOAD_ERROR = 2;

    // error - no files uploaded!
    public static final int STATUS_NO_FILES_ERROR = 5;

    // format of uploaded file is unknown
    public static final int STATUS_UNKNOWN_FORMAT = 10;

    // edit file information
    public static final int STATUS_EDIT_BITSTREAM = 20;

    // return from editing file information
    public static final int STATUS_EDIT_COMPLETE = 25;

    /** request-parameter prefix used for per-bitstream "remove" buttons */
    private static final String REMOVE_BUTTON_PREFIX = "submit_remove_";

    /** request-parameter prefix used for per-bitstream "edit" buttons */
    private static final String EDIT_BUTTON_PREFIX = "submit_edit_";

    /** log4j logger */
    private static Logger log = Logger.getLogger(UploadStep.class);

    /** is the upload required? (configurable; defaults to true) */
    private boolean fileRequired = ConfigurationManager.getBooleanProperty(
            "webui.submit.upload.required", true);

    /**
     * Do any processing of the information input by the user, and/or perform
     * step processing (if no user interaction required)
     * <P>
     * It is this method's job to save any data to the underlying database, as
     * necessary, and return error messages (if any) which can then be
     * processed by the appropriate user interface (JSP-UI or XML-UI)
     * <P>
     * NOTE: If this step is a non-interactive step (i.e. requires no UI), then
     * it should perform *all* of its processing in this method!
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // get button user pressed
        String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);

        // get reference to item
        Item item = subInfo.getSubmissionItem().getItem();

        // -----------------------------------
        // Step #0: Upload new files (if any)
        // -----------------------------------
        String contentType = request.getContentType();

        // if multipart form, then we are uploading a file
        if ((contentType != null)
                && (contentType.indexOf("multipart/form-data") != -1))
        {
            // This is a multipart request, so it's a file upload
            // (return any status messages or errors reported)
            int status = processUploadFile(context, request, response, subInfo);

            // if error occurred, return immediately
            if (status != STATUS_COMPLETE)
            {
                return status;
            }
        }

        // if user pressed jump-to button in process bar,
        // return success (so that jump will occur)
        if (buttonPressed.startsWith(PROGRESS_BAR_PREFIX))
        {
            // check if a file is required to be uploaded
            if (fileRequired && !item.hasUploadedFiles())
            {
                return STATUS_NO_FILES_ERROR;
            }
            else
            {
                return STATUS_COMPLETE;
            }
        }

        // ---------------------------------------------
        // Step #1: Check if this was just a request to
        // edit file information.
        // (or canceled editing information)
        // ---------------------------------------------
        // check if we're already editing a specific bitstream
        if (request.getParameter("bitstream_id") != null)
        {
            if (buttonPressed.equals(CANCEL_EDIT_BUTTON))
            {
                // canceled an edit bitstream request
                subInfo.setBitstream(null);

                // this flag will just return us to the normal upload screen
                return STATUS_EDIT_COMPLETE;
            }
            else
            {
                // load info for bitstream we are editing
                Bitstream b = Bitstream.find(context, Integer.parseInt(request
                        .getParameter("bitstream_id")));

                // save bitstream to submission info
                subInfo.setBitstream(b);
            }
        }
        else if (buttonPressed.startsWith(EDIT_BUTTON_PREFIX))
        {
            // get ID of bitstream that was requested for editing
            String bitstreamID = buttonPressed.substring(EDIT_BUTTON_PREFIX
                    .length());

            Bitstream b = Bitstream
                    .find(context, Integer.parseInt(bitstreamID));

            // save bitstream to submission info
            subInfo.setBitstream(b);

            // return appropriate status flag to say we are now editing the
            // bitstream
            return STATUS_EDIT_BITSTREAM;
        }

        // ---------------------------------------------
        // Step #2: Process any remove file request(s)
        // ---------------------------------------------
        // Remove-selected requests come from Manakin
        if (buttonPressed.equalsIgnoreCase("submit_remove_selected"))
        {
            // this is a remove multiple request!
            if (request.getParameter("remove") != null)
            {
                // get all files to be removed
                String[] removeIDs = request.getParameterValues("remove");

                // remove each file in the list
                for (int i = 0; i < removeIDs.length; i++)
                {
                    int id = Integer.parseInt(removeIDs[i]);

                    int status = processRemoveFile(context, item, id);

                    // if error occurred, return immediately
                    if (status != STATUS_COMPLETE)
                    {
                        return status;
                    }
                }

                // remove current bitstream from Submission Info
                subInfo.setBitstream(null);
            }
        }
        else if (buttonPressed.startsWith(REMOVE_BUTTON_PREFIX))
        {
            // A single file "remove" button must have been pressed
            int id = Integer.parseInt(buttonPressed
                    .substring(REMOVE_BUTTON_PREFIX.length()));
            int status = processRemoveFile(context, item, id);

            // if error occurred, return immediately
            if (status != STATUS_COMPLETE)
            {
                return status;
            }

            // remove current bitstream from Submission Info
            subInfo.setBitstream(null);
        }

        // -------------------------------------------------
        // Step #3: Check for a change in file description
        // -------------------------------------------------
        String fileDescription = request.getParameter("description");

        if (fileDescription != null && fileDescription.length() > 0)
        {
            // save this file description
            int status = processSaveFileDescription(context, request, response,
                    subInfo);

            // if error occurred, return immediately
            if (status != STATUS_COMPLETE)
            {
                return status;
            }
        }

        // ------------------------------------------
        // Step #4: Check for a file format change
        // (if user had to manually specify format)
        // ------------------------------------------
        int formatTypeID = Util.getIntParameter(request, "format");
        String formatDesc = request.getParameter("format_description");

        // if a format id or description was found, then save this format!
        if (formatTypeID >= 0
                || (formatDesc != null && formatDesc.length() > 0))
        {
            // save this specified format
            int status = processSaveFileFormat(context, request, response,
                    subInfo);

            // if error occurred, return immediately
            if (status != STATUS_COMPLETE)
            {
                return status;
            }
        }

        // ---------------------------------------------------
        // Step #5: Check if primary bitstream has changed
        // -------------------------------------------------
        if (request.getParameter("primary_bitstream_id") != null)
        {
            Bundle[] bundles = item.getBundles("ORIGINAL");
            if (bundles.length > 0)
            {
                bundles[0].setPrimaryBitstreamID(Integer.valueOf(request
                        .getParameter("primary_bitstream_id")).intValue());
                bundles[0].update();
            }
        }

        // ---------------------------------------------------
        // Step #6: Determine if there is an error because no
        // files have been uploaded.
        // ---------------------------------------------------
        // check if a file is required to be uploaded
        if (fileRequired && !item.hasUploadedFiles())
        {
            return STATUS_NO_FILES_ERROR;
        }

        // commit all changes to database
        context.commit();

        return STATUS_COMPLETE;
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This
     * method is used to build the progress bar.
     * <P>
     * Despite using many JSPs, this step only appears ONCE in the progress
     * bar, so it is always a single page.
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        return 1;
    }

    // ****************************************************************
    // ****************************************************************
    // METHODS FOR UPLOADING FILES (and associated information)
    // ****************************************************************
    // ****************************************************************

    /**
     * Remove a file from an item
     *
     * @param context
     *            current DSpace context
     * @param item
     *            Item where file should be removed from
     * @param bitstreamID
     *            The id of bitstream representing the file to remove
     * @return Status or error flag which will be processed by
     *         UI-related code! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processRemoveFile(Context context, Item item, int bitstreamID)
            throws IOException, SQLException, AuthorizeException
    {
        Bitstream bitstream;

        // Try to find bitstream
        try
        {
            bitstream = Bitstream.find(context, bitstreamID);
        }
        catch (NumberFormatException nfe)
        {
            bitstream = null;
        }

        if (bitstream == null)
        {
            // Invalid or mangled bitstream ID
            // throw an error and return immediately
            return STATUS_INTEGRITY_ERROR;
        }

        // remove bitstream from its bundle (and the bundle from the item if
        // it is now empty)
        removeBitstreamFromItem(item, bitstream);

        // no errors occurred
        return STATUS_COMPLETE;
    }

    /**
     * Detach a bitstream from its (first) bundle, and remove the bundle from
     * the item entirely if the bundle is left empty.
     * <P>
     * Shared by {@link #processRemoveFile} and the internal-format cleanup in
     * {@link #processUploadFile}.
     *
     * @param item
     *            Item owning the bundle
     * @param bitstream
     *            the bitstream to remove
     */
    private void removeBitstreamFromItem(Item item, Bitstream bitstream)
            throws IOException, SQLException, AuthorizeException
    {
        Bundle[] bundles = bitstream.getBundles();

        // defensive: a bitstream with no bundle has nothing to detach
        // (the previous code would have thrown ArrayIndexOutOfBoundsException)
        if (bundles.length < 1)
        {
            return;
        }

        bundles[0].removeBitstream(bitstream);

        // remove bundle if it's now empty
        if (bundles[0].getBitstreams().length < 1)
        {
            item.removeBundle(bundles[0]);
            item.update();
        }
    }

    /**
     * Strip all leading path components (both '/' and '\' separators) from a
     * file path, leaving just the filename. We cannot know which OS the file
     * came from, so both separator styles are handled.
     *
     * @param filePath
     *            full path as reported by the uploading client
     * @return the bare filename
     */
    private static String stripPath(String filePath)
    {
        int lastSeparator = Math.max(filePath.lastIndexOf('/'), filePath
                .lastIndexOf('\\'));
        return (lastSeparator < 0) ? filePath : filePath
                .substring(lastSeparator + 1);
    }

    /**
     * Process the upload of a new file!
     * <P>
     * NOTE: the file itself should already be uploaded by the time this runs.
     * Manakin does this automatically via Cocoon; for the JSP-UI,
     * SubmissionController.uploadFiles() performs the actual upload and
     * stashes the results in "*-path" / "*-inputstream" / "*-description"
     * request attributes, which this method consumes.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     *
     * @return Status or error flag which will be processed by
     *         UI-related code! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processUploadFile(Context context,
            HttpServletRequest request, HttpServletResponse response,
            SubmissionInfo subInfo) throws ServletException, IOException,
            SQLException, AuthorizeException
    {
        boolean formatKnown = true;
        boolean fileOK = false;
        BitstreamFormat bf = null;
        Bitstream b = null;

        Enumeration attNames = request.getAttributeNames();

        // loop through our request attributes
        while (attNames.hasMoreElements())
        {
            String attr = (String) attNames.nextElement();

            // if this ends with "-path", this attribute
            // represents a newly uploaded file
            if (attr.endsWith("-path"))
            {
                // strip off the -path to get the actual parameter
                // that the file was uploaded as
                String param = attr.replace("-path", "");

                // Load the file's path and input stream and description
                String filePath = (String) request.getAttribute(param
                        + "-path");
                InputStream fileInputStream = (InputStream) request
                        .getAttribute(param + "-inputstream");

                // attempt to get description from attribute first, then
                // direct from a parameter
                String fileDescription = (String) request.getAttribute(param
                        + "-description");
                if (fileDescription == null || fileDescription.length() == 0)
                {
                    // BUGFIX: previously the parameter value was fetched but
                    // never assigned, so JSP-UI descriptions were discarded
                    fileDescription = request.getParameter("description");
                }

                // if information wasn't passed by User Interface, we had a
                // problem with the upload
                if (filePath == null || fileInputStream == null)
                {
                    return STATUS_UPLOAD_ERROR;
                }

                if (subInfo == null)
                {
                    // In any event, if we don't have the submission info, the
                    // request was malformed
                    return STATUS_INTEGRITY_ERROR;
                }

                // Create the bitstream
                Item item = subInfo.getSubmissionItem().getItem();

                // do we already have a bundle?
                Bundle[] bundles = item.getBundles("ORIGINAL");

                if (bundles.length < 1)
                {
                    // set bundle's name to ORIGINAL
                    b = item.createSingleBitstream(fileInputStream, "ORIGINAL");
                }
                else
                {
                    // we have a bundle already, just add bitstream
                    b = bundles[0].createBitstream(fileInputStream);
                }

                // Strip all but the last filename component
                b.setName(stripPath(filePath));
                b.setSource(filePath);
                b.setDescription(fileDescription);

                // Identify the format
                bf = FormatIdentifier.guessFormat(context, b);
                b.setFormat(bf);

                // Update to DB
                b.update();
                item.update();

                if (bf == null || !bf.isInternal())
                {
                    fileOK = true;
                }
                else
                {
                    log.warn("Attempt to upload file format marked as internal system use only");

                    // undo: detach the bitstream (and empty bundle)
                    removeBitstreamFromItem(item, b);
                    subInfo.setBitstream(null);
                }

                // as long as everything completed ok, commit changes.
                // Otherwise show error page.
                if (fileOK)
                {
                    context.commit();

                    // save this bitstream to the submission info, as the
                    // bitstream we're currently working with
                    subInfo.setBitstream(b);

                    // if format was not identified
                    if (bf == null)
                    {
                        // the bitstream format is unknown!
                        formatKnown = false;
                    }
                }
                else
                {
                    // if we get here there was a problem uploading the file!
                    return STATUS_UPLOAD_ERROR;
                }
            } // end if attribute ends with "-path"
        } // end while

        if (!formatKnown)
        {
            // return that the bitstream format is unknown!
            return STATUS_UNKNOWN_FORMAT;
        }
        else
        {
            return STATUS_COMPLETE;
        }
    }

    /**
     * Process input from get file type page
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     *
     * @return Status or error flag which will be processed by
     *         UI-related code! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processSaveFileFormat(Context context,
            HttpServletRequest request, HttpServletResponse response,
            SubmissionInfo subInfo) throws ServletException, IOException,
            SQLException, AuthorizeException
    {
        if (subInfo.getBitstream() != null)
        {
            // Did the user select a format?
            int typeID = Util.getIntParameter(request, "format");

            BitstreamFormat format = BitstreamFormat.find(context, typeID);

            if (format != null)
            {
                subInfo.getBitstream().setFormat(format);
            }
            else
            {
                // no known format selected; store the free-text description
                String userDesc = request.getParameter("format_description");

                subInfo.getBitstream().setUserFormatDescription(userDesc);
            }

            // update database
            subInfo.getBitstream().update();
        }
        else
        {
            return STATUS_INTEGRITY_ERROR;
        }

        return STATUS_COMPLETE;
    }

    /**
     * Process input from the "change file description" page
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     *
     * @return Status or error flag which will be processed by
     *         UI-related code! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processSaveFileDescription(Context context,
            HttpServletRequest request, HttpServletResponse response,
            SubmissionInfo subInfo) throws ServletException, IOException,
            SQLException, AuthorizeException
    {
        if (subInfo.getBitstream() != null)
        {
            subInfo.getBitstream().setDescription(
                    request.getParameter("description"));
            subInfo.getBitstream().update();

            context.commit();
        }
        else
        {
            return STATUS_INTEGRITY_ERROR;
        }

        return STATUS_COMPLETE;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.log4j.Logger;
import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.LicenseUtils;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.license.CreativeCommons;
import org.dspace.submit.AbstractProcessingStep;

/**
 * License step for the DSpace Submission Process. Handles the user's
 * response to both the standard DSpace deposit license and (when enabled)
 * the Creative Commons license page.
 * <P>
 * Performs all behind-the-scenes processing for this step; its methods are
 * shared by the JSP-UI and the Manakin XML-UI.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class LicenseStep extends AbstractProcessingStep
{
    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    /** status flag: the user rejected the license (or made no decision) */
    public static final int STATUS_LICENSE_REJECTED = 1;

    /** log4j logger */
    private static Logger log = Logger.getLogger(LicenseStep.class);

    /**
     * Dispatch to the Creative Commons page or the DSpace license page.
     * <P>
     * When CC licensing is enabled, page #1 of this step is the CC page;
     * otherwise (or on any later page) the standard DSpace license is
     * processed.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // Creative Commons licensing occupies page #1 when enabled
        if (CreativeCommons.isEnabled()
                && AbstractProcessingStep.getCurrentPage(request) == 1)
        {
            return processCC(context, request, response, subInfo);
        }

        // otherwise this is the general DSpace license
        return processLicense(context, request, response, subInfo);
    }

    /**
     * Process the input from the license page.
     * <P>
     * Manakin signals acceptance with a "decision=accept" parameter plus the
     * Next button; the JSP-UI uses an explicit "I Accept" button. Pressing
     * Next with no decision is reported back as a rejection so Manakin can
     * display an error.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     *
     * @return Status or error flag which will be processed by
     *         UI-related code! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processLicense(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        String pressed = Util.getSubmitButton(request, CANCEL_BUTTON);
        String decision = request.getParameter("decision");

        // Manakin: checkbox "accept" + Next button
        boolean acceptedViaCheckbox = (decision != null)
                && decision.equalsIgnoreCase("accept")
                && pressed.equals(NEXT_BUTTON);

        // JSP-UI: the user clicked "I Accept" directly
        boolean acceptedViaButton = pressed.equals("submit_grant");

        // Manakin: Next pressed without any decision -> treat as rejection
        // (an explicit JSP-UI "submit_reject" simply falls through below)
        if (!acceptedViaCheckbox && !acceptedViaButton
                && !pressed.equals("submit_reject")
                && pressed.equals(NEXT_BUTTON))
        {
            return STATUS_LICENSE_REJECTED;
        }

        if (acceptedViaCheckbox || acceptedViaButton)
        {
            // License granted
            log.info(LogManager.getHeader(context, "accept_license",
                    subInfo.getSubmissionLogInfo()));

            Item item = subInfo.getSubmissionItem().getItem();
            EPerson submitter = context.getCurrentUser();

            // drop any license accepted on a previous pass through this step
            item.removeDSpaceLicense();

            // attach the collection's license text to the item
            String licenseText = LicenseUtils.getLicenseText(context
                    .getCurrentLocale(), subInfo.getSubmissionItem()
                    .getCollection(), item, submitter);

            LicenseUtils.grantLicense(context, item, licenseText);

            // commit changes
            context.commit();
        }

        // completed without errors
        return STATUS_COMPLETE;
    }

    /**
     * Process the input from the CC license page.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     *
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    protected int processCC(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        String pressed = Util.getSubmitButton(request, NEXT_BUTTON);

        // a non-empty cc_license_url means the user accepted a CC license
        // (also distinguishes a real submission from a progress-bar jump)
        String ccLicenseUrl = request.getParameter("cc_license_url");

        if (pressed.equals("submit_no_cc"))
        {
            // Skipping the CC license - remove any existing license selection
            CreativeCommons.removeLicense(context, subInfo.getSubmissionItem()
                    .getItem());
        }
        else if ((ccLicenseUrl != null) && (ccLicenseUrl.length() > 0))
        {
            // save the CC license
            CreativeCommons.setLicense(context, subInfo.getSubmissionItem()
                    .getItem(), ccLicenseUrl);
        }

        // commit changes
        context.commit();

        // completed without errors
        return STATUS_COMPLETE;
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This
     * method is used to build the progress bar.
     * <P>
     * When Creative Commons licensing is enabled this step has two pages
     * (CC page + DSpace license page); otherwise just one.
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        return CreativeCommons.isEnabled() ? 2 : 1;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

/**
 * A simple step to use when the initial-questions step should be skipped
 * entirely.
 * <p>
 * This step exists because part of the InitialQuestionStep behaviour is also
 * hardwired into DescribeStep (see JIRA [DS-83] Hardcoded behaviour of
 * Initial question step in the submission).
 * </p>
 *
 * @see org.dspace.submit.AbstractProcessingStep
 * @see org.dspace.submit.step.InitialQuestionStep
 * @see org.dspace.submit.step.DescribeStep
 *
 * @author Andrea Bollini
 * @version $Revision: 5844 $
 */
public class SkipInitialQuestionsStep extends AbstractProcessingStep
{
    /**
     * Flag the submission as if the user had answered "yes" to multiple
     * titles, multiple files, and published-before, so the input-form
     * configuration is used exactly as written.
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        InProgressSubmission inProgress = subInfo.getSubmissionItem();

        inProgress.setMultipleFiles(true);
        inProgress.setMultipleTitles(true);
        inProgress.setPublishedBefore(true);
        inProgress.update();

        return STATUS_COMPLETE;
    }

    @Override
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // non-interactive step: a single page
        return 1;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

/**
 * SelectCollection step: processes the collection the user chose for the
 * item being submitted, and creates the corresponding workspace item.
 * <P>
 * Performs all behind-the-scenes processing for this step; its methods are
 * shared by the JSP-UI and the Manakin XML-UI.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class SelectCollectionStep extends AbstractProcessingStep
{
    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    /** status flag: no collection was selected */
    public static final int STATUS_NO_COLLECTION = 1;

    /** status flag: invalid collection or error finding collection */
    public static final int STATUS_INVALID_COLLECTION = 2;

    /**
     * Validate the selected collection, create a new WorkspaceItem in it,
     * and reload the submission configuration for that collection.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // which collection did the user pick?
        int collectionID = Util.getIntParameter(request, "collection");

        // nothing selected: send the user back to the selection page
        if (collectionID < 0)
        {
            return STATUS_NO_COLLECTION;
        }

        // attempt to load the chosen collection
        Collection collection = Collection.find(context, collectionID);

        if (collection == null)
        {
            // ID did not resolve to a real collection
            return STATUS_INVALID_COLLECTION;
        }

        // create a fresh Workspace Item in the chosen collection
        WorkspaceItem workspaceItem = WorkspaceItem.create(context,
                collection, true);

        // record it in the submission info
        subInfo.setSubmissionItem(workspaceItem);

        // commit changes to database
        context.commit();

        // the submission-process config depends on the chosen collection,
        // so reload it now
        subInfo.reloadSubmissionConfig(request);

        // no errors occurred
        return STATUS_COMPLETE;
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This
     * method is used to build the progress bar.
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // "select a collection" is always a single page
        return 1;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.submit.step; import java.io.IOException; import java.sql.SQLException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.dspace.app.util.SubmissionInfo; import org.dspace.submit.AbstractProcessingStep; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.MetadataSchema; import org.dspace.content.WorkspaceItem; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.workflow.WorkflowManager; import proj.oceandocs.citation.CitationManager; /** * This is the class which defines what happens once a submission completes! * <P> * This class performs all the behind-the-scenes processing that * this particular step requires. This class's methods are utilized * by both the JSP-UI and the Manakin XML-UI * <P> * This step is non-interactive (i.e. no user interface), and simply performs * the processing that is necessary after a submission has been completed! 
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class CompleteStep extends AbstractProcessingStep
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CompleteStep.class);

    /**
     * Do any processing of the information input by the user, and/or perform
     * step processing (if no user interaction required)
     * <P>
     * It is this method's job to save any data to the underlying database, as
     * necessary, and return error messages (if any) which can then be processed
     * by the appropriate user interface (JSP-UI or XML-UI)
     * <P>
     * NOTE: If this step is a non-interactive step (i.e. requires no UI), then
     * it should perform *all* of its processing in this method!
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // The Submission is COMPLETE!!
        log.info(LogManager.getHeader(context, "submission_complete",
                "Completed submission with id="
                        + subInfo.getSubmissionItem().getID()));

        // Start the workflow for this Submission.
        // The success flag gates commit-vs-rollback in the finally block,
        // so it must only be set after ALL of the calls below succeed.
        boolean success = false;
        try
        {
            WorkflowManager.start(context, (WorkspaceItem) subInfo.getSubmissionItem());
            // Post-submission housekeeping on the item; these are custom
            // extensions (proj.oceandocs) -- semantics not visible from here.
            Item item = subInfo.getSubmissionItem().getItem();
            item.updateCitationString();
            item.updateISSN();
            item.updateSubjectFields();
            success = true;
        }
        // NOTE(review): broad catch deliberately wraps *any* failure
        // (including runtime exceptions) into a ServletException for the UI.
        catch (Exception e)
        {
            log.error("Caught exception in submission step: ", e);
            throw new ServletException(e);
        }
        finally
        {
            // commit changes to database on success; otherwise undo the
            // partially-started workflow.
            // NOTE(review): rolling back directly on the raw connection
            // bypasses the Context's own state tracking -- the usual DSpace
            // idiom is context.abort(); confirm this is intentional.
            if (success)
            {
                context.commit();
            }
            else
            {
                context.getDBConnection().rollback();
            }
        }
        return STATUS_COMPLETE;
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This method
     * is used to build the progress bar.
     * <P>
     * This method may just return 1 for most steps (since most steps consist of
     * a single page). But, it should return a number greater than 1 for any
     * "step" which spans across a number of HTML pages. For example, the
     * configurable "Describe" step (configured using input-forms.xml) overrides
     * this method to return the number of pages that are defined by its
     * configuration file.
     * <P>
     * Steps which are non-interactive (i.e. they do not display an interface to
     * the user) should return a value of 1, so that they are only processed
     * once!
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // This class represents the non-interactive processing step
        // that occurs just *before* the final confirmation page!
        // (so it should only be processed once!)
        return 1;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.submit.step; import java.io.IOException; import java.sql.SQLException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.dspace.app.util.SubmissionInfo; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.submit.AbstractProcessingStep; /** * Initial Submission servlet for DSpace. Handles the initial questions which * are asked to users to gather information regarding what metadata needs to be * gathered. * <P> * This class performs all the behind-the-scenes processing that * this particular step requires. 
 This class's methods are utilized
 * by both the JSP-UI and the Manakin XML-UI
 * <P>
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class InitialQuestionsStep extends AbstractProcessingStep
{
    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    // pruning of metadata needs to take place
    public static final int STATUS_VERIFY_PRUNE = 1;

    // pruning was cancelled by user
    public static final int STATUS_CANCEL_PRUNE = 2;

    // user attempted to upload a thesis, when theses are not accepted
    public static final int STATUS_THESIS_REJECTED = 3;

    /**
     * Global flags to determine if we need to prune anything.
     * NOTE(review): these are instance fields mutated by doProcessing(), so
     * this step object is not safe to share across concurrent requests --
     * confirm the framework instantiates one step per submission.
     */
    protected boolean willRemoveTitles = false;

    protected boolean willRemoveDate = false;

    protected boolean willRemoveFiles = false;

    /**
     * Do any processing of the information input by the user, and/or perform
     * step processing (if no user interaction required)
     * <P>
     * It is this method's job to save any data to the underlying database, as
     * necessary, and return error messages (if any) which can then be processed
     * by the appropriate user interface (JSP-UI or XML-UI)
     * <P>
     * NOTE: If this step is a non-interactive step (i.e. requires no UI), then
     * it should perform *all* of its processing in this method!
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // Get the values from the initial questions form
        boolean multipleTitles = Util.getBoolParameter(request,
                "multiple_titles");
        boolean publishedBefore = Util.getBoolParameter(request,
                "published_before");

        boolean multipleFiles = Util.getBoolParameter(request,
                "multiple_files");

        // thesis blocking is only active when the config flag is set
        boolean isThesis = ConfigurationManager
                .getBooleanProperty("webui.submit.blocktheses")
                && Util.getBoolParameter(request, "is_thesis");

        if (subInfo.isInWorkflow())
        {
            // Thesis question does not appear in workflow mode..
            isThesis = false;

            // Pretend "multiple files" is true in workflow mode
            // (There will always be the license file)
            multipleFiles = true;
        }

        // First and foremost - if it's a thesis, reject the submission
        if (isThesis)
        {
            // delete the in-progress item entirely and clear it from the
            // submission state
            WorkspaceItem wi = (WorkspaceItem) subInfo.getSubmissionItem();
            wi.deleteAll();
            subInfo.setSubmissionItem(null);

            // Remember that we've removed a thesis in the session
            request.getSession().setAttribute("removed_thesis", Boolean.TRUE);

            return STATUS_THESIS_REJECTED; // since theses are disabled, throw
                                           // an error!
        }

        // Next, check if we are pruning some existing metadata
        if (request.getParameter("do_not_prune") != null)
        {
            return STATUS_CANCEL_PRUNE; // cancelled pruning!
        }
        else if (request.getParameter("prune") != null)
        {
            // user confirmed: actually remove the now-inapplicable metadata,
            // then fall through to save the new answers below
            processVerifyPrune(context, request, response, subInfo,
                    multipleTitles, publishedBefore, multipleFiles);
        }
        else
        // otherwise, check if pruning is necessary
        {
            // Now check to see if the changes will remove any values
            // (i.e. multiple files, titles or an issue date.)
            if (subInfo.getSubmissionItem() != null)
            {
                // shouldn't need to check if submission is null, but just in case!
                if (!multipleTitles)
                {
                    DCValue[] altTitles = subInfo.getSubmissionItem().getItem()
                            .getDC("title", "alternative", Item.ANY);

                    willRemoveTitles = altTitles.length > 0;
                }

                if (!publishedBefore)
                {
                    // "not published before" invalidates issue date, citation
                    // and publisher -- any existing value triggers the prune
                    // confirmation
                    DCValue[] dateIssued = subInfo.getSubmissionItem().getItem()
                            .getDC("date", "issued", Item.ANY);
                    DCValue[] citation = subInfo.getSubmissionItem().getItem()
                            .getDC("identifier", "citation", Item.ANY);
                    DCValue[] publisher = subInfo.getSubmissionItem().getItem()
                            .getDC("publisher", null, Item.ANY);

                    willRemoveDate = (dateIssued.length > 0)
                            || (citation.length > 0) || (publisher.length > 0);
                }

                if (!multipleFiles)
                {
                    // see if number of bitstreams in "ORIGINAL" bundle > 1
                    // FIXME: Assumes multiple bundles, clean up someday...
                    Bundle[] bundles = subInfo.getSubmissionItem().getItem()
                            .getBundles("ORIGINAL");

                    if (bundles.length > 0)
                    {
                        Bitstream[] bitstreams = bundles[0].getBitstreams();

                        willRemoveFiles = bitstreams.length > 1;
                    }
                }
            }

            // If anything is going to be removed from the item as a result
            // of changing the answer to one of the questions, we need
            // to inform the user and make sure that's OK, before saving!
            if (willRemoveTitles || willRemoveDate || willRemoveFiles)
            {
                // save what we will need to prune to request (for UI to process)
                request.setAttribute("will.remove.titles",
                        Boolean.valueOf(willRemoveTitles));
                request.setAttribute("will.remove.date",
                        Boolean.valueOf(willRemoveDate));
                request.setAttribute("will.remove.files",
                        Boolean.valueOf(willRemoveFiles));

                return STATUS_VERIFY_PRUNE; // we will need to do pruning!
            }
        }

        // If step is complete, save the changes
        subInfo.getSubmissionItem().setMultipleTitles(multipleTitles);
        subInfo.getSubmissionItem().setPublishedBefore(publishedBefore);

        // "Multiple files" irrelevant in workflow mode
        if (!subInfo.isInWorkflow())
        {
            subInfo.getSubmissionItem().setMultipleFiles(multipleFiles);
        }

        // commit all changes to DB
        subInfo.getSubmissionItem().update();
        context.commit();

        return STATUS_COMPLETE; // no errors!
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This method
     * is used to build the progress bar.
     * <P>
     * Steps which are non-interactive (i.e. they do not display an interface to
     * the user) should return a value of 1, so that they are only processed
     * once!
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // always just one page of initial questions
        return 1;
    }

    /**
     * Process input from "verify prune" page: remove metadata/files that the
     * user's new answers have made inapplicable. Does NOT commit; the caller
     * (doProcessing) commits after saving the new answers.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @param multipleTitles
     *            if there is multiple titles
     * @param publishedBefore
     *            if published before
     * @param multipleFiles
     *            if there will be multiple files
     */
    protected void processVerifyPrune(Context context,
            HttpServletRequest request, HttpServletResponse response,
            SubmissionInfo subInfo, boolean multipleTitles,
            boolean publishedBefore, boolean multipleFiles)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // get the item to prune
        Item item = subInfo.getSubmissionItem().getItem();

        // each prune only fires when the answer flipped from yes to no
        if (!multipleTitles && subInfo.getSubmissionItem().hasMultipleTitles())
        {
            item.clearDC("title", "alternative", Item.ANY);
        }

        if (!publishedBefore && subInfo.getSubmissionItem().isPublishedBefore())
        {
            item.clearDC("date", "issued", Item.ANY);
            item.clearDC("identifier", "citation", Item.ANY);
            item.clearDC("publisher", null, Item.ANY);
        }

        if (!multipleFiles && subInfo.getSubmissionItem().hasMultipleFiles())
        {
            // remove all but first bitstream from bundle[0]
            // FIXME: Assumes multiple bundles, clean up someday...
            // (only messes with the first bundle.)
            Bundle[] bundles = item.getBundles("ORIGINAL");

            if (bundles.length > 0)
            {
                Bitstream[] bitstreams = bundles[0].getBitstreams();

                // Remove all but the first bitstream
                for (int i = 1; i < bitstreams.length; i++)
                {
                    bundles[0].removeBitstream(bitstreams[i]);
                }
            }
        }
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.submit.step; import java.io.IOException; import java.sql.SQLException; import java.util.LinkedList; import java.util.List; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInput; import org.dspace.app.util.SubmissionInfo; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DCDate; import org.dspace.content.DCPersonName; import org.dspace.content.DCSeriesNumber; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.authority.MetadataAuthorityManager; import org.dspace.content.authority.ChoiceAuthorityManager; import org.dspace.content.authority.Choices; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.submit.AbstractProcessingStep; /** * Describe step for DSpace submission process. Handles the gathering of * descriptive information (i.e. metadata) for an item being submitted into * DSpace. * <P> * This class performs all the behind-the-scenes processing that * this particular step requires. 
 This class's methods are utilized
 * by both the JSP-UI and the Manakin XML-UI
 * <P>
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class DescribeStep extends AbstractProcessingStep
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(DescribeStep.class);

    /** hash of all submission forms details (lazily loaded, shared statically) */
    private static DCInputsReader inputsReader = null;

    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    // user requested an extra input field to be displayed
    public static final int STATUS_MORE_INPUT_REQUESTED = 1;

    // there were required fields that were not filled out
    public static final int STATUS_MISSING_REQUIRED_FIELDS = 2;

    // the metadata language qualifier
    public static final String LANGUAGE_QUALIFIER = getDefaultLanguageQualifier();

    /** Constructor */
    public DescribeStep() throws ServletException
    {
        // load the DCInputsReader (side effect: populates static inputsReader)
        getInputsReader();
    }

    /**
     * Do any processing of the information input by the user, and/or perform
     * step processing (if no user interaction required)
     * <P>
     * It is this method's job to save any data to the underlying database, as
     * necessary, and return error messages (if any) which can then be processed
     * by the appropriate user interface (JSP-UI or XML-UI)
     * <P>
     * NOTE: If this step is a non-interactive step (i.e. requires no UI), then
     * it should perform *all* of its processing in this method!
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // check what submit button was pressed in User Interface
        String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);

        // get the item and current page
        Item item = subInfo.getSubmissionItem().getItem();
        int currentPage = getCurrentPage(request);

        // lookup applicable inputs for this collection and page
        Collection c = subInfo.getSubmissionItem().getCollection();
        DCInput[] inputs = null;
        try
        {
            inputs = inputsReader.getInputs(c.getHandle()).getPageRows(
                    currentPage - 1,
                    subInfo.getSubmissionItem().hasMultipleTitles(),
                    subInfo.getSubmissionItem().isPublishedBefore());
        }
        catch (DCInputsReaderException e)
        {
            throw new ServletException(e);
        }

        // Step 1:
        // clear out all item metadata defined on this page
        // (readNames/readText/etc. below rely on this -- see bug DS-203)
        for (int i = 0; i < inputs.length; i++)
        {
            // skip fields not visible in the current scope (workflow vs submission)
            if (!inputs[i]
                    .isVisible(subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE
                            : DCInput.SUBMISSION_SCOPE))
            {
                continue;
            }
            String qualifier = inputs[i].getQualifier();
            // a qualdrop field stores values under user-chosen qualifiers,
            // so all qualifiers of the element must be cleared
            if (qualifier == null
                    && inputs[i].getInputType().equals("qualdrop_value"))
            {
                qualifier = Item.ANY;
            }
            item.clearMetadata(inputs[i].getSchema(), inputs[i].getElement(),
                    qualifier, Item.ANY);
        }

        // Clear required-field errors first since missing authority
        // values can add them too.
        clearErrorFields(request);

        // Step 2:
        // now update the item metadata.
        String fieldName;
        boolean moreInput = false;
        for (int j = 0; j < inputs.length; j++)
        {
            if (!inputs[j]
                    .isVisible(subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE
                            : DCInput.SUBMISSION_SCOPE))
            {
                continue;
            }
            String element = inputs[j].getElement();
            String qualifier = inputs[j].getQualifier();
            String schema = inputs[j].getSchema();
            // request parameter base name: schema_element[_qualifier]
            if (qualifier != null && !qualifier.equals(Item.ANY))
            {
                fieldName = schema + "_" + element + '_' + qualifier;
            }
            else
            {
                fieldName = schema + "_" + element;
            }

            String fieldKey = MetadataAuthorityManager.makeFieldKey(schema,
                    element, qualifier);
            ChoiceAuthorityManager cmgr = ChoiceAuthorityManager.getManager();
            String inputType = inputs[j].getInputType();
            // dispatch on the input widget type declared in input-forms.xml
            if (inputType.equals("name"))
            {
                readNames(request, item, schema, element, qualifier, inputs[j]
                        .getRepeatable());
            }
            else if (inputType.equals("date"))
            {
                readDate(request, item, schema, element, qualifier);
            }
            // choice-controlled input with "select" presentation type is
            // always rendered as a dropdown menu
            else if (inputType.equals("dropdown")
                    || inputType.equals("list")
                    || (cmgr.isChoicesConfigured(fieldKey) && "select"
                            .equals(cmgr.getPresentation(fieldKey))))
            {
                String[] vals = request.getParameterValues(fieldName);
                if (vals != null)
                {
                    for (int z = 0; z < vals.length; z++)
                    {
                        if (!vals[z].equals(""))
                        {
                            item.addMetadata(schema, element, qualifier,
                                    LANGUAGE_QUALIFIER, vals[z]);
                        }
                    }
                }
            }
            else if (inputType.equals("series"))
            {
                readSeriesNumbers(request, item, schema, element, qualifier,
                        inputs[j].getRepeatable());
            }
            else if (inputType.equals("qualdrop_value"))
            {
                // parallel lists: qualifier chosen in a dropdown next to each value
                List<String> quals = getRepeatedParameter(request, schema + "_"
                        + element, schema + "_" + element + "_qualifier");
                List<String> vals = getRepeatedParameter(request, schema + "_"
                        + element, schema + "_" + element + "_value");
                for (int z = 0; z < vals.size(); z++)
                {
                    String thisQual = quals.get(z);
                    if ("".equals(thisQual))
                    {
                        thisQual = null;
                    }
                    String thisVal = vals.get(z);
                    // skip the entry being removed via its JSP-UI remove button
                    if (!buttonPressed.equals("submit_" + schema + "_"
                            + element + "_remove_" + z)
                            && !thisVal.equals(""))
                    {
                        item.addMetadata(schema, element, thisQual, null,
                                thisVal);
                    }
                }
            }
            else if ((inputType.equals("onebox"))
                    || (inputType.equals("twobox"))
                    || (inputType.equals("textarea")))
            {
                readText(request, item, schema, element, qualifier, inputs[j]
                        .getRepeatable(), LANGUAGE_QUALIFIER);
            }
            else
            {
                throw new ServletException("Field " + fieldName
                        + " has an unknown input type: " + inputType);
            }

            // determine if more input fields were requested
            if (!moreInput
                    && buttonPressed.equals("submit_" + fieldName + "_add"))
            {
                subInfo.setMoreBoxesFor(fieldName);
                subInfo.setJumpToField(fieldName);
                moreInput = true;
            }
            // was XMLUI's "remove" button pushed?
            else if (buttonPressed.equals("submit_" + fieldName + "_delete"))
            {
                subInfo.setJumpToField(fieldName);
            }
        }

        // Step 3:
        // Check to see if any fields are missing
        // Only check for required fields if the user clicked the "next",
        // "previous", "cancel/save" or "progress bar" button
        if (buttonPressed.equals(NEXT_BUTTON)
                || buttonPressed.startsWith(PROGRESS_BAR_PREFIX)
                || buttonPressed.equals(PREVIOUS_BUTTON)
                || buttonPressed.equals(CANCEL_BUTTON))
        {
            for (int i = 0; i < inputs.length; i++)
            {
                DCValue[] values = item.getMetadata(inputs[i].getSchema(),
                        inputs[i].getElement(), inputs[i].getQualifier(),
                        Item.ANY);

                if (inputs[i].isRequired() && values.length == 0)
                {
                    // since this field is missing add to list of error fields
                    addErrorField(request, getFieldName(inputs[i]));
                }
            }
        }

        // Step 4:
        // Save changes to database
        subInfo.getSubmissionItem().update();

        // commit changes
        context.commit();

        // check for request for more input fields, first
        if (moreInput)
        {
            return STATUS_MORE_INPUT_REQUESTED;
        }
        // if one or more fields errored out, return
        else if (getErrorFields(request) != null
                && getErrorFields(request).size() > 0)
        {
            return STATUS_MISSING_REQUIRED_FIELDS;
        }

        // completed without errors
        return STATUS_COMPLETE;
    }

    /**
     * Retrieves the number of pages that this "step" extends over. This method
     * is used to build the progress bar.
     * <P>
     * This method may just return 1 for most steps (since most steps consist of
     * a single page).
But, it should return a number greater than 1 for any * "step" which spans across a number of HTML pages. For example, the * configurable "Describe" step (configured using input-forms.xml) overrides * this method to return the number of pages that are defined by its * configuration file. * <P> * Steps which are non-interactive (i.e. they do not display an interface to * the user) should return a value of 1, so that they are only processed * once! * * @param request * The HTTP Request * @param subInfo * The current submission information object * * @return the number of pages in this step */ public int getNumberOfPages(HttpServletRequest request, SubmissionInfo subInfo) throws ServletException { // by default, use the "default" collection handle String collectionHandle = DCInputsReader.DEFAULT_COLLECTION; if (subInfo.getSubmissionItem() != null) { collectionHandle = subInfo.getSubmissionItem().getCollection() .getHandle(); } // get number of input pages (i.e. "Describe" pages) try { return getInputsReader().getNumberInputPages(collectionHandle); } catch (DCInputsReaderException e) { throw new ServletException(e); } } /** * * @return the current DCInputsReader */ public static DCInputsReader getInputsReader() throws ServletException { // load inputsReader only the first time if (inputsReader == null) { // read configurable submissions forms data try { inputsReader = new DCInputsReader(); } catch (DCInputsReaderException e) { throw new ServletException(e); } } return inputsReader; } /** * @param filename * file to get the input reader for * @return the current DCInputsReader */ public static DCInputsReader getInputsReader(String filename) throws ServletException { try { inputsReader = new DCInputsReader(filename); } catch (DCInputsReaderException e) { throw new ServletException(e); } return inputsReader; } /** * @return the default language qualifier for metadata */ public static String getDefaultLanguageQualifier() { String language = ""; language = 
ConfigurationManager.getProperty("default.language"); if (StringUtils.isEmpty(language)) { language = "en"; } return language; } // **************************************************************** // **************************************************************** // METHODS FOR FILLING DC FIELDS FROM METADATA FORMS // **************************************************************** // **************************************************************** /** * Set relevant metadata fields in an item from name values in the form. * Some fields are repeatable in the form. If this is the case, and the * field is "dc.contributor.author", the names in the request will be from * the fields as follows: * * dc_contributor_author_last -> last name of first author * dc_contributor_author_first -> first name(s) of first author * dc_contributor_author_last_1 -> last name of second author * dc_contributor_author_first_1 -> first name(s) of second author * * and so on. If the field is unqualified: * * dc_contributor_last -> last name of first contributor * dc_contributor_first -> first name(s) of first contributor * * If the parameter "submit_dc_contributor_author_remove_n" is set, that * value is removed. * * Otherwise the parameters are of the form: * * dc_contributor_author_last dc_contributor_author_first * * The values will be put in separate DCValues, in the form "last name, * first name(s)", ordered as they appear in the list. These will replace * any existing values. 
     *
     * @param request
     *            the request object
     * @param item
     *            the item to update
     * @param schema
     *            the metadata schema
     * @param element
     *            the metadata element
     * @param qualifier
     *            the metadata qualifier, or null if unqualified
     * @param repeated
     *            set to true if the field is repeatable on the form
     */
    protected void readNames(HttpServletRequest request, Item item,
            String schema, String element, String qualifier, boolean repeated)
    {
        String metadataField = MetadataField
                .formKey(schema, element, qualifier);

        String fieldKey = MetadataAuthorityManager.makeFieldKey(schema,
                element, qualifier);
        boolean isAuthorityControlled = MetadataAuthorityManager.getManager()
                .isAuthorityControlled(fieldKey);

        // Names to add -- firsts/lasts/auths/confs are PARALLEL lists:
        // index i of each refers to the same person entry, so removals
        // below must be applied to all four at the same index.
        List<String> firsts = new LinkedList<String>();
        List<String> lasts = new LinkedList<String>();
        List<String> auths = new LinkedList<String>();
        List<String> confs = new LinkedList<String>();

        if (repeated)
        {
            firsts = getRepeatedParameter(request, metadataField,
                    metadataField + "_first");
            lasts = getRepeatedParameter(request, metadataField,
                    metadataField + "_last");
            if (isAuthorityControlled)
            {
                auths = getRepeatedParameter(request, metadataField,
                        metadataField + "_authority");
                confs = getRepeatedParameter(request, metadataField,
                        metadataField + "_confidence");
            }

            // Find out if the relevant "remove" button was pressed
            // TODO: These separate remove buttons are only relevant
            // for DSpace JSP UI, and the code below can be removed
            // once the DSpace JSP UI is obsolete!
            String buttonPressed = Util.getSubmitButton(request, "");
            String removeButton = "submit_" + metadataField + "_remove_";

            if (buttonPressed.startsWith(removeButton))
            {
                // the button name encodes the index of the entry to drop
                int valToRemove = Integer.parseInt(buttonPressed
                        .substring(removeButton.length()));

                firsts.remove(valToRemove);
                lasts.remove(valToRemove);
                if (isAuthorityControlled)
                {
                    auths.remove(valToRemove);
                    confs.remove(valToRemove);
                }
            }
        }
        else
        {
            // Just a single name
            String lastName = request.getParameter(metadataField + "_last");
            String firstNames = request.getParameter(metadataField + "_first");
            String authority = request.getParameter(metadataField
                    + "_authority");
            String confidence = request.getParameter(metadataField
                    + "_confidence");
            if (lastName != null)
            {
                lasts.add(lastName);
            }
            if (firstNames != null)
            {
                firsts.add(firstNames);
            }
            auths.add(authority == null ? "" : authority);
            confs.add(confidence == null ? "" : confidence);
        }

        // Remove existing values, already done in doProcessing see also bug DS-203
        // item.clearMetadata(schema, element, qualifier, Item.ANY);

        // Put the names in the correct form
        for (int i = 0; i < lasts.size(); i++)
        {
            String f = firsts.get(i);
            String l = lasts.get(i);

            // only add if lastname is non-empty
            if ((l != null) && !((l.trim()).equals("")))
            {
                // Ensure first name non-null
                if (f == null)
                {
                    f = "";
                }

                // If there is a comma in the last name, we take everything
                // after that comma, and add it to the right of the
                // first name
                int comma = l.indexOf(',');

                if (comma >= 0)
                {
                    f = f + l.substring(comma + 1);
                    l = l.substring(0, comma);

                    // Remove leading whitespace from first name
                    while (f.startsWith(" "))
                    {
                        f = f.substring(1);
                    }
                }

                // Add to the database -- unless required authority is missing
                if (isAuthorityControlled)
                {
                    String authKey = auths.size() > i ? auths.get(i) : null;
                    String sconf = (authKey != null && confs.size() > i) ? confs
                            .get(i) : null;
                    if (MetadataAuthorityManager.getManager()
                            .isAuthorityRequired(fieldKey)
                            && (authKey == null || authKey.length() == 0))
                    {
                        // value is skipped entirely; the UI re-renders the
                        // field via the error list instead
                        log.warn("Skipping value of " + metadataField
                                + " because the required Authority key is missing or empty.");
                        addErrorField(request, metadataField);
                    }
                    else
                    {
                        item.addMetadata(schema, element, qualifier, null,
                                new DCPersonName(l, f).toString(), authKey,
                                (sconf != null && sconf.length() > 0) ? Choices
                                        .getConfidenceValue(sconf)
                                        : Choices.CF_ACCEPTED);
                    }
                }
                else
                {
                    item.addMetadata(schema, element, qualifier, null,
                            new DCPersonName(l, f).toString());
                }
            }
        }
    }

    /**
     * Fill out an item's metadata values from a plain standard text field. If
     * the field isn't repeatable, the input field name is called:
     *
     * element_qualifier
     *
     * or for an unqualified element:
     *
     * element
     *
     * Repeated elements are appended with an underscore then an integer. e.g.:
     *
     * dc_title_alternative dc_title_alternative_1
     *
     * The values will be put in separate DCValues, ordered as they appear in
     * the list. These will replace any existing values.
     *
     * @param request
     *            the request object
     * @param item
     *            the item to update
     * @param schema
     *            the short schema name
     * @param element
     *            the metadata element
     * @param qualifier
     *            the metadata qualifier, or null if unqualified
     * @param repeated
     *            set to true if the field is repeatable on the form
     * @param lang
     *            language to set (ISO code)
     */
    protected void readText(HttpServletRequest request, Item item,
            String schema, String element, String qualifier, boolean repeated,
            String lang)
    {
        // FIXME: Of course, language should be part of form, or determined
        // some other way
        String metadataField = MetadataField
                .formKey(schema, element, qualifier);

        String fieldKey = MetadataAuthorityManager.makeFieldKey(schema,
                element, qualifier);
        boolean isAuthorityControlled = MetadataAuthorityManager.getManager()
                .isAuthorityControlled(fieldKey);

        // Values to add -- vals/auths/confs are PARALLEL lists: index i of
        // each refers to the same entry, so removals must hit all three.
        List<String> vals = null;
        List<String> auths = null;
        List<String> confs = null;

        if (repeated)
        {
            vals = getRepeatedParameter(request, metadataField, metadataField);
            if (isAuthorityControlled)
            {
                auths = getRepeatedParameter(request, metadataField,
                        metadataField + "_authority");
                confs = getRepeatedParameter(request, metadataField,
                        metadataField + "_confidence");
            }

            // Find out if the relevant "remove" button was pressed
            // TODO: These separate remove buttons are only relevant
            // for DSpace JSP UI, and the code below can be removed
            // once the DSpace JSP UI is obsolete!
            String buttonPressed = Util.getSubmitButton(request, "");
            String removeButton = "submit_" + metadataField + "_remove_";

            if (buttonPressed.startsWith(removeButton))
            {
                // the button name encodes the index of the entry to drop
                int valToRemove = Integer.parseInt(buttonPressed
                        .substring(removeButton.length()));

                vals.remove(valToRemove);
                if (isAuthorityControlled)
                {
                    auths.remove(valToRemove);
                    confs.remove(valToRemove);
                }
            }
        }
        else
        {
            // Just a single name
            vals = new LinkedList<String>();
            String value = request.getParameter(metadataField);
            if (value != null)
            {
                vals.add(value.trim());
            }
            if (isAuthorityControlled)
            {
                auths = new LinkedList<String>();
                confs = new LinkedList<String>();
                String av = request.getParameter(metadataField + "_authority");
                String cv = request.getParameter(metadataField + "_confidence");
                auths.add(av == null ? "" : av.trim());
                confs.add(cv == null ? "" : cv.trim());
            }
        }

        // Remove existing values, already done in doProcessing see also bug DS-203
        // item.clearMetadata(schema, element, qualifier, Item.ANY);

        // Put the names in the correct form
        for (int i = 0; i < vals.size(); i++)
        {
            // Add to the database if non-empty
            String s = vals.get(i);
            if ((s != null) && !s.equals(""))
            {
                if (isAuthorityControlled)
                {
                    String authKey = auths.size() > i ? auths.get(i) : null;
                    String sconf = (authKey != null && confs.size() > i) ? confs
                            .get(i) : null;
                    if (MetadataAuthorityManager.getManager()
                            .isAuthorityRequired(fieldKey)
                            && (authKey == null || authKey.length() == 0))
                    {
                        // value is skipped entirely; the UI re-renders the
                        // field via the error list instead
                        log.warn("Skipping value of " + metadataField
                                + " because the required Authority key is missing or empty.");
                        addErrorField(request, metadataField);
                    }
                    else
                    {
                        item.addMetadata(schema, element, qualifier, lang, s,
                                authKey,
                                (sconf != null && sconf.length() > 0) ? Choices
                                        .getConfidenceValue(sconf)
                                        : Choices.CF_ACCEPTED);
                    }
                }
                else
                {
                    item.addMetadata(schema, element, qualifier, lang, s);
                }
            }
        }
    }

    /**
     * Fill out a metadata date field with the value from a form.
The date is * taken from the three parameters: * * element_qualifier_year element_qualifier_month element_qualifier_day * * The granularity is determined by the values that are actually set. If the * year isn't set (or is invalid) * * @param request * the request object * @param item * the item to update * @param schema * the metadata schema * @param element * the metadata element * @param qualifier * the metadata qualifier, or null if unqualified * @throws SQLException */ protected void readDate(HttpServletRequest request, Item item, String schema, String element, String qualifier) throws SQLException { String metadataField = MetadataField .formKey(schema, element, qualifier); int year = Util.getIntParameter(request, metadataField + "_year"); int month = Util.getIntParameter(request, metadataField + "_month"); int day = Util.getIntParameter(request, metadataField + "_day"); // FIXME: Probably should be some more validation // Make a standard format date DCDate d = new DCDate(year, month, day, -1, -1, -1); // already done in doProcessing see also bug DS-203 // item.clearMetadata(schema, element, qualifier, Item.ANY); if (year > 0) { // Only put in date if there is one! item.addMetadata(schema, element, qualifier, null, d.toString()); } } /** * Set relevant metadata fields in an item from series/number values in the * form. Some fields are repeatable in the form. If this is the case, and * the field is "relation.ispartof", the names in the request will be from * the fields as follows: * * dc_relation_ispartof_series dc_relation_ispartof_number * dc_relation_ispartof_series_1 dc_relation_ispartof_number_1 * * and so on. If the field is unqualified: * * dc_relation_series dc_relation_number * * Otherwise the parameters are of the form: * * dc_relation_ispartof_series dc_relation_ispartof_number * * The values will be put in separate DCValues, in the form "last name, * first name(s)", ordered as they appear in the list. These will replace * any existing values. 
* * @param request * the request object * @param item * the item to update * @param schema * the metadata schema * @param element * the metadata element * @param qualifier * the metadata qualifier, or null if unqualified * @param repeated * set to true if the field is repeatable on the form */ protected void readSeriesNumbers(HttpServletRequest request, Item item, String schema, String element, String qualifier, boolean repeated) { String metadataField = MetadataField .formKey(schema, element, qualifier); // Names to add List<String> series = new LinkedList<String>(); List<String> numbers = new LinkedList<String>(); if (repeated) { series = getRepeatedParameter(request, metadataField, metadataField + "_series"); numbers = getRepeatedParameter(request, metadataField, metadataField + "_number"); // Find out if the relevant "remove" button was pressed String buttonPressed = Util.getSubmitButton(request, ""); String removeButton = "submit_" + metadataField + "_remove_"; if (buttonPressed.startsWith(removeButton)) { int valToRemove = Integer.parseInt(buttonPressed .substring(removeButton.length())); series.remove(valToRemove); numbers.remove(valToRemove); } } else { // Just a single name String s = request.getParameter(metadataField + "_series"); String n = request.getParameter(metadataField + "_number"); // Only put it in if there was a name present if ((s != null) && !s.equals("")) { // if number is null, just set to a nullstring if (n == null) { n = ""; } series.add(s); numbers.add(n); } } // Remove existing values, already done in doProcessing see also bug DS-203 // item.clearMetadata(schema, element, qualifier, Item.ANY); // Put the names in the correct form for (int i = 0; i < series.size(); i++) { String s = (series.get(i)).trim(); String n = (numbers.get(i)).trim(); // Only add non-empty if (!s.equals("") || !n.equals("")) { item.addMetadata(schema, element, qualifier, null, new DCSeriesNumber(s, n).toString()); } } } /** * Get repeated values from a form. 
If "foo" is passed in as the parameter, * values in the form of parameters "foo", "foo_1", "foo_2", etc. are * returned. * <P> * This method can also handle "composite fields" (metadata fields which may * require multiple params, etc. a first name and last name). * * @param request * the HTTP request containing the form information * @param metadataField * the metadata field which can store repeated values * @param param * the repeated parameter on the page (used to fill out the * metadataField) * * @return a List of Strings */ protected List<String> getRepeatedParameter(HttpServletRequest request, String metadataField, String param) { List<String> vals = new LinkedList<String>(); int i = 1; //start index at the first of the previously entered values boolean foundLast = false; // Iterate through the values in the form. while (!foundLast) { String s = null; //First, add the previously entered values. // This ensures we preserve the order that these values were entered s = request.getParameter(param + "_" + i); // If there are no more previously entered values, // see if there's a new value entered in textbox if (s==null) { s = request.getParameter(param); //this will be the last value added foundLast = true; } // We're only going to add non-null values if (s != null) { boolean addValue = true; // Check to make sure that this value was not selected to be // removed. // (This is for the "remove multiple" option available in // Manakin) String[] selected = request.getParameterValues(metadataField + "_selected"); if (selected != null) { for (int j = 0; j < selected.length; j++) { if (selected[j].equals(metadataField + "_" + i)) { addValue = false; } } } if (addValue) { vals.add(s.trim()); } } i++; } log.debug("getRepeatedParameter: metadataField=" + metadataField + " param=" + metadataField + ", return count = "+vals.size()); return vals; } /** * Return the HTML / DRI field name for the given input. 
* * @param input * @return */ public static String getFieldName(DCInput input) { String dcSchema = input.getSchema(); String dcElement = input.getElement(); String dcQualifier = input.getQualifier(); if (dcQualifier != null && !dcQualifier.equals(Item.ANY)) { return dcSchema + "_" + dcElement + '_' + dcQualifier; } else { return dcSchema + "_" + dcElement; } } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

/**
 * Verify step for DSpace. Handles the user's response to the information
 * presented on the Review/Verify page of the submission process.
 * <P>
 * All behind-the-scenes processing for this step lives in this class; its
 * methods are shared by both the JSP-UI and the Manakin XML-UI.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class VerifyStep extends AbstractProcessingStep
{
    /**
     * Perform this step's processing. The Verify step only re-displays
     * already-entered information, so there is no data to persist and no
     * error condition to report.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return STATUS_COMPLETE — this step can never fail
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // nothing to process/save from the Verify Step.
        return STATUS_COMPLETE;
    }

    /**
     * Report how many pages this step occupies in the submission progress
     * bar. Verify always consists of a single page.
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return always 1
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // always just one page for verify step
        return 1;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

/**
 * A sample Step class, intended as a template for writing new custom
 * submission-step processing classes.
 * <p>
 * The essential methods to override are shown below, but you are free to
 * define additional methods or override others; the classes in the
 * "org.dspace.submit.step" package provide further examples.
 * <P>
 * This class performs all the behind-the-scenes processing that a step
 * requires; its methods are used by both the JSP-UI and the Manakin XML-UI.
 * <P>
 * If you are using the JSP-UI, you must also implement
 * org.dspace.app.webui.submit.JSPStep with the JSP-related methods; see
 * org.dspace.app.webui.submit.step.JSPSampleStep for a corresponding sample.
 *
 * @see org.dspace.app.util.SubmissionConfig
 * @see org.dspace.app.util.SubmissionStepConfig
 * @see org.dspace.submit.AbstractProcessingStep
 *
 * @author Tim Donohue
 * @version $Revision: 5844 $
 */
public class SampleStep extends AbstractProcessingStep
{
    /***************************************************************************
     * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or
     * additional user interaction may be required)
     *
     * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag
     * defined in the JSPStepManager class)
     **************************************************************************/
    public static final int STATUS_USER_INPUT_ERROR = 1;

    /**
     * Perform this step's processing: save any user input to the database and
     * return an error flag when something goes wrong. For a step with user
     * interaction this runs right after the form is submitted; for a
     * non-interactive step ALL of its work belongs in this method.
     * <P>
     * When an error occurs, return a class-level flag constant (such as
     * STATUS_USER_INPUT_ERROR above) so the JSP-UI or XML-UI can decide how
     * to present it.
     *
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        // This sample has nothing to save and no errors to report, so it
        // simply signals success to doPostProcessing().
        return STATUS_COMPLETE;
    }

    /**
     * Report how many pages this step occupies in the submission progress
     * bar.
     * <P>
     * Most steps return 1 (the step appears once in the progress bar). Return
     * a larger value for a step that spans several HTML pages — for example
     * the configurable "Describe" step derives its page count from
     * input-forms.xml. Return 0 to hide the step from the progress bar
     * entirely, which is what non-interactive steps should do.
     *
     * @param request
     *            The HTTP Request
     * @param subInfo
     *            The current submission information object
     *
     * @return the number of pages in this step
     */
    public int getNumberOfPages(HttpServletRequest request,
            SubmissionInfo subInfo) throws ServletException
    {
        // in most cases, you'll want to just return 1
        return 1;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.submit.step; import java.io.IOException; import java.sql.SQLException; import java.util.LinkedList; import java.util.List; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInput; import proj.oceandocs.submission.DCInputSetExt; import proj.oceandocs.submission.DCInputsReaderExt; import org.dspace.app.util.SubmissionInfo; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DCDate; import org.dspace.content.DCPersonName; import org.dspace.content.DCSeriesNumber; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.MetadataField; import org.dspace.content.authority.MetadataAuthorityManager; import org.dspace.content.authority.ChoiceAuthorityManager; import org.dspace.content.authority.Choices; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.submit.AbstractProcessingStep; /** * Describe step for DSpace submission process. Handles the gathering of * descriptive information (i.e. metadata) for an item being submitted into * DSpace. * <P> * This class performs all the behind-the-scenes processing that * this particular step requires. 
This class's methods are utilized * by both the JSP-UI and the Manakin XML-UI * <P> * * @see org.dspace.app.util.SubmissionConfig * @see org.dspace.app.util.SubmissionStepConfig * @see org.dspace.submit.AbstractProcessingStep * * @author Tim Donohue * @version $Revision: 5844 $ */ public class DescribeStepExt extends AbstractProcessingStep { /** log4j logger */ private static Logger log = Logger.getLogger(DescribeStep.class); /** hash of all submission forms details */ private static DCInputsReaderExt inputsReader = null; /*************************************************************************** * STATUS / ERROR FLAGS (returned by doProcessing() if an error occurs or * additional user interaction may be required) * * (Do NOT use status of 0, since it corresponds to STATUS_COMPLETE flag * defined in the JSPStepManager class) **************************************************************************/ // user requested an extra input field to be displayed public static final int STATUS_MORE_INPUT_REQUESTED = 1; // there were required fields that were not filled out public static final int STATUS_MISSING_REQUIRED_FIELDS = 2; // the metadata language qualifier public static final String LANGUAGE_QUALIFIER = getDefaultLanguageQualifier(); public static final String DOCTYPE_BUTTON = "select_doctype"; /** Constructor */ public DescribeStepExt() throws ServletException { //load the DCInputsReader getInputsReader(); } /** * Do any processing of the information input by the user, and/or perform * step processing (if no user interaction required) * <P> * It is this method's job to save any data to the underlying database, as * necessary, and return error messages (if any) which can then be processed * by the appropriate user interface (JSP-UI or XML-UI) * <P> * NOTE: If this step is a non-interactive step (i.e. requires no UI), then * it should perform *all* of its processing in this method! 
     * @param context
     *            current DSpace context
     * @param request
     *            current servlet request object
     * @param response
     *            current servlet response object
     * @param subInfo
     *            submission info object
     * @return Status or error flag which will be processed by
     *         doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
     *         no errors occurred!)
     */
    @Override
    public int doProcessing(Context context, HttpServletRequest request,
            HttpServletResponse response, SubmissionInfo subInfo)
            throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        boolean newDocType = false;
        // check what submit button was pressed in User Interface
        String buttonPressed = Util.getSubmitButton(request, DOCTYPE_BUTTON);
        Collection c = subInfo.getSubmissionItem().getCollection();
        // get the item and current page
        Item item = subInfo.getSubmissionItem().getItem();
        int currentPage = getCurrentPage(request);
        String doctype = request.getParameter(DOCTYPE_BUTTON);
        String formerDocType = null;
        if (DOCTYPE_BUTTON.equals(buttonPressed))
        {
            newDocType = true;
        }
        if (doctype == null)
        {
            doctype = getCurrentDocType(item, c);
            // the document type did not change
            formerDocType = doctype;
        }
        else
        {
            // the document type has changed. Save former document type.
            formerDocType = getCurrentDocType(item, c);
            // the type returned is the type stored in the DB, i.e., the type before it changed
            item.clearMetadata("dc", "type", null, Item.ANY);
            // NOTE(review): Item.ANY ("*") is passed here as the language
            // argument of the new dc.type value — confirm this wildcard
            // language is intentional rather than null.
            item.addMetadata("dc", "type", null, Item.ANY, doctype);
            // set the new doctype
            item.update();
        }
        // lookup applicable inputs
        List<DCInput> inputs = null;
        try
        {
            // the inputs on the current page of the FORMER document type
            // (possibly the same as the current one) must be updated!
            DCInputSetExt inset = inputsReader.getInputs(c.getHandle(), formerDocType);
            if (inset != null)
            {
                inputs = inset.getPageInputs(currentPage);
            }
        }
        catch (DCInputsReaderException e)
        {
            throw new ServletException(e);
        }

        // Step 1:
        // clear out all item metadata defined on this page
        for (int i = 0; i < inputs.size(); i++)
        {
            // Fields hidden in the current scope (workflow vs. submission)
            // must not be wiped, or their values would be lost.
            if (!inputs.get(i).isVisible(subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE
                    : DCInput.SUBMISSION_SCOPE))
            {
                continue;
            }
            String qualifier = inputs.get(i).getQualifier();
            if (qualifier == null
                    && inputs.get(i).getInputType().equals("qualdrop_value"))
            {
                // qualdrop inputs can write any qualifier, so clear them all
                qualifier = Item.ANY;
            }
            item.clearMetadata(inputs.get(i).getSchema(), inputs.get(i)
                    .getElement(), qualifier, Item.ANY);
        }

        // Clear required-field errors first since missing authority
        // values can add them too.
        clearErrorFields(request);

        // Step 2:
        // now update the item metadata.
        String fieldName;
        boolean moreInput = false;
        for (int j = 0; j < inputs.size(); j++)
        {
            if (!inputs.get(j).isVisible(subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE
                    : DCInput.SUBMISSION_SCOPE))
            {
                continue;
            }
            String element = inputs.get(j).getElement();
            String qualifier = inputs.get(j).getQualifier();
            String schema = inputs.get(j).getSchema();
            // Form parameter names use schema_element[_qualifier].
            if (qualifier != null && !qualifier.equals(Item.ANY))
            {
                fieldName = schema + "_" + element + '_' + qualifier;
            }
            else
            {
                fieldName = schema + "_" + element;
            }
            String language_qual = request.getParameter(fieldName + "_lang");
            String fieldKey = MetadataAuthorityManager.makeFieldKey(schema, element, qualifier);
            ChoiceAuthorityManager cmgr = ChoiceAuthorityManager.getManager();
            String inputType = inputs.get(j).getInputType();
            // Dispatch on the input type to the matching reader.
            if (inputType.equals("name"))
            {
                readNames(request, item, schema, element, qualifier, inputs
                        .get(j).getRepeatable());
            }
            else if (inputType.equals("date"))
            {
                readDate(request, item, schema, element, qualifier);
            }
            // choice-controlled input with "select" presentation type is
            // always rendered as a dropdown menu
            else if (inputType.equals("dropdown") || inputType.equals("list") ||
                     (cmgr.isChoicesConfigured(fieldKey) &&
                      "select".equals(cmgr.getPresentation(fieldKey))))
            {
                String[] vals = request.getParameterValues(fieldName);
                if (vals != null)
                {
                    for (int z = 0; z < vals.length; z++)
                    {
                        if (!vals[z].equals(""))
                        {
                            // Missing per-value language falls back to the
                            // configured default qualifier.
                            item.addMetadata(schema, element, qualifier, language_qual == null ? LANGUAGE_QUALIFIER : language_qual,
                                    vals[z]);
                        }
                    }
                }
            }
            else if (inputType.equals("series"))
            {
                readSeriesNumbers(request, item, schema, element, qualifier,
                        inputs.get(j).getRepeatable());
            }
            else if (inputType.equals("qualdrop_value"))
            {
                // qualdrop values submit parallel qualifier/value lists.
                List<String> quals = getRepeatedParameter(request, schema + "_"
                        + element, schema + "_" + element + "_qualifier");
                List<String> vals = getRepeatedParameter(request, schema + "_"
                        + element, schema + "_" + element + "_value");
                for (int z = 0; z < vals.size(); z++)
                {
                    String thisQual = quals.get(z);
                    if ("".equals(thisQual))
                    {
                        thisQual = null;
                    }
                    String thisVal = vals.get(z);
                    // Skip the entry whose per-row "remove" button fired.
                    if (!buttonPressed.equals("submit_" + schema + "_"
                            + element + "_remove_" + z)
                            && !thisVal.equals(""))
                    {
                        item.addMetadata(schema, element, thisQual, null,
                                thisVal);
                    }
                }
            }
            else if ((inputType.equals("onebox"))
                    || (inputType.equals("twobox"))
                    || (inputType.equals("textarea")))
            {
                readText(request, item, schema, element, qualifier, inputs
                        .get(j).getRepeatable(), language_qual == null ? LANGUAGE_QUALIFIER : language_qual);
            }
            else
            {
                throw new ServletException("Field " + fieldName
                        + " has an unknown input type: " + inputType);
            }

            // determine if more input fields were requested
            if (!moreInput
                    && buttonPressed.equals("submit_" + fieldName + "_add"))
            {
                subInfo.setMoreBoxesFor(fieldName);
                subInfo.setJumpToField(fieldName);
                moreInput = true;
            }
            // was XMLUI's "remove" button pushed?
            else if (buttonPressed.equals("submit_" + fieldName + "_delete"))
            {
                subInfo.setJumpToField(fieldName);
            }
        }

        // Step 3:
        // Check to see if any fields are missing
        // Only check for required fields if user clicked the "next", the "previous" or the "progress bar" button
        if (buttonPressed.equals(NEXT_BUTTON)
                || buttonPressed.startsWith(PROGRESS_BAR_PREFIX)
                || buttonPressed.equals(PREVIOUS_BUTTON)
                || buttonPressed.equals(CANCEL_BUTTON))
        {
            for (int i = 0; i < inputs.size(); i++)
            {
                DCValue[] values = item.getMetadata(inputs.get(i).getSchema(),
                        inputs.get(i).getElement(), inputs.get(i).getQualifier(), Item.ANY);

                if (inputs.get(i).isRequired() && values.length == 0)
                {
                    // since this field is missing add to list of error fields
                    addErrorField(request, getFieldName(inputs.get(i)));
                }
            }
        }

        // Step 4:
        // Save changes to database
        subInfo.getSubmissionItem().update();

        // commit changes
        context.commit();

        // check for request for more input fields, first
        if (moreInput)
        {
            return STATUS_MORE_INPUT_REQUESTED;
        }
        // if one or more fields errored out, return
        else if (getErrorFields(request) != null && getErrorFields(request).size() > 0)
        {
            return STATUS_MISSING_REQUIRED_FIELDS;
        }
        else if (newDocType)
        {
            return NEW_DOC_TYPE;
        }

        // completed without errors
        return STATUS_COMPLETE;
    }

    /**
     * Gets the document type (value for dc.type) for the given item, or a default one if type absent
     * @param item the item whose type to return
     * @param c the collection the item belongs to
     * @return the document type of the item, or a default one for the given collection
     */
    private String getCurrentDocType(Item item, Collection c)
    {
        String doctype = null;
        DCValue[] itemsTypes = item.getMetadata("dc", "type", null, Item.ANY);
        if (itemsTypes.length > 0)
        {
            // The item already carries a type: use its first value.
            doctype = itemsTypes[0].value;
        }
        else
        {
            // No stored type: fall back to the collection's configured types.
            List<String> types = inputsReader.getTypesListforCollection(c.getHandle());
            if (types.size() > 0)
            {
                // NOTE(review): doctype is always null at this point, so this
                // contains(null) guard is effectively always true unless the
                // list itself contains null — confirm the intent.
                if (!types.contains(doctype))
                {
                    doctype = types.get(0);
                }
            }
            else
            {
                doctype = "";
            }
        }
        return doctype;
    }

    /**
     * Retrieves the number
of pages that this "step" extends over. This method * is used to build the progress bar. * <P> * This method may just return 1 for most steps (since most steps consist of * a single page). But, it should return a number greater than 1 for any * "step" which spans across a number of HTML pages. For example, the * configurable "Describe" step (configured using input-forms.xml) overrides * this method to return the number of pages that are defined by its * configuration file. * <P> * Steps which are non-interactive (i.e. they do not display an interface to * the user) should return a value of 1, so that they are only processed * once! * * @param request * The HTTP Request * @param subInfo * The current submission information object * * @return the number of pages in this step */ @Override public int getNumberOfPages(HttpServletRequest request, SubmissionInfo subInfo) throws ServletException { // by default, use the "default" collection handle String collectionHandle = DCInputsReaderExt.DEFAULT_COLLECTION; if (subInfo.getSubmissionItem() != null) { collectionHandle = subInfo.getSubmissionItem().getCollection().getHandle(); } // get number of input pages (i.e. 
"Describe" pages) try { String documentType = (String) request.getAttribute("submission.doctype"); if (documentType == null) { documentType = request.getParameter(DOCTYPE_BUTTON); if (documentType == null) { List documentsList = inputsReader.getTypesListforCollection(collectionHandle); if (documentsList.size() > 0) { documentType = (String) documentsList.get(0); } } } return inputsReader.getNumberInputPages(collectionHandle, documentType); } catch (DCInputsReaderException e) { throw new ServletException(e); } } /** * * @return the current DCInputsReader */ public static DCInputsReaderExt getInputsReader() throws ServletException { // load inputsReader only the first time if (inputsReader == null) { // read configurable submissions forms data try { inputsReader = new DCInputsReaderExt(); } catch (DCInputsReaderException e) { throw new ServletException(e); } } return inputsReader; } /** * @param filename * file to get the input reader for * @return the current DCInputsReader */ public static DCInputsReaderExt getInputsReader(String filename) throws ServletException { try { inputsReader = new DCInputsReaderExt(filename); } catch (DCInputsReaderException e) { throw new ServletException(e); } return inputsReader; } /** * @return the default language qualifier for metadata */ public static String getDefaultLanguageQualifier() { String language = ""; language = ConfigurationManager.getProperty("default.language"); if (StringUtils.isEmpty(language)) { language = "en"; } return language; } // **************************************************************** // **************************************************************** // METHODS FOR FILLING DC FIELDS FROM METADATA FORMS // **************************************************************** // **************************************************************** /** * Set relevant metadata fields in an item from name values in the form. * Some fields are repeatable in the form. 
 If this is the case, and the
     * field is "dc.contributor.author", the names in the request will be from
     * the fields as follows:
     *
     * dc_contributor_author_last -> last name of first author
     * dc_contributor_author_first -> first name(s) of first author
     * dc_contributor_author_last_1 -> last name of second author
     * dc_contributor_author_first_1 -> first name(s) of second author
     *
     * and so on. If the field is unqualified:
     *
     * dc_contributor_last -> last name of first contributor
     * dc_contributor_first -> first name(s) of first contributor
     *
     * If the parameter "submit_dc_contributor_author_remove_n" is set, that
     * value is removed.
     *
     * Otherwise the parameters are of the form:
     *
     * dc_contributor_author_last dc_contributor_author_first
     *
     * The values will be put in separate DCValues, in the form "last name,
     * first name(s)", ordered as they appear in the list. These will replace
     * any existing values.
     *
     * @param request
     *            the request object
     * @param item
     *            the item to update
     * @param schema
     *            the metadata schema
     * @param element
     *            the metadata element
     * @param qualifier
     *            the metadata qualifier, or null if unqualified
     * @param repeated
     *            set to true if the field is repeatable on the form
     */
    protected void readNames(HttpServletRequest request, Item item,
            String schema, String element, String qualifier, boolean repeated)
    {
        String metadataField = MetadataField.formKey(schema, element, qualifier);

        // Key used to look up authority-control configuration for this field.
        String fieldKey = MetadataAuthorityManager.makeFieldKey(schema, element, qualifier);
        boolean isAuthorityControlled = MetadataAuthorityManager.getManager().isAuthorityControlled(fieldKey);

        // Names to add; the lists are intended to stay index-aligned.
        // NOTE(review): firsts can end up shorter than lasts (e.g. a last
        // name submitted without a first name in the single-value branch),
        // in which case firsts.get(i) below would throw — confirm the form
        // always submits both parameters together.
        List<String> firsts = new LinkedList<String>();
        List<String> lasts = new LinkedList<String>();
        List<String> auths = new LinkedList<String>();
        List<String> confs = new LinkedList<String>();
        List<String> langs = new LinkedList<String>();
        if (repeated)
        {
            firsts = getRepeatedParameter(request, metadataField, metadataField
                    + "_first");
            lasts = getRepeatedParameter(request, metadataField, metadataField
                    + "_last");
            langs = getRepeatedParameter(request, metadataField, metadataField
                    + "_lang");
            if (isAuthorityControlled)
            {
                auths = getRepeatedParameter(request, metadataField, metadataField
                        + "_authority");
                confs = getRepeatedParameter(request, metadataField, metadataField
                        + "_confidence");
            }

            // Find out if the relevant "remove" button was pressed
            // TODO: These separate remove buttons are only relevant
            // for DSpace JSP UI, and the code below can be removed
            // once the DSpace JSP UI is obsolete!
            String buttonPressed = Util.getSubmitButton(request, "");
            String removeButton = "submit_" + metadataField + "_remove_";
            if (buttonPressed.startsWith(removeButton))
            {
                // The button name encodes the index of the name to drop.
                int valToRemove = Integer.parseInt(buttonPressed.substring(removeButton.length()));
                firsts.remove(valToRemove);
                lasts.remove(valToRemove);
                // langs may be shorter than the name lists, so guard it.
                if (valToRemove < langs.size())
                {
                    langs.remove(valToRemove);
                }
                if (isAuthorityControlled)
                {
                    auths.remove(valToRemove);
                    confs.remove(valToRemove);
                }
            }
        }
        else
        {
            // Just a single name
            String lastName = request.getParameter(metadataField + "_last");
            String firstNames = request.getParameter(metadataField + "_first");
            String nameLang = request.getParameter(metadataField + "_lang");
            String authority = request.getParameter(metadataField + "_authority");
            String confidence = request.getParameter(metadataField + "_confidence");
            if (lastName != null)
            {
                lasts.add(lastName);
            }
            if (firstNames != null)
            {
                firsts.add(firstNames);
            }
            if (nameLang != null)
            {
                langs.add(nameLang);
            }
            auths.add(authority == null ? "" : authority);
            confs.add(confidence == null ? "" : confidence);
        }

        // Remove existing values, already done in doProcessing see also bug DS-203
        // item.clearMetadata(schema, element, qualifier, Item.ANY);

        // Put the names in the correct form
        for (int i = 0; i < lasts.size(); i++)
        {
            String f = firsts.get(i);
            String l = lasts.get(i);
            // Default language is the wildcard "*" when none was submitted.
            // NOTE(review): confirm "*" (Item.ANY) is the intended stored
            // language rather than null.
            String ll = "*";
            if (i < langs.size())
            {
                ll = langs.get(i);
            }

            // only add if lastname is non-empty
            if ((l != null) && !((l.trim()).equals("")))
            {
                // Ensure first name non-null
                if (f == null)
                {
                    f = "";
                }

                // If there is a comma in the last name, we take everything
                // after that comma, and add it to the right of the
                // first name
                int comma = l.indexOf(',');
                if (comma >= 0)
                {
                    f = f + l.substring(comma + 1);
                    l = l.substring(0, comma);

                    // Remove leading whitespace from first name
                    while (f.startsWith(" "))
                    {
                        f = f.substring(1);
                    }
                }

                // Add to the database -- unless required authority is missing
                if (isAuthorityControlled)
                {
                    String authKey = auths.size() > i ? auths.get(i) : null;
                    String sconf = (authKey != null && confs.size() > i) ? confs.get(i) : null;
                    // A field that requires authority control is rejected
                    // (flagged as an error) rather than saved without its key.
                    if (MetadataAuthorityManager.getManager().isAuthorityRequired(fieldKey)
                            && (authKey == null || authKey.length() == 0))
                    {
                        log.warn("Skipping value of " + metadataField + " because the required Authority key is missing or empty.");
                        addErrorField(request, metadataField);
                    }
                    else
                    {
                        item.addMetadata(schema, element, qualifier, ll, new DCPersonName(l, f).toString(), authKey,
                                (sconf != null && sconf.length() > 0) ? Choices.getConfidenceValue(sconf) : Choices.CF_ACCEPTED);
                    }
                }
                else
                {
                    item.addMetadata(schema, element, qualifier, ll, new DCPersonName(l, f).toString());
                }
            }
        }
    }

    /**
     * Fill out an item's metadata values from a plain standard text field. If
     * the field isn't repeatable, the input field name is called:
     *
     * element_qualifier
     *
     * or for an unqualified element:
     *
     * element
     *
     * Repeated elements are appended with an underscore then an integer.
e.g.: * * dc_title_alternative dc_title_alternative_1 * * The values will be put in separate DCValues, ordered as they appear in * the list. These will replace any existing values. * * @param request * the request object * @param item * the item to update * @param schema * the short schema name * @param element * the metadata element * @param qualifier * the metadata qualifier, or null if unqualified * @param repeated * set to true if the field is repeatable on the form * @param lang * language to set (ISO code) */ protected void readText(HttpServletRequest request, Item item, String schema, String element, String qualifier, boolean repeated, String lang) { // FIXME: Of course, language should be part of form, or determined // some other way String metadataField = MetadataField.formKey(schema, element, qualifier); String fieldKey = MetadataAuthorityManager.makeFieldKey(schema, element, qualifier); boolean isAuthorityControlled = MetadataAuthorityManager.getManager().isAuthorityControlled(fieldKey); // Values to add List<String> vals = null; List<String> auths = null; List<String> confs = null; List<String> langs = null; if (repeated) { vals = getRepeatedParameter(request, metadataField, metadataField); langs = getRepeatedParameter(request, metadataField, metadataField + "_lang"); if (isAuthorityControlled) { auths = getRepeatedParameter(request, metadataField, metadataField + "_authority"); confs = getRepeatedParameter(request, metadataField, metadataField + "_confidence"); } // Find out if the relevant "remove" button was pressed // TODO: These separate remove buttons are only relevant // for DSpace JSP UI, and the code below can be removed // once the DSpace JSP UI is obsolete! 
String buttonPressed = Util.getSubmitButton(request, ""); String removeButton = "submit_" + metadataField + "_remove_"; if (buttonPressed.startsWith(removeButton)) { int valToRemove = Integer.parseInt(buttonPressed.substring(removeButton.length())); vals.remove(valToRemove); if (valToRemove < langs.size()) { langs.remove(valToRemove); } if (isAuthorityControlled) { auths.remove(valToRemove); confs.remove(valToRemove); } } } else { // Just a single name vals = new LinkedList<String>(); langs = new LinkedList<String>(); String value = request.getParameter(metadataField); String ll = request.getParameter(metadataField + "_lang"); if (value != null) { vals.add(value.trim()); } if (ll != null) { langs.add(ll); } if (isAuthorityControlled) { auths = new LinkedList<String>(); confs = new LinkedList<String>(); String av = request.getParameter(metadataField + "_authority"); String cv = request.getParameter(metadataField + "_confidence"); auths.add(av == null ? "" : av.trim()); confs.add(cv == null ? "" : cv.trim()); } } // Remove existing values, already done in doProcessing see also bug DS-203 // item.clearMetadata(schema, element, qualifier, Item.ANY); // Put the names in the correct form for (int i = 0; i < vals.size(); i++) { // Add to the database if non-empty String s = vals.get(i); String l = lang; if (i < langs.size()) { l = langs.get(i); } if ((s != null) && !s.equals("")) { if (isAuthorityControlled) { String authKey = auths.size() > i ? auths.get(i) : null; String sconf = (authKey != null && confs.size() > i) ? confs.get(i) : null; if (MetadataAuthorityManager.getManager().isAuthorityRequired(fieldKey) && (authKey == null || authKey.length() == 0)) { log.warn("Skipping value of " + metadataField + " because the required Authority key is missing or empty."); addErrorField(request, metadataField); } else { item.addMetadata(schema, element, qualifier, l, s, authKey, (sconf != null && sconf.length() > 0) ? 
Choices.getConfidenceValue(sconf) : Choices.CF_ACCEPTED); } } else { item.addMetadata(schema, element, qualifier, l, s); } } } } /** * Fill out a metadata date field with the value from a form. The date is * taken from the three parameters: * * element_qualifier_year element_qualifier_month element_qualifier_day * * The granularity is determined by the values that are actually set. If the * year isn't set (or is invalid) * * @param request * the request object * @param item * the item to update * @param schema * the metadata schema * @param element * the metadata element * @param qualifier * the metadata qualifier, or null if unqualified * @throws SQLException */ protected void readDate(HttpServletRequest request, Item item, String schema, String element, String qualifier) throws SQLException { String metadataField = MetadataField.formKey(schema, element, qualifier); int year = Util.getIntParameter(request, metadataField + "_year"); int month = Util.getIntParameter(request, metadataField + "_month"); int day = Util.getIntParameter(request, metadataField + "_day"); // FIXME: Probably should be some more validation // Make a standard format date DCDate d = new DCDate(year, month, day, -1, -1, -1); // already done in doProcessing see also bug DS-203 // item.clearMetadata(schema, element, qualifier, Item.ANY); if (year > 0) { // Only put in date if there is one! item.addMetadata(schema, element, qualifier, null, d.toString()); } } /** * Set relevant metadata fields in an item from series/number values in the * form. Some fields are repeatable in the form. If this is the case, and * the field is "relation.ispartof", the names in the request will be from * the fields as follows: * * dc_relation_ispartof_series dc_relation_ispartof_number * dc_relation_ispartof_series_1 dc_relation_ispartof_number_1 * * and so on. 
If the field is unqualified: * * dc_relation_series dc_relation_number * * Otherwise the parameters are of the form: * * dc_relation_ispartof_series dc_relation_ispartof_number * * The values will be put in separate DCValues, in the form "last name, * first name(s)", ordered as they appear in the list. These will replace * any existing values. * * @param request * the request object * @param item * the item to update * @param schema * the metadata schema * @param element * the metadata element * @param qualifier * the metadata qualifier, or null if unqualified * @param repeated * set to true if the field is repeatable on the form */ protected void readSeriesNumbers(HttpServletRequest request, Item item, String schema, String element, String qualifier, boolean repeated) { String metadataField = MetadataField.formKey(schema, element, qualifier); // Names to add List<String> series = new LinkedList<String>(); List<String> numbers = new LinkedList<String>(); if (repeated) { series = getRepeatedParameter(request, metadataField, metadataField + "_series"); numbers = getRepeatedParameter(request, metadataField, metadataField + "_number"); // Find out if the relevant "remove" button was pressed String buttonPressed = Util.getSubmitButton(request, ""); String removeButton = "submit_" + metadataField + "_remove_"; if (buttonPressed.startsWith(removeButton)) { int valToRemove = Integer.parseInt(buttonPressed.substring(removeButton.length())); series.remove(valToRemove); numbers.remove(valToRemove); } } else { // Just a single name String s = request.getParameter(metadataField + "_series"); String n = request.getParameter(metadataField + "_number"); // Only put it in if there was a name present if ((s != null) && !s.equals("")) { // if number is null, just set to a nullstring if (n == null) { n = ""; } series.add(s); numbers.add(n); } } // Remove existing values, already done in doProcessing see also bug DS-203 // item.clearMetadata(schema, element, qualifier, Item.ANY); // 
Put the names in the correct form for (int i = 0; i < series.size(); i++) { String s = (series.get(i)).trim(); String n = (numbers.get(i)).trim(); // Only add non-empty if (!s.equals("") || !n.equals("")) { item.addMetadata(schema, element, qualifier, null, new DCSeriesNumber(s, n).toString()); } } } /** * Get repeated values from a form. If "foo" is passed in as the parameter, * values in the form of parameters "foo", "foo_1", "foo_2", etc. are * returned. * <P> * This method can also handle "composite fields" (metadata fields which may * require multiple params, etc. a first name and last name). * * @param request * the HTTP request containing the form information * @param metadataField * the metadata field which can store repeated values * @param param * the repeated parameter on the page (used to fill out the * metadataField) * * @return a List of Strings */ protected List<String> getRepeatedParameter(HttpServletRequest request, String metadataField, String param) { List<String> vals = new LinkedList<String>(); int i = 1; //start index at the first of the previously entered values boolean foundLast = false; // Iterate through the values in the form. while (!foundLast) { String s = null; //First, add the previously entered values. // This ensures we preserve the order that these values were entered s = request.getParameter(param + "_" + i); // If there are no more previously entered values, // see if there's a new value entered in textbox if (s == null) { s = request.getParameter(param); //this will be the last value added foundLast = true; } // We're only going to add non-null values if (s != null) { boolean addValue = true; // Check to make sure that this value was not selected to be // removed. 
// (This is for the "remove multiple" option available in // Manakin) String[] selected = request.getParameterValues(metadataField + "_selected"); if (selected != null) { for (int j = 0; j < selected.length; j++) { if (selected[j].equals(metadataField + "_" + i)) { addValue = false; } } } if (addValue) { vals.add(s.trim()); } } i++; } log.debug("getRepeatedParameter: metadataField=" + metadataField + " param=" + metadataField + ", return count = " + vals.size()); return vals; } /** * Return the HTML / DRI field name for the given input. * * @param input * @return */ public static String getFieldName(DCInput input) { String dcSchema = input.getSchema(); String dcElement = input.getElement(); String dcQualifier = input.getQualifier(); if (dcQualifier != null && !dcQualifier.equals(Item.ANY)) { return dcSchema + "_" + dcElement + '_' + dcQualifier; } else { return dcSchema + "_" + dcElement; } } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

/**
 * Indicates fatal error in Plugin Manager configuration.
 * <p>
 * This error is only thrown when the effect of a configuration problem
 * (<i>e.g.</i> missing value for a Single Plugin) is likely to leave
 * the DSpace system in an unusable state.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 * @see PluginManager
 */
public class PluginConfigurationError extends Error
{
    // Error is Serializable; declare an explicit serialVersionUID so the
    // serialized form does not change silently between compiler runs.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg Error message text.
     */
    public PluginConfigurationError(String msg)
    {
        super(msg);
    }

    /**
     * Variant that preserves the underlying cause so the original stack
     * trace is not lost when a configuration failure wraps another error.
     *
     * @param msg Error message text.
     * @param cause the underlying cause of the configuration problem.
     */
    public PluginConfigurationError(String msg, Throwable cause)
    {
        super(msg, cause);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.math.BigInteger; import java.rmi.dgc.VMID; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.text.ParseException; import java.util.Arrays; import java.util.Random; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.Date; import java.util.Calendar; import java.util.GregorianCalendar; import java.text.SimpleDateFormat; import java.text.ParseException; import org.apache.log4j.Logger; /** * Utility functions for DSpace. * * @author Peter Breton * @version $Revision: 5844 $ */ public final class Utils { /** log4j logger */ private static Logger log = Logger.getLogger(Utils.class); private static final Pattern DURATION_PATTERN = Pattern .compile("(\\d+)([smhdwy])"); private static final long MS_IN_SECOND = 1000L; private static final long MS_IN_MINUTE = 60000L; private static final long MS_IN_HOUR = 3600000L; private static final long MS_IN_DAY = 86400000L; private static final long MS_IN_WEEK = 604800000L; private static final long MS_IN_YEAR = 31536000000L; private static int counter = 0; private static Random random = new Random(); private static VMID vmid = new VMID(); // for parseISO8601Date private static SimpleDateFormat parseFmt[] = { // first try at parsing, has milliseconds (note General time zone) new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSz"), // second try at parsing, no milliseconds (note General time zone) new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssz"), // finally, try without any timezone (defaults to current TZ) new 
SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSS"), new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss") }; // for formatISO8601Date // output canonical format (note RFC22 time zone, easier to hack) private static SimpleDateFormat outFmtSecond = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ"); // output format with millsecond precision private static SimpleDateFormat outFmtMillisec = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ"); private static Calendar outCal = GregorianCalendar.getInstance(); /** Private Constructor */ private Utils() { } /** * Return an MD5 checksum for data in hex format. * * @param data * The data to checksum. * @return MD5 checksum for the data in hex format. */ public static String getMD5(String data) { return getMD5(data.getBytes()); } /** * Return an MD5 checksum for data in hex format. * * @param data * The data to checksum. * @return MD5 checksum for the data in hex format. */ public static String getMD5(byte[] data) { return toHex(getMD5Bytes(data)); } /** * Return an MD5 checksum for data as a byte array. * * @param data * The data to checksum. * @return MD5 checksum for the data as a byte array. */ public static byte[] getMD5Bytes(byte[] data) { try { MessageDigest digest = MessageDigest.getInstance("MD5"); return digest.digest(data); } catch (NoSuchAlgorithmException nsae) { } // Should never happen return null; } /** * Return a hex representation of the byte array * * @param data * The data to transform. * @return A hex representation of the data. */ public static String toHex(byte[] data) { if ((data == null) || (data.length == 0)) { return null; } StringBuffer result = new StringBuffer(); // This is far from the most efficient way to do things... 
for (int i = 0; i < data.length; i++) { int low = (int) (data[i] & 0x0F); int high = (int) (data[i] & 0xF0); result.append(Integer.toHexString(high).substring(0, 1)); result.append(Integer.toHexString(low)); } return result.toString(); } /** * Generate a unique key. The key is a long (length 38 to 40) sequence of * digits. * * @return A unique key as a long sequence of base-10 digits. */ public static String generateKey() { return new BigInteger(generateBytesKey()).abs().toString(); } /** * Generate a unique key. The key is a 32-character long sequence of hex * digits. * * @return A unique key as a long sequence of hex digits. */ public static String generateHexKey() { return toHex(generateBytesKey()); } /** * Generate a unique key as a byte array. * * @return A unique key as a byte array. */ public static synchronized byte[] generateBytesKey() { byte[] junk = new byte[16]; random.nextBytes(junk); String input = new StringBuffer().append(vmid).append( new java.util.Date()).append(Arrays.toString(junk)).append(counter++).toString(); return getMD5Bytes(input.getBytes()); } // The following two methods are taken from the Jakarta IOUtil class. /** * Copy stream-data from source to destination. This method does not buffer, * flush or close the streams, as to do so would require making non-portable * assumptions about the streams' origin and further use. If you wish to * perform a buffered copy, use {@link #bufferedCopy}. * * @param input * The InputStream to obtain data from. * @param output * The OutputStream to copy data to. 
*/ public static void copy(final InputStream input, final OutputStream output) throws IOException { final int BUFFER_SIZE = 1024 * 4; final byte[] buffer = new byte[BUFFER_SIZE]; while (true) { final int count = input.read(buffer, 0, BUFFER_SIZE); if (-1 == count) { break; } // write out those same bytes output.write(buffer, 0, count); } // needed to flush cache // output.flush(); } /** * Copy stream-data from source to destination, with buffering. This is * equivalent to passing {@link #copy}a * <code>java.io.BufferedInputStream</code> and * <code>java.io.BufferedOutputStream</code> to {@link #copy}, and * flushing the output stream afterwards. The streams are not closed after * the copy. * * @param source * The InputStream to obtain data from. * @param destination * The OutputStream to copy data to. */ public static void bufferedCopy(final InputStream source, final OutputStream destination) throws IOException { final BufferedInputStream input = new BufferedInputStream(source); final BufferedOutputStream output = new BufferedOutputStream( destination); copy(input, output); output.flush(); } /** * Replace characters that could be interpreted as HTML codes with symbolic * references (entities). This function should be called before displaying * any metadata fields that could contain the characters " <", ">", "&", * "'", and double quotation marks. This will effectively disable HTML links * in metadata. * * @param value * the metadata value to be scrubbed for display * * @return the passed-in string, with html special characters replaced with * entities. */ public static String addEntities(String value) { if (value==null || value.length() == 0) { return value; } value = value.replaceAll("&", "&amp;"); value = value.replaceAll("\"", "&quot;"); // actually, &apos; is an XML entity, not in HTML. // that's why it's commented out. 
// value = value.replaceAll("'", "&apos;"); value = value.replaceAll("<", "&lt;"); value = value.replaceAll(">", "&gt;"); return value; } /** * Utility method to parse durations defined as \d+[smhdwy] (seconds, * minutes, hours, days, weeks, years) * * @param duration * specified duration * * @return number of milliseconds equivalent to duration. * * @throws ParseException * if the duration is of incorrect format */ public static long parseDuration(String duration) throws ParseException { Matcher m = DURATION_PATTERN.matcher(duration.trim()); if (!m.matches()) { throw new ParseException("'" + duration + "' is not a valid duration definition", 0); } String units = m.group(2); long multiplier = MS_IN_SECOND; if ("s".equals(units)) { multiplier = MS_IN_SECOND; } else if ("m".equals(units)) { multiplier = MS_IN_MINUTE; } else if ("h".equals(units)) { multiplier = MS_IN_HOUR; } else if ("d".equals(units)) { multiplier = MS_IN_DAY; } else if ("w".equals(units)) { multiplier = MS_IN_WEEK; } else if ("y".equals(units)) { multiplier = MS_IN_YEAR; } else { throw new ParseException(units + " is not a valid time unit (must be 'y', " + "'w', 'd', 'h', 'm' or 's')", duration.indexOf(units)); } long qint = Long.parseLong(m.group(1)); return qint * multiplier; } /** * Translates timestamp from an ISO 8601-standard format, which * is commonly used in XML and RDF documents. * This method is synchronized because it depends on a non-reentrant * static DateFormat (more efficient than creating a new one each call). * * @param s the input string * @return Date object, or null if there is a problem translating. 
*/ public static synchronized Date parseISO8601Date(String s) { // attempt to normalize the timezone to something we can parse; // SimpleDateFormat can't handle "Z" char tzSign = s.charAt(s.length()-6); if (s.endsWith("Z")) { s = s.substring(0, s.length() - 1) + "GMT+00:00"; } // check for trailing timezone else if (tzSign == '-' || tzSign == '+') { s = s.substring(0, s.length() - 6) + "GMT" + s.substring(s.length() - 6); } // try to parse without milliseconds ParseException lastError = null; for (int i = 0; i < parseFmt.length; ++i) { try { return parseFmt[i].parse(s); } catch (ParseException e) { lastError = e; } } if (lastError != null) { log.error("Error parsing date:", lastError); } return null; } /** * Convert a Date to String in the ISO 8601 standard format. * The RFC822 timezone is almost right, still need to insert ":". * This method is synchronized because it depends on a non-reentrant * static DateFormat (more efficient than creating a new one each call). * * @param d the input Date * @return String containing formatted date. */ public static synchronized String formatISO8601Date(Date d) { String result; outCal.setTime(d); if (outCal.get(Calendar.MILLISECOND) == 0) { result = outFmtSecond.format(d); } else { result = outFmtMillisec.format(d); } int rl = result.length(); return result.substring(0, rl-2) + ":" + result.substring(rl-2); } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; import org.apache.log4j.Logger; import java.io.File; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Properties; import javax.activation.DataHandler; import javax.activation.FileDataSource; import javax.mail.Address; import javax.mail.Authenticator; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.PasswordAuthentication; import javax.mail.Session; import javax.mail.Transport; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; /** * Class representing an e-mail message, also used to send e-mails. * <P> * Typical use: * <P> * <code>Email email = ConfigurationManager.getEmail(name);</code><br> * <code>email.addRecipient("foo@bar.com");</code><br> * <code>email.addArgument("John");</code><br> * <code>email.addArgument("On the Testing of DSpace");</code><br> * <code>email.send();</code><br> * <P> * <code>name</code> is the name of an email template in * <code>dspace-dir/config/emails/</code> (which also includes the subject.) * <code>arg0</code> and <code>arg1</code> are arguments to fill out the * message with. * <P> * Emails are formatted using <code>java.text.MessageFormat.</code> * Additionally, comment lines (starting with '#') are stripped, and if a line * starts with "Subject:" the text on the right of the colon is used for the * subject line. 
For example: * <P> * * <pre> * * # This is a comment line which is stripped * # * # Parameters: {0} is a person's name * # {1} is the name of a submission * # * Subject: Example e-mail * * Dear {0}, * * Thank you for sending us your submission &quot;{1}&quot;. * * </pre> * * <P> * If the example code above was used to send this mail, the resulting mail * would have the subject <code>Example e-mail</code> and the body would be: * <P> * * <pre> * * * Dear John, * * Thank you for sending us your submission &quot;On the Testing of DSpace&quot;. * * </pre> * * <P> * Note that parameters like <code>{0}</code> cannot be placed in the subject * of the e-mail; they won't get filled out. * * * @author Robert Tansley * @author Jim Downing - added attachment handling code * @version $Revision: 5844 $ */ public class Email { /* * Implementation note: It might be necessary to add a quick utility method * like "send(to, subject, message)". We'll see how far we get without it - * having all emails as templates in the config allows customisation and * internationalisation. * * Note that everything is stored and the run in send() so that only send() * throws a MessagingException. */ /** The content of the message */ private String content; /** The subject of the message */ private String subject; /** The arguments to fill out */ private List<Object> arguments; /** The recipients */ private List<String> recipients; /** Reply to field, if any */ private String replyTo; private List<FileAttachment> attachments; /** The character set this message will be sent in */ private String charset; private static final Logger log = Logger.getLogger(Email.class); /** * Create a new email message. 
*/ Email() { arguments = new ArrayList<Object>(50); recipients = new ArrayList<String>(50); attachments = new ArrayList<FileAttachment>(10); subject = ""; content = ""; replyTo = null; charset = null; } /** * Add a recipient * * @param email * the recipient's email address */ public void addRecipient(String email) { recipients.add(email); } /** * Set the content of the message. Setting this "resets" the message * formatting -<code>addArgument</code> will start. Comments and any * "Subject:" line must be stripped. * * @param cnt * the content of the message */ void setContent(String cnt) { content = cnt; arguments = new ArrayList<Object>(); } /** * Set the subject of the message * * @param s * the subject of the message */ void setSubject(String s) { subject = s; } /** * Set the reply-to email address * * @param email * the reply-to email address */ public void setReplyTo(String email) { replyTo = email; } /** * Fill out the next argument in the template * * @param arg * the value for the next argument */ public void addArgument(Object arg) { arguments.add(arg); } public void addAttachment(File f, String name) { attachments.add(new FileAttachment(f, name)); } public void setCharset(String cs) { charset = cs; } /** * "Reset" the message. Clears the arguments and recipients, but leaves the * subject and content intact. */ public void reset() { arguments = new ArrayList<Object>(50); recipients = new ArrayList<String>(50); attachments = new ArrayList<FileAttachment>(10); replyTo = null; charset = null; } /** * Sends the email. * * @throws MessagingException * if there was a problem sending the mail. 
*/
    public void send() throws MessagingException
    {
        // Mail server and sender address come from the DSpace configuration.
        String server = ConfigurationManager.getProperty("mail.server");
        String from = ConfigurationManager.getProperty("mail.from.address");
        boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);

        // Honour the kill switch: log and return without contacting the server.
        if (disabled)
        {
            log.info("message not sent due to mail.server.disabled: " + subject);
            return;
        }

        // Set up properties for the mail session
        Properties props = System.getProperties();
        props.put("mail.smtp.host", server);

        // SMTP port defaults to the standard port 25 when not configured.
        String portNo = ConfigurationManager.getProperty("mail.server.port");
        if (portNo == null)
        {
            portNo = "25";
        }
        props.put("mail.smtp.port", portNo.trim());

        // If no character set specified, attempt to retrieve a default
        if (charset == null)
        {
            charset = ConfigurationManager.getProperty("mail.charset");
        }

        // Get the SMTP server authentication information
        String username = ConfigurationManager.getProperty("mail.server.username");
        String password = ConfigurationManager.getProperty("mail.server.password");

        Session session;
        if (username != null)
        {
            props.put("mail.smtp.auth", "true");
            SMTPAuthenticator smtpAuthenticator = new SMTPAuthenticator(username, password);
            // BUG FIX: use getInstance() rather than getDefaultInstance().
            // The default instance is a JVM-wide singleton that is created once
            // and then returned unchanged; the properties and authenticator
            // passed on later calls are ignored, so configuration changes
            // (credentials, host, port) would silently not take effect.
            session = Session.getInstance(props, smtpAuthenticator);
        }
        else
        {
            session = Session.getInstance(props);
        }

        // Set extra configuration properties, format "key1=value1,key2=value2"
        String extras = ConfigurationManager.getProperty("mail.extraproperties");
        if ((extras != null) && (!"".equals(extras.trim())))
        {
            for (String argument : extras.split(","))
            {
                int separator = argument.indexOf('=');
                // Skip malformed entries (no '=') instead of throwing
                // StringIndexOutOfBoundsException from substring(0, -1).
                if (separator >= 0)
                {
                    String key = argument.substring(0, separator).trim();
                    String value = argument.substring(separator + 1).trim();
                    props.put(key, value);
                }
            }
        }

        // Create message
        MimeMessage message = new MimeMessage(session);

        // Set the recipients of the message
        for (String recipient : recipients)
        {
            message.addRecipient(Message.RecipientType.TO, new InternetAddress(recipient));
        }

        // Format the mail message body with the accumulated arguments
        Object[] args = arguments.toArray();
        String fullMessage = MessageFormat.format(content, args);
        Date date = new Date();

        message.setSentDate(date);
        message.setFrom(new InternetAddress(from));

        // Set the subject of the email (may contain parameters)
        String fullSubject = MessageFormat.format(subject, args);
        if (charset != null)
        {
            message.setSubject(fullSubject, charset);
        }
        else
        {
            message.setSubject(fullSubject);
        }

        // Add attachments
        if (attachments.isEmpty())
        {
            // If a character set has been specified, or a default exists
            if (charset != null)
            {
                message.setText(fullMessage, charset);
            }
            else
            {
                message.setText(fullMessage);
            }
        }
        else
        {
            Multipart multipart = new MimeMultipart();

            // create the first part of the email: the formatted message body
            BodyPart messageBodyPart = new MimeBodyPart();
            messageBodyPart.setText(fullMessage);
            multipart.addBodyPart(messageBodyPart);

            // one additional body part per file attachment
            for (FileAttachment f : attachments)
            {
                messageBodyPart = new MimeBodyPart();
                messageBodyPart.setDataHandler(new DataHandler(new FileDataSource(f.file)));
                messageBodyPart.setFileName(f.name);
                multipart.addBodyPart(messageBodyPart);
            }
            message.setContent(multipart);
        }

        if (replyTo != null)
        {
            Address[] replyToAddr = new Address[1];
            replyToAddr[0] = new InternetAddress(replyTo);
            message.setReplyTo(replyToAddr);
        }

        Transport.send(message);
    }

    /**
     * Test method to send an email to check email server settings
     *
     * @param args Command line options
     */
    public static void main(String[] args)
    {
        String to = ConfigurationManager.getProperty("mail.admin");
        String subject = "DSpace test email";
        String server = ConfigurationManager.getProperty("mail.server");
        String url = ConfigurationManager.getProperty("dspace.url");
        Email e = new Email();
        e.setSubject(subject);
        e.addRecipient(to);
        e.content = "This is a test email sent from DSpace: " + url;
        System.out.println("\nAbout to send test email:");
        System.out.println(" - To: " + to);
        System.out.println(" - Subject: " + subject);
        System.out.println(" - Server: " + server);

        try
        {
            e.send();
        }
        catch (MessagingException me)
        {
            System.err.println("\nError sending email:");
            System.err.println(" - Error: " + me);
            System.err.println("\nPlease see the DSpace documentation for assistance.\n");
            System.err.println("\n");
            System.exit(1);
        }
        System.out.println("\nEmail sent successfully!\n");
    }

    /**
     * Utility struct class for handling file attachments.
     *
     * @author ojd20
     */
    private static class FileAttachment
    {
        public FileAttachment(File f, String n)
        {
            this.file = f;
            this.name = n;
        }

        // File on disk to attach
        File file;

        // Name the attachment carries in the outgoing message
        String name;
    }

    /**
     * Inner Class for SMTP authentication information
     */
    private static class SMTPAuthenticator extends Authenticator
    {
        // User name
        private String name;

        // Password
        private String password;

        public SMTPAuthenticator(String n, String p)
        {
            name = n;
            password = p;
        }

        protected PasswordAuthentication getPasswordAuthentication()
        {
            return new PasswordAuthentication(name, password);
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.eperson.EPerson;

import java.io.File;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.StringTokenizer;
import java.util.List;
import java.util.ArrayList;

/**
 * I18nUtil.java
 *
 * Some Utilities for i18n Support.
 * - getting the default Locale for this DSpace Instance
 * - getting all supported Locales for this DSpace Instance
 * - getting email template, help file, input forms for a given Locale
 *
 *
 * @author Bernadette Schlonsok and Claudia Juergen
 *
 * @version 1.0
 */
public class I18nUtil
{
    private static final Logger log = Logger.getLogger(I18nUtil.class);

    // the default Locale of this DSpace Instance
    public static final Locale DEFAULTLOCALE = getDefaultLocale();

    // delimiters between elements of UNIX/POSIX locale spec, e.g. en_US.UTF-8
    private static final String LOCALE_DELIMITERS = " _.";

    /**
     * Gets the default locale as defined in dspace.cfg. If no default locale is
     * defined, the Locale of the JVM is used.
     *
     * @return the default Locale for this DSpace instance
     */
    public static Locale getDefaultLocale()
    {
        // First, try the configured default locale
        Locale defaultLocale = null;
        if (!StringUtils.isEmpty(ConfigurationManager.getProperty("default.locale")))
        {
            defaultLocale = makeLocale(ConfigurationManager.getProperty("default.locale"));
        }

        // Finally, fall back to the Locale of the JVM
        if (defaultLocale == null)
        {
            defaultLocale = Locale.getDefault();
        }

        return defaultLocale;
    }

    // Translate a string locale specification (e.g. "en_US.UTF-8") into Locale.
    // This is needed because the Locale constructor expects args for
    // language, territory, and variant to be separated already.
    private static Locale makeLocale(String localeSpec)
    {
        StringTokenizer st = new StringTokenizer(localeSpec, LOCALE_DELIMITERS);
        int countTokens = st.countTokens();
        switch (countTokens)
        {
            case 1:
                return new Locale(st.nextToken().trim());
            case 2:
                return new Locale(st.nextToken().trim(), st.nextToken().trim());
            case 3:
                return new Locale(st.nextToken().trim(), st.nextToken().trim(),
                        st.nextToken().trim());
        }
        // empty or malformed specification
        return null;
    }

    /**
     * Get the Locale for a specified EPerson. If the language is missing,
     * return the default Locale for the repository.
     *
     * @param ep the EPerson, may be null
     * @return the supported Locale closest to the EPerson's language setting
     */
    public static Locale getEPersonLocale(EPerson ep)
    {
        if (ep == null)
        {
            log.error("No EPerson specified, returning default locale");
            return I18nUtil.getDefaultLocale();
        }

        String lang = ep.getLanguage();
        if (StringUtils.isBlank(lang))
        {
            log.error("No language specified for EPerson " + ep.getID());
            return I18nUtil.getDefaultLocale();
        }

        return I18nUtil.getSupportedLocale(new Locale(lang));
    }

    /**
     * Get the available Locales for the User Interface as defined in
     * dspace.cfg (webui.supported.locales).
     *
     * @return an array of supported Locales; never null — falls back to a
     *         single-element array holding the default locale
     */
    public static Locale[] getSupportedLocales()
    {
        String ll = ConfigurationManager.getProperty("webui.supported.locales");
        if (ll != null)
        {
            return parseLocales(ll);
        }
        else
        {
            Locale[] availableLocales = new Locale[1];
            availableLocales[0] = DEFAULTLOCALE;
            return availableLocales;
        }
    }

    /**
     * Gets the appropriate supported Locale for a given Locale. If no
     * appropriate supported locale is found, the DEFAULTLOCALE is used.
     *
     * Matching is attempted from most to least specific:
     * language_country_variant, then language_country, then language alone.
     *
     * @param locale
     *            Locale to find the corresponding supported Locale for
     * @return Locale for the session according to the locales supported by
     *         this DSpace instance as set in dspace.cfg
     */
    public static Locale getSupportedLocale(Locale locale)
    {
        Locale[] availableLocales = getSupportedLocales();
        boolean isSupported = false;
        Locale supportedLocale = null;
        String testLocale = "";

        if (availableLocales == null)
        {
            supportedLocale = DEFAULTLOCALE;
        }
        else
        {
            // Most specific first: language_country_variant
            if (!locale.getVariant().equals(""))
            {
                testLocale = locale.toString();
                for (int i = 0; i < availableLocales.length; i++)
                {
                    if (testLocale.equalsIgnoreCase(availableLocales[i].toString()))
                    {
                        isSupported = true;
                        supportedLocale = availableLocales[i];
                    }
                }
            }

            // BUG FIX: the previous condition !(isSupported && country.equals(""))
            // re-ran this test even after a variant match succeeded, overwriting
            // the more specific locale with a less specific language_country one.
            if (!isSupported && !locale.getCountry().equals(""))
            {
                testLocale = locale.getLanguage() + "_" + locale.getCountry();
                for (int i = 0; i < availableLocales.length; i++)
                {
                    if (testLocale.equalsIgnoreCase(availableLocales[i].toString()))
                    {
                        isSupported = true;
                        supportedLocale = availableLocales[i];
                    }
                }
            }

            // Least specific: language alone
            if (!isSupported)
            {
                testLocale = locale.getLanguage();
                for (int i = 0; i < availableLocales.length; i++)
                {
                    if (testLocale.equalsIgnoreCase(availableLocales[i].toString()))
                    {
                        isSupported = true;
                        supportedLocale = availableLocales[i];
                    }
                }
            }

            if (!isSupported)
            {
                supportedLocale = DEFAULTLOCALE;
            }
        }
        return supportedLocale;
    }

    /**
     * Get the appropriate localized version of input-forms.xml according to
     * language settings.
     *
     * @param locale
     *            the locale to get the input-forms.xml for
     * @return localized filename for input-forms.xml
     */
    public static String getInputFormsFileName(Locale locale)
    {
        /** Name of the form definition XML file */
        final String FORM_DEF_FILE = "input-forms-extended";
        final String FILE_TYPE = ".xml";
        String defsFilename = ConfigurationManager.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + FORM_DEF_FILE;
        return getFilename(locale, defsFilename, FILE_TYPE);
    }

    /**
     * Get the i18n message string for a given key and use the default Locale.
     *
     * @param key
     *            name of the key to get the message for
     * @return the message string
     */
    public static String getMessage(String key) throws MissingResourceException
    {
        return getMessage(key.trim(), DEFAULTLOCALE);
    }

    /**
     * Get the i18n message string for a given key and locale.
     *
     * @param key
     *            name of the key to get the message for
     * @param locale
     *            locale to get the message for; null means the default locale
     * @return the message string
     */
    public static String getMessage(String key, Locale locale)
            throws MissingResourceException
    {
        if (locale == null)
        {
            locale = DEFAULTLOCALE;
        }
        ResourceBundle messages = ResourceBundle.getBundle("Messages", locale);
        return messages.getString(key.trim());
    }

    /**
     * Get the i18n message string for a given key and context.
     *
     * @param key
     *            name of the key to get the message for
     * @param c
     *            Context having the desired Locale
     * @return the message string
     */
    public static String getMessage(String key, Context c)
            throws MissingResourceException
    {
        return getMessage(key.trim(), c.getCurrentLocale());
    }

    /**
     * Get the appropriate localized version of the default.license according
     * to language settings.
     *
     * @param context
     *            the current DSpace context
     * @return localized filename for default.license
     */
    public static String getDefaultLicense(Context context)
    {
        Locale locale = context.getCurrentLocale();
        /** Name of the default license */
        final String DEF_LIC_FILE = "default";
        final String FILE_TYPE = ".license";
        String defsFilename = ConfigurationManager.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + DEF_LIC_FILE;
        return getFilename(locale, defsFilename, FILE_TYPE);
    }

    /**
     * Get the appropriate localized version of a file according to language
     * settings, e.g. help files in jsp/help/.
     *
     * Tries, in order, fileName_lang_COUNTRY_variant, fileName_lang_COUNTRY,
     * fileName_lang, and finally the unlocalized fileName.
     *
     * @param locale
     *            Locale to get the file for
     * @param fileName
     *            base filename to localize
     * @param fileType
     *            file extension (may be null, treated as empty)
     * @return localized filename of an existing file, or the plain
     *         fileName + fileType if no localized variant exists
     */
    private static String getFilename(Locale locale, String fileName,
            String fileType)
    {
        String localizedFileName = null;
        boolean fileFound = false;
        // candidate names with decreasing specificity
        String fileNameLCV = null; // Language, Country, Variant
        String fileNameLC = null;  // Language, Country
        String fileNameL;          // Language only
        fileNameL = fileName + "_" + locale.getLanguage();

        if (fileType == null)
        {
            fileType = "";
        }

        if (!("".equals(locale.getCountry())))
        {
            fileNameLC = fileName + "_" + locale.getLanguage() + "_"
                    + locale.getCountry();

            if (!("".equals(locale.getVariant())))
            {
                fileNameLCV = fileName + "_" + locale.getLanguage() + "_"
                        + locale.getCountry() + "_" + locale.getVariant();
            }
        }

        if (fileNameLCV != null && !fileFound)
        {
            File fileTmp = new File(fileNameLCV + fileType);
            if (fileTmp.exists())
            {
                fileFound = true;
                localizedFileName = fileNameLCV + fileType;
            }
        }

        if (fileNameLC != null && !fileFound)
        {
            File fileTmp = new File(fileNameLC + fileType);
            if (fileTmp.exists())
            {
                fileFound = true;
                localizedFileName = fileNameLC + fileType;
            }
        }

        if (fileNameL != null && !fileFound)
        {
            File fileTmp = new File(fileNameL + fileType);
            if (fileTmp.exists())
            {
                fileFound = true;
                localizedFileName = fileNameL + fileType;
            }
        }

        if (!fileFound)
        {
            localizedFileName = fileName + fileType;
        }
        return localizedFileName;
    }

    /**
     * Get the appropriate localized version of an email template according to
     * language settings.
     *
     * @param locale
     *            Locale for this request
     * @param name
     *            base name of the email template
     * @return localized filename of an email template
     */
    public static String getEmailFilename(Locale locale, String name)
    {
        String templateFile = ConfigurationManager.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + "emails"
                + File.separator + name;
        return getFilename(locale, templateFile, "");
    }

    /**
     * Creates array of Locales from text list of locale-specifications.
     * Used to parse lists in DSpace configuration properties.
     *
     * @param ll locale list of comma-separated values
     * @return array of locale results, possibly empty
     */
    public static Locale[] parseLocales(String ll)
    {
        List<Locale> resultList = new ArrayList<Locale>();
        for (String ls : ll.trim().split("\\s*,\\s*"))
        {
            Locale lc = makeLocale(ls);
            if (lc != null)
            {
                resultList.add(lc);
            }
        }
        return resultList.toArray(new Locale[resultList.size()]);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.net.MalformedURLException; import java.net.URL; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Properties; import org.apache.log4j.Category; import org.apache.log4j.Logger; import org.apache.log4j.helpers.OptionConverter; /** * Class for reading the DSpace system configuration. The main configuration is * read in as properties from a standard properties file. Email templates and * configuration files for other tools are also be accessed via this class. * <P> * The main configuration is by default read from the <em>resource</em> * <code>/dspace.cfg</code>. * To specify a different configuration, the system property * <code>dspace.configuration</code> should be set to the <em>filename</em> * of the configuration file. * <P> * Other configuration files are read from the <code>config</code> directory * of the DSpace installation directory (specified as the property * <code>dspace.dir</code> in the main configuration file.) * * * @author Robert Tansley * @author Larry Stone - Interpolated values. * @author Mark Diggory - General Improvements to detection, logging and loading. 
* @version $Revision: 5844 $ */ public class ConfigurationManager { /** log4j category */ private static Logger log = Logger.getLogger(ConfigurationManager.class); /** The configuration properties */ private static Properties properties = null; /** module configuration properties */ private static Map<String, Properties> moduleProps = null; /** The default license */ private static String license; // limit of recursive depth of property variable interpolation in // configuration; anything greater than this is very likely to be a loop. private static final int RECURSION_LIMIT = 9; protected ConfigurationManager() { } /** * Identify if DSpace is properly configured * @return boolean true if configured, false otherwise */ public static boolean isConfigured() { return properties != null; } public static boolean isConfigured(String module) { return moduleProps.get(module) != null; } /** * REMOVED - Flushing the properties could be dangerous in the current DSpace state * Need to consider how it will affect in-flight processes * * Discard all current properties - will force a reload from disk when * any properties are requested. */ // public static void flush() // { // properties = null; // } /** * REMOVED - Flushing the properties could be dangerous in the current DSpace state * Need to consider how it will affect in-flight processes * * Discard properties for a module - will force a reload from disk * when any of module's properties are requested * * @param module the module name */ // public static void flush(String module) // { // moduleProps.remove(module); // } /** * Returns all properties in main configuration * * @return properties - all non-modular properties */ public static Properties getProperties() { Properties props = getMutableProperties(); return props == null ? 
null : (Properties)props.clone(); } private static Properties getMutableProperties() { if (properties == null) { loadConfig(null); } return properties; } /** * Returns all properties for a given module * * @param module * the name of the module * @return properties - all module's properties */ public static Properties getProperties(String module) { Properties props = getMutableProperties(module); return props == null ? null : (Properties)props.clone(); } private static Properties getMutableProperties(String module) { Properties retProps = (module != null) ? moduleProps.get(module) : properties; if (retProps == null) { loadModuleConfig(module); retProps = moduleProps.get(module); } return retProps; } /** * Get a configuration property * * @param property * the name of the property * * @return the value of the property, or <code>null</code> if the property * does not exist. */ public static String getProperty(String property) { Properties props = getMutableProperties(); String value = props == null ? null : props.getProperty(property); return (value != null) ? value.trim() : null; } /** * Get a module configuration property value. * * @param module * the name of the module, or <code>null</code> for regular configuration * property * @param property * the name (key) of the property * @return * the value of the property, or <code>null</code> if the * property does not exist */ public static String getProperty(String module, String property) { if (module == null) { return getProperty(property); } String value = null; Properties modProps = getMutableProperties(module); if (modProps != null) { value = modProps.getProperty(property); } if (value == null) { // look in regular properties with module name prepended value = getProperty(module + "." + property); } return (value != null) ? value.trim() : null; } /** * Get a configuration property as an integer * * @param property * the name of the property * * @return the value of the property. 
<code>0</code> is returned if the * property does not exist. To differentiate between this case and * when the property actually is zero, use <code>getProperty</code>. */ public static int getIntProperty(String property) { return getIntProperty(property, 0); } /** * Get a module configuration property as an integer * * @param module * the name of the module * * @param property * the name of the property * * @return the value of the property. <code>0</code> is returned if the * property does not exist. To differentiate between this case and * when the property actually is zero, use <code>getProperty</code>. */ public static int getIntProperty(String module, String property) { return getIntProperty(module, property, 0); } /** * Get a configuration property as an integer, with default * * @param property * the name of the property * * @param defaultValue * value to return if property is not found or is not an Integer. * * @return the value of the property. <code>default</code> is returned if * the property does not exist or is not an Integer. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static int getIntProperty(String property, int defaultValue) { return getIntProperty(null, property, defaultValue); } /** * Get a module configuration property as an integer, with default * * @param module * the name of the module * * @param property * the name of the property * * @param defaultValue * value to return if property is not found or is not an Integer. * * @return the value of the property. <code>default</code> is returned if * the property does not exist or is not an Integer. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. 
*/ public static int getIntProperty(String module, String property, int defaultValue) { String stringValue = getProperty(module, property); int intValue = defaultValue; if (stringValue != null) { try { intValue = Integer.parseInt(stringValue.trim()); } catch (NumberFormatException e) { warn("Warning: Number format error in property: " + property); } } return intValue; } /** * Get a configuration property as a long * * @param property * the name of the property * * @return the value of the property. <code>0</code> is returned if the * property does not exist. To differentiate between this case and * when the property actually is zero, use <code>getProperty</code>. */ public static long getLongProperty(String property) { return getLongProperty(property, 0); } /** * Get a module configuration property as a long * * @param module * the name of the module * @param property * the name of the property * * @return the value of the property. <code>0</code> is returned if the * property does not exist. To differentiate between this case and * when the property actually is zero, use <code>getProperty</code>. */ public static long getLongProperty(String module, String property) { return getLongProperty(module, property, 0); } /** * Get a configuration property as an long, with default * * * @param property * the name of the property * * @param defaultValue * value to return if property is not found or is not a Long. * * @return the value of the property. <code>default</code> is returned if * the property does not exist or is not an Integer. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. 
*/ public static long getLongProperty(String property, int defaultValue) { return getLongProperty(null, property, defaultValue); } /** * Get a configuration property as an long, with default * * @param module the module, or <code>null</code> for regular property * * @param property * the name of the property * * @param defaultValue * value to return if property is not found or is not a Long. * * @return the value of the property. <code>default</code> is returned if * the property does not exist or is not an Integer. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static long getLongProperty(String module, String property, int defaultValue) { String stringValue = getProperty(module, property); long longValue = defaultValue; if (stringValue != null) { try { longValue = Long.parseLong(stringValue.trim()); } catch (NumberFormatException e) { warn("Warning: Number format error in property: " + property); } } return longValue; } /** * Get the License * * @param * licenseFile file name * * @return * license text * */ public static String getLicenseText(String licenseFile) { // Load in default license FileReader fr = null; BufferedReader br = null; try { fr = new FileReader(licenseFile); br = new BufferedReader(fr); String lineIn; license = ""; while ((lineIn = br.readLine()) != null) { license = license + lineIn + '\n'; } } catch (IOException e) { fatal("Can't load configuration", e); // FIXME: Maybe something more graceful here, but with the // configuration we can't do anything throw new IllegalStateException("Failed to read default license.", e); } finally { if (br != null) { try { br.close(); } catch (IOException ioe) { } } if (fr != null) { try { fr.close(); } catch (IOException ioe) { } } } return license; } /** * Get a configuration property as a boolean. True is indicated if the value * of the property is <code>TRUE</code> or <code>YES</code> (case * insensitive.) 
* * @param property * the name of the property * * @return the value of the property. <code>false</code> is returned if * the property does not exist. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static boolean getBooleanProperty(String property) { return getBooleanProperty(property, false); } /** * Get a module configuration property as a boolean. True is indicated if * the value of the property is <code>TRUE</code> or <code>YES</code> (case * insensitive.) * * @param module the module, or <code>null</code> for regular property * * @param property * the name of the property * * @return the value of the property. <code>false</code> is returned if * the property does not exist. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static boolean getBooleanProperty(String module, String property) { return getBooleanProperty(module, property, false); } /** * Get a configuration property as a boolean, with default. * True is indicated if the value * of the property is <code>TRUE</code> or <code>YES</code> (case * insensitive.) * * @param property * the name of the property * * @param defaultValue * value to return if property is not found. * * @return the value of the property. <code>default</code> is returned if * the property does not exist. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static boolean getBooleanProperty(String property, boolean defaultValue) { return getBooleanProperty(null, property, defaultValue); } /** * Get a module configuration property as a boolean, with default. * True is indicated if the value * of the property is <code>TRUE</code> or <code>YES</code> (case * insensitive.) 
* * @param module module, or <code>null</code> for regular property * * @param property * the name of the property * * @param defaultValue * value to return if property is not found. * * @return the value of the property. <code>default</code> is returned if * the property does not exist. To differentiate between this case * and when the property actually is false, use * <code>getProperty</code>. */ public static boolean getBooleanProperty(String module, String property, boolean defaultValue) { String stringValue = getProperty(module, property); if (stringValue != null) { stringValue = stringValue.trim(); return stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("yes"); } else { return defaultValue; } } /** * Returns an enumeration of all the keys in the DSpace configuration * * @return an enumeration of all the keys in the DSpace configuration */ public static Enumeration<?> propertyNames() { return propertyNames(null); } /** * Returns an enumeration of all the keys in a module configuration * * @param module module, or <code>null</code> for regular property * * @return an enumeration of all the keys in the module configuration, * or <code>null</code> if the module does not exist. */ public static Enumeration<?> propertyNames(String module) { Properties props = getProperties(module); return props == null ? null : props.propertyNames(); } /** * Get the template for an email message. The message is suitable for * inserting values using <code>java.text.MessageFormat</code>. * * @param emailFile * full name for the email template, for example "/dspace/config/emails/register". 
* * @return the email object, with the content and subject filled out from * the template * * @throws IOException * if the template couldn't be found, or there was some other * error reading the template */ public static Email getEmail(String emailFile) throws IOException { String charset = null; String subject = ""; StringBuffer contentBuffer = new StringBuffer(); // Read in template BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(emailFile)); boolean more = true; while (more) { String line = reader.readLine(); if (line == null) { more = false; } else if (line.toLowerCase().startsWith("subject:")) { // Extract the first subject line - everything to the right // of the colon, trimmed of whitespace subject = line.substring(8).trim(); } else if (line.toLowerCase().startsWith("charset:")) { // Extract the character set from the email charset = line.substring(8).trim(); } else if (!line.startsWith("#")) { // Add non-comment lines to the content contentBuffer.append(line); contentBuffer.append("\n"); } } } finally { if (reader != null) { reader.close(); } } // Create an email Email email = new Email(); email.setSubject(subject); email.setContent(contentBuffer.toString()); if (charset != null) { email.setCharset(charset); } return email; } /** * Get the site-wide default license that submitters need to grant * * @return the default license */ public static String getDefaultSubmissionLicense() { if (properties == null) { loadConfig(null); } return license; } /** * Get the path for the news files. * */ public static String getNewsFilePath() { String filePath = ConfigurationManager.getProperty("dspace.dir") + File.separator + "config" + File.separator; return filePath; } /** * Reads news from a text file. * * @param newsFile * name of the news file to read in, relative to the news file path. 
*/ public static String readNewsFile(String newsFile) { String fileName = getNewsFilePath(); fileName += newsFile; StringBuilder text = new StringBuilder(); try { // retrieve existing news from file FileInputStream fir = new FileInputStream(fileName); InputStreamReader ir = new InputStreamReader(fir, "UTF-8"); BufferedReader br = new BufferedReader(ir); String lineIn; while ((lineIn = br.readLine()) != null) { text.append(lineIn); } br.close(); } catch (IOException e) { warn("news_read: " + e.getLocalizedMessage()); } return text.toString(); } /** * Writes news to a text file. * * @param newsFile * name of the news file to read in, relative to the news file path. * @param news * the text to be written to the file. */ public static String writeNewsFile(String newsFile, String news) { String fileName = getNewsFilePath(); fileName += newsFile; try { // write the news out to the appropriate file FileOutputStream fos = new FileOutputStream(fileName); OutputStreamWriter osr = new OutputStreamWriter(fos, "UTF-8"); PrintWriter out = new PrintWriter(osr); out.print(news); out.close(); } catch (IOException e) { warn("news_write: " + e.getLocalizedMessage()); } return news; } /** * Writes license to a text file. * * @param licenseFile * name for the file int which license will be written, * relative to the current directory. */ public static void writeLicenseFile(String licenseFile, String newLicense) { try { // write the news out to the appropriate file FileOutputStream fos = new FileOutputStream(licenseFile); OutputStreamWriter osr = new OutputStreamWriter(fos, "UTF-8"); PrintWriter out = new PrintWriter(osr); out.print(newLicense); out.close(); } catch (IOException e) { warn("license_write: " + e.getLocalizedMessage()); } license = newLicense; } private static File loadedFile = null; /** * Return the file that configuration was actually loaded from. Only returns * a valid File after configuration has been loaded. 
* * @deprecated Please remove all direct usage of the configuration file. * @return File naming configuration data file, or null if not loaded yet. */ protected static File getConfigurationFile() { // in case it hasn't been done yet. if (loadedFile == null) { loadConfig(null); } return loadedFile; } private static synchronized void loadModuleConfig(String module) { // try to find it in modules File modFile = null; try { modFile = new File(getProperty("dspace.dir") + File.separator + "config" + File.separator + "modules" + File.separator + module + ".cfg"); if (modFile.exists()) { Properties modProps = new Properties(); InputStream modIS = null; try { modIS = new FileInputStream(modFile); modProps.load(modIS); } finally { if (modIS != null) { modIS.close(); } } for (Enumeration pe = modProps.propertyNames(); pe.hasMoreElements(); ) { String key = (String)pe.nextElement(); String ival = interpolate(key, modProps.getProperty(key), 1); if (ival != null) { modProps.setProperty(key, ival); } } moduleProps.put(module, modProps); } else { // log invalid request warn("Requested configuration module: " + module + " not found"); } } catch (IOException ioE) { fatal("Can't load configuration: " + (modFile == null ? "<unknown>" : modFile.getAbsolutePath()), ioE); } return; } /** * Load the DSpace configuration properties. Only does anything if * properties are not already loaded. Properties are loaded in from the * specified file, or default locations. * * @param configFile * The <code>dspace.cfg</code> configuration file to use, or * <code>null</code> to try default locations */ public static synchronized void loadConfig(String configFile) { if (properties != null) { return; } URL url = null; InputStream is = null; try { String configProperty = null; try { configProperty = System.getProperty("dspace.configuration"); } catch (SecurityException se) { // A security manager may stop us from accessing the system properties. 
// This isn't really a fatal error though, so catch and ignore log.warn("Unable to access system properties, ignoring.", se); } // should only occur after a flush() if (loadedFile != null) { info("Reloading current config file: " + loadedFile.getAbsolutePath()); url = loadedFile.toURI().toURL(); } else if (configFile != null) { info("Loading provided config file: " + configFile); loadedFile = new File(configFile); url = loadedFile.toURI().toURL(); } // Has the default configuration location been overridden? else if (configProperty != null) { info("Loading system provided config property (-Ddspace.configuration): " + configProperty); // Load the overriding configuration loadedFile = new File(configProperty); url = loadedFile.toURI().toURL(); } // Load configuration from default location else { url = ConfigurationManager.class.getResource("/dspace.cfg"); if (url != null) { info("Loading from classloader: " + url); loadedFile = new File(url.getPath()); } } if (url == null) { fatal("Cannot find dspace.cfg"); throw new IllegalStateException("Cannot find dspace.cfg"); } else { properties = new Properties(); moduleProps = new HashMap<String, Properties>(); is = url.openStream(); properties.load(is); // walk values, interpolating any embedded references. 
for (Enumeration<?> pe = properties.propertyNames(); pe.hasMoreElements(); ) { String key = (String)pe.nextElement(); String value = interpolate(key, properties.getProperty(key), 1); if (value != null) { properties.setProperty(key, value); } } } } catch (IOException e) { fatal("Can't load configuration: " + url, e); // FIXME: Maybe something more graceful here, but with the // configuration we can't do anything throw new IllegalStateException("Cannot load configuration: " + url, e); } finally { if (is != null) { try { is.close(); } catch (IOException ioe) { } } } // Load in default license File licenseFile = new File(getProperty("dspace.dir") + File.separator + "config" + File.separator + "default.license"); FileInputStream fir = null; InputStreamReader ir = null; BufferedReader br = null; try { fir = new FileInputStream(licenseFile); ir = new InputStreamReader(fir, "UTF-8"); br = new BufferedReader(ir); String lineIn; license = ""; while ((lineIn = br.readLine()) != null) { license = license + lineIn + '\n'; } br.close(); } catch (IOException e) { fatal("Can't load license: " + licenseFile.toString() , e); // FIXME: Maybe something more graceful here, but with the // configuration we can't do anything throw new IllegalStateException("Cannot load license: " + licenseFile.toString(),e); } finally { if (br != null) { try { br.close(); } catch (IOException ioe) { } } if (ir != null) { try { ir.close(); } catch (IOException ioe) { } } if (fir != null) { try { fir.close(); } catch (IOException ioe) { } } } try { /* * Initialize Logging once ConfigurationManager is initialized. * * This is selection from a property in dspace.cfg, if the property * is absent then nothing will be configured and the application * will use the defaults provided by log4j. 
* * Property format is: * * log.init.config = ${dspace.dir}/config/log4j.properties * or * log.init.config = ${dspace.dir}/config/log4j.xml * * See default log4j initialization documentation here: * http://logging.apache.org/log4j/docs/manual.html * * If there is a problem with the file referred to in * "log.configuration" it needs to be sent to System.err * so do not instantiate another Logging configuration. * */ String dsLogConfiguration = ConfigurationManager.getProperty("log.init.config"); if (dsLogConfiguration == null || System.getProperty("dspace.log.init.disable") != null) { /* * Do nothing if log config not set in dspace.cfg or "dspace.log.init.disable" * system property set. Leave it upto log4j to properly init its logging * via classpath or system properties. */ info("Using default log4j provided log configuration," + "if unintended, check your dspace.cfg for (log.init.config)"); } else { info("Using dspace provided log configuration (log.init.config)"); File logConfigFile = new File(dsLogConfiguration); if(logConfigFile.exists()) { info("Loading: " + dsLogConfiguration); OptionConverter.selectAndConfigure(logConfigFile.toURI() .toURL(), null, org.apache.log4j.LogManager .getLoggerRepository()); } else { info("File does not exist: " + dsLogConfiguration); } } } catch (MalformedURLException e) { fatal("Can't load dspace provided log4j configuration", e); throw new IllegalStateException("Cannot load dspace provided log4j configuration",e); } } /** * Recursively interpolate variable references in value of * property named "key". * @return new value if it contains interpolations, or null * if it had no variable references. 
*/ private static String interpolate(String key, String value, int level) { if (level > RECURSION_LIMIT) { throw new IllegalArgumentException("ConfigurationManager: Too many levels of recursion in configuration property variable interpolation, property=" + key); } //String value = (String)properties.get(key); int from = 0; StringBuffer result = null; while (from < value.length()) { int start = value.indexOf("${", from); if (start >= 0) { int end = value.indexOf('}', start); if (end < 0) { break; } String var = value.substring(start+2, end); if (result == null) { result = new StringBuffer(value.substring(from, start)); } else { result.append(value.substring(from, start)); } if (properties.containsKey(var)) { String ivalue = interpolate(var, properties.getProperty(var), level+1); if (ivalue != null) { result.append(ivalue); properties.setProperty(var, ivalue); } else { result.append((String)properties.getProperty(var)); } } else { log.warn("Interpolation failed in value of property \""+key+ "\", there is no property named \""+var+"\""); } from = end+1; } else { break; } } if (result != null && from < value.length()) { result.append(value.substring(from)); } return (result == null) ? null : result.toString(); } /** * Command-line interface for running configuration tasks. Possible * arguments: * <ul> * <li><code>-property name</code> prints the value of the property * <code>name</code> from <code>dspace.cfg</code> to the standard * output. 
If the property does not exist, nothing is written.</li> * </ul> * * @param argv * command-line arguments */ public static void main(String[] argv) { if ((argv.length == 2) && argv[0].equals("-property")) { String val = getProperty(argv[1]); if (val != null) { System.out.println(val); } else { System.out.println(""); } System.exit(0); } else if ((argv.length == 4) && argv[0].equals("-module") && argv[2].equals("-property")) { String val = getProperty(argv[1], argv[3]); if (val != null) { System.out.println(val); } else { System.out.println(""); } System.exit(0); } else { System.err .println("Usage: ConfigurationManager OPTION\n [-module mod.name] -property prop.name get value of prop.name from module or dspace.cfg"); } System.exit(1); } private static void info(String string) { if (!isLog4jConfigured()) { System.out.println("INFO: " + string); } else { log.info(string); } } private static void warn(String string) { if (!isLog4jConfigured()) { System.out.println("WARN: " + string); } else { log.warn(string); } } private static void fatal(String string, Exception e) { if (!isLog4jConfigured()) { System.out.println("FATAL: " + string); e.printStackTrace(); } else { log.fatal(string, e); } } private static void fatal(String string) { if (!isLog4jConfigured()) { System.out.println("FATAL: " + string); } else { log.fatal(string); } } /* * Only current solution available to detect * if log4j is truly configured. */ private static boolean isLog4jConfigured() { Enumeration<?> en = org.apache.log4j.LogManager.getRootLogger() .getAllAppenders(); if (!(en instanceof org.apache.log4j.helpers.NullEnumeration)) { return true; } else { Enumeration<?> cats = Category.getCurrentCategories(); while (cats.hasMoreElements()) { Category c = (Category) cats.nextElement(); if (!(c.getAllAppenders() instanceof org.apache.log4j.helpers.NullEnumeration)) { return true; } } } return false; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.EmptyStackException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Stack;

import org.apache.log4j.Logger;

import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.event.Dispatcher;
import org.dspace.event.Event;
import org.dspace.event.EventManager;
import org.dspace.storage.rdbms.DatabaseManager;

/**
 * Class representing the context of a particular DSpace operation. This stores
 * information such as the current authenticated user and the database
 * connection being used.
 * <P>
 * Typical use of the context object will involve constructing one, and setting
 * the current user if one is authenticated. Several operations may be performed
 * using the context object. If all goes well, <code>complete</code> is called
 * to commit the changes and free up any resources used by the context. If
 * anything has gone wrong, <code>abort</code> is called to roll back any
 * changes and free up the resources.
 * <P>
 * The context object is also used as a cache for CM API objects
 * (see {@link #fromCache} / {@link #cache}; keys are class name + object id).
 *
 * @version $Revision: 5915 $
 */
public class Context
{
    private static final Logger log = Logger.getLogger(Context.class);

    /** Database connection; null once the context is completed or aborted */
    private Connection connection;

    /** Current user - null means anonymous access */
    private EPerson currentUser;

    /** Current Locale */
    private Locale currentLocale;

    /** Extra log info */
    private String extraLogInfo;

    /** Indicates whether authorisation subsystem should be ignored */
    private boolean ignoreAuth;

    /** A stack with the history of authorisation system check modify */
    private Stack<Boolean> authStateChangeHistory;

    /**
     * A stack with the name of the caller class that modify authorisation
     * system check (only populated when debug logging is enabled)
     */
    private Stack<String> authStateClassCallHistory;

    /** Object cache for this context; key is class name concatenated with id */
    private Map<String, Object> objectCache;

    /** Group IDs of special groups user is a member of */
    private List<Integer> specialGroups;

    /** Content events queued for dispatch at commit; null when none pending */
    private List<Event> events = null;

    /** Event dispatcher name; null means the default dispatcher */
    private String dispName = null;

    /**
     * Construct a new context object. A database connection is opened. No user
     * is authenticated.
     *
     * @exception SQLException
     *                if there was an error obtaining a database connection
     */
    public Context() throws SQLException
    {
        // Obtain a non-auto-committing connection
        connection = DatabaseManager.getConnection();
        connection.setAutoCommit(false);

        currentUser = null;
        currentLocale = I18nUtil.DEFAULTLOCALE;
        extraLogInfo = "";
        ignoreAuth = false;

        objectCache = new HashMap<String, Object>();
        specialGroups = new ArrayList<Integer>();

        authStateChangeHistory = new Stack<Boolean>();
        authStateClassCallHistory = new Stack<String>();
    }

    /**
     * Get the database connection associated with the context
     *
     * @return the database connection
     */
    public Connection getDBConnection()
    {
        return connection;
    }

    /**
     * Set the current user. Authentication must have been performed by the
     * caller - this call does not attempt any authentication.
     *
     * @param user
     *            the new current user, or <code>null</code> if no user is
     *            authenticated
     */
    public void setCurrentUser(EPerson user)
    {
        currentUser = user;
    }

    /**
     * Get the current (authenticated) user
     *
     * @return the current user, or <code>null</code> if no user is
     *         authenticated
     */
    public EPerson getCurrentUser()
    {
        return currentUser;
    }

    /**
     * Gets the current Locale
     *
     * @return Locale the current Locale
     */
    public Locale getCurrentLocale()
    {
        return currentLocale;
    }

    /**
     * set the current Locale
     *
     * @param locale
     *            the current Locale
     */
    public void setCurrentLocale(Locale locale)
    {
        currentLocale = locale;
    }

    /**
     * Find out if the authorisation system should be ignored for this context.
     *
     * @return <code>true</code> if authorisation should be ignored for this
     *         session.
     */
    public boolean ignoreAuthorization()
    {
        return ignoreAuth;
    }

    /**
     * Turn Off the Authorisation System for this context and store this change
     * in a history for future use.
     */
    public void turnOffAuthorisationSystem()
    {
        // remember the state being replaced so restoreAuthSystemState() can
        // undo exactly this change
        authStateChangeHistory.push(ignoreAuth);
        if (log.isDebugEnabled())
        {
            Thread currThread = Thread.currentThread();
            StackTraceElement[] stackTrace = currThread.getStackTrace();
            // NOTE(review): stackTrace[stackTrace.length - 1] is the BOTTOM of
            // the stack (the thread's entry point), not the immediate caller
            // of this method -- verify whether an index near the top of the
            // trace was intended here and in restoreAuthSystemState().
            String caller = stackTrace[stackTrace.length - 1].getClassName();

            authStateClassCallHistory.push(caller);
        }
        ignoreAuth = true;
    }

    /**
     * Restore the previous Authorisation System State. If the state was not
     * changed by the current caller a warning will be displayed in log. Use:
     * <code>
     *     mycontext.turnOffAuthorisationSystem();
     *     some java code that require no authorisation check
     *     mycontext.restoreAuthSystemState();
     * </code> If Context debug is enabled, the correct sequence calling will be
     * checked and a warning will be displayed if not.
     */
    public void restoreAuthSystemState()
    {
        Boolean previousState;
        try
        {
            previousState = authStateChangeHistory.pop();
        }
        catch (EmptyStackException ex)
        {
            // unmatched restore: warn and fall back to auth checks enabled
            log.warn(LogManager.getHeader(this, "restore_auth_sys_state",
                    "not previous state info available "
                            + ex.getLocalizedMessage()));
            previousState = Boolean.FALSE;
        }
        if (log.isDebugEnabled())
        {
            // NOTE(review): if debug logging was enabled only after the
            // matching turnOffAuthorisationSystem() call, this pop() can hit
            // an empty stack and throw -- confirm whether that is acceptable.
            Thread currThread = Thread.currentThread();
            StackTraceElement[] stackTrace = currThread.getStackTrace();
            String caller = stackTrace[stackTrace.length - 1].getClassName();

            String previousCaller = (String) authStateClassCallHistory.pop();

            // if previousCaller is not the current caller *only* log a warning
            if (!previousCaller.equals(caller))
            {
                log
                        .warn(LogManager
                                .getHeader(
                                        this,
                                        "restore_auth_sys_state",
                                        "Class: "
                                                + caller
                                                + " call restore but previous state change made by "
                                                + previousCaller));
            }
        }
        ignoreAuth = previousState.booleanValue();
    }

    /**
     * Specify whether the authorisation system should be ignored for this
     * context. This should be used sparingly.
     *
     * @deprecated use turnOffAuthorisationSystem() to make the change and
     *             restoreAuthSystemState() when the change is no longer required
     * @param b
     *            if <code>true</code>, authorisation should be ignored for this
     *            session.
     */
    public void setIgnoreAuthorization(boolean b)
    {
        ignoreAuth = b;
    }

    /**
     * Set extra information that should be added to any message logged in the
     * scope of this context. An example of this might be the session ID of the
     * current Web user's session:
     * <P>
     * <code>setExtraLogInfo("session_id="+request.getSession().getId());</code>
     *
     * @param info
     *            the extra information to log
     */
    public void setExtraLogInfo(String info)
    {
        extraLogInfo = info;
    }

    /**
     * Get extra information to be logged with message logged in the scope of
     * this context.
     *
     * @return the extra log info - guaranteed non- <code>null</code>
     */
    public String getExtraLogInfo()
    {
        return extraLogInfo;
    }

    /**
     * Close the context object after all of the operations performed in the
     * context have completed successfully. Any transaction with the database is
     * committed.
     *
     * @exception SQLException
     *                if there was an error completing the database transaction
     *                or closing the connection
     */
    public void complete() throws SQLException
    {
        // FIXME: Might be good not to do a commit() if nothing has actually
        // been written using this connection
        try
        {
            // Commit any changes made as part of the transaction
            commit();
        }
        finally
        {
            // Free the connection even if the commit failed; the context is
            // unusable (isValid() == false) from this point on.
            DatabaseManager.freeConnection(connection);
            connection = null;
            clearCache();
        }
    }

    /**
     * Commit any transaction that is currently in progress, but do not close
     * the context.
     *
     * @exception SQLException
     *                if there was an error completing the database transaction
     *                or closing the connection
     */
    public void commit() throws SQLException
    {
        // Commit any changes made as part of the transaction
        Dispatcher dispatcher = null;

        try
        {
            if (events != null)
            {
                if (dispName == null)
                {
                    dispName = EventManager.DEFAULT_DISPATCHER;
                }

                // Acquire the dispatcher BEFORE committing, and dispatch the
                // queued events only AFTER the database commit succeeds.
                dispatcher = EventManager.getDispatcher(dispName);
                connection.commit();
                dispatcher.dispatch(this);
            }
            else
            {
                connection.commit();
            }
        }
        finally
        {
            // The event queue is cleared even if commit/dispatch failed, and
            // the dispatcher is always returned to the pool.
            events = null;
            if (dispatcher != null)
            {
                EventManager.returnDispatcher(dispName, dispatcher);
            }
        }
    }

    /**
     * Select an event dispatcher, <code>null</code> selects the default
     *
     * @param dispatcher name of the dispatcher to use for this context
     */
    public void setDispatcher(String dispatcher)
    {
        if (log.isDebugEnabled())
        {
            log.debug(this.toString() + ": setDispatcher(\"" + dispatcher
                    + "\")");
        }
        dispName = dispatcher;
    }

    /**
     * Add an event to be dispatched when this context is committed.
     *
     * @param event the content event to queue
     */
    public void addEvent(Event event)
    {
        if (events == null)
        {
            events = new ArrayList<Event>();
        }

        events.add(event);
    }

    /**
     * Get the current event list. If there is a separate list of events from
     * already-committed operations combine that with current list.
     *
     * TODO WARNING: events uses an ArrayList, a class not ready for concurrency.
     * Read http://download.oracle.com/javase/6/docs/api/java/util/Collections.html#synchronizedList%28java.util.List%29
     * on how to properly synchronize the class when calling this method
     *
     * @return List of all available events (may be <code>null</code> when
     *         nothing has been queued since the last commit).
     */
    public List<Event> getEvents()
    {
        return events;
    }

    /**
     * Close the context, without committing any of the changes performed using
     * this context. The database connection is freed. No exception is thrown if
     * there is an error freeing the database connection, since this method may
     * be called as part of an error-handling routine where an SQLException has
     * already been thrown.
     */
    public void abort()
    {
        try
        {
            if (!connection.isClosed())
            {
                connection.rollback();
            }
        }
        catch (SQLException se)
        {
            // best-effort rollback: log and continue tearing down
            log.error(se.getMessage(), se);
        }
        finally
        {
            try
            {
                if (!connection.isClosed())
                {
                    DatabaseManager.freeConnection(connection);
                }
            }
            catch (Exception ex)
            {
                log.error("Exception aborting context", ex);
            }
            // Queued events are discarded, never dispatched.
            connection = null;
            events = null;
            clearCache();
        }
    }

    /**
     *
     * Find out if this context is valid. Returns <code>false</code> if this
     * context has been aborted or completed.
     *
     * @return <code>true</code> if the context is still valid, otherwise
     *         <code>false</code>
     */
    public boolean isValid()
    {
        // Only return true if our DB connection is live
        return (connection != null);
    }

    /**
     * Fetch an object from the object cache.
     *
     * @param objectClass
     *            Java Class of object to check for in cache
     * @param id
     *            ID of object in cache
     *
     * @return the object from the cache, or <code>null</code> if it's not
     *         cached.
     */
    public Object fromCache(Class<?> objectClass, int id)
    {
        // cache key convention: class name immediately followed by the id
        String key = objectClass.getName() + id;

        return objectCache.get(key);
    }

    /**
     * Store an object in the object cache.
     *
     * @param o
     *            the object to store
     * @param id
     *            the object's ID
     */
    public void cache(Object o, int id)
    {
        String key = o.getClass().getName() + id;
        objectCache.put(key, o);
    }

    /**
     * Remove an object from the object cache.
     *
     * @param o
     *            the object to remove
     * @param id
     *            the object's ID
     */
    public void removeCached(Object o, int id)
    {
        String key = o.getClass().getName() + id;
        objectCache.remove(key);
    }

    /**
     * Remove all the objects from the object cache
     */
    public void clearCache()
    {
        objectCache.clear();
    }

    /**
     * Get the count of cached objects, which you can use to instrument an
     * application to track whether it is "leaking" heap space by letting cached
     * objects build up. We recommend logging a cache count periodically or
     * episodically at the INFO or DEBUG level, but ONLY when you are diagnosing
     * cache leaks.
     *
     * @return the number of items in the cache
     */
    public int getCacheSize()
    {
        return objectCache.size();
    }

    /**
     * set membership in a special group
     *
     * @param groupID
     *            special group's ID
     */
    public void setSpecialGroup(int groupID)
    {
        specialGroups.add(Integer.valueOf(groupID));

        // System.out.println("Added " + groupID);
    }

    /**
     * test if member of special group
     *
     * @param groupID
     *            ID of special group to test
     * @return true if member
     */
    public boolean inSpecialGroup(int groupID)
    {
        if (specialGroups.contains(Integer.valueOf(groupID)))
        {
            // System.out.println("Contains " + groupID);
            return true;
        }

        return false;
    }

    /**
     * gets an array of all of the special groups that current user is a member
     * of
     *
     * @return array of special groups (resolved via Group.find for each id)
     * @throws SQLException
     */
    public Group[] getSpecialGroups() throws SQLException
    {
        List<Group> myGroups = new ArrayList<Group>();
        for (Integer groupId : specialGroups)
        {
            myGroups.add(Group.find(this, groupId.intValue()));
        }

        return myGroups.toArray(new Group[myGroups.size()]);
    }

    // NOTE(review): relying on finalize() for cleanup is a last resort and is
    // deprecated in later Java versions; callers should always complete() or
    // abort() explicitly.
    protected void finalize() throws Throwable
    {
        /*
         * If a context is garbage-collected, we roll back and free up the
         * database connection if there is one.
         */
        if (connection != null)
        {
            abort();
        }

        super.finalize();
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; import org.dspace.eperson.EPerson; /** * Class for generating standard log header * * @author David Stuve * @author Robert Tansley * @version $Revision: 5844 $ */ public class LogManager { /** * Generate the log header * * @param context * the current Context - safe to pass in <code>null</code> * @param action * string describing the action * @param extrainfo * string with extra information, like parameters * * @return the filled out log header */ public static String getHeader(Context context, String action, String extrainfo) { String email = "anonymous"; String contextExtraInfo; if (context != null) { EPerson e = context.getCurrentUser(); if (e != null) { email = e.getEmail(); } contextExtraInfo = context.getExtraLogInfo(); } else { contextExtraInfo = "no_context"; } StringBuilder result = new StringBuilder(); // Escape everthing but the extra context info because for some crazy reason two fields // are generated inside this entry one for the session id, and another for the ip // address. Everything else should be escaped. result.append(escapeLogField(email)).append(":").append(contextExtraInfo).append(":").append(escapeLogField(action)).append(":").append(escapeLogField(extrainfo)); return result.toString(); } /** * If any string within the log line contains a field separator (:) they need to be escaped so as the * line may be parsed and analysed later. This method will escape a log field. 
* * Single slashes and colons will be escaped so that colons no longer appear in the logs * * @param field The unescaped log field * @return An escaped log field */ public static String escapeLogField(String field) { if (field != null) { field = field.replaceAll("\\\\", "\\\\\\\\;"); field = field.replaceAll(":","\\\\colon;"); } return field; } /** * Unescape a log field. * * @param field The escaped log field * @return the original log field */ public static String unescapeLogField(String field) { if (field != null) { field = field.replaceAll("\\\\colon;", ":"); field = field.replaceAll("\\\\\\\\;","\\\\"); } return field; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

/**
 * Class with constants and matching strings, for DSpace types. These numbers
 * must never be changed!!
 *
 * @author David Stuve
 * @version $Revision: 5844 $
 */
public class Constants
{
    /** Type of bitstream objects */
    public static final int BITSTREAM = 0;

    /** Type of bundle objects */
    public static final int BUNDLE = 1;

    /** Type of item objects */
    public static final int ITEM = 2;

    /** Type of collection objects */
    public static final int COLLECTION = 3;

    /** Type of community objects */
    public static final int COMMUNITY = 4;

    /** DSpace site type */
    public static final int SITE = 5;

    /** Type of eperson groups */
    public static final int GROUP = 6;

    /** Type of individual eperson objects */
    public static final int EPERSON = 7;

    /**
     * lets you look up type names from the type IDs
     */
    public static final String[] typeText = { "BITSTREAM", "BUNDLE", "ITEM",
            "COLLECTION", "COMMUNITY", "SITE", "GROUP", "EPERSON" };

    /**
     * Special Bundle and Bitstream Names:
     */

    /** Magic name of item license, as bitstream in LICENSE_BUNDLE_NAME */
    public static final String LICENSE_BITSTREAM_NAME = "license.txt";

    /** Magic name of bundle containing item license */
    public static final String LICENSE_BUNDLE_NAME = "LICENSE";

    /**
     * Default bundle name for the "original" item content;
     * "derived" content such as thumbnails goes in other bundles.
     */
    public static final String DEFAULT_BUNDLE_NAME = "ORIGINAL";

    /**
     * Name of bundle for user-visible "content" (same as default for now).
     */
    public static final String CONTENT_BUNDLE_NAME = "ORIGINAL";

    /** Bundle name for structured metadata bitstreams. */
    public static final String METADATA_BUNDLE_NAME = "METADATA";

    /** Action of reading, viewing or downloading something */
    public static final int READ = 0;

    /** Action of modifying something */
    public static final int WRITE = 1;

    /**
     * Action of deleting something. Different from removing something from a
     * container. (DELETE is now obsolete)
     *
     * @see #REMOVE
     */
    public static final int DELETE = 2;

    /**
     * Action of adding something to a container. For example, to add an item to
     * a collection, a user must have <code>ADD</code> permission on the
     * collection.
     */
    public static final int ADD = 3;

    /**
     * Action of removing something from a container. Different from deletion.
     *
     * @see #DELETE
     */
    public static final int REMOVE = 4;

    /** Action of performing workflow step 1 */
    public static final int WORKFLOW_STEP_1 = 5;

    /** Action of performing workflow step 2 */
    public static final int WORKFLOW_STEP_2 = 6;

    /** Action of performing workflow step 3 */
    public static final int WORKFLOW_STEP_3 = 7;

    /** Action of performing a workflow */
    public static final int WORKFLOW_ABORT = 8;

    /** Default Read policies for Bitstreams submitted to container */
    public static final int DEFAULT_BITSTREAM_READ = 9;

    /** Default Read policies for Items submitted to container */
    public static final int DEFAULT_ITEM_READ = 10;

    /**
     * @deprecated As of DSpace 1.6, replaced by Constants.ADMIN
     */
    public static final int COLLECTION_ADMIN = 11;

    /**
     * Administrative actions - System Admin, Community Admin, Collection Admin
     */
    public static final int ADMIN = 11;

    /** Position of front page news item -- top box */
    public static final int NEWS_TOP = 0;

    /** Position of front page news item -- sidebar */
    public static final int NEWS_SIDE = 1;

    /**
     * lets you look up action names from the action IDs
     */
    public static final String[] actionText = { "READ", "WRITE",
            "OBSOLETE (DELETE)", "ADD", "REMOVE", "WORKFLOW_STEP_1",
            "WORKFLOW_STEP_2", "WORKFLOW_STEP_3", "WORKFLOW_ABORT",
            "DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN" };

    /**
     * Generating the relevance constants dynamically is simple: just
     * 1 &lt;&lt; TYPE
     */
    public static final int RBITSTREAM = 1 << BITSTREAM;

    public static final int RBUNDLE = 1 << BUNDLE;

    public static final int RITEM = 1 << ITEM;

    public static final int RCOLLECTION = 1 << COLLECTION;

    public static final int RCOMMUNITY = 1 << COMMUNITY;

    /**
     * Array of relevances of actions to objects - used by the UI to only
     * display actions that are relevant to an object type To see if an action
     * is relevant to an object, just OR the relevance type above with the value
     * in actionTypeRelevance[] (To see if READ is relevant to community, just
     * test actionTypeRelevance[READ] | RCOMMUNITY, 0 = irrelevant
     */
    public static final int[] actionTypeRelevance = {
            RBITSTREAM | RBUNDLE | RITEM | RCOLLECTION | RCOMMUNITY, // 0 - READ
            RBITSTREAM | RBUNDLE | RITEM | RCOLLECTION | RCOMMUNITY, // 1 -
                                                                     // WRITE
            0, // 2 - DELETE (obsolete)
            RBUNDLE | RITEM | RCOLLECTION | RCOMMUNITY, // 3 - ADD
            RBUNDLE | RITEM | RCOLLECTION | RCOMMUNITY, // 4 - REMOVE
            0, // 5 - WORKFLOW_STEP_1
            0, // 6 - WORKFLOW_STEP_2
            0, // 7 - WORKFLOW_STEP_3
            0, // 8 - WORKFLOW_ABORT
            RCOLLECTION, // 9 - DEFAULT_BITSTREAM_READ
            RCOLLECTION, // 10 - DEFAULT_ITEM_READ
            RITEM | RCOLLECTION | RCOMMUNITY // 11 - ADMIN
    };

    public static final String DEFAULT_ENCODING = "UTF-8";

    /**
     * If you know the type string, look up the corresponding type ID constant.
     *
     * @param type
     *            String with the name of the type (must be exact match)
     *
     * @return the corresponding type ID, or <code>-1</code> if the type
     *         string is unknown
     */
    public static int getTypeID(String type)
    {
        return indexOf(typeText, type);
    }

    /**
     * If you know the action string, look up the corresponding type ID
     * constant.
     *
     * @param action
     *            String with the name of the action (must be exact match)
     *
     * @return the corresponding action ID, or <code>-1</code> if the action
     *         string is unknown
     */
    public static int getActionID(String action)
    {
        return indexOf(actionText, action);
    }

    /**
     * Linear scan shared by the two lookup methods: position of an exact
     * match of <code>candidate</code> in <code>names</code>, or -1.
     */
    private static int indexOf(String[] names, String candidate)
    {
        for (int idx = 0; idx < names.length; idx++)
        {
            if (names[idx].equals(candidate))
            {
                return idx;
            }
        }

        return -1;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; import java.io.Serializable; import java.util.Map; import java.util.HashMap; import java.util.HashSet; import java.util.Enumeration; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.regex.Pattern; import java.util.regex.Matcher; import java.lang.reflect.Array; import java.io.BufferedReader; import java.io.FileReader; import java.io.File; import java.io.IOException; import org.apache.log4j.Logger; /** * The Plugin Manager is a very simple component container. It creates and * organizes components (plugins), and helps select a plugin in the cases * where there are many possible choices. It also gives some limited * control over the lifecycle of a plugin. It manages three different types * (usage patterns) of plugins: * <p> * <ol><li> Singleton Plugin * <br> There is only one implementation class for the plugin. It is indicated * in the configuration. This type of plugin chooses an implementations of * a service, for the entire system, at configuration time. Your * application just fetches the plugin for that interface and gets the * configured-in choice. * * <p><li> Sequence Plugins * <br> You need a sequence or series of plugins, to implement a mechanism like * StackableAuthenticationMethods or a pipeline, where each plugin is * called in order to contribute its implementation of a process to the * whole. * <p><li> Named Plugins * <br> Use a named plugin when the application has to choose one plugin * implementation out of many available ones. Each implementation is bound * to one or more names (symbolic identifiers) in the configuration. * </ol><p> * The name is just a <code>String</code> to be associated with the * combination of implementation class and interface. 
It may contain * any characters except for comma (,) and equals (=). It may contain * embedded spaces. Comma is a special character used to separate * names in the configuration entry. * * @author Larry Stone * @version $Revision: 5844 $ * @see SelfNamedPlugin */ public class PluginManager { /** log4j category */ private static Logger log = Logger.getLogger(PluginManager.class); /** * Prefixes of names of properties to look for in DSpace Configuration */ private static final String SINGLE_PREFIX = "plugin.single."; private static final String SEQUENCE_PREFIX = "plugin.sequence."; private static final String NAMED_PREFIX = "plugin.named."; private static final String SELFNAMED_PREFIX = "plugin.selfnamed."; private static final String REUSABLE_PREFIX = "plugin.reusable."; // Separator character (from perl $;) to make "two dimensional" // hashtable key out of interface classname and plugin name; // this character separates the words. private static final String SEP = "\034"; // Map of plugin class to "reusable" metric (as Boolean, must be Object) // Key is Class, value is Boolean (true by default). private static Map<Class<Object>, Boolean> cacheMeCache = new HashMap<Class<Object>, Boolean>(); // Predicate -- whether or not to cache this class. Ironically, // the cacheability information is itself cached. private static boolean cacheMe(String module, Class implClass) { if (cacheMeCache.containsKey(implClass)) { return (cacheMeCache.get(implClass)).booleanValue(); } else { String key = REUSABLE_PREFIX+implClass.getName(); boolean reusable = (module != null) ? ConfigurationManager.getBooleanProperty(module, key, true) : ConfigurationManager.getBooleanProperty(key, true); cacheMeCache.put(implClass, Boolean.valueOf(reusable)); return reusable; } } /** * Returns an instance of the singleton (single) plugin implementing * the given interface. 
There must be exactly one single plugin * configured for this interface, otherwise the * <code>PluginConfigurationError</code> is thrown. * <p> * Note that this is the only "get plugin" method which throws an * exception. It is typically used at initialization time to set up * a permanent part of the system so any failure is fatal. * * @param interfaceClass interface Class object * @return instance of plugin * @throws PluginConfigurationError */ public static Object getSinglePlugin(Class interfaceClass) throws PluginConfigurationError, PluginInstantiationException { return getSinglePlugin(null, interfaceClass); } /** * Returns an instance of the singleton (single) plugin implementing * the given interface. There must be exactly one single plugin * configured for this interface, otherwise the * <code>PluginConfigurationError</code> is thrown. * <p> * Note that this is the only "get plugin" method which throws an * exception. It is typically used at initialization time to set up * a permanent part of the system so any failure is fatal. * * @param name of config module, or <code>null</code> for standard location * @param interfaceClass interface Class object * @return instance of plugin * @throws PluginConfigurationError */ public static Object getSinglePlugin(String module, Class interfaceClass) throws PluginConfigurationError, PluginInstantiationException { String iname = interfaceClass.getName(); // configuration format is prefix.<interface> = <classname> String classname = getConfigProperty(module, SINGLE_PREFIX+iname); if (classname != null) { return getAnonymousPlugin(module, classname.trim()); } else { throw new PluginConfigurationError("No Single Plugin configured for interface \""+iname+"\""); } } // cache of config data for Sequence Plugins; format its // <interface-name> -> [ <classname>.. 
] (value is Array) private static Map<String, String[]> sequenceConfig = new HashMap<String, String[]>(); /** * Returns instances of all plugins that implement the interface * intface, in an Array. Returns an empty array if no there are no * matching plugins. * <p> * The order of the plugins in the array is the same as their class * names in the configuration's value field. * * @param intfc interface for which to find plugins. * @return an array of plugin instances; if none are * available an empty array is returned. */ public static Object[] getPluginSequence(Class intfc) throws PluginInstantiationException { return getPluginSequence(null, intfc); } /** * Returns instances of all plugins that implement the interface * intface, in an Array. Returns an empty array if no there are no * matching plugins. * <p> * The order of the plugins in the array is the same as their class * names in the configuration's value field. * * @param module name of config module, or <code>null</code> for standard * @param intfc interface for which to find plugins. * @return an array of plugin instances; if none are * available an empty array is returned. 
*/ public static Object[] getPluginSequence(String module, Class intfc) throws PluginInstantiationException { // cache the configuration for this interface after grovelling it once: // format is prefix.<interface> = <classname> String iname = intfc.getName(); String classname[] = null; if (!sequenceConfig.containsKey(iname)) { String val = getConfigProperty(module, SEQUENCE_PREFIX+iname); if (val == null) { log.warn("No Configuration entry found for Sequence Plugin interface="+iname); return new Object[0]; } classname = val.trim().split("\\s*,\\s*"); sequenceConfig.put(iname, classname); } else { classname = sequenceConfig.get(iname); } Object result[] = (Object[])Array.newInstance(intfc, classname.length); for (int i = 0; i < classname.length; ++i) { log.debug("Adding Sequence plugin for interface= "+iname+", class="+classname[i]); result[i] = getAnonymousPlugin(module, classname[i]); } return result; } // Map of cached (reusable) single plugin instances - class -> instance. private static Map<Serializable, Object> anonymousInstanceCache = new HashMap<Serializable, Object>(); // Get possibly-cached plugin instance for un-named plugin, // this is shared by Single and Sequence plugins. 
private static Object getAnonymousPlugin(String module, String classname) throws PluginInstantiationException { try { Class pluginClass = Class.forName(classname); if (cacheMe(module, pluginClass)) { Object cached = anonymousInstanceCache.get(pluginClass); if (cached == null) { cached = pluginClass.newInstance(); anonymousInstanceCache.put(pluginClass, cached); } return cached; } else { return pluginClass.newInstance(); } } catch (ClassNotFoundException e) { throw new PluginInstantiationException("Cannot load plugin class: " + e.toString(), e); } catch (InstantiationException e) { throw new PluginInstantiationException(e); } catch (IllegalAccessException e) { throw new PluginInstantiationException(e); } } // Map of named plugin classes, [intfc,name] -> class // Also contains intfc -> "marker" to mark when interface has been loaded. private static Map<String, String> namedPluginClasses = new HashMap<String, String>(); // Map of cached (reusable) named plugin instances, [class,name] -> instance private static Map<Serializable, Object> namedInstanceCache = new HashMap<Serializable, Object>(); // load and cache configuration data for the given interface. private static void configureNamedPlugin(String module, String iname) throws ClassNotFoundException { int found = 0; /** * First load the class map for this interface (if not done yet): * key is [intfc,name], value is class. * There is ALSO a "marker key" of "intfc" by itself to show we * loaded this intfc's configuration. */ if (!namedPluginClasses.containsKey(iname)) { // 1. Get classes named by the configuration. format is: // plugin.named.<INTF> = <CLASS> = <name>, <name> [,] \ // <CLASS> = <name>, <name> [ ... 
] String namedVal = getConfigProperty(module, NAMED_PREFIX+iname); if (namedVal != null) { namedVal = namedVal.trim(); log.debug("Got Named configuration for interface="+iname+", config="+namedVal); // match "<classname> =" Pattern classnameEqual = Pattern.compile("([\\w\\p{Sc}\\.]+)\\s*\\="); int prevEnd = -1; String prevClassName = null; Matcher classMatcher = classnameEqual.matcher(namedVal); while (classMatcher.find()) { if (prevClassName != null) { found += installNamedConfigs(iname, prevClassName, namedVal.substring(prevEnd, classMatcher.start()).trim().split("\\s*,\\s*")); } prevClassName = classMatcher.group(1); prevEnd = classMatcher.end(); } if (prevClassName != null) { found += installNamedConfigs(iname, prevClassName, namedVal.substring(prevEnd).trim().split("\\s*,\\s*")); } } // 2. Get Self-named config entries: // format is plugin.selfnamed.<INTF> = <CLASS> , <CLASS> .. String selfNamedVal = getConfigProperty(module, SELFNAMED_PREFIX+iname); if (selfNamedVal != null) { String classnames[] = selfNamedVal.trim().split("\\s*,\\s*"); for (int i = 0; i < classnames.length; ++i) { try { Class pluginClass = Class.forName(classnames[i]); String names[] = (String[])pluginClass.getMethod("getPluginNames"). invoke(null); if (names == null || names.length == 0) { log.error("Self-named plugin class \"" + classnames[i] + "\" returned null or empty name list!"); } else { found += installNamedConfigs(iname, classnames[i], names); } } catch (NoSuchMethodException e) { log.error("Implementation Class \""+classnames[i]+"\" is not a subclass of SelfNamedPlugin, it has no getPluginNames() method."); } catch (Exception e) { log.error("While configuring self-named plugin: " + e.toString()); } } } namedPluginClasses.put(iname, "org.dspace.core.marker"); if (found == 0) { log.error("No named plugins found for interface=" + iname); } } } // add info for a named plugin to cache, under all its names. 
private static int installNamedConfigs(String iname, String classname, String names[]) throws ClassNotFoundException { int found = 0; for (int i = 0; i < names.length; ++i) { String key = iname+SEP+names[i]; if (namedPluginClasses.containsKey(key)) { log.error("Name collision in named plugin, implementation class=\"" + classname + "\", name=\"" + names[i] + "\""); } else { namedPluginClasses.put(key, classname); } log.debug("Got Named Plugin, intfc="+iname+", name="+names[i]+", class="+classname); ++found; } return found; } /** * Returns an instance of a plugin that implements the interface * intface and is bound to a name matching name. If there is no * matching plugin, it returns null. The names are matched by * String.equals(). * * @param intfc the interface class of the plugin * @param name under which the plugin implementation is configured. * @return instance of plugin implementation, or null if there is no match or an error. */ public static Object getNamedPlugin(Class intfc, String name) throws PluginInstantiationException { return getNamedPlugin(null, intfc, name); } /** * Returns an instance of a plugin that implements the interface * intface and is bound to a name matching name. If there is no * matching plugin, it returns null. The names are matched by * String.equals(). * * @param module config module, or <code>null</code> for standard location * @param intfc the interface class of the plugin * @param name under which the plugin implementation is configured. * @return instance of plugin implementation, or null if there is no match or an error. 
*/ public static Object getNamedPlugin(String module, Class intfc, String name) throws PluginInstantiationException { try { String iname = intfc.getName(); configureNamedPlugin(module, iname); String key = iname + SEP + name; String cname = namedPluginClasses.get(key); if (cname == null) { log.warn("Cannot find named plugin for interface=" + iname + ", name=\"" + name + "\""); } else { Class pluginClass = Class.forName(cname); if (cacheMe(module, pluginClass)) { String nkey = pluginClass.getName() + SEP + name; Object cached = namedInstanceCache.get(nkey); if (cached == null) { log.debug("Creating cached instance of: " + cname + " for interface=" + iname + " pluginName=" + name ); cached = pluginClass.newInstance(); if (cached instanceof SelfNamedPlugin) { ((SelfNamedPlugin) cached).setPluginInstanceName(name); } namedInstanceCache.put(nkey, cached); } return cached; } else { log.debug("Creating UNcached instance of: " + cname + " for interface=" + iname + " pluginName=" + name ); Object result = pluginClass.newInstance(); if (result instanceof SelfNamedPlugin) { ((SelfNamedPlugin) result).setPluginInstanceName(name); } return result; } } } catch (ClassNotFoundException e) { throw new PluginInstantiationException("Cannot load plugin class: " + e.toString(), e); } catch (InstantiationException e) { throw new PluginInstantiationException(e); } catch (IllegalAccessException e) { throw new PluginInstantiationException(e); } return null; } /** * Returns whether a plugin exists which implements the specified interface * and has a specified name. If a matching plugin is found to be configured, * return true. If there is no matching plugin, return false. * * @param intfc the interface class of the plugin * @param name under which the plugin implementation is configured. 
* @return true if plugin was found to be configured, false otherwise */ public static boolean hasNamedPlugin(Class intfc, String name) throws PluginInstantiationException { return hasNamedPlugin(null, intfc, name); } /** * Returns whether a plugin exists which implements the specified interface * and has a specified name. If a matching plugin is found to be configured, * return true. If there is no matching plugin, return false. * * @param module the config module or <code>null</code> for regular location * @param intfc the interface class of the plugin * @param name under which the plugin implementation is configured. * @return true if plugin was found to be configured, false otherwise */ public static boolean hasNamedPlugin(String module, Class intfc, String name) throws PluginInstantiationException { try { String iname = intfc.getName(); configureNamedPlugin(module, iname); String key = iname + SEP + name; return namedPluginClasses.get(key) != null; } catch (ClassNotFoundException e) { throw new PluginInstantiationException("Cannot load plugin class: " + e.toString(), e); } } /** * Returns all of the names under which a named plugin implementing * the interface intface can be requested (with getNamedPlugin()). * The array is empty if there are no matches. Use this to populate * a menu of plugins for interactive selection, or to document what * the possible choices are. * <p> * NOTE: The names are NOT returned in any deterministic order. * * @param intfc plugin interface for which to return names. * @return an array of strings with every name; if none are * available an empty array is returned. */ public static String[] getAllPluginNames(Class intfc) { return getAllPluginNames(null, intfc); } /** * Returns all of the names under which a named plugin implementing * the interface intface can be requested (with getNamedPlugin()). * The array is empty if there are no matches. 
Use this to populate * a menu of plugins for interactive selection, or to document what * the possible choices are. * <p> * NOTE: The names are NOT returned in any deterministic order. * * @param module the module name * @param intfc plugin interface for which to return names. * @return an array of strings with every name; if none are * available an empty array is returned. */ public static String[] getAllPluginNames(String module, Class intfc) { try { String iname = intfc.getName(); configureNamedPlugin(module, iname); String prefix = iname + SEP; ArrayList<String> result = new ArrayList<String>(); for (String key : namedPluginClasses.keySet()) { if (key.startsWith(prefix)) { result.add(key.substring(prefix.length())); } } if (result.size() == 0) { log.error("Cannot find any names for named plugin, interface=" + iname); } return result.toArray(new String[result.size()]); } catch (ClassNotFoundException e) { return new String[0]; } } /** * Tells the Plugin Manager to let go of any references to a * reusable plugin, to prevent it from being given out again and to * allow the object to be garbage-collected. Call this when a * plugin instance must be taken out of circulation. * * @param plugin the object to release, must have been created by * <code>getNamedPlugin</code> etc. */ public static void releasePlugin(Object plugin) { forgetInstance(plugin, namedInstanceCache); forgetInstance(plugin, anonymousInstanceCache); } private static void forgetInstance(Object plugin, Map<Serializable, Object> cacheMap) { Collection values = cacheMap.values(); Iterator ci = values.iterator(); while (ci.hasNext()) { // Identity comparison is valid for this usage Object val = ci.next(); if (val == plugin) { values.remove(val); } } } /* ----------------------------------------------------------------- * Code to check configuration is all below this line * ----------------------------------------------------------------- */ // true if classname is valid and loadable. 
private static boolean checkClassname(String iname, String msg) { try { if (Class.forName(iname) != null) { return true; } } catch (ClassNotFoundException ce) { log.error("No class definition found for "+msg+": \""+iname+"\""); } return false; } // true if classname is loadable AND is subclass of SelfNamedPlugin private static boolean checkSelfNamed(String iname) { try { if (!checkSelfNamed(Class.forName(iname))) { log.error("The class \"" + iname + "\" is NOT a subclass of SelfNamedPlugin but it should be!"); } } catch (ClassNotFoundException ce) { log.error("No class definition found for self-named class interface: \""+iname+"\""); } return false; } // recursively climb superclass stack until we find SelfNamedPlugin private static boolean checkSelfNamed(Class cls) { Class sup = cls.getSuperclass(); if (sup == null) { return false; } else if (sup.equals(SelfNamedPlugin.class)) { return true; } else { return checkSelfNamed(sup); } } // check named-plugin names by interface -- call the usual // configuration and let it find missing or duplicate names. private static void checkNames(String iname) { try { configureNamedPlugin(null, iname); } catch (ClassNotFoundException ce) { // bogus classname should be old news by now. } } // get module-specific, or generic configuration property private static String getConfigProperty(String module, String property) { if (module != null) { return ConfigurationManager.getProperty(module, property); } return ConfigurationManager.getProperty(property); } /** * Validate the entries in the DSpace Configuration relevant to * PluginManager. Look for inconsistencies, illegal syntax, etc. * Announce violations with "log.error" so they appear in the log * or in the standard error stream if this is run interactively. * <ul> * <li>Look for duplicate keys (by parsing the config file) * <li>Interface in plugin.single, plugin.sequence, plugin.named, plugin.selfnamed is valid. * <li>Classname in plugin.reusable exists and matches a plugin config. 
* <li>Classnames in config values exist. * <li>Classnames in plugin.selfnamed loads and is subclass of <code>SelfNamedPlugin</code> * <li>Implementations of named plugin have no name collisions. * <li>Named plugin entries lacking names. * </ul> */ public static void checkConfiguration() throws IOException { FileReader fr = null; BufferedReader cr = null; /* XXX TODO: (maybe) test that implementation class is really a * subclass or impl of the plugin "interface" */ // tables of config keys for each type of config line: Map<String, String> singleKey = new HashMap<String, String>(); Map<String, String> sequenceKey = new HashMap<String, String>(); Map<String, String> namedKey = new HashMap<String, String>(); Map<String, String> selfnamedKey = new HashMap<String, String>(); Map<String, String> reusableKey = new HashMap<String, String>(); HashMap<String, String> keyMap = new HashMap<String, String>(); // 1. First pass -- grovel the actual config file to check for // duplicate keys, since Properties class hides them from us. // Also build lists of each type of key, check for misspellings. 
File config = ConfigurationManager.getConfigurationFile(); try { fr = new FileReader(config); cr = new BufferedReader(fr); String line = null; boolean continued = false; Pattern keyPattern = Pattern.compile("([^\\s\\=\\:]+)"); while ((line = cr.readLine()) != null) { line = line.trim(); if (line.startsWith("!") || line.startsWith("#")) { continued = false; } else { if (!continued && line.startsWith("plugin.")) { Matcher km = keyPattern.matcher(line); if (km.find()) { String key = line.substring(0, km.end(1)); if (keyMap.containsKey(key)) { log.error("Duplicate key \"" + key + "\" in DSpace configuration file=" + config.toString()); } else { keyMap.put(key, key); } if (key.startsWith(SINGLE_PREFIX)) { singleKey.put(key.substring(SINGLE_PREFIX.length()), key); } else if (key.startsWith(SEQUENCE_PREFIX)) { sequenceKey.put(key.substring(SEQUENCE_PREFIX.length()), key); } else if (key.startsWith(NAMED_PREFIX)) { namedKey.put(key.substring(NAMED_PREFIX.length()), key); } else if (key.startsWith(SELFNAMED_PREFIX)) { selfnamedKey.put(key.substring(SELFNAMED_PREFIX.length()), key); } else if (key.startsWith(REUSABLE_PREFIX)) { reusableKey.put(key.substring(REUSABLE_PREFIX.length()), key); } else { log.error("Key with unknown prefix \"" + key + "\" in DSpace configuration file=" + config.toString()); } } } continued = line.length() > 0 && line.charAt(line.length()-1) == '\\'; } } } finally { if (cr != null) { try { cr.close(); } catch (IOException ioe) { } } if (fr != null) { try { fr.close(); } catch (IOException ioe) { } } } // 1.1 Sanity check, make sure keyMap == set of keys from Configuration Enumeration<String> pne = (Enumeration<String>)ConfigurationManager.propertyNames(); HashSet<String> pn = new HashSet<String>(); while (pne.hasMoreElements()) { String nk = pne.nextElement(); if (nk.startsWith("plugin.")) { pn.add(nk); if (!keyMap.containsKey(nk)) { log.error("Key is in ConfigurationManager.propertyNames() but NOT text crawl: \"" + nk + "\""); } } } 
Iterator<String> pi = keyMap.keySet().iterator(); while (pi.hasNext()) { String key = pi.next(); if (!pn.contains(key)) { log.error("Key is in text crawl but NOT ConfigurationManager.propertyNames(): \"" + key + "\""); } } // 2. Build up list of all interfaces and test that they are loadable. // don't bother testing that they are "interface" rather than "class" // since either one will work for the Plugin Manager. ArrayList<String> allInterfaces = new ArrayList<String>(); allInterfaces.addAll(singleKey.keySet()); allInterfaces.addAll(sequenceKey .keySet()); allInterfaces.addAll(namedKey.keySet()); allInterfaces.addAll(selfnamedKey.keySet()); allInterfaces.addAll(reusableKey.keySet()); Iterator<String> ii = allInterfaces.iterator(); while (ii.hasNext()) { checkClassname(ii.next(), "key interface or class"); } // Check implementation classes: // - each class is loadable. // - plugin.selfnamed values are each subclass of SelfNamedPlugin // - save classname in allImpls Map<String, String> allImpls = new HashMap<String, String>(); // single plugins - just check that it has a valid impl. class ii = singleKey.keySet().iterator(); while (ii.hasNext()) { String key = ii.next(); String val = ConfigurationManager.getProperty(SINGLE_PREFIX+key); if (val == null) { log.error("Single plugin config not found for: " + SINGLE_PREFIX + key); } else { val = val.trim(); if (checkClassname(val, "implementation class")) { allImpls.put(val, val); } } } // sequence plugins - all values must be classes ii = sequenceKey.keySet().iterator(); while (ii.hasNext()) { String key = ii.next(); String val = ConfigurationManager.getProperty(SEQUENCE_PREFIX+key); if (val == null) { log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key); } else { val = val.trim(); String classname[] = val.split("\\s*,\\s*"); for (int i = 0; i < classname.length; ++i) { if (checkClassname(classname[i], "implementation class")) { allImpls.put(classname[i], classname[i]); } } } } // 3. 
self-named plugins - grab and check all values // then make sure it is a subclass of SelfNamedPlugin ii = selfnamedKey.keySet().iterator(); while (ii.hasNext()) { String key = ii.next(); String val = ConfigurationManager.getProperty(SELFNAMED_PREFIX+key); if (val == null) { log.error("Selfnamed plugin config not found for: " + SELFNAMED_PREFIX + key); } else { val = val.trim(); String classname[] = val.split("\\s*,\\s*"); for (int i = 0; i < classname.length; ++i) { if (checkClassname(classname[i], "selfnamed implementation class")) { allImpls.put(classname[i], classname[i]); checkSelfNamed(classname[i]); } } checkNames(key); } } // 4. named plugins - extract the classnames and treat same as sequence. // use named plugin config mechanism to test for duplicates, unnamed. ii = namedKey.keySet().iterator(); Pattern classnameEqual = Pattern.compile("([\\w\\p{Sc}\\.]+)\\s*\\="); while (ii.hasNext()) { String key = ii.next(); String val = ConfigurationManager.getProperty(NAMED_PREFIX+key); if (val == null) { log.error("Named plugin config not found for: " + NAMED_PREFIX + key); } else { checkNames(key); val = val.trim(); Matcher classMatcher = classnameEqual.matcher(val); while (classMatcher.find()) { String classname = classMatcher.group(1); if (checkClassname(classname, "implementation class")) { allImpls.put(classname, classname); } } } } // 5. all classes named in Reusable config lines must be other classes. Iterator<String> ri = reusableKey.keySet().iterator(); while (ri.hasNext()) { String rk = ri.next(); if (!(allImpls.containsKey(rk))) { log.error("In plugin.reusable configuration, class \"" + rk + "\" is NOT a plugin implementation class."); } } } /** * Invoking this class from the command line just runs * <code>checkConfiguration</code> and shows the results. * There are no command-line options. */ public static void main(String[] argv) throws Exception { checkConfiguration(); } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.core; /** * Simple lightweight "framework" for managing plugins. * <p> * This is a superclass of all classes which are managed as self-named * plugins. They must extend <code>SelfNamedPlugin</code> or its * subclass. * <p> * Unfortunately, this has to be an <code>abstract class</code> because * an <code>interface</code> may not have static methods. The * <code>pluginAliases</code> method is static so it can be invoked * without creating an instance, and thus let the aliases live in the * class itself so there is no need for name mapping in a separate * configuration file. * <p> * See the documentation in the * <code>PluginManager</code> class for more details. * * @author Larry Stone * @version $Revision: 5844 $ * @see PluginManager */ public abstract class SelfNamedPlugin { // the specific alias used to find the class that created this instance. private String myName = null; /** * Get the names of this plugin implementation. * Returns all names to which this plugin answers. * <p> * A name should be a short generic name illustrative of the * service, e.g. <code>"PDF"</code>, <code>"JPEG"</code>, <code>"GIF"</code> * for media filters. * <p> * Each name must be unique among all the plugins implementing any * given interface, but it can be the same as a name of * a plugin for a different interface. For example, two classes * may each have a <code>"default"</code> name if they do not * implement any of the same interfaces. * * @return array of names of this plugin */ public static String[] getPluginNames() { return null; } /** * Get an instance's particular name. * Returns the name by which the class was chosen when * this instance was created. 
Only works for instances created * by <code>PluginManager</code>, or if someone remembers to call <code>setPluginName.</code> * <p> * Useful when the implementation class wants to be configured differently * when it is invoked under different names. * * @return name or null if not available. */ public String getPluginInstanceName() { return myName; } /** * Set the name under which this plugin was instantiated. * Not to be invoked by application code, it is * called automatically by <code>PluginManager.getNamedPlugin()</code> * when the plugin is instantiated. * * @param name -- name used to select this class. */ protected void setPluginInstanceName(String name) { myName = name; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

/**
 * This exception indicates a fatal error when instantiating a plugin class.
 * <p>
 * It should only be thrown when something unexpected happens in the
 * course of instantiating a plugin, e.g. an access error, class not found,
 * etc.  Simply not finding a class in the configuration is not an exception.
 * <p>
 * This is a RuntimeException so it doesn't have to be declared, and can
 * be passed all the way up to a generalized fatal exception handler.
 *
 * @author Larry Stone
 * @version $Revision: 5844 $
 * @see PluginManager
 */
public class PluginInstantiationException extends RuntimeException
{
    /**
     * @param msg Error message text.
     */
    public PluginInstantiationException(String msg)
    {
        super(msg);
    }

    /**
     * @param msg Error message text.
     * @param cause other exception that this one is wrapping.
     */
    // NOTE: the original javadoc used the invalid tag "@cause"; corrected
    // to the standard "@param cause" form (comment-only change).
    public PluginInstantiationException(String msg, Throwable cause)
    {
        super(msg, cause);
    }

    /**
     * @param cause other exception that this one is wrapping.
     */
    public PluginInstantiationException(Throwable cause)
    {
        super(cause);
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

/**
 * Implements existing DSpace initial article word behaviour
 *
 * Note: This only works for languages defined with ISO code entries.
 *
 * @author Graham Triggs
 */
public class StandardInitialArticleWord extends InitialArticleWord
{
    // English definite/indefinite articles recognized at the start of a title.
    private static final String[] ENGLISH_ARTICLES = { "the", "an", "a" };

    /**
     * Return the article-word list for the given language, or null when
     * the language is unknown or has no configured articles.
     *
     * @param lang language tag of the string being filtered (may be null)
     * @return article words for English-family tags ("en", "en-GB", ...), else null
     */
    protected String[] getArticleWords(String lang)
    {
        // Guard clause: only English-family language tags are supported.
        if (lang == null || !lang.startsWith("en"))
        {
            return null;
        }
        return ENGLISH_ARTICLES;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

import com.ibm.icu.text.Normalizer;

/**
 * Decompose diacritic characters to character + diacritic
 *
 * @author Graham Triggs
 */
public class DecomposeDiactritics implements TextFilter
{
    /**
     * Decompose the string without regard to language.
     *
     * @param str string to normalize
     * @return the NFD-normalized string
     */
    public String filter(String str)
    {
        // Language plays no part in Unicode decomposition; delegate.
        return filter(str, null);
    }

    /**
     * Decompose the string; the language argument is ignored.
     *
     * @param str string to normalize
     * @param lang language tag (unused)
     * @return the NFD-normalized string
     */
    public String filter(String str, String lang)
    {
        return Normalizer.normalize(str, Normalizer.NFD);
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

import java.util.regex.Pattern;

/**
 * Strips decomposed diacritic characters from the supplied string
 *
 * @author Graham Triggs
 *
 */
public class StripDiacritics implements TextFilter
{
    // Compiled once: String.replaceAll() recompiles its regex on every call,
    // which is wasteful for a filter invoked per-string.
    private static final Pattern COMBINING_MARKS =
            Pattern.compile("\\p{InCombiningDiacriticalMarks}+");

    /**
     * Remove all combining diacritical marks from the string.
     *
     * @param str input string (expected to be in decomposed/NFD form)
     * @return the string with combining marks removed
     */
    public String filter(String str)
    {
        return COMBINING_MARKS.matcher(str).replaceAll("");
    }

    /**
     * Remove all combining diacritical marks; the language argument is ignored.
     *
     * @param str input string (expected to be in decomposed/NFD form)
     * @param lang language tag (unused)
     * @return the string with combining marks removed
     */
    public String filter(String str, String lang)
    {
        return filter(str);
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

import java.util.HashMap;
import java.util.Map;

/**
 * Define languages - both as IANA and ISO639-2 codes.
 *
 * Each predefined constant is interned through {@link #create(String, String, String)},
 * which also registers the instance in static lookup maps, so a given code
 * always resolves to the same {@code Language} object.
 *
 * Note: several ISO639-2 values here follow the bibliographic ("B") code set
 * (e.g. "dut", "ger", "baq") rather than the terminology ("T") set.
 *
 * @author Graham Triggs
 */
public class Language
{
    // Code triple for this language; an empty string means "no code assigned".
    public final String IANA;
    public final String ISO639_1;
    public final String ISO639_2;

    public static final Language AFRIKAANS          = Language.create("af", "af", "afr");
    public static final Language ALBANIAN           = Language.create("sq", "sq", "alb");
    public static final Language ARABIC             = Language.create("ar", "ar", "ara");
    public static final Language BALUCHI            = Language.create("bal", "", "bal");
    public static final Language BASQUE             = Language.create("eu", "", "baq");
    public static final Language BRAHUI             = Language.create("", "", "");
    public static final Language CATALAN            = Language.create("ca", "ca", "cat");
    public static final Language CLASSICAL_GREEK    = Language.create("grc", "", "grc");
    public static final Language DANISH             = Language.create("da", "da", "dan");
    // FIX: the ISO 639-1 code for Dutch is "nl" (was incorrectly "ni", which is unassigned)
    public static final Language DUTCH              = Language.create("nl", "nl", "dut");
    public static final Language ENGLISH            = Language.create("en", "en", "eng");
    public static final Language ESPERANTO          = Language.create("eo", "eo", "epo");
    public static final Language FRENCH             = Language.create("fr", "fr", "fre");
    public static final Language FRISIAN            = Language.create("fy", "fy", "fri");
    public static final Language GALICIAN           = Language.create("gl", "gl", "glg");
    public static final Language GERMAN             = Language.create("de", "de", "ger");
    public static final Language GREEK              = Language.create("el", "el", "gre");
    public static final Language HAWAIIAN           = Language.create("haw", "", "haw");
    public static final Language HEBREW             = Language.create("he", "he", "heb");
    public static final Language HUNGARIAN          = Language.create("hu", "hu", "hun");
    public static final Language ICELANDIC          = Language.create("is", "is", "ice");
    public static final Language IRISH              = Language.create("ga", "ga", "gle");
    public static final Language ITALIAN            = Language.create("it", "it", "ita");
    public static final Language MALAGASY           = Language.create("mg", "mg", "mlg");
    public static final Language MALTESE            = Language.create("mt", "mt", "mlt");
    public static final Language NEAPOLITAN_ITALIAN = Language.create("nap", "", "nap");
    public static final Language NORWEGIAN          = Language.create("no", "no", "nor");
    public static final Language PORTUGUESE         = Language.create("pt", "pt", "por");
    public static final Language PANJABI            = Language.create("pa", "pa", "pan");
    public static final Language PERSIAN            = Language.create("fa", "fa", "per");
    public static final Language PROVENCAL          = Language.create("pro", "", "pro");
    public static final Language PROVENCAL_OCCITAN  = Language.create("oc", "oc", "oci");
    public static final Language ROMANIAN           = Language.create("ro", "ro", "rum");
    public static final Language SCOTS              = Language.create("sco", "", "sco");
    public static final Language SCOTTISH_GAELIC    = Language.create("gd", "gd", "gae");
    public static final Language SHETLAND_ENGLISH   = Language.create("", "", "");
    public static final Language SPANISH            = Language.create("es", "es", "spa");
    public static final Language SWEDISH            = Language.create("sv", "sv", "swe");
    public static final Language TAGALOG            = Language.create("tl", "tl", "tgl");
    public static final Language TURKISH            = Language.create("tr", "tr", "tur");
    public static final Language URDU               = Language.create("ur", "ur", "urd");
    public static final Language WALLOON            = Language.create("wa", "wa", "wln");
    public static final Language WELSH              = Language.create("cy", "cy", "wel");
    public static final Language YIDDISH            = Language.create("yi", "yi", "yid");

    /**
     * Look up a language by code, trying the IANA map first and falling
     * back to the ISO639-2 map.
     *
     * @param lang an IANA or ISO639-2 language code
     * @return the matching Language, or null if not registered
     */
    public static Language getLanguage(String lang)
    {
        return LanguageMaps.getLanguage(lang);
    }

    /**
     * Look up a language by its IANA code only.
     *
     * @param iana an IANA language code
     * @return the matching Language, or null if not registered
     */
    public static Language getLanguageForIANA(String iana)
    {
        return LanguageMaps.getLanguageForIANA(iana);
    }

    /**
     * Look up a language by its ISO639-2 code only.
     *
     * @param iso an ISO639-2 language code
     * @return the matching Language, or null if not registered
     */
    public static Language getLanguageForISO639_2(String iso)
    {
        return LanguageMaps.getLanguageForISO639_2(iso);
    }

    /**
     * Interning factory: return the already-registered Language for any of the
     * supplied codes, or construct (and thereby register) a new one.
     * Synchronized so concurrent class initialisation cannot register duplicates.
     */
    private static synchronized Language create(String iana, String iso639_1, String iso639_2)
    {
        Language lang = LanguageMaps.getLanguageForIANA(iana);
        lang = (lang != null ? lang : LanguageMaps.getLanguageForISO639_1(iso639_1));
        lang = (lang != null ? lang : LanguageMaps.getLanguageForISO639_2(iso639_2));
        return (lang != null ? lang : new Language(iana, iso639_1, iso639_2));
    }

    /** Static registry of all created languages, keyed by each code family. */
    private static class LanguageMaps
    {
        private static final Map<String, Language> langMapIANA     = new HashMap<String, Language>();
        private static final Map<String, Language> langMapISO639_1 = new HashMap<String, Language>();
        private static final Map<String, Language> langMapISO639_2 = new HashMap<String, Language>();

        /**
         * Register a language under each non-empty code it carries.
         * First registration wins: an already-mapped code is never overwritten.
         */
        static void add(Language l)
        {
            if (l.IANA != null && l.IANA.length() > 0 && !langMapIANA.containsKey(l.IANA))
            {
                langMapIANA.put(l.IANA, l);
            }
            if (l.ISO639_1 != null && l.ISO639_1.length() > 0 && !langMapISO639_1.containsKey(l.ISO639_1))
            {
                langMapISO639_1.put(l.ISO639_1, l);
            }
            if (l.ISO639_2 != null && l.ISO639_2.length() > 0 && !langMapISO639_2.containsKey(l.ISO639_2))
            {
                langMapISO639_2.put(l.ISO639_2, l);
            }
        }

        /** IANA lookup with ISO639-2 fallback. */
        public static Language getLanguage(String lang)
        {
            if (langMapIANA.containsKey(lang))
            {
                return langMapIANA.get(lang);
            }
            return langMapISO639_2.get(lang);
        }

        public static Language getLanguageForIANA(String iana)
        {
            return langMapIANA.get(iana);
        }

        public static Language getLanguageForISO639_1(String iso)
        {
            return langMapISO639_1.get(iso);
        }

        public static Language getLanguageForISO639_2(String iso)
        {
            return langMapISO639_2.get(iso);
        }
    }

    /** Construct and self-register in the lookup maps. */
    private Language(String iana, String iso639_1, String iso639_2)
    {
        IANA = iana;
        ISO639_1 = iso639_1;
        ISO639_2 = iso639_2;
        LanguageMaps.add(this);
    }

    /** Unregistered placeholder instance (all codes null); not used by the constants above. */
    private Language()
    {
        IANA = null;
        ISO639_1 = null;
        ISO639_2 = null;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

/**
 * Abstract class for implementing initial article word filters
 * Allows you to create new classes with their own rules for mapping
 * languages to article word lists.
 *
 * Subclasses supply the per-language word list via {@link #getArticleWords(String)};
 * this class performs the matching and either strips the matched article or
 * moves it to the end of the string.
 *
 * @author Graham Triggs
 */
public abstract class InitialArticleWord implements TextFilter
{
    /**
     * When no language is passed, use null and let implementation decide what to do
     */
    public String filter(String str)
    {
        return filter(str, null);
    }

    /**
     * Do an initial definite/indefinite article filter on the passed string.
     * On matching an initial word, can strip or move to the end, depending on the
     * configuration of the implementing class.
     *
     * @param str The string to parse
     * @param lang The language of the passed string (passed through to getArticleWords)
     * @return String The filtered string; the original string is returned unchanged
     *         when no article word list exists or no initial article is matched
     */
    public String filter(String str, String lang)
    {
        // Get the list of article words for this language
        String[] articleWordArr = getArticleWords(lang);

        // If we have an article word array, process the string
        if (articleWordArr != null && articleWordArr.length > 0)
        {
            String initialArticleWord = null;
            int curPos = 0;
            int initialStart = -1;   // index of the first significant character
            int initialEnd = -1;     // exclusive end of the matched article word (< 0 = no match yet)

            // Iterate through the characters until we find something significant, or hit the end
            while (initialEnd < 0 && curPos < str.length())
            {
                // Have we found a significant character
                if (Character.isLetterOrDigit(str.charAt(curPos)))
                {
                    // Mark this as the cut point for the initial word
                    initialStart = curPos;

                    // Loop through the article words looking for a match
                    for (int idx = 0; initialEnd < 0 && idx < articleWordArr.length; idx++)
                    {
                        // Extract a fragment from the string to test
                        // Must be same length as the article word
                        if (idx > 1 && initialArticleWord != null)
                        {
                            // Only need to do so if we haven't already got one
                            // of the right length
                            // NOTE(review): condition is idx > 1 (not idx > 0), so the idx == 1
                            // iteration always re-extracts even when the previous fragment has
                            // the right length — harmless, just a redundant extraction.
                            if (initialArticleWord.length() != articleWordArr[idx].length())
                            {
                                initialArticleWord = extractText(str, curPos, articleWordArr[idx].length());
                            }
                        }
                        else
                        {
                            initialArticleWord = extractText(str, curPos, articleWordArr[idx].length());
                        }

                        // Does the fragment match an article word?
                        // (extractText returns null unless significant text follows the
                        // fragment, so the charAt below cannot run past the end)
                        if (initialArticleWord != null && initialArticleWord.equalsIgnoreCase(articleWordArr[idx]))
                        {
                            // Check to see if the next character in the source
                            // is a whitespace
                            boolean isNextWhitespace = Character.isWhitespace(
                                    str.charAt(curPos + articleWordArr[idx].length()));

                            // Check to see if the last character of the article word is a letter or digit
                            boolean endsLetterOrDigit =
                                Character.isLetterOrDigit(initialArticleWord.charAt(initialArticleWord.length() - 1));

                            // If the last character of the article word is a letter or digit,
                            // then it must be followed by whitespace, if not, it can be anything
                            // (e.g. "l'" may run straight into the next word).
                            // Setting endPos signifies that we have found an article word
                            if (endsLetterOrDigit && isNextWhitespace)
                            {
                                initialEnd = curPos + initialArticleWord.length();
                            }
                            else if (!endsLetterOrDigit)
                            {
                                initialEnd = curPos + initialArticleWord.length();
                            }
                        }
                    }

                    // Quit the loop, as we have a significant character
                    break;
                }

                // Keep going
                curPos++;
            }

            // If endPos is positive, then we've found an article word
            if (initialEnd > 0)
            {
                // Find a cut point in the source string, removing any whitespace after the article word
                int cutPos = initialEnd;
                while (cutPos < str.length() && Character.isWhitespace(str.charAt(cutPos)))
                {
                    cutPos++;
                }

                // Are we stripping the article word?
                if (stripInitialArticle)
                {
                    // Yes, simply return everything after the cut
                    return str.substring(cutPos);
                }
                else
                {
                    // No - move the initial article word to the end
                    return new StringBuffer(str.substring(cutPos))
                               .append(wordSeparator)
                               .append(str.substring(initialStart, initialEnd))
                               .toString();
                }
            }
        }

        // Didn't do any processing, or didn't find an initial article word
        // Return the original string
        return str;
    }

    /**
     * @param stripWord true to remove the article entirely,
     *                  false to move it to the end of the string
     */
    protected InitialArticleWord(boolean stripWord)
    {
        stripInitialArticle = stripWord;
    }

    /** Default behaviour: move the article to the end rather than strip it. */
    protected InitialArticleWord()
    {
        stripInitialArticle = false;
    }

    /**
     * Abstract method to get the list of words to use in the initial word filter
     *
     * @param lang The language to retrieve article words for
     * @return An array of definite/indefinite article words
     */
    protected abstract String[] getArticleWords(String lang);

    // Separator to use when appending article to end
    private String wordSeparator = ", ";

    // Flag to signify initial article word should be removed
    // If false, then the initial article word is appended to the end
    private boolean stripInitialArticle = false;

    /**
     * Helper method to extract text from a string.
     * Ensures that there is significant data (ie. non-whitespace)
     * after the segment requested.
     *
     * @param str source string
     * @param pos start index of the fragment
     * @param len length of the fragment
     * @return the fragment str[pos, pos+len), or null if nothing but
     *         whitespace follows it (the string would be article-only)
     */
    private String extractText(String str, int pos, int len)
    {
        int testPos = pos + len;
        while (testPos < str.length() && Character.isWhitespace(str.charAt(testPos)))
        {
            testPos++;
        }

        if (testPos < str.length())
        {
            return str.substring(pos, pos + len);
        }

        return null;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

/**
 * Lowercase and trim leading / trailing whitespace
 *
 * Normalises a string for browse ordering: the result is the input folded to
 * lower case with surrounding whitespace removed. Language-insensitive.
 *
 * @author Graham Triggs
 */
public class LowerCaseAndTrim implements TextFilter
{
    /**
     * The language parameter is irrelevant to this filter;
     * delegate to the single-argument form.
     */
    public String filter(String str, String lang)
    {
        return filter(str);
    }

    /** Fold to lower case, then strip leading and trailing whitespace. */
    public String filter(String str)
    {
        return str.toLowerCase().trim();
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

/**
 * Browse ordering filter that removes every character from the start of a
 * string up to the first letter or digit (punctuation, whitespace, symbols).
 * If the string already begins with a letter or digit it is returned as-is.
 */
public class StripLeadingNonAlphaNum implements TextFilter
{
    public String filter(String str)
    {
        final int len = str.length();

        // Advance past the leading run of non-alphanumeric characters.
        int start = 0;
        while (start < len && !Character.isLetterOrDigit(str.charAt(start)))
        {
            start++;
        }

        // Nothing to strip: hand back the original string unchanged.
        return (start == 0) ? str : str.substring(start);
    }

    /** Language has no effect on this filter; delegate to the plain form. */
    public String filter(String str, String lang)
    {
        return filter(str);
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

/**
 * Define an interface for all browse ordering filters.
 *
 * Implementations transform a string into the form used for sorting
 * (e.g. lowercasing, stripping leading articles or punctuation).
 *
 * @author Graham Triggs
 */
public interface TextFilter
{
    /**
     * Filter the given string without any language context.
     *
     * @param str the string to filter
     * @return the filtered string
     */
    public String filter(String str);

    /**
     * Filter the given string, applying language-specific rules where the
     * implementation supports them.
     *
     * @param str  the string to filter
     * @param lang the language of the string (implementation-defined code; may be null)
     * @return the filtered string
     */
    public String filter(String str, String lang);
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.text.filter;

import java.io.Serializable;
import java.util.*;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;

import org.dspace.core.ConfigurationManager;

/**
 * Implements MARC 21 standards to disregard initial
 * definite or indefinite article in sorting.
 *
 * Note: This only works for languages defined with IANA code entries.
 *
 * The word-to-languages table below is inverted at class-load time into a
 * per-language word list (longest word first), keyed by IANA code.
 *
 * @author Graham Triggs
 */
public class MARC21InitialArticleWord extends InitialArticleWord
{
    public MARC21InitialArticleWord()
    {
        // Default behaviour is to strip the initial word completely
        super(true);
    }

    public MARC21InitialArticleWord(boolean stripWord)
    {
        super(stripWord);
    }

    /**
     * Return the list of definite and indefinite article codes
     * for this language.
     *
     * @param lang the IANA/ISO language code; null or empty selects the
     *             configured default word list (may itself be null)
     * @return article words sorted longest-first, or null for an unknown language
     */
    protected String[] getArticleWords(String lang)
    {
        // No language - no words
        // (note: returns the configured defaults for an EMPTY language,
        //  but null for a NON-EMPTY language that is simply unknown)
        if (StringUtils.isEmpty(lang))
        {
            return defaultWords;
        }

        Language l = Language.getLanguage(lang);

        // Is the language in our map?
        if (l != null && ianaArticleMap.containsKey(l.IANA))
        {
            // Get the list of words for this language
            ArticlesForLang articles = ianaArticleMap.get(l.IANA);
            if (articles != null)
            {
                return articles.words;
            }
        }

        return null;
    }

    // Mapping of IANA codes to article word lists
    private static Map<String, ArticlesForLang> ianaArticleMap = new HashMap<String, ArticlesForLang>();

    // Combined word list used when no language is supplied; built from the
    // languages named in the "marc21wordfilter.defaultlang" configuration key.
    private static String[] defaultWords = null;

    // Static initialisation - convert word -> languages map
    // into language -> words map
    static
    {
        /* Define a mapping for article words to the languages that have them.
         * Take from: http://www.loc.gov/marc/bibliographic/bdapp-e.html
         */
        Object[][] articleWordArray = {
            { "a",       Language.ENGLISH, Language.GALICIAN, Language.HUNGARIAN, Language.PORTUGUESE, Language.ROMANIAN, Language.SCOTS, Language.YIDDISH },
            { "a'",      Language.SCOTTISH_GAELIC },
            { "al",      Language.ROMANIAN },
            { "al-",     Language.ARABIC, Language.BALUCHI, Language.BRAHUI, Language.PANJABI, Language.PERSIAN, Language.TURKISH, Language.URDU },
            { "am",      Language.SCOTTISH_GAELIC },
            { "an",      Language.ENGLISH, Language.IRISH, Language.SCOTS, Language.SCOTTISH_GAELIC, Language.YIDDISH },
            { "an t-",   Language.IRISH, Language.SCOTTISH_GAELIC },
            { "ane",     Language.SCOTS },
            { "ang",     Language.TAGALOG },
            { "ang mga", Language.TAGALOG },
            { "as",      Language.GALICIAN, Language.PORTUGUESE },
            { "az",      Language.HUNGARIAN },
            { "bat",     Language.BASQUE },
            { "bir",     Language.TURKISH },
            { "d'",      Language.ENGLISH },
            { "da",      Language.SHETLAND_ENGLISH },
            { "das",     Language.GERMAN },
            { "de",      Language.DANISH, Language.DUTCH, Language.ENGLISH, Language.FRISIAN, Language.NORWEGIAN, Language.SWEDISH },
            { "dei",     Language.NORWEGIAN },
            { "dem",     Language.GERMAN },
            { "den",     Language.DANISH, Language.GERMAN, Language.NORWEGIAN, Language.SWEDISH },
            { "der",     Language.GERMAN, Language.YIDDISH },
            { "des",     Language.GERMAN, Language.WALLOON },
            { "det",     Language.DANISH, Language.NORWEGIAN, Language.SWEDISH },
            { "di",      Language.YIDDISH },
            { "die",     Language.AFRIKAANS, Language.GERMAN, Language.YIDDISH },
            { "dos",     Language.YIDDISH },
            { "e",       Language.NORWEGIAN },
            { "e",       Language.FRISIAN },  // should be 'e - leading apostrophes are ignored
            { "een",     Language.DUTCH },
            { "eene",    Language.DUTCH },
            { "egy",     Language.HUNGARIAN },
            { "ei",      Language.NORWEGIAN },
            { "ein",     Language.GERMAN, Language.NORWEGIAN, Language.WALLOON },
            { "eine",    Language.GERMAN },
            { "einem",   Language.GERMAN },
            { "einen",   Language.GERMAN },
            { "einer",   Language.GERMAN },
            { "eines",   Language.GERMAN },
            { "eit",     Language.NORWEGIAN },
            { "el",      Language.CATALAN, Language.SPANISH },
            { "el-",     Language.ARABIC },
            { "els",     Language.CATALAN },
            { "en",      Language.CATALAN, Language.DANISH, Language.NORWEGIAN, Language.SWEDISH },
            { "enne",    Language.WALLOON },
            { "et",      Language.DANISH, Language.NORWEGIAN },
            { "ett",     Language.SWEDISH },
            { "eyn",     Language.YIDDISH },
            { "eyne",    Language.YIDDISH },
            { "gl'",     Language.ITALIAN },
            { "gli",     Language.PROVENCAL },  // NOTE(review): MARC list has "gli" as Italian - verify against bdapp-e
            { "ha-",     Language.HEBREW },
            { "hai",     Language.CLASSICAL_GREEK, Language.GREEK },
            { "he",      Language.HAWAIIAN },
            { "h\u0113", Language.CLASSICAL_GREEK, Language.GREEK },  // e macron
            { "he-",     Language.HEBREW },
            { "heis",    Language.GREEK },
            { "hen",     Language.GREEK },
            { "hena",    Language.GREEK },
            { "henas",   Language.GREEK },
            { "het",     Language.DUTCH },
            { "hin",     Language.ICELANDIC },
            { "hina",    Language.ICELANDIC },
            { "hinar",   Language.ICELANDIC },
            { "hinir",   Language.ICELANDIC },
            { "hinn",    Language.ICELANDIC },
            { "hinna",   Language.ICELANDIC },
            { "hinnar",  Language.ICELANDIC },
            { "hinni",   Language.ICELANDIC },
            { "hins",    Language.ICELANDIC },
            { "hinu",    Language.ICELANDIC },
            { "hinum",   Language.ICELANDIC },
            { "hi\u01d2", Language.ICELANDIC },  // NOTE(review): \u01d2 is 'o-caron'; MARC lists "hi\u00f0" (eth) - possible typo, verify
            { "ho",      Language.CLASSICAL_GREEK, Language.GREEK },
            { "hoi",     Language.CLASSICAL_GREEK, Language.GREEK },
            { "i",       Language.ITALIAN },
            { "ih'",     Language.PROVENCAL },
            { "il",      Language.ITALIAN, Language.PROVENCAL_OCCITAN },
            { "il-",     Language.MALTESE },
            { "in",      Language.FRISIAN },
            { "it",      Language.FRISIAN },
            { "ka",      Language.HAWAIIAN },
            { "ke",      Language.HAWAIIAN },
            { "l'",      Language.CATALAN, Language.FRENCH, Language.ITALIAN, Language.PROVENCAL_OCCITAN, Language.WALLOON },
            { "l-",      Language.MALTESE },
            { "la",      Language.CATALAN, Language.ESPERANTO, Language.FRENCH, Language.ITALIAN, Language.PROVENCAL_OCCITAN, Language.SPANISH },
            { "las",     Language.PROVENCAL_OCCITAN, Language.SPANISH },
            { "le",      Language.FRENCH, Language.ITALIAN, Language.PROVENCAL_OCCITAN },
            { "les",     Language.CATALAN, Language.FRENCH, Language.PROVENCAL_OCCITAN, Language.WALLOON },
            { "lh",      Language.PROVENCAL_OCCITAN },
            { "lhi",     Language.PROVENCAL_OCCITAN },
            { "li",      Language.PROVENCAL_OCCITAN },
            { "lis",     Language.PROVENCAL_OCCITAN },
            { "lo",      Language.ITALIAN, Language.PROVENCAL_OCCITAN, Language.SPANISH },
            { "los",     Language.PROVENCAL_OCCITAN, Language.SPANISH },
            { "lou",     Language.PROVENCAL_OCCITAN },
            { "lu",      Language.PROVENCAL_OCCITAN },
            { "mga",     Language.TAGALOG },
            { "m\u0303ga", Language.TAGALOG },
            { "mia",     Language.GREEK },
            { "n",       Language.AFRIKAANS, Language.DUTCH, Language.FRISIAN },  // should be 'n - leading apostrophes are ignored
            { "na",      Language.HAWAIIAN, Language.IRISH, Language.SCOTTISH_GAELIC },
            { "na h-",   Language.IRISH, Language.SCOTTISH_GAELIC },
            { "nje",     Language.ALBANIAN },
            { "ny",      Language.MALAGASY },
            { "o",       Language.NEAPOLITAN_ITALIAN },  // should be 'o - leading apostrophes are ignored
            { "o",       Language.GALICIAN, Language.HAWAIIAN, Language.PORTUGUESE, Language.ROMANIAN },
            { "os",      Language.PORTUGUESE },
            { "r",       Language.ICELANDIC },  // should be 'r - leading apostrophes are ignored
            { "s",       Language.GERMAN },  // should be 's - leading apostrophes are ignored
            { "sa",      Language.TAGALOG },
            { "sa mga",  Language.TAGALOG },
            { "si",      Language.TAGALOG },
            { "sin\u00e1", Language.TAGALOG },
            { "t",       Language.DUTCH, Language.FRISIAN },  // should be 't - leading apostrophes are ignored
            { "ta",      Language.CLASSICAL_GREEK, Language.GREEK },
            { "tais",    Language.CLASSICAL_GREEK },
            { "tas",     Language.CLASSICAL_GREEK },
            { "t\u0113", Language.CLASSICAL_GREEK },  // e macron
            { "t\u0113n", Language.CLASSICAL_GREEK, Language.GREEK },  // e macron
            { "t\u0113s", Language.CLASSICAL_GREEK, Language.GREEK },  // e macron
            { "the",     Language.ENGLISH },
            { "t\u014d", Language.CLASSICAL_GREEK, Language.GREEK },  // o macron
            { "tois",    Language.CLASSICAL_GREEK },
            { "t\u014dn", Language.CLASSICAL_GREEK, Language.GREEK },  // o macron
            { "tou",     Language.CLASSICAL_GREEK, Language.GREEK },
            { "um",      Language.PORTUGUESE },
            { "uma",     Language.PORTUGUESE },
            { "un",      Language.CATALAN, Language.FRENCH, Language.ITALIAN, Language.PROVENCAL_OCCITAN, Language.ROMANIAN, Language.SPANISH },
            { "un'",     Language.ITALIAN },
            { "una",     Language.CATALAN, Language.ITALIAN, Language.PROVENCAL_OCCITAN, Language.SPANISH },
            { "une",     Language.FRENCH },
            { "unei",    Language.ROMANIAN },
            { "unha",    Language.GALICIAN },
            { "uno",     Language.ITALIAN, Language.PROVENCAL_OCCITAN },
            { "uns",     Language.PROVENCAL_OCCITAN },
            { "unui",    Language.ROMANIAN },
            { "us",      Language.PROVENCAL_OCCITAN },
            { "y",       Language.WELSH },
            { "ye",      Language.ENGLISH },
            { "yr",      Language.WELSH }
        };

        // Initialize the lang -> article map
        ianaArticleMap = new HashMap<String, ArticlesForLang>();

        int wordIdx = 0;
        int langIdx = 0;

        // Iterate through word/language array
        // Generate temporary language map
        // (row layout: element 0 is the word, elements 1..n are Languages)
        Map<Language, List<String>> langWordMap = new HashMap<Language, List<String>>();
        for (wordIdx = 0; wordIdx < articleWordArray.length; wordIdx++)
        {
            for (langIdx = 1; langIdx < articleWordArray[wordIdx].length; langIdx++)
            {
                Language lang = (Language)articleWordArray[wordIdx][langIdx];

                // Languages without an IANA code (e.g. BRAHUI) are skipped entirely
                if (lang != null && lang.IANA.length() > 0)
                {
                    List<String> words = langWordMap.get(lang);
                    if (words == null)
                    {
                        words = new ArrayList<String>();
                        langWordMap.put(lang, words);
                    }

                    // Add language to list if we haven't done so already
                    if (!words.contains(articleWordArray[wordIdx][0]))
                    {
                        words.add((String)articleWordArray[wordIdx][0]);
                    }
                }
            }
        }

        // Iterate through languages
        for (Map.Entry<Language, List<String>> langToWord : langWordMap.entrySet())
        {
            Language lang = langToWord.getKey();
            List<String> wordList = langToWord.getValue();

            // Convert the list into an array of strings
            String[] words = new String[wordList.size()];
            for (int idx = 0; idx < wordList.size(); idx++)
            {
                words[idx] = wordList.get(idx);
            }

            // Sort the array into length order - longest to shortest
            // This ensures maximal matching on the article words
            Arrays.sort(words, new MARC21InitialArticleWord.InverseLengthComparator());

            // Add language/article entry to map
            ianaArticleMap.put(lang.IANA, new MARC21InitialArticleWord.ArticlesForLang(lang, words));
        }

        // Setup default stop words for null languages
        // The config value is a comma/space separated list of language codes;
        // the default list is the concatenation of those languages' word lists.
        String defaultLangs = ConfigurationManager.getProperty("marc21wordfilter.defaultlang");
        if (!StringUtils.isEmpty(defaultLangs))
        {
            String[] langArr = defaultLangs.split("[, ]+");
            if (langArr != null && langArr.length > 0)
            {
                int wordCount = 0;
                ArticlesForLang[] afl = new ArticlesForLang[langArr.length];

                // First pass: resolve each configured language and count words
                for (int idx = 0; idx < afl.length; idx++)
                {
                    Language l = Language.getLanguage(langArr[idx]);
                    if (l != null && ianaArticleMap.containsKey(l.IANA))
                    {
                        afl[idx] = ianaArticleMap.get(l.IANA);
                        if (afl[idx] != null)
                        {
                            wordCount += afl[idx].words.length;
                        }
                    }
                }

                // Second pass: concatenate all word lists into defaultWords
                if (wordCount > 0)
                {
                    int destPos = 0;
                    defaultWords = new String[wordCount];
                    for (int idx = 0; idx < afl.length; idx++)
                    {
                        if (afl[idx] != null)
                        {
                            System.arraycopy(afl[idx].words, 0, defaultWords, destPos, afl[idx].words.length);
                            destPos += afl[idx].words.length;
                        }
                    }
                }
            }
        }
    }

    // Wrapper class for inserting word arrays into a map
    private static class ArticlesForLang
    {
        final Language lang;
        final String[] words;

        ArticlesForLang(Language lang, String[] words)
        {
            this.lang = lang;
            // Defensive copy so later mutation of the source array cannot leak in
            this.words = (String[]) ArrayUtils.clone(words);
        }
    }

    // Compare strings according to their length - longest to shortest
    // (raw Comparator retained for source compatibility; only ever given Strings)
    private static class InverseLengthComparator implements Comparator, Serializable
    {
        public int compare(Object arg0, Object arg1)
        {
            return ((String)arg1).length() - ((String)arg0).length();
        };
    };
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;

/**
 * Immutable value object describing one unit of search-index work:
 * add, update, or delete the Lucene document identified by a term.
 */
class IndexingTask
{
    /** The operation to perform against the index. */
    enum Action { ADD, UPDATE, DELETE }

    // All state is assigned once in the constructor and never mutated.
    private final Action action;
    private final Term term;
    private final Document doc;

    /**
     * @param pAction the index operation to perform
     * @param pTerm   term identifying the target document in the index
     * @param pDoc    document content for the operation
     */
    IndexingTask(Action pAction, Term pTerm, Document pDoc)
    {
        action = pAction;
        term = pTerm;
        doc = pDoc;
    }

    /** @return true if this task adds a new document */
    boolean isAdd()
    {
        return action == Action.ADD;
    }

    /** @return true if this task deletes an existing document */
    boolean isDelete()
    {
        return action == Action.DELETE;
    }

    /** @return true if this task replaces an existing document */
    boolean isUpdate()
    {
        return action == Action.UPDATE;
    }

    /** @return the term identifying the target document */
    Term getTerm()
    {
        return term;
    }

    /** @return the document payload for this task */
    Document getDocument()
    {
        return doc;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.io.Reader;
import java.util.Set;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.PorterStemFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardFilter;

import org.dspace.core.ConfigurationManager;

/**
 * Custom Lucene Analyzer that combines the standard filter, lowercase filter,
 * stemming and stopword filters.
 */
public class DSAnalyzer extends Analyzer
{
    /*
     * An array containing some common words that are not usually useful for
     * searching.
     */
    private static final String[] STOP_WORDS =
    {

    // new stopwords (per MargretB)
            "a", "am", "and", "are", "as", "at", "be", "but", "by", "for",
            "if", "in", "into", "is", "it", "no", "not", "of", "on", "or",
            "the", "to", "was"
    // old stopwords (Lucene default)
    /*
     * "a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in",
     * "into", "is", "it", "no", "not", "of", "on", "or", "s", "such", "t",
     * "that", "the", "their","then", "there","these", "they", "this", "to",
     * "was", "will", "with"
     */
    };

    /*
     * Stop table
     * (raw Set: this is what StopFilter.makeStopSet returns in this Lucene version)
     */
    private static final Set stopSet = StopFilter.makeStopSet(STOP_WORDS);

    /*
     * Create a token stream for this analyzer.
     * Pipeline: DSTokenizer -> StandardFilter -> LowerCaseFilter
     *           -> StopFilter(stopSet) -> PorterStemFilter
     */
    public final TokenStream tokenStream(String fieldName, final Reader reader)
    {
        TokenStream result = new DSTokenizer(reader);

        result = new StandardFilter(result);
        result = new LowerCaseFilter(result);
        result = new StopFilter(result, stopSet);
        result = new PorterStemFilter(result);

        return result;
    }

    /**
     * Position gap between values of a multi-valued field, used to stop
     * phrase/proximity queries matching across separate field values.
     */
    public int getPositionIncrementGap(String fieldName)
    {
        // If it is the default field, or bounded fields is turned off in the config, return the default value
        if ("default".equalsIgnoreCase(fieldName)
                || !ConfigurationManager.getBooleanProperty("search.boundedfields", false))
        {
            return super.getPositionIncrementGap(fieldName);
        }

        // Not the default field, and we want bounded fields, so return a large gap increment
        return 10;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.io.Reader;

import org.apache.lucene.analysis.CharTokenizer;

/**
 * Customized Lucene Tokenizer, since the standard one rejects numbers from
 * indexing/querying.
 */
public final class DSTokenizer extends CharTokenizer
{
    /**
     * Construct a new DSTokenizer reading from the given input.
     */
    public DSTokenizer(Reader in)
    {
        super(in);
    }

    /**
     * Normalizes each token character to lower case.
     */
    protected char normalize(char c)
    {
        return Character.toLowerCase(c);
    }

    /**
     * Token characters are those which satisfy
     * {@link Character#isLetterOrDigit(char)} - unlike the standard
     * letter-only tokenizer, digits are kept.
     */
    protected boolean isTokenChar(char c)
    {
        return Character.isLetterOrDigit(c);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.search; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.log4j.Logger; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.queryParser.TokenMgrError; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Hits; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Searcher; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.core.ConfigurationManager; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.sort.SortOption; // issues // need to filter query string for security // cmd line query needs to process args correctly (seems to split them up) /** * DSIndexer contains various static methods for performing queries on indices, * for collections and communities. 
* */ public class DSQuery { // Result types static final String ALL = "999"; static final String ITEM = "" + Constants.ITEM; static final String COLLECTION = "" + Constants.COLLECTION; static final String COMMUNITY = "" + Constants.COMMUNITY; // cache a Lucene IndexSearcher for more efficient searches private static IndexSearcher searcher = null; private static String indexDir = null; private static String operator = null; private static long lastModified; /** log4j logger */ private static Logger log = Logger.getLogger(DSQuery.class); static { String maxClauses = ConfigurationManager.getProperty("search.max-clauses"); if (maxClauses != null) { BooleanQuery.setMaxClauseCount(Integer.parseInt(maxClauses)); } indexDir = ConfigurationManager.getProperty("search.dir"); operator = ConfigurationManager.getProperty("search.operator"); } /** * Do a query, returning a QueryResults object * * @param c context * @param args query arguments in QueryArgs object * * @return query results QueryResults */ public static QueryResults doQuery(Context c, QueryArgs args) throws IOException { String querystring = args.getQuery(); QueryResults qr = new QueryResults(); List<String> hitHandles = new ArrayList<String>(); List<Integer> hitIds = new ArrayList<Integer>(); List<Integer> hitTypes = new ArrayList<Integer>(); // set up the QueryResults object qr.setHitHandles(hitHandles); qr.setHitIds(hitIds); qr.setHitTypes(hitTypes); qr.setStart(args.getStart()); qr.setPageSize(args.getPageSize()); qr.setEtAl(args.getEtAl()); // massage the query string a bit querystring = checkEmptyQuery(querystring); // change nulls to an empty string // We no longer need to work around the Lucene bug with recent versions //querystring = workAroundLuceneBug(querystring); // logicals changed to && ||, etc. 
        querystring = stripHandles(querystring); // remove handles from query string
        querystring = stripAsterisk(querystring); // remove asterisk from beginning of string

        try
        {
            // grab a searcher, and do the search
            Searcher searcher = getSearcher(c);

            QueryParser qp = new QueryParser("default", DSIndexer.getAnalyzer());
            log.debug("Final query string: " + querystring);

            // Combine query terms with OR unless the caller explicitly asked for AND
            if (operator == null || operator.equals("OR"))
            {
                qp.setDefaultOperator(QueryParser.OR_OPERATOR);
            }
            else
            {
                qp.setDefaultOperator(QueryParser.AND_OPERATOR);
            }

            Query myquery = qp.parse(querystring);
            Hits hits = null;

            try
            {
                // Primary sort is always on resource type; secondary sort is either
                // relevance (no sort option) or the configured "sort_<name>" field.
                if (args.getSortOption() == null)
                {
                    SortField[] sortFields = new SortField[] {
                            new SortField("search.resourcetype", true),
                            new SortField(null, SortField.SCORE, SortOption.ASCENDING.equals(args.getSortOrder()))
                        };
                    hits = searcher.search(myquery, new Sort(sortFields));
                }
                else
                {
                    SortField[] sortFields = new SortField[] {
                            new SortField("search.resourcetype", true),
                            new SortField("sort_" + args.getSortOption().getName(), SortOption.DESCENDING.equals(args.getSortOrder())),
                            SortField.FIELD_SCORE
                        };
                    hits = searcher.search(myquery, new Sort(sortFields));
                }
            }
            catch (Exception e)
            {
                // Lucene can throw an exception if it is unable to determine a sort time from the specified field
                // Provide a fall back that just works on relevancy.
                // NOTE(review): "speficied" typo below is in a runtime log string, left unchanged here.
                log.error("Unable to use speficied sort option: " + (args.getSortOption() == null ? "type/relevance": args.getSortOption().getName()));
                hits = searcher.search(myquery, new Sort(SortField.FIELD_SCORE));
            }

            // set total number of hits
            qr.setHitCount(hits.length());

            // We now have a bunch of hits - snip out a 'window'
            // defined in start, count and return the handles
            // from that window

            // first, are there enough hits?
            if (args.getStart() < hits.length())
            {
                // get as many as we can, up to the window size
                // how many are available after snipping off at offset 'start'?
                int hitsRemaining = hits.length() - args.getStart();

                int hitsToProcess = (hitsRemaining < args.getPageSize()) ? hitsRemaining
                        : args.getPageSize();

                for (int i = args.getStart(); i < (args.getStart() + hitsToProcess); i++)
                {
                    Document d = hits.doc(i);

                    String resourceId = d.get("search.resourceid");
                    String resourceType = d.get("search.resourcetype");
                    String handleText = d.get("handle");
                    String handleType = d.get("type");

                    // Older index documents only carry "type"; prefer the newer
                    // "search.resourcetype" field when present.
                    switch (Integer.parseInt(
                            resourceType != null ? resourceType : handleType))
                    {
                    case Constants.ITEM:
                        hitTypes.add(Integer.valueOf(Constants.ITEM));
                        break;

                    case Constants.COLLECTION:
                        hitTypes.add(Integer.valueOf(Constants.COLLECTION));
                        break;

                    case Constants.COMMUNITY:
                        hitTypes.add(Integer.valueOf(Constants.COMMUNITY));
                        break;
                    }

                    hitHandles.add( handleText );
                    // resourceId may be absent in older index documents
                    hitIds.add( resourceId == null ? null: Integer.parseInt(resourceId) );
                }
            }
        }
        catch (NumberFormatException e)
        {
            log.warn(LogManager.getHeader(c, "Number format exception", "" + e));
            qr.setErrorMsg("number-format-exception");
        }
        catch (ParseException e)
        {
            // a parse exception - log and return null results
            log.warn(LogManager.getHeader(c, "Invalid search string", "" + e));
            qr.setErrorMsg("invalid-search-string");
        }
        catch (TokenMgrError tme)
        {
            // Similar to parse exception
            log.warn(LogManager.getHeader(c, "Invalid search string", "" + tme));
            qr.setErrorMsg("invalid-search-string");
        }
        catch(BooleanQuery.TooManyClauses e)
        {
            // e.g. a wildcard that expands to too many terms
            log.warn(LogManager.getHeader(c, "Query too broad", e.toString()));
            qr.setErrorMsg("query-too-broad");
        }

        return qr;
    }

    /**
     * Replace a null, empty, or "()" query with a harmless placeholder term
     * so the parser does not choke on it.
     *
     * @param myquery raw query string, may be null
     * @return the original query, or "empty_query_string" if it was empty
     */
    static String checkEmptyQuery(String myquery)
    {
        if (myquery == null || myquery.equals("()") || myquery.equals(""))
        {
            myquery = "empty_query_string";
        }

        return myquery;
    }

    /**
     * Workaround Lucene bug that breaks wildcard searching.
     * This is no longer required with Lucene upgrades.
     *
     * @param myquery the raw query string
     * @return the query lower-cased, with boolean keywords preserved
     * @deprecated no longer required with Lucene upgrades
     */
    static String workAroundLuceneBug(String myquery)
    {
        // Lucene currently has a bug which breaks wildcard
        // searching when you have uppercase characters.
        // Here we substitute the boolean operators -- which
        // have to be uppercase -- before transforming the
        // query string to lowercase.
        return myquery.replaceAll(" AND ", " && ")
                      .replaceAll(" OR ", " || ")
                      .replaceAll(" NOT ", " ! ")
                      .toLowerCase();
    }

    /**
     * Strip leading handle-URL or "hdl:" prefixes so a pasted handle
     * searches on the bare handle value.
     */
    static String stripHandles(String myquery)
    {
        // Drop beginning pieces of full handle strings
        return myquery.replaceAll("^\\s*http://hdl\\.handle\\.net/", "")
                      .replaceAll("^\\s*hdl:", "");
    }

    /**
     * Remove leading asterisks from the query and from the start of
     * individual terms/fields.
     */
    static String stripAsterisk(String myquery)
    {
        // query strings (or words) beginning with "*" cause a null pointer error
        return myquery.replaceAll("^\\*", "")
                      .replaceAll("\\s\\*", " ")
                      .replaceAll("\\(\\*", "(")
                      .replaceAll(":\\*", ":");
    }

    /**
     * Do a query, restricted to a collection
     *
     * @param c
     *            context
     * @param args
     *            query args
     * @param coll
     *            collection to restrict to
     *
     * @return QueryResults same results as doQuery, restricted to a collection
     */
    public static QueryResults doQuery(Context c, QueryArgs args,
            Collection coll) throws IOException
    {
        String querystring = args.getQuery();
        querystring = checkEmptyQuery(querystring);

        // "l<id>" is the location token indexed for a collection
        String location = "l" + (coll.getID());
        String newquery = "+(" + querystring + ") +location:\"" + location + "\"";
        args.setQuery(newquery);

        return doQuery(c, args);
    }

    /**
     * Do a query, restricted to a community
     *
     * @param c
     *            context
     * @param args
     *            query args
     * @param comm
     *            community to restrict to
     *
     * @return QueryResults same results as doQuery, restricted to a community
     */
    public static QueryResults doQuery(Context c, QueryArgs args,
            Community comm) throws IOException
    {
        String querystring = args.getQuery();
        querystring = checkEmptyQuery(querystring);

        // "m<id>" is the location token indexed for a community
        String location = "m" + (comm.getID());
        String newquery = "+(" + querystring + ") +location:\"" + location + "\"";
        args.setQuery(newquery);

        return doQuery(c, args);
    }

    /**
     * Do a query, printing results to stdout largely for testing, but it is
     * useful
     */
    public static void doCMDLineQuery(String query)
    {
        System.out.println("Command line query: " + query);
        System.out.println("Only reporting default-sized results list");

        try
        {
            Context c = new Context();
            QueryArgs args = new QueryArgs();
            args.setQuery(query);

            QueryResults results = doQuery(c, args);

            // Iterate handle and type lists in lock-step; they are parallel lists
            Iterator i = results.getHitHandles().iterator();
            Iterator j = results.getHitTypes().iterator();

            while (i.hasNext())
            {
                String thisHandle = (String) i.next();
                Integer thisType = (Integer) j.next();
                String type = Constants.typeText[thisType.intValue()];

                // also look up type
                System.out.println(type + "\t" + thisHandle);
            }
        }
        catch (Exception e)
        {
            System.out.println("Exception caught: " + e);
        }
    }

    /**
     * Close any IndexSearcher that is currently open.
     */
    public static synchronized void close()
    {
        if (searcher != null)
        {
            try
            {
                searcher.close();
                searcher = null;
            }
            catch (IOException ioe)
            {
                log.error("DSQuery: Unable to close open IndexSearcher", ioe);
            }
        }
    }

    /** Command-line entry point: run the first argument as a query. */
    public static void main(String[] args)
    {
        if (args.length > 0)
        {
            DSQuery.doCMDLineQuery(args[0]);
        }
    }

    /*--------- protected methods ----------*/

    /**
     * get an IndexReader.
     * @throws IOException
     */
    protected static IndexReader getIndexReader() throws IOException
    {
        return getSearcher(null).getIndexReader();
    }

    /**
     * get an IndexSearcher, hopefully a cached one (gives much better
     * performance.) checks to see if the index has been modified - if so, it
     * creates a new IndexSearcher
     */
    protected static synchronized IndexSearcher getSearcher(Context c)
            throws IOException
    {
        // If we have already opened a searcher, check to see if the index has been updated
        // If it has, we need to close the existing searcher - we will open a new one later
        if (searcher != null && lastModified != IndexReader.getCurrentVersion(indexDir))
        {
            try
            {
                // Close the cached IndexSearcher
                searcher.close();
            }
            catch (IOException ioe)
            {
                // Index is probably corrupt. Log the error, but continue to either:
                // 1) Return existing searcher (may yet throw exception, no worse than throwing here)
                log.warn("DSQuery: Unable to check for updated index", ioe);
            }
            finally
            {
                // Always drop the cached reference so a fresh searcher is opened below
                searcher = null;
            }
        }

        // There is no existing searcher - either this is the first execution,
        // or the index has been updated and we closed the old index.
        if (searcher == null)
        {
            // So, open a new searcher
            lastModified = IndexReader.getCurrentVersion(indexDir);
            String osName = System.getProperty("os.name");
            if (osName != null && osName.toLowerCase().contains("windows"))
            {
                searcher = new IndexSearcher(indexDir){
                    /*
                     * TODO: Has Lucene fixed this bug yet?
                     * Lucene doesn't release read locks in
                     * windows properly on finalize. Our hack
                     * extends IndexSearcher to force close().
                     */
                    protected void finalize() throws Throwable
                    {
                        this.close();
                        super.finalize();
                    }
                };
            }
            else
            {
                searcher = new IndexSearcher(indexDir);
            }
        }

        return searcher;
    }
}

// it's now up to the display page to do the right thing displaying
// items & communities & collections
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.io.Serializable;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.LinkedList;
import java.util.List;
import java.util.TimeZone;

import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.eperson.Group;

/**
 * Utility class for extracting information about items, possibly just within a
 * certain community or collection, that have been created, modified or
 * withdrawn within a particular range of dates.
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class Harvest
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(Harvest.class);

    /**
     * Obtain information about items that have been created, modified or
     * withdrawn within a given date range. You can also specify 'offset' and
     * 'limit' so that a big harvest can be split up into smaller sections.
     * <P>
     * Note that dates are passed in the standard ISO8601 format used by DSpace
     * (and OAI-PMH).
     * <P>
     * FIXME: Assumes all in_archive items have public metadata
     *
     * @param context
     *            DSpace context
     * @param scope
     *            a Collection, Community, or <code>null</code> indicating the scope is
     *            all of DSpace
     * @param startDate
     *            start of date range, or <code>null</code>
     * @param endDate
     *            end of date range, or <code>null</code>
     * @param offset
     *            for a partial harvest, the point in the overall list of
     *            matching items to start at. 0 means just start at the
     *            beginning.
     * @param limit
     *            the number of matching items to return in a partial harvest.
     *            Specify 0 to return the whole list (or the rest of the list if
     *            an offset was specified.)
     * @param items
     *            if <code>true</code> the <code>item</code> field of each
     *            <code>HarvestedItemInfo</code> object is filled out
     * @param collections
     *            if <code>true</code> the <code>collectionHandles</code>
     *            field of each <code>HarvestedItemInfo</code> object is
     *            filled out
     * @param withdrawn
     *            If <code>true</code>, information about withdrawn items is
     *            included
     * @param nonAnon
     *            If items without anonymous access should be included or not
     * @return List of <code>HarvestedItemInfo</code> objects
     * @throws SQLException
     * @throws ParseException If the date is not in a supported format
     */
    public static List<HarvestedItemInfo> harvest(Context context, DSpaceObject scope,
            String startDate, String endDate, int offset, int limit,
            boolean items, boolean collections, boolean withdrawn,
            boolean nonAnon) throws SQLException, ParseException
    {
        // Put together our query. Note there is no need for an
        // "in_archive=true" condition, we are using the existence of
        // Handles as our 'existence criterion'.
        // FIXME: I think the "DISTINCT" is redundant
        String query = "SELECT DISTINCT handle.handle, handle.resource_id, item.withdrawn, item.last_modified FROM handle, item";

        // We are building a complex query that may contain a variable
        // amount of input data points. To accommodate this while still
        // providing type safety we build a list of parameters to be
        // plugged into the query at the database level.
        List<Serializable> parameters = new ArrayList<Serializable>();

        // Join against the mapping table appropriate for the scope, if any
        if (scope != null)
        {
            if (scope.getType() == Constants.COLLECTION)
            {
                query += ", collection2item";
            }
            else if (scope.getType() == Constants.COMMUNITY)
            {
                query += ", communities2item";
            }
        }

        query += " WHERE handle.resource_type_id=" + Constants.ITEM + " AND handle.resource_id=item.item_id ";

        // Restrict to the scope's collection/community via the mapping table
        if (scope != null)
        {
            if (scope.getType() == Constants.COLLECTION)
            {
                query += " AND collection2item.collection_id= ? " +
                         " AND collection2item.item_id=handle.resource_id ";
                parameters.add(Integer.valueOf(scope.getID()));
            }
            else if (scope.getType() == Constants.COMMUNITY)
            {
                query += " AND communities2item.community_id= ? " +
                         " AND communities2item.item_id=handle.resource_id";
                parameters.add(Integer.valueOf(scope.getID()));
            }
        }

        if (startDate != null)
        {
            query = query + " AND item.last_modified >= ? ";
            parameters.add(toTimestamp(startDate, false));
        }

        if (endDate != null)
        {
            /*
             * If the end date has seconds precision, e.g.:
             *
             * 2004-04-29T13:45:43Z
             *
             * we need to add 999 milliseconds to this. This is because SQL
             * TIMESTAMPs have millisecond precision, and so might have a value:
             *
             * 2004-04-29T13:45:43.952Z
             *
             * and so <= '2004-04-29T13:45:43Z' would not pick this up. Reading
             * things out of the database, TIMESTAMPs are rounded down, so the
             * above value would be read as '2004-04-29T13:45:43Z', and
             * therefore a caller would expect <= '2004-04-29T13:45:43Z' to
             * include that value.
             *
             * Got that? ;-)
             */
            boolean selfGenerated = false;
            if (endDate.length() == 20)
            {
                // length 20 == "yyyy-MM-ddTHH:mm:ssZ"; widen to the end of that second
                endDate = endDate.substring(0, 19) + ".999Z";
                selfGenerated = true;
            }

            query += " AND item.last_modified <= ? ";
            parameters.add(toTimestamp(endDate, selfGenerated));
        }

        if (!withdrawn)
        {
            // Exclude withdrawn items
            if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
            {
                query += " AND withdrawn=0 ";
            }
            else
            {
                // postgres uses booleans
                query += " AND withdrawn=false ";
            }
        }

        // Order by item ID, so that for a given harvest the order will be
        // consistent. This is so that big harvests can be broken up into
        // several smaller operations (e.g. for OAI resumption tokens.)
        query += " ORDER BY handle.resource_id";

        log.debug(LogManager.getHeader(context, "harvest SQL", query));

        // Execute
        Object[] parametersArray = parameters.toArray();
        TableRowIterator tri = DatabaseManager.query(context, query, parametersArray);
        List<HarvestedItemInfo> infoObjects = new LinkedList<HarvestedItemInfo>();
        int index = 0;       // position within the full result set
        int itemCounter = 0; // number of items actually emitted

        try
        {
            // Process results of query into HarvestedItemInfo objects
            while (tri.hasNext())
            {
                TableRow row = tri.next();

                /*
                 * If we are looking for public-only items, we need to scan all objects
                 * for permissions in order to properly calculate the offset
                 */
                if ((!nonAnon) && (index < offset))
                {
                    HarvestedItemInfo itemInfo = new HarvestedItemInfo();
                    itemInfo.itemID = row.getIntColumn("resource_id");
                    itemInfo.item = Item.find(context, itemInfo.itemID);
                    Group[] authorizedGroups = AuthorizeManager.getAuthorizedGroups(context, itemInfo.item, Constants.READ);
                    boolean added = false;
                    // Group id 0 is the anonymous group; a READ grant to it means public
                    for (int i = 0; i < authorizedGroups.length; i++)
                    {
                        if ((authorizedGroups[i].getID() == 0) && (!added))
                        {
                            added = true;
                        }
                    }
                    if (!added)
                    {
                        // Non-public item before the window doesn't count toward
                        // the offset, so push the window one position further
                        offset++;
                    }
                }
                /*
                 * This conditional ensures that we only process items within any
                 * constraints specified by 'offset' and 'limit' parameters.
                 */
                else if ((index >= offset)
                        && ((limit == 0) || (itemCounter < limit)))
                {
                    HarvestedItemInfo itemInfo = new HarvestedItemInfo();
                    itemInfo.context = context;
                    itemInfo.handle = row.getStringColumn("handle");
                    itemInfo.itemID = row.getIntColumn("resource_id");
                    itemInfo.datestamp = row.getDateColumn("last_modified");
                    itemInfo.withdrawn = row.getBooleanColumn("withdrawn");

                    if (collections)
                    {
                        fillCollections(context, itemInfo);
                    }

                    if (items)
                    {
                        // Get the item
                        itemInfo.item = Item.find(context, itemInfo.itemID);
                    }

                    // Include unconditionally when non-anonymous items are allowed,
                    // when the item wasn't loaded, or for withdrawn items (if requested);
                    // otherwise only include it when the anonymous group has READ.
                    if ((nonAnon) || (itemInfo.item == null) || (withdrawn && itemInfo.withdrawn))
                    {
                        infoObjects.add(itemInfo);
                        itemCounter++;
                    }
                    else
                    {
                        Group[] authorizedGroups = AuthorizeManager.getAuthorizedGroups(context, itemInfo.item, Constants.READ);
                        boolean added = false;
                        for (int i = 0; i < authorizedGroups.length; i++)
                        {
                            if ((authorizedGroups[i].getID() == 0) && (!added))
                            {
                                infoObjects.add(itemInfo);
                                added = true;
                                itemCounter++;
                            }
                        }
                    }
                }

                index++;
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        return infoObjects;
    }

    /**
     * Get harvested item info for a single item. <code>item</code> field in
     * returned <code>HarvestedItemInfo</code> object is always filled out.
     *
     * @param context
     *            DSpace context
     * @param handle
     *            Prefix-less Handle of item
     * @param collections
     *            if <code>true</code> the <code>collectionHandles</code>
     *            field of the <code>HarvestedItemInfo</code> object is filled
     *            out
     *
     * @return <code>HarvestedItemInfo</code> object for the single item, or
     *         <code>null</code>
     * @throws SQLException
     */
    public static HarvestedItemInfo getSingle(Context context, String handle,
            boolean collections) throws SQLException
    {
        // FIXME: Assume Handle is item
        Item i = (Item) HandleManager.resolveToObject(context, handle);

        if (i == null)
        {
            return null;
        }

        // Fill out OAI info item object
        HarvestedItemInfo itemInfo = new HarvestedItemInfo();
        itemInfo.context = context;
        itemInfo.item = i;
        itemInfo.handle = handle;
        itemInfo.withdrawn = i.isWithdrawn();
        itemInfo.datestamp = i.getLastModified();
        itemInfo.itemID = i.getID();

        // Get the sets
        if (collections)
        {
            fillCollections(context, itemInfo);
        }

        return itemInfo;
    }

    /**
     * Fill out the containers field of the HarvestedItemInfo object
     *
     * @param context
     *            DSpace context
     * @param itemInfo
     *            HarvestedItemInfo object to fill out
     * @throws SQLException
     */
    private static void fillCollections(Context context,
            HarvestedItemInfo itemInfo) throws SQLException
    {
        // Get the collection Handles from DB
        TableRowIterator colRows = DatabaseManager.query(context,
                "SELECT handle.handle FROM handle, collection2item WHERE handle.resource_type_id= ? " +
                "AND collection2item.collection_id=handle.resource_id AND collection2item.item_id = ? ",
                Constants.COLLECTION, itemInfo.itemID);

        try
        {
            // Chuck 'em in the itemInfo object
            // NOTE(review): raw LinkedList — presumably List<String>; left as-is
            itemInfo.collectionHandles = new LinkedList();

            while (colRows.hasNext())
            {
                TableRow r = colRows.next();
                itemInfo.collectionHandles.add(r.getStringColumn("handle"));
            }
        }
        finally
        {
            if (colRows != null)
            {
                colRows.close();
            }
        }
    }

    /**
     * Convert a String to a java.sql.Timestamp object
     *
     * @param t The timestamp String
     * @param selfGenerated Is this a self generated timestamp (e.g. it has .999 on the end)
     * @return The converted Timestamp
     * @throws ParseException
     */
    private static Timestamp toTimestamp(String t, boolean selfGenerated)
            throws ParseException
    {
        SimpleDateFormat df;

        // Choose the correct date format based on string length
        if (t.length() == 10)
        {
            // Date-only form: yyyy-MM-dd
            df = new SimpleDateFormat("yyyy-MM-dd");
        }
        else if (t.length() == 20)
        {
            // Seconds-precision ISO8601 form
            df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        }
        else if (selfGenerated)
        {
            // Millisecond form produced by harvest() when it appends ".999Z"
            df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        }
        else
        {
            // Not self generated, and not in a guessable format
            throw new ParseException("", 0);
        }

        // Parse the date in UTC, matching the trailing 'Z' designator
        df.setCalendar(Calendar.getInstance(TimeZone.getTimeZone("UTC")));
        return new Timestamp(df.parse(t).getTime());
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

import javax.servlet.http.HttpServletRequest;

import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.sort.SortOption;

import org.apache.commons.lang.StringUtils;

/**
 * Contains the arguments for a query. Fill it out and pass to the query engine
 */
public class QueryArgs
{
    // the query string
    private String query;

    // start and count defines a search 'cursor' or page
    // query will return 'count' hits beginning at offset 'start'
    private int start = 0; // default values

    private int pageSize = 10;

    // sort field/order; null sort option means sort by relevance
    private SortOption sortOption = null;

    private String sortOrder = SortOption.DESCENDING;

    /** number of metadata elements to display before truncating using "et al" */
    private int etAl = ConfigurationManager.getIntProperty("webui.itemlist.author-limit");

    /**
     * @return the number of metadata fields at which to truncate with "et al"
     */
    public int getEtAl()
    {
        return etAl;
    }

    /**
     * set the number of metadata fields at which to truncate with "et al"
     *
     * @param etAl
     */
    public void setEtAl(int etAl)
    {
        this.etAl = etAl;
    }

    /**
     * set the query string
     *
     * @param newQuery
     */
    public void setQuery(String newQuery)
    {
        query = newQuery;
    }

    /**
     * retrieve the query string
     *
     * @return the current query string
     */
    public String getQuery()
    {
        return query;
    }

    /**
     * set the offset of the desired search results, beginning with 0 ; used to
     * page results (the default value is 0)
     *
     * @param newStart
     *            index of first desired result
     */
    public void setStart(int newStart)
    {
        start = newStart;
    }

    /**
     * read the search's starting offset
     *
     * @return current index of first desired result
     */
    public int getStart()
    {
        return start;
    }

    /**
     * set the count of hits to return; used to implement paged searching see
     * the initializer for the default value
     *
     * @param newSize
     *            number of hits per page
     */
    public void setPageSize(int newSize)
    {
        pageSize = newSize;
    }

    /**
     * get the count of hits to return
     *
     * @return number of results per page
     */
    public int getPageSize()
    {
        return pageSize;
    }

    /** @return the sort option, or null for relevance sorting */
    public SortOption getSortOption()
    {
        return sortOption;
    }

    /** set the sort option (null means sort by relevance) */
    public void setSortOption(SortOption sortOption)
    {
        this.sortOption = sortOption;
    }

    /** @return the sort order, one of the SortOption order constants */
    public String getSortOrder()
    {
        return sortOrder;
    }

    /** set the sort order */
    public void setSortOrder(String sortOrder)
    {
        this.sortOrder = sortOrder;
    }

    /**
     * Builds an advanced-query description string.
     *
     * The string is built using the passed in values
     * query{1,2,3}, field{1,2,3} and conjunction{1,2} taken from
     * the parameter request.
     *
     * @param request the request object to take the values from
     *
     * @return the query description string built
     */
    public String buildQuery(HttpServletRequest request)
    {
        String newquery = "(";
        String numFieldStr = request.getParameter("num_search_field");
        // for backward compatibility
        if (numFieldStr == null)
        {
            numFieldStr = "3";
        }
        int numField = Integer.parseInt(numFieldStr);

        // parallel lists: term i, its field restriction, and the conjunction
        // joining it to term i+1
        List<String> query = new ArrayList<String>();
        List<String> field = new ArrayList<String>();
        List<String> conjunction = new ArrayList<String>();

        for (int i = 1; i <= numField; i++)
        {
            String tmp_query = request.getParameter("query"+i);
            String tmp_field = request.getParameter("field"+i);
            // TODO: Ensure a valid field from config
            // Disarm fields with regexp control characters
            if (tmp_field != null)
            {
                tmp_field = tmp_field.replace('/', ' ');
                tmp_field = tmp_field.replace('<', ' ');
                tmp_field = tmp_field.replace('\\', ' ');
                tmp_field = tmp_field.replace(':', ' ');
            }

            // Skip empty query boxes entirely
            if (tmp_query != null && !tmp_query.equals(""))
            {
                query.add(tmp_query.trim());
                if (tmp_field == null)
                {
                    field.add("ANY");
                }
                else
                {
                    field.add(tmp_field.trim());
                }
                // There is one fewer conjunction than there are query boxes
                if (i != numField)
                {
                    conjunction.add(request.getParameter("conjunction"+i) != null?
                            request.getParameter("conjunction"+i):"AND");
                }
            }
        }

        Iterator<String> iquery = query.iterator();
        Iterator<String> ifield = field.iterator();
        Iterator<String> iconj = conjunction.iterator();

        String conj_curr = "";
        while (iquery.hasNext())
        {
            // conj_curr is empty before the first term, then carries the
            // conjunction chosen after the previous term
            newquery = newquery + conj_curr;
            String query_curr = iquery.next();
            String field_curr = ifield.next();
            newquery = newquery + buildQueryPart(query_curr,field_curr);
            if (iconj.hasNext())
            {
                conj_curr = " " + iconj.next() + " ";
            }
        }

        newquery = newquery + ")";
        return (newquery);
    }

    /**
     * Builds a query-part using the field and value passed in
     * with ' --&gt; " (single to double quote) translation.
     *
     * @param myquery the value the query will look for
     * @param myfield the field myquery will be looked for in
     *
     * @return the query created
     */
    private String buildQueryPart(String myquery, String myfield)
    {
        StringBuilder newQuery = new StringBuilder();
        newQuery.append("(");

        // State for the tokenizer walk below:
        // newTerm    - next non-whitespace token starts a new query term
        // inPhrase   - we are inside a quoted phrase
        // phraseChar - the quote character that opened the current phrase
        boolean newTerm = true;
        boolean inPhrase = false;
        char phraseChar = '\"';

        // Tokenize on whitespace and both quote characters, returning
        // the delimiters themselves as tokens
        StringTokenizer qtok = new StringTokenizer(myquery, " \t\n\r\f\"\'", true);
        while (qtok.hasMoreTokens())
        {
            String token = qtok.nextToken();
            if (StringUtils.isWhitespace(token))
            {
                // Whitespace outside a phrase ends the current term
                if (!inPhrase)
                {
                    newTerm = true;
                }
                newQuery.append(token);
            }
            else
            {
                // Matched the end of the phrase
                if (inPhrase && token.charAt(0) == phraseChar)
                {
                    newQuery.append("\"");
                    inPhrase = false;
                }
                else
                {
                    // If we aren't dealing with a new term, and have a single quote
                    // don't touch it. (for example, the apostrophe in it's).
                    if (!newTerm && token.charAt(0) == '\'')
                    {
                        newQuery.append(token);
                    }
                    else
                    {
                        // Treat - my"phrased query" - as - my "phrased query"
                        if (!newTerm && token.charAt(0) == '\"')
                        {
                            newQuery.append(" ");
                            newTerm = true;
                        }

                        // This is a new term in the query (ie. preceded by nothing or whitespace)
                        // so apply a field restriction if specified
                        if (newTerm && !myfield.equals("ANY"))
                        {
                            newQuery.append(myfield).append(":");
                        }

                        // Open a new phrase, and closing at the corresponding character
                        // ie. 'my phrase' or "my phrase"
                        if (token.charAt(0) == '\"' || token.charAt(0) == '\'')
                        {
                            newQuery.append("\"");
                            inPhrase = true;
                            newTerm = false;
                            phraseChar = token.charAt(0);
                        }
                        else
                        {
                            newQuery.append(token);
                            newTerm = false;
                        }
                    }
                }
            }
        }

        newQuery.append(")");
        return newQuery.toString();
    }

    /**
     * Constructs a HashMap with the keys field{1,2,3}, query{1,2,3} and
     * conjunction{1,2} taking the values from the passed-in argument
     * defaulting to "".
     *
     * @param request the request-describing object to take the values from
     *
     * @return the created HashMap
     */
    public Map<String, String> buildQueryMap(HttpServletRequest request)
    {
        Map<String, String> queryMap = new HashMap<String, String>();
        String numFieldStr = request.getParameter("num_search_field");
        // for backward compatibility
        if (numFieldStr == null)
        {
            numFieldStr = "3";
        }
        int numField = Integer.parseInt(numFieldStr);

        // All fields except the last also have a trailing conjunction
        for (int i = 1; i < numField; i++)
        {
            String queryStr = "query" + i;
            String fieldStr = "field" + i;
            String conjunctionStr = "conjunction" + i;

            queryMap.put(queryStr, StringUtils.defaultString(request.getParameter(queryStr), ""));
            queryMap.put(fieldStr, StringUtils.defaultString(request.getParameter(fieldStr), "ANY"));
            queryMap.put(conjunctionStr, StringUtils.defaultString(request.getParameter(conjunctionStr), "AND"));
        }

        // The last field has no conjunction after it
        String queryStr = "query" + numField;
        String fieldStr = "field" + numField;
        queryMap.put(queryStr, StringUtils.defaultString(request.getParameter(queryStr), ""));
        queryMap.put(fieldStr, StringUtils.defaultString(request.getParameter(fieldStr), "ANY"));

        return (queryMap);
    }

    /**
     * Builds an HTTP query string for some parameters with the value
     * taken from the request context passed in.
     *
     * The returned string includes key/value pairs in the HTTP query string
     * format (key1=value1&amp;key2=value2...) for the keys query{1,2,3},
     * field{1,2,3} and conjunction{1,2} with values taken from request
     * and defaulting to "".
     * <P>
     * Note, that the values are url-encoded using the UTF-8 encoding scheme
     * as the corresponding W3C recommendation states.
     * <P>
     * Also note that neither leading ? (question mark)
     * nor leading &amp; (ampersand mark) is included.
     * Take this into account when appending to a real URL.
     *
     * @param request the request object to take the values from
     *
     * @return the query string that can be used without further
     *         transformation in URLs
     */
    public String buildHTTPQuery(HttpServletRequest request)
            throws UnsupportedEncodingException
    {
        StringBuilder queryString = new StringBuilder();
        Map<String, String> queryMap = buildQueryMap(request);

        for (Map.Entry<String, String> query : queryMap.entrySet())
        {
            queryString.append("&")
                       .append(query.getKey())
                       .append("=")
                       .append(URLEncoder.encode(query.getValue(), Constants.DEFAULT_ENCODING));
        }

        if (request.getParameter("num_search_field") != null)
        {
            queryString.append("&num_search_field=").append(request.getParameter("num_search_field"));
        }

        // return the result with the leading "&" removed
        return queryString.substring(1);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.search; import java.util.ArrayList; import java.util.List; import org.dspace.core.ConfigurationManager; /** * Contains the results of a query. Use access methods to examine and retrieve * the results. */ public class QueryResults { private int hitCount; // total hits returned by search engine private int start; // offset of query 'page' private int pageSize; // max number of hits returned private List<String> hitHandles = new ArrayList<String>(); // handles of content (items, collections, communities) private List<Integer> hitTypes = new ArrayList<Integer>(); // Resource type - from Constants private List<Integer> hitIds = new ArrayList<Integer>(); // Resource ids private String errorMsg; //error string, if there is one /** number of metadata elements to display before truncating using "et al" */ private int etAl = ConfigurationManager.getIntProperty("webui.itemlist.author-limit"); /** * @return the number of metadata fields at which to truncate with "et al" */ public int getEtAl() { return etAl; } /** * set the number of metadata fields at which to truncate with "et al" * * @param etAl */ public void setEtAl(int etAl) { this.etAl = etAl; } /** set total number of hits found by search engine, not number in hitHandles */ public void setHitCount(int newCount) { hitCount = newCount; } /** * get total number of hits found by search engine, not just number of * returned results */ public int getHitCount() { return hitCount; } /** set start of 'page' of results */ public void setStart(int newStart) { start = newStart; } /** get start of 'page' of results */ public int getStart() { return start; } /** set length of 'page' of results */ public void setPageSize(int newSize) { pageSize = newSize; } /** get length of 'page' of results */ public int 
getPageSize() { return pageSize; } /** set the List of handles corresponding to hits */ public void setHitHandles(List<String> myHits) { hitHandles = myHits != null ? myHits : new ArrayList<String>(); } /** get the List of handles corresponding to hits */ public List<String> getHitHandles() { return hitHandles; } /** set the List of ids corresponding to hits */ public void setHitIds(List<Integer> myHits) { hitIds = myHits != null ? myHits : new ArrayList<Integer>(); } /** get the List of handles corresponding to hits */ public List<Integer> getHitIds() { return hitIds; } /** set the List of types corresponding to handles */ public void setHitTypes(List<Integer> newTypes) { hitTypes = newTypes != null ? newTypes : new ArrayList<Integer>(); } /** get the List of types corresponding to handles */ public List<Integer> getHitTypes() { return hitTypes; } /** set error message */ public void setErrorMsg(String msg) { errorMsg = msg; } /** get error message */ public String getErrorMsg() { return errorMsg; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.util.HashSet;
import java.util.Set;

import org.apache.log4j.Logger;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;

/**
 * Class for updating search indices from content events.
 *
 * @version $Revision: 5844 $
 */
public class SearchConsumer implements Consumer
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(SearchConsumer.class);

    // collect Items, Collections, Communities that need indexing
    private Set<DSpaceObject> objectsToUpdate = null;

    // handles to delete since IDs are not useful by now.
    private Set<String> handlesToDelete = null;

    public void initialize() throws Exception
    {
        // No-op
    }

    /**
     * Consume a content event -- just build the sets of objects to add (new) to
     * the index, update, and delete.
     *
     * @param ctx
     *            DSpace context
     * @param event
     *            Content event
     */
    public void consume(Context ctx, Event event) throws Exception
    {
        if (objectsToUpdate == null)
        {
            objectsToUpdate = new HashSet<DSpaceObject>();
            handlesToDelete = new HashSet<String>();
        }

        int st = event.getSubjectType();
        if (!(st == Constants.ITEM || st == Constants.BUNDLE
                || st == Constants.COLLECTION || st == Constants.COMMUNITY))
        {
            log.warn("SearchConsumer should not have been given this kind of Subject in an event, skipping: "
                    + event.toString());
            return;
        }

        DSpaceObject subject = event.getSubject(ctx);
        DSpaceObject object = event.getObject(ctx);

        // If event subject is a Bundle and event was Add or Remove,
        // transform the event to be a Modify on the owning Item.
        // It could be a new bitstream in the TEXT bundle which
        // would change the index.
        int et = event.getEventType();
        if (st == Constants.BUNDLE)
        {
            if ((et == Event.ADD || et == Event.REMOVE) && subject != null
                    && "TEXT".equals(((Bundle) subject).getName()))
            {
                // FIX: a TEXT bundle that is not (or no longer) attached to any
                // Item would previously cause getItems()[0] to throw
                // ArrayIndexOutOfBoundsException; skip such events instead.
                DSpaceObject[] owners = ((Bundle) subject).getItems();
                if (owners.length == 0)
                {
                    log.warn("Bundle event had no owning Item, skipping: " + event.toString());
                    return;
                }
                st = Constants.ITEM;
                et = Event.MODIFY;
                subject = owners[0];
                if (log.isDebugEnabled())
                {
                    log.debug("Transforming Bundle event into MODIFY of Item "
                            + subject.getHandle());
                }
            }
            else
            {
                // Other Bundle events cannot change the index
                return;
            }
        }

        switch (et)
        {
        case Event.CREATE:
        case Event.MODIFY:
        case Event.MODIFY_METADATA:
            if (subject == null)
            {
                log.warn(event.getEventTypeAsString() + " event, could not get object for "
                        + event.getSubjectTypeAsString() + " id="
                        + String.valueOf(event.getSubjectID())
                        + ", perhaps it has been deleted.");
            }
            else
            {
                log.debug("consume() adding event to update queue: " + event.toString());
                objectsToUpdate.add(subject);
            }
            break;

        case Event.REMOVE:
        case Event.ADD:
            if (object == null)
            {
                log.warn(event.getEventTypeAsString() + " event, could not get object for "
                        + event.getObjectTypeAsString() + " id="
                        + String.valueOf(event.getObjectID())
                        + ", perhaps it has been deleted.");
            }
            else
            {
                log.debug("consume() adding event to update queue: " + event.toString());
                objectsToUpdate.add(object);
            }
            break;

        case Event.DELETE:
            // Subject is gone; only its handle (in the event detail) survives
            String detail = event.getDetail();
            if (detail == null)
            {
                log.warn("got null detail on DELETE event, skipping it.");
            }
            else
            {
                log.debug("consume() adding event to delete queue: " + event.toString());
                handlesToDelete.add(detail);
            }
            break;

        default:
            log.warn("SearchConsumer should not have been given a event of type="
                    + event.getEventTypeAsString() + " on subject="
                    + event.getSubjectTypeAsString());
            break;
        }
    }

    /**
     * Process sets of objects to add, update, and delete in index. Correct for
     * interactions between the sets -- e.g. objects which were deleted do not
     * need to be added or updated, new objects don't also need an update, etc.
     */
    public void end(Context ctx) throws Exception
    {
        if (objectsToUpdate != null && handlesToDelete != null)
        {
            // update the changed Items not deleted because they were on create list
            for (DSpaceObject iu : objectsToUpdate)
            {
                /* we let all types through here and
                 * allow the search DSIndexer to make
                 * decisions on indexing and/or removal
                 */
                String hdl = iu.getHandle();
                if (hdl != null && !handlesToDelete.contains(hdl))
                {
                    try
                    {
                        DSIndexer.indexContent(ctx, iu, true);
                        log.debug("Indexed " + Constants.typeText[iu.getType()]
                                + ", id=" + String.valueOf(iu.getID())
                                + ", handle=" + hdl);
                    }
                    catch (Exception e)
                    {
                        // One failed object must not stop the rest of the batch
                        log.error("Failed while indexing object: ", e);
                    }
                }
            }

            for (String hdl : handlesToDelete)
            {
                try
                {
                    DSIndexer.unIndexContent(ctx, hdl);
                    if (log.isDebugEnabled())
                    {
                        log.debug("UN-Indexed Item, handle=" + hdl);
                    }
                }
                catch (Exception e)
                {
                    log.error("Failed while UN-indexing object: " + hdl, e);
                }
            }
        }

        // "free" the resources
        objectsToUpdate = null;
        handlesToDelete = null;
    }

    public void finish(Context ctx) throws Exception
    {
        // No-op
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import java.text.SimpleDateFormat;
import java.text.ParseException;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.content.authority.MetadataAuthorityManager;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.sort.SortOption;
import org.dspace.sort.OrderFormat;

/**
 * DSIndexer contains the methods that index Items and their metadata,
 * collections, communities, etc. It is meant to either be invoked from the
 * command line (see dspace/bin/index-all) or via the indexContent() methods
 * within DSpace.
 *
 * As of 1.4.2 this class has new incremental update of index functionality
 * and better detection of locked state thanks to Lucene 2.1 moving write.lock.
 * It will attempt to attain a lock on the index in the event that an update
 * is requested and will wait a maximum of 30 seconds (a worst case scenario)
 * to attain the lock before giving up and logging the failure to log4j and
 * to the DSpace administrator email account.
 *
 * The Administrator can choose to run DSIndexer in a cron that
 * repeats regularly, a failed attempt to index from the UI will be "caught" up
 * on in that cron.
 *
 * @author Mark Diggory
 * @author Graham Triggs
 */
public class DSIndexer
{
    private static final Logger log = Logger.getLogger(DSIndexer.class);

    /** Lucene field holding the millisecond timestamp of when a document was last (re)indexed. */
    private static final String LAST_INDEXED_FIELD = "DSIndexer.lastIndexed";

    /** Lucene field holding the document status ("archived", "withdrawn" or "deleted"). */
    private static final String DOCUMENT_STATUS_FIELD = "DSIndexer.status";

    /** How long (ms) to wait for the Lucene write lock before giving up. */
    private static final long WRITE_LOCK_TIMEOUT = 30000 /* 30 sec */;

    // Background thread that flushes the task queue after search.index.delay ms
    private static Thread delayedIndexFlusher = null;

    // search.index.delay <= 0 means "write immediately, no delayed flushing"
    private static int indexFlushDelay = ConfigurationManager.getIntProperty("search.index.delay", -1);

    // In batch mode, flush the queue once this many documents are pending
    private static int batchFlushAfterDocuments = ConfigurationManager.getIntProperty("search.batch.documents", 20);
    private static boolean batchProcessingMode = false;

    // Class to hold the index configuration (one instance per config line)
    private static class IndexConfig
    {
        String indexName;
        String schema;
        String element;
        String qualifier = null;
        String type = "text";

        IndexConfig()
        {
        }

        IndexConfig(String indexName, String schema, String element, String qualifier, String type)
        {
            this.indexName = indexName;
            this.schema = schema;
            this.element = element;
            this.qualifier = qualifier;
            this.type = type;
        }
    }

    private static String indexDirectory = ConfigurationManager.getProperty("search.dir");

    // -1 means "no limit configured"; see openIndex()
    private static int maxfieldlength = -1;

    // TODO: Support for analyzers per language, or multiple indices
    /** The analyzer for this DSpace instance */
    private static volatile Analyzer analyzer = null;

    /** Static initialisation of index configuration */
    /** Includes backwards compatible default configuration */
    private static IndexConfig[] indexConfigArr = new IndexConfig[]
    {
        new IndexConfig("author", "dc", "contributor", Item.ANY, "text"),
        new IndexConfig("author", "dc", "creator", Item.ANY, "text"),
        new IndexConfig("author", "dc", "description", "statementofresponsibility", "text"),
        new IndexConfig("title", "dc", "title", Item.ANY, "text"),
        new IndexConfig("keyword", "dc", "subject", Item.ANY, "text"),
        new IndexConfig("abstract", "dc", "description", "abstract", "text"),
        new IndexConfig("abstract", "dc", "description", "tableofcontents", "text"),
        new IndexConfig("series", "dc", "relation", "ispartofseries", "text"),
        new IndexConfig("mimetype", "dc", "format", "mimetype", "text"),
        new IndexConfig("sponsor", "dc", "description", "sponsorship", "text"),
        new IndexConfig("identifier", "dc", "identifier", Item.ANY, "text")
    };

    static
    {
        // calculate maxfieldlength
        if (ConfigurationManager.getProperty("search.maxfieldlength") != null)
        {
            maxfieldlength = ConfigurationManager.getIntProperty("search.maxfieldlength");
        }

        // read in indexes from the config
        ArrayList<String> indexConfigList = new ArrayList<String>();

        // read in search.index.1, search.index.2....
        for (int i = 1; ConfigurationManager.getProperty("search.index." + i) != null; i++)
        {
            indexConfigList.add(ConfigurationManager.getProperty("search.index." + i));
        }

        if (indexConfigList.size() > 0)
        {
            indexConfigArr = new IndexConfig[indexConfigList.size()];

            for (int i = 0; i < indexConfigList.size(); i++)
            {
                indexConfigArr[i] = new IndexConfig();
                String index = indexConfigList.get(i);

                // Config line format: <indexName>:<schema>.<element>[.<qualifier>][:<type>]
                String[] configLine = index.split(":");

                indexConfigArr[i].indexName = configLine[0];

                // Get the schema, element and qualifier for the index
                // TODO: Should check valid schema, element, qualifier?
                String[] parts = configLine[1].split("\\.");

                switch (parts.length)
                {
                case 3:
                    indexConfigArr[i].qualifier = parts[2];
                    // Fall through for other parts of the array
                case 2:
                    indexConfigArr[i].schema = parts[0];
                    indexConfigArr[i].element = parts[1];
                    break;
                default:
                    log.warn("Malformed configuration line: search.index." + i);
                    // A bad line leaves this index undefined; abort class init
                    // rather than run with a broken index configuration
                    throw new IllegalStateException("Malformed configuration line: search.index." + i);
                }

                if (configLine.length > 2)
                {
                    indexConfigArr[i].type = configLine[2];
                }
            }
        }

        /*
         * Increase the default write lock so that Indexing can be interrupted.
         */
        IndexWriter.setDefaultWriteLockTimeout(WRITE_LOCK_TIMEOUT);

        /*
         * Create the index directory if it doesn't already exist.
         */
        if (!IndexReader.indexExists(indexDirectory))
        {
            try
            {
                if (!new File(indexDirectory).mkdirs())
                {
                    log.error("Unable to create index directory: " + indexDirectory);
                }
                openIndex(true).close();
            }
            catch (IOException e)
            {
                throw new IllegalStateException("Could not create search index: " + e.getMessage(), e);
            }
        }
    }

    /**
     * Enable/disable batch mode. Leaving batch mode flushes any queued tasks.
     */
    public static void setBatchProcessingMode(boolean mode)
    {
        batchProcessingMode = mode;

        if (mode == false)
        {
            flushIndexingTaskQueue();
        }
    }

    /**
     * If the handle for the "dso" already exists in the index, and
     * the "dso" has a lastModified timestamp that is newer than
     * the document in the index then it is updated, otherwise a
     * new document is added.
*
     * @param context Users Context
     * @param dso DSpace Object (Item, Collection or Community)
     * @throws SQLException
     */
    public static void indexContent(Context context, DSpaceObject dso) throws SQLException
    {
        indexContent(context, dso, false);
    }

    /**
     * If the handle for the "dso" already exists in the index, and
     * the "dso" has a lastModified timestamp that is newer than
     * the document in the index then it is updated, otherwise a
     * new document is added.
     *
     * @param context Users Context
     * @param dso DSpace Object (Item, Collection or Community)
     * @param force Force update even if not stale.
     * @throws SQLException
     */
    public static void indexContent(Context context, DSpaceObject dso, boolean force) throws SQLException
    {
        try
        {
            IndexingTask task = prepareIndexingTask(dso, force);
            if (task != null)
            {
                processIndexingTask(task);
            }
        }
        catch (IOException e)
        {
            // I/O problems are logged, not propagated: callers only see SQLException
            log.error(e);
        }
    }

    /**
     * unIndex removes an Item, Collection, or Community only works if the
     * DSpaceObject has a handle (uses the handle for its unique ID)
     *
     * @param context DSpace context
     * @param dso DSpace Object, can be Community, Item, or Collection
     * @throws SQLException
     * @throws IOException
     */
    public static void unIndexContent(Context context, DSpaceObject dso)
            throws SQLException, IOException
    {
        try
        {
            unIndexContent(context, dso.getHandle());
        }
        catch (Exception exception)
        {
            // Best-effort: log and alert the administrator instead of failing the caller
            log.error(exception.getMessage(), exception);
            emailException(exception);
        }
    }

    /**
     * Unindex a Document in the Lucene Index.
     *
     * @param context
     * @param handle handle of the object to remove from the index
     * @throws SQLException
     * @throws IOException
     */
    public static void unIndexContent(Context context, String handle)
            throws SQLException, IOException
    {
        if (handle != null)
        {
            IndexingTask task = new IndexingTask(IndexingTask.Action.DELETE, new Term("handle", handle), null);
            // NOTE(review): task can never be null here; the check is redundant but harmless
            if (task != null)
            {
                processIndexingTask(task);
            }
        }
        else
        {
            log.warn("unindex of content with null handle attempted");
            // FIXME: no handle, fail quietly - should log failure
            //System.out.println("Error in unIndexContent: Object had no
            // handle!");
        }
    }

    /**
     * reIndexContent removes something from the index, then re-indexes it
     *
     * @param context context object
     * @param dso object to re-index
     */
    public static void reIndexContent(Context context, DSpaceObject dso)
            throws SQLException, IOException
    {
        try
        {
            indexContent(context, dso);
        }
        catch (Exception exception)
        {
            log.error(exception.getMessage(), exception);
            emailException(exception);
        }
    }

    /**
     * create full index - wiping old index
     *
     * @param c context to use
     */
    public static void createIndex(Context c) throws SQLException, IOException
    {
        /* Create a new index, blowing away the old. */
        openIndex(true).close();

        /* Reindex all content preemptively. */
        DSIndexer.updateIndex(c, true);
    }

    /**
     * Optimize the existing index. Important to do regularly to reduce
     * filehandle usage and keep performance fast!
*
     * @param c Users Context
     * @throws SQLException
     * @throws IOException
     */
    public static void optimizeIndex(Context c) throws SQLException, IOException
    {
        IndexWriter writer = openIndex(false);

        try
        {
            // Make sure any queued tasks are written before optimizing
            flushIndexingTaskQueue(writer);
            writer.optimize();
        }
        finally
        {
            writer.close();
        }
    }

    /**
     * When invoked as a command-line tool, creates, updates, removes
     * content from the whole index
     *
     * @param args
     *            the command-line arguments, none used
     * @throws IOException
     * @throws SQLException
     */
    public static void main(String[] args) throws SQLException, IOException
    {
        Date startTime = new Date();
        try
        {
            setBatchProcessingMode(true);
            // NOTE(review): this Context is never completed/aborted; it is only
            // used read-only with authorisation disabled
            Context context = new Context();
            context.setIgnoreAuthorization(true);

            String usage = "org.dspace.search.DSIndexer [-cbhof[r <item handle>]] or nothing to update/clean an existing index.";
            Options options = new Options();
            HelpFormatter formatter = new HelpFormatter();
            CommandLine line = null;

            options.addOption(OptionBuilder
                    .withArgName("item handle")
                    .hasArg(true)
                    .withDescription(
                            "remove an Item, Collection or Community from index based on its handle")
                    .create("r"));

            options.addOption(OptionBuilder.isRequired(false).withDescription(
                    "optimize existing index").create("o"));

            options.addOption(OptionBuilder
                    .isRequired(false)
                    .withDescription(
                            "clean existing index removing any documents that no longer exist in the db")
                    .create("c"));

            options.addOption(OptionBuilder.isRequired(false).withDescription(
                    "(re)build index, wiping out current one if it exists").create(
                    "b"));

            options.addOption(OptionBuilder
                    .isRequired(false)
                    .withDescription(
                            "if updating existing index, force each handle to be reindexed even if uptodate")
                    .create("f"));

            options.addOption(OptionBuilder.isRequired(false).withDescription(
                    "print this help message").create("h"));

            try
            {
                line = new PosixParser().parse(options, args);
            }
            catch (Exception e)
            {
                // automatically generate the help statement
                formatter.printHelp(usage, e.getMessage(), options, "");
                System.exit(1);
            }

            if (line.hasOption("h"))
            {
                // automatically generate the help statement
                formatter.printHelp(usage, options);
                System.exit(1);
            }

            if (line.hasOption("r"))
            {
                log.info("Removing " + line.getOptionValue("r") + " from Index");
                unIndexContent(context, line.getOptionValue("r"));
            }
            else if (line.hasOption("o"))
            {
                log.info("Optimizing Index");
                optimizeIndex(context);
            }
            else if (line.hasOption("c"))
            {
                log.info("Cleaning Index");
                cleanIndex(context);
            }
            else if (line.hasOption("b"))
            {
                log.info("(Re)building index from scratch.");
                createIndex(context);
            }
            else
            {
                // Default action with no flags: clean then incremental update
                log.info("Updating and Cleaning Index");
                cleanIndex(context);
                updateIndex(context, line.hasOption("f"));
            }

            log.info("Done with indexing");
        }
        finally
        {
            // Leaving batch mode flushes any queued index tasks
            setBatchProcessingMode(false);
            Date endTime = new Date();
            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println("Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)");
        }
    }

    /**
     * Iterates over all Items, Collections and Communities. And updates
     * them in the index. Uses decaching to control memory footprint.
     * Uses indexContent and isStale to check state of item in index.
     *
     * @param context
     */
    public static void updateIndex(Context context)
    {
        updateIndex(context, false);
    }

    /**
     * Iterates over all Items, Collections and Communities. And updates
     * them in the index. Uses decaching to control memory footprint.
     * Uses indexContent and isStale to check state of item in index.
     *
     * At first it may appear counterintuitive to have an IndexWriter/Reader
     * opened and closed on each DSO. But this allows the UI processes
     * to step in and attain a lock and write to the index even if other
     * processes/jvms are running a reindex.
* * @param context * @param force */ public static void updateIndex(Context context, boolean force) { try { ItemIterator items = null; try { for(items = Item.findAll(context);items.hasNext();) { Item item = (Item) items.next(); indexContent(context, item); item.decache(); } } finally { if (items != null) { items.close(); } } for (Collection collection : Collection.findAll(context)) { indexContent(context, collection); context.removeCached(collection, collection.getID()); } for (Community community : Community.findAll(context)) { indexContent(context, community); context.removeCached(community, community.getID()); } optimizeIndex(context); } catch(Exception e) { log.error(e.getMessage(), e); } } /** * Iterates over all documents in the Lucene index and verifies they * are in database, if not, they are removed. * * @param context * @throws IOException * @throws SQLException */ public static void cleanIndex(Context context) throws IOException, SQLException { IndexReader reader = DSQuery.getIndexReader(); for(int i = 0 ; i < reader.numDocs(); i++) { if(!reader.isDeleted(i)) { Document doc = reader.document(i); String handle = doc.get("handle"); if (!StringUtils.isEmpty(handle)) { DSpaceObject o = HandleManager.resolveToObject(context, handle); if (o == null) { log.info("Deleting: " + handle); /* Use IndexWriter to delete, its easier to manage write.lock */ DSIndexer.unIndexContent(context, handle); } else { context.removeCached(o, o.getID()); log.debug("Keeping: " + handle); } } } else { log.debug("Encountered deleted doc: " + i); } } } /** * Get the Lucene analyzer to use according to current configuration (or * default). TODO: Should have multiple analyzers (and maybe indices?) for * multi-lingual DSpaces. 
*
     * @return <code>Analyzer</code> to use
     * @throws IllegalStateException
     *             if the configured analyzer can't be instantiated
     */
    static Analyzer getAnalyzer()
    {
        if (analyzer == null)
        {
            // We need to find the analyzer class from the configuration
            String analyzerClassName = ConfigurationManager.getProperty("search.analyzer");

            if (analyzerClassName == null)
            {
                // Use default
                analyzerClassName = "org.dspace.search.DSAnalyzer";
            }

            try
            {
                Class analyzerClass = Class.forName(analyzerClassName);
                analyzer = (Analyzer) analyzerClass.newInstance();
            }
            catch (Exception e)
            {
                log.fatal(LogManager.getHeader(null, "no_search_analyzer",
                        "search.analyzer=" + analyzerClassName), e);
                throw new IllegalStateException(e.toString());
            }
        }

        return analyzer;
    }

    /**
     * Decide what (if anything) must be done to the index for the given object:
     * an UPDATE task for archived Items / Collections / Communities (Items only
     * when stale or forced), a DELETE task for unarchived or withdrawn Items,
     * or null when no action is needed.
     */
    static IndexingTask prepareIndexingTask(DSpaceObject dso, boolean force)
            throws SQLException, IOException
    {
        String handle = dso.getHandle();
        Term term = new Term("handle", handle);
        IndexingTask action = null;
        switch (dso.getType())
        {
        case Constants.ITEM :
            Item item = (Item) dso;
            if (item.isArchived() && !item.isWithdrawn())
            {
                /** If the item is in the repository now, add it to the index */
                if (requiresIndexing(term, ((Item) dso).getLastModified()) || force)
                {
                    log.info("Writing Item: " + handle + " to Index");
                    action = new IndexingTask(IndexingTask.Action.UPDATE, term, buildDocumentForItem((Item) dso));
                }
            }
            else
            {
                action = new IndexingTask(IndexingTask.Action.DELETE, term, null);
            }
            break;

        case Constants.COLLECTION :
            log.info("Writing Collection: " + handle + " to Index");
            action = new IndexingTask(IndexingTask.Action.UPDATE, term, buildDocumentForCollection((Collection) dso));
            break;

        case Constants.COMMUNITY :
            log.info("Writing Community: " + handle + " to Index");
            action = new IndexingTask(IndexingTask.Action.UPDATE, term, buildDocumentForCommunity((Community) dso));
            break;

        default :
            log.error("Only Items, Collections and Communities can be Indexed");
        }
        return action;
    }

    /**
     * Execute (or queue) an indexing task: queued in batch mode or when a
     * flush delay is configured, otherwise executed immediately against a
     * freshly opened (and then closed) writer.
     */
    static void processIndexingTask(IndexingTask task) throws IOException
    {
        if (batchProcessingMode)
        {
            addToIndexingTaskQueue(task);
        }
        else if (indexFlushDelay > 0)
        {
            addToIndexingTaskQueue(task);
            startDelayedIndexFlusher();
        }
        else
        {
            IndexWriter writer = null;
            try
            {
                writer = openIndex(false);
                executeIndexingTask(writer, task);
            }
            finally
            {
                // Release any bitstream Readers held by the document
                if (task.getDocument() != null)
                {
                    closeAllReaders(task.getDocument());
                }

                if (writer != null)
                {
                    try
                    {
                        writer.close();
                    }
                    catch (IOException e)
                    {
                        log.error("Unable to close IndexWriter", e);
                    }
                }
            }
        }
    }

    /** Apply a single task to the writer: UPDATE replaces by term, DELETE removes by term. */
    private static void executeIndexingTask(IndexWriter writer, IndexingTask action) throws IOException
    {
        if (action != null)
        {
            if (action.isDelete())
            {
                // A DELETE carrying a document (e.g. a "withdrawn" stub) is
                // written as a replacement; a bare DELETE removes by term
                if (action.getDocument() != null)
                {
                    writer.updateDocument(action.getTerm(), action.getDocument());
                }
                else
                {
                    writer.deleteDocuments(action.getTerm());
                }
            }
            else
            {
                writer.updateDocument(action.getTerm(), action.getDocument());
            }
        }
    }

    // Pending tasks keyed by handle, so a later task for the same object replaces an earlier one
    private static Map<String, IndexingTask> queuedTaskMap = new HashMap<String, IndexingTask>();

    /** Queue a task, flushing the whole queue once it reaches the configured batch size. */
    static synchronized void addToIndexingTaskQueue(IndexingTask action)
    {
        if (action != null)
        {
            queuedTaskMap.put(action.getTerm().text(), action);

            if (queuedTaskMap.size() >= batchFlushAfterDocuments)
            {
                flushIndexingTaskQueue();
            }
        }
    }

    /** Flush all queued tasks using a writer opened (and closed) for the purpose. */
    static void flushIndexingTaskQueue()
    {
        if (queuedTaskMap.size() > 0)
        {
            IndexWriter writer = null;

            try
            {
                writer = openIndex(false);
                flushIndexingTaskQueue(writer);
            }
            catch (IOException e)
            {
                log.error(e);
            }
            finally
            {
                if (writer != null)
                {
                    try
                    {
                        writer.close();
                    }
                    catch (IOException ex)
                    {
                        log.error(ex);
                    }
                }
            }
        }
    }

    /** Flush all queued tasks to the supplied writer and stop any pending delayed flush. */
    private static synchronized void flushIndexingTaskQueue(IndexWriter writer)
    {
        for (IndexingTask action : queuedTaskMap.values())
        {
            try
            {
                executeIndexingTask(writer, action);
            }
            catch (IOException e)
            {
                log.error(e);
            }
            finally
            {
                if (action.getDocument() != null)
                {
                    closeAllReaders(action.getDocument());
                }
            }
        }

        queuedTaskMap.clear();

        // We've flushed, so we don't need this thread
        if (delayedIndexFlusher != null)
        {
            delayedIndexFlusher.interrupt();
            delayedIndexFlusher = null;
        }
    }

    ////////////////////////////////////
    // Private
    ////////////////////////////////////

    private static void emailException(Exception exception)
    {
        // Also email an alert, system admin may need to check for stale lock
        try
        {
            String recipient = ConfigurationManager.getProperty("alert.recipient");

            if (recipient != null)
            {
                Email email = ConfigurationManager.getEmail(I18nUtil.getEmailFilename(Locale.getDefault(), "internal_error"));
                email.addRecipient(recipient);
                email.addArgument(ConfigurationManager.getProperty("dspace.url"));
                email.addArgument(new Date());

                String stackTrace;

                if (exception != null)
                {
                    StringWriter sw = new StringWriter();
                    PrintWriter pw = new PrintWriter(sw);
                    exception.printStackTrace(pw);
                    pw.flush();
                    stackTrace = sw.toString();
                }
                else
                {
                    stackTrace = "No exception";
                }

                email.addArgument(stackTrace);
                email.send();
            }
        }
        catch (Exception e)
        {
            // Not much we can do here!
            log.warn("Unable to send email alert", e);
        }
    }

    /**
     * Is stale checks the lastModified time stamp in the database and the index
     * to determine if the index is stale.
*
     * @param lastModified
     * @throws SQLException
     * @throws IOException
     */
    private static boolean requiresIndexing(Term t, Date lastModified)
            throws SQLException, IOException
    {
        boolean reindexItem = false;
        boolean inIndex = false;

        IndexReader ir = DSQuery.getIndexReader();
        TermDocs docs = ir.termDocs(t);

        while (docs.next())
        {
            inIndex = true;
            int id = docs.doc();
            Document doc = ir.document(id);

            Field lastIndexed = doc.getField(LAST_INDEXED_FIELD);

            // Stale when the stored index timestamp is missing or older than
            // the object's database lastModified timestamp
            if (lastIndexed == null || Long.parseLong(lastIndexed.stringValue()) < lastModified.getTime())
            {
                reindexItem = true;
            }
        }

        // Also (re)index anything not present in the index at all
        return reindexItem || !inIndex;
    }

    /**
     * prepare index, opening writer, and wiping out existing index if necessary
     */
    private static IndexWriter openIndex(boolean wipeExisting) throws IOException
    {
        IndexWriter writer = new IndexWriter(indexDirectory, getAnalyzer(), wipeExisting);

        /* Set maximum number of terms to index if present in dspace.cfg */
        if (maxfieldlength == -1)
        {
            writer.setMaxFieldLength(Integer.MAX_VALUE);
        }
        else
        {
            writer.setMaxFieldLength(maxfieldlength);
        }

        return writer;
    }

    /**
     * Build the location string (" m<communityID> ... l<collectionID> ...")
     * used for community/collection-scoped searches.
     *
     * @param myitem
     * @return
     * @throws SQLException
     */
    private static String buildItemLocationString(Item myitem) throws SQLException
    {
        // build list of community ids
        Community[] communities = myitem.getCommunities();

        // build list of collection ids
        Collection[] collections = myitem.getCollections();

        // now put those into strings
        StringBuffer location = new StringBuffer();
        int i = 0;

        for (i = 0; i < communities.length; i++)
        {
            location.append(" m").append(communities[i].getID());
        }

        for (i = 0; i < collections.length; i++)
        {
            location.append(" l").append(collections[i].getID());
        }

        return location.toString();
    }

    /** Build the " m<communityID> ..." location string for a Collection. */
    private static String buildCollectionLocationString(Collection target) throws SQLException
    {
        // build list of community ids
        Community[] communities = target.getCommunities();

        // now put those into strings
        StringBuffer location = new StringBuffer();
        int i = 0;

        for (i = 0; i < communities.length; i++)
        {
            location.append(" m").append(communities[i].getID());
        }

        return location.toString();
    }

    /**
     * Build a Lucene document for a DSpace Community.
     *
     * @param community Community to be indexed
     * @throws SQLException
     * @throws IOException
     */
    private static Document buildDocumentForCommunity(Community community) throws SQLException, IOException
    {
        // Create Lucene Document
        Document doc = buildDocument(Constants.COMMUNITY, community.getID(), community.getHandle(), null);

        // and populate it
        String name = community.getMetadata("name");

        if (name != null)
        {
            doc.add(new Field("name", name, Field.Store.NO, Field.Index.TOKENIZED));
            doc.add(new Field("default", name, Field.Store.NO, Field.Index.TOKENIZED));
        }

        return doc;
    }

    /**
     * Build a Lucene document for a DSpace Collection.
     *
     * @param collection Collection to be indexed
     * @throws SQLException
     * @throws IOException
     */
    private static Document buildDocumentForCollection(Collection collection) throws SQLException, IOException
    {
        String location_text = buildCollectionLocationString(collection);

        // Create Lucene Document
        Document doc = buildDocument(Constants.COLLECTION, collection.getID(), collection.getHandle(), location_text);

        // and populate it
        String name = collection.getMetadata("name");

        if (name != null)
        {
            doc.add(new Field("name", name, Field.Store.NO, Field.Index.TOKENIZED));
            doc.add(new Field("default", name, Field.Store.NO, Field.Index.TOKENIZED));
        }

        return doc;
    }

    /**
     * Build a Lucene document for a DSpace Item and write the index
     *
     * @param item The DSpace Item to be indexed
     * @throws SQLException
     * @throws IOException
     */
    private static Document buildDocumentForItem(Item item) throws SQLException, IOException
    {
        String handle = item.getHandle();

        // get the location string (for searching by collection & community)
        String location = buildItemLocationString(item);

        Document doc = buildDocument(Constants.ITEM, item.getID(), handle, location);

        log.debug("Building Item: " + handle);

        int j;

        if (indexConfigArr.length > 0)
        {
            DCValue[] mydc;

            for (int i = 0; i < indexConfigArr.length; i++)
            {
                // extract metadata (ANY is wildcard from Item class)
                if (indexConfigArr[i].qualifier != null && indexConfigArr[i].qualifier.equals("*"))
                {
                    mydc = item.getMetadata(indexConfigArr[i].schema, indexConfigArr[i].element, Item.ANY, Item.ANY);
                }
                else
                {
                    mydc = item.getMetadata(indexConfigArr[i].schema, indexConfigArr[i].element, indexConfigArr[i].qualifier, Item.ANY);
                }

                for (j = 0; j < mydc.length; j++)
                {
                    if (!StringUtils.isEmpty(mydc[j].value))
                    {
                        if ("timestamp".equalsIgnoreCase(indexConfigArr[i].type))
                        {
                            // Index to second resolution, plus a ".year" field
                            Date d = toDate(mydc[j].value);
                            if (d != null)
                            {
                                doc.add(new Field(indexConfigArr[i].indexName,
                                        DateTools.dateToString(d, DateTools.Resolution.SECOND),
                                        Field.Store.NO, Field.Index.UN_TOKENIZED));
                                doc.add(new Field(indexConfigArr[i].indexName + ".year",
                                        DateTools.dateToString(d, DateTools.Resolution.YEAR),
                                        Field.Store.NO, Field.Index.UN_TOKENIZED));
                            }
                        }
                        else if ("date".equalsIgnoreCase(indexConfigArr[i].type))
                        {
                            // Index to day resolution, plus a ".year" field
                            Date d = toDate(mydc[j].value);
                            if (d != null)
                            {
                                doc.add(new Field(indexConfigArr[i].indexName,
                                        DateTools.dateToString(d, DateTools.Resolution.DAY),
                                        Field.Store.NO, Field.Index.UN_TOKENIZED));
                                doc.add(new Field(indexConfigArr[i].indexName + ".year",
                                        DateTools.dateToString(d, DateTools.Resolution.YEAR),
                                        Field.Store.NO, Field.Index.UN_TOKENIZED));
                            }
                        }
                        else
                        {
                            List<String> variants = null;
                            // Authority-controlled value with sufficient confidence:
                            // index the authority key and all known variants too
                            if (mydc[j].authority != null && mydc[j].confidence >= MetadataAuthorityManager.getManager()
                                    .getMinConfidence(mydc[j].schema, mydc[j].element, mydc[j].qualifier))
                            {
                                variants = ChoiceAuthorityManager.getManager()
                                        .getVariants(mydc[j].schema, mydc[j].element, mydc[j].qualifier,
                                                mydc[j].authority, mydc[j].language);

                                doc.add(new Field(indexConfigArr[i].indexName + "_authority", mydc[j].authority,
                                        Field.Store.NO, Field.Index.UN_TOKENIZED));

                                boolean valueAlreadyIndexed = false;
                                if (variants != null)
                                {
                                    for (String var : variants)
                                    {
                                        // TODO: use a delegate to allow custom 'types' to be used to reformat the field
                                        doc.add(new Field(indexConfigArr[i].indexName, var, Field.Store.NO, Field.Index.TOKENIZED));
                                        if (var.equals(mydc[j].value))
                                        {
                                            valueAlreadyIndexed = true;
                                        }
                                        else
                                        {
                                            // add to default index too...
                                            // (only variants, main value is already take)
                                            doc.add(new Field("default", var, Field.Store.NO, Field.Index.TOKENIZED));
                                        }
                                    }
                                }

                                if (!valueAlreadyIndexed)
                                {
                                    // TODO: use a delegate to allow custom 'types' to be used to reformat the field
                                    doc.add(new Field(indexConfigArr[i].indexName, mydc[j].value, Field.Store.NO, Field.Index.TOKENIZED));
                                }
                            }
                            else
                            {
                                // TODO: use a delegate to allow custom 'types' to be used to reformat the field
                                doc.add(new Field(indexConfigArr[i].indexName, mydc[j].value, Field.Store.NO, Field.Index.TOKENIZED));
                            }
                        }

                        // The raw value always goes into the "default" full-text field
                        doc.add(new Field("default", mydc[j].value, Field.Store.NO, Field.Index.TOKENIZED));
                    }
                }
            }
        }

        log.debug(" Added Metadata");

        try
        {
            // Now get the configured sort options, and add those as untokenized fields
            // Note that we will use the sort order delegates to normalise the values written
            for (SortOption so : SortOption.getSortOptions())
            {
                String[] somd = so.getMdBits();
                DCValue[] dcv = item.getMetadata(somd[0], somd[1], somd[2], Item.ANY);
                if (dcv.length > 0)
                {
                    String value = OrderFormat.makeSortString(dcv[0].value, dcv[0].language, so.getType());
                    doc.add(new Field("sort_" + so.getName(), value, Field.Store.NO, Field.Index.UN_TOKENIZED));
                }
            }
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }

        log.debug(" Added Sorting");

        try
        {
            // now get full text of any bitstreams in the TEXT bundle
            // trundle through the bundles
            Bundle[] myBundles = item.getBundles();

            for (int i = 0; i < myBundles.length; i++)
            {
                if ((myBundles[i].getName() != null) && myBundles[i].getName().equals("TEXT"))
                {
                    // a-ha! grab the text out of the bitstreams
                    Bitstream[] myBitstreams = myBundles[i].getBitstreams();

                    for (j = 0; j < myBitstreams.length; j++)
                    {
                        try
                        {
                            // Add each InputStream to the Indexed Document (Acts like an Append)
                            doc.add(new Field("default", new BufferedReader(new InputStreamReader(myBitstreams[j].retrieve()))));

                            log.debug(" Added BitStream: " + myBitstreams[j].getStoreNumber() + " " + myBitstreams[j].getSequenceID() + " " + myBitstreams[j].getName());
                        }
                        catch (Exception e)
                        {
                            // this will never happen, but compiler is now happy.
                            log.error(e.getMessage(), e);
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }

        log.info("Wrote Item: " + handle + " to Index");
        return doc;
    }

    /**
     * Create Lucene document with all the shared fields initialized.
     *
     * @param type Type of DSpace Object
     * @param id
     * @param handle
     * @param location
     * @return
     */
    private static Document buildDocument(int type, int id, String handle, String location)
    {
        Document doc = new Document();

        // want to be able to check when last updated
        // (not tokenized, but it is indexed)
        doc.add(new Field(LAST_INDEXED_FIELD, Long.toString(System.currentTimeMillis()), Field.Store.YES, Field.Index.UN_TOKENIZED));
        doc.add(new Field(DOCUMENT_STATUS_FIELD, "archived", Field.Store.YES, Field.Index.UN_TOKENIZED));

        // KEPT FOR BACKWARDS COMPATIBILITY
        // do location, type, handle first
        doc.add(new Field("type", Integer.toString(type), Field.Store.YES, Field.Index.NO));

        // New fields to weaken the dependence on handles, and allow for faster list display
        doc.add(new Field("search.resourcetype", Integer.toString(type), Field.Store.YES, Field.Index.UN_TOKENIZED));
        doc.add(new Field("search.resourceid", Integer.toString(id), Field.Store.YES, Field.Index.NO));

        // want to be able to search for handle, so use keyword
        // (not tokenized, but it is indexed)
        if (handle != null)
        {
            // ??? not sure what the "handletext" field is but it was there in writeItemIndex ???
doc.add(new Field("handletext", handle, Field.Store.YES, Field.Index.TOKENIZED)); // want to be able to search for handle, so use keyword // (not tokenized, but it is indexed) doc.add(new Field("handle", handle, Field.Store.YES, Field.Index.UN_TOKENIZED)); // add to full text index doc.add(new Field("default", handle, Field.Store.NO, Field.Index.TOKENIZED)); } if(location != null) { doc.add(new Field("location", location, Field.Store.NO, Field.Index.TOKENIZED)); doc.add(new Field("default", location, Field.Store.NO, Field.Index.TOKENIZED)); } return doc; } private static Document buildDocumentForDeletedHandle(String handle) { Document doc = new Document(); // want to be able to check when last updated // (not tokenized, but it is indexed) doc.add(new Field(LAST_INDEXED_FIELD, Long.toString(System.currentTimeMillis()), Field.Store.YES, Field.Index.UN_TOKENIZED)); doc.add(new Field(DOCUMENT_STATUS_FIELD, "deleted", Field.Store.YES, Field.Index.UN_TOKENIZED)); // Do not add any other fields, as we don't want to be able to find it - just check the last indexed time return doc; } private static Document buildDocumentForWithdrawnItem(Item item) { Document doc = new Document(); // want to be able to check when last updated // (not tokenized, but it is indexed) doc.add(new Field(LAST_INDEXED_FIELD, Long.toString(System.currentTimeMillis()), Field.Store.YES, Field.Index.UN_TOKENIZED)); doc.add(new Field(DOCUMENT_STATUS_FIELD, "withdrawn", Field.Store.YES, Field.Index.UN_TOKENIZED)); // Do not add any other fields, as we don't want to be able to find it - just check the last indexed time return doc; } private static void closeAllReaders(Document doc) { if (doc != null) { int count = 0; List fields = doc.getFields(); if (fields != null) { for (Field field : (List<Field>)fields) { Reader r = field.readerValue(); if (r != null) { try { r.close(); count++; } catch (IOException e) { log.error("Unable to close reader", e); } } } } if (count > 0) { log.debug("closed " + count + " 
readers"); } } } /** * Helper function to retrieve a date using a best guess of the potential date encodings on a field * * @param t * @return */ private static Date toDate(String t) { SimpleDateFormat[] dfArr; // Choose the likely date formats based on string length switch (t.length()) { case 4: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy") }; break; case 6: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyyMM") }; break; case 7: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy-MM") }; break; case 8: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyyMMdd"), new SimpleDateFormat("yyyy MMM") }; break; case 10: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy-MM-dd") }; break; case 11: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy MMM dd") }; break; case 20: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'") }; break; default: dfArr = new SimpleDateFormat[] { new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") }; break; } for (SimpleDateFormat df : dfArr) { try { // Parse the date df.setCalendar(Calendar.getInstance(TimeZone.getTimeZone("UTC"))); df.setLenient(false); return df.parse(t); } catch (ParseException pe) { log.error("Unable to parse date format", pe); } } return null; } private static synchronized void startDelayedIndexFlusher() { if (delayedIndexFlusher != null && !delayedIndexFlusher.isAlive()) { delayedIndexFlusher = null; } if (delayedIndexFlusher == null && queuedTaskMap.size() > 0) { delayedIndexFlusher = new Thread(new DelayedIndexFlushThread()); delayedIndexFlusher.start(); } } private static class DelayedIndexFlushThread implements Runnable { @Override public void run() { try { Thread.sleep(indexFlushDelay); DSIndexer.flushIndexingTaskQueue(); } catch (InterruptedException e) { log.debug(e); } } } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.search;

import java.util.Date;
import java.util.List;

import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Simple container class containing information about a harvested DSpace item.
 * <p>
 * This is a plain data-transfer object: all fields are public by design and
 * are filled in by the harvesting code; there is no behavior here.
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class HarvestedItemInfo
{
    /** Context used when creating this object */
    public Context context;

    /** Internal item ID (as opposed to item's OAI ID, which is the Handle) */
    public int itemID;

    /** The Handle, with no prefix */
    public String handle;

    /** The datestamp */
    public Date datestamp;

    /** The item. Only filled out if requested */
    public Item item;

    /**
     * A List of Strings. The Handles of collections this item is in. Only
     * filled out if originally requested when invoking <code>Harvest</code>
     * (N.B. not Collection objects)
     */
    public List<String> collectionHandles;

    /** True if this item has been withdrawn */
    public boolean withdrawn;
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.testing;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DCValue;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

/**
 * Simple class to transform a medline.xml file from PubMed into DSpace import package(s)
 *
 * This is a distinctly incomplete implementation - it doesn't even attempt to map a number of fields,
 * and has no means of customizing the mapping. More importantly, it makes assumptions in parsing the xml
 * that would be problematic for a production instance.
 *
 * However, it does use SAX parsing, which means it has no problems with handling a 1GB+ input file.
 * This means it is a good way to generate a large number of realistic import packages very quickly -
 * simply go to http://www.ncbi.nlm.nih.gov/pubmed and search for something that returns a lot of records
 * ('nature' returns over 300,000 for example). Download the results as a medline.xml (and yes, it will attempt
 * to download all 300,000+ into a single file), and then run this class over that file to spit out import packages
 * which can then be loaded into DSpace using ItemImport.
 */
public class PubMedToImport
{
    private static final Logger log = Logger.getLogger(PubMedToImport.class);

    /** Directory the import packages are written into; set once by main(). */
    private static File outputDir = null;

    /**
     * Command-line entry point.
     * <p>
     * Options: -s/--source (the medline.xml file to read) and
     * -o/--output (directory to write import packages into; must be empty
     * or absent, in which case it is created).
     *
     * @param args command line arguments
     */
    public static void main(String args[])
    {
        Options options = new Options();
        options.addOption(new Option("s", "source", true, "Source xml"));
        options.addOption(new Option("o", "output", true, "Output directory"));

        try
        {
            CommandLine cli = new PosixParser().parse(options, args);
            String source = cli.getOptionValue("s");
            String output = cli.getOptionValue("o");

            if (!new File(source).exists())
            {
                throw new IllegalArgumentException("Source file does not exist");
            }

            outputDir = new File(output);
            if (outputDir.exists())
            {
                // Refuse to clobber previous output.
                if (outputDir.list().length > 0)
                {
                    throw new IllegalStateException("Output directory must be empty");
                }
            }
            else
            {
                if (!outputDir.mkdirs())
                {
                    throw new IllegalStateException("Unable to create output directory");
                }
            }

            SAXParserFactory factory = SAXParserFactory.newInstance();
            SAXParser saxParser = factory.newSAXParser();
            saxParser.parse(source, new PubMedHandler());
        }
        catch (Exception e)
        {
            // FIX: this used to be an empty catch block, silently swallowing
            // every failure (bad arguments, parse errors, I/O errors) and
            // exiting with status 0. Report the problem and fail visibly.
            log.error("PubMed to import-package conversion failed", e);
            System.err.println("Conversion failed: " + e);
            System.exit(1);
        }
    }

    /**
     * SAX handler that accumulates DC values for the current PubmedArticle
     * record and writes an import package when the record element closes.
     */
    private static class PubMedHandler extends DefaultHandler
    {
        // 1-based record counter; also used as the item directory name.
        private static int recordCount = 1;

        // DC values accumulated for the current record.
        private static List<DCValue> dcValues;

        // Character accumulators for the element currently being read.
        private static StringBuilder value;
        private static StringBuilder lastName;
        private static StringBuilder firstName;

        // Parser state flags.
        private static boolean isCorrection = false;
        private static boolean isLastName = false;
        private static boolean isFirstName = false;

        /** Append one schema "dc" value to the current record's list. */
        private static void addDCValue(String element, String qualifier, String value)
        {
            if (dcValues == null)
            {
                dcValues = new ArrayList<DCValue>();
            }

            DCValue thisValue = new DCValue();
            thisValue.schema = "dc";
            thisValue.element = element;
            thisValue.qualifier = qualifier;
            thisValue.value = value;
            dcValues.add(thisValue);
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException
        {
            if ("PubmedArticle".equals(qName))
            {
                System.out.println("Starting record " + recordCount);
            }
            // FIX: the element name was misspelled "CommensCorrectionsList",
            // so correction mode was never entered and PMIDs / ISSNs / titles
            // from the corrections list polluted the record's metadata.
            // The MEDLINE DTD element is "CommentsCorrectionsList".
            else if ("CommentsCorrectionsList".equals(qName))
            {
                isCorrection = true;
            }
            else if ("ForeName".equals(qName))
            {
                isFirstName = true;
                firstName = new StringBuilder();
            }
            else if ("LastName".equals(qName))
            {
                isLastName = true;
                lastName = new StringBuilder();
            }
            else
            {
                value = new StringBuilder();
            }
            super.startElement(uri, localName, qName, attributes);
        }

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException
        {
            // FIX: leave correction mode when the corrections list itself
            // closes. Previously the flag was only reset at the end of
            // MedlineCitation, which also discarded legitimate fields (e.g.
            // MeSH DescriptorName) that follow the corrections list.
            if ("CommentsCorrectionsList".equals(qName))
            {
                isCorrection = false;
            }
            else if (!isCorrection)
            {
                if ("PMID".equals(qName))
                {
                    addDCValue("identifier", null, value.toString());
                }
                else if ("ISSN".equals(qName))
                {
                    addDCValue("identifier", "issn", value.toString());
                }
                else if ("ArticleTitle".equals(qName))
                {
                    addDCValue("title", null, value.toString());
                }
                else if ("AbstractText".equals(qName))
                {
                    addDCValue("description", "abstract", value.toString());
                }
                else if ("PublicationType".equals(qName))
                {
                    addDCValue("type", null, value.toString());
                }
                else if ("Author".equals(qName))
                {
                    addDCValue("contributor", "author", lastName + ", " + firstName);
                }
                else if ("DescriptorName".equals(qName))
                {
                    addDCValue("subject", "mesh", value.toString());
                }
            }

            if ("PubmedArticle".equals(qName))
            {
                try
                {
                    writeItem();
                }
                catch (IOException e)
                {
                    throw new IllegalStateException("Unable to export record", e);
                }
                System.out.println("Ending record " + recordCount);
                recordCount++;
            }

            isFirstName = false;
            isLastName = false;
            super.endElement(uri, localName, qName);
        }

        @Override
        public void characters(char[] chars, int start, int length) throws SAXException
        {
            // characters() may be called multiple times per element, so
            // always append rather than assign.
            if (isFirstName)
            {
                firstName.append(chars, start, length);
            }
            else if (isLastName)
            {
                lastName.append(chars, start, length);
            }
            else
            {
                value.append(chars, start, length);
            }
            super.characters(chars, start, length);
        }

        /**
         * Write the accumulated DC values of the current record as a DSpace
         * import package: a numbered directory containing an empty "contents"
         * file and a "dublin_core.xml".
         *
         * @throws IOException if the package directory or files cannot be written
         */
        private void writeItem() throws IOException
        {
            File itemDir = new File(outputDir, String.valueOf(recordCount));
            // FIX: the results of mkdirs()/createNewFile() were ignored;
            // failure would previously surface later as a confusing
            // FileNotFoundException (or a silently useless package).
            if (!itemDir.mkdirs())
            {
                throw new IOException("Unable to create item directory: " + itemDir);
            }
            if (!new File(itemDir, "contents").createNewFile())
            {
                throw new IOException("Unable to create contents file in: " + itemDir);
            }

            Document doc = new Document();
            Element root = new Element("dublin_core");
            doc.setRootElement(root);

            // FIX: guard against a record that produced no DC values at all;
            // dcValues is lazily created and would have been null here.
            if (dcValues != null)
            {
                for (DCValue dcValue : dcValues)
                {
                    Element dcNode = new Element("dcvalue");
                    dcNode.setAttribute("element", dcValue.element);
                    if (!StringUtils.isEmpty(dcValue.qualifier))
                    {
                        dcNode.setAttribute("qualifier", dcValue.qualifier);
                    }
                    dcNode.setText(dcValue.value);
                    root.addContent(dcNode);
                }
            }

            File dc = new File(itemDir, "dublin_core.xml");
            XMLOutputter dcOutput = new XMLOutputter(Format.getPrettyFormat().setEncoding("UTF-8"));
            OutputStream out = null;
            try
            {
                out = new BufferedOutputStream(new FileOutputStream(dc));
                dcOutput.output(doc, out);
            }
            finally
            {
                if (out != null)
                {
                    out.close();
                }
            }

            if (dcValues != null)
            {
                dcValues.clear();
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.embargo;

import java.sql.SQLException;
import java.io.IOException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Plugin interface for the embargo setting function.
 *
 * @author Larry Stone
 * @author Richard Rodgers
 */
public interface EmbargoSetter
{
    /**
     * Get lift date of embargo from the "terms" supplied in the
     * metadata (or other available state) of this Item. Return null
     * if it is clear this should not be under embargo -- that is to be
     * expected since this method is invoked on all newly-archived Items.
     * <p>
     * Note that the value (if any) of the metadata field configured to
     * contain embargo terms is passed explicitly, but this method is
     * free to explore other metadata fields, and even Bitstream contents,
     * to determine the embargo status and lift date.
     * <p>
     * Expect this method to be called at the moment before the Item is
     * installed into the archive (i.e. after workflow). This may be
     * significant if the embargo lift date is computed relative to the present.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @param terms value of the metadata field configured as embargo terms, if any.
     * @return absolute date on which the embargo is to be lifted, or null if none
     * @throws SQLException if a database error occurs while reading Item state
     * @throws AuthorizeException if the caller lacks permission to read Item state
     * @throws IOException if Bitstream contents consulted by an implementation cannot be read
     */
    public DCDate parseTerms(Context context, Item item, String terms)
        throws SQLException, AuthorizeException, IOException;

    /**
     * Enforce embargo by (for example) turning off all read access to
     * bitstreams in this Item.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @throws SQLException if a database error occurs while updating policies
     * @throws AuthorizeException if the caller lacks permission to change policies
     * @throws IOException if Item content cannot be accessed
     */
    public void setEmbargo(Context context, Item item)
        throws SQLException, AuthorizeException, IOException;

    /**
     * Check that embargo is properly set on Item. For example: no read access
     * to bitstreams. It is expected to report any noteworthy
     * discrepancies by writing on the stream System.err, although
     * logging is also encouraged. Only report conditions that
     * constitute a risk of exposing Bitstreams that should be under
     * embargo -- e.g. readable Bitstreams or ORIGINAL bundles. A
     * readable bundle named "TEXT" does not constitute a direct risk so
     * long as its member Bitstreams are not readable.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @throws SQLException if a database error occurs while reading policies
     * @throws AuthorizeException if the caller lacks permission to read policies
     * @throws IOException if Item content cannot be accessed
     */
    public void checkEmbargo(Context context, Item item)
        throws SQLException, AuthorizeException, IOException;
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ /** * <p> * Embargo allows the deposit of Items whose content should not be made visible * until later. Some journals, for example, permit self-publication after a * period of exclusive access through the journal. * </p> * <p> * Embargo policy is applied through a pair of pluggable classes: an * {@link org.dspace.embargo.EmbargoSetter} and an * {@link org.dspace.embargo.EmbargoLifter}. The {@link org.dspace.embargo.EmbargoManager} * must be configured to specify these classes, as well as names of two metadata * fields for use by the embargo facility: an embargo lift date (when the * content will be released) and the embargo terms (which the EmbargoSetter will * use to calculate the lift date). You must select or create appropriate * metadata fields for this purpose. * </p> * <p> * See {@link org.dspace.embargo.DefaultEmbargoSetter}, * {@link org.dspace.embargo.DayTableEmbargoSetter}, and * {@link org.dspace.embargo.DefaultEmbargoLifter} for simple policy classes * which ship with DSpace. You can supply your own classes to implement more * elaborate policies. * </p> * <p> * Embargo is applied when an Item is installed in a Collection. An Item subject * to embargo passes through several stages: * </p> * <ol> * <li>During submission, the metadata field established for embargo terms must * be set to a value which is interpretable by the selected setter. Typically * this will be a date or an interval. There is no specific mechanism for * requesting embargo; you must customize your submission forms as needed, * create a template Item which applies a standard value, or in some other way * cause the specified metadata field to be set. * </li> * <li>When the Item is accepted into a Collection, the setter will apply the * embargo, making the content inaccessible. 
* </li> * <li>The site should run the embargo lifter tool ({@code dspace embargo-lifter}) * from time to time, for example using an automatic daily job. This discovers * Items which have passed their embargo lift dates and makes their content * accessible. * </li> * </ol> */ package org.dspace.embargo;
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.embargo;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCDate;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.MetadataSchema;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.handle.HandleManager;

/**
 * Public interface to the embargo subsystem.
 * <p>
 * Configuration properties: (with examples)
 * <br/># DC metadata field to hold the user-supplied embargo terms
 * <br/>embargo.field.terms = dc.embargo.terms
 * <br/># DC metadata field to hold computed "lift date" of embargo
 * <br/>embargo.field.lift = dc.date.available
 * <br/># String to indicate indefinite (forever) embargo in terms
 * <br/>embargo.terms.open = Indefinite
 * <br/># implementation of embargo setter plugin
 * <br/>plugin.single.org.dspace.embargo.EmbargoSetter = edu.my.Setter
 * <br/># implementation of embargo lifter plugin
 * <br/>plugin.single.org.dspace.embargo.EmbargoLifter = edu.my.Lifter
 *
 * @author Larry Stone
 * @author Richard Rodgers
 */
public class EmbargoManager
{
    /**
     * Special date signalling an Item is to be embargoed forever.
     * The actual date is the first day of the year 10,000 UTC.
     */
    public static final DCDate FOREVER = new DCDate("10000-01-01");

    /** log4j category */
    private static final Logger log = Logger.getLogger(EmbargoManager.class);

    // Metadata field components for user-supplied embargo terms
    // set from the DSpace configuration by init()
    private static String terms_schema = null;
    private static String terms_element = null;
    private static String terms_qualifier = null;

    // Metadata field components for lift date, encoded as a DCDate
    // set from the DSpace configuration by init()
    private static String lift_schema = null;
    private static String lift_element = null;
    private static String lift_qualifier = null;

    // plugin implementations
    // set from the DSpace configuration by init()
    private static EmbargoSetter setter = null;
    private static EmbargoLifter lifter = null;

    /**
     * Put an Item under embargo until the specified lift date.
     * Calls EmbargoSetter plugin to adjust Item access control policies.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @param lift date on which the embargo is to be lifted.
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if policies cannot be changed
     * @throws IOException if Item content cannot be accessed
     */
    public static void setEmbargo(Context context, Item item, DCDate lift)
        throws SQLException, AuthorizeException, IOException
    {
        init();
        // if lift is null, we might be restoring an item from an AIP
        DCDate myLift = lift;
        if (myLift == null)
        {
            if ((myLift = recoverEmbargoDate(item)) == null)
            {
                return;
            }
        }
        String slift = myLift.toString();
        // the update below must succeed regardless of the caller's
        // permissions, so authorization is suspended and restored in finally.
        boolean ignoreAuth = context.ignoreAuthorization();
        try
        {
            context.setIgnoreAuthorization(true);
            item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
            item.addMetadata(lift_schema, lift_element, lift_qualifier, null, slift);
            log.info("Set embargo on Item "+item.getHandle()+", expires on: "+slift);
            setter.setEmbargo(context, item);
            item.update();
        }
        finally
        {
            context.setIgnoreAuthorization(ignoreAuth);
        }
    }

    /**
     * Get the embargo lift date for an Item, if any. This looks for the
     * metadata field configured to hold embargo terms, and gives it
     * to the EmbargoSetter plugin's method to interpret it into
     * an absolute timestamp. This is intended to be called at the time
     * the Item is installed into the archive.
     * <p>
     * Note that the plugin is only consulted when the configured terms
     * field has at least one value on the Item.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @return lift date on which the embargo is to be lifted, or null if none
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if Item state cannot be read
     * @throws IOException if Item content cannot be accessed
     * @throws IllegalArgumentException if the computed lift date is in the past
     */
    public static DCDate getEmbargoDate(Context context, Item item)
        throws SQLException, AuthorizeException, IOException
    {
        init();
        DCValue terms[] = item.getMetadata(terms_schema, terms_element,
                terms_qualifier, Item.ANY);

        DCDate result = null;

        // Its poor form to blindly use an object that could be null...
        if (terms != null && terms.length > 0)
        {
            // FIX: the old "terms.length > 0 ? terms[0].value : null"
            // ternary was dead logic -- the guard above already
            // guarantees terms.length > 0.
            result = setter.parseTerms(context, item, terms[0].value);
        }

        // sanity check: do not allow an embargo lift date in the past.
        if (result != null && result.toDate().before(new Date()))
        {
            throw new IllegalArgumentException(
                "Embargo lift date must be in the future, but this is in the past: "+result.toString());
        }
        return result;
    }

    /**
     * Lift the embargo on an item which is assumed to be under embargo.
     * Call the plugin to manage permissions in its own way, then delete
     * the administrative metadata fields that dictated embargo date.
     *
     * @param context the DSpace context
     * @param item the item on which to lift the embargo
     * @throws SQLException if a database error occurs
     * @throws AuthorizeException if policies cannot be changed
     * @throws IOException if Item content cannot be accessed
     */
    public static void liftEmbargo(Context context, Item item)
        throws SQLException, AuthorizeException, IOException
    {
        init();
        lifter.liftEmbargo(context, item);
        item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);

        // set the dc.date.available value to right now
        item.clearMetadata(MetadataSchema.DC_SCHEMA, "date", "available", Item.ANY);
        item.addMetadata(MetadataSchema.DC_SCHEMA, "date", "available", null,
                DCDate.getCurrent().toString());

        log.info("Lifting embargo on Item "+item.getHandle());
        item.update();
    }

    /**
     * Command-line service to scan for every Item with an expired embargo,
     * and then lift that embargo.
     * <p>
     * Options:
     * <dl>
     *   <dt>-c,--check</dt>
     *   <dd>         Function: ONLY check the state of embargoed Items, do
     *                      NOT lift any embargoes.</dd>
     *   <dt>-h,--help</dt>
     *   <dd>         Help.</dd>
     *   <dt>-i,--identifier</dt>
     *   <dd>         Process ONLY this Handle identifier(s), which must be
     *                      an Item.  Can be repeated.</dd>
     *   <dt>-l,--lift</dt>
     *   <dd>         Function: ONLY lift embargoes, do NOT check the state
     *                      of any embargoed Items.</dd>
     *   <dt>-n,--dryrun</dt>
     *   <dd>         Do not change anything in the data model; print
     *                      message instead.</dd>
     *   <dt>-v,--verbose</dt>
     *   <dd>         Print a line describing action taken for each
     *                      embargoed Item found.</dd>
     *   <dt>-q,--quiet</dt>
     *   <dd>         No output except upon error.</dd>
     * </dl>
     *
     * @param argv command line arguments
     */
    public static void main(String argv[])
    {
        init();
        int status = 0;

        Options options = new Options();
        options.addOption("v", "verbose", false,
                "Print a line describing action taken for each embargoed Item found.");
        options.addOption("q", "quiet", false,
                "Do not print anything except for errors.");
        options.addOption("n", "dryrun", false,
                "Do not change anything in the data model, print message instead.");
        options.addOption("i", "identifier", true,
                "Process ONLY this Handle identifier(s), which must be an Item. Can be repeated.");
        options.addOption("c", "check", false,
                "Function: ONLY check the state of embargoed Items, do NOT lift any embargoes.");
        options.addOption("l", "lift", false,
                "Function: ONLY lift embargoes, do NOT check the state of any embargoed Items.");
        options.addOption("h", "help", false, "help");
        CommandLine line = null;
        try
        {
            line = new PosixParser().parse(options, argv);
        }
        catch (ParseException e)
        {
            System.err.println("Command error: " + e.getMessage());
            new HelpFormatter().printHelp(EmbargoManager.class.getName(), options);
            System.exit(1);
        }

        if (line.hasOption('h'))
        {
            new HelpFormatter().printHelp(EmbargoManager.class.getName(), options);
            System.exit(0);
        }

        // sanity check, --lift and --check are mutually exclusive:
        if (line.hasOption('l') && line.hasOption('c'))
        {
            System.err.println("Command error: --lift and --check are mutually exclusive, try --help for assistance.");
            System.exit(1);
        }

        Context context = null;
        try
        {
            context = new Context();
            context.setIgnoreAuthorization(true);
            Date now = new Date();

            // scan items under embargo
            if (line.hasOption('i'))
            {
                for (String handle : line.getOptionValues('i'))
                {
                    DSpaceObject dso = HandleManager.resolveToObject(context, handle);
                    if (dso == null)
                    {
                        System.err.println("Error, cannot resolve handle="+handle+" to a DSpace Item.");
                        status = 1;
                    }
                    else if (dso.getType() != Constants.ITEM)
                    {
                        System.err.println("Error, the handle="+handle+" is not a DSpace Item.");
                        status = 1;
                    }
                    else
                    {
                        if (processOneItem(context, (Item)dso, line, now))
                        {
                            status = 1;
                        }
                    }
                }
            }
            else
            {
                ItemIterator ii = Item.findByMetadataField(context, lift_schema, lift_element, lift_qualifier, Item.ANY);
                while (ii.hasNext())
                {
                    if (processOneItem(context, ii.next(), line, now))
                    {
                        status = 1;
                    }
                }
            }
            log.debug("Cache size at end = "+context.getCacheSize());
            context.complete();
            context = null;
        }
        catch (Exception e)
        {
            System.err.println("ERROR, got exception: "+e);
            e.printStackTrace();
            status = 1;
        }
        finally
        {
            if (context != null)
            {
                try
                {
                    context.abort();
                }
                catch (Exception e)
                {
                    // deliberately best-effort: we are already on the error
                    // path and abort() failure must not mask the real status.
                }
            }
        }
        System.exit(status);
    }

    // lift or check embargo on one Item, handle exceptions
    // return false on success, true if there was fatal exception.
    private static boolean processOneItem(Context context, Item item, CommandLine line, Date now)
        throws Exception
    {
        boolean status = false;
        DCValue lift[] = item.getMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);

        if (lift.length > 0)
        {
            // need to survive any failure on a single item, go on to process the rest.
            try
            {
                DCDate liftDate = new DCDate(lift[0].value);
                log.debug("Testing embargo on item="+item.getHandle()+", date="+liftDate.toString());
                if (liftDate.toDate().before(now))
                {
                    if (line.hasOption('v'))
                    {
                        System.err.println("Lifting embargo from Item handle=" + item.getHandle() + ", lift date=" + lift[0].value);
                    }
                    if (line.hasOption('n'))
                    {
                        if (!line.hasOption('q'))
                        {
                            System.err.println("DRY RUN: would have lifted embargo from Item handle=" + item.getHandle() + ", lift date=" + lift[0].value);
                        }
                    }
                    else if (!line.hasOption('c'))
                    {
                        liftEmbargo(context, item);
                    }
                }
                else if (!line.hasOption('l'))
                {
                    if (line.hasOption('v'))
                    {
                        System.err.println("Checking current embargo on Item handle=" + item.getHandle() + ", lift date=" + lift[0].value);
                    }
                    setter.checkEmbargo(context, item);
                }
            }
            catch (Exception e)
            {
                log.error("Failed attempting to lift embargo, item="+item.getHandle()+": ", e);
                System.err.println("Failed attempting to lift embargo, item="+item.getHandle()+": "+ e);
                status = true;
            }
        }
        // keep the long scan's memory footprint bounded
        context.removeCached(item, item.getID());

        return status;
    }

    // initialize - get plugins and MD field settings from config
    private static void init()
    {
        if (terms_schema == null)
        {
            String terms = ConfigurationManager.getProperty("embargo.field.terms");
            String lift = ConfigurationManager.getProperty("embargo.field.lift");
            if (terms == null || lift == null)
            {
                throw new IllegalStateException("Missing one or more of the required DSpace configuration properties for EmbargoManager, check your configuration file.");
            }
            terms_schema = getSchemaOf(terms);
            terms_element = getElementOf(terms);
            terms_qualifier = getQualifierOf(terms);
            lift_schema = getSchemaOf(lift);
            lift_element = getElementOf(lift);
            lift_qualifier = getQualifierOf(lift);

            setter = (EmbargoSetter)PluginManager.getSinglePlugin(EmbargoSetter.class);
            if (setter == null)
            {
                throw new IllegalStateException("The EmbargoSetter plugin was not defined in DSpace configuration.");
            }
            lifter = (EmbargoLifter)PluginManager.getSinglePlugin(EmbargoLifter.class);
            if (lifter == null)
            {
                throw new IllegalStateException("The EmbargoLifter plugin was not defined in DSpace configuration.");
            }
        }
    }

    // return the schema part of "schema.element.qualifier" metadata field spec
    private static String getSchemaOf(String field)
    {
        String sa[] = field.split("\\.", 3);
        return sa[0];
    }

    // return the element part of "schema.element.qualifier" metadata field spec, if any
    private static String getElementOf(String field)
    {
        String sa[] = field.split("\\.", 3);
        return sa.length > 1 ? sa[1] : null;
    }

    // return the qualifier part of "schema.element.qualifier" metadata field spec, if any
    private static String getQualifierOf(String field)
    {
        String sa[] = field.split("\\.", 3);
        return sa.length > 2 ? sa[2] : null;
    }

    // return the lift date assigned when embargo was set, or null, if either:
    // it was never under embargo, or the lift date has passed.
    private static DCDate recoverEmbargoDate(Item item)
    {
        DCDate liftDate = null;
        DCValue lift[] = item.getMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
        if (lift.length > 0)
        {
            liftDate = new DCDate(lift[0].value);
            // sanity check: do not allow an embargo lift date in the past.
            if (liftDate.toDate().before(new Date()))
            {
                liftDate = null;
            }
        }
        return liftDate;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.embargo;

import java.sql.SQLException;
import java.io.IOException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Constants;

/**
 * Default plugin implementation of the embargo setting function.
 * The parseTerms() provides only very rudimentary terms logic - entry
 * of a configurable string (in terms field) for 'unlimited' embargo, otherwise
 * a standard ISO 8601 (yyyy-mm-dd) date is assumed. Users are encouraged
 * to override this method for enhanced functionality.
 *
 * @author Larry Stone
 * @author Richard Rodgers
 */
public class DefaultEmbargoSetter implements EmbargoSetter
{
    /**
     * Token meaning "indefinite embargo", read from the
     * embargo.terms.open configuration property; may be null when the
     * property is not configured.
     */
    protected String termsOpen = null;

    public DefaultEmbargoSetter()
    {
        super();
        termsOpen = ConfigurationManager.getProperty("embargo.terms.open");
    }

    /**
     * Parse the terms into a definite date. Terms are expected to consist of
     * either: a token (value configured in 'embargo.terms.open' property) to indicate
     * indefinite embargo, or a literal lift date formatted in ISO 8601 format (yyyy-mm-dd)
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @param terms the embargo terms
     * @return parsed date in DCDate format
     */
    public DCDate parseTerms(Context context, Item item, String terms)
        throws SQLException, AuthorizeException, IOException
    {
        if (terms != null && terms.length() > 0)
        {
            // FIX: compare from the known-non-null side. The old
            // termsOpen.equals(terms) threw a NullPointerException whenever
            // embargo.terms.open was not configured (termsOpen == null).
            if (terms.equals(termsOpen))
            {
                return EmbargoManager.FOREVER;
            }
            else
            {
                return new DCDate(terms);
            }
        }
        return null;
    }

    /**
     * Enforce embargo by turning off all read access to bitstreams in
     * this Item.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     */
    public void setEmbargo(Context context, Item item)
        throws SQLException, AuthorizeException, IOException
    {
        for (Bundle bn : item.getBundles())
        {
            // Skip the LICENSE and METADATA bundles, they stay world-readable
            String bnn = bn.getName();
            if (!(bnn.equals(Constants.LICENSE_BUNDLE_NAME) || bnn.equals(Constants.METADATA_BUNDLE_NAME)))
            {
                AuthorizeManager.removePoliciesActionFilter(context, bn, Constants.READ);
                for (Bitstream bs : bn.getBitstreams())
                {
                    AuthorizeManager.removePoliciesActionFilter(context, bs, Constants.READ);
                }
            }
        }
    }

    /**
     * Check that embargo is properly set on Item: no read access to bitstreams.
     * Discrepancies are reported on System.out; nothing is changed.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     */
    public void checkEmbargo(Context context, Item item)
        throws SQLException, AuthorizeException, IOException
    {
        for (Bundle bn : item.getBundles())
        {
            // Skip the LICENSE and METADATA bundles, they stay world-readable
            String bnn = bn.getName();
            if (!(bnn.equals(Constants.LICENSE_BUNDLE_NAME) || bnn.equals(Constants.METADATA_BUNDLE_NAME)))
            {
                // don't report on "TEXT" or "THUMBNAIL" bundles; those
                // can have READ long as the bitstreams in them do not.
                if (!(bnn.equals("TEXT") || bnn.equals("THUMBNAIL")))
                {
                    // check for ANY read policies and report them:
                    for (ResourcePolicy rp : AuthorizeManager.getPoliciesActionFilter(context, bn, Constants.READ))
                    {
                        System.out.println("CHECK WARNING: Item "+item.getHandle()+", Bundle "+bn.getName()+" allows READ by "+
                          ((rp.getEPersonID() < 0) ? "Group "+rp.getGroup().getName() :
                                                     "EPerson "+rp.getEPerson().getFullName()));
                    }
                }

                for (Bitstream bs : bn.getBitstreams())
                {
                    for (ResourcePolicy rp : AuthorizeManager.getPoliciesActionFilter(context, bs, Constants.READ))
                    {
                        System.out.println("CHECK WARNING: Item "+item.getHandle()+", Bitstream "+bs.getName()+" (in Bundle "+bn.getName()+") allows READ by "+
                          ((rp.getEPersonID() < 0) ? "Group "+rp.getGroup().getName() :
                                                     "EPerson "+rp.getEPerson().getFullName()));
                    }
                }
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.embargo;

import java.sql.SQLException;
import java.io.IOException;
import java.util.Date;
import java.util.Properties;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;

/**
 * Plugin implementation of the embargo setting function. The parseTerms()
 * method performs a look-up to a table that relates a terms expression
 * to a fixed number of days. Table constructed from a dspace.cfg property
 * with syntax:
 *
 * embargo.terms.days = 90 days:90,1 year:365,2 years:730
 *
 * That is, an arbitrary, comma separated, list of &lt;terms&gt;:&lt;days&gt; where &lt;terms&gt;
 * can be any string and &lt;days&gt; must be a positive integer.
 * All the &lt;terms&gt; fields should be defined in a 'value-pairs' element,
 * and the field configured as the embargo terms should employ a drop-down using
 * that element in input_forms.xml, if user submission is desired.
 *
 * @author Richard Rodgers
 */
public class DayTableEmbargoSetter extends DefaultEmbargoSetter
{
    /** Lookup table mapping a terms expression to a number of days. */
    private Properties termProps = new Properties();

    public DayTableEmbargoSetter()
    {
        super();
        // load the terms -> days table from configuration
        String terms = ConfigurationManager.getProperty("embargo.terms.days");
        if (terms != null && terms.length() > 0)
        {
            for (String term : terms.split(","))
            {
                String[] parts = term.trim().split(":");
                // Fail fast with a clear message on a malformed entry instead
                // of the opaque ArrayIndexOutOfBoundsException the unchecked
                // parts[1] access would raise.
                if (parts.length != 2)
                {
                    throw new IllegalStateException(
                        "Malformed 'embargo.terms.days' entry (expected <terms>:<days>): \"" + term + "\"");
                }
                termProps.setProperty(parts[0].trim(), parts[1].trim());
            }
        }
    }

    /**
     * Parse the terms into a definite date. Only terms expressions processed
     * are those defined in 'embargo.terms.days' configuration property.
     *
     * @param context the DSpace context
     * @param item the item to embargo
     * @param terms the embargo terms
     * @return parsed date in DCDate format, or null when terms are unknown
     */
    @Override
    public DCDate parseTerms(Context context, Item item, String terms)
        throws SQLException, AuthorizeException, IOException
    {
        if (terms != null)
        {
            // Compare with 'terms' as receiver: termsOpen may be null when
            // 'embargo.terms.open' is not configured.
            if (terms.equals(termsOpen))
            {
                return EmbargoManager.FOREVER;
            }
            String days = termProps.getProperty(terms);
            if (days != null && days.length() > 0)
            {
                // lift date = now + <days> expressed in milliseconds
                long lift = System.currentTimeMillis()
                        + (Long.parseLong(days) * 24 * 60 * 60 * 1000);
                return new DCDate(new Date(lift));
            }
        }
        return null;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.embargo; import java.sql.SQLException; import java.io.IOException; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import org.dspace.core.Context; /** * Default plugin implementation of the embargo lifting function. * * @author Larry Stone * @author Richard Rodgers */ public class DefaultEmbargoLifter implements EmbargoLifter { public DefaultEmbargoLifter() { super(); } /** * Enforce lifting of embargo by turning read access to bitstreams in * this Item back on. * * @param context the DSpace context * @param item the item to embargo */ public void liftEmbargo(Context context, Item item) throws SQLException, AuthorizeException, IOException { // remove the item's policies and replace them with // the defaults from the collection item.inheritCollectionDefaultPolicies(item.getOwningCollection()); } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.embargo; import java.sql.SQLException; import java.io.IOException; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import org.dspace.core.Context; /** * Plugin interface for the embargo lifting function. * * @author Larry Stone * @author Richard Rodgers */ public interface EmbargoLifter { /** * Implement the lifting of embargo in the "resource policies" * (access control) by (for example) turning on default read access to all * Bitstreams. * * @param context the DSpace context * @param item the Item on which to lift the embargo */ public void liftEmbargo(Context context, Item item) throws SQLException, AuthorizeException, IOException; }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.usage; import org.dspace.services.model.Event; /** * A null implementation of AbstractUsageEvent to absorb events harmlessly and * cheaply. * * @author Mark H. Wood * @author Mark Diggory (mdiggory at atmire.com) * @version $Revision: 3734 $ */ public class PassiveUsageEventListener extends AbstractUsageEventListener { /** * Do nothing and return. Effectively, the event is discarded. */ public void receiveEvent(Event event) { return; } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.usage; import org.apache.log4j.Logger; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.core.Constants; import org.dspace.core.LogManager; import org.dspace.services.model.Event; import org.dspace.usage.UsageEvent.Action; /** * * @author Mark Diggory (mdiggory at atmire.com) * */ public class LoggerUsageEventListener extends AbstractUsageEventListener{ /** log4j category */ private static Logger log = Logger .getLogger(LoggerUsageEventListener.class); public void receiveEvent(Event event) { if(event instanceof UsageEvent) { UsageEvent ue = (UsageEvent)event; log.info(LogManager.getHeader( ue.getContext(), formatAction(ue.getAction(), ue.getObject()), formatMessage(ue.getObject())) ); } } private static String formatAction(Action action, DSpaceObject object) { try { String objText = Constants.typeText[object.getType()].toLowerCase(); return action.text() + "_" + objText; }catch(Exception e) { } return ""; } private static String formatMessage(DSpaceObject object) { try { String objText = Constants.typeText[object.getType()].toLowerCase(); String handle = object.getHandle(); /* Emulate Item logger */ if(handle != null && object instanceof Item) { return "handle=" + object.getHandle(); } else { return objText + "_id=" + object.getID(); } } catch(Exception e) { } return ""; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.usage;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.services.model.Event;

/**
 * Serialize AbstractUsageEvent data to a file as Tab deliminated. Requires
 * configuration: in dspace.cfg specify the path to the file as the value of
 * {@code usageEvent.tabFileLogger.file}.
 *
 * @author Mark H. Wood
 * @author Mark Diggory
 * @version $Revision: 3734 $
 */
public class TabFileUsageEventListener extends AbstractUsageEventListener
{
    /** log4j category */
    private static Logger errorLog = Logger
            .getLogger(TabFileUsageEventListener.class);

    /** File on which to write event records; null when unconfigured/unopenable */
    static PrintWriter log = null;

    public TabFileUsageEventListener()
    {
        if (null == log)
        {
            boolean appending;
            String logPath = ConfigurationManager
                    .getProperty("usageEvent.tabFileLogger.file");
            if (null == logPath)
            {
                errorLog
                        .error("UsageEventTabFileLogger unconfigured, will not log events");
                return;
            }

            // Relative paths are resolved against the configured log directory
            String logDir = null;
            if (!new File(logPath).isAbsolute())
            {
                logDir = ConfigurationManager.getProperty("log.dir");
            }
            File logFile = new File(logDir, logPath);

            // Remember whether the file already has content so the column
            // header is only written once, on first creation.
            appending = logFile.length() > 0;

            try
            {
                log = new PrintWriter(new OutputStreamWriter(
                        new FileOutputStream(logFile, true)));
            }
            catch (FileNotFoundException e)
            {
                errorLog
                        .error(
                                "UsageEventTabFileLogger cannot open file, will not log events",
                                e);
                return;
            }

            if (!appending)
            {
                // NOTE(review): header says "sourceAddress" but the record
                // actually logs the request URI — confirm intended column.
                log.println("date event objectType objectId sessionId sourceAddress eperson");
            }
        }
    }

    public void receiveEvent(Event event)
    {
        // (Removed leftover debug println of every event to stdout.)
        if (event instanceof UsageEvent)
        {
            UsageEvent ue = (UsageEvent) event;

            if (null == log)
            {
                return;
            }

            SimpleDateFormat dateFormat = new SimpleDateFormat(
                    "yyyyMMdd'T'HHmmssSSS");

            // Tab-separated record: date, event name, object type/id,
            // session id, request URI, eperson email (or "anonymous").
            String string = dateFormat.format(new Date());
            string += "\t" + ue.getName(); // event type
            string += "\t" + ue.getObject().getType();
            string += "\t" + ue.getObject().getID();
            string += "\t" + ue.getRequest().getSession().getId();
            string += "\t" + ue.getRequest().getRequestURI();
            String epersonName = (null == ue.getContext().getCurrentUser() ? "anonymous"
                    : ue.getContext().getCurrentUser().getEmail());
            string += "\t" + epersonName;
            log.println(string);
            log.flush();
        }
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.usage; import org.dspace.services.EventService; import org.dspace.services.model.EventListener; /** * AbstractUsageEventListener is used as the base class for listening events running * in the EventService. * * @author Mark Diggory (mdiggory at atmire.com) * @version $Revision: $ */ public abstract class AbstractUsageEventListener implements EventListener { public AbstractUsageEventListener() { super(); } /** * Empty String[] flags to have Listener * consume any event name prefixes. */ public String[] getEventNamePrefixes() { return new String[0]; } /** * Currently consumes events generated for * all resources. */ public String getResourcePrefix() { return null; } public void setEventService(EventService service) { if(service != null) { service.registerEventListener(this); } else { throw new IllegalStateException("EventService handed to Listener cannot be null"); } } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.usage;

import javax.servlet.http.HttpServletRequest;

import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.model.Event;

/**
 * Event fired when a DSpace object is used (viewed, searched, updated, ...),
 * carrying the HTTP request, DSpace context and the object acted upon.
 *
 * @author Mark Diggory (mdiggory at atmire.com)
 */
public class UsageEvent extends Event {

    /** The kind of usage that occurred; text() gives the lowercase label. */
    public static enum Action {
        VIEW ("view"),
        CREATE ("create"),
        UPDATE ("update"),
        DELETE ("delete"),
        ADD ("add"),
        REMOVE ("remove"),
        BROWSE ("browse"),
        SEARCH ("search"),
        LOGIN ("login"),
        SUBSCRIBE ("subscribe"),
        UNSUBSCRIBE ("unsubscribe"),
        WITHDRAW ("withdraw"),
        REINSTATE ("reinstate");

        private final String text;

        Action(String text) {
            this.text = text;
        }

        // package-private: label used when composing event/log names
        String text() {
            return text;
        }
    }

    /**
	 * 
	 */
    private static final long serialVersionUID = 1L;

    // transient: servlet/context/object references are not serialized with the event
    private transient HttpServletRequest request;

    private transient Context context;

    private transient DSpaceObject object;

    private Action action;

    /**
     * Validate constructor arguments and build the event name
     * ("<objecttype>:<action>"). Runs BEFORE field initialization because it
     * is invoked from the super(...) call in the constructor.
     * Returns "" when the name cannot be derived (best-effort; exception
     * deliberately swallowed).
     */
    private static String checkParams(Action action, HttpServletRequest request, Context context, DSpaceObject object)
    {
        if(action == null)
        {
            throw new IllegalStateException("action cannot be null");
        }

        if(request == null)
        {
            throw new IllegalStateException("request cannot be null");
        }

        if(context == null)
        {
            throw new IllegalStateException("context cannot be null");
        }

        if(object == null)
        {
            throw new IllegalStateException("object cannot be null");
        }

        try
        {
            String objText = Constants.typeText[object.getType()].toLowerCase();
            return objText + ":" + action.text();
        }
        catch(Exception e)
        {
            // best-effort name derivation: fall through to ""
        }
        return "";
    }

    /**
     * Construct a usage event.
     *
     * @param action what happened (never null)
     * @param request the triggering HTTP request (never null)
     * @param context the DSpace context (never null)
     * @param object the object acted upon (never null)
     */
    public UsageEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object)
    {
        // checkParams also rejects null arguments with IllegalStateException
        super(checkParams(action, request, context, object));

        this.action = action;

        // NOTE: object is guaranteed non-null by checkParams above; the
        // null-check here is defensive.
        this.setResourceReference(object != null ? Constants.typeText[object.getType()].toLowerCase() + ":" + object.getID() : null);

        // Mutating actions mark the event as a modification
        switch(action)
        {
            case CREATE:
            case UPDATE:
            case DELETE:
            case WITHDRAW:
            case REINSTATE:
            case ADD:
            case REMOVE:
                this.setModify(true);
                break;
            default :
                this.setModify(false);
        }

        if(context != null && context.getCurrentUser() != null)
        {
            this.setUserId(
                    String.valueOf(context.getCurrentUser().getID()));
        }
        this.request = request;
        this.context = context;
        this.object = object;
    }

    public HttpServletRequest getRequest()
    {
        return request;
    }

    public void setRequest(HttpServletRequest request)
    {
        this.request = request;
    }

    public Context getContext()
    {
        return context;
    }

    public void setContext(Context context)
    {
        this.context = context;
    }

    public DSpaceObject getObject()
    {
        return object;
    }

    public void setObject(DSpaceObject object)
    {
        this.object = object;
    }

    public Action getAction()
    {
        return this.action;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.plugin; /** * General exception class for all code that runs as a plugin in DSpace * * @author Richard Jones * */ public class PluginException extends Exception { /** * basic constructor * */ public PluginException() { super(); } /** * Construct an exception with the passed message * * @param message a message for the exception */ public PluginException(String message) { super(message); } /** * Construct an exception with the passed message to encapsulate * the passed Throwable * * @param message a message for the exception * @param e throwable which triggered this exception */ public PluginException(String message, Throwable e) { super(message, e); } /** * Construct an exception to encapsulate the passed Throwable * * @param e the throwable which triggered this exception */ public PluginException(Throwable e) { super(e); } }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.plugin; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Community; import org.dspace.core.Context; /** * Interface that must be implemented by any plugin wanting to be called at * the inception of the Community home page (in HandleServlet). Classes that implement the process method * and appear in the configuration will be run before the at the start of preparing the community home page has any * chance to continue its execution * * @author Richard Jones * */ public interface CommunityHomeProcessor { /** * execute the process * * @param context the DSpace context * @param request the HTTP request * @param response the HTTP response * @param community The community object whose home page we are on * * @throws PluginException any particular problem with the plugin execution * @throws AuthorizeException Authorisation errors during plugin execution */ void process(Context context, HttpServletRequest request, HttpServletResponse response, Community community) throws PluginException, AuthorizeException; }
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.plugin; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.core.Context; /** * Interface that must be implemented by any plugin wanting to be called at the * inception of the Collection home page (in HandleServlet). Classes that * implement the process method and appear in the configuration will be run * before the at the start of preparing the collection home page has any chance * to continue its execution * * @author Richard Jones * */ public interface CollectionHomeProcessor { /** * execute the process * * @param context the DSpace context * @param request the HTTP request * @param response the HTTP response * @param collection the collection object whose home page we are on * * @throws PluginException any particular problem with the plugin execution * @throws AuthorizeException Authorisation errors during plugin execution */ void process(Context context, HttpServletRequest request, HttpServletResponse response, Collection collection) throws PluginException, AuthorizeException; }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.commons.pool.KeyedObjectPool;
import org.apache.commons.pool.KeyedPoolableObjectFactory;
import org.apache.commons.pool.PoolUtils;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;

/**
 * Class for managing the content event environment. The EventManager mainly
 * acts as a factory for Dispatchers, which are used by the Context to send
 * events to consumers. It also contains generally useful utility methods.
 *
 * Version: $Revision: 5844 $
 */
public class EventManager
{
    /** log4j category */
    private static Logger log = Logger.getLogger(EventManager.class);

    // The name of the default dispatcher assigned to every new context unless
    // overridden
    public static final String DEFAULT_DISPATCHER = "default";

    private static DispatcherPoolFactory dispatcherFactory = null;

    private static GenericKeyedObjectPool.Config poolConfig = null;

    // Keyed FIFO Pool of event dispatchers
    private static KeyedObjectPool dispatcherPool = null;

    // Maps each configured consumer name to a stable bit index
    private static Map<String, Integer> consumerIndicies = null;

    private static final String CONSUMER_PFX = "event.consumer.";

    public EventManager()
    {
        initPool();
        log.info("Event Dispatcher Pool Initialized");
    }

    /**
     * Lazily create the shared dispatcher pool. Synchronized so that two
     * threads hitting first use concurrently cannot both create a pool
     * (the statics here are shared process-wide).
     */
    private static synchronized void initPool()
    {
        if (dispatcherPool == null)
        {
            // TODO EVENT Some of these pool configuration
            // parameters can live in dspace.cfg or a
            // separate configuration file

            // TODO EVENT Eviction parameters should be set
            poolConfig = new GenericKeyedObjectPool.Config();
            poolConfig.maxActive = 100;
            poolConfig.maxIdle = 5;
            poolConfig.maxTotal = 100;

            try
            {
                dispatcherFactory = new DispatcherPoolFactory();
                dispatcherPool = PoolUtils
                        .synchronizedPool(new GenericKeyedObjectPool(
                                dispatcherFactory, poolConfig));

                enumerateConsumers();
            }
            catch (Exception e)
            {
                // Log through log4j instead of printStackTrace so the
                // failure reaches the DSpace logs.
                log.error("Unable to initialize event dispatcher pool", e);
            }
        }
    }

    /**
     * Get dispatcher for configuration named by "name". Returns cached instance
     * if one exists.
     *
     * @param name dispatcher configuration name; null means the default
     * @return a pooled Dispatcher (return it via returnDispatcher when done)
     */
    public static Dispatcher getDispatcher(String name)
    {
        if (dispatcherPool == null)
        {
            initPool();
        }

        if (name == null)
        {
            name = DEFAULT_DISPATCHER;
        }

        try
        {
            return (Dispatcher) dispatcherPool.borrowObject(name);
        }
        catch (Exception e)
        {
            throw new IllegalStateException("Unable to aquire dispatcher named " + name, e);
        }
    }

    /**
     * Return a borrowed dispatcher to the pool.
     *
     * @param key the name it was borrowed under
     * @param disp the dispatcher to return
     */
    public static void returnDispatcher(String key, Dispatcher disp)
    {
        try
        {
            dispatcherPool.returnObject(key, disp);
        }
        catch (Exception e)
        {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Look up the bit index assigned to a consumer name.
     *
     * @return the index, or -1 when unknown. NOTE(review): assumes
     *         enumerateConsumers() has already run (initPool) — a call before
     *         initialization would NPE on the null map.
     */
    protected static int getConsumerIndex(String consumerClass)
    {
        Integer index = (Integer) consumerIndicies.get(consumerClass);
        return index != null ? index.intValue() : -1;
    }

    /**
     * Scan the DSpace configuration for "event.consumer.<name>.class" keys
     * and assign each consumer name a sequential bit index.
     */
    private static void enumerateConsumers()
    {
        Enumeration propertyNames = ConfigurationManager.propertyNames();
        int bitSetIndex = 0;

        if (consumerIndicies == null)
        {
            consumerIndicies = new HashMap<String, Integer>();
        }

        while (propertyNames.hasMoreElements())
        {
            String ckey = ((String) propertyNames.nextElement()).trim();

            if (ckey.startsWith(CONSUMER_PFX) && ckey.endsWith(".class"))
            {
                // strip prefix and trailing ".class" (6 chars)
                String consumerName = ckey.substring(CONSUMER_PFX.length(),
                        ckey.length() - 6);

                consumerIndicies.put(consumerName, Integer.valueOf(bitSetIndex));
                bitSetIndex++;
            }
        }
    }

    /**
     * KeyedPoolableObjectFactory that builds Dispatcher instances (with their
     * configured consumers attached) on demand for the dispatcher pool.
     */
    static class DispatcherPoolFactory implements KeyedPoolableObjectFactory
    {
        // Prefix of keys in DSpace Configuration
        private static final String PROP_PFX = "event.dispatcher.";

        // Cache of dispatcher class names, keyed by dispatcher name, for re-use.
        private static Map<String, String> dispatchers = new HashMap<String, String>();

        public DispatcherPoolFactory()
        {
            parseEventConfig();
            // was an empty log.info("") — log something meaningful instead
            log.info("DispatcherPoolFactory initialized with "
                    + dispatchers.size() + " dispatcher configuration(s)");
        }

        /**
         * Instantiate the named dispatcher (via its String-arg constructor)
         * and attach every consumer listed in its ".consumers" property.
         */
        public Object makeObject(Object dispatcherName) throws Exception
        {
            Dispatcher dispatcher = null;
            String dispClass = dispatchers.get(dispatcherName);

            if (dispClass != null)
            {
                try
                {
                    // all this to call a constructor with an argument
                    final Class argTypes[] = { String.class };
                    Constructor dc = Class.forName(dispClass).getConstructor(
                            argTypes);
                    Object args[] = new Object[1];
                    args[0] = dispatcherName;
                    dispatcher = (Dispatcher) dc.newInstance(args);

                    // OK, now get its list of consumers/filters
                    String consumerKey = PROP_PFX + dispatcherName
                            + ".consumers";
                    String consumerList = ConfigurationManager
                            .getProperty(consumerKey);
                    if (consumerList == null)
                    {
                        throw new IllegalStateException(
                                "No Configuration entry found for consumer list of event Dispatcher: \""
                                        + consumerKey + "\"");
                    }

                    // Consumer list format:
                    // <consumer-name>:<mode>, ...
                    String[] consumerStanza = consumerList.trim().split(
                            "\\s*,\\s*");

                    // I think this should be a fatal error.. --lcs
                    if (consumerStanza.length < 1)
                    {
                        throw new IllegalStateException(
                                "Cannot initialize Dispatcher, malformed Configuration value for "
                                        + consumerKey);
                    }

                    ConsumerProfile consumerProfile = null;

                    // parts: 0 is name, part 1 is mode.
                    for (int i = 0; i < consumerStanza.length; i++)
                    {
                        consumerProfile = ConsumerProfile
                                .makeConsumerProfile(consumerStanza[i]);
                        consumerProfile.getConsumer().initialize();

                        dispatcher.addConsumerProfile(consumerProfile);
                    }
                }
                catch (NoSuchMethodException e)
                {
                    throw new IllegalStateException(
                            "Constructor not found for event dispatcher="
                                    + dispatcherName, e);
                }
                catch (InvocationTargetException e)
                {
                    throw new IllegalStateException(
                            "Error creating event dispatcher="
                                    + dispatcherName, e);
                }
                catch (ClassNotFoundException e)
                {
                    throw new IllegalStateException(
                            "Dispatcher/Consumer class not found for event dispatcher="
                                    + dispatcherName, e);
                }
                catch (InstantiationException e)
                {
                    throw new IllegalStateException(
                            "Dispatcher/Consumer instantiation failure for event dispatcher="
                                    + dispatcherName, e);
                }
                catch (IllegalAccessException e)
                {
                    throw new IllegalStateException(
                            "Dispatcher/Consumer access failure for event dispatcher="
                                    + dispatcherName, e);
                }
            }
            else
            {
                throw new IllegalStateException(
                        "Requested Dispatcher Does Not Exist In DSpace Configuration!");
            }

            return dispatcher;
        }

        public void activateObject(Object arg0, Object arg1) throws Exception
        {
            // No-op
            return;
        }

        /** Give each attached consumer a chance to finish() before disposal. */
        public void destroyObject(Object key, Object dispatcher)
                throws Exception
        {
            Context ctx = new Context();
            for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
                    .iterator(); ci.hasNext();)
            {
                ConsumerProfile cp = (ConsumerProfile) ci.next();
                if (cp != null)
                {
                    cp.getConsumer().finish(ctx);
                }
            }
            return;
        }

        public void passivateObject(Object arg0, Object arg1) throws Exception
        {
            // No-op
            return;
        }

        public boolean validateObject(Object arg0, Object arg1)
        {
            // No-op
            return false;
        }

        /**
         * Looks through the configuration for dispatcher configurations and
         * loads one of each into a HashMap. This Map will be used to clone new
         * objects when the pool needs them.
         *
         * Looks for configuration properties like:
         *
         * <pre>
         *  # class of dispatcher &quot;default&quot;
         *  event.dispatcher.default = org.dspace.event.BasicDispatcher
         *  # list of consumers followed by filters for each, format is
         *  #   &lt;consumerClass&gt;:&lt;filter&gt;[:&lt;anotherFilter&gt;..] , ...
         *  # and each filter is expressed as:
         *  #   &lt;objectType&gt;[|&lt;objectType&gt; ...] + &lt;eventType&gt;[|&lt;eventType&gt; ..]
         *  org.dspace.event.TestConsumer:all+all, \
         *  org.dspace.eperson.SubscribeConsumer:Item+CREATE|DELETE:Collection+ADD, ...
         * </pre>
         */
        private void parseEventConfig()
        {
            Enumeration propertyNames = ConfigurationManager.propertyNames();
            while (propertyNames.hasMoreElements())
            {
                String ckey = ((String) propertyNames.nextElement()).trim();

                if (ckey.startsWith(PROP_PFX) && ckey.endsWith(".class"))
                {
                    String name = ckey.substring(PROP_PFX.length(), ckey
                            .length() - 6);
                    String dispatcherClass = ConfigurationManager
                            .getProperty(ckey);

                    // Can we grab all of the consumers configured for this
                    // dispatcher and store them also? Then there is no
                    // ConfigurationManager call upon other makeObject(key)
                    // requests resulting in a faster pool get.

                    dispatchers.put(name, dispatcherClass);
                }
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

/**
 * Demonstration and test consumer for the event system. This consumer only
 * makes an entry in the log, and on an output stream, for each event it
 * receives. It also logs when consume() and end() get called. It is intended
 * for testing, exploring, and debugging the event system.
 *
 * @version $Revision: 5844 $
 */
public class TestConsumer implements Consumer
{
    // Log4j logger
    private static Logger log = Logger.getLogger(TestConsumer.class);

    // Send diagnostic output here - set to null to turn it off.
    // Controlled by the boolean 'testConsumer.verbose' configuration property.
    private static PrintStream out = ConfigurationManager
            .getBooleanProperty("testConsumer.verbose") ? System.out : null;

    public void initialize() throws Exception
    {
        log.info("EVENT: called TestConsumer.initialize();");
        if (out != null)
        {
            out.println("TestConsumer.initialize();");
        }
    }

    /**
     * Consume a content event - display it in detail.
     *
     * @param ctx
     *            DSpace context
     * @param event
     *            Content event
     */
    public void consume(Context ctx, Event event) throws Exception
    {
        EPerson ep = ctx.getCurrentUser();
        String user = (ep == null) ? "(none)" : ep.getEmail();
        String detail = event.getDetail();
        // One long key=value line describing every field of the event
        String msg = "EVENT: called TestConsumer.consume(): EventType="
                + event.getEventTypeAsString()
                + ", SubjectType="
                + event.getSubjectTypeAsString()
                + ", SubjectID="
                + String.valueOf(event.getSubjectID())
                + ", ObjectType="
                + event.getObjectTypeAsString()
                + ", ObjectID="
                + String.valueOf(event.getObjectID())
                + ", TimeStamp="
                + applyDateFormat(new Date(event.getTimeStamp()))
                + ", user=\""
                + user
                + "\""
                + ", extraLog=\""
                + ctx.getExtraLogInfo()
                + "\""
                + ", dispatcher="
                + String.valueOf(event.getDispatcher())
                + ", detail="
                + (detail == null ? "[null]" : "\"" + detail + "\"")
                + ", transactionID="
                + (event.getTransactionID() == null ? "[null]" : "\""
                        + event.getTransactionID() + "\"") + ", context="
                + ctx.toString();
        log.info(msg);
        if (out != null)
        {
            out.println("TestConsumer.consume(): " + msg);
        }
    }

    public void end(Context ctx) throws Exception
    {
        log.info("EVENT: called TestConsumer.end();");
        if (out != null)
        {
            out.println("TestConsumer.end();");
        }
    }

    public void finish(Context ctx) throws Exception
    {
        log.info("EVENT: called TestConsumer.finish();");
        if (out != null)
        {
            out.println("TestConsumer.finish();");
        }
    }

    // Formats a timestamp; a fresh SimpleDateFormat per call avoids
    // shared-state thread-safety problems.
    private String applyDateFormat(Date thisDate)
    {
        return new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss.SSS Z").format(thisDate);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.event; import org.dspace.core.Context; /** * Interface for content event consumers. Note that the consumer cannot tell if * it is invoked synchronously or asynchronously; the consumer interface and * sequence of calls is the same for both. Asynchronous consumers may see more * consume() calls between the start and end of the event stream, if they are * invoked asynchronously, once in a long time period, rather than synchronously * after every Context.commit(). * * @version $Revision: 5844 $ */ public interface Consumer { /** * Initialize - allocate any resources required to operate. This may include * initializing any pooled JMS resources. Called ONCE when created by the * dispatcher pool. This should be used to set up expensive resources that * will remain for the lifetime of the consumer. */ public void initialize() throws Exception; /** * Consume an event; events may get filtered at the dispatcher level, hiding * it from the consumer. This behavior is based on the dispatcher/consumer * configuration. Should include logic to initialize any resources required * for a batch of events. * * @param ctx * the execution context object * * @param event * the content event */ public void consume(Context ctx, Event event) throws Exception; /** * Signal that there are no more events queued in this event stream and * event processing for the preceding consume calls should be finished up. */ public void end(Context ctx) throws Exception; /** * Finish - free any allocated resources. Called when consumer (via it's * parent dispatcher) is going to be destroyed by the dispatcher pool. */ public void finish(Context ctx) throws Exception; }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;

/**
 * An instance of this class contains the configuration profile of a specific,
 * named Consumer, <em>in the context of a specific Dispatcher</em>. This
 * includes the name, the class to instantiate and event filters. Note that all
 * characteristics are "global" and the same for all dispatchers.
 *
 * @version $Revision: 5844 $
 */
public class ConsumerProfile
{
    /** log4j category — was incorrectly bound to EventManager.class */
    private static Logger log = Logger.getLogger(ConsumerProfile.class);

    /** Name matching the key in DSpace Configuration */
    private String name;

    /** Instance of configured consumer class */
    private Consumer consumer;

    /** Filters - each is an array of 2 bitmasks, action mask and subject mask */
    private List<int[]> filters;

    // Prefix of keys in DSpace Configuration.
    private static final String CONSUMER_PREFIX = "event.consumer.";

    /**
     * Constructor.
     */
    private ConsumerProfile(String name)
    {
        this.name = name;
    }

    /**
     * Factory method, create new profile from configuration.
     *
     * @param name
     *            configuration name of the consumer profile
     * @return a new ConsumerProfile; never null.
     */
    public static ConsumerProfile makeConsumerProfile(String name)
            throws IllegalArgumentException, ClassNotFoundException,
            InstantiationException, IllegalAccessException
    {
        ConsumerProfile result = new ConsumerProfile(name);
        result.readConfiguration();
        return result;
    }

    /**
     * Get class and filters from DSpace Configuration: reads
     * event.consumer.<name>.class and event.consumer.<name>.filters,
     * instantiates the consumer and parses the filter bitmasks.
     */
    private void readConfiguration() throws IllegalArgumentException,
            ClassNotFoundException, InstantiationException,
            IllegalAccessException
    {
        String className = ConfigurationManager.getProperty(CONSUMER_PREFIX
                + name + ".class");
        String filterString = ConfigurationManager.getProperty(CONSUMER_PREFIX
                + name + ".filters");
        if (className == null)
        {
            throw new IllegalArgumentException(
                    "No class configured for consumer named: " + name);
        }
        if (filterString == null)
        {
            throw new IllegalArgumentException(
                    "No filters configured for consumer named: " + name);
        }

        consumer = (Consumer) Class.forName(className.trim()).newInstance();

        // Each "filter" is <objectTypes> + <eventTypes> : ...
        filters = new ArrayList<int[]>();
        String part[] = filterString.trim().split(":");
        for (int j = 0; j < part.length; ++j)
        {
            String fpart[] = part[j].split("\\+");
            if (fpart.length != 2)
            {
                // error messages now name the ".filters" property actually
                // read here (they previously said ".consumers")
                log
                        .error("Bad Filter clause in consumer stanza in Configuration entry for "
                                + CONSUMER_PREFIX + name + ".filters: " + part[j]);
            }
            else
            {
                int filter[] = new int[2];
                filter[0] = 0;
                filter[1] = 0;
                String objectNames[] = fpart[0].split("\\|");
                for (int k = 0; k < objectNames.length; ++k)
                {
                    int ot = Event.parseObjectType(objectNames[k]);
                    if (ot == 0)
                    {
                        log
                                .error("Bad ObjectType in Consumer Stanza in Configuration entry for "
                                        + CONSUMER_PREFIX
                                        + name
                                        + ".filters: " + objectNames[k]);
                    }
                    else
                    {
                        filter[Event.SUBJECT_MASK] |= ot;
                    }
                }
                String eventNames[] = fpart[1].split("\\|");
                for (int k = 0; k < eventNames.length; ++k)
                {
                    int et = Event.parseEventType(eventNames[k]);
                    if (et == 0)
                    {
                        log
                                .error("Bad EventType in Consumer Stanza in Configuration entry for "
                                        + CONSUMER_PREFIX
                                        + name
                                        + ".filters: " + eventNames[k]);
                    }
                    else
                    {
                        filter[Event.EVENT_MASK] |= et;
                    }
                }
                filters.add(filter);
            }
        }
    }

    public Consumer getConsumer()
    {
        return consumer;
    }

    public List<int[]> getFilters()
    {
        return filters;
    }

    public String getName()
    {
        return name;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;

/**
 * An Event object represents a single action that changed one object in the
 * DSpace data model. An "atomic" action at the application or business-logic
 * API level may spawn many of these events.
 * <p>
 * This class includes tools to help set and use the contents of the event. Note
 * that it describes DSpace data object types in two ways: by the type
 * identifiers in the Constants class, and also by an Event-specific bitmask
 * (used by its internal filters). All public API calls use the Constants
 * version of the data model types.
 * <p>
 * Note that the type of the event itself is actually descriptive of the
 * <em>action</em> it performs: ADD, MODIFY, etc. The most significant
 * elements of the event are:
 * <ul>
 * <li>(Action) Type</li>
 * <li>Subject -- DSpace object to which the action applies, e.g. the Collection
 * to which an ADD adds a member.</li>
 * <li>Object -- optional, when present it is the other object effected by an
 * action, e.g. the Item ADDed to a Collection by an ADD.</li>
 * <li>detail -- a textual summary of what changed. Content and its
 * significance varies by the combination of action and subject type.</li>
 * <li>timestamp -- exact millisecond timestamp at which event was logged.</li>
 * </ul>
 *
 * @version $Revision: 5844 $
 */
public class Event implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** ---------- Constants ------------- * */

    /** Event (Action) types -- each is a distinct bit so they can be OR'd into filter masks. */
    public static final int CREATE = 1 << 0; // create new object

    public static final int MODIFY = 1 << 1; // modify object

    public static final int MODIFY_METADATA = 1 << 2; // modify object

    public static final int ADD = 1 << 3; // add content to container

    public static final int REMOVE = 1 << 4; // remove content from container

    public static final int DELETE = 1 << 5; // destroy object

    public static final int INSTALL = 1 << 6; // object exits workspace/flow

    /** Index of filter parts in their array: */
    public static final int SUBJECT_MASK = 0; // mask of subject types

    public static final int EVENT_MASK = 1; // mask of event type

    // Text names indexed by bit position (log2 of the action constant).
    // XXX NOTE: keep this up to date with any changes to event (action) types.
    private static final String eventTypeText[] = { "CREATE", "MODIFY",
            "MODIFY_METADATA", "ADD", "REMOVE", "DELETE", "INSTALL" };

    /** XXX NOTE: These constants must be kept synchronized * */
    /** XXX NOTE: with ALL_OBJECTS_MASK *AND* objTypeToMask hash * */
    private static final int NONE = 0;

    private static final int BITSTREAM = 1 << Constants.BITSTREAM; // 0

    private static final int BUNDLE = 1 << Constants.BUNDLE; // 1

    private static final int ITEM = 1 << Constants.ITEM; // 2

    private static final int COLLECTION = 1 << Constants.COLLECTION; // 3

    private static final int COMMUNITY = 1 << Constants.COMMUNITY; // 4

    private static final int SITE = 1 << Constants.SITE; // 5

    private static final int GROUP = 1 << Constants.GROUP; // 6

    private static final int EPERSON = 1 << Constants.EPERSON; // 7

    private static final int ALL_OBJECTS_MASK = BITSTREAM | BUNDLE | ITEM
            | COLLECTION | COMMUNITY | SITE | GROUP | EPERSON;

    // Bidirectional translation between Constants type IDs and local bitmasks.
    private static Map<Integer, Integer> objTypeToMask = new HashMap<Integer, Integer>();

    private static Map<Integer, Integer> objMaskToType = new HashMap<Integer, Integer>();

    static
    {
        objTypeToMask.put(Constants.BITSTREAM, BITSTREAM);
        objMaskToType.put(BITSTREAM, Constants.BITSTREAM);

        objTypeToMask.put(Constants.BUNDLE, BUNDLE);
        objMaskToType.put(BUNDLE, Constants.BUNDLE);

        objTypeToMask.put(Constants.ITEM, ITEM);
        objMaskToType.put(ITEM, Constants.ITEM);

        objTypeToMask.put(Constants.COLLECTION, COLLECTION);
        objMaskToType.put(COLLECTION, Constants.COLLECTION);

        objTypeToMask.put(Constants.COMMUNITY, COMMUNITY);
        objMaskToType.put(COMMUNITY, Constants.COMMUNITY);

        objTypeToMask.put(Constants.SITE, SITE);
        objMaskToType.put(SITE, Constants.SITE);

        objTypeToMask.put(Constants.GROUP, GROUP);
        objMaskToType.put(GROUP, Constants.GROUP);

        objTypeToMask.put(Constants.EPERSON, EPERSON);
        objMaskToType.put(EPERSON, Constants.EPERSON);
    }

    /** ---------- Event Fields ------------- * */

    /** identifier of Dispatcher that created this event (hash of its name) */
    private int dispatcher;

    /** event (action) type - above enumeration */
    private int eventType;

    /** object-type of SUBJECT - see above enumeration (stored as local bitmask) */
    private int subjectType;

    /** database ID of the subject instance */
    private int subjectID;

    /** object-type of OBJECT - see above enumeration (stored as local bitmask) */
    private int objectType = NONE;

    /** database ID of the object instance; -1 when no object was set */
    private int objectID = -1;

    /** timestamp */
    private long timeStamp;

    /** "detail" - arbitrary field for relevant detail, */
    /** e.g. former handle for DELETE event since obj is no longer available. */
    /**
     * FIXME This field is not a complete view of the DSpaceObject that was
     * modified. Providing these objects to the consumer (e.g. by storing
     * lifecycle versions of the changed objects in the context) would provide
     * for more complex consumer abilities that are beyond our purview.
     */
    private String detail;

    /** unique key to bind together events from one context's transaction */
    private String transactionID;

    /** identity of authenticated user, i.e. context.getCurrentUser(). */
    /** Only needed in the event for marshalling for asynch event messages */
    private int currentUser = -1;

    /** copy of context's "extraLogInfo" field. Used only for */
    /** marshalling for asynch event messages. */
    private String extraLogInfo = null;

    // Tracks which consumers have already consumed this event; bit index is
    // the consumer's index as assigned by EventManager.
    private BitSet consumedBy = new BitSet();

    /** log4j category */
    private static Logger log = Logger.getLogger(Event.class);

    /**
     * Constructor for an event with no "object" (subject only).
     *
     * @param eventType
     *            action type, e.g. Event.ADD.
     * @param subjectType
     *            DSpace Object Type of subject e.g. Constants.ITEM.
     * @param subjectID
     *            database ID of subject instance.
     * @param detail
     *            detail information that depends on context.
     */
    public Event(int eventType, int subjectType, int subjectID, String detail)
    {
        this.eventType = eventType;
        this.subjectType = coreTypeToMask(subjectType);
        this.subjectID = subjectID;
        timeStamp = System.currentTimeMillis();
        this.detail = detail;
    }

    /**
     * Constructor for an event with both a subject and an object.
     *
     * @param eventType
     *            action type, e.g. Event.ADD.
     * @param subjectType
     *            DSpace Object Type of subject e.g. Constants.ITEM.
     * @param subjectID
     *            database ID of subject instance.
     * @param objectType
     *            DSpace Object Type of object e.g. Constants.BUNDLE.
     * @param objectID
     *            database ID of object instance.
     * @param detail
     *            detail information that depends on context.
     */
    public Event(int eventType, int subjectType, int subjectID, int objectType,
            int objectID, String detail)
    {
        this.eventType = eventType;
        this.subjectType = coreTypeToMask(subjectType);
        this.subjectID = subjectID;
        this.objectType = coreTypeToMask(objectType);
        this.objectID = objectID;
        timeStamp = System.currentTimeMillis();
        this.detail = detail;
    }

    /**
     * Compare two events. Ignore any difference in the timestamps. Also ignore
     * transactionID since that is not always set initially.
     *
     * @param other
     *            the event to compare this one to
     * @return true if events are "equal", false otherwise.
     */
    public boolean equals(Object other)
    {
        if (other instanceof Event)
        {
            Event otherEvent = (Event) other;
            return (this.detail == null ? otherEvent.detail == null
                    : this.detail.equals(otherEvent.detail))
                    && this.eventType == otherEvent.eventType
                    && this.subjectType == otherEvent.subjectType
                    && this.subjectID == otherEvent.subjectID
                    && this.objectType == otherEvent.objectType
                    && this.objectID == otherEvent.objectID;
        }
        return false;
    }

    // Built from exactly the fields equals() compares, keeping the
    // equals/hashCode contract intact.
    public int hashCode()
    {
        return new HashCodeBuilder().append(this.detail)
                .append(eventType)
                .append(subjectType)
                .append(subjectID)
                .append(objectType)
                .append(objectID)
                .toHashCode();
    }

    /**
     * Set the identifier of the dispatcher that first processed this event.
     *
     * @param id
     *            the unique (hash code) value characteristic of the dispatcher.
     */
    public void setDispatcher(int id)
    {
        dispatcher = id;
    }

    // translate a "core.Constants" object type value to local bitmask value;
    // returns -1 for an unknown type.
    private static int coreTypeToMask(int core)
    {
        Integer mask = objTypeToMask.get(core);
        if (mask == null)
        {
            return -1;
        }
        else
        {
            return mask.intValue();
        }
    }

    // translate bitmask object-type to "core.Constants" object type;
    // returns -1 for an unknown mask.
    private static int maskTypeToCore(int mask)
    {
        Integer core = objMaskToType.get(mask);
        if (core == null)
        {
            return -1;
        }
        else
        {
            return core.intValue();
        }
    }

    /**
     * Get the DSpace object which is the "object" of an event.
     *
     * @return DSpaceObject or null if none can be found or no object was set.
     */
    public DSpaceObject getObject(Context context) throws SQLException
    {
        int type = getObjectType();
        int id = getObjectID();
        if (type < 0 || id < 0)
        {
            return null;
        }
        else
        {
            return DSpaceObject.find(context, type, id);
        }
    }

    /**
     * Syntactic sugar to get the DSpace object which is the "subject" of an
     * event.
     *
     * @return DSpaceObject or null if none can be found.
     */
    public DSpaceObject getSubject(Context context) throws SQLException
    {
        return DSpaceObject.find(context, getSubjectType(), getSubjectID());
    }

    /**
     * @return database ID of subject of this event.
     */
    public int getSubjectID()
    {
        return subjectID;
    }

    /**
     * @return database ID of object of this event, or -1 if none was set.
     */
    public int getObjectID()
    {
        return objectID;
    }

    /**
     * @return type number (e.g. Constants.ITEM) of subject of this event.
     */
    public int getSubjectType()
    {
        return maskTypeToCore(subjectType);
    }

    /**
     * @return type number (e.g. Constants.ITEM) of object of this event, or -1
     *         if none was set.
     */
    public int getObjectType()
    {
        return maskTypeToCore(objectType);
    }

    /**
     * @return type of subject of this event as a String, e.g. for logging.
     */
    public String getSubjectTypeAsString()
    {
        int i = log2(subjectType);
        if (i >= 0 && i < Constants.typeText.length)
        {
            return Constants.typeText[i];
        }
        else
        {
            return "(Unknown)";
        }
    }

    /**
     * @return type of object of this event as a String, e.g. for logging.
     */
    public String getObjectTypeAsString()
    {
        int i = log2(objectType);
        if (i >= 0 && i < Constants.typeText.length)
        {
            return Constants.typeText[i];
        }
        else
        {
            return "(Unknown)";
        }
    }

    /**
     * Translate a textual DSpace Object type name into an event subject-type
     * mask. NOTE: This returns a BIT-MASK, not a numeric type value; the mask
     * is only used within the event system.
     *
     * @param s
     *            text name of object type.
     * @return numeric value of object type or 0 for error.
     */
    public static int parseObjectType(String s)
    {
        // "*" or "all" selects every object type at once.
        if ("*".equals(s) || "all".equalsIgnoreCase(s))
        {
            return ALL_OBJECTS_MASK;
        }
        else
        {
            int id = Constants.getTypeID(s.toUpperCase());
            if (id >= 0)
            {
                return 1 << id;
            }
        }
        return 0;
    }

    /**
     * @return event-type (i.e. action) this event, one of the masks like
     *         Event.ADD defined above.
     */
    public int getEventType()
    {
        return eventType;
    }

    /**
     * Get the text name of event (action) type.
     *
     * @return event-type (i.e. action) this event as a String, e.g. for
     *         logging.
     */
    public String getEventTypeAsString()
    {
        int i = log2(eventType);
        if (i >= 0 && i < eventTypeText.length)
        {
            return eventTypeText[i];
        }
        else
        {
            return "(Unknown)";
        }
    }

    /**
     * Interpret named event type.
     *
     * @param s
     *            name of event type.
     * @return numeric value of event type or 0 for error.
     */
    public static int parseEventType(String s)
    {
        // "*" or "all" selects every event type at once.
        if ("*".equals(s) || "all".equalsIgnoreCase(s))
        {
            int result = 0;
            for (int i = 0; i < eventTypeText.length; ++i)
            {
                result |= (1 << i);
            }
            return result;
        }

        for (int i = 0; i < eventTypeText.length; ++i)
        {
            if (eventTypeText[i].equalsIgnoreCase(s))
            {
                return 1 << i;
            }
        }
        return 0;
    }

    /**
     * @return timestamp at which event occurred, as a count of milliseconds
     *         since the epoch (standard Java format).
     */
    public long getTimeStamp()
    {
        return timeStamp;
    }

    /**
     * @return hashcode identifier of name of Dispatcher which first dispatched
     *         this event. (Needed by asynch dispatch code.)
     */
    public int getDispatcher()
    {
        return dispatcher;
    }

    /**
     * @return value of detail element of the event.
     */
    public String getDetail()
    {
        return detail;
    }

    /**
     * @return value of transactionID element of the event.
     */
    public String getTransactionID()
    {
        return transactionID;
    }

    /**
     * Sets value of transactionID element of the event.
     *
     * @param tid
     *            new value of transactionID.
     */
    public void setTransactionID(String tid)
    {
        transactionID = tid;
    }

    public void setCurrentUser(int uid)
    {
        currentUser = uid;
    }

    public int getCurrentUser()
    {
        return currentUser;
    }

    public void setExtraLogInfo(String info)
    {
        extraLogInfo = info;
    }

    public String getExtraLogInfo()
    {
        return extraLogInfo;
    }

    /**
     * Test whether this event would pass through a list of filters.
     * An event passes if ANY filter matches both its subject type and its
     * event type.
     *
     * @param filters
     *            list of filter masks; each one is an Array of two ints.
     * @return true if this event would be passed through the given filter
     *         list.
     */
    public boolean pass(List<int[]> filters)
    {
        boolean result = false;

        for (int filter[] : filters)
        {
            if ((subjectType & filter[SUBJECT_MASK]) != 0
                    && (eventType & filter[EVENT_MASK]) != 0)
            {
                result = true;
            }
        }

        if (log.isDebugEnabled())
        {
            log.debug("Filtering event: " + "eventType="
                    + String.valueOf(eventType) + ", subjectType="
                    + String.valueOf(subjectType) + ", result="
                    + String.valueOf(result));
        }

        return result;
    }

    // dumb integer "log base 2", returns -1 if there are no 1's in number.
    // NOTE: for multi-bit masks this returns the position of the lowest set
    // bit only; callers pass single-bit type/action masks.
    private static int log2(int n)
    {
        for (int i = 0; i < 32; ++i)
        {
            if (n == 1)
            {
                return i;
            }
            else
            {
                n = n >> 1;
            }
        }
        return -1;
    }

    /**
     * Keeps track of which consumers the event has been consumed by. Should be
     * called by a dispatcher when calling consume(Context ctx, String name,
     * Event event) on an event.
     *
     * @param consumerName
     */
    public void setBitSet(String consumerName)
    {
        consumedBy.set(EventManager.getConsumerIndex(consumerName));
    }

    /**
     * @return the set of consumers which have consumed this Event.
     */
    public BitSet getBitSet()
    {
        return consumedBy;
    }

    /**
     * @return Detailed string representation of contents of this event, to
     *         help in logging and debugging.
     */
    public String toString()
    {
        return "org.dspace.event.Event(eventType="
                + this.getEventTypeAsString()
                + ", SubjectType="
                + this.getSubjectTypeAsString()
                + ", SubjectID="
                + String.valueOf(subjectID)
                + ", ObjectType="
                + this.getObjectTypeAsString()
                + ", ObjectID="
                + String.valueOf(objectID)
                + ", TimeStamp="
                + String.valueOf(timeStamp)
                + ", dispatcher="
                + String.valueOf(dispatcher)
                + ", detail="
                + (detail == null ? "[null]" : "\"" + detail + "\"")
                + ", transactionID="
                + (transactionID == null ? "[null]" : "\"" + transactionID
                        + "\"") + ")";
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.dspace.core.Context;

/**
 * Interface for event dispatchers. The primary role of a dispatcher is to
 * deliver a set of events to a configured list of consumers. It may also
 * transform, consolidate, and otherwise optimize the event stream prior to
 * delivering events to its consumers.
 *
 * @version $Revision: 5844 $
 */
public abstract class Dispatcher
{
    /** Configuration name of this dispatcher. */
    protected String name;

    /** unique identifier of this dispatcher - cached hash of its text Name */
    protected int identifier;

    /**
     * Map of consumers by their configured name.
     */
    protected Map<String, ConsumerProfile> consumers = new HashMap<String, ConsumerProfile>();

    protected Dispatcher(String name)
    {
        super();
        this.name = name;
        // Cache the hash so getIdentifier() is a cheap field read.
        this.identifier = name.hashCode();
    }

    /**
     * @return the profiles of all consumers registered with this dispatcher.
     */
    // FIX: was the raw type "Collection"; parameterize so callers get typed
    // elements without casting (binary-compatible under erasure).
    public Collection<ConsumerProfile> getConsumers()
    {
        return consumers.values();
    }

    /**
     * @return unique integer that identifies this Dispatcher configuration.
     */
    public int getIdentifier()
    {
        return identifier;
    }

    /**
     * Add a consumer profile to the end of the list.
     *
     * @param cp
     *            the event consumer profile to add
     * @throws IllegalArgumentException
     *             if a consumer with the same name is already registered
     */
    public abstract void addConsumerProfile(ConsumerProfile cp)
            throws IllegalArgumentException;

    /**
     * Dispatch all events added to this Context according to configured
     * consumers.
     *
     * @param ctx
     *            the execution context object
     */
    public abstract void dispatch(Context ctx);
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.event;

import java.util.Collections;
import java.util.List;

import org.apache.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.Utils;

/**
 * BasicDispatcher implements the primary task of a Dispatcher: it delivers a
 * filtered list of events, synchronously, to a configured list of consumers. It
 * may be extended for more elaborate behavior.
 *
 * @version $Revision: 5844 $
 */
public class BasicDispatcher extends Dispatcher
{
    public BasicDispatcher(String name)
    {
        super(name);
    }

    /** log4j category */
    private static Logger log = Logger.getLogger(BasicDispatcher.class);

    /**
     * Register a consumer with this dispatcher.
     *
     * @param cp
     *            the event consumer profile to add
     * @throws IllegalArgumentException
     *             if a consumer with the same name is already registered
     */
    public void addConsumerProfile(ConsumerProfile cp)
            throws IllegalArgumentException
    {
        if (consumers.containsKey(cp.getName()))
        {
            throw new IllegalArgumentException(
                    "This dispatcher already has a consumer named \""
                            + cp.getName() + "\"");
        }

        consumers.put(cp.getName(), cp);

        if (log.isDebugEnabled())
        {
            int n = 0;
            for (int[] f : cp.getFilters())
            {
                log.debug("Adding Consumer=\"" + cp.getName() + "\", instance="
                        + cp.getConsumer().toString() + ", filter["
                        + String.valueOf(n) + "]=(ObjMask="
                        + String.valueOf(f[Event.SUBJECT_MASK])
                        + ", EventMask=" + String.valueOf(f[Event.EVENT_MASK])
                        + ")");
                ++n;
            }
        }
    }

    /**
     * Dispatch all events added to this Context according to configured
     * consumers, then call end() on every consumer so it can flush any
     * per-transaction state.
     *
     * @param ctx
     *            the execution context
     */
    public void dispatch(Context ctx)
    {
        if (consumers.isEmpty())
        {
            return;
        }

        // FIX: the null check must happen BEFORE wrapping, because
        // Collections.synchronizedList(null) throws NullPointerException.
        List<Event> queued = ctx.getEvents();
        if (queued == null)
        {
            return;
        }
        List<Event> events = Collections.synchronizedList(queued);

        if (log.isDebugEnabled())
        {
            log.debug("Processing queue of " + String.valueOf(events.size())
                    + " events.");
        }

        // transaction identifier applies to all events created in
        // this context for the current transaction. Prefix it with
        // some letters so RDF readers don't mistake it for an integer.
        String tid = "TX" + Utils.generateKey();

        for (Event event : events)
        {
            event.setDispatcher(getIdentifier());
            event.setTransactionID(tid);

            if (log.isDebugEnabled())
            {
                log.debug("Iterating over "
                        + String.valueOf(consumers.values().size())
                        + " consumers...");
            }

            for (ConsumerProfile cp : consumers.values())
            {
                if (event.pass(cp.getFilters()))
                {
                    if (log.isDebugEnabled())
                    {
                        log.debug("Sending event to \"" + cp.getName()
                                + "\": " + event.toString());
                    }

                    try
                    {
                        cp.getConsumer().consume(ctx, event);
                        // Record that the event has been consumed by this
                        // consumer
                        event.setBitSet(cp.getName());
                    }
                    catch (Exception e)
                    {
                        // One misbehaving consumer must not block the others.
                        log.error("Consumer(\"" + cp.getName()
                                + "\").consume threw: " + e.toString(), e);
                    }
                }
            }
        }

        // Call end on the consumers that got synchronous events.
        for (ConsumerProfile cp : consumers.values())
        {
            if (log.isDebugEnabled())
            {
                log.debug("Calling end for consumer \"" + cp.getName() + "\"");
            }

            try
            {
                cp.getConsumer().end(ctx);
            }
            catch (Exception e)
            {
                log.error("Error in Consumer(\"" + cp.getName() + "\").end: "
                        + e.toString(), e);
            }
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import java.sql.SQLException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.storage.rdbms.DatabaseManager;

/**
 * Class to represent the supervisor, primarily for use in applying supervisor
 * activities to the database, such as setting and unsetting supervision
 * orders and so forth.
 *
 * @author Richard Jones
 * @version $Revision: 5844 $
 */
public class Supervisor
{
    /** value to use for no policy set */
    public static final int POLICY_NONE = 0;

    /** value to use for editor policies */
    public static final int POLICY_EDITOR = 1;

    /** value to use for observer policies */
    public static final int POLICY_OBSERVER = 2;

    /** Creates a new instance of Supervisor; private - static utility class */
    private Supervisor()
    {
    }

    /**
     * finds out if there is a supervision order that matches this set
     * of values
     *
     * @param context   the context this object exists in
     * @param wsItemID  the workspace item to be supervised
     * @param groupID   the group to be doing the supervising
     *
     * @return boolean  true if there is an order that matches, false if not
     */
    public static boolean isOrder(Context context, int wsItemID, int groupID)
        throws SQLException
    {
        String query = "SELECT epersongroup2workspaceitem.* "
                + "FROM epersongroup2workspaceitem "
                + "WHERE epersongroup2workspaceitem.eperson_group_id = ? "
                + "AND epersongroup2workspaceitem.workspace_item_id = ? ";

        // parameter order matches the placeholders: group first, then item
        TableRowIterator tri = DatabaseManager.queryTable(context,
                "epersongroup2workspaceitem", query, groupID, wsItemID);

        try
        {
            return tri.hasNext();
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
    }

    /**
     * removes the requested group from the requested workspace item in terms
     * of supervision.  This also removes all the policies that group has
     * associated with the item
     *
     * @param context   the context this object exists in
     * @param wsItemID  the ID of the workspace item
     * @param groupID   the ID of the group to be removed from the item
     */
    public static void remove(Context context, int wsItemID, int groupID)
        throws SQLException, AuthorizeException
    {
        // get the workspace item and the group from the request values
        WorkspaceItem wsItem = WorkspaceItem.find(context, wsItemID);
        Group group = Group.find(context, groupID);

        // remove the link from the supervisory database
        String query = "DELETE FROM epersongroup2workspaceitem "
                + "WHERE workspace_item_id = ? "
                + "AND eperson_group_id = ? ";

        DatabaseManager.updateQuery(context, query, wsItemID, groupID);

        // get the item and have it remove the policies for the group
        Item item = wsItem.getItem();
        item.removeGroupPolicies(group);
    }

    /**
     * removes redundant entries in the supervision orders database
     *
     * @param context   the context this object exists in
     */
    public static void removeRedundant(Context context)
        throws SQLException
    {
        // this horrid looking query tests to see if there are any groups or
        // workspace items which match up to the ones in the linking database
        // table.  If there aren't, we know that the link is out of date, and
        // it can be deleted.
        String query = "DELETE FROM epersongroup2workspaceitem "
                + "WHERE NOT EXISTS ( "
                + "SELECT 1 FROM workspaceitem WHERE workspace_item_id "
                + "= epersongroup2workspaceitem.workspace_item_id "
                + ") OR NOT EXISTS ( "
                + "SELECT 1 FROM epersongroup WHERE eperson_group_id "
                + "= epersongroup2workspaceitem.eperson_group_id "
                + ")";

        DatabaseManager.updateQuery(context, query);
    }

    /**
     * adds a supervision order to the database
     *
     * @param context   the context this object exists in
     * @param groupID   the ID of the group which will supervise
     * @param wsItemID  the ID of the workspace item to be supervised
     * @param policy    one of the POLICY_* constants above (FIX: javadoc
     *                  previously described this int as a String)
     */
    public static void add(Context context, int groupID, int wsItemID, int policy)
        throws SQLException, AuthorizeException
    {
        // make a table row in the database table, and update with the relevant
        // details
        TableRow row = DatabaseManager.row("epersongroup2workspaceitem");
        row.setColumn("workspace_item_id", wsItemID);
        row.setColumn("eperson_group_id", groupID);
        DatabaseManager.insert(context, row);

        // If a default policy type has been requested, apply the policies using
        // the DSpace API for doing so
        if (policy != POLICY_NONE)
        {
            WorkspaceItem wsItem = WorkspaceItem.find(context, wsItemID);
            Item item = wsItem.getItem();
            Group group = Group.find(context, groupID);

            // "Editor" implies READ, WRITE, ADD permissions
            // "Observer" implies READ permissions
            if (policy == POLICY_EDITOR)
            {
                addGroupPolicy(context, item, group, Constants.READ);
                addGroupPolicy(context, item, group, Constants.WRITE);
                addGroupPolicy(context, item, group, Constants.ADD);
            }
            else if (policy == POLICY_OBSERVER)
            {
                addGroupPolicy(context, item, group, Constants.READ);
            }
        }
    }

    /**
     * creates and stores a single resource policy granting the given action
     * on the item to the group (extracted from the formerly-triplicated
     * inline blocks in add()).
     *
     * @param context  the context this object exists in
     * @param item     the item the policy applies to
     * @param group    the group being granted the action
     * @param action   the action constant, e.g. Constants.READ
     */
    private static void addGroupPolicy(Context context, Item item, Group group,
            int action)
        throws SQLException, AuthorizeException
    {
        ResourcePolicy r = ResourcePolicy.create(context);
        r.setResource(item);
        r.setGroup(group);
        r.setAction(action);
        r.update();
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import org.apache.log4j.Logger;
import org.dspace.core.*;
import org.dspace.event.Consumer;
import org.dspace.event.Event;

import javax.mail.MessagingException;
import java.util.Date;

/**
 * Class for handling updates to EPersons
 *
 * Recommended filter: EPerson+Create
 *
 * @version $Revision: 5844 $
 *
 * @author Stuart Lewis
 */
public class EPersonConsumer implements Consumer
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(EPersonConsumer.class);

    /**
     * Initalise the consumer
     *
     * @throws Exception
     */
    public void initialize() throws Exception
    {
    }

    /**
     * Consume the event: on EPerson CREATE, optionally e-mail the address
     * configured in "registration.notify" about the new registration.
     *
     * @param context
     * @param event
     * @throws Exception
     */
    public void consume(Context context, Event event) throws Exception
    {
        int st = event.getSubjectType();
        int et = event.getEventType();
        int id = event.getSubjectID();

        switch (st)
        {
        // If an EPerson is changed
        case Constants.EPERSON:
            if (et == Event.CREATE)
            {
                // Notify of new user registration
                String notifyRecipient = ConfigurationManager
                        .getProperty("registration.notify");
                if (notifyRecipient == null)
                {
                    notifyRecipient = "";
                }
                notifyRecipient = notifyRecipient.trim();

                if (!notifyRecipient.isEmpty())
                {
                    EPerson eperson = EPerson.find(context, id);
                    // FIX: guard against a missing EPerson; previously a null
                    // here caused an unhandled NullPointerException (the catch
                    // below only handles MessagingException).
                    if (eperson == null)
                    {
                        log.warn(LogManager.getHeader(context,
                                "error_emailing_administrator",
                                "cannot find eperson id=" + id));
                        break;
                    }
                    try
                    {
                        Email adminEmail = ConfigurationManager
                                .getEmail(I18nUtil.getEmailFilename(
                                        context.getCurrentLocale(),
                                        "registration_notify"));
                        adminEmail.addRecipient(notifyRecipient);
                        adminEmail.addArgument(ConfigurationManager
                                .getProperty("dspace.name"));
                        adminEmail.addArgument(ConfigurationManager
                                .getProperty("dspace.url"));
                        // Name
                        adminEmail.addArgument(eperson.getFirstName() + " "
                                + eperson.getLastName());
                        adminEmail.addArgument(eperson.getEmail());
                        adminEmail.addArgument(new Date());
                        adminEmail.setReplyTo(eperson.getEmail());
                        adminEmail.send();

                        // NOTE(review): "registerion_alert" looks like a typo
                        // for "registration_alert"; preserved byte-for-byte in
                        // case log-analysis tooling matches on it.
                        log.info(LogManager.getHeader(context,
                                "registerion_alert",
                                "user=" + eperson.getEmail()));
                    }
                    catch (MessagingException me)
                    {
                        // Best-effort notification: a mail failure must not
                        // abort the event stream.
                        log.warn(LogManager.getHeader(context,
                                "error_emailing_administrator", ""), me);
                    }
                }
            }
            else if (et == Event.DELETE)
            {
                // TODO: Implement this if required
            }
            break;
        default:
            log.warn("consume() got unrecognized event: " + event.toString());
        }
    }

    /**
     * Handle the end of the event
     *
     * @param ctx
     * @throws Exception
     */
    public void end(Context ctx) throws Exception
    {
    }

    /**
     * Finish the event
     *
     * @param ctx
     */
    public void finish(Context ctx)
    {
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import java.util.List;

/**
 * Exception indicating that an EPerson may not be deleted due to the presence
 * of the EPerson's ID in certain tables
 *
 * @author Grace Carpenter
 */
public class EPersonDeletionException extends Exception
{
    // FIX: Exception is Serializable; declare an explicit serialVersionUID
    // (as Event does elsewhere in this codebase) so serialized form is stable.
    private static final long serialVersionUID = 1L;

    /** set of tables in which EPerson exists; null when not supplied */
    private List<String> myTableList;

    /**
     * Create an empty EPersonDeletionException
     */
    public EPersonDeletionException()
    {
        super();
        myTableList = null;
    }

    /**
     * Create an EPersonDeletionException
     *
     * @param tableList
     *            tables in which the eperson ID exists. An person cannot be
     *            deleted if it exists in these tables.
     *
     */
    public EPersonDeletionException(List<String> tableList)
    {
        super();
        myTableList = tableList;
    }

    /**
     * Return the list of offending tables.
     *
     * @return The tables in which the eperson ID exists, or null if this
     *         exception was created without a table list.
     */
    public List<String> getTables()
    {
        return myTableList;
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;

import javax.mail.MessagingException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Collection;
import org.dspace.content.DCDate;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.search.Harvest;
import org.dspace.search.HarvestedItemInfo;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

/**
 * Class defining methods for sending new item e-mail alerts to users.
 * Subscriptions are rows in the "subscription" table keyed on
 * (eperson_id, collection_id); {@link #processDaily(Context, boolean)} walks
 * that table once a day and mails each subscriber a digest of new items.
 *
 * @author Robert Tansley
 * @version $Revision: 5844 $
 */
public class Subscribe
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(Subscribe.class);

    /**
     * Subscribe an e-person to a collection. An e-mail will be sent every day
     * a new item appears in the collection.
     *
     * @param context
     *            DSpace context
     * @param eperson
     *            EPerson to subscribe
     * @param collection
     *            Collection to subscribe to
     * @throws AuthorizeException
     *             unless the caller is an administrator or the eperson
     *             themselves
     */
    public static void subscribe(Context context, EPerson eperson,
            Collection collection) throws SQLException, AuthorizeException
    {
        // Check authorisation. Must be administrator, or the eperson.
        if (AuthorizeManager.isAdmin(context)
                || ((context.getCurrentUser() != null) && (context
                        .getCurrentUser().getID() == eperson.getID())))
        {
            // already subscribed? Parameterized query guards against injection.
            TableRowIterator r = DatabaseManager.query(context,
                    "SELECT * FROM subscription WHERE eperson_id= ? "
                            + " AND collection_id= ? ",
                    eperson.getID(), collection.getID());

            try
            {
                if (!r.hasNext())
                {
                    // Not subscribed, so add them
                    TableRow row = DatabaseManager.row("subscription");
                    row.setColumn("eperson_id", eperson.getID());
                    row.setColumn("collection_id", collection.getID());
                    DatabaseManager.insert(context, row);

                    log.info(LogManager.getHeader(context, "subscribe",
                            "eperson_id=" + eperson.getID() + ",collection_id="
                                    + collection.getID()));
                }
            }
            finally
            {
                // close the TableRowIterator to free up resources
                if (r != null)
                {
                    r.close();
                }
            }
        }
        else
        {
            throw new AuthorizeException(
                    "Only admin or e-person themselves can subscribe");
        }
    }

    /**
     * Unsubscribe an e-person to a collection. Passing in <code>null</code>
     * for the collection unsubscribes the e-person from all collections they
     * are subscribed to.
     *
     * @param context
     *            DSpace context
     * @param eperson
     *            EPerson to unsubscribe
     * @param collection
     *            Collection to unsubscribe from, or null for all
     * @throws AuthorizeException
     *             unless the caller is an administrator or the eperson
     *             themselves
     */
    public static void unsubscribe(Context context, EPerson eperson,
            Collection collection) throws SQLException, AuthorizeException
    {
        // Check authorisation. Must be administrator, or the eperson.
        if (AuthorizeManager.isAdmin(context)
                || ((context.getCurrentUser() != null) && (context
                        .getCurrentUser().getID() == eperson.getID())))
        {
            if (collection == null)
            {
                // Unsubscribe from all
                DatabaseManager.updateQuery(context,
                        "DELETE FROM subscription WHERE eperson_id= ? ",
                        eperson.getID());
            }
            else
            {
                DatabaseManager.updateQuery(context,
                        "DELETE FROM subscription WHERE eperson_id= ? "
                                + "AND collection_id= ? ",
                        eperson.getID(), collection.getID());

                log.info(LogManager.getHeader(context, "unsubscribe",
                        "eperson_id=" + eperson.getID() + ",collection_id="
                                + collection.getID()));
            }
        }
        else
        {
            throw new AuthorizeException(
                    "Only admin or e-person themselves can unsubscribe");
        }
    }

    /**
     * Find out which collections an e-person is subscribed to.
     *
     * @param context
     *            DSpace context
     * @param eperson
     *            EPerson
     * @return array of collections e-person is subscribed to
     */
    public static Collection[] getSubscriptions(Context context, EPerson eperson)
            throws SQLException
    {
        TableRowIterator tri = DatabaseManager.query(context,
                "SELECT collection_id FROM subscription WHERE eperson_id= ? ",
                eperson.getID());

        List<Collection> collections = new ArrayList<Collection>();

        try
        {
            // Materialise each subscribed Collection from its row's ID
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collections.add(Collection.find(context, row
                        .getIntColumn("collection_id")));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        Collection[] collArray = new Collection[collections.size()];
        return (Collection[]) collections.toArray(collArray);
    }

    /**
     * Is that e-person subscribed to that collection?
     *
     * @param context
     *            DSpace context
     * @param eperson
     *            find out if this e-person is subscribed
     * @param collection
     *            find out if subscribed to this collection
     * @return <code>true</code> if they are subscribed
     */
    public static boolean isSubscribed(Context context, EPerson eperson,
            Collection collection) throws SQLException
    {
        TableRowIterator tri = DatabaseManager.query(context,
                "SELECT * FROM subscription WHERE eperson_id= ? "
                        + "AND collection_id= ? ",
                eperson.getID(), collection.getID());
        try
        {
            // Any matching row at all means "subscribed"
            return tri.hasNext();
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
    }

    /**
     * Process subscriptions. This must be invoked only once a day. Messages
     * are only sent out when a collection has actually received new items, so
     * that people's mailboxes are not clogged with many "no new items" mails.
     * <P>
     * Yesterday's newly available items are included. If this is run at for
     * example midday, any items that have been made available during the
     * current day will not be included, but will be included in the next
     * day's run.
     * <P>
     * For example, if today's date is 2002-10-10 (in UTC) items made
     * available during 2002-10-09 (UTC) will be included.
     *
     * @param context
     *            DSpace context object
     * @param test
     *            if true, e-mails are logged instead of sent
     */
    public static void processDaily(Context context, boolean test) throws SQLException,
            IOException
    {
        // Grab the subscriptions; ORDER BY eperson_id lets us collate each
        // person's collections in a single pass over the result set.
        TableRowIterator tri = DatabaseManager.query(context,
                "SELECT * FROM subscription ORDER BY eperson_id");

        EPerson currentEPerson = null;
        List<Collection> collections = null; // List of Collections

        try
        {
            // Go through the list collating subscriptions for each e-person
            while (tri.hasNext())
            {
                TableRow row = tri.next();

                // Does this row relate to the same e-person as the last?
                if ((currentEPerson == null)
                        || (row.getIntColumn("eperson_id") != currentEPerson
                                .getID()))
                {
                    // New e-person. Send mail for previous e-person; a mail
                    // failure is logged but does not abort the whole run.
                    if (currentEPerson != null)
                    {
                        try
                        {
                            sendEmail(context, currentEPerson, collections, test);
                        }
                        catch (MessagingException me)
                        {
                            log.error("Failed to send subscription to eperson_id="
                                    + currentEPerson.getID());
                            log.error(me);
                        }
                    }

                    currentEPerson = EPerson.find(context, row
                            .getIntColumn("eperson_id"));
                    collections = new ArrayList<Collection>();
                }

                collections.add(Collection.find(context, row
                        .getIntColumn("collection_id")));
            }
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }

        // Process the last person (the loop only flushes on e-person change)
        if (currentEPerson != null)
        {
            try
            {
                sendEmail(context, currentEPerson, collections, test);
            }
            catch (MessagingException me)
            {
                log.error("Failed to send subscription to eperson_id="
                        + currentEPerson.getID());
                log.error(me);
            }
        }
    }

    /**
     * Sends an email to the given e-person with details of new items in the
     * given collections, items that appeared yesterday. No e-mail is sent if
     * there aren't any new items in any of the collections.
     *
     * @param context
     *            DSpace context object
     * @param eperson
     *            eperson to send to
     * @param collections
     *            List of collections to summarise
     * @param test
     *            if true, the e-mail text is logged instead of being sent
     */
    public static void sendEmail(Context context, EPerson eperson,
            List<Collection> collections, boolean test) throws IOException,
            MessagingException, SQLException
    {
        // Get a resource bundle according to the eperson language preferences
        Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
        ResourceBundle labels = ResourceBundle.getBundle("Messages", supportedLocale);

        // Get the start and end dates for yesterday
        Date thisTimeYesterday = new Date(System.currentTimeMillis()
                - (24 * 60 * 60 * 1000));
        DCDate dcDateYesterday = new DCDate(thisTimeYesterday);

        // this time yesterday in ISO 8601, stripping the time
        String isoDateYesterday = dcDateYesterday.toString().substring(0, 10);

        String startDate = isoDateYesterday;

        // FIXME: text of email should be more configurable from an
        // i18n viewpoint
        StringBuffer emailText = new StringBuffer();
        boolean isFirst = true;

        for (int i = 0; i < collections.size(); i++)
        {
            Collection c = collections.get(i);

            try
            {
                boolean includeAll = ConfigurationManager.getBooleanProperty("harvest.includerestricted.subscription", true);

                // we harvest all the changed item from yesterday until now
                List<HarvestedItemInfo> itemInfos = Harvest.harvest(context, c,
                        startDate, null,
                        0, // Limit and offset zero, get everything
                        0,
                        true, // Need item objects
                        false, // But not containers
                        false, // Or withdrawals
                        includeAll);

                if (ConfigurationManager.getBooleanProperty("eperson.subscription.onlynew", false))
                {
                    // get only the items archived yesterday
                    itemInfos = filterOutModified(itemInfos);
                }
                else
                {
                    // strip out the item archived today or
                    // not archived yesterday and modified today
                    itemInfos = filterOutToday(itemInfos);
                }

                // Only add to buffer if there are new items
                if (itemInfos.size() > 0)
                {
                    if (!isFirst)
                    {
                        // separator between successive collection sections
                        emailText
                                .append("\n---------------------------------------\n");
                    }
                    else
                    {
                        isFirst = false;
                    }

                    emailText.append(labels.getString("org.dspace.eperson.Subscribe.new-items")).append(" ").append(
                            c.getMetadata("name")).append(": ").append(
                            itemInfos.size()).append("\n\n");

                    for (int j = 0; j < itemInfos.size(); j++)
                    {
                        HarvestedItemInfo hii = (HarvestedItemInfo) itemInfos
                                .get(j);

                        DCValue[] titles = hii.item.getDC("title", null, Item.ANY);
                        emailText.append(" ").append(labels.getString("org.dspace.eperson.Subscribe.title")).append(" ");

                        if (titles.length > 0)
                        {
                            emailText.append(titles[0].value);
                        }
                        else
                        {
                            emailText.append(labels.getString("org.dspace.eperson.Subscribe.untitled"));
                        }

                        DCValue[] authors = hii.item.getDC("contributor",
                                Item.ANY, Item.ANY);

                        if (authors.length > 0)
                        {
                            emailText.append("\n ").append(labels.getString("org.dspace.eperson.Subscribe.authors")).append(" ").append(
                                    authors[0].value);

                            for (int k = 1; k < authors.length; k++)
                            {
                                emailText.append("\n ").append(
                                        authors[k].value);
                            }
                        }

                        emailText.append("\n ").append(labels.getString("org.dspace.eperson.Subscribe.id")).append(" ").append(
                                HandleManager.getCanonicalForm(hii.handle)).append(
                                "\n\n");
                    }
                }
            }
            catch (ParseException pe)
            {
                // This should never get thrown as the Dates are auto-generated
            }
        }

        // Send an e-mail if there were any new items
        if (emailText.length() > 0)
        {
            if (test)
            {
                // Test mode: log the message instead of sending it
                log.info(LogManager.getHeader(context, "subscription:", "eperson=" + eperson.getEmail()));
                log.info(LogManager.getHeader(context, "subscription:", "text=" + emailText.toString()));
            }
            else
            {
                Email email = ConfigurationManager.getEmail(I18nUtil.getEmailFilename(supportedLocale, "subscription"));
                email.addRecipient(eperson.getEmail());
                email.addArgument(emailText.toString());
                email.send();

                log.info(LogManager.getHeader(context, "sent_subscription", "eperson_id=" + eperson.getID()));
            }
        }
    }

    /**
     * Method for invoking subscriptions via the command line.
     *
     * @param argv
     *            command-line arguments: -t/--test for a dry run,
     *            -h/--help for usage
     */
    public static void main(String[] argv)
    {
        String usage = "org.dspace.eperson.Subscribe [-t] or nothing to send out subscriptions.";

        Options options = new Options();
        HelpFormatter formatter = new HelpFormatter();
        CommandLine line = null;

        {
            Option opt = new Option("t", "test", false, "Run test session");
            opt.setRequired(false);
            options.addOption(opt);
        }

        {
            Option opt = new Option("h", "help", false, "Print this help message");
            opt.setRequired(false);
            options.addOption(opt);
        }

        try
        {
            line = new PosixParser().parse(options, argv);
        }
        catch (Exception e)
        {
            // automatically generate the help statement
            formatter.printHelp(usage, e.getMessage(), options, "");
            System.exit(1);
        }

        if (line.hasOption("h"))
        {
            // automatically generate the help statement
            formatter.printHelp(usage, options);
            System.exit(1);
        }

        boolean test = line.hasOption("t");

        if (test)
        {
            log.setLevel(Level.DEBUG);
        }

        Context context = null;

        try
        {
            context = new Context();
            processDaily(context, test);
            context.complete();
        }
        catch (Exception e)
        {
            log.fatal(e);
        }
        finally
        {
            if (context != null && context.isValid())
            {
                // Nothing is actually written
                context.abort();
            }
        }
    }

    /**
     * Keep only items that are genuinely "new": items not modified today, or
     * items modified today whose dc.date.accessioned shows they were archived
     * yesterday. Dates are compared as yyyy-MM-dd strings in the JVM's
     * default time zone.
     *
     * @param completeList
     *            harvested items to filter
     * @return the filtered list
     */
    private static List<HarvestedItemInfo> filterOutToday(List<HarvestedItemInfo> completeList)
    {
        log.debug("Filtering out all today item to leave new items list size="
                + completeList.size());
        List<HarvestedItemInfo> filteredList = new ArrayList<HarvestedItemInfo>();

        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String today = sdf.format(new Date());

        // Get the start and end dates for yesterday
        Date thisTimeYesterday = new Date(System.currentTimeMillis()
                - (24 * 60 * 60 * 1000));
        String yesterday = sdf.format(thisTimeYesterday);

        for (HarvestedItemInfo infoObject : completeList)
        {
            Date lastUpdate = infoObject.item.getLastModified();
            String lastUpdateStr = sdf.format(lastUpdate);

            // has the item modified today?
            if (lastUpdateStr.equals(today))
            {
                DCValue[] dateAccArr = infoObject.item.getMetadata("dc", "date", "accessioned", Item.ANY);

                // we need only the item archived yesterday
                if (dateAccArr != null && dateAccArr.length > 0)
                {
                    for (DCValue date : dateAccArr)
                    {
                        if (date != null && date.value != null)
                        {
                            // if it hasn't been archived today
                            if (date.value.startsWith(yesterday))
                            {
                                filteredList.add(infoObject);
                                log.debug("adding : " + dateAccArr[0].value
                                        + " : " + today + " : " + infoObject.handle);
                                break;
                            }
                            else
                            {
                                log.debug("ignoring : " + dateAccArr[0].value
                                        + " : " + today + " : " + infoObject.handle);
                            }
                        }
                    }
                }
                else
                {
                    // no accession date to judge by; keep the item
                    log.debug("no date accessioned, adding : " + infoObject.handle);
                    filteredList.add(infoObject);
                }
            }
            else
            {
                // the item has been modified yesterday...
                filteredList.add(infoObject);
            }
        }

        return filteredList;
    }

    /**
     * Keep only items whose dc.date.accessioned shows they were archived
     * yesterday (i.e. drop items that were merely modified). Items with no
     * accession date are kept. Dates are compared as yyyy-MM-dd strings in
     * the JVM's default time zone.
     *
     * @param completeList
     *            harvested items to filter
     * @return the filtered list
     */
    private static List<HarvestedItemInfo> filterOutModified(List<HarvestedItemInfo> completeList)
    {
        log.debug("Filtering out all modified to leave new items list size=" + completeList.size());
        List<HarvestedItemInfo> filteredList = new ArrayList<HarvestedItemInfo>();

        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");

        // Get the start and end dates for yesterday
        Date thisTimeYesterday = new Date(System.currentTimeMillis()
                - (24 * 60 * 60 * 1000));
        String yesterday = sdf.format(thisTimeYesterday);

        for (HarvestedItemInfo infoObject : completeList)
        {
            DCValue[] dateAccArr = infoObject.item.getMetadata("dc", "date", "accessioned", Item.ANY);

            if (dateAccArr != null && dateAccArr.length > 0)
            {
                for (DCValue date : dateAccArr)
                {
                    if (date != null && date.value != null)
                    {
                        // if it has been archived yesterday
                        if (date.value.startsWith(yesterday))
                        {
                            filteredList.add(infoObject);
                            log.debug("adding : " + dateAccArr[0].value + " : "
                                    + yesterday + " : " + infoObject.handle);
                            break;
                        }
                        else
                        {
                            log.debug("ignoring : " + dateAccArr[0].value + " : "
                                    + yesterday + " : " + infoObject.handle);
                        }
                    }
                }
            }
            else
            {
                // no accession date to judge by; keep the item
                log.debug("no date accessioned, adding : " + infoObject.handle);
                filteredList.add(infoObject);
            }
        }
        return filteredList;
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.eperson; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeManager; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; import org.dspace.core.ConfigurationManager; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.event.Event; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRowIterator; /** * Class representing a group of e-people. * * @author David Stuve * @version $Revision: 5926 $ */ public class Group extends DSpaceObject { // findAll sortby types public static final int ID = 0; // sort by ID public static final int NAME = 1; // sort by NAME (default) /** log4j logger */ private static Logger log = Logger.getLogger(Group.class); /** Our context */ private Context myContext; /** The row in the table representing this object */ private TableRow myRow; /** lists of epeople and groups in the group */ private List<EPerson> epeople = new ArrayList<EPerson>(); private List<Group> groups = new ArrayList<Group>(); /** lists that need to be written out again */ private boolean epeopleChanged = false; private boolean groupsChanged = false; /** is this just a stub, or is all data loaded? 
*/ private boolean isDataLoaded = false; /** Flag set when metadata is modified, for events */ private boolean modifiedMetadata; /** * Construct a Group from a given context and tablerow * * @param context * @param row */ Group(Context context, TableRow row) throws SQLException { myContext = context; myRow = row; // Cache ourselves context.cache(this, row.getIntColumn("eperson_group_id")); modifiedMetadata = false; clearDetails(); } /** * Populate Group with eperson and group objects * * @throws SQLException */ public void loadData() { // only populate if not already populated if (!isDataLoaded) { // naughty thing to do - swallowing SQL exception and throwing it as // a RuntimeException - a hack to avoid changing the API all over // the place try { // get epeople objects TableRowIterator tri = DatabaseManager.queryTable(myContext,"eperson", "SELECT eperson.* FROM eperson, epersongroup2eperson WHERE " + "epersongroup2eperson.eperson_id=eperson.eperson_id AND " + "epersongroup2eperson.eperson_group_id= ?", myRow.getIntColumn("eperson_group_id")); try { while (tri.hasNext()) { TableRow r = (TableRow) tri.next(); // First check the cache EPerson fromCache = (EPerson) myContext.fromCache( EPerson.class, r.getIntColumn("eperson_id")); if (fromCache != null) { epeople.add(fromCache); } else { epeople.add(new EPerson(myContext, r)); } } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } // now get Group objects tri = DatabaseManager.queryTable(myContext,"epersongroup", "SELECT epersongroup.* FROM epersongroup, group2group WHERE " + "group2group.child_id=epersongroup.eperson_group_id AND "+ "group2group.parent_id= ? 
", myRow.getIntColumn("eperson_group_id")); try { while (tri.hasNext()) { TableRow r = (TableRow) tri.next(); // First check the cache Group fromCache = (Group) myContext.fromCache(Group.class, r.getIntColumn("eperson_group_id")); if (fromCache != null) { groups.add(fromCache); } else { groups.add(new Group(myContext, r)); } } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } } catch (Exception e) { throw new IllegalStateException(e); } isDataLoaded = true; } } /** * Create a new group * * @param context * DSpace context object */ public static Group create(Context context) throws SQLException, AuthorizeException { // FIXME - authorization? if (!AuthorizeManager.isAdmin(context)) { throw new AuthorizeException( "You must be an admin to create an EPerson Group"); } // Create a table row TableRow row = DatabaseManager.create(context, "epersongroup"); Group g = new Group(context, row); log.info(LogManager.getHeader(context, "create_group", "group_id=" + g.getID())); context.addEvent(new Event(Event.CREATE, Constants.GROUP, g.getID(), null)); return g; } /** * get the ID of the group object * * @return id */ public int getID() { return myRow.getIntColumn("eperson_group_id"); } /** * get name of group * * @return name */ public String getName() { return myRow.getStringColumn("name"); } /** * set name of group * * @param name * new group name */ public void setName(String name) { myRow.setColumn("name", name); modifiedMetadata = true; addDetails("name"); } /** * add an eperson member * * @param e * eperson */ public void addMember(EPerson e) { loadData(); // make sure Group has data loaded if (isMember(e)) { return; } epeople.add(e); epeopleChanged = true; myContext.addEvent(new Event(Event.ADD, Constants.GROUP, getID(), Constants.EPERSON, e.getID(), e.getEmail())); } /** * add group to this group * * @param g */ public void addMember(Group g) { loadData(); // make sure Group has data loaded // don't add if it's already a 
member // and don't add itself if (isMember(g) || getID()==g.getID()) { return; } groups.add(g); groupsChanged = true; myContext.addEvent(new Event(Event.ADD, Constants.GROUP, getID(), Constants.GROUP, g.getID(), g.getName())); } /** * remove an eperson from a group * * @param e * eperson */ public void removeMember(EPerson e) { loadData(); // make sure Group has data loaded if (epeople.remove(e)) { epeopleChanged = true; myContext.addEvent(new Event(Event.REMOVE, Constants.GROUP, getID(), Constants.EPERSON, e.getID(), e.getEmail())); } } /** * remove group from this group * * @param g */ public void removeMember(Group g) { loadData(); // make sure Group has data loaded if (groups.remove(g)) { groupsChanged = true; myContext.addEvent(new Event(Event.REMOVE, Constants.GROUP, getID(), Constants.GROUP, g.getID(), g.getName())); } } /** * check to see if an eperson is a direct member. * If the eperson is a member via a subgroup will be returned <code>false</code> * * @param e * eperson to check membership */ public boolean isMember(EPerson e) { // special, group 0 is anonymous if (getID() == 0) { return true; } loadData(); // make sure Group has data loaded return epeople.contains(e); } /** * check to see if g is a direct group member. 
* If g is a subgroup via another group will be returned <code>false</code> * * @param g * group to check * @return */ public boolean isMember(Group g) { loadData(); // make sure Group has data loaded return groups.contains(g); } /** * fast check to see if an eperson is a member called with eperson id, does * database lookup without instantiating all of the epeople objects and is * thus a static method * * @param c * context * @param groupid * group ID to check */ public static boolean isMember(Context c, int groupid) throws SQLException { // special, everyone is member of group 0 (anonymous) if (groupid == 0) { return true; } EPerson currentuser = c.getCurrentUser(); return epersonInGroup(c, groupid, currentuser); } /** * Get all of the groups that an eperson is a member of * * @param c * @param e * @return * @throws SQLException */ public static Group[] allMemberGroups(Context c, EPerson e) throws SQLException { List<Group> groupList = new ArrayList<Group>(); Set<Integer> myGroups = allMemberGroupIDs(c, e); // now convert those Integers to Groups Iterator<Integer> i = myGroups.iterator(); while (i.hasNext()) { groupList.add(Group.find(c, (i.next()).intValue())); } return groupList.toArray(new Group[groupList.size()]); } /** * get Set of Integers all of the group memberships for an eperson * * @param c * @param e * @return Set of Integer groupIDs * @throws SQLException */ public static Set<Integer> allMemberGroupIDs(Context c, EPerson e) throws SQLException { Set<Integer> groupIDs = new HashSet<Integer>(); if (e != null) { // two queries - first to get groups eperson is a member of // second query gets parent groups for groups eperson is a member of TableRowIterator tri = DatabaseManager.queryTable(c, "epersongroup2eperson", "SELECT * FROM epersongroup2eperson WHERE eperson_id= ?", e .getID()); try { while (tri.hasNext()) { TableRow row = tri.next(); int childID = row.getIntColumn("eperson_group_id"); groupIDs.add(Integer.valueOf(childID)); } } finally { // close 
the TableRowIterator to free up resources if (tri != null) { tri.close(); } } } // Also need to get all "Special Groups" user is a member of! // Otherwise, you're ignoring the user's membership to these groups! // However, we only do this is we are looking up the special groups // of the current user, as we cannot look up the special groups // of a user who is not logged in. if ((c.getCurrentUser() == null) || (((c.getCurrentUser() != null) && (c.getCurrentUser().getID() == e.getID())))) { Group[] specialGroups = c.getSpecialGroups(); for(Group special : specialGroups) { groupIDs.add(Integer.valueOf(special.getID())); } } // all the users are members of the anonymous group groupIDs.add(Integer.valueOf(0)); // now we have all owning groups, also grab all parents of owning groups // yes, I know this could have been done as one big query and a union, // but doing the Oracle port taught me to keep to simple SQL! StringBuilder groupQuery = new StringBuilder(); groupQuery.append("SELECT * FROM group2groupcache WHERE "); Iterator<Integer> i = groupIDs.iterator(); // Build a list of query parameters Object[] parameters = new Object[groupIDs.size()]; int idx = 0; while (i.hasNext()) { int groupID = (i.next()).intValue(); parameters[idx++] = Integer.valueOf(groupID); groupQuery.append("child_id= ? "); if (i.hasNext()) { groupQuery.append(" OR "); } } // was member of at least one group // NOTE: even through the query is built dynamically, all data is // separated into the parameters array. 
TableRowIterator tri = DatabaseManager.queryTable(c, "group2groupcache", groupQuery.toString(), parameters); try { while (tri.hasNext()) { TableRow row = tri.next(); int parentID = row.getIntColumn("parent_id"); groupIDs.add(Integer.valueOf(parentID)); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } return groupIDs; } /** * Get all of the epeople who are a member of the * specified group, or a member of a sub-group of the * specified group, etc. * * @param c * DSpace context * @param g * Group object * @return Array of EPerson objects * @throws SQLException */ public static EPerson[] allMembers(Context c, Group g) throws SQLException { List<EPerson> epersonList = new ArrayList<EPerson>(); Set<Integer> myEpeople = allMemberIDs(c, g); // now convert those Integers to EPerson objects Iterator<Integer> i = myEpeople.iterator(); while (i.hasNext()) { epersonList.add(EPerson.find(c, (i.next()).intValue())); } return epersonList.toArray(new EPerson[epersonList.size()]); } /** * Get Set of all Integers all of the epeople * members for a group * * @param c * DSpace context * @param g * Group object * @return Set of Integer epersonIDs * @throws SQLException */ public static Set<Integer> allMemberIDs(Context c, Group g) throws SQLException { // two queries - first to get all groups which are a member of this group // second query gets all members of each group in the first query Set<Integer> epeopleIDs = new HashSet<Integer>(); // Get all groups which are a member of this group TableRowIterator tri = DatabaseManager.queryTable(c, "group2groupcache", "SELECT * FROM group2groupcache WHERE parent_id= ? 
", g.getID()); Set<Integer> groupIDs = new HashSet<Integer>(); try { while (tri.hasNext()) { TableRow row = tri.next(); int childID = row.getIntColumn("child_id"); groupIDs.add(Integer.valueOf(childID)); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } // now we have all the groups (including this one) // it is time to find all the EPeople who belong to those groups // and filter out all duplicates Object[] parameters = new Object[groupIDs.size()+1]; int idx = 0; Iterator<Integer> i = groupIDs.iterator(); // don't forget to add the current group to this query! parameters[idx++] = Integer.valueOf(g.getID()); StringBuilder epersonQuery = new StringBuilder(); epersonQuery.append("SELECT * FROM epersongroup2eperson WHERE "); epersonQuery.append("eperson_group_id= ? "); if (i.hasNext()) { epersonQuery.append(" OR "); } while (i.hasNext()) { int groupID = (i.next()).intValue(); parameters[idx++] = Integer.valueOf(groupID); epersonQuery.append("eperson_group_id= ? "); if (i.hasNext()) { epersonQuery.append(" OR "); } } //get all the EPerson IDs // Note: even through the query is dynamically built all data is separated // into the parameters array. 
tri = DatabaseManager.queryTable(c, "epersongroup2eperson", epersonQuery.toString(), parameters); try { while (tri.hasNext()) { TableRow row = tri.next(); int epersonID = row.getIntColumn("eperson_id"); epeopleIDs.add(Integer.valueOf(epersonID)); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } return epeopleIDs; } private static boolean epersonInGroup(Context c, int groupID, EPerson e) throws SQLException { Set<Integer> groupIDs = Group.allMemberGroupIDs(c, e); return groupIDs.contains(Integer.valueOf(groupID)); } /** * find the group by its ID * * @param context * @param id */ public static Group find(Context context, int id) throws SQLException { // First check the cache Group fromCache = (Group) context.fromCache(Group.class, id); if (fromCache != null) { return fromCache; } TableRow row = DatabaseManager.find(context, "epersongroup", id); if (row == null) { return null; } else { return new Group(context, row); } } /** * Find the group by its name - assumes name is unique * * @param context * @param name * * @return the named Group, or null if not found */ public static Group findByName(Context context, String name) throws SQLException { TableRow row = DatabaseManager.findByUnique(context, "epersongroup", "name", name); if (row == null) { return null; } else { // First check the cache Group fromCache = (Group) context.fromCache(Group.class, row .getIntColumn("eperson_group_id")); if (fromCache != null) { return fromCache; } else { return new Group(context, row); } } } /** * Finds all groups in the site * * @param context * DSpace context * @param sortField * field to sort by -- Group.ID or Group.NAME * * @return array of all groups in the site */ public static Group[] findAll(Context context, int sortField) throws SQLException { String s; switch (sortField) { case ID: s = "eperson_group_id"; break; case NAME: s = "name"; break; default: s = "name"; } // NOTE: The use of 's' in the order by clause can not cause 
an SQL // injection because the string is derived from constant values above. TableRowIterator rows = DatabaseManager.queryTable( context, "epersongroup", "SELECT * FROM epersongroup ORDER BY "+s); try { List<TableRow> gRows = rows.toList(); Group[] groups = new Group[gRows.size()]; for (int i = 0; i < gRows.size(); i++) { TableRow row = gRows.get(i); // First check the cache Group fromCache = (Group) context.fromCache(Group.class, row .getIntColumn("eperson_group_id")); if (fromCache != null) { groups[i] = fromCache; } else { groups[i] = new Group(context, row); } } return groups; } finally { if (rows != null) { rows.close(); } } } /** * Find the groups that match the search query across eperson_group_id or name * * @param context * DSpace context * @param query * The search string * * @return array of Group objects */ public static Group[] search(Context context, String query) throws SQLException { return search(context, query, -1, -1); } /** * Find the groups that match the search query across eperson_group_id or name * * @param context * DSpace context * @param query * The search string * @param offset * Inclusive offset * @param limit * Maximum number of matches returned * * @return array of Group objects */ public static Group[] search(Context context, String query, int offset, int limit) throws SQLException { String params = "%"+query.toLowerCase()+"%"; StringBuffer queryBuf = new StringBuffer(); queryBuf.append("SELECT * FROM epersongroup WHERE LOWER(name) LIKE LOWER(?) OR eperson_group_id = ? ORDER BY name ASC "); // Add offset and limit restrictions - Oracle requires special code if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { // First prepare the query to generate row numbers if (limit > 0 || offset > 0) { queryBuf.insert(0, "SELECT /*+ FIRST_ROWS(n) */ rec.*, ROWNUM rnum FROM ("); queryBuf.append(") "); } // Restrict the number of rows returned based on the limit if (limit > 0) { queryBuf.append("rec WHERE rownum<=? 
"); // If we also have an offset, then convert the limit into the maximum row number if (offset > 0) { limit += offset; } } // Return only the records after the specified offset (row number) if (offset > 0) { queryBuf.insert(0, "SELECT * FROM ("); queryBuf.append(") WHERE rnum>?"); } } else { if (limit > 0) { queryBuf.append(" LIMIT ? "); } if (offset > 0) { queryBuf.append(" OFFSET ? "); } } String dbquery = queryBuf.toString(); // When checking against the eperson-id, make sure the query can be made into a number Integer int_param; try { int_param = Integer.valueOf(query); } catch (NumberFormatException e) { int_param = Integer.valueOf(-1); } // Create the parameter array, including limit and offset if part of the query Object[] paramArr = new Object[]{params, int_param}; if (limit > 0 && offset > 0) { paramArr = new Object[]{params, int_param, limit, offset}; } else if (limit > 0) { paramArr = new Object[]{params, int_param, limit}; } else if (offset > 0) { paramArr = new Object[]{params, int_param, offset}; } TableRowIterator rows = DatabaseManager.query(context, dbquery, paramArr); try { List<TableRow> groupRows = rows.toList(); Group[] groups = new Group[groupRows.size()]; for (int i = 0; i < groupRows.size(); i++) { TableRow row = groupRows.get(i); // First check the cache Group fromCache = (Group) context.fromCache(Group.class, row .getIntColumn("eperson_group_id")); if (fromCache != null) { groups[i] = fromCache; } else { groups[i] = new Group(context, row); } } return groups; } finally { if (rows != null) { rows.close(); } } } /** * Returns the total number of groups returned by a specific query, without the overhead * of creating the Group objects to store the results. 
* * @param context * DSpace context * @param query * The search string * * @return the number of groups matching the query */ public static int searchResultCount(Context context, String query) throws SQLException { String params = "%"+query.toLowerCase()+"%"; String dbquery = "SELECT count(*) as gcount FROM epersongroup WHERE LOWER(name) LIKE LOWER(?) OR eperson_group_id = ? "; // When checking against the eperson-id, make sure the query can be made into a number Integer int_param; try { int_param = Integer.valueOf(query); } catch (NumberFormatException e) { int_param = Integer.valueOf(-1); } // Get all the epeople that match the query TableRow row = DatabaseManager.querySingle(context, dbquery, new Object[] {params, int_param}); // use getIntColumn for Oracle count data Long count; if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { count = Long.valueOf(row.getIntColumn("gcount")); } else //getLongColumn works for postgres { count = Long.valueOf(row.getLongColumn("gcount")); } return count.intValue(); } /** * Delete a group * */ public void delete() throws SQLException { // FIXME: authorizations myContext.addEvent(new Event(Event.DELETE, Constants.GROUP, getID(), getName())); // Remove from cache myContext.removeCached(this, getID()); // Remove any ResourcePolicies that reference this group AuthorizeManager.removeGroupPolicies(myContext, getID()); // Remove any group memberships first DatabaseManager.updateQuery(myContext, "DELETE FROM EPersonGroup2EPerson WHERE eperson_group_id= ? ", getID()); // remove any group2groupcache entries DatabaseManager.updateQuery(myContext, "DELETE FROM group2groupcache WHERE parent_id= ? OR child_id= ? ", getID(),getID()); // Now remove any group2group assignments DatabaseManager.updateQuery(myContext, "DELETE FROM group2group WHERE parent_id= ? OR child_id= ? 
", getID(),getID()); // don't forget the new table deleteEpersonGroup2WorkspaceItem(); // Remove ourself DatabaseManager.delete(myContext, myRow); epeople.clear(); log.info(LogManager.getHeader(myContext, "delete_group", "group_id=" + getID())); } /** * @throws SQLException */ private void deleteEpersonGroup2WorkspaceItem() throws SQLException { DatabaseManager.updateQuery(myContext, "DELETE FROM EPersonGroup2WorkspaceItem WHERE eperson_group_id= ? ", getID()); } /** * Return EPerson members of a Group */ public EPerson[] getMembers() { loadData(); // make sure all data is loaded EPerson[] myArray = new EPerson[epeople.size()]; myArray = (EPerson[]) epeople.toArray(myArray); return myArray; } /** * Return Group members of a Group * * @return */ public Group[] getMemberGroups() { loadData(); // make sure all data is loaded Group[] myArray = new Group[groups.size()]; myArray = (Group[]) groups.toArray(myArray); return myArray; } /** * Return true if group has no direct or indirect members */ public boolean isEmpty() { loadData(); // make sure all data is loaded // the only fast check available is on epeople... boolean hasMembers = (epeople.size() != 0); if (hasMembers) { return false; } else { // well, groups is never null... for (Group subGroup : groups){ hasMembers = !subGroup.isEmpty(); if (hasMembers){ return false; } } return !hasMembers; } } /** * Update the group - writing out group object and EPerson list if necessary */ public void update() throws SQLException, AuthorizeException { // FIXME: Check authorisation DatabaseManager.update(myContext, myRow); if (modifiedMetadata) { myContext.addEvent(new Event(Event.MODIFY_METADATA, Constants.GROUP, getID(), getDetails())); modifiedMetadata = false; clearDetails(); } // Redo eperson mappings if they've changed if (epeopleChanged) { // Remove any existing mappings DatabaseManager.updateQuery(myContext, "delete from epersongroup2eperson where eperson_group_id= ? 
", getID()); // Add new mappings Iterator<EPerson> i = epeople.iterator(); while (i.hasNext()) { EPerson e = i.next(); TableRow mappingRow = DatabaseManager.row("epersongroup2eperson"); mappingRow.setColumn("eperson_id", e.getID()); mappingRow.setColumn("eperson_group_id", getID()); DatabaseManager.insert(myContext, mappingRow); } epeopleChanged = false; } // Redo Group mappings if they've changed if (groupsChanged) { // Remove any existing mappings DatabaseManager.updateQuery(myContext, "delete from group2group where parent_id= ? ", getID()); // Add new mappings Iterator<Group> i = groups.iterator(); while (i.hasNext()) { Group g = i.next(); TableRow mappingRow = DatabaseManager.row("group2group"); mappingRow.setColumn("parent_id", getID()); mappingRow.setColumn("child_id", g.getID()); DatabaseManager.insert(myContext, mappingRow); } // groups changed, now change group cache rethinkGroupCache(); groupsChanged = false; } log.info(LogManager.getHeader(myContext, "update_group", "group_id=" + getID())); } /** * Return <code>true</code> if <code>other</code> is the same Group as * this object, <code>false</code> otherwise * * @param obj * object to compare to * * @return <code>true</code> if object passed in represents the same group * as this object */ @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Group other = (Group) obj; if(this.getID() != other.getID()) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 59 * hash + (this.myRow != null ? 
this.myRow.hashCode() : 0); return hash; } public int getType() { return Constants.GROUP; } public String getHandle() { return null; } /** * Regenerate the group cache AKA the group2groupcache table in the database - * meant to be called when a group is added or removed from another group * */ private void rethinkGroupCache() throws SQLException { // read in the group2group table TableRowIterator tri = DatabaseManager.queryTable(myContext, "group2group", "SELECT * FROM group2group"); Map<Integer,Set<Integer>> parents = new HashMap<Integer,Set<Integer>>(); try { while (tri.hasNext()) { TableRow row = (TableRow) tri.next(); Integer parentID = Integer.valueOf(row.getIntColumn("parent_id")); Integer childID = Integer.valueOf(row.getIntColumn("child_id")); // if parent doesn't have an entry, create one if (!parents.containsKey(parentID)) { Set<Integer> children = new HashSet<Integer>(); // add child id to the list children.add(childID); parents.put(parentID, children); } else { // parent has an entry, now add the child to the parent's record // of children Set<Integer> children = parents.get(parentID); children.add(childID); } } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } // now parents is a hash of all of the IDs of groups that are parents // and each hash entry is a hash of all of the IDs of children of those // parent groups // so now to establish all parent,child relationships we can iterate // through the parents hash for (Map.Entry<Integer, Set<Integer>> parent : parents.entrySet()) { Set<Integer> myChildren = getChildren(parents, parent.getKey()); parent.getValue().addAll(myChildren); } // empty out group2groupcache table DatabaseManager.updateQuery(myContext, "DELETE FROM group2groupcache WHERE id >= 0"); // write out new one for (Map.Entry<Integer, Set<Integer>> parent : parents.entrySet()) { int parentID = parent.getKey().intValue(); for (Integer child : parent.getValue()) { TableRow row = 
DatabaseManager.row("group2groupcache"); row.setColumn("parent_id", parentID); row.setColumn("child_id", child); DatabaseManager.insert(myContext, row); } } } /** * Used recursively to generate a map of ALL of the children of the given * parent * * @param parents * Map of parent,child relationships * @param parent * the parent you're interested in * @return Map whose keys are all of the children of a parent */ private Set<Integer> getChildren(Map<Integer,Set<Integer>> parents, Integer parent) { Set<Integer> myChildren = new HashSet<Integer>(); // degenerate case, this parent has no children if (!parents.containsKey(parent)) { return myChildren; } // got this far, so we must have children Set<Integer> children = parents.get(parent); // now iterate over all of the children Iterator<Integer> i = children.iterator(); while (i.hasNext()) { Integer childID = i.next(); // add this child's ID to our return set myChildren.add(childID); // and now its children myChildren.addAll(getChildren(parents, childID)); } return myChildren; } public DSpaceObject getParentObject() throws SQLException { // could a collection/community administrator manage related groups? // check before the configuration options could give a performance gain // if all group management are disallowed if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup() || AuthorizeConfiguration.canCollectionAdminManageSubmitters() || AuthorizeConfiguration.canCollectionAdminManageWorkflows() || AuthorizeConfiguration.canCommunityAdminManageAdminGroup() || AuthorizeConfiguration .canCommunityAdminManageCollectionAdminGroup() || AuthorizeConfiguration .canCommunityAdminManageCollectionSubmitters() || AuthorizeConfiguration .canCommunityAdminManageCollectionWorkflows()) { // is this a collection related group? TableRow qResult = DatabaseManager .querySingle( myContext, "SELECT collection_id, workflow_step_1, workflow_step_2, " + " workflow_step_3, submitter, admin FROM collection " + " WHERE workflow_step_1 = ? 
OR " + " workflow_step_2 = ? OR " + " workflow_step_3 = ? OR " + " submitter = ? OR " + " admin = ?", getID(), getID(), getID(), getID(), getID()); if (qResult != null) { Collection collection = Collection.find(myContext, qResult .getIntColumn("collection_id")); if ((qResult.getIntColumn("workflow_step_1") == getID() || qResult.getIntColumn("workflow_step_2") == getID() || qResult.getIntColumn("workflow_step_3") == getID())) { if (AuthorizeConfiguration.canCollectionAdminManageWorkflows()) { return collection; } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionWorkflows()) { return collection.getParentObject(); } } if (qResult.getIntColumn("submitter") == getID()) { if (AuthorizeConfiguration.canCollectionAdminManageSubmitters()) { return collection; } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionSubmitters()) { return collection.getParentObject(); } } if (qResult.getIntColumn("admin") == getID()) { if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) { return collection; } else if (AuthorizeConfiguration.canCommunityAdminManageCollectionAdminGroup()) { return collection.getParentObject(); } } } // is the group related to a community and community administrator allowed // to manage it? else if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()) { qResult = DatabaseManager.querySingle(myContext, "SELECT community_id FROM community " + "WHERE admin = ?", getID()); if (qResult != null) { Community community = Community.find(myContext, qResult .getIntColumn("community_id")); return community; } } } return null; } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import java.security.MessageDigest;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.DSpaceObject;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.event.Event;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

/**
 * Class representing an e-person.
 *
 * @author David Stuve
 * @version $Revision: 5844 $
 */
public class EPerson extends DSpaceObject
{
    /** The e-mail field (for sorting) */
    public static final int EMAIL = 1;

    /** The last name (for sorting) */
    public static final int LASTNAME = 2;

    /** The eperson_id field (for sorting) */
    public static final int ID = 3;

    /** The netid field (for sorting) */
    public static final int NETID = 4;

    /** The language field (for sorting) */
    public static final int LANGUAGE = 5;

    /** log4j logger */
    private static final Logger log = Logger.getLogger(EPerson.class);

    /** Our context */
    private Context myContext;

    /** The row in the table representing this eperson */
    private TableRow myRow;

    /** Flag set when data is modified, for events */
    private boolean modified;

    /** Flag set when metadata is modified, for events */
    private boolean modifiedMetadata;

    /**
     * Construct an EPerson
     *
     * @param context
     *            the context this object exists in
     * @param row
     *            the corresponding row in the table
     */
    EPerson(Context context, TableRow row)
    {
        myContext = context;
        myRow = row;

        // Cache ourselves
        context.cache(this, row.getIntColumn("eperson_id"));

        modified = false;
        modifiedMetadata = false;
        clearDetails();
    }

    /**
     * Return true if this object equals obj, false otherwise.
     *
     * @param obj
     * @return true if EPerson objects are equal
     */
    @Override
    public boolean equals(Object obj)
    {
        if (obj == null)
        {
            return false;
        }
        if (getClass() != obj.getClass())
        {
            return false;
        }
        final EPerson other = (EPerson) obj;
        if (this.getID() != other.getID())
        {
            return false;
        }
        // Objects.equals is null-safe: getEmail() (and therefore getFullName(),
        // which falls back to getEmail()) can return null, which would have
        // thrown a NullPointerException with a direct equals() call here.
        if (!Objects.equals(this.getEmail(), other.getEmail()))
        {
            return false;
        }
        return Objects.equals(this.getFullName(), other.getFullName());
    }

    /**
     * Return a hash code for this object.
     *
     * @return int hash of object
     */
    @Override
    public int hashCode()
    {
        int hash = 5;
        hash = 89 * hash + this.getID();
        hash = 89 * hash + (this.getEmail() != null ? this.getEmail().hashCode() : 0);
        hash = 89 * hash + (this.getFullName() != null ? this.getFullName().hashCode() : 0);
        return hash;
    }

    /**
     * Get an EPerson from the database.
     *
     * @param context
     *            DSpace context object
     * @param id
     *            ID of the EPerson
     *
     * @return the EPerson, or null if the ID is invalid.
     */
    public static EPerson find(Context context, int id) throws SQLException
    {
        // First check the cache
        EPerson fromCache = (EPerson) context.fromCache(EPerson.class, id);

        if (fromCache != null)
        {
            return fromCache;
        }

        TableRow row = DatabaseManager.find(context, "eperson", id);

        if (row == null)
        {
            return null;
        }
        else
        {
            return new EPerson(context, row);
        }
    }

    /**
     * Find the eperson by their email address.
     *
     * @param context
     *            DSpace context
     * @param email
     *            email address to look up (matched case-insensitively)
     *
     * @return EPerson, or {@code null} if none such exists.
     */
    public static EPerson findByEmail(Context context, String email)
            throws SQLException, AuthorizeException
    {
        if (email == null)
        {
            return null;
        }

        // All email addresses are stored as lowercase, so ensure that the
        // email address is lowercased for the lookup
        TableRow row = DatabaseManager.findByUnique(context, "eperson",
                "email", email.toLowerCase());

        if (row == null)
        {
            return null;
        }
        else
        {
            // First check the cache
            EPerson fromCache = (EPerson) context.fromCache(EPerson.class,
                    row.getIntColumn("eperson_id"));

            if (fromCache != null)
            {
                return fromCache;
            }
            else
            {
                return new EPerson(context, row);
            }
        }
    }

    /**
     * Find the eperson by their netid.
     *
     * @param context
     *            DSpace context
     * @param netid
     *            Network ID
     *
     * @return corresponding EPerson, or <code>null</code>
     */
    public static EPerson findByNetid(Context context, String netid)
            throws SQLException
    {
        if (netid == null)
        {
            return null;
        }

        TableRow row = DatabaseManager.findByUnique(context, "eperson",
                "netid", netid);

        if (row == null)
        {
            return null;
        }
        else
        {
            // First check the cache
            EPerson fromCache = (EPerson) context.fromCache(EPerson.class,
                    row.getIntColumn("eperson_id"));

            if (fromCache != null)
            {
                return fromCache;
            }
            else
            {
                return new EPerson(context, row);
            }
        }
    }

    /**
     * Find the epeople that match the search query across firstname, lastname
     * or email.
     *
     * @param context
     *            DSpace context
     * @param query
     *            The search string
     *
     * @return array of EPerson objects
     */
    public static EPerson[] search(Context context, String query)
            throws SQLException
    {
        return search(context, query, -1, -1);
    }

    /**
     * Find the epeople that match the search query across firstname, lastname
     * or email. This method also allows offsets and limits for pagination
     * purposes.
     *
     * @param context
     *            DSpace context
     * @param query
     *            The search string
     * @param offset
     *            Inclusive offset (ignored when &lt;= 0)
     * @param limit
     *            Maximum number of matches returned (ignored when &lt;= 0)
     *
     * @return array of EPerson objects
     */
    public static EPerson[] search(Context context, String query, int offset,
            int limit) throws SQLException
    {
        String params = "%" + query.toLowerCase() + "%";
        // StringBuilder: local, single-threaded use does not need
        // StringBuffer's synchronization.
        StringBuilder queryBuf = new StringBuilder();
        queryBuf.append("SELECT * FROM eperson WHERE eperson_id = ? OR ");
        queryBuf.append("LOWER(firstname) LIKE LOWER(?) OR LOWER(lastname) LIKE LOWER(?) OR LOWER(email) LIKE LOWER(?) ORDER BY lastname, firstname ASC ");

        // Add offset and limit restrictions - Oracle requires special code
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            // First prepare the query to generate row numbers
            if (limit > 0 || offset > 0)
            {
                queryBuf.insert(0, "SELECT /*+ FIRST_ROWS(n) */ rec.*, ROWNUM rnum  FROM (");
                queryBuf.append(") ");
            }

            // Restrict the number of rows returned based on the limit
            if (limit > 0)
            {
                queryBuf.append("rec WHERE rownum<=? ");
                // If we also have an offset, then convert the limit into the
                // maximum row number
                if (offset > 0)
                {
                    limit += offset;
                }
            }

            // Return only the records after the specified offset (row number)
            if (offset > 0)
            {
                queryBuf.insert(0, "SELECT * FROM (");
                queryBuf.append(") WHERE rnum>?");
            }
        }
        else
        {
            if (limit > 0)
            {
                queryBuf.append(" LIMIT ? ");
            }

            if (offset > 0)
            {
                queryBuf.append(" OFFSET ? ");
            }
        }

        String dbquery = queryBuf.toString();

        // When checking against the eperson-id, make sure the query can be
        // made into a number
        Integer int_param;
        try
        {
            int_param = Integer.valueOf(query);
        }
        catch (NumberFormatException e)
        {
            int_param = Integer.valueOf(-1);
        }

        // Create the parameter array, including limit and offset if part of
        // the query. Placeholder order in the final SQL is id, three LIKEs,
        // then limit, then offset, matching these arrays.
        Object[] paramArr = new Object[] { int_param, params, params, params };
        if (limit > 0 && offset > 0)
        {
            paramArr = new Object[] { int_param, params, params, params, limit, offset };
        }
        else if (limit > 0)
        {
            paramArr = new Object[] { int_param, params, params, params, limit };
        }
        else if (offset > 0)
        {
            paramArr = new Object[] { int_param, params, params, params, offset };
        }

        // Get all the epeople that match the query
        TableRowIterator rows = DatabaseManager.queryTable(context, "eperson",
                dbquery, paramArr);
        try
        {
            List<TableRow> epeopleRows = rows.toList();
            EPerson[] epeople = new EPerson[epeopleRows.size()];

            for (int i = 0; i < epeopleRows.size(); i++)
            {
                TableRow row = epeopleRows.get(i);

                // First check the cache
                EPerson fromCache = (EPerson) context.fromCache(EPerson.class,
                        row.getIntColumn("eperson_id"));

                if (fromCache != null)
                {
                    epeople[i] = fromCache;
                }
                else
                {
                    epeople[i] = new EPerson(context, row);
                }
            }

            return epeople;
        }
        finally
        {
            if (rows != null)
            {
                rows.close();
            }
        }
    }

    /**
     * Returns the total number of epeople returned by a specific query,
     * without the overhead of creating the EPerson objects to store the
     * results.
     *
     * @param context
     *            DSpace context
     * @param query
     *            The search string
     *
     * @return the number of epeople matching the query
     */
    public static int searchResultCount(Context context, String query)
            throws SQLException
    {
        String dbquery = "%" + query.toLowerCase() + "%";

        // When checking against the eperson-id, make sure the query can be
        // made into a number
        Integer int_param;
        try
        {
            int_param = Integer.valueOf(query);
        }
        catch (NumberFormatException e)
        {
            int_param = Integer.valueOf(-1);
        }

        // Get all the epeople that match the query
        TableRow row = DatabaseManager.querySingle(context,
                "SELECT count(*) as epcount FROM eperson WHERE eperson_id = ? OR " +
                "LOWER(firstname) LIKE LOWER(?) OR LOWER(lastname) LIKE LOWER(?) OR LOWER(email) LIKE LOWER(?)",
                new Object[] { int_param, dbquery, dbquery, dbquery });

        // use getIntColumn for Oracle count data; getLongColumn works for
        // postgres
        long count;
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            count = row.getIntColumn("epcount");
        }
        else
        {
            count = row.getLongColumn("epcount");
        }

        return (int) count;
    }

    /**
     * Find all the epeople, sorted by one of:
     * <ul>
     * <li><code>ID</code></li>
     * <li><code>LASTNAME</code> (the default)</li>
     * <li><code>EMAIL</code></li>
     * <li><code>NETID</code></li>
     * <li><code>LANGUAGE</code></li>
     * </ul>
     *
     * @param context
     *            DSpace context
     * @param sortField
     *            one of the sort constants above
     *
     * @return array of EPerson objects
     */
    public static EPerson[] findAll(Context context, int sortField)
            throws SQLException
    {
        String s;

        switch (sortField)
        {
        case ID:
            s = "eperson_id";
            break;

        case EMAIL:
            s = "email";
            break;

        case LANGUAGE:
            s = "language";
            break;

        case NETID:
            s = "netid";
            break;

        default:
            s = "lastname";
        }

        // NOTE: The use of 's' in the order by clause can not cause an SQL
        // injection because the string is derived from constant values above.
        TableRowIterator rows = DatabaseManager.queryTable(context, "eperson",
                "SELECT * FROM eperson ORDER BY " + s);

        try
        {
            List<TableRow> epeopleRows = rows.toList();
            EPerson[] epeople = new EPerson[epeopleRows.size()];

            for (int i = 0; i < epeopleRows.size(); i++)
            {
                TableRow row = epeopleRows.get(i);

                // First check the cache
                EPerson fromCache = (EPerson) context.fromCache(EPerson.class,
                        row.getIntColumn("eperson_id"));

                if (fromCache != null)
                {
                    epeople[i] = fromCache;
                }
                else
                {
                    epeople[i] = new EPerson(context, row);
                }
            }

            return epeople;
        }
        finally
        {
            if (rows != null)
            {
                rows.close();
            }
        }
    }

    /**
     * Create a new eperson. Only site administrators may do this.
     *
     * @param context
     *            DSpace context object
     */
    public static EPerson create(Context context) throws SQLException,
            AuthorizeException
    {
        // authorized?
        if (!AuthorizeManager.isAdmin(context))
        {
            throw new AuthorizeException(
                    "You must be an admin to create an EPerson");
        }

        // Create a table row
        TableRow row = DatabaaseRowSafeCreate(context);

        EPerson e = new EPerson(context, row);

        log.info(LogManager.getHeader(context, "create_eperson", "eperson_id="
                + e.getID()));

        context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(),
                null));

        return e;
    }

    /**
     * Create the underlying eperson table row.
     */
    private static TableRow DatabaaseRowSafeCreate(Context context)
            throws SQLException
    {
        return DatabaseManager.create(context, "eperson");
    }

    /**
     * Delete an eperson. Only site administrators may do this, and only when
     * no rows in constrained tables (item, workflowitem, tasklistitem) still
     * reference the eperson.
     *
     * @throws EPersonDeletionException
     *             if the eperson is still referenced from constrained tables
     */
    public void delete() throws SQLException, AuthorizeException,
            EPersonDeletionException
    {
        // authorized?
        if (!AuthorizeManager.isAdmin(myContext))
        {
            throw new AuthorizeException(
                    "You must be an admin to delete an EPerson");
        }

        // check for presence of eperson in tables that
        // have constraints on eperson_id
        List<String> constraintList = getDeleteConstraints();

        // if eperson exists in tables that have constraints
        // on eperson, throw an exception
        if (constraintList.size() > 0)
        {
            throw new EPersonDeletionException(constraintList);
        }

        myContext.addEvent(new Event(Event.DELETE, Constants.EPERSON, getID(),
                getEmail()));

        // Remove from cache
        myContext.removeCached(this, getID());

        // XXX FIXME: This sidesteps the object model code so it won't
        // generate REMOVE events on the affected Groups.

        // Remove any group memberships first
        DatabaseManager.updateQuery(myContext,
                "DELETE FROM EPersonGroup2EPerson WHERE eperson_id= ? ",
                getID());

        // Remove any subscriptions
        DatabaseManager.updateQuery(myContext,
                "DELETE FROM subscription WHERE eperson_id= ? ", getID());

        // Remove ourself
        DatabaseManager.delete(myContext, myRow);

        log.info(LogManager.getHeader(myContext, "delete_eperson",
                "eperson_id=" + getID()));
    }

    /**
     * Get the e-person's internal identifier
     *
     * @return the internal identifier
     */
    public int getID()
    {
        return myRow.getIntColumn("eperson_id");
    }

    /**
     * Get the e-person's language
     *
     * @return language code, or null if unset
     */
    public String getLanguage()
    {
        return myRow.getStringColumn("language");
    }

    /**
     * Set the EPerson's language. Value is expected to be a Unix/POSIX Locale
     * specification of the form {language} or {language}_{territory}, e.g.
     * "en", "en_US", "pt_BR" (the latter is Brazilian Portugese).
     *
     * @param language
     *            language
     */
    public void setLanguage(String language)
    {
        myRow.setColumn("language", language);
    }

    public String getHandle()
    {
        // No Handles for e-people
        return null;
    }

    /**
     * Get the e-person's email address
     *
     * @return their email address, or null if unset
     */
    public String getEmail()
    {
        return myRow.getStringColumn("email");
    }

    /**
     * Set the EPerson's email. The address is lowercased before storage so
     * that lookups via {@link #findByEmail} match.
     *
     * @param s
     *            the new email
     */
    public void setEmail(String s)
    {
        if (s != null)
        {
            s = s.toLowerCase();
        }

        myRow.setColumn("email", s);
        modified = true;
    }

    /**
     * Get the e-person's netid
     *
     * @return their netid
     */
    public String getNetid()
    {
        return myRow.getStringColumn("netid");
    }

    /**
     * Set the EPerson's netid
     *
     * @param s
     *            the new netid
     */
    public void setNetid(String s)
    {
        myRow.setColumn("netid", s);
        modified = true;
    }

    /**
     * Get the e-person's full name, combining first and last name in a
     * displayable string. Falls back to the email address (which may itself
     * be null) when both name parts are missing.
     *
     * @return their full name
     */
    public String getFullName()
    {
        String f = myRow.getStringColumn("firstname");
        String l = myRow.getStringColumn("lastname");

        if ((l == null) && (f == null))
        {
            return getEmail();
        }
        else if (f == null)
        {
            return l;
        }
        else
        {
            return (f + " " + l);
        }
    }

    /**
     * Get the eperson's first name.
     *
     * @return their first name
     */
    public String getFirstName()
    {
        return myRow.getStringColumn("firstname");
    }

    /**
     * Set the eperson's first name
     *
     * @param firstname
     *            the person's first name
     */
    public void setFirstName(String firstname)
    {
        myRow.setColumn("firstname", firstname);
        modified = true;
    }

    /**
     * Get the eperson's last name.
     *
     * @return their last name
     */
    public String getLastName()
    {
        return myRow.getStringColumn("lastname");
    }

    /**
     * Set the eperson's last name
     *
     * @param lastname
     *            the person's last name
     */
    public void setLastName(String lastname)
    {
        myRow.setColumn("lastname", lastname);
        modified = true;
    }

    /**
     * Indicate whether the user can log in
     *
     * @param login
     *            boolean yes/no
     */
    public void setCanLogIn(boolean login)
    {
        myRow.setColumn("can_log_in", login);
        modified = true;
    }

    /**
     * Can the user log in?
     *
     * @return boolean, yes/no
     */
    public boolean canLogIn()
    {
        return myRow.getBooleanColumn("can_log_in");
    }

    /**
     * Set require cert yes/no
     *
     * @param isrequired
     *            boolean yes/no
     */
    public void setRequireCertificate(boolean isrequired)
    {
        myRow.setColumn("require_certificate", isrequired);
        modified = true;
    }

    /**
     * Get require certificate or not
     *
     * @return boolean, yes/no
     */
    public boolean getRequireCertificate()
    {
        return myRow.getBooleanColumn("require_certificate");
    }

    /**
     * Indicate whether the user self-registered
     *
     * @param sr
     *            boolean yes/no
     */
    public void setSelfRegistered(boolean sr)
    {
        myRow.setColumn("self_registered", sr);
        modified = true;
    }

    /**
     * Did the user self-register?
     *
     * @return boolean, yes/no
     */
    public boolean getSelfRegistered()
    {
        return myRow.getBooleanColumn("self_registered");
    }

    /**
     * Get the value of a metadata field
     *
     * @param field
     *            the name of the metadata field to get
     *
     * @return the value of the metadata field
     *
     * @exception IllegalArgumentException
     *                if the requested metadata field doesn't exist
     */
    public String getMetadata(String field)
    {
        return myRow.getStringColumn(field);
    }

    /**
     * Set a metadata value
     *
     * @param field
     *            the name of the metadata field to set
     * @param value
     *            value to set the field to
     *
     * @exception IllegalArgumentException
     *                if the requested metadata field doesn't exist
     */
    public void setMetadata(String field, String value)
    {
        myRow.setColumn(field, value);
        modifiedMetadata = true;
        addDetails(field);
    }

    /**
     * Set the EPerson's password.
     *
     * @param s
     *            the new password (plain text)
     */
    public void setPassword(String s)
    {
        // FIXME: MD5 is cryptographically broken for password storage; the
        // stored-hash format is an external contract, so it is kept here, but
        // a salted adaptive hash (bcrypt/argon2) should replace it.
        String encoded = Utils.getMD5(s);

        myRow.setColumn("password", encoded);
        modified = true;
    }

    /**
     * Set the EPerson's password hash
     *
     * @param s
     *            hash of the password
     */
    public void setPasswordHash(String s)
    {
        myRow.setColumn("password", s);
        modified = true;
    }

    /**
     * Return the EPerson's password hash
     *
     * @return hash of the password
     */
    public String getPasswordHash()
    {
        return myRow.getStringColumn("password");
    }

    /**
     * Check EPerson's password
     *
     * @param attempt
     *            the password attempt
     * @return boolean successful/unsuccessful
     */
    public boolean checkPassword(String attempt)
    {
        String encoded = Utils.getMD5(attempt);
        String stored = getPasswordHash();

        // No stored hash can never match (same result as the old
        // encoded.equals(null-stored) call).
        if (stored == null)
        {
            return false;
        }

        // Constant-time comparison avoids leaking the length of the matching
        // hash prefix via timing.
        return MessageDigest.isEqual(encoded.getBytes(), stored.getBytes());
    }

    /**
     * Update the EPerson, writing out any changed fields and firing
     * MODIFY/MODIFY_METADATA events as needed.
     */
    public void update() throws SQLException, AuthorizeException
    {
        // Check authorisation - if you're not the eperson
        // see if the authorization system says you can
        if (!myContext.ignoreAuthorization()
                && ((myContext.getCurrentUser() == null) || (getID() != myContext
                        .getCurrentUser().getID())))
        {
            AuthorizeManager.authorizeAction(myContext, this, Constants.WRITE);
        }

        DatabaseManager.update(myContext, myRow);

        log.info(LogManager.getHeader(myContext, "update_eperson",
                "eperson_id=" + getID()));

        if (modified)
        {
            myContext.addEvent(new Event(Event.MODIFY, Constants.EPERSON,
                    getID(), null));
            modified = false;
        }
        if (modifiedMetadata)
        {
            myContext.addEvent(new Event(Event.MODIFY_METADATA,
                    Constants.EPERSON, getID(), getDetails()));
            modifiedMetadata = false;
            clearDetails();
        }
    }

    /**
     * return type found in Constants
     */
    public int getType()
    {
        return Constants.EPERSON;
    }

    /**
     * Check for presence of EPerson in tables that have constraints on
     * EPersons. Called by delete() to determine whether the eperson can
     * actually be deleted.
     *
     * An EPerson cannot be deleted if it exists in the item, workflowitem, or
     * tasklistitem tables.
     *
     * @return List of tables that contain a reference to the eperson.
     */
    public List<String> getDeleteConstraints() throws SQLException
    {
        List<String> tableList = new ArrayList<String>();

        // check for eperson in item table
        if (isReferencedFrom("item", "submitter_id"))
        {
            tableList.add("item");
        }

        // check for eperson in workflowitem table
        if (isReferencedFrom("workflowitem", "owner"))
        {
            tableList.add("workflowitem");
        }

        // check for eperson in tasklistitem table
        if (isReferencedFrom("tasklistitem", "eperson_id"))
        {
            tableList.add("tasklistitem");
        }

        // the list of tables can be used to construct an error message
        // explaining to the user why the eperson cannot be deleted.
        return tableList;
    }

    /**
     * Return true if the given table has at least one row whose given column
     * references this eperson. Table and column names are constants supplied
     * by {@link #getDeleteConstraints()}, so the string concatenation cannot
     * cause SQL injection.
     */
    private boolean isReferencedFrom(String table, String column)
            throws SQLException
    {
        TableRowIterator tri = DatabaseManager.queryTable(myContext, table,
                "SELECT * from " + table + " where " + column + "= ? ",
                getID());

        try
        {
            return tri.hasNext();
        }
        finally
        {
            // close the TableRowIterator to free up resources
            if (tri != null)
            {
                tri.close();
            }
        }
    }

    public String getName()
    {
        return getEmail();
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.eperson;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Locale;

import javax.mail.MessagingException;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.Utils;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;

/**
 * Methods for handling registration by email and forgotten passwords. When
 * someone registers as a user, or forgets their password, the
 * sendRegistrationInfo or sendForgotPasswordInfo methods can be used to send an
 * email to the user. The email contains a special token, a long string which is
 * randomly generated and thus hard to guess. When the user presents the token
 * back to the system, the AccountManager can use the token to determine the
 * identity of the eperson.
 *
 * *NEW* now ignores expiration dates so that tokens never expire
 *
 * @author Peter Breton
 * @version $Revision: 5844 $
 */
public class AccountManager
{
    /** log4j log */
    private static Logger log = Logger.getLogger(AccountManager.class);

    /**
     * Name of the registration-data table. The previous code referred to this
     * table as both "RegistrationData" and "registrationdata"; a single
     * constant keeps every query consistent.
     */
    private static final String REGISTRATION_TABLE = "RegistrationData";

    /** Protected Constructor */
    protected AccountManager()
    {
    }

    /**
     * Email registration info to the given email address.
     *
     * Potential error conditions: Cannot create registration data in database
     * (throws SQLException) Error sending email (throws MessagingException)
     * Error reading email template (throws IOException) Authorization error
     * (throws AuthorizeException)
     *
     * @param context
     *            DSpace context
     * @param email
     *            Email address to send the registration email to
     */
    public static void sendRegistrationInfo(Context context, String email)
            throws SQLException, IOException, MessagingException,
            AuthorizeException
    {
        sendInfo(context, email, true, true);
    }

    /**
     * Email forgot password info to the given email address.
     *
     * Potential error conditions: No EPerson with that email (returns null)
     * Cannot create registration data in database (throws SQLException) Error
     * sending email (throws MessagingException) Error reading email template
     * (throws IOException) Authorization error (throws AuthorizeException)
     *
     * @param context
     *            DSpace context
     * @param email
     *            Email address to send the forgot-password email to
     */
    public static void sendForgotPasswordInfo(Context context, String email)
            throws SQLException, IOException, MessagingException,
            AuthorizeException
    {
        sendInfo(context, email, false, true);
    }

    /**
     * <p>
     * Return the EPerson corresponding to token, where token was emailed to the
     * person by either the sendRegistrationInfo or sendForgotPasswordInfo
     * methods.
     * </p>
     *
     * <p>
     * If the token is not found return null.
     * </p>
     *
     * @param context
     *            DSpace context
     * @param token
     *            Account token
     * @return The EPerson corresponding to token, or null.
     * @exception SQLException
     *                If the token or eperson cannot be retrieved from the
     *                database.
     */
    public static EPerson getEPerson(Context context, String token)
            throws SQLException, AuthorizeException
    {
        String email = getEmail(context, token);

        if (email == null)
        {
            return null;
        }

        EPerson ep = EPerson.findByEmail(context, email);

        return ep;
    }

    /**
     * Return the e-mail address referred to by a token, or null if email
     * address can't be found ignores expiration of token
     *
     * @param context
     *            DSpace context
     * @param token
     *            Account token
     * @return The email address corresponding to token, or null.
     */
    public static String getEmail(Context context, String token)
            throws SQLException
    {
        TableRow rd = DatabaseManager.findByUnique(context,
                REGISTRATION_TABLE, "token", token);

        if (rd == null)
        {
            return null;
        }

        /*
         * ignore the expiration date on tokens Date expires =
         * rd.getDateColumn("expires"); if (expires != null) { if ((new
         * java.util.Date()).after(expires)) return null; }
         */
        return rd.getStringColumn("email");
    }

    /**
     * Delete token.
     *
     * @param context
     *            DSpace context
     * @param token
     *            The token to delete
     * @exception SQLException
     *                If a database error occurs
     */
    public static void deleteToken(Context context, String token)
            throws SQLException
    {
        DatabaseManager.deleteByValue(context, REGISTRATION_TABLE, "token",
                token);
    }

    /*
     * THIS IS AN INTERNAL METHOD. THE SEND PARAMETER ALLOWS IT TO BE USED FOR
     * TESTING PURPOSES.
     *
     * Send an info to the EPerson with the given email address. If isRegister
     * is TRUE, this is registration email; otherwise, it is forgot-password
     * email. If send is TRUE, the email is sent; otherwise it is skipped.
     *
     * Potential error conditions: No EPerson with that email (returns null)
     * Cannot create registration data in database (throws SQLException) Error
     * sending email (throws MessagingException) Error reading email template
     * (throws IOException) Authorization error (throws AuthorizeException)
     *
     * @param context DSpace context @param email Email address to send the
     * forgot-password email to @param isRegister If true, this is for
     * registration; otherwise, it is for forgot-password @param send If true,
     * send email; otherwise do not send any email
     */
    protected static TableRow sendInfo(Context context, String email,
            boolean isRegister, boolean send) throws SQLException,
            IOException, MessagingException, AuthorizeException
    {
        // See if a registration token already exists for this user
        TableRow rd = DatabaseManager.findByUnique(context,
                REGISTRATION_TABLE, "email", email);

        // If a token already exists it is simply re-issued; otherwise a new
        // token row is created for this email address.
        if (rd == null)
        {
            rd = DatabaseManager.row(REGISTRATION_TABLE);
            rd.setColumn("token", Utils.generateHexKey());

            // don't set expiration date any more
            // rd.setColumn("expires", getDefaultExpirationDate());
            rd.setColumn("email", email);
            DatabaseManager.insert(context, rd);

            // This is a potential problem -- if we create the callback
            // and then crash, registration will get SNAFU-ed.
            // So FIRST leave some breadcrumbs
            if (log.isDebugEnabled())
            {
                log.debug("Created callback "
                        + rd.getIntColumn("registrationdata_id")
                        + " with token " + rd.getStringColumn("token")
                        + " with email \"" + email + "\"");
            }
        }

        if (send)
        {
            sendEmail(context, email, isRegister, rd);
        }

        return rd;
    }

    /**
     * Send a DSpace message to the given email address.
     *
     * If isRegister is <code>true</code>, this is registration email;
     * otherwise, it is a forgot-password email.
     *
     * @param email
     *            The email address to mail to
     * @param isRegister
     *            If true, this is registration email; otherwise it is
     *            forgot-password email.
     * @param rd
     *            The RDBMS row representing the registration data.
     * @exception MessagingException
     *                If an error occurs while sending email
     * @exception IOException
     *                If an error occurs while reading the email template.
     */
    private static void sendEmail(Context context, String email,
            boolean isRegister, TableRow rd) throws MessagingException,
            IOException, SQLException
    {
        String base = ConfigurationManager.getProperty("dspace.url");

        // Note change from "key=" to "token="
        // StringBuilder: this is single-threaded local string assembly
        String specialLink = new StringBuilder().append(base).append(
                base.endsWith("/") ? "" : "/").append(
                isRegister ? "register" : "forgot").append("?")
                .append("token=").append(rd.getStringColumn("token"))
                .toString();
        Locale locale = context.getCurrentLocale();
        Email bean = ConfigurationManager.getEmail(I18nUtil.getEmailFilename(
                locale, isRegister ? "register" : "change_password"));
        bean.addRecipient(email);
        bean.addArgument(specialLink);
        bean.send();

        // Breadcrumbs
        if (log.isInfoEnabled())
        {
            log.info("Sent " + (isRegister ? "registration" : "account")
                    + " information to " + email);
        }
    }
}
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xml.serialize.Method;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.core.Context;
import org.xml.sax.SAXException;

/**
 * @author Graham Triggs
 *
 * This class creates an xml document as passed in the arguments and
 * from the metadata schemas for the repository.
 *
 * The form of the XML is as follows
 *
 * <metadata-schemas>
 *   <schema>
 *     <name>dc</name>
 *     <namespace>http://dublincore.org/documents/dcmi-terms/</namespace>
 *   </schema>
 * </metadata-schemas>
 */
public class MetadataExporter
{
    /**
     * Command-line entry point: parses -f (output file, required) and
     * -s (schema name, optional) and delegates to saveRegistry.
     *
     * @param args
     * @throws ParseException
     * @throws SAXException
     * @throws IOException
     * @throws SQLException
     * @throws RegistryExportException
     */
    public static void main(String[] args)
            throws ParseException, SQLException, IOException, SAXException,
            RegistryExportException
    {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("f", "file", true, "output xml file for registry");
        options.addOption("s", "schema", true, "the name of the schema to export");
        CommandLine line = parser.parse(options, args);

        String file = null;
        String schema = null;

        if (line.hasOption('f'))
        {
            file = line.getOptionValue('f');
        }
        else
        {
            usage();
            System.exit(0);
        }

        if (line.hasOption('s'))
        {
            schema = line.getOptionValue('s');
        }

        saveRegistry(file, schema);
    }

    /**
     * Serialize the schema and field registries to the given file. If
     * 'schema' is null or empty, all schemas and fields are exported;
     * otherwise only the named schema and its fields.
     *
     * @param file   path of the XML file to write
     * @param schema schema name to export, or null/empty for all
     */
    public static void saveRegistry(String file, String schema)
            throws SQLException, IOException, SAXException,
            RegistryExportException
    {
        // create a context; export is read-only, so authorisation is skipped
        Context context = new Context();
        try
        {
            context.setIgnoreAuthorization(true);

            OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
            xmlFormat.setLineWidth(120);
            xmlFormat.setIndent(4);

            // Write through an explicit UTF-8 encoder: the XML declaration
            // above promises UTF-8, but a plain FileWriter would use the
            // platform default charset and could corrupt non-ASCII values.
            Writer writer = new BufferedWriter(new OutputStreamWriter(
                    new FileOutputStream(file), "UTF-8"));
            try
            {
                XMLSerializer xmlSerializer = new XMLSerializer(writer, xmlFormat);
                xmlSerializer.startDocument();
                xmlSerializer.startElement("dspace-dc-types", null);

                // Save the schema definition(s)
                saveSchema(context, xmlSerializer, schema);

                MetadataField[] mdFields = null;

                // If a single schema has been specified
                if (schema != null && !"".equals(schema))
                {
                    // Get the id of that schema
                    MetadataSchema mdSchema = MetadataSchema.find(context, schema);
                    if (mdSchema == null)
                    {
                        throw new RegistryExportException("no schema to export");
                    }

                    // Get the metadata fields only for the specified schema
                    mdFields = MetadataField.findAllInSchema(context, mdSchema.getSchemaID());
                }
                else
                {
                    // Get the metadata fields for all the schemas
                    mdFields = MetadataField.findAll(context);
                }

                // Output the metadata fields
                for (MetadataField mdField : mdFields)
                {
                    saveType(context, xmlSerializer, mdField);
                }

                xmlSerializer.endElement("dspace-dc-types");
                xmlSerializer.endDocument();
            }
            finally
            {
                // ensure buffered output is flushed and the file handle freed
                writer.close();
            }
        }
        finally
        {
            // abort the context, as we shouldn't have changed it!!
            context.abort();
        }
    }

    /**
     * Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
     * @param context
     * @param xmlSerializer
     * @param schema
     * @throws SQLException
     * @throws SAXException
     * @throws RegistryExportException
     */
    public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
            throws SQLException, SAXException, RegistryExportException
    {
        if (schema != null && !"".equals(schema))
        {
            // Find a single named schema
            MetadataSchema mdSchema = MetadataSchema.find(context, schema);

            saveSchema(xmlSerializer, mdSchema);
        }
        else
        {
            // Find all schemas
            MetadataSchema[] mdSchemas = MetadataSchema.findAll(context);

            for (MetadataSchema mdSchema : mdSchemas)
            {
                saveSchema(xmlSerializer, mdSchema);
            }
        }
    }

    /**
     * Serialize a single schema (namespace) registry entry
     *
     * @param xmlSerializer
     * @param mdSchema
     * @throws SAXException
     * @throws RegistryExportException
     */
    private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
            throws SAXException, RegistryExportException
    {
        // If we haven't got a schema, it's an error
        if (mdSchema == null)
        {
            throw new RegistryExportException("no schema to export");
        }

        String name = mdSchema.getName();
        String namespace = mdSchema.getNamespace();

        // Schemas with missing name or namespace are skipped with a console
        // note rather than aborting the whole export.
        if (name == null || "".equals(name))
        {
            System.out.println("name is null, skipping");
            return;
        }

        if (namespace == null || "".equals(namespace))
        {
            System.out.println("namespace is null, skipping");
            return;
        }

        // Output the parent tag
        xmlSerializer.startElement("dc-schema", null);

        // Output the schema name
        xmlSerializer.startElement("name", null);
        xmlSerializer.characters(name.toCharArray(), 0, name.length());
        xmlSerializer.endElement("name");

        // Output the schema namespace
        xmlSerializer.startElement("namespace", null);
        xmlSerializer.characters(namespace.toCharArray(), 0, namespace.length());
        xmlSerializer.endElement("namespace");

        xmlSerializer.endElement("dc-schema");
    }

    /**
     * Serialize a single metadata field registry entry to xml
     *
     * @param context
     * @param xmlSerializer
     * @param mdField
     * @throws SAXException
     * @throws RegistryExportException
     * @throws SQLException
     * @throws IOException
     */
    private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
            throws SAXException, RegistryExportException, SQLException, IOException
    {
        // If we haven't been given a field, it's an error
        if (mdField == null)
        {
            throw new RegistryExportException("no field to export");
        }

        // Get the data from the metadata field
        String schemaName = getSchemaName(context, mdField);
        String element = mdField.getElement();
        String qualifier = mdField.getQualifier();
        String scopeNote = mdField.getScopeNote();

        // We must have a schema and element
        if (schemaName == null || element == null)
        {
            throw new RegistryExportException("incomplete field information");
        }

        // Output the parent tag
        xmlSerializer.startElement("dc-type", null);

        // Output the schema name
        xmlSerializer.startElement("schema", null);
        xmlSerializer.characters(schemaName.toCharArray(), 0, schemaName.length());
        xmlSerializer.endElement("schema");

        // Output the element
        xmlSerializer.startElement("element", null);
        xmlSerializer.characters(element.toCharArray(), 0, element.length());
        xmlSerializer.endElement("element");

        // Output the qualifier, if present
        if (qualifier != null)
        {
            xmlSerializer.startElement("qualifier", null);
            xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
            xmlSerializer.endElement("qualifier");
        }
        else
        {
            xmlSerializer.comment("unqualified");
        }

        // Output the scope note, if present
        if (scopeNote != null)
        {
            xmlSerializer.startElement("scope_note", null);
            xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
            xmlSerializer.endElement("scope_note");
        }
        else
        {
            xmlSerializer.comment("no scope note");
        }

        xmlSerializer.endElement("dc-type");
    }

    /**
     * Helper method to retrieve a schema name for the field.
     * Caches the name after looking up the id.
     */
    static Map<Integer, String> schemaMap = new HashMap<Integer, String>();

    private static String getSchemaName(Context context, MetadataField mdField)
            throws SQLException, RegistryExportException
    {
        // Get name from cache
        String name = schemaMap.get(Integer.valueOf(mdField.getSchemaID()));

        if (name == null)
        {
            // Name not retrieved before, so get the schema now
            MetadataSchema mdSchema = MetadataSchema.find(context, mdField.getSchemaID());
            if (mdSchema != null)
            {
                name = mdSchema.getName();
                schemaMap.put(Integer.valueOf(mdSchema.getSchemaID()), name);
            }
            else
            {
                // Can't find the schema
                throw new RegistryExportException("Can't get schema name for field");
            }
        }
        return name;
    }

    /**
     * Print the usage message to stdout
     */
    public static void usage()
    {
        String usage = "Use this class with the following options:\n" +
                " -f <xml output file> : specify the output file for the schemas\n" +
                " -s <schema> : name of the schema to export\n";
        System.out.println(usage);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.administer; /** * @author Graham Triggs * * An exception to report any problems with registry exports */ public class RegistryExportException extends Exception { /** * Create an empty authorize exception */ public RegistryExportException() { super(); } /** * create an exception with only a message * * @param message */ public RegistryExportException(String message) { super(message); } /** * create an exception with an inner exception and a message * * @param message * @param e */ public RegistryExportException(String message, Throwable e) { super(message, e); } /** * create an exception with an inner exception * * @param e */ public RegistryExportException(Throwable e) { super(e); } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

import java.io.IOException;
import java.sql.SQLException;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * @author Richard Jones
 *
 * This class takes an xml document as passed in the arguments and
 * uses it to create metadata elements in the Metadata Registry if
 * they do not already exist
 *
 * The format of the XML file is as follows:
 *
 * <dspace-dc-types>
 *   <dc-type>
 *     <schema>icadmin</schema>
 *     <element>status</element>
 *     <qualifier>dateset</qualifier>
 *     <scope_note>the workflow status of an item</scope_note>
 *   </dc-type>
 *
 *   [....]
 *
 * </dspace-dc-types>
 */
public class MetadataImporter
{
    /**
     * main method for reading user input from the command line
     */
    public static void main(String[] args)
        throws ParseException, SQLException, IOException, TransformerException,
        ParserConfigurationException, AuthorizeException, SAXException,
        NonUniqueMetadataException, RegistryImportException
    {
        boolean forceUpdate = false;

        // create an options object and populate it
        CommandLineParser parser = new PosixParser();
        Options options = new Options();
        options.addOption("f", "file", true, "source xml file for DC fields");
        options.addOption("u", "update", false, "update an existing schema");
        CommandLine line = parser.parse(options, args);

        String file = null;
        if (line.hasOption('f'))
        {
            file = line.getOptionValue('f');
        }
        else
        {
            usage();
            System.exit(0);
        }

        forceUpdate = line.hasOption('u');
        loadRegistry(file, forceUpdate);
    }

    /**
     * Load the data from the specified file path into the database.
     * Schemas are processed before fields so that field imports can
     * resolve their schema. On success the context is committed; on any
     * failure it is aborted so the database connection is not leaked.
     *
     * @param file the file path containing the source data
     * @param forceUpdate if true, an existing schema whose namespace
     *        differs from the XML is updated instead of aborting
     */
    public static void loadRegistry(String file, boolean forceUpdate)
        throws SQLException, IOException, TransformerException,
        ParserConfigurationException, AuthorizeException, SAXException,
        NonUniqueMetadataException, RegistryImportException
    {
        // create a context
        Context context = new Context();
        boolean success = false;
        try
        {
            context.setIgnoreAuthorization(true);

            // read the XML
            Document document = RegistryImporter.loadXML(file);

            // Get the nodes corresponding to schemas
            NodeList schemaNodes = XPathAPI.selectNodeList(document,
                    "/dspace-dc-types/dc-schema");

            // Add each one as a new schema to the registry
            for (int i = 0; i < schemaNodes.getLength(); i++)
            {
                Node n = schemaNodes.item(i);
                loadSchema(context, n, forceUpdate);
            }

            // Get the nodes corresponding to types
            NodeList typeNodes = XPathAPI.selectNodeList(document,
                    "/dspace-dc-types/dc-type");

            // Add each one as a new field to the registry
            for (int i = 0; i < typeNodes.getLength(); i++)
            {
                Node n = typeNodes.item(i);
                loadType(context, n);
            }

            context.complete();
            success = true;
        }
        finally
        {
            // The original code never aborted on failure, leaking the
            // context's database connection whenever an import step threw.
            if (!success)
            {
                context.abort();
            }
        }
    }

    /**
     * Process a node in the metadata registry XML file. If the
     * schema already exists, it will not be recreated
     *
     * @param context
     *            DSpace context object
     * @param node
     *            the node in the DOM tree
     * @param updateExisting
     *            if true, update the namespace of an existing schema;
     *            otherwise abort on a namespace mismatch
     * @throws NonUniqueMetadataException
     */
    private static void loadSchema(Context context, Node node, boolean updateExisting)
        throws SQLException, IOException, TransformerException,
        AuthorizeException, NonUniqueMetadataException, RegistryImportException
    {
        // Get the values
        String name = RegistryImporter.getElementData(node, "name");
        String namespace = RegistryImporter.getElementData(node, "namespace");

        if (name == null || "".equals(name))
        {
            throw new RegistryImportException("Name of schema must be supplied");
        }

        if (namespace == null || "".equals(namespace))
        {
            throw new RegistryImportException("Namespace of schema must be supplied");
        }

        System.out.print("Registering Schema: " + name + " - " + namespace + " ... ");

        // check to see if the schema already exists
        MetadataSchema s = MetadataSchema.find(context, name);

        if (s == null)
        {
            // Schema does not exist - create
            MetadataSchema schema = new MetadataSchema(namespace, name);
            schema.create(context);
            System.out.println("created");
        }
        else
        {
            // Schema exists - if it's the same namespace, allow the type imports to continue
            if (s.getNamespace().equals(namespace))
            {
                System.out.println("already exists, skipping to type import");
                return;
            }

            // It's a different namespace - have we been told to update?
            if (updateExisting)
            {
                // Update the existing schema namespace and continue to type import
                s.setNamespace(namespace);
                s.update(context);
                System.out.println("namespace updated (" + name + " = " + namespace + ")");
            }
            else
            {
                // Don't update the existing namespace - abort abort abort
                System.out.println("schema exists, but with different namespace");
                System.out.println("was: " + s.getNamespace());
                System.out.println("xml: " + namespace);
                System.out.println("aborting - use -u to force the update");
                throw new RegistryImportException(
                        "schema already registered with different namespace - use -u to update");
            }
        }
    }

    /**
     * Process a node in the metadata registry XML file. The node must
     * be a "dc-type" node. If the type already exists, then it
     * will not be reimported
     *
     * @param context
     *            DSpace context object
     * @param node
     *            the node in the DOM tree
     * @throws NonUniqueMetadataException
     */
    private static void loadType(Context context, Node node)
        throws SQLException, IOException, TransformerException,
        AuthorizeException, NonUniqueMetadataException, RegistryImportException
    {
        // Get the values
        String schema = RegistryImporter.getElementData(node, "schema");
        String element = RegistryImporter.getElementData(node, "element");
        String qualifier = RegistryImporter.getElementData(node, "qualifier");
        String scopeNote = RegistryImporter.getElementData(node, "scope_note");

        // If the schema is not provided default to DC
        if (schema == null)
        {
            schema = MetadataSchema.DC_SCHEMA;
        }

        // Don't print "schema.element.null" for unqualified fields
        String fieldName = schema + "." + element
                + (qualifier == null ? "" : "." + qualifier);
        System.out.print("Registering Metadata: " + fieldName + " ... ");

        // Find the matching schema object
        MetadataSchema schemaObj = MetadataSchema.find(context, schema);

        if (schemaObj == null)
        {
            throw new RegistryImportException("Schema '" + schema + "' is not registered");
        }

        MetadataField mf = MetadataField.findByElement(context,
                schemaObj.getSchemaID(), element, qualifier);
        if (mf != null)
        {
            System.out.println("already exists, skipping");
            return;
        }

        MetadataField field = new MetadataField();
        field.setSchemaID(schemaObj.getSchemaID());
        field.setElement(element);
        field.setQualifier(qualifier);
        field.setScopeNote(scopeNote);
        field.create(context);
        System.out.println("created");
    }

    /**
     * Print the usage message to stdout
     */
    public static void usage()
    {
        String usage = "Use this class with the following option:\n" +
                " -f <xml source file> : specify which xml source file " +
                "contains the DC fields to import.\n";
        System.out.println(usage);
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.administer; import org.dspace.content.DCDate; import org.dspace.core.Context; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRowIterator; /** * A command-line tool for performing necessary tweaks in the database for the * new last_modified column in the item table. * * @author Robert Tansley * @version $Revision: 5844 $ */ public class Upgrade101To11 { /** * For invoking via the command line * * @param argv * command-line arguments */ public static void main(String[] argv) { Context context = null; try { context = new Context(); // Deal with withdrawn items first. // last_modified takes the value of the deletion date TableRowIterator tri = DatabaseManager.queryTable(context, "item", "SELECT * FROM item WHERE withdrawal_date IS NOT NULL"); while (tri.hasNext()) { TableRow row = tri.next(); DCDate d = new DCDate(row.getStringColumn("withdrawal_date")); row.setColumn("last_modified", d.toDate()); DatabaseManager.update(context, row); } tri.close(); // Next, update those items with a date.available tri = DatabaseManager.query(context, "SELECT item.item_id, dcvalue.text_value FROM item, dctyperegistry, "+ "dcvalue WHERE item.item_id=dcvalue.item_id AND dcvalue.dc_type_id="+ "dctyperegistry.dc_type_id AND dctyperegistry.element LIKE 'date' "+ "AND dctyperegistry.qualifier LIKE 'available'"); while (tri.hasNext()) { TableRow resultRow = tri.next(); DCDate d = new DCDate(resultRow.getStringColumn("text_value")); // Can't update the row, have to do a separate query TableRow itemRow = DatabaseManager.find(context, "item", resultRow.getIntColumn("item_id")); itemRow.setColumn("last_modified", d.toDate()); DatabaseManager.update(context, itemRow); } tri.close(); // 
Finally, for all items that have no date.available or withdrawal // date, set the update time to now! DatabaseManager.updateQuery(context, "UPDATE item SET last_modified=now() WHERE last_modified IS NULL"); context.complete(); System.out.println("Last modified dates set"); System.exit(0); } catch (Exception e) { System.err.println("Exception occurred:" + e); e.printStackTrace(); if (context != null) { context.abort(); } System.exit(1); } } }
Java
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.administer;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Locale;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;

/**
 * A command-line tool for creating an initial administrator for setting up a
 * DSpace site. Prompts for an e-mail address, last name, first name and
 * password from standard input. An administrator group is then created and the
 * data passed in used to create an e-person in that group.
 * <P>
 * Alternatively, it can be used to take the email, first name, last name and
 * desired password as arguments thus:
 *
 * CreateAdministrator -e [email] -f [first name] -l [last name] -p [password]
 *
 * This is particularly convenient for automated deploy scripts that require an
 * initial administrator, for example, before deployment can be completed
 *
 * @author Robert Tansley
 * @author Richard Jones
 *
 * @version $Revision: 5844 $
 */
public final class CreateAdministrator
{
    /** DSpace Context object, created by the constructor */
    private Context context;

    /**
     * For invoking via the command line. If called with no command line
     * arguments, it will negotiate with the user for the administrator
     * details; the interactive path is also taken if any of the five
     * options (-e, -f, -l, -c, -p) is missing.
     *
     * @param argv
     *            command-line arguments
     */
    public static void main(String[] argv)
        throws Exception
    {
        CommandLineParser parser = new PosixParser();
        Options options = new Options();

        CreateAdministrator ca = new CreateAdministrator();

        options.addOption("e", "email", true, "administrator email address");
        options.addOption("f", "first", true, "administrator first name");
        options.addOption("l", "last", true, "administrator last name");
        options.addOption("c", "language", true, "administrator language");
        options.addOption("p", "password", true, "administrator password");

        CommandLine line = parser.parse(options, argv);

        // All five options must be present for the non-interactive path
        if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
                line.hasOption("c") && line.hasOption("p"))
        {
            ca.createAdministrator(line.getOptionValue("e"),
                    line.getOptionValue("f"), line.getOptionValue("l"),
                    line.getOptionValue("c"), line.getOptionValue("p"));
        }
        else
        {
            ca.negotiateAdministratorDetails();
        }
    }

    /**
     * constructor, which just creates and object with a ready context
     *
     * @throws Exception
     */
    private CreateAdministrator()
        throws Exception
    {
        context = new Context();
    }

    /**
     * Method which will negotiate with the user via the command line to
     * obtain the administrator's details. Loops until the user confirms
     * the data and the two password entries match; each free-text answer
     * is trimmed of surrounding whitespace.
     *
     * @throws Exception
     */
    private void negotiateAdministratorDetails()
        throws Exception
    {
        // For easier reading of typing
        BufferedReader input = new BufferedReader(new InputStreamReader(System.in));

        System.out.println("Creating an initial administrator account");

        boolean dataOK = false;

        String email = null;
        String firstName = null;
        String lastName = null;
        String password1 = null;
        String password2 = null;
        // default language comes from the default locale
        String language = I18nUtil.DEFAULTLOCALE.getLanguage();

        while (!dataOK)
        {
            System.out.print("E-mail address: ");
            System.out.flush();

            email = input.readLine();

            if (email != null)
            {
                email = email.trim();
            }

            System.out.print("First name: ");
            System.out.flush();

            firstName = input.readLine();

            if (firstName != null)
            {
                firstName = firstName.trim();
            }

            System.out.print("Last name: ");
            System.out.flush();

            lastName = input.readLine();

            if (lastName != null)
            {
                lastName = lastName.trim();
            }

            // only prompt for a language when the site declares supported locales
            if (ConfigurationManager.getProperty("webui.supported.locales") != null)
            {
                System.out.println("Select one of the following languages: "
                        + ConfigurationManager.getProperty("webui.supported.locales"));
                System.out.print("Language: ");
                System.out.flush();

                language = input.readLine();

                if (language != null)
                {
                    language = language.trim();
                    // map the raw answer onto a supported locale's language code
                    language = I18nUtil.getSupportedLocale(new Locale(language)).getLanguage();
                }
            }

            // passwords are read from stdin, so they echo on screen
            System.out.println("WARNING: Password will appear on-screen.");
            System.out.print("Password: ");
            System.out.flush();

            password1 = input.readLine();

            if (password1 != null)
            {
                password1 = password1.trim();
            }

            System.out.print("Again to confirm: ");
            System.out.flush();

            password2 = input.readLine();

            if (password2 != null)
            {
                password2 = password2.trim();
            }

            // accept only a non-empty password typed identically twice
            if (!StringUtils.isEmpty(password1) && StringUtils.equals(password1, password2))
            {
                // password OK
                System.out.print("Is the above data correct? (y or n): ");
                System.out.flush();

                String s = input.readLine();

                if (s != null)
                {
                    s = s.trim();
                    if (s.toLowerCase().startsWith("y"))
                    {
                        dataOK = true;
                    }
                }
            }
            else
            {
                System.out.println("Passwords don't match");
            }
        }

        // if we make it to here, we are ready to create an administrator
        createAdministrator(email, firstName, lastName, language, password1);
    }

    /**
     * Create the administrator with the given details. If the user
     * already exists then they are simply upped to administrator status
     *
     * @param email the email for the user
     * @param first user's first name
     * @param last user's last name
     * @param language the user's language code
     * @param pw desired password
     *
     * @throws Exception
     */
    private void createAdministrator(String email, String first, String last,
            String language, String pw)
        throws Exception
    {
        // Of course we aren't an administrator yet so we need to
        // circumvent authorisation
        context.setIgnoreAuthorization(true);

        // Find administrator group (group 1 is assumed to be the admin
        // group -- TODO confirm this site-setup invariant)
        Group admins = Group.find(context, 1);

        if (admins == null)
        {
            throw new IllegalStateException("Error, no admin group (group 1) found");
        }

        // Create the administrator e-person
        EPerson eperson = EPerson.findByEmail(context,email);

        // check if the email belongs to a registered user,
        // if not create a new user with this email
        if (eperson == null)
        {
            eperson = EPerson.create(context);
            eperson.setEmail(email);
            eperson.setCanLogIn(true);
            eperson.setRequireCertificate(false);
            eperson.setSelfRegistered(false);
        }

        // existing users get their name, language and password overwritten
        eperson.setLastName(last);
        eperson.setFirstName(first);
        eperson.setLanguage(language);
        eperson.setPassword(pw);

        eperson.update();

        admins.addMember(eperson);
        admins.update();

        context.complete();

        System.out.println("Administrator account created");
    }
}
Java
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.administer; import java.io.File; import java.io.IOException; import java.sql.SQLException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import org.apache.log4j.Logger; import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.BitstreamFormat; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; import org.dspace.content.NonUniqueMetadataException; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * Loads the bitstream format and Dublin Core type registries into the database. * Intended for use as a command-line tool. 
* <P> * Example usage: * <P> * <code>RegistryLoader -bitstream bitstream-formats.xml</code> * <P> * <code>RegistryLoader -dc dc-types.xml</code> * * @author Robert Tansley * @version $Revision: 5844 $ */ public class RegistryLoader { /** log4j category */ private static Logger log = Logger.getLogger(RegistryLoader.class); /** * For invoking via the command line * * @param argv * command-line arguments */ public static void main(String[] argv) throws Exception { String usage = "Usage: " + RegistryLoader.class.getName() + " (-bitstream | -dc) registry-file.xml"; Context context = null; try { context = new Context(); // Can't update registries anonymously, so we need to turn off // authorisation context.setIgnoreAuthorization(true); // Work out what we're loading if (argv[0].equalsIgnoreCase("-bitstream")) { RegistryLoader.loadBitstreamFormats(context, argv[1]); } else if (argv[0].equalsIgnoreCase("-dc")) { loadDublinCoreTypes(context, argv[1]); } else { System.err.println(usage); } context.complete(); System.exit(0); } catch (ArrayIndexOutOfBoundsException ae) { System.err.println(usage); if (context != null) { context.abort(); } System.exit(1); } catch (Exception e) { log.fatal(LogManager.getHeader(context, "error_loading_registries", ""), e); if (context != null) { context.abort(); } System.err.println("Error: \n - " + e.getMessage()); System.exit(1); } } /** * Load Bitstream Format metadata * * @param context * DSpace context object * @param filename * the filename of the XML file to load */ public static void loadBitstreamFormats(Context context, String filename) throws SQLException, IOException, ParserConfigurationException, SAXException, TransformerException, AuthorizeException { Document document = loadXML(filename); // Get the nodes corresponding to formats NodeList typeNodes = XPathAPI.selectNodeList(document, "dspace-bitstream-types/bitstream-type"); // Add each one as a new format to the registry for (int i = 0; i < typeNodes.getLength(); i++) { Node n = 
typeNodes.item(i); loadFormat(context, n); } log.info(LogManager.getHeader(context, "load_bitstream_formats", "number_loaded=" + typeNodes.getLength())); } /** * Process a node in the bitstream format registry XML file. The node must * be a "bitstream-type" node * * @param context * DSpace context object * @param node * the node in the DOM tree */ private static void loadFormat(Context context, Node node) throws SQLException, IOException, TransformerException, AuthorizeException { // Get the values String mimeType = getElementData(node, "mimetype"); String shortDesc = getElementData(node, "short_description"); String desc = getElementData(node, "description"); String supportLevelString = getElementData(node, "support_level"); int supportLevel = Integer.parseInt(supportLevelString); String internalString = getElementData(node, "internal"); boolean internal = Boolean.valueOf(internalString).booleanValue(); String[] extensions = getRepeatedElementData(node, "extension"); // Create the format object BitstreamFormat format = BitstreamFormat.create(context); // Fill it out with the values format.setMIMEType(mimeType); format.setShortDescription(shortDesc); format.setDescription(desc); format.setSupportLevel(supportLevel); format.setInternal(internal); format.setExtensions(extensions); // Write to database format.update(); } /** * Load Dublin Core types * * @param context * DSpace context object * @param filename * the filename of the XML file to load * @throws NonUniqueMetadataException */ public static void loadDublinCoreTypes(Context context, String filename) throws SQLException, IOException, ParserConfigurationException, SAXException, TransformerException, AuthorizeException, NonUniqueMetadataException { Document document = loadXML(filename); // Get the nodes corresponding to schemas NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema"); // Add each schema for (int i = 0; i < schemaNodes.getLength(); i++) { Node n = 
schemaNodes.item(i); loadMDSchema(context, n); } // Get the nodes corresponding to fields NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type"); // Add each one as a new field to the schema for (int i = 0; i < typeNodes.getLength(); i++) { Node n = typeNodes.item(i); loadDCType(context, n); } log.info(LogManager.getHeader(context, "load_dublin_core_types", "number_loaded=" + typeNodes.getLength())); } /** * Load Dublin Core Schemas * * @param context * @param node */ private static void loadMDSchema(Context context, Node node) throws TransformerException, SQLException, AuthorizeException, NonUniqueMetadataException { // Get the values String shortname = getElementData(node, "name"); String namespace = getElementData(node, "namespace"); // Check if the schema exists already MetadataSchema schema = MetadataSchema.find(context, shortname); if (schema == null) { // If not create it. schema = new MetadataSchema(); schema.setNamespace(namespace); schema.setName(shortname); schema.create(context); } } /** * Process a node in the bitstream format registry XML file. 
The node must * be a "bitstream-type" node * * @param context * DSpace context object * @param node * the node in the DOM tree * @throws NonUniqueMetadataException */ private static void loadDCType(Context context, Node node) throws SQLException, IOException, TransformerException, AuthorizeException, NonUniqueMetadataException { // Get the values String schema = getElementData(node, "schema"); String element = getElementData(node, "element"); String qualifier = getElementData(node, "qualifier"); String scopeNote = getElementData(node, "scope_note"); // If the schema is not provided default to DC if (schema == null) { schema = MetadataSchema.DC_SCHEMA; } // Find the matching schema object MetadataSchema schemaObj = MetadataSchema.find(context, schema); MetadataField field = new MetadataField(); field.setSchemaID(schemaObj.getSchemaID()); field.setElement(element); field.setQualifier(qualifier); field.setScopeNote(scopeNote); field.create(context); } // ===================== XML Utility Methods ========================= /** * Load in the XML from file. * * @param filename * the filename to load from * * @return the DOM representation of the XML file */ private static Document loadXML(String filename) throws IOException, ParserConfigurationException, SAXException { DocumentBuilder builder = DocumentBuilderFactory.newInstance() .newDocumentBuilder(); return builder.parse(new File(filename)); } /** * Get the CDATA of a particular element. For example, if the XML document * contains: * <P> * <code> * &lt;foo&gt;&lt;mimetype&gt;application/pdf&lt;/mimetype&gt;&lt;/foo&gt; * </code> * passing this the <code>foo</code> node and <code>mimetype</code> will * return <code>application/pdf</code>. * </P> * Why this isn't a core part of the XML API I do not know... 
* * @param parentElement * the element, whose child element you want the CDATA from * @param childName * the name of the element you want the CDATA from * * @return the CDATA as a <code>String</code> */ private static String getElementData(Node parentElement, String childName) throws TransformerException { // Grab the child node Node childNode = XPathAPI.selectSingleNode(parentElement, childName); if (childNode == null) { // No child node, so no values return null; } // Get the #text Node dataNode = childNode.getFirstChild(); if (dataNode == null) { return null; } // Get the data String value = dataNode.getNodeValue().trim(); return value; } /** * Get repeated CDATA for a particular element. For example, if the XML * document contains: * <P> * <code> * &lt;foo&gt; * &lt;bar&gt;val1&lt;/bar&gt; * &lt;bar&gt;val2&lt;/bar&gt; * &lt;/foo&gt; * </code> * passing this the <code>foo</code> node and <code>bar</code> will * return <code>val1</code> and <code>val2</code>. * </P> * Why this also isn't a core part of the XML API I do not know... * * @param parentElement * the element, whose child element you want the CDATA from * @param childName * the name of the element you want the CDATA from * * @return the CDATA as a <code>String</code> */ private static String[] getRepeatedElementData(Node parentElement, String childName) throws TransformerException { // Grab the child node NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName); String[] data = new String[childNodes.getLength()]; for (int i = 0; i < childNodes.getLength(); i++) { // Get the #text node Node dataNode = childNodes.item(i).getFirstChild(); // Get the data data[i] = dataNode.getNodeValue().trim(); } return data; } }
Java