gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.01.11 at 02:39:34 PM EST // package schemas.docbook; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://docbook.org/ns/docbook}inlinemediaobject"/> * &lt;element ref="{http://docbook.org/ns/docbook}remark"/> * &lt;element ref="{http://docbook.org/ns/docbook}superscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}subscript"/> * &lt;element ref="{http://docbook.org/ns/docbook}xref"/> * &lt;element ref="{http://docbook.org/ns/docbook}link"/> * &lt;element ref="{http://docbook.org/ns/docbook}olink"/> * &lt;element ref="{http://docbook.org/ns/docbook}anchor"/> * &lt;element ref="{http://docbook.org/ns/docbook}biblioref"/> * &lt;element ref="{http://docbook.org/ns/docbook}alt"/> * &lt;element ref="{http://docbook.org/ns/docbook}annotation"/> * &lt;element ref="{http://docbook.org/ns/docbook}indexterm"/> * &lt;element ref="{http://docbook.org/ns/docbook}phrase"/> * &lt;element ref="{http://docbook.org/ns/docbook}replaceable"/> * &lt;element ref="{http://docbook.org/ns/docbook}co"/> * &lt;/choice> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.attributes"/> * &lt;attGroup ref="{http://docbook.org/ns/docbook}db.common.linking.attributes"/> * &lt;attribute name="role" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="class"> * &lt;simpleType> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}token"> * &lt;enumeration value="command"/> * &lt;enumeration value="function"/> * &lt;enumeration value="option"/> * &lt;enumeration value="parameter"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/attribute> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "replaceable") public class Replaceable { @XmlElementRefs({ @XmlElementRef(name = "link", namespace = "http://docbook.org/ns/docbook", type = 
Link.class, required = false), @XmlElementRef(name = "phrase", namespace = "http://docbook.org/ns/docbook", type = Phrase.class, required = false), @XmlElementRef(name = "anchor", namespace = "http://docbook.org/ns/docbook", type = Anchor.class, required = false), @XmlElementRef(name = "alt", namespace = "http://docbook.org/ns/docbook", type = Alt.class, required = false), @XmlElementRef(name = "indexterm", namespace = "http://docbook.org/ns/docbook", type = Indexterm.class, required = false), @XmlElementRef(name = "remark", namespace = "http://docbook.org/ns/docbook", type = Remark.class, required = false), @XmlElementRef(name = "xref", namespace = "http://docbook.org/ns/docbook", type = Xref.class, required = false), @XmlElementRef(name = "co", namespace = "http://docbook.org/ns/docbook", type = Co.class, required = false), @XmlElementRef(name = "inlinemediaobject", namespace = "http://docbook.org/ns/docbook", type = Inlinemediaobject.class, required = false), @XmlElementRef(name = "olink", namespace = "http://docbook.org/ns/docbook", type = Olink.class, required = false), @XmlElementRef(name = "biblioref", namespace = "http://docbook.org/ns/docbook", type = Biblioref.class, required = false), @XmlElementRef(name = "annotation", namespace = "http://docbook.org/ns/docbook", type = Annotation.class, required = false), @XmlElementRef(name = "replaceable", namespace = "http://docbook.org/ns/docbook", type = Replaceable.class, required = false), @XmlElementRef(name = "superscript", namespace = "http://docbook.org/ns/docbook", type = Superscript.class, required = false), @XmlElementRef(name = "subscript", namespace = "http://docbook.org/ns/docbook", type = Subscript.class, required = false) }) @XmlMixed protected List<Object> content; @XmlAttribute(name = "role") @XmlSchemaType(name = "anySimpleType") protected String role; @XmlAttribute(name = "class") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String clazz; @XmlAttribute(name = "id", namespace = 
"http://www.w3.org/XML/1998/namespace") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "version") @XmlSchemaType(name = "anySimpleType") protected String commonVersion; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String xmlLang; @XmlAttribute(name = "base", namespace = "http://www.w3.org/XML/1998/namespace") @XmlSchemaType(name = "anySimpleType") protected String base; @XmlAttribute(name = "remap") @XmlSchemaType(name = "anySimpleType") protected String remap; @XmlAttribute(name = "xreflabel") @XmlSchemaType(name = "anySimpleType") protected String xreflabel; @XmlAttribute(name = "revisionflag") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String revisionflag; @XmlAttribute(name = "dir") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String dir; @XmlAttribute(name = "arch") @XmlSchemaType(name = "anySimpleType") protected String arch; @XmlAttribute(name = "audience") @XmlSchemaType(name = "anySimpleType") protected String audience; @XmlAttribute(name = "condition") @XmlSchemaType(name = "anySimpleType") protected String condition; @XmlAttribute(name = "conformance") @XmlSchemaType(name = "anySimpleType") protected String conformance; @XmlAttribute(name = "os") @XmlSchemaType(name = "anySimpleType") protected String os; @XmlAttribute(name = "revision") @XmlSchemaType(name = "anySimpleType") protected String commonRevision; @XmlAttribute(name = "security") @XmlSchemaType(name = "anySimpleType") protected String security; @XmlAttribute(name = "userlevel") @XmlSchemaType(name = "anySimpleType") protected String userlevel; @XmlAttribute(name = "vendor") @XmlSchemaType(name = "anySimpleType") protected String vendor; @XmlAttribute(name = "wordsize") @XmlSchemaType(name = "anySimpleType") protected String wordsize; @XmlAttribute(name = "annotations") @XmlSchemaType(name = 
"anySimpleType") protected String annotations; @XmlAttribute(name = "linkend") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object linkend; @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; @XmlAttribute(name = "type", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkType; @XmlAttribute(name = "role", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkRole; @XmlAttribute(name = "arcrole", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String arcrole; @XmlAttribute(name = "title", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String xlinkTitle; @XmlAttribute(name = "show", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String show; @XmlAttribute(name = "actuate", namespace = "http://www.w3.org/1999/xlink") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String actuate; /** * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the content property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Link } * {@link Phrase } * {@link Anchor } * {@link Alt } * {@link Indexterm } * {@link Remark } * {@link Xref } * {@link Co } * {@link Inlinemediaobject } * {@link Olink } * {@link Biblioref } * {@link Annotation } * {@link Replaceable } * {@link String } * {@link Superscript } * {@link Subscript } * * */ public List<Object> getContent() { if (content == null) { content = new ArrayList<Object>(); } return this.content; } /** * Gets the value of the role property. * * @return * possible object is * {@link String } * */ public String getRole() { return role; } /** * Sets the value of the role property. * * @param value * allowed object is * {@link String } * */ public void setRole(String value) { this.role = value; } /** * Gets the value of the clazz property. * * @return * possible object is * {@link String } * */ public String getClazz() { return clazz; } /** * Sets the value of the clazz property. * * @param value * allowed object is * {@link String } * */ public void setClazz(String value) { this.clazz = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the commonVersion property. * * @return * possible object is * {@link String } * */ public String getCommonVersion() { return commonVersion; } /** * Sets the value of the commonVersion property. * * @param value * allowed object is * {@link String } * */ public void setCommonVersion(String value) { this.commonVersion = value; } /** * Gets the value of the xmlLang property. 
* * @return * possible object is * {@link String } * */ public String getXmlLang() { return xmlLang; } /** * Sets the value of the xmlLang property. * * @param value * allowed object is * {@link String } * */ public void setXmlLang(String value) { this.xmlLang = value; } /** * Gets the value of the base property. * * @return * possible object is * {@link String } * */ public String getBase() { return base; } /** * Sets the value of the base property. * * @param value * allowed object is * {@link String } * */ public void setBase(String value) { this.base = value; } /** * Gets the value of the remap property. * * @return * possible object is * {@link String } * */ public String getRemap() { return remap; } /** * Sets the value of the remap property. * * @param value * allowed object is * {@link String } * */ public void setRemap(String value) { this.remap = value; } /** * Gets the value of the xreflabel property. * * @return * possible object is * {@link String } * */ public String getXreflabel() { return xreflabel; } /** * Sets the value of the xreflabel property. * * @param value * allowed object is * {@link String } * */ public void setXreflabel(String value) { this.xreflabel = value; } /** * Gets the value of the revisionflag property. * * @return * possible object is * {@link String } * */ public String getRevisionflag() { return revisionflag; } /** * Sets the value of the revisionflag property. * * @param value * allowed object is * {@link String } * */ public void setRevisionflag(String value) { this.revisionflag = value; } /** * Gets the value of the dir property. * * @return * possible object is * {@link String } * */ public String getDir() { return dir; } /** * Sets the value of the dir property. * * @param value * allowed object is * {@link String } * */ public void setDir(String value) { this.dir = value; } /** * Gets the value of the arch property. 
* * @return * possible object is * {@link String } * */ public String getArch() { return arch; } /** * Sets the value of the arch property. * * @param value * allowed object is * {@link String } * */ public void setArch(String value) { this.arch = value; } /** * Gets the value of the audience property. * * @return * possible object is * {@link String } * */ public String getAudience() { return audience; } /** * Sets the value of the audience property. * * @param value * allowed object is * {@link String } * */ public void setAudience(String value) { this.audience = value; } /** * Gets the value of the condition property. * * @return * possible object is * {@link String } * */ public String getCondition() { return condition; } /** * Sets the value of the condition property. * * @param value * allowed object is * {@link String } * */ public void setCondition(String value) { this.condition = value; } /** * Gets the value of the conformance property. * * @return * possible object is * {@link String } * */ public String getConformance() { return conformance; } /** * Sets the value of the conformance property. * * @param value * allowed object is * {@link String } * */ public void setConformance(String value) { this.conformance = value; } /** * Gets the value of the os property. * * @return * possible object is * {@link String } * */ public String getOs() { return os; } /** * Sets the value of the os property. * * @param value * allowed object is * {@link String } * */ public void setOs(String value) { this.os = value; } /** * Gets the value of the commonRevision property. * * @return * possible object is * {@link String } * */ public String getCommonRevision() { return commonRevision; } /** * Sets the value of the commonRevision property. * * @param value * allowed object is * {@link String } * */ public void setCommonRevision(String value) { this.commonRevision = value; } /** * Gets the value of the security property. 
* * @return * possible object is * {@link String } * */ public String getSecurity() { return security; } /** * Sets the value of the security property. * * @param value * allowed object is * {@link String } * */ public void setSecurity(String value) { this.security = value; } /** * Gets the value of the userlevel property. * * @return * possible object is * {@link String } * */ public String getUserlevel() { return userlevel; } /** * Sets the value of the userlevel property. * * @param value * allowed object is * {@link String } * */ public void setUserlevel(String value) { this.userlevel = value; } /** * Gets the value of the vendor property. * * @return * possible object is * {@link String } * */ public String getVendor() { return vendor; } /** * Sets the value of the vendor property. * * @param value * allowed object is * {@link String } * */ public void setVendor(String value) { this.vendor = value; } /** * Gets the value of the wordsize property. * * @return * possible object is * {@link String } * */ public String getWordsize() { return wordsize; } /** * Sets the value of the wordsize property. * * @param value * allowed object is * {@link String } * */ public void setWordsize(String value) { this.wordsize = value; } /** * Gets the value of the annotations property. * * @return * possible object is * {@link String } * */ public String getAnnotations() { return annotations; } /** * Sets the value of the annotations property. * * @param value * allowed object is * {@link String } * */ public void setAnnotations(String value) { this.annotations = value; } /** * Gets the value of the linkend property. * * @return * possible object is * {@link Object } * */ public Object getLinkend() { return linkend; } /** * Sets the value of the linkend property. * * @param value * allowed object is * {@link Object } * */ public void setLinkend(Object value) { this.linkend = value; } /** * Gets the value of the href property. 
* * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the xlinkType property. * * @return * possible object is * {@link String } * */ public String getXlinkType() { return xlinkType; } /** * Sets the value of the xlinkType property. * * @param value * allowed object is * {@link String } * */ public void setXlinkType(String value) { this.xlinkType = value; } /** * Gets the value of the xlinkRole property. * * @return * possible object is * {@link String } * */ public String getXlinkRole() { return xlinkRole; } /** * Sets the value of the xlinkRole property. * * @param value * allowed object is * {@link String } * */ public void setXlinkRole(String value) { this.xlinkRole = value; } /** * Gets the value of the arcrole property. * * @return * possible object is * {@link String } * */ public String getArcrole() { return arcrole; } /** * Sets the value of the arcrole property. * * @param value * allowed object is * {@link String } * */ public void setArcrole(String value) { this.arcrole = value; } /** * Gets the value of the xlinkTitle property. * * @return * possible object is * {@link String } * */ public String getXlinkTitle() { return xlinkTitle; } /** * Sets the value of the xlinkTitle property. * * @param value * allowed object is * {@link String } * */ public void setXlinkTitle(String value) { this.xlinkTitle = value; } /** * Gets the value of the show property. * * @return * possible object is * {@link String } * */ public String getShow() { return show; } /** * Sets the value of the show property. * * @param value * allowed object is * {@link String } * */ public void setShow(String value) { this.show = value; } /** * Gets the value of the actuate property. 
* * @return * possible object is * {@link String } * */ public String getActuate() { return actuate; } /** * Sets the value of the actuate property. * * @param value * allowed object is * {@link String } * */ public void setActuate(String value) { this.actuate = value; } }
package org.sakaiproject.connector.fck;

import java.lang.reflect.Method;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.sakaiproject.assignment.api.AssignmentService;
import org.sakaiproject.assignment.api.Assignment;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.entitybroker.EntityBroker;
import org.sakaiproject.entitybroker.entityprovider.search.Restriction;
import org.sakaiproject.entitybroker.entityprovider.search.Search;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.entitybroker.entityprovider.capabilities.CollectionResolvable;
import org.sakaiproject.entitybroker.entityprovider.extension.ActionReturn;
import org.sakaiproject.entitybroker.entityprovider.extension.EntityData;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.api.SiteService.SortType;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.time.api.TimeBreakdown;
import org.sakaiproject.tool.assessment.services.assessment.PublishedAssessmentService;
import org.sakaiproject.tool.assessment.facade.PublishedAssessmentFacade;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.user.cover.UserDirectoryService;

/**
 * Helper for the FCK editor connector: enumerates link targets in the sites
 * the current user can update — assignments, published Samigo assessments and
 * forum topics — mostly as {@code String[2]} pairs of {title, /direct URL}.
 *
 * <p>Call {@link #init()} once (with a current session) before any other
 * method; it resolves the Sakai services and caches the user's updatable
 * sites. Not thread-safe: intended for per-request use.
 */
public class ConnectorHelper {

    private static final String ASSIGNMENT_TOOL_ID = "sakai.assignment.grades";
    private static final String JFORUMS_TOOL_ID = "sakai.jforum.tool";
    private static final String FORUMS_TOOL_ID = "sakai.forums";

    private static Logger M_log = LoggerFactory.getLogger(ConnectorHelper.class);

    private SiteService siteService = null;
    private AssignmentService assignmentService = null;
    private SecurityService securityService = null;
    private AuthzGroupService authzGroupService = null;
    private EntityBroker entityBroker = null;

    // Sites the current user may update; populated by init().
    private List sites = null;
    private String loggedInUserId = null;
    private String loggedInUserEid = null;
    private boolean userKnown = true;

    // private final String ASSIGNMENT_ENTITY_PREFIX = "site_assignments";
    private final String ASSIGNMENT_ENTITY_PREFIX = "assignment";
    private final String ASSESSMENT_ENTITY_PREFIX = "sam_pub";
    private final String FORUM_TOPIC_ENTITY_PREFIX = "topic";

    /**
     * Resolves the Sakai services from the ComponentManager, caches the list
     * of sites the current user can update, and records the current user id.
     */
    public void init() {
        M_log.info("init ConnectorHelper");
        siteService = (SiteService) ComponentManager.get("org.sakaiproject.site.api.SiteService");
        assignmentService = (AssignmentService) ComponentManager.get("org.sakaiproject.assignment.api.AssignmentService");
        authzGroupService = (AuthzGroupService) ComponentManager.get("org.sakaiproject.authz.api.AuthzGroupService");
        securityService = (SecurityService) ComponentManager.get("org.sakaiproject.authz.api.SecurityService");
        sites = siteService.getSites(org.sakaiproject.site.api.SiteService.SelectionType.UPDATE,
                null, null, null, SortType.TITLE_ASC, null);
        loggedInUserId = SessionManager.getCurrentSession().getUserId();
        entityBroker = (EntityBroker) ComponentManager.get(EntityBroker.class);
    }

    /**
     * Lists open (non-draft, not past their close time) assignments across all
     * of the current user's updatable sites.
     *
     * @return list of {@code String[2]} {description, /direct URL} pairs;
     *         empty when the user is unknown
     */
    public List getAssignments() {
        Vector returnAssignmentList = new Vector();
        if (!userKnown) {
            return new Vector();
        }
        long nowMs = new java.util.Date().getTime();
        Iterator siteIterator = sites.iterator();
        while (siteIterator.hasNext()) {
            Site thisSite = (Site) siteIterator.next();

            // Locate the Assignments tool placement in this site. The id is
            // only needed by the legacy portal-URL scheme kept in the comment
            // below, but the scan is preserved for parity with that scheme.
            String placementId = null;
            boolean assignmentToolFound = false;
            Iterator pageIterator = thisSite.getPages().iterator();
            while (pageIterator.hasNext() && !assignmentToolFound) {
                SitePage thisPage = (SitePage) pageIterator.next();
                Iterator toolIterator = thisPage.getTools().iterator();
                while (toolIterator.hasNext() && !assignmentToolFound) {
                    ToolConfiguration tc = (ToolConfiguration) toolIterator.next();
                    if (tc.getToolId().equalsIgnoreCase(ASSIGNMENT_TOOL_ID)) {
                        assignmentToolFound = true;
                        placementId = tc.getId();
                    }
                }
            }

            if (!securityService.unlock(loggedInUserId, "site.upd", "/site/" + thisSite.getId())) {
                // BUGFIX: this used to 'return', truncating the results for
                // every site after the first failure; skip just this site.
                M_log.debug("Assignment - no show {} is not an instructor.", loggedInUserEid);
                continue;
            }

            Iterator assignmentIterator =
                    assignmentService.getAssignmentsForContext(thisSite.getId(), loggedInUserId);
            while (assignmentIterator.hasNext()) {
                Assignment thisAssignment = (Assignment) assignmentIterator.next();
                M_log.debug("Assignment{}", thisAssignment.getId());

                // BUGFIX: the original comparison was inverted — it marked
                // assignments with a FUTURE close time as closed (skipping the
                // open ones). Closed means the close time has passed; a null
                // close time means no deadline, i.e. open.
                Time closeTime = thisAssignment.getCloseTime();
                boolean assignmentClosed = (closeTime != null) && (closeTime.getTime() < nowMs);

                // BUGFIX: was the non-short-circuit '|' operator.
                if (thisAssignment.getDraft() || assignmentClosed) {
                    continue;
                }

                StringBuffer assignmentUrlBuildBuffer = new StringBuffer();
                /*
                assignmentUrlBuildBuffer.append("/portal/tool/");
                assignmentUrlBuildBuffer.append(placementId+"?");
                assignmentUrlBuildBuffer.append("assignmentReference=/assignment/a/");
                assignmentUrlBuildBuffer.append(thisAssignment.getContext()+"/");
                assignmentUrlBuildBuffer.append(thisAssignment.getId());
                assignmentUrlBuildBuffer.append("&panel=Main&sakai_action=doView_submission");
                */
                assignmentUrlBuildBuffer.append("/direct/assignment/");
                assignmentUrlBuildBuffer.append(thisAssignment.getId());

                String[] thisAssignmentDescriptor = new String[2];
                thisAssignmentDescriptor[0] =
                        "Assignment in site:" + thisSite.getTitle() + " - " + thisAssignment.getTitle();
                thisAssignmentDescriptor[1] = assignmentUrlBuildBuffer.toString();
                M_log.debug("Adding assignment:{}", thisAssignmentDescriptor[1]);
                returnAssignmentList.add(thisAssignmentDescriptor);
            }
        }
        return returnAssignmentList;
    }

    /**
     * Lists the assignments of one site via the EntityBroker "site" custom
     * action, resolving each one's title through the "deepLink" action.
     *
     * @param siteId the site to inspect
     * @return list of {@code String[2]} {title, /direct URL} pairs; empty when
     *         the user is unknown, the site does not exist, the tool is not
     *         placed, or the user lacks site.upd
     */
    public List getSiteAssignments(String siteId) {
        Vector returnAssignmentList = new Vector();
        if (!userKnown) {
            return new Vector();
        }
        try {
            siteService.getSite(siteId); // existence check only
        } catch (IdUnusedException e) {
            return returnAssignmentList;
        }

        boolean assignmentToolFound = false;
        try {
            toolPlacementInSite(siteId, ASSIGNMENT_TOOL_ID);
            assignmentToolFound = true;
        } catch (IdUnusedException e) {
            return returnAssignmentList;
        } catch (Exception e) {
            assignmentToolFound = false;
        }

        if (!securityService.unlock(loggedInUserId, "site.upd", "/site/" + siteId)) {
            M_log.debug("Assignment - no show {} is not an instructor.", loggedInUserEid);
            return returnAssignmentList;
        }

        if (assignmentToolFound) {
            HashMap params = new HashMap();
            ActionReturn ret = entityBroker.executeCustomAction("/assignment/site/" + siteId, "site", params, null);
            Iterator returnedAssignments = ret.getEntitiesList().iterator();
            while (returnedAssignments.hasNext()) {
                EntityData thisEntityData = (EntityData) returnedAssignments.next();
                try {
                    Object a = thisEntityData.getData();
                    // Reflection: the concrete assignment class is not on this
                    // component's classpath. Explicit casts silence the
                    // ambiguous-varargs warnings from passing a bare null.
                    Method m = a.getClass().getMethod("getId", (Class<?>[]) null);
                    String thisAssignmentId = (String) m.invoke(a, (Object[]) null);

                    /*
                    urlBuffer.append(ServerConfigurationService.getPortalUrl());
                    urlBuffer.append("/tool/");
                    urlBuffer.append(placementId);
                    urlBuffer.append("?assignmentReference=/assignment/a/");
                    urlBuffer.append(siteId);
                    urlBuffer.append("/");
                    urlBuffer.append(thisAssignmentId);
                    urlBuffer.append("&panel=Main&sakai_action=doView_submission_evap");
                    */
                    String thisAssignmentUrl = "/direct/assignment/" + thisAssignmentId;

                    ActionReturn dlReturn = entityBroker.executeCustomAction(
                            "/assignment/deepLink/" + siteId + "/" + thisAssignmentId, "deepLink", params, null);
                    HashMap thisDLReferenceData = (HashMap) dlReturn.getEntityData().getData();

                    String[] thisAssignmentDescriptor = new String[2];
                    thisAssignmentDescriptor[0] = (String) thisDLReferenceData.get("assignmentTitle");
                    thisAssignmentDescriptor[1] = thisAssignmentUrl;
                    returnAssignmentList.add(thisAssignmentDescriptor);
                } catch (Exception e) {
                    // BUGFIX: this exception used to be swallowed silently
                    // (only e.getMessage() was read into a dead local).
                    M_log.warn("Unable to resolve an assignment link in site " + siteId, e);
                }
            }
        }
        return returnAssignmentList;
    }

    /**
     * Lists the published Samigo assessments of a site owned by the current
     * user, via the EntityBroker "sam_pub" entity provider.
     *
     * @param siteId the site to inspect
     * @return list of {@code String[2]} {title, /direct URL} pairs; empty when
     *         the user is unknown, the site does not exist, or the user lacks
     *         site.upd
     */
    public List getPublishedAssements(String siteId) {
        if (!userKnown) {
            return new Vector();
        }
        try {
            siteService.getSite(siteId); // existence check only
        } catch (IdUnusedException e) {
            return new Vector();
        }
        if (!securityService.unlock(loggedInUserId, "site.upd", "/site/" + siteId)) {
            M_log.debug("Assignment - no show {} is not an instructor.", loggedInUserEid);
            return new Vector();
        }

        Search s1 = new Search(new Restriction("userId", loggedInUserId, Restriction.EQUALS));
        s1.addRestriction(new Restriction("context", siteId, Restriction.EQUALS));
        List entitiesObj = entityBroker.fetchEntities(ASSESSMENT_ENTITY_PREFIX, s1, new HashMap());

        Vector returnAssessmentList = new Vector();
        Iterator entitiesObjIterator = entitiesObj.iterator();
        while (entitiesObjIterator.hasNext()) {
            PublishedAssessmentFacade facade = (PublishedAssessmentFacade) entitiesObjIterator.next();
            String[] thisAssessment = new String[2];
            thisAssessment[0] = facade.getTitle();
            thisAssessment[1] = "/direct/" + ASSESSMENT_ENTITY_PREFIX + "/" + facade.getPublishedAssessmentId();
            returnAssessmentList.add(thisAssessment);
        }
        return returnAssessmentList;
    }

    /**
     * Lists the forum topic entity references of a site for the current user.
     *
     * @param siteId the site to inspect
     * @return topic references from the "topic" entity provider; never null —
     *         empty when the Forums tool is not placed in the site
     */
    public List getForumTopicReferences(String siteId) {
        List forumTopicReferences = new Vector();
        boolean forumToolFound = false;
        try {
            toolPlacementInSite(siteId, FORUMS_TOOL_ID);
            forumToolFound = true;
        } catch (IdUnusedException e) {
            return forumTopicReferences;
        } catch (Exception e) {
            forumToolFound = false;
        }
        if (!forumToolFound) {
            // BUGFIX: this path used to return null (the uninitialized
            // entitiesObj) instead of an empty list.
            return forumTopicReferences;
        }

        Search s1 = new Search(new Restriction("userId", loggedInUserId, Restriction.EQUALS));
        s1.addRestriction(new Restriction(CollectionResolvable.SEARCH_LOCATION_REFERENCE, "/site/" + siteId));
        List entitiesObj = entityBroker.getEntities(FORUM_TOPIC_ENTITY_PREFIX, s1, new HashMap());
        return (entitiesObj != null) ? entitiesObj : forumTopicReferences;
    }

    /**
     * Finds the placement id of the first occurrence of a tool in a site.
     *
     * @param siteId the site to scan
     * @param toolId the registered tool id (matched case-insensitively)
     * @return the tool placement id
     * @throws IdUnusedException if the site does not exist
     * @throws Exception if the tool is not placed anywhere in the site
     */
    private String toolPlacementInSite(String siteId, String toolId) throws Exception {
        // getSite throws IdUnusedException directly; the original wrapped it
        // in a catch-and-rethrow that added nothing.
        Site thisSite = siteService.getSite(siteId);
        Iterator pageIterator = thisSite.getPages().iterator();
        while (pageIterator.hasNext()) {
            SitePage thisPage = (SitePage) pageIterator.next();
            Iterator toolIterator = thisPage.getTools().iterator();
            while (toolIterator.hasNext()) {
                ToolConfiguration tc = (ToolConfiguration) toolIterator.next();
                if (tc.getToolId().equalsIgnoreCase(toolId)) {
                    return tc.getId();
                }
            }
        }
        throw new Exception("tool not placed in:" + siteId);
    }
}
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.hadoopjobexecutor; import java.io.FileDescriptor; import java.net.InetAddress; import java.security.Permission; import java.util.Collections; import java.util.HashSet; import java.util.Set; import org.pentaho.di.i18n.BaseMessages; /** * A security manager that prevents JVM halts (e.g. {@link Runtime#exit(int)}). */ public class NoExitSecurityManager extends SecurityManager { private SecurityManager decorated; private Set<Thread> blockedThreads; /** * Indicates an error occurred while using the{@link NoExitSecurityManager}. */ public static class NoExitSecurityException extends SecurityException { private int status; public NoExitSecurityException(int status, String message) { super(message); this.status = status; } public int getStatus() { return status; } } public NoExitSecurityManager(SecurityManager decorated) { this.decorated = decorated; blockedThreads = Collections.synchronizedSet(new HashSet<Thread>()); } /** * Add a thread that should be prevented from calling {@link Runtime#exit(int)}. 
* * @param t Thread to prevent exiting the JVM */ public void addBlockedThread(Thread t) { blockedThreads.add(t); } @Override public void checkExit(int status) { if (decorated != null) { decorated.checkExit(status); } if (blockedThreads.contains(Thread.currentThread())) { throw new NoExitSecurityException(status, BaseMessages.getString(getClass(), "NoSystemExit")); } } @Override public void checkExec(String cmd) { if (decorated != null) { decorated.checkExec(cmd); } } @Override public void checkLink(String lib) { if (decorated != null) { decorated.checkLink(lib); } } @Override public void checkRead(FileDescriptor fd) { if (decorated != null) { decorated.checkRead(fd); } } @Override public void checkRead(String file) { if (decorated != null) { decorated.checkRead(file); } } @Override public void checkRead(String file, Object context) { if (decorated != null) { decorated.checkRead(file, context); } } @Override public void checkWrite(FileDescriptor fd) { if (decorated != null) { decorated.checkWrite(fd); } } @Override public void checkWrite(String file) { if (decorated != null) { decorated.checkWrite(file); } } @Override public void checkDelete(String file) { if (decorated != null) { decorated.checkDelete(file); } } @Override public void checkConnect(String host, int port) { if (decorated != null) { decorated.checkConnect(host, port); } } @Override public void checkConnect(String host, int port, Object context) { if (decorated != null) { decorated.checkConnect(host, port, context); } } @Override public void checkListen(int port) { if (decorated != null) { decorated.checkListen(port); } } @Override public void checkAccept(String host, int port) { if (decorated != null) { decorated.checkAccept(host, port); } } @Override public void checkMulticast(InetAddress maddr) { if (decorated != null) { decorated.checkMulticast(maddr); } } @Override @Deprecated public void checkMulticast(InetAddress maddr, byte ttl) { if (decorated != null) { decorated.checkMulticast(maddr, ttl); } 
} @Override public void checkPropertiesAccess() { if (decorated != null) { decorated.checkPropertiesAccess(); } } @Override public void checkPropertyAccess(String key) { if (decorated != null) { decorated.checkPropertyAccess(key); } } @Override public boolean checkTopLevelWindow(Object window) { if (decorated != null) { return decorated.checkTopLevelWindow(window); } return true; // Everything is allowed to put up top-level windows } @Override public void checkPrintJobAccess() { if (decorated != null) { decorated.checkPrintJobAccess(); } } @Override public void checkSystemClipboardAccess() { if (decorated != null) { decorated.checkSystemClipboardAccess(); } } @Override public void checkAwtEventQueueAccess() { if (decorated != null) { decorated.checkAwtEventQueueAccess(); } } @Override public void checkPackageAccess(String pkg) { if (decorated != null) { decorated.checkPackageAccess(pkg); } } @Override public void checkPackageDefinition(String pkg) { if (decorated != null) { decorated.checkPackageDefinition(pkg); } } @Override public void checkSetFactory() { if (decorated != null) { decorated.checkSetFactory(); } } @Override public void checkMemberAccess(Class<?> clazz, int which) { if (decorated != null) { decorated.checkMemberAccess(clazz, which); } } @Override public void checkSecurityAccess(String target) { if (decorated != null) { decorated.checkSecurityAccess(target); } } @Override public void checkPermission(Permission perm) { if (decorated != null) { decorated.checkPermission(perm); } } @Override public void checkPermission(Permission perm, Object context) { if (decorated != null) { decorated.checkPermission(perm, context); } } @Override public void checkCreateClassLoader() { if (decorated != null) { decorated.checkCreateClassLoader(); } } @Override public void checkAccess(Thread t) { if (decorated != null) { decorated.checkAccess(t); } } @Override public void checkAccess(ThreadGroup g) { if (decorated != null) { decorated.checkAccess(g); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.sql.translator; import com.google.common.base.Preconditions; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.calcite.DataContext; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.validate.SqlUserDefinedFunction; import org.apache.samza.SamzaException; import org.apache.samza.context.ContainerContext; import org.apache.samza.context.Context; import org.apache.samza.metrics.Counter; import org.apache.samza.metrics.MetricsRegistry; import org.apache.samza.metrics.SamzaHistogram; import org.apache.samza.operators.MessageStream; import org.apache.samza.operators.functions.MapFunction; import org.apache.samza.sql.SamzaSqlRelRecord; import org.apache.samza.sql.data.Expression; import org.apache.samza.sql.data.SamzaSqlExecutionContext; import org.apache.samza.sql.data.SamzaSqlRelMessage; import org.apache.samza.sql.data.SamzaSqlRelMsgMetadata; import 
org.apache.samza.sql.runner.SamzaSqlApplicationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Translator to translate the Project node in the relational graph to the corresponding StreamGraph * implementation. */ public class ProjectTranslator { private static final Logger LOG = LoggerFactory.getLogger(ProjectTranslator.class); //private transient int messageIndex = 0; private final int queryId; ProjectTranslator(int queryId) { this.queryId = queryId; } /** * Converts the resulting row from Calcite Expression Evaluator to SamzaRelRecord to be sent downstream. * * @param objects input objects to be converted * @param rowType Calcite row type of the resulting row * @return return a valid message Stream of type SamzaSqlRelRecord */ public static SamzaSqlRelRecord buildSamzaRelRecord(Object[] objects, RelDataType rowType) { Preconditions.checkNotNull(objects, "Input objects can not be null"); Preconditions.checkState(rowType.isStruct(), "Row Type has to be a Struct and got " + rowType.getSqlTypeName()); Preconditions.checkState(objects.length == rowType.getFieldCount(), "Objects counts and type counts must match " + objects.length + " vs " + rowType.getFieldCount()); List<String> names = new ArrayList<>(rowType.getFieldNames()); List<Object> values = new ArrayList<>(rowType.getFieldCount()); for (int i = 0; i < objects.length; i++) { Object val = objects[i]; if (val == null) { values.add(null); continue; } final RelDataType valueType = rowType.getFieldList().get(i).getType(); values.add(convertToSamzaSqlType(val, valueType)); } return new SamzaSqlRelRecord(names, values); } /** * Recursively converts a Primitive Java Object to valid Samza Rel Record field type. * * @param value input value to be converted * @param dataType value type as derived by Calcite * @return SamzaRelRecord or primitive SamzaRelRecord field. 
* */ private static Object convertToSamzaSqlType(Object value, RelDataType dataType) { if (value == null) { return null; } switch (dataType.getSqlTypeName()) { case ROW: List<String> names = new ArrayList<>(dataType.getFieldNames()); // Row Struct is represent as Object array in Calcite. Object[] row = (Object[]) value; List<Object> values = new ArrayList<>(row.length); for (int i = 0; i < row.length; i++) { values.add(convertToSamzaSqlType(row[i], dataType.getFieldList().get(i).getType())); } return new SamzaSqlRelRecord(names, values); case MAP: Map<Object, Object> objectMap = (Map<Object, Object>) value; Map<Object, Object> resultMap = new HashMap<>(); final RelDataType valuesType = dataType.getValueType(); objectMap.forEach((key, v) -> resultMap.put(key, convertToSamzaSqlType(v, valuesType))); return resultMap; case ARRAY: List<Object> objectList = (List<Object>) value; final RelDataType elementsType = dataType.getComponentType(); return objectList.stream().map(e -> convertToSamzaSqlType(e, elementsType)).collect(Collectors.toList()); case BOOLEAN: case BIGINT: case BINARY: case INTEGER: case TINYINT: case DOUBLE: case FLOAT: case REAL: case VARCHAR: case CHAR: case VARBINARY: case ANY: case OTHER: // today we treat everything else as Type Any or Other, this is not ideal. // this will change when adding timestamps support or more complex non java primitive types. // TODO in a better world we need to add type factory that can do the conversion between calcite and samza. return value; default: // As of today we treat everything else as type ANY throw new IllegalStateException("Unknown SQL type " + dataType.getSqlTypeName()); } } /** * Converts the Samza Record to a Java Primitive Row format that's in convention with Calcite Enum operators. * * @param samzaSqlRelRecord input record. 
* @return row of Java Primitive conform to org.apache.calcite.adapter.enumerable.JavaRowFormat#ARRAY */ public static Object[] convertToJavaRow(SamzaSqlRelRecord samzaSqlRelRecord) { if (samzaSqlRelRecord == null) { return null; } Object[] inputRow = new Object[samzaSqlRelRecord.getFieldValues().size()]; for (int i = 0; i < inputRow.length; i++) { inputRow[i] = asPrimitiveJavaRow(samzaSqlRelRecord.getFieldValues().get(i)); } return inputRow; } private static Object asPrimitiveJavaRow(Object inputObject) { if (inputObject == null) { return null; } if (inputObject instanceof SamzaSqlRelRecord) { return convertToJavaRow((SamzaSqlRelRecord) inputObject); } if (inputObject instanceof List) { return ((List) inputObject).stream().map(e -> asPrimitiveJavaRow(e)).collect(Collectors.toList()); } if (inputObject instanceof Map) { Map<Object, Object> objectMap = new HashMap<>(); ((Map<Object, Object>) inputObject).forEach((k, v) -> objectMap.put(k, asPrimitiveJavaRow(v))); return objectMap; } return inputObject; } /** * ProjectMapFunction implements MapFunction to map input SamzaSqlRelMessages, one at a time, to a new * SamzaSqlRelMessage which consists of the projected fields */ private static class ProjectMapFunction implements MapFunction<SamzaSqlRelMessage, SamzaSqlRelMessage> { private transient Project project; private transient Expression expr; private transient TranslatorContext translatorContext; private transient MetricsRegistry metricsRegistry; private transient SamzaHistogram processingTime; // milli-seconds private transient Counter inputEvents; private transient Counter outputEvents; private final int queryId; private final int projectId; private final String logicalOpId; private Context context; ProjectMapFunction(int projectId, int queryId, String logicalOpId) { this.projectId = projectId; this.queryId = queryId; this.logicalOpId = logicalOpId; } /** * initializes the ProjectMapFunction before any message is processed * @param context the {@link Context} for 
this task */ @Override public void init(Context context) { this.context = context; this.translatorContext = ((SamzaSqlApplicationContext) context.getApplicationTaskContext()).getTranslatorContexts().get(queryId); this.project = (Project) this.translatorContext.getRelNode(projectId); LOG.info("Compiling operator {} ", project.getDigest()); this.expr = this.translatorContext.getExpressionCompiler().compile(project.getInputs(), project.getProjects()); ContainerContext containerContext = context.getContainerContext(); metricsRegistry = containerContext.getContainerMetricsRegistry(); processingTime = new SamzaHistogram(metricsRegistry, logicalOpId, TranslatorConstants.PROCESSING_TIME_NAME); inputEvents = metricsRegistry.newCounter(logicalOpId, TranslatorConstants.INPUT_EVENTS_NAME); inputEvents.clear(); outputEvents = metricsRegistry.newCounter(logicalOpId, TranslatorConstants.OUTPUT_EVENTS_NAME); outputEvents.clear(); } /** * transforms the input message into the output message with projected fields * @param message the input message to be transformed * @return the new SamzaSqlRelMessage message */ @Override public SamzaSqlRelMessage apply(SamzaSqlRelMessage message) { long arrivalTime = System.nanoTime(); RelDataType type = project.getRowType(); Object[] output = new Object[type.getFieldCount()]; Object[] inputRow = convertToJavaRow(message.getSamzaSqlRelRecord()); SamzaSqlExecutionContext execContext = translatorContext.getExecutionContext(); DataContext dataRootContext = translatorContext.getDataContext(); try { expr.execute(execContext, context, dataRootContext, inputRow, output); } catch (Exception e) { String errMsg = String.format("Handling the following rel message ran into an error. 
%s", message); LOG.error(errMsg, e); throw new SamzaException(errMsg, e); } SamzaSqlRelRecord record = buildSamzaRelRecord(output, project.getRowType()); updateMetrics(arrivalTime, System.nanoTime(), message.getSamzaSqlRelMsgMetadata().isNewInputMessage); return new SamzaSqlRelMessage(record, message.getSamzaSqlRelMsgMetadata()); } /** * Updates the Diagnostics Metrics (processing time and number of events) * @param arrivalTime input message arrival time (= beging of processing in this operator) * @param outputTime output message output time (=end of processing in this operator) * @param isNewInputMessage whether the input Message is from new input message or not */ private void updateMetrics(long arrivalTime, long outputTime, boolean isNewInputMessage) { if (isNewInputMessage) { inputEvents.inc(); } outputEvents.inc(); processingTime.update(outputTime - arrivalTime); } } private MessageStream<SamzaSqlRelMessage> translateFlatten(Integer flattenIndex, MessageStream<SamzaSqlRelMessage> inputStream) { return inputStream.flatMap(message -> { Object targetFlattenColumn = message.getSamzaSqlRelRecord().getFieldValues().get(flattenIndex); final List<SamzaSqlRelMessage> outMessages = new ArrayList<>(); if (targetFlattenColumn != null && targetFlattenColumn instanceof List) { List<Object> objectList = (List<Object>) targetFlattenColumn; SamzaSqlRelMsgMetadata messageMetadata = message.getSamzaSqlRelMsgMetadata(); SamzaSqlRelMsgMetadata newMetadata = new SamzaSqlRelMsgMetadata(messageMetadata.getEventTime(), messageMetadata.getArrivalTime(), messageMetadata.getScanTimeNanos(), messageMetadata.getScanTimeMillis()); for (Object fieldValue : objectList) { List<Object> newValues = new ArrayList<>(message.getSamzaSqlRelRecord().getFieldValues()); newValues.set(flattenIndex, fieldValue); outMessages.add( new SamzaSqlRelMessage(message.getSamzaSqlRelRecord().getFieldNames(), newValues, newMetadata)); newMetadata = new SamzaSqlRelMsgMetadata(newMetadata.getEventTime(), 
newMetadata.getArrivalTime(), newMetadata.getScanTimeNanos(), newMetadata.getScanTimeMillis()); } } else { message.getSamzaSqlRelMsgMetadata().isNewInputMessage = true; outMessages.add(message); } return outMessages; }); } private boolean isFlatten(RexNode rexNode) { return rexNode instanceof RexCall && ((RexCall) rexNode).op instanceof SqlUserDefinedFunction && ((RexCall) rexNode).op.getName().equalsIgnoreCase("flatten"); } void translate(final Project project, final String logicalOpId, final TranslatorContext context) { MessageStream<SamzaSqlRelMessage> messageStream = context.getMessageStream(project.getInput().getId()); final int projectId = project.getId(); MessageStream<SamzaSqlRelMessage> outputStream = messageStream.map(new ProjectMapFunction(projectId, queryId, logicalOpId)); List<RexNode> projects = project.getProjects(); List<Integer> flattenProjects = IntStream.range(0, projects.size()) .filter(i -> this.isFlatten(projects.get(i))) .boxed() .collect(Collectors.toList()); if (flattenProjects.size() > 0) { if (flattenProjects.size() > 1) { String msg = "Multiple flatten operators in a single query is not supported"; LOG.error(msg); throw new SamzaException(msg); } outputStream = translateFlatten(flattenProjects.get(0), outputStream); } context.registerMessageStream(project.getId(), outputStream); context.registerRelNode(project.getId(), project); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v10/services/ad_group_criterion_customizer_service.proto package com.google.ads.googleads.v10.services; /** * <pre> * The result for the ad group criterion customizer mutate. * </pre> * * Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult} */ public final class MutateAdGroupCriterionCustomizerResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) MutateAdGroupCriterionCustomizerResultOrBuilder { private static final long serialVersionUID = 0L; // Use MutateAdGroupCriterionCustomizerResult.newBuilder() to construct. private MutateAdGroupCriterionCustomizerResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MutateAdGroupCriterionCustomizerResult() { resourceName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new MutateAdGroupCriterionCustomizerResult(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MutateAdGroupCriterionCustomizerResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); resourceName_ = s; break; } case 18: { com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder subBuilder = 
null; if (adGroupCriterionCustomizer_ != null) { subBuilder = adGroupCriterionCustomizer_.toBuilder(); } adGroupCriterionCustomizer_ = input.readMessage(com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(adGroupCriterionCustomizer_); adGroupCriterionCustomizer_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupCriterionCustomizerResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupCriterionCustomizerResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.class, com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object resourceName_; /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @return The resourceName. 
*/ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AD_GROUP_CRITERION_CUSTOMIZER_FIELD_NUMBER = 2; private com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer adGroupCriterionCustomizer_; /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> * @return Whether the adGroupCriterionCustomizer field is set. */ @java.lang.Override public boolean hasAdGroupCriterionCustomizer() { return adGroupCriterionCustomizer_ != null; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> * @return The adGroupCriterionCustomizer. 
*/ @java.lang.Override public com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer getAdGroupCriterionCustomizer() { return adGroupCriterionCustomizer_ == null ? com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.getDefaultInstance() : adGroupCriterionCustomizer_; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizerOrBuilder getAdGroupCriterionCustomizerOrBuilder() { return getAdGroupCriterionCustomizer(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (adGroupCriterionCustomizer_ != null) { output.writeMessage(2, getAdGroupCriterionCustomizer()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (adGroupCriterionCustomizer_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getAdGroupCriterionCustomizer()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public 
boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult)) { return super.equals(obj); } com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult other = (com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasAdGroupCriterionCustomizer() != other.hasAdGroupCriterionCustomizer()) return false; if (hasAdGroupCriterionCustomizer()) { if (!getAdGroupCriterionCustomizer() .equals(other.getAdGroupCriterionCustomizer())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasAdGroupCriterionCustomizer()) { hash = (37 * hash) + AD_GROUP_CRITERION_CUSTOMIZER_FIELD_NUMBER; hash = (53 * hash) + getAdGroupCriterionCustomizer().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException 
{ return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The result for the ad group criterion customizer mutate. 
* </pre> * * Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupCriterionCustomizerResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupCriterionCustomizerResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.class, com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.Builder.class); } // Construct using com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceName_ = ""; if (adGroupCriterionCustomizerBuilder_ == null) { adGroupCriterionCustomizer_ = null; } else { adGroupCriterionCustomizer_ = null; adGroupCriterionCustomizerBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.ads.googleads.v10.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupCriterionCustomizerResult_descriptor; } @java.lang.Override public com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult getDefaultInstanceForType() { return com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult build() { com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult buildPartial() { com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult result = new com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult(this); result.resourceName_ = resourceName_; if (adGroupCriterionCustomizerBuilder_ == null) { result.adGroupCriterionCustomizer_ = adGroupCriterionCustomizer_; } else { result.adGroupCriterionCustomizer_ = adGroupCriterionCustomizerBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) { return mergeFrom((com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult other) { if (other == com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; onChanged(); } if (other.hasAdGroupCriterionCustomizer()) { mergeAdGroupCriterionCustomizer(other.getAdGroupCriterionCustomizer()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object resourceName_ = ""; /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... 
}</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; onChanged(); return this; } /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); onChanged(); return this; } /** * <pre> * Returned for successful operations. * </pre> * * <code>string resource_name = 1 [(.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; onChanged(); return this; } private com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer adGroupCriterionCustomizer_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizerOrBuilder> adGroupCriterionCustomizerBuilder_; /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> * @return Whether the adGroupCriterionCustomizer field is set. */ public boolean hasAdGroupCriterionCustomizer() { return adGroupCriterionCustomizerBuilder_ != null || adGroupCriterionCustomizer_ != null; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> * @return The adGroupCriterionCustomizer. */ public com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer getAdGroupCriterionCustomizer() { if (adGroupCriterionCustomizerBuilder_ == null) { return adGroupCriterionCustomizer_ == null ? com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.getDefaultInstance() : adGroupCriterionCustomizer_; } else { return adGroupCriterionCustomizerBuilder_.getMessage(); } } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. 
The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public Builder setAdGroupCriterionCustomizer(com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer value) { if (adGroupCriterionCustomizerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } adGroupCriterionCustomizer_ = value; onChanged(); } else { adGroupCriterionCustomizerBuilder_.setMessage(value); } return this; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public Builder setAdGroupCriterionCustomizer( com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder builderForValue) { if (adGroupCriterionCustomizerBuilder_ == null) { adGroupCriterionCustomizer_ = builderForValue.build(); onChanged(); } else { adGroupCriterionCustomizerBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". 
* </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public Builder mergeAdGroupCriterionCustomizer(com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer value) { if (adGroupCriterionCustomizerBuilder_ == null) { if (adGroupCriterionCustomizer_ != null) { adGroupCriterionCustomizer_ = com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.newBuilder(adGroupCriterionCustomizer_).mergeFrom(value).buildPartial(); } else { adGroupCriterionCustomizer_ = value; } onChanged(); } else { adGroupCriterionCustomizerBuilder_.mergeFrom(value); } return this; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public Builder clearAdGroupCriterionCustomizer() { if (adGroupCriterionCustomizerBuilder_ == null) { adGroupCriterionCustomizer_ = null; onChanged(); } else { adGroupCriterionCustomizer_ = null; adGroupCriterionCustomizerBuilder_ = null; } return this; } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder getAdGroupCriterionCustomizerBuilder() { onChanged(); return getAdGroupCriterionCustomizerFieldBuilder().getBuilder(); } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". 
* </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ public com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizerOrBuilder getAdGroupCriterionCustomizerOrBuilder() { if (adGroupCriterionCustomizerBuilder_ != null) { return adGroupCriterionCustomizerBuilder_.getMessageOrBuilder(); } else { return adGroupCriterionCustomizer_ == null ? com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.getDefaultInstance() : adGroupCriterionCustomizer_; } } /** * <pre> * The mutated AdGroupCriterionCustomizer with only mutable fields after * mutate. The field will only be returned when response_content_type is set * to "MUTABLE_RESOURCE". * </pre> * * <code>.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer ad_group_criterion_customizer = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizerOrBuilder> getAdGroupCriterionCustomizerFieldBuilder() { if (adGroupCriterionCustomizerBuilder_ == null) { adGroupCriterionCustomizerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v10.resources.AdGroupCriterionCustomizerOrBuilder>( getAdGroupCriterionCustomizer(), getParentForChildren(), isClean()); adGroupCriterionCustomizer_ = null; } return adGroupCriterionCustomizerBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult) private static final com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult(); } public static com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MutateAdGroupCriterionCustomizerResult> PARSER = new com.google.protobuf.AbstractParser<MutateAdGroupCriterionCustomizerResult>() { @java.lang.Override public MutateAdGroupCriterionCustomizerResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new MutateAdGroupCriterionCustomizerResult(input, extensionRegistry); } }; public static com.google.protobuf.Parser<MutateAdGroupCriterionCustomizerResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MutateAdGroupCriterionCustomizerResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v10.services.MutateAdGroupCriterionCustomizerResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Copyright 2005 Sascha Weinreuter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.plugins.xslt.run.rt; import org.xml.sax.SAXParseException; import javax.xml.transform.*; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import java.io.*; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketTimeoutException; import java.util.Enumeration; import java.util.HashSet; import java.util.Set; /** @noinspection CallToPrintStackTrace,UseOfSystemOutOrSystemErr,IOResourceOpenedButNotSafelyClosed,SocketOpenedButNotSafelyClosed,UseOfArchaicSystemPropertyAccessors */ public class XSLTRunner implements XSLTMain { private XSLTRunner() { } public static void main(String[] args) throws Throwable { final XSLTMain main = loadMain(); TransformerFactory transformerFactory; try { transformerFactory = main.createTransformerFactory(); } catch (AbstractMethodError e) { // old debugger transformerFactory = createTransformerFactoryStatic(); } catch (ClassNotFoundException e) { transformerFactory = createTransformerFactoryStatic(); } final String uriResolverClass = System.getProperty("xslt.uri-resolver"); if (uriResolverClass != null) { transformerFactory.setURIResolver((URIResolver)Class.forName(uriResolverClass).newInstance()); } final boolean[] trouble = new boolean[]{ false }; final MyErrorListener listener = new MyErrorListener(trouble); final boolean isSmartErrorHandling = 
System.getProperty("xslt.smart-error-handling", "false").equals("true"); if (isSmartErrorHandling) { transformerFactory.setErrorListener(listener); } final File xslt = new File(System.getProperty("xslt.file")); try { final Transformer transformer = transformerFactory.newTransformer(new StreamSource(xslt)); if (transformer != null && !trouble[0]) { final Enumeration props = System.getProperties().keys(); while (props.hasMoreElements()) { String s = (String)props.nextElement(); if (s.startsWith("xslt.param.")) { final String name = s.substring("xslt.param.".length()); final String value = System.getProperty(s); transformer.setParameter(name, value); } } final File input = new File(System.getProperty("xslt.input")); final String out = System.getProperty("xslt.output"); final StreamResult result; final Integer _port = Integer.getInteger("xslt.listen-port", -1); final int port = _port.intValue(); if (port != -1) { // block until IDEA connects try { final ServerSocket serverSocket = new ServerSocket(port, 1, InetAddress.getByName("127.0.0.1")); serverSocket.setSoTimeout(Integer.getInteger("xslt.listen-timeout", 5000).intValue()); final Socket socket = serverSocket.accept(); final BufferedOutputStream stream = new BufferedOutputStream(socket.getOutputStream(), 16); if (out != null) { final File output = new File(out); result = new StreamResult(new ForkedOutputStream(new OutputStream[]{ stream, new FileOutputStream(output) })); } else { result = new StreamResult(new OutputStreamWriter(stream, "UTF-8")); } Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { try { final Writer out = result.getWriter(); if (out != null) { out.flush(); out.close(); } else if (result.getOutputStream() != null) { result.getOutputStream().flush(); result.getOutputStream().close(); } } catch (IOException e) { // no chance to fix... } } }); } catch (SocketTimeoutException e) { System.err.println("Plugin did not connect to runner within timeout. 
Run aborted."); return; } } else { final String encoding = System.getProperty("file.encoding"); if (encoding != null) { // ensure proper encoding in xml declaration transformer.setOutputProperty("encoding", encoding); result = new StreamResult(new OutputStreamWriter(System.out, encoding)); } else { result = new StreamResult(System.out); } } main.start(transformer, new StreamSource(input), result); } } catch (TransformerException e) { if (isSmartErrorHandling) { listener.error(e); } else { throw e; } } catch (Throwable t) { t.printStackTrace(); System.exit(1); } } public TransformerFactory createTransformerFactory() throws Exception { return createTransformerFactoryStatic(); } public static TransformerFactory createTransformerFactoryStatic() throws InstantiationException, IllegalAccessException, ClassNotFoundException { final String factoryClass = System.getProperty("xslt.transformer-factory"); if (factoryClass != null) { return (TransformerFactory)Class.forName(factoryClass).newInstance(); } else { return TransformerFactory.newInstance(); } } public void start(Transformer transformer, Source source, Result result) throws TransformerException { transformer.transform(source, result); } private static XSLTMain loadMain() { final String mainClass = System.getProperty("xslt.main"); if (mainClass == null) { return new XSLTRunner(); } try { return (XSLTMain)Class.forName(mainClass).newInstance(); } catch (ClassNotFoundException e) { throw new AssertionError(e); } catch (IllegalAccessException e) { throw new AssertionError(e); } catch (InstantiationException e) { throw new AssertionError(e); } } /** @noinspection UseOfSystemOutOrSystemErr*/ private static class MyErrorListener implements ErrorListener { private final Set myMessages = new HashSet(); private final boolean[] myTrouble; public MyErrorListener(boolean[] trouble) { myTrouble = trouble; } public void warning(TransformerException exception) { handleException(exception, "WARNING"); } public void 
error(TransformerException exception) { handleException(exception, "ERROR"); myTrouble[0] = true; } public void fatalError(TransformerException exception) { handleException(exception, "FATAL"); myTrouble[0] = true; } private void handleException(TransformerException exception, String type) { final String message = getMessage(exception); if (!myMessages.contains(message)) { System.err.println("[" + type + "]: " + message); myMessages.add(message); } } private static String getMessage(TransformerException exception) { final SourceLocator[] locators = new SourceLocator[]{ exception.getLocator() }; final String[] messages = new String[1]; findLocator(exception, locators, messages); final SourceLocator locator = locators[0]; if (locator != null) { final String systemId = locator.getSystemId(); if (systemId != null) { String s = systemId.replaceAll(" ", "%20") + ": "; final int lineNumber = locator.getLineNumber(); if (lineNumber != -1) { s += "line " + lineNumber + ": "; final int columnNumber = locator.getColumnNumber(); if (columnNumber != -1) { s += "column " + columnNumber + ": "; } } return s + (messages[0] != null ? messages[0] : exception.getMessage()); } } return messages[0] != null ? 
messages[0] : exception.getMessage(); } private static void findLocator(Throwable exception, SourceLocator[] locators, String[] messages) { if (exception instanceof TransformerException) { final TransformerException t = (TransformerException)exception; if (t.getLocator() != null) { messages[0] = t.getMessage(); locators[0] = t.getLocator(); } else if (exception.getCause() != null) { findLocator(exception.getCause(), locators, messages); } } else if (exception instanceof SAXParseException) { final SAXParseException sae = (SAXParseException)exception; messages[0] = sae.getMessage(); locators[0] = new SourceLocator() { public int getColumnNumber() { return sae.getColumnNumber(); } public int getLineNumber() { return sae.getLineNumber(); } public String getPublicId() { //noinspection ConstantConditions return null; } public String getSystemId() { return sae.getSystemId(); } }; } else if (exception.getCause() != null) { findLocator(exception.getCause(), locators, messages); } try { final Throwable t = (Throwable)exception.getClass().getMethod("getException", new Class[0]).invoke(exception, new Object[0]); if (t != exception) { findLocator(t, locators, messages); } } catch (Exception e) { // } } } static class ForkedOutputStream extends OutputStream { OutputStream[] outs; ForkedOutputStream(OutputStream[] out) { outs = out; } public void write(byte[] b, int off, int len) throws IOException { for (int i = 0, outsLength = outs.length; i < outsLength; i++) { outs[i].write(b, off, len); } } public void write(int b) throws IOException { for (int i = 0, outsLength = outs.length; i < outsLength; i++) { outs[i].write(b); } } public void flush() throws IOException { for (int i = 0, outsLength = outs.length; i < outsLength; i++) { outs[i].flush(); } } public void close() throws IOException { for (int i = 0, outsLength = outs.length; i < outsLength; i++) { outs[i].close(); } } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.management; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.management.MBeanServer; import javax.management.ObjectName; import org.apache.camel.CamelContext; import org.apache.camel.api.management.ManagedCamelContext; import org.apache.camel.api.management.mbean.ManagedCamelContextMBean; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.util.StringHelper; import org.junit.Test; public class ManagedCamelContextTest extends ManagementTestSupport { @Override protected CamelContext createCamelContext() throws Exception { CamelContext context = super.createCamelContext(); context.init(); // to force a different management name than the camel id context.getManagementNameStrategy().setNamePattern("19-#name#"); return context; } @Test public void testManagedCamelContextClient() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } ManagedCamelContextMBean client = context.getExtension(ManagedCamelContext.class).getManagedCamelContext(); assertNotNull(client); assertEquals("camel-1", 
client.getCamelId()); assertEquals("Started", client.getState()); List<String> names = client.findComponentNames(); assertNotNull(names); assertTrue(names.contains("mock")); } @Test public void testManagedCamelContext() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertTrue("Should be registered", mbeanServer.isRegistered(on)); String name = (String) mbeanServer.getAttribute(on, "CamelId"); assertEquals("camel-1", name); String managementName = (String) mbeanServer.getAttribute(on, "ManagementName"); assertEquals("19-camel-1", managementName); String level = (String) mbeanServer.getAttribute(on, "ManagementStatisticsLevel"); assertEquals("Default", level); String uptime = (String) mbeanServer.getAttribute(on, "Uptime"); assertNotNull(uptime); long uptimeMillis = (Long) mbeanServer.getAttribute(on, "UptimeMillis"); assertTrue(uptimeMillis > 0); String status = (String) mbeanServer.getAttribute(on, "State"); assertEquals("Started", status); Boolean messageHistory = (Boolean) mbeanServer.getAttribute(on, "MessageHistory"); assertEquals(Boolean.TRUE, messageHistory); Boolean logMask = (Boolean) mbeanServer.getAttribute(on, "LogMask"); assertEquals(Boolean.FALSE, logMask); Integer total = (Integer) mbeanServer.getAttribute(on, "TotalRoutes"); assertEquals(2, total.intValue()); Integer started = (Integer) mbeanServer.getAttribute(on, "StartedRoutes"); assertEquals(2, started.intValue()); // invoke operations MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mbeanServer.invoke(on, "sendBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"}); assertMockEndpointsSatisfied(); resetMocks(); mock.expectedBodiesReceived("Hello World"); mbeanServer.invoke(on, 
"sendStringBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"}); assertMockEndpointsSatisfied(); Object reply = mbeanServer.invoke(on, "requestBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"}); assertEquals("Bye World", reply); reply = mbeanServer.invoke(on, "requestStringBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"}); assertEquals("Bye World", reply); resetMocks(); mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mock.expectedHeaderReceived("foo", 123); Map<String, Object> headers = new HashMap<>(); headers.put("foo", 123); mbeanServer.invoke(on, "sendBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"}); assertMockEndpointsSatisfied(); resetMocks(); mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Hello World"); mock.expectedHeaderReceived("foo", 123); reply = mbeanServer.invoke(on, "requestBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"}); assertEquals("Hello World", reply); assertMockEndpointsSatisfied(); // test can send Boolean can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"direct:start"}, new String[]{"java.lang.String"}); assertEquals(true, can.booleanValue()); can = (Boolean) mbeanServer.invoke(on, "canSendToEndpoint", new Object[]{"timer:foo"}, new String[]{"java.lang.String"}); assertEquals(false, can.booleanValue()); // stop Camel mbeanServer.invoke(on, "stop", null, null); } @Test public void testManagedCamelContextCreateEndpoint() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = 
ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertNull(context.hasEndpoint("seda:bar")); // create a new endpoint Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.TRUE, reply); assertNotNull(context.hasEndpoint("seda:bar")); ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\""); boolean registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); // create it again reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.FALSE, reply); registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); } @Test public void testManagedCamelContextRemoveEndpoint() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertNull(context.hasEndpoint("seda:bar")); // create a new endpoint Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"}); assertEquals(Boolean.TRUE, reply); assertNotNull(context.hasEndpoint("seda:bar")); ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\""); boolean registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); // remove it Object num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"}); assertEquals(1, num); assertNull(context.hasEndpoint("seda:bar")); registered = mbeanServer.isRegistered(seda); assertFalse("Should not be registered " + seda, registered); // remove it again num = 
// Tail of a method started above this view: bulk-remove all seda endpoints via JMX
// and verify they are gone from both the CamelContext and the MBean server.
mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"});
// NOTE(review): the return value of the invoke above is discarded and the assertion
// below re-checks the `num` captured from an earlier call — confirm this is intended.
assertEquals(0, num);
assertNull(context.hasEndpoint("seda:bar"));
registered = mbeanServer.isRegistered(seda);
assertFalse("Should not be registered " + seda, registered);
}

// Verifies that the context MBean can discover components on the classpath
// and report their maven coordinates.
@Test
public void testFindComponentsInClasspath() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");
    assertTrue("Should be registered", mbeanServer.isRegistered(on));

    @SuppressWarnings("unchecked")
    Map<String, Properties> info = (Map<String, Properties>) mbeanServer.invoke(on, "findComponents", null, null);
    assertNotNull(info);
    // camel-core alone ships well over 20 components
    assertTrue(info.size() > 20);

    Properties prop = info.get("seda");
    assertNotNull(prop);
    assertEquals("seda", prop.get("name"));
    assertEquals("org.apache.camel", prop.get("groupId"));
    assertEquals("camel-core", prop.get("artifactId"));
}

// Verifies the static-endpoint JSON summary produced by the context MBean.
// The brace counts pin the expected JSON object structure.
@Test
public void testManagedCamelContextCreateRouteStaticEndpointJson() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "createRouteStaticEndpointJson", null, null);
    assertNotNull(json);
    assertEquals(7, StringHelper.countChar(json, '{'));
    assertEquals(7, StringHelper.countChar(json, '}'));
    assertTrue(json.contains("{ \"uri\": \"direct://start\" }"));
    assertTrue(json.contains("{ \"uri\": \"direct://foo\" }"));
}

// explainEndpointJson with includeAllOptions=false: only options actually set
// on the uri (plus required path options) are expected in the output.
@Test
public void testManagedCamelContextExplainEndpointUriFalse() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainEndpointJson", new Object[]{"log:foo?groupDelay=2000&groupSize=5", false}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    // the loggerName option should come before the groupDelay option
    int pos = json.indexOf("loggerName");
    int pos2 = json.indexOf("groupDelay");
    assertTrue("LoggerName should come before groupDelay", pos < pos2);

    assertEquals(6, StringHelper.countChar(json, '{'));
    assertEquals(6, StringHelper.countChar(json, '}'));
    assertTrue(json.contains("\"scheme\": \"log\""));
    assertTrue(json.contains("\"label\": \"core,monitoring\""));
    assertTrue(json.contains("\"loggerName\": { \"kind\": \"path\", \"group\": \"producer\", \"required\": \"true\""));
    assertTrue(json.contains("\"groupSize\": { \"kind\": \"parameter\", \"group\": \"producer\", \"type\": \"integer\"," + " \"javaType\": \"java.lang.Integer\", \"deprecated\": \"false\", \"secret\": \"false\", \"value\": \"5\""));
    // and we should also have the javadoc documentation
    assertTrue(json.contains("Set the initial delay for stats (in millis)"));
}

// explainEndpointJson with includeAllOptions=true: the full option set of the
// log component is expected (hence the larger brace counts).
@Test
public void testManagedCamelContextExplainEndpointUriTrue() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainEndpointJson", new Object[]{"log:foo?groupDelay=2000&groupSize=5", true}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    // the loggerName option should come before the groupDelay option
    int pos = json.indexOf("loggerName");
    int pos2 = json.indexOf("groupDelay");
    assertTrue("LoggerName should come before groupDelay", pos < pos2);

    assertEquals(30, StringHelper.countChar(json, '{'));
    assertEquals(30, StringHelper.countChar(json, '}'));
    assertTrue(json.contains("\"scheme\": \"log\""));
    assertTrue(json.contains("\"label\": \"core,monitoring\""));
    assertTrue(json.contains("\"loggerName\": { \"kind\": \"path\", \"group\": \"producer\", \"required\": \"true\""));
    assertTrue(json.contains("\"groupSize\": { \"kind\": \"parameter\", \"group\": \"producer\", \"type\": \"integer\"," + " \"javaType\": \"java.lang.Integer\", \"deprecated\": \"false\", \"secret\": \"false\", \"value\": \"5\""));
    // and we should also have the javadoc documentation
    assertTrue(json.contains("Set the initial delay for stats (in millis)"));
}

// explainEipJson on the route node id "myTransform" (see createRouteBuilder below)
// with includeAllOptions=false.
@Test
public void testManagedCamelContextExplainEipFalse() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainEipJson", new Object[]{"myTransform", false}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    assertTrue(json.contains("\"label\": \"eip,transformation\""));
    assertTrue(json.contains("\"expression\": { \"kind\": \"expression\", \"required\": \"true\", \"type\": \"object\""));
    // we should see the constant value
    assertTrue(json.contains("Bye World"));
}

// explainEipJson with includeAllOptions=true additionally exposes options not
// set on the node, such as "description".
@Test
public void testManagedCamelContextExplainEipTrue() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainEipJson", new Object[]{"myTransform", true}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    assertTrue(json.contains("\"label\": \"eip,transformation\""));
    assertTrue(json.contains("\"expression\": { \"kind\": \"expression\", \"required\": \"true\", \"type\": \"object\""));
    // and now we have the description option also
    assertTrue(json.contains("\"description\": { \"kind\": \"element\", \"required\": \"false\", \"type\": \"object\", \"javaType\""));
    // we should see the constant value
    assertTrue(json.contains("Bye World"));
}

// explainEipJson by EIP model name ("aggregate") rather than a route node id.
@Test
public void testManagedCamelContextExplainEipModel() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainEipJson", new Object[]{"aggregate", false}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    assertTrue(json.contains("\"description\": \"Aggregates many messages into a single message\""));
    assertTrue(json.contains("\"label\": \"eip,routing\""));
    assertTrue(json.contains("\"correlationExpression\": { \"kind\": \"expression\", \"displayName\": \"Correlation Expression\", \"required\": true, \"type\": \"object\""));
    assertTrue(json.contains("\"discardOnCompletionTimeout\": { \"kind\": \"attribute\", \"displayName\": \"Discard On Completion Timeout\", \"required\": false, \"type\": \"boolean\""));
}

// explainComponentJson for the seda component.
@Test
public void testManagedCamelContextExplainComponentModel() throws Exception {
    // JMX tests dont work well on AIX CI servers (hangs them)
    if (isPlatform("aix")) {
        return;
    }

    MBeanServer mbeanServer = getMBeanServer();
    ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

    // get the json
    String json = (String) mbeanServer.invoke(on, "explainComponentJson", new Object[]{"seda", false}, new String[]{"java.lang.String", "boolean"});
    assertNotNull(json);

    assertTrue(json.contains("\"label\": \"core,endpoint\""));
    assertTrue(json.contains("\"queueSize\": { \"kind\": \"property\", \"group\": \"advanced\", \"label\": \"advanced\""));
}

// Routes under test: "myTransform" is the node id the explainEip tests look up,
// and "Bye World" is the constant those tests expect to see in the JSON.
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            from("direct:start").delay(10).to("mock:result");
            from("direct:foo").delay(10).transform(constant("Bye World")).id("myTransform");
        }
    };
}
}
package com.gj.administrator.gjerp.domain;

import java.util.List;
import com.gj.administrator.gjerp.dao.DaoSession;
import de.greenrobot.dao.DaoException;
import com.gj.administrator.gjerp.dao.DialogDao;
import com.gj.administrator.gjerp.dao.PartnerDao;
import com.gj.administrator.gjerp.dao.StaffDao;
import com.gj.administrator.gjerp.dao.SupplierDao;
import com.gj.administrator.gjerp.dao.TaskDao;

// THIS CODE IS GENERATED BY greenDAO, EDIT ONLY INSIDE THE "KEEP"-SECTIONS

// KEEP INCLUDES - put your custom includes here
// KEEP INCLUDES END

/**
 * Entity mapped to table "TASK".
 *
 * Active greenDAO entity: carries to-one relations to Staff (leader),
 * Partner (parenter), Supplier, a parent Task and a Dialog, plus a
 * to-many "children" relation to sub-tasks. Relations are resolved
 * lazily on first access via the attached DaoSession.
 */
public class Task {

    private Long id;
    /** Not-null value. */
    private String title;
    /** Not-null value. */
    private String content;
    private Long leader_id;
    // Bitmask of weekdays, see the PERIODS constants in the KEEP section below.
    private int period;
    /** Not-null value. */
    private String state;
    /** Not-null value. */
    private java.util.Date start_time;
    /** Not-null value. */
    private java.util.Date finish_time;
    private Long parenter_id;
    private Long supplier_id;
    // Foreign key of the parent task (self-relation), see getParent().
    private Long task_id;
    private Long dialog_id;

    /** Used to resolve relations */
    private transient DaoSession daoSession;

    /** Used for active entity operations. */
    private transient TaskDao myDao;

    // Cached to-one targets plus the key each one was resolved for; the
    // *__resolvedKey fields let the getters detect a changed foreign key.
    private Staff leader;
    private Long leader__resolvedKey;

    private Partner parenter;
    private Long parenter__resolvedKey;

    private Supplier supplier;
    private Long supplier__resolvedKey;

    private Task parent;
    private Long parent__resolvedKey;

    private Dialog dialog;
    private Long dialog__resolvedKey;

    private List<Task> children;

    // KEEP FIELDS - put your custom fields here
    // Weekday flags for the `period` bitmask. Each literal is HEX with a single
    // '1' digit, so each constant occupies a distinct bit (bits 0,4,8,...,24)
    // and they combine safely with '|'. INSTANT (0) means a one-off task.
    // NOTE(review): "TUARS" is presumably a typo for Thursday; it is public
    // API, so renaming it would break existing callers.
    public final static int INSTANT = 0x0000000;
    public final static int MON = 0x0000001;
    public final static int TUES = 0x0000010;
    public final static int WED = 0x0000100;
    public final static int TUARS = 0x0001000;
    public final static int FRI = 0x0010000;
    public final static int SAT = 0x0100000;
    public final static int SUN = 0x1000000;
    // All weekday flags in Mon..Sun order (excludes INSTANT).
    public final static int[] PERIODS = {MON,TUES,WED,TUARS,FRI,SAT,SUN};
    // KEEP FIELDS END

    public Task() {
    }

    public Task(Long id) {
        this.id = id;
    }

    public Task(Long id, String title, String content, Long leader_id, int period, String state, java.util.Date start_time, java.util.Date finish_time, Long parenter_id, Long supplier_id, Long task_id, Long dialog_id) {
        this.id = id;
        this.title = title;
        this.content = content;
        this.leader_id = leader_id;
        this.period = period;
        this.state = state;
        this.start_time = start_time;
        this.finish_time = finish_time;
        this.parenter_id = parenter_id;
        this.supplier_id = supplier_id;
        this.task_id = task_id;
        this.dialog_id = dialog_id;
    }

    /** called by internal mechanisms, do not call yourself. */
    public void __setDaoSession(DaoSession daoSession) {
        this.daoSession = daoSession;
        myDao = daoSession != null ? daoSession.getTaskDao() : null;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** Not-null value. */
    public String getTitle() {
        return title;
    }

    /** Not-null value; ensure this value is available before it is saved to the database. */
    public void setTitle(String title) {
        this.title = title;
    }

    /** Not-null value. */
    public String getContent() {
        return content;
    }

    /** Not-null value; ensure this value is available before it is saved to the database. */
    public void setContent(String content) {
        this.content = content;
    }

    public Long getLeader_id() {
        return leader_id;
    }

    public void setLeader_id(Long leader_id) {
        this.leader_id = leader_id;
    }

    public int getPeriod() {
        return period;
    }

    public void setPeriod(int period) {
        this.period = period;
    }

    /** Not-null value. */
    public String getState() {
        return state;
    }

    /** Not-null value; ensure this value is available before it is saved to the database. */
    public void setState(String state) {
        this.state = state;
    }

    /** Not-null value. */
    public java.util.Date getStart_time() {
        return start_time;
    }

    /** Not-null value; ensure this value is available before it is saved to the database. */
    public void setStart_time(java.util.Date start_time) {
        this.start_time = start_time;
    }

    /** Not-null value. */
    public java.util.Date getFinish_time() {
        return finish_time;
    }

    /** Not-null value; ensure this value is available before it is saved to the database. */
    public void setFinish_time(java.util.Date finish_time) {
        this.finish_time = finish_time;
    }

    public Long getParenter_id() {
        return parenter_id;
    }

    public void setParenter_id(Long parenter_id) {
        this.parenter_id = parenter_id;
    }

    public Long getSupplier_id() {
        return supplier_id;
    }

    public void setSupplier_id(Long supplier_id) {
        this.supplier_id = supplier_id;
    }

    public Long getTask_id() {
        return task_id;
    }

    public void setTask_id(Long task_id) {
        this.task_id = task_id;
    }

    public Long getDialog_id() {
        return dialog_id;
    }

    public void setDialog_id(Long dialog_id) {
        this.dialog_id = dialog_id;
    }

    /** To-one relationship, resolved on first access. */
    public Staff getLeader() {
        Long __key = this.leader_id;
        // Re-query only if never resolved or the foreign key changed since.
        if (leader__resolvedKey == null || !leader__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            StaffDao targetDao = daoSession.getStaffDao();
            Staff leaderNew = targetDao.load(__key);
            synchronized (this) {
                leader = leaderNew;
                leader__resolvedKey = __key;
            }
        }
        return leader;
    }

    public void setLeader(Staff leader) {
        synchronized (this) {
            this.leader = leader;
            leader_id = leader == null ? null : leader.getId();
            leader__resolvedKey = leader_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Partner getParenter() {
        Long __key = this.parenter_id;
        if (parenter__resolvedKey == null || !parenter__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            PartnerDao targetDao = daoSession.getPartnerDao();
            Partner parenterNew = targetDao.load(__key);
            synchronized (this) {
                parenter = parenterNew;
                parenter__resolvedKey = __key;
            }
        }
        return parenter;
    }

    public void setParenter(Partner parenter) {
        synchronized (this) {
            this.parenter = parenter;
            parenter_id = parenter == null ? null : parenter.getId();
            parenter__resolvedKey = parenter_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Supplier getSupplier() {
        Long __key = this.supplier_id;
        if (supplier__resolvedKey == null || !supplier__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            SupplierDao targetDao = daoSession.getSupplierDao();
            Supplier supplierNew = targetDao.load(__key);
            synchronized (this) {
                supplier = supplierNew;
                supplier__resolvedKey = __key;
            }
        }
        return supplier;
    }

    public void setSupplier(Supplier supplier) {
        synchronized (this) {
            this.supplier = supplier;
            supplier_id = supplier == null ? null : supplier.getId();
            supplier__resolvedKey = supplier_id;
        }
    }

    /** To-one relationship (self-relation via task_id), resolved on first access. */
    public Task getParent() {
        Long __key = this.task_id;
        if (parent__resolvedKey == null || !parent__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            TaskDao targetDao = daoSession.getTaskDao();
            Task parentNew = targetDao.load(__key);
            synchronized (this) {
                parent = parentNew;
                parent__resolvedKey = __key;
            }
        }
        return parent;
    }

    public void setParent(Task parent) {
        synchronized (this) {
            this.parent = parent;
            task_id = parent == null ? null : parent.getId();
            parent__resolvedKey = task_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Dialog getDialog() {
        Long __key = this.dialog_id;
        if (dialog__resolvedKey == null || !dialog__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            DialogDao targetDao = daoSession.getDialogDao();
            Dialog dialogNew = targetDao.load(__key);
            synchronized (this) {
                dialog = dialogNew;
                dialog__resolvedKey = __key;
            }
        }
        return dialog;
    }

    public void setDialog(Dialog dialog) {
        synchronized (this) {
            this.dialog = dialog;
            dialog_id = dialog == null ? null : dialog.getId();
            dialog__resolvedKey = dialog_id;
        }
    }

    /** To-many relationship, resolved on first access (and after reset). Changes to to-many relations are not persisted, make changes to the target entity. */
    public List<Task> getChildren() {
        if (children == null) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            TaskDao targetDao = daoSession.getTaskDao();
            List<Task> childrenNew = targetDao._queryTask_Children(id);
            synchronized (this) {
                // double-checked: keep the first result if another thread won the race
                if(children == null) {
                    children = childrenNew;
                }
            }
        }
        return children;
    }

    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    public synchronized void resetChildren() {
        children = null;
    }

    /** Convenient call for {@link AbstractDao#delete(Object)}. Entity must be attached to an entity context. */
    public void delete() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.delete(this);
    }

    /** Convenient call for {@link AbstractDao#update(Object)}. Entity must be attached to an entity context. */
    public void update() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.update(this);
    }

    /** Convenient call for {@link AbstractDao#refresh(Object)}. Entity must be attached to an entity context. */
    public void refresh() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.refresh(this);
    }

    // KEEP METHODS - put your custom methods here
    /**
     * Renders a period bitmask as a display string: "Instant Task" for 0,
     * otherwise "Period Task:" followed by the 1-based day numbers
     * (1=MON ... 7=SUN) whose flag bits are set, each followed by a space.
     */
    public static String getPeriodString(int periodInt){
        if(periodInt == 0)
            return "Instant Task";
        else{
            String result = "";
            if((periodInt & MON ) !=0){
                result += 1+" ";
            }
            if((periodInt & TUES ) !=0){
                result += 2+" ";
            }
            if((periodInt & WED ) !=0){
                result += 3+" ";
            }
            if((periodInt & TUARS ) !=0){
                result += 4+" ";
            }
            if((periodInt & FRI ) !=0){
                result += 5+" ";
            }
            if((periodInt & SAT ) !=0){
                result += 6+" ";
            }
            if((periodInt & SUN ) !=0){
                result += 7+" ";
            }
            return "Period Task:"+ result;
        }
    }
    // KEEP METHODS END

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.routing.allocation;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Tests for rack-awareness ("cluster.routing.allocation.awareness.attributes")
 * driven shard allocation: each test builds a cluster state, adds nodes with
 * rack_id attributes, reroutes, and asserts which shards move where.
 */
public class AwarenessAllocationTests extends ESAllocationTestCase {

    private final Logger logger = Loggers.getLogger(AwarenessAllocationTests.class);

    // 1 shard + 1 replica on one rack; adding a node on a new rack must pull
    // exactly one copy over, and further nodes on new racks must not.
    public void testMoveShardOnceNewNodeWithAttributeAdded1() {
        AllocationService strategy = createAllocationService(Settings.builder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .build());

        logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded1'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder().addAsNew(metaData.index("test")).build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();

        logger.info("--> adding two nodes on same rack and do rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .add(newNode("node1", singletonMap("rack_id", "1")))
            .add(newNode("node2", singletonMap("rack_id", "1")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1));

        logger.info("--> start the shards (primaries)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        logger.info("--> start the shards (replicas)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2));

        logger.info("--> add a new node with a new rack and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node3", singletonMap("rack_id", "2")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node3"));

        logger.info("--> complete relocation");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));

        logger.info("--> add another node with a new rack, make sure nothing moves");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node4", singletonMap("rack_id", "3")))
        ).build();
        ClusterState newState = strategy.reroute(clusterState, "reroute");
        assertThat(newState, equalTo(clusterState));
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2));
    }

    // Same as test 1 but with three nodes on the initial rack.
    public void testMoveShardOnceNewNodeWithAttributeAdded2() {
        // NOTE(review): this is the only test using the key
        // "cluster.routing.allocation.concurrent_recoveries"; all sibling tests
        // use "...node_concurrent_recoveries" — confirm whether this is a typo.
        AllocationService strategy = createAllocationService(Settings.builder()
            .put("cluster.routing.allocation.concurrent_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .build());

        logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded2'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder().addAsNew(metaData.index("test")).build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();

        logger.info("--> adding two nodes on same rack and do rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .add(newNode("node1", singletonMap("rack_id", "1")))
            .add(newNode("node2", singletonMap("rack_id", "1")))
            .add(newNode("node3", singletonMap("rack_id", "1")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1));

        logger.info("--> start the shards (primaries)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        logger.info("--> start the shards (replicas)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2));

        logger.info("--> add a new node with a new rack and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node4", singletonMap("rack_id", "2")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node4"));

        logger.info("--> complete relocation");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));

        logger.info("--> add another node with a new rack, make sure nothing moves");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node5", singletonMap("rack_id", "3")))
        ).build();
        ClusterState newState = strategy.reroute(clusterState, "reroute");
        assertThat(newState, equalTo(clusterState));
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2));
    }

    // 5 shards + 1 replica; a node on a new rack should attract 5 copies
    // (one of each shard). Balance weights are tuned so only replica count matters.
    public void testMoveShardOnceNewNodeWithAttributeAdded3() {
        AllocationService strategy = createAllocationService(Settings.builder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .put("cluster.routing.allocation.balance.index", 0.0f)
            .put("cluster.routing.allocation.balance.replica", 1.0f)
            .put("cluster.routing.allocation.balance.primary", 0.0f)
            .build());

        logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded3'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test"))
            .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();

        logger.info("--> adding two nodes on same rack and do rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .add(newNode("node1", singletonMap("rack_id", "1")))
            .add(newNode("node2", singletonMap("rack_id", "1")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");

        logger.info("Initializing shards: {}", clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
        logger.info("Started shards: {}", clusterState.getRoutingNodes().shardsWithState(STARTED));
        logger.info("Relocating shards: {}", clusterState.getRoutingNodes().shardsWithState(RELOCATING));
        logger.info("Unassigned shards: {}", clusterState.getRoutingNodes().shardsWithState(UNASSIGNED));

        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(5));

        logger.info("--> start the shards (primaries)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        logger.info("--> start the shards (replicas)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));

        logger.info("--> add a new node with a new rack and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node3", singletonMap("rack_id", "2")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node3"));

        logger.info("--> complete initializing");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        logger.info("--> run it again, since we still might have relocation");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));

        logger.info("--> add another node with a new rack, some more relocation should happen");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node4", singletonMap("rack_id", "3")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(RELOCATING).size(), greaterThan(0));

        logger.info("--> complete relocation");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));
    }

    // Two indices of 5 shards + 1 replica each; verifies per-node shard counts
    // after racks 2 and 3 are added (10 on node3, then 5 everywhere).
    public void testMoveShardOnceNewNodeWithAttributeAdded4() {
        AllocationService strategy = createAllocationService(Settings.builder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .build());

        logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded4'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1))
            .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test1"))
            .addAsNew(metaData.index("test2"))
            .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();

        logger.info("--> adding two nodes on same rack and do rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .add(newNode("node1", singletonMap("rack_id", "1")))
            .add(newNode("node2", singletonMap("rack_id", "1")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(10));

        logger.info("--> start the shards (primaries)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        logger.info("--> start the shards (replicas)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(20));

        logger.info("--> add a new node with a new rack and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node3", singletonMap("rack_id", "2")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(10));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(10));
        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node3"));

        logger.info("--> complete initializing");
        for (int i = 0; i < 2; i++) {
            logger.info("--> complete initializing round: [{}]", i);
            clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
        }
        // one extra round to flush any relocations triggered by the previous starts
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(20));
        assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(10));
        assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(5));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));

        logger.info("--> add another node with a new rack, some more relocation should happen");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .add(newNode("node4", singletonMap("rack_id", "3")))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(RELOCATING).size(), greaterThan(0));

        logger.info("--> complete relocation");
        for (int i = 0; i < 2; i++) {
            logger.info("--> complete initializing round: [{}]", i);
            clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
        }

        assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(20));
        assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().node("node4").size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().node("node2").size(), equalTo(5));
        assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(5));

        logger.info("--> do another reroute, make sure nothing moves");
        assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable()));
    }

    // 1 shard + 2 replicas this time; continues beyond this view.
    public void testMoveShardOnceNewNodeWithAttributeAdded5() {
        AllocationService strategy = createAllocationService(Settings.builder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .put("cluster.routing.allocation.awareness.attributes", "rack_id")
            .build());

        logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded5'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder()
            .addAsNew(metaData.index("test"))
            .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();
logger.info("--> adding two nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("rack_id", "1"))) .add(newNode("node2", singletonMap("rack_id", "1"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> start the shards (replicas)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); logger.info("--> add a new node with a new rack and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node3", singletonMap("rack_id", "2"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo("node3")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(3)); logger.info("--> do another reroute, make sure nothing moves"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); logger.info("--> add another node with a new rack, we will 
have another relocation"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node4", singletonMap("rack_id", "3"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node4")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(3)); logger.info("--> make sure another reroute does not move things"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); } public void testMoveShardOnceNewNodeWithAttributeAdded6() { AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); logger.info("Building initial routing table for 'moveShardOnceNewNodeWithAttributeAdded6'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(3)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two 
nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("rack_id", "1"))) .add(newNode("node2", singletonMap("rack_id", "1"))) .add(newNode("node3", singletonMap("rack_id", "1"))) .add(newNode("node4", singletonMap("rack_id", "1"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> start the shards (replicas)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(4)); logger.info("--> add a new node with a new rack and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node5", singletonMap("rack_id", "2"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(3)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node5")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(4)); logger.info("--> do another reroute, make sure nothing moves"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), 
sameInstance(clusterState.routingTable())); logger.info("--> add another node with a new rack, we will have another relocation"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node6", singletonMap("rack_id", "3"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(3)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).get(0).relocatingNodeId(), equalTo("node6")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(4)); logger.info("--> make sure another reroute does not move things"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); } public void testFullAwareness1() { AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); logger.info("Building initial routing table for 'fullAwareness1'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = 
ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("rack_id", "1"))) .add(newNode("node2", singletonMap("rack_id", "1"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> replica will not start because we have only one rack value"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> add a new node with a new rack and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node3", singletonMap("rack_id", "2"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo("node3")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); logger.info("--> do another reroute, make sure nothing moves"); 
assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); logger.info("--> add another node with a new rack, make sure nothing moves"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node4", singletonMap("rack_id", "3"))) ).build(); ClusterState newState = strategy.reroute(clusterState, "reroute"); assertThat(newState, equalTo(clusterState)); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2)); } public void testFullAwareness2() { AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .build()); logger.info("Building initial routing table for 'fullAwareness2'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("rack_id", "1"))) .add(newNode("node2", singletonMap("rack_id", "1"))) .add(newNode("node3", singletonMap("rack_id", "1"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); 
logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> replica will not start because we have only one rack value"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> add a new node with a new rack and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node4", singletonMap("rack_id", "2"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo("node4")); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); logger.info("--> do another reroute, make sure nothing moves"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); logger.info("--> add another node with a new rack, make sure nothing moves"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node5", singletonMap("rack_id", "3"))) ).build(); ClusterState newState = strategy.reroute(clusterState, "reroute"); assertThat(newState, equalTo(clusterState)); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2)); } public void testFullAwareness3() { 
AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .put("cluster.routing.allocation.awareness.force.rack_id.values", "1,2") .put("cluster.routing.allocation.awareness.attributes", "rack_id") .put("cluster.routing.allocation.balance.index", 0.0f) .put("cluster.routing.allocation.balance.replica", 1.0f) .put("cluster.routing.allocation.balance.primary", 0.0f) .build()); logger.info("Building initial routing table for 'fullAwareness3'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1)) .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test1")) .addAsNew(metaData.index("test2")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("rack_id", "1"))) .add(newNode("node2", singletonMap("rack_id", "1"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(10)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); 
assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10)); logger.info("--> add a new node with a new rack and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node3", singletonMap("rack_id", "2"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(10)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo("node3")); logger.info("--> complete initializing"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> run it again, since we still might have relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(20)); logger.info("--> do another reroute, make sure nothing moves"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); logger.info("--> add another node with a new rack, some more relocation should happen"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("node4", singletonMap("rack_id", "3"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(RELOCATING).size(), greaterThan(0)); logger.info("--> complete relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); 
assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(20)); logger.info("--> do another reroute, make sure nothing moves"); assertThat(strategy.reroute(clusterState, "reroute").routingTable(), sameInstance(clusterState.routingTable())); } public void testUnbalancedZones() { AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.awareness.force.zone.values", "a,b") .put("cluster.routing.allocation.awareness.attributes", "zone") .put("cluster.routing.allocation.node_concurrent_recoveries", 10) .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10) .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) .build()); logger.info("Building initial routing table for 'testUnbalancedZones'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(1)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes in different zones and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("A-0", singletonMap("zone", "a"))) .add(newNode("B-0", singletonMap("zone", "b"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(5)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, 
clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(5)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(5)); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> all replicas are allocated and started since we have on node in each zone"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(10)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> add a new node in zone 'a' and reroute"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) .add(newNode("A-1", singletonMap("zone", "a"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(8)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo("A-1")); logger.info("--> starting initializing shards on the new node"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(10)); assertThat(clusterState.getRoutingNodes().node("A-1").size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().node("A-0").size(), equalTo(3)); assertThat(clusterState.getRoutingNodes().node("B-0").size(), equalTo(5)); } public void testUnassignedShardsWithUnbalancedZones() { AllocationService strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") .put("cluster.routing.allocation.awareness.attributes", "zone") .build()); logger.info("Building initial routing table for 'testUnassignedShardsWithUnbalancedZones'"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(4)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding 5 nodes in different zones and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("A-0", singletonMap("zone", "a"))) .add(newNode("A-1", singletonMap("zone", "a"))) .add(newNode("A-2", singletonMap("zone", "a"))) .add(newNode("A-3", singletonMap("zone", "a"))) .add(newNode("A-4", singletonMap("zone", "a"))) .add(newNode("B-0", singletonMap("zone", "b"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> start the shard (primary)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(3)); assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1)); // Unassigned shard is expected. // Cancel all initializing shards and move started primary to another node. 
AllocationCommands commands = new AllocationCommands();
        String primaryNode = null;
        for (ShardRouting routing : clusterState.routingTable().allShards()) {
            if (routing.primary()) {
                primaryNode = routing.currentNodeId();
            } else if (routing.initializing()) {
                commands.add(new CancelAllocationCommand(routing.shardId().getIndexName(), routing.id(), routing.currentNodeId(), false));
            }
        }
        commands.add(new MoveAllocationCommand("test", 0, primaryNode, "A-4"));

        clusterState = strategy.reroute(clusterState, commands, false, false).getClusterState();
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0));
        assertThat(clusterState.getRoutingNodes().shardsWithState(RELOCATING).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(4)); // +1 for relocating shard.
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1)); // Still 1 unassigned.
    }

    /**
     * Multiple awareness attributes ("zone" and "rack", both forced): primary and
     * replica are allocated once one node per (zone, rack) combination exists.
     */
    public void testMultipleAwarenessAttributes() {
        AllocationService strategy = createAllocationService(Settings.builder()
            .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone, rack")
            .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a, b")
            .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "rack.values", "c, d")
            .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
            .build());

        // Message previously said 'testUnbalancedZones' (copy-paste from that test).
        logger.info("Building initial routing table for 'testMultipleAwarenessAttributes'");

        MetaData metaData = MetaData.builder()
            .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
            .build();

        RoutingTable initialRoutingTable = RoutingTable.builder().addAsNew(metaData.index("test")).build();

        ClusterState clusterState = ClusterState.builder(
            org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)
        ).metaData(metaData).routingTable(initialRoutingTable).build();

        logger.info("--> adding two nodes in different zones and do rerouting");
        Map<String, String> nodeAAttributes = new HashMap<>();
        nodeAAttributes.put("zone", "a");
        nodeAAttributes.put("rack", "c");
        Map<String, String> nodeBAttributes = new HashMap<>();
        nodeBAttributes.put("zone", "b");
        nodeBAttributes.put("rack", "d");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
            .add(newNode("A-0", nodeAAttributes))
            .add(newNode("B-0", nodeBAttributes))
        ).build();
        clusterState = strategy.reroute(clusterState, "reroute");
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0));
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1));

        logger.info("--> start the shards (primaries)");
        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1));

        clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
        logger.info("--> all replicas are allocated and started since we have one node in each zone and rack");
        assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(2));
        assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0));
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.profiler; import static com.google.devtools.build.lib.profiler.ProfilerTask.TASK_COUNT; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.clock.Clock; import com.google.devtools.build.lib.collect.Extrema; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.profiler.PredicateBasedStatRecorder.RecorderAndPredicate; import com.google.devtools.build.lib.profiler.StatRecorder.VfsHeuristics; import com.google.gson.stream.JsonWriter; import com.sun.management.OperatingSystemMXBean; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.lang.management.ManagementFactory; import java.nio.charset.StandardCharsets; import java.time.Duration; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.BlockingQueue; 
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.zip.GZIPOutputStream; /** * Blaze internal profiler. Provides facility to report various Blaze tasks and store them * (asynchronously) in the file for future analysis. * * <p>Implemented as singleton so any caller should use Profiler.instance() to obtain reference. * * <p>Internally, profiler uses two data structures - ThreadLocal task stack to track nested tasks * and single ConcurrentLinkedQueue to gather all completed tasks. * * <p>Also, due to the nature of the provided functionality (instrumentation of all Blaze * components), build.lib.profiler package will be used by almost every other Blaze package, so * special attention should be paid to avoid any dependencies on the rest of the Blaze code, * including build.lib.util and build.lib.vfs. This is important because build.lib.util and * build.lib.vfs contain Profiler invocations and any dependency on those two packages would create * circular relationship. * * <p> * * @see ProfilerTask enum for recognized task types. */ @ThreadSafe public final class Profiler { /** The profiler (a static singleton instance). Inactive by default. */ private static final Profiler instance = new Profiler(); private static final int HISTOGRAM_BUCKETS = 20; private static final TaskData POISON_PILL = new TaskData(0, 0, null, "poison pill"); private static final long ACTION_COUNT_BUCKET_MS = 200; /** File format enum. */ public enum Format { JSON_TRACE_FILE_FORMAT, JSON_TRACE_FILE_COMPRESSED_FORMAT } /** A task that was very slow. 
*/ public static final class SlowTask implements Comparable<SlowTask> { final long durationNanos; final String description; final ProfilerTask type; private SlowTask(TaskData taskData) { this.durationNanos = taskData.duration; this.description = taskData.description; this.type = taskData.type; } @Override public int compareTo(SlowTask other) { long delta = durationNanos - other.durationNanos; if (delta < 0) { // Very clumsy return -1; } else if (delta > 0) { return 1; } else { return 0; } } public long getDurationNanos() { return durationNanos; } public String getDescription() { return description; } public ProfilerTask getType() { return type; } } /** * Container for the single task record. Should never be instantiated directly - use * TaskStack.create() instead. * * <p>Class itself is not thread safe, but all access to it from Profiler methods is. */ @ThreadCompatible private static class TaskData { final long threadId; final long startTimeNanos; final int id; final ProfilerTask type; final String description; long duration; TaskData(int id, long startTimeNanos, ProfilerTask eventType, String description) { this.id = id; this.threadId = Thread.currentThread().getId(); this.startTimeNanos = startTimeNanos; this.type = eventType; this.description = Preconditions.checkNotNull(description); } TaskData(long threadId, long startTimeNanos, long duration, String description) { this.id = -1; this.type = ProfilerTask.UNKNOWN; this.threadId = threadId; this.startTimeNanos = startTimeNanos; this.duration = duration; this.description = description; } @Override public String toString() { return "Thread " + threadId + ", task " + id + ", type " + type + ", " + description; } } private static final class ActionTaskData extends TaskData { final String primaryOutputPath; final String targetLabel; ActionTaskData( int id, long startTimeNanos, ProfilerTask eventType, String description, String primaryOutputPath, String targetLabel) { super(id, startTimeNanos, eventType, description); 
this.primaryOutputPath = primaryOutputPath; this.targetLabel = targetLabel; } } /** * Aggregator class that keeps track of the slowest tasks of the specified type. * * <p><code>extremaAggregators</p> is sharded so that all threads need not compete for the same * lock if they do the same operation at the same time. Access to an individual {@link Extrema} * is synchronized on the {@link Extrema} instance itself. */ private static final class SlowestTaskAggregator { private static final int SHARDS = 16; private static final int SIZE = 30; @SuppressWarnings({"unchecked", "rawtypes"}) private final Extrema<SlowTask>[] extremaAggregators = new Extrema[SHARDS]; SlowestTaskAggregator() { for (int i = 0; i < SHARDS; i++) { extremaAggregators[i] = Extrema.max(SIZE); } } // @ThreadSafe void add(TaskData taskData) { Extrema<SlowTask> extrema = extremaAggregators[(int) (Thread.currentThread().getId() % SHARDS)]; synchronized (extrema) { extrema.aggregate(new SlowTask(taskData)); } } // @ThreadSafe void clear() { for (int i = 0; i < SHARDS; i++) { Extrema<SlowTask> extrema = extremaAggregators[i]; synchronized (extrema) { extrema.clear(); } } } // @ThreadSafe Iterable<SlowTask> getSlowestTasks() { // This is slow, but since it only happens during the end of the invocation, it's OK. Extrema<SlowTask> mergedExtrema = Extrema.max(SIZE); for (int i = 0; i < SHARDS; i++) { Extrema<SlowTask> extrema = extremaAggregators[i]; synchronized (extrema) { for (SlowTask task : extrema.getExtremeElements()) { mergedExtrema.aggregate(task); } } } return mergedExtrema.getExtremeElements(); } } private Clock clock; private ImmutableSet<ProfilerTask> profiledTasks; private volatile long profileStartTime; private volatile boolean recordAllDurations = false; private Duration profileCpuStartTime; /** This counter provides a unique id for every task, used to provide a parent/child relation. */ private AtomicInteger taskId = new AtomicInteger(); /** * The reference to the current writer, if any. 
If the referenced writer is null, then disk writes * are disabled. This can happen when slowest task recording is enabled. */ private AtomicReference<FileWriter> writerRef = new AtomicReference<>(); /** * This is a per-thread data structure that's used to track the current stack of open tasks, the * purpose of which is to track the parent id of every task. This is also used to ensure that * {@link #profile} and {@link #completeTask} calls always occur in pairs. */ // TODO(ulfjack): We can infer the parent/child relationship after the fact instead of tracking it // at runtime. That would allow us to remove this data structure entirely. private ThreadLocal<ArrayDeque<TaskData>> taskStack; private final SlowestTaskAggregator[] slowestTasks = new SlowestTaskAggregator[ProfilerTask.values().length]; private final StatRecorder[] tasksHistograms = new StatRecorder[ProfilerTask.values().length]; /** Thread that collects local cpu usage data (if enabled). */ private CollectLocalCpuUsage cpuUsageThread; private TimeSeries actionCountTimeSeries; private long actionCountStartTime; private Profiler() { initHistograms(); for (ProfilerTask task : ProfilerTask.values()) { if (task.collectsSlowestInstances) { slowestTasks[task.ordinal()] = new SlowestTaskAggregator(); } } } private void initHistograms() { for (ProfilerTask task : ProfilerTask.values()) { if (task.isVfs()) { Map<String, ? extends Predicate<? super String>> vfsHeuristics = VfsHeuristics.vfsTypeHeuristics; List<RecorderAndPredicate> recorders = new ArrayList<>(vfsHeuristics.size()); for (Map.Entry<String, ? extends Predicate<? super String>> e : vfsHeuristics.entrySet()) { recorders.add(new RecorderAndPredicate( new SingleStatRecorder(task + " " + e.getKey(), HISTOGRAM_BUCKETS), e.getValue())); } tasksHistograms[task.ordinal()] = new PredicateBasedStatRecorder(recorders); } else { tasksHistograms[task.ordinal()] = new SingleStatRecorder(task, HISTOGRAM_BUCKETS); } } } /** * Returns task histograms. 
This must be called between calls to {@link #start} and {@link #stop}, * or the returned recorders are all empty. Note that the returned recorders may still be modified * concurrently (but at least they are thread-safe, so that's good). * * <p>The stat recorders are indexed by {@code ProfilerTask#ordinal}. */ // TODO(ulfjack): This returns incomplete data by design. Maybe we should return the histograms on // stop instead? However, this is currently only called from one location in a module, and that // can't call stop itself. What to do? public ImmutableList<StatRecorder> getTasksHistograms() { return ImmutableList.copyOf(tasksHistograms); } public static Profiler instance() { return instance; } /** * Returns the nanoTime of the current profiler instance, or an arbitrary * constant if not active. */ public static long nanoTimeMaybe() { if (instance.isActive()) { return instance.clock.nanoTime(); } return -1; } // Returns the elapsed wall clock time since the profile has been started or null if inactive. public static Duration elapsedTimeMaybe() { if (instance.isActive()) { return Duration.ofNanos(instance.clock.nanoTime()) .minus(Duration.ofNanos(instance.profileStartTime)); } return null; } private static Duration getProcessCpuTime() { OperatingSystemMXBean bean = (OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean(); return Duration.ofNanos(bean.getProcessCpuTime()); } // Returns the CPU time since the profile has been started or null if inactive. public static Duration getProcessCpuTimeMaybe() { if (instance().isActive()) { return getProcessCpuTime().minus(instance().profileCpuStartTime); } return null; } /** * Enable profiling. * * <p>Subsequent calls to beginTask/endTask will be recorded in the provided output stream. Please * note that stream performance is extremely important and buffered streams should be utilized. * * @param profiledTasks which of {@link ProfilerTask}s to track * @param stream output stream to store profile data. 
Note: passing unbuffered stream object * reference may result in significant performance penalties * @param recordAllDurations iff true, record all tasks regardless of their duration; otherwise * some tasks may get aggregated if they finished quick enough * @param clock a {@code BlazeClock.instance()} * @param execStartTimeNanos execution start time in nanos obtained from {@code clock.nanoTime()} */ public synchronized void start( ImmutableSet<ProfilerTask> profiledTasks, OutputStream stream, Format format, String outputBase, UUID buildID, boolean recordAllDurations, Clock clock, long execStartTimeNanos, boolean enabledCpuUsageProfiling, boolean slimProfile, boolean includePrimaryOutput, boolean includeTargetLabel) throws IOException { Preconditions.checkState(!isActive(), "Profiler already active"); initHistograms(); this.profiledTasks = profiledTasks; this.clock = clock; this.actionCountStartTime = clock.nanoTime(); this.actionCountTimeSeries = new TimeSeries(Duration.ofNanos(actionCountStartTime).toMillis(), ACTION_COUNT_BUCKET_MS); // Check for current limitation on the number of supported types due to using enum.ordinal() to // store them instead of EnumSet for performance reasons. 
Preconditions.checkState( TASK_COUNT < 256, "The profiler implementation supports only up to 255 different ProfilerTask values."); // reset state for the new profiling session taskId.set(0); this.recordAllDurations = recordAllDurations; this.taskStack = ThreadLocal.withInitial(ArrayDeque::new); FileWriter writer = null; if (stream != null && format != null) { switch (format) { case JSON_TRACE_FILE_FORMAT: writer = new JsonTraceFileWriter( stream, execStartTimeNanos, slimProfile, outputBase, buildID, includePrimaryOutput, includeTargetLabel); break; case JSON_TRACE_FILE_COMPRESSED_FORMAT: writer = new JsonTraceFileWriter( new GZIPOutputStream(stream), execStartTimeNanos, slimProfile, outputBase, buildID, includePrimaryOutput, includeTargetLabel); } writer.start(); } this.writerRef.set(writer); // activate profiler profileStartTime = execStartTimeNanos; profileCpuStartTime = getProcessCpuTime(); if (enabledCpuUsageProfiling) { cpuUsageThread = new CollectLocalCpuUsage(); cpuUsageThread.setDaemon(true); cpuUsageThread.start(); } } /** * Returns task histograms. This must be called between calls to {@link #start} and {@link #stop}, * or the returned list is empty. */ // TODO(ulfjack): This returns incomplete data by design. Also see getTasksHistograms. 
  public synchronized Iterable<SlowTask> getSlowestTasks() {
    // Concatenate the per-type aggregators' slowest tasks into one lazy iterable.
    List<Iterable<SlowTask>> slowestTasksByType = new ArrayList<>();
    for (SlowestTaskAggregator aggregator : slowestTasks) {
      // Aggregators exist only for task types with collectsSlowestInstances set.
      if (aggregator != null) {
        slowestTasksByType.add(aggregator.getSlowestTasks());
      }
    }
    return Iterables.concat(slowestTasksByType);
  }

  // Flushes the accumulated action-count time series as ACTION_COUNTS events and
  // clears it. No-op if the series was already consumed (or never enabled).
  private void collectActionCounts() {
    if (actionCountTimeSeries != null) {
      long endTimeMillis = Duration.ofNanos(clock.nanoTime()).toMillis();
      long profileStartMillis = Duration.ofNanos(actionCountStartTime).toMillis();
      // One bucket per ACTION_COUNT_BUCKET_MS interval, inclusive of the tail.
      int len = (int) ((endTimeMillis - profileStartMillis) / ACTION_COUNT_BUCKET_MS) + 1;
      double[] actionCountValues = actionCountTimeSeries.toDoubleArray(len);
      Profiler profiler = Profiler.instance();
      for (int i = 0; i < len; i++) {
        long timeMillis = profileStartMillis + i * ACTION_COUNT_BUCKET_MS;
        long timeNanos = TimeUnit.MILLISECONDS.toNanos(timeMillis);
        // Each bucket becomes a zero-duration counter event at its bucket start time.
        profiler.logEventAtTime(
            timeNanos, ProfilerTask.ACTION_COUNTS, String.valueOf(actionCountValues[i]));
      }
      // Mark the series consumed so a second stop() call cannot re-emit it.
      actionCountTimeSeries = null;
    }
  }

  /**
   * Disable profiling and complete profile file creation.
   * Subsequent calls to beginTask/endTask will no longer
   * be recorded in the profile.
   */
  public synchronized void stop() throws IOException {
    // Idempotent: a second stop() (or stop without start) is a no-op.
    if (!isActive()) {
      return;
    }
    // Teardown order matters: action counts must be logged while the writer is
    // still attached, and the CPU-usage thread must finish before its data is read.
    collectActionCounts();
    if (cpuUsageThread != null) {
      cpuUsageThread.stopCollecting();
      try {
        cpuUsageThread.join();
      } catch (InterruptedException e) {
        // Preserve the interrupt for the caller rather than swallowing it.
        Thread.currentThread().interrupt();
      }
      cpuUsageThread.logCollectedData();
      cpuUsageThread = null;
    }

    // Log a final event to update the duration of ProfilePhase.FINISH.
    logEvent(ProfilerTask.INFO, "Finishing");
    // Detach the writer first so concurrent loggers stop enqueuing, then drain it.
    FileWriter writer = writerRef.getAndSet(null);
    if (writer != null) {
      // Blocks until the writer thread has flushed the queue; may rethrow a saved
      // IOException from the writer thread.
      writer.shutdown();
      writer = null;
    }
    taskStack = null;
    initHistograms();
    // Clearing profileStartTime is what flips isActive() to false.
    profileStartTime = 0L;
    profileCpuStartTime = null;

    for (SlowestTaskAggregator aggregator : slowestTasks) {
      if (aggregator != null) {
        aggregator.clear();
      }
    }
  }

  /**
   * Returns true iff profiling is currently enabled.
*/ public boolean isActive() { return profileStartTime != 0L; } public boolean isProfiling(ProfilerTask type) { return profiledTasks.contains(type); } /** * Unless --record_full_profiler_data is given we drop small tasks and add their time to the * parents duration. */ private boolean wasTaskSlowEnoughToRecord(ProfilerTask type, long duration) { return (recordAllDurations || duration >= type.minDuration); } /** * Adds task directly to the main queue bypassing task stack. Used for simple tasks that are known * to not have any subtasks. * * @param startTimeNanos task start time (obtained through {@link Profiler#nanoTimeMaybe()}) * @param duration task duration * @param type task type * @param description task description. May be stored until end of build. */ private void logTask(long startTimeNanos, long duration, ProfilerTask type, String description) { Preconditions.checkNotNull(description); Preconditions.checkState(startTimeNanos > 0, "startTime was %s", startTimeNanos); Preconditions.checkState(!"".equals(description), "No description -> not helpful"); if (duration < 0) { // See note in Clock#nanoTime, which is used by Profiler#nanoTimeMaybe. duration = 0; } tasksHistograms[type.ordinal()].addStat( (int) TimeUnit.NANOSECONDS.toMillis(duration), description); // Store instance fields as local variables so they are not nulled out from under us by #clear. FileWriter currentWriter = writerRef.get(); if (wasTaskSlowEnoughToRecord(type, duration)) { TaskData data = new TaskData(taskId.incrementAndGet(), startTimeNanos, type, description); data.duration = duration; if (currentWriter != null) { currentWriter.enqueue(data); } SlowestTaskAggregator aggregator = slowestTasks[type.ordinal()]; if (aggregator != null) { aggregator.add(data); } } } private boolean shouldProfile(long startTime, ProfilerTask type) { return isActive() && startTime > 0 && isProfiling(type); } /** * Used externally to submit simple task (one that does not have any subtasks). 
Depending on the * minDuration attribute of the task type, task may be just aggregated into the parent task and * not stored directly. * * @param startTime task start time (obtained through {@link Profiler#nanoTimeMaybe()}) * @param type task type * @param description task description. May be stored until the end of the build. */ public void logSimpleTask(long startTime, ProfilerTask type, String description) { if (shouldProfile(startTime, type)) { logTask(startTime, clock.nanoTime() - startTime, type, description); } } /** * Used externally to submit simple task (one that does not have any subtasks). Depending on the * minDuration attribute of the task type, task may be just aggregated into the parent task and * not stored directly. * * <p>Note that start and stop time must both be acquired from the same clock instance. * * @param startTimeNanos task start time * @param stopTimeNanos task stop time * @param type task type * @param description task description. May be stored until the end of the build. */ public void logSimpleTask( long startTimeNanos, long stopTimeNanos, ProfilerTask type, String description) { if (shouldProfile(startTimeNanos, type)) { logTask(startTimeNanos, stopTimeNanos - startTimeNanos, type, description); } } /** * Used externally to submit simple task (one that does not have any subtasks). Depending on the * minDuration attribute of the task type, task may be just aggregated into the parent task and * not stored directly. * * @param startTimeNanos task start time (obtained through {@link Profiler#nanoTimeMaybe()}) * @param duration the duration of the task * @param type task type * @param description task description. May be stored until the end of the build. 
*/ public void logSimpleTaskDuration( long startTimeNanos, Duration duration, ProfilerTask type, String description) { if (shouldProfile(startTimeNanos, type)) { logTask(startTimeNanos, duration.toNanos(), type, description); } } /** Used to log "events" happening at a specific time - tasks with zero duration. */ public void logEventAtTime(long atTimeNanos, ProfilerTask type, String description) { if (isActive() && isProfiling(type)) { logTask(atTimeNanos, 0, type, description); } } /** Used to log "events" - tasks with zero duration. */ @VisibleForTesting void logEvent(ProfilerTask type, String description) { logEventAtTime(clock.nanoTime(), type, description); } /** * Records the beginning of a task as specified, and returns a {@link SilentCloseable} instance * that ends the task. This lets the system do the work of ending the task, with the compiler * giving a warning if the returned instance is not closed. * * <p>Use of this method allows to support nested task monitoring. For tasks that are known to not * have any subtasks, logSimpleTask() should be used instead. * * <p>Use like this: * <pre> * {@code * try (SilentCloseable c = Profiler.instance().profile(type, "description")) { * // Your code here. * } * } * </pre> * * @param type predefined task type - see ProfilerTask for available types. * @param description task description. May be stored until the end of the build. */ public SilentCloseable profile(ProfilerTask type, String description) { // ProfilerInfo.allTasksById is supposed to be an id -> Task map, but it is in fact a List, // which means that we cannot drop tasks to which we had already assigned ids. Therefore, // non-leaf tasks must not have a minimum duration. However, we don't quite consistently // enforce this, and Blaze only works because we happen not to add child tasks to those parent // tasks that have a minimum duration. 
Preconditions.checkNotNull(description); if (isActive() && isProfiling(type)) { taskStack .get() .push(new TaskData(taskId.incrementAndGet(), clock.nanoTime(), type, description)); return () -> completeTask(type); } else { return NOP; } } /** * Similar to {@link #profile}, but specific to action-related events. Takes an extra argument: * primaryOutput. */ public SilentCloseable profileAction( ProfilerTask type, String description, String primaryOutput, String targetLabel) { Preconditions.checkNotNull(description); if (isActive() && isProfiling(type)) { taskStack .get() .push( new ActionTaskData( taskId.incrementAndGet(), clock.nanoTime(), type, description, primaryOutput, targetLabel)); return () -> completeTask(type); } else { return NOP; } } private static final SilentCloseable NOP = () -> {}; /** * Records the beginning of a task as specified, and returns a {@link SilentCloseable} instance * that ends the task. This lets the system do the work of ending the task, with the compiler * giving a warning if the returned instance is not closed. * * <p>Use of this method allows to support nested task monitoring. For tasks that are known to not * have any subtasks, logSimpleTask() should be used instead. * * <p>This is a convenience method that uses {@link ProfilerTask#INFO}. * * <p>Use like this: * <pre> * {@code * try (SilentCloseable c = Profiler.instance().profile("description")) { * // Your code here. * } * } * </pre> * * @param description task description. May be stored until the end of the build. */ public SilentCloseable profile(String description) { return profile(ProfilerTask.INFO, description); } private boolean countAction(ProfilerTask type, TaskData taskData) { return type == ProfilerTask.ACTION || (type == ProfilerTask.INFO && "discoverInputs".equals(taskData.description)); } /** * Records the end of the task and moves tasks from the thread-local stack to * the main queue. Will validate that given task type matches task at the top * of the stack. 
* * @param type task type. */ private void completeTask(ProfilerTask type) { if (isActive() && isProfiling(type)) { long endTime = clock.nanoTime(); TaskData data = taskStack.get().pop(); Preconditions.checkState( data.type == type, "Inconsistent Profiler.completeTask() call: should have been %s but got %s (%s, %s)", data.type, type, data, taskStack.get()); data.duration = endTime - data.startTimeNanos; boolean shouldRecordTask = wasTaskSlowEnoughToRecord(type, data.duration); FileWriter writer = writerRef.get(); if (shouldRecordTask && writer != null) { writer.enqueue(data); } if (shouldRecordTask) { if (actionCountTimeSeries != null && countAction(type, data)) { actionCountTimeSeries.addRange( Duration.ofNanos(data.startTimeNanos).toMillis(), Duration.ofNanos(endTime).toMillis()); } SlowestTaskAggregator aggregator = slowestTasks[type.ordinal()]; if (aggregator != null) { aggregator.add(data); } } } } /** Convenience method to log phase marker tasks. */ public void markPhase(ProfilePhase phase) throws InterruptedException { MemoryProfiler.instance().markPhase(phase); if (isActive() && isProfiling(ProfilerTask.PHASE)) { Preconditions.checkState(taskStack.get().isEmpty(), "Phase tasks must not be nested"); logEvent(ProfilerTask.PHASE, phase.description); } } private abstract static class FileWriter implements Runnable { protected final BlockingQueue<TaskData> queue; protected final Thread thread; protected IOException savedException; FileWriter() { this.queue = new LinkedBlockingQueue<>(); this.thread = new Thread(this, "profile-writer-thread"); } public void shutdown() throws IOException { // Add poison pill to queue and then wait for writer thread to shut down. 
queue.add(POISON_PILL); try { thread.join(); } catch (InterruptedException e) { thread.interrupt(); Thread.currentThread().interrupt(); } if (savedException != null) { throw savedException; } } public void start() { thread.start(); } public void enqueue(TaskData data) { queue.add(data); } } /** Writes the profile in Json Trace file format. */ private static class JsonTraceFileWriter extends FileWriter { private final OutputStream outStream; private final long profileStartTimeNanos; private final ThreadLocal<Boolean> metadataPosted = ThreadLocal.withInitial(() -> Boolean.FALSE); private final boolean slimProfile; private final boolean includePrimaryOutput; private final boolean includeTargetLabel; private final UUID buildID; private final String outputBase; // The JDK never returns 0 as thread id so we use that as fake thread id for the critical path. private static final long CRITICAL_PATH_THREAD_ID = 0; private static final long SLIM_PROFILE_EVENT_THRESHOLD = 10_000; private static final long SLIM_PROFILE_MAXIMAL_PAUSE_NS = Duration.ofMillis(100).toNanos(); private static final long SLIM_PROFILE_MAXIMAL_DURATION_NS = Duration.ofMillis(250).toNanos(); JsonTraceFileWriter( OutputStream outStream, long profileStartTimeNanos, boolean slimProfile, String outputBase, UUID buildID, boolean includePrimaryOutput, boolean includeTargetLabel) { this.outStream = outStream; this.profileStartTimeNanos = profileStartTimeNanos; this.slimProfile = slimProfile; this.buildID = buildID; this.outputBase = outputBase; this.includePrimaryOutput = includePrimaryOutput; this.includeTargetLabel = includeTargetLabel; } @Override public void enqueue(TaskData data) { if (!metadataPosted.get()) { metadataPosted.set(Boolean.TRUE); // Create a TaskData object that is special-cased below. 
queue.add( new TaskData( /* id= */ 0, /* startTimeNanos= */ -1, ProfilerTask.THREAD_NAME, Thread.currentThread().getName())); } queue.add(data); } private static final class MergedEvent { int count = 0; long startTimeNanos; long endTimeNanos; TaskData data; /* * Tries to merge an additional event, i.e. if the event is close enough to the already merged * event. * * Returns null, if merging was possible. * If not mergeable, returns the TaskData of the previously merged events and clears the * internal data structures. */ TaskData maybeMerge(TaskData data) { long startTimeNanos = data.startTimeNanos; long endTimeNanos = startTimeNanos + data.duration; if (count > 0 && startTimeNanos >= this.startTimeNanos && endTimeNanos <= this.endTimeNanos) { // Skips child tasks. return null; } if (count == 0) { this.data = data; this.startTimeNanos = startTimeNanos; this.endTimeNanos = endTimeNanos; count++; return null; } else if (startTimeNanos <= this.endTimeNanos + SLIM_PROFILE_MAXIMAL_PAUSE_NS) { this.endTimeNanos = endTimeNanos; count++; return null; } else { TaskData ret = getAndReset(); this.startTimeNanos = startTimeNanos; this.endTimeNanos = endTimeNanos; this.data = data; count = 1; return ret; } } // Returns a TaskData object representing the merged data and clears internal data structures. TaskData getAndReset() { TaskData ret; if (data == null || count <= 1) { ret = data; } else { ret = new TaskData( data.threadId, this.startTimeNanos, this.endTimeNanos - this.startTimeNanos, "merged " + count + " events"); } count = 0; data = null; return ret; } } private void writeTask(JsonWriter writer, TaskData data) throws IOException { Preconditions.checkNotNull(data); String eventType = data.duration == 0 ? 
"i" : "X"; writer.setIndent(" "); writer.beginObject(); writer.setIndent(""); if (data.type == null) { writer.setIndent(" "); } else { writer.name("cat").value(data.type.description); } writer.name("name").value(data.description); writer.name("ph").value(eventType); writer .name("ts") .value(TimeUnit.NANOSECONDS.toMicros(data.startTimeNanos - profileStartTimeNanos)); if (data.duration != 0) { writer.name("dur").value(TimeUnit.NANOSECONDS.toMicros(data.duration)); } writer.name("pid").value(1); // Primary outputs are non-mergeable, thus incompatible with slim profiles. if (includePrimaryOutput && data instanceof ActionTaskData) { writer.name("out").value(((ActionTaskData) data).primaryOutputPath); } if (includeTargetLabel && data instanceof ActionTaskData) { writer.name("args"); writer.beginObject(); writer.name("target").value(((ActionTaskData) data).targetLabel); writer.endObject(); } long threadId = data.type == ProfilerTask.CRITICAL_PATH_COMPONENT ? CRITICAL_PATH_THREAD_ID : data.threadId; writer.name("tid").value(threadId); writer.endObject(); } /** * Saves all gathered information from taskQueue queue to the file. * Method is invoked internally by the Timer-based thread and at the end of * profiling session. */ @Override public void run() { try { boolean receivedPoisonPill = false; try (JsonWriter writer = new JsonWriter( // The buffer size of 262144 is chosen at random. new OutputStreamWriter( new BufferedOutputStream(outStream, 262144), StandardCharsets.UTF_8))) { writer.beginObject(); writer.name("otherData"); writer.beginObject(); writer.name("build_id").value(buildID.toString()); writer.name("output_base").value(outputBase); writer.name("date").value(new Date().toString()); writer.endObject(); writer.name("traceEvents"); writer.beginArray(); TaskData data; // Generate metadata event for the critical path as thread 0 in disguise. 
writer.setIndent(" "); writer.beginObject(); writer.setIndent(""); writer.name("name").value("thread_name"); writer.name("ph").value("M"); writer.name("pid").value(1); writer.name("tid").value(CRITICAL_PATH_THREAD_ID); writer.name("args"); writer.beginObject(); writer.name("name").value("Critical Path"); writer.endObject(); writer.endObject(); HashMap<Long, MergedEvent> eventsPerThread = new HashMap<>(); int eventCount = 0; while ((data = queue.take()) != POISON_PILL) { Preconditions.checkNotNull(data); eventCount++; if (data.type == ProfilerTask.THREAD_NAME) { writer.setIndent(" "); writer.beginObject(); writer.setIndent(""); writer.name("name").value("thread_name"); writer.name("ph").value("M"); writer.name("pid").value(1); writer.name("tid").value(data.threadId); writer.name("args"); writer.beginObject(); writer.name("name").value(data.description); writer.endObject(); writer.endObject(); continue; } if (data.type == ProfilerTask.LOCAL_CPU_USAGE || data.type == ProfilerTask.ACTION_COUNTS) { // Skip counts equal to zero. They will show up as a thin line in the profile. 
if ("0.0".equals(data.description)) { continue; } writer.setIndent(" "); writer.beginObject(); writer.setIndent(""); writer.name("name").value(data.type.description); writer.name("ph").value("C"); writer .name("ts") .value( TimeUnit.NANOSECONDS.toMicros(data.startTimeNanos - profileStartTimeNanos)); writer.name("pid").value(1); writer.name("tid").value(data.threadId); writer.name("args"); writer.beginObject(); writer.name("cpu").value(data.description); writer.endObject(); writer.endObject(); continue; } if (slimProfile && eventCount > SLIM_PROFILE_EVENT_THRESHOLD && data.duration > 0 && data.duration < SLIM_PROFILE_MAXIMAL_DURATION_NS && data.type != ProfilerTask.CRITICAL_PATH_COMPONENT) { eventsPerThread.putIfAbsent(data.threadId, new MergedEvent()); TaskData taskData = eventsPerThread.get(data.threadId).maybeMerge(data); if (taskData != null) { writeTask(writer, taskData); } } else { writeTask(writer, data); } } for (Profiler.JsonTraceFileWriter.MergedEvent value : eventsPerThread.values()) { TaskData taskData = value.getAndReset(); if (taskData != null) { writeTask(writer, taskData); } } receivedPoisonPill = true; writer.setIndent(" "); writer.endArray(); writer.endObject(); } catch (IOException e) { this.savedException = e; if (!receivedPoisonPill) { while (queue.take() != POISON_PILL) { // We keep emptying the queue, but we can't write anything. } } } } catch (InterruptedException e) { // Exit silently. } } } }
/*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package org.eigenbase.rel.metadata;

import java.util.*;

import org.eigenbase.rel.*;
import org.eigenbase.rel.rules.*;
import org.eigenbase.relopt.*;
import org.eigenbase.reltype.*;
import org.eigenbase.rex.*;
import org.eigenbase.sql.fun.*;

/**
 * RelMdColumnUniqueness supplies a default implementation of {@link
 * RelMetadataQuery#areColumnsUnique} for the standard logical algebra.
 *
 * <p>Each {@code areColumnsUnique} overload below is discovered reflectively
 * by the superclass and dispatched on the concrete rel type; the catch-all
 * {@code RelNode} overload is the fallback when no more specific one applies.
 * All overloads answer the question: "is the given set of columns a unique
 * key of the rel's output?" A {@code null} return means "unknown".
 *
 * @author Zelaine Fong
 * @version $Id$
 */
public class RelMdColumnUniqueness
    extends ReflectiveRelMetadataProvider
{
    //~ Constructors -----------------------------------------------------------

    public RelMdColumnUniqueness()
    {
        // Tell superclass reflection about parameter types expected
        // for various metadata queries.

        // This corresponds to areColumnsUnique(rel, BitSet columns,
        // boolean ignoreNulls); note that we don't specify the rel type
        // because we always overload on that.
        List<Class> args = new ArrayList<Class>();
        args.add((Class) BitSet.class);
        args.add((Class) Boolean.TYPE);
        mapParameterTypes(
            "areColumnsUnique",
            args);
    }

    //~ Methods ----------------------------------------------------------------

    /**
     * A filter never duplicates rows, so uniqueness is simply delegated to
     * its child.
     */
    public Boolean areColumnsUnique(
        FilterRelBase rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        return RelMetadataQuery.areColumnsUnique(
            rel.getChild(),
            columns,
            ignoreNulls);
    }

    /**
     * A sort only reorders rows, so uniqueness is delegated to its child.
     */
    public Boolean areColumnsUnique(
        SortRel rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        return RelMetadataQuery.areColumnsUnique(
            rel.getChild(),
            columns,
            ignoreNulls);
    }

    /**
     * For a correlator, uniqueness is derived from the left (outer) input.
     * NOTE(review): the incoming column mask is passed through unchanged —
     * presumably callers only ask about left-side columns; verify against
     * callers.
     */
    public Boolean areColumnsUnique(
        CorrelatorRel rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        return RelMetadataQuery.areColumnsUnique(
            rel.getLeft(),
            columns,
            ignoreNulls);
    }

    /**
     * Maps the requested output columns of a projection back to the child
     * columns they reference, then asks the child. Only identity references
     * (and, when ignoring nulls, nullability-only casts of identity
     * references) are mapped; all other expressions are skipped.
     */
    public Boolean areColumnsUnique(
        ProjectRelBase rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        // ProjectRel maps a set of rows to a different set;
        // Without knowledge of the mapping function(whether it
        // preserves uniqueness), it is only safe to derive uniqueness
        // info from the child of a project when the mapping is f(a) => a.
        //
        // Also need to map the input column set to the corresponding child
        // references
        RexNode [] projExprs = rel.getProjectExps();
        BitSet childColumns = new BitSet();
        for (
            int bit = columns.nextSetBit(0);
            bit >= 0;
            bit = columns.nextSetBit(bit + 1))
        {
            RexNode projExpr = projExprs[bit];
            if (projExpr instanceof RexInputRef) {
                childColumns.set(((RexInputRef) projExpr).getIndex());
            } else if (projExpr instanceof RexCall && ignoreNulls) {
                // If the expression is a cast such that the types are the same
                // except for the nullability, then if we're ignoring nulls,
                // it doesn't matter whether the underlying column reference
                // is nullable.  Check that the types are the same by making a
                // nullable copy of both types and then comparing them.
                RexCall call = (RexCall) projExpr;
                if (call.getOperator() != SqlStdOperatorTable.castFunc) {
                    continue;
                }
                RexNode castOperand = call.getOperands()[0];
                if (!(castOperand instanceof RexInputRef)) {
                    continue;
                }
                RelDataTypeFactory typeFactory =
                    rel.getCluster().getTypeFactory();
                RelDataType castType =
                    typeFactory.createTypeWithNullability(
                        projExpr.getType(),
                        true);
                RelDataType origType =
                    typeFactory.createTypeWithNullability(
                        castOperand.getType(),
                        true);
                if (castType.equals(origType)) {
                    childColumns.set(((RexInputRef) castOperand).getIndex());
                }
            } else {
                // If the expression will not influence uniqueness of the
                // projection, then skip it.
                continue;
            }
        }

        // If no columns can affect uniqueness, then return unknown
        if (childColumns.cardinality() == 0) {
            return null;
        }

        return RelMetadataQuery.areColumnsUnique(
            rel.getChild(),
            childColumns,
            ignoreNulls);
    }

    /**
     * Splits the column mask between the two join inputs. If columns come
     * from both sides, they are unique iff each side's subset is unique on
     * its own input. If they come from only one side, uniqueness on that
     * side can still be propagated when the other side's equijoin key is
     * unique (so the join cannot fan out rows) and the input is not
     * null-generating.
     */
    public Boolean areColumnsUnique(
        JoinRelBase rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        if (columns.cardinality() == 0) {
            return false;
        }

        RelNode left = rel.getLeft();
        RelNode right = rel.getRight();

        // Divide up the input column mask into column masks for the left and
        // right sides of the join; right-side bits are offset by the left
        // field count.
        BitSet leftColumns = new BitSet();
        BitSet rightColumns = new BitSet();
        int nLeftColumns = left.getRowType().getFieldCount();
        for (
            int bit = columns.nextSetBit(0);
            bit >= 0;
            bit = columns.nextSetBit(bit + 1))
        {
            if (bit < nLeftColumns) {
                leftColumns.set(bit);
            } else {
                rightColumns.set(bit - nLeftColumns);
            }
        }

        // If the original column mask contains columns from both the left and
        // right hand side, then the columns are unique if and only if they're
        // unique for their respective join inputs
        Boolean leftUnique =
            RelMetadataQuery.areColumnsUnique(left, leftColumns, ignoreNulls);
        Boolean rightUnique =
            RelMetadataQuery.areColumnsUnique(right, rightColumns, ignoreNulls);
        if ((leftColumns.cardinality() > 0)
            && (rightColumns.cardinality() > 0))
        {
            if ((leftUnique == null) || (rightUnique == null)) {
                return null;
            } else {
                return (leftUnique && rightUnique);
            }
        }

        // If we're only trying to determine uniqueness for columns that
        // originate from one join input, then determine if the equijoin
        // columns from the other join input are unique.  If they are, then
        // the columns are unique for the entire join if they're unique for
        // the corresponding join input, provided that input is not null
        // generating.
        BitSet leftJoinCols = new BitSet();
        BitSet rightJoinCols = new BitSet();
        RelMdUtil.findEquiJoinCols(
            left,
            right,
            rel.getCondition(),
            leftJoinCols,
            rightJoinCols);

        if (leftColumns.cardinality() > 0) {
            if (rel.getJoinType().generatesNullsOnLeft()) {
                return false;
            }
            Boolean rightJoinColsUnique =
                RelMetadataQuery.areColumnsUnique(
                    right,
                    rightJoinCols,
                    ignoreNulls);
            if ((rightJoinColsUnique == null) || (leftUnique == null)) {
                return null;
            }
            return (rightJoinColsUnique && leftUnique);
        } else if (rightColumns.cardinality() > 0) {
            if (rel.getJoinType().generatesNullsOnRight()) {
                return false;
            }
            Boolean leftJoinColsUnique =
                RelMetadataQuery.areColumnsUnique(
                    left,
                    leftJoinCols,
                    ignoreNulls);
            if ((leftJoinColsUnique == null) || (rightUnique == null)) {
                return null;
            }
            return (leftJoinColsUnique && rightUnique);
        }

        // Unreachable: a non-empty mask must land on at least one side.
        assert (false);
        return null;
    }

    /**
     * A semijoin emits only left-side rows (at most once each), so
     * uniqueness is delegated to the left input.
     */
    public Boolean areColumnsUnique(
        SemiJoinRel rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        // only return the unique keys from the LHS since a semijoin only
        // returns the LHS
        return RelMetadataQuery.areColumnsUnique(
            rel.getLeft(),
            columns,
            ignoreNulls);
    }

    /**
     * The group-by key of an aggregation is always a unique key of its
     * output, so any superset of it is unique too.
     */
    public Boolean areColumnsUnique(
        AggregateRelBase rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        // group by keys form a unique key
        if (rel.getGroupCount() > 0) {
            BitSet groupKey = new BitSet();
            for (int i = 0; i < rel.getGroupCount(); i++) {
                groupKey.set(i);
            }
            return RelOptUtil.contains(columns, groupKey);
        } else {
            // interpret an empty set as asking whether the aggregation is full
            // table (in which case it returns at most one row);
            // TODO jvs 1-Sept-2008:  apply this convention consistently
            // to other relational expressions, as well as to
            // RelMetadataQuery.getUniqueKeys
            return columns.isEmpty();
        }
    }

    // Catch-all rule when none of the others apply.
    public Boolean areColumnsUnique(
        RelNode rel,
        BitSet columns,
        boolean ignoreNulls)
    {
        // no information available
        return null;
    }
}

// End RelMdColumnUniqueness.java
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.datacollector.client.model;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.util.ArrayList;
import java.util.List;

// This class was originally generated, however it's now maintained manually

/**
 * JSON model describing a single stage definition (source, processor,
 * executor, or target) as exposed by the Data Collector REST API.
 *
 * <p>Plain mutable bean: every property has a Jackson-mapped getter/setter
 * pair, except {@code eventDefs} (no annotations — serialized by field name)
 * and {@code yamlUpgrader}, whose setter is fluent and returns {@code this}.
 */
@ApiModel(description = "")
public class StageDefinitionJson {

  /** Kind of stage this definition describes. */
  public enum TypeEnum {
    SOURCE("SOURCE"),
    PROCESSOR("PROCESSOR"),
    EXECUTOR("EXECUTOR"),
    TARGET("TARGET");

    private final String value;

    TypeEnum(String value) {
      this.value = value;
    }

    @Override
    public String toString() {
      return value;
    }
  }

  /** Pipeline execution modes a stage may declare support for. */
  public enum ExecutionModesEnum {
    STANDALONE("STANDALONE"),
    CLUSTER_BATCH("CLUSTER_BATCH"),
    CLUSTER_YARN_STREAMING("CLUSTER_YARN_STREAMING"),
    CLUSTER_MESOS_STREAMING("CLUSTER_MESOS_STREAMING"),
    SLAVE("SLAVE"),
    EDGE("EDGE"),
    EMR_BATCH("EMR_BATCH"),
    BATCH("BATCH"),
    STREAMING("STREAMING"),
    ;

    private final String value;

    ExecutionModesEnum(String value) {
      this.value = value;
    }

    @Override
    public String toString() {
      return value;
    }
  }

  // Identity and display metadata.
  private String name = null;
  private TypeEnum type = null;
  private String className = null;
  private String label = null;
  private String libraryLabel = null;

  // Configuration / raw-source sub-definitions.
  private ConfigGroupDefinitionJson configGroupDefinition = null;
  private RawSourceDefinitionJson rawSourceDefinition = null;

  // Stage-role flags.
  private Boolean errorStage = null;
  private Boolean statsAggregatorStage = null;
  private Boolean connectionVerifierStage = null;
  private Boolean pipelineLifecycleStage = null;
  private Boolean offsetCommitTrigger = null;

  // Output-stream configuration. Note: list fields initialized to empty
  // lists here default to empty on serialization; those initialized to null
  // stay absent until set.
  private Boolean variableOutputStreams = null;
  private Integer outputStreams = null;
  private String outputStreamLabelProviderClass = null;
  private List<String> outputStreamLabels = new ArrayList<String>();
  private String outputStreamsDrivenByConfig = null;

  // Services, visibility, and execution metadata.
  private List<ServiceDependencyDefinitionJson> services = null;
  private List<String> hideStage = null;
  private List<ExecutionModesEnum> executionModes = new ArrayList<ExecutionModesEnum>();
  private String description = null;
  private Boolean privateClassLoader = null;
  private String library = null;
  private List<ConfigDefinitionJson> configDefinitions = new ArrayList<ConfigDefinitionJson>();
  private String version = null;
  private String icon = null;

  // Capability flags.
  private Boolean onRecordError = null;
  private Boolean preconditions = null;
  private Boolean resetOffset = null;
  private Boolean producingEvents = null;
  private String onlineHelpRefUrl = null;
  private Boolean sendsResponse = null;
  private Boolean beta = null;

  // Input-stream configuration.
  private Integer inputStreams = null;
  private String inputStreamLabelProviderClass = null;
  private List<String> inputStreamLabels = null;

  // Event definitions and misc.
  private List<String> eventDefs = new ArrayList<>();
  private Boolean bisectable = false;
  private String yamlUpgrader;

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("name")
  public String getName() {
    return name;
  }
  public void setName(String name) {
    this.name = name;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("type")
  public TypeEnum getType() {
    return type;
  }
  public void setType(TypeEnum type) {
    this.type = type;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("className")
  public String getClassName() {
    return className;
  }
  public void setClassName(String className) {
    this.className = className;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("label")
  public String getLabel() {
    return label;
  }
  public void setLabel(String label) {
    this.label = label;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("libraryLabel")
  public String getLibraryLabel() {
    return libraryLabel;
  }
  public void setLibraryLabel(String libraryLabel) {
    this.libraryLabel = libraryLabel;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("configGroupDefinition")
  public ConfigGroupDefinitionJson getConfigGroupDefinition() {
    return configGroupDefinition;
  }
  public void setConfigGroupDefinition(ConfigGroupDefinitionJson configGroupDefinition) {
    this.configGroupDefinition = configGroupDefinition;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("rawSourceDefinition")
  public RawSourceDefinitionJson getRawSourceDefinition() {
    return rawSourceDefinition;
  }
  public void setRawSourceDefinition(RawSourceDefinitionJson rawSourceDefinition) {
    this.rawSourceDefinition = rawSourceDefinition;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("errorStage")
  public Boolean getErrorStage() {
    return errorStage;
  }
  public void setErrorStage(Boolean errorStage) {
    this.errorStage = errorStage;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("statsAggregatorStage")
  public Boolean getStatsAggregatorStage() {
    return statsAggregatorStage;
  }
  public void setStatsAggregatorStage(Boolean statsAggregatorStage) {
    this.statsAggregatorStage = statsAggregatorStage;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("connectionVerifierStage")
  public Boolean getConnectionVerifierStage() {
    return connectionVerifierStage;
  }
  public void setConnectionVerifierStage(Boolean connectionVerifierStage) {
    this.connectionVerifierStage = connectionVerifierStage;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("pipelineLifecycleStage")
  public Boolean getPipelineLifecycleStage() {
    return pipelineLifecycleStage;
  }
  public void setPipelineLifecycleStage(Boolean pipelineLifecycleStage) {
    this.pipelineLifecycleStage = pipelineLifecycleStage;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("offsetCommitTrigger")
  public Boolean getOffsetCommitTrigger() {
    return offsetCommitTrigger;
  }
  public void setOffsetCommitTrigger(Boolean offsetCommitTrigger) {
    this.offsetCommitTrigger = offsetCommitTrigger;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("variableOutputStreams")
  public Boolean getVariableOutputStreams() {
    return variableOutputStreams;
  }
  public void setVariableOutputStreams(Boolean variableOutputStreams) {
    this.variableOutputStreams = variableOutputStreams;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("outputStreams")
  public Integer getOutputStreams() {
    return outputStreams;
  }
  public void setOutputStreams(Integer outputStreams) {
    this.outputStreams = outputStreams;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("outputStreamLabelProviderClass")
  public String getOutputStreamLabelProviderClass() {
    return outputStreamLabelProviderClass;
  }
  public void setOutputStreamLabelProviderClass(String outputStreamLabelProviderClass) {
    this.outputStreamLabelProviderClass = outputStreamLabelProviderClass;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("outputStreamLabels")
  public List<String> getOutputStreamLabels() {
    return outputStreamLabels;
  }
  public void setOutputStreamLabels(List<String> outputStreamLabels) {
    this.outputStreamLabels = outputStreamLabels;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("outputStreamsDrivenByConfig")
  public String getOutputStreamsDrivenByConfig() {
    return outputStreamsDrivenByConfig;
  }
  public void setOutputStreamsDrivenByConfig(String outputStreamsDrivenByConfig) {
    this.outputStreamsDrivenByConfig = outputStreamsDrivenByConfig;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("services")
  public List<ServiceDependencyDefinitionJson> getServices() {
    return services;
  }
  public void setServices(List<ServiceDependencyDefinitionJson> services) {
    this.services = services;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("hideStage")
  public List<String> getHideStage() {
    return hideStage;
  }
  public void setHideStage(List<String> hideStage) {
    this.hideStage = hideStage;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("executionModes")
  public List<ExecutionModesEnum> getExecutionModes() {
    return executionModes;
  }
  public void setExecutionModes(List<ExecutionModesEnum> executionModes) {
    this.executionModes = executionModes;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("description")
  public String getDescription() {
    return description;
  }
  public void setDescription(String description) {
    this.description = description;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("privateClassLoader")
  public Boolean getPrivateClassLoader() {
    return privateClassLoader;
  }
  public void setPrivateClassLoader(Boolean privateClassLoader) {
    this.privateClassLoader = privateClassLoader;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("library")
  public String getLibrary() {
    return library;
  }
  public void setLibrary(String library) {
    this.library = library;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("configDefinitions")
  public List<ConfigDefinitionJson> getConfigDefinitions() {
    return configDefinitions;
  }
  public void setConfigDefinitions(List<ConfigDefinitionJson> configDefinitions) {
    this.configDefinitions = configDefinitions;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("version")
  public String getVersion() {
    return version;
  }
  public void setVersion(String version) {
    this.version = version;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("icon")
  public String getIcon() {
    return icon;
  }
  public void setIcon(String icon) {
    this.icon = icon;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("onRecordError")
  public Boolean getOnRecordError() {
    return onRecordError;
  }
  public void setOnRecordError(Boolean onRecordError) {
    this.onRecordError = onRecordError;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("preconditions")
  public Boolean getPreconditions() {
    return preconditions;
  }
  public void setPreconditions(Boolean preconditions) {
    this.preconditions = preconditions;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("resetOffset")
  public Boolean getResetOffset() {
    return resetOffset;
  }
  public void setResetOffset(Boolean resetOffset) {
    this.resetOffset = resetOffset;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("producingEvents")
  public Boolean getProducingEvents() {
    return producingEvents;
  }
  public void setProducingEvents(Boolean producingEvents) {
    this.producingEvents = producingEvents;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("onlineHelpRefUrl")
  public String getOnlineHelpRefUrl() {
    return onlineHelpRefUrl;
  }
  public void setOnlineHelpRefUrl(String onlineHelpRefUrl) {
    this.onlineHelpRefUrl = onlineHelpRefUrl;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("sendsResponse")
  public Boolean getSendsResponse() {
    return sendsResponse;
  }
  public void setSendsResponse(Boolean sendsResponse) {
    this.sendsResponse = sendsResponse;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("beta")
  public Boolean getBeta() {
    return beta;
  }
  public void setBeta(Boolean beta) {
    this.beta = beta;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("inputStreams")
  public Integer getInputStreams() {
    return inputStreams;
  }
  public void setInputStreams(Integer inputStreams) {
    this.inputStreams = inputStreams;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("inputStreamLabelProviderClass")
  public String getInputStreamLabelProviderClass() {
    return inputStreamLabelProviderClass;
  }
  public void setInputStreamLabelProviderClass(String inputStreamLabelProviderClass) {
    this.inputStreamLabelProviderClass = inputStreamLabelProviderClass;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("inputStreamLabels")
  public List<String> getInputStreamLabels() {
    return inputStreamLabels;
  }
  public void setInputStreamLabels(List<String> inputStreamLabels) {
    this.inputStreamLabels = inputStreamLabels;
  }

  // Intentionally un-annotated: Jackson maps these by bean name.
  public List<String> getEventDefs() {
    return eventDefs;
  }
  public void setEventDefs(List<String> eventDefs) {
    this.eventDefs = eventDefs;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("bisectable")
  public Boolean isBisectable() {
    return bisectable;
  }
  public void setBisectable(Boolean bisectable) {
    this.bisectable = bisectable;
  }

  @ApiModelProperty(value = "")
  @JsonProperty("yamlUpgrader")
  public String getYamlUpgrader() {
    return yamlUpgrader;
  }
  // Fluent setter (returns this), unlike the other void setters.
  public StageDefinitionJson setYamlUpgrader(String yamlUpgrader) {
    this.yamlUpgrader = yamlUpgrader;
    return this;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.integrationtests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.HashMap;

import org.apache.fineract.integrationtests.common.ClientHelper;
import org.apache.fineract.integrationtests.common.LoanRescheduleRequestHelper;
import org.apache.fineract.integrationtests.common.Utils;
import org.apache.fineract.integrationtests.common.loans.LoanApplicationTestBuilder;
import org.apache.fineract.integrationtests.common.loans.LoanProductTestBuilder;
import org.apache.fineract.integrationtests.common.loans.LoanRescheduleRequestTestBuilder;
import org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper;
import org.junit.Before;
import org.junit.Test;

import com.jayway.restassured.builder.RequestSpecBuilder;
import com.jayway.restassured.builder.ResponseSpecBuilder;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.specification.RequestSpecification;
import com.jayway.restassured.specification.ResponseSpecification;

/**
 * Test the creation, approval and rejection of a loan reschedule request.
 *
 * <p>Each test runs against a fresh client/product/loan created in
 * {@link #initialize()}; the loan is approved and disbursed before any
 * reschedule request is made.
 **/
@SuppressWarnings({ "rawtypes" })
public class LoanRescheduleRequestTest {

    private ResponseSpecification responseSpec;
    private ResponseSpecification generalResponseSpec;
    private RequestSpecification requestSpec;
    private LoanTransactionHelper loanTransactionHelper;
    private LoanRescheduleRequestHelper loanRescheduleRequestHelper;
    private Integer clientId;
    private Integer loanProductId;
    private Integer loanId;
    private Integer loanRescheduleRequestId;
    private String loanPrincipalAmount = "100000.00";
    private String numberOfRepayments = "12";
    private String interestRatePerPeriod = "18";
    private String dateString = "4 September 2014";

    @Before
    public void initialize() {
        Utils.initializeRESTAssured();

        this.requestSpec = new RequestSpecBuilder().setContentType(ContentType.JSON).build();
        this.requestSpec.header("Authorization",
                "Basic " + Utils.loginIntoServerAndGetBase64EncodedAuthenticationKey());
        this.responseSpec = new ResponseSpecBuilder().expectStatusCode(200).build();
        this.loanTransactionHelper = new LoanTransactionHelper(this.requestSpec, this.responseSpec);
        this.loanRescheduleRequestHelper = new LoanRescheduleRequestHelper(this.requestSpec, this.responseSpec);

        // general spec carries no status expectation — used for raw detail reads
        this.generalResponseSpec = new ResponseSpecBuilder().build();

        // create all required entities
        this.createRequiredEntities();
    }

    /**
     * Creates the client, loan product, and loan entities
     **/
    private void createRequiredEntities() {
        this.createClientEntity();
        this.createLoanProductEntity();
        this.createLoanEntity();
    }

    /**
     * create a new client
     **/
    private void createClientEntity() {
        this.clientId = ClientHelper.createClient(this.requestSpec, this.responseSpec);
        ClientHelper.verifyClientCreatedOnServer(this.requestSpec, this.responseSpec, this.clientId);
    }

    /**
     * create a new loan product
     **/
    private void createLoanProductEntity() {
        System.out.println("---------------------------------CREATING LOAN PRODUCT------------------------------------------");

        final String loanProductJSON = new LoanProductTestBuilder()
                .withPrincipal(loanPrincipalAmount)
                .withNumberOfRepayments(numberOfRepayments)
                .withinterestRatePerPeriod(interestRatePerPeriod)
                .withInterestRateFrequencyTypeAsYear()
                .build(null);

        this.loanProductId = this.loanTransactionHelper.getLoanProductId(loanProductJSON);
        System.out.println("Successfully created loan product (ID: " + this.loanProductId + ")");
    }

    /**
     * submit a new loan application, approve and disburse the loan
     **/
    private void createLoanEntity() {
        System.out.println("---------------------------------NEW LOAN APPLICATION------------------------------------------");

        // NOTE: the original chain called withLoanTermFrequencyAsMonths()
        // twice; the redundant second call has been removed.
        final String loanApplicationJSON = new LoanApplicationTestBuilder()
                .withPrincipal(loanPrincipalAmount)
                .withLoanTermFrequency(numberOfRepayments)
                .withLoanTermFrequencyAsMonths()
                .withNumberOfRepayments(numberOfRepayments)
                .withRepaymentEveryAfter("1")
                .withRepaymentFrequencyTypeAsMonths()
                .withAmortizationTypeAsEqualInstallments()
                .withInterestCalculationPeriodTypeAsDays()
                .withInterestRatePerPeriod(interestRatePerPeriod)
                .withSubmittedOnDate(dateString)
                .withExpectedDisbursementDate(dateString)
                .withPrincipalGrace("2")
                .withInterestGrace("2")
                .build(this.clientId.toString(), this.loanProductId.toString(), null);

        this.loanId = this.loanTransactionHelper.getLoanId(loanApplicationJSON);

        System.out.println("Sucessfully created loan (ID: " + this.loanId + ")");

        this.approveLoanApplication();
        this.disburseLoan();
    }

    /**
     * approve the loan application
     **/
    private void approveLoanApplication() {
        if (this.loanId != null) {
            this.loanTransactionHelper.approveLoan(this.dateString, this.loanId);
            System.out.println("Successfully approved loan (ID: " + this.loanId + ")");
        }
    }

    /**
     * disburse the newly created loan
     **/
    private void disburseLoan() {
        if (this.loanId != null) {
            this.loanTransactionHelper.disburseLoan(this.dateString, this.loanId);
            System.out.println("Successfully disbursed loan (ID: " + this.loanId + ")");
        }
    }

    /**
     * create new loan reschedule request
     **/
    private void createLoanRescheduleRequest() {
        System.out.println("---------------------------------CREATING LOAN RESCHEDULE REQUEST------------------------------------------");

        final String requestJSON = new LoanRescheduleRequestTestBuilder().build(this.loanId.toString());

        this.loanRescheduleRequestId = this.loanRescheduleRequestHelper.createLoanRescheduleRequest(requestJSON);
        this.loanRescheduleRequestHelper.verifyCreationOfLoanRescheduleRequest(this.loanRescheduleRequestId);

        System.out.println("Successfully created loan reschedule request (ID: " + this.loanRescheduleRequestId + ")");
    }

    @Test
    public void testCreateLoanRescheduleRequest() {
        this.createLoanRescheduleRequest();
    }

    @Test
    public void testRejectLoanRescheduleRequest() {
        this.createLoanRescheduleRequest();

        System.out.println("-----------------------------REJECTING LOAN RESCHEDULE REQUEST--------------------------");

        final String requestJSON = new LoanRescheduleRequestTestBuilder().getRejectLoanRescheduleRequestJSON();
        this.loanRescheduleRequestHelper.rejectLoanRescheduleRequest(this.loanRescheduleRequestId, requestJSON);

        final HashMap response = (HashMap) this.loanRescheduleRequestHelper.getLoanRescheduleRequest(loanRescheduleRequestId, "statusEnum");
        assertTrue((Boolean) response.get("rejected"));

        System.out.println("Successfully rejected loan reschedule request (ID: " + this.loanRescheduleRequestId + ")");
    }

    @Test
    public void testApproveLoanRescheduleRequest() {
        this.createLoanRescheduleRequest();

        System.out.println("-----------------------------APPROVING LOAN RESCHEDULE REQUEST--------------------------");

        final String requestJSON = new LoanRescheduleRequestTestBuilder().getApproveLoanRescheduleRequestJSON();
        this.loanRescheduleRequestHelper.approveLoanRescheduleRequest(this.loanRescheduleRequestId, requestJSON);

        final HashMap response = (HashMap) this.loanRescheduleRequestHelper.getLoanRescheduleRequest(loanRescheduleRequestId, "statusEnum");
        assertTrue((Boolean) response.get("approved"));

        final Integer numberOfRepayments = (Integer) this.loanTransactionHelper.getLoanDetail(requestSpec, generalResponseSpec, loanId, "numberOfRepayments");
        final HashMap loanSummary = this.loanTransactionHelper.getLoanSummary(requestSpec, generalResponseSpec, loanId);
        final Float totalExpectedRepayment = (Float) loanSummary.get("totalExpectedRepayment");

        // FIX: the original failure message claimed the count should be 16
        // while the assertion itself expected 12 — the message now matches
        // the asserted expectation.
        assertEquals("NUMBER OF REPAYMENTS SHOULD BE 12", "12", numberOfRepayments.toString());
        assertEquals("TOTAL EXPECTED REPAYMENT MUST BE EQUAL TO 118000.0", "118000.0", totalExpectedRepayment.toString());

        System.out.println("Successfully approved loan reschedule request (ID: " + this.loanRescheduleRequestId + ")");
    }
}
package org.traxnet.shadingzen.core;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
 * Texture resource backed by one or more Android {@link Bitmap}s.
 *
 * <p>Supports plain 2D textures and cube maps. Bitmaps are decoded on load
 * (optionally rescaled to power-of-two dimensions), kept in {@code _bmps}, and
 * uploaded to the GL driver lazily via {@link #onDriverLoad}. Driver data is
 * flagged dirty on pause/resume so textures are re-uploaded after a GL context
 * loss.
 */
public class BitmapTexture extends CompressedResource implements Texture {
    protected IntBuffer _textureIds;       // GL texture name(s); null until onDriverLoad succeeds
    protected int _width, _height;
    protected boolean _driverDataDirty;    // true => GL-side data must be (re)uploaded
    protected boolean _hasAlpha;
    protected boolean _isWatingForRelease;
    protected Parameters _params;
    protected int _target;                 // GL_TEXTURE_2D or GL_TEXTURE_CUBE_MAP
    protected Bitmap[] _bmps;              // 1 bitmap for 2D, 6 for cube maps
    protected ByteBuffer _compressedBuffer;

    public enum TextureType {
        Texture2D, TextureCubeMap
    }

    public enum TextureFilter {
        Nearest, Linear, Bilinear
    }

    /** Load-time options: texture type, filtering, mipmaps and cube-map face resources. */
    public static class Parameters {
        TextureType _type;
        int[] _cubemap;
        TextureFilter _magFilter, _minFilter;
        boolean _genMipmaps;
        public boolean resizeTexturesToPowerOfTwo = true;

        public Parameters() {
            _type = TextureType.Texture2D;
            _cubemap = new int[6];
            _magFilter = TextureFilter.Nearest;
            _minFilter = TextureFilter.Nearest;
            _genMipmaps = true;
        }

        public void setType(TextureType type) {
            _type = type;
        }

        public TextureType getType() {
            return _type;
        }

        public int getCubeMapImage(int num) {
            return _cubemap[num];
        }

        public TextureFilter getMinFilter() {
            return _minFilter;
        }

        public void setMinFilter(TextureFilter filter) {
            _minFilter = filter;
        }

        public TextureFilter getMagFilter() {
            return _magFilter;
        }

        public void setMagFilter(TextureFilter filter) {
            _magFilter = filter;
        }

        /** Sets the drawable resource id for one cube-map face; out-of-range indices are ignored. */
        public void setCubeMapImage(int num, int resource_id) {
            if (6 <= num || 0 > num)
                return;
            _cubemap[num] = resource_id;
        }

        public void setGenMipMaps(boolean mode) {
            _genMipmaps = mode;
        }

        public boolean getGenMipMaps() {
            return _genMipmaps;
        }
    }

    public BitmapTexture() {
    }

    /** Rounds {@code v} up to the next power of two (bit-smearing trick). */
    int calculateUpperPowerOfTwo(int v) {
        v--;
        v |= v >>> 1;
        v |= v >>> 2;
        v |= v >>> 4;
        v |= v >>> 8;
        v |= v >>> 16;
        v++;
        return v;
    }

    /** True for powers of two (and for 0, which the callers never pass for bitmap sizes). */
    boolean isPowerOfTwo(int i) {
        return (i & (i - 1)) == 0;
    }

    public boolean isCubemap() {
        return _target == GLES20.GL_TEXTURE_CUBE_MAP;
    }

    /**
     * Decodes {@code resource_stream} into a single (vertically flipped) bitmap,
     * rescaling to power-of-two dimensions when requested or when a mipmap level
     * is configured on the resource manager.
     */
    boolean loadAsTexture2D(Context context, ResourcesManager manager, String id, InputStream resource_stream, BitmapTexture.Parameters params) {
        _bmps = new Bitmap[1];

        // GL texture coordinates are bottom-up relative to bitmap rows, hence the vertical flip.
        Matrix flip = new Matrix();
        flip.postScale(1f, -1f);

        // NOTE(review): opts is configured but not passed to decodeStream; decodeStream
        // without options performs no density scaling for streams, so behavior matches.
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inScaled = false;

        Bitmap textureBmp = BitmapFactory.decodeStream(resource_stream);

        int level = manager.getDefaultMimapLevel() + 1;
        // && binds tighter than ||: rescale when a mipmap level is set, or when the
        // bitmap is non-power-of-two and resizing is enabled.
        if (level > 0 || params.resizeTexturesToPowerOfTwo && (!isPowerOfTwo(textureBmp.getWidth()) || !isPowerOfTwo(textureBmp.getHeight()))) {
            int target_width = calculateUpperPowerOfTwo(textureBmp.getWidth());
            int target_height = calculateUpperPowerOfTwo(textureBmp.getHeight());

            _width = target_width;
            _height = target_height;

            target_width /= level + 1;
            target_height /= level + 1;

            Log.i("ShadingZen", "Texture id=" + id + " has no power of two dimesions " + textureBmp.getWidth() + "x" + textureBmp.getHeight() + " adjusting to " + target_width + "x" + target_height);

            Bitmap temp = Bitmap.createBitmap(textureBmp, 0, 0, textureBmp.getWidth(), textureBmp.getHeight(), flip, false);
            _bmps[0] = Bitmap.createScaledBitmap(temp, target_width, target_height, false);
            temp.recycle();
        } else {
            _bmps[0] = Bitmap.createBitmap(textureBmp, 0, 0, textureBmp.getWidth(), textureBmp.getHeight(), flip, false);
            _width = _bmps[0].getWidth();
            _height = _bmps[0].getHeight();
        }

        // FIX: query hasAlpha() BEFORE recycling — previously this was read after
        // recycle(), i.e. from a recycled bitmap.
        _hasAlpha = textureBmp.hasAlpha();
        textureBmp.recycle();

        _driverDataDirty = true;
        _params = params;
        _target = GLES20.GL_TEXTURE_2D;
        return true;
    }

    /**
     * Decodes the six cube-map face resources configured in {@code params} into
     * vertically flipped bitmaps.
     */
    boolean loadAsTextureCubeMap(Context context, String id, InputStream resource_stream, BitmapTexture.Parameters params) {
        _bmps = new Bitmap[6];

        Matrix flip = new Matrix();
        flip.postScale(1f, -1f);

        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inScaled = false;

        for (int side = 0; side < 6; side++) {
            Bitmap textureBmp = BitmapFactory.decodeResource(context.getResources(), params.getCubeMapImage(side), opts);
            _bmps[side] = Bitmap.createBitmap(textureBmp, 0, 0, textureBmp.getWidth(), textureBmp.getHeight(), flip, false);
            // FIX: read bitmap properties BEFORE recycling — the original queried
            // hasAlpha()/getWidth()/getHeight() on an already-recycled bitmap.
            _hasAlpha = textureBmp.hasAlpha();
            _width = textureBmp.getWidth();
            _height = textureBmp.getHeight();
            textureBmp.recycle();
        }

        _params = params;
        _driverDataDirty = true;
        _target = GLES20.GL_TEXTURE_CUBE_MAP;
        return true;
    }

    /// Resource implementations

    @Override
    public boolean onStorageLoad(Context context, String id, int resource_id, Object params) {
        InputStream input_stream = null;
        if (resource_id > 0)
            input_stream = context.getResources().openRawResource(resource_id);

        return loadTextureFromInputStream(context, ResourcesManager.getSharedInstance(), id, params, input_stream);
    }

    /** Dispatches to the 2D or cube-map loader depending on the given parameters. */
    private boolean loadTextureFromInputStream(Context context, ResourcesManager manager, String id, Object params, InputStream input_stream) {
        try {
            if (null != params) {
                Parameters texparams = (Parameters) params;
                if (texparams.getType() == TextureType.Texture2D)
                    return loadAsTexture2D(context, manager, id, input_stream, texparams);
                else
                    return loadAsTextureCubeMap(context, id, input_stream, texparams);
            }
            return loadAsTexture2D(context, manager, id, input_stream, new Parameters());
        } catch (Exception e) {
            Log.e("ShadingZen", "Error loading texture from input stream:" + e.getMessage(), e);
            return false;
        }
    }

    @Override
    public boolean onCompressedStorageLoad(Context context, ResourcesManager manager, String id, ZipFile zipfile, String location, Object params) {
        try {
            ZipEntry entry = zipfile.getEntry(location);
            if (null == entry) {
                Log.e("ShadingZen", "Given compressed resource location was not found:" + location);
                return false;
            }
            if (entry.isDirectory()) {
                // PVR compressed texture sets are not supported yet (see
                // loadCompressedPVRCompressedTextureSet stub below).
                /*File file = new File(location);
                String filename = file.getName();
                if(manager.isPVRCompressedTexturesSupported())
                    return loadCompressedPVRCompressedTextureSet(manager, zipfile, filename, id, (BitmapTexture.Parameters) params);
                else
                    return false;
                    */
                return false;
            } else {
                return loadTextureFromInputStream(context, manager, id, params, zipfile.getInputStream(entry));
            }
        } catch (IOException ex) {
            Log.e("ShadingZen", "Error getting input stream from zip file:" + ex.getMessage(), ex);
            return false;
        }
    }

    /** Stub: PVR compressed texture sets are not implemented. Always returns false. */
    protected boolean loadCompressedPVRCompressedTextureSet(ResourcesManager manager, ZipFile zipfile, String filename, String id, BitmapTexture.Parameters params) {
        return false;
    }

    /**
     * Uploads the decoded bitmaps to the GL driver and sets filtering/wrap state.
     * Must be called on the GL thread with a current context.
     */
    @Override
    public boolean onDriverLoad(Context context) {
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);

        if (TextureType.Texture2D == _params.getType()) {
            _textureIds = IntBuffer.allocate(1);
            GLES20.glGenTextures(1, _textureIds);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _textureIds.get(0));

            // Both branches currently upload via GLUtils.texImage2D, which derives the
            // internal format from the bitmap config (alpha and non-alpha alike).
            if (_hasAlpha)
                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, _bmps[0], 0);
            else
                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, _bmps[0], 0);

            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

            if (_params.getGenMipMaps())
                GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        } else {
            _textureIds = IntBuffer.allocate(1);
            GLES20.glGenTextures(1, _textureIds);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_CUBE_MAP, _textureIds.get(0));

            GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_CUBE_MAP, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);

            if (_hasAlpha) {
                for (int side = 0; side < 6; side++) {
                    GLUtils.texImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X + side, 0, _bmps[side], 0);
                }
            } else {
                for (int side = 0; side < 6; side++) {
                    GLUtils.texImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X + side, 0, _bmps[side], 0);
                }
            }

            if (_params.getGenMipMaps())
                GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_CUBE_MAP);
        }

        _driverDataDirty = false;
        return true;
    }

    @Override
    public boolean onResumed(Context context) {
        // GL context may have been lost; force a re-upload on next bind.
        _driverDataDirty = true;
        return true;
    }

    @Override
    public boolean onPaused(Context context) {
        _driverDataDirty = true;
        return true;
    }

    @Override
    public boolean isDriverDataDirty() {
        return _driverDataDirty;
    }

    @Override
    public void onRelease() {
        // FIX: guard against release before onDriverLoad ever ran (previously NPE).
        if (null != _textureIds)
            GLES20.glDeleteTextures(1, _textureIds);
        _bmps = null;
        _textureIds = null;
    }

    /// Texture public methods

    /*
     * Returns the id for this texture in OGL ES. Must be called after the texture has been
     * loaded into the driver memory.
     */
    public int getTextureId() {
        return _textureIds.get(0);
    }

    /** Binds this texture to the given texture unit; marks data dirty if not yet uploaded. */
    public void bindTexture(int unit) {
        if (null == _bmps || null == _textureIds) {
            _driverDataDirty = true;
            return;
        }

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + unit);
        GLES20.glBindTexture(_target, _textureIds.get(0));
    }

    public void unbindTexture(int unit) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + unit);
        GLES20.glBindTexture(_target, 0);
    }

    /** Drains and logs pending GL errors, tagged with the operation name. */
    void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e("ShadingZen", op + ": glError " + error);
        }
    }

    /// Protected methods

    /**
     * Copies the bitmap's pixels into a direct byte buffer, converting ARGB to RGBA
     * (4 bytes/pixel with alpha, 3 bytes/pixel RGB without).
     */
    private ByteBuffer extract(Bitmap bmp) {
        int channels = 3;
        if (bmp.hasAlpha())
            channels = 4;
        ByteBuffer bb = ByteBuffer.allocateDirect(bmp.getHeight() * bmp.getWidth() * channels);

        bb.order(ByteOrder.BIG_ENDIAN);
        IntBuffer ib = bb.asIntBuffer();

        // Convert ARGB -> RGBA
        if (bmp.hasAlpha()) {
            for (int y = bmp.getHeight() - 1; y > -1; y--) {
                for (int x = 0; x < bmp.getWidth(); x++) {
                    int pix = bmp.getPixel(x, bmp.getHeight() - y - 1);
                    int alpha = ((pix >> 24) & 0xFF);
                    int red = ((pix >> 16) & 0xFF);
                    int green = ((pix >> 8) & 0xFF);
                    int blue = ((pix) & 0xFF);
                    ib.put(red << 24 | green << 16 | blue << 8 | alpha);
                }
            }
        } else {
            for (int y = 0; y < bmp.getHeight(); y++) {
                for (int x = 0; x < bmp.getWidth(); x++) {
                    int pix = bmp.getPixel(x, y);
                    int red = ((pix >> 16) & 0xFF);
                    int green = ((pix >> 8) & 0xFF);
                    int blue = ((pix) & 0xFF);
                    bb.put((byte) red);
                    // FIX: the original wrote (byte) 256 (which truncates to 0),
                    // discarding the computed green channel.
                    bb.put((byte) green);
                    bb.put((byte) blue);
                }
            }
        }
        bb.position(0);
        return bb;
    }

    public int getWidth() {
        return this._width;
    }

    public int getHeight() {
        return _height;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.hbase.index.write; import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import 
org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.phoenix.hbase.index.IndexTableName; import org.apache.phoenix.hbase.index.IndexTestingUtils; import org.apache.phoenix.hbase.index.Indexer; import org.apache.phoenix.hbase.index.covered.ColumnGroup; import org.apache.phoenix.hbase.index.covered.CoveredColumn; import org.apache.phoenix.hbase.index.covered.CoveredColumnIndexSpecifierBuilder; import org.apache.phoenix.hbase.index.table.HTableInterfaceReference; import org.apache.phoenix.hbase.index.util.IndexManagementUtil; import org.apache.phoenix.hbase.index.util.TestIndexManagementUtil; import org.apache.phoenix.hbase.index.write.recovery.PerRegionIndexWriteCache; import org.apache.phoenix.hbase.index.write.recovery.StoreFailuresInCachePolicy; import org.junit.Assert; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.phoenix.thirdparty.com.google.common.collect.Multimap; /** * When a regionserver crashes, its WAL is split and then replayed to the server. If the index * region was present on the same server, we have to make a best effort to not kill the server for * not succeeding on index writes while the index region is coming up. 
*/ public class TestWALRecoveryCaching { private static final Logger LOGGER = LoggerFactory.getLogger(TestWALRecoveryCaching.class); private static final long ONE_SEC = 1000; private static final long ONE_MIN = 60 * ONE_SEC; private static final long TIMEOUT = ONE_MIN; @Rule public IndexTableName testTable = new IndexTableName(); private String getIndexTableName() { return this.testTable.getTableNameString() + "_index"; } // ----------------------------------------------------------------------------------------------- // Warning! The classes here rely on this static. Adding multiple tests to this class and running // them concurrently could have unexpected results (including, but not limited to, odd failures // and flapping tests). // ----------------------------------------------------------------------------------------------- private static CountDownLatch allowIndexTableToRecover; public static class IndexTableBlockingReplayObserver implements RegionObserver, RegionCoprocessor { @Override public Optional<RegionObserver> getRegionObserver() { return Optional.of(this); } @Override public void preWALRestore( org.apache.hadoop.hbase.coprocessor.ObserverContext<? 
extends RegionCoprocessorEnvironment> ctx, org.apache.hadoop.hbase.client.RegionInfo info, WALKey logKey, org.apache.hadoop.hbase.wal.WALEdit logEdit) throws IOException { try { LOGGER.debug("Restoring logs for index table"); if (allowIndexTableToRecover != null) { allowIndexTableToRecover.await(); LOGGER.debug("Completed index table recovery wait latch"); } } catch (InterruptedException e) { Assert.fail("Should not be interrupted while waiting to allow the index to restore WALs."); } } } public static class ReleaseLatchOnFailurePolicy extends StoreFailuresInCachePolicy { /** * @param failedIndexEdits */ public ReleaseLatchOnFailurePolicy(PerRegionIndexWriteCache failedIndexEdits) { super(failedIndexEdits); } @Override public void handleFailure(Multimap<HTableInterfaceReference, Mutation> attempted, Exception cause) throws IOException { LOGGER.debug("Found index update failure!"); if (allowIndexTableToRecover != null) { LOGGER.info("failed index write on WAL recovery - allowing index table to be restored."); allowIndexTableToRecover.countDown(); } super.handleFailure(attempted, cause); } } //TODO: Jesse to fix @SuppressWarnings("deprecation") @Ignore("Configuration issue - valid test, just needs fixing") @Test public void testWaitsOnIndexRegionToReload() throws Exception { HBaseTestingUtility util = new HBaseTestingUtility(); Configuration conf = util.getConfiguration(); setUpConfigForMiniCluster(conf); // setup other useful stats IndexTestingUtils.setupConfig(conf); conf.setBoolean(Indexer.CHECK_VERSION_CONF_KEY, false); // make sure everything is setup correctly IndexManagementUtil.ensureMutableIndexingCorrectlyConfigured(conf); // start the cluster with 2 rs util.startMiniCluster(2); Admin admin = util.getHBaseAdmin(); // setup the index byte[] family = Bytes.toBytes("family"); byte[] qual = Bytes.toBytes("qualifier"); byte[] nonIndexedFamily = Bytes.toBytes("nonIndexedFamily"); String indexedTableName = getIndexTableName(); ColumnGroup columns = new 
ColumnGroup(indexedTableName); columns.add(new CoveredColumn(family, qual)); CoveredColumnIndexSpecifierBuilder builder = new CoveredColumnIndexSpecifierBuilder(); builder.addIndexGroup(columns); // create the primary table w/ indexing enabled TableDescriptor primaryTable = TableDescriptorBuilder.newBuilder(TableName.valueOf(testTable.getTableName())) .addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)) .addColumnFamily(ColumnFamilyDescriptorBuilder.of(nonIndexedFamily)).build(); builder.addArbitraryConfigForTesting(Indexer.RecoveryFailurePolicyKeyForTesting, ReleaseLatchOnFailurePolicy.class.getName()); builder.build(primaryTable); admin.createTable(primaryTable); // create the index table TableDescriptorBuilder indexTableBuilder = TableDescriptorBuilder .newBuilder(TableName.valueOf(Bytes.toBytes(getIndexTableName()))) .addCoprocessor(IndexTableBlockingReplayObserver.class.getName()); TestIndexManagementUtil.createIndexTable(admin, indexTableBuilder); // figure out where our tables live ServerName shared = ensureTablesLiveOnSameServer(util.getMiniHBaseCluster(), Bytes.toBytes(indexedTableName), testTable.getTableName()); // load some data into the table Put p = new Put(Bytes.toBytes("row")); p.addColumn(family, qual, Bytes.toBytes("value")); Connection hbaseConn = ConnectionFactory.createConnection(conf); Table primary = hbaseConn.getTable(org.apache.hadoop.hbase.TableName.valueOf(testTable.getTableName())); primary.put(p); // turn on the recovery latch allowIndexTableToRecover = new CountDownLatch(1); // kill the server where the tables live - this should trigger distributed log splitting // find the regionserver that matches the passed server List<HRegion> online = new ArrayList<HRegion>(); online.addAll(getRegionsFromServerForTable(util.getMiniHBaseCluster(), shared, testTable.getTableName())); online.addAll(getRegionsFromServerForTable(util.getMiniHBaseCluster(), shared, Bytes.toBytes(indexedTableName))); // log all the current state of the server 
LOGGER.info("Current Server/Region paring: "); for (RegionServerThread t : util.getMiniHBaseCluster().getRegionServerThreads()) { // check all the conditions for the server to be done HRegionServer server = t.getRegionServer(); if (server.isStopping() || server.isStopped() || server.isAborted()) { LOGGER.info("\t== Offline: " + server.getServerName()); continue; } List<HRegion> regions = server.getRegions(); LOGGER.info("\t" + server.getServerName() + " regions: " + regions); } LOGGER.debug("Killing server " + shared); util.getMiniHBaseCluster().killRegionServer(shared); LOGGER.debug("Waiting on server " + shared + "to die"); util.getMiniHBaseCluster().waitForRegionServerToStop(shared, TIMEOUT); // force reassign the regions from the table // LOGGER.debug("Forcing region reassignment from the killed server: " + shared); // for (HRegion region : online) { // util.getMiniHBaseCluster().getMaster().assign(region.getRegionName()); // } System.out.println(" ====== Killed shared server ==== "); // make a second put that (1), isn't indexed, so we can be sure of the index state and (2) // ensures that our table is back up Put p2 = new Put(p.getRow()); p2.addColumn(nonIndexedFamily, Bytes.toBytes("Not indexed"), Bytes.toBytes("non-indexed value")); primary.put(p2); // make sure that we actually failed the write once (within a 5 minute window) assertTrue("Didn't find an error writing to index table within timeout!", allowIndexTableToRecover.await(ONE_MIN * 5, TimeUnit.MILLISECONDS)); // scan the index to make sure it has the one entry, (that had to be replayed from the WAL, // since we hard killed the server) Scan s = new Scan(); Table index = hbaseConn.getTable(org.apache.hadoop.hbase.TableName.valueOf(getIndexTableName())); ResultScanner scanner = index.getScanner(s); int count = 0; for (Result r : scanner) { LOGGER.info("Got index table result:" + r); count++; } assertEquals("Got an unexpected found of index rows", 1, count); // cleanup scanner.close(); index.close(); 
primary.close(); util.shutdownMiniCluster(); } /** * @param cluster * @param server * @param table * @return */ private List<HRegion> getRegionsFromServerForTable(MiniHBaseCluster cluster, ServerName server, byte[] table) { List<HRegion> online = Collections.emptyList(); for (RegionServerThread rst : cluster.getRegionServerThreads()) { // if its the server we are going to kill, get the regions we want to reassign if (rst.getRegionServer().getServerName().equals(server)) { online = rst.getRegionServer().getRegions(org.apache.hadoop.hbase.TableName.valueOf(table)); break; } } return online; } /** * @param cluster * @param indexTable * @param primaryTable */ private ServerName ensureTablesLiveOnSameServer(MiniHBaseCluster cluster, byte[] indexTable, byte[] primaryTable) throws Exception { ServerName shared = getSharedServer(cluster, indexTable, primaryTable); boolean tryIndex = true; while (shared == null) { // start killing servers until we get an overlap Set<ServerName> servers; byte[] table = null; // switch which server we kill each time to get region movement if (tryIndex) { table = indexTable; } else { table = primaryTable; } servers = getServersForTable(cluster, table); tryIndex = !tryIndex; for (ServerName server : servers) { // find the regionserver that matches the passed server List<HRegion> online = getRegionsFromServerForTable(cluster, server, table); LOGGER.info("Shutting down and reassigning regions from " + server); cluster.stopRegionServer(server); cluster.waitForRegionServerToStop(server, TIMEOUT); // force reassign the regions from the table for (Region region : online) { cluster.getMaster().getAssignmentManager().assign(region.getRegionInfo()); } LOGGER.info("Starting region server:" + server.getHostname()); cluster.startRegionServer(server.getHostname(), server.getPort()); cluster.waitForRegionServerToStart(server.getHostname(), server.getPort(), TIMEOUT); // start a server to get back to the base number of servers LOGGER.info("STarting server to 
replace " + server); cluster.startRegionServer(); break; } shared = getSharedServer(cluster, indexTable, primaryTable); } return shared; } /** * @param cluster * @param indexTable * @param primaryTable * @return * @throws Exception */ private ServerName getSharedServer(MiniHBaseCluster cluster, byte[] indexTable, byte[] primaryTable) throws Exception { Set<ServerName> indexServers = getServersForTable(cluster, indexTable); Set<ServerName> primaryServers = getServersForTable(cluster, primaryTable); Set<ServerName> joinSet = new HashSet<ServerName>(indexServers); joinSet.addAll(primaryServers); // if there is already an overlap, then find it and return it if (joinSet.size() < indexServers.size() + primaryServers.size()) { // find the first overlapping server for (ServerName server : joinSet) { if (indexServers.contains(server) && primaryServers.contains(server)) { return server; } } throw new RuntimeException( "Couldn't find a matching server on which both the primary and index table live, " + "even though they have overlapping server sets"); } return null; } private Set<ServerName> getServersForTable(MiniHBaseCluster cluster, byte[] table) throws Exception { Set<ServerName> indexServers = new HashSet<ServerName>(); for (Region region : cluster.getRegions(table)) { indexServers.add(cluster.getServerHoldingRegion(null, region.getRegionInfo().getRegionName())); } return indexServers; } }
/*
 * Copyright (c) 2010-2015 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.repo.sql;

import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismObjectDefinition;
import com.evolveum.midpoint.prism.PrismReferenceValue;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.prism.delta.ReferenceDelta;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.MidPointPrismContextFactory;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectModificationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.MetadataType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.io.File;
import java.util.Arrays;

/**
 * This is not real test, it's just used to check how hibernate handles insert/modify of different
 * objects.
 *
 * @author lazyman
 */
@ContextConfiguration(locations = {"../../../../../ctx-test.xml"})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class ModifyUser extends BaseSQLRepoTest {

    private static final String USER_FULLNAME = "Guybrush Threepwood";

    // OIDs captured by test010Add and reused by the later (order-dependent) tests.
    private String userOid;
    private String userBigOid;
    private String shadowOid;

    @BeforeClass
    public void beforeClass() throws Exception {
        super.beforeClass();

        PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
    }

    /** Adds the three fixture objects (user, big user, shadow) and records their OIDs. */
    @Test
    public void test010Add() throws Exception {
        PrismObject<UserType> user = PrismTestUtil.parseObject(new File(FOLDER_BASIC, "user.xml"));
        userOid = repositoryService.addObject(user, null, new OperationResult("asdf"));

        user = PrismTestUtil.parseObject(new File(FOLDER_BASIC, "user-big.xml"));
        userBigOid = repositoryService.addObject(user, null, new OperationResult("asdf"));

        PrismObject<ShadowType> shadow = PrismTestUtil.parseObject(new File(FOLDER_BASIC, "account-shadow.xml"));
        shadowOid = repositoryService.addObject(shadow, null, new OperationResult("asdf"));
    }

    /** Applies t002.xml to the user and checks full name and employee number afterwards. */
    @Test
    public void test020ModifyUser() throws Exception {
        final String TEST_NAME = "test020ModifyUser";
        TestUtil.displayTestTile(TEST_NAME);
        OperationResult result = new OperationResult(TEST_NAME);

        ObjectModificationType modification = PrismTestUtil.parseAtomicValue(
                new File(FOLDER_BASIC, "t002.xml"),
                ObjectModificationType.COMPLEX_TYPE);

        ObjectDelta delta = DeltaConvertor.createObjectDelta(modification, UserType.class, prismContext);
        delta.setOid(userOid);

        // WHEN
        TestUtil.displayWhen(TEST_NAME);
        repositoryService.modifyObject(UserType.class, userOid, delta.getModifications(), result);

        // THEN
        TestUtil.displayThen(TEST_NAME);
        result.computeStatus();
        TestUtil.assertSuccess(result);

        PrismObject<UserType> userAfter = repositoryService.getObject(UserType.class, userOid, null, result);
        PrismAsserts.assertPropertyValue(userAfter, UserType.F_FULL_NAME, PrismTestUtil.createPolyString(USER_FULLNAME));
        PrismAsserts.assertPropertyValue(userAfter, UserType.F_EMPLOYEE_NUMBER, "en1234");
    }

    /** Applies t002a.xml which removes the employee number; verifies the item is gone. */
    @Test
    public void test021ModifyUserNoEmpNum() throws Exception {
        final String TEST_NAME = "test021ModifyUserNoEmpNum";
        TestUtil.displayTestTile(TEST_NAME);
        OperationResult result = new OperationResult(TEST_NAME);

        ObjectModificationType modification = PrismTestUtil.parseAtomicValue(
                new File(FOLDER_BASIC, "t002a.xml"),
                ObjectModificationType.COMPLEX_TYPE);

        ObjectDelta delta = DeltaConvertor.createObjectDelta(modification, UserType.class, prismContext);
        delta.setOid(userOid);

        // WHEN
        TestUtil.displayWhen(TEST_NAME);
        repositoryService.modifyObject(UserType.class, userOid, delta.getModifications(), result);

        // THEN
        TestUtil.displayThen(TEST_NAME);
        result.computeStatus();
        TestUtil.assertSuccess(result);

        PrismObject<UserType> userAfter = repositoryService.getObject(UserType.class, userOid, null, result);
        PrismAsserts.assertPropertyValue(userAfter, UserType.F_FULL_NAME, PrismTestUtil.createPolyString(USER_FULLNAME));
        PrismAsserts.assertNoItem(userAfter, UserType.F_EMPLOYEE_NUMBER);
    }

    /** Applies t002b.xml which sets an empty employee number; verifies it is stored as "". */
    @Test
    public void test022ModifyUserEmptyEmpNum() throws Exception {
        final String TEST_NAME = "test022ModifyUserEmptyEmpNum";
        TestUtil.displayTestTile(TEST_NAME);
        OperationResult result = new OperationResult(TEST_NAME);

        ObjectModificationType modification = PrismTestUtil.parseAtomicValue(
                new File(FOLDER_BASIC, "t002b.xml"),
                ObjectModificationType.COMPLEX_TYPE);

        ObjectDelta delta = DeltaConvertor.createObjectDelta(modification, UserType.class, prismContext);
        delta.setOid(userOid);

        // WHEN
        TestUtil.displayWhen(TEST_NAME);
        repositoryService.modifyObject(UserType.class, userOid, delta.getModifications(), result);

        // THEN
        TestUtil.displayThen(TEST_NAME);
        result.computeStatus();
        TestUtil.assertSuccess(result);

        PrismObject<UserType> userAfter = repositoryService.getObject(UserType.class, userOid, null, result);
        PrismAsserts.assertPropertyValue(userAfter, UserType.F_FULL_NAME, PrismTestUtil.createPolyString(USER_FULLNAME));
        PrismAsserts.assertPropertyValue(userAfter, UserType.F_EMPLOYEE_NUMBER, "");
    }

    @Test
    public void test030ModifyShadow() throws Exception {
        ObjectModificationType modification = PrismTestUtil.parseAtomicValue(
                new File(FOLDER_BASIC, "t003.xml"),
                ObjectModificationType.COMPLEX_TYPE);

        ObjectDelta delta = DeltaConvertor.createObjectDelta(modification, ShadowType.class, prismContext);
        // FIX: the delta previously carried userOid while the modify call targeted
        // shadowOid — a copy-paste error; the delta must identify the shadow.
        delta.setOid(shadowOid);

        repositoryService.modifyObject(ShadowType.class, shadowOid, delta.getModifications(), new OperationResult("asdf"));
    }

    @Test
    public void test040GetShadow() throws Exception {
        repositoryService.getObject(ShadowType.class, shadowOid, null, new OperationResult("asdf"));
    }

    /** Replaces the description on the big user via a directly constructed property delta. */
    @Test
    public void test050ModifyBigUser() throws Exception {
        PrismObjectDefinition def = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class);

        PropertyDelta delta = PropertyDelta.createModificationReplaceProperty(ObjectType.F_DESCRIPTION, def, "new description");

        repositoryService.modifyObject(UserType.class, userBigOid, Arrays.asList(delta), new OperationResult("asdf"));
    }

    @Test
    public void test060GetBigUser() throws Exception {
        repositoryService.getObject(UserType.class, userBigOid, null, new OperationResult("asdf"));
    }

    /**
     * This test fails with java.lang.IllegalStateException: An entity copy was already assigned to a different entity.
     * It's ok to fail, but it should fail somehow differently.
     *
     * todo improve later [lazyman]
     */
    @Test(enabled = false)
    public void test070ModifyBigUser() throws Exception {
        ObjectModificationType modification = PrismTestUtil.parseAtomicValue(
                new File(FOLDER_BASIC, "t004.xml"),
                ObjectModificationType.COMPLEX_TYPE);

        ObjectDelta delta = DeltaConvertor.createObjectDelta(modification, UserType.class, prismContext);

        repositoryService.modifyObject(UserType.class, userBigOid, delta.getModifications(), new OperationResult("asdf"));
    }

    /** Adds the same approver reference to create- and modify-approver metadata in one call. */
    @Test
    public void test100ModifyUserApproverMetadata() throws Exception {
        PrismObjectDefinition userDefinition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class);
        ReferenceDelta delta1 = ReferenceDelta.createModificationAdd(
                new ItemPath(UserType.F_METADATA, MetadataType.F_CREATE_APPROVER_REF),
                userDefinition,
                new PrismReferenceValue("target-oid-1", UserType.COMPLEX_TYPE));
        ReferenceDelta delta2 = ReferenceDelta.createModificationAdd(
                new ItemPath(UserType.F_METADATA, MetadataType.F_MODIFY_APPROVER_REF),
                userDefinition,
                new PrismReferenceValue("target-oid-1", UserType.COMPLEX_TYPE));        // the same as in delta1

        repositoryService.modifyObject(UserType.class, userOid, Arrays.asList(delta1, delta2), new OperationResult("asdf"));
    }
}
/*
 * Copyright 2011-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.lettuce.core.cluster.api.reactive;

import java.time.Duration;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import io.lettuce.core.KeyValue;
import io.lettuce.core.api.reactive.*;

/**
 * A complete reactive and thread-safe cluster Redis API with 400+ Methods.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 * @author Mark Paluch
 * @since 5.0
 */
public interface RedisClusterReactiveCommands<K, V> extends BaseRedisReactiveCommands<K, V>, RedisGeoReactiveCommands<K, V>,
        RedisHashReactiveCommands<K, V>, RedisHLLReactiveCommands<K, V>, RedisKeyReactiveCommands<K, V>,
        RedisListReactiveCommands<K, V>, RedisScriptingReactiveCommands<K, V>, RedisServerReactiveCommands<K, V>,
        RedisSetReactiveCommands<K, V>, RedisSortedSetReactiveCommands<K, V>, RedisStreamReactiveCommands<K, V>,
        RedisStringReactiveCommands<K, V> {

    /**
     * Set the default timeout for operations. A zero timeout value indicates to not time out.
     *
     * @param timeout the timeout value
     * @since 5.0
     */
    void setTimeout(Duration timeout);

    /**
     * Authenticate to the server.
     *
     * @param password the password
     * @return String simple-string-reply
     */
    Mono<String> auth(CharSequence password);

    /**
     * Authenticate to the server with username and password. Requires Redis 6 or newer.
     *
     * @param username the username
     * @param password the password
     * @return String simple-string-reply
     * @since 6.0
     */
    Mono<String> auth(String username, CharSequence password);

    /**
     * Generate a new config epoch, incrementing the current epoch, assign the new epoch to this node, WITHOUT any consensus and
     * persist the configuration on disk before sending packets with the new configuration.
     *
     * @return String simple-string-reply If the new config epoch is generated and assigned either BUMPED (epoch) or STILL
     *         (epoch) are returned.
     */
    Mono<String> clusterBumpepoch();

    /**
     * Meet another cluster node to include the node into the cluster. The command starts the cluster handshake and returns with
     * {@literal OK} when the node was added to the cluster.
     *
     * @param ip IP address of the host
     * @param port port number.
     * @return String simple-string-reply
     */
    Mono<String> clusterMeet(String ip, int port);

    /**
     * Disallow connections and remove the cluster node from the cluster.
     *
     * @param nodeId the node Id
     * @return String simple-string-reply
     */
    Mono<String> clusterForget(String nodeId);

    /**
     * Adds slots to the cluster node. The current node will become the master for the specified slots.
     *
     * @param slots one or more slots from {@literal 0} to {@literal 16383}
     * @return String simple-string-reply
     */
    Mono<String> clusterAddSlots(int... slots);

    /**
     * Removes slots from the cluster node.
     *
     * @param slots one or more slots from {@literal 0} to {@literal 16383}
     * @return String simple-string-reply
     */
    Mono<String> clusterDelSlots(int... slots);

    /**
     * Assign a slot to a node. The command migrates the specified slot from the current node to the specified node in
     * {@code nodeId}
     *
     * @param slot the slot
     * @param nodeId the id of the node that will become the master for the slot
     * @return String simple-string-reply
     */
    Mono<String> clusterSetSlotNode(int slot, String nodeId);

    /**
     * Clears migrating / importing state from the slot.
     *
     * @param slot the slot
     * @return String simple-string-reply
     */
    Mono<String> clusterSetSlotStable(int slot);

    /**
     * Flag a slot as {@literal MIGRATING} (outgoing) towards the node specified in {@code nodeId}. The slot must be handled by
     * the current node in order to be migrated.
     *
     * @param slot the slot
     * @param nodeId the id of the node is targeted to become the master for the slot
     * @return String simple-string-reply
     */
    Mono<String> clusterSetSlotMigrating(int slot, String nodeId);

    /**
     * Flag a slot as {@literal IMPORTING} (incoming) from the node specified in {@code nodeId}.
     *
     * @param slot the slot
     * @param nodeId the id of the node is the master of the slot
     * @return String simple-string-reply
     */
    Mono<String> clusterSetSlotImporting(int slot, String nodeId);

    /**
     * Get information and statistics about the cluster viewed by the current node.
     *
     * @return String bulk-string-reply as a collection of text lines.
     */
    Mono<String> clusterInfo();

    /**
     * Obtain the nodeId for the currently connected node.
     *
     * @return String simple-string-reply
     */
    Mono<String> clusterMyId();

    /**
     * Obtain details about all cluster nodes. Can be parsed using
     * {@link io.lettuce.core.cluster.models.partitions.ClusterPartitionParser#parse}
     *
     * @return String bulk-string-reply as a collection of text lines
     */
    Mono<String> clusterNodes();

    /**
     * List replicas for a certain node identified by its {@code nodeId}. Can be parsed using
     * {@link io.lettuce.core.cluster.models.partitions.ClusterPartitionParser#parse}
     *
     * @param nodeId node id of the master node
     * @return List&lt;String&gt; array-reply list of replicas. The command returns data in the same format as
     *         {@link #clusterNodes()} but one line per replica.
     */
    Flux<String> clusterSlaves(String nodeId);

    /**
     * Retrieve the list of keys within the {@code slot}.
     *
     * @param slot the slot
     * @param count maximal number of keys
     * @return List&lt;K&gt; array-reply list of keys
     */
    Flux<K> clusterGetKeysInSlot(int slot, int count);

    /**
     * Returns the number of keys in the specified Redis Cluster hash {@code slot}.
     *
     * @param slot the slot
     * @return Integer reply: The number of keys in the specified hash slot, or an error if the hash slot is invalid.
     */
    Mono<Long> clusterCountKeysInSlot(int slot);

    /**
     * Returns the number of failure reports for the specified node. Failure reports are the way Redis Cluster uses in order to
     * promote a {@literal PFAIL} state, that means a node is not reachable, to a {@literal FAIL} state, that means that the
     * majority of masters in the cluster agreed within a window of time that the node is not reachable.
     *
     * @param nodeId the node id
     * @return Integer reply: The number of active failure reports for the node.
     */
    Mono<Long> clusterCountFailureReports(String nodeId);

    /**
     * Returns an integer identifying the hash slot the specified key hashes to. This command is mainly useful for debugging and
     * testing, since it exposes via an API the underlying Redis implementation of the hashing algorithm. Basically the same as
     * {@link io.lettuce.core.cluster.SlotHash#getSlot(byte[])}. If not, call Houston and report that we've got a problem.
     *
     * @param key the key.
     * @return Integer reply: The hash slot number.
     */
    Mono<Long> clusterKeyslot(K key);

    /**
     * Forces a node to save the nodes.conf configuration on disk.
     *
     * @return String simple-string-reply: {@code OK} or an error if the operation fails.
     */
    Mono<String> clusterSaveconfig();

    /**
     * This command sets a specific config epoch in a fresh node. It only works when:
     * <ul>
     * <li>The nodes table of the node is empty.</li>
     * <li>The node current config epoch is zero.</li>
     * </ul>
     *
     * @param configEpoch the config epoch
     * @return String simple-string-reply: {@code OK} or an error if the operation fails.
     */
    Mono<String> clusterSetConfigEpoch(long configEpoch);

    /**
     * Get array of cluster slots to node mappings.
     *
     * @return List&lt;Object&gt; array-reply nested list of slot ranges with IP/Port mappings.
     */
    Flux<Object> clusterSlots();

    /**
     * The asking command is required after a {@code -ASK} redirection. The client should issue {@code ASKING} before to
     * actually send the command to the target instance. See the Redis Cluster specification for more information.
     *
     * @return String simple-string-reply
     */
    Mono<String> asking();

    /**
     * Turn this node into a replica of the node with the id {@code nodeId}.
     *
     * @param nodeId master node id
     * @return String simple-string-reply
     */
    Mono<String> clusterReplicate(String nodeId);

    /**
     * Failover a cluster node. Turns the currently connected node into a master and the master into its replica.
     *
     * @param force do not coordinate with master if {@code true}
     * @return String simple-string-reply
     */
    Mono<String> clusterFailover(boolean force);

    /**
     * Reset a node performing a soft or hard reset:
     * <ul>
     * <li>All other nodes are forgotten</li>
     * <li>All the assigned / open slots are released</li>
     * <li>If the node is a replica, it turns into a master</li>
     * <li>Only for hard reset: a new Node ID is generated</li>
     * <li>Only for hard reset: currentEpoch and configEpoch are set to 0</li>
     * <li>The new configuration is saved and the cluster state updated</li>
     * <li>If the node was a replica, the whole data set is flushed away</li>
     * </ul>
     *
     * @param hard {@code true} for hard reset. Generates a new nodeId and currentEpoch/configEpoch are set to 0
     * @return String simple-string-reply
     */
    Mono<String> clusterReset(boolean hard);

    /**
     * Delete all the slots associated with the specified node. The number of deleted slots is returned.
     *
     * @return String simple-string-reply
     */
    Mono<String> clusterFlushslots();

    /**
     * Tells a Redis cluster replica node that the client is ok reading possibly stale data and is not interested in running
     * write queries.
     *
     * @return String simple-string-reply
     */
    Mono<String> readOnly();

    /**
     * Resets readOnly flag.
     *
     * @return String simple-string-reply
     */
    Mono<String> readWrite();

    /**
     * Delete one or more keys with pipelining. Cross-slot keys will result in multiple calls to the particular cluster nodes.
     *
     * @param keys the keys
     * @return Mono&lt;Long&gt; integer-reply The number of keys that were removed.
     */
    Mono<Long> del(K... keys);

    /**
     * Get the values of all the given keys with pipelining. Cross-slot keys will result in multiple calls to the particular
     * cluster nodes.
     *
     * @param keys the keys
     * @return Flux&lt;KeyValue&lt;K, V&gt;&gt; array-reply list of values at the specified keys.
     */
    Flux<KeyValue<K, V>> mget(K... keys);

    /**
     * Set multiple keys to multiple values with pipelining. Cross-slot keys will result in multiple calls to the particular
     * cluster nodes.
     *
     * @param map the map of keys to values to set
     * @return Mono&lt;String&gt; simple-string-reply always {@code OK} since {@code MSET} can't fail.
     */
    Mono<String> mset(Map<K, V> map);

    /**
     * Set multiple keys to multiple values, only if none of the keys exist with pipelining. Cross-slot keys will result in
     * multiple calls to the particular cluster nodes.
     *
     * @param map the map of keys to values to set
     * @return Mono&lt;Boolean&gt; integer-reply specifically:
     *
     *         {@code 1} if the all the keys were set. {@code 0} if no key was set (at least one key already existed).
     */
    Mono<Boolean> msetnx(Map<K, V> map);

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.jdbc; import org.apache.commons.lang.StringUtils; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.auth.SaslQOP; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.rpc.thrift.*; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponse; import org.apache.http.client.CookieStore; import org.apache.http.client.ServiceUnavailableRetryStrategy; import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.ssl.DefaultHostnameVerifier; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.BasicHttpClientConnectionManager; import 
org.apache.http.protocol.HttpContext; import org.apache.http.ssl.SSLContexts; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import java.io.FileInputStream; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.security.KeyStore; import java.security.SecureRandom; import java.sql.*; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; /** * HiveConnection. 
 *
 */
public class HiveConnection implements Connection {
  public static final Logger LOG = LoggerFactory.getLogger(HiveConnection.class.getName());

  private String jdbcUriString;
  private String host;
  private int port;
  private final Map<String, String> sessConfMap;
  private JdbcConnectionParams connParams;
  private final boolean isEmbeddedMode;
  private TTransport transport;
  private boolean assumeSubject;
  // TODO should be replaced by CliServiceClient
  private TCLIService.Iface client;
  private boolean isClosed = true;
  private SQLWarning warningChain = null;
  private TSessionHandle sessHandle = null;
  private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
  private int loginTimeout = 0;
  private TProtocolVersion protocol;
  private int fetchSize = HiveStatement.DEFAULT_FETCH_SIZE;

  /**
   * Parses the JDBC URL, opens the Thrift transport (embedded or remote,
   * binary or HTTP), opens the HiveServer2 session and wraps the client in a
   * synchronized proxy.
   *
   * @param uri JDBC URL of the form jdbc:hive2://host:port/db;sess_vars?hive_conf#hive_vars
   * @param info additional connection properties
   * @throws SQLException if URL parsing, transport open, or session open fails
   */
  public HiveConnection(String uri, Properties info) throws SQLException {
    setupLoginTimeout();
    try {
      connParams = Utils.parseURL(uri, info);
    } catch (ZooKeeperHiveClientException e) {
      throw new SQLException(e);
    }
    jdbcUriString = connParams.getJdbcUriString();
    // JDBC URL: jdbc:hive2://<host>:<port>/dbName;sess_var_list?hive_conf_list#hive_var_list
    // each list: <key1>=<val1>;<key2>=<val2> and so on
    // sess_var_list -> sessConfMap
    // hive_conf_list -> hiveConfMap
    // hive_var_list -> hiveVarMap
    host = connParams.getHost();
    port = connParams.getPort();
    sessConfMap = connParams.getSessionVars();
    isEmbeddedMode = connParams.isEmbeddedMode();
    if (sessConfMap.containsKey(JdbcConnectionParams.FETCH_SIZE)) {
      fetchSize = Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
    }

    if (isEmbeddedMode) {
      EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
      embeddedClient.init(null);
      client = embeddedClient;
    } else {
      // open the client transport
      openTransport();
      // set up the client
      client = new TCLIService.Client(new TBinaryProtocol(transport));
    }
    // add supported protocols
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V7);
    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8);

    // open client session
    openSession();

    // Wrap the client with a thread-safe proxy to serialize the RPC calls
    client = newSynchronizedClient(client);
  }

  /**
   * Opens the Thrift transport, retrying up to the configured retry count.
   * In ZooKeeper discovery mode a failed attempt advances to the next server
   * registered in ZooKeeper before retrying.
   *
   * @throws SQLException with SQLState " 08S01" once all retries are exhausted
   */
  private void openTransport() throws SQLException {
    int maxRetries = 1;
    try {
      String strRetries = sessConfMap.get(JdbcConnectionParams.RETRIES);
      if (StringUtils.isNotBlank(strRetries)) {
        maxRetries = Integer.parseInt(strRetries);
      }
    } catch(NumberFormatException e) {
      // Ignore the exception
    }

    for (int numRetries = 0;;) {
      try {
        assumeSubject =
            JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap
                .get(JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE));
        transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
        if (!transport.isOpen()) {
          transport.open();
          logZkDiscoveryMessage("Connected to " + connParams.getHost() + ":" + connParams.getPort());
        }
        break;
      } catch (TTransportException e) {
        LOG.warn("Failed to connect to " + connParams.getHost() + ":" + connParams.getPort());
        String errMsg = null;
        String warnMsg = "Could not open client transport with JDBC Uri: " + jdbcUriString + ": ";
        if (isZkDynamicDiscoveryMode()) {
          errMsg = "Could not open client transport for any of the Server URI's in ZooKeeper: ";
          // Try next available server in zookeeper, or retry all the servers again if retry is enabled
          while(!Utils.updateConnParamsFromZooKeeper(connParams) && ++numRetries < maxRetries) {
            connParams.getRejectedHostZnodePaths().clear();
          }
          // Update with new values
          jdbcUriString = connParams.getJdbcUriString();
          host = connParams.getHost();
          port = connParams.getPort();
        } else {
          errMsg = warnMsg;
          ++numRetries;
        }
        if (numRetries >= maxRetries) {
          // NOTE(review): SQLState literal " 08S01" carries a leading space — verify intended.
          throw new SQLException(errMsg + e.getMessage(), " 08S01", e);
        } else {
          LOG.warn(warnMsg + e.getMessage() + " Retrying " + numRetries + " of " + maxRetries);
        }
      }
    }
  }

  /** @return the (possibly ZooKeeper-updated) JDBC URL this connection used */
  public String getConnectedUrl() {
    return jdbcUriString;
  }

  /**
   * Builds the HTTP(S) URL for HTTP transport mode from host, port and the
   * configured httpPath session variable (defaulting to "/").
   */
  private String getServerHttpUrl(boolean useSsl) {
    // Create the http/https url
    // JDBC driver will set up an https url if ssl is enabled, otherwise http
    String schemeName = useSsl ? "https" : "http";
    // http path should begin with "/"
    String httpPath;
    httpPath = sessConfMap.get(JdbcConnectionParams.HTTP_PATH);
    if (httpPath == null) {
      httpPath = "/";
    } else if (!httpPath.startsWith("/")) {
      httpPath = "/" + httpPath;
    }
    return schemeName + "://" + host + ":" + port + httpPath;
  }

  /**
   * Creates a THttpClient transport and probes the server with a throw-away
   * OpenSession/CloseSession round trip so connectivity errors surface here
   * (and can be retried by openTransport).
   */
  private TTransport createHttpTransport() throws SQLException, TTransportException {
    CloseableHttpClient httpClient;
    boolean useSsl = isSslConnection();
    // Create an http client from the configs
    httpClient = getHttpClient(useSsl);
    try {
      transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
      // We'll call an open/close here to send a test HTTP message to the server. Any
      // TTransportException caused by trying to connect to a non-available peer are thrown here.
      // Bubbling them up the call hierarchy so that a retry can happen in openTransport,
      // if dynamic service discovery is configured.
      TCLIService.Iface client = new TCLIService.Client(new TBinaryProtocol(transport));
      TOpenSessionResp openResp = client.OpenSession(new TOpenSessionReq());
      if (openResp != null) {
        client.CloseSession(new TCloseSessionReq(openResp.getSessionHandle()));
      }
    } catch (TException e) {
      LOG.info("JDBC Connection Parameters used : useSSL = " + useSsl + " , httpPath = " +
        sessConfMap.get(JdbcConnectionParams.HTTP_PATH) + " Authentication type = " +
        sessConfMap.get(JdbcConnectionParams.AUTH_TYPE));
      String msg = "Could not create http connection to " + jdbcUriString + ". " + e.getMessage();
      throw new TTransportException(msg, e);
    }
    return transport;
  }

  /**
   * Builds the Apache HttpClient used by the HTTP transport: chooses the auth
   * interceptor (Kerberos, delegation token, or basic), optionally enables
   * cookie-based auth with a single 401 retry, adds an XSRF header, and
   * configures SSL (one-way or two-way) when requested.
   */
  private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
    boolean isCookieEnabled = sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH) == null ||
      (!JdbcConnectionParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
      sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH)));
    String cookieName = sessConfMap.get(JdbcConnectionParams.COOKIE_NAME) == null ?
      JdbcConnectionParams.DEFAULT_COOKIE_NAMES_HS2 : sessConfMap.get(JdbcConnectionParams.COOKIE_NAME);
    CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
    HttpClientBuilder httpClientBuilder;
    // Request interceptor for any request pre-processing logic
    HttpRequestInterceptor requestInterceptor;
    Map<String, String> additionalHttpHeaders = new HashMap<String, String>();

    // Retrieve the additional HttpHeaders
    for (Entry<String, String> entry : sessConfMap.entrySet()) {
      String key = entry.getKey();
      if (key.startsWith(JdbcConnectionParams.HTTP_HEADER_PREFIX)) {
        additionalHttpHeaders.put(key.substring(JdbcConnectionParams.HTTP_HEADER_PREFIX.length()),
          entry.getValue());
      }
    }
    // Configure http client for kerberos/password based authentication
    if (isKerberosAuthMode()) {
      /**
       * Add an interceptor which sets the appropriate header in the request.
       * It does the kerberos authentication and get the final service ticket,
       * for sending to the server before every request.
       * In https mode, the entire information is encrypted
       */
      requestInterceptor =
          new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
              host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, cookieName, useSsl,
              additionalHttpHeaders);
    } else {
      // Check for delegation token, if present add it in the header
      String tokenStr = getClientDelegationToken(sessConfMap);
      if (tokenStr != null) {
        requestInterceptor =
            new HttpTokenAuthInterceptor(tokenStr, cookieStore, cookieName, useSsl,
                additionalHttpHeaders);
      } else {
        /**
         * Add an interceptor to pass username/password in the header.
         * In https mode, the entire information is encrypted
         */
        requestInterceptor =
            new HttpBasicAuthInterceptor(getUserName(), getPassword(), cookieStore, cookieName,
                useSsl, additionalHttpHeaders);
      }
    }
    // Configure http client for cookie based authentication
    if (isCookieEnabled) {
      // Create a http client with a retry mechanism when the server returns a status code of 401.
      httpClientBuilder =
          HttpClients.custom().setServiceUnavailableRetryStrategy(
              new ServiceUnavailableRetryStrategy() {
                @Override
                public boolean retryRequest(final HttpResponse response, final int executionCount,
                    final HttpContext context) {
                  int statusCode = response.getStatusLine().getStatusCode();
                  // Retry exactly once on an auth failure (401).
                  boolean ret = statusCode == 401 && executionCount <= 1;
                  // Set the context attribute to true which will be interpreted by the request
                  // interceptor
                  if (ret) {
                    context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE);
                  }
                  return ret;
                }

                @Override
                public long getRetryInterval() {
                  // Immediate retry
                  return 0;
                }
              });
    } else {
      httpClientBuilder = HttpClientBuilder.create();
    }
    // Add the request interceptor to the client builder
    httpClientBuilder.addInterceptorFirst(requestInterceptor);

    // Add an interceptor to add in an XSRF header
    httpClientBuilder.addInterceptorLast(new XsrfHttpRequestInterceptor());

    // Configure http client for SSL
    if (useSsl) {
      String useTwoWaySSL = sessConfMap.get(JdbcConnectionParams.USE_TWO_WAY_SSL);
      String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
      String sslTrustStorePassword = sessConfMap.get(
        JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
      KeyStore sslTrustStore;
      SSLConnectionSocketFactory socketFactory;
      SSLContext sslContext;
      /**
       * The code within the try block throws: SSLInitializationException, KeyStoreException,
       * IOException, NoSuchAlgorithmException, CertificateException, KeyManagementException &
       * UnrecoverableKeyException. We don't want the client to retry on any of these,
       * hence we catch all and throw a SQLException.
       */
      try {
        if (useTwoWaySSL != null &&
            useTwoWaySSL.equalsIgnoreCase(JdbcConnectionParams.TRUE)) {
          socketFactory = getTwoWaySSLSocketFactory();
        } else if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
          // Create a default socket factory based on standard JSSE trust material
          socketFactory = SSLConnectionSocketFactory.getSocketFactory();
        } else {
          // Pick trust store config from the given path
          sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
          try (FileInputStream fis = new FileInputStream(sslTrustStorePath)) {
            sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
          }
          sslContext = SSLContexts.custom().loadTrustMaterial(sslTrustStore, null).build();
          socketFactory = new SSLConnectionSocketFactory(sslContext, new DefaultHostnameVerifier(null));
        }
        final Registry<ConnectionSocketFactory> registry =
            RegistryBuilder.<ConnectionSocketFactory> create().register("https", socketFactory)
            .build();
        httpClientBuilder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
      } catch (Exception e) {
        String msg = "Could not create an https connection to " + jdbcUriString + ". " + e.getMessage();
        throw new SQLException(msg, " 08S01", e);
      }
    }
    return httpClientBuilder.build();
  }

  /**
   * Create underlying SSL or non-SSL transport
   *
   * @return TTransport
   * @throws TTransportException
   */
  private TTransport createUnderlyingTransport() throws TTransportException {
    TTransport transport = null;
    // Note: Thrift returns an SSL socket that is already bound to the specified host:port
    // Therefore an open called on this would be a no-op later
    // Hence, any TTransportException related to connecting with the peer are thrown here.
    // Bubbling them up the call hierarchy so that a retry can happen in openTransport,
    // if dynamic service discovery is configured.
    if (isSslConnection()) {
      // get SSL socket
      String sslTrustStore = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
      String sslTrustStorePassword = sessConfMap.get(
        JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);

      if (sslTrustStore == null || sslTrustStore.isEmpty()) {
        transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
      } else {
        transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout,
            sslTrustStore, sslTrustStorePassword);
      }
    } else {
      // get non-SSL socket transport
      transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
    }
    return transport;
  }

  /**
   * Create transport per the connection options
   * Supported transport options are:
   *   - SASL based transports over
   *      + Kerberos
   *      + Delegation token
   *      + SSL
   *      + non-SSL
   *   - Raw (non-SASL) socket
   *
   *   Kerberos and Delegation token supports SASL QOP configurations
   * @throws SQLException, TTransportException
   */
  private TTransport createBinaryTransport() throws SQLException, TTransportException {
    try {
      TTransport socketTransport = createUnderlyingTransport();
      // handle secure connection if specified
      if (!JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
        // If Kerberos
        Map<String, String> saslProps = new HashMap<String, String>();
        SaslQOP saslQOP = SaslQOP.AUTH;
        if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_QOP)) {
          try {
            saslQOP = SaslQOP.fromString(sessConfMap.get(JdbcConnectionParams.AUTH_QOP));
          } catch (IllegalArgumentException e) {
            throw new SQLException("Invalid " + JdbcConnectionParams.AUTH_QOP +
                " parameter. " + e.getMessage(), "42000", e);
          }
          saslProps.put(Sasl.QOP, saslQOP.toString());
        } else {
          // If the client did not specify qop then just negotiate the one supported by server
          saslProps.put(Sasl.QOP, "auth-conf,auth-int,auth");
        }
        saslProps.put(Sasl.SERVER_AUTH, "true");
        if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL)) {
          transport = KerberosSaslHelper.getKerberosTransport(
              sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host,
              socketTransport, saslProps, assumeSubject);
        } else {
          // If there's a delegation token available then use token based connection
          String tokenStr = getClientDelegationToken(sessConfMap);
          if (tokenStr != null) {
            transport = KerberosSaslHelper.getTokenTransport(tokenStr,
                host, socketTransport, saslProps);
          } else {
            // we are using PLAIN Sasl connection with user/password
            String userName = getUserName();
            String passwd = getPassword();
            // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
            transport = PlainSaslHelper.getPlainTransport(userName, passwd, socketTransport);
          }
        }
      } else {
        // Raw socket connection (non-sasl)
        transport = socketTransport;
      }
    } catch (SaslException e) {
      throw new SQLException("Could not create secure connection to "
          + jdbcUriString + ": " + e.getMessage(), " 08S01", e);
    }
    return transport;
  }

  /**
   * Builds an SSLConnectionSocketFactory for two-way (mutual) SSL from the
   * configured key store and trust store session variables.
   *
   * @throws SQLException wrapping any key/trust store or TLS initialization failure
   */
  SSLConnectionSocketFactory getTwoWaySSLSocketFactory() throws SQLException {
    SSLConnectionSocketFactory socketFactory = null;

    try {
      KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(
        JdbcConnectionParams.SUNX509_ALGORITHM_STRING,
        JdbcConnectionParams.SUNJSSE_ALGORITHM_STRING);
      String keyStorePath = sessConfMap.get(JdbcConnectionParams.SSL_KEY_STORE);
      String keyStorePassword = sessConfMap.get(JdbcConnectionParams.SSL_KEY_STORE_PASSWORD);
      KeyStore sslKeyStore = KeyStore.getInstance(JdbcConnectionParams.SSL_KEY_STORE_TYPE);

      if (keyStorePath == null || keyStorePath.isEmpty()) {
        throw new IllegalArgumentException(JdbcConnectionParams.SSL_KEY_STORE
            + " Not configured for 2 way SSL connection, keyStorePath param is empty");
      }
      try (FileInputStream fis = new FileInputStream(keyStorePath)) {
        sslKeyStore.load(fis, keyStorePassword.toCharArray());
      }
      keyManagerFactory.init(sslKeyStore, keyStorePassword.toCharArray());

      TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(
        JdbcConnectionParams.SUNX509_ALGORITHM_STRING);
      String trustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
      String trustStorePassword = sessConfMap.get(
        JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
      KeyStore sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);

      if (trustStorePath == null || trustStorePath.isEmpty()) {
        throw new IllegalArgumentException(JdbcConnectionParams.SSL_TRUST_STORE
            + " Not configured for 2 way SSL connection");
      }
      try (FileInputStream fis = new FileInputStream(trustStorePath)) {
        sslTrustStore.load(fis, trustStorePassword.toCharArray());
      }
      trustManagerFactory.init(sslTrustStore);
      SSLContext context = SSLContext.getInstance("TLS");
      context.init(keyManagerFactory.getKeyManagers(),
        trustManagerFactory.getTrustManagers(), new SecureRandom());
      socketFactory = new SSLConnectionSocketFactory(context);
    } catch (Exception e) {
      throw new SQLException("Error while initializing 2 way ssl socket factory ", e);
    }
    return socketFactory;
  }

  // Lookup the delegation token. First in the connection URL, then Configuration
  private String getClientDelegationToken(Map<String, String> jdbcConnConf) throws SQLException {
    String tokenStr = null;
    if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
      // check delegation token in job conf if any
      try {
        tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
      } catch (IOException e) {
        throw new SQLException("Error reading token ", e);
      }
    }
    return tokenStr;
  }

  /**
   * Sends the OpenSession request carrying hiveconf/hivevar settings, the
   * target database, and optional proxy-user / simple-auth credentials, then
   * records the negotiated protocol version and session handle.
   *
   * @throws SQLException if the server rejects the session or speaks an unsupported protocol
   */
  private void openSession() throws SQLException {
    TOpenSessionReq openReq = new TOpenSessionReq();

    Map<String, String> openConf = new HashMap<String, String>();
    // for remote JDBC client, try to set the conf var using 'set foo=bar'
    for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
      openConf.put("set:hiveconf:" + hiveConf.getKey(), hiveConf.getValue());
    }
    // For remote JDBC client, try to set the hive var using 'set hivevar:key=value'
    for (Entry<String, String> hiveVar : connParams.getHiveVars().entrySet()) {
      openConf.put("set:hivevar:" + hiveVar.getKey(), hiveVar.getValue());
    }
    // switch the database
    openConf.put("use:database", connParams.getDbName());

    // set the session configuration
    Map<String, String> sessVars = connParams.getSessionVars();
    if (sessVars.containsKey(HiveAuthFactory.HS2_PROXY_USER)) {
      openConf.put(HiveAuthFactory.HS2_PROXY_USER,
          sessVars.get(HiveAuthFactory.HS2_PROXY_USER));
    }
    openReq.setConfiguration(openConf);

    // Store the user name in the open request in case no non-sasl authentication
    if (JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
      openReq.setUsername(sessConfMap.get(JdbcConnectionParams.AUTH_USER));
      openReq.setPassword(sessConfMap.get(JdbcConnectionParams.AUTH_PASSWD));
    }

    try {
      TOpenSessionResp openResp = client.OpenSession(openReq);

      // validate connection
      Utils.verifySuccess(openResp.getStatus());
      if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) {
        throw new TException("Unsupported Hive2 protocol");
      }
      protocol = openResp.getServerProtocolVersion();
      sessHandle = openResp.getSessionHandle();
    } catch (TException e) {
      LOG.error("Error opening session", e);
      throw new SQLException("Could not establish connection to "
          + jdbcUriString + ": " + e.getMessage(), " 08S01", e);
    }
    isClosed = false;
  }

  /**
   * @return username from sessConfMap
   */
  private String getUserName() {
    return getSessionValue(JdbcConnectionParams.AUTH_USER, JdbcConnectionParams.ANONYMOUS_USER);
  }

  /**
   * @return password from sessConfMap
   */
  private String getPassword() {
    return getSessionValue(JdbcConnectionParams.AUTH_PASSWD, JdbcConnectionParams.ANONYMOUS_PASSWD);
  }

  // True when the ssl session variable is explicitly "true" (case-insensitive).
  private boolean isSslConnection() {
    return "true".equalsIgnoreCase(sessConfMap.get(JdbcConnectionParams.USE_SSL));
  }

  // Kerberos mode: a principal is configured and auth type is not "simple".
  private boolean isKerberosAuthMode() {
    return !JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))
        && sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL);
  }

  // True when the transportMode session variable is "http" (case-insensitive).
  private boolean isHttpTransportMode() {
    String transportMode = sessConfMap.get(JdbcConnectionParams.TRANSPORT_MODE);
    if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
      return true;
    }
    return false;
  }

  // True when service discovery mode is set to "zooKeeper".
  private boolean isZkDynamicDiscoveryMode() {
    return (sessConfMap.get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE) != null)
        && (JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER.equalsIgnoreCase(sessConfMap
            .get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE)));
  }

  // Logs the message only when ZooKeeper dynamic discovery is active.
  private void logZkDiscoveryMessage(String message) {
    if (isZkDynamicDiscoveryMode()) {
      LOG.info(message);
    }
  }

  /**
   * Lookup varName in sessConfMap, if its null or empty return the default
   * value varDefault
   * @param varName
   * @param varDefault
   * @return
   */
  private String getSessionValue(String varName, String varDefault) {
    String varValue = sessConfMap.get(varName);
    if ((varValue == null) || varValue.isEmpty()) {
      varValue =
varDefault; } return varValue; } // copy loginTimeout from driver manager. Thrift timeout needs to be in millis private void setupLoginTimeout() { long timeOut = TimeUnit.SECONDS.toMillis(DriverManager.getLoginTimeout()); if (timeOut > Integer.MAX_VALUE) { loginTimeout = Integer.MAX_VALUE; } else { loginTimeout = (int) timeOut; } } public void abort(Executor executor) throws SQLException { // JDK 1.7 throw new SQLException("Method not supported"); } public String getDelegationToken(String owner, String renewer) throws SQLException { TGetDelegationTokenReq req = new TGetDelegationTokenReq(sessHandle, owner, renewer); try { TGetDelegationTokenResp tokenResp = client.GetDelegationToken(req); Utils.verifySuccess(tokenResp.getStatus()); return tokenResp.getDelegationToken(); } catch (TException e) { throw new SQLException("Could not retrieve token: " + e.getMessage(), " 08S01", e); } } public void cancelDelegationToken(String tokenStr) throws SQLException { TCancelDelegationTokenReq cancelReq = new TCancelDelegationTokenReq(sessHandle, tokenStr); try { TCancelDelegationTokenResp cancelResp = client.CancelDelegationToken(cancelReq); Utils.verifySuccess(cancelResp.getStatus()); return; } catch (TException e) { throw new SQLException("Could not cancel token: " + e.getMessage(), " 08S01", e); } } public void renewDelegationToken(String tokenStr) throws SQLException { TRenewDelegationTokenReq cancelReq = new TRenewDelegationTokenReq(sessHandle, tokenStr); try { TRenewDelegationTokenResp renewResp = client.RenewDelegationToken(cancelReq); Utils.verifySuccess(renewResp.getStatus()); return; } catch (TException e) { throw new SQLException("Could not renew token: " + e.getMessage(), " 08S01", e); } } /* * (non-Javadoc) * * @see java.sql.Connection#clearWarnings() */ @Override public void clearWarnings() throws SQLException { warningChain = null; } /* * (non-Javadoc) * * @see java.sql.Connection#close() */ @Override public void close() throws SQLException { if (!isClosed) { 
    // Tear down the server-side session; even if CloseSession fails the
    // connection is marked closed and the transport is released in finally.
    TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle);
    try {
      client.CloseSession(closeReq);
    } catch (TException e) {
      throw new SQLException("Error while cleaning up the server resources", e);
    } finally {
      isClosed = true;
      if (transport != null) {
        transport.close();
      }
    }
  }
}

/**
 * Not supported: Hive connections are always auto-commit.
 *
 * @see java.sql.Connection#commit()
 */
@Override
public void commit() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createArrayOf(java.lang.String, java.lang.Object[])
 */
@Override
public Array createArrayOf(String arg0, Object[] arg1) throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createBlob()
 */
@Override
public Blob createBlob() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createClob()
 */
@Override
public Clob createClob() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createNClob()
 */
@Override
public NClob createNClob() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createSQLXML()
 */
@Override
public SQLXML createSQLXML() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Creates a Statement object for sending SQL statements to the database.
 *
 * @throws SQLException if a database access error occurs.
 * @see Connection#createStatement()
 */
@Override
public Statement createStatement() throws SQLException {
  if (isClosed) {
    throw new SQLException("Can't create Statement, connection is closed");
  }
  return new HiveStatement(this, client, sessHandle, fetchSize);
}

/**
 * Only CONCUR_READ_ONLY and forward/scroll-insensitive result sets are
 * supported; anything else fails with SQLSTATE HYC00 (optional feature
 * not implemented).
 *
 * @see java.sql.Connection#createStatement(int, int)
 */
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency)
    throws SQLException {
  if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
    throw new SQLException("Statement with resultset concurrency "
        + resultSetConcurrency + " is not supported", "HYC00"); // Optional feature not implemented
  }
  if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) {
    throw new SQLException("Statement with resultset type " + resultSetType
        + " is not supported", "HYC00"); // Optional feature not implemented
  }
  return new HiveStatement(this, client, sessHandle,
      resultSetType == ResultSet.TYPE_SCROLL_INSENSITIVE, fetchSize);
}

/**
 * Not supported (holdability cannot be chosen).
 *
 * @see java.sql.Connection#createStatement(int, int, int)
 */
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency,
    int resultSetHoldability) throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#createStruct(java.lang.String, java.lang.Object[])
 */
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Always true: Hive does not support transactions, so every statement commits.
 *
 * @see java.sql.Connection#getAutoCommit()
 */
@Override
public boolean getAutoCommit() throws SQLException {
  return true;
}

/**
 * Hive has no catalogs; returns the empty string per JDBC convention.
 *
 * @see java.sql.Connection#getCatalog()
 */
@Override
public String getCatalog() throws SQLException {
  return "";
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#getClientInfo()
 */
@Override
public Properties getClientInfo() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#getClientInfo(java.lang.String)
 */
@Override
public String getClientInfo(String name) throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * Reports CLOSE_CURSORS_AT_COMMIT rather than throwing, so generic JDBC
 * tooling can interrogate the connection.
 *
 * @see java.sql.Connection#getHoldability()
 */
@Override
public int getHoldability() throws SQLException {
  return ResultSet.CLOSE_CURSORS_AT_COMMIT;
}

/**
 * @see java.sql.Connection#getMetaData()
 */
@Override
public DatabaseMetaData getMetaData() throws SQLException {
  if (isClosed) {
    throw new SQLException("Connection is closed");
  }
  return new HiveDatabaseMetaData(this, client, sessHandle);
}

/** JDK 1.7 API — not supported. */
public int getNetworkTimeout() throws SQLException {
  // JDK 1.7
  throw new SQLException("Method not supported");
}

/**
 * JDK 1.7 API: returns the current database by querying the server
 * ("SELECT current_database()").
 */
public String getSchema() throws SQLException {
  if (isClosed) {
    throw new SQLException("Connection is closed");
  }
  try (Statement stmt = createStatement();
      ResultSet res = stmt.executeQuery("SELECT current_database()")) {
    if (!res.next()) {
      throw new SQLException("Failed to get schema information");
    }
    return res.getString(1);
  }
}

/**
 * Hive has no transactions; always TRANSACTION_NONE.
 *
 * @see java.sql.Connection#getTransactionIsolation()
 */
@Override
public int getTransactionIsolation() throws SQLException {
  return Connection.TRANSACTION_NONE;
}

/**
 * Not supported.
 *
 * @see java.sql.Connection#getTypeMap()
 */
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
  throw new SQLException("Method not supported");
}

/**
 * @see java.sql.Connection#getWarnings()
 */
@Override
public SQLWarning getWarnings() throws SQLException {
  return warningChain;
}

/**
 * @see java.sql.Connection#isClosed()
 */
@Override
public
boolean isClosed() throws SQLException { return isClosed; } /* * (non-Javadoc) * * @see java.sql.Connection#isReadOnly() */ @Override public boolean isReadOnly() throws SQLException { return false; } /* * (non-Javadoc) * * @see java.sql.Connection#isValid(int) */ @Override public boolean isValid(int timeout) throws SQLException { if (timeout < 0) { throw new SQLException("timeout value was negative"); } boolean rc = false; try { String productName = new HiveDatabaseMetaData(this, client, sessHandle) .getDatabaseProductName(); rc = true; } catch (SQLException e) { // IGNORE } return rc; } /* * (non-Javadoc) * * @see java.sql.Connection#nativeSQL(java.lang.String) */ @Override public String nativeSQL(String sql) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareCall(java.lang.String) */ @Override public CallableStatement prepareCall(String sql) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareCall(java.lang.String, int, int) */ @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareCall(java.lang.String, int, int, int) */ @Override public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareStatement(java.lang.String) */ @Override public PreparedStatement prepareStatement(String sql) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); } /* * (non-Javadoc) * * @see 
java.sql.Connection#prepareStatement(java.lang.String, int) */ @Override public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareStatement(java.lang.String, int[]) */ @Override public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareStatement(java.lang.String, * java.lang.String[]) */ @Override public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareStatement(java.lang.String, int, int) */ @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { return new HivePreparedStatement(this, client, sessHandle, sql); } /* * (non-Javadoc) * * @see java.sql.Connection#prepareStatement(java.lang.String, int, int, int) */ @Override public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#releaseSavepoint(java.sql.Savepoint) */ @Override public void releaseSavepoint(Savepoint savepoint) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#rollback() */ @Override public void rollback() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#rollback(java.sql.Savepoint) */ 
@Override public void rollback(Savepoint savepoint) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#setAutoCommit(boolean) */ @Override public void setAutoCommit(boolean autoCommit) throws SQLException { // Per JDBC spec, if the connection is closed a SQLException should be thrown. if(isClosed) { throw new SQLException("Connection is closed"); } // The auto-commit mode is always enabled for this connection. Per JDBC spec, // if setAutoCommit is called and the auto-commit mode is not changed, the call is a no-op. if (!autoCommit) { LOG.warn("Request to set autoCommit to false; Hive does not support autoCommit=false."); SQLWarning warning = new SQLWarning("Hive does not support autoCommit=false"); if (warningChain == null) warningChain = warning; else warningChain.setNextWarning(warning); } } /* * (non-Javadoc) * * @see java.sql.Connection#setCatalog(java.lang.String) */ @Override public void setCatalog(String catalog) throws SQLException { // Per JDBC spec, if the driver does not support catalogs, // it will silently ignore this request. 
if (isClosed) { throw new SQLException("Connection is closed"); } return; } /* * (non-Javadoc) * * @see java.sql.Connection#setClientInfo(java.util.Properties) */ @Override public void setClientInfo(Properties properties) throws SQLClientInfoException { // TODO Auto-generated method stub throw new SQLClientInfoException("Method not supported", null); } /* * (non-Javadoc) * * @see java.sql.Connection#setClientInfo(java.lang.String, java.lang.String) */ @Override public void setClientInfo(String name, String value) throws SQLClientInfoException { // TODO Auto-generated method stub throw new SQLClientInfoException("Method not supported", null); } /* * (non-Javadoc) * * @see java.sql.Connection#setHoldability(int) */ @Override public void setHoldability(int holdability) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { // JDK 1.7 throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#setReadOnly(boolean) */ @Override public void setReadOnly(boolean readOnly) throws SQLException { // Per JDBC spec, if the connection is closed a SQLException should be thrown. if (isClosed) { throw new SQLException("Connection is closed"); } // Per JDBC spec, the request defines a hint to the driver to enable database optimizations. // The read-only mode for this connection is disabled and cannot be enabled (isReadOnly always returns false). // The most correct behavior is to throw only if the request tries to enable the read-only mode. 
if(readOnly) { throw new SQLException("Enabling read-only mode not supported"); } } /* * (non-Javadoc) * * @see java.sql.Connection#setSavepoint() */ @Override public Savepoint setSavepoint() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Connection#setSavepoint(java.lang.String) */ @Override public Savepoint setSavepoint(String name) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } public void setSchema(String schema) throws SQLException { // JDK 1.7 if (isClosed) { throw new SQLException("Connection is closed"); } if (schema == null || schema.isEmpty()) { throw new SQLException("Schema name is null or empty"); } Statement stmt = createStatement(); stmt.execute("use " + schema); stmt.close(); } /* * (non-Javadoc) * * @see java.sql.Connection#setTransactionIsolation(int) */ @Override public void setTransactionIsolation(int level) throws SQLException { // TODO: throw an exception? 
} /* * (non-Javadoc) * * @see java.sql.Connection#setTypeMap(java.util.Map) */ @Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Wrapper#isWrapperFor(java.lang.Class) */ @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } /* * (non-Javadoc) * * @see java.sql.Wrapper#unwrap(java.lang.Class) */ @Override public <T> T unwrap(Class<T> iface) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); } public TProtocolVersion getProtocol() { return protocol; } public static TCLIService.Iface newSynchronizedClient( TCLIService.Iface client) { return (TCLIService.Iface) Proxy.newProxyInstance( HiveConnection.class.getClassLoader(), new Class [] { TCLIService.Iface.class }, new SynchronizedHandler(client)); } private static class SynchronizedHandler implements InvocationHandler { private final TCLIService.Iface client; SynchronizedHandler(TCLIService.Iface client) { this.client = client; } @Override public Object invoke(Object proxy, Method method, Object [] args) throws Throwable { try { synchronized (client) { return method.invoke(client, args); } } catch (InvocationTargetException e) { // all IFace APIs throw TException if (e.getTargetException() instanceof TException) { throw (TException)e.getTargetException(); } else { // should not happen throw new TException("Error in calling method " + method.getName(), e.getTargetException()); } } catch (Exception e) { throw new TException("Error in calling method " + method.getName(), e); } } } }
/* * Copyright 2014 Uwe Trottmann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.battlelancer.seriesguide.extensions; import android.content.ComponentName; import android.graphics.Point; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.PopupMenu; import android.widget.Toast; import butterknife.Bind; import butterknife.ButterKnife; import com.battlelancer.seriesguide.BuildConfig; import com.battlelancer.seriesguide.R; import com.battlelancer.seriesguide.adapters.ExtensionsAdapter; import com.battlelancer.seriesguide.loaders.AvailableExtensionsLoader; import com.battlelancer.seriesguide.util.Utils; import com.mobeta.android.dslv.DragSortController; import com.mobeta.android.dslv.DragSortListView; import de.greenrobot.event.EventBus; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import timber.log.Timber; /** * Provides tools to display all installed extensions and enable or disable them. 
 */
public class ExtensionsConfigurationFragment extends Fragment
    implements AdapterView.OnItemClickListener {

  // Maximum number of extensions a non-subscriber may enable.
  public static final int EXTENSION_LIMIT_FREE = 2;

  // Analytics category used for Utils.trackAction calls.
  private static final String TAG = "Extension Configuration";

  @Bind(R.id.listViewExtensionsConfiguration) DragSortListView mListView;

  private ExtensionsAdapter mAdapter;
  private PopupMenu mAddExtensionPopupMenu;

  // Extensions installed on the device but not currently enabled (sorted by title).
  private List<ExtensionManager.Extension> mAvailableExtensions = new ArrayList<>();
  // Enabled extensions in display order; null until the loader has delivered once.
  private List<ComponentName> mEnabledExtensions;

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container,
      Bundle savedInstanceState) {
    View v = inflater.inflate(R.layout.fragment_extensions_configuration, container, false);
    ButterKnife.bind(this, v);
    final ExtensionsDragSortController dragSortController = new ExtensionsDragSortController();
    mListView.setFloatViewManager(dragSortController);
    mListView.setOnTouchListener(dragSortController);
    // Reordering an item just permutes mEnabledExtensions and reloads the list.
    mListView.setDropListener(new DragSortListView.DropListener() {
      @Override
      public void drop(int from, int to) {
        ComponentName extension = mEnabledExtensions.remove(from);
        mEnabledExtensions.add(to, extension);
        getLoaderManager().restartLoader(ExtensionsConfigurationActivity.LOADER_ACTIONS_ID,
            null, mExtensionsLoaderCallbacks);
      }
    });
    mListView.setOnItemClickListener(this);
    return v;
  }

  @Override
  public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    mAdapter = new ExtensionsAdapter(getActivity());
    mListView.setAdapter(mAdapter);
    getLoaderManager().initLoader(ExtensionsConfigurationActivity.LOADER_ACTIONS_ID, null,
        mExtensionsLoaderCallbacks);
    setHasOptionsMenu(true);
  }

  @Override
  public void onResume() {
    super.onResume();
    EventBus.getDefault().register(this);
  }

  @Override
  public void onPause() {
    super.onPause();
    EventBus.getDefault().unregister(this);
    // Persist the enabled-extension order when leaving the screen.
    if (mEnabledExtensions != null) { // might not have finished loading, yet
      ExtensionManager.getInstance(getActivity()).setEnabledExtensions(mEnabledExtensions);
    }
  }

  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    super.onCreateOptionsMenu(menu, inflater);
    inflater.inflate(R.menu.extensions_configuration_menu, menu);
    if (Utils.isAmazonVersion()) {
      // No third-party extensions supported on Amazon app store for now.
      menu.findItem(R.id.menu_action_extensions_search).setVisible(false);
      menu.findItem(R.id.menu_action_extensions_search).setEnabled(false);
    }
    // Enable/disable-all shortcuts are debug-only tools.
    if (!BuildConfig.DEBUG) {
      menu.findItem(R.id.menu_action_extensions_enable).setVisible(false);
      menu.findItem(R.id.menu_action_extensions_disable).setVisible(false);
    }
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    int itemId = item.getItemId();
    if (itemId == R.id.menu_action_extensions_search) {
      Utils.launchWebsite(getActivity(), getString(R.string.url_extensions_search), TAG,
          "Get more extensions");
      return true;
    }
    if (itemId == R.id.menu_action_extensions_enable) {
      // Debug helper: enable every installed extension at once.
      List<ExtensionManager.Extension> extensions = ExtensionManager.getInstance(
          getActivity()).queryAllAvailableExtensions();
      List<ComponentName> enabledExtensions = new ArrayList<>();
      for (ExtensionManager.Extension extension : extensions) {
        enabledExtensions.add(extension.componentName);
      }
      ExtensionManager.getInstance(getActivity()).setEnabledExtensions(enabledExtensions);
      Toast.makeText(getActivity(), "Enabled all available extensions", Toast.LENGTH_LONG)
          .show();
      return true;
    }
    if (itemId == R.id.menu_action_extensions_disable) {
      // Debug helper: disable everything.
      ExtensionManager.getInstance(getActivity())
          .setEnabledExtensions(new ArrayList<ComponentName>());
      Toast.makeText(getActivity(), "Disabled all available extensions", Toast.LENGTH_LONG)
          .show();
      return true;
    }
    return super.onOptionsItemSelected(item);
  }

  @Override
  public void onDestroyView() {
    super.onDestroyView();
    ButterKnife.unbind(this);
  }

  // Loads all installed extensions and partitions them into enabled (shown in
  // the list, in mEnabledExtensions order) and available (shown in the popup).
  private LoaderManager.LoaderCallbacks<List<ExtensionManager.Extension>>
      mExtensionsLoaderCallbacks =
      new LoaderManager.LoaderCallbacks<List<ExtensionManager.Extension>>() {
        @Override
        public Loader<List<ExtensionManager.Extension>> onCreateLoader(int id, Bundle args) {
          return new AvailableExtensionsLoader(getActivity());
        }

        @Override
        public void onLoadFinished(Loader<List<ExtensionManager.Extension>> loader,
            List<ExtensionManager.Extension> availableExtensions) {
          if (availableExtensions == null || availableExtensions.size() == 0) {
            Timber.d("Did not find any extension");
          } else {
            Timber.d("Found " + availableExtensions.size() + " extensions");
          }
          // Only read persisted state on the first delivery; afterwards the
          // in-memory list is authoritative (user may have reordered it).
          if (mEnabledExtensions == null) {
            mEnabledExtensions = ExtensionManager.getInstance(getActivity())
                .getEnabledExtensions();
          }
          Set<ComponentName> enabledExtensions = new HashSet<>(mEnabledExtensions);
          // Find all extensions not yet enabled.
          mAvailableExtensions.clear();
          Map<ComponentName, ExtensionManager.Extension> enabledExtensionsMap = new HashMap<>();
          for (ExtensionManager.Extension extension : availableExtensions) {
            if (enabledExtensions.contains(extension.componentName)) {
              // Extension is already enabled.
              enabledExtensionsMap.put(extension.componentName, extension);
              continue;
            }
            mAvailableExtensions.add(extension);
          }
          // Sort available extensions alphabetically.
          Collections.sort(mAvailableExtensions, new Comparator<ExtensionManager.Extension>() {
            @Override
            public int compare(ExtensionManager.Extension extension1,
                ExtensionManager.Extension extension2) {
              String title1 = createTitle(extension1);
              String title2 = createTitle(extension2);
              return title1.compareToIgnoreCase(title2);
            }

            // Falls back to the component name when an extension has no label.
            private String createTitle(ExtensionManager.Extension extension) {
              String title = extension.label;
              if (TextUtils.isEmpty(title)) {
                title = extension.componentName.flattenToShortString();
              }
              return title;
            }
          });
          // Force re-creation of extension add menu.
          if (mAddExtensionPopupMenu != null) {
            mAddExtensionPopupMenu.dismiss();
            mAddExtensionPopupMenu = null;
          }
          // List enabled extensions in order dictated by extension manager.
          List<ExtensionManager.Extension> enabledExtensionsList = new ArrayList<>();
          // Iterate over a copy because the loop may remove stale entries.
          List<ComponentName> enabledExtensionNames = new ArrayList<>(mEnabledExtensions);
          for (ComponentName extensionName : enabledExtensionNames) {
            ExtensionManager.Extension extension = enabledExtensionsMap.get(extensionName);
            if (extension == null) {
              // Filter out any unavailable/uninstalled extensions.
              mEnabledExtensions.remove(extensionName);
              continue;
            }
            enabledExtensionsList.add(extension);
          }
          // Refresh enabled extensions list.
          mAdapter.clear();
          mAdapter.addAll(enabledExtensionsList);
        }

        @Override
        public void onLoaderReset(Loader<List<ExtensionManager.Extension>> loader) {
          mAdapter.clear();
        }
      };

  @Override
  public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
    // Only the trailing "add extension" row reacts to taps.
    if (position == mAdapter.getCount() - 1) {
      // Non-supporters only can add a few extensions.
      if (mAdapter.getCount() - 1 == EXTENSION_LIMIT_FREE
          && !Utils.hasAccessToX(getActivity())) {
        Utils.advertiseSubscription(getActivity());
        return;
      }
      showAddExtensionPopupMenu(view.findViewById(R.id.textViewItemExtensionAddLabel));
      Utils.trackAction(getActivity(), TAG, "Add extension");
    }
  }

  // EventBus callback posted by ExtensionsAdapter when the user removes an item.
  public void onEventMainThread(ExtensionsAdapter.ExtensionDisableRequestEvent event) {
    mEnabledExtensions.remove(event.position);
    getLoaderManager().restartLoader(ExtensionsConfigurationActivity.LOADER_ACTIONS_ID, null,
        mExtensionsLoaderCallbacks);
    Utils.trackAction(getActivity(), TAG, "Remove extension");
  }

  // Shows a popup listing the not-yet-enabled extensions; picking one enables it.
  private void showAddExtensionPopupMenu(View anchorView) {
    if (mAddExtensionPopupMenu != null) {
      mAddExtensionPopupMenu.dismiss();
    }
    mAddExtensionPopupMenu = new PopupMenu(getActivity(), anchorView);
    // Menu item id doubles as the index into mAvailableExtensions.
    for (int i = 0; i < mAvailableExtensions.size(); i++) {
      ExtensionManager.Extension extension = mAvailableExtensions.get(i);
      mAddExtensionPopupMenu.getMenu().add(Menu.NONE, i, Menu.NONE, extension.label);
    }
    mAddExtensionPopupMenu.setOnMenuItemClickListener(
        new PopupMenu.OnMenuItemClickListener() {
          @Override
          public boolean onMenuItemClick(android.view.MenuItem item) {
            // Add to enabled extensions.
            ExtensionManager.Extension extension =
                mAvailableExtensions.get(item.getItemId());
            mEnabledExtensions.add(extension.componentName);
            // Re-populate extension list.
            getLoaderManager().restartLoader(
                ExtensionsConfigurationActivity.LOADER_ACTIONS_ID, null,
                mExtensionsLoaderCallbacks);
            // Scroll to end of list.
            mListView.smoothScrollToPosition(mAdapter.getCount() - 1);
            return true;
          }
        });
    mAddExtensionPopupMenu.show();
  }

  // Drag controller that keeps the trailing "add" row fixed and constrains
  // dragged rows to stay on their side of it.
  private class ExtensionsDragSortController extends DragSortController {

    private int mFloatViewOriginPosition;

    public ExtensionsDragSortController() {
      super(mListView, R.id.drag_handle, DragSortController.ON_DOWN,
          DragSortController.CLICK_REMOVE);
      setRemoveEnabled(false);
    }

    @Override
    public int startDragPosition(MotionEvent ev) {
      int hitPosition = super.dragHandleHitPosition(ev);
      // The last row is the "add extension" button — never draggable.
      if (hitPosition >= mAdapter.getCount() - 1) {
        return DragSortController.MISS;
      }
      return hitPosition;
    }

    @Override
    public View onCreateFloatView(int position) {
      mFloatViewOriginPosition = position;
      return super.onCreateFloatView(position);
    }

    private int mFloatViewHeight = -1; // cache height

    @Override
    public void onDragFloatView(View floatView, Point floatPoint, Point touchPoint) {
      final int addButtonPosition = mAdapter.getCount() - 1;
      final int first = mListView.getFirstVisiblePosition();
      final int lvDivHeight = mListView.getDividerHeight();
      if (mFloatViewHeight == -1) {
        mFloatViewHeight = floatView.getHeight();
      }
      View div = mListView.getChildAt(addButtonPosition - first);
      // Dragging toward the right edge grows the floating view for feedback.
      if (touchPoint.x > mListView.getWidth() / 2) {
        float scale = touchPoint.x - mListView.getWidth() / 2;
        scale /= (float) (mListView.getWidth() / 5);
        ViewGroup.LayoutParams lp = floatView.getLayoutParams();
        lp.height = Math.max(mFloatViewHeight, (int) (scale * mFloatViewHeight));
        floatView.setLayoutParams(lp);
      }
      if (div != null) {
        if (mFloatViewOriginPosition > addButtonPosition) {
          // Don't allow floating View to go above section divider.
          final int limit = div.getBottom() + lvDivHeight;
          if (floatPoint.y < limit) {
            floatPoint.y = limit;
          }
        } else {
          // Don't allow floating View to go below section divider.
          final int limit = div.getTop() - lvDivHeight - floatView.getHeight();
          if (floatPoint.y > limit) {
            floatPoint.y = limit;
          }
        }
      }
    }
  }
}
/** * The MIT License * Copyright (c) 2015 Teal Cube Games * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.json; import java.io.IOException; import java.io.Writer; /* Copyright (c) 2006 JSON.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software shall be used for Good, not Evil. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

/**
 * JSONWriter provides a quick and convenient way of producing JSON text.
 * The texts produced strictly conform to JSON syntax rules. No whitespace is
 * added, so the results are ready for transmission or storage. Each instance of
 * JSONWriter can produce one JSON text.
 * <p>
 * A JSONWriter instance provides a <code>value</code> method for appending
 * values to the text, and a <code>key</code>
 * method for adding keys before values in objects. There are <code>array</code>
 * and <code>endArray</code> methods that make and bound array values, and
 * <code>object</code> and <code>endObject</code> methods which make and bound
 * object values. All of these methods return the JSONWriter instance,
 * permitting a cascade style. For example, <pre>
 * new JSONWriter(myWriter)
 *     .object()
 *         .key("JSON")
 *         .value("Hello, World!")
 *     .endObject();</pre> which writes <pre>
 * {"JSON":"Hello, World!"}</pre>
 * <p>
 * The first method called must be <code>array</code> or <code>object</code>.
 * There are no methods for adding commas or colons. JSONWriter adds them for
 * you. Objects and arrays can be nested up to 200 levels deep.
 * <p>
 * This can sometimes be easier than using a JSONObject to build a string.
 * @author JSON.org
 * @version 2011-11-24
 */
public class JSONWriter {

    /** Maximum nesting depth of objects and arrays. */
    private static final int MAX_DEPTH = 200;

    /**
     * The comma flag determines if a comma should be output before the next
     * value.
     */
    private boolean comma;

    /**
     * The current mode. Values:
     * 'a' (array),
     * 'd' (done),
     * 'i' (initial),
     * 'k' (key),
     * 'o' (object).
     */
    protected char mode;

    /**
     * The object/array stack. A null slot marks an array scope; a JSONObject
     * slot marks an object scope (and tracks its keys for duplicate detection).
     */
    private final JSONObject[] stack;

    /**
     * The stack top index. A value of 0 indicates that the stack is empty.
     */
    private int top;

    /**
     * The writer that will receive the output.
     */
    protected Writer writer;

    /**
     * Make a fresh JSONWriter. It can be used to build one JSON text.
     * @param w the writer that receives the JSON text
     */
    public JSONWriter(Writer w) {
        this.comma = false;
        this.mode = 'i';
        this.stack = new JSONObject[MAX_DEPTH];
        this.top = 0;
        this.writer = w;
    }

    /**
     * Append a value.
     * @param string A string value.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    private JSONWriter append(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null pointer");
        }
        if (this.mode == 'o' || this.mode == 'a') {
            try {
                // A comma separates values only within an array; in an object
                // the comma was already written by key().
                if (this.comma && this.mode == 'a') {
                    this.writer.write(',');
                }
                this.writer.write(string);
            } catch (IOException e) {
                throw new JSONException(e);
            }
            if (this.mode == 'o') {
                // After a value inside an object, a key must come next.
                this.mode = 'k';
            }
            this.comma = true;
            return this;
        }
        throw new JSONException("Value out of sequence.");
    }

    /**
     * Begin appending a new array. All values until the balancing
     * <code>endArray</code> will be appended to this array. The
     * <code>endArray</code> method must be called to mark the array's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter array() throws JSONException {
        if (this.mode == 'i' || this.mode == 'o' || this.mode == 'a') {
            this.push(null);
            this.append("[");
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced array.");
    }

    /**
     * End something.
     * @param mode Mode
     * @param c Closing character
     * @return this
     * @throws JSONException If unbalanced.
     */
    private JSONWriter end(char mode, char c) throws JSONException {
        if (this.mode != mode) {
            throw new JSONException(mode == 'a'
                ? "Misplaced endArray."
                : "Misplaced endObject.");
        }
        this.pop(mode);
        try {
            this.writer.write(c);
        } catch (IOException e) {
            throw new JSONException(e);
        }
        this.comma = true;
        return this;
    }

    /**
     * End an array. This method must be called to balance calls to
     * <code>array</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endArray() throws JSONException {
        return this.end('a', ']');
    }

    /**
     * End an object. This method must be called to balance calls to
     * <code>object</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endObject() throws JSONException {
        // Inside an object the mode is 'k' (expecting a key) between entries.
        return this.end('k', '}');
    }

    /**
     * Append a key. The key will be associated with the next value. In an
     * object, every value must be preceded by a key.
     * @param string A key string.
     * @return this
     * @throws JSONException If the key is out of place. For example, keys
     * do not belong in arrays or if the key is null.
     */
    public JSONWriter key(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null key.");
        }
        if (this.mode == 'k') {
            try {
                // putOnce rejects duplicate keys in the current object scope.
                this.stack[this.top - 1].putOnce(string, Boolean.TRUE);
                if (this.comma) {
                    this.writer.write(',');
                }
                this.writer.write(JSONObject.quote(string));
                this.writer.write(':');
                this.comma = false;
                this.mode = 'o';
                return this;
            } catch (IOException e) {
                throw new JSONException(e);
            }
        }
        throw new JSONException("Misplaced key.");
    }

    /**
     * Begin appending a new object. All keys and values until the balancing
     * <code>endObject</code> will be appended to this object. The
     * <code>endObject</code> method must be called to mark the object's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter object() throws JSONException {
        if (this.mode == 'i') {
            this.mode = 'o';
        }
        if (this.mode == 'o' || this.mode == 'a') {
            this.append("{");
            this.push(new JSONObject());
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced object.");
    }

    /**
     * Pop an array or object scope.
     * @param c The scope to close ('a' for array, 'k' for object).
     * @throws JSONException If nesting is wrong.
     */
    private void pop(char c) throws JSONException {
        if (this.top <= 0) {
            throw new JSONException("Nesting error.");
        }
        char m = this.stack[this.top - 1] == null ? 'a' : 'k';
        if (m != c) {
            throw new JSONException("Nesting error.");
        }
        this.top -= 1;
        // 'd' (done) once the outermost scope closes; otherwise restore the
        // mode of the enclosing scope.
        this.mode = this.top == 0
            ? 'd'
            : this.stack[this.top - 1] == null ? 'a' : 'k';
    }

    /**
     * Push an array or object scope.
     * @param jo The object scope to open, or null for an array scope.
     * @throws JSONException If nesting is too deep.
     */
    private void push(JSONObject jo) throws JSONException {
        if (this.top >= MAX_DEPTH) {
            throw new JSONException("Nesting too deep.");
        }
        this.stack[this.top] = jo;
        this.mode = jo == null ? 'a' : 'k';
        this.top += 1;
    }

    /**
     * Append either the value <code>true</code> or the value
     * <code>false</code>.
     * @param b A boolean.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(boolean b) throws JSONException {
        return this.append(b ? "true" : "false");
    }

    /**
     * Append a double value.
     * @param d A double.
     * @return this
     * @throws JSONException If the number is not finite.
     */
    public JSONWriter value(double d) throws JSONException {
        // Double.valueOf instead of the deprecated new Double(d); routing
        // through value(Object) keeps the non-finite check in valueToString.
        return this.value(Double.valueOf(d));
    }

    /**
     * Append a long value.
     * @param l A long.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(long l) throws JSONException {
        return this.append(Long.toString(l));
    }

    /**
     * Append an object value.
     * @param object The object to append. It can be null, or a Boolean, Number,
     * String, JSONObject, or JSONArray, or an object that implements JSONString.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(Object object) throws JSONException {
        return this.append(JSONObject.valueToString(object));
    }
}
package com.gmail.brian.broll.taxidash.app;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.location.Location;
import android.os.AsyncTask;
import android.util.Log;
import android.widget.Toast;

import com.google.android.gms.maps.model.LatLng;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpVersion;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

/**
 * Created by Brian Broll on 5/26/14.
 *
 * Utility functions shared by multiple classes: debug helpers, network
 * communication with the registration and TaxiDash servers, persistence of
 * favorite drivers/companies, and Google Maps directions parsing.
 */
public class Utils {

    /* * * * * * * Debugging * * * * * * */

    /**
     * Shows a short toast with the given message, but only when
     * {@code CONSTANTS.DEBUG} is enabled.
     */
    public static void debugLogging(Context context, String msg) {
        if (CONSTANTS.DEBUG) {
            Toast.makeText(context, msg, Toast.LENGTH_SHORT).show();
        }
    }

    /* * * * * * * END Debugging * * * * * * */

    /* * * * * * * Network Communications * * * * * * */

    //Registration Server Messages

    /**
     * Queries the registration server for TaxiDash servers near the given
     * location and stores them in {@code CONSTANTS.NEARBY_SERVERS}; the first
     * (nearest) result becomes {@code CONSTANTS.CURRENT_SERVER}.
     */
    public static class initializeTaxiDashConstants extends AsyncTask<Location, Void, Void> {
        @Override
        protected Void doInBackground(Location... params) {
            JSONObject servers = null;
            double latitude = params[0].getLatitude(),
                    longitude = params[0].getLongitude();

            //Request the nearest TaxiDash server
            String endpoint = "/getNearbyTaxiDash?latitude=" + latitude
                    + "&longitude=" + longitude;
            try {
                servers = makeRequestToRegistrationServer(endpoint);
            } catch (IOException e) {
                //Make this more clear
                //TODO
                e.printStackTrace();
            } catch (JSONException e) {
                e.printStackTrace();
            }

            Log.i("Server info", "Server info is " + servers);

            //Store the server info
            JSONArray serverList;
            JSONObject server;
            TaxiDashServer taxiDashServer;
            String city, state, address;
            if (servers != null) {
                try {
                    serverList = servers.getJSONArray("cities");
                    for (int i = 0; i < serverList.length(); i++) {
                        try {
                            server = serverList.getJSONObject(i);
                            city = server.getString("city");
                            state = server.getString("state");
                            address = "http://" + server.getString("address");
                            taxiDashServer = new TaxiDashServer(city, state, address);
                            // The first entry is assumed to be the nearest server.
                            if (i == 0) {
                                CONSTANTS.CURRENT_SERVER = taxiDashServer;
                                Log.i("INIT", "Current city is " + taxiDashServer.getCity());
                            }
                            CONSTANTS.NEARBY_SERVERS.add(taxiDashServer);
                        } catch (JSONException e) {
                            //Error retrieving one of the cities; skip it and continue
                            e.printStackTrace();
                        }
                    }
                } catch (JSONException e) {
                    //Could not retrieve nearbyCities
                    //TODO
                    e.printStackTrace();
                }
            } else {
                //No nearby servers found
                //TODO
            }
            return null;
        }
    }

    /**
     * Fetches every known TaxiDash server from the routing server and appends
     * them to {@code CONSTANTS.ALL_SERVERS}.
     */
    public static class GetAllTaxiDashServers extends AsyncTask<Void, Void, Void> {
        @Override
        protected Void doInBackground(Void... params) {
            //Request all TaxiDash servers from routing server and store them in CONSTANTS
            String path = "/getAllTaxiDashServers";
            JSONObject response = null;
            try {
                response = makeRequestToRegistrationServer(path);
            } catch (IOException e) {
                e.printStackTrace();
            } catch (JSONException e) {
                e.printStackTrace();
            }

            if (response != null) {
                //Get the TaxiDash info
                try {
                    JSONArray servers = response.getJSONArray("cities");
                    JSONObject server;
                    TaxiDashServer taxiDashServer;
                    for (int i = 0; i < servers.length(); i++) {
                        try {
                            server = servers.getJSONObject(i);
                            taxiDashServer = new TaxiDashServer(server.getString("city"),
                                    server.getString("state"),
                                    server.getString("address"));
                            CONSTANTS.ALL_SERVERS.add(taxiDashServer);
                        } catch (JSONException e) {
                            //Failed on a specific server instance; skip it
                            e.printStackTrace();
                        }
                    }
                } catch (JSONException e) {
                    //Could not find the list of cities
                    e.printStackTrace();
                }
            }
            return null;
        }
    }

    //TaxiDash Server messages

    /**
     * Retrieves the contact info of taxi companies local to the current
     * TaxiDash server. Returns the "companies" JSON array, or null when the
     * request or the parse fails.
     */
    public abstract static class GetLocalCompanies extends AsyncTask<Void, Void, JSONArray> {
        @Override
        protected JSONArray doInBackground(Void... params) {
            String path = "/mobile/companies/contact.json";
            JSONObject companyContactInfo = null;
            try {
                companyContactInfo = makeRequestToTaxiDashServer(path);
            } catch (IOException e) {
                e.printStackTrace();
            } catch (JSONException e) {
                e.printStackTrace();
            }

            // Guard against a failed request: previously this dereferenced
            // companyContactInfo unconditionally and could throw an NPE.
            if (companyContactInfo == null) {
                return null;
            }
            try {
                return companyContactInfo.getJSONArray("companies");
            } catch (JSONException e) {
                e.printStackTrace();
            }
            return null;
        }
    }

    /**
     * Loads the driver images for the given drivers.
     * NOTE(review): the decoded Bitmap is currently discarded — presumably it
     * should be cached or attached to the Driver; confirm intended behavior.
     */
    public abstract static class GetDriverImages extends AsyncTask<Driver, Void, Void> {
        @Override
        protected Void doInBackground(Driver... params) {
            Driver driver;
            String endpoint;
            for (int i = 0; i < params.length; i++) {
                driver = params[i];
                endpoint = CONSTANTS.CURRENT_SERVER.getAddress()
                        + "/mobile/images/drivers/" + driver.getBeaconId() + ".json";
                try {
                    Bitmap image = getImageFromServer(endpoint);
                } catch (IOException e) {
                    //Perhaps put something better here...
                    //TODO
                    Log.i("GETTING DRIVER IMAGE", "FAILED");
                    e.printStackTrace();
                }
            }
            return null;
        }
    }

    //Convenience methods

    /** Issues a JSON GET against the current TaxiDash server. */
    private static JSONObject makeRequestToTaxiDashServer(String path)
            throws IOException, JSONException {
        String endpoint = CONSTANTS.CURRENT_SERVER.getAddress() + path;
        Log.i("JSON request", endpoint);
        return makeJSONRequestToServer(endpoint);
    }

    /** Issues a JSON GET against the registration (routing) server. */
    private static JSONObject makeRequestToRegistrationServer(String path)
            throws IOException, JSONException {
        String endpoint = CONSTANTS.ROUTER_ADDRESS + path;
        return makeJSONRequestToServer(endpoint);
    }

    /**
     * Issues a GET request and parses the response body as a JSON object.
     *
     * @throws IOException   on a network failure
     * @throws JSONException when the body is not valid JSON
     */
    public static JSONObject makeJSONRequestToServer(String endpoint)
            throws IOException, JSONException {
        HttpEntity entity = makeRequestToServer(endpoint);
        String resString = EntityUtils.toString(entity);
        return new JSONObject(resString);
    }

    /** Issues a plain HTTP/1.1 GET request and returns the response entity. */
    private static HttpEntity makeRequestToServer(String endpoint) throws IOException {
        HttpClient http = new DefaultHttpClient();
        http.getParams().setParameter(CoreProtocolPNames.PROTOCOL_VERSION,
                HttpVersion.HTTP_1_1);
        HttpGet req = new HttpGet(endpoint);
        HttpResponse res = http.execute(req);
        return res.getEntity();
    }

    /**
     * Downloads and decodes an image.
     * The input stream is closed and the connection released even when
     * decoding fails (previously both leaked).
     */
    public static Bitmap getImageFromServer(String endpoint) throws IOException {
        URL url = new URL(endpoint);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        InputStream input = null;
        try {
            connection.setDoInput(true);
            connection.connect();
            input = connection.getInputStream();
            return BitmapFactory.decodeStream(input);
        } finally {
            if (input != null) {
                try {
                    input.close();
                } catch (IOException ignored) {
                    // best effort; the bitmap (or the original exception) wins
                }
            }
            connection.disconnect();
        }
    }

    /* * * * * * * END Network Communications * * * * * * */

    /* * * * * * * Saving/retrieving favorite drivers to/from file * * * * * * */

    //Convenience methods for loading

    /**
     * Loads the favorite drivers from file.
     * File is saved as CITY_NAME-favorites.dat; an empty list is returned when
     * the file does not exist or cannot be read.
     */
    public static ArrayList<Driver> loadFavoriteDrivers(Context context) {
        ArrayList<Driver> favoriteDrivers = null;
        File favoriteDriverFile = new File(context.getFilesDir(), getFavoriteDriverFileName());
        if (favoriteDriverFile.exists()) {
            favoriteDrivers = (ArrayList<Driver>) loadObjectFromFile(favoriteDriverFile);
        }

        if (favoriteDrivers == null) { //No favorite drivers yet!
            favoriteDrivers = new ArrayList<Driver>();
            Log.i("LOADING FAV DRIVERS", "INITIALIZING FAV DRIVERS");
        }
        return favoriteDrivers;
    }

    /** Returns true when a serialized company list exists for the current city. */
    public static boolean haveStoredLocalCompanies(Context context) {
        File companyFile = new File(context.getFilesDir(), getLocalCompaniesFileName());
        return companyFile.exists();
    }

    /** Deserializes the stored company list for the current city. */
    public static ArrayList<Company> loadLocalCompanies(Context context) {
        File companyFile = new File(context.getFilesDir(), getLocalCompaniesFileName());
        return (ArrayList<Company>) loadObjectFromFile(companyFile);
    }

    /**
     * Deserializes a single object from the given file, returning null on any
     * failure. The stream is now closed in a finally block (it used to leak).
     */
    public static Object loadObjectFromFile(File file) {
        Object object = null;
        ObjectInputStream in = null;
        try {
            FileInputStream inputStream = new FileInputStream(file.getPath());
            in = new ObjectInputStream(inputStream);
            object = in.readObject();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // nothing useful to do on close failure
                }
            }
        }
        return object;
    }

    //Convenience methods for saving

    /** Serializes the favorite drivers for the current city to file. */
    public static void saveFavoriteDrivers(Context context, Object favDrivers) {
        saveData(context, getFavoriteDriverFileName(), favDrivers);
    }

    /** Serializes the local companies for the current city to file. */
    public static void saveLocalCompanies(Context context, List<Company> companies) {
        Log.i("Saving Companies", "Saving " + companies.size() + " companies to file");
        saveData(context, getLocalCompaniesFileName(), companies);
    }

    /**
     * Serializes {@code data} to {@code filename} in the app's files dir.
     * The stream is closed in a finally block so it no longer leaks when
     * writeObject throws.
     */
    public static void saveData(Context context, String filename, Object data) {
        File favoriteDriverFile = new File(context.getFilesDir(), filename);
        ObjectOutputStream out = null;
        try {
            Log.i("Saving Object", "About to save object to " + favoriteDriverFile.getPath());
            FileOutputStream fileOutputStream = new FileOutputStream(favoriteDriverFile.getPath());
            out = new ObjectOutputStream(fileOutputStream);
            out.writeObject(data);
            out.flush();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // nothing useful to do on close failure
                }
            }
        }
    }

    private static String getFavoriteDriverFileName() {
        return CONSTANTS.CURRENT_SERVER.getCity()
                + CONSTANTS.CURRENT_SERVER.getState() + "-favorites.dat";
    }

    private static String getLocalCompaniesFileName() {
        return CONSTANTS.CURRENT_SERVER.getCity()
                + CONSTANTS.CURRENT_SERVER.getState() + "-companies.dat";
    }

    /* * * * * * * END Saving/retrieving favorite drivers to/from file * * * * * * */

    /* * * * * * * Google Maps Directions * * * * * * */

    /*
     * Thank you to Emil Adz for similar code on stack overflow:
     * http://stackoverflow.com/questions/15638884/google-maps-routing-api-v2-android
     */
    private static class GoogleDirections {

        /**
         * Fetches the XML directions document for a driving route between the
         * two points, or null on any failure.
         */
        public static Document getDocument(LatLng start, LatLng end) {
            String url = "http://maps.googleapis.com/maps/api/directions/xml?"
                    + "origin=" + start.latitude + "," + start.longitude
                    + "&destination=" + end.latitude + "," + end.longitude
                    + "&sensor=false&mode=driving";
            try {
                InputStream in = makeRequestToServer(url).getContent();
                DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
                Document doc = builder.parse(in);
                return doc;
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }

        /** Human-readable duration of the first route leg (e.g. "12 mins"). */
        public static String getDurationText(Document doc) {
            NodeList nl1 = doc.getElementsByTagName("duration");
            Node node1 = nl1.item(0);
            NodeList nl2 = node1.getChildNodes();
            Node node2 = nl2.item(getNodeIndex(nl2, "text"));
            Log.i("DurationText", node2.getTextContent());
            return node2.getTextContent();
        }

        /** Duration value of the first "duration" element, in seconds. */
        public static int getDurationValue(Document doc) {
            //Get total duration
            //TODO
            NodeList nl1 = doc.getElementsByTagName("duration");
            Node node1 = nl1.item(0);
            NodeList nl2 = node1.getChildNodes();
            Node node2 = nl2.item(getNodeIndex(nl2, "value"));
            Log.i("DurationValue", node2.getTextContent());
            return Integer.parseInt(node2.getTextContent());
        }

        /** Human-readable distance of the first "distance" element. */
        public static String getDistanceText(Document doc) {
            NodeList nl1 = doc.getElementsByTagName("distance");
            Node node1 = nl1.item(0);
            NodeList nl2 = node1.getChildNodes();
            Node node2 = nl2.item(getNodeIndex(nl2, "text"));
            Log.i("DistanceText", node2.getTextContent());
            return node2.getTextContent();
        }

        /** Sums every "distance" value in the document, in meters. */
        public static int getDistanceValue(Document doc) {
            NodeList nl1 = doc.getElementsByTagName("distance");
            int totalDistance = 0;
            Log.i("distance node length:", nl1.getLength() + "");
            for (int i = 0; i < nl1.getLength(); i++) {
                Node node1 = nl1.item(i);
                NodeList nl2 = node1.getChildNodes();
                Node node2 = nl2.item(getNodeIndex(nl2, "value"));
                Log.i("DistanceValue", node2.getTextContent());
                totalDistance += Integer.parseInt(node2.getTextContent());
            }
            Log.i("TotalDistance: ", totalDistance + " meters (" + (totalDistance / 1609) + ")");
            return totalDistance;
        }

        /**
         * Extracts the route as a list of points: for each step, the start
         * location, the decoded polyline points, and the end location.
         */
        public static ArrayList<LatLng> getDirections(Document doc) {
            NodeList nl1, nl2, nl3;
            ArrayList<LatLng> listGeopoints = new ArrayList<LatLng>();
            nl1 = doc.getElementsByTagName("step");
            if (nl1.getLength() > 0) {
                for (int i = 0; i < nl1.getLength(); i++) {
                    Node node1 = nl1.item(i);
                    nl2 = node1.getChildNodes();

                    Node locationNode = nl2.item(getNodeIndex(nl2, "start_location"));
                    nl3 = locationNode.getChildNodes();
                    Node latNode = nl3.item(getNodeIndex(nl3, "lat"));
                    double lat = Double.parseDouble(latNode.getTextContent());
                    Node lngNode = nl3.item(getNodeIndex(nl3, "lng"));
                    double lng = Double.parseDouble(lngNode.getTextContent());
                    listGeopoints.add(new LatLng(lat, lng));

                    locationNode = nl2.item(getNodeIndex(nl2, "polyline"));
                    nl3 = locationNode.getChildNodes();
                    latNode = nl3.item(getNodeIndex(nl3, "points"));
                    ArrayList<LatLng> arr = decodePoly(latNode.getTextContent());
                    for (int j = 0; j < arr.size(); j++) {
                        listGeopoints.add(new LatLng(arr.get(j).latitude, arr.get(j).longitude));
                    }

                    locationNode = nl2.item(getNodeIndex(nl2, "end_location"));
                    nl3 = locationNode.getChildNodes();
                    latNode = nl3.item(getNodeIndex(nl3, "lat"));
                    lat = Double.parseDouble(latNode.getTextContent());
                    lngNode = nl3.item(getNodeIndex(nl3, "lng"));
                    lng = Double.parseDouble(lngNode.getTextContent());
                    listGeopoints.add(new LatLng(lat, lng));
                }
            }
            return listGeopoints;
        }

        /** Index of the first child named {@code nodename}, or -1 if absent. */
        private static int getNodeIndex(NodeList nl, String nodename) {
            for (int i = 0; i < nl.getLength(); i++) {
                if (nl.item(i).getNodeName().equals(nodename)) {
                    return i;
                }
            }
            return -1;
        }

        /** Decodes a Google encoded polyline string into LatLng points. */
        private static ArrayList<LatLng> decodePoly(String encoded) {
            ArrayList<LatLng> poly = new ArrayList<LatLng>();
            int index = 0, len = encoded.length();
            int lat = 0, lng = 0;
            while (index < len) {
                int b, shift = 0, result = 0;
                do {
                    b = encoded.charAt(index++) - 63;
                    result |= (b & 0x1f) << shift;
                    shift += 5;
                } while (b >= 0x20);
                int dlat = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
                lat += dlat;

                shift = 0;
                result = 0;
                do {
                    b = encoded.charAt(index++) - 63;
                    result |= (b & 0x1f) << shift;
                    shift += 5;
                } while (b >= 0x20);
                int dlng = ((result & 1) != 0 ? ~(result >> 1) : (result >> 1));
                lng += dlng;

                LatLng position = new LatLng((double) lat / 1E5, (double) lng / 1E5);
                poly.add(position);
            }
            return poly;
        }
    }

    /**
     * Computes directions between two points (params[0]=start, params[1]=end)
     * and asks the TaxiDash server for a fare estimate, storing the results in
     * the protected fields for subclasses to consume.
     */
    public abstract static class directionsCalculator extends AsyncTask<LatLng, Void, Void> {
        protected int passengerCount = 1;
        protected int durationValue;
        protected String durationText;
        protected double distanceValue;
        protected String distanceText;
        protected List<LatLng> directions;
        protected double fare = -1;

        //REMOVE
        protected String MSG;

        @Override
        protected Void doInBackground(LatLng... params) {
            //First I will get the directions, then I will get the
            //estimate fare from the TaxiDash server
            assert params.length == 2;
            LatLng start = params[0];
            LatLng end = params[1];
            Document document = GoogleDirections.getDocument(start, end);
            durationText = GoogleDirections.getDurationText(document);
            durationValue = GoogleDirections.getDurationValue(document);
            distanceText = GoogleDirections.getDistanceText(document);
            // NOTE(review): meters -> miles conversion then halved; the /2
            // presumably compensates for duplicated distance nodes — confirm.
            distanceValue = ((double) GoogleDirections.getDistanceValue(document) / 1609.34) / 2; //Convert to miles
            directions = GoogleDirections.getDirections(document);

            //Get estimate fare from TaxiDash server
            String endpoint = "/mobile/estimate_fare.json?origin="
                    + start.latitude + "," + start.longitude
                    + "&destination=" + end.latitude + "," + end.longitude
                    + "&distance=" + distanceValue
                    + "&duration=" + durationValue
                    + "&passengerCount=" + passengerCount;
            try {
                JSONObject response = makeRequestToTaxiDashServer(endpoint);
                fare = response.getDouble("fare");
            } catch (IOException e) {
                e.printStackTrace();
            } catch (JSONException e) {
                e.printStackTrace();
                MSG = e.getMessage();
            }
            return null;
        }
    }

    /* * * * * * * END Google Maps Directions * * * * * * */

    //Save to TEMP_DIR
    //driver images
    //TODO

    //Load driver info
}
/* * Copyright (c) 2017, ValidationFramework Authors * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package com.google.code.validationframework.swing.property;

import com.google.code.validationframework.api.property.ReadableWritableProperty;
import com.google.code.validationframework.api.property.ValueChangeListener;
import com.google.code.validationframework.base.property.PrintStreamValueChangeAdapter;
import org.hamcrest.Description;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatcher;

import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.table.DefaultTableModel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Unit test: checks that the property tracks the table's selected row indices
 * and fires value-change events as the selection model changes.
 *
 * @see JTableSelectedRowIndicesProperty
 */
public class JTableSelectedRowIndicesPropertyTest {

    // 3x3 table under test, built fresh for each test in setUp()
    private JTable table;

    // Selection model of the table; tests drive selection changes through it
    private ListSelectionModel selectionModel;

    // Property under test, bound to the table's row selection
    private ReadableWritableProperty<List<Integer>, List<Integer>> property;

    // Mockito mock used to verify fired value-change events
    private ValueChangeListener<List<Integer>> listenerMock;

    // NOTE(review): currently unused helper; kept for the commented-out tests below
    private void assertListEquals(List<Integer> expected, List<Integer> actual) {
        assertEquals(expected.size(), actual.size());
        for (int i = 0; i < expected.size(); i++) {
            assertEquals(expected.get(i), actual.get(i));
        }
    }

    @SuppressWarnings("unchecked")
    @Before
    public void setUp() {
        // Create table model
        DefaultTableModel tableModel = new DefaultTableModel();
        tableModel.addColumn("A");
        tableModel.addColumn("B");
        tableModel.addColumn("C");
        tableModel.addRow(new Object[]{1, 2, 3});
        tableModel.addRow(new Object[]{4, 5, 6});
        tableModel.addRow(new Object[]{7, 8, 9});

        // Create table
        table = new JTable(tableModel);
        selectionModel = table.getSelectionModel();

        // Create property
        property = new JTableSelectedRowIndicesProperty(table);
        listenerMock = (ValueChangeListener<List<Integer>>) mock(ValueChangeListener.class);
        property.addValueChangeListener(listenerMock);
        // Also log selection changes to stdout for easier debugging
        property.addValueChangeListener(new PrintStreamValueChangeAdapter<List<Integer>>("SELECTION"));
    }

    @Test
    public void testSelectionChanges() {
        // Check initial state
        verifyPropertyValue();

        // Test property value changes: the property must mirror the table's
        // selected rows after every selection-model mutation
        selectionModel.addSelectionInterval(0, 1);
        verifyPropertyValue();
        selectionModel.addSelectionInterval(2, 2);
        verifyPropertyValue();
        selectionModel.removeSelectionInterval(1, 1);
        verifyPropertyValue();
        selectionModel.setSelectionInterval(1, 1);
        selectionModel.clearSelection();
        verifyPropertyValue();

        // Check fired events: only the first transition ([] -> [0, 1]) is
        // verified with exact old/new values
        verify(listenerMock).valueChanged(eq(property), //
                argThat(new CollectionMatcher<Integer>(Collections.<Integer>emptyList())), //
                argThat(new CollectionMatcher<Integer>(Arrays.asList(0, 1))));
//        verify(listenerMock).valueChanged(property, 2, 3);
//        verify(listenerMock).valueChanged(property, 3, 2);
//        verify(listenerMock).valueChanged(property, 2, 1);
//        verify(listenerMock).valueChanged(property, 1, 0);
//        verify(listenerMock, times(5)).valueChanged(any(JTableSelectedRowIndicesProperty.class), anyInt(), anyInt());
    }

    // Asserts that the property value matches the table's current selection,
    // element by element
    private void verifyPropertyValue() {
        int[] selectedRows = table.getSelectedRows();
        assertEquals(selectedRows.length, property.getValue().size());
        for (int i = 0; i < selectedRows.length; i++) {
            assertEquals(Integer.valueOf(selectedRows[i]), property.getValue().get(i));
        }
    }

//    @Test
//    public void testNewModel() {
//        // Check initial state
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        // Test property value changes
//        selectionModel.setSelectionInterval(0, 1);
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        table.setModel(new DefaultTableModel());
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        // Check fired events
//        verify(listenerMock).valueChanged(property, 0, 2);
//        verify(listenerMock).valueChanged(property, 2, 0);
//        verify(listenerMock, times(2)).valueChanged(any(JTableSelectedRowIndicesProperty.class), anyInt(), anyInt());
//    }

//    @Test
//    public void testNewSelectionModel() {
//        // Check initial state
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        // Test property value changes
//        selectionModel.setSelectionInterval(0, 1);
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        table.setSelectionModel(new DefaultListSelectionModel());
//        assertEquals(Integer.valueOf(table.getSelectedRowCount()), property.getValue());
//
//        // Check fired events
//        verify(listenerMock).valueChanged(property, 0, 2);
//        verify(listenerMock).valueChanged(property, 2, 0);
//        verify(listenerMock, times(2)).valueChanged(any(JTableSelectedRowIndicesProperty.class), anyInt(), anyInt());
//    }

    // Hamcrest-style matcher that compares a collection argument element by
    // element against a reference list
    private static class CollectionMatcher<T> extends ArgumentMatcher<List<T>> {

        // Defensive copy of the expected elements
        private final List<T> refElements;

        public CollectionMatcher(List<T> refElements) {
            super();
            this.refElements = new ArrayList<T>(refElements);
        }

        @SuppressWarnings("unchecked")
        @Override
        public boolean matches(Object actualElements) {
            boolean match = false;
            if (actualElements instanceof Collection<?>) {
                match = haveEqualElements(refElements, (Collection<T>) actualElements);
            }
            return match;
        }

        public void describeTo(Description description) {
            // Do nothing
        }
    }

    // True when both collections have the same size and pairwise-equal
    // elements in iteration order
    private static <T> boolean haveEqualElements(Collection<T> first, Collection<T> second) {
        boolean match = false;

        // First, check size
        if (first.size() == second.size()) {
            // Then, check each element
            match = true;
            List<T> firstList = new ArrayList<T>(first);
            List<T> secondList = new ArrayList<T>(second);
            for (int i = 0; (i < first.size()) && match; i++) {
                match = firstList.get(i).equals(secondList.get(i));
            }
        }
        return match;
    }
}
/**
 * Copyright (c) 2015 by My Company
 * This file and its contents are Confidential.
 */
package com.mycompany.example.parser;

import java.text.ParseException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Locale;
import java.util.Date;
import java.util.Map;
import java.util.HashMap;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import java.io.IOException;

/**
 * Generated from java_objects_test1.xml on 15/6/2015
 *
 * Streaming-parser companion for a set of generated bean classes. Each
 * {@code parse} overload consumes exactly one JSON object from a Jackson
 * {@link JsonParser} — the parser's current token must already be
 * {@code START_OBJECT} — and fills the supplied bean instance in place.
 * Unknown fields are skipped with {@link JsonParser#skipChildren()}.
 */
public class ObjectsParser {

    /**
     * Parses the date/time string into the Joda DateTime object.
     * @param dateString the string containing the date/time to parse
     * @return org.joda.time.DateTime object with the parsed date/time
     * @throws IOException if the string cannot be parsed as an ISO date/time
     */
    public static org.joda.time.DateTime parseISODate(String dateString) throws IOException {
        org.joda.time.DateTime date = null;
        try {
            date = org.joda.time.format.ISODateTimeFormat.dateTimeParser().parseDateTime(dateString);
        } catch (UnsupportedOperationException e) {
            // Both Joda failure modes are surfaced to the caller as IOException,
            // matching the throws-clause of the generated parse methods.
            throw new IOException("Unable to parse date: ".concat(dateString));
        } catch (IllegalArgumentException e) {
            throw new IOException("Unable to parse date: ".concat(dateString));
        }
        return date;
    }

    /**
     * Populates {@code commandLine} from the JSON object at the parser's
     * current position.
     *
     * @param parser      Jackson parser positioned on START_OBJECT
     * @param commandLine bean instance to fill; also the return value
     * @throws IOException on malformed or type-mismatched JSON
     */
    public static com.arrays.test.CommandLine parse(JsonParser parser, com.arrays.test.CommandLine commandLine) throws IOException {
        /* object should start with the START_OBJECT token */
        if (parser.getCurrentToken() != JsonToken.START_OBJECT)
            throw new IOException("Json node for com.arrays.test.CommandLine is not a Json Object");

        /* loop through all fields of the object */
        while (parser.nextValue() != JsonToken.END_OBJECT) {
            if (!parser.hasCurrentToken())
                throw new IOException("Malformed JSON");

            final String currentName = parser.getCurrentName();

            if ("executable".equals(currentName)) /* parse "executable" of type String */ {
                /* type verification, either NULL or JsonToken.VALUE_STRING */
                if (!(parser.getCurrentToken() == JsonToken.VALUE_STRING || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.CommandLine::executable is not of type string");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    commandLine.setExecutable(parser.getText());
                }
            } else if ("arguments".equals(currentName)) /* parse "arguments" of type java.util.ArrayList<String> */ {
                /* type verification, either NULL or JsonToken.START_ARRAY */
                if (!(parser.getCurrentToken() == JsonToken.START_ARRAY || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.CommandLine::arguments is not of type [string]");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* Create the array */
                    java.util.ArrayList<String> array = new java.util.ArrayList<String>();
                    /* Loop until the end of the array */
                    while (parser.nextToken() != JsonToken.END_ARRAY) {
                        /* We expect not null elements of correct type */
                        if (!(parser.getCurrentToken() == JsonToken.VALUE_STRING || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                            throw new IOException("Json node for com.arrays.test.CommandLine::arguments[i] is not of type string");
                        // NOTE(review): for a VALUE_NULL element getText() yields the
                        // literal "null" rather than a Java null — confirm intended.
                        array.add(parser.getText());
                    }
                    /* call the setter and set field value */
                    commandLine.setArguments(array);
                }
            } else {
                /* unknown element. Call skipChildren() if the element is an array or object */
                parser.skipChildren();
            }
        }
        return commandLine;
    }

    /**
     * Populates {@code fieldsTest} from the JSON object at the parser's current
     * position. Fields "0field00", "1field01", "field3" and "array3" reference
     * types for which no parse overload was generated — see the NOTE(review)
     * comments inline.
     *
     * @param parser     Jackson parser positioned on START_OBJECT
     * @param fieldsTest bean instance to fill; also the return value
     * @throws IOException on malformed or type-mismatched JSON
     */
    public static com.arrays.test.FieldsTest parse(JsonParser parser, com.arrays.test.FieldsTest fieldsTest) throws IOException {
        /* object should start with the START_OBJECT token */
        if (parser.getCurrentToken() != JsonToken.START_OBJECT)
            throw new IOException("Json node for com.arrays.test.FieldsTest is not a Json Object");

        /* loop through all fields of the object */
        while (parser.nextValue() != JsonToken.END_OBJECT) {
            if (!parser.hasCurrentToken())
                throw new IOException("Malformed JSON");

            final String currentName = parser.getCurrentName();

            if ("0field00".equals(currentName)) /* parse "0field00" of type com.arrays.test.CommandLineVersion1 */ {
                /* type verification, either NULL or JsonToken.START_OBJECT */
                if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::0field00 is not of type com.arrays.test.CommandLineVersion1");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    // NOTE(review): generated placeholder — null is stored and the
                    // nested object's tokens are never consumed, which likely
                    // desynchronizes the token stream. Verify the generator template.
                    fieldsTest.set0field00(null);
                }
            } else if ("1field01".equals(currentName)) /* parse "1field01" of type com.arrays.test.CommandLineVersion2 */ {
                /* type verification, either NULL or JsonToken.START_OBJECT */
                if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::1field01 is not of type com.arrays.test.CommandLineVersion2");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    // NOTE(review): same generated placeholder issue as "0field00".
                    fieldsTest.set1field01(null);
                }
            } else if ("field1".equals(currentName)) /* parse "field1" of type com.arrays.test.CommandLine */ {
                /* type verification, either NULL or JsonToken.START_OBJECT */
                if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::field1 is not of type com.arrays.test.CommandLine");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    fieldsTest.setField1(ObjectsParser.parse(parser, new com.arrays.test.CommandLine()));
                }
            } else if ("field2".equals(currentName)) /* parse "field2" of type com.arrays.test.CommandLine */ {
                /* type verification, either NULL or JsonToken.START_OBJECT */
                if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::field2 is not of type com.arrays.test.CommandLine");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    fieldsTest.setField2(ObjectsParser.parse(parser, new com.arrays.test.CommandLine()));
                }
            } else if ("field3".equals(currentName)) /* parse "field3" of type com.structs.WorkPackage */ {
                /* type verification, either NULL or JsonToken.START_OBJECT */
                if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::field3 is not of type com.structs.WorkPackage");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    // NOTE(review): zero-argument setter call — this will not compile
                    // unless FieldsTest declares a no-arg setField3(). Almost certainly
                    // a generator gap: no parse overload exists for
                    // com.structs.WorkPackage. Confirm and regenerate.
                    fieldsTest.setField3();
                }
            } else if ("array1".equals(currentName)) /* parse "array1" of type java.util.ArrayList<com.arrays.test.CommandLine> */ {
                /* type verification, either NULL or JsonToken.START_ARRAY */
                if (!(parser.getCurrentToken() == JsonToken.START_ARRAY || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::array1 is not of type [com.arrays.test.CommandLine]");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* Create the array */
                    java.util.ArrayList<com.arrays.test.CommandLine> array = new java.util.ArrayList<com.arrays.test.CommandLine>();
                    /* Loop until the end of the array */
                    while (parser.nextToken() != JsonToken.END_ARRAY) {
                        /* We expect not null elements of correct type */
                        if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                            throw new IOException("Json node for com.arrays.test.FieldsTest::array1[i] is not of type com.arrays.test.CommandLine");
                        array.add(ObjectsParser.parse(parser, new com.arrays.test.CommandLine()));
                    }
                    /* call the setter and set field value */
                    fieldsTest.setArray1(array);
                }
            } else if ("array2".equals(currentName)) /* parse "array2" of type java.util.ArrayList<com.arrays.test.CommandLine> */ {
                /* type verification, either NULL or JsonToken.START_ARRAY */
                if (!(parser.getCurrentToken() == JsonToken.START_ARRAY || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::array2 is not of type [com.arrays.test.CommandLine]");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* Create the array */
                    java.util.ArrayList<com.arrays.test.CommandLine> array = new java.util.ArrayList<com.arrays.test.CommandLine>();
                    /* Loop until the end of the array */
                    while (parser.nextToken() != JsonToken.END_ARRAY) {
                        /* We expect not null elements of correct type */
                        if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                            throw new IOException("Json node for com.arrays.test.FieldsTest::array2[i] is not of type com.arrays.test.CommandLine");
                        array.add(ObjectsParser.parse(parser, new com.arrays.test.CommandLine()));
                    }
                    /* call the setter and set field value */
                    fieldsTest.setArray2(array);
                }
            } else if ("array3".equals(currentName)) /* parse "array3" of type java.util.ArrayList<com.structs.test.WorkPackage> */ {
                /* type verification, either NULL or JsonToken.START_ARRAY */
                if (!(parser.getCurrentToken() == JsonToken.START_ARRAY || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.arrays.test.FieldsTest::array3 is not of type [com.structs.test.WorkPackage]");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* Create the array */
                    java.util.ArrayList<com.structs.test.WorkPackage> array = new java.util.ArrayList<com.structs.test.WorkPackage>();
                    /* Loop until the end of the array */
                    while (parser.nextToken() != JsonToken.END_ARRAY) {
                        /* We expect not null elements of correct type */
                        if (!(parser.getCurrentToken() == JsonToken.START_OBJECT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                            throw new IOException("Json node for com.arrays.test.FieldsTest::array3[i] is not of type com.structs.test.WorkPackage");
                        // NOTE(review): generated placeholder — null is appended and a
                        // START_OBJECT element's tokens are never consumed, which likely
                        // desynchronizes the token stream. Verify the generator template.
                        array.add(null);
                    }
                    /* call the setter and set field value */
                    fieldsTest.setArray3(array);
                }
            } else {
                /* unknown element. Call skipChildren() if the element is an array or object */
                parser.skipChildren();
            }
        }
        return fieldsTest;
    }

    /**
     * Consumes the JSON object at the parser's current position; every field is
     * skipped since {@code EmptyObject} declares no properties.
     *
     * @param parser      Jackson parser positioned on START_OBJECT
     * @param emptyObject bean instance; returned unchanged
     * @throws IOException on malformed JSON
     */
    public static com.arrays.test.EmptyObject parse(JsonParser parser, com.arrays.test.EmptyObject emptyObject) throws IOException {
        /* object should start with the START_OBJECT token */
        if (parser.getCurrentToken() != JsonToken.START_OBJECT)
            throw new IOException("Json node for com.arrays.test.EmptyObject is not a Json Object");

        /* loop through all fields of the object */
        while (parser.nextValue() != JsonToken.END_OBJECT) {
            if (!parser.hasCurrentToken())
                throw new IOException("Malformed JSON");

            // Unused generated boilerplate; kept for template consistency.
            final String currentName = parser.getCurrentName();

            /* unknown element. Call skipChildren() if the element is an array or object */
            parser.skipChildren();
        }
        return emptyObject;
    }

    /**
     * Populates {@code personalUserData} from the JSON object at the parser's
     * current position ("name", "occupation", "age", "visits").
     *
     * @param parser           Jackson parser positioned on START_OBJECT
     * @param personalUserData bean instance to fill; also the return value
     * @throws IOException on malformed or type-mismatched JSON
     */
    public static com.mycompany.users.PersonalUserData parse(JsonParser parser, com.mycompany.users.PersonalUserData personalUserData) throws IOException {
        /* object should start with the START_OBJECT token */
        if (parser.getCurrentToken() != JsonToken.START_OBJECT)
            throw new IOException("Json node for com.mycompany.users.PersonalUserData is not a Json Object");

        /* loop through all fields of the object */
        while (parser.nextValue() != JsonToken.END_OBJECT) {
            if (!parser.hasCurrentToken())
                throw new IOException("Malformed JSON");

            final String currentName = parser.getCurrentName();

            if ("name".equals(currentName)) /* parse "name" of type String */ {
                /* type verification, either NULL or JsonToken.VALUE_STRING */
                if (!(parser.getCurrentToken() == JsonToken.VALUE_STRING || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.mycompany.users.PersonalUserData::name is not of type string");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    personalUserData.setName(parser.getText());
                }
            } else if ("occupation".equals(currentName)) /* parse "occupation" of type String */ {
                /* type verification, either NULL or JsonToken.VALUE_STRING */
                if (!(parser.getCurrentToken() == JsonToken.VALUE_STRING || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.mycompany.users.PersonalUserData::occupation is not of type string");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    personalUserData.setOccupation(parser.getText());
                }
            } else if ("age".equals(currentName)) /* parse "age" of type int */ {
                /* type verification, either NULL or JsonToken.VALUE_NUMBER_INT */
                if (!(parser.getCurrentToken() == JsonToken.VALUE_NUMBER_INT || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.mycompany.users.PersonalUserData::age is not of type int");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* call the setter and set field value */
                    personalUserData.setAge(parser.getValueAsInt());
                }
            } else if ("visits".equals(currentName)) /* parse "visits" of type java.util.ArrayList<org.joda.time.DateTime> */ {
                /* type verification, either NULL or JsonToken.START_ARRAY */
                if (!(parser.getCurrentToken() == JsonToken.START_ARRAY || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                    throw new IOException("Json node for com.mycompany.users.PersonalUserData::visits is not of type [date]");
                /* if not NULL try to parse */
                if (parser.getCurrentToken() != JsonToken.VALUE_NULL) {
                    /* Create the array */
                    java.util.ArrayList<org.joda.time.DateTime> array = new java.util.ArrayList<org.joda.time.DateTime>();
                    /* Loop until the end of the array */
                    while (parser.nextToken() != JsonToken.END_ARRAY) {
                        /* We expect not null elements of correct type */
                        if (!(parser.getCurrentToken() == JsonToken.VALUE_STRING || parser.getCurrentToken() == JsonToken.VALUE_NULL))
                            throw new IOException("Json node for com.mycompany.users.PersonalUserData::visits[i] is not of type date");
                        array.add(parseISODate(parser.getText()));
                    }
                    /* call the setter and set field value */
                    personalUserData.setVisits(array);
                }
            } else {
                /* unknown element. Call skipChildren() if the element is an array or object */
                parser.skipChildren();
            }
        }
        return personalUserData;
    }
}
/*
 * Copyright 2001-2013 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.juddi.samples;

import java.net.URL;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import javax.wsdl.Definition;
import javax.wsdl.PortType;
import javax.xml.namespace.QName;
import org.apache.juddi.jaxb.PrintUDDI;
import org.apache.juddi.v3.client.config.UDDIClerk;
import org.apache.juddi.v3.client.config.UDDIClient;
import org.apache.juddi.v3.client.mapping.URLLocalizerDefaultImpl;
import org.apache.juddi.v3.client.mapping.wsdl.ReadWSDL;
import org.apache.juddi.v3.client.mapping.wsdl.WSDL2UDDI;
import org.apache.juddi.v3.client.transport.Transport;
import org.uddi.api_v3.AuthToken;
import org.uddi.api_v3.BusinessDetail;
import org.uddi.api_v3.BusinessEntity;
import org.uddi.api_v3.BusinessServices;
import org.uddi.api_v3.GetAuthToken;
import org.uddi.api_v3.Name;
import org.uddi.api_v3.SaveBusiness;
import org.uddi.api_v3.SaveService;
import org.uddi.api_v3.SaveTModel;
import org.uddi.api_v3.TModel;
import org.uddi.v3_service.UDDIPublicationPortType;
import org.uddi.v3_service.UDDISecurityPortType;

/**
 * This class shows how to perform a WSDL2UDDI import manually. More
 * specifically, this is WSDL2UDDI without using annotations.
 *
 * @author <a href="mailto:alexoree@apache.org">Alex O'Ree</a>
 */
public class WsdlImport {

    // Mapping properties (keyDomain, businessName, serverName, serverPort)
    // consumed by WSDL2UDDI; populated in fire().
    private Properties properties = new Properties();
    // Base URI of the parsed WSDL document; set in fire().
    private String wsdlURL = null;
    // UDDI ports resolved from the transport in fire().
    private UDDISecurityPortType security = null;
    private UDDIPublicationPortType publish = null;

    public static void main(String[] args) throws Exception {
        new WsdlImport().fire("http://svn.apache.org/repos/asf/juddi/trunk/uddi-ws/src/main/resources/juddi_api_v1.wsdl", null, null, null);
    }

    /**
     * Reads a WSDL (from a URL or a local path), converts its portTypes,
     * bindings and services to UDDI tModels/services, and publishes them.
     * Network I/O throughout: performs UDDI saveTModel/saveBusiness/saveService
     * calls against the registry configured in simple-publish-uddi.xml.
     *
     * @param pathOrURL   WSDL location; treated as a URL if it starts with "http"
     * @param businessKey existing business to attach services to, or null/empty
     *                    to create a new business named after the WSDL host
     * @param token       UDDI auth token, or null to authenticate as "uddi"/"uddi"
     * @param transport   UDDI transport, or null to build one from
     *                    META-INF/simple-publish-uddi.xml
     * @throws Exception on any transport, parsing or registry failure
     */
    public void fire(String pathOrURL, String businessKey, String token, Transport transport) throws Exception {
        if (transport == null) {
            // create a manager and read the config in the archive;
            // you can use your config file name
            UDDIClient clerkManager = new UDDIClient("META-INF/simple-publish-uddi.xml");
            transport = clerkManager.getTransport();
        }
        // Now you create a reference to the UDDI API
        security = transport.getUDDISecurityService();
        publish = transport.getUDDIPublishService();
        if (token == null) {
            //step one, get a token
            // NOTE(review): sample-only hard-coded credentials.
            GetAuthToken getAuthTokenRoot = new GetAuthToken();
            getAuthTokenRoot.setUserID("uddi");
            getAuthTokenRoot.setCred("uddi");

            // Making API call that retrieves the authentication token for the 'root' user.
            AuthToken rootAuthToken = security.getAuthToken(getAuthTokenRoot);
            token = rootAuthToken.getAuthInfo();
        }
        //step two, identify the key used for all your stuff
        //you must have a key generator created already
        //here, we are assuming that you don't have one
        //NOTE: these are some of the publicly available WSDLs that were used to test WSDL2UDDI
        //URL url = new URL("http://wsf.cdyne.com/WeatherWS/Weather.asmx?WSDL");
        //http://www.bccs.uni.no/~pve002/wsdls/ebi-mafft.wsdl");
        //http://www.webservicex.net/GenericNAICS.asmx?WSDL");
        //http://www.webservicex.net/stockquote.asmx?WSDL");
        //http://www.webservicex.com/globalweather.asmx?WSDL");
        //http://graphical.weather.gov/xml/SOAP_server/ndfdXMLserver.php?wsdl");
        String domain = "localhost";
        int port = 80;
        if (pathOrURL.startsWith("http")) {
            URL url = new URL(pathOrURL);
            domain = url.getHost();
            port = url.getPort();
            // getPort() returns -1 when the URL has no explicit port; default by scheme.
            if (port == -1) {
                if (pathOrURL.startsWith("https://")) {
                    port = 443;
                }
                if (pathOrURL.startsWith("http://")) {
                    port = 80;
                }
            }
        }
        TModel keygen = UDDIClerk.createKeyGenator("uddi:" + domain + ":keygenerator", domain, "en");
        //save the keygen
        SaveTModel stm = new SaveTModel();
        stm.setAuthInfo(token);
        stm.getTModel().add(keygen);
        System.out.println("Saving key gen " + keygen.getTModelKey());
        publish.saveTModel(stm);
        System.out.println("Saved!");
        //step three, we have two options
        //1) import the wsdl's services into a brand new business
        //2) import the wsdl's services into an existing business
        //in either case, we're going to have to parse the WSDL
        ReadWSDL rw = new ReadWSDL();
        Definition wsdlDefinition = null;
        if (pathOrURL.startsWith("http")) {
            wsdlDefinition = rw.readWSDL(new URL(pathOrURL));
        } else {
            wsdlDefinition = rw.readWSDL(pathOrURL);
        }
        if (wsdlDefinition == null) {
            System.out.println("There was an error parsing the WSDL!");
            return;
        }
        properties.put("keyDomain", domain);
        properties.put("businessName", domain);
        properties.put("serverName", domain);
        // NOTE(review): serverPort is stored as an Integer value — confirm
        // WSDL2UDDI accepts a non-String property here.
        properties.put("serverPort", port);
        wsdlURL = wsdlDefinition.getDocumentBaseURI();
        WSDL2UDDI wsdl2UDDI = new WSDL2UDDI(null, new URLLocalizerDefaultImpl(), properties);
        BusinessServices businessServices = wsdl2UDDI.createBusinessServices(wsdlDefinition);
        @SuppressWarnings("unchecked")
        Map<QName, PortType> portTypes = (Map<QName, PortType>) wsdlDefinition.getAllPortTypes();
        Set<TModel> portTypeTModels = wsdl2UDDI.createWSDLPortTypeTModels(wsdlURL, portTypes);
        Map allBindings = wsdlDefinition.getAllBindings();
        Set<TModel> createWSDLBindingTModels = wsdl2UDDI.createWSDLBindingTModels(wsdlURL, allBindings);
        //When parsing a WSDL, there's really two things going on
        //1) convert a bunch of stuff (the portTypes) to tModels
        //2) convert the service definition to a BusinessService
        //Since the service depends on the tModel, we have to save the tModels first
        stm = new SaveTModel();
        stm.setAuthInfo(token);
        TModel[] tmodels = portTypeTModels.toArray(new TModel[0]);
        for (int i = 0; i < tmodels.length; i++) {
            stm.getTModel().add(tmodels[i]);
        }
        tmodels = createWSDLBindingTModels.toArray(new TModel[0]);
        for (int i = 0; i < tmodels.length; i++) {
            stm.getTModel().add(tmodels[i]);
        }
        //important, you'll need to save your new tModels first, or else saving the business/service may fail
        System.out.println(new PrintUDDI<SaveTModel>().print(stm));
        System.out.println("Saving " + stm.getTModel().size() + " tModels");
        publish.saveTModel(stm);
        System.out.println("Saved!");
        if (businessKey == null || businessKey.length() == 0) {
            // No target business supplied: create one keyed/named after the domain.
            SaveBusiness sb = new SaveBusiness();
            sb.setAuthInfo(token);
            BusinessEntity be = new BusinessEntity();
            be.setBusinessKey(businessServices.getBusinessService().get(0).getBusinessKey());
            be.getName().add(new Name());
            be.getName().get(0).setValue(domain);
            be.getName().get(0).setLang("en");
            sb.getBusinessEntity().add(be);
            BusinessDetail saveBusiness = publish.saveBusiness(sb);
            businessKey = saveBusiness.getBusinessEntity().get(0).getBusinessKey();
            System.out.println("new business created key= " + businessKey);
        }
        //finally, we're ready to save all of the services defined in the WSDL
        //again, we're creating a new business, if you have one already, look it up using the Inquiry getBusinessDetails
        SaveService ss = new SaveService();
        ss.setAuthInfo(token);
        for (int i = 0; i < businessServices.getBusinessService().size(); i++) {
            businessServices.getBusinessService().get(i).setBusinessKey(businessKey);
            ss.getBusinessService().add(businessServices.getBusinessService().get(i));
        }
        System.out.println("Here's our new service(s): " + new PrintUDDI<SaveService>().print(ss));
        publish.saveService(ss);
        System.out.println("Saved!");

        //and we're done
        //Be sure to report any problems to the jUDDI JIRA bug tracker at
        //https://issues.apache.org/jira/browse/JUDDI
    }
}
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.android_webview;

import android.graphics.Rect;
import android.widget.OverScroller;

import org.chromium.base.VisibleForTesting;

/**
 * Takes care of syncing the scroll offset between the Android View system and the
 * InProcessViewRenderer.
 *
 * Unless otherwise stated, values (sizes, scroll offsets) are in physical pixels.
 */
@VisibleForTesting
public class AwScrollOffsetManager {
    // Values taken from WebViewClassic.

    // The amount of content to overlap between two screens when using pageUp/pageDown methods.
    private static final int PAGE_SCROLL_OVERLAP = 24;
    // Standard animated scroll speed, in pixels per second.
    private static final int STD_SCROLL_ANIMATION_SPEED_PIX_PER_SEC = 480;
    // Time for the longest scroll animation, in milliseconds.
    private static final int MAX_SCROLL_ANIMATION_DURATION_MILLISEC = 750;

    /**
     * The interface that all users of AwScrollOffsetManager should implement.
     *
     * The unit of all the values in this delegate are physical pixels.
     */
    public interface Delegate {
        // Call View#overScrollBy on the containerView.
        void overScrollContainerViewBy(int deltaX, int deltaY, int scrollX, int scrollY,
                int scrollRangeX, int scrollRangeY, boolean isTouchEvent);
        // Call View#scrollTo on the containerView.
        void scrollContainerViewTo(int x, int y);
        // Store the scroll offset in the native side. This should really be a simple store
        // operation, the native side shouldn't synchronously alter the scroll offset from within
        // this call.
        void scrollNativeTo(int x, int y);
        int getContainerViewScrollX();
        int getContainerViewScrollY();
        void invalidate();
    }

    private final Delegate mDelegate;

    // Scroll offset as seen by the native side.
    private int mNativeScrollX;
    private int mNativeScrollY;

    // How many pixels can we scroll in a given direction.
    private int mMaxHorizontalScrollOffset;
    private int mMaxVerticalScrollOffset;

    // Size of the container view.
    private int mContainerViewWidth;
    private int mContainerViewHeight;

    // Whether we're in the middle of processing a touch event.
    private boolean mProcessingTouchEvent;

    // Don't skip computeScrollAndAbsorbGlow just because isFling is called in between.
    private boolean mWasFlinging;

    // Whether (and to what value) to update the native side scroll offset after we've finished
    // processing a touch event.
    private boolean mApplyDeferredNativeScroll;
    private int mDeferredNativeScrollX;
    private int mDeferredNativeScrollY;

    private OverScroller mScroller;

    public AwScrollOffsetManager(Delegate delegate, OverScroller overScroller) {
        mDelegate = delegate;
        mScroller = overScroller;
    }

    //----- Scroll range and extent calculation methods -------------------------------------------

    public int computeHorizontalScrollRange() {
        return mContainerViewWidth + mMaxHorizontalScrollOffset;
    }

    public int computeMaximumHorizontalScrollOffset() {
        return mMaxHorizontalScrollOffset;
    }

    public int computeHorizontalScrollOffset() {
        return mDelegate.getContainerViewScrollX();
    }

    public int computeVerticalScrollRange() {
        return mContainerViewHeight + mMaxVerticalScrollOffset;
    }

    public int computeMaximumVerticalScrollOffset() {
        return mMaxVerticalScrollOffset;
    }

    public int computeVerticalScrollOffset() {
        return mDelegate.getContainerViewScrollY();
    }

    public int computeVerticalScrollExtent() {
        return mContainerViewHeight;
    }

    //---------------------------------------------------------------------------------------------

    /**
     * Called when the scroll range changes. This needs to be the size of the on-screen content.
     */
    public void setMaxScrollOffset(int width, int height) {
        mMaxHorizontalScrollOffset = width;
        mMaxVerticalScrollOffset = height;
    }

    /**
     * Called when the physical size of the view changes.
     */
    public void setContainerViewSize(int width, int height) {
        mContainerViewWidth = width;
        mContainerViewHeight = height;
    }

    public void syncScrollOffsetFromOnDraw() {
        // Unfortunately apps override onScrollChanged without calling super which is why we need
        // to sync the scroll offset on every onDraw.
        onContainerViewScrollChanged(mDelegate.getContainerViewScrollX(),
                mDelegate.getContainerViewScrollY());
    }

    public void setProcessingTouchEvent(boolean processingTouchEvent) {
        assert mProcessingTouchEvent != processingTouchEvent;
        mProcessingTouchEvent = processingTouchEvent;

        // Flush any scroll offset that was deferred while the touch was in progress.
        if (!mProcessingTouchEvent && mApplyDeferredNativeScroll) {
            mApplyDeferredNativeScroll = false;
            scrollNativeTo(mDeferredNativeScrollX, mDeferredNativeScrollY);
        }
    }

    // Called by the native side to scroll the container view.
    public void scrollContainerViewTo(int x, int y) {
        mNativeScrollX = x;
        mNativeScrollY = y;

        final int scrollX = mDelegate.getContainerViewScrollX();
        final int scrollY = mDelegate.getContainerViewScrollY();
        final int deltaX = x - scrollX;
        final int deltaY = y - scrollY;
        final int scrollRangeX = computeMaximumHorizontalScrollOffset();
        final int scrollRangeY = computeMaximumVerticalScrollOffset();

        // We use overScrollContainerViewBy to be compatible with WebViewClassic which used this
        // method for handling both over-scroll as well as in-bounds scroll.
        mDelegate.overScrollContainerViewBy(deltaX, deltaY, scrollX, scrollY,
                scrollRangeX, scrollRangeY, mProcessingTouchEvent);
    }

    public boolean isFlingActive() {
        boolean flinging = mScroller.computeScrollOffset();
        // Remember that a fling was seen so computeScrollAndAbsorbGlow still runs.
        mWasFlinging |= flinging;
        return flinging;
    }

    // Called by the native side to over-scroll the container view.
    public void overScrollBy(int deltaX, int deltaY) {
        // TODO(mkosiba): Once http://crbug.com/260663 and http://crbug.com/261239 are fixed it
        // should be possible to uncomment the following asserts:
        // if (deltaX < 0) assert mDelegate.getContainerViewScrollX() == 0;
        // if (deltaX > 0) assert mDelegate.getContainerViewScrollX() ==
        //          computeMaximumHorizontalScrollOffset();
        scrollBy(deltaX, deltaY);
    }

    // Applies a scroll delta to the container view via the over-scroll path.
    private void scrollBy(int deltaX, int deltaY) {
        if (deltaX == 0 && deltaY == 0) return;

        final int scrollX = mDelegate.getContainerViewScrollX();
        final int scrollY = mDelegate.getContainerViewScrollY();
        final int scrollRangeX = computeMaximumHorizontalScrollOffset();
        final int scrollRangeY = computeMaximumVerticalScrollOffset();

        // The android.view.View.overScrollBy method is used for both scrolling and over-scrolling
        // which is why we use it here.
        mDelegate.overScrollContainerViewBy(deltaX, deltaY, scrollX, scrollY,
                scrollRangeX, scrollRangeY, mProcessingTouchEvent);
    }

    // Clamps scrollX into [0, computeMaximumHorizontalScrollOffset()].
    private int clampHorizontalScroll(int scrollX) {
        scrollX = Math.max(0, scrollX);
        scrollX = Math.min(computeMaximumHorizontalScrollOffset(), scrollX);
        return scrollX;
    }

    // Clamps scrollY into [0, computeMaximumVerticalScrollOffset()].
    private int clampVerticalScroll(int scrollY) {
        scrollY = Math.max(0, scrollY);
        scrollY = Math.min(computeMaximumVerticalScrollOffset(), scrollY);
        return scrollY;
    }

    // Called by the View system as a response to the mDelegate.overScrollContainerViewBy call.
    public void onContainerViewOverScrolled(int scrollX, int scrollY, boolean clampedX,
            boolean clampedY) {
        // Clamp the scroll offset at (0, max).
        scrollX = clampHorizontalScroll(scrollX);
        scrollY = clampVerticalScroll(scrollY);

        mDelegate.scrollContainerViewTo(scrollX, scrollY);

        // This is only necessary if the containerView scroll offset ends up being different
        // than the one set from native in which case we want the value stored on the native side
        // to reflect the value stored in the containerView (and not the other way around).
        scrollNativeTo(mDelegate.getContainerViewScrollX(), mDelegate.getContainerViewScrollY());
    }

    // Called by the View system when the scroll offset had changed. This might not get called if
    // the embedder overrides WebView#onScrollChanged without calling super.onScrollChanged. If
    // this method does get called it is called both as a response to the embedder scrolling the
    // view as well as a response to mDelegate.scrollContainerViewTo.
    public void onContainerViewScrollChanged(int x, int y) {
        scrollNativeTo(x, y);
    }

    // Pushes a (clamped) scroll offset to the native side, deferring the store while a touch
    // event is in progress and skipping it when the value is unchanged.
    private void scrollNativeTo(int x, int y) {
        x = clampHorizontalScroll(x);
        y = clampVerticalScroll(y);

        // We shouldn't do the store to native while processing a touch event since that confuses
        // the gesture processing logic.
        if (mProcessingTouchEvent) {
            mDeferredNativeScrollX = x;
            mDeferredNativeScrollY = y;
            mApplyDeferredNativeScroll = true;
            return;
        }

        if (x == mNativeScrollX && y == mNativeScrollY)
            return;

        // The scrollNativeTo call should be a simple store, so it's OK to assume it always
        // succeeds.
        mNativeScrollX = x;
        mNativeScrollY = y;
        mDelegate.scrollNativeTo(x, y);
    }

    // Called whenever some other touch interaction requires the fling gesture to be canceled.
    public void onFlingCancelGesture() {
        // TODO(mkosiba): Support speeding up a fling by flinging again.
        // http://crbug.com/265841
        mScroller.forceFinished(true);
    }

    // Called when a fling gesture is not handled by the renderer.
    // We explicitly ask the renderer not to handle fling gestures targeted at the root
    // scroll layer.
    public void onUnhandledFlingStartEvent(int velocityX, int velocityY) {
        // Velocities are negated: an unhandled fling gesture moves content opposite
        // to the finger, i.e. the view scrolls in the reverse direction.
        flingScroll(-velocityX, -velocityY);
    }

    // Starts the fling animation. Called both as a response to a fling gesture and as via the
    // public WebView#flingScroll(int, int) API.
public void flingScroll(int velocityX, int velocityY) {
    final int scrollX = mDelegate.getContainerViewScrollX();
    final int scrollY = mDelegate.getContainerViewScrollY();
    final int scrollRangeX = computeMaximumHorizontalScrollOffset();
    final int scrollRangeY = computeMaximumVerticalScrollOffset();

    // The scroller is bounded to [0, scrollRange] on both axes, so the animation never
    // targets an offset outside the scrollable region.
    mScroller.fling(scrollX, scrollY, velocityX, velocityY,
            0, scrollRangeX, 0, scrollRangeY);
    mDelegate.invalidate();
}

// Called immediately before the draw to update the scroll offset.
public void computeScrollAndAbsorbGlow(OverScrollGlow overScrollGlow) {
    if (!mScroller.computeScrollOffset() && !mWasFlinging) {
        return;
    }
    mWasFlinging = false;

    final int oldX = mDelegate.getContainerViewScrollX();
    final int oldY = mDelegate.getContainerViewScrollY();
    int x = mScroller.getCurrX();
    int y = mScroller.getCurrY();

    final int scrollRangeX = computeMaximumHorizontalScrollOffset();
    final int scrollRangeY = computeMaximumVerticalScrollOffset();

    // Feed the over-scroll glow effect (edge glow) before applying the scroll.
    if (overScrollGlow != null) {
        overScrollGlow.absorbGlow(x, y, oldX, oldY, scrollRangeX, scrollRangeY,
                mScroller.getCurrVelocity());
    }

    // The mScroller is configured not to go outside of the scrollable range, so this call
    // should never result in attempting to scroll outside of the scrollable region.
    scrollBy(x - oldX, y - oldY);

    mDelegate.invalidate();
}

// Converts a scroll distance (pixels) into an animation duration (milliseconds) at the
// standard animation speed, capped at MAX_SCROLL_ANIMATION_DURATION_MILLISEC.
private static int computeDurationInMilliSec(int dx, int dy) {
    int distance = Math.max(Math.abs(dx), Math.abs(dy));
    int duration = distance * 1000 / STD_SCROLL_ANIMATION_SPEED_PIX_PER_SEC;
    return Math.min(duration, MAX_SCROLL_ANIMATION_DURATION_MILLISEC);
}

// Starts a smooth-scroll animation to the (clamped) target offset.
// Returns false when the clamped target equals the current offset (nothing to animate).
private boolean animateScrollTo(int x, int y) {
    final int scrollX = mDelegate.getContainerViewScrollX();
    final int scrollY = mDelegate.getContainerViewScrollY();

    x = clampHorizontalScroll(x);
    y = clampVerticalScroll(y);

    int dx = x - scrollX;
    int dy = y - scrollY;

    if (dx == 0 && dy == 0) return false;

    mScroller.startScroll(scrollX, scrollY, dx, dy, computeDurationInMilliSec(dx, dy));
    mDelegate.invalidate();

    return true;
}

/**
 * See {@link android.webkit.WebView#pageUp(boolean)}
 */
public boolean pageUp(boolean top) {
    final int scrollX = mDelegate.getContainerViewScrollX();
    final int scrollY = mDelegate.getContainerViewScrollY();

    if (top) {
        // go to the top of the document
        return animateScrollTo(scrollX, 0);
    }

    // Scroll up by (almost) a full viewport, keeping PAGE_SCROLL_OVERLAP pixels of overlap
    // for context; fall back to half a viewport for very short views.
    int dy = -mContainerViewHeight / 2;
    if (mContainerViewHeight > 2 * PAGE_SCROLL_OVERLAP) {
        dy = -mContainerViewHeight + PAGE_SCROLL_OVERLAP;
    }
    // animateScrollTo clamps the argument to the scrollable range so using (scrollY + dy) is
    // fine.
    return animateScrollTo(scrollX, scrollY + dy);
}

/**
 * See {@link android.webkit.WebView#pageDown(boolean)}
 */
public boolean pageDown(boolean bottom) {
    final int scrollX = mDelegate.getContainerViewScrollX();
    final int scrollY = mDelegate.getContainerViewScrollY();

    if (bottom) {
        return animateScrollTo(scrollX, computeVerticalScrollRange());
    }

    // Mirror of pageUp: scroll down by (almost) a viewport with PAGE_SCROLL_OVERLAP overlap.
    int dy = mContainerViewHeight / 2;
    if (mContainerViewHeight > 2 * PAGE_SCROLL_OVERLAP) {
        dy = mContainerViewHeight - PAGE_SCROLL_OVERLAP;
    }
    // animateScrollTo clamps the argument to the scrollable range so using (scrollY + dy) is
    // fine.
return animateScrollTo(scrollX, scrollY + dy); } /** * See {@link android.webkit.WebView#requestChildRectangleOnScreen(View, Rect, boolean)} */ public boolean requestChildRectangleOnScreen(int childOffsetX, int childOffsetY, Rect rect, boolean immediate) { // TODO(mkosiba): WebViewClassic immediately returns false if a zoom animation is // in progress. We currently can't tell if one is happening.. should we instead cancel any // scroll animation when the size/pageScaleFactor changes? // TODO(mkosiba): Take scrollbar width into account in the screenRight/screenBotton // calculations. http://crbug.com/269032 final int scrollX = mDelegate.getContainerViewScrollX(); final int scrollY = mDelegate.getContainerViewScrollY(); rect.offset(childOffsetX, childOffsetY); int screenTop = scrollY; int screenBottom = scrollY + mContainerViewHeight; int scrollYDelta = 0; if (rect.bottom > screenBottom) { int oneThirdOfScreenHeight = mContainerViewHeight / 3; if (rect.width() > 2 * oneThirdOfScreenHeight) { // If the rectangle is too tall to fit in the bottom two thirds // of the screen, place it at the top. scrollYDelta = rect.top - screenTop; } else { // If the rectangle will still fit on screen, we want its // top to be in the top third of the screen. scrollYDelta = rect.top - (screenTop + oneThirdOfScreenHeight); } } else if (rect.top < screenTop) { scrollYDelta = rect.top - screenTop; } int screenLeft = scrollX; int screenRight = scrollX + mContainerViewWidth; int scrollXDelta = 0; if (rect.right > screenRight && rect.left > screenLeft) { if (rect.width() > mContainerViewWidth) { scrollXDelta += (rect.left - screenLeft); } else { scrollXDelta += (rect.right - screenRight); } } else if (rect.left < screenLeft) { scrollXDelta -= (screenLeft - rect.left); } if (scrollYDelta == 0 && scrollXDelta == 0) { return false; } if (immediate) { scrollBy(scrollXDelta, scrollYDelta); return true; } else { return animateScrollTo(scrollX + scrollXDelta, scrollY + scrollYDelta); } } }
/** * Copyright 2005-2016 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.testkit.support; import io.fabric8.common.util.Closeables; import io.fabric8.common.util.Files; import io.fabric8.common.util.IOHelpers; import io.fabric8.common.util.Processes; import io.fabric8.testkit.FabricAssertions; import io.fabric8.testkit.FabricController; import io.fabric8.testkit.jolokia.JolokiaFabricController; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.Callable; import static io.fabric8.common.util.Strings.join; import static io.fabric8.testkit.FabricAssertions.waitForValidValue; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * An implementation of {@link io.fabric8.testkit.FabricControllerManager} which uses a binary distribution, unpacks it * and runs shell commands to create a fabric. 
*/ public class CommandLineFabricControllerManager extends FabricControllerManagerSupport { private static final transient Logger LOG = LoggerFactory.getLogger(CommandLineFabricControllerManager.class); private File installDir; private String startFabricScriptName = "bin/fabric8-start"; public CommandLineFabricControllerManager() { } @Override public FabricController createFabric() throws Exception { if (workDirectory == null) { workDirectory = createTempDirectory(); } String version = System.getProperty("fabric8-version", "1.2.0-SNAPSHOT"); String home = System.getProperty("user.home", "~"); String repo = home + "/.m2/repository"; File distro = new File(repo, "io/fabric8/fabric8-karaf/" + version + "/fabric8-karaf-" + version + ".tar.gz"); FabricAssertions.assertFileExists(distro); installDir = new File(workDirectory, "fabric8-karaf-" + version); killInstanceProcesses(getInstancesFile()); if (workDirectory.exists()) { Files.recursiveDelete(workDirectory); } workDirectory.mkdirs(); executeCommand(workDirectory, "tar", "zxf", distro.getAbsolutePath()); FabricAssertions.assertDirectoryExists(installDir); assertTrue("install dir does not exist: " + installDir.getAbsolutePath(), installDir.exists()); assertTrue("install dir is not a directory: " + installDir.getAbsolutePath(), installDir.isDirectory()); System.out.println("About to boot up the fabric8 at: " + installDir.getAbsolutePath()); File shellScript = new File(installDir, startFabricScriptName); FabricAssertions.assertFileExists(shellScript); executeCommand(installDir, "./" + startFabricScriptName); final FabricController restApi = createFabricController(); return restApi; } @Override public void destroy() throws Exception { if (installDir == null) { return; } boolean killProcesses = FabricAssertions.shouldKillProcessesAfterTestRun(); if (!killProcesses) { String message = installDir == null ? 
"" : " at: " + installDir.getAbsolutePath(); System.out.println("Not destroying the fabric" + message + " due to system property " + FabricAssertions.KILL_CONTAINERS_FLAG + " being " + System.getProperty(FabricAssertions.KILL_CONTAINERS_FLAG)); return; } System.out.println("Destroying the fabric at: " + installDir.getAbsolutePath()); File instancesFile = waitForInstancesFile(20 * 1000); killInstanceProcesses(instancesFile); } protected FabricController createFabricController() { //return new SimpleFabricRestApi(); return new JolokiaFabricController(); } protected File createTempDirectory() throws IOException { File tempFile = File.createTempFile("fabric8-testkit", ".dir"); tempFile.delete(); tempFile.mkdirs(); return tempFile; } protected void killInstanceProcesses(File instancesFile) throws IOException { if (instancesFile != null && instancesFile.exists() && instancesFile.isFile()) { Properties properties = new Properties(); properties.load(new FileInputStream(instancesFile)); Set<Map.Entry<Object, Object>> entries = properties.entrySet(); for (Map.Entry<Object, Object> entry : entries) { Object key = entry.getKey(); if (key != null) { String text = key.toString(); if (text.startsWith("item.") && text.endsWith(".pid")) { Object value = entry.getValue(); if (value instanceof String) { String pidText = value.toString(); Long pid = Long.parseLong(pidText); if (pid != null) { System.out.println("Killing process " + pid); int status = Processes.killProcess(pid, "-9"); if (status != 0) { System.err.println("Failed to kill process " + pid + ". 
Got " + status); } } } } } } } } protected File waitForInstancesFile(long timeout) throws Exception { if (installDir != null) { return waitForValidValue(timeout, new Callable<File>() { @Override public File call() throws Exception { return getInstancesFile(); } }, new FileExistsFilter()); } else { return null; } } protected File getInstancesFile() { return new File(installDir, "instances/instance.properties"); } protected String executeCommand(File workDir, String... commands) throws IOException { String errors = null; String answer = null; String message = join(asList(commands), " "); try { System.out.println("Executing " + message); ProcessBuilder builder = new ProcessBuilder().command(commands).directory(workDir); Map<String, String> env = builder.environment(); Map<String, String> envVars = createChildEnvironmentVariables(); env.putAll(envVars); logEnvironmentVariables(env); Process process = builder.start(); answer = readProcessOutput(process.getInputStream(), message); errors = processErrors(process.getErrorStream(), message); int status = process.waitFor(); assertEquals("Command " + message + "; " + answer + " Status", 0, status); } catch (Exception e) { fail("Failed to execute command " + message + ": " + e); } errors = errors.trim(); if (errors.length() > 0) { fail("Command: " + message + " got errors: " + errors); } return answer; } protected void logEnvironmentVariables(Map<String, String> env) { if (LOG.isDebugEnabled()) { TreeMap<String, String> sorted = new TreeMap<String, String>(env); Set<Map.Entry<String, String>> entries = sorted.entrySet(); for (Map.Entry<String, String> entry : entries) { LOG.debug("Setting " + entry.getKey() + "=" + entry.getValue()); } } } protected String readProcessOutput(InputStream inputStream, String message) throws Exception { return IOHelpers.readFully(inputStream); } protected String processErrors(InputStream inputStream, String message) throws Exception { StringBuilder builder = new StringBuilder(); BufferedReader 
reader = new BufferedReader(new InputStreamReader(inputStream)); try { while (true) { String line = reader.readLine(); if (line == null) break; if (builder.length() > 0) { builder.append("\n"); } builder.append(line); LOG.info(line); } return builder.toString(); } catch (Exception e) { LOG.error("Failed to process stderr for " + message + ": " + e, e); throw e; } finally { Closeables.closeQuietly(reader); } } }
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.interestrate.inflation.provider; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.interestrate.inflation.derivative.CapFloorInflationYearOnYearInterpolation; import com.opengamma.analytics.financial.interestrate.inflation.derivative.CapFloorInflationYearOnYearMonthly; import com.opengamma.analytics.financial.interestrate.inflation.derivative.CapFloorInflationZeroCouponInterpolation; import com.opengamma.analytics.financial.interestrate.inflation.derivative.CapFloorInflationZeroCouponMonthly; import com.opengamma.analytics.financial.provider.description.inflation.BlackSmileCapInflationYearOnYearWithConvexityProviderInterface; import com.opengamma.analytics.financial.provider.description.inflation.BlackSmileCapInflationZeroCouponWithConvexityProviderInterface; import com.opengamma.analytics.financial.provider.description.inflation.InflationConvexityAdjustmentProviderInterface; import com.opengamma.util.ArgumentChecker; /** * Compute the convexity adjustment between two times for year on year coupons and for zero coupons (this adjustment is also used for the computation of the * forward in optional inflation instruments). */ public class InflationMarketModelConvexityAdjustmentForCapFloor { /** * Computes the convexity adjustment for year on year inflation swap with a monthly index. * * @param coupon * The year on year coupon. * @param inflationConvexity * The inflation provider. * @return The convexity adjustment. 
*/ public double getYearOnYearConvexityAdjustment(final CapFloorInflationYearOnYearMonthly coupon, final BlackSmileCapInflationYearOnYearWithConvexityProviderInterface inflationConvexity) { ArgumentChecker.notNull(coupon, "Coupon"); ArgumentChecker.notNull(inflationConvexity, "Inflation"); final double firstFixingTime = coupon.getReferenceStartTime(); final double secondFixingTime = coupon.getReferenceEndTime(); final double firstNaturalPaymentTime = coupon.getNaturalPaymentStartTime(); final double secondNaturalPaymentTime = coupon.getNaturalPaymentEndTime(); final double paymentTime = coupon.getPaymentTime(); final double volatilityStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0]; final double volatilityEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[1]; final double correlationInflation = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexCorrelation().getZValue(firstFixingTime, secondFixingTime); final double correlationInflationRateStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation() .getYValue(firstFixingTime); final double correlationInflationRateEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation() .getYValue(secondFixingTime); final double volBondForwardStart = getVolBondForward(firstNaturalPaymentTime, paymentTime, inflationConvexity); final double volBondForwardEnd = getVolBondForward(secondNaturalPaymentTime, paymentTime, inflationConvexity); final double adjustment = volatilityStart * (volatilityStart - volatilityEnd * correlationInflation - volBondForwardStart * correlationInflationRateStart) * firstNaturalPaymentTime + volatilityEnd * volBondForwardEnd * correlationInflationRateEnd * secondNaturalPaymentTime; return Math.exp(adjustment); } /** * Computes the convexity adjustment for year on year inflation swap with an interpolated 
   * index.
   *
   * @param cap
   *          The year on year coupon.
   * @param inflationConvexity
   *          The inflation provider.
   * @return The convexity adjustment.
   */
  public double getYearOnYearConvexityAdjustment(final CapFloorInflationYearOnYearInterpolation cap,
      final BlackSmileCapInflationYearOnYearWithConvexityProviderInterface inflationConvexity) {
    ArgumentChecker.notNull(cap, "Coupon");
    ArgumentChecker.notNull(inflationConvexity, "Inflation");
    // Interpolated fixing times: weighted average of the two reference month times.
    final double firstFixingTime = cap.getWeightStart() * cap.getReferenceStartTime()[0] + (1 - cap.getWeightStart()) * cap.getReferenceStartTime()[1];
    final double secondFixingTime = cap.getWeightEnd() * cap.getReferenceEndTime()[0] + (1 - cap.getWeightEnd()) * cap.getReferenceEndTime()[1];
    final double firstNaturalPaymentTime = cap.getNaturalPaymentStartTime();
    final double secondNaturalPaymentTime = cap.getNaturalPaymentEndTime();
    final double paymentTime = cap.getPaymentTime();
    final double volatilityStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0];
    final double volatilityEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[1];
    final double correlationInflation = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexCorrelation().getZValue(firstFixingTime,
        secondFixingTime);
    final double correlationInflationRateStart = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation()
        .getYValue(firstFixingTime);
    final double correlationInflationRateEnd = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation()
        .getYValue(secondFixingTime);
    final double volBondForwardStart = getVolBondForward(firstNaturalPaymentTime, paymentTime, inflationConvexity);
    final double volBondForwardEnd = getVolBondForward(secondNaturalPaymentTime, paymentTime, inflationConvexity);
    // Same adjustment formula as the monthly-index variant, with interpolated fixings.
    final double adjustment = volatilityStart * (volatilityStart - volatilityEnd * correlationInflation - volBondForwardStart * correlationInflationRateStart)
        * firstNaturalPaymentTime +
        volatilityEnd * volBondForwardEnd * correlationInflationRateEnd * secondNaturalPaymentTime;
    return Math.exp(adjustment);
  }

  /**
   * Computes the convexity adjustment for zero coupon inflation swap with a monthly index.
   *
   * @param coupon
   *          The zero-coupon payment.
   * @param inflationConvexity
   *          The inflation provider.
   * @return The convexity adjustment.
   */
  public double getZeroCouponConvexityAdjustment(final CapFloorInflationZeroCouponMonthly coupon,
      final BlackSmileCapInflationZeroCouponWithConvexityProviderInterface inflationConvexity) {
    ArgumentChecker.notNull(coupon, "Coupon");
    ArgumentChecker.notNull(inflationConvexity, "Inflation");
    final double fixingTime = coupon.getReferenceEndTime();
    final double naturalPaymentTime = coupon.getNaturalPaymentTime();
    final double paymentTime = coupon.getPaymentTime();
    final double volatility = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0];
    final double correlationInflationRate = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation().getYValue(fixingTime);
    final double volBondForward = getVolBondForward(naturalPaymentTime, paymentTime, inflationConvexity);
    // Single covariance term: index vol x bond-forward vol x their correlation.
    final double adjustment = volatility * volBondForward * correlationInflationRate * naturalPaymentTime;
    return Math.exp(adjustment);
  }

  /**
   * Computes the convexity adjustment for zero coupon inflation swap with an interpolated index.
   *
   * @param coupon
   *          The zero-coupon payment.
   * @param inflationConvexity
   *          The inflation provider.
   * @return The convexity adjustment.
*/ public double getZeroCouponConvexityAdjustment(final CapFloorInflationZeroCouponInterpolation coupon, final BlackSmileCapInflationZeroCouponWithConvexityProviderInterface inflationConvexity) { ArgumentChecker.notNull(coupon, "Coupon"); ArgumentChecker.notNull(inflationConvexity, "Inflation"); final double fixingTime = coupon.getWeight() * coupon.getReferenceEndTime()[0] + (1 - coupon.getWeight()) * coupon.getReferenceEndTime()[1]; final double naturalPaymentTime = coupon.getNaturalPaymentTime(); final double paymentTime = coupon.getPaymentTime(); final double volatility = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexAtmVolatility()[0]; final double correlationInflationRate = inflationConvexity.getInflationConvexityAdjustmentParameters().getPriceIndexRateCorrelation().getYValue(fixingTime); final double volBondForward = getVolBondForward(naturalPaymentTime, paymentTime, inflationConvexity); final double adjustment = volatility * volBondForward * correlationInflationRate * naturalPaymentTime; return Math.exp(adjustment); } /** * Computes the volatility of a bond forward, a bond forward is defined by his start time and his end time. * * @param startTime * The * @param endTime * The * @param inflationConvexity * The * @return The convexity adjustment. 
*/ public double getVolBondForward(final double startTime, final double endTime, final InflationConvexityAdjustmentProviderInterface inflationConvexity) { ArgumentChecker.isTrue(startTime <= endTime, null); if (startTime == endTime) { return 0.0; } final IborIndex iborIndex = inflationConvexity.getBlackSmileIborCapParameters().getIndex(); final int liborTenorInMonth = iborIndex.getTenor().getMonths(); final double lenghtOfInterval = liborTenorInMonth / 12.0; final int numberOfInterval = (int) Math.round((endTime - startTime) / lenghtOfInterval); if (numberOfInterval == 0) { double volBondForward = (endTime - startTime) / lenghtOfInterval * inflationConvexity.getMulticurveProvider().getSimplyCompoundForwardRate(iborIndex, startTime, endTime, 1.0); volBondForward = volBondForward / (1 + volBondForward) * inflationConvexity.getBlackSmileIborCapParameters().getVolatility(endTime); return volBondForward; } // generate the schedule final double[] scheduleTimes = new double[numberOfInterval + 2]; scheduleTimes[numberOfInterval + 1] = endTime; for (int i = 0; i < numberOfInterval + 1; i++) { scheduleTimes[i] = startTime + i * lenghtOfInterval; } final double[] volatilityComponents = new double[numberOfInterval + 1]; double varBondForward = 0.0; // implementation note : double sum for the for (int i = 0; i < numberOfInterval + 1; i++) { // Implementation note : breaktrough for the last period where the accrued calculation is different. 
if (i == numberOfInterval + 1) { volatilityComponents[i] = (scheduleTimes[i + 1] - scheduleTimes[i]) / lenghtOfInterval * inflationConvexity.getMulticurveProvider().getSimplyCompoundForwardRate(iborIndex, scheduleTimes[i], scheduleTimes[i + 1], 1.0); } else { volatilityComponents[i] = inflationConvexity.getMulticurveProvider().getSimplyCompoundForwardRate(iborIndex, scheduleTimes[i], scheduleTimes[i + 1], 1.0); } volatilityComponents[i] = volatilityComponents[i] / (1 + volatilityComponents[i]) * inflationConvexity.getBlackSmileIborCapParameters().getVolatility(scheduleTimes[i + 1]); varBondForward = varBondForward + volatilityComponents[i] * volatilityComponents[i] * scheduleTimes[i + 1]; for (int j = 0; j < i; j++) { varBondForward = varBondForward + 2 * volatilityComponents[i] * volatilityComponents[j] * scheduleTimes[j + 1] * inflationConvexity.getInflationConvexityAdjustmentParameters().getLiborCorrelation().getZValue(scheduleTimes[i], scheduleTimes[j]); } } return Math.sqrt(varBondForward) / endTime; } }
package org.apache.solr.cloud; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.commons.codec.binary.StringUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.client.solrj.response.CoreAdminResponse; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.util.TimeOut; import org.apache.zookeeper.KeeperException; import org.junit.Test; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import 
java.util.Properties;
import java.util.concurrent.TimeUnit;

import static org.apache.solr.cloud.ReplicaPropertiesBase.verifyUniqueAcrossCollection;

@LuceneTestCase.Slow
public class CollectionsAPISolrJTests extends AbstractFullDistribZkTestBase {

  // Single entry point: the sub-tests share cluster state, so run them in a fixed order.
  @Test
  public void test() throws Exception {
    testCreateAndDeleteCollection();
    testCreateAndDeleteShard();
    testReloadCollection();
    testCreateAndDeleteAlias();
    testSplitShard();
    testCreateCollectionWithPropertyParam();
    testAddAndDeleteReplica();
    testClusterProp();
    testAddAndRemoveRole();
    testOverseerStatus();
    testList();
    testAddAndDeleteReplicaProp();
    testBalanceShardUnique();
  }

  // Creates a 2x2 collection, verifies per-core status, deletes it, then re-creates a
  // collection with state format 2 and checks its state node exists in ZK.
  protected void testCreateAndDeleteCollection() throws Exception {
    String collectionName = "solrj_test";
    CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create()
            .setCollectionName(collectionName)
            .setNumShards(2)
            .setReplicationFactor(2)
            .setConfigName("conf1")
            .setRouterField("myOwnField")
            .setStateFormat(1);

    CollectionAdminResponse response = createCollectionRequest.process(cloudClient);

    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());
    Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus();
    assertEquals(4, coresStatus.size());
    for (int i=0; i<4; i++) {
      NamedList<Integer> status = coresStatus.get(collectionName + "_shard" + (i/2+1) + "_replica" + (i%2+1));
      assertEquals(0, (int)status.get("status"));
      assertTrue(status.get("QTime") > 0);
    }

    cloudClient.setDefaultCollection(collectionName);
    CollectionAdminRequest.Delete deleteCollectionRequest = new CollectionAdminRequest.Delete()
            .setCollectionName(collectionName);
    response = deleteCollectionRequest.process(cloudClient);

    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());
    Map<String,NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus();
    // FIX: added the missing space before "still exists" in the assertion message.
    assertNull("Deleted collection " + collectionName + " still exists",
        cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collectionName));
    assertEquals(4, nodesStatus.size());

    // Test Creating a collection with new stateformat.
    collectionName = "solrj_newstateformat";
    createCollectionRequest = new CollectionAdminRequest.Create()
            .setCollectionName(collectionName)
            .setNumShards(2)
            .setConfigName("conf1")
            .setStateFormat(2);

    response = createCollectionRequest.process(cloudClient);
    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());

    waitForRecoveriesToFinish(collectionName, false);
    assertTrue("Collection state does not exist", cloudClient.getZkStateReader().getZkClient()
        .exists(ZkStateReader.getCollectionPath(collectionName), true));
  }

  // Creates an implicit-router collection, adds a shard to it, then deletes the shard.
  protected void testCreateAndDeleteShard() throws IOException, SolrServerException {
    // Create an implicit collection
    String collectionName = "solrj_implicit";
    CollectionAdminResponse response = new CollectionAdminRequest.Create()
            .setCollectionName(collectionName)
            .setShards("shardA,shardB")
            .setConfigName("conf1")
            .setRouterName("implicit").process(cloudClient);

    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());
    Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus();
    assertEquals(2, coresStatus.size());

    cloudClient.setDefaultCollection(collectionName);
    // Add a shard to the implicit collection
    response = new CollectionAdminRequest
        .CreateShard()
            .setCollectionName(collectionName)
            .setShardName("shardC").process(cloudClient);

    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());
    coresStatus = response.getCollectionCoresStatus();
    assertEquals(1, coresStatus.size());
    assertEquals(0, (int) coresStatus.get(collectionName + "_shardC_replica1").get("status"));

    CollectionAdminRequest.DeleteShard deleteShardRequest = new CollectionAdminRequest
        .DeleteShard()
            .setCollectionName(collectionName)
            .setShardName("shardC");
    response = deleteShardRequest.process(cloudClient);

    assertEquals(0, response.getStatus());
// NOTE(review): continuation of a test method whose beginning is outside this chunk —
// asserts the previous request succeeded and reported status for exactly one node.
assertTrue(response.isSuccess());
Map<String, NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus();
assertEquals(1, nodesStatus.size());
}

/** Issues a RELOAD for "collection1" and asserts the request returns status 0. */
protected void testReloadCollection() throws IOException, SolrServerException {
  cloudClient.setDefaultCollection(DEFAULT_COLLECTION);
  CollectionAdminRequest.Reload reloadCollectionRequest = new CollectionAdminRequest.Reload()
      .setCollectionName("collection1");
  CollectionAdminResponse response = reloadCollectionRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
}

/** Creates an alias for the default collection, then deletes it. */
protected void testCreateAndDeleteAlias() throws IOException, SolrServerException {
  CollectionAdminRequest.CreateAlias createAliasRequest = new CollectionAdminRequest.CreateAlias()
      .setAliasName("solrj_alias")
      .setAliasedCollections(DEFAULT_COLLECTION);
  CollectionAdminResponse response = createAliasRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
  CollectionAdminRequest.DeleteAlias deleteAliasRequest = new CollectionAdminRequest.DeleteAlias()
      .setAliasName("solrj_alias");
  deleteAliasRequest.process(cloudClient);
  // NOTE(review): this re-asserts the CREATEALIAS response, not the DELETEALIAS one —
  // the deleteAliasRequest.process() return value is discarded; likely an oversight to confirm.
  assertEquals(0, response.getStatus());
}

/**
 * Splits shard1 of a freshly created two-shard collection, then splits again by split.key,
 * and finally asserts the resulting number of active slices is 5.
 */
protected void testSplitShard() throws Exception {
  String collectionName = "solrj_test_splitshard";
  cloudClient.setDefaultCollection(collectionName);
  CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create()
      .setConfigName("conf1")
      .setNumShards(2)
      .setCollectionName(collectionName);
  createCollectionRequest.process(cloudClient);
  CollectionAdminRequest.SplitShard splitShardRequest = new CollectionAdminRequest.SplitShard()
      .setCollectionName(collectionName)
      .setShardName("shard1");
  CollectionAdminResponse response = splitShardRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
  assertTrue(response.isSuccess());
  // Both sub-shard replicas created by the split must report core status 0 (OK).
  Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus();
  assertEquals(0, (int) coresStatus.get(collectionName + "_shard1_0_replica1").get("status"));
  assertEquals(0, (int) coresStatus.get(collectionName + "_shard1_1_replica1").get("status"));
  waitForRecoveriesToFinish(collectionName, false);
  waitForThingsToLevelOut(10);
  // Test splitting using split.key
  splitShardRequest = new CollectionAdminRequest.SplitShard()
      .setCollectionName(collectionName)
      .setSplitKey("b!");
  response = splitShardRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
  assertTrue(response.isSuccess());
  waitForRecoveriesToFinish(collectionName, false);
  waitForThingsToLevelOut(10);
  ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
  Collection<Slice> slices = clusterState.getActiveSlices(collectionName);
  assertEquals("ClusterState: "+ clusterState.getActiveSlices(collectionName), 5, slices.size());
}

/**
 * Creates a collection passing property.instanceDir / property.dataDir / property.ulogDir
 * via the Properties API and verifies (through CoreAdmin STATUS on the replica) that the
 * core actually uses the requested directories. Cleans up by deleting the collection.
 */
private void testCreateCollectionWithPropertyParam() throws Exception {
  String collectionName = "solrj_test_core_props";
  File tmpDir = createTempDir("testPropertyParamsForCreate").toFile();
  File instanceDir = new File(tmpDir, "instanceDir-" + TestUtil.randomSimpleString(random(), 1, 5));
  File dataDir = new File(tmpDir, "dataDir-" + TestUtil.randomSimpleString(random(), 1, 5));
  File ulogDir = new File(tmpDir, "ulogDir-" + TestUtil.randomSimpleString(random(), 1, 5));
  Properties properties = new Properties();
  properties.put(CoreAdminParams.INSTANCE_DIR, instanceDir.getAbsolutePath());
  properties.put(CoreAdminParams.DATA_DIR, dataDir.getAbsolutePath());
  properties.put(CoreAdminParams.ULOG_DIR, ulogDir.getAbsolutePath());
  CollectionAdminRequest.Create createReq = new CollectionAdminRequest.Create()
      .setCollectionName(collectionName)
      .setNumShards(1)
      .setConfigName("conf1")
      .setProperties(properties);
  CollectionAdminResponse response = createReq.process(cloudClient);
  assertEquals(0, response.getStatus());
  assertTrue(response.isSuccess());
  Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus();
  assertEquals(1, coresStatus.size());
  DocCollection testCollection = cloudClient.getZkStateReader()
      .getClusterState().getCollection(collectionName);
  Replica replica1 = testCollection.getReplica("core_node1");
  // Ask the hosting node directly for the core's status to read the effective dirs.
  try (HttpSolrClient client = new HttpSolrClient(replica1.getStr("base_url"))) {
    CoreAdminResponse status = CoreAdminRequest.getStatus(replica1.getStr("core"), client);
    NamedList<Object> coreStatus = status.getCoreStatus(replica1.getStr("core"));
    String dataDirStr = (String) coreStatus.get("dataDir");
    String instanceDirStr = (String) coreStatus.get("instanceDir");
    assertEquals("Instance dir does not match param passed in property.instanceDir syntax",
        new File(instanceDirStr).getAbsolutePath(), instanceDir.getAbsolutePath());
    assertEquals("Data dir does not match param given in property.dataDir syntax",
        new File(dataDirStr).getAbsolutePath(), dataDir.getAbsolutePath());
  }
  CollectionAdminRequest.Delete deleteCollectionRequest = new CollectionAdminRequest.Delete();
  deleteCollectionRequest.setCollectionName(collectionName);
  deleteCollectionRequest.process(cloudClient);
}

/**
 * ADDREPLICA on a chosen live node, verifies the replica appears on that node (polling
 * cluster state up to 3s), then DELETEREPLICA and verifies it disappears again.
 */
private void testAddAndDeleteReplica() throws Exception {
  String collectionName = "solrj_replicatests";
  createCollection(collectionName, cloudClient, 1, 2);
  cloudClient.setDefaultCollection(collectionName);
  String newReplicaName = Assign.assignNode(collectionName, cloudClient.getZkStateReader().getClusterState());
  ArrayList<String> nodeList = new ArrayList<>(cloudClient.getZkStateReader().getClusterState().getLiveNodes());
  Collections.shuffle(nodeList, random());
  CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica()
      .setCollectionName(collectionName)
      .setShardName("shard1")
      .setNode(nodeList.get(0));
  CollectionAdminResponse response = addReplica.process(cloudClient);
  assertEquals(0, response.getStatus());
  assertTrue(response.isSuccess());
  // Poll cluster state until the new replica shows up (or the 3s budget expires).
  TimeOut timeout = new TimeOut(3, TimeUnit.SECONDS);
  Replica newReplica = null;
  while (! timeout.hasTimedOut() && newReplica == null) {
    Slice slice = cloudClient.getZkStateReader().getClusterState().getSlice(collectionName, "shard1");
    newReplica = slice.getReplica(newReplicaName);
  }
  assertNotNull(newReplica);
  assertEquals("Replica should be created on the right node",
      cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeList.get(0)),
      newReplica.getStr(ZkStateReader.BASE_URL_PROP)
  );
  // Test DELETEREPLICA
  CollectionAdminRequest.DeleteReplica deleteReplicaRequest = new CollectionAdminRequest.DeleteReplica()
      .setCollectionName(collectionName)
      .setShardName("shard1")
      .setReplica(newReplicaName);
  response = deleteReplicaRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
  // Poll until the replica is gone from cluster state.
  timeout = new TimeOut(3, TimeUnit.SECONDS);
  while (! timeout.hasTimedOut() && newReplica != null) {
    Slice slice = cloudClient.getZkStateReader().getClusterState().getSlice(collectionName, "shard1");
    newReplica = slice.getReplica(newReplicaName);
  }
  assertNull(newReplica);
}

/**
 * Sets the legacyCloud cluster property to "false", waits (up to 3s) for it to propagate,
 * then unsets it and waits for the removal to propagate.
 */
private void testClusterProp() throws InterruptedException, IOException, SolrServerException {
  CollectionAdminRequest.ClusterProp clusterPropRequest = new CollectionAdminRequest.ClusterProp()
      .setPropertyName(ZkStateReader.LEGACY_CLOUD)
      .setPropertyValue("false");
  CollectionAdminResponse response = clusterPropRequest.process(cloudClient);
  assertEquals(0, response.getStatus());
  TimeOut timeout = new TimeOut(3, TimeUnit.SECONDS);
  boolean changed = false;
  while(! timeout.hasTimedOut()){
    Thread.sleep(10);
    changed = Objects.equals("false", cloudClient.getZkStateReader().getClusterProps().get(ZkStateReader.LEGACY_CLOUD));
    if(changed) break;
  }
  assertTrue("The Cluster property wasn't set", changed);
  // Unset ClusterProp that we set.
  clusterPropRequest = new CollectionAdminRequest.ClusterProp()
      .setPropertyName(ZkStateReader.LEGACY_CLOUD)
      .setPropertyValue(null);
  clusterPropRequest.process(cloudClient);
  timeout = new TimeOut(3, TimeUnit.SECONDS);
  changed = false;
  while(! timeout.hasTimedOut()) {
    Thread.sleep(10);
    changed = (cloudClient.getZkStateReader().getClusterProps().get(ZkStateReader.LEGACY_CLOUD) == null);
    if(changed) break;
  }
  assertTrue("The Cluster property wasn't unset", changed);
}

/**
 * Assigns the "overseer" role to the shard1 leader node, verifies it via CLUSTERSTATUS,
 * then removes the role and verifies it no longer appears.
 */
private void testAddAndRemoveRole() throws InterruptedException, IOException, SolrServerException {
  cloudClient.setDefaultCollection(DEFAULT_COLLECTION);
  Replica replica = cloudClient.getZkStateReader().getLeaderRetry(DEFAULT_COLLECTION, SHARD1);
  CollectionAdminRequest.AddRole addRoleRequest = new CollectionAdminRequest.AddRole()
      .setNode(replica.getNodeName())
      .setRole("overseer");
  addRoleRequest.process(cloudClient);
  CollectionAdminRequest.ClusterStatus clusterStatusRequest = new CollectionAdminRequest.ClusterStatus()
      .setCollectionName(DEFAULT_COLLECTION);
  CollectionAdminResponse response = clusterStatusRequest.process(cloudClient);
  NamedList<Object> rsp = response.getResponse();
  NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
  assertNotNull("Cluster state should not be null", cluster);
  Map<String, Object> roles = (Map<String, Object>) cluster.get("roles");
  assertNotNull("Role information should not be null", roles);
  List<String> overseer = (List<String>) roles.get("overseer");
  assertNotNull(overseer);
  assertEquals(1, overseer.size());
  assertTrue(overseer.contains(replica.getNodeName()));
  // Remove role
  new CollectionAdminRequest.RemoveRole()
      .setNode(replica.getNodeName())
      .setRole("overseer")
      .process(cloudClient);
  clusterStatusRequest = new CollectionAdminRequest.ClusterStatus();
  clusterStatusRequest.setCollectionName(DEFAULT_COLLECTION);
  response = clusterStatusRequest.process(cloudClient);
  rsp = response.getResponse();
  cluster = (NamedList<Object>) rsp.get("cluster");
  assertNotNull("Cluster state should not be null", cluster);
  roles = (Map<String, Object>) cluster.get("roles");
  assertNotNull("Role information should not be null", roles);
  overseer = (List<String>) roles.get("overseer");
  assertFalse(overseer.contains(replica.getNodeName()));
}

/** OVERSEERSTATUS must succeed and include an "overseer_operations" section. */
private void testOverseerStatus() throws IOException, SolrServerException {
  CollectionAdminResponse response = new CollectionAdminRequest.OverseerStatus().process(cloudClient);
  assertEquals(0, response.getStatus());
  assertNotNull("overseer_operations shouldn't be null", response.getResponse().get("overseer_operations"));
}

/** LIST must succeed and include a "collections" section. */
private void testList() throws IOException, SolrServerException {
  CollectionAdminResponse response = new CollectionAdminRequest.List().process(cloudClient);
  assertEquals(0, response.getStatus());
  assertNotNull("collection list should not be null", response.getResponse().get("collections"));
}

/**
 * ADDREPLICAPROP sets "preferredleader" on the shard1 leader, polls (up to 20s) until the
 * property is visible on the replica, then DELETEREPLICAPROP and polls until it is removed.
 */
private void testAddAndDeleteReplicaProp() throws InterruptedException, IOException, SolrServerException {
  Replica replica = cloudClient.getZkStateReader().getLeaderRetry(DEFAULT_COLLECTION, SHARD1);
  CollectionAdminResponse response = new CollectionAdminRequest.AddReplicaProp()
      .setCollectionName(DEFAULT_COLLECTION)
      .setShardName(SHARD1)
      .setReplica(replica.getName())
      .setPropertyName("preferredleader")
      .setPropertyValue("true").process(cloudClient);
  assertEquals(0, response.getStatus());
  TimeOut timeout = new TimeOut(20, TimeUnit.SECONDS);
  String propertyValue = null;
  String replicaName = replica.getName();
  while (! timeout.hasTimedOut()) {
    ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
    replica = clusterState.getReplica(DEFAULT_COLLECTION, replicaName);
    propertyValue = replica.getStr("property.preferredleader");
    if(StringUtils.equals("true", propertyValue)) break;
    Thread.sleep(50);
  }
  assertEquals("Replica property was not updated, Latest value: " +
      cloudClient.getZkStateReader().getClusterState().getReplica(DEFAULT_COLLECTION, replicaName),
      "true", propertyValue);
  response = new CollectionAdminRequest.DeleteReplicaProp()
      .setCollectionName(DEFAULT_COLLECTION)
      .setShardName(SHARD1)
      .setReplica(replicaName)
      .setPropertyName("property.preferredleader").process(cloudClient);
  assertEquals(0, response.getStatus());
  timeout = new TimeOut(20, TimeUnit.SECONDS);
  boolean updated = false;
  while (! timeout.hasTimedOut()) {
    ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
    replica = clusterState.getReplica(DEFAULT_COLLECTION, replicaName);
    updated = replica.getStr("property.preferredleader") == null;
    if(updated) break;
    Thread.sleep(50);
  }
  assertTrue("Replica property was not removed", updated);
}

/**
 * BALANCESHARDUNIQUE for "preferredLeader" and verifies the property ends up unique
 * across the collection (one replica per shard carries it).
 */
private void testBalanceShardUnique() throws IOException, SolrServerException, KeeperException, InterruptedException {
  CollectionAdminResponse response = new CollectionAdminRequest.BalanceShardUnique()
      .setCollection(DEFAULT_COLLECTION)
      .setPropertyName("preferredLeader").process(cloudClient);
  assertEquals(0, response.getStatus());
  verifyUniqueAcrossCollection(cloudClient, DEFAULT_COLLECTION, "property.preferredleader");
}
}
package wb.app.seek.widgets; import android.animation.Animator; import android.animation.AnimatorInflater; import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.os.Build; import android.support.annotation.AnimatorRes; import android.support.annotation.DrawableRes; import android.support.v4.view.ViewPager; import android.util.AttributeSet; import android.view.Gravity; import android.view.View; import android.view.animation.Interpolator; import android.widget.LinearLayout; import wb.app.seek.R; import static android.support.v4.view.ViewPager.OnPageChangeListener; public class CircleIndicator extends LinearLayout { private final static int DEFAULT_INDICATOR_WIDTH = 5; private ViewPager mViewpager; private int mIndicatorMargin = -1; private int mIndicatorWidth = -1; private int mIndicatorHeight = -1; private int mAnimatorResId = R.animator.scale_with_alpha; private int mAnimatorReverseResId = 0; private int mIndicatorBackgroundResId = R.drawable.white_radius; private int mIndicatorUnselectedBackgroundResId = R.drawable.white_radius; private Animator mAnimatorOut; private Animator mAnimatorIn; private Animator mImmediateAnimatorOut; private Animator mImmediateAnimatorIn; private int mLastPosition = -1; private int mMaxCount; public CircleIndicator(Context context) { super(context); init(context, null); } public CircleIndicator(Context context, AttributeSet attrs) { super(context, attrs); init(context, attrs); } public CircleIndicator(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(context, attrs); } @TargetApi(Build.VERSION_CODES.LOLLIPOP) public CircleIndicator(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); init(context, attrs); } private void init(Context context, AttributeSet attrs) { handleTypedArray(context, attrs); 
checkIndicatorConfig(context); } private void handleTypedArray(Context context, AttributeSet attrs) { if (attrs == null) { return; } TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.CircleIndicator); mIndicatorWidth = typedArray.getDimensionPixelSize(R.styleable.CircleIndicator_ci_width, -1); mIndicatorHeight = typedArray.getDimensionPixelSize(R.styleable.CircleIndicator_ci_height, -1); mIndicatorMargin = typedArray.getDimensionPixelSize(R.styleable.CircleIndicator_ci_margin, -1); mAnimatorResId = typedArray.getResourceId(R.styleable.CircleIndicator_ci_animator, R.animator.scale_with_alpha); mAnimatorReverseResId = typedArray.getResourceId(R.styleable.CircleIndicator_ci_animator_reverse, 0); mIndicatorBackgroundResId = typedArray.getResourceId(R.styleable.CircleIndicator_ci_drawable, R.drawable.white_radius); mIndicatorUnselectedBackgroundResId = typedArray.getResourceId(R.styleable.CircleIndicator_ci_drawable_unselected, mIndicatorBackgroundResId); int orientation = typedArray.getInt(R.styleable.CircleIndicator_ci_orientation, -1); setOrientation(orientation == VERTICAL ? VERTICAL : HORIZONTAL); int gravity = typedArray.getInt(R.styleable.CircleIndicator_ci_gravity, -1); setGravity(gravity >= 0 ? gravity : Gravity.CENTER); typedArray.recycle(); } /** * Create and configure Indicator in Java code. 
*/ public void configureIndicator(int indicatorWidth, int indicatorHeight, int indicatorMargin) { configureIndicator(indicatorWidth, indicatorHeight, indicatorMargin, R.animator.scale_with_alpha, 0, R.drawable.white_radius, R.drawable.white_radius); } public void configureIndicator(int indicatorWidth, int indicatorHeight, int indicatorMargin, @AnimatorRes int animatorId, @AnimatorRes int animatorReverseId, @DrawableRes int indicatorBackgroundId, @DrawableRes int indicatorUnselectedBackgroundId) { mIndicatorWidth = indicatorWidth; mIndicatorHeight = indicatorHeight; mIndicatorMargin = indicatorMargin; mAnimatorResId = animatorId; mAnimatorReverseResId = animatorReverseId; mIndicatorBackgroundResId = indicatorBackgroundId; mIndicatorUnselectedBackgroundResId = indicatorUnselectedBackgroundId; checkIndicatorConfig(getContext()); } private void checkIndicatorConfig(Context context) { mIndicatorWidth = (mIndicatorWidth < 0) ? dip2px(DEFAULT_INDICATOR_WIDTH) : mIndicatorWidth; mIndicatorHeight = (mIndicatorHeight < 0) ? dip2px(DEFAULT_INDICATOR_WIDTH) : mIndicatorHeight; mIndicatorMargin = (mIndicatorMargin < 0) ? dip2px(DEFAULT_INDICATOR_WIDTH) : mIndicatorMargin; mAnimatorResId = (mAnimatorResId == 0) ? R.animator.scale_with_alpha : mAnimatorResId; mAnimatorOut = createAnimatorOut(context); mImmediateAnimatorOut = createAnimatorOut(context); mImmediateAnimatorOut.setDuration(0); mAnimatorIn = createAnimatorIn(context); mImmediateAnimatorIn = createAnimatorIn(context); mImmediateAnimatorIn.setDuration(0); mIndicatorBackgroundResId = (mIndicatorBackgroundResId == 0) ? R.drawable.white_radius : mIndicatorBackgroundResId; mIndicatorUnselectedBackgroundResId = (mIndicatorUnselectedBackgroundResId == 0) ? 
mIndicatorBackgroundResId : mIndicatorUnselectedBackgroundResId; } private Animator createAnimatorOut(Context context) { return AnimatorInflater.loadAnimator(context, mAnimatorResId); } private Animator createAnimatorIn(Context context) { Animator animatorIn; if (mAnimatorReverseResId == 0) { animatorIn = AnimatorInflater.loadAnimator(context, mAnimatorResId); animatorIn.setInterpolator(new ReverseInterpolator()); } else { animatorIn = AnimatorInflater.loadAnimator(context, mAnimatorReverseResId); } return animatorIn; } public void setViewPager(ViewPager viewPager, int maxCount) { mViewpager = viewPager; mMaxCount = maxCount; if (mViewpager != null && mViewpager.getAdapter() != null) { mLastPosition = -1; createIndicators(); mViewpager.removeOnPageChangeListener(mInternalPageChangeListener); mViewpager.addOnPageChangeListener(mInternalPageChangeListener); mInternalPageChangeListener.onPageSelected(mViewpager.getCurrentItem()); } } private final OnPageChangeListener mInternalPageChangeListener = new OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { } @Override public void onPageSelected(int position) { if (mViewpager.getAdapter() == null || mViewpager.getAdapter().getCount() <= 0) { return; } position %= mMaxCount; if (mAnimatorIn.isRunning()) { mAnimatorIn.end(); mAnimatorIn.cancel(); } if (mAnimatorOut.isRunning()) { mAnimatorOut.end(); mAnimatorOut.cancel(); } View currentIndicator; if (mLastPosition >= 0 && (currentIndicator = getChildAt(mLastPosition)) != null) { currentIndicator.setBackgroundResource(mIndicatorUnselectedBackgroundResId); mAnimatorIn.setTarget(currentIndicator); mAnimatorIn.start(); } View selectedIndicator = getChildAt(position); if (selectedIndicator != null) { selectedIndicator.setBackgroundResource(mIndicatorBackgroundResId); mAnimatorOut.setTarget(selectedIndicator); mAnimatorOut.start(); } mLastPosition = position; } @Override public void 
onPageScrollStateChanged(int state) { } }; public DataSetObserver getDataSetObserver() { return mInternalDataSetObserver; } private DataSetObserver mInternalDataSetObserver = new DataSetObserver() { @Override public void onChanged() { super.onChanged(); if (mViewpager == null) { return; } int newCount = mViewpager.getAdapter().getCount(); int currentCount = getChildCount(); if (newCount == currentCount) { // No change return; } else if (mLastPosition < newCount) { mLastPosition = mViewpager.getCurrentItem(); } else { mLastPosition = -1; } createIndicators(); } }; /** * @deprecated User ViewPager addOnPageChangeListener */ @Deprecated public void setOnPageChangeListener(OnPageChangeListener onPageChangeListener) { if (mViewpager == null) { throw new NullPointerException("can not find Viewpager , setViewPager first"); } mViewpager.removeOnPageChangeListener(onPageChangeListener); mViewpager.addOnPageChangeListener(onPageChangeListener); } private void createIndicators() { removeAllViews(); // int count = mViewpager.initAdapter().getCount(); int count = mMaxCount; if (count <= 0) { return; } int currentItem = mViewpager.getCurrentItem() % mMaxCount; int orientation = getOrientation(); for (int i = 0; i < count; i++) { if (currentItem == i) { addIndicator(orientation, mIndicatorBackgroundResId, mImmediateAnimatorOut); } else { addIndicator(orientation, mIndicatorUnselectedBackgroundResId, mImmediateAnimatorIn); } } } private void addIndicator(int orientation, @DrawableRes int backgroundDrawableId, Animator animator) { if (animator.isRunning()) { animator.end(); animator.cancel(); } View Indicator = new View(getContext()); Indicator.setBackgroundResource(backgroundDrawableId); addView(Indicator, mIndicatorWidth, mIndicatorHeight); LayoutParams lp = (LayoutParams) Indicator.getLayoutParams(); if (orientation == HORIZONTAL) { lp.leftMargin = mIndicatorMargin; lp.rightMargin = mIndicatorMargin; } else { lp.topMargin = mIndicatorMargin; lp.bottomMargin = mIndicatorMargin; } 
Indicator.setLayoutParams(lp); animator.setTarget(Indicator); animator.start(); } private class ReverseInterpolator implements Interpolator { @Override public float getInterpolation(float value) { return Math.abs(1.0f - value); } } public int dip2px(float dpValue) { final float scale = getResources().getDisplayMetrics().density; return (int) (dpValue * scale + 0.5f); } }
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v9.services.stub;

import com.google.ads.googleads.v9.resources.AssetGroupAsset;
import com.google.ads.googleads.v9.services.GetAssetGroupAssetRequest;
import com.google.ads.googleads.v9.services.MutateAssetGroupAssetsRequest;
import com.google.ads.googleads.v9.services.MutateAssetGroupAssetsResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
// NOTE(review): this file is produced by gapic-generator-java. Do not hand-edit —
// any manual changes will be lost on the next code generation run.
/**
 * Settings class to configure an instance of {@link AssetGroupAssetServiceStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li> The default service address (googleads.googleapis.com) and default port (443) are used.
 *   <li> Credentials are acquired automatically through Application Default Credentials.
 *   <li> Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of getAssetGroupAsset to 30 seconds:
 *
 * <pre>{@code
 * AssetGroupAssetServiceStubSettings.Builder assetGroupAssetServiceSettingsBuilder =
 *     AssetGroupAssetServiceStubSettings.newBuilder();
 * assetGroupAssetServiceSettingsBuilder
 *     .getAssetGroupAssetSettings()
 *     .setRetrySettings(
 *         assetGroupAssetServiceSettingsBuilder
 *             .getAssetGroupAssetSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * AssetGroupAssetServiceStubSettings assetGroupAssetServiceSettings =
 *     assetGroupAssetServiceSettingsBuilder.build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class AssetGroupAssetServiceStubSettings
    extends StubSettings<AssetGroupAssetServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/adwords").build();

  private final UnaryCallSettings<GetAssetGroupAssetRequest, AssetGroupAsset>
      getAssetGroupAssetSettings;
  private final UnaryCallSettings<MutateAssetGroupAssetsRequest, MutateAssetGroupAssetsResponse>
      mutateAssetGroupAssetsSettings;

  /** Returns the object with the settings used for calls to getAssetGroupAsset. */
  public UnaryCallSettings<GetAssetGroupAssetRequest, AssetGroupAsset>
      getAssetGroupAssetSettings() {
    return getAssetGroupAssetSettings;
  }

  /** Returns the object with the settings used for calls to mutateAssetGroupAssets. */
  public UnaryCallSettings<MutateAssetGroupAssetsRequest, MutateAssetGroupAssetsResponse>
      mutateAssetGroupAssetsSettings() {
    return mutateAssetGroupAssetsSettings;
  }

  /** Instantiates the gRPC stub; only the gRPC transport is supported by this service. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public AssetGroupAssetServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcAssetGroupAssetServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "googleads.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "googleads.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(AssetGroupAssetServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected AssetGroupAssetServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    getAssetGroupAssetSettings = settingsBuilder.getAssetGroupAssetSettings().build();
    mutateAssetGroupAssetsSettings = settingsBuilder.mutateAssetGroupAssetsSettings().build();
  }

  /** Builder for AssetGroupAssetServiceStubSettings. */
  public static class Builder
      extends StubSettings.Builder<AssetGroupAssetServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetAssetGroupAssetRequest, AssetGroupAsset>
        getAssetGroupAssetSettings;
    private final UnaryCallSettings.Builder<
            MutateAssetGroupAssetsRequest, MutateAssetGroupAssetsResponse>
        mutateAssetGroupAssetsSettings;
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    // Codes considered retryable by "retry_policy_0": transient availability/deadline errors.
    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    // Backoff/timeout parameters for "retry_policy_0": 5s initial delay, 1.3x multiplier,
    // 60s max delay, 1h RPC and total timeouts.
    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(5000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getAssetGroupAssetSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      mutateAssetGroupAssetsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getAssetGroupAssetSettings, mutateAssetGroupAssetsSettings);
      initDefaults(this);
    }

    protected Builder(AssetGroupAssetServiceStubSettings settings) {
      super(settings);

      getAssetGroupAssetSettings = settings.getAssetGroupAssetSettings.toBuilder();
      mutateAssetGroupAssetsSettings = settings.mutateAssetGroupAssetsSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getAssetGroupAssetSettings, mutateAssetGroupAssetsSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the shared retry policy to both unary methods of this service.
    private static Builder initDefaults(Builder builder) {
      builder
          .getAssetGroupAssetSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .mutateAssetGroupAssetsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getAssetGroupAsset. */
    public UnaryCallSettings.Builder<GetAssetGroupAssetRequest, AssetGroupAsset>
        getAssetGroupAssetSettings() {
      return getAssetGroupAssetSettings;
    }

    /** Returns the builder for the settings used for calls to mutateAssetGroupAssets. */
    public UnaryCallSettings.Builder<MutateAssetGroupAssetsRequest, MutateAssetGroupAssetsResponse>
        mutateAssetGroupAssetsSettings() {
      return mutateAssetGroupAssetsSettings;
    }

    @Override
    public AssetGroupAssetServiceStubSettings build() throws IOException {
      return new AssetGroupAssetServiceStubSettings(this);
    }
  }
}
/**
 *
 */
package com.bimaas.connect;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.axis2.AxisFault;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.synapse.MessageContext;
import org.apache.synapse.commons.json.JsonUtil;
import org.apache.synapse.core.axis2.Axis2MessageContext;
import org.apache.synapse.mediators.AbstractMediator;
import org.json.JSONException;
import org.json.JSONObject;

import com.bimaas.connect.constants.BimExConstants;
import com.bimaas.connect.data.DbConnectionManager;
import com.bimaas.connect.exception.BimExchangeException;

/**
 * This class is used to add a row to project_rights table with the poid and the
 * admin user name.
 * <p>
 * The data in the project_rights table can be used to manage the rights of the
 * projects by users. By default the project creator gets the access to the
 * project, and he should be able to grant access to others from the bim stats.
 * </p>
 *
 * @author isuru
 *
 */
public class CreateProjectOutSeqMediator extends AbstractMediator {

	/**
	 * Logger.
	 */
	private static final Log LOG = LogFactory
			.getLog(CreateProjectOutSeqMediator.class);

	/**
	 * Mediate overridden method.
	 * <p>
	 * Extracts the created project's oid from the backend JSON response, reads
	 * project attributes from message-context properties, persists the project
	 * and its rights record, then writes the original payload back as the
	 * response. Returns true to continue mediation on success or on a JSON
	 * parse failure; returns false on persistence errors.
	 * </p>
	 */
	@Override
	public boolean mediate(MessageContext context) {
		try {
			String jsonPayloadToString = JsonUtil
					.jsonPayloadToString(((Axis2MessageContext) context)
							.getAxis2MessageContext());
			JSONObject originalJsonBody = new JSONObject(jsonPayloadToString);
			// oid is nested as response -> result -> oid in the backend payload.
			String poid = new JSONObject(new JSONObject(
					originalJsonBody.getString("response")).getString("result"))
					.getString("oid");
			String userAccessToken = (String) context
					.getProperty("AUTHZ_USER_TOKEN");
			String projectName = (String) context.getProperty("PROJECT_NAME");
			String tempParentId = (String) context.getProperty("PARENT_ID");
			// "-1" marks a top-level project (no parent).
			String parentId = tempParentId == null ? "-1" : tempParentId;
			String strLat = (String) context.getProperty("LATITUDE");
			String strLon = (String) context.getProperty("LONGITUDE");
			// NOTE(review): strLat/strLon/strGeoFence are dereferenced without null
			// checks — assumes these properties are always set (possibly empty) by
			// the in-sequence; TODO confirm against the mediation flow.
			Double latitude = strLat.isEmpty() ? null : Double.valueOf(strLat);
			Double longitude = strLon.isEmpty() ? null : Double.valueOf(strLon);
			String strGeoFence = (String) context.getProperty("GEO_FENCE");
			String geoFence = (strGeoFence.isEmpty() ? null : strGeoFence);
			String userName = getUserName(userAccessToken);
			insertProject(poid, projectName, parentId, latitude, longitude,
					geoFence, userName);
			insertProjectRigtsRecord(poid, userName);
			if (LOG.isDebugEnabled()) {
				LOG.debug("Successfully set the project rights for the user access token: "
						+ userAccessToken
						+ ", user name: "
						+ userName
						+ ", projectOID: " + poid);
			}
			setResponse(context, jsonPayloadToString);
		} catch (JSONException e) {
			// Malformed backend response: log and let mediation continue.
			LOG.error("Unexpected response from the backend due to some error \n"
					+ e);
			return true;
		} catch (BimExchangeException e) {
			LOG.error("Error occurred\n" + e);
			return false;
		} catch (Exception e) {
			LOG.error("Error occurred\n" + e);
			return false;
		}
		return true;
	}

	/**
	 * Insert the details to the project table of bim_exchange.
	 *
	 * @param poid
	 *            project Id.
	 * @param projectName
	 *            name of the project.
	 * @param parentId
	 *            id of the parent if sub project.
	 * @param latitude
	 *            latitude of the project.
	 * @param longitude
	 *            longitude of the project.
	 * @param geoFence
	 *            geo Fence.
	 * @param userName
	 *            name of the user created.
	 * @return true if success.
	 * @throws BimExchangeException
	 *             custom exception.
	 */
	private boolean insertProject(String poid, String projectName,
			String parentId, Double latitude, Double longitude,
			Object geoFence, String userName) throws BimExchangeException {
		// NOTE(review): SQL built by string concatenation with externally supplied
		// values (projectName, geoFence, userName) — SQL injection risk; should be
		// migrated to a PreparedStatement with bind parameters.
		String query = "INSERT INTO project (poid, project_name, parent_id, latitude, longitude, geo_fence, created_by) "
				+ "VALUES ('"
				+ poid
				+ "', '"
				+ projectName
				+ "', '"
				+ parentId
				+ "', "
				+ latitude
				+ ","
				+ longitude
				+ ", '"
				+ geoFence + "', '" + userName + "')";
		return executeInsert(BimExConstants.BIMEX_DB_SCHEMA, query);
	}

	/**
	 * Insert the record to project_rights table with user name retrieved from
	 * the user access token.
	 *
	 * @param userName
	 *            name of the user.
	 * @return true if success.
	 * @throws BimExchangeException
	 *             custom exception.
	 */
	private boolean insertProjectRigtsRecord(String poid, String userName)
			throws BimExchangeException {
		// NOTE(review): same string-concatenated SQL pattern as insertProject —
		// prefer a PreparedStatement with bind parameters.
		String query = "INSERT INTO project_rights (poid, authz_user) VALUES ('"
				+ poid + "', '" + userName + "')";
		return executeInsert(BimExConstants.BIMEX_DB_SCHEMA, query);
	}

	/**
	 * Execute insert query.
	 *
	 * @param dbSchema
	 *            schema whose connection pool is used to run the query.
	 * @param query
	 *            query to be executed.
	 * @return true if at least one row was affected.
	 * @throws BimExchangeException
	 *             custom exception wrapping any SQL failure.
	 */
	private boolean executeInsert(String dbSchema, String query)
			throws BimExchangeException {
		Connection connection = null;
		Statement statement = null;
		try {
			connection = DbConnectionManager.getInstance(dbSchema)
					.getConnection();
			if (LOG.isDebugEnabled()) {
				LOG.debug("Executing query...\n" + query);
			}
			statement = connection.createStatement();
			return statement.executeUpdate(query) > 0;
		} catch (BimExchangeException e) {
			LOG.error("Error occurred in executing: " + query + "\n" + e);
			throw e;
		} catch (SQLException e) {
			LOG.error("Error occurred in executing: " + query + "\n" + e);
			throw new BimExchangeException("SQL error occurred", e);
		} finally {
			// NOTE(review): throwing from finally masks any in-flight exception from
			// the try block; try-with-resources would avoid this.
			if (statement != null) {
				try {
					statement.close();
				} catch (SQLException e) {
					LOG.error(e);
					throw new BimExchangeException(
							"Error in closing statement", e);
				}
			}
			if (connection != null) {
				try {
					connection.close();
				} catch (SQLException e) {
					LOG.error(e);
					throw new BimExchangeException(
							"Error in closing connection", e);
				}
			}
		}
	}

	/**
	 * Return the name of the user for the given user access token.
	 *
	 * @param userAccessToken
	 *            access token of the user.
	 * @return name of the user.
	 * @throws BimExchangeException
	 *             custom exception.
	 */
	private String getUserName(String userAccessToken)
			throws BimExchangeException {
		ResultSet resultSet = null;
		// NOTE(review): the bearer token is concatenated into the SQL string —
		// injection risk; also assumes the header is "<scheme> <token>" (split on
		// space, index 1) — TODO confirm and switch to a PreparedStatement.
		String query = "SELECT AUTHZ_USER FROM IDN_OAUTH2_ACCESS_TOKEN WHERE ACCESS_TOKEN='"
				+ userAccessToken.split(" ")[1]
				+ "' AND USER_TYPE = 'APPLICATION_USER' AND TOKEN_STATE='ACTIVE'";
		try {
			Connection connection = DbConnectionManager.getInstance(
					BimExConstants.APIM_DB_SCHEMA).getConnection();
			if (LOG.isDebugEnabled()) {
				LOG.debug("Executing the query to fetch the user name for the access token \n"
						+ query);
			}
			resultSet = connection.createStatement().executeQuery(query);
			// Move to the first result item.
resultSet.next(); return resultSet.getString("AUTHZ_USER"); } catch (BimExchangeException e) { LOG.error("Error occurred in selecting the user name for the user access token: \n" + e); throw e; } catch (SQLException e) { LOG.error("Error occurred in selecting the user name for the user access token: \n" + e); throw new BimExchangeException( "SQL error occurred in Data Agent. ", e); } } /** * Set the response to message context. * * @param messageContext * @param responseBody * json body to set. * @throws AxisFault * Exception. */ private void setResponse(MessageContext messageContext, String responseBody) throws AxisFault { JsonUtil.newJsonPayload( ((Axis2MessageContext) messageContext).getAxis2MessageContext(), responseBody, true, true); } }
package org.springframework.security.oauth.examples.config; import java.util.Arrays; import java.util.Collections; import java.util.List; import javax.annotation.Resource; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; import org.springframework.context.annotation.ScopedProxyMode; import org.springframework.context.support.ConversionServiceFactoryBean; import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.http.MediaType; import org.springframework.http.converter.BufferedImageHttpMessageConverter; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.json.MappingJacksonHttpMessageConverter; import org.springframework.security.oauth.examples.tonr.SparklrService; import org.springframework.security.oauth.examples.tonr.converter.AccessTokenRequestConverter; import org.springframework.security.oauth.examples.tonr.impl.SparklrServiceImpl; import org.springframework.security.oauth.examples.tonr.mvc.FacebookController; import org.springframework.security.oauth.examples.tonr.mvc.SparklrController; import org.springframework.security.oauth.examples.tonr.mvc.SparklrRedirectController; import org.springframework.security.oauth.examples.tonr.mvc.XviaController; import org.springframework.security.oauth.examples.tonr.mvc.XviaScribeController; import org.springframework.security.oauth2.client.DefaultOAuth2ClientContext; import org.springframework.security.oauth2.client.OAuth2RestTemplate; import org.springframework.security.oauth2.client.resource.OAuth2ProtectedResourceDetails; import 
org.springframework.security.oauth2.client.token.AccessTokenRequest; import org.springframework.security.oauth2.client.token.grant.client.ClientCredentialsResourceDetails; import org.springframework.security.oauth2.client.token.grant.code.AuthorizationCodeResourceDetails; import org.springframework.security.oauth2.common.AuthenticationScheme; import org.springframework.security.oauth2.config.annotation.web.configuration.OAuth2ClientConfiguration; import org.springframework.security.oauth2.http.converter.FormOAuth2AccessTokenMessageConverter; import org.springframework.web.accept.ContentNegotiationManagerFactoryBean; import org.springframework.web.client.RestOperations; import org.springframework.web.servlet.View; import org.springframework.web.servlet.ViewResolver; import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer; import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; import org.springframework.web.servlet.view.ContentNegotiatingViewResolver; import org.springframework.web.servlet.view.InternalResourceViewResolver; import org.springframework.web.servlet.view.json.MappingJacksonJsonView; @Configuration @EnableWebMvc @PropertySource("classpath:sparklr.properties") public class WebMvcConfig extends WebMvcConfigurerAdapter { @Bean public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() { return new PropertySourcesPlaceholderConfigurer(); } @Bean public ContentNegotiatingViewResolver contentViewResolver() throws Exception { ContentNegotiatingViewResolver contentViewResolver = new ContentNegotiatingViewResolver(); ContentNegotiationManagerFactoryBean contentNegotiationManager = new ContentNegotiationManagerFactoryBean(); contentNegotiationManager.addMediaType("json", MediaType.APPLICATION_JSON); 
contentViewResolver.setContentNegotiationManager(contentNegotiationManager.getObject()); contentViewResolver.setDefaultViews(Arrays.<View> asList(new MappingJacksonJsonView())); return contentViewResolver; } @Bean public ViewResolver viewResolver() { InternalResourceViewResolver viewResolver = new InternalResourceViewResolver(); viewResolver.setPrefix("/WEB-INF/jsp/"); viewResolver.setSuffix(".jsp"); return viewResolver; } @Override public void configureDefaultServletHandling(DefaultServletHandlerConfigurer configurer) { configurer.enable(); } @Bean public SparklrController sparklrController(@Qualifier("sparklrService") SparklrService sparklrService) { SparklrController controller = new SparklrController(); controller.setSparklrService(sparklrService); return controller; } @Bean public SparklrRedirectController sparklrRedirectController(@Qualifier("sparklrRedirectService") SparklrService sparklrService) { SparklrRedirectController controller = new SparklrRedirectController(); controller.setSparklrService(sparklrService); return controller; } @Bean public FacebookController facebookController(@Qualifier("facebookRestTemplate") RestOperations facebookRestTemplate) { FacebookController controller = new FacebookController(); controller.setFacebookRestTemplate(facebookRestTemplate); return controller; } @Bean public XviaController xviaController(@Qualifier("xviaRestTemplate") RestOperations xviaRestTemplate) { XviaController controller = new XviaController(); controller.setXviaRestTemplate(xviaRestTemplate); return controller; } @Bean public XviaScribeController xviaScribeController() { XviaScribeController controller = new XviaScribeController("tonr", "secret", "http://localhost:8080/tonr2"); return controller; } @Bean public SparklrServiceImpl sparklrService(@Value("${sparklrPhotoListURL}") String sparklrPhotoListURL, @Value("${sparklrPhotoURLPattern}") String sparklrPhotoURLPattern, @Value("${sparklrTrustedMessageURL}") String sparklrTrustedMessageURL, 
@Qualifier("sparklrRestTemplate") RestOperations sparklrRestTemplate, @Qualifier("trustedClientRestTemplate") RestOperations trustedClientRestTemplate) { SparklrServiceImpl sparklrService = new SparklrServiceImpl(); sparklrService.setSparklrPhotoListURL(sparklrPhotoListURL); sparklrService.setSparklrPhotoURLPattern(sparklrPhotoURLPattern); sparklrService.setSparklrTrustedMessageURL(sparklrTrustedMessageURL); sparklrService.setSparklrRestTemplate(sparklrRestTemplate); sparklrService.setTrustedClientRestTemplate(trustedClientRestTemplate); return sparklrService; } @Bean public SparklrServiceImpl sparklrRedirectService(@Value("${sparklrPhotoListURL}") String sparklrPhotoListURL, @Value("${sparklrPhotoURLPattern}") String sparklrPhotoURLPattern, @Value("${sparklrTrustedMessageURL}") String sparklrTrustedMessageURL, @Qualifier("sparklrRedirectRestTemplate") RestOperations sparklrRestTemplate, @Qualifier("trustedClientRestTemplate") RestOperations trustedClientRestTemplate) { SparklrServiceImpl sparklrService = new SparklrServiceImpl(); sparklrService.setSparklrPhotoListURL(sparklrPhotoListURL); sparklrService.setSparklrPhotoURLPattern(sparklrPhotoURLPattern); sparklrService.setSparklrTrustedMessageURL(sparklrTrustedMessageURL); sparklrService.setSparklrRestTemplate(sparklrRestTemplate); sparklrService.setTrustedClientRestTemplate(trustedClientRestTemplate); return sparklrService; } @Bean public ConversionServiceFactoryBean conversionService() { ConversionServiceFactoryBean conversionService = new ConversionServiceFactoryBean(); conversionService.setConverters(Collections.singleton(new AccessTokenRequestConverter())); return conversionService; } public void addResourceHandlers(ResourceHandlerRegistry registry) { registry.addResourceHandler("/resources/**").addResourceLocations("/resources/"); } @Override public void configureMessageConverters(List<HttpMessageConverter<?>> converters) { converters.add(new BufferedImageHttpMessageConverter()); converters.add(new 
FormOAuth2AccessTokenMessageConverter()); } @Configuration @Import(OAuth2ClientConfiguration.class) protected static class ResourceConfiguration { @Value("${accessTokenUri}") private String accessTokenUri; @Value("${userAuthorizationUri}") private String userAuthorizationUri; @Resource @Qualifier("accessTokenRequest") private AccessTokenRequest accessTokenRequest; @Bean public OAuth2ProtectedResourceDetails sparklr() { AuthorizationCodeResourceDetails details = new AuthorizationCodeResourceDetails(); details.setId("sparklr/tonr"); details.setClientId("tonr"); details.setClientSecret("secret"); details.setAccessTokenUri(accessTokenUri); details.setUserAuthorizationUri(userAuthorizationUri); details.setScope(Arrays.asList("read", "write")); return details; } @Bean public OAuth2ProtectedResourceDetails xvia() { AuthorizationCodeResourceDetails details = new AuthorizationCodeResourceDetails(); details.setId("xvia/tonr"); details.setClientId("tonr"); details.setClientSecret("secret"); details.setAccessTokenUri("http://localhost:9000/xvia/oauth/token"); details.setUserAuthorizationUri("http://localhost:9000/xvia/oauth/authorize"); details.setScope(Arrays.asList("read", "write")); return details; } @Bean public OAuth2ProtectedResourceDetails sparklrRedirect() { AuthorizationCodeResourceDetails details = new AuthorizationCodeResourceDetails(); details.setId("sparklr/tonr-redirect"); details.setClientId("tonr-with-redirect"); details.setClientSecret("secret"); details.setAccessTokenUri(accessTokenUri); details.setUserAuthorizationUri(userAuthorizationUri); details.setScope(Arrays.asList("read", "write")); details.setUseCurrentUri(false); return details; } @Bean public OAuth2ProtectedResourceDetails facebook() { AuthorizationCodeResourceDetails details = new AuthorizationCodeResourceDetails(); details.setId("facebook"); details.setClientId("233668646673605"); details.setClientSecret("33b17e044ee6a4fa383f46ec6e28ea1d"); 
details.setAccessTokenUri("https://graph.facebook.com/oauth/access_token"); details.setUserAuthorizationUri("https://www.facebook.com/dialog/oauth"); details.setTokenName("oauth_token"); details.setAuthenticationScheme(AuthenticationScheme.query); details.setClientAuthenticationScheme(AuthenticationScheme.form); return details; } @Bean public OAuth2ProtectedResourceDetails trusted() { ClientCredentialsResourceDetails details = new ClientCredentialsResourceDetails(); details.setId("sparklr/trusted"); details.setClientId("my-client-with-registered-redirect"); details.setAccessTokenUri(accessTokenUri); details.setScope(Arrays.asList("trust")); return details; } @Bean @Scope(value = "session", proxyMode = ScopedProxyMode.INTERFACES) public OAuth2RestTemplate facebookRestTemplate() { OAuth2RestTemplate template = new OAuth2RestTemplate(facebook(), new DefaultOAuth2ClientContext( accessTokenRequest)); MappingJacksonHttpMessageConverter converter = new MappingJacksonHttpMessageConverter(); converter.setSupportedMediaTypes(Arrays.asList(MediaType.APPLICATION_JSON, MediaType.valueOf("text/javascript"))); template.setMessageConverters(Arrays.<HttpMessageConverter<?>> asList(converter)); return template; } @Bean @Scope(value = "session", proxyMode = ScopedProxyMode.INTERFACES) public OAuth2RestTemplate sparklrRestTemplate() { return new OAuth2RestTemplate(sparklr(), new DefaultOAuth2ClientContext(accessTokenRequest)); } @Bean @Scope(value = "session", proxyMode = ScopedProxyMode.INTERFACES) public OAuth2RestTemplate xviaRestTemplate() { OAuth2RestTemplate template = new OAuth2RestTemplate(xvia(), new DefaultOAuth2ClientContext( accessTokenRequest)); // FormOAuth2AccessTokenMessageConverter converter = new FormOAuth2AccessTokenMessageConverter(); // template.setMessageConverters(Arrays.<HttpMessageConverter<?>> asList(converter)); return template; } @Bean @Scope(value = "session", proxyMode = ScopedProxyMode.INTERFACES) public OAuth2RestTemplate sparklrRedirectRestTemplate() { 
return new OAuth2RestTemplate(sparklrRedirect(), new DefaultOAuth2ClientContext(accessTokenRequest)); } @Bean public OAuth2RestTemplate trustedClientRestTemplate() { return new OAuth2RestTemplate(trusted(), new DefaultOAuth2ClientContext()); } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.forecast.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Provides a summary of the predictor properties that are used in the <a>ListPredictors</a> operation. To get the * complete set of properties, call the <a>DescribePredictor</a> operation, and provide the listed * <code>PredictorArn</code>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/PredictorSummary" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class PredictorSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * The ARN of the predictor. * </p> */ private String predictorArn; /** * <p> * The name of the predictor. * </p> */ private String predictorName; /** * <p> * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. * </p> */ private String datasetGroupArn; /** * <p> * Whether AutoPredictor was used to create the predictor. * </p> */ private Boolean isAutoPredictor; /** * <p> * A summary of the reference predictor used if the predictor was retrained or upgraded. * </p> */ private ReferencePredictorSummary referencePredictorSummary; /** * <p> * The status of the predictor. 
States include: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to create a * forecast. * </p> * </note> */ private String status; /** * <p> * If an error occurred, an informational message about the error. * </p> */ private String message; /** * <p> * When the model training task was created. * </p> */ private java.util.Date creationTime; /** * <p> * The last time the resource was modified. The timestamp depends on the status of the job: * </p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> * </ul> */ private java.util.Date lastModificationTime; /** * <p> * The ARN of the predictor. * </p> * * @param predictorArn * The ARN of the predictor. */ public void setPredictorArn(String predictorArn) { this.predictorArn = predictorArn; } /** * <p> * The ARN of the predictor. * </p> * * @return The ARN of the predictor. */ public String getPredictorArn() { return this.predictorArn; } /** * <p> * The ARN of the predictor. * </p> * * @param predictorArn * The ARN of the predictor. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public PredictorSummary withPredictorArn(String predictorArn) { setPredictorArn(predictorArn); return this; } /** * <p> * The name of the predictor. * </p> * * @param predictorName * The name of the predictor. */ public void setPredictorName(String predictorName) { this.predictorName = predictorName; } /** * <p> * The name of the predictor. * </p> * * @return The name of the predictor. */ public String getPredictorName() { return this.predictorName; } /** * <p> * The name of the predictor. * </p> * * @param predictorName * The name of the predictor. * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withPredictorName(String predictorName) { setPredictorName(predictorName); return this; } /** * <p> * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. * </p> * * @param datasetGroupArn * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. */ public void setDatasetGroupArn(String datasetGroupArn) { this.datasetGroupArn = datasetGroupArn; } /** * <p> * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. * </p> * * @return The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. */ public String getDatasetGroupArn() { return this.datasetGroupArn; } /** * <p> * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. * </p> * * @param datasetGroupArn * The Amazon Resource Name (ARN) of the dataset group that contains the data used to train the predictor. * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withDatasetGroupArn(String datasetGroupArn) { setDatasetGroupArn(datasetGroupArn); return this; } /** * <p> * Whether AutoPredictor was used to create the predictor. 
* </p> * * @param isAutoPredictor * Whether AutoPredictor was used to create the predictor. */ public void setIsAutoPredictor(Boolean isAutoPredictor) { this.isAutoPredictor = isAutoPredictor; } /** * <p> * Whether AutoPredictor was used to create the predictor. * </p> * * @return Whether AutoPredictor was used to create the predictor. */ public Boolean getIsAutoPredictor() { return this.isAutoPredictor; } /** * <p> * Whether AutoPredictor was used to create the predictor. * </p> * * @param isAutoPredictor * Whether AutoPredictor was used to create the predictor. * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withIsAutoPredictor(Boolean isAutoPredictor) { setIsAutoPredictor(isAutoPredictor); return this; } /** * <p> * Whether AutoPredictor was used to create the predictor. * </p> * * @return Whether AutoPredictor was used to create the predictor. */ public Boolean isAutoPredictor() { return this.isAutoPredictor; } /** * <p> * A summary of the reference predictor used if the predictor was retrained or upgraded. * </p> * * @param referencePredictorSummary * A summary of the reference predictor used if the predictor was retrained or upgraded. */ public void setReferencePredictorSummary(ReferencePredictorSummary referencePredictorSummary) { this.referencePredictorSummary = referencePredictorSummary; } /** * <p> * A summary of the reference predictor used if the predictor was retrained or upgraded. * </p> * * @return A summary of the reference predictor used if the predictor was retrained or upgraded. */ public ReferencePredictorSummary getReferencePredictorSummary() { return this.referencePredictorSummary; } /** * <p> * A summary of the reference predictor used if the predictor was retrained or upgraded. * </p> * * @param referencePredictorSummary * A summary of the reference predictor used if the predictor was retrained or upgraded. 
* @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withReferencePredictorSummary(ReferencePredictorSummary referencePredictorSummary) { setReferencePredictorSummary(referencePredictorSummary); return this; } /** * <p> * The status of the predictor. States include: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to create a * forecast. * </p> * </note> * * @param status * The status of the predictor. States include:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to * create a forecast. * </p> */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the predictor. 
States include: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to create a * forecast. * </p> * </note> * * @return The status of the predictor. States include:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to * create a forecast. * </p> */ public String getStatus() { return this.status; } /** * <p> * The status of the predictor. States include: * </p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to create a * forecast. * </p> * </note> * * @param status * The status of the predictor. 
States include:</p> * <ul> * <li> * <p> * <code>ACTIVE</code> * </p> * </li> * <li> * <p> * <code>CREATE_PENDING</code>, <code>CREATE_IN_PROGRESS</code>, <code>CREATE_FAILED</code> * </p> * </li> * <li> * <p> * <code>DELETE_PENDING</code>, <code>DELETE_IN_PROGRESS</code>, <code>DELETE_FAILED</code> * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code>, <code>CREATE_STOPPED</code> * </p> * </li> * </ul> * <note> * <p> * The <code>Status</code> of the predictor must be <code>ACTIVE</code> before you can use the predictor to * create a forecast. * </p> * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withStatus(String status) { setStatus(status); return this; } /** * <p> * If an error occurred, an informational message about the error. * </p> * * @param message * If an error occurred, an informational message about the error. */ public void setMessage(String message) { this.message = message; } /** * <p> * If an error occurred, an informational message about the error. * </p> * * @return If an error occurred, an informational message about the error. */ public String getMessage() { return this.message; } /** * <p> * If an error occurred, an informational message about the error. * </p> * * @param message * If an error occurred, an informational message about the error. * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withMessage(String message) { setMessage(message); return this; } /** * <p> * When the model training task was created. * </p> * * @param creationTime * When the model training task was created. */ public void setCreationTime(java.util.Date creationTime) { this.creationTime = creationTime; } /** * <p> * When the model training task was created. * </p> * * @return When the model training task was created. 
*/ public java.util.Date getCreationTime() { return this.creationTime; } /** * <p> * When the model training task was created. * </p> * * @param creationTime * When the model training task was created. * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withCreationTime(java.util.Date creationTime) { setCreationTime(creationTime); return this; } /** * <p> * The last time the resource was modified. The timestamp depends on the status of the job: * </p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> * </ul> * * @param lastModificationTime * The last time the resource was modified. The timestamp depends on the status of the job:</p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> */ public void setLastModificationTime(java.util.Date lastModificationTime) { this.lastModificationTime = lastModificationTime; } /** * <p> * The last time the resource was modified. The timestamp depends on the status of the job: * </p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. 
* </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> * </ul> * * @return The last time the resource was modified. The timestamp depends on the status of the job:</p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> */ public java.util.Date getLastModificationTime() { return this.lastModificationTime; } /** * <p> * The last time the resource was modified. The timestamp depends on the status of the job: * </p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> * </ul> * * @param lastModificationTime * The last time the resource was modified. The timestamp depends on the status of the job:</p> * <ul> * <li> * <p> * <code>CREATE_PENDING</code> - The <code>CreationTime</code>. * </p> * </li> * <li> * <p> * <code>CREATE_IN_PROGRESS</code> - The current timestamp. * </p> * </li> * <li> * <p> * <code>CREATE_STOPPING</code> - The current timestamp. 
* </p> * </li> * <li> * <p> * <code>CREATE_STOPPED</code> - When the job stopped. * </p> * </li> * <li> * <p> * <code>ACTIVE</code> or <code>CREATE_FAILED</code> - When the job finished or failed. * </p> * </li> * @return Returns a reference to this object so that method calls can be chained together. */ public PredictorSummary withLastModificationTime(java.util.Date lastModificationTime) { setLastModificationTime(lastModificationTime); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getPredictorArn() != null) sb.append("PredictorArn: ").append(getPredictorArn()).append(","); if (getPredictorName() != null) sb.append("PredictorName: ").append(getPredictorName()).append(","); if (getDatasetGroupArn() != null) sb.append("DatasetGroupArn: ").append(getDatasetGroupArn()).append(","); if (getIsAutoPredictor() != null) sb.append("IsAutoPredictor: ").append(getIsAutoPredictor()).append(","); if (getReferencePredictorSummary() != null) sb.append("ReferencePredictorSummary: ").append(getReferencePredictorSummary()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getMessage() != null) sb.append("Message: ").append(getMessage()).append(","); if (getCreationTime() != null) sb.append("CreationTime: ").append(getCreationTime()).append(","); if (getLastModificationTime() != null) sb.append("LastModificationTime: ").append(getLastModificationTime()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof PredictorSummary == false) return false; PredictorSummary other = (PredictorSummary) obj; if 
(other.getPredictorArn() == null ^ this.getPredictorArn() == null) return false; if (other.getPredictorArn() != null && other.getPredictorArn().equals(this.getPredictorArn()) == false) return false; if (other.getPredictorName() == null ^ this.getPredictorName() == null) return false; if (other.getPredictorName() != null && other.getPredictorName().equals(this.getPredictorName()) == false) return false; if (other.getDatasetGroupArn() == null ^ this.getDatasetGroupArn() == null) return false; if (other.getDatasetGroupArn() != null && other.getDatasetGroupArn().equals(this.getDatasetGroupArn()) == false) return false; if (other.getIsAutoPredictor() == null ^ this.getIsAutoPredictor() == null) return false; if (other.getIsAutoPredictor() != null && other.getIsAutoPredictor().equals(this.getIsAutoPredictor()) == false) return false; if (other.getReferencePredictorSummary() == null ^ this.getReferencePredictorSummary() == null) return false; if (other.getReferencePredictorSummary() != null && other.getReferencePredictorSummary().equals(this.getReferencePredictorSummary()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getMessage() == null ^ this.getMessage() == null) return false; if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false) return false; if (other.getCreationTime() == null ^ this.getCreationTime() == null) return false; if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false) return false; if (other.getLastModificationTime() == null ^ this.getLastModificationTime() == null) return false; if (other.getLastModificationTime() != null && other.getLastModificationTime().equals(this.getLastModificationTime()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = 
prime * hashCode + ((getPredictorArn() == null) ? 0 : getPredictorArn().hashCode()); hashCode = prime * hashCode + ((getPredictorName() == null) ? 0 : getPredictorName().hashCode()); hashCode = prime * hashCode + ((getDatasetGroupArn() == null) ? 0 : getDatasetGroupArn().hashCode()); hashCode = prime * hashCode + ((getIsAutoPredictor() == null) ? 0 : getIsAutoPredictor().hashCode()); hashCode = prime * hashCode + ((getReferencePredictorSummary() == null) ? 0 : getReferencePredictorSummary().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode()); hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode()); hashCode = prime * hashCode + ((getLastModificationTime() == null) ? 0 : getLastModificationTime().hashCode()); return hashCode; } @Override public PredictorSummary clone() { try { return (PredictorSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.forecast.model.transform.PredictorSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Knetik Platform API Documentation latest
 * This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
 *
 * OpenAPI spec version: latest
 * Contact: support@knetik.com
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package com.knetikcloud.model;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.knetikcloud.model.MessageTemplateResource;
import com.knetikcloud.model.Order;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;

/**
 * PageResourceMessageTemplateResource
 *
 * One page of MessageTemplateResource search results together with its paging
 * metadata: page number and size, element/page totals, the sort order, and
 * flags marking the first and last page.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2018-03-14T12:03:43.231-04:00")
public class PageResourceMessageTemplateResource {

    @JsonProperty("content")
    private List<MessageTemplateResource> content = null;

    @JsonProperty("first")
    private Boolean first = null;

    @JsonProperty("last")
    private Boolean last = null;

    @JsonProperty("number")
    private Integer number = null;

    @JsonProperty("number_of_elements")
    private Integer numberOfElements = null;

    @JsonProperty("size")
    private Integer size = null;

    @JsonProperty("sort")
    private List<Order> sort = null;

    @JsonProperty("total_elements")
    private Long totalElements = null;

    @JsonProperty("total_pages")
    private Integer totalPages = null;

    /** Fluent setter for the page content. */
    public PageResourceMessageTemplateResource content(List<MessageTemplateResource> content) {
        this.content = content;
        return this;
    }

    /** Appends one item to the page content, creating the list on demand. */
    public PageResourceMessageTemplateResource addContentItem(MessageTemplateResource contentItem) {
        if (content == null) {
            content = new ArrayList<MessageTemplateResource>();
        }
        content.add(contentItem);
        return this;
    }

    /**
     * Get content
     * @return content
     **/
    @ApiModelProperty(value = "")
    public List<MessageTemplateResource> getContent() {
        return content;
    }

    public void setContent(List<MessageTemplateResource> content) {
        this.content = content;
    }

    /** Fluent setter for the first-page flag. */
    public PageResourceMessageTemplateResource first(Boolean first) {
        this.first = first;
        return this;
    }

    /**
     * Get first
     * @return first
     **/
    @ApiModelProperty(value = "")
    public Boolean isFirst() {
        return first;
    }

    public void setFirst(Boolean first) {
        this.first = first;
    }

    /** Fluent setter for the last-page flag. */
    public PageResourceMessageTemplateResource last(Boolean last) {
        this.last = last;
        return this;
    }

    /**
     * Get last
     * @return last
     **/
    @ApiModelProperty(value = "")
    public Boolean isLast() {
        return last;
    }

    public void setLast(Boolean last) {
        this.last = last;
    }

    /** Fluent setter for the page number. */
    public PageResourceMessageTemplateResource number(Integer number) {
        this.number = number;
        return this;
    }

    /**
     * Get number
     * @return number
     **/
    @ApiModelProperty(value = "")
    public Integer getNumber() {
        return number;
    }

    public void setNumber(Integer number) {
        this.number = number;
    }

    /** Fluent setter for the number of elements on this page. */
    public PageResourceMessageTemplateResource numberOfElements(Integer numberOfElements) {
        this.numberOfElements = numberOfElements;
        return this;
    }

    /**
     * Get numberOfElements
     * @return numberOfElements
     **/
    @ApiModelProperty(value = "")
    public Integer getNumberOfElements() {
        return numberOfElements;
    }

    public void setNumberOfElements(Integer numberOfElements) {
        this.numberOfElements = numberOfElements;
    }

    /** Fluent setter for the requested page size. */
    public PageResourceMessageTemplateResource size(Integer size) {
        this.size = size;
        return this;
    }

    /**
     * Get size
     * @return size
     **/
    @ApiModelProperty(value = "")
    public Integer getSize() {
        return size;
    }

    public void setSize(Integer size) {
        this.size = size;
    }

    /** Fluent setter for the sort order. */
    public PageResourceMessageTemplateResource sort(List<Order> sort) {
        this.sort = sort;
        return this;
    }

    /** Appends one sort criterion, creating the list on demand. */
    public PageResourceMessageTemplateResource addSortItem(Order sortItem) {
        if (sort == null) {
            sort = new ArrayList<Order>();
        }
        sort.add(sortItem);
        return this;
    }

    /**
     * Get sort
     * @return sort
     **/
    @ApiModelProperty(value = "")
    public List<Order> getSort() {
        return sort;
    }

    public void setSort(List<Order> sort) {
        this.sort = sort;
    }

    /** Fluent setter for the total number of elements across all pages. */
    public PageResourceMessageTemplateResource totalElements(Long totalElements) {
        this.totalElements = totalElements;
        return this;
    }

    /**
     * Get totalElements
     * @return totalElements
     **/
    @ApiModelProperty(value = "")
    public Long getTotalElements() {
        return totalElements;
    }

    public void setTotalElements(Long totalElements) {
        this.totalElements = totalElements;
    }

    /** Fluent setter for the total number of pages. */
    public PageResourceMessageTemplateResource totalPages(Integer totalPages) {
        this.totalPages = totalPages;
        return this;
    }

    /**
     * Get totalPages
     * @return totalPages
     **/
    @ApiModelProperty(value = "")
    public Integer getTotalPages() {
        return totalPages;
    }

    public void setTotalPages(Integer totalPages) {
        this.totalPages = totalPages;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PageResourceMessageTemplateResource that = (PageResourceMessageTemplateResource) o;
        return Objects.equals(content, that.content)
                && Objects.equals(first, that.first)
                && Objects.equals(last, that.last)
                && Objects.equals(number, that.number)
                && Objects.equals(numberOfElements, that.numberOfElements)
                && Objects.equals(size, that.size)
                && Objects.equals(sort, that.sort)
                && Objects.equals(totalElements, that.totalElements)
                && Objects.equals(totalPages, that.totalPages);
    }

    @Override
    public int hashCode() {
        return Objects.hash(content, first, last, number, numberOfElements, size, sort, totalElements, totalPages);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class PageResourceMessageTemplateResource {\n");
        sb.append(" content: ").append(toIndentedString(content)).append("\n");
        sb.append(" first: ").append(toIndentedString(first)).append("\n");
        sb.append(" last: ").append(toIndentedString(last)).append("\n");
        sb.append(" number: ").append(toIndentedString(number)).append("\n");
        sb.append(" numberOfElements: ").append(toIndentedString(numberOfElements)).append("\n");
        sb.append(" size: ").append(toIndentedString(size)).append("\n");
        sb.append(" sort: ").append(toIndentedString(sort)).append("\n");
        sb.append(" totalElements: ").append(toIndentedString(totalElements)).append("\n");
        sb.append(" totalPages: ").append(toIndentedString(totalPages)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n ");
    }
}
package ibis.ipl.registry.statistics;

import ibis.util.ThreadPool;

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Formatter;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Collects registry communication statistics: per-opcode request counts,
 * handling times and traffic, plus a history of pool-size changes and
 * election events. Instances can be saved to / restored from a simple binary
 * file format (tagged with {@link #VERSION}) and can periodically write
 * themselves to disk on a background thread (see {@link #startWriting(long)}).
 *
 * All mutable state is guarded by the instance monitor.
 */
public final class Statistics implements Runnable {

    /** Version tag of the on-disk statistics file format. */
    public static final int VERSION = 1;

    private static final Logger logger = LoggerFactory
            .getLogger(Statistics.class);

    /** Creation time of this object (ms since the epoch); time-axis origin. */
    private final long start;

    /** Clock offset between this node and the server, in ms. */
    private long offset;

    /** Human-readable name per opcode; its length defines the opcode range. */
    private final String[] opcodes;

    // Per-opcode accumulators, all indexed by opcode.
    private final double[] totalTimes;
    private final long[] incomingRequestCounter;
    private final long[] outgoingRequestCounter;
    private final long[] bytesIn;
    private final long[] bytesOut;

    private String id;
    private String poolName;

    /** Interval between background writes, in ms (set by startWriting). */
    private long writeInterval;

    List<DataPoint> poolSizeHistory;
    int currentPoolSize;
    List<DataPoint> electionEventHistory;

    /** Set by end(); tells the writer thread to stop. */
    private boolean ended = false;

    /**
     * Creates an empty statistics object for the given opcode names.
     *
     * @param opcodes one display name per opcode; opcode values passed to
     *            {@link #add} must index into this array.
     */
    public Statistics(String[] opcodes) {
        this.opcodes = opcodes;
        this.id = "unknown";
        this.poolName = "unknown";
        start = System.currentTimeMillis();
        offset = 0;
        totalTimes = new double[opcodes.length];
        incomingRequestCounter = new long[opcodes.length];
        outgoingRequestCounter = new long[opcodes.length];
        bytesIn = new long[opcodes.length];
        bytesOut = new long[opcodes.length];
        poolSizeHistory = new LinkedList<DataPoint>();
        electionEventHistory = new LinkedList<DataPoint>();
        currentPoolSize = 0;
        newPoolSize(0);
        logger.debug("created statistics");
    }

    /**
     * Restores a statistics object from a file previously produced by
     * {@link #write()}.
     *
     * @param file file to read.
     * @throws IOException if the file cannot be read, has an unexpected
     *             version, or contains negative counts/list sizes.
     */
    public Statistics(File file) throws IOException {
        DataInputStream in = new DataInputStream(new FileInputStream(file));
        try {
            int version = in.readInt();
            if (version != VERSION) {
                throw new IOException("cannot read statistics file version: "
                        + version);
            }
            start = in.readLong();
            offset = in.readLong();
            id = in.readUTF();
            int nrOfOpcodes = in.readInt();
            if (nrOfOpcodes < 0) {
                throw new IOException("negative number of opcodes");
            }
            opcodes = new String[nrOfOpcodes];
            totalTimes = new double[nrOfOpcodes];
            incomingRequestCounter = new long[nrOfOpcodes];
            outgoingRequestCounter = new long[nrOfOpcodes];
            bytesIn = new long[nrOfOpcodes];
            bytesOut = new long[nrOfOpcodes];
            for (int i = 0; i < nrOfOpcodes; i++) {
                opcodes[i] = in.readUTF();
                totalTimes[i] = in.readDouble();
                incomingRequestCounter[i] = in.readLong();
                outgoingRequestCounter[i] = in.readLong();
                bytesIn[i] = in.readLong();
                bytesOut[i] = in.readLong();
            }
            poolSizeHistory = new LinkedList<DataPoint>();
            electionEventHistory = new LinkedList<DataPoint>();
            int nrOfSizeDataPoints = in.readInt();
            if (nrOfSizeDataPoints < 0) {
                throw new IOException("negative list size");
            }
            for (int i = 0; i < nrOfSizeDataPoints; i++) {
                poolSizeHistory.add(new DataPoint(in.readLong(), in.readLong()));
            }
            int nrOfElectionDataPoints = in.readInt();
            if (nrOfElectionDataPoints < 0) {
                throw new IOException("negative list size");
            }
            for (int i = 0; i < nrOfElectionDataPoints; i++) {
                electionEventHistory
                        .add(new DataPoint(in.readLong(), in.readLong()));
            }
            currentPoolSize = in.readInt();
        } finally {
            // FIX: close the stream even when a read throws; previously an
            // exception part-way through leaked the file descriptor.
            in.close();
        }
    }

    /**
     * Signals the background writer thread (if any) to stop after its current
     * wait.
     */
    public synchronized void end() {
        ended = true;
        notifyAll();
    }

    /**
     * Writes the statistics to "statistics/&lt;poolName&gt;/&lt;id&gt;". The
     * data is first serialized to memory while holding the lock, then written
     * to disk outside the lock. A pre-existing file is kept as "&lt;name&gt;.old".
     * I/O failures are logged, not thrown.
     */
    public void write() {
        File file = null;
        try {
            ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(byteOut);

            // write data to array
            synchronized (this) {
                out.writeInt(VERSION);
                out.writeLong(start);
                out.writeLong(offset);
                out.writeUTF(id);
                out.writeInt(opcodes.length);
                for (int i = 0; i < opcodes.length; i++) {
                    out.writeUTF(opcodes[i]);
                    out.writeDouble(totalTimes[i]);
                    out.writeLong(incomingRequestCounter[i]);
                    out.writeLong(outgoingRequestCounter[i]);
                    out.writeLong(bytesIn[i]);
                    out.writeLong(bytesOut[i]);
                }
                out.writeInt(poolSizeHistory.size());
                for (DataPoint point : poolSizeHistory) {
                    out.writeLong(point.getTime());
                    out.writeLong(point.getValue());
                }
                out.writeInt(electionEventHistory.size());
                for (DataPoint point : electionEventHistory) {
                    out.writeLong(point.getTime());
                    out.writeLong(point.getValue());
                }
                out.writeInt(currentPoolSize);
                out.flush();
                out.close();
            }

            // write data to file
            file = new File("statistics" + File.separator + poolName
                    + File.separator + id);
            logger.debug("writing statistics to: " + file);
            if (file.exists()) {
                file.renameTo(new File(file.getPath() + ".old"));
            }
            file.getParentFile().mkdirs();
            FileOutputStream fileOut = new FileOutputStream(file);
            try {
                byteOut.writeTo(fileOut);
                fileOut.flush();
            } finally {
                // FIX: always release the file descriptor, even on error.
                fileOut.close();
            }
        } catch (IOException e) {
            logger.error("cannot write statistics to " + file, e);
        }
        logger.debug("DONE writing statistics for: " + id);
    }

    /**
     * Records one handled request.
     *
     * @param opcode request type; must be within [0, opcodes.length).
     * @param time handling time to accumulate.
     * @param bytesReceived bytes read for this request.
     * @param bytesSend bytes written for this request.
     * @param incoming true for requests received, false for requests sent.
     */
    public synchronized void add(byte opcode, long time, long bytesReceived,
            long bytesSend, boolean incoming) {
        if (opcode < 0 || opcode >= opcodes.length) {
            // FIX: previously a bad opcode was only logged and then still
            // used to index the arrays, throwing
            // ArrayIndexOutOfBoundsException (and a negative byte value was
            // never caught at all). Reject it instead.
            logger.error("unknown opcode in handling stats: " + opcode);
            return;
        }

        totalTimes[opcode] = totalTimes[opcode] + time;

        if (incoming) {
            incomingRequestCounter[opcode]++;
        } else {
            outgoingRequestCounter[opcode]++;
        }

        bytesIn[opcode] += bytesReceived;
        bytesOut[opcode] += bytesSend;
    }

    /**
     * Resets the per-opcode time and request counters.
     */
    synchronized void clear() {
        for (int i = 0; i < opcodes.length; i++) {
            totalTimes[i] = 0;
            incomingRequestCounter[i] = 0;
            outgoingRequestCounter[i] = 0;
            // NOTE(review): bytesIn/bytesOut are not reset here, so
            // totalTraffic() keeps accumulating across clears — confirm this
            // is intended.
        }
    }

    /**
     * Returns true when no handling time has been recorded and both histories
     * are empty. (Note: the pool-size history gets an initial entry in the
     * constructor via newPoolSize(0).)
     */
    public synchronized boolean empty() {
        // int index instead of byte: a byte index would loop forever for
        // more than 127 opcodes.
        for (int i = 0; i < opcodes.length; i++) {
            if (totalTimes[i] != 0) {
                return false;
            }
        }
        if (poolSizeHistory.size() > 0) {
            return false;
        }
        if (electionEventHistory.size() > 0) {
            return false;
        }
        return true;
    }

    /** Prints all statistics (communication, traffic, pool size) to out. */
    public synchronized void print(Formatter out) {
        out.format("#statistics for %s\n", id);

        printCommStats(out);

        out.format("#total traffic = %.2f MB\n", totalTraffic());

        printPoolSizeHistory(out);
    }

    /** Prints one table row per opcode: counts, traffic and timings. */
    public synchronized void printCommStats(Formatter out) {
        out.format("#communication statistics\n");
        out.format("#TYPE IN_COUNT OUT_COUNT BYTES_IN BYTES_OUT TOTAL_TIME AVG_TIME\n");
        out.format("# (sec) (ms)\n");
        for (int i = 0; i < opcodes.length; i++) {
            // Guard the division: with zero requests the average is defined
            // as 0 (the original computed NaN first, then overwrote it).
            double average = 0;
            if (incomingRequestCounter[i] != 0
                    || outgoingRequestCounter[i] != 0) {
                average = totalTimes[i]
                        / (incomingRequestCounter[i] + outgoingRequestCounter[i]);
            }

            out.format("#%-20s %9d %9d %8d %9d %10.2f %10.2f\n", opcodes[i],
                    incomingRequestCounter[i], outgoingRequestCounter[i],
                    bytesIn[i], bytesOut[i], totalTimes[i] / 1000.0, average);
        }
        out.format("#distance from server: %d Ms\n", offset);
    }

    /** Prints the pool-size history, one "time size" line per change. */
    public synchronized void printPoolSizeHistory(Formatter out) {
        out.format("#pool size history\n");
        out.format("#TIME POOL_SIZE\n");
        for (DataPoint point : poolSizeHistory) {
            // Seconds relative to "start", corrected by the server offset.
            double time = ((double) point.getTime() - start + offset) / 1000.0;
            out.format("%.2f %d\n", time, point.getValue());
        }
    }

    /**
     * Records a new pool size; consecutive duplicates are ignored.
     */
    public synchronized void newPoolSize(int poolSize) {
        if (!poolSizeHistory.isEmpty()) {
            long lastPoolSize = poolSizeHistory.get(poolSizeHistory.size() - 1)
                    .getValue();

            if (poolSize == lastPoolSize) {
                // ignore this update, value equal to last
                return;
            }
        }
        poolSizeHistory.add(new DataPoint(poolSize));
        logger.trace("reported pool size now: " + poolSize);
    }

    /** Records one election event; the value is the running event count. */
    public synchronized void electionEvent() {
        electionEventHistory
                .add(new DataPoint(electionEventHistory.size() + 1));
    }

    public synchronized long getStartTime() {
        return start;
    }

    /**
     * Returns the timestamp of the most recent data point, or the start time
     * if there is none.
     */
    public synchronized long getEndTime() {
        long result = start;

        if (poolSizeHistory.size() > 0) {
            long time = poolSizeHistory.get(poolSizeHistory.size() - 1)
                    .getTime();
            if (time > result) {
                result = time;
            }
        }

        if (electionEventHistory.size() > 0) {
            long time = electionEventHistory.get(
                    electionEventHistory.size() - 1).getTime();
            if (time > result) {
                result = time;
            }
        }

        return result;
    }

    /**
     * Returns the pool size in effect at the given (local) time, or -1 when
     * unknown — i.e. when there is no history, the time precedes the first
     * data point, or it lies past the last one.
     */
    public synchronized long poolSizeAt(long time) {
        time += offset;

        if (poolSizeHistory.size() == 0) {
            return -1;
        }

        long result = -1;
        for (DataPoint point : poolSizeHistory) {
            if (point.getTime() > time) {
                // previous point is result
                return result;
            } else {
                result = point.getValue();
            }
        }
        // return -1 (we don't know)
        return -1;
    }

    /** Returns a snapshot of the pool-size history. */
    public synchronized DataPoint[] getPoolSizeData() {
        return poolSizeHistory.toArray(new DataPoint[0]);
    }

    /**
     * Total data send/received by the registry (in Mib)
     */
    public synchronized double totalTraffic() {
        double totalTraffic = 0;

        for (int i = 0; i < opcodes.length; i++) {
            totalTraffic = totalTraffic + bytesIn[i] + bytesOut[i];
        }

        return totalTraffic / 1024.0 / 1024.0;
    }

    public synchronized String getID() {
        return id;
    }

    /** Sets the identity used in file names and printed headers. */
    public synchronized void setID(String id, String poolName) {
        this.id = id;
        this.poolName = poolName;
    }

    public synchronized long getOffset() {
        return offset;
    }

    public synchronized void setOffset(long offset) {
        this.offset = offset;
    }

    /**
     * Starts a daemon that calls {@link #write()} every writeInterval ms
     * until {@link #end()} is called.
     */
    public void startWriting(long writeInterval) {
        this.writeInterval = writeInterval;

        ThreadPool.createNew(this, "statistics writer");
    }

    /** Background writer loop; do not call directly. */
    public void run() {
        while (true) {
            write();
            synchronized (this) {
                try {
                    wait(writeInterval);
                } catch (InterruptedException e) {
                    // IGNORE
                }
                if (ended) {
                    return;
                }
            }
        }
    }

    public String toString() {
        return id;
    }

    /**
     * Prints the statistics files given on the command line to standard out.
     */
    public static void main(String[] args) throws IOException {
        Formatter formatter = new Formatter(System.out);

        for (int i = 0; i < args.length; i++) {
            File file = new File(args[i]);
            Statistics statistics = new Statistics(file);
            statistics.print(formatter);
        }
        formatter.flush();
    }

    /**
     * Returns a flat string map of summary statistics (identity, pool size,
     * aggregate request counts, traffic and average request time).
     */
    public synchronized Map<String, String> getMap() {
        Map<String, String> result = new HashMap<String, String>();

        result.put("ibis.id", id);
        result.put("pool.name", poolName);
        result.put("current.pool.size", currentPoolSize + "");

        double totalTime = 0;
        int totalInRequests = 0;
        int totalOutRequests = 0;
        long totalBytesIn = 0;
        long totalBytesOut = 0;
        for (int i = 0; i < opcodes.length; i++) {
            totalTime += totalTimes[i];
            totalInRequests += incomingRequestCounter[i];
            totalOutRequests += outgoingRequestCounter[i];
            totalBytesIn += bytesIn[i];
            totalBytesOut += bytesOut[i];
        }

        // FIX: guard against division by zero; previously this stored "NaN"
        // when no requests had been handled, inconsistent with the
        // zero-requests guard in printCommStats().
        double averageRequestTime = 0;
        if (totalInRequests + totalOutRequests > 0) {
            averageRequestTime = totalTime
                    / (totalInRequests + totalOutRequests);
        }

        result.put("average.request.time", averageRequestTime + "");
        result.put("incoming.requests", totalInRequests + "");
        result.put("outgoing.requests", totalOutRequests + "");
        result.put("send.bytes", totalBytesOut + "");
        result.put("received.bytes", totalBytesIn + "");

        return result;
    }
}
package edu.gemini.spModel.gemini.obscomp; import edu.gemini.pot.sp.SPComponentType; import edu.gemini.shared.util.immutable.None; import edu.gemini.shared.util.immutable.Option; import edu.gemini.shared.util.immutable.Some; import edu.gemini.spModel.data.AbstractDataObject; import edu.gemini.spModel.data.property.PropertyProvider; import edu.gemini.spModel.data.property.PropertySupport; import edu.gemini.spModel.pio.ParamSet; import edu.gemini.spModel.pio.Pio; import edu.gemini.spModel.pio.PioFactory; import edu.gemini.spModel.type.DisplayableSpType; import edu.gemini.spModel.type.ObsoletableSpType; import edu.gemini.spModel.type.SequenceableSpType; import edu.gemini.spModel.type.SpTypeUtil; import java.beans.PropertyDescriptor; import java.io.Serializable; import java.util.*; /** * Site Quality observation component. */ public class SPSiteQuality extends AbstractDataObject implements PropertyProvider { // for serialization private static final long serialVersionUID = 3L; public static final SPComponentType SP_TYPE = SPComponentType.SCHEDULING_CONDITIONS; /** * SPSiteQuality owns a list of TimingWindow objects specifying when the target is observable. * By convention, an empty list signifies that the target is always observable, which is the same * as the list containing the ALWAYS static instance. 
* @author rnorris */ public static class TimingWindow implements Serializable, Cloneable { private static final long serialVersionUID = 1L; public static final int WINDOW_REMAINS_OPEN_FOREVER = -1; public static final int REPEAT_FOREVER = -1; public static final int REPEAT_NEVER = 0; private static final long MS_PER_SECOND = 1000; private static final long MS_PER_MINUTE = MS_PER_SECOND * 60; private static final long MS_PER_HOUR = MS_PER_MINUTE * 60; private static final String NAME = "timing-window"; private static final String START_PROP = "start"; private static final String DURATION_PROP = "duration"; private static final String REPEAT_PROP = "repeat"; private static final String PERIOD_PROP = "period"; // All times and durations in ms private final long start, duration, period; private final int repeat; public TimingWindow(long start, long duration, int repeat, long period) { this.start = start; this.duration = duration; this.repeat = repeat; this.period = period; assert repeat >= -1; } public TimingWindow() { this(System.currentTimeMillis(), 24 * MS_PER_HOUR, 0, 0); } TimingWindow(ParamSet params) { this(Pio.getLongValue(params, START_PROP, 0), Pio.getLongValue(params, DURATION_PROP, 0), Pio.getIntValue(params, REPEAT_PROP, 0), Pio.getLongValue(params, PERIOD_PROP, 0)); } public long getDuration() { return duration; } public long getPeriod() { return period; } public int getRepeat() { return repeat; } public long getStart() { return start; } ParamSet getParamSet(PioFactory factory) { ParamSet params = factory.createParamSet(NAME); Pio.addLongParam(factory, params, START_PROP, start); Pio.addLongParam(factory, params, DURATION_PROP, duration); Pio.addIntParam(factory, params, REPEAT_PROP, repeat); Pio.addLongParam(factory, params, PERIOD_PROP, period); return params; } @Override public String toString() { return String.format("{%d %d %d %d}", start, duration, repeat, period); } public TimingWindow clone() { try { return (TimingWindow) super.clone(); } catch 
(CloneNotSupportedException e) { throw new Error("This was supposed to be impossible."); } } } static class TimingWindowList extends LinkedList<TimingWindow> { private static final long serialVersionUID = 2L; private static final String NAME = "timing-window-list"; ParamSet getParamSet(PioFactory factory) { ParamSet params = factory.createParamSet(NAME); for (TimingWindow tw: this) params.addParamSet(tw.getParamSet(factory)); return params; } void setParamSet(ParamSet params) { clear(); if (params != null) { for (ParamSet ps: params.getParamSets()) add(new TimingWindow(ps)); } } @Override public TimingWindowList clone() { TimingWindowList ret = new TimingWindowList(); for (TimingWindow tw: this) ret.add(tw.clone()); return ret; } } @Override public SPSiteQuality clone() { SPSiteQuality ret = (SPSiteQuality) super.clone(); ret._timingWindows = _timingWindows.clone(); return ret; } public interface PercentageContainer { byte getPercentage(); } private static <T extends PercentageContainer> Option<T> read(String strVal, T[] values) { if (strVal.length() < 3) return None.instance(); String tail = strVal.substring(2); if ("Any".equalsIgnoreCase(tail)) tail = "100"; byte perc; try { perc = Byte.valueOf(tail); } catch (NumberFormatException ex) { return None.instance(); } for (T val : values) { if (val.getPercentage() == perc) return new Some<>(val); } return None.instance(); } /** * Sky Background Options. 
* Each bin carries a display label, its percentile, and the maximum sky
* brightness (in vMag) associated with it.
*/
public enum SkyBackground implements DisplayableSpType, SequenceableSpType, PercentageContainer {

    PERCENT_20("20%/Darkest", 20, 21.37),
    PERCENT_50("50%/Dark", 50, 20.78),
    PERCENT_80("80%/Grey", 80, 19.61),
    ANY("Any/Bright", 100, 0);

    /** The default SkyBackground value **/
    public static SkyBackground DEFAULT = ANY;

    private final String _displayValue;   // label shown in the UI
    private final byte _percentage;       // percentile bin, 0..100
    private final double _maxBrightness;  // in vMag, smaller is brighter

    SkyBackground(String displayValue, int percentage, double maxBrightness) {
        _percentage = (byte) percentage;
        _displayValue = displayValue;
        _maxBrightness = maxBrightness;
        assert _percentage >= 0 && _percentage <= 100;
    }

    public byte getPercentage() {
        return _percentage;
    }

    /**
     * Returns the maximum brightness for this background percentile in vMag.
     * Note that smaller values are brighter. Wacky astronomers, go figure.
     */
    public double getMaxBrightness() {
        return _maxBrightness;
    }

    public String displayValue() {
        return _displayValue;
    }

    // Sequence representation is the bare percentage, e.g. "20".
    public String sequenceValue() {
        return Byte.toString(_percentage);
    }

    /** Return a SkyBackground by name **/
    public static SkyBackground getSkyBackground(String name) {
        return getSkyBackground(name, DEFAULT);
    }

    /** Return a SkyBackground by name with a value to return upon error **/
    public static SkyBackground getSkyBackground(String name, SkyBackground nvalue) {
        return SpTypeUtil.oldValueOf(SkyBackground.class, name, nvalue);
    }

    // Compact form, e.g. "SB20" / "SB80", or "SBAny" for the 100% bin.
    public String toString() {
        return (this == ANY) ? "SBAny" : String.format("SB%2d", getPercentage());
    }

    // Parses the compact "SBnn"/"SBAny" form produced by toString() (the
    // shared SPSiteQuality.read helper matches on the tail after 2 chars).
    public static Option<SkyBackground> read(String s) {
        return SPSiteQuality.read(s, values());
    }
}

/**
 * Cloud Cover Options.
* Each bin carries a display label and its percentile; two historical bins
* (20% and 90%) are marked obsolete.
*/
public enum CloudCover implements DisplayableSpType, ObsoletableSpType, SequenceableSpType, PercentageContainer {

    PERCENT_20("20%", 20),
    PERCENT_50("50%/Clear", 50),
    PERCENT_70("70%/Cirrus", 70),
    PERCENT_80("80%/Cloudy", 80),
    PERCENT_90("90%", 90),
    ANY("Any", 100),
    ;

    /** The default CloudCover value **/
    public static CloudCover DEFAULT = ANY;

    private final String _displayValue;  // label shown in the UI
    private final byte _percentage;      // percentile bin, 0..100

    CloudCover(String displayValue, int percentage) {
        _percentage = (byte) percentage;
        _displayValue = displayValue;
        assert _percentage >= 0 && _percentage <= 100;
    }

    public byte getPercentage() {
        return _percentage;
    }

    public String displayValue() {
        return _displayValue;
    }

    // Sequence representation is the bare percentage, e.g. "50".
    public String sequenceValue() {
        return Byte.toString(_percentage);
    }

    /** Return a CloudCover by name **/
    public static CloudCover getCloudCover(String name) {
        return getCloudCover(name, DEFAULT);
    }

    /** Return a CloudCover by name with a value to return upon error **/
    public static CloudCover getCloudCover(String name, CloudCover nvalue) {
        return SpTypeUtil.oldValueOf(CloudCover.class, name, nvalue);
    }

    // The 20% and 90% bins are retained only for old programs.
    public boolean isObsolete() {
        return (this == PERCENT_20) || (this == PERCENT_90);
    }

    // Compact form, e.g. "CC50" / "CC80", or "CCAny" for the 100% bin.
    public String toString() {
        return (this == ANY) ? "CCAny" : String.format("CC%2d", getPercentage());
    }

    // Parses the compact "CCnn"/"CCAny" form produced by toString().
    public static Option<CloudCover> read(String s) {
        return SPSiteQuality.read(s, values());
    }
}

/**
 * Image Quality Options.
* Each bin carries a display label and its percentile. Note the legacy
* name-mapping patch in getImageQuality(String, ImageQuality).
*/
public enum ImageQuality implements DisplayableSpType, SequenceableSpType, PercentageContainer {

    PERCENT_20("20%/Best", 20),
    PERCENT_70("70%/Good", 70),
    PERCENT_85("85%/Poor", 85),
    ANY("Any", 100);

    /** The default ImageQuality value **/
    public static ImageQuality DEFAULT = ANY;

    private final String _displayValue;  // label shown in the UI
    private final byte _percentage;      // percentile bin, 0..100

    ImageQuality(String displayValue, int percentage) {
        _percentage = (byte) percentage;
        _displayValue = displayValue;
        assert _percentage >= 0 && _percentage <= 100;
    }

    public byte getPercentage() {
        return _percentage;
    }

    public String displayValue() {
        return _displayValue;
    }

    // Sequence representation is the bare percentage, e.g. "70".
    public String sequenceValue() {
        return Byte.toString(_percentage);
    }

    /** Return a ImageQuality by name **/
    public static ImageQuality getImageQuality(String name) {
        return getImageQuality(name, DEFAULT);
    }

    /** Return a ImageQuality by name with a value to return upon error **/
    public static ImageQuality getImageQuality(String name, ImageQuality nvalue) {
        // Note the following is a temporary patch. If name has a value of 50, it means 70
        // if we have a value of 80 it means 85. This should go away after the next version
        // NOTE(review): the first literal is "50%" but the second is "80"
        // (no percent sign) — confirm the asymmetry is intentional.
        if (name.equals("50%")) return PERCENT_70;
        if (name.equals("80")) return PERCENT_85;
        return SpTypeUtil.oldValueOf(ImageQuality.class, name, nvalue);
    }

    // Compact form, e.g. "IQ20" / "IQ85", or "IQAny" for the 100% bin.
    public String toString() {
        return (this == ANY) ? "IQAny" : String.format("IQ%2d", getPercentage());
    }

    // Parses the compact "IQnn"/"IQAny" form produced by toString().
    public static Option<ImageQuality> read(String s) {
        return SPSiteQuality.read(s, values());
    }
}

/**
 * Water Vapor Options.
*/
    public enum WaterVapor implements DisplayableSpType, SequenceableSpType, PercentageContainer {
        PERCENT_20("20%/Low", 20),
        PERCENT_50("50%/Median", 50),
        PERCENT_80("80%/High", 80),
        ANY("Any", 100),
        ;

        /** The default WaterVapor value **/
        // NOTE(review): documented as a constant; consider making this final so callers cannot reassign it.
        public static WaterVapor DEFAULT = ANY;

        // Human-readable label (e.g. "20%/Low").
        private final String _displayValue;
        // Percentage stored as a byte; all declared values (20/50/80/100) fit in byte range.
        private final byte _percentage;

        WaterVapor(String displayValue, int percentage) {
            _percentage = (byte) percentage;
            _displayValue = displayValue;
            // Sanity check only when assertions are enabled; constructor args are compile-time constants here.
            assert _percentage >= 0 && _percentage <= 100;
        }

        public byte getPercentage() {
            return _percentage;
        }

        public String displayValue() {
            return _displayValue;
        }

        // Sequence representation is the bare percentage, e.g. "20".
        public String sequenceValue() {
            return Byte.toString(_percentage);
        }

        /** Return a WaterVapor by name **/
        public static WaterVapor getWaterVapor(String name) {
            return getWaterVapor(name, DEFAULT);
        }

        /** Return a WaterVapor by name with a value to return upon error **/
        public static WaterVapor getWaterVapor(String name, WaterVapor nvalue) {
            return SpTypeUtil.oldValueOf(WaterVapor.class, name, nvalue);
        }

        // ANY renders specially; other values render as "WV" followed by the percentage ("%2d" pads to width 2).
        public String toString() {
            return (this == ANY) ? "WVAny" : String.format("WV%2d", getPercentage());
        }

        public static Option<WaterVapor> read(String s) {
            return SPSiteQuality.read(s, values());
        }
    }

    /**
     * Elevation Constraint Options. Each constant carries the legal [min, max]
     * range for the constraint and the default sub-range presented to the user.
     */
    public enum ElevationConstraintType implements DisplayableSpType {
        NONE("None", 0, 0, 0, 0),
        HOUR_ANGLE("Hour Angle", -5.5, 5.5, -5.0, 5.0),
        AIRMASS("Airmass", 1.0, 3.0, 1.0, 2.0),
        ;

        // NOTE(review): consider making this final; it is used as a constant fallback below.
        public static ElevationConstraintType DEFAULT = NONE;

        private final String _displayValue;
        // Legal bounds for this constraint type.
        private final double _min, _max;
        // Default bounds offered when the constraint is first selected.
        private final double _defaultMin, _defaultMax;

        ElevationConstraintType(String displayValue, double min, double max, double defaultMin, double defaultMax) {
            _displayValue = displayValue;
            _min = min;
            _max = max;
            _defaultMax = defaultMax;
            _defaultMin = defaultMin;
        }

        public String displayValue() {
            return _displayValue;
        }

        public double getDefaultMin() {
            return _defaultMin;
        }

        public double getDefaultMax() {
            return _defaultMax;
        }

        public double getMax() {
            return _max;
        }

        public double getMin() {
            return _min;
        }

        // Name-based lookup that falls back to DEFAULT (NONE) on an unknown name instead of throwing.
        public static ElevationConstraintType getElevationConstraintType(String name) {
            try {
                return valueOf(name);
            } catch (IllegalArgumentException iae) {
                return DEFAULT;
            }
        }
    }

    /**
     * Immutable value object bundling the four observing-condition constraints
     * (cloud cover, image quality, sky background, water vapor). The cc/iq/sb/wv
     * methods return modified copies, never mutate this instance.
     */
    public static final class Conditions implements Serializable {

        // Best possible conditions: 20th percentile across the board.
        public static final Conditions BEST = new Conditions(CloudCover.PERCENT_20, ImageQuality.PERCENT_20, SkyBackground.PERCENT_20, WaterVapor.PERCENT_20);
        // Typical conditions.
        public static final Conditions NOMINAL = new Conditions(CloudCover.PERCENT_50, ImageQuality.PERCENT_70, SkyBackground.PERCENT_50, WaterVapor.ANY);
        // No constraints at all.
        public static final Conditions WORST = new Conditions(CloudCover.ANY, ImageQuality.ANY, SkyBackground.ANY, WaterVapor.ANY);

        public final CloudCover cc;
        public final ImageQuality iq;
        public final SkyBackground sb;
        public final WaterVapor wv;

        public Conditions(CloudCover cc, ImageQuality iq, SkyBackground sb, WaterVapor wv) {
            this.cc = cc;
            this.iq = iq;
            this.sb = sb;
            this.wv = wv;
        }

        // Copy-with methods: each returns a new Conditions with one field replaced.
        public Conditions cc(CloudCover ncc) {
            return new Conditions(ncc, iq, sb, wv);
        }

        public Conditions iq(ImageQuality niq) {
            return new Conditions(cc, niq, sb, wv);
        }

        public Conditions sb(SkyBackground nsb) {
            return new Conditions(cc, iq, nsb, wv);
        }

        public Conditions wv(WaterVapor nwv) {
            return new Conditions(cc, iq, sb, nwv);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Conditions that = (Conditions) o;
            // Enum constants are singletons, so reference comparison is correct here.
            return cc == that.cc && iq == that.iq && sb == that.sb && wv == that.wv;
        }

        @Override
        public int hashCode() {
            int result = cc.hashCode();
            result = 31 * result + iq.hashCode();
            result = 31 * result + sb.hashCode();
            result = 31 * result + wv.hashCode();
            return result;
        }

        // e.g. "CC50, IQ70, SB50, WVAny" (delegates to each enum's toString).
        public String toString() {
            StringBuilder buf = new StringBuilder();
            buf.append(cc).append(", ").append(iq).append(", ").append(sb).append(", ").append(wv);
            return buf.toString();
        }
    }

    // Bean property descriptors for each editable attribute; initialized in the static block below.
    public static final PropertyDescriptor CLOUD_COVER_PROP;
    public static final PropertyDescriptor IMAGE_QUALITY_PROP;
    public static final PropertyDescriptor SKY_BACKGROUND_PROP;
    public static final PropertyDescriptor WATER_VAPOR_PROP;
    public static final PropertyDescriptor ELEVATION_CONSTRAINT_TYPE_PROP;
    public static final PropertyDescriptor ELEVATION_CONSTRAINT_MIN_PROP;
    public static final PropertyDescriptor ELEVATION_CONSTRAINT_MAX_PROP;
    public static final PropertyDescriptor TIMING_WINDOWS_PROP;

    // Mutable backing map; exposed to clients only through the unmodifiable PROPERTY_MAP view.
    private static final Map<String, PropertyDescriptor> PRIVATE_PROP_MAP = new TreeMap<>();
    public static final Map<String, PropertyDescriptor> PROPERTY_MAP = Collections.unmodifiableMap(PRIVATE_PROP_MAP);

    static {
        // NOTE(review): the two boolean arguments to PropertySupport.init are presumably
        // flags such as query/iterable — confirm against PropertySupport before relying on them.
        CLOUD_COVER_PROP = PropertySupport.init("CloudCover", SPSiteQuality.class, true, false);
        IMAGE_QUALITY_PROP = PropertySupport.init("ImageQuality", SPSiteQuality.class, true, false);
        SKY_BACKGROUND_PROP = PropertySupport.init("SkyBackground", SPSiteQuality.class, true, false);
        WATER_VAPOR_PROP = PropertySupport.init("WaterVapor", SPSiteQuality.class, true, false);
        ELEVATION_CONSTRAINT_TYPE_PROP = PropertySupport.init("ElevationConstraintType", SPSiteQuality.class, true, false);
        ELEVATION_CONSTRAINT_MIN_PROP = PropertySupport.init("ElevationConstraintMin", SPSiteQuality.class, true, false);
        ELEVATION_CONSTRAINT_MAX_PROP = PropertySupport.init("ElevationConstraintMax", SPSiteQuality.class, true, false);
        TIMING_WINDOWS_PROP = PropertySupport.init("TimingWindows", SPSiteQuality.class, false, false);

        PRIVATE_PROP_MAP.put(CLOUD_COVER_PROP.getName(), CLOUD_COVER_PROP);
        PRIVATE_PROP_MAP.put(IMAGE_QUALITY_PROP.getName(), IMAGE_QUALITY_PROP);
        PRIVATE_PROP_MAP.put(SKY_BACKGROUND_PROP.getName(), SKY_BACKGROUND_PROP);
        PRIVATE_PROP_MAP.put(WATER_VAPOR_PROP.getName(), WATER_VAPOR_PROP);
        PRIVATE_PROP_MAP.put(ELEVATION_CONSTRAINT_TYPE_PROP.getName(), ELEVATION_CONSTRAINT_TYPE_PROP);
        PRIVATE_PROP_MAP.put(ELEVATION_CONSTRAINT_MIN_PROP.getName(), ELEVATION_CONSTRAINT_MIN_PROP);
        PRIVATE_PROP_MAP.put(ELEVATION_CONSTRAINT_MAX_PROP.getName(), ELEVATION_CONSTRAINT_MAX_PROP);
        PRIVATE_PROP_MAP.put(TIMING_WINDOWS_PROP.getName(), TIMING_WINDOWS_PROP);
    }

    // Current observing conditions; replaced wholesale (Conditions is immutable).
    private Conditions conditions = Conditions.WORST;
    private ElevationConstraintType _elevationConstraintType = ElevationConstraintType.DEFAULT;
    private double _elevationConstraintMin = 0;
    private double _elevationConstraintMax = 0;
    private TimingWindowList _timingWindows = new TimingWindowList();

    /**
     * Default constructor.  Initialize the component type.
     */
    public SPSiteQuality() {
        super(SP_TYPE);
    }

    public Map<String, PropertyDescriptor> getProperties() {
        return PROPERTY_MAP;
    }

    public Conditions conditions() {
        return conditions;
    }

    /**
     * Set the sky.  Fires a property change only when the value actually changes.
     */
    public void setSkyBackground(SkyBackground newValue) {
        SkyBackground oldValue = conditions.sb;
        if (oldValue != newValue) {
            conditions = conditions.sb(newValue);
            firePropertyChange(SKY_BACKGROUND_PROP.getName(), oldValue, newValue);
        }
    }

    /**
     * Get the sky.
     */
    public SkyBackground getSkyBackground() {
        return conditions.sb;
    }

    /**
     * Set the cloud cover.  Fires a property change only when the value actually changes.
     */
    public void setCloudCover(CloudCover newValue) {
        CloudCover oldValue = conditions.cc;
        if (oldValue != newValue) {
            conditions = conditions.cc(newValue);
            firePropertyChange(CLOUD_COVER_PROP.getName(), oldValue, newValue);
        }
    }

    /**
     * Get the cloud cover
     */
    public CloudCover getCloudCover() {
        return conditions.cc;
    }

    /**
     * Set the image quality.  Fires a property change only when the value actually changes.
     */
    public void setImageQuality(ImageQuality newValue) {
        ImageQuality oldValue = conditions.iq;
        if (newValue != oldValue) {
            conditions = conditions.iq(newValue);
            firePropertyChange(IMAGE_QUALITY_PROP.getName(), oldValue, newValue);
        }
    }

    /**
     * Get the image quality.
     */
    public ImageQuality getImageQuality() {
        return conditions.iq;
    }

    /**
     * Set Water Vapor.  Fires a property change only when the value actually changes.
     */
    public void setWaterVapor(WaterVapor newValue) {
        WaterVapor oldValue = conditions.wv;
        if (oldValue != newValue) {
            conditions = conditions.wv(newValue);
            firePropertyChange(WATER_VAPOR_PROP.getName(), oldValue, newValue);
        }
    }

    /**
     * Get the Water Vapor.
     */
    public WaterVapor getWaterVapor() {
        return conditions.wv;
    }

    public ElevationConstraintType getElevationConstraintType() {
        return _elevationConstraintType;
    }

    // Unlike the condition setters above, the elevation setters fire a property
    // change unconditionally, even when the new value equals the old one.
    public void setElevationConstraintType(ElevationConstraintType constraintType) {
        ElevationConstraintType prev = _elevationConstraintType;
        _elevationConstraintType = constraintType;
        firePropertyChange(ELEVATION_CONSTRAINT_TYPE_PROP.getName(), prev, constraintType);
    }

    public double getElevationConstraintMin() {
        return _elevationConstraintMin;
    }

    public void setElevationConstraintMin(double min) {
        double prev = _elevationConstraintMin;
        _elevationConstraintMin = min;
        firePropertyChange(ELEVATION_CONSTRAINT_MIN_PROP.getName(), prev, min);
    }

    public double getElevationConstraintMax() {
        return _elevationConstraintMax;
    }

    public void setElevationConstraintMax(double max) {
        double prev = _elevationConstraintMax;
        _elevationConstraintMax = max;
        firePropertyChange(ELEVATION_CONSTRAINT_MAX_PROP.getName(), prev, max);
    }

    /** Read-only view of the timing windows. */
    public List<TimingWindow> getTimingWindows() {
        return Collections.unmodifiableList(_timingWindows);
    }

    // The timing-window mutators snapshot the previous list so listeners receive
    // an immutable before/after pair in the property change event.
    public void setTimingWindows(List<TimingWindow> windows) {
        List<TimingWindow> prev = Collections.unmodifiableList(new ArrayList<>(_timingWindows));
        _timingWindows.clear();
        _timingWindows.addAll(windows);
        firePropertyChange(TIMING_WINDOWS_PROP.getName(), prev, getTimingWindows());
    }

    public void addTimingWindow(TimingWindow tw) {
        List<TimingWindow> prev = Collections.unmodifiableList(new ArrayList<>(_timingWindows));
        _timingWindows.add(tw);
        firePropertyChange(TIMING_WINDOWS_PROP.getName(), prev, getTimingWindows());
    }

    public void removeTimingWindow(TimingWindow tw) {
        List<TimingWindow> prev = Collections.unmodifiableList(new ArrayList<>(_timingWindows));
        // Only fire when something was actually removed.
        if (_timingWindows.remove(tw)) {
            firePropertyChange(TIMING_WINDOWS_PROP.getName(), prev, getTimingWindows());
        }
    }

    /**
     * Return a parameter set describing the current state of this object.
     * @param factory the PIO factory used to create params
     */
    public ParamSet getParamSet(PioFactory factory) {
        ParamSet paramSet = super.getParamSet(factory);
        // Enums are serialized by constant name so setParamSet can round-trip via the name-based lookups.
        Pio.addParam(factory, paramSet, CLOUD_COVER_PROP, conditions.cc.name());
        Pio.addParam(factory, paramSet, IMAGE_QUALITY_PROP, conditions.iq.name());
        Pio.addParam(factory, paramSet, SKY_BACKGROUND_PROP, conditions.sb.name());
        Pio.addParam(factory, paramSet, WATER_VAPOR_PROP, conditions.wv.name());
        Pio.addParam(factory, paramSet, ELEVATION_CONSTRAINT_TYPE_PROP, _elevationConstraintType.name());
        Pio.addDoubleParam(factory, paramSet, ELEVATION_CONSTRAINT_MIN_PROP.getName(), _elevationConstraintMin);
        Pio.addDoubleParam(factory, paramSet, ELEVATION_CONSTRAINT_MAX_PROP.getName(), _elevationConstraintMax);
        paramSet.addParamSet(_timingWindows.getParamSet(factory));
        return paramSet;
    }

    /**
     * Set the state of this object from the given parameter set.
     * Missing enum params leave the current value untouched; missing double
     * params fall back to 0.0.  Each setter fires its property change event.
     */
    public void setParamSet(ParamSet paramSet) {
        super.setParamSet(paramSet);

        String v = Pio.getValue(paramSet, CLOUD_COVER_PROP.getName());
        if (v != null) {
            setCloudCover(CloudCover.getCloudCover(v));
        }
        v = Pio.getValue(paramSet, IMAGE_QUALITY_PROP.getName());
        if (v != null) {
            setImageQuality(ImageQuality.getImageQuality(v));
        }
        v = Pio.getValue(paramSet, SKY_BACKGROUND_PROP.getName());
        if (v != null) {
            setSkyBackground(SkyBackground.getSkyBackground(v));
        }
        v = Pio.getValue(paramSet, WATER_VAPOR_PROP.getName());
        if (v != null) {
            setWaterVapor(WaterVapor.getWaterVapor(v));
        }
        v = Pio.getValue(paramSet, ELEVATION_CONSTRAINT_TYPE_PROP.getName());
        if (v != null) {
            setElevationConstraintType(ElevationConstraintType.getElevationConstraintType(v));
        }
        setElevationConstraintMin(Pio.getDoubleValue(paramSet, ELEVATION_CONSTRAINT_MIN_PROP.getName(), 0.0));
        setElevationConstraintMax(Pio.getDoubleValue(paramSet, ELEVATION_CONSTRAINT_MAX_PROP.getName(), 0.0));
        _timingWindows.setParamSet(paramSet.getParamSet(TimingWindowList.NAME));
    }
}
package net.sf.taverna.wsdl.parser;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.net.URL;
import java.util.List;

import javax.wsdl.Operation;
import javax.xml.namespace.QName;

import org.junit.Ignore;
import org.junit.Test;

/**
 * Integration tests for {@link WSDLParser} against a set of remote test WSDLs.
 * All tests are {@code @Ignore}d because they require network access to
 * {@link #WSDL_TEST_BASE} (plus one classpath resource) and the remote WSDLs
 * may change over time.
 *
 * Review fixes: corrected the contradictory assertion message in
 * {@code testBaseTypes} ("should not be base type" annotated an assertion that
 * the descriptor IS a BaseTypeDescriptor) and the "AArrayTypeDescriptor" typo
 * in {@code testArrayType}.  No assertion logic was changed.
 */
public class WSDLParserTest {

    /** Base URL for the remote test WSDL documents. */
    public static final String WSDL_TEST_BASE = "http://www.mygrid.org.uk/taverna-tests/testwsdls/";

    @Ignore("Integration test")
    @Test
    public void testGetOperations() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        List<Operation> operations = parser.getOperations();
        assertEquals(
                "wrong number of operations found (wsdl may have changed)", 12,
                operations.size());
        Operation op = operations.get(0);
        assertEquals("wrong name for first operation", "run_eGquery", op
                .getName());
        assertEquals("wrong style", "document", parser.getStyle());
    }

    @Ignore("Integration test")
    @Test
    public void testGetActionURI() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        String actionURI = parser.getSOAPActionURI("run_eInfo");
        assertEquals("action uri is wrong", "einfo", actionURI);
    }

    @Ignore("Integration test")
    @Test
    public void testMissingStyleInBinding() throws Exception {
        // The SBWReader binding omits the style attribute entirely.
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "SBWReader.wsdl");
        assertEquals("Style should default to document if missing", "document",
                parser.getStyle());
    }

    @Ignore("Integration test")
    @Test
    public void testComplexTypeFromImport() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        List<TypeDescriptor> inputs = parser
                .getOperationInputParameters("run_eInfo");
        List<TypeDescriptor> outputs = parser
                .getOperationOutputParameters("run_eInfo");
        assertEquals("wrong number of inputs", 1, inputs.size());
        assertEquals("wrong number of outputs", 1, outputs.size());
        assertTrue("input should be complex", inputs.get(0) instanceof ComplexTypeDescriptor);
        ComplexTypeDescriptor complexTypeDesc = (ComplexTypeDescriptor) inputs
                .get(0);
        assertEquals("wrong name", "parameters", complexTypeDesc.getName());
        assertEquals("wrong number of elements", 3, complexTypeDesc
                .getElements().size());

        // All three elements (db, tool, email) are optional, non-repeating strings.
        TypeDescriptor typeDesc = complexTypeDesc.getElements().get(0);
        assertEquals("wrong name", "db", typeDesc.getName());
        assertEquals("wrong type", "string", typeDesc.getType());
        assertTrue("db should be optional", typeDesc.isOptional());
        assertFalse("db should not be unbounded", typeDesc.isUnbounded());

        typeDesc = complexTypeDesc.getElements().get(1);
        assertEquals("wrong name", "tool", typeDesc.getName());
        assertEquals("wrong type", "string", typeDesc.getType());
        assertTrue("tool should be optional", typeDesc.isOptional());
        assertFalse("tool should not be unbounded", typeDesc.isUnbounded());

        typeDesc = complexTypeDesc.getElements().get(2);
        assertEquals("wrong name", "email", typeDesc.getName());
        assertEquals("wrong type", "string", typeDesc.getType());
        assertTrue("email should be optional", typeDesc.isOptional());
        assertFalse("email should not be unbounded", typeDesc.isUnbounded());
    }

    @Ignore("Integration test")
    @Test
    public void testNestedComplexTypes() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "bind.wsdl");
        List<TypeDescriptor> inputs = parser
                .getOperationInputParameters("BIVGetComplexRecord");
        List<TypeDescriptor> outputs = parser
                .getOperationOutputParameters("BIVGetComplexRecord");
        assertEquals("wrong number of inputs", 1, inputs.size());
        assertEquals("wrong number of outputs", 1, outputs.size());
        assertEquals("wrong name for input", "bid", (inputs.get(0)).getName());
        assertEquals("wrong type for input", "int", (inputs.get(0)).getType());
        assertEquals("wrong name for output", "BIVComplex",
                (outputs.get(0)).getName());
        assertEquals("wrong type for output", "BIVComplex",
                (outputs.get(0)).getType());
        assertTrue("wrong descriptor class for output",
                outputs.get(0) instanceof ComplexTypeDescriptor);

        ComplexTypeDescriptor typeDesc = (ComplexTypeDescriptor) outputs.get(0);
        assertEquals("wrong number of inner elements", 3, typeDesc
                .getElements().size());
        assertEquals("wrong name for first element", "bid",
                (typeDesc.getElements().get(0)).getName());
        assertEquals("wrong name for 2nd element", "spokeModel",
                (typeDesc.getElements().get(1)).getName());
        assertEquals("wrong name for 3rd element", "subunit",
                (typeDesc.getElements().get(2)).getName());
        assertTrue("3rd element should be instance of ArrayTypeDescriptor",
                typeDesc.getElements().get(2) instanceof ArrayTypeDescriptor);

        // Drill into the array element and its nested complex element type.
        ArrayTypeDescriptor arrayTypeDesc = (ArrayTypeDescriptor) typeDesc
                .getElements().get(2);
        assertEquals("wrong type for 3rd element", "BIVMolecule", arrayTypeDesc
                .getType());

        typeDesc = (ComplexTypeDescriptor) arrayTypeDesc.getElementType();
        assertEquals("wrong type for 3rd element", "BIVMolecule", typeDesc
                .getType());
        assertEquals("wrong number of elements in nested complex type", 7,
                typeDesc.getElements().size());
        assertEquals("wrong name for first element", "id",
                (typeDesc.getElements().get(0)).getName());
        assertEquals("wrong type for first element", "int",
                (typeDesc.getElements().get(0)).getType());
        assertEquals("wrong name for last element", "smid-hits",
                (typeDesc.getElements().get(6)).getName());
        assertEquals("wrong type for last element", "int",
                (typeDesc.getElements().get(6)).getType());
    }

    @Ignore("Integration test")
    @Test
    public void testBaseTypes() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "bind.wsdl");
        List<TypeDescriptor> inputs = parser
                .getOperationInputParameters("BIVGetRecord");
        assertEquals("wrong number of inputs", 1, inputs.size());
        // FIX: message previously read "should not be base type", contradicting the assertion.
        assertTrue("should be base type",
                inputs.get(0) instanceof BaseTypeDescriptor);
        assertEquals("wrong name", "bid", (inputs.get(0)).getName());
        assertEquals("wrong type", "int", (inputs.get(0)).getType());
    }

    @Ignore("Integration test")
    @Test
    public void testArrayType() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "bind.wsdl");
        List<TypeDescriptor> inputs = parser
                .getOperationInputParameters("BIVGetRecords");
        List<TypeDescriptor> outputs = parser
                .getOperationOutputParameters("BIVGetRecords");
        assertEquals("wrong number of inputs", 1, inputs.size());
        // FIX: message previously had typo "AArrayTypeDescriptor".
        assertTrue("input should be of ArrayTypeDescriptor",
                inputs.get(0) instanceof ArrayTypeDescriptor);

        ArrayTypeDescriptor arrayTypeDesc = (ArrayTypeDescriptor) inputs.get(0);
        assertEquals("wrong name", "ids", arrayTypeDesc.getName());
        assertEquals("wrong type", "ArrayOf_xsd_int", arrayTypeDesc.getType());

        TypeDescriptor typeDesc = arrayTypeDesc.getElementType();
        assertTrue("element should be of type BaseTypeDescriptor",
                typeDesc instanceof BaseTypeDescriptor);
        assertEquals("wrong type", "int", typeDesc.getType());

        assertEquals("wrong number of outputs", 1, outputs.size());
        assertTrue("output should be of ArrayTypeDescriptor",
                outputs.get(0) instanceof ArrayTypeDescriptor);

        arrayTypeDesc = (ArrayTypeDescriptor) outputs.get(0);
        assertEquals("wrong name", "BIVRecords", arrayTypeDesc.getName());
        assertEquals("wrong type", "ArrayOfBIVRecord", arrayTypeDesc.getType());

        typeDesc = arrayTypeDesc.getElementType();
        assertEquals("wrong type", "BIVRecord", typeDesc.getType());
    }

    @Ignore("Integration test")
    @Test
    public void testGoVizNoOutputs() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "GoViz.wsdl");
        List<TypeDescriptor> inputs = parser
                .getOperationInputParameters("destroySession");
        List<TypeDescriptor> outputs = parser
                .getOperationOutputParameters("destroySession");
        assertEquals("wrong number of inputs", 1, inputs.size());
        assertEquals("wrong number of outputs", 0, outputs.size());
        TypeDescriptor typeDesc = inputs.get(0);
        assertTrue("input should be BaseType",
                typeDesc instanceof BaseTypeDescriptor);
        assertEquals("wrong name", "sessionID", typeDesc.getName());
        assertEquals("wrong type", "string", typeDesc.getType());
    }

    @Ignore("Integration test")
    @Test
    public void testGetUseEncoded() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "bind.wsdl");
        String use = parser.getUse("BIVGetRecords");
        assertEquals("use should be encoded", "encoded", use);
    }

    @Ignore("Integration test")
    @Test
    public void testGetUseLiteral() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        String use = parser.getUse("run_eInfo");
        assertEquals("use should be literal", "literal", use);
    }

    @Ignore("Integration test")
    @Test
    public void testGetOperationNamespace() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "CurrencyExchangeService.wsdl");
        String operationNamespace = parser.getOperationNamespaceURI("getRate");
        assertEquals("operation namespace is wrong",
                "urn:xmethods-CurrencyExchange", operationNamespace);
    }

    @Ignore("Integration test")
    @Test
    public void testGetOperationNamespace2() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        String operationNamespace = parser
                .getOperationNamespaceURI("run_eInfo");
        assertEquals("operation namespace is wrong",
                "http://www.ncbi.nlm.nih.gov/soap/eutils/einfo",
                operationNamespace);
    }

    @Ignore("Integration test")
    @Test
    public void testGetOperationElementQName() throws Exception {
        WSDLParser parser = new WSDLParser(WSDL_TEST_BASE + "eutils/eutils_lite.wsdl");
        QName operationQName = parser.getOperationQname("run_eInfo");
        assertEquals("element name is wrong", "eInfoRequest", operationQName
                .getLocalPart());
        assertEquals("operation namespace is wrong",
                "http://www.ncbi.nlm.nih.gov/soap/eutils/einfo", operationQName
                        .getNamespaceURI());
    }

    @Ignore("Integration test")
    @Test
    public void testGetOperationElementQName2() throws Exception {
        // This WSDL is loaded from the test classpath rather than the network.
        URL tav744Url = getClass().getResource(
                "/net/sf/taverna/wsdl/parser/TAV-744/InstrumentService__.wsdl");
        WSDLParser parser = new WSDLParser(tav744Url.toExternalForm());
        QName operationQName = parser.getOperationQname("getList");
        assertEquals("operation element name is wrong", "GetListRequest",
                operationQName.getLocalPart());
        assertEquals("operation namespace is wrong",
                "http://InstrumentService.uniparthenope.it/InstrumentService",
                operationQName.getNamespaceURI());
    }
}
/**
 * Copyright (c) 2015 sothawo
 *
 * http://www.sothawo.com
 */
package com.sothawo.taboo2.repository.jpa;

import com.sothawo.taboo2.AlreadyExistsException;
import com.sothawo.taboo2.Bookmark;
import com.sothawo.taboo2.NotFoundException;
import com.sothawo.taboo2.repository.AbstractBookmarkRepository;
import com.sothawo.taboo2.repository.BookmarkRepository;
import com.sothawo.taboo2.repository.BookmarkRepositoryFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.persistence.EntityTransaction;
import javax.persistence.NoResultException;
import javax.persistence.Persistence;
import javax.persistence.PersistenceException;
import javax.persistence.TypedQuery;
import javax.persistence.spi.PersistenceProvider;
import javax.persistence.spi.PersistenceProviderResolver;
import javax.persistence.spi.PersistenceProviderResolverHolder;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static com.sothawo.taboo2.BookmarkBuilder.aBookmark;

/**
 * Repository implementation using a H2 database, accessed through JPA/Hibernate.
 *
 * Review fixes: parameterized the raw {@code List} in createBookmark, replaced
 * size()-comparisons with isEmpty(), normalized modifier order on the logger,
 * preserved the cause when rethrowing in deleteBookmark, and stopped
 * getBookmarkById from catching and re-wrapping its own NotFoundException.
 *
 * @author P.J. Meisch (pj.meisch@sothawo.com).
 */
public class H2Repository extends AbstractBookmarkRepository {
// ------------------------------ FIELDS ------------------------------

    /** name of the persistence unit. */
    private static final String PERSISTENCE_UNIT_NAME = "taboo2_pu";

    /** Logger. */
    private static final Logger LOG = LoggerFactory.getLogger(H2Repository.class);

    /** Entity Manager Factory, autocloseable variant. Set to null after close(). */
    private EntityManagerFactoryAutoCloseable emf;

// -------------------------- STATIC METHODS --------------------------

    static {
        fixDeprecatedHibernateWarning();
    }

    /**
     * fixes the warning (Encountered a deprecated javax.persistence.spi.PersistenceProvider [org.hibernate.ejb
     * .HibernatePersistence]; use [org.hibernate.jpa.HibernatePersistenceProvider] instead.)
     *
     * must be called before getting the EntityManagerFactory
     */
    private static void fixDeprecatedHibernateWarning() {
        PersistenceProviderResolverHolder.setPersistenceProviderResolver(new PersistenceProviderResolver() {
            // Single fixed provider; the explicit array creation was unnecessary.
            private final List<PersistenceProvider> providers_ =
                    Collections.<PersistenceProvider>singletonList(new org.hibernate.jpa.HibernatePersistenceProvider());

            @Override
            public List<PersistenceProvider> getPersistenceProviders() {
                return providers_;
            }

            @Override
            public void clearCachedProviders() {
            }
        });
    }

// --------------------------- CONSTRUCTORS ---------------------------

    /**
     * Creates the repository and initializes the EntityManagerFactory for the
     * given jdbc url.
     *
     * @param jdbcUrl jdbc url of the H2 database
     */
    public H2Repository(String jdbcUrl) {
        LOG.info("configured jdbc url: {}", jdbcUrl);
        Map<String, String> props = new HashMap<>();
        props.put("hibernate.connection.url", jdbcUrl);
        emf = EntityManagerFactoryAutoCloseable.createFor(Persistence.createEntityManagerFactory
                (PERSISTENCE_UNIT_NAME, props));
    }

// ------------------------ INTERFACE METHODS ------------------------

// --------------------- Interface AutoCloseable ---------------------

    /**
     * Closes the EntityManagerFactory; safe to call more than once.
     */
    @Override
    public void close() {
        try {
            if (null != emf) {
                if (emf.isOpen()) {
                    emf.close();
                }
                emf = null;
            }
        } catch (RuntimeException e) {
            LOG.warn("DB", e);
        }
    }

// --------------------- Interface BookmarkRepository ---------------------

    /**
     * Creates a new bookmark. The bookmark must not have an id and must have a
     * non-empty url; a bookmark with the same url must not already exist.
     *
     * @param bookmark the bookmark to store
     * @return the stored bookmark including its generated id, or null on a database error
     * @throws IllegalArgumentException if the bookmark has an id or no url
     * @throws AlreadyExistsException   if a bookmark with the url already exists
     */
    @Override
    public Bookmark createBookmark(Bookmark bookmark) {
        // check arguments
        if (null != bookmark.getId() || null == bookmark.getUrl() || bookmark.getUrl().isEmpty()) {
            throw new IllegalArgumentException();
        }

        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            // search existing (was a raw List; only emptiness is checked, so a wildcard suffices)
            List<?> resultList = em.createNamedQuery(BookmarkEntity.BOOKMARK_BY_URL).setParameter("url", bookmark.getUrl())
                    .getResultList();
            if (!resultList.isEmpty()) {
                throw new AlreadyExistsException(MessageFormat.format("bookmark with url {0} already exists.",
                        bookmark.getUrl()));
            }

            // insert new bookmark
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            BookmarkEntity bookmarkEntity = new BookmarkEntity();
            bookmarkEntity.setUrl(bookmark.getUrl());
            bookmarkEntity.setTitle(bookmark.getTitle());

            // build the TagEntities, reusing existing tags where possible
            TypedQuery<TagEntity> findTagQuery = em.createNamedQuery(TagEntity.FIND_BY_TAG, TagEntity.class);
            for (String tag : bookmark.getTags()) {
                TagEntity tagEntity;
                try {
                    tagEntity = findTagQuery.setParameter("tag", tag).getSingleResult();
                } catch (NoResultException ignored) {
                    // tag not stored yet, create it
                    tagEntity = new TagEntity();
                    tagEntity.setTag(tag);
                }
                bookmarkEntity.addTag(tagEntity);
                if (null == tagEntity.getId()) {
                    em.persist(tagEntity);
                }
            }
            em.persist(bookmarkEntity);
            em.flush();
            tx.commit();
            return bookmarkFromEntity(bookmarkEntity);
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            // NOTE(review): returning null on error contradicts the usual contract; callers must null-check.
            LOG.error("db error on creating bookmark", e);
            return null;
        }
    }

    /**
     * Deletes the bookmark with the given id, removing tags that are no longer
     * referenced by any bookmark.
     *
     * @param id id of the bookmark to delete
     * @throws IllegalArgumentException if the id is not numeric
     * @throws NotFoundException        if no bookmark with the id exists
     */
    @Override
    public void deleteBookmark(String id) {
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            Long bookmarkId = Long.valueOf(id);
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            BookmarkEntity bookmarkEntity = em.find(BookmarkEntity.class, bookmarkId);
            if (null == bookmarkEntity) {
                throw new NotFoundException("no bookmark with id " + id);
            }
            // detach the bookmark from its tags and drop tags that became orphaned
            for (TagEntity tagEntity : bookmarkEntity.getTags()) {
                Set<BookmarkEntity> tagBookmarks = tagEntity.getBookmarks();
                tagEntity.getBookmarks().remove(bookmarkEntity);
                if (tagBookmarks.isEmpty()) {
                    em.remove(tagEntity);
                }
            }
            em.remove(bookmarkEntity);
            tx.commit();
        } catch (NumberFormatException e) {
            // keep the cause so the original parse failure is not lost
            throw new IllegalArgumentException("non numeric id", e);
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on deleting bookmark", e);
        }
    }

    /**
     * @return all bookmarks in the repository; an empty set on a database error
     */
    @Override
    public Collection<Bookmark> getAllBookmarks() {
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            return em
                    .createNamedQuery(BookmarkEntity.ALL_BOOKMARKS, BookmarkEntity.class)
                    .getResultList()
                    .stream()
                    .map(this::bookmarkFromEntity)
                    .collect(Collectors.toSet());
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on getting all bookmarks", e);
            return Collections.emptySet();
        }
    }

    /**
     * @return all tags in the repository; an empty set on a database error
     */
    @Override
    public Collection<String> getAllTags() {
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            return em
                    .createNamedQuery(TagEntity.ALL_TAGS, TagEntity.class)
                    .getResultList()
                    .stream()
                    .map(TagEntity::getTag)
                    .collect(Collectors.toSet());
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on getting all tags", e);
            return Collections.emptySet();
        }
    }

    /**
     * Returns the bookmark with the given id.
     *
     * @param id id of the bookmark
     * @return the bookmark
     * @throws NotFoundException if the id is not numeric, the lookup fails, or no bookmark exists
     */
    @Override
    public Bookmark getBookmarkById(String id) {
        // FIX: previously the NotFoundException thrown by orElseThrow was caught by the
        // surrounding catch(Exception) and wrapped into a second NotFoundException.
        Bookmark bookmark;
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            bookmark = Optional.ofNullable(em.find(BookmarkEntity.class, Long.valueOf(id)))
                    .map(this::bookmarkFromEntity)
                    .orElse(null);
        } catch (Exception e) {
            throw new NotFoundException("no bookmark with id " + id, e);
        }
        if (null == bookmark) {
            throw new NotFoundException("no bookmark with id " + id);
        }
        return bookmark;
    }

    /**
     * Returns the bookmarks whose title contains the given string.
     *
     * @param s search string, must not be null or empty
     * @return matching bookmarks; an empty set on error or for an empty search string
     */
    @Override
    public Collection<Bookmark> getBookmarksWithSearch(String s) {
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            if (null == s || s.isEmpty()) {
                // thrown inside the try, so it is logged and mapped to an empty result below
                throw new IllegalArgumentException("empty search string");
            }
            return em
                    .createNamedQuery(BookmarkEntity.BOOKMARKS_WITH_TITLE, BookmarkEntity.class)
                    .setParameter("s", '%' + s + '%')
                    .getResultList()
                    .stream()
                    .map(this::bookmarkFromEntity)
                    .collect(Collectors.toSet());
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on getting bookmarks with search string", e);
            return Collections.emptySet();
        }
    }

    /**
     * Removes all bookmarks and tags from the repository.
     */
    @Override
    public void purge() {
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            em.createQuery("delete from BookmarkEntity b").executeUpdate();
            em.createQuery("delete from TagEntity t").executeUpdate();
            tx.commit();
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on purging data", e);
        }
    }

    /**
     * Updates an existing bookmark (url, title and tags). Tags that are no
     * longer used by any bookmark afterwards are removed.
     *
     * @param bookmark the bookmark to update; must have an id and a non-empty url
     * @throws IllegalArgumentException if id or url are missing
     * @throws NotFoundException        if the id is not numeric or no bookmark with the id exists
     * @throws AlreadyExistsException   if the new url belongs to a different bookmark
     */
    @Override
    public void updateBookmark(Bookmark bookmark) {
        if (null == bookmark.getId() || null == bookmark.getUrl() || bookmark.getUrl().isEmpty()) {
            throw new IllegalArgumentException();
        }
        Long updateBookmarkId;
        try {
            updateBookmarkId = Long.valueOf(bookmark.getId());
        } catch (NumberFormatException e) {
            throw new NotFoundException("no bookmark with id " + bookmark.getId());
        }

        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            EntityTransaction tx = em.getTransaction();
            tx.begin();

            // check if new URL exists on different entity
            try {
                BookmarkEntity existingBookmarkEntity =
                        em.createNamedQuery(BookmarkEntity.BOOKMARK_BY_URL, BookmarkEntity.class)
                                .setParameter("url", bookmark.getUrl())
                                .getSingleResult();
                if (!existingBookmarkEntity.getId().equals(updateBookmarkId)) {
                    tx.rollback();
                    throw new AlreadyExistsException(MessageFormat.format("new url {0} already bookmarked",
                            bookmark.getUrl()));
                }
            } catch (NoResultException ignored) {
                // no bookmark with the new url -> nothing to check
            }

            BookmarkEntity bookmarkEntity = em.find(BookmarkEntity.class, updateBookmarkId);
            if (null == bookmarkEntity) {
                // NOTE(review): the open transaction is neither committed nor rolled back here - verify cleanup.
                throw new NotFoundException("no bookmark with id " + updateBookmarkId);
            }
            bookmarkEntity.setUrl(bookmark.getUrl());
            bookmarkEntity.setTitle(bookmark.getTitle());

            // keep the old tags
            Set<TagEntity> previousTagEntities = new HashSet<>(bookmarkEntity.getTags());

            // build the new TagEntities
            TypedQuery<TagEntity> findTagQuery = em.createNamedQuery(TagEntity.FIND_BY_TAG, TagEntity.class);
            for (String tag : bookmark.getTags()) {
                TagEntity tagEntity;
                try {
                    tagEntity = findTagQuery.setParameter("tag", tag).getSingleResult();
                } catch (NoResultException ignored) {
                    // new tag
                    tagEntity = new TagEntity();
                    tagEntity.setTag(tag);
                }
                if (!bookmarkEntity.getTags().contains(tagEntity)) {
                    bookmarkEntity.addTag(tagEntity);
                }
                if (null == tagEntity.getId()) {
                    em.persist(tagEntity);
                }
            }

            // check the old set for tags that are not contained anymore
            previousTagEntities
                    .stream()
                    .filter(tagEntity -> !bookmark.getTags().contains(tagEntity.getTag()))
                    .forEach(tagEntity -> {
                        bookmarkEntity.removeTag(tagEntity);
                        if (tagEntity.getBookmarks().isEmpty()) {
                            em.remove(tagEntity);
                        }
                    });

            em.merge(bookmarkEntity);
            tx.commit();
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on updating bookmark", e);
        }
    }

// -------------------------- OTHER METHODS --------------------------

    /**
     * converts a BookmarkEntity to a Bookmark
     *
     * @param entity
     *         the entity to convert
     * @return the converted Bookmark
     */
    private Bookmark bookmarkFromEntity(BookmarkEntity entity) {
        Bookmark createdBookmark = aBookmark()
                .withId(String.valueOf(entity.getId()))
                .withUrl(entity.getUrl())
                .withTitle(entity.getTitle())
                .build();
        for (TagEntity tagEntity : entity.getTags()) {
            createdBookmark.addTag(tagEntity.getTag());
        }
        return createdBookmark;
    }

    /**
     * Returns all bookmarks carrying the given tag; an empty set if the tag is
     * unknown or on a database error.
     *
     * @param tag the tag to look up
     * @return the matching bookmarks
     */
    @Override
    protected Set<Bookmark> getBookmarksWithTag(String tag) {
        Set<Bookmark> bookmarks = new HashSet<>();
        try (EntityManagerAutoCloseable em = (EntityManagerAutoCloseable) emf.createEntityManager()) {
            em.createNamedQuery(TagEntity.FIND_BY_TAG, TagEntity.class)
                    .setParameter("tag", tag)
                    .getSingleResult()
                    .getBookmarks()
                    .stream()
                    .map(this::bookmarkFromEntity)
                    .forEach(bookmarks::add);
        } catch (NoResultException ignored) {
            // unknown tag -> empty result
        } catch (IllegalStateException | IllegalArgumentException | PersistenceException e) {
            LOG.error("db error on getting tag", e);
        }
        return bookmarks;
    }

// -------------------------- INNER CLASSES --------------------------

    /**
     * Factory implementation.
     */
    public static class Factory implements BookmarkRepositoryFactory {
// ------------------------ INTERFACE METHODS ------------------------

// --------------------- Interface BookmarkRepositoryFactory ---------------------

        /**
         * first argument is jdbcUrl
         *
         * @param args
         *         arguments for the BookmarkRepository
         * @return a new H2Repository configured with the given jdbc url
         * @throws IllegalArgumentException if no jdbc url is given
         */
        @Override
        public BookmarkRepository create(String[] args) {
            if (null == args || args.length < 1) {
                throw new IllegalArgumentException();
            }
            final String jdbcUrl = args[0];
            return new H2Repository(jdbcUrl);
        }
    }
}
/*
 * $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/share/org/apache/slide/common/AbstractXAServiceBase.java,v 1.2 2006-01-22 22:47:24 peter-cvs Exp $
 * $Revision: 1.2 $
 * $Date: 2006-01-22 22:47:24 $
 *
 * ====================================================================
 *
 * Copyright 1999-2002 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.slide.common;

import java.util.Hashtable;
import java.util.Map;
import org.apache.commons.transaction.util.LoggerFacade;
import org.apache.slide.authenticate.CredentialsToken;
import org.apache.commons.transaction.util.xa.TransactionalResource;
import org.apache.slide.util.logger.Logger;
import org.apache.slide.util.logger.TxLogger;
import org.apache.slide.transaction.SlideXidWrapper;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.XAException;
import javax.transaction.xa.Xid;
import javax.transaction.Status;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Slide Service abstract implementation.
 *
 * Changes: Removed the inheritance from AbstractXAResource and pasted the code in here so we could
 * switch to using a different implementation of XidWrapper.
 *
 * <p>Implements {@link XAResource} so that each service can act as a transaction branch managed by
 * an external transaction manager. Branch state is tracked in two maps ({@code activeContexts},
 * {@code suspendedContexts}) keyed by a wrapped {@link Xid}, plus a per-thread "currently active"
 * branch. Note that implementing {@link Status} is used only to inherit the {@code STATUS_*}
 * constants (constant-interface style, kept for compatibility).
 *
 * @version $Revision: 1.2 $
 */
public abstract class AbstractXAServiceBase /*extends AbstractXAResource*/
    implements Service, XAResource, Status {

    // -------------------------------------------------------------- Constants

    // Log channel name; defaults to the concrete subclass name so each service logs separately.
    protected String LOG_CHANNEL = this.getClass().getName();

    // ----------------------------------------------------- Instance Variables

    /**
     * Namespace.
     */
    protected Namespace namespace;

    // the scope of this store as specified in domain.xml
    protected Scope scope;

    // Lazily created facade bridging the Slide logger into commons-transaction; see getLoggerFacade().
    protected LoggerFacade loggerFacade = null;

    // -------------------------------------------------------- Service Methods

    /**
     * Set the scope of the store as specified in domain.xml.
     */
    public void setScope(Scope scope) {
        this.scope = scope;
    }

    /**
     * Namespace setter.
     */
    public void setNamespace(Namespace namespace) {
        this.namespace = namespace;
    }

    /**
     * Logger accessor.
     *
     * @return the namespace logger if a namespace is set and provides one,
     *         otherwise the domain-wide default logger
     */
    public Logger getLogger() {
        Logger logger = null;
        if (namespace != null) {
            logger = this.namespace.getLogger();
        }
        if (logger == null)
            logger = Domain.getLogger();
        return logger;
    }

    /**
     * Lazily builds (and caches) the commons-transaction logging facade on top of
     * {@link #getLogger()} using this service's log channel.
     */
    protected LoggerFacade getLoggerFacade() {
        if (loggerFacade == null) {
            loggerFacade = new TxLogger(getLogger(), LOG_CHANNEL);
        }
        return loggerFacade;
    }

    /**
     * Initializes the service with a set of parameters. Those could be :
     * <li>User name, login info
     * <li>Host name on which to connect
     * <li>Remote port
     * <li>JDBC driver whoich is to be used :-)
     * <li>Anything else ...
     *
     * @param parameters Hashtable containing the parameters' names
     * and associated values
     * @exception ServiceParameterErrorException Incorrect service parameter
     * @exception ServiceParameterMissingException Service parameter missing
     */
    public abstract void setParameters(Hashtable parameters)
        throws ServiceParameterErrorException, ServiceParameterMissingException;

    /**
     * Connects to the underlying data source (if any is needed).
     * Compatibility implementation for the previous store implementations;
     * the credentials are ignored and the no-arg {@link #connect()} is used.
     *
     * @param crdtoken the slide token containing e.g. the credential
     * @exception ServiceConnectionFailedException Connection failed
     */
    public void connect(CredentialsToken crdtoken) throws ServiceConnectionFailedException {
        connect();
    }

    /**
     * Connects to the underlying data source (if any is needed).
     *
     * @exception ServiceConnectionFailedException Connection failed
     */
    public abstract void connect() throws ServiceConnectionFailedException;

    /**
     * Disconnects from the underlying data source.
     *
     * @exception ServiceDisconnectionFailedException Disconnection failed
     */
    public abstract void disconnect() throws ServiceDisconnectionFailedException;

    /**
     * Initializes service. Default implementation is a no-op.
     *
     * @param token Namespace access token, needed if the service needs to
     * access objects or data within the namespace during its initialization
     * @exception ServiceInitializationFailedException May throw an exception
     * if the service has already been initialized before
     */
    public void initialize(NamespaceAccessToken token)
        throws ServiceInitializationFailedException {
    }

    /**
     * Deletes service underlying data source, if possible (and meaningful).
     *
     * @exception ServiceResetFailedException Reset failed
     */
    public abstract void reset() throws ServiceResetFailedException;

    /**
     * This function tells whether or not the service is connected.
     *
     * @return boolean true if we are connected
     * @exception ServiceAccessException Service access error
     */
    public abstract boolean isConnected() throws ServiceAccessException;

    /**
     * Connects to the service, if we were not previously connected.
     *
     * @param token the Credeantials token containing e.g. the credential
     * @return boolean true if we were not already connected
     * @exception ServiceAccessException Unspecified service access error
     * @exception ServiceConnectionFailedException Connection failed
     */
    public boolean connectIfNeeded(CredentialsToken token)
        throws ServiceConnectionFailedException, ServiceAccessException {
        boolean result = true;
        try {
            result = !isConnected();
        } catch (ServiceAccessException e) {
            // Ignore : Will try to reconnect
        }
        if (result) {
            connect(token);
        }
        return result;
    }

    /**
     * Connects to the service, if we were not previously connected.
     *
     * @return boolean true if we were not already connected
     * @exception ServiceAccessException Unspecified service access error
     * @exception ServiceConnectionFailedException Connection failed
     */
    public boolean connectIfNeeded()
        throws ServiceConnectionFailedException, ServiceAccessException {
        boolean result = true;
        try {
            result = !isConnected();
        } catch (ServiceAccessException e) {
            // Ignore : Will try to reconnect
        }
        if (result) {
            connect();
        }
        return result;
    }

    /**
     * Indicates whether or not the objects managed by this service should be
     * cached. Caching is enabled by default.
     *
     * @return boolean True if results should be cached
     */
    public boolean cacheResults() {
        return true;
    }

    // ----------------------------------------------------- XAResource Mathods

    // there might be at least one active transaction branch per thread
    private ThreadLocal activeTransactionBranch = new ThreadLocal();

    // Branches suspended via end(TMSUSPEND), keyed by wrapped Xid.
    private Map suspendedContexts = new ConcurrentHashMap();
    // Branches started and not yet ended/suspended, keyed by wrapped Xid.
    private Map activeContexts = new ConcurrentHashMap();

    public abstract boolean isSameRM(XAResource xares) throws XAException;

    public abstract Xid[] recover(int flag) throws XAException;

    /**
     * Whether the branch qualifier should be part of the wrapped Xid used as map key.
     */
    protected abstract boolean includeBranchInXid();

    /**
     * Discards all local knowledge of the given branch.
     *
     * @throws XAException with XAER_NOTA if the branch is unknown
     */
    public void forget(Xid xid) throws XAException {
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine("Forgetting transaction branch " + xid);
        }
        TransactionalResource ts = getTransactionalResource(xid);
        if (ts == null) {
            throw new XAException(XAException.XAER_NOTA);
        }
        setCurrentlyActiveTransactionalResource(null);
        removeActiveTransactionalResource(xid);
        removeSuspendedTransactionalResource(xid);
    }

    /**
     * Commits the branch. In one-phase mode an implicit prepare() is performed if the
     * branch was not prepared yet; in two-phase mode an unprepared branch is a protocol
     * error (XAER_PROTO). A branch marked rollback-only fails with XA_RBROLLBACK.
     */
    public void commit(Xid xid, boolean onePhase) throws XAException {
        TransactionalResource ts = getTransactionalResource(xid);
        if (ts == null) {
            throw new XAException(XAException.XAER_NOTA);
        }
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine("Committing transaction branch " + ts);
        }
        if (ts.getStatus() == STATUS_MARKED_ROLLBACK) {
            throw new XAException(XAException.XA_RBROLLBACK);
        }
        if (ts.getStatus() != STATUS_PREPARED) {
            if (onePhase) {
                ts.prepare();
            } else {
                throw new XAException(XAException.XAER_PROTO);
            }
        }
        ts.commit();
        // NOTE(review): unlike rollback(), cleanup is not in a finally block here, so a
        // failing ts.commit() leaves the branch registered — confirm this is intended.
        setCurrentlyActiveTransactionalResource(null);
        removeActiveTransactionalResource(xid);
        removeSuspendedTransactionalResource(xid);
    }

    /**
     * Rolls the branch back; local bookkeeping is cleared even if the rollback throws.
     */
    public void rollback(Xid xid) throws XAException {
        TransactionalResource ts = getTransactionalResource(xid);
        if (ts == null) {
            setCurrentlyActiveTransactionalResource(null);
            throw new XAException(XAException.XAER_NOTA);
        }
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine("Rolling back transaction branch " + ts);
        }
        try {
            ts.rollback();
        } finally {
            setCurrentlyActiveTransactionalResource(null);
            removeActiveTransactionalResource(xid);
            removeSuspendedTransactionalResource(xid);
        }
    }

    /**
     * First phase of 2PC: prepares the branch and records STATUS_PREPARED.
     * A branch marked rollback-only fails with XA_RBROLLBACK.
     *
     * @return the vote returned by the underlying resource's prepare()
     */
    public int prepare(Xid xid) throws XAException {
        TransactionalResource ts = getTransactionalResource(xid);
        if (ts == null) {
            throw new XAException(XAException.XAER_NOTA);
        }
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine("Preparing transaction branch " + ts);
        }
        if (ts.getStatus() == STATUS_MARKED_ROLLBACK) {
            throw new XAException(XAException.XA_RBROLLBACK);
        }
        int result = ts.prepare();
        ts.setStatus(STATUS_PREPARED);
        return result;
    }

    /**
     * Ends the association of the calling thread with the branch.
     * TMSUSPEND moves the branch to the suspended map, TMFAIL marks it rollback-only,
     * TMSUCCESS just detaches it; in all cases the thread-local current branch is cleared.
     */
    public void end(Xid xid, int flags) throws XAException {
        TransactionalResource ts = getActiveTransactionalResource(xid);
        if (ts == null) {
            setCurrentlyActiveTransactionalResource(null);
            throw new XAException(XAException.XAER_NOTA);
        }
        if (getCurrentlyActiveTransactionalResource() == null) {
            // the calling thread never start()ed / resumed this branch
            throw new XAException(XAException.XAER_INVAL);
        }
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine(new StringBuffer(128)
                .append("Thread ").append(Thread.currentThread())
                .append(flags == TMSUSPEND ? " suspends" : flags == TMFAIL ? " fails" : " ends")
                .append(" work on behalf of transaction branch ")
                .append(ts).toString());
        }
        switch (flags) {
            case TMSUSPEND :
                ts.suspend();
                addSuspendedTransactionalResource(xid, ts);
                removeActiveTransactionalResource(xid);
                break;
            case TMFAIL :
                ts.setStatus(STATUS_MARKED_ROLLBACK);
                break;
            case TMSUCCESS :
                break;
        }
        setCurrentlyActiveTransactionalResource(null);
    }

    /**
     * Associates the calling thread with the branch.
     * TMRESUME revives a previously suspended branch; every other flag (including
     * TMJOIN — see note) creates and begins a fresh transactional resource.
     */
    public void start(Xid xid, int flags) throws XAException {
        if (getCurrentlyActiveTransactionalResource() != null) {
            // this thread is already working on a branch
            throw new XAException(XAException.XAER_INVAL);
        }
        if (getLoggerFacade().isFineEnabled()) {
            getLoggerFacade().logFine(new StringBuffer(128)
                .append("Thread ").append(Thread.currentThread())
                .append(flags == TMNOFLAGS ? " starts" : flags == TMJOIN ? " joins" : " resumes")
                .append(" work on behalf of transaction branch ")
                .append(xid).toString());
        }
        TransactionalResource ts;
        switch (flags) {
            // a new transaction
            // NOTE(review): TMJOIN deliberately falls into the new-branch path here rather
            // than attaching to an existing branch — confirm callers never rely on a true join.
            case TMNOFLAGS :
            case TMJOIN :
            default :
                try {
                    ts = createTransactionResource(xid);
                    ts.begin();
                } catch (Exception e) {
                    getLoggerFacade().logSevere("Could not create new transactional resource", e);
                    // NOTE(review): message-only XAException carries no XA error code and drops
                    // the cause; the transaction manager only sees the text.
                    throw new XAException(e.getMessage());
                }
                break;
            case TMRESUME :
                ts = getSuspendedTransactionalResource(xid);
                if (ts == null) {
                    throw new XAException(XAException.XAER_NOTA);
                }
                ts.resume();
                removeSuspendedTransactionalResource(xid);
                break;
        }
        setCurrentlyActiveTransactionalResource(ts);
        addAcitveTransactionalResource(xid, ts);
    }

    /**
     * Factory hook: creates the concrete transactional resource for the given Xid.
     */
    abstract protected TransactionalResource createTransactionResource(Xid xid) throws Exception;

    /**
     * @return the branch the calling thread is currently working on, or null
     */
    protected TransactionalResource getCurrentlyActiveTransactionalResource() {
        TransactionalResource context = (TransactionalResource) activeTransactionBranch.get();
        return context;
    }

    protected void setCurrentlyActiveTransactionalResource(TransactionalResource context) {
        activeTransactionBranch.set(context);
    }

    /**
     * Looks a branch up in the active map first, then in the suspended map.
     */
    protected TransactionalResource getTransactionalResource(Xid xid) {
        TransactionalResource ts = getActiveTransactionalResource(xid);
        if (ts != null)
            return ts;
        else
            return getSuspendedTransactionalResource(xid);
    }

    protected TransactionalResource getActiveTransactionalResource(Xid xid) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        return (TransactionalResource) activeContexts.get(wxid);
    }

    protected TransactionalResource getSuspendedTransactionalResource(Xid xid) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        return (TransactionalResource) suspendedContexts.get(wxid);
    }

    // Name is misspelled ("Acitve") but kept as-is: it is protected API that
    // subclasses elsewhere may call or override.
    protected void addAcitveTransactionalResource(Xid xid, TransactionalResource txContext) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        activeContexts.put(wxid, txContext);
    }

    protected void addSuspendedTransactionalResource(Xid xid, TransactionalResource txContext) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        suspendedContexts.put(wxid, txContext);
    }

    protected void removeActiveTransactionalResource(Xid xid) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        activeContexts.remove(wxid);
    }

    protected void removeSuspendedTransactionalResource(Xid xid) {
        Xid wxid = SlideXidWrapper.wrap(xid, includeBranchInXid());
        suspendedContexts.remove(wxid);
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.grid.web;

import org.openqa.grid.common.exception.GridConfigurationException;
import org.openqa.grid.internal.GridRegistry;
import org.openqa.grid.internal.utils.configuration.GridHubConfiguration;
import org.openqa.grid.shared.Stoppable;
import org.openqa.grid.web.servlet.DisplayHelpServlet;
import org.openqa.grid.web.servlet.DriverServlet;
import org.openqa.grid.web.servlet.Grid1HeartbeatServlet;
import org.openqa.grid.web.servlet.HubStatusServlet;
import org.openqa.grid.web.servlet.HubW3CStatusServlet;
import org.openqa.grid.web.servlet.LifecycleServlet;
import org.openqa.grid.web.servlet.NodeSessionsServlet;
import org.openqa.grid.web.servlet.ProxyStatusServlet;
import org.openqa.grid.web.servlet.RegistrationServlet;
import org.openqa.grid.web.servlet.ResourceServlet;
import org.openqa.grid.web.servlet.TestSessionStatusServlet;
import org.openqa.grid.web.servlet.console.ConsoleServlet;
import org.openqa.grid.web.utils.ExtraServletUtil;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.net.NetworkUtils;
import org.openqa.selenium.remote.server.jmx.JMXHelper;
import org.openqa.selenium.remote.server.jmx.ManagedAttribute;
import org.openqa.selenium.remote.server.jmx.ManagedService;
import org.seleniumhq.jetty9.security.ConstraintMapping;
import org.seleniumhq.jetty9.security.ConstraintSecurityHandler;
import org.seleniumhq.jetty9.server.HttpConfiguration;
import org.seleniumhq.jetty9.server.HttpConnectionFactory;
import org.seleniumhq.jetty9.server.Server;
import org.seleniumhq.jetty9.server.ServerConnector;
import org.seleniumhq.jetty9.servlet.ServletContextHandler;
import org.seleniumhq.jetty9.servlet.ServletHolder;
import org.seleniumhq.jetty9.util.security.Constraint;
import org.seleniumhq.jetty9.util.thread.QueuedThreadPool;

import java.net.BindException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

import javax.servlet.Servlet;

/**
 * Jetty server. Main entry point for everything about the grid. <p> Except for unit tests, this
 * should be a singleton.
 */
@ManagedService(objectName = "org.seleniumhq.grid:type=Hub",
                description = "Selenium Grid Hub")
public class Hub implements Stoppable {

  private static final Logger log = Logger.getLogger(Hub.class.getName());

  private final GridHubConfiguration config;
  private final GridRegistry registry;
  // User-supplied servlets keyed by the path spec they are bound to.
  private final Map<String, Class<? extends Servlet>> extraServlet = new HashMap<>();

  private Server server;

  private void addServlet(String key, Class<? extends Servlet> s) {
    extraServlet.put(key, s);
  }

  /**
   * get the registry backing up the hub state.
   *
   * @return The registry
   */
  public GridRegistry getRegistry() {
    return registry;
  }

  /**
   * Builds the hub from the given configuration (or defaults when {@code null}),
   * instantiates and starts the registry, applies host/port defaults, resolves any
   * user-configured extra servlets, and registers the hub with JMX.
   *
   * @param gridHubConfiguration configuration to use; may be null for defaults
   * @throws GridConfigurationException if the registry class cannot be instantiated
   */
  public Hub(GridHubConfiguration gridHubConfiguration) {
    config = gridHubConfiguration == null ? new GridHubConfiguration() : gridHubConfiguration;

    try {
      // getDeclaredConstructor().newInstance() instead of the deprecated
      // Class.newInstance(): constructor exceptions arrive wrapped instead of being
      // rethrown undeclared; the catch (Throwable) below handles both forms.
      registry = (GridRegistry) Class.forName(config.registry).getDeclaredConstructor().newInstance();
      registry.setHub(this);
      registry.setThrowOnCapabilityNotPresent(config.throwOnCapabilityNotPresent);
    } catch (Throwable e) {
      throw new GridConfigurationException("Error creating class with " + config.registry + " : "
                                           + e.getMessage(), e);
    }

    if (config.host == null) {
      config.host = "0.0.0.0"; //default to all adapters
    }

    if (config.port == null) {
      config.port = 4444;
    }

    if (config.servlets != null) {
      for (String s : config.servlets) {
        Class<? extends Servlet> servletClass = ExtraServletUtil.createServlet(s);
        if (servletClass != null) {
          String path = "/grid/admin/" + servletClass.getSimpleName() + "/*";
          log.info("binding " + servletClass.getCanonicalName() + " to " + path);
          addServlet(path, servletClass);
        }
      }
    }

    // start the registry, now that 'config' is all setup
    registry.start();

    new JMXHelper().register(this);
  }

  /**
   * Registers the mandatory grid servlets plus the optional ones the configuration
   * has not explicitly disabled via {@code isWithOutServlet}.
   */
  private void addDefaultServlets(ServletContextHandler handler) {
    // add mandatory default servlets
    handler.addServlet(RegistrationServlet.class.getName(), "/grid/register/*");

    handler.addServlet(DriverServlet.class.getName(), "/wd/hub/*");
    handler.addServlet(DriverServlet.class.getName(), "/selenium-server/driver/*");

    handler.addServlet(ProxyStatusServlet.class.getName(), "/grid/api/proxy/*");
    handler.addServlet(NodeSessionsServlet.class.getName(), "/grid/api/sessions/*");
    handler.addServlet(HubStatusServlet.class.getName(), "/grid/api/hub/*");

    // the W3C status servlet is shared between both mappings
    ServletHolder statusHolder = new ServletHolder(new HubW3CStatusServlet(getRegistry()));
    handler.addServlet(statusHolder, "/status");
    handler.addServlet(statusHolder, "/wd/hub/status");

    handler.addServlet(TestSessionStatusServlet.class.getName(), "/grid/api/testsession/*");

    // add optional default servlets
    if (!config.isWithOutServlet(ResourceServlet.class)) {
      handler.addServlet(ResourceServlet.class.getName(), "/grid/resources/*");
    }

    if (!config.isWithOutServlet(DisplayHelpServlet.class)) {
      handler.addServlet(DisplayHelpServlet.class.getName(), "/*");
      handler.setInitParameter(DisplayHelpServlet.HELPER_TYPE_PARAMETER, config.role);
    }

    if (!config.isWithOutServlet(ConsoleServlet.class)) {
      handler.addServlet(ConsoleServlet.class.getName(), "/grid/console/*");
      handler.setInitParameter(ConsoleServlet.CONSOLE_PATH_PARAMETER, "/grid/console");
    }

    if (!config.isWithOutServlet(LifecycleServlet.class)) {
      handler.addServlet(LifecycleServlet.class.getName(), "/lifecycle-manager/*");
    }

    if (!config.isWithOutServlet(Grid1HeartbeatServlet.class)) {
      handler.addServlet(Grid1HeartbeatServlet.class.getName(), "/heartbeat");
    }
  }

  /**
   * Builds the Jetty server: thread pool, HTTP connector, servlet context with
   * sessions + security, a TRACE-disabling constraint, and all servlets.
   * Does not start the server; see {@link #start()}.
   */
  private void initServer() {
    try {
      if (config.jettyMaxThreads != null && config.jettyMaxThreads > 0) {
        QueuedThreadPool pool = new QueuedThreadPool();
        pool.setMaxThreads(config.jettyMaxThreads);
        server = new Server(pool);
      } else {
        server = new Server();
      }

      HttpConfiguration httpConfig = new HttpConfiguration();
      httpConfig.setSecureScheme("https");
      httpConfig.setSecurePort(config.port);

      ServerConnector http = new ServerConnector(server, new HttpConnectionFactory(httpConfig));
      http.setHost(config.host);
      if (config.host.equals("0.0.0.0")) {
        // though we bind to all IPv4 interfaces, we need to advertise that connections
        // should come in on a public (non-loopback) interface
        updateHostToNonLoopBackAddressOfThisMachine();
      }
      http.setPort(config.port);
      server.addConnector(http);

      // SESSIONS and SECURITY are bit flags; combine with | rather than + (same value,
      // clearer intent)
      ServletContextHandler root =
          new ServletContextHandler(ServletContextHandler.SESSIONS | ServletContextHandler.SECURITY);
      root.setContextPath("/");

      // disable the TRACE method while allowing everything else
      ConstraintSecurityHandler securityHandler =
          (ConstraintSecurityHandler) root.getSecurityHandler();
      Constraint disableTrace = new Constraint();
      disableTrace.setName("Disable TRACE");
      disableTrace.setAuthenticate(true);
      ConstraintMapping disableTraceMapping = new ConstraintMapping();
      disableTraceMapping.setConstraint(disableTrace);
      disableTraceMapping.setMethod("TRACE");
      disableTraceMapping.setPathSpec("/");
      securityHandler.addConstraintMapping(disableTraceMapping);
      Constraint enableOther = new Constraint();
      enableOther.setName("Enable everything but TRACE");
      ConstraintMapping enableOtherMapping = new ConstraintMapping();
      enableOtherMapping.setConstraint(enableOther);
      enableOtherMapping.setMethodOmissions(new String[] {"TRACE"});
      enableOtherMapping.setPathSpec("/");
      securityHandler.addConstraintMapping(enableOtherMapping);

      server.setHandler(root);

      // make the registry reachable from every servlet via the context attribute
      root.setAttribute(GridRegistry.KEY, registry);

      addDefaultServlets(root);

      // Load any additional servlets provided by the user.
      for (Map.Entry<String, Class<? extends Servlet>> entry : extraServlet.entrySet()) {
        root.addServlet(entry.getValue().getName(), entry.getKey());
      }
    } catch (Throwable e) {
      throw new RuntimeException("Error initializing the hub " + e.getMessage(), e);
    }
  }

  public GridHubConfiguration getConfiguration() {
    return config;
  }

  /**
   * @return the configuration rendered as a plain map for JMX consumption
   */
  @ManagedAttribute(name = "Configuration")
  public Map<?,?> getConfigurationForJMX() {
    Json json = new Json();
    return json.toType(json.toJson(config.toJson()), Map.class);
  }

  /**
   * Initializes and starts the Jetty server. A busy port is reported and swallowed
   * (the hub simply does not come up); any other startup failure is rethrown.
   */
  public void start() {
    initServer();
    try {
      server.start();
    } catch (Exception e) {
      try {
        stop();
      } catch (Exception ignore) {
        // best effort cleanup; the original startup failure is what matters
      }
      if (e instanceof BindException) {
        log.severe(String.format(
            "Port %s is busy, please choose a free port for the hub and specify it using -port option",
            config.port));
        return;
      } else {
        throw new RuntimeException(e);
      }
    }
    log.info("Selenium Grid hub is up and running");
    log.info(String.format("Nodes should register to %s", getRegistrationURL()));
    log.info(String.format("Clients should connect to %s", getWebDriverHubRequestURL()));
  }

  /**
   * Stops the registry, then the Jetty server (best effort).
   */
  public void stop() {
    registry.stop();
    try {
      server.stop();
    } catch (Exception ignore) {
      // shutting down anyway; nothing useful to do with a stop failure
    }
  }

  @ManagedAttribute(name= "URL")
  public URL getUrl() {
    return getUrl("");
  }

  /**
   * Builds an absolute URL rooted at this hub's host/port.
   *
   * @param path path to append (may be empty)
   * @throws RuntimeException wrapping the MalformedURLException if host/port/path
   *         do not form a valid URL (fix: the cause is now chained instead of dropped)
   */
  public URL getUrl(String path) {
    try {
      return new URL("http://" + config.host + ":" + config.port + path);
    } catch (MalformedURLException e) {
      throw new RuntimeException(e.getMessage(), e);
    }
  }

  public URL getRegistrationURL() {
    return getUrl("/grid/register/");
  }

  /**
   * @return URL one would use to request a new WebDriver session on this hub.
   */
  public URL getWebDriverHubRequestURL() {
    return getUrl("/wd/hub");
  }

  public URL getConsoleURL() {
    return getUrl("/grid/console");
  }

  @ManagedAttribute(name = "NewSessionRequestCount")
  public int getNewSessionRequestCount() {
    return getRegistry().getNewSessionRequestCount();
  }

  /**
   * Replaces {@code config.host} with this machine's first non-loopback IPv4 address
   * so the advertised URLs are reachable from other hosts.
   */
  private void updateHostToNonLoopBackAddressOfThisMachine() {
    NetworkUtils utils = new NetworkUtils();
    config.host = utils.getIp4NonLoopbackAddressOfThisMachine().getHostAddress();
  }
}
/*
 * Copyright 2014-2019 Groupon, Inc
 * Copyright 2014-2019 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.catalog.dao;

import java.util.ArrayList;
import java.util.List;

import org.joda.time.DateTime;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.PlanPhasePriceOverride;
import org.killbill.billing.catalog.api.Tier;
import org.killbill.billing.catalog.api.TierPriceOverride;
import org.killbill.billing.catalog.api.TieredBlockPriceOverride;
import org.killbill.billing.catalog.api.Usage;
import org.killbill.billing.catalog.api.UsagePriceOverride;
import org.killbill.clock.Clock;
import org.killbill.commons.jdbi.mapper.LowerToCamelBeanMapperFactory;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.IDBI;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;

import com.google.inject.Inject;

/**
 * JDBI-backed DAO that persists catalog price overrides (plan / phase / usage
 * definitions and their linking rows), deduplicating by looking for an existing
 * definition with identical attributes before creating a new one.
 */
public class DefaultCatalogOverrideDao implements CatalogOverrideDao {

    private final IDBI dbi;
    private final Clock clock;

    @Inject
    public DefaultCatalogOverrideDao(final IDBI dbi, final Clock clock) {
        this.dbi = dbi;
        this.clock = clock;
        // There is no real good place to do that but here (since the sqlDao are NOT EntitySqlDao and DBPProvider belongs in common)... oh well..
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverridePlanDefinitionModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverridePhaseDefinitionModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverridePlanPhaseModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverrideBlockDefinitionModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverrideTierBlockModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverrideTierDefinitionModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverrideUsageDefinitionModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverrideUsageTierModelDao.class));
        ((DBI) dbi).registerMapper(new LowerToCamelBeanMapperFactory(CatalogOverridePhaseUsageModelDao.class));
    }

    /**
     * Returns (or creates, inside a single transaction) the plan-definition row
     * matching the given per-phase overrides. Phase definitions are resolved
     * first; if a plan definition already links exactly those phases it is
     * reused, otherwise a new one plus its plan-phase link rows is created.
     */
    @Override
    public CatalogOverridePlanDefinitionModelDao getOrCreateOverridePlanDefinition(final Plan parentPlan, final DateTime catalogEffectiveDate, final PlanPhasePriceOverride[] resolvedOverride, final InternalCallContext context) {

        return dbi.inTransaction(new TransactionCallback<CatalogOverridePlanDefinitionModelDao>() {
            @Override
            public CatalogOverridePlanDefinitionModelDao inTransaction(final Handle handle, final TransactionStatus status) throws Exception {

                // One slot per phase of the parent plan; null slots mean "no override for that phase".
                final CatalogOverridePhaseDefinitionModelDao[] overridePhaseDefinitionModelDaos = new CatalogOverridePhaseDefinitionModelDao[resolvedOverride.length];
                for (int i = 0; i < resolvedOverride.length; i++) {
                    final PlanPhasePriceOverride curOverride = resolvedOverride[i];
                    if (curOverride != null) {
                        PlanPhase parentPlanPhase = parentPlan.getAllPhases()[i];
                        final CatalogOverridePhaseDefinitionModelDao createdOverridePhaseDefinitionModelDao = getOrCreateOverridePhaseDefinitionFromTransaction(parentPlanPhase, curOverride.getPhaseName(), curOverride.getCurrency(), catalogEffectiveDate, curOverride, handle, context);
                        overridePhaseDefinitionModelDaos[i] = createdOverridePhaseDefinitionModelDao;
                    }
                }

                final CatalogOverridePlanDefinitionSqlDao sqlDao = handle.attach(CatalogOverridePlanDefinitionSqlDao.class);
                // Reuse an existing plan definition if one already links this exact set of phases.
                final Long targetPlanDefinitionRecordId = getOverridePlanDefinitionFromTransaction(overridePhaseDefinitionModelDaos, handle, context);
                if (targetPlanDefinitionRecordId != null) {
                    return sqlDao.getByRecordId(targetPlanDefinitionRecordId, context);
                }

                final CatalogOverridePlanDefinitionModelDao inputPlanDef = new CatalogOverridePlanDefinitionModelDao(parentPlan.getName(), true, catalogEffectiveDate);
                final Long recordId = sqlDao.create(inputPlanDef, context);
                final CatalogOverridePlanDefinitionModelDao resultPlanDef = sqlDao.getByRecordId(recordId, context);

                // Link each overridden phase to the freshly created plan definition.
                for (short i = 0; i < overridePhaseDefinitionModelDaos.length; i++) {
                    if (overridePhaseDefinitionModelDaos[i] != null) {
                        createCatalogOverridePlanPhaseFromTransaction(i, overridePhaseDefinitionModelDaos[i], resultPlanDef, handle, context);
                    }
                }
                return resultPlanDef;
            }
        });
    }

    /**
     * Looks up the record id of a plan definition whose plan-phase link rows match
     * the given phase definitions exactly, or null when none (or no keys) match.
     */
    private Long getOverridePlanDefinitionFromTransaction(final CatalogOverridePhaseDefinitionModelDao[] overridePhaseDefinitionModelDaos, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePlanPhaseSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePlanPhaseSqlDao.class);
        final List<String> keys = new ArrayList<String>();
        for (int i = 0; i < overridePhaseDefinitionModelDaos.length; i++) {
            final CatalogOverridePhaseDefinitionModelDao cur = overridePhaseDefinitionModelDaos[i];
            if (cur != null) {
                // Each key is the concatenation of the phase_number, phase_definition_record_id
                keys.add(getConcatenatedKey(i, cur.getRecordId()).toString());
            }
        }
        return keys.size() > 0 ? sqlDao.getTargetPlanDefinition(keys, keys.size(), context) : null;
    }

    /**
     * Inserts the link row tying phase definition {@code phaseDef} (at position
     * {@code phaseNum}) to plan definition {@code planDef}.
     */
    private void createCatalogOverridePlanPhaseFromTransaction(final short phaseNum, final CatalogOverridePhaseDefinitionModelDao phaseDef, final CatalogOverridePlanDefinitionModelDao planDef, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePlanPhaseSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePlanPhaseSqlDao.class);
        final CatalogOverridePlanPhaseModelDao modelDao = new CatalogOverridePlanPhaseModelDao(phaseNum, phaseDef.getRecordId(), planDef.getRecordId());
        sqlDao.create(modelDao, context);
    }

    /**
     * Returns (or creates) the phase-definition row for a single phase override.
     * Overrides without usage components take the simpler no-usage path; otherwise
     * usage definitions are resolved first and a matching phase definition reused
     * when its linked usage rows coincide.
     */
    private CatalogOverridePhaseDefinitionModelDao getOrCreateOverridePhaseDefinitionFromTransaction(final PlanPhase parentPlanPhase, final String parentPhaseName, final Currency currency, final DateTime catalogEffectiveDate, final PlanPhasePriceOverride override, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePhaseDefinitionSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePhaseDefinitionSqlDao.class);

        // No usage overrides (absent or all-null list): fixed/recurring prices only.
        if (override.getUsagePriceOverrides() == null || (override.getUsagePriceOverrides() != null && isUsageOverrideListHasOnlyNull(override.getUsagePriceOverrides()))) {
            return getOrCreatePhaseDefinitionFromTransactionWithoutUsageOverrides(parentPhaseName, catalogEffectiveDate, override, inTransactionHandle, context);
        }

        final CatalogOverrideUsageDefinitionModelDao[] overrideUsageDefinitionModelDaos = new CatalogOverrideUsageDefinitionModelDao[override.getUsagePriceOverrides().size()];
        List<UsagePriceOverride> resolvedUsageOverrides = override.getUsagePriceOverrides();
        for (int i = 0; i < resolvedUsageOverrides.size(); i++) {
            final UsagePriceOverride curOverride = resolvedUsageOverrides.get(i);
            if (curOverride != null) {
                Usage parentUsage = parentPlanPhase.getUsages()[i];
                final CatalogOverrideUsageDefinitionModelDao createdOverrideUsageDefinitionModelDao = getOrCreateOverrideUsageDefinitionFromTransaction(parentUsage, currency, catalogEffectiveDate, curOverride, inTransactionHandle, context);
                overrideUsageDefinitionModelDaos[i] = createdOverrideUsageDefinitionModelDao;
            }
        }

        // Candidate phase definitions with identical name/currency/prices; reuse the one
        // whose usage link rows match the definitions resolved above.
        final List<Long> targetPhaseDefinitionRecordIds = getOverridePhaseDefinitionFromTransaction(overrideUsageDefinitionModelDaos, inTransactionHandle, context);
        List<CatalogOverridePhaseDefinitionModelDao> results = sqlDao.getByAttributes(parentPhaseName, override.getCurrency().name(), override.getFixedPrice(), override.getRecurringPrice(), context);
        for (CatalogOverridePhaseDefinitionModelDao phase : results) {
            if (targetPhaseDefinitionRecordIds != null && targetPhaseDefinitionRecordIds.contains(phase.getRecordId())) {
                return phase;
            }
        }

        final CatalogOverridePhaseDefinitionModelDao inputPhaseDef = new CatalogOverridePhaseDefinitionModelDao(parentPhaseName, override.getCurrency().name(), override.getFixedPrice(), override.getRecurringPrice(), catalogEffectiveDate);
        final Long recordId = sqlDao.create(inputPhaseDef, context);
        final CatalogOverridePhaseDefinitionModelDao resultPhaseDef = sqlDao.getByRecordId(recordId, context);

        // Link each overridden usage to the freshly created phase definition.
        for (short i = 0; i < overrideUsageDefinitionModelDaos.length; i++) {
            if (overrideUsageDefinitionModelDaos[i] != null) {
                createCatalogOverridePhaseUsageFromTransaction(i, overrideUsageDefinitionModelDaos[i], resultPhaseDef, inTransactionHandle, context);
            }
        }
        return resultPhaseDef;
    }

    /**
     * Simpler get-or-create for a phase definition carrying only fixed/recurring
     * price overrides; a match is reused only if it has no usage link rows.
     */
    private CatalogOverridePhaseDefinitionModelDao getOrCreatePhaseDefinitionFromTransactionWithoutUsageOverrides(String parentPhaseName, final DateTime catalogEffectiveDate, final PlanPhasePriceOverride override, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePhaseDefinitionSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePhaseDefinitionSqlDao.class);
        List<CatalogOverridePhaseDefinitionModelDao> resultPhases = sqlDao.getByAttributes(parentPhaseName, override.getCurrency().name(), override.getFixedPrice(), override.getRecurringPrice(), context);
        for (CatalogOverridePhaseDefinitionModelDao resultPhase : resultPhases) {
            if (resultPhase != null && getOverriddenPhaseUsages(resultPhase.getRecordId(), context).size() == 0) {
                return resultPhase;
            }
        }
        final CatalogOverridePhaseDefinitionModelDao phaseDef = new CatalogOverridePhaseDefinitionModelDao(parentPhaseName, override.getCurrency().name(), override.getFixedPrice(), override.getRecurringPrice(), catalogEffectiveDate);
        final Long recordId = sqlDao.create(phaseDef, context);
        return sqlDao.getByRecordId(recordId, context);
    }

    /**
     * Record ids of phase definitions whose usage link rows match the given usage
     * definitions exactly, or null when there are no keys / no match.
     */
    private List<Long> getOverridePhaseDefinitionFromTransaction(final CatalogOverrideUsageDefinitionModelDao[] overrideUsageDefinitionModelDaos, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePhaseUsageSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePhaseUsageSqlDao.class);
        final List<String> keys = new ArrayList<String>();
        for (int i = 0; i < overrideUsageDefinitionModelDaos.length; i++) {
            final CatalogOverrideUsageDefinitionModelDao cur = overrideUsageDefinitionModelDaos[i];
            // Each key is the concatenation of the usage_number, usage_definition_record_id
            if (cur != null) {
                keys.add(getConcatenatedKey(i, cur.getRecordId()).toString());
            }
        }
        return keys.size() > 0 ? sqlDao.getTargetPhaseDefinition(keys, keys.size(), context) : null;
    }

    /**
     * Inserts the link row tying usage definition {@code usageDef} (at position
     * {@code usageNum}) to phase definition {@code phaseDef}.
     */
    private void createCatalogOverridePhaseUsageFromTransaction(final short usageNum, final CatalogOverrideUsageDefinitionModelDao usageDef, final CatalogOverridePhaseDefinitionModelDao phaseDef, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverridePhaseUsageSqlDao sqlDao = inTransactionHandle.attach(CatalogOverridePhaseUsageSqlDao.class);
        final CatalogOverridePhaseUsageModelDao modelDao = new CatalogOverridePhaseUsageModelDao(usageNum, usageDef.getRecordId(), phaseDef.getRecordId());
        sqlDao.create(modelDao, context);
    }

    /**
     * Returns (or creates) the usage-definition row for a usage override: resolves
     * per-tier definitions first, reuses a usage definition whose tier link rows
     * match, otherwise creates the definition plus its usage-tier link rows.
     */
    private CatalogOverrideUsageDefinitionModelDao getOrCreateOverrideUsageDefinitionFromTransaction(final Usage parentUsage, Currency currency, final DateTime catalogEffectiveDate, final UsagePriceOverride override, final Handle inTransactionHandle, final InternalCallContext context) {
        final List<TierPriceOverride> resolvedTierOverrides = override.getTierPriceOverrides();
        final CatalogOverrideTierDefinitionModelDao[] overrideTierDefinitionModelDaos = new CatalogOverrideTierDefinitionModelDao[resolvedTierOverrides.size()];

        for (int i = 0; i < resolvedTierOverrides.size(); i++) {
            final TierPriceOverride curOverride = resolvedTierOverrides.get(i);
            if (curOverride != null) {
                Tier parentTier = parentUsage.getTiers()[i];
                final CatalogOverrideTierDefinitionModelDao createdOverrideTierDefinitionModelDao = getOrCreateOverrideTierDefinitionFromTransaction(parentTier, curOverride, currency, catalogEffectiveDate, inTransactionHandle, context);
                overrideTierDefinitionModelDaos[i] = createdOverrideTierDefinitionModelDao;
            }
        }

        final CatalogOverrideUsageDefinitionSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideUsageDefinitionSqlDao.class);
        final List<Long> targetUsageDefinitionRecordIds = getOverrideUsageDefinitionFromTransaction(overrideTierDefinitionModelDaos, inTransactionHandle, context);
        List<CatalogOverrideUsageDefinitionModelDao> results = sqlDao.getByAttributes(parentUsage.getName(), context);
        for (CatalogOverrideUsageDefinitionModelDao usage : results) {
            if (targetUsageDefinitionRecordIds != null && targetUsageDefinitionRecordIds.contains(usage.getRecordId())) {
                return usage;
            }
        }

        final CatalogOverrideUsageDefinitionModelDao inputUsageDef = new CatalogOverrideUsageDefinitionModelDao(parentUsage.getName(), parentUsage.getUsageType().name(), currency.name(), null, null, catalogEffectiveDate);
        final Long recordId = sqlDao.create(inputUsageDef, context);
        final CatalogOverrideUsageDefinitionModelDao resultUsageDef = sqlDao.getByRecordId(recordId, context);

        // Link each overridden tier to the freshly created usage definition.
        for (short i = 0; i < overrideTierDefinitionModelDaos.length; i++) {
            if (overrideTierDefinitionModelDaos[i] != null) {
                createCatalogOverrideUsageTierFromTransaction(i, overrideTierDefinitionModelDaos[i], resultUsageDef, inTransactionHandle, context);
            }
        }
        return resultUsageDef;
    }

    /**
     * Record ids of usage definitions whose usage-tier link rows match the given
     * tier definitions exactly, or null when there are no keys / no match.
     * NOTE(review): the method body continues past the end of this chunk.
     */
    private List<Long> getOverrideUsageDefinitionFromTransaction(final CatalogOverrideTierDefinitionModelDao[] overrideTierDefinitionModelDaos, final Handle inTransactionHandle, final InternalCallContext context) {
        final CatalogOverrideUsageTierSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideUsageTierSqlDao.class);
        final List<String> keys = new ArrayList<String>();
        for (int i = 0; i < overrideTierDefinitionModelDaos.length; i++) {
            final CatalogOverrideTierDefinitionModelDao cur = overrideTierDefinitionModelDaos[i];
            if (cur != null) {
                // Each key is the concatenation of the tier_number, tier_definition_record_id
                keys.add(getConcatenatedKey(i, cur.getRecordId()).toString());
            }
        }
        return keys.size() > 0 ?
sqlDao.getTargetUsageDefinition(keys, keys.size(), context) : null; } private void createCatalogOverrideUsageTierFromTransaction(final short tierNum, final CatalogOverrideTierDefinitionModelDao tierDef, final CatalogOverrideUsageDefinitionModelDao usageDef, final Handle inTransactionHandle, final InternalCallContext context) { final CatalogOverrideUsageTierSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideUsageTierSqlDao.class); final CatalogOverrideUsageTierModelDao modelDao = new CatalogOverrideUsageTierModelDao(tierNum, tierDef.getRecordId(), usageDef.getRecordId()); sqlDao.create(modelDao, context); } private CatalogOverrideTierDefinitionModelDao getOrCreateOverrideTierDefinitionFromTransaction(final Tier parentTier, final TierPriceOverride tierPriceOverride,Currency currency, final DateTime catalogEffectiveDate, final Handle inTransactionHandle, final InternalCallContext context){ final List<TieredBlockPriceOverride> resolvedTierBlockOverrides = tierPriceOverride.getTieredBlockPriceOverrides(); final CatalogOverrideBlockDefinitionModelDao[] overrideBlockDefinitionModelDaos = new CatalogOverrideBlockDefinitionModelDao[resolvedTierBlockOverrides.size()]; for (int i = 0; i < resolvedTierBlockOverrides.size(); i++) { final TieredBlockPriceOverride curOverride = resolvedTierBlockOverrides.get(i); if (curOverride != null) { final CatalogOverrideBlockDefinitionModelDao createdOverrideBlockDefinitionModelDao = getOrCreateOverriddenBlockDefinitionFromTransaction(curOverride,catalogEffectiveDate, currency.name(), inTransactionHandle, context); overrideBlockDefinitionModelDaos[i] = createdOverrideBlockDefinitionModelDao; } } final CatalogOverrideTierDefinitionSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideTierDefinitionSqlDao.class); final Long targetTierDefinitionRecordId = getOverrideTierDefinitionFromTransaction(overrideBlockDefinitionModelDaos, inTransactionHandle, context); if (targetTierDefinitionRecordId != null) { return 
sqlDao.getByRecordId(targetTierDefinitionRecordId, context); } final CatalogOverrideTierDefinitionModelDao inputTierDef = new CatalogOverrideTierDefinitionModelDao(currency.name(), null, null, catalogEffectiveDate); final Long recordId = sqlDao.create(inputTierDef, context); final CatalogOverrideTierDefinitionModelDao resultTierDef = sqlDao.getByRecordId(recordId, context); for (short i = 0; i < overrideBlockDefinitionModelDaos.length; i++) { if (overrideBlockDefinitionModelDaos[i] != null) { createCatalogOverrideTierBlockFromTransaction(i, overrideBlockDefinitionModelDaos[i], resultTierDef, inTransactionHandle, context); } } return resultTierDef; } private void createCatalogOverrideTierBlockFromTransaction(final short blockNum, final CatalogOverrideBlockDefinitionModelDao blockDef, final CatalogOverrideTierDefinitionModelDao tierDef, final Handle inTransactionHandle, final InternalCallContext context) { final CatalogOverrideTierBlockSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideTierBlockSqlDao.class); final CatalogOverrideTierBlockModelDao modelDao = new CatalogOverrideTierBlockModelDao(blockNum, blockDef.getRecordId(), tierDef.getRecordId()); sqlDao.create(modelDao, context); } private Long getOverrideTierDefinitionFromTransaction(final CatalogOverrideBlockDefinitionModelDao[] overrideBlockDefinitionModelDaos, final Handle inTransactionHandle, final InternalCallContext context) { final CatalogOverrideTierBlockSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideTierBlockSqlDao.class); final List<String> keys = new ArrayList<String>(); for (int i = 0; i < overrideBlockDefinitionModelDaos.length; i++) { final CatalogOverrideBlockDefinitionModelDao cur = overrideBlockDefinitionModelDaos[i]; if (cur != null) { // Each key is the concatenation of the block_number, block_definition_record_id keys.add(getConcatenatedKey(i, cur.getRecordId()).toString()); } } return keys.size() > 0 ? 
sqlDao.getTargetTierDefinition(keys, keys.size(), context) : null; } private CatalogOverrideBlockDefinitionModelDao getOrCreateOverriddenBlockDefinitionFromTransaction(TieredBlockPriceOverride tieredBlockPriceOverride,final DateTime catalogEffectiveDate, String currency, final Handle inTransactionHandle, final InternalCallContext context) { final CatalogOverrideBlockDefinitionSqlDao sqlDao = inTransactionHandle.attach(CatalogOverrideBlockDefinitionSqlDao.class); CatalogOverrideBlockDefinitionModelDao result = sqlDao.getByAttributes(tieredBlockPriceOverride.getUnitName(), currency, tieredBlockPriceOverride.getPrice(), tieredBlockPriceOverride.getMax(), tieredBlockPriceOverride.getSize(),context); if (result == null) { final CatalogOverrideBlockDefinitionModelDao blockDef = new CatalogOverrideBlockDefinitionModelDao(tieredBlockPriceOverride.getUnitName(),currency, tieredBlockPriceOverride.getPrice(), tieredBlockPriceOverride.getSize(),tieredBlockPriceOverride.getMax(), catalogEffectiveDate); final Long recordId = sqlDao.create(blockDef, context); result = sqlDao.getByRecordId(recordId, context); } return result; } @Override public List<CatalogOverridePhaseDefinitionModelDao> getOverriddenPlanPhases(final Long planDefRecordId, final InternalTenantContext context) { return dbi.inTransaction(new TransactionCallback<List<CatalogOverridePhaseDefinitionModelDao>>() { @Override public List<CatalogOverridePhaseDefinitionModelDao> inTransaction(final Handle handle, final TransactionStatus status) throws Exception { final CatalogOverridePhaseDefinitionSqlDao sqlDao = handle.attach(CatalogOverridePhaseDefinitionSqlDao.class); return sqlDao.getOverriddenPlanPhases(planDefRecordId, context); } }); } @Override public List<CatalogOverrideUsageDefinitionModelDao> getOverriddenPhaseUsages(final Long phaseDefRecordId, final InternalTenantContext context) { return dbi.inTransaction(new TransactionCallback<List<CatalogOverrideUsageDefinitionModelDao>>() { @Override public 
List<CatalogOverrideUsageDefinitionModelDao> inTransaction(final Handle handle, final TransactionStatus status) throws Exception { final CatalogOverrideUsageDefinitionSqlDao sqlDao = handle.attach(CatalogOverrideUsageDefinitionSqlDao.class); return sqlDao.getOverriddenPhaseUsages(phaseDefRecordId, context); } }); } @Override public List<CatalogOverrideTierDefinitionModelDao> getOverriddenUsageTiers(final Long usageDefRecordId, final InternalTenantContext context) { return dbi.inTransaction(new TransactionCallback<List<CatalogOverrideTierDefinitionModelDao>>() { @Override public List<CatalogOverrideTierDefinitionModelDao> inTransaction(final Handle handle, final TransactionStatus status) throws Exception { final CatalogOverrideTierDefinitionSqlDao sqlDao = handle.attach(CatalogOverrideTierDefinitionSqlDao.class); return sqlDao.getOverriddenUsageTiers(usageDefRecordId, context); } }); } @Override public List<CatalogOverrideBlockDefinitionModelDao> getOverriddenTierBlocks(final Long tierDefRecordId, final InternalTenantContext context) { return dbi.inTransaction(new TransactionCallback<List<CatalogOverrideBlockDefinitionModelDao>>() { @Override public List<CatalogOverrideBlockDefinitionModelDao> inTransaction(final Handle handle, final TransactionStatus status) throws Exception { final CatalogOverrideBlockDefinitionSqlDao sqlDao = handle.attach(CatalogOverrideBlockDefinitionSqlDao.class); return sqlDao.getOverriddenTierBlocks(tierDefRecordId, context); } }); } private boolean isUsageOverrideListHasOnlyNull(List<UsagePriceOverride> usagePriceOverrides) { for (UsagePriceOverride override : usagePriceOverrides) { if (override != null) { return false; } } return true; } private StringBuffer getConcatenatedKey(int index, Long recordId) { final StringBuffer key = new StringBuffer(); key.append(index); key.append(","); key.append(recordId); return key; } }
package com.cell.user.dao.entiy; import java.util.ArrayList; import java.util.List; public class SysAuthorityExample { /** * This field was generated by MyBatis Generator. * This field corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected String orderByClause; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected boolean distinct; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected List<Criteria> oredCriteria; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected Integer limitStart; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected Integer limitEnd; /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public SysAuthorityExample() { oredCriteria = new ArrayList<Criteria>(); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public String getOrderByClause() { return orderByClause; } /** * This method was generated by MyBatis Generator. 
* This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void setDistinct(boolean distinct) { this.distinct = distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public boolean isDistinct() { return distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public List<Criteria> getOredCriteria() { return oredCriteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void or(Criteria criteria) { oredCriteria.add(criteria); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } /** * This method was generated by MyBatis Generator. 
* This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void setLimitStart(Integer limitStart) { this.limitStart=limitStart; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public Integer getLimitStart() { return limitStart; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public void setLimitEnd(Integer limitEnd) { this.limitEnd=limitEnd; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public Integer getLimitEnd() { return limitEnd; } /** * This class was generated by MyBatis Generator. 
* This class corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria 
andIdIn(List<Long> values) { addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andOrganizationIdIsNull() { addCriterion("organization_id is null"); return (Criteria) this; } public Criteria andOrganizationIdIsNotNull() { addCriterion("organization_id is not null"); return (Criteria) this; } public Criteria andOrganizationIdEqualTo(Long value) { addCriterion("organization_id =", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdNotEqualTo(Long value) { addCriterion("organization_id <>", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdGreaterThan(Long value) { addCriterion("organization_id >", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdGreaterThanOrEqualTo(Long value) { addCriterion("organization_id >=", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdLessThan(Long value) { addCriterion("organization_id <", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdLessThanOrEqualTo(Long value) { addCriterion("organization_id <=", value, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdIn(List<Long> values) { addCriterion("organization_id in", values, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdNotIn(List<Long> values) { addCriterion("organization_id not in", values, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdBetween(Long value1, Long value2) { addCriterion("organization_id between", 
value1, value2, "organizationId"); return (Criteria) this; } public Criteria andOrganizationIdNotBetween(Long value1, Long value2) { addCriterion("organization_id not between", value1, value2, "organizationId"); return (Criteria) this; } public Criteria andJobIdIsNull() { addCriterion("job_id is null"); return (Criteria) this; } public Criteria andJobIdIsNotNull() { addCriterion("job_id is not null"); return (Criteria) this; } public Criteria andJobIdEqualTo(Long value) { addCriterion("job_id =", value, "jobId"); return (Criteria) this; } public Criteria andJobIdNotEqualTo(Long value) { addCriterion("job_id <>", value, "jobId"); return (Criteria) this; } public Criteria andJobIdGreaterThan(Long value) { addCriterion("job_id >", value, "jobId"); return (Criteria) this; } public Criteria andJobIdGreaterThanOrEqualTo(Long value) { addCriterion("job_id >=", value, "jobId"); return (Criteria) this; } public Criteria andJobIdLessThan(Long value) { addCriterion("job_id <", value, "jobId"); return (Criteria) this; } public Criteria andJobIdLessThanOrEqualTo(Long value) { addCriterion("job_id <=", value, "jobId"); return (Criteria) this; } public Criteria andJobIdIn(List<Long> values) { addCriterion("job_id in", values, "jobId"); return (Criteria) this; } public Criteria andJobIdNotIn(List<Long> values) { addCriterion("job_id not in", values, "jobId"); return (Criteria) this; } public Criteria andJobIdBetween(Long value1, Long value2) { addCriterion("job_id between", value1, value2, "jobId"); return (Criteria) this; } public Criteria andJobIdNotBetween(Long value1, Long value2) { addCriterion("job_id not between", value1, value2, "jobId"); return (Criteria) this; } public Criteria andUserIdIsNull() { addCriterion("user_id is null"); return (Criteria) this; } public Criteria andUserIdIsNotNull() { addCriterion("user_id is not null"); return (Criteria) this; } public Criteria andUserIdEqualTo(Long value) { addCriterion("user_id =", value, "userId"); return (Criteria) this; } 
public Criteria andUserIdNotEqualTo(Long value) { addCriterion("user_id <>", value, "userId"); return (Criteria) this; } public Criteria andUserIdGreaterThan(Long value) { addCriterion("user_id >", value, "userId"); return (Criteria) this; } public Criteria andUserIdGreaterThanOrEqualTo(Long value) { addCriterion("user_id >=", value, "userId"); return (Criteria) this; } public Criteria andUserIdLessThan(Long value) { addCriterion("user_id <", value, "userId"); return (Criteria) this; } public Criteria andUserIdLessThanOrEqualTo(Long value) { addCriterion("user_id <=", value, "userId"); return (Criteria) this; } public Criteria andUserIdIn(List<Long> values) { addCriterion("user_id in", values, "userId"); return (Criteria) this; } public Criteria andUserIdNotIn(List<Long> values) { addCriterion("user_id not in", values, "userId"); return (Criteria) this; } public Criteria andUserIdBetween(Long value1, Long value2) { addCriterion("user_id between", value1, value2, "userId"); return (Criteria) this; } public Criteria andUserIdNotBetween(Long value1, Long value2) { addCriterion("user_id not between", value1, value2, "userId"); return (Criteria) this; } public Criteria andGroupIdIsNull() { addCriterion("group_id is null"); return (Criteria) this; } public Criteria andGroupIdIsNotNull() { addCriterion("group_id is not null"); return (Criteria) this; } public Criteria andGroupIdEqualTo(Long value) { addCriterion("group_id =", value, "groupId"); return (Criteria) this; } public Criteria andGroupIdNotEqualTo(Long value) { addCriterion("group_id <>", value, "groupId"); return (Criteria) this; } public Criteria andGroupIdGreaterThan(Long value) { addCriterion("group_id >", value, "groupId"); return (Criteria) this; } public Criteria andGroupIdGreaterThanOrEqualTo(Long value) { addCriterion("group_id >=", value, "groupId"); return (Criteria) this; } public Criteria andGroupIdLessThan(Long value) { addCriterion("group_id <", value, "groupId"); return (Criteria) this; } public 
Criteria andGroupIdLessThanOrEqualTo(Long value) { addCriterion("group_id <=", value, "groupId"); return (Criteria) this; } public Criteria andGroupIdIn(List<Long> values) { addCriterion("group_id in", values, "groupId"); return (Criteria) this; } public Criteria andGroupIdNotIn(List<Long> values) { addCriterion("group_id not in", values, "groupId"); return (Criteria) this; } public Criteria andGroupIdBetween(Long value1, Long value2) { addCriterion("group_id between", value1, value2, "groupId"); return (Criteria) this; } public Criteria andGroupIdNotBetween(Long value1, Long value2) { addCriterion("group_id not between", value1, value2, "groupId"); return (Criteria) this; } public Criteria andRoleIdsIsNull() { addCriterion("role_ids is null"); return (Criteria) this; } public Criteria andRoleIdsIsNotNull() { addCriterion("role_ids is not null"); return (Criteria) this; } public Criteria andRoleIdsEqualTo(String value) { addCriterion("role_ids =", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsNotEqualTo(String value) { addCriterion("role_ids <>", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsGreaterThan(String value) { addCriterion("role_ids >", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsGreaterThanOrEqualTo(String value) { addCriterion("role_ids >=", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsLessThan(String value) { addCriterion("role_ids <", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsLessThanOrEqualTo(String value) { addCriterion("role_ids <=", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsLike(String value) { addCriterion("role_ids like", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsNotLike(String value) { addCriterion("role_ids not like", value, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsIn(List<String> values) { addCriterion("role_ids in", values, 
"roleIds"); return (Criteria) this; } public Criteria andRoleIdsNotIn(List<String> values) { addCriterion("role_ids not in", values, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsBetween(String value1, String value2) { addCriterion("role_ids between", value1, value2, "roleIds"); return (Criteria) this; } public Criteria andRoleIdsNotBetween(String value1, String value2) { addCriterion("role_ids not between", value1, value2, "roleIds"); return (Criteria) this; } public Criteria andTypeIsNull() { addCriterion("type is null"); return (Criteria) this; } public Criteria andTypeIsNotNull() { addCriterion("type is not null"); return (Criteria) this; } public Criteria andTypeEqualTo(Boolean value) { addCriterion("type =", value, "type"); return (Criteria) this; } public Criteria andTypeNotEqualTo(Boolean value) { addCriterion("type <>", value, "type"); return (Criteria) this; } public Criteria andTypeGreaterThan(Boolean value) { addCriterion("type >", value, "type"); return (Criteria) this; } public Criteria andTypeGreaterThanOrEqualTo(Boolean value) { addCriterion("type >=", value, "type"); return (Criteria) this; } public Criteria andTypeLessThan(Boolean value) { addCriterion("type <", value, "type"); return (Criteria) this; } public Criteria andTypeLessThanOrEqualTo(Boolean value) { addCriterion("type <=", value, "type"); return (Criteria) this; } public Criteria andTypeIn(List<Boolean> values) { addCriterion("type in", values, "type"); return (Criteria) this; } public Criteria andTypeNotIn(List<Boolean> values) { addCriterion("type not in", values, "type"); return (Criteria) this; } public Criteria andTypeBetween(Boolean value1, Boolean value2) { addCriterion("type between", value1, value2, "type"); return (Criteria) this; } public Criteria andTypeNotBetween(Boolean value1, Boolean value2) { addCriterion("type not between", value1, value2, "type"); return (Criteria) this; } } /** * This class was generated by MyBatis Generator. 
* This class corresponds to the database table sys_authority * * @mbggenerated do_not_delete_during_merge Fri Mar 24 17:07:32 CST 2017 */ public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table sys_authority * * @mbggenerated Fri Mar 24 17:07:32 CST 2017 */ public static class Criterion { private String condition; private Object value; private Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); } } }
/*
 * Copyright 2015 Alexey Andreev.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.teavm.classlib.java.nio.charset;

import java.util.Arrays;
import org.teavm.classlib.java.nio.TByteBuffer;
import org.teavm.classlib.java.nio.TCharBuffer;

/**
 * Port of {@code java.nio.charset.CharsetEncoder}: transforms a sequence of
 * chars into bytes of a specific charset. Not thread-safe; follows the
 * standard init → encode(..., false)* → encode(..., true) → flush state machine.
 *
 * @author Alexey Andreev
 */
public abstract class TCharsetEncoder {
    // Internal state-machine values (see encode/flush/reset for transitions).
    private static final int READY = 4;
    private static final int ONGOING = 1;
    private static final int END = 2;
    private static final int FLUSH = 3;
    private static final int INIT = 0;

    private TCharset charset;
    // Bytes substituted for malformed/unmappable input under REPLACE.
    private byte[] replacement;
    private float averageBytesPerChar;
    private float maxBytesPerChar;
    private TCodingErrorAction malformedAction = TCodingErrorAction.REPORT;
    private TCodingErrorAction unmappableAction = TCodingErrorAction.REPORT;
    private int status;

    protected TCharsetEncoder(TCharset cs, float averageBytesPerChar, float maxBytesPerChar, byte[] replacement) {
        checkReplacement(replacement);
        this.charset = cs;
        this.replacement = replacement.clone();
        this.averageBytesPerChar = averageBytesPerChar;
        this.maxBytesPerChar = maxBytesPerChar;
    }

    protected TCharsetEncoder(TCharset cs, float averageBytesPerChar, float maxBytesPerChar) {
        this(cs, averageBytesPerChar, maxBytesPerChar, new byte[] { (byte)'?' });
    }

    public final TCharset charset() {
        return charset;
    }

    /** Returns a defensive copy of the current replacement byte sequence. */
    public final byte[] replacement() {
        return replacement.clone();
    }

    public final TCharsetEncoder replaceWith(byte[] newReplacement) {
        checkReplacement(newReplacement);
        this.replacement = newReplacement.clone();
        implReplaceWith(newReplacement);
        return this;
    }

    /**
     * Validates a candidate replacement per the CharsetEncoder contract: non-null,
     * non-empty, and not LONGER than maxBytesPerChar.
     *
     * <p>Fix: the previous comparison was inverted ({@code length < maxBytesPerChar}),
     * which rejected every legal short replacement — including the default
     * {@code {'?'}} — for any charset whose maxBytesPerChar exceeds 1, making the
     * constructor itself throw.
     */
    private void checkReplacement(byte[] replacement) {
        if (replacement == null || replacement.length == 0 || replacement.length > maxBytesPerChar) {
            throw new IllegalArgumentException("Replacement preconditions do not hold");
        }
    }

    protected void implReplaceWith(@SuppressWarnings("unused") byte[] newReplacement) {
    }

    public TCodingErrorAction malformedInputAction() {
        return malformedAction;
    }

    public final TCharsetEncoder onMalformedInput(TCodingErrorAction newAction) {
        if (newAction == null) {
            throw new IllegalArgumentException("Action must be non-null");
        }
        malformedAction = newAction;
        implOnMalformedInput(newAction);
        return this;
    }

    protected void implOnMalformedInput(@SuppressWarnings("unused") TCodingErrorAction newAction) {
    }

    public TCodingErrorAction unmappableCharacterAction() {
        return unmappableAction;
    }

    public final TCharsetEncoder onUnmappableCharacter(TCodingErrorAction newAction) {
        if (newAction == null) {
            throw new IllegalArgumentException("Action must be non-null");
        }
        unmappableAction = newAction;
        implOnUnmappableCharacter(newAction);
        return this;
    }

    protected void implOnUnmappableCharacter(@SuppressWarnings("unused") TCodingErrorAction newAction) {
    }

    public final float averageBytesPerChar() {
        return averageBytesPerChar;
    }

    public final float maxBytesPerChar() {
        return maxBytesPerChar;
    }

    /**
     * Encodes as many chars as possible from {@code in} into {@code out}.
     * Error results are handled per the configured actions (REPLACE writes the
     * replacement bytes, IGNORE skips, REPORT returns the result to the caller).
     *
     * @throws IllegalStateException if called out of sequence (after flush, or
     *         again after a final call without an intervening reset)
     */
    public final TCoderResult encode(TCharBuffer in, TByteBuffer out, boolean endOfInput) {
        if (status == FLUSH || !endOfInput && status == END) {
            throw new IllegalStateException();
        }
        status = endOfInput ? END : ONGOING;
        TCoderResult result;
        while (true) {
            try {
                result = encodeLoop(in, out);
            } catch (RuntimeException e) {
                // Wrap bugs in concrete encodeLoop implementations, per spec.
                throw new TCoderMalfunctionError(e);
            }
            if (result.isUnderflow()) {
                if (endOfInput) {
                    int remaining = in.remaining();
                    if (remaining > 0) {
                        // Input ended in the middle of a char sequence.
                        result = TCoderResult.malformedForLength(remaining);
                    } else {
                        return result;
                    }
                } else {
                    return result;
                }
            } else if (result.isOverflow()) {
                return result;
            }
            TCodingErrorAction action = result.isUnmappable() ? unmappableAction : malformedAction;
            if (action == TCodingErrorAction.REPLACE) {
                if (out.remaining() < replacement.length) {
                    return TCoderResult.OVERFLOW;
                }
                out.put(replacement);
            } else {
                if (action != TCodingErrorAction.IGNORE) {
                    return result;
                }
            }
            // Skip past the offending input and continue.
            in.position(in.position() + result.length());
        }
    }

    /**
     * Convenience entry point: encodes the whole remaining input into a freshly
     * allocated buffer, growing it on overflow, then drains the encoder via flush.
     *
     * @throws TCharacterCodingException if a malformed/unmappable error is reported
     */
    public final TByteBuffer encode(TCharBuffer in) throws TCharacterCodingException {
        // NOTE(review): returns before reset() for empty input, so a stale status
        // survives — the JDK resets unconditionally; confirm callers don't rely on this.
        if (in.remaining() == 0) {
            return TByteBuffer.allocate(0);
        }
        reset();
        TByteBuffer output = TByteBuffer.allocate((int)(in.remaining() * averageBytesPerChar));
        TCoderResult result;
        while (true) {
            result = encode(in, output, false);
            if (result == TCoderResult.UNDERFLOW) {
                break;
            } else if (result == TCoderResult.OVERFLOW) {
                output = allocateMore(output);
                continue;
            }
            if (result.isError()) {
                result.throwException();
            }
        }
        result = encode(in, output, true);
        if (result.isError()) {
            result.throwException();
        }
        while (true) {
            result = flush(output);
            if (result.isUnderflow()) {
                break;
            } else if (result.isOverflow()) {
                output = allocateMore(output);
            }
        }
        output.flip();
        return output;
    }

    /** Charset-specific encoding step, supplied by concrete subclasses. */
    protected abstract TCoderResult encodeLoop(TCharBuffer in, TByteBuffer out);

    public boolean canEncode(char c) {
        return implCanEncode(TCharBuffer.wrap(new char[] { c }));
    }

    // Trial-encodes under REPORT, restoring the configured actions and
    // resetting the encoder afterwards.
    private boolean implCanEncode(TCharBuffer cb) {
        if (status == FLUSH || status == INIT) {
            status = READY;
        }
        if (status != READY) {
            throw new IllegalStateException();
        }
        TCodingErrorAction malformBak = malformedAction;
        TCodingErrorAction unmapBak = unmappableAction;
        onMalformedInput(TCodingErrorAction.REPORT);
        onUnmappableCharacter(TCodingErrorAction.REPORT);
        boolean result = true;
        try {
            encode(cb);
        } catch (TCharacterCodingException e) {
            result = false;
        }
        onMalformedInput(malformBak);
        onUnmappableCharacter(unmapBak);
        reset();
        return result;
    }

    public boolean canEncode(CharSequence sequence) {
        TCharBuffer cb;
        if (sequence instanceof TCharBuffer) {
            cb = ((TCharBuffer)sequence).duplicate();
        } else {
            cb = TCharBuffer.wrap(sequence);
        }
        return implCanEncode(cb);
    }

    /**
     * Returns a larger buffer containing this buffer's bytes at the same position.
     *
     * <p>Fix: grow by at least one byte. Plain doubling leaves a zero-capacity
     * buffer at zero forever — when {@code (int)(remaining * averageBytesPerChar)}
     * truncates to 0, encode(TCharBuffer) would then loop on OVERFLOW without
     * making progress.
     */
    private TByteBuffer allocateMore(TByteBuffer buffer) {
        byte[] array = buffer.array();
        int newCapacity = Math.max(array.length * 2, 1);
        array = Arrays.copyOf(array, newCapacity);
        TByteBuffer result = TByteBuffer.wrap(array);
        result.position(buffer.position());
        return result;
    }

    /**
     * Flushes any encoder-internal state to {@code out}; legal only after the
     * final encode call (or on a freshly usable encoder).
     */
    public final TCoderResult flush(TByteBuffer out) {
        if (status != END && status != READY) {
            throw new IllegalStateException();
        }
        TCoderResult result = implFlush(out);
        if (result == TCoderResult.UNDERFLOW) {
            status = FLUSH;
        }
        return result;
    }

    protected TCoderResult implFlush(@SuppressWarnings("unused") TByteBuffer out) {
        return TCoderResult.UNDERFLOW;
    }

    /** Returns this encoder to its initial state so it can be reused. */
    public final TCharsetEncoder reset() {
        status = INIT;
        implReset();
        return this;
    }

    protected void implReset() {
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.redis; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.GemFireCache; import org.apache.geode.internal.AvailablePortHelper; import org.apache.geode.test.junit.categories.IntegrationTest; import org.apache.geode.redis.GeodeRedisServer; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import redis.clients.jedis.Jedis; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.geode.distributed.ConfigurationProperties.*; import static org.junit.Assert.*; @Category(IntegrationTest.class) public class ListsJUnitTest { private static Jedis jedis; private static GeodeRedisServer server; private static GemFireCache cache; private static Random rand; private static int port = 6379; @BeforeClass public static void setUp() throws IOException { rand = new Random(); CacheFactory cf = new CacheFactory(); // cf.set("log-file", "redis.log"); cf.set(LOG_LEVEL, "error"); cf.set(MCAST_PORT, "0"); cf.set(LOCATORS, ""); cache = cf.create(); port = AvailablePortHelper.getRandomAvailableTCPPort(); 
server = new GeodeRedisServer("localhost", port); server.start(); jedis = new Jedis("localhost", port, 10000000); } @Test public void testLindex() { int elements = 50; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); for (int i = 0; i < elements; i++) { String gemString = jedis.lindex(key, i); String s = strings.get(i); assertEquals(gemString, s); } } @Test public void testLPopRPush() { int elements = 50; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); for (int i = 0; i < elements; i++) { String gemString = jedis.lpop(key); String s = strings.get(i); assertEquals(s, gemString); } } @Test public void testRPopLPush() { int elements = 500; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.lpush(key, stringArray); for (int i = 0; i < elements; i++) { String gemString = jedis.rpop(key); String s = strings.get(i); assertEquals(gemString, s); } } @Test public void testLRange() { int elements = 10; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); for (int i = 0; i < elements; i++) { List<String> range = jedis.lrange(key, 0, i); assertEquals(range, strings.subList(0, i + 1)); } for (int i = 0; i < elements; i++) { List<String> range = 
jedis.lrange(key, i, -1); assertEquals(range, strings.subList(i, strings.size())); } } @Test public void testLTrim() { int elements = 5; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); // Take off last element one at a time for (int i = elements - 1; i >= 0; i--) { jedis.ltrim(key, 0, i); List<String> range = jedis.lrange(key, 0, -1); assertEquals(range, strings.subList(0, i + 1)); } jedis.rpop(key); jedis.rpush(key, stringArray); // Take off first element one at a time for (int i = 1; i < elements; i++) { jedis.ltrim(key, 1, -1); List<String> range = jedis.lrange(key, 0, -1); List<String> expected = strings.subList(i, strings.size()); assertEquals(range, expected); } } @Test public void testLRPushX() { String key = randString(); String otherKey = "Other key"; jedis.lpush(key, randString()); assertTrue(jedis.lpushx(key, randString()) > 0); assertTrue(jedis.rpushx(key, randString()) > 0); assertTrue(jedis.lpushx(otherKey, randString()) == 0); assertTrue(jedis.rpushx(otherKey, randString()) == 0); jedis.del(key); assertTrue(jedis.lpushx(key, randString()) == 0); assertTrue(jedis.rpushx(key, randString()) == 0); } @Test public void testLRem() { int elements = 5; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); for (int i = 0; i < elements; i++) { String remove = strings.remove(0); jedis.lrem(key, 0, remove); List<String> range = jedis.lrange(key, 0, -1); assertEquals(strings, range); } } @Test public void testLSet() { int elements = 10; ArrayList<String> strings = new ArrayList<String>(); String key = randString(); for (int i = 0; i < 
elements; i++) { String elem = randString(); strings.add(elem); } String[] stringArray = strings.toArray(new String[strings.size()]); jedis.rpush(key, stringArray); for (int i = 0; i < elements; i++) { String s = randString(); strings.set(i, s); jedis.lset(key, i, s); List<String> range = jedis.lrange(key, 0, -1); assertEquals(range, strings); } } private String randString() { int length = rand.nextInt(8) + 5; StringBuilder rString = new StringBuilder(); for (int i = 0; i < length; i++) rString.append((char) (rand.nextInt(57) + 65)); // return rString.toString(); return Long.toHexString(Double.doubleToLongBits(Math.random())); } @After public void flushAll() { jedis.flushAll(); } @AfterClass public static void tearDown() { jedis.close(); cache.close(); server.shutdown(); } }
/**
 * Copyright 2010 Richard Johnson & Orin Eman
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * ---
 *
 * This file is part of java-libpst.
 *
 * java-libpst is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * java-libpst is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with java-libpst. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package com.pff;

/*
import java.io.UnsupportedEncodingException;
/**/
import java.util.*;
import java.io.*;

/**
 * Specific functions for the 7c table type ("Table Context").
 * This is used for attachments.
 * @author Richard Johnson
 */
class PSTTable7C extends PSTTable {

    // Heap block payload size; rows never straddle a block boundary.
    private final int BLOCK_SIZE = 8176;

    // Lazily populated row cache (see getItems()).
    private List<HashMap<Integer, PSTTable7CItem>> items = null;
    // Total number of rows in the Row Matrix (computed in the constructor).
    private int numberOfDataSets = 0;
    // Number of column descriptors (TCOLDESCs) in the TCINFO header.
    private int cCols = 0;
    // TCI_bm: total size of one row in bytes.
    private int TCI_bm = 0;
    // Node containing the Row Matrix data.
    private NodeInfo rowNodeInfo = null;
    // TCI_1b: offset within a row where the cell-existence bitmap starts.
    private int TCI_1b = 0;
    // When a single entity (column id) was requested, its column index; -1 otherwise.
    private int overrideCol = -1;

    /**
     * Parses a Table Context reading all columns.
     */
    protected PSTTable7C(PSTNodeInputStream in, HashMap<Integer, PSTDescriptorItem> subNodeDescriptorItems)
        throws PSTException, java.io.IOException
    {
        this(in, subNodeDescriptorItems, -1);
    }

    /**
     * Parses a Table Context.
     *
     * @param in the node input stream positioned at the table
     * @param subNodeDescriptorItems sub-node descriptors for out-of-line values
     * @param entityToExtract a column (property) id to restrict extraction to,
     *        or -1 to read every column
     * @throws PSTException if the node is not a 7c table
     */
    protected PSTTable7C(PSTNodeInputStream in, HashMap<Integer, PSTDescriptorItem> subNodeDescriptorItems, int entityToExtract)
        throws PSTException, java.io.IOException
    {
        super(in, subNodeDescriptorItems);

        if (tableTypeByte != 0x7c) {
            throw new PSTException("unable to create PSTTable7C, table does not appear to be a 7c!");
        }

        // TCINFO header is in the hidUserRoot node
        NodeInfo tcHeaderNode = getNodeInfo(hidUserRoot);
        int offset = 0;

        // get the TCINFO header information
        cCols = (int)tcHeaderNode.seekAndReadLong(offset+1, 1);
        @SuppressWarnings("unused")
        int TCI_4b = (int)tcHeaderNode.seekAndReadLong(offset+2, 2);
        @SuppressWarnings("unused")
        int TCI_2b = (int)tcHeaderNode.seekAndReadLong(offset+4, 2);
        TCI_1b = (int)tcHeaderNode.seekAndReadLong(offset+6, 2);
        TCI_bm = (int)tcHeaderNode.seekAndReadLong(offset+8, 2);
        int hidRowIndex = (int)tcHeaderNode.seekAndReadLong(offset+10, 4);
        int hnidRows = (int)tcHeaderNode.seekAndReadLong(offset+14, 4);
        // 18..22 hidIndex - deprecated
        // 22... column descriptors
        offset += 22;
        if ( cCols != 0 ) {
            columnDescriptors = new ColumnDescriptor[cCols];
            for ( int col = 0; col < cCols; ++col ) {
                // Each TCOLDESC is 8 bytes.
                columnDescriptors[col] = new ColumnDescriptor(tcHeaderNode, offset);
                if (columnDescriptors[col].id == entityToExtract) {
                    overrideCol = col;
                }
                offset += 8;
            }
        }

        // if we are asking for a specific column, only get that!
        if (overrideCol > -1) {
            cCols = overrideCol + 1;
        }

        // Read the key table: maps row context ids to row indexes.
        keyMap = new HashMap<Integer, Integer>();
        NodeInfo keyTableInfo = getNodeInfo(hidRoot);
        numberOfKeys = keyTableInfo.length() / (sizeOfItemKey+sizeOfItemValue);
        offset = 0;
        for (int x = 0; x < numberOfKeys; x++) {
            int Context = (int)keyTableInfo.seekAndReadLong(offset, sizeOfItemKey);
            offset += sizeOfItemKey;
            int RowIndex = (int)keyTableInfo.seekAndReadLong(offset, sizeOfItemValue);
            offset += sizeOfItemValue;
            keyMap.put(Context, RowIndex);
        }

        // Read the Row Matrix
        rowNodeInfo = getNodeInfo(hnidRows);

        description += "Number of keys: "+numberOfKeys+"\n"+
            "Number of columns: "+cCols+"\n"+
            "Row Size: "+TCI_bm+"\n"+
            "hidRowIndex: "+hidRowIndex+"\n"+
            "hnidRows: "+hnidRows+"\n";

        // Rows are packed whole into BLOCK_SIZE blocks; the last, partial block
        // holds the remainder.
        int numberOfBlocks = rowNodeInfo.length() / BLOCK_SIZE;
        int numberOfRowsPerBlock = BLOCK_SIZE / TCI_bm;
        @SuppressWarnings("unused")
        int blockPadding = BLOCK_SIZE - (numberOfRowsPerBlock * TCI_bm);
        numberOfDataSets = (numberOfBlocks * numberOfRowsPerBlock) + ((rowNodeInfo.length() % BLOCK_SIZE) / TCI_bm);
    }

    /**
     * get all the items parsed out of this table.
     * Rows are read once and cached for subsequent calls.
     * @return one map of property-id to item per row
     */
    List<HashMap<Integer, PSTTable7CItem>> getItems()
        throws PSTException, IOException
    {
        if ( items == null ) {
            items = getItems(-1, -1);
        }
        return items;
    }

    /**
     * Reads a range of rows from the Row Matrix.
     *
     * @param startAtRecord first row to read, or -1 for "all rows"
     * @param numberOfRecordsToReturn how many rows to read (clamped to the row
     *        count; ignored when startAtRecord is -1)
     * @return one map of property-id to item per row read
     */
    List<HashMap<Integer, PSTTable7CItem>> getItems(int startAtRecord, int numberOfRecordsToReturn)
        throws PSTException, IOException
    {
        List<HashMap<Integer, PSTTable7CItem>> itemList = new ArrayList<HashMap<Integer, PSTTable7CItem>>();

        // okay, work out the number of records we have
        int numberOfBlocks = rowNodeInfo.length() / BLOCK_SIZE;
        int numberOfRowsPerBlock = BLOCK_SIZE / TCI_bm;
        int blockPadding = BLOCK_SIZE - (numberOfRowsPerBlock * TCI_bm);
        numberOfDataSets = (numberOfBlocks * numberOfRowsPerBlock) + ((rowNodeInfo.length() % BLOCK_SIZE) / TCI_bm);

        if (startAtRecord == -1) {
            numberOfRecordsToReturn = numberOfDataSets;
            startAtRecord = 0;
        }

        // repeat the reading process for every dataset
        int currentValueArrayStart = ((startAtRecord / numberOfRowsPerBlock) * BLOCK_SIZE) + ((startAtRecord % numberOfRowsPerBlock) * TCI_bm);

        if (numberOfRecordsToReturn > this.getRowCount() - startAtRecord) {
            numberOfRecordsToReturn = this.getRowCount() - startAtRecord;
        }

        int dataSetNumber = 0;
        for (int rowCounter = 0; rowCounter < numberOfRecordsToReturn; rowCounter++) {
            HashMap<Integer, PSTTable7CItem> currentItem = new HashMap<Integer, PSTTable7CItem>();
            // add on some padding for block boundries?
            if (rowNodeInfo.in.getPSTFile().getPSTFileType() == PSTFile.PST_TYPE_ANSI) {
                if (currentValueArrayStart >= BLOCK_SIZE) {
                    // NOTE(review): ANSI files appear to carry 4 extra bytes per
                    // block crossed — confirm against the MS-PST spec.
                    currentValueArrayStart = currentValueArrayStart + (4) * (currentValueArrayStart / BLOCK_SIZE);
                }
                if (rowNodeInfo.startOffset+ currentValueArrayStart + TCI_1b > rowNodeInfo.in.length()) {
                    continue;
                }
            } else {
                if ((currentValueArrayStart % BLOCK_SIZE) > BLOCK_SIZE - TCI_bm) {
                    // adjust! Row would straddle a block boundary; skip the padding.
                    currentValueArrayStart += blockPadding;
                    if (currentValueArrayStart + TCI_bm < rowNodeInfo.length()) {
                        continue;
                    }
                }
            }

            // Cell-existence bitmap: one bit per column, located TCI_1b bytes
            // into the row.
            byte[] bitmap = new byte[(cCols+7)/8];
            rowNodeInfo.in.seek(rowNodeInfo.startOffset+ currentValueArrayStart + TCI_1b);
            rowNodeInfo.in.read(bitmap);

            int id = (int)rowNodeInfo.seekAndReadLong(currentValueArrayStart, 4);

            // Put into the item map as PidTagLtpRowId (0x67F2)
            PSTTable7CItem item = new PSTTable7CItem();
            item.itemIndex = -1;
            item.entryValueType = 3;
            item.entryType = 0x67F2;
            item.entryValueReference = id;
            item.isExternalValueReference = true;
            currentItem.put(item.entryType, item);

            int col = 0;
            if (overrideCol > -1) {
                // Only the requested column is extracted (cCols was clamped to
                // overrideCol+1 in the constructor, so the loop runs once).
                col = overrideCol;
            }
            for ( ; col < cCols; ++col ) {
                // Does this column exist for this row?
                int bitIndex = columnDescriptors[col].iBit / 8;
                int bit = columnDescriptors[col].iBit % 8;

                if ( (bitmap[bitIndex] & (1<<bit)) == 0 ) {
                    // Column doesn't exist
                    continue;
                }
                item = new PSTTable7CItem();
                item.itemIndex = col;
                item.entryValueType = columnDescriptors[col].type;
                item.entryType = columnDescriptors[col].id;
                item.entryValueReference = 0;
                // cbData is the cell width in bytes; it determines how the
                // value is stored and whether it is inline or a reference.
                switch ( columnDescriptors[col].cbData ) {
                case 1: // Single byte data
                    item.entryValueReference = (int)rowNodeInfo.seekAndReadLong(currentValueArrayStart+columnDescriptors[col].ibData, 1) & 0xFF;
                    item.isExternalValueReference = true;
                    break;
                case 2: // Two byte data
                    item.entryValueReference = (int)rowNodeInfo.seekAndReadLong(currentValueArrayStart+columnDescriptors[col].ibData, 2) & 0xFFFF;
                    item.isExternalValueReference = true;
                    break;
                case 8: // 8 byte data, copied out verbatim
                    item.data = new byte[8];
                    rowNodeInfo.in.seek(rowNodeInfo.startOffset +currentValueArrayStart+columnDescriptors[col].ibData);
                    rowNodeInfo.in.read(item.data);
                    break;
                default: // Four byte data
                    item.entryValueReference = (int)rowNodeInfo.seekAndReadLong(currentValueArrayStart+columnDescriptors[col].ibData, 4);
                    if ( columnDescriptors[col].type == 0x0003 || columnDescriptors[col].type == 0x0004
                        || columnDescriptors[col].type == 0x000A )
                    {
                        // True 32bit data
                        item.isExternalValueReference = true;
                        break;
                    }

                    // Variable length data so it's an hnid
                    if ( (item.entryValueReference & 0x1F) != 0 ) {
                        // Some kind of external reference...
                        item.isExternalValueReference = true;
                        break;
                    }

                    if ( item.entryValueReference == 0 ) {
                        // hnid of zero: present but empty value.
                        item.data = new byte[0];
                        break;
                    } else {
                        // Inline hnid: resolve it and copy the value bytes out.
                        NodeInfo entryInfo = getNodeInfo(item.entryValueReference);
                        item.data = new byte[entryInfo.length()];
                        entryInfo.in.seek(entryInfo.startOffset);
                        entryInfo.in.read(item.data);
                    }
                    break;
                }

                currentItem.put(item.entryType, item);
                description += item.toString()+"\n\n";
            }
            itemList.add(dataSetNumber, currentItem);
            dataSetNumber++;
            currentValueArrayStart += TCI_bm;
        }

        return itemList;
    }

    /**
     * One TCOLDESC: describes a single column's property tag, cell offset,
     * cell width and existence-bitmap bit.
     */
    class ColumnDescriptor {
        ColumnDescriptor(NodeInfo nodeInfo, int offset)
            throws PSTException, IOException
        {
            type = ((int)nodeInfo.seekAndReadLong(offset, 2) & 0xFFFF);
            id = (int)(nodeInfo.seekAndReadLong(offset+2, 2) & 0xFFFF);
            ibData = (int)(nodeInfo.seekAndReadLong(offset+4, 2) & 0xFFFF);
            // NOTE(review): the two raw read() calls below rely on
            // seekAndReadLong(offset+4, 2) leaving the stream positioned at
            // offset+6, so they pick up bytes offset+6 (cbData) and
            // offset+7 (iBit) — confirm that seekAndReadLong has this property.
            cbData = (int)nodeInfo.in.read() & 0xFF;
            iBit = (int)nodeInfo.in.read() & 0xFF;
        }
        int type;    // property value type
        int id;      // property id
        int ibData;  // offset of this cell within a row
        int cbData;  // cell width in bytes
        int iBit;    // bit index in the cell-existence bitmap
    }

    /** @return total number of rows in this table. */
    @Override
    public int getRowCount() {
        return this.numberOfDataSets;
    }

    @Override
    public String toString() {
        return this.description;
    }

    /** @return a debug dump of the cached rows, or "" if none were read yet. */
    public String getItemsString() {
        if ( items == null ) {
            return "";
        }
        return items.toString();
    }

    ColumnDescriptor[] columnDescriptors = null;
    HashMap<Integer, Integer> keyMap = null;
}
package es.carm.mydom.filters;

import java.io.IOException;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.naming.resources.Resource;

import es.carm.mydom.parser.BeanMethod;
import es.carm.mydom.parser.HTMLCompiler;
import es.carm.mydom.parser.Field;
import es.carm.mydom.parser.HTMLProgram;
import es.carm.mydom.parser.ParserException;
import es.carm.mydom.parser.Program;
import es.carm.mydom.parser.ProgramContext;
import es.carm.mydom.entity.Database;
import es.carm.mydom.entity.Document;
import es.carm.mydom.entity.DominoSession;
import es.carm.mydom.filters.utils.AgentResponse;
import es.carm.mydom.filters.utils.Dispatcher;
import es.carm.mydom.filters.utils.Resources;
import es.carm.mydom.servlet.HttpFilter;
import es.carm.mydom.servlet.ServerConfig;
import es.carm.mydom.utils.URLComponents;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * HTTP filter that serves Domino-style view actions:
 * <ul>
 *   <li>{@code openview} / {@code searchview} — compiles and executes the view's
 *       HTML program and streams the rendered result;</li>
 *   <li>{@code getdata} / {@code getcount} — returns the view's data (or row
 *       count) as JSON via the view bean, without rendering the program.</li>
 * </ul>
 */
public class ViewAction implements HttpFilter {

    final Logger log = LoggerFactory.getLogger(ViewAction.class);

    private ServerConfig cfg;

    /**
     * Dispatches the request according to the action name parsed from the URL.
     *
     * @param domSession current Domino session (database, URL components, context document)
     * @param request incoming servlet request
     * @param response servlet response; errors are reported via sendError
     */
    public void doFilter(DominoSession domSession, HttpServletRequest request, HttpServletResponse response) throws IOException,ServletException{
        Database database = domSession.getDatabase();
        URLComponents urlComponents = domSession.getUrlComponents();
        String actionName = urlComponents.getActionName();
        String fileName = urlComponents.getElFileName();
        String formName = urlComponents.getElName();
        // NOTE(review): 'path' is never used in this method.
        String path = urlComponents.getPath();

        if ((actionName.equals("openview"))||(actionName.equals("searchview"))){
            // Resolve the view element from its full name.
            Resource resource = Resources.getResource(cfg,"view",fileName);
            if (resource==null){
                response.sendError(HttpServletResponse.SC_NOT_FOUND,"Elemento no encontrado");
                return;
            }
            try {
                // Load and compile the HTML program backing the view.
                String value = Resources.getStringFromStream(resource,cfg.getResourceCharset());
                HTMLCompiler compiler = new HTMLCompiler(value,cfg.getResourceCharset(),cfg.getDirContext());
                HTMLProgram prg = (HTMLProgram) compiler.compile();

                // Record the current view (and its backing bean) in the session;
                // fileName starts with '/', hence the substring(1).
                domSession.setCurrentView(database.getView(domSession, fileName.substring(1)));

                // Refresh the context document.
                // TODO In this case the context document holds the CGI variables.
                Document doc = domSession.getDocumentContext();
                doc.setEditMode(false);
                doc.setForm(formName);
                // TODO forms are currently limited to a single directory

                // Force all default values to be computed.
                fillFields(ProgramContext.ACTION_NEW,prg,domSession);

                // Run the WebQueryOpen agent, if one is defined.
                if (prg.getQueryOpen()!=null) {
                    AgentResponse agentResponse = new AgentResponse(response);
                    log.debug("EJECUTANDO AGENTE:"+prg.getQueryOpen().getMethod()+" MODIFIED="+agentResponse.isModified());
                    boolean doOpen=domSession.executeAgent(prg.getQueryOpen(),request,agentResponse);
                    // Serve the request only if the agent did not already write to
                    // the response, and did not cancel the open.
                    log.debug("MODIFIED="+agentResponse.isModified());
                    if (agentResponse.isModified()||(doOpen==false)) return;
                }

                // Configure the program for the openview action.
                ProgramContext pc = new ProgramContext(prg);
                pc.setDomSession(domSession);
                // Behave as if this were a read.
                pc.setActionType(ProgramContext.ACTION_READ);
                pc.setDocAct(domSession.getDocumentContext());

                // Execute the program and stream the result to the client.
                List<String> res = prg.execute(pc);
                log.debug("doc="+doc.toString());
                Dispatcher.sendArrayString(res,cfg.getResourceCharset(),response);
            } catch (Exception e) {
                e.printStackTrace();
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,e.getMessage());
                return;
            }
        } else if ((actionName.equals("getdata"))||(actionName.equals("getcount"))){
            // Data access:
            //   GET /view/?getData     -> getAll
            //   GET /view/key?getData  -> getOne
            // Resolve the element from its full name.
            String elpath = urlComponents.getElPath();
            String elname = urlComponents.getElName();
            String viewName;
            String keyName;
            boolean getCount = actionName.equals("getcount");

            System.out.println("Method:"+request.getMethod());
            System.out.println("ActionName:"+actionName);
            System.out.println("fileName="+urlComponents.getElFileName());
            System.out.println("formName="+urlComponents.getElName());
            System.out.println("formPath="+urlComponents.getElPath());

            // Work out whether a key was supplied.
            if (elpath.equals("/")){
                // No key.
                viewName = elname;
                keyName = null;
            } else {
                // A single key is present (strip the leading and trailing '/').
                viewName = elpath.substring(1,elpath.length()-1);
                keyName = elname;
                if (keyName.equals("")) {
                    // Special sub-case: a trailing '/' with no key after it.
                    keyName = null;
                }
            }

            // Resolve the view.
            Resource resource = Resources.getResource(cfg,"view",viewName);
            if (resource==null){
                response.sendError(HttpServletResponse.SC_NOT_FOUND,"Elemento no encontrado:"+viewName);
                return;
            }
            try {
                // Load and compile the HTML program backing the view.
                String value = Resources.getStringFromStream(resource,cfg.getResourceCharset());
                HTMLCompiler compiler = new HTMLCompiler(value,cfg.getResourceCharset(),cfg.getDirContext());
                HTMLProgram prg = (HTMLProgram) compiler.compile();

                // Record the current view (and its backing bean) in the session.
                domSession.setCurrentView(database.getView(domSession, viewName));

                // Refresh the context document.
                // TODO In this case the context document holds the CGI variables.
                Document doc = domSession.getDocumentContext();
                doc.setEditMode(false);
                doc.setForm(viewName);
                // TODO forms are currently limited to a single directory

                // Pass the view execution parameters through the document,
                // since they cannot be passed via the URL.
                doc.setItemValue("exportMode", "json"); // tells the execution which export mode to use
                doc.setItemValue("keyName", keyName);
                doc.setItemValue("getCount", getCount?"true":"false");

                // Force all default values to be computed.
                fillFields(ProgramContext.ACTION_NEW,prg,domSession);

                // Run the WebQueryOpen agent, if one is defined.
                if (prg.getQueryOpen()!=null) {
                    AgentResponse agentResponse = new AgentResponse(response);
                    log.debug("EJECUTANDO AGENTE:"+prg.getQueryOpen().getMethod()+" MODIFIED="+agentResponse.isModified());
                    boolean doOpen=domSession.executeAgent(prg.getQueryOpen(),request,agentResponse);
                    // Serve the request only if the agent did not already write to
                    // the response, and did not cancel the open.
                    log.debug("MODIFIED="+agentResponse.isModified());
                    if (agentResponse.isModified()||(doOpen==false)) return;
                }

                // For getData/getCount the program itself is NOT executed;
                // the view bean's body is fetched directly instead.
                String res = domSession.executeGet(BeanMethod.getBeanMethod("domServerViewBean.getComputedText_viewBody"));
                log.debug("doc="+doc.toString());
                Dispatcher.sendString(res,"application/json",cfg.getResourceCharset(),response);
            } catch (Exception e) {
                e.printStackTrace();
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,e.getMessage());
                return;
            }
        } else {
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,"No se ha definido correctamente el dispatcher para esta accion:"+actionName);
            return;
        }
    }

    /**
     * Computes default/computed field values into the context document.
     *
     * <p>A field is (re)computed when the action is NEW, when it is
     * computed-for-display, or when the action is SAVE and the field is
     * computed. Fields without a default-value formula are set to "".</p>
     *
     * @param actionType one of the ProgramContext.ACTION_* constants
     * @param prg compiled program whose fields are evaluated
     * @param domSession session used to evaluate default-value formulas
     */
    private void fillFields(int actionType,HTMLProgram prg,DominoSession domSession) throws ParserException {
        Document docAct = domSession.getDocumentContext();
        String value = "";
        for(Field field:prg.getFields()){
            boolean calc=false;
            if (actionType==ProgramContext.ACTION_NEW)
                calc = true;
            else if (field.getItemKind()==Field.KIND_COMPUTED_FOR_DISPLAY)
                calc = true;
            else if ((actionType==ProgramContext.ACTION_SAVE)&&(field.getItemKind()==Field.KIND_COMPUTED))
                calc = true;
            if (calc){
                value="";
                if (field.getDefaultValue()!=null)
                    value = domSession.executeGet(field.getDefaultValue());
                docAct.setItemValue(field.getItemName(), value);
            }
        }
    }

    public ServerConfig getCfg() {
        return cfg;
    }

    public void setCfg(ServerConfig cfg) {
        this.cfg = cfg;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht; import org.apache.ignite.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.distributed.*; import org.apache.ignite.internal.processors.cache.transactions.*; import org.apache.ignite.internal.processors.cache.version.*; import org.apache.ignite.internal.util.tostring.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.lang.*; import org.apache.ignite.plugin.extensions.communication.*; import java.io.*; import java.nio.*; import java.util.*; /** * DHT transaction prepare response. */ public class GridDhtTxPrepareResponse extends GridDistributedTxPrepareResponse { /** */ private static final long serialVersionUID = 0L; /** Evicted readers. */ @GridToStringInclude @GridDirectCollection(IgniteTxKey.class) private Collection<IgniteTxKey> nearEvicted; /** Future ID. */ private IgniteUuid futId; /** Mini future ID. */ private IgniteUuid miniId; /** Invalid partitions. */ @GridToStringInclude @GridDirectCollection(int.class) private Collection<Integer> invalidParts; /** Preload entries. 
*/ @GridDirectCollection(GridCacheEntryInfo.class) private List<GridCacheEntryInfo> preloadEntries; /** * Empty constructor required by {@link Externalizable}. */ public GridDhtTxPrepareResponse() { // No-op. } /** * @param xid Xid version. * @param futId Future ID. * @param miniId Mini future ID. */ public GridDhtTxPrepareResponse(GridCacheVersion xid, IgniteUuid futId, IgniteUuid miniId) { super(xid); assert futId != null; assert miniId != null; this.futId = futId; this.miniId = miniId; } /** * @param xid Xid version. * @param futId Future ID. * @param miniId Mini future ID. * @param err Error. */ public GridDhtTxPrepareResponse(GridCacheVersion xid, IgniteUuid futId, IgniteUuid miniId, Throwable err) { super(xid, err); assert futId != null; assert miniId != null; this.futId = futId; this.miniId = miniId; } /** * @return Evicted readers. */ public Collection<IgniteTxKey> nearEvicted() { return nearEvicted; } /** * @param nearEvicted Evicted readers. */ public void nearEvicted(Collection<IgniteTxKey> nearEvicted) { this.nearEvicted = nearEvicted; } /** * @return Future ID. */ public IgniteUuid futureId() { return futId; } /** * @return Mini future ID. */ public IgniteUuid miniId() { return miniId; } /** * @return Invalid partitions. */ public Collection<Integer> invalidPartitions() { return invalidParts; } /** * @param invalidParts Invalid partitions. */ public void invalidPartitions(Collection<Integer> invalidParts) { this.invalidParts = invalidParts; } /** * Gets preload entries found on backup node. * * @return Collection of entry infos need to be preloaded. */ public Collection<GridCacheEntryInfo> preloadEntries() { return preloadEntries == null ? Collections.<GridCacheEntryInfo>emptyList() : preloadEntries; } /** * Adds preload entry. * * @param info Info to add. 
*/ public void addPreloadEntry(GridCacheEntryInfo info) { assert info.cacheId() != 0; if (preloadEntries == null) preloadEntries = new ArrayList<>(); preloadEntries.add(info); } /** {@inheritDoc} * @param ctx*/ @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException { super.prepareMarshal(ctx); if (nearEvicted != null) { for (IgniteTxKey key : nearEvicted) { GridCacheContext cctx = ctx.cacheContext(key.cacheId()); key.prepareMarshal(cctx); } } if (preloadEntries != null) { for (GridCacheEntryInfo info : preloadEntries) { GridCacheContext cctx = ctx.cacheContext(info.cacheId()); info.marshal(cctx); } } } /** {@inheritDoc} */ @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException { super.finishUnmarshal(ctx, ldr); if (nearEvicted != null) { for (IgniteTxKey key : nearEvicted) { GridCacheContext cctx = ctx.cacheContext(key.cacheId()); key.finishUnmarshal(cctx, ldr); } } if (preloadEntries != null) { for (GridCacheEntryInfo info : preloadEntries) { GridCacheContext cctx = ctx.cacheContext(info.cacheId()); info.unmarshal(cctx, ldr); } } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridDhtTxPrepareResponse.class, this, "super", super.toString()); } /** {@inheritDoc} */ @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) { writer.setBuffer(buf); if (!super.writeTo(buf, writer)) return false; if (!writer.isHeaderWritten()) { if (!writer.writeHeader(directType(), fieldsCount())) return false; writer.onHeaderWritten(); } switch (writer.state()) { case 8: if (!writer.writeIgniteUuid("futId", futId)) return false; writer.incrementState(); case 9: if (!writer.writeCollection("invalidParts", invalidParts, MessageCollectionItemType.INT)) return false; writer.incrementState(); case 10: if (!writer.writeIgniteUuid("miniId", miniId)) return false; writer.incrementState(); case 11: if (!writer.writeCollection("nearEvicted", nearEvicted, 
MessageCollectionItemType.MSG)) return false; writer.incrementState(); case 12: if (!writer.writeCollection("preloadEntries", preloadEntries, MessageCollectionItemType.MSG)) return false; writer.incrementState(); } return true; } /** {@inheritDoc} */ @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) { reader.setBuffer(buf); if (!reader.beforeMessageRead()) return false; if (!super.readFrom(buf, reader)) return false; switch (reader.state()) { case 8: futId = reader.readIgniteUuid("futId"); if (!reader.isLastRead()) return false; reader.incrementState(); case 9: invalidParts = reader.readCollection("invalidParts", MessageCollectionItemType.INT); if (!reader.isLastRead()) return false; reader.incrementState(); case 10: miniId = reader.readIgniteUuid("miniId"); if (!reader.isLastRead()) return false; reader.incrementState(); case 11: nearEvicted = reader.readCollection("nearEvicted", MessageCollectionItemType.MSG); if (!reader.isLastRead()) return false; reader.incrementState(); case 12: preloadEntries = reader.readCollection("preloadEntries", MessageCollectionItemType.MSG); if (!reader.isLastRead()) return false; reader.incrementState(); } return true; } /** {@inheritDoc} */ @Override public byte directType() { return 35; } /** {@inheritDoc} */ @Override public byte fieldsCount() { return 13; } }
package com.badlogic.gdx.graphics.g3d.loader; import com.badlogic.gdx.assets.AssetLoaderParameters; import com.badlogic.gdx.assets.loaders.FileHandleResolver; import com.badlogic.gdx.assets.loaders.ModelLoader; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.GL10; import com.badlogic.gdx.graphics.VertexAttribute; import com.badlogic.gdx.graphics.g3d.Model; import com.badlogic.gdx.graphics.g3d.model.data.ModelAnimation; import com.badlogic.gdx.graphics.g3d.model.data.ModelData; import com.badlogic.gdx.graphics.g3d.model.data.ModelMaterial; import com.badlogic.gdx.graphics.g3d.model.data.ModelMesh; import com.badlogic.gdx.graphics.g3d.model.data.ModelMeshPart; import com.badlogic.gdx.graphics.g3d.model.data.ModelNode; import com.badlogic.gdx.graphics.g3d.model.data.ModelNodeAnimation; import com.badlogic.gdx.graphics.g3d.model.data.ModelNodeKeyframe; import com.badlogic.gdx.graphics.g3d.model.data.ModelNodePart; import com.badlogic.gdx.graphics.g3d.model.data.ModelTexture; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.math.Quaternion; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.math.Vector3; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.ArrayMap; import com.badlogic.gdx.utils.BaseJsonReader; import com.badlogic.gdx.utils.GdxRuntimeException; import com.badlogic.gdx.utils.JsonValue; import com.badlogic.gdx.utils.UBJsonReader; public class G3dModelLoader extends ModelLoader<AssetLoaderParameters<Model>> { public static final short VERSION_HI = 0; public static final short VERSION_LO = 1; protected final BaseJsonReader reader; public G3dModelLoader(final BaseJsonReader reader) { this(reader, null); } public G3dModelLoader(BaseJsonReader reader, FileHandleResolver resolver) { super(resolver); this.reader = reader; } @Override public ModelData loadModelData (FileHandle fileHandle, AssetLoaderParameters<Model> parameters) { return 
parseModel(fileHandle); } public ModelData parseModel (FileHandle handle) { JsonValue json = reader.parse(handle); ModelData model = new ModelData(); JsonValue version = json.require("version"); model.version[0] = (short)version.getInt(0); model.version[1] = (short)version.getInt(1); if (model.version[0] != VERSION_HI || model.version[1] != VERSION_LO) throw new GdxRuntimeException("Model version not supported"); model.id = json.getString("id", ""); parseMeshes(model, json); parseMaterials(model, json, handle.parent().path()); parseNodes(model, json); parseAnimations(model, json); return model; } private void parseMeshes (ModelData model, JsonValue json) { JsonValue meshes = json.require("meshes"); model.meshes.ensureCapacity(meshes.size()); for (JsonValue mesh = meshes.child(); mesh != null; mesh = mesh.next()) { ModelMesh jsonMesh = new ModelMesh(); String id = mesh.getString("id", ""); jsonMesh.id = id; JsonValue attributes = mesh.require("attributes"); jsonMesh.attributes = parseAttributes(attributes); JsonValue vertices = mesh.require("vertices"); float[] verts = new float[vertices.size()]; int j = 0; for (JsonValue value = vertices.child(); value != null; value = value.next(), j++) { verts[j] = value.asFloat(); } jsonMesh.vertices = verts; JsonValue meshParts = mesh.require("parts"); Array<ModelMeshPart> parts = new Array<ModelMeshPart>(); for (JsonValue meshPart = meshParts.child(); meshPart != null; meshPart = meshPart.next()) { ModelMeshPart jsonPart = new ModelMeshPart(); String partId = meshPart.getString("id", null); if(id == null) { throw new GdxRuntimeException("Not id given for mesh part"); } for(ModelMeshPart other: parts) { if(other.id.equals(partId)) { throw new GdxRuntimeException("Mesh part with id '" + partId + "' already in defined"); } } jsonPart.id = partId; String type = meshPart.getString("type", null); if(type == null) { throw new GdxRuntimeException("No primitive type given for mesh part '" + partId + "'"); } jsonPart.primitiveType = 
parseType(type); JsonValue indices = meshPart.require("indices"); short[] partIndices = new short[indices.size()]; int k = 0; for (JsonValue value = indices.child(); value != null; value = value.next(), k++) { partIndices[k] = (short)value.asInt(); } jsonPart.indices = partIndices; parts.add(jsonPart); } jsonMesh.parts = parts.toArray(ModelMeshPart.class); model.meshes.add(jsonMesh); } } private int parseType (String type) { if(type.equals("TRIANGLES")) { return GL10.GL_TRIANGLES; } else if(type.equals("LINES")) { return GL10.GL_LINES; } else if(type.equals("POINTS")) { return GL10.GL_POINTS; } else if(type.equals("TRIANGLE_STRIP")) { return GL10.GL_TRIANGLE_STRIP; } else if(type.equals("LINE_STRIP")) { return GL10.GL_LINE_STRIP; } else { throw new GdxRuntimeException("Unknown primitive type '" + type + "', should be one of triangle, trianglestrip, line, linestrip, lineloop or point"); } } private VertexAttribute[] parseAttributes (JsonValue attributes) { Array<VertexAttribute> vertexAttributes = new Array<VertexAttribute>(); int unit = 0; int blendWeightCount = 0; for (JsonValue value = attributes.child(); value != null; value = value.next()) { String attribute = value.asString(); String attr = (String)attribute; if(attr.equals("POSITION")) { vertexAttributes.add(VertexAttribute.Position()); } else if(attr.equals("NORMAL")) { vertexAttributes.add(VertexAttribute.Normal()); } else if(attr.equals("COLOR")) { vertexAttributes.add(VertexAttribute.ColorUnpacked()); } else if(attr.equals("COLORPACKED")) { vertexAttributes.add(VertexAttribute.Color()); } else if(attr.equals("TANGENT")) { vertexAttributes.add(VertexAttribute.Tangent()); } else if(attr.equals("BINORMAL")) { vertexAttributes.add(VertexAttribute.Binormal()); } else if(attr.startsWith("TEXCOORD")) { vertexAttributes.add(VertexAttribute.TexCoords(unit++)); } else if(attr.startsWith("BLENDWEIGHT")) { vertexAttributes.add(VertexAttribute.BoneWeight(blendWeightCount++)); } else { throw new 
GdxRuntimeException("Unknown vertex attribute '" + attr + "', should be one of position, normal, uv, tangent or binormal"); } } return vertexAttributes.toArray(VertexAttribute.class); } private void parseMaterials (ModelData model, JsonValue json, String materialDir) { JsonValue materials = json.get("materials"); if(materials == null) { // we should probably create some default material in this case } else { model.materials.ensureCapacity(materials.size()); for (JsonValue material = materials.child(); material != null; material = material.next()) { ModelMaterial jsonMaterial = new ModelMaterial(); String id = material.getString("id", null); if(id == null) throw new GdxRuntimeException("Material needs an id."); jsonMaterial.id = id; // Read material colors final JsonValue diffuse = material.get("diffuse"); if (diffuse != null) jsonMaterial.diffuse = parseColor(diffuse); final JsonValue ambient = material.get("ambient"); if (ambient != null) jsonMaterial.ambient = parseColor(ambient); final JsonValue emissive= material.get("emissive"); if (emissive!= null) jsonMaterial.emissive = parseColor(emissive); final JsonValue specular= material.get("specular"); if (specular!= null) jsonMaterial.specular = parseColor(specular); // Read shininess jsonMaterial.shininess = material.getFloat("shininess", 0.0f); // Read opacity jsonMaterial.opacity = material.getFloat("opacity", 1.0f); // Read textures JsonValue textures = material.get("textures"); if(textures != null){ for (JsonValue texture = textures.child(); texture != null; texture = texture.next()) { ModelTexture jsonTexture = new ModelTexture(); String textureId = texture.getString("id", null); if(textureId == null) throw new GdxRuntimeException("Texture has no id."); jsonTexture.id = textureId; String fileName = texture.getString("filename", null); if(fileName == null) throw new GdxRuntimeException("Texture needs filename."); jsonTexture.fileName = materialDir + (materialDir.length() == 0 || materialDir.endsWith("/") ? 
"" : "/") + fileName; jsonTexture.uvTranslation = readVector2(texture.get("uvTranslation"), 0f, 0f); jsonTexture.uvScaling = readVector2(texture.get("uvScaling"), 1f, 1f); String textureType = texture.getString("type", null); if(textureType == null) throw new GdxRuntimeException("Texture needs type."); jsonTexture.usage = parseTextureUsage(textureType); if(jsonMaterial.textures == null) jsonMaterial.textures = new Array<ModelTexture>(); jsonMaterial.textures.add(jsonTexture); } } model.materials.add(jsonMaterial); } } } private int parseTextureUsage(final String value) { if (value.equalsIgnoreCase("AMBIENT")) return ModelTexture.USAGE_AMBIENT; else if (value.equalsIgnoreCase("BUMP")) return ModelTexture.USAGE_BUMP; else if (value.equalsIgnoreCase("DIFFUSE")) return ModelTexture.USAGE_DIFFUSE; else if (value.equalsIgnoreCase("EMISSIVE")) return ModelTexture.USAGE_EMISSIVE; else if (value.equalsIgnoreCase("NONE")) return ModelTexture.USAGE_NONE; else if (value.equalsIgnoreCase("NORMAL")) return ModelTexture.USAGE_NORMAL; else if (value.equalsIgnoreCase("REFLECTION")) return ModelTexture.USAGE_REFLECTION; else if (value.equalsIgnoreCase("SHININESS")) return ModelTexture.USAGE_SHININESS; else if (value.equalsIgnoreCase("SPECULAR")) return ModelTexture.USAGE_SPECULAR; else if (value.equalsIgnoreCase("TRANSPARENCY")) return ModelTexture.USAGE_TRANSPARENCY; return ModelTexture.USAGE_UNKNOWN; } private Color parseColor (JsonValue colorArray) { if(colorArray.size >= 3) return new Color(colorArray.getFloat(0), colorArray.getFloat(1), colorArray.getFloat(2), 1.0f); else throw new GdxRuntimeException("Expected Color values <> than three."); } private Vector2 readVector2 (JsonValue vectorArray, float x, float y) { if(vectorArray == null) return new Vector2(x, y); else if(vectorArray.size == 2) return new Vector2(vectorArray.getFloat(0), vectorArray.getFloat(1)); else throw new GdxRuntimeException("Expected Vector2 values <> than two."); } private Array<ModelNode> parseNodes 
(ModelData model, JsonValue json) { JsonValue nodes = json.get("nodes"); if(nodes == null) { throw new GdxRuntimeException("At least one node is required."); } model.nodes.ensureCapacity(nodes.size()); for (JsonValue node = nodes.child(); node != null; node = node.next()) { model.nodes.add(parseNodesRecursively(node)); } return model.nodes; } private final Quaternion tempQ = new Quaternion(); private ModelNode parseNodesRecursively(JsonValue json){ ModelNode jsonNode = new ModelNode(); String id = json.getString("id", null); if(id == null) throw new GdxRuntimeException("Node id missing."); jsonNode.id = id; JsonValue translation = json.get("translation"); if (translation != null && translation.size() != 3) throw new GdxRuntimeException("Node translation incomplete"); jsonNode.translation = translation == null ? null : new Vector3(translation.getFloat(0), translation.getFloat(1), translation.getFloat(2)); JsonValue rotation = json.get("rotation"); if(rotation != null && rotation.size() != 4) throw new GdxRuntimeException("Node rotation incomplete"); jsonNode.rotation = rotation == null ? null : new Quaternion(rotation.getFloat(0), rotation.getFloat(1), rotation.getFloat(2), rotation.getFloat(3)); JsonValue scale = json.get("scale"); if(scale != null && scale.size() != 3) throw new GdxRuntimeException("Node scale incomplete"); jsonNode.scale = scale == null ? 
null : new Vector3(scale.getFloat(0), scale.getFloat(1), scale.getFloat(2)); String meshId = json.getString("mesh", null); if(meshId != null) jsonNode.meshId = meshId; JsonValue materials = json.get("parts"); if(materials != null){ jsonNode.parts = new ModelNodePart[materials.size()]; int i = 0; for (JsonValue material = materials.child(); material != null; material = material.next(), i++) { ModelNodePart nodePart = new ModelNodePart(); String meshPartId = material.getString("meshpartid", null); String materialId = material.getString("materialid", null); if(meshPartId == null || materialId == null){ throw new GdxRuntimeException("Node "+id+" part is missing meshPartId or materialId"); } nodePart.materialId = materialId; nodePart.meshPartId = meshPartId; JsonValue bones = material.get("bones"); if (bones != null) { nodePart.bones = new ArrayMap<String, Matrix4>(true, bones.size(), String.class, Matrix4.class); int j = 0; for (JsonValue bone = bones.child(); bone != null; bone = bone.next(), j++) { String nodeId = bone.getString("node", null); if (nodeId == null) throw new GdxRuntimeException("Bone node ID missing"); Matrix4 transform = new Matrix4(); JsonValue val = bone.get("translation"); if (val != null && val.size() >= 3) transform.translate(val.getFloat(0), val.getFloat(1), val.getFloat(2)); val = bone.get("rotation"); if(val != null && val.size() >= 4) transform.rotate(tempQ.set(val.getFloat(0), val.getFloat(1), val.getFloat(2), val.getFloat(3))); val = bone.get("scale"); if(val != null && val.size() >= 3) transform.scale(val.getFloat(0), val.getFloat(1), val.getFloat(2)); nodePart.bones.put(nodeId, transform); } } jsonNode.parts[i] = nodePart; } } JsonValue children = json.get("children"); if(children != null){ jsonNode.children = new ModelNode[children.size()]; int i = 0; for (JsonValue child = children.child(); child != null; child = child.next(), i++) { jsonNode.children[i] = parseNodesRecursively(child); } } return jsonNode; } private void parseAnimations 
(ModelData model, JsonValue json) { JsonValue animations = json.get("animations"); if(animations == null) return; model.animations.ensureCapacity(animations.size()); for (JsonValue anim = animations.child(); anim != null; anim = anim.next()) { JsonValue nodes = anim.get("bones"); if (nodes == null) continue; ModelAnimation animation = new ModelAnimation(); model.animations.add(animation); animation.nodeAnimations.ensureCapacity(nodes.size()); animation.id = anim.getString("id"); for (JsonValue node = nodes.child(); node != null; node = node.next()) { JsonValue keyframes = node.get("keyframes"); ModelNodeAnimation nodeAnim = new ModelNodeAnimation(); animation.nodeAnimations.add(nodeAnim); nodeAnim.nodeId = node.getString("boneId"); nodeAnim.keyframes.ensureCapacity(keyframes.size()); for (JsonValue keyframe = keyframes.child(); keyframe != null; keyframe = keyframe.next()) { ModelNodeKeyframe kf = new ModelNodeKeyframe(); nodeAnim.keyframes.add(kf); kf.keytime = keyframe.getFloat("keytime") / 1000.f; JsonValue translation = keyframe.get("translation"); if (translation != null && translation.size() == 3) kf.translation = new Vector3(translation.getFloat(0), translation.getFloat(1), translation.getFloat(2)); JsonValue rotation = keyframe.get("rotation"); if (rotation != null && rotation.size() == 4) kf.rotation = new Quaternion(rotation.getFloat(0), rotation.getFloat(1), rotation.getFloat(2), rotation.getFloat(3)); JsonValue scale = keyframe.get("scale"); if (scale != null && scale.size() == 3) kf.scale = new Vector3(scale.getFloat(0), scale.getFloat(1), scale.getFloat(2)); } } } } }
/* * Copyright 2008 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mybatis.generator.internal; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.mybatis.generator.api.GeneratedJavaFile; import org.mybatis.generator.api.GeneratedXmlFile; import org.mybatis.generator.api.Plugin; import org.mybatis.generator.api.IntrospectedColumn; import org.mybatis.generator.api.IntrospectedTable; import org.mybatis.generator.api.dom.java.Field; import org.mybatis.generator.api.dom.java.Interface; import org.mybatis.generator.api.dom.java.Method; import org.mybatis.generator.api.dom.java.TopLevelClass; import org.mybatis.generator.api.dom.xml.Document; import org.mybatis.generator.api.dom.xml.XmlElement; import org.mybatis.generator.config.Context; /** * This class is for internal use only. It contains a list of plugins for the * current context and is used to aggregate plugins together. This class * implements the rule that if any plugin returns "false" from a method, then no * other plugin is called. * <p> * This class does not follow the normal plugin lifecycle and should not be * subclassed by clients. 
* * @author Jeff Butler * */ public final class PluginAggregator implements Plugin { private List<Plugin> plugins; public PluginAggregator() { plugins = new ArrayList<Plugin>(); } public void addPlugin(Plugin plugin) { plugins.add(plugin); } public void setContext(Context context) { throw new UnsupportedOperationException(); } public void setProperties(Properties properties) { throw new UnsupportedOperationException(); } public boolean validate(List<String> warnings) { throw new UnsupportedOperationException(); } public boolean modelBaseRecordClassGenerated(TopLevelClass tlc, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.modelBaseRecordClassGenerated(tlc, introspectedTable)) { rc = false; break; } } return rc; } public boolean modelRecordWithBLOBsClassGenerated(TopLevelClass tlc, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.modelRecordWithBLOBsClassGenerated(tlc, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapCountByExampleElementGenerated(XmlElement element, IntrospectedTable table) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapCountByExampleElementGenerated(element, table)) { rc = false; break; } } return rc; } public boolean sqlMapDeleteByExampleElementGenerated(XmlElement element, IntrospectedTable table) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapDeleteByExampleElementGenerated(element, table)) { rc = false; break; } } return rc; } public boolean sqlMapDeleteByPrimaryKeyElementGenerated(XmlElement element, IntrospectedTable table) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapDeleteByPrimaryKeyElementGenerated(element, table)) { rc = false; break; } } return rc; } public boolean modelExampleClassGenerated(TopLevelClass tlc, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if 
(!plugin.modelExampleClassGenerated(tlc, introspectedTable)) { rc = false; break; } } return rc; } public List<GeneratedJavaFile> contextGenerateAdditionalJavaFiles(IntrospectedTable introspectedTable) { List<GeneratedJavaFile> answer = new ArrayList<GeneratedJavaFile>(); for (Plugin plugin : plugins) { List<GeneratedJavaFile> temp = plugin.contextGenerateAdditionalJavaFiles(introspectedTable); if (temp != null) { answer.addAll(temp); } } return answer; } public List<GeneratedXmlFile> contextGenerateAdditionalXmlFiles(IntrospectedTable introspectedTable) { List<GeneratedXmlFile> answer = new ArrayList<GeneratedXmlFile>(); for (Plugin plugin : plugins) { List<GeneratedXmlFile> temp = plugin.contextGenerateAdditionalXmlFiles(introspectedTable); if (temp != null) { answer.addAll(temp); } } return answer; } public boolean modelPrimaryKeyClassGenerated(TopLevelClass tlc, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.modelPrimaryKeyClassGenerated(tlc, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapResultMapWithoutBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapResultMapWithoutBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapExampleWhereClauseElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapExampleWhereClauseElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapInsertElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapInsertElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapResultMapWithBLOBsElementGenerated(XmlElement element, 
IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapResultMapWithBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapSelectByExampleWithoutBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapSelectByExampleWithoutBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapSelectByExampleWithBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapSelectByExampleWithBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapSelectByPrimaryKeyElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapSelectByPrimaryKeyElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapGenerated(GeneratedXmlFile sqlMap, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapGenerated(sqlMap, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapUpdateByExampleSelectiveElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByExampleSelectiveElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapUpdateByExampleWithBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByExampleWithBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean 
sqlMapUpdateByExampleWithoutBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByExampleWithoutBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapUpdateByPrimaryKeySelectiveElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByPrimaryKeySelectiveElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapUpdateByPrimaryKeyWithBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByPrimaryKeyWithBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapUpdateByPrimaryKeyWithoutBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapUpdateByPrimaryKeyWithoutBLOBsElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientCountByExampleMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientCountByExampleMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientCountByExampleMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientCountByExampleMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientDeleteByExampleMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) 
{ if (!plugin.clientDeleteByExampleMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientDeleteByExampleMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientDeleteByExampleMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientDeleteByPrimaryKeyMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientDeleteByPrimaryKeyMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientDeleteByPrimaryKeyMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientDeleteByPrimaryKeyMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientInsertMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientInsertMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientInsertMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientInsertMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientGenerated(Interface interfaze, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientGenerated(interfaze, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean 
clientSelectByExampleWithBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByExampleWithBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientSelectByExampleWithBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByExampleWithBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientSelectByExampleWithoutBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByExampleWithoutBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientSelectByExampleWithoutBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByExampleWithoutBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientSelectByPrimaryKeyMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByPrimaryKeyMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientSelectByPrimaryKeyMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientSelectByPrimaryKeyMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean 
clientUpdateByExampleSelectiveMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleSelectiveMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByExampleSelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleSelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByExampleWithBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleWithBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByExampleWithBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleWithBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByExampleWithoutBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleWithoutBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByExampleWithoutBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByExampleWithoutBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean 
clientUpdateByPrimaryKeySelectiveMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeySelectiveMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByPrimaryKeySelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeySelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByPrimaryKeyWithBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeyWithBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByPrimaryKeyWithBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeyWithBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByPrimaryKeyWithoutBLOBsMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeyWithoutBLOBsMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientUpdateByPrimaryKeyWithoutBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientUpdateByPrimaryKeyWithoutBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; 
} public List<GeneratedJavaFile> contextGenerateAdditionalJavaFiles() { List<GeneratedJavaFile> answer = new ArrayList<GeneratedJavaFile>(); for (Plugin plugin : plugins) { List<GeneratedJavaFile> temp = plugin.contextGenerateAdditionalJavaFiles(); if (temp != null) { answer.addAll(temp); } } return answer; } public List<GeneratedXmlFile> contextGenerateAdditionalXmlFiles() { List<GeneratedXmlFile> answer = new ArrayList<GeneratedXmlFile>(); for (Plugin plugin : plugins) { List<GeneratedXmlFile> temp = plugin.contextGenerateAdditionalXmlFiles(); if (temp != null) { answer.addAll(temp); } } return answer; } public boolean sqlMapDocumentGenerated(Document document, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapDocumentGenerated(document, introspectedTable)) { rc = false; break; } } return rc; } public boolean modelFieldGenerated(Field field, TopLevelClass topLevelClass, IntrospectedColumn introspectedColumn, IntrospectedTable introspectedTable, ModelClassType modelClassType) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin .modelFieldGenerated(field, topLevelClass, introspectedColumn, introspectedTable, modelClassType)) { rc = false; break; } } return rc; } public boolean modelGetterMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedColumn introspectedColumn, IntrospectedTable introspectedTable, ModelClassType modelClassType) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.modelGetterMethodGenerated(method, topLevelClass, introspectedColumn, introspectedTable, modelClassType)) { rc = false; break; } } return rc; } public boolean modelSetterMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedColumn introspectedColumn, IntrospectedTable introspectedTable, ModelClassType modelClassType) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.modelSetterMethodGenerated(method, topLevelClass, introspectedColumn, 
introspectedTable, modelClassType)) { rc = false; break; } } return rc; } public boolean sqlMapInsertSelectiveElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapInsertSelectiveElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientInsertSelectiveMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientInsertSelectiveMethodGenerated(method, interfaze, introspectedTable)) { rc = false; break; } } return rc; } public boolean clientInsertSelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.clientInsertSelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public void initialized(IntrospectedTable introspectedTable) { for (Plugin plugin : plugins) { plugin.initialized(introspectedTable); } } public boolean sqlMapBaseColumnListElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapBaseColumnListElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean sqlMapBlobColumnListElementGenerated(XmlElement element, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.sqlMapBlobColumnListElementGenerated(element, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerGenerated(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerGenerated(topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerApplyWhereMethodGenerated(Method method, 
TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerApplyWhereMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerCountByExampleMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerCountByExampleMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerDeleteByExampleMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerDeleteByExampleMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerInsertSelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerInsertSelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerSelectByExampleWithBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerSelectByExampleWithBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerSelectByExampleWithoutBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerSelectByExampleWithoutBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerUpdateByExampleSelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, 
IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerUpdateByExampleSelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerUpdateByExampleWithBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerUpdateByExampleWithBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerUpdateByExampleWithoutBLOBsMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerUpdateByExampleWithoutBLOBsMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } public boolean providerUpdateByPrimaryKeySelectiveMethodGenerated(Method method, TopLevelClass topLevelClass, IntrospectedTable introspectedTable) { boolean rc = true; for (Plugin plugin : plugins) { if (!plugin.providerUpdateByPrimaryKeySelectiveMethodGenerated(method, topLevelClass, introspectedTable)) { rc = false; break; } } return rc; } }
/*
 * Copyright 2014-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package myfeed.user;

import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.springframework.restdocs.RestDocumentation.document;
import static org.springframework.restdocs.hypermedia.HypermediaDocumentation.linkWithRel;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import javax.servlet.RequestDispatcher;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.hateoas.MediaTypes;
import org.springframework.restdocs.config.RestDocumentationConfigurer;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Generates Spring REST Docs snippets for the user service API by exercising
 * it through {@link MockMvc}.
 *
 * <p>Each test both verifies a response and, via {@code document(...)},
 * writes an Asciidoctor snippet used by the API guide build.
 *
 * <p>NOTE(review): a large block of commented-out note/tag tests was removed
 * here; it referenced repositories ({@code noteRepository},
 * {@code tagRepository}) and entities that do not exist in this class, so it
 * could never be re-enabled as written.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = UserApp.class)
@WebAppConfiguration
public class ApiDocumentation {

	@Autowired
	private ObjectMapper objectMapper;

	@Autowired
	private WebApplicationContext context;

	private MockMvc mockMvc;

	@Before
	public void setUp() {
		// Wire REST Docs into the MockMvc pipeline so andDo(document(...)) works.
		this.mockMvc = MockMvcBuilders.webAppContextSetup(this.context)
				.apply(new RestDocumentationConfigurer()).build();
	}

	/**
	 * Documents the shape of the standard Spring Boot error payload by
	 * invoking {@code /error} with the request attributes the servlet
	 * container would set on a real failure.
	 */
	@Test
	public void errorExample() throws Exception {
		this.mockMvc
				.perform(get("/error")
						.requestAttr(RequestDispatcher.ERROR_STATUS_CODE, 400)
						.requestAttr(RequestDispatcher.ERROR_REQUEST_URI, "/notes")
						.requestAttr(RequestDispatcher.ERROR_MESSAGE,
								"The tag 'http://localhost:8080/tags/123' does not exist"))
				.andDo(print()).andExpect(status().isBadRequest())
				.andExpect(jsonPath("error", is("Bad Request")))
				.andExpect(jsonPath("timestamp", is(notNullValue())))
				.andExpect(jsonPath("status", is(400)))
				.andExpect(jsonPath("path", is(notNullValue())))
				.andDo(document("error-example"));
	}

	/**
	 * Documents the service index (hypermedia root). Currently ignored —
	 * presumably the root resource is not implemented yet; re-enable once it is.
	 */
	@Test
	@Ignore
	public void indexExample() throws Exception {
		this.mockMvc.perform(get("/"))
				.andExpect(status().isOk())
				.andDo(document("index-example").withLinks(
						linkWithRel("users").description(
								"The <<resources-users,Users resource>>")));
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.type; import com.facebook.presto.operator.scalar.AbstractTestFunctions; import com.facebook.presto.operator.scalar.ScalarFunction; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilderStatus; import com.facebook.presto.spi.block.InterleavedBlockBuilder; import com.facebook.presto.spi.type.SqlTimestamp; import com.facebook.presto.spi.type.SqlVarbinary; import com.facebook.presto.spi.type.StandardTypes; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.airlift.slice.DynamicSliceOutput; import io.airlift.slice.Slice; import org.testng.annotations.Test; import java.util.HashMap; import java.util.Map; import static com.facebook.presto.SessionTestUtils.TEST_SESSION; import static com.facebook.presto.block.BlockSerdeUtil.writeBlock; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.IntegerType.INTEGER; import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static 
com.facebook.presto.spi.type.VarcharType.createVarcharType; import static com.facebook.presto.type.JsonType.JSON; import static com.facebook.presto.type.UnknownType.UNKNOWN; import static com.facebook.presto.util.StructuralTestUtil.arrayBlockOf; import static com.facebook.presto.util.StructuralTestUtil.mapBlockOf; import static java.nio.charset.StandardCharsets.UTF_8; import static org.testng.Assert.assertEquals; public class TestMapOperators extends AbstractTestFunctions { private TestMapOperators() { registerScalar(getClass()); } @ScalarFunction @SqlType(StandardTypes.JSON) public static Slice uncheckedToJson(@SqlType(StandardTypes.VARCHAR) Slice slice) { return slice; } @Test public void testStackRepresentation() throws Exception { Block array = arrayBlockOf(BIGINT, 1L, 2L); Block actualBlock = mapBlockOf(DOUBLE, new ArrayType(BIGINT), ImmutableMap.of(1.0, array)); DynamicSliceOutput actualSliceOutput = new DynamicSliceOutput(100); writeBlock(actualSliceOutput, actualBlock); Block expectedBlock = new InterleavedBlockBuilder(ImmutableList.<Type>of(DOUBLE, new ArrayType(BIGINT)), new BlockBuilderStatus(), 3) .writeDouble(1.0) .closeEntry() .writeObject( BIGINT .createBlockBuilder(new BlockBuilderStatus(), 1) .writeLong(1L) .closeEntry() .writeLong(2L) .closeEntry() .build() ) .closeEntry() .build(); DynamicSliceOutput expectedSliceOutput = new DynamicSliceOutput(100); writeBlock(expectedSliceOutput, expectedBlock); assertEquals(actualSliceOutput.slice(), expectedSliceOutput.slice()); } @Test public void testConstructor() throws Exception { assertFunction("MAP(ARRAY ['1','3'], ARRAY [2,4])", new MapType(createVarcharType(1), INTEGER), ImmutableMap.of("1", 2, "3", 4)); Map<Integer, Integer> map = new HashMap<>(); map.put(1, 2); map.put(3, null); assertFunction("MAP(ARRAY [1, 3], ARRAY[2, NULL])", new MapType(INTEGER, INTEGER), map); assertFunction("MAP(ARRAY [1, 3], ARRAY [2.0, 4.0])", new MapType(INTEGER, DOUBLE), ImmutableMap.of(1, 2.0, 3, 4.0)); 
assertFunction("MAP(ARRAY[1.0, 2.0], ARRAY[ ARRAY[1, 2], ARRAY[3]])", new MapType(DOUBLE, new ArrayType(INTEGER)), ImmutableMap.of(1.0, ImmutableList.of(1, 2), 2.0, ImmutableList.of(3))); assertFunction("MAP(ARRAY[1.0, 2.0], ARRAY[ ARRAY[BIGINT '1', BIGINT '2'], ARRAY[ BIGINT '3' ]])", new MapType(DOUBLE, new ArrayType(BIGINT)), ImmutableMap.of(1.0, ImmutableList.of(1L, 2L), 2.0, ImmutableList.of(3L))); assertFunction("MAP(ARRAY['puppies'], ARRAY['kittens'])", new MapType(createVarcharType(7), createVarcharType(7)), ImmutableMap.of("puppies", "kittens")); assertFunction("MAP(ARRAY[TRUE, FALSE], ARRAY[2,4])", new MapType(BOOLEAN, INTEGER), ImmutableMap.of(true, 2, false, 4)); assertFunction("MAP(ARRAY['1', '100'], ARRAY[from_unixtime(1), from_unixtime(100)])", new MapType(createVarcharType(3), TIMESTAMP), ImmutableMap.of( "1", new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey()), "100", new SqlTimestamp(100_000, TEST_SESSION.getTimeZoneKey()))); assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[1.0, 100.0])", new MapType(TIMESTAMP, DOUBLE), ImmutableMap.of( new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey()), 1.0, new SqlTimestamp(100_000, TEST_SESSION.getTimeZoneKey()), 100.0)); assertInvalidFunction("MAP(ARRAY [1], ARRAY [2, 4])", "Key and value arrays must be the same length"); } @Test public void testCardinality() throws Exception { assertFunction("CARDINALITY(MAP(ARRAY ['1','3'], ARRAY [2,4]))", BIGINT, 2L); assertFunction("CARDINALITY(MAP(ARRAY [1, 3], ARRAY[2, NULL]))", BIGINT, 2L); assertFunction("CARDINALITY(MAP(ARRAY [1, 3], ARRAY [2.0, 4.0]))", BIGINT, 2L); assertFunction("CARDINALITY(MAP(ARRAY[1.0, 2.0], ARRAY[ ARRAY[1, 2], ARRAY[3]]))", BIGINT, 2L); assertFunction("CARDINALITY(MAP(ARRAY['puppies'], ARRAY['kittens']))", BIGINT, 1L); assertFunction("CARDINALITY(MAP(ARRAY[TRUE], ARRAY[2]))", BIGINT, 1L); assertFunction("CARDINALITY(MAP(ARRAY['1'], ARRAY[from_unixtime(1)]))", BIGINT, 1L); 
// NOTE(review): tail of a test method whose beginning is outside this chunk — left untouched.
assertFunction("CARDINALITY(MAP(ARRAY[from_unixtime(1)], ARRAY[1.0]))", BIGINT, 1L);
}

/**
 * CAST(map AS JSON): keys are serialized as JSON object member names (note the
 * first two cases show identical output regardless of construction order),
 * NULL values become JSON null, and timestamps are rendered via
 * {@code SqlTimestamp} using the test session's time zone.
 */
@Test
public void testMapToJson()
        throws Exception
{
    assertFunction("CAST(MAP(ARRAY[7,5,3,1], ARRAY[8,6,4,2]) AS JSON)", JSON, "{\"1\":2,\"3\":4,\"5\":6,\"7\":8}");
    assertFunction("CAST(MAP(ARRAY[1,3,5,7], ARRAY[2,4,6,8]) AS JSON)", JSON, "{\"1\":2,\"3\":4,\"5\":6,\"7\":8}");
    assertFunction("CAST(MAP(ARRAY [1, 3], ARRAY[2, NULL]) AS JSON)", JSON, "{\"1\":2,\"3\":null}");
    assertFunction("CAST(MAP(ARRAY [1, 3], ARRAY [2.0, 4.0]) AS JSON)", JSON, "{\"1\":2.0,\"3\":4.0}");
    assertFunction("CAST(MAP(ARRAY[1.0, 2.0], ARRAY[ ARRAY[1, 2], ARRAY[3]]) AS JSON)", JSON, "{\"1.0\":[1,2],\"2.0\":[3]}");
    assertFunction("CAST(MAP(ARRAY['puppies'], ARRAY['kittens']) AS JSON)", JSON, "{\"puppies\":\"kittens\"}");
    assertFunction("CAST(MAP(ARRAY[TRUE], ARRAY[2]) AS JSON)", JSON, "{\"true\":2}");
    // Timestamp key/value rendering depends on the session time zone, hence the expected
    // value is built from SqlTimestamp rather than hard-coded.
    assertFunction("CAST(MAP(ARRAY['1'], ARRAY[from_unixtime(1)]) AS JSON)", JSON, "{\"1\":\"" + new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey()) + "\"}");
    assertFunction("CAST(MAP(ARRAY[from_unixtime(1)], ARRAY[1.0]) AS JSON)", JSON, "{\"" + new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey()) + "\":1.0}");
}

/**
 * CAST(JSON AS map): scalar, array and nested coercions, JSON null producing a
 * SQL NULL map, MAP&lt;VARCHAR, JSON&gt; preserving raw JSON values, and
 * invalid target types (VARBINARY values, ARRAY keys) rejected by the cast.
 */
@Test
public void testJsonToMap()
        throws Exception
{
    assertFunction("CAST(JSON '{\"1\":2, \"3\": 4}' AS MAP<BIGINT, BIGINT>)",
            new MapType(BIGINT, BIGINT),
            ImmutableMap.of(1L, 2L, 3L, 4L));
    assertFunction("CAST(JSON '{\"1\":2.0, \"3\": 4.0}' AS MAP<BIGINT, DOUBLE>)",
            new MapType(BIGINT, DOUBLE),
            ImmutableMap.of(1L, 2.0, 3L, 4.0));
    assertFunction("CAST(JSON '{\"1\":[2, 3], \"4\": [5]}' AS MAP<BIGINT, ARRAY<BIGINT>>)",
            new MapType(BIGINT, new ArrayType(BIGINT)),
            ImmutableMap.of(1L, ImmutableList.of(2L, 3L), 4L, ImmutableList.of(5L)));
    assertFunction("CAST(JSON '{\"puppies\":\"kittens\"}' AS MAP<VARCHAR, VARCHAR>)",
            new MapType(VARCHAR, VARCHAR),
            ImmutableMap.of("puppies", "kittens"));
    assertFunction("CAST(JSON '{\"true\":\"kittens\"}' AS MAP<BOOLEAN, VARCHAR>)",
            new MapType(BOOLEAN, VARCHAR),
            ImmutableMap.of(true, "kittens"));
    // JSON null casts to a SQL NULL map, not an empty map.
    assertFunction("CAST(JSON 'null' AS MAP<BOOLEAN, VARCHAR>)",
            new MapType(BOOLEAN, VARCHAR),
            null);
    // Values cast to JSON keep their raw (canonicalized) JSON text.
    assertFunction("CAST(JSON '{\"k1\": 5, \"k2\":[1, 2, 3], \"k3\":\"e\", \"k4\":{\"a\": \"b\"}, \"k5\":null, \"k6\":\"null\", \"k7\":[null]}' AS MAP<VARCHAR, JSON>)",
            new MapType(VARCHAR, JSON),
            ImmutableMap.builder()
                    .put("k1", "5")
                    .put("k2", "[1,2,3]")
                    .put("k3", "\"e\"")
                    .put("k4", "{\"a\":\"b\"}")
                    .put("k5", "null")
                    .put("k6", "\"null\"")
                    .put("k7", "[null]")
                    .build()
    );

    // These two tests verify that partial json cast preserves input order
    // The second test should never happen in real life because valid json in presto requires natural key ordering.
    // However, it is added to make sure that the order in the first test is not a coincidence.
    assertFunction("CAST(JSON '{\"k1\": {\"1klmnopq\":1, \"2klmnopq\":2, \"3klmnopq\":3, \"4klmnopq\":4, \"5klmnopq\":5, \"6klmnopq\":6, \"7klmnopq\":7}}' AS MAP<VARCHAR, JSON>)",
            new MapType(VARCHAR, JSON),
            ImmutableMap.of("k1", "{\"1klmnopq\":1,\"2klmnopq\":2,\"3klmnopq\":3,\"4klmnopq\":4,\"5klmnopq\":5,\"6klmnopq\":6,\"7klmnopq\":7}")
    );
    assertFunction("CAST(unchecked_to_json('{\"k1\": {\"7klmnopq\":7, \"6klmnopq\":6, \"5klmnopq\":5, \"4klmnopq\":4, \"3klmnopq\":3, \"2klmnopq\":2, \"1klmnopq\":1}}') AS MAP<VARCHAR, JSON>)",
            new MapType(VARCHAR, JSON),
            ImmutableMap.of("k1", "{\"7klmnopq\":7,\"6klmnopq\":6,\"5klmnopq\":5,\"4klmnopq\":4,\"3klmnopq\":3,\"2klmnopq\":2,\"1klmnopq\":1}")
    );

    assertInvalidCast("CAST(JSON '{\"true\":\"kittens\"}' AS MAP<BOOLEAN, VARBINARY>)");
    assertInvalidCast("CAST(JSON '{\"[1, 2]\": 1}' AS MAP<ARRAY<BIGINT>, BIGINT>)");
}

/**
 * Map subscript (map[key]): NULL values are returned as NULL of the value
 * type, NULL keys are rejected at map construction time with
 * "map key cannot be null", and lookups work across key/value type
 * combinations (integers, bigints, doubles, varchars, booleans, timestamps,
 * nested arrays).
 */
@Test
public void testSubscript()
        throws Exception
{
    assertFunction("MAP(ARRAY [1], ARRAY [null])[1]", UNKNOWN, null);
    assertFunction("MAP(ARRAY [1.0], ARRAY [null])[1.0]", UNKNOWN, null);
    assertFunction("MAP(ARRAY [TRUE], ARRAY [null])[TRUE]", UNKNOWN, null);
    assertFunction("MAP(ARRAY['puppies'], ARRAY [null])['puppies']", UNKNOWN, null);
    // NULL map keys are rejected when the map is built, not at lookup time.
    assertInvalidFunction("MAP(ARRAY [CAST(null as bigint)], ARRAY [1])", "map key cannot be null");
    assertInvalidFunction("MAP(ARRAY [CAST(null as bigint)], ARRAY [CAST(null as bigint)])", "map key cannot be null");
    assertInvalidFunction("MAP(ARRAY [1,null], ARRAY [null,2])", "map key cannot be null");
    assertFunction("MAP(ARRAY [1, 3], ARRAY [2, 4])[3]", INTEGER, 4);
    assertFunction("MAP(ARRAY [BIGINT '1', 3], ARRAY [BIGINT '2', 4])[3]", BIGINT, 4L);
    assertFunction("MAP(ARRAY [1, 3], ARRAY[2, NULL])[3]", INTEGER, null);
    assertFunction("MAP(ARRAY [BIGINT '1', 3], ARRAY[2, NULL])[3]", INTEGER, null);
    assertFunction("MAP(ARRAY [1, 3], ARRAY [2.0, 4.0])[1]", DOUBLE, 2.0);
    assertFunction("MAP(ARRAY[1.0, 2.0], ARRAY[ ARRAY[1, 2], ARRAY[3]])[1.0]", new ArrayType(INTEGER), ImmutableList.of(1, 2));
    assertFunction("MAP(ARRAY['puppies'], ARRAY['kittens'])['puppies']", createVarcharType(7), "kittens");
    assertFunction("MAP(ARRAY[TRUE,FALSE],ARRAY[2,4])[TRUE]", INTEGER, 2);
    assertFunction("MAP(ARRAY['1', '100'], ARRAY[from_unixtime(1), from_unixtime(100)])['1']", TIMESTAMP, new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey()));
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[1.0, 100.0])[from_unixtime(1)]", DOUBLE, 1.0);
}

/**
 * MAP_KEYS: returns the keys as an array of the key type, covering varchar,
 * double, boolean, timestamp, varbinary, integer and nested-array keys.
 */
@Test
public void testMapKeys()
        throws Exception
{
    assertFunction("MAP_KEYS(MAP(ARRAY['1', '3'], ARRAY['2', '4']))", new ArrayType(createVarcharType(1)), ImmutableList.of("1", "3"));
    assertFunction("MAP_KEYS(MAP(ARRAY[1.0, 2.0], ARRAY[ARRAY[1, 2], ARRAY[3]]))", new ArrayType(DOUBLE), ImmutableList.of(1.0, 2.0));
    assertFunction("MAP_KEYS(MAP(ARRAY['puppies'], ARRAY['kittens']))", new ArrayType(createVarcharType(7)), ImmutableList.of("puppies"));
    assertFunction("MAP_KEYS(MAP(ARRAY[TRUE], ARRAY[2]))", new ArrayType(BOOLEAN), ImmutableList.of(true));
    assertFunction("MAP_KEYS(MAP(ARRAY[from_unixtime(1)], ARRAY[1.0]))", new ArrayType(TIMESTAMP), ImmutableList.of(new SqlTimestamp(1000, TEST_SESSION.getTimeZoneKey())));
    assertFunction("MAP_KEYS(MAP(ARRAY[CAST('puppies' as varbinary)], ARRAY['kittens']))", new ArrayType(VARBINARY), ImmutableList.of(new SqlVarbinary("puppies".getBytes(UTF_8))));
    assertFunction("MAP_KEYS(MAP(ARRAY[1,2], ARRAY[ARRAY[1, 2], ARRAY[3]]))", new ArrayType(INTEGER), ImmutableList.of(1, 2));
    assertFunction("MAP_KEYS(MAP(ARRAY[1,4], ARRAY[MAP(ARRAY[2], ARRAY[3]), MAP(ARRAY[5], ARRAY[6])]))", new ArrayType(INTEGER), ImmutableList.of(1, 4));
    assertFunction("MAP_KEYS(MAP(ARRAY [ARRAY [1], ARRAY [2, 3]], ARRAY [ARRAY [3, 4], ARRAY [5]]))", new ArrayType(new ArrayType(INTEGER)), ImmutableList.of(ImmutableList.of(1), ImmutableList.of(2, 3)));
}

/**
 * MAP_VALUES: returns the values as an array of the value type; NULL values
 * are preserved (hence Lists.newArrayList, which accepts nulls, instead of
 * ImmutableList in those cases).
 */
@Test
public void testMapValues()
        throws Exception
{
    assertFunction("MAP_VALUES(MAP(ARRAY['1'], ARRAY[ARRAY[TRUE, FALSE, NULL]]))", new ArrayType(new ArrayType(BOOLEAN)), ImmutableList.of(Lists.newArrayList(true, false, null)));
    assertFunction("MAP_VALUES(MAP(ARRAY['1'], ARRAY[ARRAY[ARRAY[1, 2]]]))", new ArrayType(new ArrayType(new ArrayType(INTEGER))), ImmutableList.of(ImmutableList.of(ImmutableList.of(1, 2))));
    assertFunction("MAP_VALUES(MAP(ARRAY [1, 3], ARRAY ['2', '4']))", new ArrayType(createVarcharType(1)), ImmutableList.of("2", "4"));
    assertFunction("MAP_VALUES(MAP(ARRAY[1.0,2.0], ARRAY[ARRAY[1, 2], ARRAY[3]]))", new ArrayType(new ArrayType(INTEGER)), ImmutableList.of(ImmutableList.of(1, 2), ImmutableList.of(3)));
    assertFunction("MAP_VALUES(MAP(ARRAY['puppies'], ARRAY['kittens']))", new ArrayType(createVarcharType(7)), ImmutableList.of("kittens"));
    assertFunction("MAP_VALUES(MAP(ARRAY[TRUE], ARRAY[2]))", new ArrayType(INTEGER), ImmutableList.of(2));
    assertFunction("MAP_VALUES(MAP(ARRAY['1'], ARRAY[NULL]))", new ArrayType(UNKNOWN), Lists.newArrayList((Object) null));
    assertFunction("MAP_VALUES(MAP(ARRAY['1'], ARRAY[TRUE]))", new ArrayType(BOOLEAN), ImmutableList.of(true));
    assertFunction("MAP_VALUES(MAP(ARRAY['1'], ARRAY[1.0]))", new ArrayType(DOUBLE), ImmutableList.of(1.0));
    assertFunction("MAP_VALUES(MAP(ARRAY['1', '2'], ARRAY[ARRAY[1.0, 2.0], ARRAY[3.0, 4.0]]))", new ArrayType(new ArrayType(DOUBLE)), ImmutableList.of(ImmutableList.of(1.0, 2.0), ImmutableList.of(3.0, 4.0)));
}

/**
 * Map equality ({@code =}): order-insensitive on keys, element-wise on
 * values; any NULL value involved in the comparison makes the result NULL
 * (SQL three-valued logic).
 */
@Test
public void testEquals()
        throws Exception
{
    // single item
    assertFunction("MAP(ARRAY[1], ARRAY[2]) = MAP(ARRAY[1], ARRAY[2])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1], ARRAY[2]) = MAP(ARRAY[1], ARRAY[4])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[3], ARRAY[1]) = MAP(ARRAY[2], ARRAY[1])", BOOLEAN, false);

    // multiple items
    assertFunction("MAP(ARRAY[1], ARRAY[2]) = MAP(ARRAY[1, 3], ARRAY[2, 4])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) = MAP(ARRAY[1], ARRAY[2])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) = MAP(ARRAY[3, 1], ARRAY[4, 2])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) = MAP(ARRAY[3, 1], ARRAY[2, 4])", BOOLEAN, false);
    assertFunction("MAP(ARRAY['1', '3'], ARRAY[2.0, 4.0]) = MAP(ARRAY['3', '1'], ARRAY[4.0, 2.0])", BOOLEAN, true);
    assertFunction("MAP(ARRAY['1', '3'], ARRAY[2.0, 4.0]) = MAP(ARRAY['3', '1'], ARRAY[2.0, 4.0])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[TRUE, FALSE], ARRAY['2', '4']) = MAP(ARRAY[FALSE, TRUE], ARRAY['4', '2'])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[TRUE, FALSE], ARRAY['2', '4']) = MAP(ARRAY[FALSE, TRUE], ARRAY['2', '4'])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[TRUE, FALSE]) = MAP(ARRAY[3.0, 1.0], ARRAY[FALSE, TRUE])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[TRUE, FALSE]) = MAP(ARRAY[3.0, 1.0], ARRAY[TRUE, FALSE])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[from_unixtime(1), from_unixtime(100)]) = MAP(ARRAY[3.0, 1.0], ARRAY[from_unixtime(100), from_unixtime(1)])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[from_unixtime(1), from_unixtime(100)]) = MAP(ARRAY[3.0, 1.0], ARRAY[from_unixtime(1), from_unixtime(100)])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY['kittens', 'puppies']) = MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY['puppies', 'kittens'])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY['kittens', 'puppies']) = MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY['kittens', 'puppies'])", BOOLEAN, false);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]]) = MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]])", BOOLEAN, true);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]]) = MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[3], ARRAY[1, 2]])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['dog', 'cat']], ARRAY[ARRAY[1, 2], ARRAY[3]]) = MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['dog', 'cat']], ARRAY[ARRAY[1, 2], ARRAY[3]])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['dog', 'cat']], ARRAY[ARRAY[1, 2], ARRAY[3]]) = MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['dog', 'cat']], ARRAY[ARRAY[3], ARRAY[1, 2]])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['cat', 'dog']], ARRAY[ARRAY[1, 2], ARRAY[3]]) = MAP(ARRAY[ARRAY['kittens', 'puppies'], ARRAY['dog', 'cat']], ARRAY[ARRAY[1, 2], ARRAY[3]])", BOOLEAN, false);

    // nulls
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, 3]) = MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, 2])", BOOLEAN, null);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, NULL]) = MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, NULL])", BOOLEAN, null);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[NULL, FALSE]) = MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY[FALSE, NULL])", BOOLEAN, null);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[TRUE, NULL]) = MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY[TRUE, NULL])", BOOLEAN, null);
}

/**
 * Map inequality ({@code !=}): mirror of {@link #testEquals()} with negated
 * expectations; NULL-valued comparisons still yield NULL.
 */
@Test
public void testNotEquals()
        throws Exception
{
    // single item
    assertFunction("MAP(ARRAY[1], ARRAY[2]) != MAP(ARRAY[1], ARRAY[2])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1], ARRAY[2]) != MAP(ARRAY[1], ARRAY[4])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[3], ARRAY[1]) != MAP(ARRAY[2], ARRAY[1])", BOOLEAN, true);

    // multiple items
    assertFunction("MAP(ARRAY[1], ARRAY[2]) != MAP(ARRAY[1, 3], ARRAY[2, 4])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) != MAP(ARRAY[1], ARRAY[2])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) != MAP(ARRAY[3, 1], ARRAY[4, 2])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1, 3], ARRAY[2, 4]) != MAP(ARRAY[3, 1], ARRAY[2, 4])", BOOLEAN, true);
    assertFunction("MAP(ARRAY['1', '3'], ARRAY[2.0, 4.0]) != MAP(ARRAY['3', '1'], ARRAY[4.0, 2.0])", BOOLEAN, false);
    assertFunction("MAP(ARRAY['1', '3'], ARRAY[2.0, 4.0]) != MAP(ARRAY['3', '1'], ARRAY[2.0, 4.0])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[TRUE, FALSE], ARRAY['2', '4']) != MAP(ARRAY[FALSE, TRUE], ARRAY['4', '2'])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[TRUE, FALSE], ARRAY['2', '4']) != MAP(ARRAY[FALSE, TRUE], ARRAY['2', '4'])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[TRUE, FALSE]) != MAP(ARRAY[3.0, 1.0], ARRAY[FALSE, TRUE])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[TRUE, FALSE]) != MAP(ARRAY[3.0, 1.0], ARRAY[TRUE, FALSE])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[from_unixtime(1), from_unixtime(100)]) != MAP(ARRAY[3.0, 1.0], ARRAY[from_unixtime(100), from_unixtime(1)])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[1.0, 3.0], ARRAY[from_unixtime(1), from_unixtime(100)]) != MAP(ARRAY[3.0, 1.0], ARRAY[from_unixtime(1), from_unixtime(100)])", BOOLEAN, true);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY['kittens','puppies']) != MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY['puppies', 'kittens'])", BOOLEAN, false);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY['kittens','puppies']) != MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY['kittens', 'puppies'])", BOOLEAN, true);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]]) != MAP(ARRAY['kittens','puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]])", BOOLEAN, false);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[ARRAY[1, 2], ARRAY[3]]) != MAP(ARRAY['kittens','puppies'], ARRAY[ARRAY[3], ARRAY[1, 2]])", BOOLEAN, true);

    // nulls
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, 3]) != MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, 2])", BOOLEAN, null);
    assertFunction("MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, NULL]) != MAP(ARRAY['kittens', 'puppies'], ARRAY[NULL, NULL])", BOOLEAN, null);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[NULL, FALSE]) != MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY[FALSE, NULL])", BOOLEAN, null);
    assertFunction("MAP(ARRAY[from_unixtime(1), from_unixtime(100)], ARRAY[TRUE, NULL]) != MAP(ARRAY[from_unixtime(100), from_unixtime(1)], ARRAY[TRUE, NULL])", BOOLEAN, null);
}

/**
 * MAP_CONCAT: the second map's entries win on key collision (every expected
 * value below takes the 10/20/30-style values from the right-hand map for
 * shared keys), keys only in either operand are kept, and concatenating with
 * an empty map is a no-op.
 */
@Test
public void testMapConcat()
        throws Exception
{
    assertFunction("MAP_CONCAT(MAP (ARRAY [TRUE], ARRAY [1]), MAP (CAST(ARRAY [] AS ARRAY(BOOLEAN)), CAST(ARRAY [] AS ARRAY(INTEGER))))", new MapType(BOOLEAN, INTEGER), ImmutableMap.of(true, 1));
    // <BOOLEAN, INTEGER> Tests
    assertFunction("MAP_CONCAT(MAP (ARRAY [TRUE], ARRAY [1]), MAP (ARRAY [TRUE, FALSE], ARRAY [10, 20]))", new MapType(BOOLEAN, INTEGER), ImmutableMap.of(true, 10, false, 20));
    assertFunction("MAP_CONCAT(MAP (ARRAY [TRUE, FALSE], ARRAY [1, 2]), MAP (ARRAY [TRUE, FALSE], ARRAY [10, 20]))", new MapType(BOOLEAN, INTEGER), ImmutableMap.of(true, 10, false, 20));
    assertFunction("MAP_CONCAT(MAP (ARRAY [TRUE, FALSE], ARRAY [1, 2]), MAP (ARRAY [TRUE], ARRAY [10]))", new MapType(BOOLEAN, INTEGER), ImmutableMap.of(true, 10, false, 2));
    // <VARCHAR, INTEGER> Tests
    assertFunction("MAP_CONCAT(MAP (ARRAY ['1', '2', '3'], ARRAY [1, 2, 3]), MAP (ARRAY ['1', '2', '3', '4'], ARRAY [10, 20, 30, 40]))", new MapType(createVarcharType(1), INTEGER), ImmutableMap.of("1", 10, "2", 20, "3", 30, "4", 40));
    assertFunction("MAP_CONCAT(MAP (ARRAY ['1', '2', '3', '4'], ARRAY [1, 2, 3, 4]), MAP (ARRAY ['1', '2', '3', '4'], ARRAY [10, 20, 30, 40]))", new MapType(createVarcharType(1), INTEGER), ImmutableMap.of("1", 10, "2", 20, "3", 30, "4", 40));
    assertFunction("MAP_CONCAT(MAP (ARRAY ['1', '2', '3', '4'], ARRAY [1, 2, 3, 4]), MAP (ARRAY ['1', '2', '3'], ARRAY [10, 20, 30]))", new MapType(createVarcharType(1), INTEGER), ImmutableMap.of("1", 10, "2", 20, "3", 30, "4", 4));
    // <INTEGER, ARRAY<DOUBLE>> Tests
    assertFunction("MAP_CONCAT(MAP (ARRAY [1, 2, 3], ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0]]), MAP (ARRAY [1, 2, 3, 4], ARRAY [ARRAY [10.0], ARRAY [20.0], ARRAY [30.0], ARRAY [40.0]]))", new MapType(INTEGER, new ArrayType(DOUBLE)), ImmutableMap.of(1, ImmutableList.of(10.0), 2, ImmutableList.of(20.0), 3, ImmutableList.of(30.0), 4, ImmutableList.of(40.0)));
    assertFunction("MAP_CONCAT(MAP (ARRAY [1, 2, 3, 4], ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0], ARRAY [4.0]]), MAP (ARRAY [1, 2, 3, 4], ARRAY [ARRAY [10.0], ARRAY [20.0], ARRAY [30.0], ARRAY [40.0]]))", new MapType(INTEGER, new ArrayType(DOUBLE)), ImmutableMap.of(1, ImmutableList.of(10.0), 2, ImmutableList.of(20.0), 3, ImmutableList.of(30.0), 4, ImmutableList.of(40.0)));
    assertFunction("MAP_CONCAT(MAP (ARRAY [1, 2, 3, 4], ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0], ARRAY [4.0]]), MAP (ARRAY [1, 2, 3], ARRAY [ARRAY [10.0], ARRAY [20.0], ARRAY [30.0]]))", new MapType(INTEGER, new ArrayType(DOUBLE)), ImmutableMap.of(1, ImmutableList.of(10.0), 2, ImmutableList.of(20.0), 3, ImmutableList.of(30.0), 4, ImmutableList.of(4.0)));
    // <ARRAY<DOUBLE>, VARCHAR> Tests
    assertFunction(
            "MAP_CONCAT(MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0]], ARRAY ['1', '2', '3']), MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0], ARRAY [4.0]], ARRAY ['10', '20', '30', '40']))",
            new MapType(new ArrayType(DOUBLE), createVarcharType(2)),
            ImmutableMap.of(ImmutableList.of(1.0), "10", ImmutableList.of(2.0), "20", ImmutableList.of(3.0), "30", ImmutableList.of(4.0), "40"));
    assertFunction(
            "MAP_CONCAT(MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0]], ARRAY ['1', '2', '3']), MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0], ARRAY [4.0]], ARRAY ['10', '20', '30', '40']))",
            new MapType(new ArrayType(DOUBLE), createVarcharType(2)),
            ImmutableMap.of(ImmutableList.of(1.0), "10", ImmutableList.of(2.0), "20", ImmutableList.of(3.0), "30", ImmutableList.of(4.0), "40"));
    assertFunction("MAP_CONCAT(MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0], ARRAY [4.0]], ARRAY ['1', '2', '3', '4']), MAP (ARRAY [ARRAY [1.0], ARRAY [2.0], ARRAY [3.0]], ARRAY ['10', '20', '30']))", new MapType(new ArrayType(DOUBLE), createVarcharType(2)), ImmutableMap.of(ImmutableList.of(1.0), "10", ImmutableList.of(2.0), "20", ImmutableList.of(3.0), "30", ImmutableList.of(4.0), "4"));
}

/**
 * CAST(map AS map): element-wise coercion of keys and values (boolean→bigint,
 * int→boolean, nested arrays/maps, date/timestamp→varchar), and rejection
 * when key coercion collapses distinct keys ("duplicate keys").
 */
@Test
public void testMapToMapCast()
{
    assertFunction("CAST(MAP(ARRAY['1', '100'], ARRAY[true, false]) AS MAP<varchar,bigint>)", new MapType(VARCHAR, BIGINT), ImmutableMap.of("1", 1L, "100", 0L));
    assertFunction("CAST(MAP(ARRAY[1,2], ARRAY[1,2]) AS MAP<bigint, boolean>)", new MapType(BIGINT, BOOLEAN), ImmutableMap.of(1L, true, 2L, true));
    assertFunction("CAST(MAP(ARRAY[1,2], ARRAY[array[1],array[2]]) AS MAP<bigint, array<boolean>>)", new MapType(BIGINT, new ArrayType(BOOLEAN)), ImmutableMap.of(1L, ImmutableList.of(true), 2L, ImmutableList.of(true)));
    assertFunction("CAST(MAP(ARRAY[1], ARRAY[MAP(ARRAY[1.0], ARRAY[false])]) AS MAP<varchar, MAP(bigint,bigint)>)", new MapType(VARCHAR, new MapType(BIGINT, BIGINT)), ImmutableMap.of("1", ImmutableMap.of(1L, 0L)));
    assertFunction("CAST(MAP(ARRAY[1,2], ARRAY[DATE '2016-01-02', DATE '2016-02-03']) AS MAP(bigint, varchar))", new MapType(BIGINT, VARCHAR), ImmutableMap.of(1L, "2016-01-02", 2L, "2016-02-03"));
    assertFunction("CAST(MAP(ARRAY[1,2], ARRAY[TIMESTAMP '2016-01-02 01:02:03', TIMESTAMP '2016-02-03 03:04:05']) AS MAP(bigint, varchar))", new MapType(BIGINT, VARCHAR), ImmutableMap.of(1L, "2016-01-02 01:02:03.000", 2L, "2016-02-03 03:04:05.000"));
    // 6 and 9 both coerce to boolean TRUE, so the cast must fail.
    assertInvalidCast("CAST(MAP(ARRAY[1, 2], ARRAY[6, 9]) AS MAP<boolean, bigint>)", "duplicate keys");
}
}
/*
 * Copyright 2016 Huawei Technologies Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openo.nfvo.resmanagement.service.entity;

import java.io.Serializable;

import org.apache.commons.lang3.StringUtils;
import org.openo.nfvo.resmanagement.common.util.JsonUtil;

import net.sf.json.JSONObject;

/**
 * Serializable bean describing the status of a VNF instance/job.
 * <p>
 * All fields are plain strings mirroring the JSON fields read in
 * {@link #toEntity(JSONObject)} and written back in {@link #toString()};
 * no parsing or validation is performed here.
 *
 * @author l00345485
 * @date 2016-10-29
 */
public class VnfStatusEntity implements Serializable {

    /** Mirrors JSON field "vnfInstanceId". */
    private String vnfInstanceId;

    /** Mirrors JSON field "jobId". */
    private String jobId;

    /** Mirrors JSON field "nsId". */
    private String nsId;

    /** Mirrors JSON field "vnfmId". */
    private String vnfmId;

    /** Mirrors JSON field "responseDescriptor". */
    private String responseDescriptor;

    /** Mirrors JSON field "status". */
    private String status;

    /** Mirrors JSON field "progress". */
    private String progress;

    /** Mirrors JSON field "statusDescription". */
    private String statusDescription;

    /** Mirrors JSON field "errorCode". */
    private String errorCode;

    /** Mirrors JSON field "responseId". */
    private String responseId;

    /** Mirrors JSON field "responseHistoryList". */
    private String responseHistoryList;

    /** Mirrors JSON field "addVm". */
    private String addVm;

    /** Mirrors JSON field "delVm". */
    private String delVm;

    private static final long serialVersionUID = 1L;

    public String getVnfInstanceId() {
        return vnfInstanceId;
    }

    public void setVnfInstanceId(String vnfInstanceId) {
        this.vnfInstanceId = vnfInstanceId;
    }

    public String getJobId() {
        return jobId;
    }

    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    public String getNsId() {
        return nsId;
    }

    public void setNsId(String nsId) {
        this.nsId = nsId;
    }

    public String getVnfmId() {
        return vnfmId;
    }

    public void setVnfmId(String vnfmId) {
        this.vnfmId = vnfmId;
    }

    public String getResponseDescriptor() {
        return responseDescriptor;
    }

    public void setResponseDescriptor(String responseDescriptor) {
        this.responseDescriptor = responseDescriptor;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getProgress() {
        return progress;
    }

    public void setProgress(String progress) {
        this.progress = progress;
    }

    public String getStatusDescription() {
        return statusDescription;
    }

    public void setStatusDescription(String statusDescription) {
        this.statusDescription = statusDescription;
    }

    public String getErrorCode() {
        return errorCode;
    }

    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }

    public String getResponseId() {
        return responseId;
    }

    public void setResponseId(String responseId) {
        this.responseId = responseId;
    }

    public String getResponseHistoryList() {
        return responseHistoryList;
    }

    public void setResponseHistoryList(String responseHistoryList) {
        this.responseHistoryList = responseHistoryList;
    }

    public String getAddVm() {
        return addVm;
    }

    public void setAddVm(String addVm) {
        this.addVm = addVm;
    }

    public String getDelVm() {
        return delVm;
    }

    public void setDelVm(String delVm) {
        this.delVm = delVm;
    }

    /**
     * Builds an entity from a JSON object, copying each field verbatim via
     * {@code JsonUtil.getJsonFieldStr}. Fields absent from the input are left
     * as whatever that helper returns for missing keys (see JsonUtil).
     *
     * @param jsonObject source JSON, field names as documented on the fields above
     * @return a new, fully populated {@link VnfStatusEntity}
     */
    public static VnfStatusEntity toEntity(JSONObject jsonObject) {
        VnfStatusEntity vnfStatusEntity = new VnfStatusEntity();
        vnfStatusEntity.setVnfInstanceId(JsonUtil.getJsonFieldStr(jsonObject, "vnfInstanceId"));
        vnfStatusEntity.setJobId(JsonUtil.getJsonFieldStr(jsonObject, "jobId"));
        vnfStatusEntity.setNsId(JsonUtil.getJsonFieldStr(jsonObject, "nsId"));
        vnfStatusEntity.setVnfmId(JsonUtil.getJsonFieldStr(jsonObject, "vnfmId"));
        vnfStatusEntity.setResponseDescriptor(JsonUtil.getJsonFieldStr(jsonObject, "responseDescriptor"));
        vnfStatusEntity.setStatus(JsonUtil.getJsonFieldStr(jsonObject, "status"));
        vnfStatusEntity.setProgress(JsonUtil.getJsonFieldStr(jsonObject, "progress"));
        vnfStatusEntity.setStatusDescription(JsonUtil.getJsonFieldStr(jsonObject, "statusDescription"));
        vnfStatusEntity.setErrorCode(JsonUtil.getJsonFieldStr(jsonObject, "errorCode"));
        vnfStatusEntity.setResponseId(JsonUtil.getJsonFieldStr(jsonObject, "responseId"));
        vnfStatusEntity.setResponseHistoryList(JsonUtil.getJsonFieldStr(jsonObject, "responseHistoryList"));
        vnfStatusEntity.setAddVm(JsonUtil.getJsonFieldStr(jsonObject, "addVm"));
        vnfStatusEntity.setDelVm(JsonUtil.getJsonFieldStr(jsonObject, "delVm"));
        return vnfStatusEntity;
    }

    /**
     * Renders the entity as a JSON string — the inverse of
     * {@link #toEntity(JSONObject)}. Null fields are emitted as empty strings
     * ({@code StringUtils.trimToEmpty}), so every key is always present in the
     * output.
     */
    @Override
    public String toString() {
        JSONObject vnfStatusResJson = new JSONObject();
        vnfStatusResJson.put("vnfInstanceId", StringUtils.trimToEmpty(this.getVnfInstanceId()));
        vnfStatusResJson.put("jobId", StringUtils.trimToEmpty(this.getJobId()));
        vnfStatusResJson.put("nsId", StringUtils.trimToEmpty(this.getNsId()));
        vnfStatusResJson.put("vnfmId", StringUtils.trimToEmpty(this.getVnfmId()));
        vnfStatusResJson.put("responseDescriptor", StringUtils.trimToEmpty(this.getResponseDescriptor()));
        vnfStatusResJson.put("status", StringUtils.trimToEmpty(this.getStatus()));
        vnfStatusResJson.put("progress", StringUtils.trimToEmpty(this.getProgress()));
        vnfStatusResJson.put("statusDescription", StringUtils.trimToEmpty(this.getStatusDescription()));
        vnfStatusResJson.put("errorCode", StringUtils.trimToEmpty(this.getErrorCode()));
        vnfStatusResJson.put("responseId", StringUtils.trimToEmpty(this.getResponseId()));
        vnfStatusResJson.put("responseHistoryList", StringUtils.trimToEmpty(this.getResponseHistoryList()));
        vnfStatusResJson.put("addVm", StringUtils.trimToEmpty(this.getAddVm()));
        vnfStatusResJson.put("delVm", StringUtils.trimToEmpty(this.getDelVm()));
        return vnfStatusResJson.toString();
    }
}
/* * Copyright (C) 2014-2022 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.photon.security.login; import java.io.IOException; import java.io.ObjectInputStream; import java.time.Duration; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import javax.annotation.concurrent.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.helger.commons.ValueEnforcer; import com.helger.commons.annotation.Nonempty; import com.helger.commons.annotation.ReturnsMutableCopy; import com.helger.commons.annotation.ReturnsMutableObject; import com.helger.commons.annotation.UsedViaReflection; import com.helger.commons.callback.CallbackList; import com.helger.commons.collection.impl.CommonsHashMap; import com.helger.commons.collection.impl.ICommonsCollection; import com.helger.commons.collection.impl.ICommonsMap; import com.helger.commons.collection.impl.ICommonsSet; import com.helger.commons.state.EChange; import com.helger.commons.string.ToStringGenerator; import com.helger.photon.audit.AuditHelper; import com.helger.photon.security.lock.ObjectLockManager; import com.helger.photon.security.mgr.PhotonSecurityManager; import com.helger.photon.security.password.GlobalPasswordSettings; import com.helger.photon.security.user.IUser; import com.helger.photon.security.user.IUserManager; import 
com.helger.photon.security.util.SecurityHelper;
import com.helger.scope.IScope;
import com.helger.scope.ISessionScope;
import com.helger.scope.mgr.ScopeManager;
import com.helger.scope.singleton.AbstractGlobalSingleton;
import com.helger.security.authentication.subject.user.ICurrentUserIDProvider;
import com.helger.web.scope.ISessionWebScope;
import com.helger.web.scope.session.ISessionWebScopeActivationHandler;
import com.helger.web.scope.singleton.AbstractSessionWebSingleton;

/**
 * This class manages all logged-in users.
 *
 * @author Philip Helger
 */
@ThreadSafe
public final class LoggedInUserManager extends AbstractGlobalSingleton implements ICurrentUserIDProvider
{
  /**
   * This class manages the user ID of the current session. This is an internal
   * class and should not be used from the outside!
   *
   * @author Philip Helger
   */
  public static final class InternalSessionUserHolder extends AbstractSessionWebSingleton implements
                                                      ISessionWebScopeActivationHandler
  {
    // Resolved user object; non-null only while a user is logged into this session
    private IUser m_aUser;
    // ID of m_aUser; kept separately because it is what gets serialized
    private String m_sUserID;
    // Manager that performed the login; used to log out when the session dies
    private LoggedInUserManager m_aOwningMgr;

    @Deprecated
    @UsedViaReflection
    public InternalSessionUserHolder ()
    {}

    /**
     * @return The instance of the current session. If none exists, an instance
     *         is created. Never <code>null</code>.
     */
    @Nonnull
    private static InternalSessionUserHolder _getInstance ()
    {
      return getSessionSingleton (InternalSessionUserHolder.class);
    }

    /**
     * @return The instance of the current session. If none exists,
     *         <code>null</code> is returned.
     */
    @Nullable
    private static InternalSessionUserHolder _getInstanceIfInstantiated ()
    {
      return getSessionSingletonIfInstantiated (InternalSessionUserHolder.class);
    }

    // Lookup variant bound to an explicit session scope (used on logout, where the
    // session to clean up is not necessarily the current one)
    @Nullable
    private static InternalSessionUserHolder _getInstanceIfInstantiatedInScope (@Nullable final ISessionScope aScope)
    {
      return getSingletonIfInstantiated (aScope, InternalSessionUserHolder.class);
    }

    // Custom deserialization: only the user ID is persisted, so re-resolve the
    // IUser object and the owning manager after the default read.
    private void readObject (@Nonnull final ObjectInputStream aOIS) throws IOException, ClassNotFoundException
    {
      aOIS.defaultReadObject ();

      // Resolve user ID
      if (m_sUserID != null)
      {
        m_aUser = PhotonSecurityManager.getUserMgr ().getUserOfID (m_sUserID);
        if (m_aUser == null)
          throw new IllegalStateException ("Failed to resolve user with ID '" + m_sUserID + "'");
      }

      // Resolve manager
      m_aOwningMgr = LoggedInUserManager.getInstance ();
    }

    // Session passivation/activation support: re-register the deserialized user
    // as "logged in" in the owning manager.
    public void onSessionDidActivate (@Nonnull final ISessionWebScope aSessionScope)
    {
      // Finally remember that the user is logged in
      m_aOwningMgr.internalSessionActivateUser (m_aUser, aSessionScope);
    }

    private boolean _hasUser ()
    {
      return m_aUser != null;
    }

    @Nullable
    private String _getUserID ()
    {
      return m_sUserID;
    }

    // Binds this session to the passed user. Throws IllegalStateException if a
    // user is already bound - a session can hold at most one user.
    private void _setUser (@Nonnull final LoggedInUserManager aOwningMgr, @Nonnull final IUser aUser)
    {
      ValueEnforcer.notNull (aOwningMgr, "OwningMgr");
      ValueEnforcer.notNull (aUser, "User");
      if (m_aUser != null)
        throw new IllegalStateException ("Session already has a user!");

      m_aOwningMgr = aOwningMgr;
      m_aUser = aUser;
      m_sUserID = aUser.getID ();
    }

    private void _reset ()
    {
      // Reset to avoid access while or after logout
      m_aUser = null;
      m_sUserID = null;
      m_aOwningMgr = null;
    }

    @Override
    protected void onDestroy (@Nonnull final IScope aScopeInDestruction)
    {
      // Called when the session is destroyed
      // -> Ensure the user is logged out!

      // Remember stuff (the fields are cleared by _reset before logoutUser runs)
      final LoggedInUserManager aOwningMgr = m_aOwningMgr;
      final String sUserID = m_sUserID;

      _reset ();

      // Finally logout the user
      if (aOwningMgr != null)
        aOwningMgr.logoutUser (sUserID);
    }

    @Override
    public String toString ()
    {
      return ToStringGenerator.getDerived (super.toString ()).append ("userID", m_sUserID).getToString ();
    }
  }

  /**
   * Special logout callback that is executed every time a user logs out. It
   * removes all objects from the {@link ObjectLockManager}.
   *
   * @author Philip Helger
   */
  static final class InternalUserLogoutCallbackUnlockAllObjects implements IUserLogoutCallback
  {
    @Override
    public void onUserLogout (@Nonnull final LoginInfo aInfo)
    {
      // Best effort: only unlock if the lock manager was ever instantiated
      final ObjectLockManager aOLMgr = ObjectLockManager.getInstanceIfInstantiated ();
      if (aOLMgr != null)
        aOLMgr.getDefaultLockMgr ().unlockAllObjectsOfUser (aInfo.getUserID ());
    }
  }

  public static final boolean DEFAULT_LOGOUT_ALREADY_LOGGED_IN_USER = false;
  public static final boolean DEFAULT_ANONYMOUS_LOGGING = false;

  private static final Logger LOGGER = LoggerFactory.getLogger (LoggedInUserManager.class);

  // Set of logged in user IDs
  @GuardedBy ("m_aRWLock")
  private final ICommonsMap <String, LoginInfo> m_aLoggedInUsers = new CommonsHashMap <> ();
  private final CallbackList <IUserLoginCallback> m_aUserLoginCallbacks = new CallbackList <> ();
  private final CallbackList <IUserLogoutCallback> m_aUserLogoutCallbacks = new CallbackList <> ();
  // Behavior flags, both guarded by m_aRWLock (inherited from the singleton base class)
  private boolean m_bLogoutAlreadyLoggedInUser = DEFAULT_LOGOUT_ALREADY_LOGGED_IN_USER;
  private boolean m_bAnonymousLogging = DEFAULT_ANONYMOUS_LOGGING;

  @Deprecated
  @UsedViaReflection
  public LoggedInUserManager ()
  {
    // Ensure that all objects of a user are unlocked upon logout
    m_aUserLogoutCallbacks.add (new InternalUserLogoutCallbackUnlockAllObjects ());
  }

  /**
   * @return The global instance of this class. Never <code>null</code>.
   */
  @Nonnull
  public static LoggedInUserManager getInstance ()
  {
    return getGlobalSingleton (LoggedInUserManager.class);
  }

  /**
   * @return The user login callback list. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableObject
  public CallbackList <IUserLoginCallback> userLoginCallbacks ()
  {
    return m_aUserLoginCallbacks;
  }

  /**
   * @return The user logout callback list. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableObject
  public CallbackList <IUserLogoutCallback> userLogoutCallbacks ()
  {
    return m_aUserLogoutCallbacks;
  }

  /**
   * @return <code>true</code> if a new login of a user, destroys any previously
   *         present session, <code>false</code> if a login should fail, if that
   *         user is already logged in.
   */
  public boolean isLogoutAlreadyLoggedInUser ()
  {
    return m_aRWLock.readLockedBoolean ( () -> m_bLogoutAlreadyLoggedInUser);
  }

  public void setLogoutAlreadyLoggedInUser (final boolean bLogoutAlreadyLoggedInUser)
  {
    m_aRWLock.writeLocked ( () -> m_bLogoutAlreadyLoggedInUser = bLogoutAlreadyLoggedInUser);
  }

  // If enabled, log messages refer to "a user" instead of the concrete user ID
  public boolean isAnonymousLogging ()
  {
    return m_aRWLock.readLockedBoolean ( () -> m_bAnonymousLogging);
  }

  public void setAnonymousLogging (final boolean bAnonymousLogging)
  {
    m_aRWLock.writeLocked ( () -> m_bAnonymousLogging = bAnonymousLogging);
  }

  // Helper to build the user reference used in log messages, honoring the
  // anonymous-logging flag.
  @Nonnull
  private String _getUserIDLogText (@Nonnull final String sUserID)
  {
    if (isAnonymousLogging ())
      return "a user";
    return "user '" + sUserID + "'";
  }

  // Notifies all login callbacks about a failed login and passes the result through.
  @Nonnull
  private ELoginResult _onLoginError (@Nonnull @Nonempty final String sUserID, @Nonnull final ELoginResult eLoginResult)
  {
    m_aUserLoginCallbacks.forEach (aCB -> aCB.onUserLoginError (sUserID, eLoginResult));
    return eLoginResult;
  }

  // Called from InternalSessionUserHolder.onSessionDidActivate to re-register a
  // user that was restored from a passivated session.
  void internalSessionActivateUser (@Nonnull final IUser aUser, @Nonnull final ISessionScope aSessionScope)
  {
    ValueEnforcer.notNull (aUser, "User");
    ValueEnforcer.notNull (aSessionScope, "SessionScope");

    final LoginInfo aInfo = new LoginInfo (aUser, aSessionScope);
    m_aRWLock.writeLocked ( () -> m_aLoggedInUsers.put (aUser.getID (), aInfo));
  }

  /**
   * Login the passed user without much ado.
   *
   * @param sLoginName
   *        Login name of the user to log-in. May be <code>null</code>.
   * @param sPlainTextPassword
   *        Plain text password to use. May be <code>null</code>.
   * @return Never <code>null</code> login status.
   */
  @Nonnull
  public ELoginResult loginUser (@Nullable final String sLoginName, @Nullable final String sPlainTextPassword)
  {
    return loginUser (sLoginName, sPlainTextPassword, (Iterable <String>) null);
  }

  /**
   * Login the passed user and require a set of certain roles the user needs to
   * have to log in here.
   *
   * @param sLoginName
   *        Login name of the user to log-in. May be <code>null</code>.
   * @param sPlainTextPassword
   *        Plain text password to use. May be <code>null</code>.
   * @param aRequiredRoleIDs
   *        A set of required role IDs, the user needs to have. May be
   *        <code>null</code>.
   * @return Never <code>null</code> login status.
   */
  @Nonnull
  public ELoginResult loginUser (@Nullable final String sLoginName,
                                 @Nullable final String sPlainTextPassword,
                                 @Nullable final Iterable <String> aRequiredRoleIDs)
  {
    // Try to resolve the user
    final IUser aUser = PhotonSecurityManager.getUserMgr ().getUserOfLoginName (sLoginName);
    if (aUser == null)
    {
      AuditHelper.onAuditExecuteFailure ("login", sLoginName, "no-such-loginname");
      return ELoginResult.USER_NOT_EXISTING;
    }
    return loginUser (aUser, sPlainTextPassword, aRequiredRoleIDs);
  }

  /**
   * Login the passed user and require a set of certain roles the user needs to
   * have to log in here.
   *
   * @param aUser
   *        The user to log-in. May be <code>null</code>. When the user is
   *        <code>null</code> the login must fail.
   * @param sPlainTextPassword
   *        Plain text password to use. May be <code>null</code>.
   * @param aRequiredRoleIDs
   *        A set of required role IDs, the user needs to have. May be
   *        <code>null</code>.
   * @return Never <code>null</code> login status.
   */
  @Nonnull
  public ELoginResult loginUser (@Nullable final IUser aUser,
                                 @Nullable final String sPlainTextPassword,
                                 @Nullable final Iterable <String> aRequiredRoleIDs)
  {
    if (aUser == null)
      return ELoginResult.USER_NOT_EXISTING;

    final String sUserID = aUser.getID ();

    // Deleted user?
    if (aUser.isDeleted ())
    {
      AuditHelper.onAuditExecuteFailure ("login", sUserID, "user-is-deleted");
      return _onLoginError (sUserID, ELoginResult.USER_IS_DELETED);
    }

    // Disabled user?
    if (aUser.isDisabled ())
    {
      AuditHelper.onAuditExecuteFailure ("login", sUserID, "user-is-disabled");
      return _onLoginError (sUserID, ELoginResult.USER_IS_DISABLED);
    }

    // Check the password
    final IUserManager aUserMgr = PhotonSecurityManager.getUserMgr ();
    if (!aUserMgr.areUserIDAndPasswordValid (sUserID, sPlainTextPassword))
    {
      AuditHelper.onAuditExecuteFailure ("login", sUserID, "invalid-password");
      return _onLoginError (sUserID, ELoginResult.INVALID_PASSWORD);
    }

    // Are all roles present?
    if (!SecurityHelper.hasUserAllRoles (sUserID, aRequiredRoleIDs))
    {
      AuditHelper.onAuditExecuteFailure ("login", sUserID, "user-is-missing-required-roles", aRequiredRoleIDs);
      return _onLoginError (sUserID, ELoginResult.USER_IS_MISSING_ROLE);
    }

    // Check if the password hash needs to be updated (only possible here because
    // the plain-text password is available)
    final String sExistingPasswordHashAlgorithmName = aUser.getPasswordHash ().getAlgorithmName ();
    final String sDefaultPasswordHashAlgorithmName = GlobalPasswordSettings.getPasswordHashCreatorManager ()
                                                                           .getDefaultPasswordHashCreatorAlgorithmName ();
    if (!sExistingPasswordHashAlgorithmName.equals (sDefaultPasswordHashAlgorithmName))
    {
      // This implicitly implies using the default hash creator algorithm
      // This automatically saves the file
      aUserMgr.setUserPassword (sUserID, sPlainTextPassword);
      if (LOGGER.isInfoEnabled ())
        LOGGER.info ("Updated password hash of " +
                     _getUserIDLogText (sUserID) +
                     " from algorithm '" +
                     sExistingPasswordHashAlgorithmName +
                     "' to '" +
                     sDefaultPasswordHashAlgorithmName +
                     "'");
    }

    boolean bLoggedOutUser = false;
    LoginInfo aInfo;

    // Critical section: double-login check, session binding and registration
    // must happen atomically
    m_aRWLock.writeLock ().lock ();
    try
    {
      if (m_aLoggedInUsers.containsKey (sUserID))
      {
        // The user is already logged in
        if (isLogoutAlreadyLoggedInUser ())
        {
          // Explicitly log out
          logoutUser (sUserID);

          // Just a short check
          if (m_aLoggedInUsers.containsKey (sUserID))
            throw new IllegalStateException ("Failed to logout '" + sUserID + "'");

          AuditHelper.onAuditExecuteSuccess ("logout-in-login", sUserID);
          bLoggedOutUser = true;
        }
        else
        {
          // Error: user already logged in
          AuditHelper.onAuditExecuteFailure ("login", sUserID, "user-already-logged-in");
          return _onLoginError (sUserID, ELoginResult.USER_ALREADY_LOGGED_IN);
        }
      }

      // Update user in session
      final InternalSessionUserHolder aSUH = InternalSessionUserHolder._getInstance ();
      if (aSUH._hasUser ())
      {
        // This session already has a user
        if (LOGGER.isWarnEnabled ())
          LOGGER.warn ("The session user holder already has the user ID '" +
                       aSUH._getUserID () +
                       "' so the new ID '" +
                       sUserID +
                       "' will not be set!");
        AuditHelper.onAuditExecuteFailure ("login", sUserID, "session-already-has-user");
        return _onLoginError (sUserID, ELoginResult.SESSION_ALREADY_HAS_USER);
      }

      aInfo = new LoginInfo (aUser, ScopeManager.getSessionScope ());
      m_aLoggedInUsers.put (sUserID, aInfo);
      aSUH._setUser (this, aUser);
    }
    finally
    {
      m_aRWLock.writeLock ().unlock ();
    }

    if (LOGGER.isInfoEnabled ())
      LOGGER.info ("Logged in " +
                   _getUserIDLogText (sUserID) +
                   (isAnonymousLogging () ? "" : " with login name '" + aUser.getLoginName () + "'"));
    AuditHelper.onAuditExecuteSuccess ("login-user", sUserID, aUser.getLoginName ());

    // Execute callback as the very last action
    m_aUserLoginCallbacks.forEach (aCB -> aCB.onUserLogin (aInfo));

    return bLoggedOutUser ? ELoginResult.SUCCESS_WITH_LOGOUT : ELoginResult.SUCCESS;
  }

  /**
   * Manually log out the specified user
   *
   * @param sUserID
   *        The user ID to log out
   * @return {@link EChange} if something changed
   */
  @Nonnull
  public EChange logoutUser (@Nullable final String sUserID)
  {
    LoginInfo aInfo;

    m_aRWLock.writeLock ().lock ();
    try
    {
      aInfo = m_aLoggedInUsers.remove (sUserID);
      if (aInfo == null)
      {
        AuditHelper.onAuditExecuteSuccess ("logout", sUserID, "user-not-logged-in");
        return EChange.UNCHANGED;
      }

      // Ensure that the SessionUser is empty. This is only relevant if user is
      // manually logged out without destructing the underlying session
      final InternalSessionUserHolder aSUH = InternalSessionUserHolder._getInstanceIfInstantiatedInScope (aInfo.getSessionScope ());
      if (aSUH != null)
        aSUH._reset ();

      // Set logout time - in case somebody has a strong reference to the
      // LoginInfo object
      aInfo.setLogoutDTNow ();
    }
    finally
    {
      m_aRWLock.writeLock ().unlock ();
    }

    if (LOGGER.isInfoEnabled ())
      LOGGER.info ("Logged out " +
                   _getUserIDLogText (sUserID) +
                   " after " +
                   Duration.between (aInfo.getLoginDT (), aInfo.getLogoutDT ()).toString ());
    AuditHelper.onAuditExecuteSuccess ("logout", sUserID);

    // Execute callback as the very last action
    m_aUserLogoutCallbacks.forEach (aCB -> aCB.onUserLogout (aInfo));

    return EChange.CHANGED;
  }

  /**
   * Manually log out the current user
   *
   * @return {@link EChange} if something changed
   */
  @Nonnull
  public EChange logoutCurrentUser ()
  {
    return logoutUser (getCurrentUserID ());
  }

  /**
   * Check if the specified user is logged in or not
   *
   * @param sUserID
   *        The user ID to check. May be <code>null</code>.
   * @return <code>true</code> if the user is logged in, <code>false</code>
   *         otherwise.
   */
  public boolean isUserLoggedIn (@Nullable final String sUserID)
  {
    return m_aRWLock.readLockedBoolean ( () -> m_aLoggedInUsers.containsKey (sUserID));
  }

  /**
   * @return A non-<code>null</code> but maybe empty set with all currently
   *         logged in user IDs.
   */
  @Nonnull
  @ReturnsMutableCopy
  public ICommonsSet <String> getAllLoggedInUserIDs ()
  {
    return m_aRWLock.readLockedGet (m_aLoggedInUsers::copyOfKeySet);
  }

  /**
   * Get the login details of the specified user.
   *
   * @param sUserID
   *        The user ID to check. May be <code>null</code>.
   * @return <code>null</code> if the passed user is not logged in.
   */
  @Nullable
  public LoginInfo getLoginInfo (@Nullable final String sUserID)
  {
    return m_aRWLock.readLockedGet ( () -> m_aLoggedInUsers.get (sUserID));
  }

  /**
   * @return A non-<code>null</code> but maybe empty collection with the details
   *         of all currently logged in users.
   */
  @Nonnull
  @ReturnsMutableCopy
  public ICommonsCollection <LoginInfo> getAllLoginInfos ()
  {
    return m_aRWLock.readLockedGet (m_aLoggedInUsers::copyOfValues);
  }

  /**
   * @return The number of currently logged in users. Always &ge; 0.
   */
  @Nonnegative
  public int getLoggedInUserCount ()
  {
    return m_aRWLock.readLockedInt (m_aLoggedInUsers::size);
  }

  /**
   * @return The ID of the user logged in this session or <code>null</code> if
   *         no user is logged in.
   */
  @Nullable
  public String getCurrentUserID ()
  {
    // Deliberately does NOT create a session singleton if none exists yet
    final InternalSessionUserHolder aSUH = InternalSessionUserHolder._getInstanceIfInstantiated ();
    return aSUH == null ? null : aSUH.m_sUserID;
  }

  /**
   * @return <code>true</code> if a user is currently logged into this session,
   *         <code>false</code> otherwise. This is the inverse of
   *         {@link #isNoUserLoggedInInCurrentSession()}.
   */
  public boolean isUserLoggedInInCurrentSession ()
  {
    return getCurrentUserID () != null;
  }

  /**
   * @return <code>true</code> if not user is currently logged into this
   *         session, <code>false</code> if it is. This is the inverse of
   *         {@link #isUserLoggedInInCurrentSession()}.
   */
  public boolean isNoUserLoggedInInCurrentSession ()
  {
    return getCurrentUserID () == null;
  }

  /**
   * @return The user currently logged in this session or <code>null</code> if
   *         no user is logged in.
   */
  @Nullable
  public IUser getCurrentUser ()
  {
    // Same non-creating lookup as getCurrentUserID
    final InternalSessionUserHolder aSUH = InternalSessionUserHolder._getInstanceIfInstantiated ();
    return aSUH == null ? null : aSUH.m_aUser;
  }

  /**
   * @return <code>true</code> if a user is logged in and is administrator
   */
  public boolean isCurrentUserAdministrator ()
  {
    final IUser aUser = getCurrentUser ();
    return aUser != null && aUser.isAdministrator ();
  }

  @Override
  public String toString ()
  {
    return ToStringGenerator.getDerived (super.toString ())
                            .append ("loggedInUsers", m_aLoggedInUsers)
                            .append ("userLoginCallbacks", m_aUserLoginCallbacks)
                            .append ("userLogoutCallbacks", m_aUserLogoutCallbacks)
                            .append ("logoutAlreadyLoggedInUser", m_bLogoutAlreadyLoggedInUser)
                            .getToString ();
  }
}
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.tasks.tab_management; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.chromium.chrome.browser.tasks.tab_management.TabListModel.CardProperties.CARD_ALPHA; import static org.chromium.chrome.browser.tasks.tab_management.TabListModel.CardProperties.CARD_TYPE; import static org.chromium.chrome.browser.tasks.tab_management.TabListModel.CardProperties.ModelType.TAB; import android.graphics.Canvas; import android.view.View; import androidx.recyclerview.widget.GridLayoutManager; import androidx.recyclerview.widget.ItemTouchHelper; import androidx.recyclerview.widget.RecyclerView; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.robolectric.annotation.Config; import org.chromium.base.ContextUtils; import org.chromium.base.test.BaseRobolectricTestRunner; import org.chromium.chrome.browser.feature_engagement.TrackerFactory; import org.chromium.chrome.browser.flags.ChromeFeatureList; import org.chromium.chrome.browser.profiles.Profile; import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.tabmodel.EmptyTabModelFilter; import 
org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModelFilterProvider;
import org.chromium.chrome.browser.tabmodel.TabModelSelectorImpl;
import org.chromium.chrome.browser.tasks.tab_groups.TabGroupModelFilter;
import org.chromium.chrome.browser.tasks.tab_management.TabListCoordinator.TabListMode;
import org.chromium.chrome.test.util.browser.Features;
import org.chromium.components.feature_engagement.EventConstants;
import org.chromium.components.feature_engagement.Tracker;
import org.chromium.ui.modelutil.MVCListAdapter;
import org.chromium.ui.modelutil.PropertyKey;
import org.chromium.ui.modelutil.PropertyModel;
import org.chromium.ui.modelutil.SimpleRecyclerViewAdapter;

import java.util.ArrayList;
import java.util.List;

/**
 * Tests for {@link TabGridItemTouchHelperCallback}.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE,
        instrumentedPackages = {
                "androidx.recyclerview.widget.RecyclerView" // required to mock final
        })
@Features.EnableFeatures({ChromeFeatureList.TAB_GROUPS_ANDROID})
public class TabGridItemTouchHelperCallbackUnitTest {
    @Rule
    public TestRule mProcessor = new Features.JUnitProcessor();

    private static final String TAB1_TITLE = "Tab1";
    private static final String TAB2_TITLE = "Tab2";
    private static final String TAB3_TITLE = "Tab3";
    private static final String TAB4_TITLE = "Tab4";
    private static final int TAB1_ID = 456;
    private static final int TAB2_ID = 789;
    private static final int TAB3_ID = 123;
    private static final int TAB4_ID = 357;
    private static final int POSITION1 = 0;
    private static final int POSITION2 = 1;
    private static final int POSITION3 = 2;
    private static final int POSITION4 = 3;
    // Merge/ungroup distance threshold passed to setupCallback.
    private static final float THRESHOLD = 2f;

    @Mock
    Canvas mCanvas;
    @Mock
    RecyclerView mRecyclerView;
    @Mock
    RecyclerView.Adapter mAdapter;
    @Mock
    TabModel mTabModel;
    @Mock
    TabModelSelectorImpl mTabModelSelector;
    @Mock
    TabModelFilterProvider mTabModelFilterProvider;
    @Mock
    TabListMediator.TabActionListener mTabClosedListener;
    @Mock
    TabGroupModelFilter mTabGroupModelFilter;
    @Mock
    EmptyTabModelFilter mEmptyTabModelFilter;
    @Mock
    TabListMediator.TabGridDialogHandler mTabGridDialogHandler;
    @Mock
    Profile mProfile;
    @Mock
    Tracker mTracker;
    @Mock
    GridLayoutManager mGridLayoutManager;

    private SimpleRecyclerViewAdapter.ViewHolder mMockViewHolder1;
    private SimpleRecyclerViewAdapter.ViewHolder mMockViewHolder2;
    private RecyclerView.ViewHolder mDummyViewHolder1;
    private RecyclerView.ViewHolder mDummyViewHolder2;
    private RecyclerView.ViewHolder mDummyViewHolder3;
    private RecyclerView.ViewHolder mDummyViewHolder4;
    private View mItemView1;
    private View mItemView2;
    private View mItemView3;
    private View mItemView4;
    private TabGridItemTouchHelperCallback mItemTouchHelperCallback;
    private TabListModel mModel;

    // Builds four tabs, their view holders and a 2x2 grid geometry, and wires up
    // the tab model / filter mocks used by every test.
    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);

        Tab tab1 = prepareTab(TAB1_ID, TAB1_TITLE);
        Tab tab2 = prepareTab(TAB2_ID, TAB2_TITLE);
        Tab tab3 = prepareTab(TAB3_ID, TAB3_TITLE);
        Tab tab4 = prepareTab(TAB4_ID, TAB4_TITLE);

        mMockViewHolder1 = prepareMockViewHolder(TAB1_ID, POSITION1);
        mMockViewHolder2 = prepareMockViewHolder(TAB2_ID, POSITION2);

        // Mock four cards in a grid layout. Each card is of width 4 and height 4. Both the side
        // gaps and top gaps between adjacent cards are 1.
        mItemView1 = prepareItemView(0, 0, 4, 4);
        mItemView2 = prepareItemView(5, 0, 9, 4);
        mItemView3 = prepareItemView(0, 5, 4, 9);
        mItemView4 = prepareItemView(5, 5, 9, 9);
        mDummyViewHolder1 = prepareDummyViewHolder(mItemView1);
        mDummyViewHolder2 = prepareDummyViewHolder(mItemView2);
        mDummyViewHolder3 = prepareDummyViewHolder(mItemView3);
        mDummyViewHolder4 = prepareDummyViewHolder(mItemView4);

        List<TabModel> tabModelList = new ArrayList<>();
        tabModelList.add(mTabModel);

        doReturn(mTabModel).when(mTabModelSelector).getCurrentModel();
        doReturn(tabModelList).when(mTabModelSelector).getModels();
        doReturn(mTabModelFilterProvider).when(mTabModelSelector).getTabModelFilterProvider();
        doReturn(mTabGroupModelFilter).when(mTabModelFilterProvider).getCurrentTabModelFilter();
        doReturn(tab1).when(mTabModel).getTabAt(POSITION1);
        doReturn(tab2).when(mTabModel).getTabAt(POSITION2);
        doReturn(tab3).when(mTabModel).getTabAt(POSITION3);
        doReturn(tab4).when(mTabModel).getTabAt(POSITION4);
        doReturn(4).when(mTabModel).getCount();
        doReturn(tab1).when(mTabGroupModelFilter).getTabAt(POSITION1);
        doReturn(tab2).when(mTabGroupModelFilter).getTabAt(POSITION2);
        doReturn(tab3).when(mTabGroupModelFilter).getTabAt(POSITION3);
        doReturn(tab4).when(mTabGroupModelFilter).getTabAt(POSITION4);

        setupRecyclerView();

        mModel = new TabListModel();
        setupItemTouchHelperCallback(false);

        TrackerFactory.setTrackerForTests(mTracker);
    }

    // Wires the mocked RecyclerView so child lookup by position returns the four
    // prepared item views / view holders.
    private void setupRecyclerView() {
        doReturn(mAdapter).when(mRecyclerView).getAdapter();
        doReturn(mGridLayoutManager).when(mRecyclerView).getLayoutManager();
        doReturn(12).when(mRecyclerView).getBottom();
        doReturn(4).when(mRecyclerView).getChildCount();
        doReturn(4).when(mAdapter).getItemCount();
        when(mRecyclerView.getChildAt(POSITION1)).thenReturn(mItemView1);
        when(mRecyclerView.getChildAt(POSITION2)).thenReturn(mItemView2);
        when(mRecyclerView.getChildAt(POSITION3)).thenReturn(mItemView3);
        when(mRecyclerView.getChildAt(POSITION4)).thenReturn(mItemView4);
        doReturn(mRecyclerView).when(mItemView1).getParent();
        doReturn(mRecyclerView).when(mItemView2).getParent();
        doReturn(mRecyclerView).when(mItemView3).getParent();
        doReturn(mRecyclerView).when(mItemView4).getParent();
        when(mRecyclerView.findViewHolderForAdapterPosition(POSITION1))
                .thenReturn(mDummyViewHolder1);
        when(mRecyclerView.findViewHolderForAdapterPosition(POSITION2))
                .thenReturn(mDummyViewHolder2);
        when(mRecyclerView.findViewHolderForAdapterPosition(POSITION3))
                .thenReturn(mDummyViewHolder3);
        when(mRecyclerView.findViewHolderForAdapterPosition(POSITION4))
                .thenReturn(mDummyViewHolder4);
    }

    // Creates the callback under test; isDialog == true configures it for the
    // tab grid dialog (ungroup) mode, otherwise for the grid tab switcher.
    private void setupItemTouchHelperCallback(boolean isDialog) {
        mItemTouchHelperCallback = new TabGridItemTouchHelperCallback(
                ContextUtils.getApplicationContext(), mModel, mTabModelSelector,
                mTabClosedListener, isDialog ? mTabGridDialogHandler : null, "", !isDialog,
                TabListMode.GRID);
        mItemTouchHelperCallback.setupCallback(THRESHOLD, THRESHOLD, THRESHOLD, mProfile);
        mItemTouchHelperCallback.getMovementFlags(mRecyclerView, mMockViewHolder1);
    }

    @Test
    public void initializesWithCurrentTabs() {
        initAndAssertAllProperties();
    }

    // Starting a drag should zoom the selected card in and lower its alpha.
    @Test
    public void onStartDraggingTab() {
        initAndAssertAllProperties();

        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE));
        assertThat(mModel.get(0).model.get(CARD_ALPHA), equalTo(1f));

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_DRAG);

        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN));
        assertThat(mModel.get(0).model.get(CARD_ALPHA), equalTo(0.8f));
    }

    // Swiping a card closes the corresponding tab.
    @Test
    public void onSwipeTab_Delete() {
        initAndAssertAllProperties();

        mItemTouchHelperCallback.onSwiped(mMockViewHolder1, POSITION1);

        verify(mTabClosedListener).run(TAB1_ID);
    }

    @Test
    public void onReleaseTab_NoMerge() {
        initAndAssertAllProperties();
        // Simulate the selection of card#1 in
        // TabListModel.
        mModel.get(0).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN);
        mModel.get(0).model.set(CARD_ALPHA, 0.8f);
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION1);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        // Releasing without a hover target restores the dragged card; no merge.
        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_OUT));
        assertThat(mModel.get(0).model.get(CARD_ALPHA), equalTo(1f));
        assertThat(mModel.get(1).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE));
        assertThat(mModel.get(1).model.get(CARD_ALPHA), equalTo(1f));
    }

    // Dropping card#1 on card#2 merges tab1 into tab2's group.
    @Test
    public void onReleaseTab_MergeBackward() {
        initAndAssertAllProperties();
        // Simulate the selection of card#1 in TabListModel.
        mModel.get(0).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN);
        mModel.get(0).model.set(CARD_ALPHA, 0.8f);
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION1);
        // Simulate hovering on card#2.
        mModel.get(1).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN);
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION2);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        verify(mTabGroupModelFilter).mergeTabsToGroup(TAB1_ID, TAB2_ID);
        verify(mGridLayoutManager).removeView(mItemView1);
        verify(mTracker).notifyEvent(eq(EventConstants.TAB_DRAG_AND_DROP_TO_GROUP));
        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_OUT));
    }

    // Dropping card#2 on card#1 merges tab2 into tab1's group.
    @Test
    public void onReleaseTab_MergeForward() {
        initAndAssertAllProperties();
        // Simulate the selection of card#2 in TabListModel.
        mModel.get(1).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN);
        mModel.get(1).model.set(CARD_ALPHA, 0.8f);
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION2);
        // Simulate hovering on card#1.
        mModel.get(0).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN);
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION1);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder2, ItemTouchHelper.ACTION_STATE_IDLE);

        verify(mGridLayoutManager).removeView(mItemView2);
        verify(mTabGroupModelFilter).mergeTabsToGroup(TAB2_ID, TAB1_ID);
        verify(mTracker).notifyEvent(eq(EventConstants.TAB_DRAG_AND_DROP_TO_GROUP));
        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_OUT));
    }

    // With tab groups disabled, a drop must restore the card and never merge.
    @Test
    @Features.DisableFeatures({ChromeFeatureList.TAB_GROUPS_ANDROID})
    public void onReleaseTab_MergeBackward_WithoutGroup() {
        initAndAssertAllProperties();
        doReturn(mEmptyTabModelFilter).when(mTabModelFilterProvider).getCurrentTabModelFilter();
        // Simulate the selection of card#1 in TabListModel.
        mModel.get(0).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN);
        mModel.get(0).model.set(CARD_ALPHA, 0.8f);
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION1);
        // Simulate hovering on card#2.
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION2);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_OUT));
        assertThat(mModel.get(0).model.get(CARD_ALPHA), equalTo(1f));
        // Merge signal should never be sent.
        verify(mTabGroupModelFilter, never()).mergeTabsToGroup(anyInt(), anyInt());
        verify(mGridLayoutManager, never()).removeView(any(View.class));
    }

    // Same as above, but with the drop going from card#2 onto card#1.
    @Test
    @Features.DisableFeatures({ChromeFeatureList.TAB_GROUPS_ANDROID})
    public void onReleaseTab_MergeForward_WithoutGroup() {
        initAndAssertAllProperties();
        doReturn(mEmptyTabModelFilter).when(mTabModelFilterProvider).getCurrentTabModelFilter();
        // Simulate the selection of card#2 in TabListModel.
        mModel.get(1).model.set(TabProperties.CARD_ANIMATION_STATUS,
                ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_IN);
        mModel.get(1).model.set(CARD_ALPHA, 0.8f);
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION2);
        // Simulate hovering on card#1.
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION1);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder2, ItemTouchHelper.ACTION_STATE_IDLE);

        assertThat(mModel.get(1).model.get(TabProperties.CARD_ANIMATION_STATUS),
                equalTo(ClosableTabGridView.AnimationStatus.SELECTED_CARD_ZOOM_OUT));
        assertThat(mModel.get(1).model.get(CARD_ALPHA), equalTo(1f));
        // Merge signal should never be sent.
        verify(mTabGroupModelFilter, never()).mergeTabsToGroup(anyInt(), anyInt());
        verify(mGridLayoutManager, never()).removeView(any(View.class));
    }

    @Test
    public void onReleaseTab_Merge_CleanOut() {
        initAndAssertAllProperties();
        // Simulate the selection of card#2 in TabListModel.
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION2);
        // Simulate hovering on card#1.
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION1);
        // Simulate that the recyclerView is cleaned out when the drop-to-merge happens.
        doReturn(null).when(mRecyclerView).findViewHolderForAdapterPosition(anyInt());

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder2, ItemTouchHelper.ACTION_STATE_IDLE);

        // No view holder available -> the merge must be skipped entirely.
        verify(mGridLayoutManager, never()).removeView(mItemView2);
        verify(mTabGroupModelFilter, never()).mergeTabsToGroup(TAB2_ID, TAB1_ID);
        verify(mTracker, never()).notifyEvent(eq(EventConstants.TAB_DRAG_AND_DROP_TO_GROUP));
    }

    @Test
    public void onReleaseTab_Merge_Scrolling() {
        initAndAssertAllProperties();
        // Simulate the selection of card#2 in TabListModel.
        mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION2);
        // Simulate hovering on card#1.
        mItemTouchHelperCallback.setHoveredTabIndexForTesting(POSITION1);
        // Simulate that the recyclerView is scrolling when the drop-to-merge happens.
        when(mRecyclerView.isComputingLayout()).thenReturn(true);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder2, ItemTouchHelper.ACTION_STATE_IDLE);

        // A layout pass in flight -> the merge must be skipped entirely.
        verify(mGridLayoutManager, never()).removeView(mItemView2);
        verify(mTabGroupModelFilter, never()).mergeTabsToGroup(TAB2_ID, TAB1_ID);
        verify(mTracker, never()).notifyEvent(eq(EventConstants.TAB_DRAG_AND_DROP_TO_GROUP));
    }

    // In dialog mode, releasing a card hides the ungroup bar.
    @Test
    public void onReleaseTab_UngroupBar_Hide() {
        initAndAssertAllProperties();
        setupItemTouchHelperCallback(true);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        verify(mTabGridDialogHandler)
                .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HIDE);
    }

    // In dialog mode, dropping a card on the ungroup bar moves the tab out of
    // its group and removes the card view.
    @Test
    public void onReleaseTab_Ungroup() {
        initAndAssertAllProperties();
        setupItemTouchHelperCallback(true);
        mItemTouchHelperCallback.setUnGroupTabIndexForTesting(POSITION1);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        verify(mTabGroupModelFilter).moveTabOutOfGroup(TAB1_ID);
        verify(mTabGridDialogHandler)
                .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HIDE);
        verify(mGridLayoutManager).removeView(mItemView1);
    }

    @Test
    public void onReleaseTab_Ungroup_Scrolling() {
        initAndAssertAllProperties();
        setupItemTouchHelperCallback(true);
        mItemTouchHelperCallback.setUnGroupTabIndexForTesting(POSITION1);
        // Simulate that the recyclerView is scrolling when the drop-to-ungroup happens.
        when(mRecyclerView.isComputingLayout()).thenReturn(true);

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        // Ungroup is skipped, but the ungroup bar is still hidden.
        verify(mTabGroupModelFilter, never()).moveTabOutOfGroup(TAB1_ID);
        verify(mTabGridDialogHandler)
                .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HIDE);
        verify(mGridLayoutManager, never()).removeView(mItemView1);
    }

    @Test
    public void onReleaseTab_Ungroup_CleanOut() {
        initAndAssertAllProperties();
        setupItemTouchHelperCallback(true);
        mItemTouchHelperCallback.setUnGroupTabIndexForTesting(POSITION1);
        // Simulate that the recyclerView is cleaned out when the drop-to-ungroup happens.
        doReturn(null).when(mRecyclerView).findViewHolderForAdapterPosition(anyInt());

        mItemTouchHelperCallback.onSelectedChanged(
                mMockViewHolder1, ItemTouchHelper.ACTION_STATE_IDLE);

        // Ungroup is skipped, but the ungroup bar is still hidden.
        verify(mTabGroupModelFilter, never()).moveTabOutOfGroup(TAB1_ID);
        verify(mTabGridDialogHandler)
                .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HIDE);
        verify(mGridLayoutManager, never()).removeView(mItemView1);
    }

    // Horizontal drags of exactly one card-plus-gap width trigger the hover animation.
    @Test
    public void onDragTab_Hovered_GTS_Horizontal() {
        initAndAssertAllProperties();

        // Drag card#1 rightwards to hover on card#2.
        verifyDrag(mDummyViewHolder1, 5, 0, POSITION2,
                ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN);

        // Drag card#2 leftwards to hover on card#1.
        verifyDrag(mDummyViewHolder2, -5, 0, POSITION1,
                ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN);
    }

    @Test
    public void onDragTab_Hovered_GTS_Vertical() {
        initAndAssertAllProperties();

        // Drag card#1 downwards to hover on card#3.
        verifyDrag(mDummyViewHolder1, 0, 5, POSITION3,
                ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN);

        // Drag card#3 upwards to hover on card#1.
verifyDrag(mDummyViewHolder3, 0, -5, POSITION1, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); } @Test public void onDragTab_Hovered_GTS_Diagonal() { initAndAssertAllProperties(); // Drag card#1 diagonally to hover on card#4. verifyDrag(mDummyViewHolder1, 5, 5, POSITION4, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Drag card#4 diagonally to hover on card#1. verifyDrag(mDummyViewHolder4, -5, -5, POSITION1, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Drag card#2 diagonally to hover on card#3. verifyDrag(mDummyViewHolder2, -5, 5, POSITION3, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Drag card#3 diagonally to hover on card#2. verifyDrag(mDummyViewHolder3, 5, -5, POSITION2, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); } @Test public void onDragTab_NotHovered_GTS_Horizontal() { initAndAssertAllProperties(); // With merge threshold equal to 2, any horizontal drag with |dX| <= (5 - threshold) should // never trigger hovering. verifyDrag(mDummyViewHolder1, 3, 0, POSITION2, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder2, -3, 0, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); // With merge threshold equal to 2, any horizontal drag with |dX| >= (5 + threshold) should // never trigger hovering. verifyDrag(mDummyViewHolder1, 7, 0, POSITION2, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder2, -7, 0, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); } @Test public void onDragTab_NotHovered_GTS_Vertical() { initAndAssertAllProperties(); // With merge threshold equal to 2, any vertical drag with |dY| <= (5 - threshold) should // never trigger hovering. 
verifyDrag(mDummyViewHolder1, 0, 3, POSITION3, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder3, 0, -3, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); // With merge threshold equal to 2, any vertical drag with |dY| >= (5 + threshold) should // never trigger hovering. verifyDrag(mDummyViewHolder1, 0, 7, POSITION3, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder3, 0, -7, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); } @Test public void onDragTab_NotHovered_GTS_Diagonal() { initAndAssertAllProperties(); // With merge threshold equal to 2, any diagonal drag with |dX| <= (5 - threshold) or |dY| // <= (5 - threshold) should never trigger hovering. verifyDrag(mDummyViewHolder1, 3, 4, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 4, 3, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -4, -3, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -3, -4, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); // With merge threshold equal to 2, any vertical drag with |dX| >= (5 + threshold) or |dY| // >= (5 + threshold) should never trigger hovering. verifyDrag(mDummyViewHolder1, 7, 6, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 6, 7, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -6, -7, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -7, -6, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); } @Test public void onDragTab_Restore_Hovered_GTS() { initAndAssertAllProperties(); // Simulate the process of hovering card#1 on card#2. 
verifyDrag(mDummyViewHolder1, 5, 0, POSITION2, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Continue to drag card#1 horizontally so that it is no longer hovering on card#2. verifyDrag(mDummyViewHolder1, 10, 0, POSITION2, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_OUT); // Simulate the process of hovering card#1 on card#3. verifyDrag(mDummyViewHolder1, 0, 5, POSITION3, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Continue to drag card#1 vertically so that it is no longer hovering on card#3. verifyDrag(mDummyViewHolder1, 0, 10, POSITION3, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_OUT); // Simulate the process of hovering card#1 on card#4. verifyDrag(mDummyViewHolder1, 5, 5, POSITION4, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_IN); // Continue to drag card#1 diagonally so that it is no longer hovering on card#4. verifyDrag(mDummyViewHolder1, 10, 10, POSITION4, ClosableTabGridView.AnimationStatus.HOVERED_CARD_ZOOM_OUT); } @Test @Features.DisableFeatures({ChromeFeatureList.TAB_GROUPS_ANDROID}) public void onDragTab_Hovered_GTS_WithoutGroup() { initAndAssertAllProperties(); doReturn(mEmptyTabModelFilter).when(mTabModelFilterProvider).getCurrentTabModelFilter(); // Hovering shouldn't make any difference. 
verifyDrag(mDummyViewHolder1, 5, 0, POSITION2, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder2, -5, 0, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 0, 5, POSITION3, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder3, 0, -5, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 5, 5, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -5, -5, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); } @Test public void onDragTab_Hovered_NonGTS() { initAndAssertAllProperties(); // Suppose drag happens in components other than GTS. mItemTouchHelperCallback.setActionsOnAllRelatedTabsForTesting(false); // Hovering shouldn't make any difference. verifyDrag(mDummyViewHolder1, 5, 0, POSITION2, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder2, -5, 0, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 0, 5, POSITION3, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder3, 0, -5, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder1, 5, 5, POSITION4, ClosableTabGridView.AnimationStatus.CARD_RESTORE); verifyDrag(mDummyViewHolder4, -5, -5, POSITION1, ClosableTabGridView.AnimationStatus.CARD_RESTORE); } @Test public void onDragTab_Ungroup() { initAndAssertAllProperties(); setupItemTouchHelperCallback(true); // Simulate dragging card#1 down to the ungroup bar. mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION1); mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, mDummyViewHolder1, 0, 7, ItemTouchHelper.ACTION_STATE_DRAG, true); verify(mTabGridDialogHandler) .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HOVERED); // Simulate dragging card#3 down to the ungroup bar. 
mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, mDummyViewHolder1, 0, 2, ItemTouchHelper.ACTION_STATE_DRAG, true); verify(mTabGridDialogHandler) .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HOVERED); } @Test public void onDragTab_NotUngroup() { initAndAssertAllProperties(); setupItemTouchHelperCallback(true); // With recyclerview bottom equal to 12 and ungroup threshold equal to 2, any drag with // itemview.bottom + dY <= 10 should never trigger ungroup. mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION1); mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, mDummyViewHolder1, 0, 6, ItemTouchHelper.ACTION_STATE_DRAG, true); // Simulate dragging card#3 down to the ungroup bar. mItemTouchHelperCallback.setSelectedTabIndexForTesting(POSITION3); mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, mDummyViewHolder3, 0, 1, ItemTouchHelper.ACTION_STATE_DRAG, true); verify(mTabGridDialogHandler, times(2)) .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.SHOW); verify(mTabGridDialogHandler, never()) .updateUngroupBarStatus(TabGridDialogView.UngroupBarStatus.HOVERED); } @Test public void onDragTab_AfterRelease() { initAndAssertAllProperties(); setupItemTouchHelperCallback(true); // Simulate that drop is finished, but there are some extra onChildDraw calls. mItemTouchHelperCallback.setSelectedTabIndexForTesting(TabModel.INVALID_TAB_INDEX); // Simulate dragging the tab down to the ungroup bar. mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, mDummyViewHolder1, 0, 8, ItemTouchHelper.ACTION_STATE_DRAG, true); verify(mTabGridDialogHandler, never()).updateUngroupBarStatus(anyInt()); } @Test public void onDraggingAnimationEnd_Stale() { initAndAssertAllProperties(); setupItemTouchHelperCallback(false); // Mock that when the dragging animation ends, the recyclerView is in an inconsistent state: // recyclerView should be cleaned out, yet the animated view is stale. 
mItemTouchHelperCallback.setCurrentActionStateForTesting(ItemTouchHelper.ACTION_STATE_DRAG); doReturn(1).when(mRecyclerView).getChildCount(); doReturn(0).when(mAdapter).getItemCount(); mItemTouchHelperCallback.clearView(mRecyclerView, mDummyViewHolder1); verify(mGridLayoutManager).removeView(mItemView1); } @Test public void onDraggingAnimationEnd_NonStale() { initAndAssertAllProperties(); setupItemTouchHelperCallback(false); // Mock that when the dragging animation ends, the recyclerView is in consistent state. mItemTouchHelperCallback.setCurrentActionStateForTesting(ItemTouchHelper.ACTION_STATE_DRAG); assertThat(mRecyclerView.getChildCount(), equalTo(mAdapter.getItemCount())); mItemTouchHelperCallback.clearView(mRecyclerView, mDummyViewHolder1); verify(mGridLayoutManager, never()).removeView(mItemView1); } @Test public void messageItemNotDraggable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.MESSAGE); setupItemTouchHelperCallback(false); assertFalse( mItemTouchHelperCallback.hasDragFlagForTesting(mRecyclerView, mMockViewHolder1)); } @Test public void messageItemSwipeable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.MESSAGE); setupItemTouchHelperCallback(false); assertTrue(mItemTouchHelperCallback.hasSwipeFlag(mRecyclerView, mMockViewHolder1)); } @Test public void messageItemNotDropable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.MESSAGE); setupItemTouchHelperCallback(false); assertFalse(mItemTouchHelperCallback.canDropOver( mRecyclerView, mMockViewHolder2, mMockViewHolder1)); } @Test(expected = AssertionError.class) public void messageItemOnMoveFail() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.MESSAGE); setupItemTouchHelperCallback(false); mItemTouchHelperCallback.onMove(mRecyclerView, mMockViewHolder1, mMockViewHolder2); } @Test public void newTabTileNotDraggable() { 
when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.NEW_TAB_TILE); setupItemTouchHelperCallback(false); assertFalse( mItemTouchHelperCallback.hasDragFlagForTesting(mRecyclerView, mMockViewHolder1)); } @Test public void newTabTileNotSwipeable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.NEW_TAB_TILE); setupItemTouchHelperCallback(false); assertFalse(mItemTouchHelperCallback.hasSwipeFlag(mRecyclerView, mMockViewHolder1)); } @Test public void newTabTileNotDropable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.NEW_TAB_TILE); setupItemTouchHelperCallback(false); assertFalse(mItemTouchHelperCallback.canDropOver( mRecyclerView, mMockViewHolder2, mMockViewHolder1)); } @Test(expected = AssertionError.class) public void newTabTileOnMoveFail() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.NEW_TAB_TILE); setupItemTouchHelperCallback(false); mItemTouchHelperCallback.onMove(mRecyclerView, mMockViewHolder1, mMockViewHolder2); } @Test public void largeMessageItemNotDraggable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.LARGE_MESSAGE); setupItemTouchHelperCallback(false); assertFalse( mItemTouchHelperCallback.hasDragFlagForTesting(mRecyclerView, mMockViewHolder1)); } @Test public void largeMessageItemSwipeable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.LARGE_MESSAGE); setupItemTouchHelperCallback(false); assertTrue(mItemTouchHelperCallback.hasSwipeFlag(mRecyclerView, mMockViewHolder1)); } @Test public void largeMessageItemNotDropable() { when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.LARGE_MESSAGE); setupItemTouchHelperCallback(false); assertFalse(mItemTouchHelperCallback.canDropOver( mRecyclerView, mMockViewHolder2, mMockViewHolder1)); } @Test(expected = AssertionError.class) public void largeMessageItemOnMoveFail() { 
when(mMockViewHolder1.getItemViewType()).thenReturn(TabProperties.UiType.LARGE_MESSAGE); setupItemTouchHelperCallback(false); mItemTouchHelperCallback.onMove(mRecyclerView, mMockViewHolder1, mMockViewHolder2); } private void verifyDrag( RecyclerView.ViewHolder viewHolder, float dX, float dY, int targetIndex, int status) { // Simulate the process of dragging one card to a position. mItemTouchHelperCallback.onChildDraw(mCanvas, mRecyclerView, viewHolder, dX, dY, ItemTouchHelper.ACTION_STATE_DRAG, true); // Verify the card in target index is in correct status. assertThat(mModel.get(targetIndex).model.get(TabProperties.CARD_ANIMATION_STATUS), equalTo(status)); } private void initAndAssertAllProperties() { for (int i = 0; i < mTabModel.getCount(); i++) { Tab tab = mTabModel.getTabAt(i); addTabInfoModel(tab); } assertThat(mModel.size(), equalTo(4)); assertThat(mModel.get(0).model.get(TabProperties.TAB_ID), equalTo(TAB1_ID)); assertThat(mModel.get(1).model.get(TabProperties.TAB_ID), equalTo(TAB2_ID)); assertThat(mModel.get(2).model.get(TabProperties.TAB_ID), equalTo(TAB3_ID)); assertThat(mModel.get(3).model.get(TabProperties.TAB_ID), equalTo(TAB4_ID)); assertThat(mModel.get(0).model.get(TabProperties.CARD_ANIMATION_STATUS), equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE)); assertThat(mModel.get(1).model.get(TabProperties.CARD_ANIMATION_STATUS), equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE)); assertThat(mModel.get(2).model.get(TabProperties.CARD_ANIMATION_STATUS), equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE)); assertThat(mModel.get(3).model.get(TabProperties.CARD_ANIMATION_STATUS), equalTo(ClosableTabGridView.AnimationStatus.CARD_RESTORE)); assertThat(mModel.get(0).model.get(CARD_ALPHA), equalTo(1f)); assertThat(mModel.get(1).model.get(CARD_ALPHA), equalTo(1f)); assertThat(mModel.get(2).model.get(CARD_ALPHA), equalTo(1f)); assertThat(mModel.get(3).model.get(CARD_ALPHA), equalTo(1f)); } private void addTabInfoModel(Tab tab) { PropertyKey[] 
testKeysTabGrid = new PropertyKey[] { TabProperties.TAB_ID, TabProperties.CARD_ANIMATION_STATUS, CARD_ALPHA, CARD_TYPE}; PropertyModel tabInfo = new PropertyModel.Builder(testKeysTabGrid) .with(TabProperties.TAB_ID, tab.getId()) .with(TabProperties.CARD_ANIMATION_STATUS, ClosableTabGridView.AnimationStatus.CARD_RESTORE) .with(CARD_ALPHA, 1f) .with(CARD_TYPE, TAB) .build(); mModel.add(new MVCListAdapter.ListItem(0, tabInfo)); } private Tab prepareTab(int id, String title) { Tab tab = mock(Tab.class); doReturn(id).when(tab).getId(); doReturn(title).when(tab).getTitle(); return tab; } private SimpleRecyclerViewAdapter.ViewHolder prepareMockViewHolder(int id, int position) { SimpleRecyclerViewAdapter.ViewHolder viewHolder = mock(SimpleRecyclerViewAdapter.ViewHolder.class); viewHolder.model = new PropertyModel.Builder(TabProperties.ALL_KEYS_TAB_GRID) .with(TabProperties.TAB_ID, id) .with(CARD_TYPE, TAB) .build(); return viewHolder; } private View prepareItemView(int left, int top, int right, int bottom) { View view = mock(View.class); doReturn(left).when(view).getLeft(); doReturn(top).when(view).getTop(); doReturn(right).when(view).getRight(); doReturn(bottom).when(view).getBottom(); return view; } private RecyclerView.ViewHolder prepareDummyViewHolder(View itemView) { return new RecyclerView.ViewHolder(itemView) {}; } }
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package au.edu.swin.ict.road.designer.smc.provider;

import au.edu.swin.ict.road.designer.smc.SmcPackage;
import au.edu.swin.ict.road.designer.smc.TaskRef;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;

/**
 * This is the item provider adapter for a {@link au.edu.swin.ict.road.designer.smc.TaskRef} object.
 * <p>
 * NOTE(review): this class is EMF-generated (see the {@code @generated} tags). Hand edits will be
 * overwritten on regeneration unless the tag on the edited member is changed to {@code @generated NOT}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class TaskRefItemProvider
    extends ItemProviderAdapter
    implements IEditingDomainItemProvider,
               IStructuredItemContentProvider,
               ITreeItemContentProvider,
               IItemLabelProvider,
               IItemPropertySource {

    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public TaskRefItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * Descriptors are built lazily on first request and cached in the
     * inherited {@code itemPropertyDescriptors} list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        if (itemPropertyDescriptors == null) {
            // Populates the inherited descriptor list with any base-class descriptors first.
            super.getPropertyDescriptors(object);

            addPreEPPropertyDescriptor(object);
            addPostEPPropertyDescriptor(object);
            addPerformanceValPropertyDescriptor(object);
            addTaskPropertyDescriptor(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This adds a property descriptor for the Pre EP feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addPreEPPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_TaskRef_preEP_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_TaskRef_preEP_feature", "_UI_TaskRef_type"),
                 SmcPackage.Literals.TASK_REF__PRE_EP,
                 // settable=true, multiLine=false, sortChoices=false, generic value image, no category/filters.
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Post EP feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addPostEPPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_TaskRef_postEP_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_TaskRef_postEP_feature", "_UI_TaskRef_type"),
                 SmcPackage.Literals.TASK_REF__POST_EP,
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Performance Val feature.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addPerformanceValPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_TaskRef_performanceVal_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_TaskRef_performanceVal_feature", "_UI_TaskRef_type"),
                 SmcPackage.Literals.TASK_REF__PERFORMANCE_VAL,
                 true,
                 false,
                 false,
                 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
                 null,
                 null));
    }

    /**
     * This adds a property descriptor for the Task feature.
     * Unlike the string features above, this is presented as a reference
     * (third boolean {@code true}, no value image).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected void addTaskPropertyDescriptor(Object object) {
        itemPropertyDescriptors.add
            (createItemPropertyDescriptor
                (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
                 getResourceLocator(),
                 getString("_UI_TaskRef_Task_feature"),
                 getString("_UI_PropertyDescriptor_description", "_UI_TaskRef_Task_feature", "_UI_TaskRef_type"),
                 SmcPackage.Literals.TASK_REF__TASK,
                 true,
                 false,
                 true,
                 null,
                 null,
                 null));
    }

    /**
     * This returns TaskRef.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/TaskRef"));
    }

    /**
     * This returns the label text for the adapted class: the type name,
     * suffixed with the Pre EP value when one is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        String label = ((TaskRef)object).getPreEP();
        return label == null || label.length() == 0 ?
            getString("_UI_TaskRef_type") :
            getString("_UI_TaskRef_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * Changes to the three simple features trigger a label refresh only
     * (content=false, label=true); everything else falls through to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(TaskRef.class)) {
            case SmcPackage.TASK_REF__PRE_EP:
            case SmcPackage.TASK_REF__POST_EP:
            case SmcPackage.TASK_REF__PERFORMANCE_VAL:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object. TaskRef declares no creatable
     * children of its own, so only the superclass contribution applies.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);
    }

    /**
     * Return the resource locator for this item provider's resources.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public ResourceLocator getResourceLocator() {
        return SmcEditPlugin.INSTANCE;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ignite.internal.processors.cache;

import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.CacheException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;

import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;

/**
 * Tests correct cache stopping: cache operations concurrent with a node/cache
 * stop must either complete or fail with the expected "closed or destroyed"
 * error, never hang or corrupt state.
 */
public class GridCacheStopSelfTest extends GridCommonAbstractTest {
    /** Prefix of the message expected on operations against a stopped cache. */
    private static final String EXPECTED_MSG = "Cache has been closed or destroyed";

    /** Shared IP finder so all test grids discover each other. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** When {@code true} the cache is configured ATOMIC, otherwise TRANSACTIONAL. */
    private boolean atomic;

    /** When {@code true} the cache is configured REPLICATED, otherwise PARTITIONED. */
    private boolean replicated;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        TcpDiscoverySpi disc = new TcpDiscoverySpi();

        disc.setIpFinder(ipFinder);

        cfg.setDiscoverySpi(disc);

        CacheConfiguration ccfg = new CacheConfiguration();

        // Cache mode/atomicity are driven by the per-test flags set before grid start.
        ccfg.setCacheMode(replicated ? REPLICATED : PARTITIONED);

        if (!replicated)
            ccfg.setBackups(1);

        ccfg.setAtomicityMode(atomic ? ATOMIC : TRANSACTIONAL);

        ccfg.setSwapEnabled(true);

        cfg.setCacheConfiguration(ccfg);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /**
     * Stop while explicit transactions are in progress (default PARTITIONED/TRANSACTIONAL cache).
     *
     * @throws Exception If failed.
     */
    public void testStopExplicitTransactions() throws Exception {
        testStop(true);
    }

    /**
     * Stop while implicit (single-operation) transactions are in progress.
     *
     * @throws Exception If failed.
     */
    public void testStopImplicitTransactions() throws Exception {
        testStop(false);
    }

    /**
     * Same as {@link #testStopExplicitTransactions()} but with a REPLICATED cache.
     *
     * @throws Exception If failed.
     */
    public void testStopExplicitTransactionsReplicated() throws Exception {
        replicated = true;

        testStop(true);
    }

    /**
     * Same as {@link #testStopImplicitTransactions()} but with a REPLICATED cache.
     *
     * @throws Exception If failed.
     */
    public void testStopImplicitTransactionsReplicated() throws Exception {
        replicated = true;

        testStop(false);
    }

    /**
     * Stop with an ATOMIC cache (no transactions possible, so startTx=false).
     *
     * @throws Exception If failed.
     */
    public void testStopAtomic() throws Exception {
        atomic = true;

        testStop(false);
    }

    /**
     * Repeatedly stops three of four nodes while 20 threads hammer them with
     * cache operations (and 2 more threads use the surviving node), verifying
     * no operation hangs across node stop.
     *
     * @throws Exception If failed.
     */
    public void testStopMultithreaded() throws Exception {
        try {
            startGrid(0);

            for (int i = 0; i < 5; i++) {
                log.info("Iteration: " + i);

                startGridsMultiThreaded(1, 3);

                final AtomicInteger threadIdx = new AtomicInteger(0);

                // 20 threads operating on the three nodes that will be stopped (indices 1..3).
                final IgniteInternalFuture<?> fut1 =
                    GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
                        @Override public Void call() throws Exception {
                            int idx = threadIdx.getAndIncrement();

                            IgniteKernal node = (IgniteKernal)ignite(idx % 3 + 1);

                            IgniteCache<Integer, Integer> cache = node.cache(null);

                            while (true) {
                                try {
                                    cacheOperations(node, cache);
                                }
                                catch (Exception e) {
                                    // Errors are expected once the node begins stopping; exit then.
                                    if (node.isStopping())
                                        break;
                                }
                            }

                            return null;
                        }
                    }, 20, "tx-node-stop-thread");

                // 2 threads operating on node 0, which stays up; they run until fut1 finishes.
                IgniteInternalFuture<?> fut2 = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
                    @Override public Void call() throws Exception {
                        IgniteKernal node = (IgniteKernal)ignite(0);

                        IgniteCache<Integer, Integer> cache = node.cache(null);

                        while (!fut1.isDone()) {
                            try {
                                cacheOperations(node, cache);
                            }
                            catch (Exception ignore) {
                                // No-op.
                            }
                        }

                        return null;
                    }
                }, 2, "tx-thread");

                // Let the load run for a while before stopping nodes.
                Thread.sleep(3000);

                final AtomicInteger nodeIdx = new AtomicInteger(1);

                // Stop nodes 1..3 concurrently while the load threads are still active.
                GridTestUtils.runMultiThreaded(new Callable<Void>() {
                    @Override public Void call() throws Exception {
                        int idx = nodeIdx.getAndIncrement();

                        log.info("Stop node: " + idx);

                        ignite(idx).close();

                        return null;
                    }
                }, 3, "stop-node");

                // Both loaders must terminate — a hang here means stop deadlocked an operation.
                fut1.get();

                fut2.get();
            }
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Performs one round of cache operations on a random key: plain put/get,
     * then a put inside an OPTIMISTIC and a PESSIMISTIC transaction.
     *
     * @param node Node.
     * @param cache Cache.
     */
    private void cacheOperations(Ignite node, IgniteCache<Integer, Integer> cache) {
        ThreadLocalRandom rnd = ThreadLocalRandom.current();

        Integer key = rnd.nextInt(1000);

        cache.put(key, key);

        cache.get(key);

        try (Transaction tx = node.transactions().txStart(OPTIMISTIC, REPEATABLE_READ)) {
            cache.put(key, key);

            tx.commit();
        }

        try (Transaction tx = node.transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
            cache.put(key, key);

            tx.commit();
        }
    }

    /**
     * Core scenario: 50 put threads park on a latch (optionally inside an open
     * transaction), the grid is stopped while they wait, the latch is released,
     * and every thread must finish (possibly with an expected exception).
     * Repeated 10 times.
     *
     * @param startTx If {@code true} starts transactions.
     * @throws Exception If failed.
     */
    private void testStop(final boolean startTx) throws Exception {
        for (int i = 0; i < 10; i++) {
            startGrid(0);

            final int PUT_THREADS = 50;

            // stopLatch holds all put threads mid-operation; readyLatch tells the
            // main thread that every putter has reached the wait point.
            final CountDownLatch stopLatch = new CountDownLatch(1);

            final CountDownLatch readyLatch = new CountDownLatch(PUT_THREADS);

            final IgniteCache<Integer, Integer> cache = grid(0).cache(null);

            assertNotNull(cache);

            CacheConfiguration ccfg = cache.getConfiguration(CacheConfiguration.class);

            // Sanity-check that the per-test flags actually took effect.
            assertEquals(atomic ? ATOMIC : TRANSACTIONAL, ccfg.getAtomicityMode());
            assertEquals(replicated ? REPLICATED : PARTITIONED, ccfg.getCacheMode());

            Collection<IgniteInternalFuture<?>> putFuts = new ArrayList<>();

            for (int j = 0; j < PUT_THREADS; j++) {
                final int key = j;

                putFuts.add(GridTestUtils.runAsync(new Callable<Void>() {
                    @Override public Void call() throws Exception {
                        try {
                            if (startTx) {
                                // Alternate tx concurrency modes across threads.
                                TransactionConcurrency concurrency = key % 2 == 0 ? OPTIMISTIC : PESSIMISTIC;

                                try (Transaction tx = grid(0).transactions().txStart(concurrency, REPEATABLE_READ)) {
                                    cache.put(key, key);

                                    readyLatch.countDown();

                                    stopLatch.await();

                                    // Commit races with the grid stop; failure is acceptable.
                                    tx.commit();
                                }
                            }
                            else {
                                readyLatch.countDown();

                                stopLatch.await();

                                cache.put(key, key);
                            }
                        }
                        catch (CacheException | IgniteException | IllegalStateException e) {
                            // These are the expected failure modes when the cache is stopped underneath us.
                            log.info("Ignore error: " + e);
                        }

                        return null;
                    }
                }));
            }

            // Wait until every putter is parked, then stop the grid out from under them.
            readyLatch.await();

            stopLatch.countDown();

            stopGrid(0);

            // Every put thread must complete — fut.get() would rethrow unexpected errors or hang.
            for (IgniteInternalFuture<?> fut : putFuts)
                fut.get();

            try {
                cache.put(1, 1);
            }
            catch (IllegalStateException e) {
                if (!e.getMessage().startsWith(EXPECTED_MSG))
                    e.printStackTrace();

                assertTrue("Unexpected error message: " + e.getMessage(),
                    e.getMessage().startsWith(EXPECTED_MSG));
            }
        }
    }
}
package org.apache.lucene.search.suggest.jaspell; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.lucene.search.suggest.InputIterator; import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.search.suggest.UnsortedInputIterator; import org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.UnicodeUtil; /** * Suggest implementation based on * <a href="http://jaspell.sourceforge.net/">JaSpell</a>. * * @see JaspellTernarySearchTrie * @deprecated Migrate to one of the newer suggesters which are much more RAM efficient. 
*/ @Deprecated public class JaspellLookup extends Lookup implements Accountable { JaspellTernarySearchTrie trie = new JaspellTernarySearchTrie(); private boolean usePrefix = true; private int editDistance = 2; /** Number of entries the lookup was built with */ private long count = 0; /** * Creates a new empty trie * @see #build(InputIterator) * */ public JaspellLookup() {} @Override public void build(InputIterator tfit) throws IOException { if (tfit.hasPayloads()) { throw new IllegalArgumentException("this suggester doesn't support payloads"); } if (tfit.getComparator() != null) { // make sure it's unsorted // WTF - this could result in yet another sorted iteration.... tfit = new UnsortedInputIterator(tfit); } if (tfit.hasContexts()) { throw new IllegalArgumentException("this suggester doesn't support contexts"); } count = 0; trie = new JaspellTernarySearchTrie(); trie.setMatchAlmostDiff(editDistance); BytesRef spare; final CharsRef charsSpare = new CharsRef(); while ((spare = tfit.next()) != null) { final long weight = tfit.weight(); if (spare.length == 0) { continue; } charsSpare.grow(spare.length); UnicodeUtil.UTF8toUTF16(spare.bytes, spare.offset, spare.length, charsSpare); trie.put(charsSpare.toString(), Long.valueOf(weight)); } } /** * Adds a new node if <code>key</code> already exists, * otherwise replaces its value. * <p> * This method always returns false. */ public boolean add(CharSequence key, Object value) { trie.put(key, value); // XXX return false; } /** * Returns the value for the specified key, or null * if the key does not exist. */ public Object get(CharSequence key) { return trie.get(key); } @Override public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num) { if (contexts != null) { throw new IllegalArgumentException("this suggester doesn't support contexts"); } List<LookupResult> res = new ArrayList<>(); List<String> list; int count = onlyMorePopular ? 
num * 2 : num; if (usePrefix) { list = trie.matchPrefix(key, count); } else { list = trie.matchAlmost(key, count); } if (list == null || list.size() == 0) { return res; } int maxCnt = Math.min(num, list.size()); if (onlyMorePopular) { LookupPriorityQueue queue = new LookupPriorityQueue(num); for (String s : list) { long freq = ((Number)trie.get(s)).longValue(); queue.insertWithOverflow(new LookupResult(new CharsRef(s), freq)); } for (LookupResult lr : queue.getResults()) { res.add(lr); } } else { for (int i = 0; i < maxCnt; i++) { String s = list.get(i); long freq = ((Number)trie.get(s)).longValue(); res.add(new LookupResult(new CharsRef(s), freq)); } } return res; } private static final byte LO_KID = 0x01; private static final byte EQ_KID = 0x02; private static final byte HI_KID = 0x04; private static final byte HAS_VALUE = 0x08; private void readRecursively(DataInput in, TSTNode node) throws IOException { node.splitchar = in.readString().charAt(0); byte mask = in.readByte(); if ((mask & HAS_VALUE) != 0) { node.data = Long.valueOf(in.readLong()); } if ((mask & LO_KID) != 0) { TSTNode kid = trie.new TSTNode('\0', node); node.relatives[TSTNode.LOKID] = kid; readRecursively(in, kid); } if ((mask & EQ_KID) != 0) { TSTNode kid = trie.new TSTNode('\0', node); node.relatives[TSTNode.EQKID] = kid; readRecursively(in, kid); } if ((mask & HI_KID) != 0) { TSTNode kid = trie.new TSTNode('\0', node); node.relatives[TSTNode.HIKID] = kid; readRecursively(in, kid); } } private void writeRecursively(DataOutput out, TSTNode node) throws IOException { if (node == null) { return; } out.writeString(new String(new char[] {node.splitchar}, 0, 1)); byte mask = 0; if (node.relatives[TSTNode.LOKID] != null) mask |= LO_KID; if (node.relatives[TSTNode.EQKID] != null) mask |= EQ_KID; if (node.relatives[TSTNode.HIKID] != null) mask |= HI_KID; if (node.data != null) mask |= HAS_VALUE; out.writeByte(mask); if (node.data != null) { out.writeLong(((Number)node.data).longValue()); } 
writeRecursively(out, node.relatives[TSTNode.LOKID]); writeRecursively(out, node.relatives[TSTNode.EQKID]); writeRecursively(out, node.relatives[TSTNode.HIKID]); } @Override public boolean store(DataOutput output) throws IOException { output.writeVLong(count); TSTNode root = trie.getRoot(); if (root == null) { // empty tree return false; } writeRecursively(output, root); return true; } @Override public boolean load(DataInput input) throws IOException { count = input.readVLong(); TSTNode root = trie.new TSTNode('\0', null); readRecursively(input, root); trie.setRoot(root); return true; } @Override public long ramBytesUsed() { return trie.ramBytesUsed(); } @Override public long getCount() { return count; } }
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.auttc.data;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.auttc.business.Blog;
import com.auttc.business.Comment;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Attr;

/**
 * XML persistence for {@link Blog} entries and their {@link Comment}s.
 *
 * <p>Expected document shape: a root element containing {@code <blog id="...">}
 * elements, each with {@code <title>}, {@code <date>}, {@code <body>} and a
 * {@code <comments>} element holding {@code <comment>} children with
 * {@code <username>}, {@code <cdate>} and {@code <cbody>} text.
 *
 * <p>All public methods swallow exceptions and print the stack trace, matching
 * the original error-handling style of this class.
 *
 * @author yufeiyan
 */
public class BlogXML {

    /**
     * Parses the XML file into a DOM document.
     *
     * @param fileName path of the XML file to parse
     * @return the parsed document
     * @throws Exception on configuration or parse failure
     */
    private static Document parseDocument(String fileName) throws Exception {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        // NOTE(review): disable external entity resolution to prevent XXE in
        // case the XML file ever comes from an untrusted source. Remove only
        // if external entities are genuinely required.
        dbFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        dbFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        dbFactory.setExpandEntityReferences(false);
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        return dBuilder.parse(new File(fileName));
    }

    /**
     * Serializes the DOM document back to the given file, overwriting it.
     *
     * @param doc      document to write
     * @param fileName destination path
     * @throws Exception on transformer failure
     */
    private static void writeDocument(Document doc, String fileName) throws Exception {
        Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.transform(new DOMSource(doc), new StreamResult(fileName));
    }

    /** Returns the text content of the first descendant of {@code parent} with the given tag. */
    private static String childText(Element parent, String tag) {
        return parent.getElementsByTagName(tag).item(0).getTextContent();
    }

    /** Appends {@code <tag>text</tag>} as a child of {@code parent}. */
    private static void appendTextChild(Document doc, Element parent, String tag, String text) {
        Element child = doc.createElement(tag);
        child.appendChild(doc.createTextNode(text));
        parent.appendChild(child);
    }

    /** Appends a {@code <comment>} element (username/cdate/cbody) under {@code commentsElement}. */
    private static void appendComment(Document doc, Element commentsElement, Comment comment) {
        Element commentElement = doc.createElement("comment");
        commentsElement.appendChild(commentElement);
        appendTextChild(doc, commentElement, "username", comment.getUsername());
        appendTextChild(doc, commentElement, "cdate", comment.getCdate());
        appendTextChild(doc, commentElement, "cbody", comment.getCbody());
    }

    /**
     * Reads all blogs (and their comments) from the given XML file.
     *
     * @param fileName path of the XML file
     * @return list of blogs; empty on parse failure
     */
    public static List<Blog> xmlToBlogList(String fileName) {
        List<Blog> blogList = new ArrayList<>();
        try {
            Document doc = parseDocument(fileName);
            NodeList blogNodeList = doc.getElementsByTagName("blog");
            // loop along all the blogs
            for (int i = 0; i < blogNodeList.getLength(); i++) {
                Element blogElement = (Element) blogNodeList.item(i);
                // get information about blog_id, title, date, and body
                int blogId = Integer.parseInt(blogElement.getAttribute("id"));
                String blogTitle = childText(blogElement, "title");
                String blogDate = childText(blogElement, "date");
                String blogBody = childText(blogElement, "body");
                // get comments
                List<Comment> commentList = new ArrayList<>();
                NodeList commentNodeList = blogElement.getElementsByTagName("comment");
                for (int j = 0; j < commentNodeList.getLength(); j++) {
                    Element commentElement = (Element) commentNodeList.item(j);
                    commentList.add(new Comment(
                            childText(commentElement, "username"),
                            childText(commentElement, "cdate"),
                            childText(commentElement, "cbody")));
                }
                blogList.add(new Blog(blogId, blogTitle, blogDate, blogBody,
                        commentList, commentList.size()));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return blogList;
    }

    /**
     * Appends {@code inputBlog} (with its comments) to the XML file.
     *
     * @param inputBlog blog to persist
     * @param fileName  path of the XML file
     */
    public static void addBlog(Blog inputBlog, String fileName) {
        try {
            Document doc = parseDocument(fileName);
            Element rootBlogsElement = doc.getDocumentElement();

            // append <blog> tag to root element
            Element blogElement = doc.createElement("blog");
            rootBlogsElement.appendChild(blogElement);

            // add attribute "id" to blog tag
            Attr blogID = doc.createAttribute("id");
            blogID.setValue(Integer.toString(inputBlog.getId()));
            blogElement.setAttributeNode(blogID);

            appendTextChild(doc, blogElement, "title", inputBlog.getTitle());
            appendTextChild(doc, blogElement, "date", inputBlog.getDate());
            appendTextChild(doc, blogElement, "body", inputBlog.getBody());

            // add comments container and every comment of the blog
            Element rootCommentsElement = doc.createElement("comments");
            blogElement.appendChild(rootCommentsElement);
            for (Comment comment : inputBlog.getCommentList()) {
                appendComment(doc, rootCommentsElement, comment);
            }

            writeDocument(doc, fileName);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Removes the first blog whose {@code id} attribute equals {@code delId}.
     *
     * @param delId    id of the blog to remove
     * @param fileName path of the XML file
     */
    public static void removeBlog(int delId, String fileName) {
        try {
            Document doc = parseDocument(fileName);
            NodeList blogNodeList = doc.getElementsByTagName("blog");
            for (int i = 0; i < blogNodeList.getLength(); i++) {
                Node blogNode = blogNodeList.item(i);
                Element blogElement = (Element) blogNode;
                int blogId = Integer.parseInt(blogElement.getAttribute("id"));
                if (blogId == delId) {
                    blogNode.getParentNode().removeChild(blogNode);
                    break;
                }
            }
            writeDocument(doc, fileName);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Appends {@code newComment} to the blog whose id equals {@code targetId}.
     *
     * @param fileName   path of the XML file
     * @param newComment comment to add
     * @param targetId   id of the blog to receive the comment
     */
    public static void addComment(String fileName, Comment newComment, int targetId) {
        try {
            Document doc = parseDocument(fileName);
            NodeList blogNodeList = doc.getElementsByTagName("blog");
            for (int i = 0; i < blogNodeList.getLength(); i++) {
                Element blogElement = (Element) blogNodeList.item(i);
                int blogId = Integer.parseInt(blogElement.getAttribute("id"));
                if (blogId == targetId) {
                    Element rootCommentsElement =
                            (Element) blogElement.getElementsByTagName("comments").item(0);
                    appendComment(doc, rootCommentsElement, newComment);
                    break;
                }
            }
            writeDocument(doc, fileName);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
//==================================================================== // //File: $RCSfile: POP.java,v $ //Version: $Revision: 1.1.10.1 $ //Modified: $Date: 2013/01/02 18:02:13 $ // //(c) Copyright 2004-2012 by Mentor Graphics Corp. All rights reserved. // //==================================================================== package lib; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.StringTokenizer; import com.mentor.nucleus.bp.core.Association_c; import com.mentor.nucleus.bp.core.AttributeValue_c; import com.mentor.nucleus.bp.core.Attribute_c; import com.mentor.nucleus.bp.core.BaseAttribute_c; import com.mentor.nucleus.bp.core.ClassAsAssociatedOneSide_c; import com.mentor.nucleus.bp.core.ClassAsAssociatedOtherSide_c; import com.mentor.nucleus.bp.core.ClassAsLink_c; import com.mentor.nucleus.bp.core.ClassAsSimpleFormalizer_c; import com.mentor.nucleus.bp.core.ClassAsSimpleParticipant_c; import com.mentor.nucleus.bp.core.ClassAsSubtype_c; import com.mentor.nucleus.bp.core.ClassAsSupertype_c; import com.mentor.nucleus.bp.core.ClassInAssociation_c; import com.mentor.nucleus.bp.core.ClassInEngine_c; import com.mentor.nucleus.bp.core.ComponentInstance_c; import com.mentor.nucleus.bp.core.ComponentPackage_c; import com.mentor.nucleus.bp.core.CoreDataType_c; import com.mentor.nucleus.bp.core.CorePlugin; import com.mentor.nucleus.bp.core.DataType_c; import com.mentor.nucleus.bp.core.DerivedAssociation_c; import com.mentor.nucleus.bp.core.DerivedBaseAttribute_c; import com.mentor.nucleus.bp.core.Domain_c; import com.mentor.nucleus.bp.core.Instance_c; import com.mentor.nucleus.bp.core.LinkParticipation_c; import com.mentor.nucleus.bp.core.Link_c; import com.mentor.nucleus.bp.core.LinkedAssociation_c; import com.mentor.nucleus.bp.core.ModelClass_c; import com.mentor.nucleus.bp.core.Ooaofooa; import com.mentor.nucleus.bp.core.Package_c; import 
com.mentor.nucleus.bp.core.ReferredToClassInAssoc_c; import com.mentor.nucleus.bp.core.ReferringClassInAssoc_c; import com.mentor.nucleus.bp.core.SimpleAssociation_c; import com.mentor.nucleus.bp.core.Subsystem_c; import com.mentor.nucleus.bp.core.SubtypeSupertypeAssociation_c; import com.mentor.nucleus.bp.core.SystemDatatypePackage_c; import com.mentor.nucleus.bp.core.SystemModel_c; import com.mentor.nucleus.bp.core.common.ClassQueryInterface_c; import com.mentor.nucleus.bp.core.common.ModelRoot; import com.mentor.nucleus.bp.core.common.NonRootModelElement; import com.mentor.nucleus.bp.core.common.Transaction; import com.mentor.nucleus.bp.core.common.TransactionManager; public class POP { private static Ooaofooa mmRoot; private static ComponentInstance_c engine; public static void populate(final String project) { TransactionManager manager = null; Transaction transaction = null; try { HashMap<NonRootModelElement, Instance_c> instMap = new HashMap<NonRootModelElement, Instance_c>(); HashMap<NonRootModelElement, Instance_c> classMap; /* * Find the model root belonging to the model being translated. 
*/ SystemModel_c sysMdl = SystemModel_c.SystemModelInstance(Ooaofooa.getDefaultInstance(), new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { return ((SystemModel_c)systemModel).getName().equals(project); } }); // collect all model roots for this system model Domain_c[] domains = Domain_c.getManyS_DOMsOnR28(sysMdl); ComponentPackage_c[] componentPackages = ComponentPackage_c.getManyCP_CPsOnR4602(sysMdl); SystemDatatypePackage_c[] systemDatatypePackages = SystemDatatypePackage_c.getManySLD_SDPsOnR4400(sysMdl); Package_c[] packages = Package_c.getManyEP_PKGsOnR1401(sysMdl); ArrayList<Ooaofooa> modelRoots = new ArrayList<Ooaofooa>(); for (Package_c pkg : packages ) { debugLog("Found package: " + pkg.getName() + " -- " + ((Ooaofooa) pkg.getModelRoot()).getResourcePath().toOSString()); modelRoots.add((Ooaofooa) pkg.getModelRoot()); } for (Domain_c domain : domains) { debugLog("Found domain: " + domain.getName() + " -- " + ((Ooaofooa) domain.getModelRoot()).getResourcePath().toOSString()); modelRoots.add((Ooaofooa) domain.getModelRoot()); } for (ComponentPackage_c componentPackage : componentPackages) { debugLog("Found component package: " + componentPackage.getName() + " -- " + ((Ooaofooa) componentPackage.getModelRoot()).getResourcePath().toOSString()); modelRoots.add((Ooaofooa) componentPackage.getModelRoot()); } for (SystemDatatypePackage_c systemDatatypePackage : systemDatatypePackages) { debugLog("Found SLDT package: " + systemDatatypePackage.getName() + " -- " + ((Ooaofooa) systemDatatypePackage.getModelRoot()).getResourcePath().toOSString()); modelRoots.add((Ooaofooa) systemDatatypePackage.getModelRoot()); } debugLog("1"); /* * Find the model root of the model compiler (holding the meta model). 
* This assumes there is only one model named ooaofooa in any ExecutionEngine */ SystemModel_c sysMM = SystemModel_c.SystemModelInstance(Ooaofooa.getDefaultInstance(), new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { if (systemModel instanceof SystemModel_c) { SystemModel_c sysModel = (SystemModel_c) systemModel; Domain_c dom = Domain_c.getOneS_DOMOnR28(sysModel, new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { return ((Domain_c)systemModel).getName().equals("ooaofooa"); } }); return ComponentInstance_c.getOneI_EXEOnR2948(dom) != null; } return false; } }); Domain_c metaModel = Domain_c.getOneS_DOMOnR28(sysMM, new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { return ((Domain_c)systemModel).getName().equals("ooaofooa"); } }); engine = ComponentInstance_c.getOneI_EXEOnR2948(metaModel); mmRoot = (Ooaofooa) engine.getModelRoot(); debugLog("Got System " + sysMM.getName()); debugLog("Got Domain " + metaModel.getName()); debugLog("Got engine " + engine.getName()); debugLog("Got mm root " + mmRoot.toString()); debugLog("2"); /* * Group all instance population activities into one transaction, * this will force Session Explorer one the transaction ends */ manager = sysMM.getTransactionManager(); transaction = manager.startTransaction("Populate metamodel in Verifier", mmRoot); // hard code the system model instantiation as it is not related to the modelRoot ModelClass_c sysMdlClass = ModelClass_c.getOneO_OBJOnR2(Subsystem_c.getManyS_SSsOnR1(metaModel), new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { return ((ModelClass_c)systemModel).getName().equals("System Model"); } }); Instance_c inst = addInstance(sysMdlClass); Attribute_c sysMdlNameAttr = Attribute_c.getOneO_ATTROnR102(sysMdlClass, new ClassQueryInterface_c() { public boolean evaluate(Object systemModel) { return ((Attribute_c)systemModel).getName().equals("Name"); } }); getAttribute(sysMdlNameAttr, 
inst).Setstring(project);; ModelClass_c[] metaClasses = ModelClass_c.getManyO_OBJsOnR2(Subsystem_c.getManyS_SSsOnR1(metaModel)); HashMap<ModelClass_c, HashMap<NonRootModelElement, Instance_c>> instMapByClass = new HashMap<ModelClass_c, HashMap<NonRootModelElement, Instance_c>>(metaClasses.length); // Store the system instance in the maps debugLog("Creating map data for system " + sysMdl.getName()); classMap = new HashMap<NonRootModelElement, Instance_c>(); classMap.put(sysMdl, inst); instMapByClass.put(sysMdlClass, classMap); instMap.put(sysMdl, inst); debugLog("3"); /* * Create all instances in the meta model execution engine based on * the input model. */ for (ModelClass_c metaClass : metaClasses) { debugLog("3 - Processing meta class " + metaClass.getName()); if ( metaClass.getName().equals("System Model")) { continue; } classMap = new HashMap<NonRootModelElement, Instance_c>(); instMapByClass.put(metaClass, classMap); for (Ooaofooa modelRoot : modelRoots) { debugLog(" - looping on model root: " + modelRoot.getResourcePath().toOSString()); // derive the java class name from the meta class, make sure it exists and use it // to query the in-memory meta model String metaClassName = deriveMcJavaName(metaClass.getName()); String metaClassJavaName = "com.mentor.nucleus.bp.core." 
+ metaClassName + "_c"; debugLog(" - cn: " + metaClassJavaName); String metaClassFindInstancesMethodName = metaClassName + "Instances"; debugLog(" - fin: " + metaClassFindInstancesMethodName); try { Class<?> javaClass = Class.forName(metaClassJavaName); Method findInstancesMethod = javaClass.getMethod(metaClassFindInstancesMethodName, ModelRoot.class); debugLog(" - findInstancesMethod is " + findInstancesMethod.getName()); // query the in-memory meta class for all its instances associated to the input model NonRootModelElement[] metaClassInstances = (NonRootModelElement[])findInstancesMethod.invoke(null, modelRoot); debugLog(" - count of instances found: " + metaClassInstances.length); // find all attribute names and query the in-memory meta model to // find the values for these attributes for the given instance Attribute_c[] attrs = Attribute_c.getManyO_ATTRsOnR102(metaClass); CoreDataType_c[] cdts = new CoreDataType_c[attrs.length]; for (int i = 0; i < attrs.length; i++) { cdts[i] = CoreDataType_c.getOneS_CDTOnR17(DataType_c.getOneS_DTOnR114(attrs[i])); } for (NonRootModelElement metaClassInstance : metaClassInstances) { debugLog(" - looping on instances: " + metaClassInstance.getName()); inst = addInstance(metaClass); classMap.put(metaClassInstance, inst); instMap.put(metaClassInstance, inst); for (int i = 0; i < attrs.length; i++) { DerivedBaseAttribute_c derived = DerivedBaseAttribute_c.getOneO_DBATTROnR107(BaseAttribute_c.getOneO_BATTROnR106(attrs[i])); if (derived != null) { // don't care about derived attributes continue; } if (cdts[i] != null) { switch (cdts[i].getCore_typ()) { case 1: case 2: case 3: case 4: Method m = javaClass.getMethod("get" + deriveMcJavaName(attrs[i].getName())); Object val = m.invoke(metaClassInstance); getAttribute(attrs[i], inst).Setstring(val + ""); break; default: break; } } } } } catch (ClassNotFoundException e) { debugLog(" *** ClassNotFoundException ***"); // as document is also part of ooaofooa, this is actually a normal 
case } catch (NoSuchMethodException e) { debugLog(" *** NoSuchMethodException ***"); CorePlugin.err.println(e.toString()); } catch (InvocationTargetException e) { debugLog(" *** InvocationTargetException ***"); // TODO: put back inCorePlugin.err.println(e.toString() + " " + e.getTargetException()); } catch (IllegalAccessException e) { debugLog(" *** IllegalAccessException ***"); CorePlugin.err.println(e.toString()); } } } debugLog("4"); /* * Relate all instances that were created in the previous step */ for (ModelClass_c metaClass : metaClasses) { debugLog("4 - Processing meta class " + metaClass.getName()); classMap = instMapByClass.get(metaClass); // find all associations where this meta class is participating Association_c[] assos = Association_c.getManyR_RELsOnR201(metaClass); for (Association_c asso : assos) { debugLog(" " + asso.getNumb()); // figure out which type of association this is SimpleAssociation_c simpleAssociation = SimpleAssociation_c.getOneR_SIMPOnR206(asso); LinkedAssociation_c linkedAssociation = LinkedAssociation_c.getOneR_ASSOCOnR206(asso); SubtypeSupertypeAssociation_c subtypeSupertypeAssociation = SubtypeSupertypeAssociation_c.getOneR_SUBSUPOnR206(asso); DerivedAssociation_c derivedAssociation = DerivedAssociation_c.getOneR_COMPOnR206(asso); if (simpleAssociation != null) { // for simple associations there are really two cases, formalized and unformalized ClassAsSimpleParticipant_c[] classAsSimpleParticipant = ClassAsSimpleParticipant_c.getManyR_PARTsOnR207(simpleAssociation); if (classAsSimpleParticipant.length == 1) { // formalized association debugLog(" is a simple formalized association"); ClassAsSimpleFormalizer_c classAsSimpleFormalizer = ClassAsSimpleFormalizer_c.getOneR_FORMOnR208(simpleAssociation); ReferringClassInAssoc_c referringClassInAssoc = ReferringClassInAssoc_c.getOneR_RGOOnR205(classAsSimpleFormalizer); ReferredToClassInAssoc_c referredToClassInAssoc = 
ReferredToClassInAssoc_c.getOneR_RTOOnR204(classAsSimpleParticipant[0]); ModelClass_c formalizer = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(referringClassInAssoc)); ModelClass_c participant = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(referredToClassInAssoc)); String formalizerClassName = deriveMcJavaName(formalizer.getName()); String formalizerClassJavaName = "com.mentor.nucleus.bp.core." + formalizerClassName + "_c"; String participantClassName = deriveMcJavaName(participant.getName()); String participantClassJavaName = "com.mentor.nucleus.bp.core." + participantClassName + "_c"; try { Class<?> formalizerClass = Class.forName(formalizerClassJavaName); debugLog(" - formalizer: " + formalizerClassName); Class<?> participantClass = Class.forName(participantClassJavaName); debugLog(" - participant: " + participantClassName); for (NonRootModelElement elem : classMap.keySet()) { debugLog(" - looping on element " + elem.getName()); try { if (participantClass.isInstance(elem)) { debugLog(" - participant class is an instance of elem"); // it might be a one to many, so do a 'select many' query String txtPhrase = formalizer == participant ? 
classAsSimpleFormalizer.getTxt_phrs() : ""; String formalizerMethodName = "getMany" + formalizer.getKey_lett() + "sOn" + asso.getName() + deriveMcJavaName(txtPhrase); debugLog(" - method name = " + formalizerMethodName); Method findFormalizers = formalizerClass.getMethod(formalizerMethodName, participantClass); NonRootModelElement[] formalizers = (NonRootModelElement[])findFormalizers.invoke(null, elem); debugLog(" - found " + formalizers.length + " possible formalizer instances"); for (NonRootModelElement form : formalizers) { debugLog(" - formalizing " + form.getName()); Instance_c toInstance = instMap.get(form); // there may be an instance of the investigate meta class // that does not participate in this type of association if (toInstance != null) { debugLog(" - relating: " + elem.getName() + " to " + form.getName() + " across " + asso.getName()); relate(classMap.get(elem), toInstance, null, asso); } else { debugLog(" - could not relate, no toInstance found"); } } } /* SKB: Not needed. If this is here then we hook up relationships twice, once for each side as we're processing the instance data. else if (formalizerClass.isInstance(elem)) { debugLog(" - formalizer class is an instance of elem"); // it might be a one to many, so do a 'select many' query String txtPhrase = formalizer == participant ? 
classAsSimpleFormalizer.getTxt_phrs() : ""; String participantMethodName = "getMany" + participant.getKey_lett() + "sOn" + asso.getName() + deriveMcJavaName(txtPhrase); debugLog(" - method name = " + participantMethodName); Method findParticipants = participantClass.getMethod(participantMethodName, formalizerClass); NonRootModelElement[] participants = (NonRootModelElement[])findParticipants.invoke(null, elem); debugLog(" - found " + participants.length + " possible participant instances"); for (NonRootModelElement part : participants) { debugLog(" - formalizing " + part.getName()); Instance_c toInstance = instMap.get(part); // there may be an instance of the investigate meta class // that does not participate in this type of association if (toInstance != null) { debugLog(" - relating: " + elem.getName() + " to " + part.getName() + " across " + asso.getName()); relate(classMap.get(elem), toInstance, null, asso); } else { debugLog(" - could not relate, no toInstance found"); } } }*/ } catch (NoSuchMethodException e) { debugLog(" *** NoSuchMethodException ***"); CorePlugin.err.println(e.toString()); } catch (InvocationTargetException e) { debugLog(" *** InvocationTargetException ***"); CorePlugin.err.println(e.toString()); } catch (IllegalAccessException e) { debugLog(" *** IllegalAccessException ***"); CorePlugin.err.println(e.toString()); } catch (IllegalArgumentException e) { debugLog(" *** IllegalArgumentException ***"); CorePlugin.err.println(e.toString()); } } } catch (ClassNotFoundException e) { // as document is also part of ooaofooa, this is actually a normal case debugLog(" *** ClassNotFoundException ***"); } } else { // unformalized association debugLog(" is a simple unformalized association"); ReferredToClassInAssoc_c referredToClassInAssoc1 = ReferredToClassInAssoc_c.getOneR_RTOOnR204(classAsSimpleParticipant[0]); ClassInAssociation_c classInAssociation1 = ClassInAssociation_c.getOneR_OIROnR203(referredToClassInAssoc1); ModelClass_c participant1 = 
ModelClass_c.getOneO_OBJOnR201(classInAssociation1); ReferredToClassInAssoc_c referredToClassInAssoc2 = ReferredToClassInAssoc_c.getOneR_RTOOnR204(classAsSimpleParticipant[1]); ClassInAssociation_c classInAssociation2 = ClassInAssociation_c.getOneR_OIROnR203(referredToClassInAssoc2); ModelClass_c participant2 = ModelClass_c.getOneO_OBJOnR201(classInAssociation2); String part1ClassName = deriveMcJavaName(participant1.getName()); String part1ClassJavaName = "com.mentor.nucleus.bp.core." + part1ClassName + "_c"; String participant2ClassName = deriveMcJavaName(participant2.getName()); String participant2ClassJavaName = "com.mentor.nucleus.bp.core." + participant2ClassName + "_c"; try { Class<?> part1Class = Class.forName(part1ClassJavaName); Class<?> participant2Class = Class.forName(participant2ClassJavaName); for (NonRootModelElement elem : classMap.keySet()) { try { // TODO: this implementation will cause reflexive associations to be // instantiated twice, one for each direction (not a problem right now // as the meta model is not using this type of association if (part1Class.isInstance(elem)) { // it might be a one to many, so do a 'select many' query String txtPhrase = participant1 == participant2 ? 
classAsSimpleParticipant[1].getTxt_phrs() : ""; Method findPerticipants = part1Class.getMethod("getMany" + participant1.getKey_lett() + "sOn" + asso.getName() + deriveMcJavaName(txtPhrase), participant2Class); NonRootModelElement[] participants = (NonRootModelElement[])findPerticipants.invoke(null, elem); for (NonRootModelElement part : participants) { Instance_c toInstance = instMap.get(part); // there may be an instance of the investigate meta class // that does not participate in this type of association if (toInstance != null) { relate(classMap.get(elem), toInstance, null, asso); } } } } catch (NoSuchMethodException e) { CorePlugin.err.println(e.toString()); } catch (InvocationTargetException e) { CorePlugin.err.println(e.toString()); } catch (IllegalAccessException e) { CorePlugin.err.println(e.toString()); } } } catch (ClassNotFoundException e) { // as document is also part of ooaofooa, this is actually a normal case } } } else if (linkedAssociation != null) { ClassAsAssociatedOneSide_c one = ClassAsAssociatedOneSide_c.getOneR_AONEOnR209(linkedAssociation); ClassAsAssociatedOtherSide_c oth = ClassAsAssociatedOtherSide_c.getOneR_AOTHOnR210(linkedAssociation); ClassAsLink_c link = ClassAsLink_c.getOneR_ASSROnR211(linkedAssociation); ModelClass_c referred1 = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(ReferredToClassInAssoc_c .getOneR_RTOOnR204(one))); ModelClass_c referred2 = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(ReferredToClassInAssoc_c.getOneR_RTOOnR204(oth))); ModelClass_c across = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(ReferringClassInAssoc_c.getOneR_RGOOnR205(link))); debugLog(" is a linked association"); if (across == metaClass) { // go head and establish the relationships String ref1ClassName = deriveMcJavaName(referred1.getName()); String ref1ClassJavaName = "com.mentor.nucleus.bp.core." 
+ ref1ClassName + "_c"; String ref2ClassName = deriveMcJavaName(referred2.getName()); String ref2ClassJavaName = "com.mentor.nucleus.bp.core." + ref2ClassName + "_c"; String acrossClassName = deriveMcJavaName(across.getName()); String acrossClassJavaName = "com.mentor.nucleus.bp.core." + acrossClassName + "_c"; try { Class<?> ref1Class = Class.forName(ref1ClassJavaName); Class<?> ref2Class = Class.forName(ref2ClassJavaName); Class<?> acrossClass = Class.forName(acrossClassJavaName); for (NonRootModelElement elem : classMap.keySet()) { try { if (acrossClass.isInstance(elem)) { Method findReferred2one = ref1Class.getMethod("getOne" + referred1.getKey_lett() + "On" + asso.getName(), acrossClass); Method findReferred2oth = ref2Class.getMethod("getOne" + referred2.getKey_lett() + "On" + asso.getName(), acrossClass); NonRootModelElement referred2one = (NonRootModelElement)findReferred2one.invoke(null, elem); NonRootModelElement referred2oth = (NonRootModelElement)findReferred2oth.invoke(null, elem); Instance_c instFrom = instMap.get(referred2one); Instance_c instTo = instMap.get(referred2oth); // there may be an instance of the investigate meta class // that does not participate in this type of association if (instFrom != null && instTo != null) { relate(instFrom, instTo, classMap.get(elem), asso); } } } catch (NoSuchMethodException e) { CorePlugin.err.println(e.toString()); } catch (InvocationTargetException e) { CorePlugin.err.println(e.toString() + elem); } catch (IllegalAccessException e) { CorePlugin.err.println(e.toString()); } } } catch (ClassNotFoundException e) { // as document is also part of ooaofooa, this is actually a normal case } } } else if (subtypeSupertypeAssociation != null) { debugLog(" is a subtype/supertype association"); ClassAsSupertype_c sup = ClassAsSupertype_c.getOneR_SUPEROnR212(subtypeSupertypeAssociation); ClassAsSubtype_c[] subs = ClassAsSubtype_c.getManyR_SUBsOnR213(subtypeSupertypeAssociation); for (ClassAsSubtype_c sub : subs) { 
ModelClass_c superClass = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203( ReferredToClassInAssoc_c.getOneR_RTOOnR204(sup))); ModelClass_c subClass = ModelClass_c.getOneO_OBJOnR201(ClassInAssociation_c.getOneR_OIROnR203(ReferringClassInAssoc_c.getOneR_RGOOnR205(sub))); // go head and establish the relationships String superClassName = deriveMcJavaName(superClass.getName()); String superClassJavaName = "com.mentor.nucleus.bp.core." + superClassName + "_c"; String subClassName = deriveMcJavaName(subClass.getName()); String subClassJavaName = "com.mentor.nucleus.bp.core." + subClassName + "_c"; try { Class<?> superJavaClass = Class.forName(superClassJavaName); Class<?> subJavaClass = Class.forName(subClassJavaName); // for each inst of referring for (NonRootModelElement elem : classMap.keySet()) { try { if (subJavaClass.isInstance(elem)) { Method findSuperClassInstance = superJavaClass.getMethod("getOne" + superClass.getKey_lett() + "On" + asso.getName(), subJavaClass); NonRootModelElement superClassInstance = (NonRootModelElement)findSuperClassInstance.invoke(null, elem); // no need to null check, there should be a super class // TODO: Packageable Element doesn't follow the rules yet, hence the null check if (superClassInstance != null) { relate(classMap.get(elem), instMap.get(superClassInstance), null, asso); } } } catch (NoSuchMethodException e) { CorePlugin.err.println("No such method " + e.toString()); } catch (InvocationTargetException e) { CorePlugin.err.println("Invocation target exception " + e.toString()); } catch (IllegalAccessException e) { CorePlugin.err.println("Illegal access " + e.toString()); } } } catch (ClassNotFoundException e) { // as document is also part of ooaofooa, this is actually a normal case } } } else if (derivedAssociation != null) { CorePlugin.err.println("DerivedAssociation_c, not implemented"); } } } } catch (Exception e) { CorePlugin.err.println(e.toString()); e.printStackTrace(); StackTraceElement ts[] = 
e.getStackTrace();
// Tail of the association-population routine begun above: print each stack
// frame of the unexpected exception, then always close the write transaction.
for (StackTraceElement t : ts)
    CorePlugin.err.println(t.toString());
} finally {
    if (manager != null) {
        // balance the transaction started at the top of this routine
        manager.endTransaction(transaction);
    }
}
debugLog("5");
}

/**
 * Derives the MC-Java generated class-name fragment from a (possibly
 * space-separated) meta-model class name: each whitespace token is
 * capitalized on its first character, the remainder lowercased, and the
 * tokens concatenated (e.g. "Model Class" -> "ModelClass").
 */
private static String deriveMcJavaName(String name) {
    StringTokenizer tokenizer = new StringTokenizer(name, " ");
    String convertedName = "";
    while (tokenizer.hasMoreElements()) {
        String str = tokenizer.nextToken();
        // first character upper-cased, rest of the token lower-cased
        convertedName += str.substring(0, 1).toUpperCase();
        if (str.length() > 1) {
            convertedName += str.substring(1).toLowerCase();
        }
    }
    return convertedName;
}

/**
 * Wires one instance-level link of {@code association} between {@code from}
 * and {@code to}, optionally through the associative instance {@code using}.
 * A LinkParticipation (R2958) is looked up per endpoint, keyed by the
 * association name stored in its label, and created on demand; the Link
 * itself is related to the participations across R2901/R2902 (and R2903 for
 * the associative side) and to the association across R2904/R2959.
 */
private static void relate(Instance_c from, Instance_c to, Instance_c using,
        final Association_c association) {
    // find (or lazily create) the 'from' end's participation in this association
    LinkParticipation_c fromLinkParticipation = LinkParticipation_c.getOneI_LIPOnR2958(from,
            new ClassQueryInterface_c() {
                public boolean evaluate(Object obj) {
                    if (obj instanceof LinkParticipation_c) {
                        LinkParticipation_c linkPart = (LinkParticipation_c) obj;
                        // participation is matched by association name stored in the label
                        return linkPart.getLabel().equals(association.getName());
                    }
                    return false;
                }
            });
    if (fromLinkParticipation == null) {
        fromLinkParticipation = new LinkParticipation_c(mmRoot);
        from.relateAcrossR2958To(fromLinkParticipation, true);
    }
    // same lookup/creation for the 'to' end
    LinkParticipation_c toLinkParticipation = LinkParticipation_c.getOneI_LIPOnR2958(to,
            new ClassQueryInterface_c() {
                public boolean evaluate(Object obj) {
                    if (obj instanceof LinkParticipation_c) {
                        LinkParticipation_c linkPart = (LinkParticipation_c) obj;
                        return linkPart.getLabel().equals(association.getName());
                    }
                    return false;
                }
            });
    if (toLinkParticipation == null) {
        toLinkParticipation = new LinkParticipation_c(mmRoot);
        to.relateAcrossR2958To(toLinkParticipation, true);
    }
    Link_c link = new Link_c(mmRoot);
    if (association != null) {
        association.relateAcrossR2904To(link, true);
        association.relateAcrossR2959To(fromLinkParticipation, true);
        association.relateAcrossR2959To(toLinkParticipation, true);
        link.relateAcrossR2901To(fromLinkParticipation, true);
        link.relateAcrossR2902To(toLinkParticipation, true);
        if (using != null) {
            // associative (link-class) instance: find or create its participation too
            LinkParticipation_c associativeLinkParticipation = LinkParticipation_c.getOneI_LIPOnR2958(using,
                    new ClassQueryInterface_c() {
                        public boolean evaluate(Object obj) {
                            if (obj instanceof LinkParticipation_c) {
                                LinkParticipation_c linkPart = (LinkParticipation_c) obj;
                                return linkPart.getLabel().equals(association.getName());
                            }
                            return false;
                        }
                    });
            if (associativeLinkParticipation == null) {
                associativeLinkParticipation = new LinkParticipation_c(mmRoot);
                using.relateAcrossR2958To(associativeLinkParticipation, true);
            }
            association.relateAcrossR2959To(associativeLinkParticipation, true);
            // NOTE(review): unlike R2901/R2902 above, no 'true' flag here —
            // presumably the single-argument overload; confirm against the
            // generated Link_c API.
            link.relateAcrossR2903To(associativeLinkParticipation);
        }
    }
}

/**
 * Creates a Verifier Instance for the given meta-model class, provided the
 * class participates in the engine (has a ClassInEngine across R2961).
 * Returns null when the class is not part of the engine.
 */
private static Instance_c addInstance(ModelClass_c mcClass) {
    // find the corresponding model class in the meta model
    ClassInEngine_c classInEngine = ClassInEngine_c.getOneCSME_CIEOnR2961(mcClass);
    if (classInEngine != null) {
        // create the instance and relate it to the Verifier session and its class
        Instance_c inst = new Instance_c(mmRoot);
        inst.relateAcrossR2957To(engine);
        inst.relateAcrossR2962To(classInEngine, true);
        inst.Initialize(false);
        return inst;
    }
    return null;
}

/**
 * Returns the AttributeValue of {@code instance} (across R2909) whose
 * Attribute (across R2910) is the given {@code attribute}, matched by
 * identity; null when the instance has no value for that attribute.
 */
private static AttributeValue_c getAttribute(final Attribute_c attribute, Instance_c instance) {
    // query the instance's attribute values for the one backed by 'attribute'
    return AttributeValue_c.getOneI_AVLOnR2909(instance, new ClassQueryInterface_c() {
        public boolean evaluate(Object obj) {
            Attribute_c attr = Attribute_c.getOneO_ATTROnR2910((AttributeValue_c) obj);
            // identity comparison is intentional: same meta-model element
            return attribute == attr;
        }
    });
}

/**
 * Debug tracing hook; disabled by default. Flip {@code debugOn} to emit
 * the trace messages sprinkled through this class via the plug-in log.
 */
private static void debugLog(String msg) {
    boolean debugOn = false;
    if ( debugOn == true ) {
        LOG.LogInfo(msg);
    }
}
}
package cn.jarlen.richcommon.widget;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.support.annotation.ColorInt;
import android.support.annotation.ColorRes;
import android.support.annotation.DrawableRes;
import android.util.AttributeSet;
import android.widget.ImageView;

import cn.jarlen.richcommon.R;

/**
 * An ImageView that renders its drawable clipped to a circle via a
 * {@link BitmapShader}, with an optional border ring and an optional fill
 * color drawn behind the bitmap. The scale type is fixed to CENTER_CROP.
 *
 * Created by jarlen on 2017/8/30.
 */
public class CircleImageView extends ImageView {

    // only supported scale type; enforced in setScaleType()
    private static final ScaleType SCALE_TYPE = ScaleType.CENTER_CROP;

    private static final Bitmap.Config BITMAP_CONFIG = Bitmap.Config.ARGB_8888;
    // size of the stand-in bitmap rendered for a ColorDrawable (it has no intrinsic size)
    private static final int COLORDRAWABLE_DIMENSION = 2;

    private static final int DEFAULT_BORDER_WIDTH = 0;
    private static final int DEFAULT_BORDER_COLOR = Color.BLACK;
    private static final int DEFAULT_FILL_COLOR = Color.TRANSPARENT;
    private static final boolean DEFAULT_BORDER_OVERLAY = false;

    private final RectF mDrawableRect = new RectF();   // area the bitmap circle occupies
    private final RectF mBorderRect = new RectF();     // area the border ring occupies

    private final Matrix mShaderMatrix = new Matrix(); // maps bitmap -> mDrawableRect (center-crop)
    private final Paint mBitmapPaint = new Paint();
    private final Paint mBorderPaint = new Paint();
    private final Paint mFillPaint = new Paint();

    private int mBorderColor = DEFAULT_BORDER_COLOR;
    private int mBorderWidth = DEFAULT_BORDER_WIDTH;
    private int mFillColor = DEFAULT_FILL_COLOR;

    private Bitmap mBitmap;
    private BitmapShader mBitmapShader;
    private int mBitmapWidth;
    private int mBitmapHeight;

    private float mDrawableRadius;
    private float mBorderRadius;

    private ColorFilter mColorFilter;

    private boolean mReady;          // true once init() ran; guards setup() during construction
    private boolean mSetupPending;   // setup() was requested before mReady; replayed by init()
    private boolean mBorderOverlay;  // true: border drawn over the bitmap, not outside it
    private boolean mDisableCircularTransformation; // true: behave like a plain ImageView

    public CircleImageView(Context context) {
        super(context);
        init();
    }

    public CircleImageView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CircleImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // read civ_* styleable attributes (border width/color/overlay, fill color)
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircleImageView, defStyle, 0);
        mBorderWidth = a.getDimensionPixelSize(R.styleable.CircleImageView_civ_border_width, DEFAULT_BORDER_WIDTH);
        mBorderColor = a.getColor(R.styleable.CircleImageView_civ_border_color, DEFAULT_BORDER_COLOR);
        mBorderOverlay = a.getBoolean(R.styleable.CircleImageView_civ_border_overlay, DEFAULT_BORDER_OVERLAY);
        mFillColor = a.getColor(R.styleable.CircleImageView_civ_fill_color, DEFAULT_FILL_COLOR);
        a.recycle();
        init();
    }

    /**
     * Completes construction: pins the scale type and replays any setup()
     * call that arrived while the superclass constructor was still running
     * (super(...) may trigger setImage* before our fields are ready).
     */
    private void init() {
        super.setScaleType(SCALE_TYPE);
        mReady = true;
        if (mSetupPending) {
            setup();
            mSetupPending = false;
        }
    }

    @Override
    public ScaleType getScaleType() {
        return SCALE_TYPE;
    }

    /** Only CENTER_CROP is supported; anything else is rejected. */
    @Override
    public void setScaleType(ScaleType scaleType) {
        if (scaleType != SCALE_TYPE) {
            throw new IllegalArgumentException(String.format("ScaleType %s not supported.", scaleType));
        }
    }

    /** adjustViewBounds is incompatible with the fixed circular layout. */
    @Override
    public void setAdjustViewBounds(boolean adjustViewBounds) {
        if (adjustViewBounds) {
            throw new IllegalArgumentException("adjustViewBounds not supported.");
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // bypass mode: draw like a normal ImageView
        if (mDisableCircularTransformation) {
            super.onDraw(canvas);
            return;
        }
        if (mBitmap == null) {
            return;
        }
        // paint order: fill disc (if any), bitmap disc, then border ring on top
        if (mFillColor != Color.TRANSPARENT) {
            canvas.drawCircle(mDrawableRect.centerX(), mDrawableRect.centerY(), mDrawableRadius, mFillPaint);
        }
        canvas.drawCircle(mDrawableRect.centerX(), mDrawableRect.centerY(), mDrawableRadius, mBitmapPaint);
        if (mBorderWidth > 0) {
            canvas.drawCircle(mBorderRect.centerX(), mBorderRect.centerY(), mBorderRadius, mBorderPaint);
        }
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // geometry depends on view size, so recompute rects/radii/shader matrix
        setup();
    }

    @Override
    public void setPadding(int left, int top, int right, int bottom) {
        super.setPadding(left, top, right, bottom);
        setup();
    }

    @Override
    public void setPaddingRelative(int start, int top, int end, int bottom) {
        super.setPaddingRelative(start, top, end, bottom);
        setup();
    }

    public int getBorderColor() {
        return mBorderColor;
    }

    public void setBorderColor(@ColorInt int borderColor) {
        if (borderColor == mBorderColor) {
            return;
        }
        mBorderColor = borderColor;
        mBorderPaint.setColor(mBorderColor);
        invalidate();
    }

    /**
     * @deprecated Use {@link #setBorderColor(int)} instead
     */
    @Deprecated
    public void setBorderColorResource(@ColorRes int borderColorRes) {
        setBorderColor(getContext().getResources().getColor(borderColorRes));
    }

    /**
     * Return the color drawn behind the circle-shaped drawable.
     *
     * @return The color drawn behind the drawable
     * @deprecated Fill color support is going to be removed in the future
     */
    @Deprecated
    public int getFillColor() {
        return mFillColor;
    }

    /**
     * Set a color to be drawn behind the circle-shaped drawable. Note that
     * this has no effect if the drawable is opaque or no drawable is set.
     *
     * @param fillColor The color to be drawn behind the drawable
     * @deprecated Fill color support is going to be removed in the future
     */
    @Deprecated
    public void setFillColor(@ColorInt int fillColor) {
        if (fillColor == mFillColor) {
            return;
        }
        mFillColor = fillColor;
        mFillPaint.setColor(fillColor);
        invalidate();
    }

    /**
     * Set a color to be drawn behind the circle-shaped drawable. Note that
     * this has no effect if the drawable is opaque or no drawable is set.
     *
     * @param fillColorRes The color resource to be resolved to a color and
     *                     drawn behind the drawable
     * @deprecated Fill color support is going to be removed in the future
     */
    @Deprecated
    public void setFillColorResource(@ColorRes int fillColorRes) {
        setFillColor(getContext().getResources().getColor(fillColorRes));
    }

    public int getBorderWidth() {
        return mBorderWidth;
    }

    public void setBorderWidth(int borderWidth) {
        if (borderWidth == mBorderWidth) {
            return;
        }
        mBorderWidth = borderWidth;
        setup();
    }

    public boolean isBorderOverlay() {
        return mBorderOverlay;
    }

    public void setBorderOverlay(boolean borderOverlay) {
        if (borderOverlay == mBorderOverlay) {
            return;
        }
        mBorderOverlay = borderOverlay;
        setup();
    }

    public boolean isDisableCircularTransformation() {
        return mDisableCircularTransformation;
    }

    public void setDisableCircularTransformation(boolean disableCircularTransformation) {
        if (mDisableCircularTransformation == disableCircularTransformation) {
            return;
        }
        mDisableCircularTransformation = disableCircularTransformation;
        initializeBitmap();
    }

    // each setImage* variant re-extracts the bitmap and rebuilds the shader

    @Override
    public void setImageBitmap(Bitmap bm) {
        super.setImageBitmap(bm);
        initializeBitmap();
    }

    @Override
    public void setImageDrawable(Drawable drawable) {
        super.setImageDrawable(drawable);
        initializeBitmap();
    }

    @Override
    public void setImageResource(@DrawableRes int resId) {
        super.setImageResource(resId);
        initializeBitmap();
    }

    @Override
    public void setImageURI(Uri uri) {
        super.setImageURI(uri);
        initializeBitmap();
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        if (cf == mColorFilter) {
            return;
        }
        mColorFilter = cf;
        applyColorFilter();
        invalidate();
    }

    @Override
    public ColorFilter getColorFilter() {
        return mColorFilter;
    }

    // applies the stored filter to the bitmap paint only (not border/fill)
    private void applyColorFilter() {
        if (mBitmapPaint != null) {
            mBitmapPaint.setColorFilter(mColorFilter);
        }
    }

    /**
     * Extracts a Bitmap from an arbitrary Drawable by drawing it into an
     * offscreen canvas (ColorDrawables get a tiny 2x2 stand-in). Best-effort:
     * returns null on failure rather than crashing the view.
     */
    private Bitmap getBitmapFromDrawable(Drawable drawable) {
        if (drawable == null) {
            return null;
        }
        if (drawable instanceof BitmapDrawable) {
            return ((BitmapDrawable) drawable).getBitmap();
        }
        try {
            Bitmap bitmap;
            if (drawable instanceof ColorDrawable) {
                bitmap = Bitmap.createBitmap(COLORDRAWABLE_DIMENSION, COLORDRAWABLE_DIMENSION, BITMAP_CONFIG);
            } else {
                bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), BITMAP_CONFIG);
            }
            Canvas canvas = new Canvas(bitmap);
            drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
            drawable.draw(canvas);
            return bitmap;
        } catch (Exception e) {
            // deliberate best-effort: a drawable with no intrinsic size (etc.)
            // falls back to drawing nothing
            e.printStackTrace();
            return null;
        }
    }

    private void initializeBitmap() {
        if (mDisableCircularTransformation) {
            mBitmap = null;
        } else {
            mBitmap = getBitmapFromDrawable(getDrawable());
        }
        setup();
    }

    /**
     * (Re)computes all drawing state: shader, paints, bounds rects and radii.
     * Deferred until init() has run, and skipped while the view is unsized.
     */
    private void setup() {
        if (!mReady) {
            mSetupPending = true;
            return;
        }
        // NOTE(review): uses && — a view sized (0, h) or (w, 0) proceeds past
        // this guard; confirm that is intended.
        if (getWidth() == 0 && getHeight() == 0) {
            return;
        }
        if (mBitmap == null) {
            invalidate();
            return;
        }
        mBitmapShader = new BitmapShader(mBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP);
        mBitmapPaint.setAntiAlias(true);
        mBitmapPaint.setShader(mBitmapShader);
        mBorderPaint.setStyle(Paint.Style.STROKE);
        mBorderPaint.setAntiAlias(true);
        mBorderPaint.setColor(mBorderColor);
        mBorderPaint.setStrokeWidth(mBorderWidth);
        mFillPaint.setStyle(Paint.Style.FILL);
        mFillPaint.setAntiAlias(true);
        mFillPaint.setColor(mFillColor);
        mBitmapHeight = mBitmap.getHeight();
        mBitmapWidth = mBitmap.getWidth();
        mBorderRect.set(calculateBounds());
        // border ring is centered on the stroke, hence the width subtraction
        mBorderRadius = Math.min((mBorderRect.height() - mBorderWidth) / 2.0f, (mBorderRect.width() - mBorderWidth) / 2.0f);
        mDrawableRect.set(mBorderRect);
        if (!mBorderOverlay && mBorderWidth > 0) {
            // shrink the bitmap disc inside the border; the 1px overlap hides
            // anti-aliasing seams between bitmap edge and border
            mDrawableRect.inset(mBorderWidth - 1.0f, mBorderWidth - 1.0f);
        }
        mDrawableRadius = Math.min(mDrawableRect.height() / 2.0f, mDrawableRect.width() / 2.0f);
        applyColorFilter();
        updateShaderMatrix();
        invalidate();
    }

    /** Largest square centered inside the padded view bounds. */
    private RectF calculateBounds() {
        int availableWidth = getWidth() - getPaddingLeft() - getPaddingRight();
        int availableHeight = getHeight() - getPaddingTop() - getPaddingBottom();
        int sideLength = Math.min(availableWidth, availableHeight);
        float left = getPaddingLeft() + (availableWidth - sideLength) / 2f;
        float top = getPaddingTop() + (availableHeight - sideLength) / 2f;
        return new RectF(left, top, left + sideLength, top + sideLength);
    }

    /**
     * Center-crop: scales the bitmap uniformly so it covers mDrawableRect,
     * then translates so the overflow is split evenly on both sides.
     */
    private void updateShaderMatrix() {
        float scale;
        float dx = 0;
        float dy = 0;
        mShaderMatrix.set(null); // reset to identity
        if (mBitmapWidth * mDrawableRect.height() > mDrawableRect.width() * mBitmapHeight) {
            // bitmap is wider than the target aspect: match heights, crop width
            scale = mDrawableRect.height() / (float) mBitmapHeight;
            dx = (mDrawableRect.width() - mBitmapWidth * scale) * 0.5f;
        } else {
            // bitmap is taller: match widths, crop height
            scale = mDrawableRect.width() / (float) mBitmapWidth;
            dy = (mDrawableRect.height() - mBitmapHeight * scale) * 0.5f;
        }
        mShaderMatrix.setScale(scale, scale);
        // rounding to whole pixels before offsetting into the drawable rect
        mShaderMatrix.postTranslate((int) (dx + 0.5f) + mDrawableRect.left, (int) (dy + 0.5f) + mDrawableRect.top);
        mBitmapShader.setLocalMatrix(mShaderMatrix);
    }
}
package com.idleworx.mybatisdao;

import java.util.ArrayList;

import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base DAO containing all the basic CRUD methods inherited by all model objects.
 * <br><br>
 * Child DAOs should generally not override these methods but add extra ones as needed.
 *
 * @param <T>  the mapped model type
 * @param <PK> the primary-key type
 */
public abstract class MyBatisDAO<T, PK> implements IParentDAO<T, PK> {

    private static Logger log = LoggerFactory.getLogger(MyBatisDAO.class);

    /** Namespace shared by all mapper XML files. */
    private static final String NAMESPACE = "mappers";

    private SqlSessionFactory sf; // reference to the MyBatis session factory
    private Class<T> type;        // concrete model class, used to derive mapper query ids

    /**
     * Prefixes establishing the naming conventions between XML mapper files and the DAO class.
     */
    public static final String PREFIX_SELECT_QUERY = "get";    // prefix of select queries in mapper files (eg. getAddressType)
    public static final String PREFIX_INSERT_QUERY = "create"; // prefix of create queries in mapper files (eg. createAddressType)
    public static final String PREFIX_UPDATE_QUERY = "update"; // prefix of update queries in mapper files (eg. updateAddressType)
    public static final String PREFIX_DELETE_QUERY = "delete"; // prefix of delete queries in mapper files (eg. deleteAddressType)

    /** Default constructor. */
    public MyBatisDAO(Class<T> type, SqlSessionFactory sf) {
        this.type = type;
        this.sf = sf;
        if (sf == null) {
            log.error("Error: Could not instantiate MyBatisDAO. Loading myBatis sessionFactory failed.");
        }
    }

    /** Use this method to get a session factory for use in any methods implemented in child DAO classes. */
    protected SqlSessionFactory getSessionFactory() {
        return sf;
    }

    /**
     * Default get-by-id method.
     * <br><br>
     * Almost all objects in the db will need this (except mapping tables for
     * multiple joins, which you probably shouldn't even have as objects in your
     * model, since proper MyBatis mappings can take care of that).
     * <br><br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;select id="getCarInfo" ...
     *
     * @param id primary key of the row to fetch
     * @return the mapped object, or null if no row matches
     * @throws PersistenceException on any MyBatis/database error
     */
    public T get(PK id) throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            // If the class name is AddressType, this matches the mapper query id "mappers.getAddressType"
            String query = NAMESPACE + "." + PREFIX_SELECT_QUERY + this.type.getSimpleName();
            @SuppressWarnings("unchecked")
            T obj = (T) session.selectOne(query, id);
            return obj;
        } finally {
            session.close(); // always release the session, even on failure
        }
    }

    /**
     * Returns all rows for this object.
     * <br><br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;select id="getAllCarInfo" ...
     * <br><br>
     * SQL Executed: select * from [tablename]
     * <br><br>
     * Notes:
     * <br>
     * Consider overriding this method in order to handle large numbers of objects
     * with multiple references. LAZY LOADING should be enabled in this case,
     * otherwise you might run out of memory (eg. get all UserAccounts if the
     * table has 1,000,000 rows); look into the aggressiveLazyLoading property.
     *
     * @return all mapped rows (possibly empty)
     * @throws PersistenceException on any MyBatis/database error
     */
    public ArrayList<T> getAll() throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            String query = NAMESPACE + "." + PREFIX_SELECT_QUERY + "All" + this.type.getSimpleName();
            @SuppressWarnings("unchecked")
            ArrayList<T> list = (ArrayList<T>) session.selectList(query);
            return list;
        } finally {
            session.close();
        }
    }

    /**
     * Returns the first object which matches the given name (exact match).
     * <br><br>
     * It's up to you to decide what constitutes an object's name. Typically you
     * would have a NAME column in the table, but not all objects have this.
     * Generally this method should be overridden (if you need it at all) in the
     * child DAO class.
     * <br><br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;select id="getCarInfoByName" ...
     * <br><br>
     * SQL Executed (example): select * from [tablename] where NAME = ?
     *
     * @param name exact name to match
     * @return the first matching object, or null
     * @throws PersistenceException on any MyBatis/database error
     */
    public T getByName(String name) throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            String query = NAMESPACE + "." + PREFIX_SELECT_QUERY + this.type.getSimpleName() + "ByName";
            @SuppressWarnings("unchecked")
            T obj = (T) session.selectOne(query, name);
            return obj;
        } finally {
            session.close();
        }
    }

    /**
     * Inserts the object into the table.
     * <br><br>
     * You will usually override this method, especially if you're inserting
     * associated objects.
     * <br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;insert id="createCarInfo" ...
     * <br><br>
     * SQL Executed (example): insert into [tablename] (fieldname1,fieldname2,...) values(value1,value2...) ...
     *
     * @param o the object to insert
     * @return number of rows inserted
     * @throws PersistenceException on any MyBatis/database error
     */
    public int create(T o) throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            // NOTE(review): derives the query id from the runtime class, unlike
            // get()/delete() which use this.type — differs for subclass instances.
            String query = NAMESPACE + "." + PREFIX_INSERT_QUERY + o.getClass().getSimpleName();
            int status = session.insert(query, o);
            session.commit();
            return status;
        } finally {
            session.close();
        }
    }

    /**
     * Updates the object by id.
     * <br><br>
     * You will usually override this method, but it can be used for simple objects.
     * <br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;update id="updateCarInfo" ...
     * <br><br>
     * SQL Executed (example): update [tablename] set fieldname1 = value1 where id = #{id}
     *
     * @param o the object to update (must carry its id)
     * @return number of rows updated
     * @throws PersistenceException on any MyBatis/database error
     */
    public int update(T o) throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            String query = NAMESPACE + "." + PREFIX_UPDATE_QUERY + o.getClass().getSimpleName();
            int status = session.update(query, o);
            session.commit();
            return status;
        } finally {
            session.close();
        }
    }

    /**
     * Deletes the object by id.
     * <br><br>
     * Example:
     * <br>
     * If your DAO object is called CarInfo.java,
     * the corresponding mapper query id should be: &lt;delete id="deleteCarInfo" ...
     * <br><br>
     * SQL Executed (example): delete from [tablename] where id = ?
     *
     * @param id primary key of the row to delete
     * @return number of rows deleted
     * @throws PersistenceException on any MyBatis/database error
     */
    public int delete(PK id) throws PersistenceException {
        SqlSession session = sf.openSession();
        try {
            String query = NAMESPACE + "." + PREFIX_DELETE_QUERY + this.type.getSimpleName();
            int status = session.delete(query, id);
            session.commit();
            return status;
        } finally {
            session.close();
        }
    }
}
/** * Copyright 2000-2010 Geometria Contributors * http://geocentral.net/geometria * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License * http://www.gnu.org/licenses */ package net.geocentral.geometria.view; import java.awt.Dimension; import java.awt.Frame; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.ButtonGroup; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JTabbedPane; import javax.swing.JTextField; import javax.swing.WindowConstants; import javax.vecmath.Point3d; import net.geocentral.geometria.action.GProblemAnswerAction; import net.geocentral.geometria.model.GFigure; import net.geocentral.geometria.model.GNotepadRecord; import net.geocentral.geometria.model.GProblem; import net.geocentral.geometria.model.answer.GAnswer; import net.geocentral.geometria.model.answer.GConditionAnswer; import net.geocentral.geometria.model.answer.GFixedPlaneAnswer; import net.geocentral.geometria.model.answer.GNumberAnswer; import net.geocentral.geometria.model.answer.GPointSetAnswer; import net.geocentral.geometria.model.answer.condition.GCondition; import net.geocentral.geometria.model.answer.condition.GEquilateralTriangleCondition; import net.geocentral.geometria.model.answer.condition.GIsoscellesTriangleCondition; import net.geocentral.geometria.model.answer.condition.GNotThroughLineCondition; import net.geocentral.geometria.model.answer.condition.GNotThroughPointCondition; import net.geocentral.geometria.model.answer.condition.GParallelToLineCondition; import net.geocentral.geometria.model.answer.condition.GParallelToPlaneCondition; import net.geocentral.geometria.model.answer.condition.GParallelogramCondition; import 
net.geocentral.geometria.model.answer.condition.GPerpendicularToLineCondition; import net.geocentral.geometria.model.answer.condition.GPerpendicularToPlaneCondition; import net.geocentral.geometria.model.answer.condition.GRectangleCondition; import net.geocentral.geometria.model.answer.condition.GRectangularTriangleCondition; import net.geocentral.geometria.model.answer.condition.GRhombusCondition; import net.geocentral.geometria.model.answer.condition.GSquareCondition; import net.geocentral.geometria.model.answer.condition.GThroughLineCondition; import net.geocentral.geometria.model.answer.condition.GThroughNoEdgeCondition; import net.geocentral.geometria.model.answer.condition.GThroughNoVertexCondition; import net.geocentral.geometria.model.answer.condition.GThroughPointCondition; import net.geocentral.geometria.model.answer.condition.GVolumeCutInRatioCondition; import net.geocentral.geometria.util.GDictionary; import net.geocentral.geometria.util.GGraphicsFactory; import net.geocentral.geometria.util.GMath; import net.geocentral.geometria.util.GPointSetFactory; import net.geocentral.geometria.util.GGraphicsFactory.LocationType; import org.apache.log4j.Logger; public class GProblemAnswerDialog extends JDialog implements GHelpOkCancelDialog, ActionListener { private int option = CANCEL_OPTION; private GProblem document; private JPanel valuePane; private JRadioButton numberButton; private JRadioButton pointSetButton; private JRadioButton fixedPlaneButton; private JRadioButton conditionButton; private JTextField numberTextField; private JTextField pointSetTextField; private JTextField[] conditionTextFields; private JComboBox[] conditionComboBoxes; private GCondition[][] conditionGroups = new GCondition[][] { new GCondition[] { new GThroughPointCondition(), new GThroughLineCondition(), new GNotThroughPointCondition(), new GNotThroughLineCondition(), new GThroughNoVertexCondition(), new GThroughNoEdgeCondition(), new GParallelToLineCondition(), new 
GParallelToPlaneCondition(), new GPerpendicularToLineCondition(), new GPerpendicularToPlaneCondition() }, new GCondition[] { new GVolumeCutInRatioCondition() }, new GCondition[] { new GIsoscellesTriangleCondition(), new GRectangularTriangleCondition(), new GEquilateralTriangleCondition(), new GParallelogramCondition(), new GRhombusCondition(), new GRectangleCondition(), new GSquareCondition() } }; private GAnswer answer; private GProblemAnswerAction action; private static Logger logger = Logger.getLogger("net.geocentral.geometria"); public GProblemAnswerDialog(Frame ownerFrame, GProblemAnswerAction action, GProblem document) { super(ownerFrame, true); logger.info(""); this.document = document; this.action = action; setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); layoutComponents(); pack(); GGraphicsFactory.getInstance().setLocation(this, ownerFrame, LocationType.CENTER); setTitle(GDictionary.get("Answer")); setResizable(true); } private void layoutComponents() { logger.info(""); getContentPane().setLayout( new BoxLayout(getContentPane(), BoxLayout.Y_AXIS)); JTabbedPane tabbedPane = new JTabbedPane(); tabbedPane.setPreferredSize(new Dimension(400, 320)); getContentPane().add(tabbedPane); JPanel typePane = new JPanel(); valuePane = new JPanel(); tabbedPane.addTab(GDictionary.get("Type"), typePane); tabbedPane.addTab(GDictionary.get("Value"), valuePane); layoutTypePane(typePane); JPanel helpOkCancelPane = GGraphicsFactory.getInstance().createHelpOkCancelPane(this, action.getHelpId()); getContentPane().add(helpOkCancelPane); } private void layoutTypePane(JPanel typePane) { logger.info(""); typePane.setLayout(new BoxLayout(typePane, BoxLayout.X_AXIS)); numberButton = new JRadioButton(GDictionary.get("Number")); numberButton.addActionListener(this); pointSetButton = new JRadioButton(GDictionary.get("PointSet")); pointSetButton.addActionListener(this); fixedPlaneButton = new JRadioButton(GDictionary.get("FixedPlane")); fixedPlaneButton.addActionListener(this); 
conditionButton = new JRadioButton(GDictionary.get("ConditionPlane")); conditionButton.addActionListener(this); ButtonGroup buttonGroup = new ButtonGroup(); buttonGroup.add(numberButton); buttonGroup.add(pointSetButton); buttonGroup.add(fixedPlaneButton); buttonGroup.add(conditionButton); JPanel buttonPane = new JPanel(); buttonPane.setLayout(new BoxLayout(buttonPane, BoxLayout.Y_AXIS)); buttonPane.add(Box.createVerticalGlue()); buttonPane.add(numberButton); buttonPane.add(pointSetButton); buttonPane.add(fixedPlaneButton); buttonPane.add(conditionButton); buttonPane.add(Box.createVerticalGlue()); typePane.add(Box.createRigidArea(new Dimension(20, 20))); typePane.add(buttonPane); typePane.add(Box.createHorizontalGlue()); GAnswer answer = document.getAnswer(); if (answer instanceof GNumberAnswer) numberButton.setSelected(true); else if (answer instanceof GPointSetAnswer) pointSetButton.setSelected(true); else if (answer instanceof GFixedPlaneAnswer) fixedPlaneButton.setSelected(true); else if (answer instanceof GConditionAnswer) conditionButton.setSelected(true); layoutValuePane(); } private void layoutValuePane() { logger.info(""); valuePane.removeAll(); if (numberButton.isSelected()) layoutNumberValuePane(); else if (pointSetButton.isSelected()) layoutPointSetValuePane(); else if (fixedPlaneButton.isSelected()) layoutFixedPlaneValuePane(); else if (conditionButton.isSelected()) layoutConditionValuePane(); } private void layoutNumberValuePane() { logger.info(""); valuePane.setLayout(new BoxLayout(valuePane, BoxLayout.Y_AXIS)); valuePane.add(Box.createVerticalGlue()); numberTextField = GGraphicsFactory.getInstance().createAnswerInput(null); JPanel inputPane = layoutPane(GDictionary.get("Value"), new JComponent[] { numberTextField }); valuePane.add(inputPane); valuePane.add(Box.createVerticalGlue()); GAnswer ans = document.getAnswer(); if (ans instanceof GNumberAnswer && ((GNumberAnswer)ans).getValue() != null && ((GNumberAnswer)ans).getValue() != 0) { String 
stringValue = String.valueOf(((GNumberAnswer) ans).getValue()); numberTextField.setText(stringValue); } else { GNotepadRecord record = document.getNotepad().getSelectedRecord(); if (record != null) { String variableName = record.getVariable().getName(); numberTextField.setText(variableName); } } } private void layoutPointSetValuePane() { logger.info(""); valuePane.setLayout(new BoxLayout(valuePane, BoxLayout.Y_AXIS)); valuePane.add(Box.createVerticalGlue()); pointSetTextField = GGraphicsFactory.getInstance().createAnswerInput(null); JPanel inputPane = layoutPane(GDictionary.get("Points"), new JComponent[] { pointSetTextField }); valuePane.add(inputPane); valuePane.add(Box.createVerticalGlue()); GAnswer ans = document.getAnswer(); if (ans instanceof GPointSetAnswer) { Point3d[] coords = ((GPointSetAnswer)ans).getCoords(); String valueString; if (coords == null) valueString = ""; else valueString = ans.toString(); pointSetTextField.setText(valueString); } else { String[] selectedLabels = GPointSetFactory.getInstance().fromSelection(document); if (selectedLabels.length > 0) { StringBuffer buf = new StringBuffer(); for (int i = 0; i < selectedLabels.length; i++) { buf.append(selectedLabels[i]); if (i < selectedLabels.length - 1) buf.append(","); } pointSetTextField.setText(String.valueOf(buf)); } } } private void layoutFixedPlaneValuePane() { logger.info(""); valuePane.setLayout(new BoxLayout(valuePane, BoxLayout.Y_AXIS)); valuePane.add(Box.createVerticalGlue()); pointSetTextField = GGraphicsFactory.getInstance().createAnswerInput(null); JPanel inputPane = layoutPane(GDictionary.get("ReferencePoints"), new JComponent[] { pointSetTextField }); valuePane.add(inputPane); valuePane.add(Box.createVerticalGlue()); GAnswer ans = document.getAnswer(); if (ans instanceof GFixedPlaneAnswer) { Point3d[] coords = ((GFixedPlaneAnswer)ans).getCoords(); String valueString; if (coords == null) valueString = ""; else valueString = ans.toString(); pointSetTextField.setText(valueString); 
} else { String[] selectedLabels = GPointSetFactory.getInstance().fromSelection(document); if (selectedLabels.length > 0) { StringBuffer buf = new StringBuffer(); for (int i = 0; i < selectedLabels.length; i++) { buf.append(selectedLabels[i]); if (i < selectedLabels.length - 1) buf.append(","); } pointSetTextField.setText(String.valueOf(buf)); } } } private void layoutConditionValuePane() { logger.info(""); valuePane.setLayout(new BoxLayout(valuePane, BoxLayout.Y_AXIS)); valuePane.add(Box.createVerticalGlue()); conditionComboBoxes = new JComboBox[conditionGroups.length]; conditionTextFields = new JTextField[conditionGroups.length]; conditionTextFields[0] = GGraphicsFactory.getInstance().createAnswerInput(null); conditionTextFields[1] = GGraphicsFactory.getInstance().createAnswerInput(null); String[] captions = { GDictionary.get("Passes"), GDictionary.get("Miscellaneous"), GDictionary.get("SectionShapedLike") }; for (int i = 0; i < conditionGroups.length; i++) { String[] labels = new String[conditionGroups[i].length + 1]; labels[0] = "-- " + GDictionary.get("NotSelected") + " --"; for (int j = 0; j < conditionGroups[i].length; j++) labels[j + 1] = conditionGroups[i][j].getDescription(); conditionComboBoxes[i] = GGraphicsFactory.getInstance().createComboBox(labels); JComponent[] components; if (conditionTextFields[i] == null) components = new JComponent[] { conditionComboBoxes[i] }; else components = new JComponent[] { conditionComboBoxes[i], conditionTextFields[i] }; JPanel pane = layoutPane(captions[i], components); valuePane.add(pane); valuePane.add(Box.createVerticalGlue()); } GAnswer ans = document.getAnswer(); if (ans instanceof GConditionAnswer) { GCondition[] conditions = ((GConditionAnswer)ans).getConditions(); for (int i = 0; i < conditions.length; i++) { if (conditions[i] == null) continue; for (int j = 0; j < conditionGroups[i].length; j++) { if (conditionGroups[i][j].getClass().equals( conditions[i].getClass())) { 
conditionComboBoxes[i].setSelectedIndex(j + 1); if (conditionTextFields[i] != null) { String stringValue = conditions[i].getStringValue(); conditionTextFields[i].setText(stringValue); } } } } } else { String[] selectedLabels = GPointSetFactory.getInstance().fromSelection(document); if (selectedLabels.length > 0) { StringBuffer buf = new StringBuffer(); for (int i = 0; i < selectedLabels.length; i++) { buf.append(selectedLabels[i]); if (i < selectedLabels.length - 1) buf.append(","); } conditionTextFields[0].setText(String.valueOf(buf)); } } } private JPanel layoutPane(String caption, JComponent[] components) { Dimension padding = new Dimension(10, 10); JPanel pane = GGraphicsFactory.getInstance().createTitledBorderPane(caption); pane.setLayout(new BoxLayout(pane, BoxLayout.X_AXIS)); for (int i = 0; i < components.length; i++) { pane.add(components[i]); if (i < components.length - 1) pane.add(Box.createRigidArea(padding)); } return pane; } public void ok() { logger.info(""); GAnswer ans = null; if (numberButton.isSelected()) { ans = new GNumberAnswer(); try { ((GNumberAnswer)ans).validateInput( numberTextField.getText().trim(), document); } catch (Exception exception) { GGraphicsFactory.getInstance().showErrorDialog( this, exception.getMessage()); return; } } else if (pointSetButton.isSelected()) { Point3d[] coords; try { GFigure figure = document.getSelectedFigure(); coords = GPointSetFactory.getInstance().fromString( pointSetTextField.getText().trim(), figure); } catch (Exception exception) { GGraphicsFactory.getInstance().showErrorDialog( this, exception.getMessage()); return; } if (coords.length < 1) { logger.info("No points"); GGraphicsFactory.getInstance().showErrorDialog(this, GDictionary.get("EnterPoints")); return; } ans = new GPointSetAnswer(coords); } else if (fixedPlaneButton.isSelected()) { GFigure figure = document.getSelectedFigure(); Point3d[] coords; try { coords = GPointSetFactory.getInstance().fromString( pointSetTextField.getText().trim(), 
figure); } catch (Exception exception) { GGraphicsFactory.getInstance().showErrorDialog( this, exception.getMessage()); return; } if (coords.length != 3) { GGraphicsFactory.getInstance().showErrorDialog(this, GDictionary.get("RefPlaneBy3Points")); return; } if (GMath.areCollinear(coords, GMath.EPSILON)) { GGraphicsFactory.getInstance().showErrorDialog(this, GDictionary.get("RefPointsAreCollinear")); return; } ans = new GFixedPlaneAnswer(coords); } else if (conditionButton.isSelected()) { GCondition[] conditions = new GCondition[conditionGroups.length]; for (int i = 0; i < conditionGroups.length; i++) { int j = conditionComboBoxes[i].getSelectedIndex(); if (j > 0) { conditions[i] = conditionGroups[i][j - 1]; try { String input = conditionTextFields[i] == null ? null : conditionTextFields[i].getText().trim(); conditions[i].validate(input, document); } catch (Exception exception) { GGraphicsFactory.getInstance().showErrorDialog(this, exception.getMessage()); return; } } } ans = new GConditionAnswer(conditions); } answer = ans; option = OK_OPTION; dispose(); } public void cancel() { logger.info(""); dispose(); } public int getOption() { return option; } public GAnswer getAnswer() { return answer; } public void actionPerformed(ActionEvent e) { layoutValuePane(); } private static final long serialVersionUID = 1L; }
/* * Copyright 2012, Google Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.jf.dexlib2.dexbacked; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.jf.dexlib2.base.reference.BaseMethodReference; import org.jf.dexlib2.dexbacked.raw.MethodIdItem; import org.jf.dexlib2.dexbacked.raw.ProtoIdItem; import org.jf.dexlib2.dexbacked.raw.TypeListItem; import org.jf.dexlib2.dexbacked.util.AnnotationsDirectory; import org.jf.dexlib2.dexbacked.util.FixedSizeList; import org.jf.dexlib2.dexbacked.util.ParameterIterator; import org.jf.dexlib2.iface.Annotation; import org.jf.dexlib2.iface.Method; import org.jf.dexlib2.iface.MethodParameter; import org.jf.util.AbstractForwardSequentialList; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; public class DexBackedMethod extends BaseMethodReference implements Method { @Nonnull public final DexBackedDexFile dexFile; @Nonnull public final DexBackedClassDef classDef; public final int accessFlags; private final int codeOffset; private final int parameterAnnotationSetListOffset; private final int methodAnnotationSetOffset; public final int methodIndex; private int methodIdItemOffset; private int protoIdItemOffset; private int parametersOffset = -1; public DexBackedMethod(@Nonnull DexReader reader, @Nonnull DexBackedClassDef classDef, int previousMethodIndex) { this.dexFile = reader.dexBuf; this.classDef = classDef; // large values may be used for the index delta, which cause the cumulative index to overflow upon // addition, effectively allowing out of order entries. 
int methodIndexDiff = reader.readLargeUleb128(); this.methodIndex = methodIndexDiff + previousMethodIndex; this.accessFlags = reader.readSmallUleb128(); this.codeOffset = reader.readSmallUleb128(); this.methodAnnotationSetOffset = 0; this.parameterAnnotationSetListOffset = 0; } public DexBackedMethod(@Nonnull DexReader reader, @Nonnull DexBackedClassDef classDef, int previousMethodIndex, @Nonnull AnnotationsDirectory.AnnotationIterator methodAnnotationIterator, @Nonnull AnnotationsDirectory.AnnotationIterator paramaterAnnotationIterator) { this.dexFile = reader.dexBuf; this.classDef = classDef; // large values may be used for the index delta, which cause the cumulative index to overflow upon // addition, effectively allowing out of order entries. int methodIndexDiff = reader.readLargeUleb128(); this.methodIndex = methodIndexDiff + previousMethodIndex; this.accessFlags = reader.readSmallUleb128(); this.codeOffset = reader.readSmallUleb128(); this.methodAnnotationSetOffset = methodAnnotationIterator.seekTo(methodIndex); this.parameterAnnotationSetListOffset = paramaterAnnotationIterator.seekTo(methodIndex); } public int getMethodIndex() { return methodIndex; } @Nonnull @Override public String getDefiningClass() { return classDef.getType(); } @Override public int getAccessFlags() { return accessFlags; } @Nonnull @Override public String getName() { return dexFile.getString(dexFile.readSmallUint(getMethodIdItemOffset() + MethodIdItem.NAME_OFFSET)); } @Nonnull @Override public String getReturnType() { return dexFile.getType(dexFile.readSmallUint(getProtoIdItemOffset() + ProtoIdItem.RETURN_TYPE_OFFSET)); } @Nonnull @Override public List<? 
extends MethodParameter> getParameters() { int parametersOffset = getParametersOffset(); if (parametersOffset > 0) { final List<String> parameterTypes = getParameterTypes(); return new AbstractForwardSequentialList<MethodParameter>() { @Nonnull @Override public Iterator<MethodParameter> iterator() { return new ParameterIterator(parameterTypes, getParameterAnnotations(), getParameterNames()); } @Override public int size() { return parameterTypes.size(); } }; } return ImmutableList.of(); } @Nonnull public List<? extends Set<? extends DexBackedAnnotation>> getParameterAnnotations() { return AnnotationsDirectory.getParameterAnnotations(dexFile, parameterAnnotationSetListOffset); } @Nonnull public Iterator<String> getParameterNames() { DexBackedMethodImplementation methodImpl = getImplementation(); if (methodImpl != null) { return methodImpl.getParameterNames(null); } return Iterators.emptyIterator(); } @Nonnull @Override public List<String> getParameterTypes() { final int parametersOffset = getParametersOffset(); if (parametersOffset > 0) { final int parameterCount = dexFile.readSmallUint(parametersOffset + TypeListItem.SIZE_OFFSET); final int paramListStart = parametersOffset + TypeListItem.LIST_OFFSET; return new FixedSizeList<String>() { @Nonnull @Override public String readItem(final int index) { return dexFile.getType(dexFile.readUshort(paramListStart + 2*index)); } @Override public int size() { return parameterCount; } }; } return ImmutableList.of(); } @Nonnull @Override public Set<? 
extends Annotation> getAnnotations() { return AnnotationsDirectory.getAnnotations(dexFile, methodAnnotationSetOffset); } @Nullable @Override public DexBackedMethodImplementation getImplementation() { if (codeOffset > 0) { return new DexBackedMethodImplementation(dexFile, this, codeOffset); } return null; } private int getMethodIdItemOffset() { if (methodIdItemOffset == 0) { methodIdItemOffset = dexFile.getMethodIdItemOffset(methodIndex); } return methodIdItemOffset; } private int getProtoIdItemOffset() { if (protoIdItemOffset == 0) { int protoIndex = dexFile.readUshort(getMethodIdItemOffset() + MethodIdItem.PROTO_OFFSET); protoIdItemOffset = dexFile.getProtoIdItemOffset(protoIndex); } return protoIdItemOffset; } private int getParametersOffset() { if (parametersOffset == -1) { parametersOffset = dexFile.readSmallUint(getProtoIdItemOffset() + ProtoIdItem.PARAMETERS_OFFSET); } return parametersOffset; } /** * Skips the reader over the specified number of encoded_method structures * * @param reader The reader to skip * @param count The number of encoded_method structures to skip over */ public static void skipMethods(@Nonnull DexReader reader, int count) { for (int i=0; i<count; i++) { reader.skipUleb128(); reader.skipUleb128(); reader.skipUleb128(); } } }
/*
 * Copyright 2003-2015 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.migration;

import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.ComparisonUtils;
import com.siyeh.ig.psiutils.ExpectedTypeUtils;
import com.siyeh.ig.psiutils.MethodCallUtils;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.HashMap;
import java.util.Map;

/**
 * Inspection that reports explicit unboxing calls (e.g. {@code x.intValue()}) which are
 * unnecessary under Java 5+ auto-unboxing, and offers a quick fix that removes the call.
 *
 * <p>Relies on the platform's PSI model; a call is only reported when
 * {@link UnnecessaryUnboxingVisitor#canRemainBoxed} determines that removing the unboxing
 * call cannot change semantics (overload resolution, object comparison, casts, ternaries).
 */
public class UnnecessaryUnboxingInspection extends BaseInspection {

    // Persisted inspection option: when true, only report unboxing whose result is
    // immediately re-boxed (the expected type is a class type, not a primitive).
    @SuppressWarnings("PublicField")
    public boolean onlyReportSuperfluouslyUnboxed = false;

    // Maps wrapper class name -> the name of its unboxing method (Integer -> intValue, ...).
    @NonNls static final Map<String, String> s_unboxingMethods = new HashMap<String, String>(8);

    static {
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_INTEGER, "intValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_SHORT, "shortValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_BOOLEAN, "booleanValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_LONG, "longValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_BYTE, "byteValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_FLOAT, "floatValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_DOUBLE, "doubleValue");
        s_unboxingMethods.put(CommonClassNames.JAVA_LANG_CHARACTER, "charValue");
    }

    @Override
    @NotNull
    public String getDisplayName() {
        return InspectionGadgetsBundle.message(
                "unnecessary.unboxing.display.name");
    }

    @Override
    @NotNull
    protected String buildErrorString(Object... infos) {
        return InspectionGadgetsBundle.message(
                "unnecessary.unboxing.problem.descriptor");
    }

    @Override
    public boolean isEnabledByDefault() {
        return true;
    }

    // Single checkbox bound to the onlyReportSuperfluouslyUnboxed field above.
    @Nullable
    @Override
    public JComponent createOptionsPanel() {
        return new SingleCheckboxOptionsPanel(InspectionGadgetsBundle.message("unnecessary.unboxing.superfluous.option"),
                                              this, "onlyReportSuperfluouslyUnboxed");
    }

    @Override
    public InspectionGadgetsFix buildFix(Object... infos) {
        return new UnnecessaryUnboxingFix();
    }

    /**
     * Quick fix that replaces the unboxing call with its qualifier
     * (e.g. {@code x.intValue()} -> {@code x}); {@code Boolean.TRUE.booleanValue()} /
     * {@code Boolean.FALSE.booleanValue()} are special-cased to the literals
     * {@code true} / {@code false}.
     */
    private static class UnnecessaryUnboxingFix extends InspectionGadgetsFix {

        @Override
        @NotNull
        public String getName() {
            return InspectionGadgetsBundle.message(
                    "unnecessary.unboxing.remove.quickfix");
        }

        @NotNull
        @Override
        public String getFamilyName() {
            return getName();
        }

        @Override
        public void doFix(Project project, ProblemDescriptor descriptor) {
            final PsiMethodCallExpression methodCall = (PsiMethodCallExpression)descriptor.getPsiElement();
            final PsiReferenceExpression methodExpression = methodCall.getMethodExpression();
            final PsiExpression qualifier = methodExpression.getQualifierExpression();
            final PsiExpression strippedQualifier = ParenthesesUtils.stripParentheses(qualifier);
            if (strippedQualifier == null) {
                return;
            }
            // Special case: the qualifier is the Boolean.TRUE / Boolean.FALSE constant field;
            // replace the whole call with the corresponding boolean literal.
            if (strippedQualifier instanceof PsiReferenceExpression) {
                final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)strippedQualifier;
                final PsiElement element = referenceExpression.resolve();
                if (element instanceof PsiField) {
                    final PsiField field = (PsiField)element;
                    final PsiClass containingClass = field.getContainingClass();
                    if (containingClass == null) {
                        return;
                    }
                    final String classname = containingClass.getQualifiedName();
                    if (CommonClassNames.JAVA_LANG_BOOLEAN.equals(classname)) {
                        @NonNls final String name = field.getName();
                        if ("TRUE".equals(name)) {
                            PsiReplacementUtil.replaceExpression(methodCall, "true");
                            return;
                        }
                        else if ("FALSE".equals(name)) {
                            PsiReplacementUtil.replaceExpression(methodCall, "false");
                            return;
                        }
                    }
                }
            }
            // General case: drop the unboxing call, keeping only its (paren-stripped) qualifier.
            final String strippedQualifierText = strippedQualifier.getText();
            PsiReplacementUtil.replaceExpression(methodCall, strippedQualifierText);
        }
    }

    // Auto-unboxing only exists since Java 5, so skip older language levels entirely.
    @Override
    public boolean shouldInspect(PsiFile file) {
        return PsiUtil.isLanguageLevel5OrHigher(file);
    }

    @Override
    public BaseInspectionVisitor buildVisitor() {
        return new UnnecessaryUnboxingVisitor();
    }

    private class UnnecessaryUnboxingVisitor extends BaseInspectionVisitor {

        @Override
        public void visitMethodCallExpression(@NotNull PsiMethodCallExpression expression) {
            super.visitMethodCallExpression(expression);
            if (!isUnboxingExpression(expression)) {
                return;
            }
            final PsiReferenceExpression methodExpression = expression.getMethodExpression();
            final PsiExpression qualifier = methodExpression.getQualifierExpression();
            if (qualifier == null || !canRemainBoxed(expression, qualifier)) {
                return;
            }
            registerError(expression);
        }

        /**
         * Returns true when the explicit unboxing call can be removed, i.e. the value could
         * remain boxed and auto-unboxing (or no unboxing at all) would preserve semantics.
         * Checks the syntactic context of the call for cases where removal would change
         * behavior: reference (==) comparison with another object, casts the boxed type
         * cannot satisfy, ternaries whose other branch is not primitive, and calls whose
         * overload resolution would change.
         *
         * @param expression        the unboxing call, possibly wrapped in parentheses
         * @param unboxedExpression the qualifier of the unboxing call
         */
        private boolean canRemainBoxed(@NotNull PsiExpression expression,
                                       @NotNull PsiExpression unboxedExpression) {
            // Walk up through any enclosing parentheses to the semantically relevant parent.
            PsiElement parent = expression.getParent();
            while (parent instanceof PsiParenthesizedExpression) {
                expression = (PsiExpression)parent;
                parent = parent.getParent();
            }
            if (parent instanceof PsiPolyadicExpression) {
                final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)parent;
                // Removing the unboxing inside ==/!= could turn a value comparison into a
                // reference comparison.
                if (isPossibleObjectComparison(expression, polyadicExpression)) {
                    return false;
                }
            }
            if (parent instanceof PsiTypeCastExpression) {
                final PsiTypeCastExpression typeCastExpression = (PsiTypeCastExpression)parent;
                final PsiTypeElement typeElement = typeCastExpression.getCastType();
                if (typeElement == null) {
                    return false;
                }
                final PsiType castType = typeElement.getType();
                final PsiType expressionType = expression.getType();
                // The cast must still accept the (primitive) type of the unboxing call.
                if (expressionType == null || !castType.isAssignableFrom(expressionType)) {
                    return false;
                }
            }
            else if (parent instanceof PsiConditionalExpression) {
                // In a ternary, the other branch must be primitive; otherwise removing the
                // unboxing changes the type of the conditional expression.
                final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)parent;
                final PsiExpression thenExpression = conditionalExpression.getThenExpression();
                if (thenExpression == null) {
                    return false;
                }
                final PsiExpression elseExpression = conditionalExpression.getElseExpression();
                if (elseExpression == null) {
                    return false;
                }
                if (PsiTreeUtil.isAncestor(thenExpression, expression, false)) {
                    final PsiType type = elseExpression.getType();
                    if (!(type instanceof PsiPrimitiveType)) {
                        return false;
                    }
                }
                else if (PsiTreeUtil.isAncestor(elseExpression, expression, false)) {
                    final PsiType type = thenExpression.getType();
                    if (!(type instanceof PsiPrimitiveType)) {
                        return false;
                    }
                }
            }
            else if (parent instanceof PsiExpressionList) {
                // The unboxing call is an argument: removing it must not change which
                // method overload the call resolves to.
                final PsiElement grandParent = parent.getParent();
                if (!(grandParent instanceof PsiCallExpression)) {
                    return true;
                }
                final PsiCallExpression methodCallExpression = (PsiCallExpression)grandParent;
                if (!isSameMethodCalledWithoutUnboxing(methodCallExpression, expression, unboxedExpression)) {
                    return false;
                }
            }
            if (onlyReportSuperfluouslyUnboxed) {
                // Option: only report when the unboxed value is immediately boxed back
                // (the context expects a class type).
                final PsiType expectedType = ExpectedTypeUtils.findExpectedType(expression, false, true);
                if (!(expectedType instanceof PsiClassType)) {
                    return false;
                }
            }
            return true;
        }

        /**
         * Returns true when {@code expression} sits in an equality comparison whose other
         * operand is non-primitive or itself an unboxing call — i.e. removing the unboxing
         * might produce a reference comparison between two objects.
         */
        private boolean isPossibleObjectComparison(PsiExpression expression,
                                                   PsiPolyadicExpression polyadicExpression) {
            if (!ComparisonUtils.isEqualityComparison(polyadicExpression)) {
                return false;
            }
            for (PsiExpression operand : polyadicExpression.getOperands()) {
                if (operand == expression) {
                    continue;
                }
                if (!(operand.getType() instanceof PsiPrimitiveType) || isUnboxingExpression(operand)) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Returns true when {@code expression} is a call of a wrapper type's unboxing method
         * (per {@link #s_unboxingMethods}) on a non-null qualifier.
         */
        private boolean isUnboxingExpression(PsiExpression expression) {
            if (!(expression instanceof PsiMethodCallExpression)) {
                return false;
            }
            final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)expression;
            final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
            final PsiExpression qualifier = methodExpression.getQualifierExpression();
            if (qualifier == null) {
                return false;
            }
            final PsiType qualifierType = qualifier.getType();
            if (qualifierType == null) {
                return false;
            }
            final String qualifierTypeName = qualifierType.getCanonicalText();
            if (!s_unboxingMethods.containsKey(qualifierTypeName)) {
                return false;
            }
            final String methodName = methodExpression.getReferenceName();
            final String unboxingMethod = s_unboxingMethods.get(qualifierTypeName);
            return unboxingMethod.equals(methodName);
        }

        /**
         * Returns true when replacing the unboxing argument with its bare qualifier still
         * resolves the enclosing call to the same method (same overload).
         */
        private boolean isSameMethodCalledWithoutUnboxing(@NotNull PsiCallExpression callExpression,
                                                          @NotNull PsiExpression unboxingExpression,
                                                          @NotNull PsiExpression unboxedExpression) {
            final PsiMethod originalMethod = callExpression.resolveMethod();
            if (originalMethod == null) {
                return false;
            }
            final PsiMethod method =
                    MethodCallUtils.findMethodWithReplacedArgument(callExpression, unboxingExpression, unboxedExpression);
            // Identity comparison is intentional: PSI resolve returns the same instance for
            // the same declaration.
            return originalMethod == method;
        }
    }
}
package tamestaj.examples.mini;

import tamestaj.GlobalCarrier;
import tamestaj.StaticInfo;
import tamestaj.annotations.Accept;
import tamestaj.annotations.Stage;

import java.util.IdentityHashMap;

/**
 * A tiny staged imperative language ("Mini") built as an interpreter combinator library.
 *
 * <p>Expressions ({@link IntE}, {@link BoolE}) and statements ({@link Stmt}) are AST nodes
 * whose {@code run} methods evaluate against an environment. The environment is an
 * {@link IdentityHashMap} keyed by the variable node objects themselves ({@link IntV},
 * {@link BoolV}) — variable identity, not the name string, is what matters.
 * The {@code @Stage} annotations mark factory/combinator methods for the tamestaj staging
 * framework (language {@code MiniL}); their runtime behavior here is plain interpretation.
 */
public final class Mini {
    // Utility/namespace class: not instantiable.
    private Mini() { }

    /** A boolean expression node; {@code run} evaluates it under the given environment. */
    public abstract static class BoolE extends GlobalCarrier {
        // Package-private constructor: subclasses are created only via the factories below.
        BoolE() { }

        abstract boolean run(IdentityHashMap<Object, Object> environment);
    }
    /** A boolean variable; the node object itself is the environment key. */
    public abstract static class BoolV extends BoolE {
        BoolV() { }
    }

    /** An integer expression node; {@code run} evaluates it under the given environment. */
    public abstract static class IntE extends GlobalCarrier {
        IntE() { }

        abstract int run(IdentityHashMap<Object, Object> environment);
    }
    /** An integer variable; the node object itself is the environment key. */
    public abstract static class IntV extends IntE {
        IntV() { }
    }

    /**
     * A statement node; {@code run} maps an input environment to the resulting environment.
     * Statements are functional: assignment returns a fresh environment copy.
     */
    public abstract static class Stmt extends GlobalCarrier {
        Stmt() { }

        abstract IdentityHashMap<Object, Object> run(IdentityHashMap<Object, Object> environment);

        /** Sequencing: runs this statement, then {@code v} on the resulting environment. */
        @Stage(language = MiniL.class, staticInfoElements = { StaticInfo.Element.ORIGIN })
        public Stmt then(Stmt v) {
            return new Stmt() {
                @Override
                IdentityHashMap<Object, Object> run(IdentityHashMap<Object, Object> environment) {
                    return v.run(Stmt.this.run(environment));
                }
            };
        }

        /**
         * Runs this statement from an empty environment and reads variable {@code e}'s final
         * int value. NOTE(review): throws NPE if {@code e} was never assigned — presumably a
         * usage error in this example language.
         */
        @Stage(language = MiniL.class, isStrict = true, staticInfoElements = { StaticInfo.Element.ORIGIN })
        public int intRun(IntV e) {
            return (int) this.run(new IdentityHashMap<>()).get(e);
        }

        /** Runs this statement from an empty environment and reads variable {@code e}'s final boolean value. */
        @Stage(language = MiniL.class, isStrict = true, staticInfoElements = { StaticInfo.Element.ORIGIN })
        public boolean boolRun(BoolV e) {
            return (boolean) this.run(new IdentityHashMap<>()).get(e);
        }
    }

    /** Integer addition; evaluates {@code a} then {@code b}. */
    @Stage(language = MiniL.class)
    public static IntE add(IntE a, IntE b) {
        return new IntE() {
            @Override
            int run(IdentityHashMap<Object, Object> environment) {
                int aR = a.run(environment);
                int bR = b.run(environment);
                return aR + bR;
            }
        };
    }

    /** Integer multiplication; evaluates {@code a} then {@code b}. */
    @Stage(language = MiniL.class)
    public static IntE mul(IntE a, IntE b) {
        return new IntE() {
            @Override
            int run(IdentityHashMap<Object, Object> environment) {
                int aR = a.run(environment);
                int bR = b.run(environment);
                return aR * bR;
            }
        };
    }

    /** Integer equality test. */
    @Stage(language = MiniL.class)
    public static BoolE eq(IntE a, IntE b) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                int aR = a.run(environment);
                int bR = b.run(environment);
                return aR == bR;
            }
        };
    }

    /** Integer less-than-or-equal test. */
    @Stage(language = MiniL.class)
    public static BoolE leq(IntE a, IntE b) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                int aR = a.run(environment);
                int bR = b.run(environment);
                return aR <= bR;
            }
        };
    }

    /**
     * Boolean conjunction. Note: both operands are evaluated before {@code &&} is applied,
     * so this is NOT short-circuiting across sub-expression evaluation.
     */
    @Stage(language = MiniL.class)
    public static BoolE and(BoolE a, BoolE b) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                boolean aR = a.run(environment);
                boolean bR = b.run(environment);
                return aR && bR;
            }
        };
    }

    /**
     * Boolean disjunction. Note: both operands are evaluated before {@code ||} is applied,
     * so this is NOT short-circuiting across sub-expression evaluation.
     */
    @Stage(language = MiniL.class)
    public static BoolE or(BoolE a, BoolE b) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                boolean aR = a.run(environment);
                boolean bR = b.run(environment);
                return aR || bR;
            }
        };
    }

    /** Integer negation. */
    @Stage(language = MiniL.class)
    public static IntE neg(IntE a) {
        return new IntE() {
            @Override
            int run(IdentityHashMap<Object, Object> environment) {
                int aR = a.run(environment);
                return -aR;
            }
        };
    }

    /** Boolean negation. */
    @Stage(language = MiniL.class)
    public static BoolE not(BoolE a) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                boolean aR = a.run(environment);
                return !aR;
            }
        };
    }

    /**
     * Creates a fresh int variable. The returned node is its own environment key
     * ({@code environment.get(this)} below — {@code this} is the anonymous IntV instance).
     * NOTE(review): {@code name} is unused at runtime; presumably consumed by the staging
     * framework via the ORIGIN static info — confirm against MiniL.
     */
    @Stage(language = MiniL.class, staticInfoElements = { StaticInfo.Element.ORIGIN })
    public static IntV intVar(String name) {
        return new IntV() {
            @Override
            int run(IdentityHashMap<Object, Object> environment) {
                return (int) environment.get(this);
            }
        };
    }

    /**
     * Creates a fresh boolean variable; same identity-keyed lookup as {@link #intVar}.
     * NOTE(review): {@code name} is likewise unused at runtime.
     */
    @Stage(language = MiniL.class, staticInfoElements = { StaticInfo.Element.ORIGIN })
    public static BoolV boolVar(String name) {
        return new BoolV() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                return (boolean) environment.get(this);
            }
        };
    }

    /** Assignment {@code v := e}; returns a NEW environment copy with {@code v} rebound. */
    @Stage(language = MiniL.class)
    public static Stmt intAssign(IntV v, IntE e) {
        return new Stmt() {
            @Override
            IdentityHashMap<Object, Object> run(IdentityHashMap<Object, Object> environment) {
                int eR = e.run(environment);
                // Copy-on-write: the caller's environment map is never mutated.
                IdentityHashMap<Object, Object> env = new IdentityHashMap<>(environment);
                env.put(v, eR);
                return env;
            }
        };
    }

    /** Boolean assignment {@code v := e}; returns a NEW environment copy with {@code v} rebound. */
    @Stage(language = MiniL.class)
    public static Stmt boolAssign(BoolV v, BoolE e) {
        return new Stmt() {
            @Override
            IdentityHashMap<Object, Object> run(IdentityHashMap<Object, Object> environment) {
                boolean eR = e.run(environment);
                IdentityHashMap<Object, Object> env = new IdentityHashMap<>(environment);
                env.put(v, eR);
                return env;
            }
        };
    }

    /** Integer literal; {@code @Accept(languages = {})} excludes the argument from staging. */
    @Stage(language = MiniL.class)
    public static IntE intLit(@Accept(languages = {}) int a) {
        return new IntE() {
            @Override
            int run(IdentityHashMap<Object, Object> environment) {
                return a;
            }
        };
    }

    /** Boolean literal; {@code @Accept(languages = {})} excludes the argument from staging. */
    @Stage(language = MiniL.class)
    public static BoolE boolLit(@Accept(languages = {}) boolean a) {
        return new BoolE() {
            @Override
            boolean run(IdentityHashMap<Object, Object> environment) {
                return a;
            }
        };
    }

    /**
     * While loop: repeatedly runs {@code s} while {@code test} holds, threading the
     * environment through each iteration (the local parameter is reassigned; the caller's
     * map is untouched since statements return fresh copies).
     */
    @Stage(language = MiniL.class)
    public static Stmt whileDo(BoolE test, Stmt s) {
        return new Stmt() {
            @Override
            IdentityHashMap<Object, Object> run(IdentityHashMap<Object, Object> environment) {
                while (test.run(environment)) {
                    environment = s.run(environment);
                }
                return environment;
            }
        };
    }
}
/*
 * Copyright 2017-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.pipelines.fabric.impl.behaviour;

import org.onosproject.net.pi.model.PiActionId;
import org.onosproject.net.pi.model.PiActionParamId;
import org.onosproject.net.pi.model.PiActionProfileId;
import org.onosproject.net.pi.model.PiMeterId;
import org.onosproject.net.pi.model.PiPacketMetadataId;
import org.onosproject.net.pi.model.PiCounterId;
import org.onosproject.net.pi.model.PiMatchFieldId;
import org.onosproject.net.pi.model.PiTableId;

/**
 * Constants for fabric pipeline.
 * <p>
 * Declares the protocol-independent (PI) identifiers — match fields, tables,
 * direct/indirect counters, actions, action parameters, action profiles,
 * packet-in/out metadata and meters — used by the fabric pipeline behaviours.
 * Each constant wraps the corresponding entity name string via the {@code of}
 * factory of its PI model type.
 * <p>
 * NOTE(review): the flat, repetitive structure strongly suggests this file is
 * auto-generated from the pipeline's P4Info — confirm before editing by hand,
 * as manual changes would be lost on regeneration.
 */
public final class FabricConstants {

    // Non-instantiable: constants holder only (hide default constructor).
    private FabricConstants() {
    }

    // Header field IDs (match field names as exposed by the pipeline model)
    public static final PiMatchFieldId HDR_IG_PORT = PiMatchFieldId.of("ig_port");
    public static final PiMatchFieldId HDR_VLAN_IS_VALID = PiMatchFieldId.of("vlan_is_valid");
    public static final PiMatchFieldId HDR_IPV6_SRC_NET_ID = PiMatchFieldId.of("ipv6_src_net_id");
    public static final PiMatchFieldId HDR_C_TAG = PiMatchFieldId.of("c_tag");
    public static final PiMatchFieldId HDR_IPV4_SRC = PiMatchFieldId.of("ipv4_src");
    public static final PiMatchFieldId HDR_IPV6_DST = PiMatchFieldId.of("ipv6_dst");
    public static final PiMatchFieldId HDR_L4_DPORT = PiMatchFieldId.of("l4_dport");
    public static final PiMatchFieldId HDR_PPPOE_CODE = PiMatchFieldId.of("pppoe_code");
    public static final PiMatchFieldId HDR_IPV6_SRC = PiMatchFieldId.of("ipv6_src");
    public static final PiMatchFieldId HDR_ETH_SRC = PiMatchFieldId.of("eth_src");
    public static final PiMatchFieldId HDR_FAR_ID = PiMatchFieldId.of("far_id");
    public static final PiMatchFieldId HDR_VLAN_ID = PiMatchFieldId.of("vlan_id");
    public static final PiMatchFieldId HDR_ICMP_CODE = PiMatchFieldId.of("icmp_code");
    public static final PiMatchFieldId HDR_ETH_DST = PiMatchFieldId.of("eth_dst");
    public static final PiMatchFieldId HDR_S_TAG = PiMatchFieldId.of("s_tag");
    public static final PiMatchFieldId HDR_IPV4_DST = PiMatchFieldId.of("ipv4_dst");
    public static final PiMatchFieldId HDR_IPV6_TRAFFIC_CLASS = PiMatchFieldId.of("ipv6_traffic_class");
    public static final PiMatchFieldId HDR_IPV4_PREFIX = PiMatchFieldId.of("ipv4_prefix");
    public static final PiMatchFieldId HDR_ETH_TYPE = PiMatchFieldId.of("eth_type");
    public static final PiMatchFieldId HDR_PPPOE_SESSION_ID = PiMatchFieldId.of("pppoe_session_id");
    public static final PiMatchFieldId HDR_IP_ETH_TYPE = PiMatchFieldId.of("ip_eth_type");
    public static final PiMatchFieldId HDR_L4_SPORT = PiMatchFieldId.of("l4_sport");
    public static final PiMatchFieldId HDR_ICMP_TYPE = PiMatchFieldId.of("icmp_type");
    public static final PiMatchFieldId HDR_INNER_VLAN_ID = PiMatchFieldId.of("inner_vlan_id");
    public static final PiMatchFieldId HDR_IPV4_ECN = PiMatchFieldId.of("ipv4_ecn");
    public static final PiMatchFieldId HDR_TUNNEL_IPV4_DST = PiMatchFieldId.of("tunnel_ipv4_dst");
    public static final PiMatchFieldId HDR_EG_SPEC = PiMatchFieldId.of("eg_spec");
    public static final PiMatchFieldId HDR_LINE_ID = PiMatchFieldId.of("line_id");
    public static final PiMatchFieldId HDR_SPGW_DIRECTION = PiMatchFieldId.of("spgw_direction");
    public static final PiMatchFieldId HDR_IPV4_DSCP = PiMatchFieldId.of("ipv4_dscp");
    public static final PiMatchFieldId HDR_NEXT_ID = PiMatchFieldId.of("next_id");
    public static final PiMatchFieldId HDR_EG_PORT = PiMatchFieldId.of("eg_port");
    public static final PiMatchFieldId HDR_UE_ADDR = PiMatchFieldId.of("ue_addr");
    public static final PiMatchFieldId HDR_TEID = PiMatchFieldId.of("teid");
    public static final PiMatchFieldId HDR_GTP_IPV4_DST = PiMatchFieldId.of("gtp_ipv4_dst");
    public static final PiMatchFieldId HDR_INT_IS_VALID = PiMatchFieldId.of("int_is_valid");
    public static final PiMatchFieldId HDR_MPLS_LABEL = PiMatchFieldId.of("mpls_label");
    public static final PiMatchFieldId HDR_IP_PROTO = PiMatchFieldId.of("ip_proto");
    public static final PiMatchFieldId HDR_PPPOE_PROTOCOL = PiMatchFieldId.of("pppoe_protocol");

    // Table IDs (fully-qualified table names in the fabric pipeline)
    public static final PiTableId FABRIC_INGRESS_NEXT_HASHED =
            PiTableId.of("FabricIngress.next.hashed");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_T_LINE_MAP =
            PiTableId.of("FabricIngress.bng_ingress.t_line_map");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_FAR_LOOKUP =
            PiTableId.of("FabricIngress.spgw_ingress.far_lookup");
    public static final PiTableId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_TRANSIT_TB_INT_INSERT =
            PiTableId.of("FabricEgress.process_int_main.process_int_transit.tb_int_insert");
    public static final PiTableId FABRIC_INGRESS_FILTERING_FWD_CLASSIFIER =
            PiTableId.of("FabricIngress.filtering.fwd_classifier");
    public static final PiTableId FABRIC_INGRESS_NEXT_XCONNECT =
            PiTableId.of("FabricIngress.next.xconnect");
    public static final PiTableId FABRIC_INGRESS_NEXT_NEXT_VLAN =
            PiTableId.of("FabricIngress.next.next_vlan");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_DOWNLINK_PDR_LOOKUP =
            PiTableId.of("FabricIngress.spgw_ingress.downlink_pdr_lookup");
    public static final PiTableId FABRIC_INGRESS_NEXT_SIMPLE =
            PiTableId.of("FabricIngress.next.simple");
    public static final PiTableId FABRIC_INGRESS_NEXT_MULTICAST =
            PiTableId.of("FabricIngress.next.multicast");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_UPLINK_PDR_LOOKUP =
            PiTableId.of("FabricIngress.spgw_ingress.uplink_pdr_lookup");
    public static final PiTableId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_SOURCE_TB_INT_SOURCE =
            PiTableId.of("FabricEgress.process_int_main.process_int_source.tb_int_source");
    public static final PiTableId FABRIC_INGRESS_FORWARDING_ROUTING_V6 =
            PiTableId.of("FabricIngress.forwarding.routing_v6");
    public static final PiTableId FABRIC_INGRESS_FORWARDING_MPLS =
            PiTableId.of("FabricIngress.forwarding.mpls");
    public static final PiTableId FABRIC_INGRESS_FORWARDING_ROUTING_V4 =
            PiTableId.of("FabricIngress.forwarding.routing_v4");
    public static final PiTableId FABRIC_INGRESS_ACL_ACL =
            PiTableId.of("FabricIngress.acl.acl");
    public static final PiTableId FABRIC_INGRESS_FILTERING_INGRESS_PORT_VLAN =
            PiTableId.of("FabricIngress.filtering.ingress_port_vlan");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_T_PPPOE_CP =
            PiTableId.of("FabricIngress.bng_ingress.upstream.t_pppoe_cp");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_T_PPPOE_TERM_V4 =
            PiTableId.of("FabricIngress.bng_ingress.upstream.t_pppoe_term_v4");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_T_PPPOE_TERM_V6 =
            PiTableId.of("FabricIngress.bng_ingress.upstream.t_pppoe_term_v6");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_DOWNLINK_FILTER_TABLE =
            PiTableId.of("FabricIngress.spgw_ingress.downlink_filter_table");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_FLEXIBLE_PDR_LOOKUP =
            PiTableId.of("FabricIngress.spgw_ingress.flexible_pdr_lookup");
    public static final PiTableId FABRIC_INGRESS_SPGW_INGRESS_UPLINK_FILTER_TABLE =
            PiTableId.of("FabricIngress.spgw_ingress.uplink_filter_table");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_T_LINE_SESSION_MAP =
            PiTableId.of("FabricIngress.bng_ingress.downstream.t_line_session_map");
    public static final PiTableId FABRIC_EGRESS_EGRESS_NEXT_EGRESS_VLAN =
            PiTableId.of("FabricEgress.egress_next.egress_vlan");
    public static final PiTableId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_TB_SET_SINK =
            PiTableId.of("FabricIngress.process_set_source_sink.tb_set_sink");
    public static final PiTableId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_REPORT_TB_GENERATE_REPORT =
            PiTableId.of("FabricEgress.process_int_main.process_int_report.tb_generate_report");
    public static final PiTableId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_TB_SET_SOURCE =
            PiTableId.of("FabricIngress.process_set_source_sink.tb_set_source");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_T_QOS_V6 =
            PiTableId.of("FabricIngress.bng_ingress.downstream.t_qos_v6");
    public static final PiTableId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_T_QOS_V4 =
            PiTableId.of("FabricIngress.bng_ingress.downstream.t_qos_v4");
    public static final PiTableId FABRIC_INGRESS_FORWARDING_BRIDGING =
            PiTableId.of("FabricIngress.forwarding.bridging");

    // Indirect Counter IDs
    public static final PiCounterId FABRIC_INGRESS_SPGW_INGRESS_PDR_COUNTER =
            PiCounterId.of("FabricIngress.spgw_ingress.pdr_counter");
    public static final PiCounterId FABRIC_EGRESS_BNG_EGRESS_DOWNSTREAM_C_LINE_TX =
            PiCounterId.of("FabricEgress.bng_egress.downstream.c_line_tx");
    public static final PiCounterId FABRIC_EGRESS_SPGW_EGRESS_PDR_COUNTER =
            PiCounterId.of("FabricEgress.spgw_egress.pdr_counter");
    public static final PiCounterId FABRIC_INGRESS_PORT_COUNTERS_CONTROL_EGRESS_PORT_COUNTER =
            PiCounterId.of("FabricIngress.port_counters_control.egress_port_counter");
    public static final PiCounterId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_C_DROPPED =
            PiCounterId.of("FabricIngress.bng_ingress.upstream.c_dropped");
    public static final PiCounterId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_C_CONTROL =
            PiCounterId.of("FabricIngress.bng_ingress.upstream.c_control");
    public static final PiCounterId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_C_TERMINATED =
            PiCounterId.of("FabricIngress.bng_ingress.upstream.c_terminated");
    public static final PiCounterId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_C_LINE_RX =
            PiCounterId.of("FabricIngress.bng_ingress.downstream.c_line_rx");
    public static final PiCounterId FABRIC_INGRESS_PORT_COUNTERS_CONTROL_INGRESS_PORT_COUNTER =
            PiCounterId.of("FabricIngress.port_counters_control.ingress_port_counter");

    // Direct Counter IDs (attached to the table of the same base name)
    public static final PiCounterId FABRIC_INGRESS_NEXT_MULTICAST_COUNTER =
            PiCounterId.of("FabricIngress.next.multicast_counter");
    public static final PiCounterId FABRIC_INGRESS_NEXT_SIMPLE_COUNTER =
            PiCounterId.of("FabricIngress.next.simple_counter");
    public static final PiCounterId FABRIC_INGRESS_FILTERING_FWD_CLASSIFIER_COUNTER =
            PiCounterId.of("FabricIngress.filtering.fwd_classifier_counter");
    public static final PiCounterId FABRIC_INGRESS_FORWARDING_BRIDGING_COUNTER =
            PiCounterId.of("FabricIngress.forwarding.bridging_counter");
    public static final PiCounterId FABRIC_INGRESS_NEXT_HASHED_COUNTER =
            PiCounterId.of("FabricIngress.next.hashed_counter");
    public static final PiCounterId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_COUNTER_SET_SOURCE =
            PiCounterId.of("FabricIngress.process_set_source_sink.counter_set_source");
    public static final PiCounterId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_SOURCE_COUNTER_INT_SOURCE =
            PiCounterId.of("FabricEgress.process_int_main.process_int_source.counter_int_source");
    public static final PiCounterId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_COUNTER_SET_SINK =
            PiCounterId.of("FabricIngress.process_set_source_sink.counter_set_sink");
    public static final PiCounterId FABRIC_EGRESS_EGRESS_NEXT_EGRESS_VLAN_COUNTER =
            PiCounterId.of("FabricEgress.egress_next.egress_vlan_counter");
    public static final PiCounterId FABRIC_INGRESS_ACL_ACL_COUNTER =
            PiCounterId.of("FabricIngress.acl.acl_counter");
    public static final PiCounterId FABRIC_INGRESS_NEXT_XCONNECT_COUNTER =
            PiCounterId.of("FabricIngress.next.xconnect_counter");
    public static final PiCounterId FABRIC_INGRESS_NEXT_NEXT_VLAN_COUNTER =
            PiCounterId.of("FabricIngress.next.next_vlan_counter");
    public static final PiCounterId FABRIC_INGRESS_FORWARDING_ROUTING_V6_COUNTER =
            PiCounterId.of("FabricIngress.forwarding.routing_v6_counter");
    public static final PiCounterId FABRIC_INGRESS_FILTERING_INGRESS_PORT_VLAN_COUNTER =
            PiCounterId.of("FabricIngress.filtering.ingress_port_vlan_counter");
    public static final PiCounterId FABRIC_INGRESS_FORWARDING_MPLS_COUNTER =
            PiCounterId.of("FabricIngress.forwarding.mpls_counter");

    // Action IDs
    public static final PiActionId FABRIC_INGRESS_NEXT_SET_NEXT_ID_XCONNECT =
            PiActionId.of("FabricIngress.next.set_next_id_xconnect");
    public static final PiActionId FABRIC_INGRESS_FORWARDING_NOP_ROUTING_V4 =
            PiActionId.of("FabricIngress.forwarding.nop_routing_v4");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_QOS_BESTEFF =
            PiActionId.of("FabricIngress.bng_ingress.downstream.qos_besteff");
    public static final PiActionId FABRIC_INGRESS_FILTERING_PERMIT_WITH_INTERNAL_VLAN =
            PiActionId.of("FabricIngress.filtering.permit_with_internal_vlan");
    public static final PiActionId FABRIC_INGRESS_NEXT_ROUTING_HASHED =
            PiActionId.of("FabricIngress.next.routing_hashed");
    public static final PiActionId FABRIC_INGRESS_FORWARDING_SET_NEXT_ID_BRIDGING =
            PiActionId.of("FabricIngress.forwarding.set_next_id_bridging");
    public static final PiActionId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_SOURCE_INT_SOURCE_DSCP =
            PiActionId.of("FabricEgress.process_int_main.process_int_source.int_source_dscp");
    public static final PiActionId FABRIC_INGRESS_NEXT_SET_DOUBLE_VLAN =
            PiActionId.of("FabricIngress.next.set_double_vlan");
    public static final PiActionId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_TRANSIT_INIT_METADATA =
            PiActionId.of("FabricEgress.process_int_main.process_int_transit.init_metadata");
    public static final PiActionId FABRIC_INGRESS_SPGW_INGRESS_LOAD_TUNNEL_FAR_ATTRIBUTES =
            PiActionId.of("FabricIngress.spgw_ingress.load_tunnel_far_attributes");
    public static final PiActionId FABRIC_INGRESS_ACL_SET_CLONE_SESSION_ID =
            PiActionId.of("FabricIngress.acl.set_clone_session_id");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_PUNT_TO_CPU =
            PiActionId.of("FabricIngress.bng_ingress.upstream.punt_to_cpu");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_DROP =
            PiActionId.of("FabricIngress.bng_ingress.downstream.drop");
    public static final PiActionId FABRIC_INGRESS_SPGW_INGRESS_SET_PDR_ATTRIBUTES =
            PiActionId.of("FabricIngress.spgw_ingress.set_pdr_attributes");
    public static final PiActionId FABRIC_INGRESS_NEXT_SET_VLAN =
            PiActionId.of("FabricIngress.next.set_vlan");
    public static final PiActionId FABRIC_INGRESS_ACL_NOP_ACL =
            PiActionId.of("FabricIngress.acl.nop_acl");
    public static final PiActionId FABRIC_INGRESS_SPGW_INGRESS_LOAD_NORMAL_FAR_ATTRIBUTES =
            PiActionId.of("FabricIngress.spgw_ingress.load_normal_far_attributes");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_SET_LINE =
            PiActionId.of("FabricIngress.bng_ingress.set_line");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_TERM_DISABLED =
            PiActionId.of("FabricIngress.bng_ingress.upstream.term_disabled");
    public static final PiActionId FABRIC_INGRESS_ACL_SET_NEXT_ID_ACL =
            PiActionId.of("FabricIngress.acl.set_next_id_acl");
    public static final PiActionId FABRIC_INGRESS_FILTERING_PERMIT =
            PiActionId.of("FabricIngress.filtering.permit");
    public static final PiActionId FABRIC_INGRESS_FORWARDING_SET_NEXT_ID_ROUTING_V4 =
            PiActionId.of("FabricIngress.forwarding.set_next_id_routing_v4");
    public static final PiActionId FABRIC_INGRESS_FORWARDING_SET_NEXT_ID_ROUTING_V6 =
            PiActionId.of("FabricIngress.forwarding.set_next_id_routing_v6");
    public static final PiActionId FABRIC_INGRESS_NEXT_ROUTING_SIMPLE =
            PiActionId.of("FabricIngress.next.routing_simple");
    public static final PiActionId FABRIC_EGRESS_BNG_EGRESS_DOWNSTREAM_ENCAP_V4 =
            PiActionId.of("FabricEgress.bng_egress.downstream.encap_v4");
    public static final PiActionId FABRIC_INGRESS_NEXT_OUTPUT_HASHED =
            PiActionId.of("FabricIngress.next.output_hashed");
    public static final PiActionId FABRIC_INGRESS_FORWARDING_POP_MPLS_AND_NEXT =
            PiActionId.of("FabricIngress.forwarding.pop_mpls_and_next");
    public static final PiActionId FABRIC_EGRESS_BNG_EGRESS_DOWNSTREAM_ENCAP_V6 =
            PiActionId.of("FabricEgress.bng_egress.downstream.encap_v6");
    public static final PiActionId FABRIC_INGRESS_NEXT_MPLS_ROUTING_SIMPLE =
            PiActionId.of("FabricIngress.next.mpls_routing_simple");
    public static final PiActionId FABRIC_INGRESS_ACL_PUNT_TO_CPU =
            PiActionId.of("FabricIngress.acl.punt_to_cpu");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_QOS_PRIO =
            PiActionId.of("FabricIngress.bng_ingress.downstream.qos_prio");
    public static final PiActionId FABRIC_EGRESS_EGRESS_NEXT_POP_VLAN =
            PiActionId.of("FabricEgress.egress_next.pop_vlan");
    public static final PiActionId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_INT_SET_SINK =
            PiActionId.of("FabricIngress.process_set_source_sink.int_set_sink");
    public static final PiActionId FABRIC_INGRESS_NEXT_MPLS_ROUTING_HASHED =
            PiActionId.of("FabricIngress.next.mpls_routing_hashed");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_TERM_ENABLED_V6 =
            PiActionId.of("FabricIngress.bng_ingress.upstream.term_enabled_v6");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_UPSTREAM_TERM_ENABLED_V4 =
            PiActionId.of("FabricIngress.bng_ingress.upstream.term_enabled_v4");
    public static final PiActionId FABRIC_INGRESS_PROCESS_SET_SOURCE_SINK_INT_SET_SOURCE =
            PiActionId.of("FabricIngress.process_set_source_sink.int_set_source");
    public static final PiActionId NOP = PiActionId.of("nop");
    public static final PiActionId FABRIC_INGRESS_NEXT_OUTPUT_SIMPLE =
            PiActionId.of("FabricIngress.next.output_simple");
    public static final PiActionId FABRIC_INGRESS_FILTERING_DENY =
            PiActionId.of("FabricIngress.filtering.deny");
    public static final PiActionId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_SET_SESSION =
            PiActionId.of("FabricIngress.bng_ingress.downstream.set_session");
    public static final PiActionId FABRIC_INGRESS_NEXT_SET_MCAST_GROUP_ID =
            PiActionId.of("FabricIngress.next.set_mcast_group_id");
    public static final PiActionId FABRIC_INGRESS_FILTERING_SET_FORWARDING_TYPE =
            PiActionId.of("FabricIngress.filtering.set_forwarding_type");
    public static final PiActionId FABRIC_EGRESS_PROCESS_INT_MAIN_PROCESS_INT_REPORT_DO_REPORT_ENCAPSULATION =
            PiActionId.of("FabricEgress.process_int_main.process_int_report.do_report_encapsulation");
    public static final PiActionId NO_ACTION = PiActionId.of("NoAction");
    public static final PiActionId FABRIC_INGRESS_NEXT_OUTPUT_XCONNECT =
            PiActionId.of("FabricIngress.next.output_xconnect");
    public static final PiActionId FABRIC_INGRESS_ACL_DROP =
            PiActionId.of("FabricIngress.acl.drop");

    // Action Param IDs
    public static final PiActionParamId DMAC = PiActionParamId.of("dmac");
    public static final PiActionParamId MON_PORT = PiActionParamId.of("mon_port");
    public static final PiActionParamId NOTIFY_CP = PiActionParamId.of("notify_cp");
    public static final PiActionParamId SWITCH_ID = PiActionParamId.of("switch_id");
    public static final PiActionParamId SMAC = PiActionParamId.of("smac");
    public static final PiActionParamId CLONE_ID = PiActionParamId.of("clone_id");
    public static final PiActionParamId FAR_ID = PiActionParamId.of("far_id");
    public static final PiActionParamId VLAN_ID = PiActionParamId.of("vlan_id");
    public static final PiActionParamId TUNNEL_SRC_ADDR = PiActionParamId.of("tunnel_src_addr");
    public static final PiActionParamId LABEL = PiActionParamId.of("label");
    public static final PiActionParamId SRC_IP = PiActionParamId.of("src_ip");
    public static final PiActionParamId MON_MAC = PiActionParamId.of("mon_mac");
    public static final PiActionParamId NEXT_ID = PiActionParamId.of("next_id");
    public static final PiActionParamId INS_CNT = PiActionParamId.of("ins_cnt");
    public static final PiActionParamId SRC_MAC = PiActionParamId.of("src_mac");
    public static final PiActionParamId INNER_VLAN_ID = PiActionParamId.of("inner_vlan_id");
    public static final PiActionParamId PPPOE_SESSION_ID = PiActionParamId.of("pppoe_session_id");
    public static final PiActionParamId CTR_ID = PiActionParamId.of("ctr_id");
    public static final PiActionParamId MON_IP = PiActionParamId.of("mon_ip");
    public static final PiActionParamId INS_MASK0003 = PiActionParamId.of("ins_mask0003");
    public static final PiActionParamId LINE_ID = PiActionParamId.of("line_id");
    public static final PiActionParamId FWD_TYPE = PiActionParamId.of("fwd_type");
    public static final PiActionParamId OUTER_VLAN_ID = PiActionParamId.of("outer_vlan_id");
    public static final PiActionParamId INS_MASK0407 = PiActionParamId.of("ins_mask0407");
    public static final PiActionParamId TEID = PiActionParamId.of("teid");
    public static final PiActionParamId DROP = PiActionParamId.of("drop");
    public static final PiActionParamId PORT_NUM = PiActionParamId.of("port_num");
    public static final PiActionParamId TUNNEL_DST_ADDR = PiActionParamId.of("tunnel_dst_addr");
    public static final PiActionParamId GROUP_ID = PiActionParamId.of("group_id");
    public static final PiActionParamId MAX_HOP = PiActionParamId.of("max_hop");

    // Action Profile IDs
    public static final PiActionProfileId FABRIC_INGRESS_NEXT_HASHED_SELECTOR =
            PiActionProfileId.of("FabricIngress.next.hashed_selector");

    // Packet Metadata IDs (packet-in/packet-out controller metadata)
    public static final PiPacketMetadataId INGRESS_PORT = PiPacketMetadataId.of("ingress_port");
    public static final PiPacketMetadataId EGRESS_PORT = PiPacketMetadataId.of("egress_port");

    // Meter IDs
    public static final PiMeterId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_M_BESTEFF =
            PiMeterId.of("FabricIngress.bng_ingress.downstream.m_besteff");
    public static final PiMeterId FABRIC_INGRESS_BNG_INGRESS_DOWNSTREAM_M_PRIO =
            PiMeterId.of("FabricIngress.bng_ingress.downstream.m_prio");
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.incremental;

import com.google.common.base.Supplier;
import org.apache.druid.collections.NonBlockingPool;
import org.apache.druid.collections.ResourceHolder;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.segment.ColumnSelectorFactory;

import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * An {@link IncrementalIndex} that keeps aggregator state off-heap, in
 * {@link ByteBuffer}s taken from a {@link NonBlockingPool}. Rows are laid out
 * contiguously inside each pooled buffer; a per-row {@code [bufferIndex, bufferOffset]}
 * pair in {@link #indexAndOffsets} locates the start of that row's aggregator block,
 * and {@link #aggOffsetInBuffer} gives the relative position of each aggregator
 * within the block.
 */
public class OffheapIncrementalIndex extends IncrementalIndex<BufferAggregator>
{
  private static final Logger log = new Logger(OffheapIncrementalIndex.class);

  // Source of off-heap buffers; one buffer holds aggregator state for many rows.
  private final NonBlockingPool<ByteBuffer> bufferPool;

  // Buffers taken from the pool so far; holders are closed (returned) in close().
  private final List<ResourceHolder<ByteBuffer>> aggBuffers = new ArrayList<>();
  // rowIndex -> {index into aggBuffers, byte offset of the row's aggregator block}.
  private final List<int[]> indexAndOffsets = new ArrayList<>();

  private final FactsHolder facts;

  // Monotonic row-index generator; also drives getLastRowIndex().
  private final AtomicInteger indexIncrement = new AtomicInteger(0);

  protected final int maxRowCount;

  // Cached (possibly wrapped) column selector factories, keyed by metric name;
  // populated in initAggs().
  @Nullable
  private volatile Map<String, ColumnSelectorFactory> selectors;

  //given a ByteBuffer and an offset where all aggregates for a row are stored
  //offset + aggOffsetInBuffer[i] would give position in ByteBuffer where ith aggregate
  //is stored
  @Nullable
  private volatile int[] aggOffsetInBuffer;
  // Total bytes needed per row, i.e. sum of every aggregator's max intermediate size.
  private volatile int aggsTotalSize;

  @Nullable
  private String outOfRowsReason = null;

  OffheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      boolean deserializeComplexMetrics,
      boolean concurrentEventAdd,
      boolean sortFacts,
      int maxRowCount,
      NonBlockingPool<ByteBuffer> bufferPool
  )
  {
    super(incrementalIndexSchema, deserializeComplexMetrics, concurrentEventAdd);
    this.maxRowCount = maxRowCount;
    this.bufferPool = bufferPool;

    this.facts = incrementalIndexSchema.isRollup()
                 ? new RollupFactsHolder(sortFacts, dimsComparator(), getDimensions())
                 : new PlainFactsHolder(sortFacts, dimsComparator());

    // NOTE(review): this relies on aggsTotalSize already being set here,
    // presumably because the super constructor invokes initAggs() — confirm.
    //check that stupid pool gives buffers that can hold at least one row's aggregators
    ResourceHolder<ByteBuffer> bb = bufferPool.take();
    if (bb.get().capacity() < aggsTotalSize) {
      bb.close();
      throw new IAE("bufferPool buffers capacity must be >= [%s]", aggsTotalSize);
    }
    aggBuffers.add(bb);
  }

  @Override
  public FactsHolder getFacts()
  {
    return facts;
  }

  /**
   * Builds the selector map and computes the per-aggregator byte offsets
   * ({@link #aggOffsetInBuffer}) and the total per-row size ({@link #aggsTotalSize}).
   * Returns an array of {@code null}s; the actual {@link BufferAggregator}s are
   * created lazily in {@link #addToFacts} once the first input row is available.
   */
  @Override
  protected BufferAggregator[] initAggs(
      final AggregatorFactory[] metrics,
      final Supplier<InputRow> rowSupplier,
      final boolean deserializeComplexMetrics,
      final boolean concurrentEventAdd
  )
  {
    selectors = new HashMap<>();
    aggOffsetInBuffer = new int[metrics.length];

    int aggsCurOffsetInBuffer = 0;

    for (int i = 0; i < metrics.length; i++) {
      AggregatorFactory agg = metrics[i];

      ColumnSelectorFactory columnSelectorFactory = makeColumnSelectorFactory(
          agg,
          rowSupplier,
          deserializeComplexMetrics
      );

      selectors.put(
          agg.getName(),
          new OnheapIncrementalIndex.CachingColumnSelectorFactory(columnSelectorFactory, concurrentEventAdd)
      );

      // Aggregators are packed back-to-back; each gets its max intermediate size.
      aggOffsetInBuffer[i] = aggsCurOffsetInBuffer;
      aggsCurOffsetInBuffer += agg.getMaxIntermediateSizeWithNulls();
    }

    aggsTotalSize = aggsCurOffsetInBuffer;

    return new BufferAggregator[metrics.length];
  }

  /**
   * Adds a row's metric values to the index. For a previously seen key the existing
   * aggregator block is reused (rollup); for a new key a block is carved out of the
   * last pooled buffer, or a fresh buffer is taken from the pool when the remaining
   * space cannot fit {@link #aggsTotalSize} bytes. Block allocation and fact
   * registration happen under {@code synchronized (this)}; the actual aggregation
   * happens outside that lock, guarded per-aggregator.
   *
   * @throws IndexSizeExceededException when a NEW key would push the row count past
   *         {@code maxRowCount} (the {@code skipMaxRowsInMemoryCheck} parameter is
   *         deliberately ignored here, per the inline comment).
   */
  @Override
  protected AddToFactsResult addToFacts(
      InputRow row,
      IncrementalIndexRow key,
      ThreadLocal<InputRow> rowContainer,
      Supplier<InputRow> rowSupplier,
      boolean skipMaxRowsInMemoryCheck // ignored, we always want to check this for offheap
  ) throws IndexSizeExceededException
  {
    ByteBuffer aggBuffer;
    int bufferIndex;
    int bufferOffset;

    synchronized (this) {
      final AggregatorFactory[] metrics = getMetrics();
      final int priorIndex = facts.getPriorIndex(key);
      if (IncrementalIndexRow.EMPTY_ROW_INDEX != priorIndex) {
        // Key already present: aggregate into its existing block.
        final int[] indexAndOffset = indexAndOffsets.get(priorIndex);
        bufferIndex = indexAndOffset[0];
        bufferOffset = indexAndOffset[1];
        aggBuffer = aggBuffers.get(bufferIndex).get();
      } else {
        if (metrics.length > 0 && getAggs()[0] == null) {
          // note: creation of Aggregators is done lazily when at least one row from input is available
          // so that FilteredAggregators could be initialized correctly.
          rowContainer.set(row);
          for (int i = 0; i < metrics.length; i++) {
            final AggregatorFactory agg = metrics[i];
            getAggs()[i] = agg.factorizeBuffered(selectors.get(agg.getName()));
          }
          rowContainer.set(null);
        }

        // Try to place the new block right after the previous row's block in the
        // last buffer.
        bufferIndex = aggBuffers.size() - 1;
        ByteBuffer lastBuffer = aggBuffers.isEmpty() ? null : aggBuffers.get(aggBuffers.size() - 1).get();
        int[] lastAggregatorsIndexAndOffset = indexAndOffsets.isEmpty()
                                              ? null
                                              : indexAndOffsets.get(indexAndOffsets.size() - 1);

        if (lastAggregatorsIndexAndOffset != null && lastAggregatorsIndexAndOffset[0] != bufferIndex) {
          throw new ISE("last row's aggregate's buffer and last buffer index must be same");
        }

        bufferOffset = aggsTotalSize + (lastAggregatorsIndexAndOffset != null ? lastAggregatorsIndexAndOffset[1] : 0);
        if (lastBuffer != null &&
            lastBuffer.capacity() - bufferOffset >= aggsTotalSize) {
          aggBuffer = lastBuffer;
        } else {
          // Not enough room left: take a new buffer from the pool and start at 0.
          ResourceHolder<ByteBuffer> bb = bufferPool.take();
          aggBuffers.add(bb);
          bufferIndex = aggBuffers.size() - 1;
          bufferOffset = 0;
          aggBuffer = bb.get();
        }

        for (int i = 0; i < metrics.length; i++) {
          getAggs()[i].init(aggBuffer, bufferOffset + aggOffsetInBuffer[i]);
        }

        // Last ditch sanity checks
        if (getNumEntries().get() >= maxRowCount
            && facts.getPriorIndex(key) == IncrementalIndexRow.EMPTY_ROW_INDEX) {
          throw new IndexSizeExceededException("Maximum number of rows [%d] reached", maxRowCount);
        }

        final int rowIndex = indexIncrement.getAndIncrement();

        // note that indexAndOffsets must be updated before facts, because as soon as we update facts
        // concurrent readers get hold of it and might ask for newly added row
        indexAndOffsets.add(new int[]{bufferIndex, bufferOffset});
        final int prev = facts.putIfAbsent(key, rowIndex);
        if (IncrementalIndexRow.EMPTY_ROW_INDEX == prev) {
          getNumEntries().incrementAndGet();
        } else {
          // putIfAbsent lost a race we should have been protected from by the lock.
          throw new ISE("Unexpected state: Concurrent fact addition.");
        }
      }
    }

    rowContainer.set(row);

    final List<String> parseExceptionMessages = new ArrayList<>();
    for (int i = 0; i < getMetrics().length; i++) {
      final BufferAggregator agg = getAggs()[i];

      // Aggregators are shared across rows; serialize access per aggregator.
      synchronized (agg) {
        try {
          agg.aggregate(aggBuffer, bufferOffset + aggOffsetInBuffer[i]);
        } catch (ParseException e) {
          // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
          log.debug(e, "Encountered parse error, skipping aggregator[%s].", getMetricAggs()[i].getName());
          parseExceptionMessages.add(e.getMessage());
        }
      }
    }

    rowContainer.set(null);

    return new AddToFactsResult(getNumEntries().get(), 0, parseExceptionMessages);
  }

  @Override
  public int getLastRowIndex()
  {
    return indexIncrement.get() - 1;
  }

  @Override
  public boolean canAppendRow()
  {
    final boolean canAdd = size() < maxRowCount;
    if (!canAdd) {
      outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount);
    }
    return canAdd;
  }

  @Override
  public String getOutOfRowsReason()
  {
    return outOfRowsReason;
  }

  @Override
  protected BufferAggregator[] getAggsForRow(int rowOffset)
  {
    // Buffer aggregators are stateless w.r.t. the row; state lives in the buffers.
    return getAggs();
  }

  // Reads the aggregator's current value out of the row's off-heap block.
  @Override
  protected Object getAggVal(BufferAggregator agg, int rowOffset, int aggPosition)
  {
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.get(bb, indexAndOffset[1] + aggOffsetInBuffer[aggPosition]);
  }

  @Override
  public float getMetricFloatValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getFloat(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public long getMetricLongValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getLong(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public Object getMetricObjectValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.get(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public double getMetricDoubleValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getDouble(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public boolean isNull(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.isNull(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  /**
   * Releases all state and returns the pooled buffers.
   * <p>
   * NOTE: This is NOT thread-safe with add... so make sure all the adding is DONE
   * before closing. A failure while returning buffers is rethrown as an unchecked
   * {@link RuntimeException}.
   */
  @Override
  public void close()
  {
    super.close();
    facts.clear();
    indexAndOffsets.clear();

    if (selectors != null) {
      selectors.clear();
    }

    // Closer returns every taken buffer holder to the pool, even if one close fails.
    Closer c = Closer.create();
    aggBuffers.forEach(c::register);
    try {
      c.close();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    aggBuffers.clear();
  }

  /**
   * Builder for {@link OffheapIncrementalIndex}; requires a non-null buffer pool
   * in addition to whatever {@code AppendableIndexBuilder} validates.
   */
  public static class Builder extends AppendableIndexBuilder
  {
    @Nullable
    NonBlockingPool<ByteBuffer> bufferPool = null;

    public Builder setBufferPool(final NonBlockingPool<ByteBuffer> bufferPool)
    {
      this.bufferPool = bufferPool;
      return this;
    }

    @Override
    public void validate()
    {
      super.validate();
      if (bufferPool == null) {
        throw new IllegalArgumentException("bufferPool cannot be null");
      }
    }

    @Override
    protected OffheapIncrementalIndex buildInner()
    {
      return new OffheapIncrementalIndex(
          Objects.requireNonNull(incrementalIndexSchema, "incrementalIndexSchema is null"),
          deserializeComplexMetrics,
          concurrentEventAdd,
          sortFacts,
          maxRowCount,
          Objects.requireNonNull(bufferPool, "bufferPool is null")
      );
    }
  }
}
/** Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2011 Eric Haddad Koenig Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
package com.all.shared.model;

import java.io.Serializable;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;

import com.all.chat.ChatStatus;
import com.all.chat.ChatType;
import com.all.chat.ChatUser;
import com.all.shared.sync.SyncAble;
import com.all.shared.sync.SyncAbleAbstractImpl;
import com.all.shared.sync.SyncUpdateAble;
import com.all.shared.util.NickNameRefiner;

/**
 * Persistent contact record that bridges the sync layer ({@link SyncAbleAbstractImpl})
 * and the chat layer ({@link ChatUser}).
 *
 * <p>Identity: {@link #equals(Object)}/{@link #hashCode()} key on email first, then
 * id, then chat id. The JPA {@code @Id} is the email address.
 */
@Entity
public class ContactInfo extends SyncAbleAbstractImpl implements ChatUser, Serializable {

	private static final long serialVersionUID = 3173471090676381694L;

	// Fields annotated @SyncUpdateAble are copied across in clone(SyncAble).
	@Id
	@SyncUpdateAble
	private String email;

	@SyncUpdateAble
	private String name;

	@SyncUpdateAble
	private String firstName;

	@SyncUpdateAble
	private String lastName;

	@SyncUpdateAble
	private String nickName;

	@SyncUpdateAble
	private String message;

	@SyncUpdateAble
	private Gender gender;

	@SyncUpdateAble
	private boolean isDropping = false;

	// Persisted contact status; kept in sync with the transient chatStatus by the setters.
	private ContactStatus status = ContactStatus.offline;

	@SyncUpdateAble
	private String idLocation;

	@NotNull
	@SyncUpdateAble
	private Long id;

	// Birthday split into parts; all three must be non-null for getBirthday() to return a value.
	@SyncUpdateAble
	private Integer day;

	@SyncUpdateAble
	private Integer month;

	@SyncUpdateAble
	private Integer year;

	@Transient
	private City city;

	@Transient
	private byte[] avatar;

	@Transient
	private ChatType chatType;

	@Transient
	private ChatStatus chatStatus;

	// CREATED ONLY FOR JSONCONVERTER
	@Deprecated
	public ContactInfo() {
	}

	/** Builds a contact from a chat-layer user (no id/name parts available). */
	public ContactInfo(ChatUser chatUser) {
		this.email = chatUser.getChatId();
		this.nickName = chatUser.getChatName();
		this.message = chatUser.getQuote();
		this.chatType = chatUser.getChatType();
		this.chatStatus = chatUser.getChatStatus();
		this.avatar = chatUser.getAvatar();
	}

	/** Builds a contact from a full {@code User} profile. */
	public ContactInfo(User userFriend) {
		this.id = userFriend.getId();
		this.name = userFriend.getFullName();
		this.firstName = userFriend.getFirstName();
		this.lastName = userFriend.getLastName();
		this.nickName = userFriend.getNickName();
		this.email = userFriend.getEmail();
		this.message = userFriend.getQuote();
		this.gender = userFriend.getGender();
		this.day = userFriend.getDay();
		this.month = userFriend.getMonth();
		this.year = userFriend.getYear();
		this.idLocation = userFriend.getIdLocation();
		this.city = userFriend.getCity();
		this.avatar = userFriend.getAvatar();
	}

	/** Builds a pending contact from an outstanding invitation e-mail. */
	public ContactInfo(PendingEmail email) {
		this.id = email.getId();
		this.email = email.getToMail();
		this.status = ContactStatus.pending;
	}

	/** Returns the full name, falling back to the (possibly derived) nickname; null if neither exists. */
	public String getName() {
		if (name == null && nickName == null) {
			return null;
		}
		return name == null ? getNickName() : name;
	}

	public String getMessage() {
		return message;
	}

	public Gender getGender() {
		return gender;
	}

	public void setName(String name) {
		this.name = name;
	}

	public void setMessage(String message) {
		this.message = message;
	}

	public void setGender(Gender sex) {
		this.gender = sex;
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	/** Tooltip text: the bare e-mail for pending contacts, otherwise "chatName - quote". */
	public String getTooltipText() {
		if (getStatus() == ContactStatus.pending) {
			return getEmail();
		} else {
			String tooltip = getChatName();
			if (getQuote() != null) {
				tooltip += " - " + message;
			}
			return tooltip;
		}
	}

	public boolean isPending() {
		return ChatStatus.PENDING == getChatStatus();
	}

	public void setIsDropping(boolean isDropping) {
		this.isDropping = isDropping;
	}

	public boolean isIsDropping() {
		return isDropping;
	}

	public boolean isOnline() {
		return ChatStatus.ONLINE == getChatStatus();
	}

	public boolean isAway() {
		return ChatStatus.AWAY == getChatStatus();
	}

	public String getEmail() {
		return email;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	public ContactStatus getStatus() {
		return status;
	}

	/** Sets the persisted status and mirrors it into the transient chat status. */
	public void setStatus(ContactStatus status) {
		this.status = status;
		this.chatStatus = ChatStatus.valueOf(status.toString().toUpperCase());
	}

	@Override
	public String toString() {
		if (name == null || "".equals(name.trim())) {
			return getEmail();
		}
		return name;
	}

	/**
	 * Equality by e-mail when both sides have one, else by id, else by chat id.
	 * NOTE(review): this layered comparison can be asymmetric when the two objects
	 * have different subsets of identifiers populated — kept as-is for compatibility.
	 */
	@Override
	public boolean equals(Object obj) {
		if (obj == null) {
			return false;
		}
		if (this == obj) {
			return true;
		}
		if (obj instanceof ContactInfo) {
			ContactInfo contact = (ContactInfo) obj;
			if (this.getEmail() != null && contact.getEmail() != null) {
				return this.getEmail().equals(contact.getEmail());
			} else if (this.getId() != null && contact.getId() != null) {
				return this.getId().equals(contact.getId());
			} else if (this.getChatId() != null && contact.getChatId() != null) {
				return this.getChatId().equals(contact.getChatId());
			}
		}
		return false;
	}

	@Override
	public int hashCode() {
		if (getEmail() != null) {
			return getEmail().hashCode();
		}
		if (getId() != null) {
			return getId().hashCode();
		}
		return getChatId() == null ? super.hashCode() : getChatId().hashCode();
	}

	/** First name, derived from the full name if necessary; falls back to a refined nickname. */
	public String getFirstName() {
		splitNamesIfNecessary();
		if (firstName == null && nickName == null) {
			return null;
		}
		return NickNameRefiner.isNullOrEmpty(firstName) ? NickNameRefiner.refine(getNickName()) : firstName;
	}

	/**
	 * Lazily splits {@link #name} into first/last name at the last space.
	 * FIX: a single-token name (no space) previously crashed with
	 * StringIndexOutOfBoundsException because lastIndexOf returned -1;
	 * such names are now treated entirely as the first name.
	 */
	private void splitNamesIfNecessary() {
		if (firstName == null && lastName == null && name != null) {
			int nameParstSeparatorIndex = name.lastIndexOf(" ");
			if (nameParstSeparatorIndex < 0) {
				firstName = name;
			} else {
				firstName = name.substring(0, nameParstSeparatorIndex);
				// Keeps the leading space in lastName, matching the historical behavior.
				lastName = name.substring(nameParstSeparatorIndex);
			}
		}
	}

	/** Last name, derived from the full name if necessary; falls back to a refined nickname. */
	public String getLastName() {
		splitNamesIfNecessary();
		if (lastName == null && nickName == null) {
			return null;
		}
		return NickNameRefiner.isNullOrEmpty(lastName) ? NickNameRefiner.refine(getNickName()) : lastName;
	}

	public void setFirstName(String first) {
		firstName = first;
	}

	public void setLastName(String last) {
		lastName = last;
	}

	/**
	 * Birthday at local midnight, or null if any of day/month/year is missing.
	 * FIX: uses HOUR_OF_DAY instead of HOUR — setting the 12-hour HOUR field to 0
	 * left the calendar's AM/PM flag at its construction-time value, so contacts
	 * created in the afternoon got a 12:00 PM "midnight".
	 */
	public Date getBirthday() {
		if (day == null || month == null || year == null) {
			return null;
		}
		Calendar calendar = new GregorianCalendar();
		calendar.set(Calendar.YEAR, year);
		calendar.set(Calendar.MONTH, month);
		calendar.set(Calendar.DATE, day);
		calendar.set(Calendar.HOUR_OF_DAY, 0);
		calendar.set(Calendar.MINUTE, 0);
		calendar.set(Calendar.SECOND, 0);
		calendar.set(Calendar.MILLISECOND, 0);
		return calendar.getTime();
	}

	public String getIdLocation() {
		return idLocation;
	}

	public void setIdLocation(String idLocation) {
		this.idLocation = idLocation;
	}

	/** Orders contacts by nickname (case-insensitive), falling back to e-mail; 0 for non-ContactInfo. */
	@Override
	public int compareTo(ChatUser o) {
		if (o instanceof ContactInfo) {
			ContactInfo other = (ContactInfo) o;
			if (this.nickName == null || other.nickName == null) {
				return this.email.compareToIgnoreCase(other.email);
			}
			return this.nickName.compareToIgnoreCase(other.nickName);
		}
		return 0;
	}

	public void setMonth(Integer month) {
		this.month = month;
	}

	public void setYear(Integer year) {
		this.year = year;
	}

	public void setDay(Integer day) {
		this.day = day;
	}

	public Integer getDay() {
		return day;
	}

	public Integer getMonth() {
		return month;
	}

	public Integer getYear() {
		return year;
	}

	public City getCity() {
		return city;
	}

	public void setCity(City city) {
		this.city = city;
	}

	public void setAvatar(byte[] avatar) {
		this.avatar = avatar;
	}

	public String getSyncAbleId() {
		return this.email;
	}

	/**
	 * Returns the explicit nickname, or derives one from the name parts:
	 * "first.last" (or whichever part exists), spaces stripped, capped at 25 chars.
	 * Returns null when no name information exists at all.
	 */
	public String getNickName() {
		if (nickName == null && firstName == null && lastName == null) {
			return null;
		}
		if (nickName == null && firstName == null && lastName != null) {
			String defaultNickname = (getLastName()).replace(" ", "");
			return defaultNickname.length() > 25 ? defaultNickname.substring(0, 25) : defaultNickname;
		}
		if (nickName == null && firstName != null && lastName == null) {
			String defaultNickname = (getFirstName()).replace(" ", "");
			return defaultNickname.length() > 25 ? defaultNickname.substring(0, 25) : defaultNickname;
		}
		if (nickName == null) {
			String defaultNickname = (getFirstName() + "." + getLastName()).replace(" ", "");
			return defaultNickname.length() > 25 ? defaultNickname.substring(0, 25) : defaultNickname;
		}
		return nickName;
	}

	public void setNickName(String nickName) {
		this.nickName = nickName;
	}

	/** Copies all sync-able fields from another ContactInfo (not Object.clone()). */
	public void clone(SyncAble updatedEntity) {
		ContactInfo ci = (ContactInfo) updatedEntity;
		this.setEmail(ci.getEmail());
		this.setId(ci.getId());
		this.setName(ci.getName());
		this.setIdLocation(ci.getIdLocation());
		this.setFirstName(ci.getFirstName());
		this.setLastName(ci.getLastName());
		this.setNickName(ci.getNickName());
		this.setMessage(ci.getMessage());
		this.setGender(ci.getGender());
		this.setIsDropping(ci.isDropping);
		this.setDay(ci.getDay());
		this.setMonth(ci.getMonth());
		this.setYear(ci.getYear());
	}

	@Override
	public byte[] getAvatar() {
		return avatar;
	}

	@Override
	public String getChatId() {
		return getEmail();
	}

	@Override
	public String getChatName() {
		return getNickName();
	}

	@Override
	public ChatStatus getChatStatus() {
		// this is needed when the status is loaded from the local DB but the
		// chatStatus is null because it has not been set
		return chatStatus != null ? chatStatus : ChatStatus.valueOf(getStatus().toString().toUpperCase());
	}

	/** Sets the transient chat status and mirrors it into the persisted status. */
	public void setChatStatus(ChatStatus chatStatus) {
		this.chatStatus = chatStatus;
		this.status = ContactStatus.valueOf(chatStatus.toString().toLowerCase());
	}

	@Override
	public ChatType getChatType() {
		return chatType != null ? chatType : ChatType.ALL;
	}

	public void setChatType(ChatType chatType) {
		this.chatType = chatType;
	}

	@Override
	public String getQuote() {
		return getMessage();
	}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.nodesinfo;

import org.elasticsearch.Build;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.ingest.IngestInfo;
import org.elasticsearch.ingest.ProcessorInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
import org.elasticsearch.plugins.PluginInfo;
import org.elasticsearch.plugins.PluginType;
import org.elasticsearch.search.aggregations.support.AggregationInfo;
import org.elasticsearch.search.aggregations.support.AggregationUsageService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.core.IsEqual.equalTo;

/**
 * Round-trip serialization test for {@link NodeInfo}: a randomized instance is
 * written to a stream, read back, and the two objects are compared part by part
 * (mostly via their JSON rendering).
 */
public class NodeInfoStreamingTests extends ESTestCase {

    /** Writes a random NodeInfo to a byte stream, reads it back, and asserts equivalence. */
    public void testNodeInfoStreaming() throws IOException {
        NodeInfo nodeInfo = createNodeInfo();
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            nodeInfo.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                NodeInfo readNodeInfo = new NodeInfo(in);
                assertExpectedUnchanged(nodeInfo, readNodeInfo);
            }
        }
    }

    // checks all properties that are expected to be unchanged.
    // Once we start changing them between versions this method has to be changed as well
    private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
        assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString()));
        assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname()));
        assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion()));
        // The info sections are compared by their JSON output rather than equals(),
        // since not all of these types implement structural equality.
        compareJsonOutput(nodeInfo.getInfo(HttpInfo.class), readNodeInfo.getInfo(HttpInfo.class));
        compareJsonOutput(nodeInfo.getInfo(JvmInfo.class), readNodeInfo.getInfo(JvmInfo.class));
        compareJsonOutput(nodeInfo.getInfo(ProcessInfo.class), readNodeInfo.getInfo(ProcessInfo.class));
        compareJsonOutput(nodeInfo.getSettings(), readNodeInfo.getSettings());
        compareJsonOutput(nodeInfo.getInfo(ThreadPoolInfo.class), readNodeInfo.getInfo(ThreadPoolInfo.class));
        compareJsonOutput(nodeInfo.getInfo(TransportInfo.class), readNodeInfo.getInfo(TransportInfo.class));
        compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode());
        compareJsonOutput(nodeInfo.getInfo(OsInfo.class), readNodeInfo.getInfo(OsInfo.class));
        compareJsonOutput(nodeInfo.getInfo(PluginsAndModules.class), readNodeInfo.getInfo(PluginsAndModules.class));
        compareJsonOutput(nodeInfo.getInfo(IngestInfo.class), readNodeInfo.getInfo(IngestInfo.class));
    }

    /**
     * Asserts the two ToXContent objects render to identical JSON.
     * A null first argument requires the second to be null as well.
     */
    private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException {
        if (param1 == null) {
            assertNull(param2);
            return;
        }
        ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        XContentBuilder param1Builder = jsonBuilder();
        param1Builder.startObject();
        param1.toXContent(param1Builder, params);
        param1Builder.endObject();
        XContentBuilder param2Builder = jsonBuilder();
        param2Builder.startObject();
        param2.toXContent(param2Builder, params);
        param2Builder.endObject();
        assertThat(Strings.toString(param1Builder), equalTo(Strings.toString(param2Builder)));
    }

    /**
     * Builds a NodeInfo with each optional section randomly present or null,
     * so the streaming code's null-handling is exercised across test runs.
     */
    private static NodeInfo createNodeInfo() {
        Build build = Build.CURRENT;
        DiscoveryNode node = new DiscoveryNode(
            "test_node",
            buildNewFakeTransportAddress(),
            emptyMap(),
            emptySet(),
            VersionUtils.randomVersion(random())
        );
        Settings settings = randomBoolean() ? null : Settings.builder().put("test", "setting").build();
        OsInfo osInfo = null;
        if (randomBoolean()) {
            int availableProcessors = randomIntBetween(1, 64);
            // allocated processors can never exceed the available ones
            int allocatedProcessors = randomIntBetween(1, availableProcessors);
            long refreshInterval = randomBoolean() ? -1 : randomNonNegativeLong();
            String name = randomAlphaOfLengthBetween(3, 10);
            String arch = randomAlphaOfLengthBetween(3, 10);
            String version = randomAlphaOfLengthBetween(3, 10);
            // NOTE(review): `name` is passed twice — presumably for the os name and pretty name
            // parameters of OsInfo; confirm against the OsInfo constructor signature.
            osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, name, arch, version);
        }
        ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomNonNegativeLong());
        JvmInfo jvm = randomBoolean() ? null : JvmInfo.jvmInfo();
        ThreadPoolInfo threadPoolInfo = null;
        if (randomBoolean()) {
            int numThreadPools = randomIntBetween(1, 10);
            List<ThreadPool.Info> threadPoolInfos = new ArrayList<>(numThreadPools);
            for (int i = 0; i < numThreadPools; i++) {
                threadPoolInfos.add(
                    new ThreadPool.Info(randomAlphaOfLengthBetween(3, 10), randomFrom(ThreadPool.ThreadPoolType.values()), randomInt())
                );
            }
            threadPoolInfo = new ThreadPoolInfo(threadPoolInfos);
        }
        Map<String, BoundTransportAddress> profileAddresses = new HashMap<>();
        BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(
            new TransportAddress[] { buildNewFakeTransportAddress() },
            buildNewFakeTransportAddress()
        );
        profileAddresses.put("test_address", dummyBoundTransportAddress);
        TransportInfo transport = randomBoolean() ? null : new TransportInfo(dummyBoundTransportAddress, profileAddresses);
        HttpInfo httpInfo = randomBoolean() ? null : new HttpInfo(dummyBoundTransportAddress, randomNonNegativeLong());
        PluginsAndModules pluginsAndModules = null;
        if (randomBoolean()) {
            int numPlugins = randomIntBetween(0, 5);
            List<PluginInfo> plugins = new ArrayList<>();
            for (int i = 0; i < numPlugins; i++) {
                plugins.add(
                    new PluginInfo(
                        randomAlphaOfLengthBetween(3, 10),
                        randomAlphaOfLengthBetween(3, 10),
                        randomAlphaOfLengthBetween(3, 10),
                        VersionUtils.randomVersion(random()),
                        "1.8",
                        randomAlphaOfLengthBetween(3, 10),
                        Collections.emptyList(),
                        randomBoolean(),
                        randomFrom(PluginType.values()),
                        randomAlphaOfLengthBetween(3, 10),
                        randomBoolean()
                    )
                );
            }
            int numModules = randomIntBetween(0, 5);
            List<PluginInfo> modules = new ArrayList<>();
            for (int i = 0; i < numModules; i++) {
                modules.add(
                    new PluginInfo(
                        randomAlphaOfLengthBetween(3, 10),
                        randomAlphaOfLengthBetween(3, 10),
                        randomAlphaOfLengthBetween(3, 10),
                        VersionUtils.randomVersion(random()),
                        "1.8",
                        randomAlphaOfLengthBetween(3, 10),
                        Collections.emptyList(),
                        randomBoolean(),
                        randomFrom(PluginType.values()),
                        randomAlphaOfLengthBetween(3, 10),
                        randomBoolean()
                    )
                );
            }
            pluginsAndModules = new PluginsAndModules(plugins, modules);
        }
        IngestInfo ingestInfo = null;
        if (randomBoolean()) {
            int numProcessors = randomIntBetween(0, 5);
            List<ProcessorInfo> processors = new ArrayList<>(numProcessors);
            for (int i = 0; i < numProcessors; i++) {
                processors.add(new ProcessorInfo(randomAlphaOfLengthBetween(3, 10)));
            }
            ingestInfo = new IngestInfo(processors);
        }
        AggregationInfo aggregationInfo = null;
        if (randomBoolean()) {
            AggregationUsageService.Builder builder = new AggregationUsageService.Builder();
            int numOfAggs = randomIntBetween(0, 10);
            for (int i = 0; i < numOfAggs; i++) {
                String aggName = randomAlphaOfLength(10);
                try {
                    if (randomBoolean()) {
                        builder.registerAggregationUsage(aggName);
                    } else {
                        int numOfTypes = randomIntBetween(1, 10);
                        for (int j = 0; j < numOfTypes; j++) {
                            builder.registerAggregationUsage(aggName, randomAlphaOfLength(10));
                        }
                    }
                } catch (IllegalArgumentException ex) {
                    // Ignore duplicate strings
                }
            }
            aggregationInfo = builder.build().info();
        }
        ByteSizeValue indexingBuffer = null;
        if (randomBoolean()) {
            // pick a random long that sometimes exceeds an int:
            indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L << 40) - 1));
        }
        return new NodeInfo(
            VersionUtils.randomVersion(random()),
            build,
            node,
            settings,
            osInfo,
            process,
            jvm,
            threadPoolInfo,
            transport,
            httpInfo,
            pluginsAndModules,
            ingestInfo,
            aggregationInfo,
            indexingBuffer
        );
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.core.http.rest;

import com.azure.core.http.HttpHeaders;
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpRequest;
import com.azure.core.util.paging.PageRetriever;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.publisher.BaseSubscriber;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.List;
import java.util.UUID;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Code snippets for {@link PagedFlux}
 *
 * <p>NOTE: the code between the {@code // BEGIN:} / {@code // END:} markers is
 * extracted verbatim into published Javadoc, so its content (including comments)
 * must not be edited casually.
 */
public final class PagedFluxJavaDocCodeSnippets {

    /**
     * Code snippets for showing usage of {@link PagedFlux} in class docs
     */
    public void classDocSnippet() {
        PagedFlux<Integer> pagedFlux = createAnInstance();

        // BEGIN: com.azure.core.http.rest.pagedflux.items
        // Subscribe to process one item at a time
        pagedFlux
            .log()
            .subscribe(item -> System.out.println("Processing item with value: " + item),
                error -> System.err.println("An error occurred: " + error),
                () -> System.out.println("Processing complete."));
        // END: com.azure.core.http.rest.pagedflux.items

        // BEGIN: com.azure.core.http.rest.pagedflux.pages
        // Subscribe to process one page at a time from the beginning
        pagedFlux
            .byPage()
            .log()
            .subscribe(page -> System.out.printf("Processing page containing item values: %s%n",
                page.getElements().stream().map(String::valueOf).collect(Collectors.joining(", "))),
                error -> System.err.println("An error occurred: " + error),
                () -> System.out.println("Processing complete."));
        // END: com.azure.core.http.rest.pagedflux.pages

        // BEGIN: com.azure.core.http.rest.pagedflux.pagesWithContinuationToken
        // Subscribe to process one page at a time starting from a page associated with
        // a continuation token
        String continuationToken = getContinuationToken();
        pagedFlux
            .byPage(continuationToken)
            .log()
            .doOnSubscribe(ignored -> System.out.println(
                "Subscribed to paged flux processing pages starting from: " + continuationToken))
            .subscribe(page -> System.out.printf("Processing page containing item values: %s%n",
                page.getElements().stream().map(String::valueOf).collect(Collectors.joining(", "))),
                error -> System.err.println("An error occurred: " + error),
                () -> System.out.println("Processing complete."));
        // END: com.azure.core.http.rest.pagedflux.pagesWithContinuationToken
    }

    /**
     * Code snippets for creating an instance of {@link PagedFlux}
     *
     * @return An instance of {@link PagedFlux}
     */
    public PagedFlux<Integer> createAnInstance() {
        // BEGIN: com.azure.core.http.rest.pagedflux.instantiation
        // A supplier that fetches the first page of data from source/service
        Supplier<Mono<PagedResponse<Integer>>> firstPageRetriever = () -> getFirstPage();

        // A function that fetches subsequent pages of data from source/service given a continuation token
        Function<String, Mono<PagedResponse<Integer>>> nextPageRetriever =
            continuationToken -> getNextPage(continuationToken);

        PagedFlux<Integer> pagedFlux = new PagedFlux<>(firstPageRetriever,
            nextPageRetriever);
        // END: com.azure.core.http.rest.pagedflux.instantiation

        // BEGIN: com.azure.core.http.rest.pagedflux.singlepage.instantiation
        // A supplier that fetches the first page of data from source/service
        Supplier<Mono<PagedResponse<Integer>>> firstPageRetrieverFunction = () -> getFirstPage();

        PagedFlux<Integer> pagedFluxInstance = new PagedFlux<>(firstPageRetrieverFunction,
            nextPageRetriever);
        // END: com.azure.core.http.rest.pagedflux.singlepage.instantiation
        // NOTE(review): the "singlepage" snippet above passes a nextPageRetriever to the
        // constructor; confirm this matches the intended single-page example (a single-page
        // PagedFlux would normally use the one-argument constructor).
        return pagedFlux;
    }

    /**
     * Code snippets for using {@link PagedFlux#byPage()} and {@link PagedFlux#byPage(String)}
     */
    public void byPageSnippet() {
        PagedFlux<Integer> pagedFlux = createAnInstance();

        // BEGIN: com.azure.core.http.rest.pagedflux.bypage
        // Start processing the results from first page
        pagedFlux.byPage()
            .log()
            .doOnSubscribe(ignoredVal -> System.out.println(
                "Subscribed to paged flux processing pages starting from first page"))
            .subscribe(page -> System.out.printf("Processing page containing item values: %s%n",
                page.getElements().stream().map(String::valueOf).collect(Collectors.joining(", "))),
                error -> System.err.println("An error occurred: " + error),
                () -> System.out.println("Processing complete."));
        // END: com.azure.core.http.rest.pagedflux.bypage

        // BEGIN: com.azure.core.http.rest.pagedflux.bypage#String
        // Start processing the results from a page associated with the continuation token
        String continuationToken = getContinuationToken();
        pagedFlux.byPage(continuationToken)
            .log()
            .doOnSubscribe(ignoredVal -> System.out.println(
                "Subscribed to paged flux processing page starting from " + continuationToken))
            .subscribe(page -> System.out.printf("Processing page containing item values: %s%n",
                page.getElements().stream().map(String::valueOf).collect(Collectors.joining(", "))),
                error -> System.err.println("An error occurred: " + error),
                () -> System.out.println("Processing complete."));
        // END: com.azure.core.http.rest.pagedflux.bypage#String
    }

    /**
     * Code snippets for using {@link PagedFlux#subscribe(CoreSubscriber)}
     */
    public void byTSnippet() {
        PagedFlux<Integer> pagedFlux = createAnInstance();

        // BEGIN: com.azure.core.http.rest.pagedflux.subscribe
        pagedFlux.subscribe(new BaseSubscriber<Integer>() {
            @Override
            protected void hookOnSubscribe(Subscription subscription) {
                System.out.println("Subscribed to paged flux processing items");
                super.hookOnSubscribe(subscription);
            }

            @Override
            protected void hookOnNext(Integer value) {
                System.out.println("Processing item with value: " + value);
            }

            @Override
            protected void hookOnComplete() {
                System.out.println("Processing complete.");
            }
        });
        // END: com.azure.core.http.rest.pagedflux.subscribe
    }

    /**
     * Code snippets for using {@link PagedFlux#create(Supplier)} to create a PagedFlux by applying decoration on
     * another PagedFlux.
     */
    public void pagedFluxFromPagedFlux() {
        // BEGIN: com.azure.core.http.rest.pagedflux.create.decoration

        // Transform a PagedFlux with Integer items to PagedFlux of String items.
        final PagedFlux<Integer> intPagedFlux = createAnInstance();

        // PagedResponse<Integer> to PagedResponse<String> mapper
        final Function<PagedResponse<Integer>, PagedResponse<String>> responseMapper
            = intResponse -> new PagedResponseBase<Void, String>(intResponse.getRequest(),
            intResponse.getStatusCode(),
            intResponse.getHeaders(),
            intResponse.getValue()
                .stream()
                .map(intValue -> Integer.toString(intValue)).collect(Collectors.toList()),
            intResponse.getContinuationToken(),
            null);

        final Supplier<PageRetriever<String, PagedResponse<String>>> provider = () ->
            (continuationToken, pageSize) -> {
                Flux<PagedResponse<Integer>> flux = (continuationToken == null)
                    ? intPagedFlux.byPage()
                    : intPagedFlux.byPage(continuationToken);
                return flux.map(responseMapper);
            };
        PagedFlux<String> strPagedFlux = PagedFlux.create(provider);

        // Create a PagedFlux from a PagedFlux with all exceptions mapped to a specific exception.
        final PagedFlux<Integer> pagedFlux = createAnInstance();
        final Supplier<PageRetriever<String, PagedResponse<Integer>>> eprovider = () ->
            (continuationToken, pageSize) -> {
                Flux<PagedResponse<Integer>> flux = (continuationToken == null)
                    ? pagedFlux.byPage()
                    : pagedFlux.byPage(continuationToken);
                return flux.onErrorMap(PaginationException::new);
            };
        final PagedFlux<Integer> exceptionMappedPagedFlux = PagedFlux.create(eprovider);
        // END: com.azure.core.http.rest.pagedflux.create.decoration
    }

    /**
     * Implementation not provided
     *
     * @return A continuation token
     */
    private String getContinuationToken() {
        return UUID.randomUUID().toString();
    }

    /**
     * Retrieves the next page from a paged API.
     *
     * @param continuationToken Token to fetch the next page
     * @return A {@link Mono} of {@link PagedResponse} containing items of type {@code Integer}
     */
    private Mono<PagedResponse<Integer>> getNextPage(String continuationToken) {
        return getPage(continuationToken);
    }

    /**
     * Retrieves the initial page from a paged API.
     *
     * @return A {@link Mono} of {@link PagedResponse} containing items of type {@code Integer}
     */
    private Mono<PagedResponse<Integer>> getFirstPage() {
        return getPage(null);
    }

    /**
     * Retrieves a page from a paged API.
     *
     * @param continuationToken Token to fetch the next page, if {@code null} the first page is retrieved.
     * @return A {@link Mono} of {@link PagedResponse} containing items of type {@code Integer}
     */
    private Mono<PagedResponse<Integer>> getPage(String continuationToken) {
        // Given this isn't calling an actual API we will arbitrarily generate a continuation token or end paging.
        boolean lastPage = Math.random() > 0.5;

        // If it is the last page there should be no additional continuation tokens returned.
        String nextContinuationToken = lastPage ? null : UUID.randomUUID().toString();

        // Arbitrarily begin the next page of integers.
        int elementCount = (int) Math.ceil(Math.random() * 15);
        List<Integer> elements = IntStream.range(elementCount, elementCount + elementCount)
            .map(val -> (int) (Math.random() * val))
            .boxed()
            .collect(Collectors.toList());

        // This is a rough approximation of a service response.
        return Mono.just(new PagedResponseBase<Void, Integer>(new HttpRequest(HttpMethod.GET, "https://requestUrl.com"),
            200, new HttpHeaders(), elements, nextContinuationToken, null));
    }

    /** Example exception type used by the error-mapping snippet above. */
    static class PaginationException extends RuntimeException {
        PaginationException(Throwable ex) {
            super(ex);
        }
    }
}
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.storage; import com.orientechnologies.common.concur.resource.OCloseable; import com.orientechnologies.common.concur.resource.OSharedContainerImpl; import com.orientechnologies.common.concur.resource.OSharedResource; import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal; import com.orientechnologies.common.exception.OException; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.config.OStorageConfiguration; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.record.OCurrentStorageComponentsFactory; import com.orientechnologies.orient.core.exception.OSecurityException; import com.orientechnologies.orient.core.metadata.OMetadata; import com.orientechnologies.orient.core.metadata.OMetadataInternal; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.security.OSecurityShared; import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper; import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage; import java.util.Set; import java.util.concurrent.Callable; import 
java.util.concurrent.atomic.AtomicLong;

/**
 * Common base class for storage implementations. Holds the storage identity
 * (name, URL, mode), the adaptive shared/exclusive lock guarding storage-wide
 * operations, and bookkeeping such as status and a monotonic version counter.
 * <p>
 * Thread-safety: mutable state is either {@code volatile} or guarded by
 * {@link #lock}; subclasses are expected to follow the same discipline.
 */
public abstract class OStorageAbstract extends OSharedContainerImpl implements OStorage {
  protected final String url;    // raw URL the storage was opened with
  protected final String mode;   // open mode (semantics defined by subclasses)
  protected final OSharedResourceAdaptiveExternal lock;  // storage-wide shared/exclusive lock, also tracks "users"
  protected volatile OStorageConfiguration configuration;
  protected volatile OCurrentStorageComponentsFactory componentsFactory;
  protected String name;                                 // simple storage name (path prefix stripped)
  protected AtomicLong version = new AtomicLong();       // serial version, incremented elsewhere
  protected volatile STATUS status = STATUS.CLOSED;      // lifecycle state; starts closed

  /**
   * Creates the storage shell.
   *
   * @param name    storage name, possibly a path; everything up to the last '/' is stripped
   * @param iURL    URL the storage is reachable at
   * @param mode    open mode string
   * @param timeout lock-acquisition timeout for the internal adaptive lock
   * @throws IllegalArgumentException if the name contains a ','
   */
  public OStorageAbstract(final String name, final String iURL, final String mode, final int timeout) {
    // Keep only the last path segment as the storage name.
    if (OStringSerializerHelper.contains(name, '/'))
      this.name = name.substring(name.lastIndexOf("/") + 1);
    else
      this.name = name;

    // NOTE(review): this validates the ORIGINAL parameter (including any
    // stripped path portion), not the cleaned this.name — confirm intended.
    if (OStringSerializerHelper.contains(name, ','))
      throw new IllegalArgumentException("Invalid character in storage name: " + this.name);

    url = iURL;
    this.mode = mode;

    lock = new OSharedResourceAdaptiveExternal(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), timeout,
        true);
  }

  public abstract OCluster getClusterByName(final String iClusterName);

  /** This storage is not a wrapper, so the underlying storage is itself. */
  public OStorage getUnderlying() {
    return this;
  }

  public OStorageConfiguration getConfiguration() {
    return configuration;
  }

  public boolean isClosed() {
    return status == STATUS.CLOSED;
  }

  /** A record position is valid when it exists and is not a tombstone (deleted marker). */
  public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
    return ppos != null && !ppos.recordVersion.isTombstone();
  }

  public String getName() {
    return name;
  }

  public String getURL() {
    return url;
  }

  /** Convenience overload: non-forced close, not part of a delete. */
  public void close() {
    close(false, false);
  }

  /**
   * Releases and closes all shared resources under the exclusive lock.
   * NOTE(review): does not change {@link #status}; presumably subclasses
   * overriding this are responsible for the state transition — confirm.
   *
   * @param iForce   unused here; honored by subclasses
   * @param onDelete passed to each {@link OCloseable} so it can clean up differently on delete
   */
  public void close(final boolean iForce, boolean onDelete) {
    lock.acquireExclusiveLock();
    try {
      for (Object resource : sharedResources.values()) {
        if (resource instanceof OSharedResource)
          ((OSharedResource) resource).releaseExclusiveLock();
        if (resource instanceof OCloseable)
          ((OCloseable) resource).close(onDelete);
      }
      sharedResources.clear();
    } finally {
      lock.releaseExclusiveLock();
    }
  }

  /**
   * Returns current storage's version as serial.
   */
  public long getVersion() {
    return version.get();
  }

  /** Resolves the cluster name to an id and delegates to the id-based overload. */
  public boolean dropCluster(final String iClusterName, final boolean iTruncate) {
    return dropCluster(getClusterIdByName(iClusterName), iTruncate);
  }

  // User counting is delegated to the adaptive lock.
  public int getUsers() {
    return lock.getUsers();
  }

  public int addUser() {
    return lock.addUser();
  }

  public int removeUser() {
    return lock.removeUser();
  }

  public OSharedResourceAdaptiveExternal getLock() {
    return lock;
  }

  /** Total record count across all clusters, excluding tombstones. */
  public long countRecords() {
    long tot = 0;

    for (OCluster c : getClusterInstances())
      if (c != null)
        tot += c.getEntries() - c.getTombstonesCount();

    return tot;
  }

  /**
   * Runs the callable while holding the storage lock.
   *
   * @param iCallable      work to execute under the lock
   * @param iExclusiveLock true for the exclusive lock, false for the shared lock
   * @return the callable's result
   * @throws OException wrapping any checked exception; runtime exceptions propagate unchanged
   */
  public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) {
    if (iExclusiveLock)
      lock.acquireExclusiveLock();
    else
      lock.acquireSharedLock();
    try {
      return iCallable.call();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new OException("Error on nested call in lock", e);
    } finally {
      if (iExclusiveLock)
        lock.releaseExclusiveLock();
      else
        lock.releaseSharedLock();
    }
  }

  @Override
  public String toString() {
    return url != null ? url : "?";
  }

  public STATUS getStatus() {
    return status;
  }

  /**
   * Rejects operations on clusters whose classes use record-level security
   * (ORestricted), since e.g. truncation would bypass per-record permissions.
   *
   * @throws OSecurityException if any class relying on the cluster extends the restricted class
   */
  public void checkForClusterPermissions(final String iClusterName) {
    // CHECK FOR ORESTRICTED
    OMetadata metaData = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata();
    if (metaData != null) {
      final Set<OClass> classes = ((OMetadataInternal) metaData).getImmutableSchemaSnapshot()
          .getClassesRelyOnCluster(iClusterName);
      for (OClass c : classes) {
        if (c.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME))
          throw new OSecurityException("Class " + c.getName()
              + " cannot be truncated because has record level security enabled (extends "
              + OSecurityShared.RESTRICTED_CLASSNAME + ")");
      }
    }
  }

  @Override
  public boolean isDistributed() {
    return false;
  }

  @Override
  public boolean isAssigningClusterIds() {
    return true;
  }

  @Override
  public OCurrentStorageComponentsFactory getComponentsFactory() {
    return componentsFactory;
  }

  @Override
  public long getLastOperationId() {
    return 0;
  }

  /**
   * Decides whether a close should actually proceed.
   * NOTE(review): decrements the user count as a side effect (under the shared
   * lock, which does not serialize concurrent callers of this method).
   *
   * @param force when true, close regardless of remaining users
   * @return true if the storage should be closed now
   */
  protected boolean checkForClose(final boolean force) {
    // Fast path without taking the lock.
    if (status == STATUS.CLOSED)
      return false;

    lock.acquireSharedLock();
    try {
      // Re-check after acquiring the lock.
      if (status == STATUS.CLOSED)
        return false;

      final int remainingUsers = getUsers() > 0 ? removeUser() : 0;

      // Paginated storages are never auto-closed on last user; only a forced close applies.
      return force || (!(this instanceof OAbstractPaginatedStorage) && remainingUsers == 0);
    } finally {
      lock.releaseSharedLock();
    }
  }
}
/*
 * Copyright 2012 pmp-android development team
 * Project: PMP
 * Project-Site: https://github.com/stachch/Privacy_Management_Platform
 *
 * ---------------------------------------------------------------------
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.unistuttgart.ipvs.pmp.gui.app;

import android.app.Activity;
import android.app.LocalActivityManager;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.TabHost;
import android.widget.TabHost.TabSpec;
import android.widget.Toast;
import de.unistuttgart.ipvs.pmp.R;
import de.unistuttgart.ipvs.pmp.gui.util.ActivityKillReceiver;
import de.unistuttgart.ipvs.pmp.gui.util.GUIConstants;
import de.unistuttgart.ipvs.pmp.gui.util.GUITools;
import de.unistuttgart.ipvs.pmp.gui.util.model.ModelProxy;
import de.unistuttgart.ipvs.pmp.model.element.app.IApp;
import de.unistuttgart.ipvs.pmp.shared.gui.view.BasicTitleView;


/**
 * The {@link ActivityApp} displays an App registered at PMP.
 * A tab is available for each of Details, Service Features and Presets.
 *
 * @author Jakob Jarosch
 */
public class ActivityApp extends Activity {

    /**
     * The reference to the real App in the model.
     */
    private IApp app;

    /**
     * Activity manager is used to setup the {@link TabHost}.
     */
    @SuppressWarnings("deprecation")
    private LocalActivityManager lam;

    /**
     * {@link TabHost} for the displayed tabs in the GUI.
     */
    private TabHost mTabHost;

    /**
     * Tab tags
     */
    private static final String TAB_DETAIL = "tab_detail";
    private static final String TAB_SF = "tab_sf";
    private static final String TAB_PRESET = "tab_preset";

    /**
     * The {@link ActivityKillReceiver}.
     */
    private ActivityKillReceiver akr;


    @SuppressWarnings("deprecation")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_app);

        // Must run before setupTabs()/title binding: resolves this.app from the intent
        // and decides which tab to show initially.
        String activeTab = checkExtendedIntentActions();

        this.lam = new LocalActivityManager(this, true);
        this.lam.dispatchCreate(savedInstanceState);

        this.mTabHost = (TabHost) findViewById(android.R.id.tabhost);
        this.mTabHost.setup(this.lam);

        setupTabs();

        // Bind the App's name and icon into the title bar.
        BasicTitleView title = (BasicTitleView) findViewById(R.id.activity_title);
        title.setTitle(this.app.getName());
        title.setIcon(this.app.getIcon());

        this.mTabHost.setCurrentTabByTag(activeTab);

        /* Initiating the ActivityKillReceiver. */
        this.akr = new ActivityKillReceiver(this);
    }


    @SuppressWarnings("deprecation")
    @Override
    protected void onResume() {
        super.onResume();

        // Forward the lifecycle event to the embedded tab activities.
        this.lam.dispatchResume();
    }


    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.app_menu, menu);
        return true;
    }


    @SuppressWarnings("deprecation")
    @Override
    protected void onPause() {
        super.onPause();

        this.lam.dispatchPause(isFinishing());
    }


    @SuppressWarnings("deprecation")
    @Override
    protected void onDestroy() {
        super.onDestroy();

        this.lam.dispatchDestroy(isFinishing());

        unregisterReceiver(this.akr);
    }


    /**
     * Creates the three tabs (Details, Service Features, Presets); each tab hosts
     * its own inner Activity which receives the App identifier via intent extras.
     */
    private void setupTabs() {
        /* Details Tab */
        TabSpec details = this.mTabHost.newTabSpec(TAB_DETAIL);
        details.setIndicator(getResources().getString(R.string.details));

        // Create an Intent to start the inner activity
        Intent intentDetails = new Intent(this, TabDetails.class);
        intentDetails.putExtra(GUIConstants.APP_IDENTIFIER, this.app.getIdentifier());
        details.setContent(intentDetails);
        this.mTabHost.addTab(details);

        // Change the preferred size of the Tab-header
        View tab1 = this.mTabHost.getTabWidget().getChildAt(0);
        LayoutParams lp = tab1.getLayoutParams();
        lp.width = LayoutParams.WRAP_CONTENT;
        tab1.setLayoutParams(lp);

        /* Service Features Tab */
        TabSpec sfs = this.mTabHost.newTabSpec(TAB_SF);
        sfs.setIndicator(getResources().getString(R.string.service_features));

        // Create an Intent to start the inner activity; the SF tab additionally
        // receives the requested action and required service features pass-through.
        Intent intentSfs = new Intent(this, TabServiceFeatures.class);
        intentSfs.putExtra(GUIConstants.APP_IDENTIFIER, this.app.getIdentifier());
        intentSfs.putExtra(GUIConstants.ACTIVITY_ACTION, getIntent().getStringExtra(GUIConstants.ACTIVITY_ACTION));
        intentSfs.putExtra(GUIConstants.REQUIRED_SERVICE_FEATURE,
                getIntent().getStringArrayExtra(GUIConstants.REQUIRED_SERVICE_FEATURE));
        sfs.setContent(intentSfs);
        this.mTabHost.addTab(sfs);

        // Change the preferred size of the Tab-header
        View tab2 = this.mTabHost.getTabWidget().getChildAt(1);
        lp = tab2.getLayoutParams();
        lp.width = LayoutParams.WRAP_CONTENT;
        tab2.setLayoutParams(lp);

        /* Presets Tab */
        TabSpec presets = this.mTabHost.newTabSpec(TAB_PRESET);
        presets.setIndicator(getResources().getString(R.string.presets));

        // Create an Intent to start the inner activity
        Intent intentPresets = new Intent(this, TabPresets.class);
        intentPresets.putExtra(GUIConstants.APP_IDENTIFIER, this.app.getIdentifier());
        presets.setContent(intentPresets);
        this.mTabHost.addTab(presets);

        // Change the preferred size of the Tab-header
        View tab3 = this.mTabHost.getTabWidget().getChildAt(2);
        lp = tab3.getLayoutParams();
        lp.width = LayoutParams.WRAP_CONTENT;
        tab3.setLayoutParams(lp);
    }


    /**
     * Checks if the Activity has been started with extended parameters like requested service features.
     * Also resolves {@link #app} from the launching intent (side effect relied on by onCreate).
     *
     * @return the tag of the tab to show initially
     */
    private String checkExtendedIntentActions() {
        this.app = GUITools.getIAppFromIntent(getIntent());

        // Jump straight to the Service Features tab when a SF change was requested.
        if (GUITools.getIntentAction(getIntent()) != null
                && GUITools.getIntentAction(getIntent()).equals(GUIConstants.CHANGE_SERVICEFEATURE)) {
            return TAB_SF;
        }

        return TAB_DETAIL;
    }


    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menu_app_open:
                // Open the Apps Main Activity
                String appPackageName = ActivityApp.this.app.getIdentifier();
                Intent intent = getPackageManager().getLaunchIntentForPackage(appPackageName);
                if (intent != null) {
                    startActivity(intent);
                } else {
                    // The App has no launchable activity (or is not installed anymore).
                    Toast.makeText(ActivityApp.this, getString(R.string.app_not_opened), Toast.LENGTH_LONG).show();
                }
                break;

            case R.id.menu_app_unregister:
                ModelProxy.get().unregisterApp(ActivityApp.this.app.getIdentifier());
                Toast.makeText(ActivityApp.this, getString(R.string.app_successfully_unregistered), Toast.LENGTH_LONG)
                        .show();
                ActivityApp.this.finish();
                break;

            default:
                break;
        }
        return super.onOptionsItemSelected(item);
    }
}
/*
 Diffusion -- a class within the Cellular Automaton Explorer.
 Copyright (C) 2005  David B. Bahr (http://academic.regis.edu/dbahr/)

 This program is free software; you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published by the Free
 Software Foundation; either version 2 of the License, or (at your option)
 any later version.

 This program is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 more details.

 You should have received a copy of the GNU General Public License along
 with this program; if not, write to the Free Software Foundation, Inc., 51
 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package cellularAutomata.rules;

import java.util.Random;

import cellularAutomata.rules.templates.IntegerMargolusTemplate;
import cellularAutomata.rules.util.RuleFolderNames;
import cellularAutomata.util.math.RandomSingleton;

/**
 * Rules for diffusion which obey the heat equation dp/dt = del^2 p. Uses the
 * Margolus neighborhood but could be rewritten as a lattice gas with vector
 * states.
 * 
 * @author David Bahr
 */
public class Diffusion extends IntegerMargolusTemplate
{
	// a display name for this class
	private static final String RULE_NAME = "Diffusion";

	// All 24 permutations of the indices {0, 1, 2, 3} in lexicographic order.
	// Row k reproduces branch k of the original hand-unrolled 24-way if/else
	// chain: the incoming values (NW, NE, SE, SW) are scattered to positions
	// row[0]..row[3] of the new block. Keeping lexicographic order preserves
	// the exact per-seed behavior of the original code.
	private static final int[][] BLOCK_PERMUTATIONS = {
		{0, 1, 2, 3}, {0, 1, 3, 2}, {0, 2, 1, 3}, {0, 2, 3, 1},
		{0, 3, 1, 2}, {0, 3, 2, 1}, {1, 0, 2, 3}, {1, 0, 3, 2},
		{1, 2, 0, 3}, {1, 2, 3, 0}, {1, 3, 0, 2}, {1, 3, 2, 0},
		{2, 0, 1, 3}, {2, 0, 3, 1}, {2, 1, 0, 3}, {2, 1, 3, 0},
		{2, 3, 0, 1}, {2, 3, 1, 0}, {3, 0, 1, 2}, {3, 0, 2, 1},
		{3, 1, 0, 2}, {3, 1, 2, 0}, {3, 2, 0, 1}, {3, 2, 1, 0}};

	// used to randomly choose a rearrangement
	private static Random random = RandomSingleton.getInstance();

	// a description of property choices that give the best results for this
	// rule (e.g., which lattice, how many states, etc.)
	// (typo fix: "mutiple" -> "multiple")
	private static final String BEST_RESULTS = "<html> <body><b>"
		+ RULE_NAME
		+ ".</b>"
		+ "<p> "
		+ "<b>For best results</b>, try a 50 by 50 or larger lattice with a single seed "
		+ "(particle), "
		+ "and observe its random walk. Then draw a tight ball of cells and observe how they "
		+ "diffuse uniformly throughout the lattice. Then try multiple states, as if there are "
		+ "multiple gasses; by drawing a ball of cells for each state, the gasses will diffuse "
		+ "and commingle." + leftClickInstructions + rightClickInstructions
		+ "</body></html>";

	// a tooltip description for this class (constant for all instances, hence
	// static final)
	private static final String TOOLTIP = "<html> <body><b>"
		+ RULE_NAME
		+ ".</b> Models the physics of diffusion by letting each cell take a random walk."
		+ "</body></html>";

	/**
	 * Create the Diffusion rule using the given cellular automaton properties.
	 * <p>
	 * When calling the parent constructor, the minimalOrLazyInitialization
	 * parameter must be included as shown. The boolean is intended to indicate
	 * when the constructor should build a rule with as small a footprint as
	 * possible. In order to load rules by reflection, the application must
	 * query this class for information like the display name, tooltip
	 * description, etc. At these times it makes no sense to build the complete
	 * rule which may have a large footprint in memory.
	 * <p>
	 * It is recommended that the constructor and instance variables do not
	 * initialize any memory intensive variables and that variables be
	 * initialized only when first needed (lazy initialization). Or all
	 * initializations in the constructor may be placed in an <code>if</code>
	 * statement.
	 * 
	 * <pre>
	 * if(!minimalOrLazyInitialization)
	 * {
	 *     ...initialize
	 * }
	 * </pre>
	 * 
	 * @param minimalOrLazyInitialization
	 *            When true, the constructor instantiates an object with as
	 *            small a footprint as possible. When false, the rule is fully
	 *            constructed. This variable should be passed to the super
	 *            constructor <code>super(minimalOrLazyInitialization);</code>,
	 *            but if uncertain, you may safely ignore this variable.
	 */
	public Diffusion(boolean minimalOrLazyInitialization)
	{
		super(minimalOrLazyInitialization);
	}

	/**
	 * Takes particles (or lack of) at each quadrant of the Margolus block and
	 * rearranges them randomly. The particles are conserved: every incoming
	 * value appears exactly once in the returned block.
	 * 
	 * @param northWestCellValue
	 *            The current value of the northwest cell.
	 * @param northEastCellValue
	 *            The current value of the northeast cell.
	 * @param southEastCellValue
	 *            The current value of the southeast cell.
	 * @param southWestCellValue
	 *            The current value of the southwest cell.
	 * @return An array of values representing the randomly rearranged
	 *         particles.
	 */
	private int[] rearrangeTheBlock(int northWestCellValue,
		int northEastCellValue, int southEastCellValue, int southWestCellValue)
	{
		// pick one of the 24 possible rearrangements, each equally likely
		int[] destination = BLOCK_PERMUTATIONS[random
			.nextInt(BLOCK_PERMUTATIONS.length)];

		// scatter the four values to their destination slots
		int[] newBlock = new int[4];
		newBlock[destination[0]] = northWestCellValue;
		newBlock[destination[1]] = northEastCellValue;
		newBlock[destination[2]] = southEastCellValue;
		newBlock[destination[3]] = southWestCellValue;

		return newBlock;
	}

	/**
	 * A rule for diffusion. Takes the occupied sites of the Margolus
	 * neighborhood and rearranges them randomly.
	 * 
	 * @param northWestCellValue
	 *            The current value of the northwest cell.
	 * @param northEastCellValue
	 *            The current value of the northeast cell.
	 * @param southEastCellValue
	 *            The current value of the southeast cell.
	 * @param southWestCellValue
	 *            The current value of the southwest cell.
	 * @param numStates
	 *            The number of states. In other words, the returned state can
	 *            only have values between 0 and numStates - 1.
	 * @param generation
	 *            The current generation of the CA.
	 * @return An array of states that corresponds to the 2 by 2 Margolus block.
	 *         Array[0] is the northwest corner of the block, array[1] is the
	 *         northeast corner of the block, array[2] is the southeast corner
	 *         of the block, array[3] is the southwest corner of the block.
	 */
	protected int[] blockRule(int northWestCellValue, int northEastCellValue,
		int southEastCellValue, int southWestCellValue, int numStates,
		int generation)
	{
		// take the original block values (particles) and rearrange them
		// randomly within the block. Conserves the values (particles).
		// i.e., pick one of the 24 random rearrangements. And then we assign
		// the rearrangement to an array representing the new Margolus block.
		return rearrangeTheBlock(northWestCellValue, northEastCellValue,
			southEastCellValue, southWestCellValue);
	}

	/**
	 * A brief description (written in HTML) that describes what parameters will
	 * give best results for this rule (which lattice, how many states, etc).
	 * The description will be displayed on the properties panel. Using html
	 * permits line breaks, font colors, etcetera, as described in HTML
	 * resources. Regular line breaks will not work.
	 * <p>
	 * Recommend starting with the title of the rule followed by "For best
	 * results, ...". See Rule 102 for an example.
	 * 
	 * @return An HTML string describing how to get best results from this rule.
	 *         May be null.
	 */
	public String getBestResultsDescription()
	{
		return BEST_RESULTS;
	}

	/**
	 * When displayed for selection, the rule will be listed under specific
	 * folders specified here. The rule will always be listed under the "All
	 * rules" folder. And if the rule is contributed by a user and is placed in
	 * the userRules folder, then it will also be shown in a folder called "User
	 * rules". Any strings may be used; if the folder does not exist, then one
	 * will be created with the specified name. If the folder already exists,
	 * then that folder will be used.
	 * <p>
	 * By default, this returns null so that the rule is only placed in the
	 * default folder(s).
	 * <p>
	 * Child classes should override this method if they want the rule to appear
	 * in a specific folder. The "All rules" and "User rules" folder are
	 * automatic and do not need to be specified; they are always added.
	 * 
	 * @return A list of the folders in which rule will be displayed for
	 *         selection. May be null.
	 */
	public String[] getDisplayFolderNames()
	{
		String[] folders = {RuleFolderNames.PHYSICS_FOLDER,
			RuleFolderNames.PROBABILISTIC_FOLDER,
			RuleFolderNames.CLASSICS_FOLDER};

		return folders;
	}

	/**
	 * A brief one or two-word string describing the rule, appropriate for
	 * display in a drop-down list.
	 * 
	 * @return A string no longer than 15 characters.
	 */
	public String getDisplayName()
	{
		return RULE_NAME;
	}

	/**
	 * A brief description (written in HTML) that describes this rule. The
	 * description will be displayed as a tooltip. Using html permits line
	 * breaks, font colors, etcetera, as described in HTML resources. Regular
	 * line breaks will not work.
	 * 
	 * @return An HTML string describing this rule.
	 */
	public String getToolTipDescription()
	{
		return TOOLTIP;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.codehaus.groovy.classgen;

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;

import org.codehaus.groovy.ast.*;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.DeclarationExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.GStringExpression;
import org.codehaus.groovy.ast.expr.MapEntryExpression;
import org.codehaus.groovy.ast.expr.MethodCallExpression;
import org.codehaus.groovy.ast.expr.PropertyExpression;
import org.codehaus.groovy.ast.expr.TupleExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.CatchStatement;
import org.codehaus.groovy.ast.tools.GeneralUtils;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.syntax.Types;
import org.codehaus.groovy.transform.trait.Traits;

import static java.lang.reflect.Modifier.*;
import static org.objectweb.asm.Opcodes.*;

/**
 * ClassCompletionVerifier
 * <p>
 * AST visitor that runs a series of semantic checks over each class after it
 * has been resolved: illegal modifier combinations, final overriding, weaker
 * access privileges, abstract methods in concrete classes, trait self types,
 * duplicate/repetitive members, final-field reassignment, and misuse of
 * generics-carrying ClassNodes by transforms. Errors are reported through
 * {@code addError} on the associated {@link SourceUnit}.
 */
public class ClassCompletionVerifier extends ClassCodeVisitorSupport {

    // class currently being visited; saved/restored in visitClass to support nesting
    private ClassNode currentClass;
    private SourceUnit source;
    // tracks whether we are inside an (static) constructor, for final-field checks
    private boolean inConstructor = false;
    private boolean inStaticConstructor = false;

    public ClassCompletionVerifier(SourceUnit source) {
        this.source = source;
    }

    /** @return the class node currently being verified (may be null outside a visit) */
    public ClassNode getClassNode() {
        return currentClass;
    }

    /**
     * Runs all class-level checks. The extends/implements check runs first;
     * the remaining checks are skipped if the error collector already holds
     * errors (they could produce misleading follow-up diagnostics).
     */
    public void visitClass(ClassNode node) {
        ClassNode oldClass = currentClass;
        currentClass = node;
        checkImplementsAndExtends(node);
        if (source != null && !source.getErrorCollector().hasErrors()) {
            checkClassForIncorrectModifiers(node);
            checkInterfaceMethodVisibility(node);
            checkClassForOverwritingFinal(node);
            checkMethodsForIncorrectModifiers(node);
            checkMethodsForWeakerAccess(node);
            checkMethodsForOverridingFinal(node);
            checkNoAbstractMethodsNonabstractClass(node);
            checkClassExtendsAllSelfTypes(node);
            checkNoStaticMethodWithSameSignatureAsNonStatic(node);
            checkGenericsUsage(node, node.getUnresolvedInterfaces());
            checkGenericsUsage(node, node.getUnresolvedSuperClass());
        }
        super.visitClass(node);
        currentClass = oldClass;
    }

    /**
     * Reports a static method that clashes with a non-static method of the
     * same signature (including unimplemented abstract interface methods).
     * Trait-declared methods are exempted, since traits allow this pairing.
     */
    private void checkNoStaticMethodWithSameSignatureAsNonStatic(final ClassNode node) {
        Map<String, MethodNode> result = new HashMap<String, MethodNode>();
        // add in unimplemented abstract methods from the interfaces
        for (ClassNode iface : node.getInterfaces()) {
            Map<String, MethodNode> ifaceMethodsMap = iface.getDeclaredMethodsMap();
            for (String methSig : ifaceMethodsMap.keySet()) {
                if (!result.containsKey(methSig)) {
                    MethodNode methNode = ifaceMethodsMap.get(methSig);
                    result.put(methSig, methNode);
                }
            }
        }
        for (MethodNode methodNode : node.getMethods()) {
            MethodNode mn = result.get(methodNode.getTypeDescriptor());
            if (mn!=null && methodNode.isStatic() && !methodNode.isStaticConstructor()) {
                ClassNode declaringClass = mn.getDeclaringClass();
                ClassNode cn = declaringClass.getOuterClass();
                if (cn==null && declaringClass.isResolved()) {
                    // in case of a precompiled class, the outerclass is unknown
                    Class typeClass = declaringClass.getTypeClass();
                    typeClass = typeClass.getEnclosingClass();
                    if (typeClass!=null) {
                        cn = ClassHelper.make(typeClass);
                    }
                }
                if (cn==null || !Traits.isTrait(cn)) {
                    addError("Method '" + mn.getName() + "' is already defined in " + getDescription(node) +
                            ". You cannot have " +
                            "both a static and a non static method with the same signature", methodNode);
                }
            }
            result.put(methodNode.getTypeDescriptor(), methodNode);
        }
    }

    /** Interface methods must be public; flags private or protected declarations. */
    private void checkInterfaceMethodVisibility(ClassNode node) {
        if (!node.isInterface()) return;
        for (MethodNode method : node.getMethods()) {
            if (method.isPrivate()) {
                addError("Method '" + method.getName() + "' is private but should be public in " + getDescription(currentClass) + ".", method);
            } else if (method.isProtected()) {
                addError("Method '" + method.getName() + "' is protected but should be public in " + getDescription(currentClass) + ".", method);
            }
        }
    }

    /**
     * A non-abstract class must implement every inherited abstract method.
     * Distinguishes "not implemented at all" from "implemented with a
     * different return type" for a clearer error message.
     */
    private void checkNoAbstractMethodsNonabstractClass(ClassNode node) {
        if (isAbstract(node.getModifiers())) return;
        List<MethodNode> abstractMethods = node.getAbstractMethods();
        if (abstractMethods == null) return;
        for (MethodNode method : abstractMethods) {
            MethodNode sameArgsMethod = node.getMethod(method.getName(), method.getParameters());
            if (sameArgsMethod==null) {
                addError("Can't have an abstract method in a non-abstract class." +
                        " The " + getDescription(node) + " must be declared abstract or" +
                        " the " + getDescription(method) + " must be implemented.", node);
            } else {
                addError("Abstract "+getDescription(method)+" is not implemented but a " +
                        "method of the same name but different return type is defined: "+
                        (sameArgsMethod.isStatic()?"static ":"")+
                        getDescription(sameArgsMethod), method
                );
            }
        }
    }

    /**
     * For every implemented trait, verifies the class also satisfies the
     * trait's declared self types (implements interface self types, extends
     * class self types).
     */
    private void checkClassExtendsAllSelfTypes(ClassNode node) {
        int modifiers = node.getModifiers();
        if (!isInterface(modifiers)) {
            for (ClassNode anInterface : GeneralUtils.getInterfacesAndSuperInterfaces(node)) {
                if (Traits.isTrait(anInterface)) {
                    LinkedHashSet<ClassNode> selfTypes = new LinkedHashSet<ClassNode>();
                    for (ClassNode type : Traits.collectSelfTypes(anInterface, selfTypes, true, false)) {
                        if (type.isInterface() && !node.implementsInterface(type)) {
                            addError(getDescription(node)
                                    + " implements " + getDescription(anInterface)
                                    + " but does not implement self type " + getDescription(type),
                                    anInterface);
                        } else if (!type.isInterface() && !node.isDerivedFrom(type)) {
                            addError(getDescription(node)
                                    + " implements " + getDescription(anInterface)
                                    + " but does not extend self type " + getDescription(type),
                                    anInterface);
                        }
                    }
                }
            }
        }
    }

    private void checkClassForIncorrectModifiers(ClassNode node) {
        checkClassForAbstractAndFinal(node);
        checkClassForOtherModifiers(node);
    }

    /** abstract+final is always contradictory; interfaces get a tailored message. */
    private void checkClassForAbstractAndFinal(ClassNode node) {
        if (!isAbstract(node.getModifiers())) return;
        if (!isFinal(node.getModifiers())) return;
        if (node.isInterface()) {
            addError("The " + getDescription(node) + " must not be final. It is by definition abstract.", node);
        } else {
            addError("The " + getDescription(node) + " must not be both final and abstract.", node);
        }
    }

    /** Flags modifiers that are meaningless or illegal on a class declaration. */
    private void checkClassForOtherModifiers(ClassNode node) {
        checkClassForModifier(node, isTransient(node.getModifiers()), "transient");
        checkClassForModifier(node, isVolatile(node.getModifiers()), "volatile");
        checkClassForModifier(node, isNative(node.getModifiers()), "native");
        // static/private are only legal on inner classes
        if (!(node instanceof InnerClassNode)) {
            checkClassForModifier(node, isStatic(node.getModifiers()), "static");
            checkClassForModifier(node, isPrivate(node.getModifiers()), "private");
        }
        // don't check synchronized here as it overlaps with ACC_SUPER
    }

    private void checkMethodForModifier(MethodNode node, boolean condition, String modifierName) {
        if (!condition) return;
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }

    private void checkClassForModifier(ClassNode node, boolean condition, String modifierName) {
        if (!condition) return;
        addError("The " + getDescription(node) + " has an incorrect modifier " + modifierName + ".", node);
    }

    // human-readable descriptions used in all error messages
    private String getDescription(ClassNode node) {
        return (node.isInterface() ? (Traits.isTrait(node)?"trait":"interface") : "class") + " '" + node.getName() + "'";
    }

    private String getDescription(MethodNode node) {
        return "method '" + node.getTypeDescriptor() + "'";
    }

    private String getDescription(FieldNode node) {
        return "field '" + node.getName() + "'";
    }

    /** An abstract method may only live in an abstract class. */
    private void checkAbstractDeclaration(MethodNode methodNode) {
        if (!methodNode.isAbstract()) return;
        if (isAbstract(currentClass.getModifiers())) return;
        addError("Can't have an abstract method in a non-abstract class." +
                " The " + getDescription(currentClass) + " must be declared abstract or the method '" +
                methodNode.getTypeDescriptor() + "' must not be abstract.", methodNode);
    }

    /** A class must not extend a final superclass. */
    private void checkClassForOverwritingFinal(ClassNode cn) {
        ClassNode superCN = cn.getSuperClass();
        if (superCN == null) return;
        if (!isFinal(superCN.getModifiers())) return;
        StringBuilder msg = new StringBuilder();
        msg.append("You are not allowed to overwrite the final ");
        msg.append(getDescription(superCN));
        msg.append(".");
        addError(msg.toString(), cn);
    }

    /** extends must name a class, implements must name interfaces. */
    private void checkImplementsAndExtends(ClassNode node) {
        ClassNode cn = node.getSuperClass();
        if (cn.isInterface() && !node.isInterface()) {
            addError("You are not allowed to extend the " + getDescription(cn) + ", use implements instead.", node);
        }
        for (ClassNode anInterface : node.getInterfaces()) {
            cn = anInterface;
            if (!cn.isInterface()) {
                addError("You are not allowed to implement the " + getDescription(cn) + ", use extends instead.", node);
            }
        }
    }

    /** Interface methods must not be final or (except {@code <clinit>}) static. */
    private void checkMethodsForIncorrectModifiers(ClassNode cn) {
        if (!cn.isInterface()) return;
        for (MethodNode method : cn.getMethods()) {
            if (method.isFinal()) {
                addError("The " + getDescription(method) + " from " + getDescription(cn) +
                        " must not be final. It is by definition abstract.", method);
            }
            if (method.isStatic() && !isConstructor(method)) {
                addError("The " + getDescription(method) + " from " + getDescription(cn) +
                        " must not be static. Only fields may be static in an interface.", method);
            }
        }
    }

    private void checkMethodsForWeakerAccess(ClassNode cn) {
        for (MethodNode method : cn.getMethods()) {
            checkMethodForWeakerAccessPrivileges(method, cn);
        }
    }

    // NOTE: despite its name this matches the *static initializer* name "<clinit>",
    // not a regular constructor ("<init>")
    private boolean isConstructor(MethodNode method) {
        return method.getName().equals("<clinit>");
    }

    /** Flags methods that override a final method of the superclass. */
    private void checkMethodsForOverridingFinal(ClassNode cn) {
        for (MethodNode method : cn.getMethods()) {
            Parameter[] params = method.getParameters();
            for (MethodNode superMethod : cn.getSuperClass().getMethods(method.getName())) {
                Parameter[] superParams = superMethod.getParameters();
                if (!hasEqualParameterTypes(params, superParams)) continue;
                if (!superMethod.isFinal()) break;
                addInvalidUseOfFinalError(method, params, superMethod.getDeclaringClass());
                return;
            }
        }
    }

    private void addInvalidUseOfFinalError(MethodNode method, Parameter[] parameters, ClassNode superCN) {
        StringBuilder msg = new StringBuilder();
        msg.append("You are not allowed to override the final method ").append(method.getName());
        msg.append("(");
        boolean needsComma = false;
        for (Parameter parameter : parameters) {
            if (needsComma) {
                msg.append(",");
            } else {
                needsComma = true;
            }
            msg.append(parameter.getType());
        }
        msg.append(") from ").append(getDescription(superCN));
        msg.append(".");
        addError(msg.toString(), method);
    }

    /** Builds the javac-style "attempting to assign weaker access privileges" message. */
    private void addWeakerAccessError(ClassNode cn, MethodNode method, Parameter[] parameters, MethodNode superMethod) {
        StringBuilder msg = new StringBuilder();
        msg.append(method.getName());
        msg.append("(");
        boolean needsComma = false;
        for (Parameter parameter : parameters) {
            if (needsComma) {
                msg.append(",");
            } else {
                needsComma = true;
            }
            msg.append(parameter.getType());
        }
        msg.append(") in ");
        msg.append(cn.getName());
        msg.append(" cannot override ");
        msg.append(superMethod.getName());
        msg.append(" in ");
        msg.append(superMethod.getDeclaringClass().getName());
        msg.append("; attempting to assign weaker access privileges; was ");
        msg.append(superMethod.isPublic() ? "public" : "protected");
        addError(msg.toString(), method);
    }

    // compares parameter lists by the erased type *name* of each parameter
    private boolean hasEqualParameterTypes(Parameter[] first, Parameter[] second) {
        if (first.length != second.length) return false;
        for (int i = 0; i < first.length; i++) {
            String ft = first[i].getType().getName();
            String st = second[i].getType().getName();
            if (ft.equals(st)) continue;
            return false;
        }
        return true;
    }

    protected SourceUnit getSourceUnit() {
        return source;
    }

    /** Runs all per-method checks, then descends into the method body. */
    public void visitMethod(MethodNode node) {
        inConstructor = false;
        inStaticConstructor = node.isStaticConstructor();
        checkAbstractDeclaration(node);
        checkRepetitiveMethod(node);
        checkOverloadingPrivateAndPublic(node);
        checkMethodModifiers(node);
        checkGenericsUsage(node, node.getParameters());
        checkGenericsUsage(node, node.getReturnType());
        super.visitMethod(node);
    }

    private void checkMethodModifiers(MethodNode node) {
        // don't check volatile here as it overlaps with ACC_BRIDGE
        // additional modifiers not allowed for interfaces
        if ((this.currentClass.getModifiers() & ACC_INTERFACE) != 0) {
            checkMethodForModifier(node, isStrict(node.getModifiers()), "strictfp");
            checkMethodForModifier(node, isSynchronized(node.getModifiers()), "synchronized");
            checkMethodForModifier(node, isNative(node.getModifiers()), "native");
        }
    }

    /** A method must not narrow the access of the superclass method it overrides. */
    private void checkMethodForWeakerAccessPrivileges(MethodNode mn, ClassNode cn) {
        if (mn.isPublic()) return;
        Parameter[] params = mn.getParameters();
        for (MethodNode superMethod : cn.getSuperClass().getMethods(mn.getName())) {
            Parameter[] superParams = superMethod.getParameters();
            if (!hasEqualParameterTypes(params, superParams)) continue;
            if ((mn.isPrivate() && !superMethod.isPrivate()) ||
                    (mn.isProtected() && superMethod.isPublic())) {
                addWeakerAccessError(cn, mn, params, superMethod);
                return;
            }
        }
    }

    /**
     * Mixing private and public/protected overloads of one name is forbidden
     * because it would disable Groovy multimethod dispatch.
     */
    private void checkOverloadingPrivateAndPublic(MethodNode node) {
        if (isConstructor(node)) return;
        boolean hasPrivate = node.isPrivate();
        boolean hasPublic = node.isPublic();
        for (MethodNode method : currentClass.getMethods(node.getName())) {
            if (method == node) continue;
            if (!method.getDeclaringClass().equals(node.getDeclaringClass())) continue;
            if (method.isPublic() || method.isProtected()) {
                hasPublic = true;
            } else {
                hasPrivate = true;
            }
            if (hasPrivate && hasPublic) break;
        }
        if (hasPrivate && hasPublic) {
            addError("Mixing private and public/protected methods of the same name causes multimethods to be disabled and is forbidden to avoid surprising behaviour. Renaming the private methods will solve the problem.", node);
        }
    }

    /** Flags two declarations of the same name/signature/return type in one class. */
    private void checkRepetitiveMethod(MethodNode node) {
        if (isConstructor(node)) return;
        for (MethodNode method : currentClass.getMethods(node.getName())) {
            if (method == node) continue;
            if (!method.getDeclaringClass().equals(node.getDeclaringClass())) continue;
            Parameter[] p1 = node.getParameters();
            Parameter[] p2 = method.getParameters();
            if (p1.length != p2.length) continue;
            addErrorIfParamsAndReturnTypeEqual(p2, p1, node, method);
        }
    }

    private void addErrorIfParamsAndReturnTypeEqual(Parameter[] p2, Parameter[] p1,
                                                    MethodNode node, MethodNode element) {
        boolean isEqual = true;
        for (int i = 0; i < p2.length; i++) {
            isEqual &= p1[i].getType().equals(p2[i].getType());
            if (!isEqual) break;
        }
        isEqual &= node.getReturnType().equals(element.getReturnType());
        if (isEqual) {
            addError("Repetitive method name/signature for " + getDescription(node) +
                    " in " + getDescription(currentClass) + ".", node);
        }
    }

    /** Flags duplicate field declarations and illegal interface-field modifiers. */
    public void visitField(FieldNode node) {
        if (currentClass.getDeclaredField(node.getName()) != node) {
            addError("The " + getDescription(node) + " is declared multiple times.", node);
        }
        checkInterfaceFieldModifiers(node);
        checkGenericsUsage(node, node.getType());
        super.visitField(node);
    }

    public void visitProperty(PropertyNode node) {
        checkDuplicateProperties(node);
        checkGenericsUsage(node, node.getType());
        super.visitProperty(node);
    }

    /**
     * Two properties whose generated getter names collide (e.g. {@code Foo}
     * and {@code foo}) would produce duplicate JavaBean accessors; only
     * checked when this property's name starts with an upper-case letter.
     */
    private void checkDuplicateProperties(PropertyNode node) {
        ClassNode cn = node.getDeclaringClass();
        String name = node.getName();
        String getterName = "get" + MetaClassHelper.capitalize(name);
        if (Character.isUpperCase(name.charAt(0))) {
            for (PropertyNode propNode : cn.getProperties()) {
                String otherName = propNode.getField().getName();
                String otherGetterName = "get" + MetaClassHelper.capitalize(otherName);
                if (node != propNode && getterName.equals(otherGetterName)) {
                    String msg = "The field " + name + " and " + otherName + " on the class " +
                            cn.getName() + " will result in duplicate JavaBean properties, which is not allowed";
                    addError(msg, node);
                }
            }
        }
    }

    /** Interface fields must be effectively 'public static final'. */
    private void checkInterfaceFieldModifiers(FieldNode node) {
        if (!currentClass.isInterface()) return;
        if ((node.getModifiers() & (ACC_PUBLIC | ACC_STATIC | ACC_FINAL)) == 0 ||
                (node.getModifiers() & (ACC_PRIVATE | ACC_PROTECTED)) != 0) {
            addError("The " + getDescription(node) + " is not 'public static final' but is defined in " +
                    getDescription(currentClass) + ".", node);
        }
    }

    /**
     * Rejects map entries used as index operands (a common parenthesisation
     * mistake), then checks any assignment operator for writes to final fields.
     */
    public void visitBinaryExpression(BinaryExpression expression) {
        if (expression.getOperation().getType() == Types.LEFT_SQUARE_BRACKET &&
                expression.getRightExpression() instanceof MapEntryExpression) {
            addError("You tried to use a map entry for an index operation, this is not allowed. " +
                    "Maybe something should be set in parentheses or a comma is missing?",
                    expression.getRightExpression());
        }
        super.visitBinaryExpression(expression);

        switch (expression.getOperation().getType()) {
            case Types.EQUAL: // = assignment
            case Types.BITWISE_AND_EQUAL:
            case Types.BITWISE_OR_EQUAL:
            case Types.BITWISE_XOR_EQUAL:
            case Types.PLUS_EQUAL:
            case Types.MINUS_EQUAL:
            case Types.MULTIPLY_EQUAL:
            case Types.DIVIDE_EQUAL:
            case Types.INTDIV_EQUAL:
            case Types.MOD_EQUAL:
            case Types.POWER_EQUAL:
            case Types.LEFT_SHIFT_EQUAL:
            case Types.RIGHT_SHIFT_EQUAL:
            case Types.RIGHT_SHIFT_UNSIGNED_EQUAL:
                checkFinalFieldAccess(expression.getLeftExpression());
                break;
            default:
                break;
        }
    }

    /**
     * Reports assignment to a final field occurring outside the appropriate
     * initializer: a static final may only be assigned in the static
     * constructor, an instance final only in an instance constructor. Handles
     * both bare variable references and {@code this.field} property access.
     */
    private void checkFinalFieldAccess(Expression expression) {
        if (!(expression instanceof VariableExpression) && !(expression instanceof PropertyExpression)) return;
        Variable v = null;
        if (expression instanceof VariableExpression) {
            VariableExpression ve = (VariableExpression) expression;
            v = ve.getAccessedVariable();
        } else {
            PropertyExpression propExp = ((PropertyExpression) expression);
            Expression objectExpression = propExp.getObjectExpression();
            if (objectExpression instanceof VariableExpression) {
                VariableExpression varExp = (VariableExpression) objectExpression;
                if (varExp.isThisExpression()) {
                    v = currentClass.getDeclaredField(propExp.getPropertyAsString());
                }
            }
        }
        if (v instanceof FieldNode) {
            FieldNode fn = (FieldNode) v;

            /*
             *  if it is static final but not accessed inside a static constructor, or,
             *  if it is an instance final but not accessed inside a instance constructor, it is an error
             */
            boolean isFinal = fn.isFinal();
            boolean isStatic = fn.isStatic();
            boolean error = isFinal && ((isStatic && !inStaticConstructor) || (!isStatic && !inConstructor));

            if (error) addError("cannot modify" + (isStatic ? " static" : "") + " final field '" + fn.getName() +
                    "' outside of " + (isStatic ? "static initialization block." : "constructor."), expression);
        }
    }

    public void visitConstructor(ConstructorNode node) {
        inConstructor = true;
        inStaticConstructor = node.isStaticConstructor();
        checkGenericsUsage(node, node.getParameters());
        super.visitConstructor(node);
    }

    /** A catch parameter must be a Throwable subtype. */
    public void visitCatchStatement(CatchStatement cs) {
        if (!(cs.getExceptionType().isDerivedFrom(ClassHelper.make(Throwable.class)))) {
            addError("Catch statement parameter type is not a subclass of Throwable.", cs);
        }
        super.visitCatchStatement(cs);
    }

    /** Declarations are not allowed to appear as method-call arguments. */
    public void visitMethodCallExpression(MethodCallExpression mce) {
        super.visitMethodCallExpression(mce);
        Expression aexp = mce.getArguments();
        if (aexp instanceof TupleExpression) {
            TupleExpression arguments = (TupleExpression) aexp;
            for (Expression e : arguments.getExpressions()) {
                checkForInvalidDeclaration(e);
            }
        } else {
            checkForInvalidDeclaration(aexp);
        }
    }

    /** Local variable declarations accept no modifiers other than 'final'. */
    @Override
    public void visitDeclarationExpression(DeclarationExpression expression) {
        super.visitDeclarationExpression(expression);
        if (expression.isMultipleAssignmentDeclaration()) return;
        checkInvalidDeclarationModifier(expression, ACC_ABSTRACT, "abstract");
        checkInvalidDeclarationModifier(expression, ACC_NATIVE, "native");
        checkInvalidDeclarationModifier(expression, ACC_PRIVATE, "private");
        checkInvalidDeclarationModifier(expression, ACC_PROTECTED, "protected");
        checkInvalidDeclarationModifier(expression, ACC_PUBLIC, "public");
        checkInvalidDeclarationModifier(expression, ACC_STATIC, "static");
        checkInvalidDeclarationModifier(expression, ACC_STRICT, "strictfp");
        checkInvalidDeclarationModifier(expression, ACC_SYNCHRONIZED, "synchronized");
        checkInvalidDeclarationModifier(expression, ACC_TRANSIENT, "transient");
        checkInvalidDeclarationModifier(expression, ACC_VOLATILE, "volatile");
    }

    private void checkInvalidDeclarationModifier(DeclarationExpression expression, int modifier, String modName) {
        if ((expression.getVariableExpression().getModifiers() & modifier) != 0) {
            addError("Modifier '" + modName + "' not allowed here.", expression);
        }
    }

    private void checkForInvalidDeclaration(Expression exp) {
        if (!(exp instanceof DeclarationExpression)) return;
        addError("Invalid use of declaration inside method call.", exp);
    }

    public void visitConstantExpression(ConstantExpression expression) {
        super.visitConstantExpression(expression);
        checkStringExceedingMaximumLength(expression);
    }

    public void visitGStringExpression(GStringExpression expression) {
        super.visitGStringExpression(expression);
        for (ConstantExpression ce : expression.getStrings()) {
            checkStringExceedingMaximumLength(ce);
        }
    }

    // 65535 is the class-file limit for a CONSTANT_Utf8 entry length
    private void checkStringExceedingMaximumLength(ConstantExpression expression) {
        Object value = expression.getValue();
        if (value instanceof String) {
            String s = (String) value;
            if (s.length() > 65535) {
                addError("String too long. The given string is " + s.length() + " Unicode code units long, but only a maximum of 65535 is allowed.", expression);
            }
        }
    }

    private void checkGenericsUsage(ASTNode ref, ClassNode[] nodes) {
        for (ClassNode node : nodes) {
            checkGenericsUsage(ref, node);
        }
    }

    private void checkGenericsUsage(ASTNode ref, Parameter[] params) {
        for (Parameter p : params) {
            checkGenericsUsage(ref, p.getType());
        }
    }

    /**
     * Rejects generics-carrying ClassNodes placed directly by AST transforms;
     * arrays are unwrapped to their component type first.
     */
    private void checkGenericsUsage(ASTNode ref, ClassNode node) {
        if (node.isArray()) {
            checkGenericsUsage(ref, node.getComponentType());
        } else if (!node.isRedirectNode() && node.isUsingGenerics()) {
            addError(
                    "A transform used a generics containing ClassNode "+ node + " " +
                    "for "+getRefDescriptor(ref) +
                    "directly. You are not supposed to do this. " +
                    "Please create a new ClassNode referring to the old ClassNode " +
                    "and use the new ClassNode instead of the old one. Otherwise " +
                    "the compiler will create wrong descriptors and a potential " +
                    "NullPointerException in TypeResolver in the OpenJDK. If this is " +
                    "not your own doing, please report this bug to the writer of the " +
                    "transform.", ref);
        }
    }

    // describes the AST node that supplied the offending ClassNode, for error messages
    private String getRefDescriptor(ASTNode ref) {
        if (ref instanceof FieldNode) {
            FieldNode f = (FieldNode) ref;
            return "the field "+f.getName()+" ";
        } else if (ref instanceof PropertyNode) {
            PropertyNode p = (PropertyNode) ref;
            return "the property "+p.getName()+" ";
        } else if (ref instanceof ConstructorNode) {
            return "the constructor "+ref.getText()+" ";
        } else if (ref instanceof MethodNode) {
            return "the method "+ref.getText()+" ";
        } else if (ref instanceof ClassNode) {
            return "the super class "+ref+" ";
        }
        return "<unknown with class "+ref.getClass()+"> ";
    }
}
/*
 * Copyright 2014-2021 JKOOL, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jkoolcloud.tnt4j.repository;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.builder.BasicConfigurationBuilder;
import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.ReloadingFileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.builder.fluent.PropertiesBuilderParameters;
import org.apache.commons.configuration2.event.ConfigurationErrorEvent;
import org.apache.commons.configuration2.event.ConfigurationEvent;
import org.apache.commons.configuration2.event.EventListener;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.ex.ConfigurationRuntimeException;
import org.apache.commons.configuration2.reloading.PeriodicReloadingTrigger;
import org.apache.commons.configuration2.reloading.ReloadingEvent;

import com.jkoolcloud.tnt4j.config.ConfigException;
import com.jkoolcloud.tnt4j.config.Configurable;
import com.jkoolcloud.tnt4j.core.OpLevel;
import com.jkoolcloud.tnt4j.sink.DefaultEventSinkFactory;
import com.jkoolcloud.tnt4j.sink.EventSink;
import com.jkoolcloud.tnt4j.utils.Utils;

/**
 * <p>
 * This class implements a file based token repository based on a property file following the key=value pairs defined
 * per line. File is auto-reloaded by default based on 20sec refresh time. The reload time can be changed by setting
 * {@code tnt4j.file.repository.refresh} property
 * </p>
 *
 * @see TokenRepository
 *
 * @version $Revision: 6 $
 *
 */
public class FileTokenRepository implements TokenRepository, Configurable {
	private static EventSink logger = DefaultEventSinkFactory.defaultEventSink(FileTokenRepository.class);
	// maps each repository listener to its pair of commons-configuration listeners
	// [0] = ConfigurationEvent listener, [1] = ConfigurationErrorEvent listener
	private static ConcurrentHashMap<TokenRepositoryListener, EventListener<?>[]> LISTEN_MAP = new ConcurrentHashMap<>(49);
	private static long DEFAULT_REFRESH_DELAY = TimeUnit.SECONDS.toMillis(20);

	private String configName = null;
	// non-null builder doubles as the "repository is open" flag (see isOpen())
	private BasicConfigurationBuilder<PropertiesConfiguration> config = null;
	private PeriodicReloadingTrigger cfgReloadTrigger = null;
	protected Map<String, ?> settings = null;
	private long refDelay;

	/**
	 * Create file/property based token repository instance based on default file name or url specified by
	 * {@code tnt4j.token.repository} java property which should be found in specified properties.
	 *
	 */
	public FileTokenRepository() {
		this(System.getProperty("tnt4j.token.repository"),
				Long.getLong("tnt4j.file.repository.refresh", DEFAULT_REFRESH_DELAY));
	}

	/**
	 * Create file/property based token repository instance given a specific filename or url. File name is auto-loaded
	 * based on {@code tnt4j.file.repository.refresh} property which is set to 20000 (ms) by default.
	 *
	 * @param url
	 *            file name or URL of the property file containing tokens
	 * @param refreshDelay
	 *            delay in milliseconds between refresh
	 */
	public FileTokenRepository(String url, long refreshDelay) {
		configName = url;
		refDelay = refreshDelay;
	}

	@Override
	public void addRepositoryListener(TokenRepositoryListener listener) {
		// listeners can only be attached once a configuration builder exists
		if (configName == null || !isOpen()) {
			return;
		}
		TokenConfigurationListener cfListener = new TokenConfigurationListener(listener, logger);
		TokenConfigurationErrorListener cfErrListener = new TokenConfigurationErrorListener(listener, logger);
		EventListener<?>[] pListeners = new EventListener[2];
		pListeners[0] = cfListener;
		pListeners[1] = cfErrListener;
		LISTEN_MAP.put(listener, pListeners);
		config.addEventListener(ConfigurationEvent.ANY, cfListener);
		config.addEventListener(ConfigurationErrorEvent.ANY, cfErrListener);
	}

	@Override
	public void removeRepositoryListener(TokenRepositoryListener listener) {
		if (configName == null || !isOpen()) {
			return;
		}
		EventListener<?>[] pListeners = LISTEN_MAP.get(listener);
		if (pListeners != null) {
			LISTEN_MAP.remove(listener);
			config.removeEventListener(ConfigurationEvent.ANY, (TokenConfigurationListener) pListeners[0]);
			config.removeEventListener(ConfigurationErrorEvent.ANY, (TokenConfigurationErrorListener) pListeners[1]);
		}
	}

	@Override
	public Object get(String key) {
		try {
			// null when the repository has not been opened
			return isOpen() ? config.getConfiguration().getProperty(key) : null;
		} catch (ConfigurationException exc) {
			throw new ConfigurationRuntimeException("Failed to get configuration property", exc);
		}
	}

	@Override
	public Iterator<? extends Object> getKeys() {
		try {
			// null when the repository has not been opened
			return isOpen() ? config.getConfiguration().getKeys() : null;
		} catch (ConfigurationException exc) {
			throw new ConfigurationRuntimeException("Failed to get configuration properties key set", exc);
		}
	}

	@Override
	public void remove(String key) {
		if (isOpen()) {
			try {
				config.getConfiguration().clearProperty(key);
			} catch (ConfigurationException exc) {
				throw new ConfigurationRuntimeException("Failed to remove configuration property", exc);
			}
		}
	}

	@Override
	public void set(String key, Object value) {
		if (isOpen()) {
			try {
				config.getConfiguration().setProperty(key, value);
			} catch (ConfigurationException exc) {
				throw new ConfigurationRuntimeException("Failed to set configuration property", exc);
			}
		}
	}

	@Override
	public String getName() {
		return configName;
	}

	@Override
	public String toString() {
		PropertiesConfiguration cfg = null;
		try {
			if (isOpen()) {
				cfg = config.getConfiguration();
			}
		} catch (ConfigurationException exc) {
			// deliberately ignored: toString() is best-effort and must not throw;
			// cfg simply stays null in the rendered string
		}
		return super.toString() + "{url: " + getName() + ", delay: " + refDelay + ", config: " + cfg + "}";
	}

	@Override
	public boolean isOpen() {
		return config != null;
	}

	@Override
	public void open() throws IOException {
		if (isOpen() || (configName == null)) {
			return;
		}
		try {
			initConfig();
			if (cfgReloadTrigger != null) {
				cfgReloadTrigger.start();
			}
		} catch (Throwable e) {
			throw new IOException(e);
		}
	}

	/**
	 * Initialize property configuration based on a configured configuration file name. The method attempts to load it
	 * from URL if given config is URL, then load it from class path and then from file system.
	 *
	 * @throws MalformedURLException
	 *             if malformed configuration file name
	 */
	protected void initConfig() throws MalformedURLException {
		// "scheme://" prefix means configName is a URL rather than a file name
		int urlIndex = configName.indexOf("://");
		PropertiesBuilderParameters params = new Parameters().properties();
		if (urlIndex > 0) {
			params.setURL(new URL(configName));
		} else {
			// prefer a classpath resource over a plain file-system path
			URL configResource = getClass().getResource("/" + configName);
			if (configResource != null) {
				params.setURL(configResource);
			} else {
				params.setFileName(configName);
			}
		}

		if (refDelay > 0) {
			// reloading builder + periodic trigger for auto-refresh
			params.setReloadingRefreshDelay(refDelay);
			ReloadingFileBasedConfigurationBuilder<PropertiesConfiguration> builder = new ReloadingFileBasedConfigurationBuilder<>(PropertiesConfiguration.class);
			builder.configure(params);
			cfgReloadTrigger = new PeriodicReloadingTrigger(builder.getReloadingController(), null, refDelay,
					TimeUnit.MILLISECONDS);
			config = builder;
		} else {
			// static (non-reloading) configuration
			config = new FileBasedConfigurationBuilder<>(PropertiesConfiguration.class);
			config.configure(params);
		}
	}

	@Override
	public void close() throws IOException {
		// NOTE(review): only the reload trigger is shut down; 'config' stays non-null,
		// so isOpen() remains true and a subsequent open()/reopen() short-circuits
		// without restarting the trigger — confirm this is the intended lifecycle
		if (cfgReloadTrigger != null) {
			cfgReloadTrigger.shutdown();
		}
	}

	@Override
	public void reopen() throws IOException {
		close();
		removeListenersFromClosed();
		open();
		addListenersToOpened();
	}

	// detaches all registered listener pairs from the current builder (kept in LISTEN_MAP)
	private void removeListenersFromClosed() {
		if (isOpen()) {
			for (Map.Entry<TokenRepositoryListener, EventListener<?>[]> le : LISTEN_MAP.entrySet()) {
				EventListener<?>[] pListeners = le.getValue();
				if (pListeners != null) {
					config.removeEventListener(ConfigurationEvent.ANY, (TokenConfigurationListener) pListeners[0]);
					config.removeEventListener(ConfigurationErrorEvent.ANY,
							(TokenConfigurationErrorListener) pListeners[1]);
				}
			}
		}
	}

	// re-attaches all registered listener pairs to the (re)opened builder
	private void addListenersToOpened() {
		if (isOpen()) {
			for (Map.Entry<TokenRepositoryListener, EventListener<?>[]> le : LISTEN_MAP.entrySet()) {
				EventListener<?>[] pListeners = le.getValue();
				if (pListeners != null) {
					config.addEventListener(ConfigurationEvent.ANY, (TokenConfigurationListener) pListeners[0]);
					config.addEventListener(ConfigurationErrorEvent.ANY,
							(TokenConfigurationErrorListener) pListeners[1]);
				}
			}
		}
	}

	@Override
	public Map<String, ?> getConfiguration() {
		return settings;
	}

	@Override
	public void setConfiguration(Map<String, ?> props) throws ConfigException {
		settings = props;
		// "Url" and "RefreshTime" override the constructor-supplied values when present
		configName = Utils.getString("Url", props, configName);
		refDelay = Utils.getLong("RefreshTime", props, refDelay);
	}

	@Override
	public boolean isDefined() {
		return configName != null;
	}
}

/**
 * Adapts commons-configuration {@link ConfigurationEvent}s into
 * {@link TokenRepositoryEvent}s and forwards them to a {@link TokenRepositoryListener}.
 */
class TokenConfigurationListener implements EventListener<ConfigurationEvent> {
	TokenRepositoryListener repListener = null;
	EventSink logger = null;

	public TokenConfigurationListener(TokenRepositoryListener listener, EventSink log) {
		repListener = listener;
		logger = log;
	}

	@Override
	public void onEvent(ConfigurationEvent event) {
		// only react to after-update notifications; each event fires twice (before/after)
		if (event.isBeforeUpdate()) {
			return;
		}
		logger.log(OpLevel.DEBUG, "configurationChanged: type={0}, {1}:{2}", event.getEventType(),
				event.getPropertyName(), event.getPropertyValue());
		// map each configuration event type to the corresponding repository event code
		if (event.getEventType() == ConfigurationEvent.ADD_PROPERTY) {
			repListener.repositoryChanged(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_ADD_KEY,
					event.getPropertyName(), event.getPropertyValue(), null));
		} else if (event.getEventType() == ConfigurationEvent.SET_PROPERTY) {
			repListener.repositoryChanged(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_SET_KEY,
					event.getPropertyName(), event.getPropertyValue(), null));
		} else if (event.getEventType() == ConfigurationEvent.CLEAR_PROPERTY) {
			repListener.repositoryChanged(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_CLEAR_KEY,
					event.getPropertyName(), event.getPropertyValue(), null));
		} else if (event.getEventType() == ConfigurationEvent.CLEAR) {
			repListener.repositoryChanged(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_CLEAR,
					event.getPropertyName(), event.getPropertyValue(), null));
		} else if (event.getEventType() == ReloadingEvent.ANY) {
			// NOTE(review): comparing a ConfigurationEvent's type against ReloadingEvent.ANY —
			// verify reload notifications actually arrive through this listener
			repListener.repositoryChanged(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_RELOAD,
					event.getPropertyName(), event.getPropertyValue(), null));
		}
	}
}

/**
 * Adapts commons-configuration {@link ConfigurationErrorEvent}s into
 * {@link TokenRepositoryEvent}s, logging the error and notifying the listener.
 */
class TokenConfigurationErrorListener implements EventListener<ConfigurationErrorEvent> {
	TokenRepositoryListener repListener = null;
	EventSink logger = null;

	public TokenConfigurationErrorListener(TokenRepositoryListener listener, EventSink log) {
		repListener = listener;
		logger = log;
	}

	@Override
	public void onEvent(ConfigurationErrorEvent event) {
		logger.log(OpLevel.ERROR, "Configuration error detected, event={0}", event, event.getCause());
		repListener.repositoryError(new TokenRepositoryEvent(event.getSource(), TokenRepository.EVENT_EXCEPTION,
				event.getPropertyName(), event.getPropertyValue(), event.getCause()));
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.mina.proxy.handlers.http.ntlm;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.StringTokenizer;

import org.apache.mina.proxy.utils.ByteUtilities;

/**
 * NTLMUtilities.java - NTLM functions used for authentication and unit testing.
 *
 * @author <a href="http://mina.apache.org">Apache MINA Project</a>
 * @since MINA 2.0.0-M3
 */
public class NTLMUtilities implements NTLMConstants {
    /**
     * Convenience overload: writes a security buffer whose allocated size
     * equals its length into a fresh 8-byte array.
     *
     * @see #writeSecurityBuffer(short, short, int, byte[], int)
     */
    public final static byte[] writeSecurityBuffer(short length, int bufferOffset) {
        byte[] b = new byte[8];
        writeSecurityBuffer(length, length, bufferOffset, b, 0);
        return b;
    }

    /**
     * Writes a security buffer to the given array <code>b</code> at offset
     * <code>offset</code>. A security buffer defines a pointer to an area
     * in the data that defines some data with a variable length. This allows
     * to have a semi-fixed length header thus making a little bit easier
     * the decoding process in the NTLM protocol.
     *
     * Layout (little-endian): 2 bytes length, 2 bytes allocated space,
     * 4 bytes offset of the payload from the start of the message.
     *
     * @param length the length of the security buffer
     * @param allocated the allocated space for the security buffer (should be
     * greater or equal to <code>length</code>
     * @param bufferOffset the offset from the main array where the currently
     * defined security buffer will be written
     * @param b the buffer in which we write the security buffer
     * @param offset the offset at which to write to the b buffer
     */
    public final static void writeSecurityBuffer(short length, short allocated, int bufferOffset, byte[] b, int offset) {
        ByteUtilities.writeShort(length, b, offset);
        ByteUtilities.writeShort(allocated, b, offset + 2);
        ByteUtilities.writeInt(bufferOffset, b, offset + 4);
    }

    /**
     * Writes the Windows OS version passed in as three byte values
     * (majorVersion.minorVersion.buildNumber) to the given byte array
     * at <code>offset</code>. The build number is stored little-endian;
     * the 8-byte structure is terminated by the reserved bytes 00 00 00 0F.
     *
     * @param majorVersion the major version number
     * @param minorVersion the minor version number
     * @param buildNumber the build number
     * @param b the target byte array
     * @param offset the offset at which to write in the array
     */
    public final static void writeOSVersion(byte majorVersion, byte minorVersion, short buildNumber, byte[] b, int offset) {
        b[offset] = majorVersion;
        b[offset + 1] = minorVersion;
        // Build number, little-endian (low byte first).
        b[offset + 2] = (byte) buildNumber;
        b[offset + 3] = (byte) (buildNumber >> 8);
        // Reserved trailer required by the OS version structure.
        b[offset + 4] = 0;
        b[offset + 5] = 0;
        b[offset + 6] = 0;
        b[offset + 7] = 0x0F;
    }

    /**
     * Tries to return a valid OS version on Windows systems. If it fails to
     * do so or if we're running on another OS then a fake Windows XP OS
     * version is returned because the protocol uses it.
     *
     * @return a NTLM OS version byte buffer
     */
    public final static byte[] getOsVersion() {
        String os = System.getProperty("os.name");
        if ((os == null) || !os.toUpperCase().contains("WINDOWS")) {
            return DEFAULT_OS_VERSION;
        }
        byte[] osVer = new byte[8];

        // Enclose the parsing in a try...catch so that any malformed output
        // from the "ver" command funnels into the fallback logic below.
        try {
            Process pr = Runtime.getRuntime().exec("cmd /C ver");
            BufferedReader reader = new BufferedReader(new InputStreamReader(pr.getInputStream()));
            pr.waitFor();
            String line;
            // Skip over any leading lines of output.
            // NOTE(review): the condition keeps looping while the line is
            // NON-empty, so this stops at the first blank line or EOF --
            // verify against actual "ver" output format.
            do {
                line = reader.readLine();
            } while ((line != null) && (line.length() != 0));
            reader.close();

            // If line is null, we must not go any farther
            if (line == null) {
                // Throw an exception to jump into the catch() part
                throw new Exception();
            }

            // The command line should return a response like :
            // Microsoft Windows XP [version 5.1.2600]
            int pos = line.toLowerCase().indexOf("version");
            if (pos == -1) {
                // Throw an exception to jump into the catch() part
                throw new Exception();
            }

            // Skip past "version " (8 characters) to the numbers themselves.
            pos += 8;
            line = line.substring(pos, line.indexOf(']'));
            StringTokenizer tk = new StringTokenizer(line, ".");
            if (tk.countTokens() != 3) {
                // Throw an exception to jump into the catch() part
                throw new Exception();
            }
            writeOSVersion(Byte.parseByte(tk.nextToken()), Byte.parseByte(tk.nextToken()), Short.parseShort(tk.nextToken()), osVer, 0);
        } catch (Exception ex) {
            // Fallback: derive major.minor from the "os.version" system
            // property (assumes a "x.y..." shape); build number is zeroed.
            try {
                String version = System.getProperty("os.version");
                writeOSVersion(Byte.parseByte(version.substring(0, 1)), Byte.parseByte(version.substring(2, 3)), (short) 0, osVer, 0);
            } catch (Exception ex2) {
                return DEFAULT_OS_VERSION;
            }
        }
        return osVer;
    }

    /**
     * Builds an NTLM type 1 (negotiation) message.
     * see http://davenport.sourceforge.net/ntlm.html#theType1Message
     *
     * @param workStation the workstation name
     * @param domain the domain name
     * @param customFlags custom flags, if null then
     * <code>NTLMConstants.DEFAULT_CONSTANTS</code> is used
     * @param osVersion the os version of the client, if null then
     * <code>NTLMConstants.DEFAULT_OS_VERSION</code> is used
     * @return the type 1 message
     */
    public final static byte[] createType1Message(String workStation, String domain, Integer customFlags, byte[] osVersion) {
        byte[] msg = null;

        if (osVersion != null && osVersion.length != 8) {
            throw new IllegalArgumentException("osVersion parameter should be a 8 byte wide array");
        }

        if (workStation == null || domain == null) {
            throw new IllegalArgumentException("workStation and domain must be non null");
        }

        // Supplying workstation/domain requires the corresponding flags to be set.
        int flags = customFlags != null ? customFlags | FLAG_NEGOTIATE_WORKSTATION_SUPPLIED | FLAG_NEGOTIATE_DOMAIN_SUPPLIED : DEFAULT_FLAGS;

        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        try {
            baos.write(NTLM_SIGNATURE);
            baos.write(ByteUtilities.writeInt(MESSAGE_TYPE_1));
            baos.write(ByteUtilities.writeInt(flags));

            byte[] domainData = ByteUtilities.getOEMStringAsByteArray(domain);
            byte[] workStationData = ByteUtilities.getOEMStringAsByteArray(workStation);

            // Payload starts right after the fixed header: 32 bytes, or 40
            // when the optional 8-byte OS version structure is appended.
            int pos = (osVersion != null) ? 40 : 32;
            baos.write(writeSecurityBuffer((short) domainData.length, pos + workStationData.length));
            baos.write(writeSecurityBuffer((short) workStationData.length, pos));

            if (osVersion != null) {
                baos.write(osVersion);
            }

            // Order is not mandatory since a pointer is given in the security buffers
            baos.write(workStationData);
            baos.write(domainData);

            msg = baos.toByteArray();
            baos.close();
        } catch (IOException e) {
            return null;
        }

        return msg;
    }

    /**
     * Writes a security buffer and returns the pointer of the position
     * where to write the next security buffer.
     *
     * @param baos the stream where the security buffer is written
     * @param len the length of the security buffer
     * @param pointer the position where the security buffer can be written
     * @return the position where the next security buffer will be written
     * @throws IOException if writing to the ByteArrayOutputStream fails
     */
    public final static int writeSecurityBufferAndUpdatePointer(ByteArrayOutputStream baos, short len, int pointer) throws IOException {
        baos.write(writeSecurityBuffer(len, pointer));
        return pointer + len;
    }

    /**
     * Extracts the NTLM challenge from the type 2 message as an 8 byte array.
     * The challenge sits at fixed offset 24 in the type 2 message.
     *
     * @param msg the type 2 message byte array
     * @return the challenge
     */
    public final static byte[] extractChallengeFromType2Message(byte[] msg) {
        byte[] challenge = new byte[8];
        System.arraycopy(msg, 24, challenge, 0, 8);
        return challenge;
    }

    /**
     * Extracts the NTLM flags from the type 2 message. The 4 flag bytes sit
     * at fixed offset 20 and are byte-swapped to host order before decoding.
     *
     * @param msg the type 2 message byte array
     * @return the proxy flags as an int
     */
    public final static int extractFlagsFromType2Message(byte[] msg) {
        byte[] flagsBytes = new byte[4];
        System.arraycopy(msg, 20, flagsBytes, 0, 4);
        ByteUtilities.changeWordEndianess(flagsBytes, 0, 4);
        return ByteUtilities.makeIntFromByte4(flagsBytes);
    }

    /**
     * Reads the byte array described by the security buffer stored at the
     * <code>securityBufferOffset</code> offset.
     *
     * @param msg the message where to read the security buffer and it's value
     * @param securityBufferOffset the offset at which to read the security buffer
     * @return a new byte array holding the data pointed by the security buffer
     */
    public final static byte[] readSecurityBufferTarget(byte[] msg, int securityBufferOffset) {
        byte[] securityBuffer = new byte[8];
        System.arraycopy(msg, securityBufferOffset, securityBuffer, 0, 8);
        ByteUtilities.changeWordEndianess(securityBuffer, 0, 8);
        // First word: payload length; second dword (at +4): payload offset.
        int length = ByteUtilities.makeIntFromByte2(securityBuffer);
        int offset = ByteUtilities.makeIntFromByte4(securityBuffer, 4);
        byte[] secBufValue = new byte[length];
        System.arraycopy(msg, offset, secBufValue, 0, length);
        return secBufValue;
    }

    /**
     * Extracts the target name from the type 2 message. The security buffer
     * for the target name sits at fixed offset 12.
     *
     * @param msg the type 2 message byte array
     * @param msgFlags the flags if null then flags are extracted from the
     * type 2 message
     * @return the target name
     * @throws UnsupportedEncodingException if unable to use the
     * needed UTF-16LE or ASCII charsets
     */
    public final static String extractTargetNameFromType2Message(byte[] msg, Integer msgFlags) throws UnsupportedEncodingException {
        // Read the security buffer to determine where the target name
        // is stored and what it's length is
        byte[] targetName = readSecurityBufferTarget(msg, 12);

        // now we convert it to a string
        int flags = msgFlags == null ? extractFlagsFromType2Message(msg) : msgFlags;
        if (ByteUtilities.isFlagSet(flags, FLAG_NEGOTIATE_UNICODE)) {
            return new String(targetName, "UTF-16LE");
        }
        return new String(targetName, "ASCII");
    }

    /**
     * Extracts the target information block from the type 2 message, or null
     * when the negotiated flags do not advertise target info. The security
     * buffer for the block sits at fixed offset 40.
     *
     * @param msg the type 2 message byte array
     * @param msgFlags the flags if null then flags are extracted from the
     * type 2 message
     * @return the target info
     */
    public final static byte[] extractTargetInfoFromType2Message(byte[] msg, Integer msgFlags) {
        int flags = msgFlags == null ? extractFlagsFromType2Message(msg) : msgFlags;
        if (!ByteUtilities.isFlagSet(flags, FLAG_NEGOTIATE_TARGET_INFO)) {
            return null;
        }
        int pos = 40;
        return readSecurityBufferTarget(msg, pos);
    }

    /**
     * Prints to the {@link PrintWriter} the target information block extracted
     * from the type 2 message. Each sub-block is a type byte, a 2-byte
     * little-endian length, and the data; a type of 0 terminates the list.
     *
     * @param msg the type 2 message
     * @param msgFlags the flags if null then flags are extracted from the
     * type 2 message
     * @param out the output target for the information
     * @throws UnsupportedEncodingException if unable to use the
     * needed UTF-16LE or ASCII charsets
     */
    public final static void printTargetInformationBlockFromType2Message(byte[] msg, Integer msgFlags, PrintWriter out) throws UnsupportedEncodingException {
        int flags = msgFlags == null ? extractFlagsFromType2Message(msg) : msgFlags;

        byte[] infoBlock = extractTargetInfoFromType2Message(msg, flags);
        if (infoBlock == null) {
            out.println("No target information block found !");
        } else {
            int pos = 0;
            // Sub-block type 0 is the terminator.
            while (infoBlock[pos] != 0) {
                out.print("---\nType " + infoBlock[pos] + ": ");
                switch (infoBlock[pos]) {
                case 1:
                    out.println("Server name");
                    break;
                case 2:
                    out.println("Domain name");
                    break;
                case 3:
                    out.println("Fully qualified DNS hostname");
                    break;
                case 4:
                    out.println("DNS domain name");
                    break;
                case 5:
                    out.println("Parent DNS domain name");
                    break;
                }
                byte[] len = new byte[2];
                System.arraycopy(infoBlock, pos + 2, len, 0, 2);
                ByteUtilities.changeByteEndianess(len, 0, 2);
                int length = ByteUtilities.makeIntFromByte2(len, 0);
                out.println("Length: " + length + " bytes");
                out.print("Data: ");
                if (ByteUtilities.isFlagSet(flags, FLAG_NEGOTIATE_UNICODE)) {
                    out.println(new String(infoBlock, pos + 4, length, "UTF-16LE"));
                } else {
                    out.println(new String(infoBlock, pos + 4, length, "ASCII"));
                }
                // Advance past the 4-byte sub-block header and its data.
                pos += 4 + length;
                out.flush();
            }
        }
    }

    /**
     * Builds an NTLM type 3 (authentication) message.
     *
     * @see <a
     * href="http://davenport.sourceforge.net/ntlm.html#theType3Message">NTLM
     * message type</a>
     *
     * @param user
     *            the user name
     * @param password
     *            the user password
     * @param challenge
     *            the challenge response
     * @param target
     *            the target name
     * @param workstation
     *            the client workstation's name
     * @param serverFlags
     *            the flags set by the client
     * @param osVersion
     *            the os version of the client
     * @return the type 3 message
     */
    public final static byte[] createType3Message(String user, String password, byte[] challenge, String target, String workstation, Integer serverFlags, byte[] osVersion) {
        byte[] msg = null;

        if (challenge == null || challenge.length != 8) {
            throw new IllegalArgumentException("challenge[] should be a 8 byte wide array");
        }

        if (osVersion != null && osVersion.length != 8) {
            throw new IllegalArgumentException("osVersion should be a 8 byte wide array");
        }

        int flags = serverFlags != null ? serverFlags : DEFAULT_FLAGS;

        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        try {
            baos.write(NTLM_SIGNATURE);
            baos.write(ByteUtilities.writeInt(MESSAGE_TYPE_3));

            byte[] dataLMResponse = NTLMResponses.getLMResponse(password, challenge);
            byte[] dataNTLMResponse = NTLMResponses.getNTLMResponse(password, challenge);

            boolean useUnicode = ByteUtilities.isFlagSet(flags, FLAG_NEGOTIATE_UNICODE);
            byte[] targetName = ByteUtilities.encodeString(target, useUnicode);
            byte[] userName = ByteUtilities.encodeString(user, useUnicode);
            byte[] workstationName = ByteUtilities.encodeString(workstation, useUnicode);

            // Fixed header is 64 bytes, or 72 with the optional OS version
            // structure; name fields come first, then the two responses.
            int pos = osVersion != null ? 72 : 64;
            int responsePos = pos + targetName.length + userName.length + workstationName.length;
            responsePos = writeSecurityBufferAndUpdatePointer(baos, (short) dataLMResponse.length, responsePos);
            writeSecurityBufferAndUpdatePointer(baos, (short) dataNTLMResponse.length, responsePos);
            pos = writeSecurityBufferAndUpdatePointer(baos, (short) targetName.length, pos);
            pos = writeSecurityBufferAndUpdatePointer(baos, (short) userName.length, pos);
            writeSecurityBufferAndUpdatePointer(baos, (short) workstationName.length, pos);

            /**
             * Header field offsets for reference:
             * LM/LMv2 Response security buffer       20
             * NTLM/NTLMv2 Response security buffer   28
             * Target Name security buffer            36
             * User Name security buffer              44
             * Workstation Name security buffer      (52)
             * Session Key (optional) security buffer (60)
             * Flags (optional) long                  (64)
             * OS Version Structure (Optional) 8 bytes
             **/

            // Session Key Security Buffer ??!
            // NOTE(review): this hardcoded buffer {0,0,0,0,0x9a,0,0,0} stands
            // in for the session-key security buffer; meaning of 0x9a unclear.
            baos.write(new byte[] { 0, 0, 0, 0, (byte) 0x9a, 0, 0, 0 });

            baos.write(ByteUtilities.writeInt(flags));

            if (osVersion != null) {
                baos.write(osVersion);
            }

            // Order is not mandatory since a pointer is given in the security buffers
            baos.write(targetName);
            baos.write(userName);
            baos.write(workstationName);
            baos.write(dataLMResponse);
            baos.write(dataNTLMResponse);

            msg = baos.toByteArray();
            baos.close();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }

        return msg;
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server.lookup.namespace.cache;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.inject.Inject;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.guice.LazySingleton;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.lookup.namespace.CacheGenerator;
import io.druid.query.lookup.namespace.ExtractionNamespace;
import sun.misc.Cleaner;

import javax.annotation.Nullable;
import java.util.Map;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Usage:
 * <pre>{@code
 * CacheScheduler.Entry entry = cacheScheduler.schedule(namespace); // or scheduleAndWait(namespace, timeout)
 * CacheState cacheState = entry.getCacheState();
 * // cacheState could be either NoCache or VersionedCache.
 * if (cacheState instanceof NoCache) {
 *   // the cache is not yet created, or already closed
 * } else if (cacheState instanceof VersionedCache) {
 *   Map<String, String> cache = ((VersionedCache) cacheState).getCache(); // use the cache
 *   // Although VersionedCache implements AutoCloseable, versionedCache shouldn't be manually closed
 *   // when obtained from entry.getCacheState(). If the namespace updates should be ceased completely,
 *   // entry.close() (see below) should be called, it will close the last VersionedCache itself.
 *   // On scheduled updates, outdated VersionedCaches are also closed automatically.
 * }
 * ...
 * entry.close(); // close the last VersionedCache and unschedule future updates
 * }</pre>
 */
@LazySingleton
public final class CacheScheduler
{
  private static final Logger log = new Logger(CacheScheduler.class);

  /**
   * Thin public handle over {@link EntryImpl}: exposes the cache state and
   * lifecycle while keeping all state in the impl, so that this handle can be
   * garbage-collected (triggering the Cleaner) when callers drop it.
   */
  public final class Entry<T extends ExtractionNamespace> implements AutoCloseable
  {
    private final EntryImpl<T> impl;

    private Entry(final T namespace, final CacheGenerator<T> cacheGenerator)
    {
      impl = new EntryImpl<>(namespace, this, cacheGenerator);
    }

    /**
     * Returns the last cache state, either {@link NoCache} or {@link VersionedCache}.
     */
    public CacheState getCacheState()
    {
      return impl.cacheStateHolder.get();
    }

    /**
     * @return the entry's cache if it is already initialized and not yet closed
     * @throws IllegalStateException if the entry's cache is not yet initialized, or {@link #close()} has
     * already been called
     */
    public Map<String, String> getCache()
    {
      CacheState cacheState = getCacheState();
      if (cacheState instanceof VersionedCache) {
        return ((VersionedCache) cacheState).getCache();
      } else {
        throw new ISE("Cannot get cache: %s", cacheState);
      }
    }

    @VisibleForTesting
    Future<?> getUpdaterFuture()
    {
      return impl.updaterFuture;
    }

    /** Blocks until at least totalUpdates cache updates have happened overall. */
    public void awaitTotalUpdates(int totalUpdates) throws InterruptedException
    {
      impl.updateCounter.awaitTotalUpdates(totalUpdates);
    }

    /** Blocks until nextUpdates more cache updates happen from now. */
    void awaitNextUpdates(int nextUpdates) throws InterruptedException
    {
      impl.updateCounter.awaitNextUpdates(nextUpdates);
    }

    /**
     * Close the last {@link #getCacheState()}, if it is {@link VersionedCache}, and unschedule future updates.
     */
    @Override
    public void close()
    {
      impl.close();
    }

    @Override
    public String toString()
    {
      return impl.toString();
    }
  }

  /**
   * This class effectively contains the whole state and most of the logic of {@link Entry}, need to be a separate class
   * because the Entry must not be referenced from the runnable executed in {@link #cacheManager}'s ExecutorService,
   * that would be a leak preventing the Entry to be collected by GC, and therefore {@link #entryCleaner} to be run by
   * the JVM. Also, {@link #entryCleaner} must not reference the Entry through it's Runnable hunk.
   */
  public class EntryImpl<T extends ExtractionNamespace> implements AutoCloseable
  {
    private final T namespace;
    private final String asString;
    // Holds the current cache state; transitions to NoCache.ENTRY_CLOSED are terminal.
    private final AtomicReference<CacheState> cacheStateHolder = new AtomicReference<CacheState>(NoCache.CACHE_NOT_INITIALIZED);
    private final Future<?> updaterFuture;
    private final Cleaner entryCleaner;
    private final CacheGenerator<T> cacheGenerator;
    private final UpdateCounter updateCounter = new UpdateCounter();
    // Released at the end of the constructor; updateCache() awaits it to ensure
    // all fields are fully visible to the updater thread before any update runs.
    private final CountDownLatch startLatch = new CountDownLatch(1);

    private EntryImpl(final T namespace, final Entry<T> entry, final CacheGenerator<T> cacheGenerator)
    {
      try {
        this.namespace = namespace;
        this.asString = StringUtils.format("namespace [%s] : %s", namespace, super.toString());
        this.updaterFuture = schedule(namespace);
        this.entryCleaner = createCleaner(entry);
        this.cacheGenerator = cacheGenerator;
        activeEntries.incrementAndGet();
      }
      finally {
        startLatch.countDown();
      }
    }

    /** Registers a JVM Cleaner that closes this impl if the Entry handle is GC'd unclosed. */
    private Cleaner createCleaner(Entry<T> entry)
    {
      return Cleaner.create(entry, new Runnable()
      {
        @Override
        public void run()
        {
          closeFromCleaner();
        }
      });
    }

    /**
     * Schedules cache updates: periodic at pollMs intervals when pollMs > 0,
     * otherwise a single immediate update.
     */
    private Future<?> schedule(final T namespace)
    {
      final long updateMs = namespace.getPollMs();
      Runnable command = new Runnable()
      {
        @Override
        public void run()
        {
          updateCache();
        }
      };
      if (updateMs > 0) {
        return cacheManager.scheduledExecutorService().scheduleAtFixedRate(command, 0, updateMs, TimeUnit.MILLISECONDS);
      } else {
        return cacheManager.scheduledExecutorService().schedule(command, 0, TimeUnit.MILLISECONDS);
      }
    }

    /**
     * One scheduled update tick: skips work if the entry is already closed or
     * the thread is interrupted; closes the entry on fatal errors.
     */
    private void updateCache()
    {
      try {
        // Ensures visibility of the whole EntryImpl's state (fields and their state).
        startLatch.await();
        CacheState currentCacheState = cacheStateHolder.get();
        if (!Thread.currentThread().isInterrupted() && currentCacheState != NoCache.ENTRY_CLOSED) {
          final String currentVersion = currentVersionOrNull(currentCacheState);
          tryUpdateCache(currentVersion);
        }
      }
      catch (Throwable t) {
        try {
          close();
        }
        catch (Exception e) {
          t.addSuppressed(e);
        }
        if (Thread.currentThread().isInterrupted() || t instanceof InterruptedException || t instanceof Error) {
          throw Throwables.propagate(t);
        }
      }
    }

    /**
     * Generates a new cache version and swaps it in; the previous version is
     * closed on success, the new one is discarded if the entry was closed
     * concurrently or generation failed.
     */
    private void tryUpdateCache(String currentVersion) throws Exception
    {
      boolean updatedCacheSuccessfully = false;
      VersionedCache newVersionedCache = null;
      try {
        newVersionedCache = cacheGenerator.generateCache(namespace, this, currentVersion, CacheScheduler.this);
        if (newVersionedCache != null) {
          CacheState previousCacheState = swapCacheState(newVersionedCache);
          if (previousCacheState != NoCache.ENTRY_CLOSED) {
            updatedCacheSuccessfully = true;
            if (previousCacheState instanceof VersionedCache) {
              ((VersionedCache) previousCacheState).close();
            }
            log.debug("%s: the cache was successfully updated", this);
          } else {
            // Lost the race with close(): drop the freshly-generated cache.
            newVersionedCache.close();
            log.debug("%s was closed while the cache was being updated, discarding the update", this);
          }
        } else {
          // Generator decided the current version is still fresh.
          log.debug("%s: Version `%s` not updated, the cache is not updated", this, currentVersion);
        }
      }
      catch (Throwable t) {
        try {
          if (newVersionedCache != null && !updatedCacheSuccessfully) {
            newVersionedCache.close();
          }
          log.error(t, "Failed to update %s", this);
        }
        catch (Exception e) {
          t.addSuppressed(e);
        }
        if (Thread.currentThread().isInterrupted() || t instanceof InterruptedException || t instanceof Error) {
          // propagate to the catch block in updateCache()
          throw t;
        }
      }
    }

    /** Returns the version of the current cache, or null if no cache exists yet. */
    private String currentVersionOrNull(CacheState currentCacheState)
    {
      if (currentCacheState instanceof VersionedCache) {
        return ((VersionedCache) currentCacheState).version;
      } else {
        return null;
      }
    }

    /**
     * Atomically installs the new cache unless the entry is already closed;
     * returns the previous state (ENTRY_CLOSED means the swap did not happen).
     */
    private CacheState swapCacheState(VersionedCache newVersionedCache)
    {
      CacheState lastCacheState;
      // CAS loop
      do {
        lastCacheState = cacheStateHolder.get();
        if (lastCacheState == NoCache.ENTRY_CLOSED) {
          return lastCacheState;
        }
      } while (!cacheStateHolder.compareAndSet(lastCacheState, newVersionedCache));
      updateCounter.update();
      return lastCacheState;
    }

    @Override
    public void close()
    {
      if (!doClose(true)) {
        log.error("Cache for %s has already been closed", this);
      }
      // This Cleaner.clean() call effectively just removes the Cleaner from the internal linked list of all cleaners.
      // It will delegate to closeFromCleaner() which will be a no-op because cacheStateHolder is already set to
      // ENTRY_CLOSED.
      entryCleaner.clean();
    }

    /** Close path invoked by the JVM's Cleaner when the Entry was GC'd without close(). */
    private void closeFromCleaner()
    {
      try {
        if (doClose(false)) {
          log.error("Entry.close() was not called, closed resources by the JVM");
        }
      }
      catch (Throwable t) {
        try {
          log.error(t, "Error while closing %s", this);
        }
        catch (Exception e) {
          t.addSuppressed(e);
        }
        Throwables.propagateIfInstanceOf(t, Error.class);
        // Must not throw exceptions in the cleaner thread, run by the JVM.
      }
    }

    /**
     * @param calledManually true if called manually from {@link #close()}, false if called by the JVM via Cleaner
     * @return true if successfully closed, false if has already closed before
     */
    private boolean doClose(boolean calledManually)
    {
      CacheState lastCacheState = cacheStateHolder.getAndSet(NoCache.ENTRY_CLOSED);
      if (lastCacheState != NoCache.ENTRY_CLOSED) {
        try {
          log.info("Closing %s", this);
          logExecutionError();
        }
        // Logging (above) is not the main goal of the closing process, so try to cancel the updaterFuture even if
        // logging failed for whatever reason.
        finally {
          activeEntries.decrementAndGet();
          updaterFuture.cancel(true);
          // If calledManually = false, i. e. called by the JVM via Cleaner.clean(), let the JVM close cache itself
          // via it's own Cleaner as well, when the cache becomes unreachable. Because when somebody forgets to call
          // entry.close(), it may be harmful to forcibly close the cache, which could still be used, at some
          // non-deterministic point of time. Cleaners are introduced to mitigate possible errors, not to escalate them.
          if (calledManually && lastCacheState instanceof VersionedCache) {
            ((VersionedCache) lastCacheState).cacheHandler.close();
          }
        }
        return true;
      } else {
        return false;
      }
    }

    /** Logs the updater future's failure/cancellation cause, if it already completed. */
    private void logExecutionError()
    {
      if (updaterFuture.isDone()) {
        try {
          updaterFuture.get();
        }
        catch (ExecutionException ee) {
          log.error(ee.getCause(), "Error in %s", this);
        }
        catch (CancellationException ce) {
          log.error(ce, "Future for %s has already been cancelled", this);
        }
        catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(ie);
        }
      }
    }

    @Override
    public String toString()
    {
      return asString;
    }
  }

  /** Marker interface: either {@link NoCache} or {@link VersionedCache}. */
  public interface CacheState
  {}

  /** Cache-less states: before the first successful update, or after close. */
  public enum NoCache implements CacheState
  {
    CACHE_NOT_INITIALIZED,
    ENTRY_CLOSED
  }

  /** A concrete cache snapshot tagged with its version string. */
  public final class VersionedCache implements CacheState, AutoCloseable
  {
    final String entryId;
    final CacheHandler cacheHandler;
    final String version;

    private VersionedCache(String entryId, String version)
    {
      this.entryId = entryId;
      this.cacheHandler = cacheManager.createCache();
      this.version = version;
    }

    public Map<String, String> getCache()
    {
      return cacheHandler.getCache();
    }

    public String getVersion()
    {
      return version;
    }

    @Override
    public void close()
    {
      cacheHandler.close();
      // Log statement after cacheHandler.close(), because logging may fail (e. g. in shutdown hooks)
      log.debug("Closed version [%s] of %s", version, entryId);
    }
  }

  private final Map<Class<? extends ExtractionNamespace>, CacheGenerator<?>> namespaceGeneratorMap;
  private final NamespaceExtractionCacheManager cacheManager;
  private final AtomicLong updatesStarted = new AtomicLong(0);
  private final AtomicInteger activeEntries = new AtomicInteger();

  @Inject
  public CacheScheduler(
      final ServiceEmitter serviceEmitter,
      final Map<Class<? extends ExtractionNamespace>, CacheGenerator<?>> namespaceGeneratorMap,
      NamespaceExtractionCacheManager cacheManager
  )
  {
    this.namespaceGeneratorMap = namespaceGeneratorMap;
    this.cacheManager = cacheManager;
    // Emit the number of update tasks started since the last tick, every 10 minutes.
    cacheManager.scheduledExecutorService().scheduleAtFixedRate(
        new Runnable()
        {
          long priorUpdatesStarted = 0L;

          @Override
          public void run()
          {
            try {
              final long tasks = updatesStarted.get();
              serviceEmitter.emit(
                  ServiceMetricEvent.builder()
                                    .build("namespace/deltaTasksStarted", tasks - priorUpdatesStarted)
              );
              priorUpdatesStarted = tasks;
            }
            catch (Exception e) {
              log.error(e, "Error emitting namespace stats");
              if (Thread.currentThread().isInterrupted()) {
                throw Throwables.propagate(e);
              }
            }
          }
        },
        1,
        10,
        TimeUnit.MINUTES
    );
  }

  /**
   * This method should be used from {@link CacheGenerator#generateCache} implementations, to obtain a {@link
   * VersionedCache} to be returned.
   *
   * @param entryId an object uniquely corresponding to the {@link CacheScheduler.Entry}, for which VersionedCache is
   *                created
   * @param version version, associated with the cache
   */
  public VersionedCache createVersionedCache(@Nullable EntryImpl<? extends ExtractionNamespace> entryId, String version)
  {
    updatesStarted.incrementAndGet();
    return new VersionedCache(String.valueOf(entryId), version);
  }

  @VisibleForTesting
  long updatesStarted()
  {
    return updatesStarted.get();
  }

  @VisibleForTesting
  public long getActiveEntries()
  {
    return activeEntries.get();
  }

  /**
   * Schedules the namespace and waits up to waitForFirstRunMs for its first
   * successful update; returns null (and closes the entry) on timeout.
   */
  @Nullable
  public Entry scheduleAndWait(ExtractionNamespace namespace, long waitForFirstRunMs) throws InterruptedException
  {
    final Entry entry = schedule(namespace);
    log.debug("Scheduled new %s", entry);
    boolean success = false;
    try {
      success = entry.impl.updateCounter.awaitFirstUpdate(waitForFirstRunMs, TimeUnit.MILLISECONDS);
      if (success) {
        return entry;
      } else {
        return null;
      }
    }
    finally {
      if (!success) {
        // ExecutionException's cause is logged in entry.close()
        entry.close();
        log.error("CacheScheduler[%s] - problem during start or waiting for the first run", entry);
      }
    }
  }

  /**
   * Schedules periodic updates for the given namespace.
   *
   * @throws ISE if no generator is registered for the namespace's class
   */
  public <T extends ExtractionNamespace> Entry schedule(final T namespace)
  {
    // Unchecked cast: the map is keyed by namespace class, so the generator
    // registered under namespace.getClass() is a CacheGenerator<T> by construction.
    final CacheGenerator<T> generator = (CacheGenerator<T>) namespaceGeneratorMap.get(namespace.getClass());
    if (generator == null) {
      throw new ISE("Cannot find generator for namespace [%s]", namespace);
    }
    return new Entry<>(namespace, generator);
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tez.dag.app.launcher;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.tez.dag.app.dag.DAG;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.common.TezTaskUmbilicalProtocol;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.dag.app.AppContext;
import org.apache.tez.dag.app.TaskAttemptListener;
import org.apache.tez.dag.app.rm.NMCommunicatorEvent;
import org.apache.tez.dag.app.rm.NMCommunicatorLaunchRequestEvent;
import org.apache.tez.dag.app.rm.NMCommunicatorStopRequestEvent;
import org.apache.tez.dag.app.rm.container.AMContainerEvent;
import org.apache.tez.dag.app.rm.container.AMContainerEventCompleted;
import org.apache.tez.dag.app.rm.container.AMContainerEventLaunchFailed;
import org.apache.tez.dag.app.rm.container.AMContainerEventLaunched;
import org.apache.tez.dag.app.rm.container.AMContainerEventType;
import org.apache.tez.dag.history.DAGHistoryEvent;
import org.apache.tez.dag.history.events.ContainerLaunchedEvent;
import org.apache.tez.dag.records.TaskAttemptTerminationCause;
import org.apache.tez.runtime.api.ExecutionContext;
import org.apache.tez.runtime.api.impl.ExecutionContextImpl;
import org.apache.tez.runtime.library.common.shuffle.ShuffleUtils;
import org.apache.tez.runtime.task.TezChild;

/**
 * Runs the container task locally in a thread.
 * Since all (sub)tasks share the same local directory, they must be executed
 * sequentially in order to avoid creating/deleting the same files/dirs.
 *
 * <p>Structure: {@link #handle(NMCommunicatorEvent)} enqueues launch/stop
 * requests onto {@code eventQueue}; a dedicated {@code TezSubTaskRunner}
 * thread drains the queue and dispatches to {@link #launch} / {@link #stop};
 * each launched task runs as a {@link TezChild} on {@code taskExecutorService},
 * with completion reported back to the AM via {@code RunningTaskCallback}.
 */
public class LocalContainerLauncher extends AbstractService implements
    ContainerLauncher {

  private static final Logger LOG = LoggerFactory.getLogger(LocalContainerLauncher.class);

  private final AppContext context;
  private final TaskAttemptListener taskAttemptListener;
  // Flipped exactly once by serviceStop(); also used to distinguish an
  // expected shutdown interrupt from an unexpected one in TezSubTaskRunner.
  private final AtomicBoolean serviceStopped = new AtomicBoolean(false);
  private final String workingDirectory;
  private final Map<String, String> localEnv = new HashMap<String, String>();
  private final ExecutionContext executionContext;
  // Number of in-process task execution threads; assigned in serviceInit().
  private int numExecutors;

  // Callbacks for containers whose task is currently executing; entries are
  // removed by RunningTaskCallback.onSuccess/onFailure.
  private final ConcurrentHashMap<ContainerId, RunningTaskCallback>
      runningContainers = new ConcurrentHashMap<ContainerId, RunningTaskCallback>();

  // Single daemon thread on which task-completion callbacks are delivered.
  private final ExecutorService callbackExecutor = Executors.newFixedThreadPool(1,
      new ThreadFactoryBuilder().setDaemon(true).setNameFormat("CallbackExecutor").build());

  // Incoming NMCommunicator events, consumed by the TezSubTaskRunner thread.
  private BlockingQueue<NMCommunicatorEvent> eventQueue =
      new LinkedBlockingQueue<NMCommunicatorEvent>();

  private Thread eventHandlingThread;
  // Decorated executor so task submissions return ListenableFutures.
  private ListeningExecutorService taskExecutorService;

  /**
   * Creates the launcher and pre-populates the local environment with
   * shuffle-handler service data (an int 0 payload) so tasks can resolve it
   * the same way they would against a real NodeManager.
   *
   * @throws UnknownHostException if the local hostname cannot be resolved
   *         while building the ExecutionContext.
   */
  public LocalContainerLauncher(AppContext context,
                                TaskAttemptListener taskAttemptListener,
                                String workingDirectory) throws UnknownHostException {
    super(LocalContainerLauncher.class.getName());
    this.context = context;
    this.taskAttemptListener = taskAttemptListener;
    this.workingDirectory = workingDirectory;
    AuxiliaryServiceHelper.setServiceDataIntoEnv(
        ShuffleUtils.SHUFFLE_HANDLER_SERVICE_ID, ByteBuffer.allocate(4).putInt(0), localEnv);
    executionContext = new ExecutionContextImpl(InetAddress.getLocalHost().getHostName());
    // User cannot be set here since it isn't available till a DAG is running.
  }

  // Reads the max-concurrent-inline-tasks config and builds the task executor.
  // NOTE(review): does not invoke super.serviceInit(conf) — confirm this is
  // intentional with respect to the AbstractService lifecycle.
  @Override
  public synchronized void serviceInit(Configuration conf) {
    numExecutors = conf.getInt(TezConfiguration.TEZ_AM_INLINE_TASK_EXECUTION_MAX_TASKS,
        TezConfiguration.TEZ_AM_INLINE_TASK_EXECUTION_MAX_TASKS_DEFAULT);
    Preconditions.checkState(numExecutors >=1, "Must have at least 1 executor");
    ExecutorService rawExecutor = Executors.newFixedThreadPool(numExecutors,
        new ThreadFactoryBuilder().setDaemon(true)
            .setNameFormat("LocalTaskExecutionThread #%d")
            .build());
    this.taskExecutorService = MoreExecutors.listeningDecorator(rawExecutor);
  }

  // Starts the single event-dispatch thread.
  @Override
  public void serviceStart() throws Exception {
    eventHandlingThread =
        new Thread(new TezSubTaskRunner(), "LocalContainerLauncher-SubTaskRunner");
    eventHandlingThread.start();
  }

  /**
   * Idempotent stop: interrupts the event thread (waiting up to 2s for it to
   * exit) and force-shuts both executors.
   */
  @Override
  public void serviceStop() throws Exception {
    if (!serviceStopped.compareAndSet(false, true)) {
      LOG.info("Service Already stopped. Ignoring additional stop");
      return;
    }
    if (eventHandlingThread != null) {
      eventHandlingThread.interrupt();
      eventHandlingThread.join(2000l);
    }
    if (taskExecutorService != null) {
      taskExecutorService.shutdownNow();
    }
    callbackExecutor.shutdownNow();
  }

  // No-op: nothing to clean up per-DAG in local mode.
  @Override
  public void dagComplete(DAG dag) {
  }

  // No-op.
  @Override
  public void dagSubmitted() {
  }

  // Thread to monitor the queue of incoming NMCommunicator events
  private class TezSubTaskRunner implements Runnable {
    @Override
    public void run() {
      while (!Thread.currentThread().isInterrupted() && !serviceStopped.get()) {
        NMCommunicatorEvent event;
        try {
          // Blocks until an event arrives or the thread is interrupted.
          event = eventQueue.take();
          switch (event.getType()) {
          case CONTAINER_LAUNCH_REQUEST:
            launch((NMCommunicatorLaunchRequestEvent) event);
            break;
          case CONTAINER_STOP_REQUEST:
            stop((NMCommunicatorStopRequestEvent)event);
            break;
          }
        } catch (InterruptedException e) {
          // Interruption during serviceStop() is expected; only log otherwise.
          if (!serviceStopped.get()) {
            LOG.error("TezSubTaskRunner interrupted ", e);
          }
          return;
        } catch (Throwable e) {
          // Rethrow so the failure is not silently swallowed by this thread.
          LOG.error("TezSubTaskRunner failed due to exception", e);
          throw e;
        }
      }
    }
  }

  // Notifies the AM that the container could not be launched.
  @SuppressWarnings("unchecked")
  void sendContainerLaunchFailedMsg(ContainerId containerId, String message) {
    context.getEventHandler().handle(new AMContainerEventLaunchFailed(containerId, message));
  }

  // Logs the failure (distinguishing queue-rejection from other causes) and
  // reports it to the AM.
  private void handleLaunchFailed(Throwable t, ContainerId containerId) {
    String message;
    if (t instanceof RejectedExecutionException) {
      message = "Failed to queue container launch for container Id: " + containerId;
    } else {
      message = "Failed to launch container for container Id: " + containerId;
    }
    LOG.error(message, t);
    sendContainerLaunchFailedMsg(containerId, message);
  }

  //launch tasks
  // Builds a TezChild for the container, submits it to the task executor, and
  // registers a RunningTaskCallback to relay the result. Any failure along the
  // way is converted into a launch-failed message to the AM.
  private void launch(NMCommunicatorLaunchRequestEvent event) {
    String tokenIdentifier = context.getApplicationID().toString();
    try {
      TezChild tezChild;
      try {
        tezChild =
            createTezChild(context.getAMConf(), event.getContainerId(), tokenIdentifier,
                context.getApplicationAttemptId().getAttemptId(), context.getLocalDirs(),
                (TezTaskUmbilicalProtocol) taskAttemptListener,
                TezCommonUtils.parseCredentialsBytes(
                    event.getContainerLaunchContext().getTokens().array()));
      } catch (InterruptedException e) {
        handleLaunchFailed(e, event.getContainerId());
        return;
      } catch (TezException e) {
        handleLaunchFailed(e, event.getContainerId());
        return;
      } catch (IOException e) {
        handleLaunchFailed(e, event.getContainerId());
        return;
      }
      ListenableFuture<TezChild.ContainerExecutionResult> runningTaskFuture =
          taskExecutorService.submit(createSubTask(tezChild, event.getContainerId()));
      RunningTaskCallback callback =
          new RunningTaskCallback(context, event.getContainerId());
      // Register before wiring the callback so the entry exists when the
      // callback fires and removes it.
      runningContainers.put(event.getContainerId(), callback);
      Futures.addCallback(runningTaskFuture, callback, callbackExecutor);
    } catch (RejectedExecutionException e) {
      handleLaunchFailed(e, event.getContainerId());
    }
  }

  private void stop(NMCommunicatorStopRequestEvent event) {
    // A stop_request will come in when a task completes and reports back or a
    // preemption decision is made. Currently the LocalTaskScheduler does not
    // support preemption. Also preemption will not work in local mode till Tez
    // supports task preemption instead of container preemption.
    RunningTaskCallback callback = runningContainers.get(event.getContainerId());
    if (callback == null) {
      LOG.info("Ignoring stop request for containerId: " + event.getContainerId());
    } else {
      LOG.info(
          "Ignoring stop request for containerId {}. Relying on regular task shutdown for it to end",
          event.getContainerId());
      // Allow the tezChild thread to run it's course. It'll receive a shutdown
      // request from the AM eventually since the task and container will be
      // unregistered.
      // This will need to be fixed once interrupting tasks is supported.
    }
    // Send this event to maintain regular control flow. This isn't of much use though.
    context.getEventHandler().handle(
        new AMContainerEvent(event.getContainerId(), AMContainerEventType.C_NM_STOP_SENT));
  }

  /**
   * Translates the TezChild execution outcome into an AMContainerEventCompleted
   * for the AM, and removes the container from the running set.
   */
  private class RunningTaskCallback
      implements FutureCallback<TezChild.ContainerExecutionResult> {

    private final AppContext appContext;
    private final ContainerId containerId;

    RunningTaskCallback(AppContext appContext, ContainerId containerId) {
      this.appContext = appContext;
      this.containerId = containerId;
    }

    @Override
    public void onSuccess(TezChild.ContainerExecutionResult result) {
      runningContainers.remove(containerId);
      LOG.info("ContainerExecutionResult for: " + containerId + " = " + result);
      if (result.getExitStatus() == TezChild.ContainerExecutionResult.ExitStatus.SUCCESS
          || result.getExitStatus() ==
          TezChild.ContainerExecutionResult.ExitStatus.ASKED_TO_DIE) {
        LOG.info("Container: " + containerId + " completed successfully");
        appContext.getEventHandler().handle(
            new AMContainerEventCompleted(containerId, result.getExitStatus().getExitCode(),
                null, TaskAttemptTerminationCause.CONTAINER_EXITED));
      } else {
        LOG.info("Container: " + containerId + " completed but with errors");
        appContext.getEventHandler().handle(
            new AMContainerEventCompleted(containerId, result.getExitStatus().getExitCode(),
                // Prefer the explicit error message; fall back to the
                // throwable's message, or null if neither is available.
                result.getErrorMessage() == null ?
                    (result.getThrowable() == null ? null : result.getThrowable().getMessage()) :
                    result.getErrorMessage(),
                TaskAttemptTerminationCause.APPLICATION_ERROR));
      }
    }

    @Override
    public void onFailure(Throwable t) {
      runningContainers.remove(containerId);
      // Ignore CancellationException since that is triggered by the LocalContainerLauncher itself
      // TezChild would have exited by this time. There's no need to invoke shutdown again.
      if (!(t instanceof CancellationException)) {
        LOG.info("Container: " + containerId + ": Execution Failed: ", t);
        // Inform of failure with exit code 1.
        appContext.getEventHandler()
            .handle(new AMContainerEventCompleted(containerId,
                TezChild.ContainerExecutionResult.ExitStatus.EXECUTION_FAILURE.getExitCode(),
                t.getMessage(), TaskAttemptTerminationCause.APPLICATION_ERROR));
      } else {
        LOG.info("Ignoring CancellationException - triggered by LocalContainerLauncher");
        appContext.getEventHandler()
            .handle(new AMContainerEventCompleted(containerId,
                TezChild.ContainerExecutionResult.ExitStatus.SUCCESS.getExitCode(),
                "CancellationException", TaskAttemptTerminationCause.CONTAINER_EXITED));
      }
    }
  }

  //create a SubTask
  // Wraps the TezChild run in a Callable that first announces the launch to
  // the AM and records a ContainerLaunchedEvent in DAG history.
  private synchronized Callable<TezChild.ContainerExecutionResult> createSubTask(
      final TezChild tezChild, final ContainerId containerId) {
    return new Callable<TezChild.ContainerExecutionResult>() {
      @Override
      public TezChild.ContainerExecutionResult call() throws InterruptedException, TezException,
          IOException {
        // Reset the interrupt status. Ideally the thread should not be in an interrupted state.
        // TezTaskRunner needs to be fixed to ensure this.
        Thread.interrupted();
        // Inform about the launch request now that the container has been allocated a thread to execute in.
        context.getEventHandler().handle(new AMContainerEventLaunched(containerId));
        ContainerLaunchedEvent lEvt =
            new ContainerLaunchedEvent(containerId, context.getClock().getTime(),
                context.getApplicationAttemptId());
        context.getHistoryHandler().handle(
            new DAGHistoryEvent(context.getCurrentDAGID(), lEvt));
        return tezChild.run();
      }
    };
  }

  /**
   * Builds an in-process TezChild with the launcher's environment plus the
   * current user, giving each executor an equal share of the JVM's max heap.
   */
  private TezChild createTezChild(Configuration defaultConf, ContainerId containerId,
      String tokenIdentifier, int attemptNumber, String[] localDirs,
      TezTaskUmbilicalProtocol tezTaskUmbilicalProtocol, Credentials credentials)
      throws InterruptedException, TezException, IOException {
    Map<String, String> containerEnv = new HashMap<String, String>();
    containerEnv.putAll(localEnv);
    containerEnv.put(Environment.USER.name(), context.getUser());

    long memAvailable;
    synchronized (this) { // needed to fix findbugs Inconsistent synchronization warning
      memAvailable = Runtime.getRuntime().maxMemory() / numExecutors;
    }
    TezChild tezChild =
        TezChild.newTezChild(defaultConf, null, 0, containerId.toString(), tokenIdentifier,
            attemptNumber, localDirs, workingDirectory, containerEnv, "", executionContext,
            credentials, memAvailable, context.getUser());
    tezChild.setUmbilical(tezTaskUmbilicalProtocol);
    return tezChild;
  }

  // Entry point for NMCommunicator events: enqueue for the dispatch thread.
  @Override
  public void handle(NMCommunicatorEvent event) {
    try {
      eventQueue.put(event);
    } catch (InterruptedException e) {
      throw new TezUncheckedException(e);
    }
  }
}
/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.aop.aspectj.autoproxy;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Method;
import java.util.function.Supplier;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import org.springframework.aop.MethodBeforeAdvice;
import org.springframework.aop.aspectj.annotation.AnnotationAwareAspectJAutoProxyCreator;
import org.springframework.aop.aspectj.annotation.AspectMetadata;
import org.springframework.aop.config.AopConfigUtils;
import org.springframework.aop.framework.ProxyConfig;
import org.springframework.aop.support.AopUtils;
import org.springframework.aop.support.StaticMethodMatcherPointcutAdvisor;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.config.MethodInvokingFactoryBean;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.testfixture.beans.ITestBean;
import org.springframework.beans.testfixture.beans.TestBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.NestedRuntimeException;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.lang.Nullable;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for AspectJ auto-proxying. Includes mixing with Spring AOP Advisors
 * to demonstrate that existing autoproxying contract is honoured.
 *
 * <p>XML-driven tests load a context file named
 * {@code AspectJAutoProxyCreatorTests-<suffix>} (see {@link #qName}); the
 * aspects asserted against are the package-private classes declared at the
 * bottom of this file.
 *
 * @author Rod Johnson
 * @author Juergen Hoeller
 * @author Chris Beams
 * @author Sam Brannen
 */
public class AspectJAutoProxyCreatorTests {

	@Test
	public void testAspectsAreApplied() {
		ClassPathXmlApplicationContext bf = newContext("aspects.xml");

		ITestBean tb = (ITestBean) bf.getBean("adrian");
		assertThat(tb.getAge()).isEqualTo(68);
		// The FactoryBean's target object must itself be advised, not just the
		// bean the factory exposes.
		MethodInvokingFactoryBean factoryBean = (MethodInvokingFactoryBean) bf.getBean("&factoryBean");
		assertThat(AopUtils.isAopProxy(factoryBean.getTargetObject())).isTrue();
		assertThat(((ITestBean) factoryBean.getTargetObject()).getAge()).isEqualTo(68);
	}

	@Test
	public void testMultipleAspectsWithParameterApplied() {
		ClassPathXmlApplicationContext bf = newContext("aspects.xml");

		ITestBean tb = (ITestBean) bf.getBean("adrian");
		tb.setAge(10);
		assertThat(tb.getAge()).isEqualTo(20);
	}

	@Test
	public void testAspectsAreAppliedInDefinedOrder() {
		ClassPathXmlApplicationContext bf = newContext("aspectsWithOrdering.xml");

		ITestBean tb = (ITestBean) bf.getBean("adrian");
		assertThat(tb.getAge()).isEqualTo(71);
	}

	@Test
	public void testAspectsAndAdvisorAreApplied() {
		ClassPathXmlApplicationContext ac = newContext("aspectsPlusAdvisor.xml");

		ITestBean shouldBeWeaved = (ITestBean) ac.getBean("adrian");
		doTestAspectsAndAdvisorAreApplied(ac, shouldBeWeaved);
	}

	@Test
	public void testAspectsAndAdvisorAreAppliedEvenIfComingFromParentFactory() {
		ClassPathXmlApplicationContext ac = newContext("aspectsPlusAdvisor.xml");

		GenericApplicationContext childAc = new GenericApplicationContext(ac);
		// Create a child factory with a bean that should be woven
		RootBeanDefinition bd = new RootBeanDefinition(TestBean.class);
		bd.getPropertyValues().addPropertyValue(new PropertyValue("name", "Adrian"))
				.addPropertyValue(new PropertyValue("age", 34));
		childAc.registerBeanDefinition("adrian2", bd);
		// Register the advisor auto proxy creator with subclass
		childAc.registerBeanDefinition(AnnotationAwareAspectJAutoProxyCreator.class.getName(),
				new RootBeanDefinition(AnnotationAwareAspectJAutoProxyCreator.class));
		childAc.refresh();

		ITestBean beanFromChildContextThatShouldBeWeaved = (ITestBean) childAc.getBean("adrian2");
		//testAspectsAndAdvisorAreApplied(childAc, (ITestBean) ac.getBean("adrian"));
		doTestAspectsAndAdvisorAreApplied(childAc, beanFromChildContextThatShouldBeWeaved);
	}

	/**
	 * Shared assertions: the bean must be proxied, the MultiplyReturnValue
	 * aspect (multiple=3) must transform getAge(), and the Spring
	 * TestBeanAdvisor must fire for each setter/getter pair.
	 */
	protected void doTestAspectsAndAdvisorAreApplied(ApplicationContext ac, ITestBean shouldBeWeaved) {
		TestBeanAdvisor tba = (TestBeanAdvisor) ac.getBean("advisor");

		MultiplyReturnValue mrv = (MultiplyReturnValue) ac.getBean("aspect");
		assertThat(mrv.getMultiple()).isEqualTo(3);

		tba.count = 0;
		mrv.invocations = 0;

		assertThat(AopUtils.isAopProxy(shouldBeWeaved)).as("Autoproxying must apply from @AspectJ aspect").isTrue();
		assertThat(shouldBeWeaved.getName()).isEqualTo("Adrian");
		assertThat(mrv.invocations).isEqualTo(0);
		assertThat(shouldBeWeaved.getAge()).isEqualTo((34 * mrv.getMultiple()));

		// Spring advisor must be invoked: getName() and getAge() -> count == 2.
		assertThat(tba.count).as("Spring advisor must be invoked").isEqualTo(2);
		assertThat(mrv.invocations).as("Must be able to hold state in aspect").isEqualTo(1);
	}

	@Test
	public void testPerThisAspect() {
		ClassPathXmlApplicationContext bf = newContext("perthis.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		assertThat(AopUtils.isAopProxy(adrian1)).isTrue();

		// Each proxy gets its own aspect instance, so the counter starts at 0.
		assertThat(adrian1.getAge()).isEqualTo(0);
		assertThat(adrian1.getAge()).isEqualTo(1);

		ITestBean adrian2 = (ITestBean) bf.getBean("adrian");
		assertThat(adrian2).isNotSameAs(adrian1);
		assertThat(AopUtils.isAopProxy(adrian1)).isTrue();
		assertThat(adrian2.getAge()).isEqualTo(0);
		assertThat(adrian2.getAge()).isEqualTo(1);
		assertThat(adrian2.getAge()).isEqualTo(2);
		assertThat(adrian2.getAge()).isEqualTo(3);
		// adrian1's counter is independent of adrian2's.
		assertThat(adrian1.getAge()).isEqualTo(2);
	}

	@Test
	public void testPerTargetAspect() throws SecurityException, NoSuchMethodException {
		ClassPathXmlApplicationContext bf = newContext("pertarget.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		assertThat(AopUtils.isAopProxy(adrian1)).isTrue();

		// Does not trigger advice or count
		int explicitlySetAge = 25;
		adrian1.setAge(explicitlySetAge);

		assertThat(adrian1.getAge()).as("Setter does not initiate advice").isEqualTo(explicitlySetAge);

		// Fire aspect
		AspectMetadata am = new AspectMetadata(PerTargetAspect.class, "someBean");
		assertThat(am.getPerClausePointcut().getMethodMatcher().matches(TestBean.class.getMethod("getSpouse"), null)).isTrue();

		adrian1.getSpouse();

		assertThat(adrian1.getAge()).as("Advice has now been instantiated").isEqualTo(0);
		adrian1.setAge(11);
		assertThat(adrian1.getAge()).as("Any int setter increments").isEqualTo(2);
		adrian1.setName("Adrian");
		//assertEquals("Any other setter does not increment", 2, adrian1.getAge());

		ITestBean adrian2 = (ITestBean) bf.getBean("adrian");
		assertThat(adrian2).isNotSameAs(adrian1);
		assertThat(AopUtils.isAopProxy(adrian1)).isTrue();
		// The second target's aspect is not instantiated yet, so getAge() still
		// returns the real value until getSpouse() fires the per clause.
		assertThat(adrian2.getAge()).isEqualTo(34);
		adrian2.getSpouse();
		assertThat(adrian2.getAge()).as("Aspect now fired").isEqualTo(0);
		assertThat(adrian2.getAge()).isEqualTo(1);
		assertThat(adrian2.getAge()).isEqualTo(2);
		assertThat(adrian1.getAge()).isEqualTo(3);
	}

	@Test
	public void testTwoAdviceAspect() {
		ClassPathXmlApplicationContext bf = newContext("twoAdviceAspect.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		testAgeAspect(adrian1, 0, 2);
	}

	@Test
	public void testTwoAdviceAspectSingleton() {
		ClassPathXmlApplicationContext bf = newContext("twoAdviceAspectSingleton.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		testAgeAspect(adrian1, 0, 1);
		ITestBean adrian2 = (ITestBean) bf.getBean("adrian");
		assertThat(adrian2).isNotSameAs(adrian1);
		// Singleton aspect: state carries over to the second proxy.
		testAgeAspect(adrian2, 2, 1);
	}

	@Test
	public void testTwoAdviceAspectPrototype() {
		ClassPathXmlApplicationContext bf = newContext("twoAdviceAspectPrototype.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		testAgeAspect(adrian1, 0, 1);
		ITestBean adrian2 = (ITestBean) bf.getBean("adrian");
		assertThat(adrian2).isNotSameAs(adrian1);
		// Prototype aspect: each proxy starts from a fresh counter.
		testAgeAspect(adrian2, 0, 1);
	}

	// Asserts the counter-as-age behaviour: age() returns `start`, and each
	// int setter call advances it by `increment`.
	private void testAgeAspect(ITestBean adrian, int start, int increment) {
		assertThat(AopUtils.isAopProxy(adrian)).isTrue();
		adrian.setName("");
		assertThat(adrian.age()).isEqualTo(start);
		int newAge = 32;
		adrian.setAge(newAge);
		assertThat(adrian.age()).isEqualTo((start + increment));
		adrian.setAge(0);
		assertThat(adrian.age()).isEqualTo((start + increment * 2));
	}

	@Test
	public void testAdviceUsingJoinPoint() {
		ClassPathXmlApplicationContext bf = newContext("usesJoinPointAspect.xml");

		ITestBean adrian1 = (ITestBean) bf.getBean("adrian");
		adrian1.getAge();
		AdviceUsingThisJoinPoint aspectInstance = (AdviceUsingThisJoinPoint) bf.getBean("aspect");
		//(AdviceUsingThisJoinPoint) Aspects.aspectOf(AdviceUsingThisJoinPoint.class);
		//assertEquals("method-execution(int TestBean.getAge())",aspectInstance.getLastMethodEntered());
		// NOTE(review): 'indexOf(...) != 0' is also true when the substring is
		// absent (indexOf returns -1); '!= -1' may have been intended — confirm.
		assertThat(aspectInstance.getLastMethodEntered().indexOf("TestBean.getAge())") != 0).isTrue();
	}

	@Test
	public void testIncludeMechanism() {
		ClassPathXmlApplicationContext bf = newContext("usesInclude.xml");

		ITestBean adrian = (ITestBean) bf.getBean("adrian");
		assertThat(AopUtils.isAopProxy(adrian)).isTrue();
		assertThat(adrian.getAge()).isEqualTo(68);
	}

	@Test
	public void testForceProxyTargetClass() {
		ClassPathXmlApplicationContext bf = newContext("aspectsWithCGLIB.xml");

		ProxyConfig pc = (ProxyConfig) bf.getBean(AopConfigUtils.AUTO_PROXY_CREATOR_BEAN_NAME);
		assertThat(pc.isProxyTargetClass()).as("should be proxying classes").isTrue();
		assertThat(pc.isExposeProxy()).as("should expose proxy").isTrue();
	}

	@Test
	public void testWithAbstractFactoryBeanAreApplied() {
		ClassPathXmlApplicationContext bf = newContext("aspectsWithAbstractBean.xml");

		ITestBean adrian = (ITestBean) bf.getBean("adrian");
		assertThat(AopUtils.isAopProxy(adrian)).isTrue();
		assertThat(adrian.getAge()).isEqualTo(68);
	}

	@Test
	public void testRetryAspect() {
		ClassPathXmlApplicationContext bf = newContext("retryAspect.xml");

		UnreliableBean bean = (UnreliableBean) bf.getBean("unreliableBean");
		RetryAspect aspect = (RetryAspect) bf.getBean("retryAspect");
		// UnreliableBean throws on odd-numbered calls; the aspect retries once.
		int attempts = bean.unreliable();
		assertThat(attempts).isEqualTo(2);
		assertThat(aspect.getBeginCalls()).isEqualTo(2);
		assertThat(aspect.getRollbackCalls()).isEqualTo(1);
		assertThat(aspect.getCommitCalls()).isEqualTo(1);
	}

	@Test
	public void testWithBeanNameAutoProxyCreator() {
		ClassPathXmlApplicationContext bf = newContext("withBeanNameAutoProxyCreator.xml");

		ITestBean tb = (ITestBean) bf.getBean("adrian");
		assertThat(tb.getAge()).isEqualTo(68);
	}

	// Regardless of proxyTargetClass, a lambda-typed bean must be proxied with
	// a JDK dynamic proxy (lambdas cannot be subclassed by CGLIB).
	@ParameterizedTest(name = "[{index}] {0}")
	@ValueSource(classes = {ProxyTargetClassFalseConfig.class, ProxyTargetClassTrueConfig.class})
	void lambdaIsAlwaysProxiedWithJdkProxy(Class<?> configClass) {
		try (ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(configClass)) {
			Supplier<?> supplier = context.getBean(Supplier.class);
			assertThat(AopUtils.isAopProxy(supplier)).as("AOP proxy").isTrue();
			assertThat(AopUtils.isJdkDynamicProxy(supplier)).as("JDK Dynamic proxy").isTrue();
			assertThat(supplier.get()).asString().isEqualTo("advised: lambda");
		}
	}

	/**
	 * Returns a new {@link ClassPathXmlApplicationContext} for the file ending in <var>fileSuffix</var>.
	 */
	private ClassPathXmlApplicationContext newContext(String fileSuffix) {
		return new ClassPathXmlApplicationContext(qName(fileSuffix), getClass());
	}

	/**
	 * Returns the relatively qualified name for <var>fileSuffix</var>.
	 * e.g. for a fileSuffix='foo.xml', this method will return
	 * 'AspectJAutoProxyCreatorTests-foo.xml'
	 */
	private String qName(String fileSuffix) {
		return String.format("%s-%s", getClass().getSimpleName(), fileSuffix);
	}

}

/**
 * Per-target aspect (one aspect instance per advised target, instantiated when
 * getSpouse() executes): getAge() returns and increments a counter; any int
 * setter also increments it.
 */
@Aspect("pertarget(execution(* *.getSpouse()))")
class PerTargetAspect implements Ordered {

	public int count;

	private int order = Ordered.LOWEST_PRECEDENCE;

	@Around("execution(int *.getAge())")
	public int returnCountAsAge() {
		return count++;
	}

	@Before("execution(void *.set*(int))")
	public void countSetter() {
		++count;
	}

	@Override
	public int getOrder() {
		return this.order;
	}

	public void setOrder(int order) {
		this.order = order;
	}
}

/** Records the last JoinPoint entered so tests can inspect it. */
@Aspect
class AdviceUsingThisJoinPoint {

	private String lastEntry = "";

	public String getLastMethodEntered() {
		return this.lastEntry;
	}

	@Pointcut("execution(* *(..))")
	public void methodExecution() {
	}

	@Before("methodExecution()")
	public void entryTrace(JoinPoint jp) {
		this.lastEntry = jp.toString();
	}
}

/** Pass-through around advice on setAge(int); no behavioural change. */
@Aspect
class DummyAspect {

	@Around("execution(* setAge(int))")
	public Object test(ProceedingJoinPoint pjp) throws Throwable {
		return pjp.proceed();
	}
}

/** Pass-through around advice binding the setAge(int) argument. */
@Aspect
class DummyAspectWithParameter {

	@Around("execution(* setAge(int)) && args(age)")
	public Object test(ProceedingJoinPoint pjp, int age) throws Throwable {
		return pjp.proceed();
	}
}

/** FactoryBean stub whose methods must never be invoked by the tests. */
class DummyFactoryBean implements FactoryBean<Object> {

	@Override
	public Object getObject() {
		throw new UnsupportedOperationException();
	}

	@Override
	public Class<?> getObjectType() {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean isSingleton() {
		throw new UnsupportedOperationException();
	}
}

/** Adds 3 to every getAge() return value; ordered at precedence 10. */
@Aspect
@Order(10)
class IncreaseReturnValue {

	@Around("execution(int *.getAge())")
	public Object doubleReturnValue(ProceedingJoinPoint pjp) throws Throwable {
		int result = (Integer) pjp.proceed();
		return result + 3;
	}
}

/** Multiplies every getAge() return value and counts invocations. */
@Aspect
class MultiplyReturnValue {

	private int multiple = 2;

	public int invocations;

	public void setMultiple(int multiple) {
		this.multiple = multiple;
	}

	public int getMultiple() {
		return this.multiple;
	}

	@Around("execution(int *.getAge())")
	public Object doubleReturnValue(ProceedingJoinPoint pjp) throws Throwable {
		++this.invocations;
		int result = (Integer) pjp.proceed();
		return result * this.multiple;
	}
}

@Retention(RetentionPolicy.RUNTIME)
@interface Marker {
}

/** Same as MultiplyReturnValue, but matched via the @Marker annotation. */
@Aspect
class MultiplyReturnValueForMarker {

	private int multiple = 2;

	public int invocations;

	public void setMultiple(int multiple) {
		this.multiple = multiple;
	}

	public int getMultiple() {
		return this.multiple;
	}

	@Around("@annotation(org.springframework.aop.aspectj.autoproxy.Marker)")
	public Object doubleReturnValue(ProceedingJoinPoint pjp) throws Throwable {
		++this.invocations;
		int result = (Integer) pjp.proceed();
		return result * this.multiple;
	}
}

interface IMarkerTestBean extends ITestBean {

	@Marker
	@Override
	int getAge();
}

class MarkerTestBean extends TestBean implements IMarkerTestBean {

	@Marker
	@Override
	public int getAge() {
		return super.getAge();
	}
}

/**
 * Simulates begin/commit/rollback around public UnreliableBean methods,
 * retrying once per RetryableException.
 */
@Aspect
class RetryAspect {

	private int beginCalls;

	private int commitCalls;

	private int rollbackCalls;

	@Pointcut("execution(public * UnreliableBean.*(..))")
	public void execOfPublicMethod() {
	}

	/**
	 * Retry Advice
	 */
	@Around("execOfPublicMethod()")
	public Object retry(ProceedingJoinPoint jp) throws Throwable {
		boolean retry = true;
		Object o = null;
		while (retry) {
			try {
				retry = false;
				this.beginCalls++;
				try {
					o = jp.proceed();
					this.commitCalls++;
				}
				catch (RetryableException re) {
					this.rollbackCalls++;
					throw re;
				}
			}
			catch (RetryableException re) {
				retry = true;
			}
		}
		return o;
	}

	public int getBeginCalls() {
		return this.beginCalls;
	}

	public int getCommitCalls() {
		return this.commitCalls;
	}

	public int getRollbackCalls() {
		return this.rollbackCalls;
	}
}

@SuppressWarnings("serial")
class RetryableException extends NestedRuntimeException {

	public RetryableException(String msg) {
		super(msg);
	}

	public RetryableException(String msg, Throwable cause) {
		super(msg, cause);
	}
}

/** Throws RetryableException on every odd-numbered call. */
class UnreliableBean {

	private int calls;

	public int unreliable() {
		this.calls++;
		if (this.calls % 2 != 0) {
			throw new RetryableException("foo");
		}
		return this.calls;
	}
}

/** Counts before-advice invocations on ITestBean methods. */
@SuppressWarnings("serial")
class TestBeanAdvisor extends StaticMethodMatcherPointcutAdvisor {

	public int count;

	public TestBeanAdvisor() {
		setAdvice((MethodBeforeAdvice) (method, args, target) -> ++count);
	}

	@Override
	public boolean matches(Method method, @Nullable Class<?> targetClass) {
		return ITestBean.class.isAssignableFrom(targetClass);
	}
}

/** Shared config: a lambda Supplier bean plus an aspect that advises it. */
abstract class AbstractProxyTargetClassConfig {

	@Bean
	Supplier<String> stringSupplier() {
		return () -> "lambda";
	}

	@Bean
	SupplierAdvice supplierAdvice() {
		return new SupplierAdvice();
	}

	@Aspect
	static class SupplierAdvice {

		@Around("execution(public * org.springframework.aop.aspectj.autoproxy..*.*(..))")
		Object aroundSupplier(ProceedingJoinPoint joinPoint) throws Throwable {
			return "advised: " + joinPoint.proceed();
		}
	}
}

@Configuration(proxyBeanMethods = false)
@EnableAspectJAutoProxy(proxyTargetClass = false)
class ProxyTargetClassFalseConfig extends AbstractProxyTargetClassConfig {
}

@Configuration(proxyBeanMethods = false)
@EnableAspectJAutoProxy(proxyTargetClass = true)
class ProxyTargetClassTrueConfig extends AbstractProxyTargetClassConfig {
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/* -------------------------------- Package --------------------------------- */

package com.github.jessemull.microflex.stat.statinteger;

/* ------------------------------ Dependencies ------------------------------ */

import static org.junit.Assert.*;

import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.commons.math3.util.Precision;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;

import com.github.jessemull.microflex.integerflex.plate.PlateInteger;
import com.github.jessemull.microflex.integerflex.plate.WellInteger;
import com.github.jessemull.microflex.integerflex.plate.WellSetInteger;
import com.github.jessemull.microflex.integerflex.stat.KurtosisInteger;
import com.github.jessemull.microflex.util.RandomUtil;

/**
 * This class tests the methods in the kurtosis integer class. Each test
 * computes the expected kurtosis independently with Commons Math
 * {@link DescriptiveStatistics} and compares it, rounded to a fixed
 * precision, against the value returned by {@link KurtosisInteger}.
 *
 * @author Jesse L. Mull
 * @update Updated Oct 18, 2016
 * @address http://www.jessemull.com
 * @email hello@jessemull.com
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class KurtosisIntegerTest {

    /* ---------------------------- Local Fields -----------------------------*/

    /* Minimum and maximum values for random well and lists */

    private static int minValue = 0;             // Minimum integer value for wells
    private static int maxValue = 100;           // Maximum integer value for wells
    private static Random random = new Random(); // Generates random integers
    private static int precision = 10;           // Precision for double results

    /* The kurtosis operation under test */

    private static KurtosisInteger kurtosis = new KurtosisInteger();

    /* Random objects and numbers for testing */

    private static int rows = 5;
    private static int columns = 4;
    private static int length = 5;
    private static int lengthIndices = 10;
    private static int plateNumber = 10;
    private static int plateNumberIndices = 5;
    private static PlateInteger[] array = new PlateInteger[plateNumber];
    private static PlateInteger[] arrayIndices = new PlateInteger[plateNumberIndices];

    /* A value of true redirects System.err while the suite runs */

    private static boolean error = true;
    // Capture the ORIGINAL stderr so it can be restored afterwards.
    // (Previously this field captured System.out and was then installed as
    // System.err in the @AfterClass hook, permanently aliasing stderr to
    // stdout for the rest of the JVM run.)
    private static PrintStream originalErr = System.err;

    /**
     * Generates random objects and numbers for testing.
     */
    @BeforeClass
    public static void setUp() {

        if(error) {
            // Swallow stderr output produced by the code under test.
            System.setErr(new PrintStream(new OutputStream() {
                public void write(int x) {}
            }));
        }

        for(int j = 0; j < array.length; j++) {
            PlateInteger plate = RandomUtil.randomPlateInteger(
                    rows, columns, minValue, maxValue, length, "Plate1-" + j);
            array[j] = plate;
        }

        for(int j = 0; j < arrayIndices.length; j++) {
            PlateInteger plateIndices = RandomUtil.randomPlateInteger(
                    rows, columns, minValue, maxValue, lengthIndices, "Plate1-" + j);
            arrayIndices[j] = plateIndices;
        }
    }

    /**
     * Restores the original error stream.
     */
    @AfterClass
    public static void restoreErrorOut() {
        System.setErr(originalErr);
    }

    /* ----------------------------- Helpers -------------------------------- */

    /**
     * Copies the values of a well into a primitive double array.
     *
     * @param    well    the well to convert
     * @return           the well values as a double array
     */
    private static double[] wellToArray(WellInteger well) {
        double[] values = new double[well.size()];
        int index = 0;
        for(double value : well) {
            values[index++] = value;
        }
        return values;
    }

    /**
     * Pools the values of every well into a single primitive double array.
     *
     * @param    wells    the wells to aggregate
     * @return            the pooled values
     */
    private static double[] aggregate(Iterable<WellInteger> wells) {
        List<Double> pooled = new ArrayList<Double>();
        for(WellInteger well : wells) {
            pooled.addAll(well.toDouble());
        }
        return toPrimitive(pooled);
    }

    /**
     * Pools the values between the indices of every well into a single
     * primitive double array.
     *
     * @param    wells    the wells to aggregate
     * @param    begin    the beginning index, inclusive
     * @param    end      the ending index, exclusive
     * @return            the pooled values
     */
    private static double[] aggregate(Iterable<WellInteger> wells, int begin, int end) {
        List<Double> pooled = new ArrayList<Double>();
        for(WellInteger well : wells) {
            pooled.addAll(well.toDouble().subList(begin, end));
        }
        return toPrimitive(pooled);
    }

    /**
     * Unboxes a list of doubles into a primitive array.
     *
     * @param    list    the boxed values
     * @return           the primitive array
     */
    private static double[] toPrimitive(List<Double> list) {
        double[] result = new double[list.size()];
        for(int i = 0; i < list.size(); i++) {
            result[i] = list.get(i);
        }
        return result;
    }

    /**
     * Computes the reference kurtosis of the input values.
     *
     * @param    values    the input values
     * @return             the kurtosis
     */
    private static double kurtosisOf(double[] values) {
        return new DescriptiveStatistics(values).getKurtosis();
    }

    /**
     * Chooses a random begin (inclusive) / end (exclusive) index pair that
     * spans at least four values within the given size.
     *
     * @param    size    the number of values per well
     * @return            { begin, end }
     */
    private static int[] randomRange(int size) {
        int begin = random.nextInt(size - 5);
        int end = (begin + 4) + random.nextInt(size - (begin + 4) + 1);
        return new int[] { begin, end };
    }

    /* ---------------------------- Constructors -----------------------------*/

    /**
     * Tests the default constructor.
     */
    @Test
    public void testConstructor() {
        KurtosisInteger test = new KurtosisInteger();
        assertNotNull(test);
    }

    /* ---------------- Well statistics for all plate wells ----------------- */

    /**
     * Tests the plate statistics method.
     */
    @Test
    public void testPlate() {

        for(PlateInteger plate : array) {

            Map<WellInteger, Double> returnedMap = kurtosis.plate(plate);

            for(WellInteger well : plate) {
                double result = Precision.round(kurtosisOf(wellToArray(well)), precision);
                double returned = Precision.round(returnedMap.get(well), precision);
                assertTrue(result == returned);
            }
        }
    }

    /**
     * Tests the plate statistics method using the values between the indices.
     */
    @Test
    public void testPlateIndices() {

        for(PlateInteger plate : arrayIndices) {

            int[] range = randomRange(arrayIndices[0].first().size());
            int begin = range[0];
            int end = range[1];

            Map<WellInteger, Double> returnedMap = kurtosis.plate(plate, begin, end - begin);

            for(WellInteger well : plate) {
                double result = Precision.round(
                        kurtosisOf(ArrayUtils.subarray(wellToArray(well), begin, end)), precision);
                double returned = Precision.round(returnedMap.get(well), precision);
                assertTrue(result == returned);
            }
        }
    }

    /* --------------------- Aggregated plate statistics ------------------- */

    /**
     * Tests the aggregated plate statistics method.
     */
    @Test
    public void testAggregatedPlate() {

        for(PlateInteger plate : array) {
            double returned = Precision.round(kurtosis.platesAggregated(plate), precision);
            double result = Precision.round(kurtosisOf(aggregate(plate)), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated plate statistics method using a collection.
     */
    @Test
    public void testAggregatedPlateCollection() {

        List<PlateInteger> collection = Arrays.asList(array);
        Map<PlateInteger, Double> returnedMap = kurtosis.platesAggregated(collection);

        for(PlateInteger plate : collection) {
            double result = Precision.round(kurtosisOf(aggregate(plate)), precision);
            double returned = Precision.round(returnedMap.get(plate), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated plate statistics method using an array.
     */
    @Test
    public void testAggregatedPlateArray() {

        Map<PlateInteger, Double> returnedMap = kurtosis.platesAggregated(array);

        for(PlateInteger plate : array) {
            double result = Precision.round(kurtosisOf(aggregate(plate)), precision);
            double returned = Precision.round(returnedMap.get(plate), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated plate statistics method using the values between
     * the indices.
     */
    @Test
    public void testAggregatedPlateIndices() {

        for(PlateInteger plate : arrayIndices) {

            int[] range = randomRange(arrayIndices[0].first().size());
            int begin = range[0];
            int end = range[1];

            double returned = Precision.round(
                    kurtosis.platesAggregated(plate, begin, end - begin), precision);
            double result = Precision.round(kurtosisOf(aggregate(plate, begin, end)), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated plate statistics method using the values between
     * the indices of the collection.
     */
    @Test
    public void testAggregatedPlateCollectionIndices() {

        int[] range = randomRange(arrayIndices[0].first().size());
        int begin = range[0];
        int end = range[1];

        List<PlateInteger> collection = Arrays.asList(arrayIndices);
        Map<PlateInteger, Double> returnedMap =
                kurtosis.platesAggregated(collection, begin, end - begin);

        for(PlateInteger plate : collection) {
            double result = Precision.round(kurtosisOf(aggregate(plate, begin, end)), precision);
            double returned = Precision.round(returnedMap.get(plate), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated plate statistics method using the values between
     * the indices of the array.
     */
    @Test
    public void testAggregatedPlateArrayIndices() {

        int[] range = randomRange(arrayIndices[0].first().size());
        int begin = range[0];
        int end = range[1];

        Map<PlateInteger, Double> returnedMap =
                kurtosis.platesAggregated(arrayIndices, begin, end - begin);

        for(PlateInteger plate : arrayIndices) {
            double result = Precision.round(kurtosisOf(aggregate(plate, begin, end)), precision);
            double returned = Precision.round(returnedMap.get(plate), precision);
            assertTrue(result == returned);
        }
    }

    /* --------------- Well statistics for all wells in a set -------------- */

    /**
     * Tests set calculation.
     */
    @Test
    public void testSet() {

        for(PlateInteger plate : array) {

            Map<WellInteger, Double> returnedMap = kurtosis.set(plate.dataSet());

            for(WellInteger well : plate) {
                double result = Precision.round(kurtosisOf(wellToArray(well)), precision);
                double returned = Precision.round(returnedMap.get(well), precision);
                assertTrue(result == returned);
            }
        }
    }

    /**
     * Tests set calculation using indices.
     */
    @Test
    public void testSetIndices() {

        for(PlateInteger plate : arrayIndices) {

            int[] range = randomRange(arrayIndices[0].first().size());
            int begin = range[0];
            int end = range[1];

            Map<WellInteger, Double> returnedMap =
                    kurtosis.set(plate.dataSet(), begin, end - begin);

            for(WellInteger well : plate) {
                double result = Precision.round(
                        kurtosisOf(ArrayUtils.subarray(wellToArray(well), begin, end)), precision);
                double returned = Precision.round(returnedMap.get(well), precision);
                assertTrue(result == returned);
            }
        }
    }

    /* ---------------------- Aggregated set statistics -------------------- */

    /**
     * Tests the aggregated set statistics method.
     */
    @Test
    public void testAggregatedSet() {

        for(PlateInteger plate : array) {
            double returned = Precision.round(kurtosis.setsAggregated(plate.dataSet()), precision);
            double result = Precision.round(kurtosisOf(aggregate(plate)), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated set statistics method using a collection.
     */
    @Test
    public void testAggregatedSetCollection() {

        List<WellSetInteger> collection = new ArrayList<WellSetInteger>();
        for(PlateInteger plate : array) {
            collection.add(plate.dataSet());
        }

        Map<WellSetInteger, Double> returnedMap = kurtosis.setsAggregated(collection);

        for(WellSetInteger set : collection) {
            double result = Precision.round(kurtosisOf(aggregate(set)), precision);
            double returned = Precision.round(returnedMap.get(set), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated set statistics method using an array.
     */
    @Test
    public void testAggregatedSetArray() {

        WellSetInteger[] setArray = new WellSetInteger[array.length];
        for(int i = 0; i < setArray.length; i++) {
            setArray[i] = array[i].dataSet();
        }

        Map<WellSetInteger, Double> returnedMap = kurtosis.setsAggregated(setArray);

        for(WellSetInteger set : setArray) {
            double result = Precision.round(kurtosisOf(aggregate(set)), precision);
            double returned = Precision.round(returnedMap.get(set), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated set statistics method using the values between
     * the indices.
     */
    @Test
    public void testAggregatedSetIndices() {

        for(PlateInteger plate : arrayIndices) {

            int[] range = randomRange(arrayIndices[0].first().size());
            int begin = range[0];
            int end = range[1];

            double returned = Precision.round(
                    kurtosis.setsAggregated(plate.dataSet(), begin, end - begin), precision);
            double result = Precision.round(kurtosisOf(aggregate(plate, begin, end)), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated set statistics method using the values between
     * the indices of the collection.
     */
    @Test
    public void testAggregatedSetCollectionIndices() {

        int[] range = randomRange(arrayIndices[0].first().size());
        int begin = range[0];
        int end = range[1];

        List<WellSetInteger> collection = new ArrayList<WellSetInteger>();
        for(PlateInteger plate : arrayIndices) {
            collection.add(plate.dataSet());
        }

        Map<WellSetInteger, Double> returnedMap =
                kurtosis.setsAggregated(collection, begin, end - begin);

        for(WellSetInteger set : collection) {
            double result = Precision.round(kurtosisOf(aggregate(set, begin, end)), precision);
            double returned = Precision.round(returnedMap.get(set), precision);
            assertTrue(result == returned);
        }
    }

    /**
     * Tests the aggregated set statistics method using the values between
     * the indices of the array.
     */
    @Test
    public void testAggregatedSetArrayIndices() {

        int[] range = randomRange(arrayIndices[0].first().size());
        int begin = range[0];
        int end = range[1];

        WellSetInteger[] setArrayIndices = new WellSetInteger[arrayIndices.length];
        for(int i = 0; i < setArrayIndices.length; i++) {
            setArrayIndices[i] = arrayIndices[i].dataSet();
        }

        Map<WellSetInteger, Double> returnedMap =
                kurtosis.setsAggregated(setArrayIndices, begin, end - begin);

        for(WellSetInteger set : setArrayIndices) {
            double result = Precision.round(kurtosisOf(aggregate(set, begin, end)), precision);
            double returned = Precision.round(returnedMap.get(set), precision);
            assertTrue(result == returned);
        }
    }

    /* -------------------------- Well statistics -------------------------- */

    /**
     * Tests well calculation.
     */
    @Test
    public void testWell() {

        for(PlateInteger plate : array) {
            for(WellInteger well : plate) {
                double result = Precision.round(kurtosisOf(wellToArray(well)), precision);
                double returned = Precision.round(kurtosis.well(well), precision);
                assertTrue(result == returned);
            }
        }
    }

    /**
     * Tests well calculation using indices.
     */
    @Test
    public void testWellIndices() {

        for(PlateInteger plate : arrayIndices) {
            for(WellInteger well : plate) {

                int[] range = randomRange(arrayIndices[0].first().size());
                int begin = range[0];
                int end = range[1];

                double result = Precision.round(
                        kurtosisOf(ArrayUtils.subarray(wellToArray(well), begin, end)), precision);
                double returned = Precision.round(kurtosis.well(well, begin, end - begin), precision);
                assertTrue(result == returned);
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.util.convert.converters;

import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;

import junit.framework.TestCase;

import org.apache.wicket.IConverterLocator;
import org.apache.wicket.util.convert.ConversionException;
import org.apache.wicket.util.convert.ConverterLocator;

/**
 * Tests for the base converters. Each numeric test checks round-trip
 * conversion (string to value and back) and verifies that malformed,
 * trailing-garbage, and out-of-range inputs raise {@link ConversionException}.
 *
 * <p>
 * Deprecated boxing constructors ({@code new Integer(7)} etc.) have been
 * replaced with the value-identical {@code valueOf} factories.
 *
 * @author Eelco Hillenius
 */
public final class ConvertersTest extends TestCase
{
	/** Dutch locale for localized testing. */
	private static final Locale DUTCH_LOCALE = new Locale("nl", "NL");

	/**
	 * Construct.
	 */
	public ConvertersTest()
	{
		super();
	}

	/**
	 * Construct.
	 *
	 * @param name
	 */
	public ConvertersTest(String name)
	{
		super(name);
	}

	/**
	 * Test generalized conversion
	 */
	public void testConversion()
	{
		final IConverterLocator converter = new ConverterLocator();

		assertEquals("7", converter.getConverter(Integer.class).convertToString(
			Integer.valueOf(7), Locale.US));
		assertEquals("7.1", converter.getConverter(Double.class).convertToString(
			Double.valueOf(7.1), Locale.US));
		assertEquals("7,1", converter.getConverter(Double.class).convertToString(
			Double.valueOf(7.1), DUTCH_LOCALE));

		Calendar cal = Calendar.getInstance(DUTCH_LOCALE);
		cal.clear();
		cal.set(2002, Calendar.OCTOBER, 24);
		Date date = cal.getTime();

		assertEquals(date, converter.getConverter(Date.class).convertToObject("24-10-02",
			DUTCH_LOCALE));
		assertEquals("24-10-02", converter.getConverter(Date.class).convertToString(date,
			DUTCH_LOCALE));

		// empty strings should return null, NOT throw NPEs
		assertNull(converter.getConverter(Integer.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Byte.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Character.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Float.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Long.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Short.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Date.class).convertToObject("", Locale.US));
		assertNull(converter.getConverter(Double.class).convertToObject("", Locale.US));
		assertEquals(Boolean.FALSE, converter.getConverter(Boolean.class).convertToObject("",
			Locale.US));
		assertNotNull(converter.getConverter(String.class).convertToObject("", Locale.US));
	}

	/**
	 * @throws Exception
	 */
	public void testThousandSeperator() throws Exception
	{
		BigDecimalConverter bdc = new BigDecimalConverter();

		assertEquals(new BigDecimal(3000), bdc.convertToObject("3 000", Locale.FRENCH));

		DoubleConverter dc = new DoubleConverter();

		assertEquals(Double.valueOf(3000), dc.convertToObject("3 000", Locale.FRENCH));
	}

	/**
	 * @throws Exception
	 *             WICKET-1344 public void testBigDecimalRounding() throws Exception {
	 *             BigDecimalConverter bdc = new BigDecimalConverter(); assertEquals("123.45",
	 *             bdc.convertToObject("123.45", Locale.ENGLISH).toString());
	 *             }
	 */

	/**
	 * Test boolean conversions.
	 */
	public void testBooleanConversions()
	{
		BooleanConverter booleanConverter = new BooleanConverter();
		assertEquals("true", booleanConverter.convertToString(Boolean.TRUE, Locale.getDefault()));
		assertEquals("false", booleanConverter.convertToString(Boolean.FALSE, Locale.getDefault()));
		assertEquals(Boolean.TRUE, booleanConverter.convertToObject("true", Locale.getDefault()));
		assertEquals(Boolean.FALSE, booleanConverter.convertToObject("false", Locale.getDefault()));
		try
		{
			booleanConverter.convertToObject("whatever", Locale.getDefault());
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
	}

	/**
	 * Test byte conversions.
	 */
	public void testByteConversions()
	{
		ByteConverter converter = new ByteConverter();
		assertEquals(Byte.valueOf((byte)10), converter.convertToObject("10", Locale.US));
		assertEquals("10", converter.convertToString(Byte.valueOf((byte)10), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			converter.convertToObject("10whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			// 256 overflows a byte
			converter.convertToObject("256", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
	}

	/**
	 * Test double conversions.
	 */
	public void testDoubleConversions()
	{
		DoubleConverter converter = new DoubleConverter();
		assertEquals(Double.valueOf(1.1), converter.convertToObject("1.1", Locale.US));
		assertEquals("1.1", converter.convertToString(Double.valueOf(1.1), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
		try
		{
			converter.convertToObject("1.1whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
	}

	/**
	 * Test float conversions.
	 */
	public void testFloatConversions()
	{
		FloatConverter converter = new FloatConverter();
		assertEquals(Float.valueOf(1.1f), converter.convertToObject("1.1", Locale.US));
		assertEquals("1.1", converter.convertToString(Float.valueOf(1.1f), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
		try
		{
			converter.convertToObject("1.1whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
	}

	/**
	 * Test integer conversions.
	 */
	public void testIntegerConversions()
	{
		IntegerConverter converter = new IntegerConverter();
		assertEquals(Integer.valueOf(10), converter.convertToObject("10", Locale.US));
		assertEquals("10", converter.convertToString(Integer.valueOf(10), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			converter.convertToObject("10whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			// one past Integer.MAX_VALUE overflows an int
			converter.convertToObject("" + ((long)Integer.MAX_VALUE + 1), Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
	}

	/**
	 * Test long conversions.
	 */
	public void testLongConversions()
	{
		LongConverter converter = new LongConverter();
		assertEquals(Long.valueOf(10), converter.convertToObject("10", Locale.US));
		assertEquals("10", converter.convertToString(Long.valueOf(10), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			converter.convertToObject("10whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			// Long.MAX_VALUE with a trailing digit overflows a long
			converter.convertToObject("" + Long.MAX_VALUE + "0", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
	}

	/**
	 * Test short conversions
	 */
	public void testShortConversions()
	{
		ShortConverter converter = new ShortConverter();
		assertEquals(Short.valueOf((short)10), converter.convertToObject("10", Locale.US));
		assertEquals("10", converter.convertToString(Short.valueOf((short)10), Locale.US));
		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			converter.convertToObject("10whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
		try
		{
			// one past Short.MAX_VALUE overflows a short
			converter.convertToObject("" + (Short.MAX_VALUE + 1), Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// This is correct
		}
	}

	/**
	 * Test date locale conversions.
	 */
	public void testDateConverter()
	{
		DateConverter converter = new DateConverter();

		Calendar cal = Calendar.getInstance(DUTCH_LOCALE);
		cal.clear();
		cal.set(2002, Calendar.OCTOBER, 24);
		Date date = cal.getTime();

		assertEquals("24-10-02", converter.convertToString(date, DUTCH_LOCALE));
		assertEquals(date, converter.convertToObject("24-10-02", DUTCH_LOCALE));

		assertEquals("10/24/02", converter.convertToString(date, Locale.US));
		assertEquals(date, converter.convertToObject("10/24/02", Locale.US));

		try
		{
			converter.convertToObject("whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
		try
		{
			converter.convertToObject("10/24/02whatever", Locale.US);
			fail("Conversion should have thrown an exception");
		}
		catch (ConversionException e)
		{
			// this is correct
		}
	}
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// NOTE(review): generated file — any hand edits (including these comments) will be
// lost the next time the protobuf compiler regenerates it from dlp.proto.

package com.google.privacy.dlp.v2;

/**
 *
 *
 * <pre>
 * Request message for DeleteInspectTemplate.
 * </pre>
 *
 * Protobuf type {@code google.privacy.dlp.v2.DeleteInspectTemplateRequest}
 */
public final class DeleteInspectTemplateRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DeleteInspectTemplateRequest)
    DeleteInspectTemplateRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeleteInspectTemplateRequest.newBuilder() to construct.
  private DeleteInspectTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: the single "name" field defaults to the empty string.
  private DeleteInspectTemplateRequest() {
    name_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: consumes tag/value pairs until end of stream
  // (tag 0), storing field 1 ("name") and preserving unrecognized fields.
  private DeleteInspectTemplateRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1, wire type 2 (length-delimited): the "name" string
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Runs on both success and failure so a partially-parsed message still
      // keeps whatever unknown fields were accumulated.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DeleteInspectTemplateRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DeleteInspectTemplateRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.class,
            com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; converted lazily on access.
  private volatile java.lang.Object name_;
  /**
   *
   *
   * <pre>
   * Resource name of the organization and inspectTemplate to be deleted, for
   * example `organizations/433245324/inspectTemplates/432452342` or
   * projects/project-id/inspectTemplates/432452342.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Resource name of the organization and inspectTemplate to be deleted, for
   * example `organizations/433245324/inspectTemplates/432452342` or
   * projects/project-id/inspectTemplates/432452342.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed yet; this message has no required fields, so it is always 1 after.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.DeleteInspectTemplateRequest)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.DeleteInspectTemplateRequest other =
        (com.google.privacy.dlp.v2.DeleteInspectTemplateRequest) obj;

    boolean result = true;
    result = result && getName().equals(other.getName());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.privacy.dlp.v2.DeleteInspectTemplateRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for DeleteInspectTemplate.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.DeleteInspectTemplateRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DeleteInspectTemplateRequest)
      com.google.privacy.dlp.v2.DeleteInspectTemplateRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DeleteInspectTemplateRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DeleteInspectTemplateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.class,
              com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DeleteInspectTemplateRequest_descriptor;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.DeleteInspectTemplateRequest getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.DeleteInspectTemplateRequest build() {
      com.google.privacy.dlp.v2.DeleteInspectTemplateRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.DeleteInspectTemplateRequest buildPartial() {
      com.google.privacy.dlp.v2.DeleteInspectTemplateRequest result =
          new com.google.privacy.dlp.v2.DeleteInspectTemplateRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.DeleteInspectTemplateRequest) {
        return mergeFrom((com.google.privacy.dlp.v2.DeleteInspectTemplateRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.privacy.dlp.v2.DeleteInspectTemplateRequest other) {
      if (other == com.google.privacy.dlp.v2.DeleteInspectTemplateRequest.getDefaultInstance())
        return this;
      // proto3 merge semantics: only a non-empty "name" overwrites this builder's value.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.privacy.dlp.v2.DeleteInspectTemplateRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.privacy.dlp.v2.DeleteInspectTemplateRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even when an exception is propagating.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Holds either a java.lang.String or a ByteString, mirroring the message field.
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Resource name of the organization and inspectTemplate to be deleted, for
     * example `organizations/433245324/inspectTemplates/432452342` or
     * projects/project-id/inspectTemplates/432452342.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Resource name of the organization and inspectTemplate to be deleted, for
     * example `organizations/433245324/inspectTemplates/432452342` or
     * projects/project-id/inspectTemplates/432452342.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Resource name of the organization and inspectTemplate to be deleted, for
     * example `organizations/433245324/inspectTemplates/432452342` or
     * projects/project-id/inspectTemplates/432452342.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Resource name of the organization and inspectTemplate to be deleted, for
     * example `organizations/433245324/inspectTemplates/432452342` or
     * projects/project-id/inspectTemplates/432452342.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Resource name of the organization and inspectTemplate to be deleted, for
     * example `organizations/433245324/inspectTemplates/432452342` or
     * projects/project-id/inspectTemplates/432452342.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DeleteInspectTemplateRequest)
  }

  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteInspectTemplateRequest)
  private static final com.google.privacy.dlp.v2.DeleteInspectTemplateRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DeleteInspectTemplateRequest();
  }

  public static com.google.privacy.dlp.v2.DeleteInspectTemplateRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DeleteInspectTemplateRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteInspectTemplateRequest>() {
        @java.lang.Override
        public DeleteInspectTemplateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DeleteInspectTemplateRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DeleteInspectTemplateRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteInspectTemplateRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.privacy.dlp.v2.DeleteInspectTemplateRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
 * Copyright 2015 Synced Synapse. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.xbmc.kore.ui.sections.hosts;

import android.app.Activity;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Toast;

import org.xbmc.kore.R;
import org.xbmc.kore.eventclient.EventServerConnection;
import org.xbmc.kore.host.HostInfo;
import org.xbmc.kore.jsonrpc.ApiCallback;
import org.xbmc.kore.jsonrpc.ApiException;
import org.xbmc.kore.jsonrpc.HostConnection;
import org.xbmc.kore.jsonrpc.method.Application;
import org.xbmc.kore.jsonrpc.method.JSONRPC;
import org.xbmc.kore.jsonrpc.type.ApplicationType;
import org.xbmc.kore.utils.LogUtils;
import org.xbmc.kore.utils.NetUtils;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import butterknife.ButterKnife;
import butterknife.InjectView;

/**
 * Fragment that presents the manual host configuration form.
 *
 * Collects the media center's name, address, ports and credentials, tests the
 * connection (HTTP, then optionally TCP and EventServer) and, on success, hands
 * the resulting {@link HostInfo} to the enclosing activity through
 * {@link HostManualConfigurationListener}.
 */
public class HostFragmentManualConfiguration extends Fragment {
    private static final String TAG = LogUtils.makeLogTag(HostFragmentManualConfiguration.class);

    /**
     * Fragment argument keys.
     */
    private static final String PREFIX = "org.xbmc.kore";
    public static final String HOST_ID = PREFIX + ".host_id",
            HOST_NAME = PREFIX + ".host_name",
            HOST_ADDRESS = PREFIX + ".host_address",
            HOST_HTTP_PORT = PREFIX + ".host_http_port",
            // NOTE(review): "host_tcp_post" looks like a typo for "host_tcp_port",
            // but the key is kept as-is for backwards compatibility with callers.
            HOST_TCP_PORT = PREFIX + ".host_tcp_post",
            HOST_USERNAME = PREFIX + ".host_username",
            HOST_PASSWORD = PREFIX + ".host_password",
            HOST_MAC_ADDRESS = PREFIX + ".host_mac_address",
            HOST_WOL_PORT = PREFIX + ".host_wol_port",
            HOST_PROTOCOL = PREFIX + ".host_protocol",
            HOST_USE_EVENT_SERVER = PREFIX + ".host_use_event_server",
            HOST_EVENT_SERVER_PORT = PREFIX + ".host_event_server_port";
    public static final String GO_STRAIGHT_TO_TEST = PREFIX + ".go_straight_to_test";

    /**
     * Callback interface to communicate with the enclosing activity.
     */
    public interface HostManualConfigurationListener {
        public void onHostManualConfigurationNext(HostInfo hostInfo);
        public void onHostManualConfigurationCancel();
    }

    // Optional argument: custom label for the cancel/previous button.
    public static String CANCEL_BUTTON_LABEL_ARG = PREFIX + ".cancel_button_label";

    private HostManualConfigurationListener listener;
    private ProgressDialog progressDialog;

    @InjectView(R.id.xbmc_name) EditText xbmcNameEditText;
    @InjectView(R.id.xbmc_ip_address) EditText xbmcIpAddressEditText;
    @InjectView(R.id.xbmc_http_port) EditText xbmcHttpPortEditText;
    @InjectView(R.id.xbmc_tcp_port) EditText xbmcTcpPortEditText;
    @InjectView(R.id.xbmc_username) EditText xbmcUsernameEditText;
    @InjectView(R.id.xbmc_password) EditText xbmcPasswordEditText;
    @InjectView(R.id.xbmc_mac_address) EditText xbmcMacAddressEditText;
    @InjectView(R.id.xbmc_wol_port) EditText xbmcWolPortEditText;
    @InjectView(R.id.xbmc_use_tcp) CheckBox xbmcUseTcpCheckbox;
    @InjectView(R.id.xbmc_use_event_server) CheckBox xbmcUseEventServerCheckbox;
    @InjectView(R.id.xbmc_event_server_port) EditText xbmcEventServerPortEditText;

    // Handler for callbacks (posts results back on the UI thread)
    final Handler handler = new Handler();

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.fragment_add_host_manual_configuration, container, false);
        ButterKnife.inject(this, root);

        // By default, use TCP; the port field is only editable while TCP is enabled
        xbmcUseTcpCheckbox.setChecked(true);
        xbmcUseTcpCheckbox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                xbmcTcpPortEditText.setEnabled(isChecked);
            }
        });
        xbmcUseEventServerCheckbox.setChecked(true);
        xbmcUseEventServerCheckbox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                xbmcEventServerPortEditText.setEnabled(isChecked);
            }
        });

        // Check if we were given a host info to pre-fill the form with
        String hostName = getArguments().getString(HOST_NAME);
        String hostAddress = getArguments().getString(HOST_ADDRESS);
        int hostHttpPort = getArguments().getInt(HOST_HTTP_PORT, HostInfo.DEFAULT_HTTP_PORT);
        int hostTcpPort = getArguments().getInt(HOST_TCP_PORT, HostInfo.DEFAULT_TCP_PORT);
        String hostUsername = getArguments().getString(HOST_USERNAME);
        String hostPassword = getArguments().getString(HOST_PASSWORD);
        int hostProtocol = getArguments().getInt(HOST_PROTOCOL, HostConnection.PROTOCOL_TCP);
        String hostMacAddress = getArguments().getString(HOST_MAC_ADDRESS);
        int hostWolPort = getArguments().getInt(HOST_WOL_PORT, HostInfo.DEFAULT_WOL_PORT);
        boolean hostUseEventServer = getArguments().getBoolean(HOST_USE_EVENT_SERVER, true);
        int hostEventServerPort = getArguments().getInt(HOST_EVENT_SERVER_PORT, HostInfo.DEFAULT_EVENT_SERVER_PORT);

        if (hostAddress != null) {
            xbmcNameEditText.setText(hostName);
            xbmcIpAddressEditText.setText(hostAddress);
            xbmcHttpPortEditText.setText(String.valueOf(hostHttpPort));
            if (!TextUtils.isEmpty(hostUsername))
                xbmcUsernameEditText.setText(hostUsername);
            if (!TextUtils.isEmpty(hostPassword))
                xbmcPasswordEditText.setText(hostPassword);
            xbmcUseTcpCheckbox.setChecked(!(hostProtocol == HostConnection.PROTOCOL_HTTP));
            xbmcTcpPortEditText.setEnabled(xbmcUseTcpCheckbox.isChecked());
            // Non-default ports are shown explicitly; default ports leave the field empty
            if (hostTcpPort != HostInfo.DEFAULT_TCP_PORT)
                xbmcTcpPortEditText.setText(String.valueOf(hostTcpPort));
            if (!TextUtils.isEmpty(hostMacAddress))
                xbmcMacAddressEditText.setText(hostMacAddress);
            if (hostWolPort != HostInfo.DEFAULT_WOL_PORT)
                xbmcWolPortEditText.setText(String.valueOf(hostWolPort));
            xbmcUseEventServerCheckbox.setChecked(hostUseEventServer);
            xbmcEventServerPortEditText.setEnabled(xbmcUseEventServerCheckbox.isChecked());
            if (hostEventServerPort != HostInfo.DEFAULT_EVENT_SERVER_PORT)
                xbmcEventServerPortEditText.setText(String.valueOf(hostEventServerPort));
        }
        return root;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (getView() == null) return;

        progressDialog = new ProgressDialog(getActivity());

        Button next, previous;
        // Next button: repurposed as "test connection"
        next = (Button)getView().findViewById(R.id.next);
        next.setText(R.string.test_connection);
        next.setCompoundDrawables(null, null, null, null);
        next.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                testConnection();
            }
        });

        // Previous button: cancels, with an optional custom label from the arguments
        previous = (Button)getView().findViewById(R.id.previous);
        if (getArguments().getString(CANCEL_BUTTON_LABEL_ARG, null) != null) {
            previous.setText(getArguments().getString(CANCEL_BUTTON_LABEL_ARG));
        } else {
            previous.setText(android.R.string.cancel);
        }
        previous.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                listener.onHostManualConfigurationCancel();
            }
        });

        // Check if the activity wants us to go straight to test
        boolean goStraightToTest = getArguments().getBoolean(GO_STRAIGHT_TO_TEST, false);
        if (goStraightToTest) {
            testConnection();
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            listener = (HostManualConfigurationListener) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString() +
                    " must implement AddHostManualConfigurationListener interface.");
        }
    }

    /**
     * Returns whether the given value is a usable TCP/UDP port number (1-65535).
     */
    private static boolean isValidPort(int port) {
        return port > 0 && port <= 65535;
    }

    /**
     * Tests a connection with the values set in the UI.
     * Checks whether the values are correctly set, and then tries to make
     * a ping call. First through HTTP, and if it succeeds, through TCP to
     * check availability. Finally adds the host and advances the wizard
     */
    private void testConnection() {
        String xbmcName = xbmcNameEditText.getText().toString();

        boolean isHttps = false;
        String xbmcAddress = xbmcIpAddressEditText.getText().toString();
        // Accept full URLs: strip the scheme, remembering whether it was https
        if (xbmcAddress.startsWith("https://")) {
            xbmcAddress = xbmcAddress.substring("https://".length());
            LogUtils.LOGD(TAG, "Stripped https:// on address to get: " + xbmcAddress);
            isHttps = true;
        } else if (xbmcAddress.startsWith("http://")) {
            xbmcAddress = xbmcAddress.substring("http://".length());
            LogUtils.LOGD(TAG, "Stripped http:// on address to get: " + xbmcAddress);
        }
        int xbmcHttpPort = isHttps ? HostInfo.DEFAULT_HTTPS_PORT : HostInfo.DEFAULT_HTTP_PORT;

        // A port embedded in the address ("host:port") takes precedence over the
        // one typed in the HTTP port field.
        Integer implicitPort = null;
        Matcher m = Pattern.compile("^.*:(\\d{1,5})\\z").matcher(xbmcAddress);
        if (m.matches()) {
            // Minus one character for the colon
            xbmcAddress = xbmcAddress.substring(0, m.start(1) - 1);
            LogUtils.LOGD(TAG, "Stripped port on address to get: " + xbmcAddress);
            try {
                implicitPort = Integer.valueOf(m.group(1));
            } catch (NumberFormatException e) {
                LogUtils.LOGW(
                        TAG,
                        "Value matching port regex couldn't be parsed as integer: " + m.group(1)
                );
                implicitPort = -1;
            }
        }

        Integer explicitPort = null;
        String aux = xbmcHttpPortEditText.getText().toString();
        if (!TextUtils.isEmpty(aux)) {
            try {
                explicitPort = Integer.valueOf(aux);
            } catch (NumberFormatException e) {
                explicitPort = -1;
            }
        }

        if (implicitPort != null) {
            if (!isValidPort(implicitPort)) {
                // FIX: the Toast was created but never displayed (missing .show())
                Toast.makeText(getActivity(),
                        R.string.wizard_invalid_http_port_specified, Toast.LENGTH_SHORT).show();
                xbmcIpAddressEditText.requestFocus();
                return;
            }
            xbmcHttpPort = implicitPort;
        } else if (explicitPort != null) {
            if (!isValidPort(explicitPort)) {
                // FIX: the Toast was created but never displayed (missing .show())
                Toast.makeText(getActivity(),
                        R.string.wizard_invalid_http_port_specified, Toast.LENGTH_SHORT).show();
                xbmcHttpPortEditText.requestFocus();
                return;
            }
            xbmcHttpPort = explicitPort;
        }

        String xbmcUsername = xbmcUsernameEditText.getText().toString();
        String xbmcPassword = xbmcPasswordEditText.getText().toString();
        aux = xbmcTcpPortEditText.getText().toString();
        int xbmcTcpPort;
        try {
            xbmcTcpPort = TextUtils.isEmpty(aux) ? HostInfo.DEFAULT_TCP_PORT : Integer.valueOf(aux);
        } catch (NumberFormatException exc) {
            // -1 flags the invalid value; it is rejected in the validation below
            xbmcTcpPort = -1;
        }

        int xbmcProtocol = xbmcUseTcpCheckbox.isChecked() ?
                HostConnection.PROTOCOL_TCP : HostConnection.PROTOCOL_HTTP;

        String macAddress = xbmcMacAddressEditText.getText().toString();
        aux = xbmcWolPortEditText.getText().toString();
        int xbmcWolPort = HostInfo.DEFAULT_WOL_PORT;
        try {
            xbmcWolPort = TextUtils.isEmpty(aux) ? HostInfo.DEFAULT_WOL_PORT : Integer.valueOf(aux);
        } catch (NumberFormatException exc) {
            // Ignoring this exception and keeping WoL port at the default value
        }

        boolean xbmcUseEventServer = xbmcUseEventServerCheckbox.isChecked();
        aux = xbmcEventServerPortEditText.getText().toString();
        int xbmcEventServerPort;
        try {
            xbmcEventServerPort = TextUtils.isEmpty(aux) ?
                    HostInfo.DEFAULT_EVENT_SERVER_PORT : Integer.valueOf(aux);
        } catch (NumberFormatException exc) {
            xbmcEventServerPort = -1;
        }

        // Check Xbmc name and address
        if (TextUtils.isEmpty(xbmcName)) {
            Toast.makeText(getActivity(), R.string.wizard_no_name_specified, Toast.LENGTH_SHORT).show();
            xbmcNameEditText.requestFocus();
            return;
        } else if (TextUtils.isEmpty(xbmcAddress)) {
            Toast.makeText(getActivity(), R.string.wizard_no_address_specified, Toast.LENGTH_SHORT).show();
            xbmcIpAddressEditText.requestFocus();
            return;
        } else if (xbmcTcpPort <= 0) {
            Toast.makeText(getActivity(), R.string.wizard_invalid_tcp_port_specified, Toast.LENGTH_SHORT).show();
            xbmcTcpPortEditText.requestFocus();
            return;
        } else if (xbmcEventServerPort <= 0) {
            // NOTE(review): reuses the TCP-port error string for the EventServer port —
            // looks like a copy-paste; consider a dedicated string resource.
            Toast.makeText(getActivity(), R.string.wizard_invalid_tcp_port_specified, Toast.LENGTH_SHORT).show();
            xbmcEventServerPortEditText.requestFocus();
            return;
        }

        // If username or password empty, set it to null
        if (TextUtils.isEmpty(xbmcUsername))
            xbmcUsername = null;
        if (TextUtils.isEmpty(xbmcPassword))
            xbmcPassword = null;

        // Ok, let's try to ping the host
        final HostInfo checkedHostInfo = new HostInfo(xbmcName, xbmcAddress, xbmcProtocol,
                                                      xbmcHttpPort, xbmcTcpPort,
                                                      xbmcUsername, xbmcPassword,
                                                      xbmcUseEventServer, xbmcEventServerPort,
                                                      isHttps);
        checkedHostInfo.setMacAddress(macAddress);
        checkedHostInfo.setWolPort(xbmcWolPort);

        progressDialog.setTitle(String.format(getResources().getString(R.string.wizard_connecting_to_xbmc_title), xbmcName));
        progressDialog.setMessage(getResources().getString(R.string.wizard_connecting_to_xbmc_message));
        progressDialog.setCancelable(false);
        progressDialog.setIndeterminate(true);
        progressDialog.setOnShowListener(new DialogInterface.OnShowListener() {
            @Override
            public void onShow(DialogInterface dialog) {
                // Let's ping the host through HTTP
                chainCallCheckHttpConnection(checkedHostInfo);
            }
        });
        progressDialog.show();
    }

    /**
     * First step of the connection-check chain: ping the host through HTTP.
     * On success continues with TCP (if selected) or the EventServer check;
     * on failure aborts with {@link #hostConnectionError(int, String)}.
     */
    private void chainCallCheckHttpConnection(final HostInfo hostInfo) {
        // Let's ping the host through HTTP
        final HostConnection hostConnection = new HostConnection(hostInfo);
        hostConnection.setProtocol(HostConnection.PROTOCOL_HTTP);
        final JSONRPC.Ping httpPing = new JSONRPC.Ping();
        httpPing.execute(hostConnection, new ApiCallback<String>() {
            @Override
            public void onSuccess(String result) {
                LogUtils.LOGD(TAG, "Successfully connected to new host through HTTP.");
                // Great, we managed to connect through HTTP, let's check through tcp
                if (hostInfo.getProtocol() == HostConnection.PROTOCOL_TCP) {
                    chainCallCheckTcpConnection(hostConnection, hostInfo);
                } else {
                    // No TCP, check EventServer
                    hostConnection.disconnect();
                    chainCallCheckEventServerConnection(hostInfo);
                }
            }

            @Override
            public void onError(int errorCode, String description) {
                // Couldn't connect through HTTP, abort, and initialize checkedHostInfo
                hostConnection.disconnect();
                hostConnectionError(errorCode, description);
            }
        }, handler);
    }

    /**
     * Second step: ping through TCP. A TCP failure is not fatal — the host info
     * is downgraded to HTTP and the chain continues with the EventServer check.
     */
    private void chainCallCheckTcpConnection(final HostConnection hostConnection, final HostInfo hostInfo) {
        final JSONRPC.Ping tcpPing = new JSONRPC.Ping();
        hostConnection.setProtocol(HostConnection.PROTOCOL_TCP);
        tcpPing.execute(hostConnection, new ApiCallback<String>() {
            @Override
            public void onSuccess(String result) {
                // Great, we managed to connect through HTTP and TCP
                LogUtils.LOGD(TAG, "Successfully connected to new host through TCP.");
                hostConnection.disconnect();
                // Check EventServer
                chainCallCheckEventServerConnection(hostInfo);
            }

            @Override
            public void onError(int errorCode, String description) {
                // We only managed to connect through HTTP, revert checkedHostInfo to use HTTP
                LogUtils.LOGD(TAG, "Couldn't connect to host through TCP. Message: " + description);
                hostConnection.disconnect();
                hostInfo.setProtocol(HostConnection.PROTOCOL_HTTP);
                // Check EventServer
                chainCallCheckEventServerConnection(hostInfo);
            }
        }, handler);
    }

    /**
     * Third step: test the EventServer connection, if enabled. A failure is not
     * fatal — the EventServer is simply disabled on the host info.
     */
    private void chainCallCheckEventServerConnection(final HostInfo hostInfo) {
        if (hostInfo.getUseEventServer()) {
            EventServerConnection.testEventServerConnection(
                    hostInfo,
                    new EventServerConnection.EventServerConnectionCallback() {
                        @Override
                        public void OnConnectResult(boolean success) {
                            LogUtils.LOGD(TAG, "Check ES connection: " + success);
                            if (success) {
                                chainCallCheckKodiVersion(hostInfo);
                            } else {
                                hostInfo.setUseEventServer(false);
                                chainCallCheckKodiVersion(hostInfo);
                            }
                        }
                    },
                    handler);
        } else {
            chainCallCheckKodiVersion(hostInfo);
        }
    }

    /**
     * Final step: query the Kodi version and store it on the host info. Version
     * lookup failures are tolerated — defaults are kept and the wizard proceeds.
     */
    private void chainCallCheckKodiVersion(final HostInfo hostInfo) {
        final HostConnection hostConnection = new HostConnection(hostInfo);
        hostConnection.setProtocol(HostConnection.PROTOCOL_HTTP);
        final Application.GetProperties getProperties = new Application.GetProperties(Application.GetProperties.VERSION);
        getProperties.execute(hostConnection, new ApiCallback<ApplicationType.PropertyValue>() {
            @Override
            public void onSuccess(ApplicationType.PropertyValue result) {
                LogUtils.LOGD(TAG, "Successfully checked Kodi version.");
                hostInfo.setKodiVersionMajor(result.version.major);
                hostInfo.setKodiVersionMinor(result.version.minor);
                hostInfo.setKodiVersionRevision(result.version.revision);
                hostInfo.setKodiVersionTag(result.version.tag);

                hostConnection.disconnect();
                hostConnectionChecked(hostInfo);
            }

            @Override
            public void onError(int errorCode, String description) {
                // Couldn't get Kodi version... Odd, but let's proceed anyway with the defaults
                hostConnection.disconnect();
                hostConnectionChecked(hostInfo);
            }
        }, handler);
    }

    /**
     * The connection was checked, and hostInfo has all the correct parameters to communicate
     * with it
     * @param hostInfo {@link HostInfo} to add
     */
    private void hostConnectionChecked(final HostInfo hostInfo) {
        // Let's get the MAC Address, if we don't have one
        if (TextUtils.isEmpty(hostInfo.getMacAddress())) {
            // ARP lookup does network I/O, so run it off the UI thread
            new Thread(new Runnable() {
                @Override
                public void run() {
                    String localMacAddress = NetUtils.getMacAddress(hostInfo.getAddress());
                    hostInfo.setMacAddress(localMacAddress);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            // Guard: the fragment may have been detached meanwhile
                            if (isAdded()) {
                                progressDialog.dismiss();
                                listener.onHostManualConfigurationNext(hostInfo);
                            }
                        }
                    });
                }
            }).start();
        } else {
            // Mac address was supplied
            if (isAdded()) {
                progressDialog.dismiss();
                listener.onHostManualConfigurationNext(hostInfo);
            }
        }
    }

    /**
     * Treats errors occurred during the connection check
     * @param errorCode Error code
     * @param description Description
     */
    private void hostConnectionError(int errorCode, String description) {
        if (!isAdded()) return;

        progressDialog.dismiss();
        LogUtils.LOGD(TAG, "An error occurred during connection testing. Message: " + description);
        switch (errorCode) {
            case ApiException.HTTP_RESPONSE_CODE_UNAUTHORIZED:
                String username = xbmcUsernameEditText.getText().toString(),
                        password = xbmcPasswordEditText.getText().toString();
                int messageResourceId;
                if (TextUtils.isEmpty(username) || TextUtils.isEmpty(password)) {
                    messageResourceId = R.string.wizard_empty_authentication;
                } else {
                    messageResourceId = R.string.wizard_incorrect_authentication;
                }
                Toast.makeText(getActivity(), messageResourceId, Toast.LENGTH_SHORT).show();
                xbmcUsernameEditText.requestFocus();
                break;
            default:
                Toast.makeText(getActivity(), R.string.wizard_error_connecting, Toast.LENGTH_SHORT).show();
                break;
        }
    }
}
/* * Copyright (c) 2005-2011 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.loan.business; import org.mifos.accounts.util.helpers.AccountState; import org.mifos.framework.business.AbstractEntity; import org.mifos.framework.util.helpers.Money; public class LoanSummaryEntity extends AbstractEntity { @SuppressWarnings("unused") // see .hbm.xml file private Integer accountId; @SuppressWarnings("unused") // see .hbm.xml file private LoanBO loan; private Money originalPrincipal; private Money originalInterest; private Money originalFees; private Money originalPenalty; private Money principalPaid; private Money interestPaid; private Money feesPaid; private Money penaltyPaid; private Money rawAmountTotal; public Money getRawAmountTotal() { return rawAmountTotal; } public void setRawAmountTotal(Money rawAmountTotal) { this.rawAmountTotal = rawAmountTotal; } protected LoanSummaryEntity() { super(); this.accountId = null; this.loan = null; } public LoanSummaryEntity(LoanBO loan, Money originalPrincipal, Money originalInterest, Money originalFees, Money rawAmountTotal) { super(); this.accountId = null; this.loan = loan; this.originalPrincipal = originalPrincipal; this.originalInterest = originalInterest; this.originalFees = originalFees; this.rawAmountTotal = rawAmountTotal; this.originalPenalty = 
new Money(loan.getCurrency()); this.principalPaid = new Money(loan.getCurrency()); this.interestPaid = new Money(loan.getCurrency()); this.feesPaid = new Money(loan.getCurrency()); this.penaltyPaid = new Money(loan.getCurrency()); } public Money getFeesPaid() { return feesPaid; } void setFeesPaid(Money feesPaid) { this.feesPaid = feesPaid; } public Money getInterestPaid() { return interestPaid; } void setInterestPaid(Money interestPaid) { this.interestPaid = interestPaid; } public Money getOriginalFees() { return originalFees; } void setOriginalFees(Money originalFees) { this.originalFees = originalFees; } public Money getOriginalInterest() { return originalInterest; } void setOriginalInterest(Money originalInterest) { this.originalInterest = originalInterest; } public Money getOriginalPenalty() { return originalPenalty; } void setOriginalPenalty(Money originalPenalty) { this.originalPenalty = originalPenalty; } public Money getOriginalPrincipal() { return originalPrincipal; } void setOriginalPrincipal(Money originalPrincipal) { this.originalPrincipal = originalPrincipal; } public Money getPenaltyPaid() { return penaltyPaid; } void setPenaltyPaid(Money penaltyPaid) { this.penaltyPaid = penaltyPaid; } public Money getPrincipalPaid() { return principalPaid; } void setPrincipalPaid(Money principalPaid) { this.principalPaid = principalPaid; } void updateFeePaid(Money totalPayment) { feesPaid = feesPaid.add(totalPayment); } public Money getPrincipalDue() { if (loanIsWrittenOffOrRescheduled()) { return new Money(this.getOriginalPrincipal().getCurrency()); } return getOriginalPrincipal().subtract(getPrincipalPaid()); } public Money getInterestDue() { if (loanIsWrittenOffOrRescheduled()) { return new Money(this.getOriginalPrincipal().getCurrency()); } return getOriginalInterest().subtract(getInterestPaid()); } public Money getPenaltyDue() { if (loanIsWrittenOffOrRescheduled()) { return new Money(this.getOriginalPrincipal().getCurrency()); } return 
getOriginalPenalty().subtract(getPenaltyPaid()); } public Money getFeesDue() { if (loanIsWrittenOffOrRescheduled()) { return new Money(this.getOriginalPrincipal().getCurrency()); } return getOriginalFees().subtract(getFeesPaid()); } public Money getTotalAmntDue() { if (loanIsWrittenOffOrRescheduled()) { return new Money(this.getOriginalPrincipal().getCurrency()); } return getPrincipalDue().add(getInterestDue()).add(getPenaltyDue()).add(getFeesDue()); } public Money getTotalLoanAmnt() { return getOriginalPrincipal().add(getOriginalFees()).add(getOriginalInterest()).add(getOriginalPenalty()); } public Money getTotalAmntPaid() { return getPrincipalPaid().add(getFeesPaid()).add(getInterestPaid()).add(getPenaltyPaid()); } public Money getOutstandingBalance() { Money totalAmount = new Money(loan.getCurrency()); totalAmount = totalAmount.add(getOriginalPrincipal()).subtract(getPrincipalPaid()); totalAmount = totalAmount.add(getOriginalInterest()).subtract(getInterestPaid()); totalAmount = totalAmount.add(getOriginalPenalty()).subtract(getPenaltyPaid()); totalAmount = totalAmount.add(getOriginalFees()).subtract(getFeesPaid()); return totalAmount; } public void updatePaymentDetails(PaymentAllocation paymentAllocation) { updatePaymentDetails(paymentAllocation.getPrincipalPaid(), paymentAllocation.getTotalInterestPaid(), paymentAllocation.getTotalPenaltyPaid(), paymentAllocation.getTotalAndMiscFeesPaid()); } public void updatePaymentDetails(Money principalPaid, Money interestPaid, Money totalPenaltyPaid, Money totalAndMiscFeesPaid) { this.principalPaid = this.principalPaid.add(principalPaid); this.interestPaid = this.interestPaid.add(interestPaid); penaltyPaid = penaltyPaid.add(totalPenaltyPaid); feesPaid = feesPaid.add(totalAndMiscFeesPaid); } public void updatePaymentDetails(LoanTrxnDetailEntity loanReverseTrxn) { Money penaltyPaid = loanReverseTrxn.totalPenaltyPaid(); Money totalAndMiscFeesPaid = loanReverseTrxn.totalAndMiscFeesPaid(); 
updatePaymentDetails(loanReverseTrxn.getPrincipalAmount(), loanReverseTrxn.getInterestAmount(), penaltyPaid, totalAndMiscFeesPaid); } // John W - to contra the decreaseBy (used when making early repayment) when adjusting the fully paid loan void increaseBy(Money principal, Money interest, Money penalty, Money fees) { originalPrincipal = originalPrincipal.add(principal); originalFees = originalFees.add(fees); originalPenalty = originalPenalty.add(penalty); originalInterest = originalInterest.add(interest); rawAmountTotal = rawAmountTotal.add(interest.add(fees)); } void decreaseBy(Money principal, Money interest, Money penalty, Money fees) { originalPrincipal = originalPrincipal.subtract(principal); originalFees = originalFees.subtract(fees); originalPenalty = originalPenalty.subtract(penalty); originalInterest = originalInterest.subtract(interest); rawAmountTotal = rawAmountTotal.subtract(interest.add(fees)); } void updateOriginalFees(Money charge) { setOriginalFees(getOriginalFees().add(charge)); rawAmountTotal = rawAmountTotal.add(charge); } void updateOriginalPenalty(Money charge) { setOriginalPenalty(getOriginalPenalty().add(charge)); } private boolean loanIsWrittenOffOrRescheduled() { if ((this.loan.getAccountState().getId().equals(AccountState.LOAN_CLOSED_WRITTEN_OFF.getValue())) || (this.loan.getAccountState().getId().equals(AccountState.LOAN_CLOSED_RESCHEDULED.getValue()))) { return true; } return false; } }
/**
 * Copyright (C) 2009 - 2013 SC 4ViewSoft SRL
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.achartengine.renderer;

import android.graphics.Color;
import android.graphics.Typeface;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * An abstract renderer to be extended by the multiple series classes.
 */
public class DefaultRenderer implements Serializable {
  /** The chart title. */
  private String mChartTitle = "";
  /** The chart title text size. */
  private float mChartTitleTextSize = 15;
  /** A no color constant. */
  public static final int NO_COLOR = 0;
  /** The default background color. */
  public static final int BACKGROUND_COLOR = Color.BLACK;
  /** The default color for text. */
  public static final int TEXT_COLOR = Color.LTGRAY;
  /** A text font for regular text, like the chart labels. */
  private static final Typeface REGULAR_TEXT_FONT = Typeface
      .create(Typeface.SERIF, Typeface.NORMAL);
  /** The typeface name for the texts. */
  private String mTextTypefaceName = REGULAR_TEXT_FONT.toString();
  /** The typeface style for the texts. */
  private int mTextTypefaceStyle = Typeface.NORMAL;
  /** The typeface for the texts. */
  private Typeface mTextTypeface;
  /** The chart background color. */
  private int mBackgroundColor;
  /** If the background color is applied. */
  private boolean mApplyBackgroundColor;
  /** If the axes are visible. */
  private boolean mShowAxes = true;
  /** The Y axis color. */
  private int mYAxisColor = TEXT_COLOR;
  /** The X axis color. */
  private int mXAxisColor = TEXT_COLOR;
  /** The axis width. */
  private float mAxisWidth = 1;
  /** If the labels are visible. */
  private boolean mShowLabels = true;
  /** If the tick marks are visible. */
  private boolean mShowTickMarks = true;
  /** The labels color. */
  private int mLabelsColor = TEXT_COLOR;
  /** The labels text size. */
  private float mLabelsTextSize = 10;
  /** If the legend is visible. */
  private boolean mShowLegend = true;
  /** The legend text size. */
  private float mLegendTextSize = 12;
  /** If the legend should size to fit. */
  private boolean mFitLegend = false;
  /** If the X axis grid should be displayed. */
  private boolean mShowGridX = false;
  /** If the Y axis grid should be displayed. */
  private boolean mShowGridY = false;
  /** If the custom text grid should be displayed on the X axis. */
  private boolean mShowCustomTextGridX = false;
  /** If the custom text grid should be displayed on the Y axis. */
  private boolean mShowCustomTextGridY = false;
  /** The simple renderers that are included in this multiple series renderer. */
  private List<SimpleSeriesRenderer> mRenderers = new ArrayList<SimpleSeriesRenderer>();
  /** The antialiasing flag. */
  private boolean mAntialiasing = true;
  /** The legend height. */
  private int mLegendHeight = 0;
  /** The icon margins size, in this order: top, left, bottom, right. */
  private int[] mIconMargins = new int[] { 5, 10, 5, 10 };
  /** The margins size, in this order: top, left, bottom, right. */
  private int[] mMargins = new int[] { 20, 30, 10, 20 };
  /** A value to be used for scaling the chart. */
  private float mScale = 1;
  /** A flag for enabling the pan. */
  private boolean mPanEnabled = true;
  /** A flag for enabling the zoom. */
  private boolean mZoomEnabled = true;
  /** A flag for enabling the visibility of the zoom buttons. */
  private boolean mZoomButtonsVisible = false;
  /** The zoom rate. */
  private float mZoomRate = 1.5f;
  /** A flag for enabling the external zoom. */
  private boolean mExternalZoomEnabled = false;
  /** The original chart scale (captured at construction time). */
  private float mOriginalScale = mScale;
  /** A flag for enabling the click on elements. */
  private boolean mClickEnabled = false;
  /** The selectable radius around a clickable point. */
  private int selectableBuffer = 15;
  /** If the chart should display the values (available for pie chart). */
  private boolean mDisplayValues;
  /**
   * A flag to be set if the chart is inside a scroll and doesn't need to shrink
   * when not enough space.
   */
  private boolean mInScroll;
  /** The start angle for circular charts such as pie, doughnut, etc. */
  private float mStartAngle = 0;

  /**
   * Returns the chart title.
   *
   * @return the chart title
   */
  public String getChartTitle() {
    return mChartTitle;
  }

  /**
   * Sets the chart title.
   *
   * @param title the chart title
   */
  public void setChartTitle(String title) {
    mChartTitle = title;
  }

  /**
   * Returns the chart title text size.
   *
   * @return the chart title text size
   */
  public float getChartTitleTextSize() {
    return mChartTitleTextSize;
  }

  /**
   * Sets the chart title text size.
   *
   * @param textSize the chart title text size
   */
  public void setChartTitleTextSize(float textSize) {
    mChartTitleTextSize = textSize;
  }

  /**
   * Adds a simple renderer to the multiple renderer.
   *
   * @param renderer the renderer to be added
   */
  public void addSeriesRenderer(SimpleSeriesRenderer renderer) {
    mRenderers.add(renderer);
  }

  /**
   * Adds a simple renderer to the multiple renderer.
   *
   * @param index the index in the renderers list
   * @param renderer the renderer to be added
   */
  public void addSeriesRenderer(int index, SimpleSeriesRenderer renderer) {
    mRenderers.add(index, renderer);
  }

  /**
   * Removes a simple renderer from the multiple renderer.
   *
   * @param renderer the renderer to be removed
   */
  public void removeSeriesRenderer(SimpleSeriesRenderer renderer) {
    mRenderers.remove(renderer);
  }

  /**
   * Removes all renderers from the multiple renderer.
   */
  public void removeAllRenderers() {
    mRenderers.clear();
  }

  /**
   * Returns the simple renderer from the multiple renderer list.
   *
   * @param index the index in the simple renderers list
   * @return the simple renderer at the specified index
   */
  public SimpleSeriesRenderer getSeriesRendererAt(int index) {
    return mRenderers.get(index);
  }

  /**
   * Returns the simple renderers count in the multiple renderer list.
   *
   * @return the simple renderers count
   */
  public int getSeriesRendererCount() {
    return mRenderers.size();
  }

  /**
   * Returns an array of the simple renderers in the multiple renderer list.
   *
   * @return the simple renderers array
   */
  public SimpleSeriesRenderer[] getSeriesRenderers() {
    return mRenderers.toArray(new SimpleSeriesRenderer[0]);
  }

  /**
   * Returns the background color.
   *
   * @return the background color
   */
  public int getBackgroundColor() {
    return mBackgroundColor;
  }

  /**
   * Sets the background color.
   *
   * @param color the background color
   */
  public void setBackgroundColor(int color) {
    mBackgroundColor = color;
  }

  /**
   * Returns if the background color should be applied.
   *
   * @return the apply flag for the background color
   */
  public boolean isApplyBackgroundColor() {
    return mApplyBackgroundColor;
  }

  /**
   * Sets if the background color should be applied.
   *
   * @param apply the apply flag for the background color
   */
  public void setApplyBackgroundColor(boolean apply) {
    mApplyBackgroundColor = apply;
  }

  /**
   * Returns the axes color. If the X axis color was customized, it takes
   * precedence; otherwise the Y axis color is returned.
   *
   * @return the axes color
   */
  public int getAxesColor() {
    if (mXAxisColor != TEXT_COLOR) {
      return mXAxisColor;
    } else {
      return mYAxisColor;
    }
  }

  /**
   * Sets the same color on both axes.
   *
   * @param color the axes color
   */
  public void setAxesColor(int color) {
    this.setXAxisColor(color);
    this.setYAxisColor(color);
  }

  /**
   * Returns the color of the Y axis.
   *
   * @return the Y axis color
   */
  public int getYAxisColor() {
    return mYAxisColor;
  }

  /**
   * Sets the Y axis color.
   *
   * @param color the Y axis color
   */
  public void setYAxisColor(int color) {
    mYAxisColor = color;
  }

  /**
   * Returns the color of the X axis.
   *
   * @return the X axis color
   */
  public int getXAxisColor() {
    return mXAxisColor;
  }

  /**
   * Sets the X axis color.
   *
   * @param color the X axis color
   */
  public void setXAxisColor(int color) {
    mXAxisColor = color;
  }

  /**
   * Returns the labels color.
   *
   * @return the labels color
   */
  public int getLabelsColor() {
    return mLabelsColor;
  }

  /**
   * Sets the labels color.
   *
   * @param color the labels color
   */
  public void setLabelsColor(int color) {
    mLabelsColor = color;
  }

  /**
   * Returns the labels text size.
   *
   * @return the labels text size
   */
  public float getLabelsTextSize() {
    return mLabelsTextSize;
  }

  /**
   * Sets the labels text size.
   *
   * @param textSize the labels text size
   */
  public void setLabelsTextSize(float textSize) {
    mLabelsTextSize = textSize;
  }

  /**
   * Returns if the axes should be visible.
   *
   * @return the visibility flag for the axes
   */
  public boolean isShowAxes() {
    return mShowAxes;
  }

  /**
   * Sets if the axes should be visible.
   *
   * @param showAxes the visibility flag for the axes
   */
  public void setShowAxes(boolean showAxes) {
    mShowAxes = showAxes;
  }

  /**
   * Returns if the labels should be visible.
   *
   * @return the visibility flag for the labels
   */
  public boolean isShowLabels() {
    return mShowLabels;
  }

  /**
   * Sets if the labels should be visible.
   *
   * @param showLabels the visibility flag for the labels
   */
  public void setShowLabels(boolean showLabels) {
    mShowLabels = showLabels;
  }

  /**
   * Returns if the tick marks should be visible.
   *
   * @return the visibility flag for the tick marks
   */
  public boolean isShowTickMarks() {
    return mShowTickMarks;
  }

  /**
   * Sets if the tick marks should be visible.
   *
   * @param showTickMarks the visibility flag for the tick marks
   */
  public void setShowTickMarks(boolean showTickMarks) {
    this.mShowTickMarks = showTickMarks;
  }

  /**
   * Returns if the X axis grid should be visible.
   *
   * @return the visibility flag for the X axis grid
   */
  public boolean isShowGridX() {
    return mShowGridX;
  }

  /**
   * Returns if the Y axis grid should be visible.
   *
   * @return the visibility flag for the Y axis grid
   */
  public boolean isShowGridY() {
    return mShowGridY;
  }

  /**
   * Sets if the X axis grid should be visible.
   *
   * @param showGrid the visibility flag for the X axis grid
   */
  public void setShowGridX(boolean showGrid) {
    mShowGridX = showGrid;
  }

  /**
   * Sets if the Y axis grid should be visible.
   *
   * @param showGrid the visibility flag for the Y axis grid
   */
  public void setShowGridY(boolean showGrid) {
    mShowGridY = showGrid;
  }

  /**
   * Sets if the grid should be visible (both axes).
   *
   * @param showGrid the visibility flag for the grid
   */
  public void setShowGrid(boolean showGrid) {
    setShowGridX(showGrid);
    setShowGridY(showGrid);
  }

  /**
   * Returns if the X axis custom text grid should be visible.
   *
   * @return the visibility flag for the X axis custom text grid
   */
  public boolean isShowCustomTextGridX() {
    return mShowCustomTextGridX;
  }

  /**
   * Returns if the Y axis custom text grid should be visible.
   *
   * @return the visibility flag for the custom text Y axis grid
   */
  public boolean isShowCustomTextGridY() {
    return mShowCustomTextGridY;
  }

  /**
   * Sets if the X axis custom text grid should be visible.
   *
   * @param showGrid the visibility flag for the X axis custom text grid
   */
  public void setShowCustomTextGridX(boolean showGrid) {
    mShowCustomTextGridX = showGrid;
  }

  /**
   * Sets if the Y axis custom text grid should be visible.
   *
   * @param showGrid the visibility flag for the Y axis custom text grid
   */
  public void setShowCustomTextGridY(boolean showGrid) {
    mShowCustomTextGridY = showGrid;
  }

  /**
   * Sets if the grid for custom X or Y labels should be visible (both axes).
   *
   * @param showGrid the visibility flag for the custom text grid
   */
  public void setShowCustomTextGrid(boolean showGrid) {
    setShowCustomTextGridX(showGrid);
    setShowCustomTextGridY(showGrid);
  }

  /**
   * Returns if the legend should be visible.
   *
   * @return the visibility flag for the legend
   */
  public boolean isShowLegend() {
    return mShowLegend;
  }

  /**
   * Sets if the legend should be visible.
   *
   * @param showLegend the visibility flag for the legend
   */
  public void setShowLegend(boolean showLegend) {
    mShowLegend = showLegend;
  }

  /**
   * Returns if the legend should size to fit.
   *
   * @return the fit behavior
   */
  public boolean isFitLegend() {
    return mFitLegend;
  }

  /**
   * Sets if the legend should size to fit.
   *
   * @param fit the fit behavior
   */
  public void setFitLegend(boolean fit) {
    mFitLegend = fit;
  }

  /**
   * Returns the text typeface name.
   *
   * @return the text typeface name
   */
  public String getTextTypefaceName() {
    return mTextTypefaceName;
  }

  /**
   * Returns the text typeface style.
   *
   * @return the text typeface style
   */
  public int getTextTypefaceStyle() {
    return mTextTypefaceStyle;
  }

  /**
   * Returns the text typeface.
   *
   * @return the text typeface
   */
  public Typeface getTextTypeface() {
    return mTextTypeface;
  }

  /**
   * Returns the legend text size.
   *
   * @return the legend text size
   */
  public float getLegendTextSize() {
    return mLegendTextSize;
  }

  /**
   * Sets the legend text size.
   *
   * @param textSize the legend text size
   */
  public void setLegendTextSize(float textSize) {
    mLegendTextSize = textSize;
  }

  /**
   * Sets the text typeface name and style.
   *
   * @param typefaceName the text typeface name
   * @param style the text typeface style
   */
  public void setTextTypeface(String typefaceName, int style) {
    mTextTypefaceName = typefaceName;
    mTextTypefaceStyle = style;
  }

  /**
   * Sets the text typeface.
   *
   * @param typeface the typeface
   */
  public void setTextTypeface(Typeface typeface) {
    mTextTypeface = typeface;
  }

  /**
   * Returns the antialiasing flag value.
   *
   * @return the antialiasing value
   */
  public boolean isAntialiasing() {
    return mAntialiasing;
  }

  /**
   * Sets the antialiasing value.
   *
   * @param antialiasing the antialiasing
   */
  public void setAntialiasing(boolean antialiasing) {
    mAntialiasing = antialiasing;
  }

  /**
   * Returns the value to be used for scaling the chart.
   *
   * @return the scale value
   */
  public float getScale() {
    return mScale;
  }

  /**
   * Returns the original value to be used for scaling the chart.
   *
   * @return the original scale value
   */
  public float getOriginalScale() {
    return mOriginalScale;
  }

  /**
   * Sets the value to be used for scaling the chart. It works on some charts
   * like pie, doughnut, dial.
   *
   * @param scale the scale value
   */
  public void setScale(float scale) {
    mScale = scale;
  }

  /**
   * Returns the enabled state of the zoom.
   *
   * @return if zoom is enabled
   */
  public boolean isZoomEnabled() {
    return mZoomEnabled;
  }

  /**
   * Sets the enabled state of the zoom.
   *
   * @param enabled zoom enabled
   */
  public void setZoomEnabled(boolean enabled) {
    mZoomEnabled = enabled;
  }

  /**
   * Returns the visible state of the zoom buttons.
   *
   * @return if zoom buttons are visible
   */
  public boolean isZoomButtonsVisible() {
    return mZoomButtonsVisible;
  }

  /**
   * Sets the visible state of the zoom buttons.
   *
   * @param visible if the zoom buttons are visible
   */
  public void setZoomButtonsVisible(boolean visible) {
    mZoomButtonsVisible = visible;
  }

  /**
   * Returns the enabled state of the external (application implemented) zoom.
   *
   * @return if external zoom is enabled
   */
  public boolean isExternalZoomEnabled() {
    return mExternalZoomEnabled;
  }

  /**
   * Sets the enabled state of the external (application implemented) zoom.
   *
   * @param enabled external zoom enabled
   */
  public void setExternalZoomEnabled(boolean enabled) {
    mExternalZoomEnabled = enabled;
  }

  /**
   * Returns the zoom rate.
   *
   * @return the zoom rate
   */
  public float getZoomRate() {
    return mZoomRate;
  }

  /**
   * Returns the enabled state of the pan.
   *
   * @return if pan is enabled
   */
  public boolean isPanEnabled() {
    return mPanEnabled;
  }

  /**
   * Sets the enabled state of the pan.
   *
   * @param enabled pan enabled
   */
  public void setPanEnabled(boolean enabled) {
    mPanEnabled = enabled;
  }

  /**
   * Sets the zoom rate.
   *
   * @param rate the zoom rate
   */
  public void setZoomRate(float rate) {
    mZoomRate = rate;
  }

  /**
   * Returns the enabled state of the click.
   *
   * @return if click is enabled
   */
  public boolean isClickEnabled() {
    return mClickEnabled;
  }

  /**
   * Sets the enabled state of the click.
   *
   * @param enabled click enabled
   */
  public void setClickEnabled(boolean enabled) {
    mClickEnabled = enabled;
  }

  /**
   * Returns the selectable radius value around clickable points.
   *
   * @return the selectable radius
   */
  public int getSelectableBuffer() {
    return selectableBuffer;
  }

  /**
   * Sets the selectable radius value around clickable points.
   *
   * @param buffer the selectable radius
   */
  public void setSelectableBuffer(int buffer) {
    selectableBuffer = buffer;
  }

  /**
   * Returns the legend height.
   *
   * @return the legend height
   */
  public int getLegendHeight() {
    return mLegendHeight;
  }

  /**
   * Sets the legend height, in pixels.
   *
   * @param height the legend height
   */
  public void setLegendHeight(int height) {
    mLegendHeight = height;
  }

  /**
   * Returns the margin sizes. An array containing the margins in this order:
   * top, left, bottom, right.
   *
   * @return the margin sizes
   */
  public int[] getMargins() {
    return mMargins;
  }

  /**
   * Sets the margins, in pixels.
   *
   * @param margins an array containing the margin size values, in this order:
   *          top, left, bottom, right
   */
  public void setMargins(int[] margins) {
    mMargins = margins;
  }

  /**
   * Returns if the chart is inside a scroll view and doesn't need to shrink.
   *
   * @return if it is inside a scroll view
   */
  public boolean isInScroll() {
    return mInScroll;
  }

  /**
   * To be set if the chart is inside a scroll view and doesn't need to shrink
   * when not enough space.
   *
   * @param inScroll if it is inside a scroll view
   */
  public void setInScroll(boolean inScroll) {
    mInScroll = inScroll;
  }

  /**
   * Returns the start angle for circular charts such as pie, doughnut. An angle
   * of 0 degrees correspond to the geometric angle of 0 degrees (3 o'clock on a
   * watch.)
   *
   * @return the start angle in degrees
   */
  public float getStartAngle() {
    return mStartAngle;
  }

  /**
   * Sets the start angle for circular charts such as pie, doughnut, etc. An
   * angle of 0 degrees correspond to the geometric angle of 0 degrees (3
   * o'clock on a watch.) Negative values are normalized into [0, 360).
   *
   * @param startAngle the start angle in degrees
   */
  public void setStartAngle(float startAngle) {
    // only negative inputs are normalized; values >= 360 are kept as-is
    while (startAngle < 0) {
      startAngle += 360;
    }
    mStartAngle = startAngle;
  }

  /**
   * Returns if the values should be displayed as text.
   *
   * @return if the values should be displayed as text
   */
  public boolean isDisplayValues() {
    return mDisplayValues;
  }

  /**
   * Sets if the values should be displayed as text (supported by pie chart).
   *
   * @param display if the values should be displayed as text
   */
  public void setDisplayValues(boolean display) {
    mDisplayValues = display;
  }

  /**
   * Returns the axis line width.
   *
   * @return the axis width
   */
  public float getAxisWidth() {
    return mAxisWidth;
  }

  /**
   * Sets the axis line width.
   *
   * @param axisWidth the axis width
   */
  public void setAxisWidth(float axisWidth) {
    this.mAxisWidth = axisWidth;
  }

  /**
   * Returns the icon margin sizes, in this order: top, left, bottom, right.
   *
   * @return the icon margin sizes
   */
  public int[] getIconMargins() {
    return mIconMargins;
  }
}
/* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ package org.lwjgl.util.lz4; import javax.annotation.*; import java.nio.*; import org.lwjgl.system.*; import static org.lwjgl.system.Checks.*; import static org.lwjgl.system.MemoryUtil.*; /** * Native bindings to <a target="_blank" href="http://lz4.github.io/lz4/">LZ4</a>, a lossless compression algorithm, providing compression speed &gt; 500 MB/s per core, * scalable with multi-cores CPU. It features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on * multi-core systems. * * <p>Speed can be tuned dynamically, selecting an "acceleration" factor which trades compression ratio for faster speed. On the other end, a high * compression derivative, {@code LZ4_HC}, is also provided, trading CPU time for improved compression ratio. All versions feature the same decompression * speed.</p> * * <p>LZ4 is also compatible with <a target="_blank" href="https://github.com/facebook/zstd#the-case-for-small-data-compression">dictionary compression</a>, and can ingest * any input file as dictionary, including those created by <a target="_blank" href="https://github.com/facebook/zstd/blob/v1.3.5/programs/zstd.1.md#dictionary-builder">Zstandard Dictionary Builder</a>. (note: only the final 64KB are used).</p> * * <p>The raw LZ4 block compression format is detailed within <a href="https://github.com/lz4/lz4/blob/dev/doc/lz4_Block_format.md">lz4_Block_format</a>.</p> * * <p>Arbitrarily long files or data streams are compressed using multiple blocks, for streaming requirements. These blocks are organized into a frame, * defined into <a target="_blank" href="https://github.com/lz4/lz4/blob/dev/doc/lz4_Frame_format.md">lz4_Frame_format</a>. 
Interoperable versions of LZ4 must also respect * the frame format.</p> * * <h3>In-place compression and decompression</h3> * * <p>It's possible to have input and output sharing the same buffer, for highly contrained memory environments. In both cases, it requires input to lay at * the end of the buffer, and decompression to start at beginning of the buffer. Buffer size must feature some margin, hence be larger than final size.</p> * * <pre><code> * |&lt;------------------------buffer---------------------------------&gt;| * |&lt;-----------compressed data---------&gt;| * |&lt;-----------decompressed size------------------&gt;| * |&lt;----margin----&gt;|</code></pre> * * <p>This technique is more useful for decompression, since decompressed size is typically larger, and margin is short.</p> * * <p>In-place decompression will work inside any buffer which size is &ge; {@code LZ4_DECOMPRESS_INPLACE_BUFFER_SIZE(decompressedSize)}. This presumes that * {@code decompressedSize} &gt; {@code compressedSize}. Otherwise, it means compression actually expanded data, and it would be more efficient to store * such data with a flag indicating it's not compressed. This can happen when data is not compressible (already compressed, or encrypted).</p> * * <p>For in-place compression, margin is larger, as it must be able to cope with both history preservation, requiring input data to remain unmodified up to * {@link #LZ4_DISTANCE_MAX DISTANCE_MAX}, and data expansion, which can happen when input is not compressible. As a consequence, buffer size requirements are much higher, and * memory savings offered by in-place compression are more limited.</p> * * <p>There are ways to limit this cost for compression:</p> * * <ul> * <li>Reduce history size, by modifying {@code LZ4_DISTANCE_MAX}. Note that it is a compile-time constant, so all compressions will apply this limit. 
* Lower values will reduce compression ratio, except when input_size &lt; {@code LZ4_DISTANCE_MAX}, so it's a reasonable trick when inputs are known * to be small.</li> * <li>Require the compressor to deliver a "maximum compressed size". This is the {@code dstCapacity} parameter in {@code LZ4_compress*()}. When this size * is &lt; {@code LZ4_COMPRESSBOUND(inputSize)}, then compression can fail, in which case, the return code will be 0 (zero). The caller must be ready * for these cases to happen, and typically design a backup scheme to send data uncompressed.</li> * </ul> * * <p>The combination of both techniques can significantly reduce the amount of margin required for in-place compression.</p> * * <p>In-place compression can work in any buffer which size is &ge; {@code (maxCompressedSize)} with {@code maxCompressedSize == LZ4_COMPRESSBOUND(srcSize)} * for guaranteed compression success. {@link #LZ4_COMPRESS_INPLACE_BUFFER_SIZE COMPRESS_INPLACE_BUFFER_SIZE} depends on both {@code maxCompressedSize} and {@code LZ4_DISTANCE_MAX}, so it's * possible to reduce memory requirements by playing with them.</p> */ public class LZ4 { static { LibLZ4.initialize(); } /** Version number part. */ public static final int LZ4_VERSION_MAJOR = 1, LZ4_VERSION_MINOR = 9, LZ4_VERSION_RELEASE = 3; /** Version number. */ public static final int LZ4_VERSION_NUMBER = (LZ4_VERSION_MAJOR *100*100 + LZ4_VERSION_MINOR *100 + LZ4_VERSION_RELEASE); /** Version string. */ public static final String LZ4_VERSION_STRING = LZ4_VERSION_MAJOR + "." + LZ4_VERSION_MINOR + "." + LZ4_VERSION_RELEASE; /** Maximum input size. */ public static final int LZ4_MAX_INPUT_SIZE = 0x7E000000; /** * Memory usage formula : {@code N->2^N} Bytes (examples: {@code 10 -> 1KB; 12 -> 4KB ; 16 -> 64KB; 20 -> 1MB;} etc.) * * <p>Increasing memory usage improves compression ratio. Reduced memory usage may improve speed, thanks to better cache locality. 
Default value is 14, for * 16KB, which nicely fits into Intel x86 L1 cache.</p> */ public static final int LZ4_MEMORY_USAGE = 14; public static final int LZ4_HASHLOG = (LZ4_MEMORY_USAGE - 2); public static final int LZ4_HASHTABLESIZE = (1 << LZ4_MEMORY_USAGE); public static final int LZ4_HASH_SIZE_U32 = (1 << LZ4_HASHLOG); public static final int LZ4_STREAMSIZE = 16416; public static final int LZ4_STREAMSIZE_VOIDP = LZ4_STREAMSIZE / Pointer.POINTER_SIZE; public static final int LZ4_STREAMDECODESIZE_U64 = 4 + (Pointer.POINTER_SIZE == 16 ? 2 : 0); public static final int LZ4_STREAMDECODESIZE = (LZ4_STREAMDECODESIZE_U64 * Long.BYTES); /** History window size; can be user-defined at compile time. */ public static final int LZ4_DISTANCE_MAX = 64; protected LZ4() { throw new UnsupportedOperationException(); } // --- [ LZ4_versionNumber ] --- /** Returns the version number. */ public static native int LZ4_versionNumber(); // --- [ LZ4_versionString ] --- /** Unsafe version of: {@link #LZ4_versionString versionString} */ public static native long nLZ4_versionString(); /** Returns the version string. */ @NativeType("char const *") public static String LZ4_versionString() { long __result = nLZ4_versionString(); return memASCII(__result); } // --- [ LZ4_compress_default ] --- /** * Unsafe version of: {@link #LZ4_compress_default compress_default} * * @param srcSize max supported value is {@link #LZ4_MAX_INPUT_SIZE MAX_INPUT_SIZE} * @param dstCapacity size of buffer {@code dst} (which must be already allocated) */ public static native int nLZ4_compress_default(long src, long dst, int srcSize, int dstCapacity); /** * Compresses {@code srcSize} bytes from buffer {@code src} into already allocated {@code dst} buffer of size {@code dstCapacity}. * * <p>Compression is guaranteed to succeed if {@code dstCapacity} &ge; {@link #LZ4_compressBound compressBound}{@code (srcSize)}. 
It also runs faster, so it's a recommended setting.</p> * * <p>If the function cannot compress {@code src} into a more limited {@code dst} budget, compression stops <i>immediately</i>, and the function result is * zero. In which case, {@code dst} content is undefined (invalid).</p> * * <p>This function is protected against buffer overflow scenarios (never writes outside {@code dst} buffer, nor read outside {@code src} buffer).</p> * * @return the number of bytes written into buffer {@code dest} (necessarily &le; {@code maxOutputSize}) or 0 if compression fails */ public static int LZ4_compress_default(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst) { return nLZ4_compress_default(memAddress(src), memAddress(dst), src.remaining(), dst.remaining()); } // --- [ LZ4_decompress_safe ] --- /** * Unsafe version of: {@link #LZ4_decompress_safe decompress_safe} * * @param compressedSize is the exact complete size of the compressed block * @param dstCapacity is the size of destination buffer (which must be already allocated), presumed an upper bound of decompressed size */ public static native int nLZ4_decompress_safe(long src, long dst, int compressedSize, int dstCapacity); /** * If destination buffer is not large enough, decoding will stop and output an error code (negative value). * * <p>If the source stream is detected malformed, the function will stop decoding and return a negative result.</p> * * <p>Note 1: This function is protected against malicious data packets: it will never write outside {@code dst} buffer, nor read outside {@code source} * buffer, even if the compressed block is maliciously modified to order the decoder to do these actions. In such case, the decoder stops immediately, and * considers the compressed block malformed.</p> * * <p>Note 2: {@code compressedSize} and {@code dstCapacity} must be provided to the function, the compressed block does not contain them. 
The implementation * is free to send / store / derive this information in whichever way is most beneficial. If there is a need for a different format which bundles together * both compressed data and its metadata, consider looking at {@code lz4frame.h} instead.</p> * * @return the number of bytes decompressed into destination buffer (necessarily &le; {@code dstCapacity}) */ public static int LZ4_decompress_safe(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst) { return nLZ4_decompress_safe(memAddress(src), memAddress(dst), src.remaining(), dst.remaining()); } // --- [ LZ4_COMPRESSBOUND ] --- /** See {@link #LZ4_compressBound compressBound}. */ public static int LZ4_COMPRESSBOUND(int isize) { return LZ4_MAX_INPUT_SIZE < isize ? 0 : isize + isize / 255 + 16; } // --- [ LZ4_compressBound ] --- /** * Provides the maximum size that LZ4 compression may output in a "worst case" scenario (input data not compressible). * * <p>This function is primarily useful for memory allocation purposes (destination buffer size). Macro {@link #LZ4_COMPRESSBOUND COMPRESSBOUND} is also provided for * compilation-time evaluation (stack memory allocation for example).</p> * * <p>Note that {@link #LZ4_compress_default compress_default} compresses faster when {@code dstCapacity} is &ge; {@link #LZ4_compressBound compressBound}{@code (srcSize)}</p> * * @param inputSize max supported value is {@link #LZ4_MAX_INPUT_SIZE MAX_INPUT_SIZE} * * @return maximum output size in a "worst case" scenario or 0, if input size is incorrect (too large or negative) */ public static native int LZ4_compressBound(int inputSize); // --- [ LZ4_compress_fast ] --- /** Unsafe version of: {@link #LZ4_compress_fast compress_fast} */ public static native int nLZ4_compress_fast(long src, long dst, int srcSize, int dstCapacity, int acceleration); /** * Same as {@link #LZ4_compress_default compress_default}, but allows selection of "acceleration" factor. 
* * <p>The larger the acceleration value, the faster the algorithm, but also the lesser the compression. It's a trade-off. It can be fine tuned, with each * successive value providing roughly +~3% to speed. An acceleration value of "1" is the same as regular {@link #LZ4_compress_default compress_default}. Values &le; 0 will be * replaced by {@code LZ4_ACCELERATION_DEFAULT} (currently == 1, see lz4.c). Values &gt; {@code LZ4_ACCELERATION_MAX} will be replaced by * {@code LZ4_ACCELERATION_MAX} (currently {@code == 65537}, see lz4.c).</p> */ public static int LZ4_compress_fast(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, int acceleration) { return nLZ4_compress_fast(memAddress(src), memAddress(dst), src.remaining(), dst.remaining(), acceleration); } // --- [ LZ4_sizeofState ] --- public static native int LZ4_sizeofState(); // --- [ LZ4_compress_fast_extState ] --- /** Unsafe version of: {@link #LZ4_compress_fast_extState compress_fast_extState} */ public static native int nLZ4_compress_fast_extState(long state, long src, long dst, int srcSize, int dstCapacity, int acceleration); /** * Same as {@link #LZ4_compress_fast compress_fast}, using an externally allocated memory space for its state. * * <p>Use {@link #LZ4_sizeofState sizeofState} to know how much memory must be allocated, and allocate it on 8-bytes boundaries (using {@code malloc()} typically). 
Then, provide * it as {@code void* state} to compression function.</p> */ public static int LZ4_compress_fast_extState(@NativeType("void *") ByteBuffer state, @NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, int acceleration) { return nLZ4_compress_fast_extState(memAddress(state), memAddress(src), memAddress(dst), src.remaining(), dst.remaining(), acceleration); } // --- [ LZ4_compress_destSize ] --- /** * Unsafe version of: {@link #LZ4_compress_destSize compress_destSize} * * @param srcSizePtr will be modified to indicate how many bytes where read from {@code source} to fill {@code dest}. New value is necessarily &le; input value. */ public static native int nLZ4_compress_destSize(long src, long dst, long srcSizePtr, int targetDstSize); /** * Reverse the logic: compresses as much data as possible from {@code src} buffer into already allocated buffer {@code dst} of size * {@code targetDstSize}. * * <p>This function either compresses the entire {@code src} content into {@code dst} if it's large enough, or fill {@code dst} buffer completely with as * much data as possible from {@code src}. Note: acceleration parameter is fixed to {@code "default"}.</p> * * @param srcSizePtr will be modified to indicate how many bytes where read from {@code source} to fill {@code dest}. New value is necessarily &le; input value. 
* * @return nb bytes written into {@code dest} (necessarily &le; {@code targetDestSize}) or 0 if compression fails */ public static int LZ4_compress_destSize(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, @NativeType("int *") IntBuffer srcSizePtr) { if (CHECKS) { check(srcSizePtr, 1); check(src, srcSizePtr.get(srcSizePtr.position())); } return nLZ4_compress_destSize(memAddress(src), memAddress(dst), memAddress(srcSizePtr), dst.remaining()); } // --- [ LZ4_decompress_safe_partial ] --- /** Unsafe version of: {@link #LZ4_decompress_safe_partial decompress_safe_partial} */ public static native int nLZ4_decompress_safe_partial(long src, long dst, int compressedSize, int targetOutputSize, int dstCapacity); /** * Decompresses an LZ4 compressed block, of size {@code srcSize} at position {@code src}, into destination buffer {@code dst} of size {@code dstCapacity}. * * <p>Up to {@code targetOutputSize} bytes will be decoded. The function stops decoding on reaching this objective. This can be useful to boost performance * whenever only the beginning of a block is required.</p> * * <p>Notes:</p> * * <ol> * <li>result can be &lt; {@code targetOutputSize}, if compressed block contains less data.</li> * <li>{@code targetOutputSize} must be &le; {@code dstCapacity}</li> * <li>this function effectively stops decoding on reaching {@code targetOutputSize}, so {@code dstCapacity} is kind of redundant. This is because in * older versions of this function, decoding operation would still write complete sequences. Therefore, there was no guarantee that it would stop * writing at exactly {@code targetOutputSize}, it could write more bytes, though only up to {@code dstCapacity}. Some "margin" used to be required * for this operation to work properly. Thankfully, this is no longer necessary. 
The function nonetheless keeps the same signature, in an effort to * preserve API compatibility.</li> * <li>if {@code srcSize} is the exact size of the block, then {@code targetOutputSize} can be any value, including larger than the block's decompressed * size. The function will, at most, generate block's decompressed size.</li> * <li>if {@code srcSize} is <em>larger</em> than block's compressed size, then {@code targetOutputSize} <b>MUST</b> be &le; block's decompressed size. * Otherwise, <em>silent corruption will occur</em>.</li> * </ol> * * @return the number of bytes decoded in {@code dst} (necessarily &le; {@code targetOutputSize}). If source stream is detected malformed, function returns a * negative result. */ public static int LZ4_decompress_safe_partial(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, int targetOutputSize) { return nLZ4_decompress_safe_partial(memAddress(src), memAddress(dst), src.remaining(), targetOutputSize, dst.remaining()); } // --- [ LZ4_createStream ] --- /** Allocates and initializes an {@code LZ4_stream_t} structure. */ @NativeType("LZ4_stream_t *") public static native long LZ4_createStream(); // --- [ LZ4_freeStream ] --- /** Unsafe version of: {@link #LZ4_freeStream freeStream} */ public static native int nLZ4_freeStream(long streamPtr); /** Releases memory of an {@code LZ4_stream_t} structure. */ public static int LZ4_freeStream(@NativeType("LZ4_stream_t *") long streamPtr) { if (CHECKS) { check(streamPtr); } return nLZ4_freeStream(streamPtr); } // --- [ LZ4_resetStream_fast ] --- /** Unsafe version of: {@link #LZ4_resetStream_fast resetStream_fast} */ public static native void nLZ4_resetStream_fast(long streamPtr); /** * Use this to prepare an {@code LZ4_stream_t} for a new chain of dependent blocks (e.g., {@link #LZ4_compress_fast_continue compress_fast_continue}). * * <p>An {@code LZ4_stream_t} must be initialized once before usage. 
This is automatically done when created by {@link #LZ4_createStream createStream}. However, should the * {@code LZ4_stream_t} be simply declared on stack (for example), it's necessary to initialize it first, using {@link #LZ4_initStream initStream}.</p> * * <p>After init, start any new stream with {@code LZ4_resetStream_fast()}. A same {@code LZ4_stream_t} can be re-used multiple times consecutively and * compress multiple streams, provided that it starts each new stream with {@code LZ4_resetStream_fast()}.</p> * * <p>{@code LZ4_resetStream_fast()} is much faster than {@code LZ4_initStream()}, but is not compatible with memory regions containing garbage data.</p> * * <p>Note: it's only useful to call {@code LZ4_resetStream_fast()} in the context of streaming compression. The {@code extState} functions perform their own * resets. Invoking {@code LZ4_resetStream_fast()} before is redundant, and even counterproductive.</p> * * @since version 1.9.0 */ public static void LZ4_resetStream_fast(@NativeType("LZ4_stream_t *") long streamPtr) { if (CHECKS) { check(streamPtr); } nLZ4_resetStream_fast(streamPtr); } // --- [ LZ4_loadDict ] --- /** Unsafe version of: {@link #LZ4_loadDict loadDict} */ public static native int nLZ4_loadDict(long streamPtr, long dictionary, int dictSize); /** * Use this function to reference a static dictionary into {@code LZ4_stream_t}. * * <p>The dictionary must remain available during compression. {@code LZ4_loadDict()} triggers a reset, so any previous data will be forgotten. The same * dictionary will have to be loaded on decompression side for successful decoding. Dictionarys are useful for better compression of small data (KB * range). While LZ4 accepts any input as dictionary, results are generally better when using Zstandard's Dictionary Builder. 
Loading a size of 0 is * allowed, and is the same as reset.</p> * * @return loaded dictionary size, in bytes (necessarily &le; 64 KB) */ public static int LZ4_loadDict(@NativeType("LZ4_stream_t *") long streamPtr, @Nullable @NativeType("char const *") ByteBuffer dictionary) { if (CHECKS) { check(streamPtr); } return nLZ4_loadDict(streamPtr, memAddressSafe(dictionary), remainingSafe(dictionary)); } // --- [ LZ4_compress_fast_continue ] --- /** Unsafe version of: {@link #LZ4_compress_fast_continue compress_fast_continue} */ public static native int nLZ4_compress_fast_continue(long streamPtr, long src, long dst, int srcSize, int dstCapacity, int acceleration); /** * Compress {@code src} content using data from previously compressed blocks, for better compression ratio. * * <p>{@code dst} buffer must be already allocated. If {@code dstCapacity} &ge; {@link #LZ4_compressBound compressBound}{@code (srcSize)}, compression is guaranteed to succeed, and * runs faster.</p> * * <p>Note 1: Each invocation to {@code LZ4_compress_fast_continue()} generates a new block. Each block has precise boundaries. Each block must be * decompressed separately, calling {@code LZ4_decompress_*()} with relevant metadata. It's not possible to append blocks together and expect a single * invocation of {@code LZ4_decompress_*()} to decompress them together.</p> * * <p>Note 2: The previous 64KB of source data is <em>assumed</em> to remain present, unmodified, at same address in memory!</p> * * <p>Note 3: When input is structured as a double-buffer, each buffer can have any size, including &lt; 64 KB. Make sure that buffers are separated, by at * least one byte. This construction ensures that each block only depends on previous block.</p> * * <p>Note 4: If input buffer is a ring-buffer, it can have any size, including &lt; 64 KB.</p> * * @return size of compressed block or 0 if there is an error (typically, cannot fit into {@code dst}). 
After an error, the stream status is undefined (invalid), * it can only be reset or freed. */ public static int LZ4_compress_fast_continue(@NativeType("LZ4_stream_t *") long streamPtr, @NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, int acceleration) { if (CHECKS) { check(streamPtr); } return nLZ4_compress_fast_continue(streamPtr, memAddress(src), memAddress(dst), src.remaining(), dst.remaining(), acceleration); } // --- [ LZ4_saveDict ] --- /** Unsafe version of: {@link #LZ4_saveDict saveDict} */ public static native int nLZ4_saveDict(long streamPtr, long safeBuffer, int maxDictSize); /** * If last 64KB data cannot be guaranteed to remain available at its current memory location, save it into a safer place ({@code char* safeBuffer}). * * <p>This is schematically equivalent to a {@code memcpy()} followed by {@link #LZ4_loadDict loadDict}, but is much faster, because {@code LZ4_saveDict()} doesn't need to * rebuild tables.</p> * * @return saved dictionary size in bytes (necessarily &le; {@code maxDictSize}), or 0 if error */ public static int LZ4_saveDict(@NativeType("LZ4_stream_t *") long streamPtr, @NativeType("char *") ByteBuffer safeBuffer) { if (CHECKS) { check(streamPtr); } return nLZ4_saveDict(streamPtr, memAddress(safeBuffer), safeBuffer.remaining()); } // --- [ LZ4_createStreamDecode ] --- /** * Creates a streaming decompression tracking context. * * <p>A tracking context can be re-used multiple times.</p> */ @NativeType("LZ4_streamDecode_t *") public static native long LZ4_createStreamDecode(); // --- [ LZ4_freeStreamDecode ] --- /** Unsafe version of: {@link #LZ4_freeStreamDecode freeStreamDecode} */ public static native int nLZ4_freeStreamDecode(long LZ4_stream); /** Frees a streaming decompression tracking context. 
*/ public static int LZ4_freeStreamDecode(@NativeType("LZ4_streamDecode_t *") long LZ4_stream) { if (CHECKS) { check(LZ4_stream); } return nLZ4_freeStreamDecode(LZ4_stream); } // --- [ LZ4_setStreamDecode ] --- /** Unsafe version of: {@link #LZ4_setStreamDecode setStreamDecode} */ public static native int nLZ4_setStreamDecode(long LZ4_streamDecode, long dictionary, int dictSize); /** * An {@code LZ4_streamDecode_t} context can be allocated once and re-used multiple times. Use this function to start decompression of a new stream of * blocks. * * <p>A dictionary can optionally be set. Use {@code NULL} or size 0 for a reset order. Dictionary is presumed stable: it must remain accessible and unmodified * during next decompression.</p> * * @return 1 if OK, 0 if error */ @NativeType("int") public static boolean LZ4_setStreamDecode(@NativeType("LZ4_streamDecode_t *") long LZ4_streamDecode, @NativeType("char const *") ByteBuffer dictionary) { if (CHECKS) { check(LZ4_streamDecode); } return nLZ4_setStreamDecode(LZ4_streamDecode, memAddress(dictionary), dictionary.remaining()) != 0; } // --- [ LZ4_decoderRingBufferSize ] --- /** * In a ring buffer scenario (optional), blocks are presumed decompressed next to each other up to the moment there is not enough remaining space for next * block ({@code remainingSize &lt; maxBlockSize}), at which stage it resumes from beginning of ring buffer. When setting such a ring buffer for streaming * decompression, provides the minimum size of this ring buffer to be compatible with any source respecting {@code maxBlockSize} condition. 
* * @return minimum ring buffer size, or 0 if there is an error (invalid {@code maxBlockSize}) * * @since version 1.8.2 */ public static native int LZ4_decoderRingBufferSize(int maxBlockSize); // --- [ LZ4_decompress_safe_continue ] --- /** Unsafe version of: {@link #LZ4_decompress_safe_continue decompress_safe_continue} */ public static native int nLZ4_decompress_safe_continue(long LZ4_streamDecode, long src, long dst, int srcSize, int dstCapacity); /** * These decoding functions allow decompression of consecutive blocks in "streaming" mode. * * <p>A block is an unsplittable entity, it must be presented entirely to a decompression function. Decompression functions only accept one block at a time. * The last 64KB of previously decoded data <i>must</i> remain available and unmodified at the memory position where they were decoded. If less than 64KB * of data has been decoded, all the data must be present.</p> * * <p>Special: if decompression side sets a ring buffer, it must respect one of the following conditions:</p> * * <ul> * <li>Decompression buffer size is <i>at least</i> {@link #LZ4_decoderRingBufferSize decoderRingBufferSize}({@code maxBlockSize}). {@code maxBlockSize} is the maximum size of any single * block. It can have any value &gt; 16 bytes. In which case, encoding and decoding buffers do not need to be synchronized. 
Actually, data can be * produced by any source compliant with LZ4 format specification, and respecting {@code maxBlockSize}.</li> * <li>Synchronized mode: Decompression buffer size is <i>exactly</i> the same as compression buffer size, and follows exactly same update rule (block * boundaries at same positions), and decoding function is provided with exact decompressed size of each block (exception for last block of the * stream), <i>then</i> decoding &amp; encoding ring buffer can have any size, including small ones ( &lt; 64 KB).</li> * <li>Decompression buffer is larger than encoding buffer, by a minimum of {@code maxBlockSize} more bytes. In which case, encoding and decoding buffers * do not need to be synchronized, and encoding ring buffer can have any size, including small ones ( &lt; 64 KB).</li> * </ul> * * <p>Whenever these conditions are not possible, save the last 64KB of decoded data into a safe buffer where it can't be modified during decompression, then * indicate where this data is saved using {@link #LZ4_setStreamDecode setStreamDecode}, before decompressing next block.</p> */ public static int LZ4_decompress_safe_continue(@NativeType("LZ4_streamDecode_t *") long LZ4_streamDecode, @NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst) { if (CHECKS) { check(LZ4_streamDecode); } return nLZ4_decompress_safe_continue(LZ4_streamDecode, memAddress(src), memAddress(dst), src.remaining(), dst.remaining()); } // --- [ LZ4_decompress_safe_usingDict ] --- /** Unsafe version of: {@link #LZ4_decompress_safe_usingDict decompress_safe_usingDict} */ public static native int nLZ4_decompress_safe_usingDict(long src, long dst, int srcSize, int dstCapacity, long dictStart, int dictSize); /** * These decoding functions work the same as a combination of {@link #LZ4_setStreamDecode setStreamDecode} followed by {@code LZ4_decompress_*_continue()}. They are stand-alone, * and don't need an {@code LZ4_streamDecode_t} structure. 
* * <p>Dictionary is presumed stable: it must remain accessible and unmodified during decompression.</p> * * <p>Performance tip: Decompression speed can be substantially increased when {@code dst == dictStart + dictSize}.</p> */ public static int LZ4_decompress_safe_usingDict(@NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, @NativeType("char const *") ByteBuffer dictStart) { return nLZ4_decompress_safe_usingDict(memAddress(src), memAddress(dst), src.remaining(), dst.remaining(), memAddress(dictStart), dictStart.remaining()); } // --- [ LZ4_compress_fast_extState_fastReset ] --- /** Unsafe version of: {@link #LZ4_compress_fast_extState_fastReset compress_fast_extState_fastReset} */ public static native int nLZ4_compress_fast_extState_fastReset(long state, long src, long dst, int srcSize, int dstCapacity, int acceleration); /** * A variant of {@link #LZ4_compress_fast_extState compress_fast_extState}. * * <p>Using this variant avoids an expensive initialization step. It is only safe to call if the state buffer is known to be correctly initialized already * (see above comment on {@link #LZ4_resetStream_fast resetStream_fast} for a definition of "correctly initialized"). 
From a high level, the difference is that this function * initializes the provided state with a call to something like {@link #LZ4_resetStream_fast resetStream_fast} while {@link #LZ4_compress_fast_extState compress_fast_extState} starts with a call to {@link #LZ4_initStream initStream}.</p> */ public static int LZ4_compress_fast_extState_fastReset(@NativeType("void *") ByteBuffer state, @NativeType("char const *") ByteBuffer src, @NativeType("char *") ByteBuffer dst, int acceleration) { return nLZ4_compress_fast_extState_fastReset(memAddress(state), memAddress(src), memAddress(dst), src.remaining(), dst.remaining(), acceleration); } // --- [ LZ4_attach_dictionary ] --- /** Unsafe version of: {@link #LZ4_attach_dictionary attach_dictionary} */ public static native void nLZ4_attach_dictionary(long workingStream, long dictionaryStream); /** * This is an experimental API that allows efficient use of a static dictionary many times. * * <p>Rather than re-loading the dictionary buffer into a working context before each compression, or copying a pre-loaded dictionary's {@code LZ4_stream_t} * into a working {@code LZ4_stream_t}, this function introduces a no-copy setup mechanism, in which the working stream references the dictionary stream * in-place.</p> * * <p>Several assumptions are made about the state of the dictionary stream. Currently, only streams which have been prepared by {@link #LZ4_loadDict loadDict} should be * expected to work.</p> * * <p>Alternatively, the provided {@code dictionaryStream} may be {@code NULL}, in which case any existing dictionary stream is unset.</p> * * <p>If a dictionary is provided, it replaces any pre-existing stream history. The dictionary contents are the only history that can be referenced and * logically immediately precede the data compressed in the first subsequent compression call.</p> * * <p>The dictionary will only remain attached to the working stream through the first compression call, at the end of which it is cleared. 
The dictionary * stream (and source buffer) must remain in-place / accessible / unchanged through the completion of the first compression call on the stream.</p> */ public static void LZ4_attach_dictionary(@NativeType("LZ4_stream_t *") long workingStream, @NativeType("LZ4_stream_t const *") long dictionaryStream) { if (CHECKS) { check(workingStream); } nLZ4_attach_dictionary(workingStream, dictionaryStream); } // --- [ LZ4_initStream ] --- /** Unsafe version of: {@link #LZ4_initStream initStream} */ public static native long nLZ4_initStream(long buffer, long size); /** * An {@code LZ4_stream_t} structure must be initialized at least once. This is automatically done when invoking createStream(), but it's not when the * structure is simply declared on stack (for example). * * <p>Use {@code LZ4_initStream()} to properly initialize a newly declared {@code LZ4_stream_t}. It can also initialize any arbitrary buffer of sufficient * size, and will return a pointer of proper type upon initialization.</p> * * <p>Note: initialization fails if size and alignment conditions are not respected. In which case, the function will {@code NULL}.</p> * * <p>Note 2: An {@code LZ4_stream_t} structure guarantees correct alignment and size.</p> * * @since 1.9.0 */ @NativeType("LZ4_stream_t *") public static long LZ4_initStream(@NativeType("void *") ByteBuffer buffer) { return nLZ4_initStream(memAddress(buffer), buffer.remaining()); } // --- [ LZ4_DECOMPRESS_INPLACE_MARGIN ] --- public static int LZ4_DECOMPRESS_INPLACE_MARGIN(int compressedSize) { return (compressedSize >>> 8) + 32; } // --- [ LZ4_DECOMPRESS_INPLACE_BUFFER_SIZE ] --- /** * Note: presumes that {@code compressedSize} &lt; {@code decompressedSize}. 
* * <p>Note 2: margin is overestimated a bit, since it could use {@code compressedSize instead}.</p> */ public static int LZ4_DECOMPRESS_INPLACE_BUFFER_SIZE(int decompressedSize) { return decompressedSize + LZ4_DECOMPRESS_INPLACE_MARGIN(decompressedSize); } // --- [ LZ4_COMPRESS_INPLACE_MARGIN ] --- public static int LZ4_COMPRESS_INPLACE_MARGIN() { return LZ4_DISTANCE_MAX + 32; } // --- [ LZ4_COMPRESS_INPLACE_BUFFER_SIZE ] --- /** @param maxCompressedSize is generally {@link #LZ4_COMPRESSBOUND COMPRESSBOUND}{@code (inputSize)}, but can be set to any lower value, with the risk that compression can fail (return code 0) */ public static int LZ4_COMPRESS_INPLACE_BUFFER_SIZE(int maxCompressedSize) { return maxCompressedSize + LZ4_COMPRESS_INPLACE_MARGIN(); } /** For static allocation; {@code maxBlockSize} presumed valid. */ public static int LZ4_DECODER_RING_BUFFER_SIZE(int maxBlockSize) { return 65536 + 14 + maxBlockSize; } }
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */

package org.oep.core.datamgt.service;

import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.io.unsync.UnsyncByteArrayInputStream;
import com.liferay.portal.kernel.io.unsync.UnsyncByteArrayOutputStream;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.util.ClassLoaderObjectInputStream;
import com.liferay.portal.kernel.util.PropsUtil;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.model.BaseModel;

import org.oep.core.datamgt.model.DictAttributeClp;
import org.oep.core.datamgt.model.DictCollectionClp;
import org.oep.core.datamgt.model.DictDataClp;
import org.oep.core.datamgt.model.DictMetaDataClp;

import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

/**
 * Translates dictionary model objects (DictAttribute, DictCollection,
 * DictData, DictMetaData) between the portlet class loader ("Clp" proxy
 * models) and the portal class loader (remote/Impl models), and translates
 * throwables across the same boundary.
 *
 * <p>This class follows the structure of Liferay service-builder generated
 * ClpSerializers; the repeated reflective "Clp" translation has been factored
 * into {@link #_translateOutputClp(BaseModel)}.
 *
 * @author NQMINH
 */
public class ClpSerializer {

	/**
	 * Resolves the servlet context name of the deploying portlet, trying the
	 * portlet properties first, then portal properties, and finally falling
	 * back to the hard-coded default {@code "oep-core-datamgt-portlet"}.
	 *
	 * @return the resolved servlet context name (never {@code null} after the
	 *         first successful call)
	 */
	public static String getServletContextName() {
		if (Validator.isNotNull(_servletContextName)) {
			return _servletContextName;
		}

		// NOTE(review): classic double-checked pattern without a volatile
		// field; safe enough here because the value is idempotent, but the
		// unsynchronized first read may race — TODO confirm this matches the
		// generated Liferay baseline before tightening.
		synchronized (ClpSerializer.class) {
			if (Validator.isNotNull(_servletContextName)) {
				return _servletContextName;
			}

			try {
				ClassLoader classLoader = ClpSerializer.class.getClassLoader();

				// PortletProps is loaded reflectively because it lives in the
				// portlet's own class loader, not on the portal classpath.
				Class<?> portletPropsClass = classLoader.loadClass(
						"com.liferay.util.portlet.PortletProps");

				Method getMethod = portletPropsClass.getMethod("get",
						new Class<?>[] { String.class });

				String portletPropsServletContextName = (String)getMethod.invoke(null,
						"oep-core-datamgt-portlet-deployment-context");

				if (Validator.isNotNull(portletPropsServletContextName)) {
					_servletContextName = portletPropsServletContextName;
				}
			}
			catch (Throwable t) {
				if (_log.isInfoEnabled()) {
					_log.info(
						"Unable to locate deployment context from portlet properties");
				}
			}

			if (Validator.isNull(_servletContextName)) {
				try {
					String propsUtilServletContextName = PropsUtil.get(
							"oep-core-datamgt-portlet-deployment-context");

					if (Validator.isNotNull(propsUtilServletContextName)) {
						_servletContextName = propsUtilServletContextName;
					}
				}
				catch (Throwable t) {
					if (_log.isInfoEnabled()) {
						_log.info(
							"Unable to locate deployment context from portal properties");
					}
				}
			}

			if (Validator.isNull(_servletContextName)) {
				_servletContextName = "oep-core-datamgt-portlet";
			}

			return _servletContextName;
		}
	}

	/**
	 * Translates a Clp model coming INTO the service layer to its remote
	 * (portal-side) counterpart. Unknown model classes pass through unchanged.
	 *
	 * @param oldModel the portlet-side model
	 * @return the portal-side model, or {@code oldModel} if not recognized
	 */
	public static Object translateInput(BaseModel<?> oldModel) {
		Class<?> oldModelClass = oldModel.getClass();

		String oldModelClassName = oldModelClass.getName();

		if (oldModelClassName.equals(DictAttributeClp.class.getName())) {
			return translateInputDictAttribute(oldModel);
		}

		if (oldModelClassName.equals(DictCollectionClp.class.getName())) {
			return translateInputDictCollection(oldModel);
		}

		if (oldModelClassName.equals(DictDataClp.class.getName())) {
			return translateInputDictData(oldModel);
		}

		if (oldModelClassName.equals(DictMetaDataClp.class.getName())) {
			return translateInputDictMetaData(oldModel);
		}

		return oldModel;
	}

	/**
	 * Translates every element of a list via {@link #translateInput(Object)}.
	 *
	 * @param oldList the incoming list
	 * @return a new list with each element translated
	 */
	public static Object translateInput(List<Object> oldList) {
		List<Object> newList = new ArrayList<Object>(oldList.size());

		for (int i = 0; i < oldList.size(); i++) {
			Object curObj = oldList.get(i);

			newList.add(translateInput(curObj));
		}

		return newList;
	}

	/** Translates a {@link DictAttributeClp} to its remote model. */
	public static Object translateInputDictAttribute(BaseModel<?> oldModel) {
		DictAttributeClp oldClpModel = (DictAttributeClp)oldModel;

		BaseModel<?> newModel = oldClpModel.getDictAttributeRemoteModel();

		newModel.setModelAttributes(oldClpModel.getModelAttributes());

		return newModel;
	}

	/** Translates a {@link DictCollectionClp} to its remote model. */
	public static Object translateInputDictCollection(BaseModel<?> oldModel) {
		DictCollectionClp oldClpModel = (DictCollectionClp)oldModel;

		BaseModel<?> newModel = oldClpModel.getDictCollectionRemoteModel();

		newModel.setModelAttributes(oldClpModel.getModelAttributes());

		return newModel;
	}

	/** Translates a {@link DictDataClp} to its remote model. */
	public static Object translateInputDictData(BaseModel<?> oldModel) {
		DictDataClp oldClpModel = (DictDataClp)oldModel;

		BaseModel<?> newModel = oldClpModel.getDictDataRemoteModel();

		newModel.setModelAttributes(oldClpModel.getModelAttributes());

		return newModel;
	}

	/** Translates a {@link DictMetaDataClp} to its remote model. */
	public static Object translateInputDictMetaData(BaseModel<?> oldModel) {
		DictMetaDataClp oldClpModel = (DictMetaDataClp)oldModel;

		BaseModel<?> newModel = oldClpModel.getDictMetaDataRemoteModel();

		newModel.setModelAttributes(oldClpModel.getModelAttributes());

		return newModel;
	}

	/**
	 * Dispatches input translation by runtime type: models and lists are
	 * translated, anything else passes through untouched.
	 */
	public static Object translateInput(Object obj) {
		if (obj instanceof BaseModel<?>) {
			return translateInput((BaseModel<?>)obj);
		}
		else if (obj instanceof List<?>) {
			// Unchecked by design: lists crossing the boundary are treated as
			// List<Object>, matching the generated-serializer convention.
			return translateInput((List<Object>)obj);
		}
		else {
			return obj;
		}
	}

	/**
	 * Translates a portal-side model going OUT of the service layer into the
	 * corresponding Clp model. Impl classes of this portlet are handled
	 * directly; any other "*Clp" model is translated reflectively through its
	 * own ClpSerializer (see {@link #_translateOutputClp(BaseModel)}).
	 * Unrecognized models pass through unchanged.
	 *
	 * @param oldModel the portal-side model
	 * @return the portlet-side model, or {@code oldModel} if not recognized
	 */
	public static Object translateOutput(BaseModel<?> oldModel) {
		Class<?> oldModelClass = oldModel.getClass();

		String oldModelClassName = oldModelClass.getName();

		if (oldModelClassName.equals(
				"org.oep.core.datamgt.model.impl.DictAttributeImpl")) {
			return translateOutputDictAttribute(oldModel);
		}
		else if (oldModelClassName.endsWith("Clp")) {
			Object newModel = _translateOutputClp(oldModel);

			if (newModel != null) {
				return newModel;
			}
		}

		if (oldModelClassName.equals(
				"org.oep.core.datamgt.model.impl.DictCollectionImpl")) {
			return translateOutputDictCollection(oldModel);
		}
		else if (oldModelClassName.endsWith("Clp")) {
			// Deliberately retried: a transient reflection failure above falls
			// through to the next section, preserving the behavior of the
			// generated serializer which repeated this attempt per model type.
			Object newModel = _translateOutputClp(oldModel);

			if (newModel != null) {
				return newModel;
			}
		}

		if (oldModelClassName.equals(
				"org.oep.core.datamgt.model.impl.DictDataImpl")) {
			return translateOutputDictData(oldModel);
		}
		else if (oldModelClassName.endsWith("Clp")) {
			Object newModel = _translateOutputClp(oldModel);

			if (newModel != null) {
				return newModel;
			}
		}

		if (oldModelClassName.equals(
				"org.oep.core.datamgt.model.impl.DictMetaDataImpl")) {
			return translateOutputDictMetaData(oldModel);
		}
		else if (oldModelClassName.endsWith("Clp")) {
			Object newModel = _translateOutputClp(oldModel);

			if (newModel != null) {
				return newModel;
			}
		}

		return oldModel;
	}

	/**
	 * Translates an arbitrary "*Clp" model through the ClpSerializer declared
	 * by the model itself: loads that serializer in THIS class loader, fetches
	 * the model's remote counterpart via its {@code get<Model>RemoteModel()}
	 * accessor, and delegates to the serializer's static
	 * {@code translateOutput(BaseModel)}.
	 *
	 * @param oldModel the foreign Clp model
	 * @return the translated model, or {@code null} if translation failed
	 *         (failure is logged at INFO, mirroring the generated code)
	 */
	private static Object _translateOutputClp(BaseModel<?> oldModel) {
		Class<?> oldModelClass = oldModel.getClass();

		try {
			ClassLoader classLoader = ClpSerializer.class.getClassLoader();

			Method getClpSerializerClassMethod = oldModelClass.getMethod(
					"getClpSerializerClass");

			Class<?> oldClpSerializerClass = (Class<?>)getClpSerializerClassMethod.invoke(oldModel);

			Class<?> newClpSerializerClass = classLoader.loadClass(oldClpSerializerClass.getName());

			Method translateOutputMethod = newClpSerializerClass.getMethod(
					"translateOutput", BaseModel.class);

			Class<?> oldModelModelClass = oldModel.getModelClass();

			Method getRemoteModelMethod = oldModelClass.getMethod("get" +
					oldModelModelClass.getSimpleName() + "RemoteModel");

			Object oldRemoteModel = getRemoteModelMethod.invoke(oldModel);

			return (BaseModel<?>)translateOutputMethod.invoke(null,
					oldRemoteModel);
		}
		catch (Throwable t) {
			if (_log.isInfoEnabled()) {
				_log.info("Unable to translate " + oldModelClass.getName(), t);
			}

			return null;
		}
	}

	/**
	 * Translates every element of a list via {@link #translateOutput(Object)}.
	 */
	public static Object translateOutput(List<Object> oldList) {
		List<Object> newList = new ArrayList<Object>(oldList.size());

		for (int i = 0; i < oldList.size(); i++) {
			Object curObj = oldList.get(i);

			newList.add(translateOutput(curObj));
		}

		return newList;
	}

	/**
	 * Dispatches output translation by runtime type: models and lists are
	 * translated, anything else passes through untouched.
	 */
	public static Object translateOutput(Object obj) {
		if (obj instanceof BaseModel<?>) {
			return translateOutput((BaseModel<?>)obj);
		}
		else if (obj instanceof List<?>) {
			return translateOutput((List<Object>)obj);
		}
		else {
			return obj;
		}
	}

	/**
	 * Translates a throwable across the class-loader boundary. First tries to
	 * serialize/deserialize it into the context class loader; if reflection is
	 * forbidden (SecurityException) it falls back to re-instantiating a small
	 * set of known exception types by class name.
	 *
	 * <p>Note: the by-name fallback constructs the exceptions with their
	 * no-arg constructor, so message and cause are intentionally dropped —
	 * this mirrors the generated Liferay serializer.
	 *
	 * @param throwable the original throwable
	 * @return the translated throwable, or the original if no mapping applies
	 */
	public static Throwable translateThrowable(Throwable throwable) {
		if (_useReflectionToTranslateThrowable) {
			try {
				UnsyncByteArrayOutputStream unsyncByteArrayOutputStream = new UnsyncByteArrayOutputStream();
				ObjectOutputStream objectOutputStream = new ObjectOutputStream(unsyncByteArrayOutputStream);

				objectOutputStream.writeObject(throwable);

				objectOutputStream.flush();
				objectOutputStream.close();

				UnsyncByteArrayInputStream unsyncByteArrayInputStream = new UnsyncByteArrayInputStream(unsyncByteArrayOutputStream.unsafeGetByteArray(),
						0, unsyncByteArrayOutputStream.size());

				Thread currentThread = Thread.currentThread();

				ClassLoader contextClassLoader = currentThread.getContextClassLoader();

				// Re-reading through the context class loader rebinds the
				// throwable's class to the caller's side of the boundary.
				ObjectInputStream objectInputStream = new ClassLoaderObjectInputStream(unsyncByteArrayInputStream,
						contextClassLoader);

				throwable = (Throwable)objectInputStream.readObject();

				objectInputStream.close();

				return throwable;
			}
			catch (SecurityException se) {
				if (_log.isInfoEnabled()) {
					_log.info("Do not use reflection to translate throwable");
				}

				// Sticky flag: once reflection is denied, use the by-name
				// fallback for all subsequent calls.
				_useReflectionToTranslateThrowable = false;
			}
			catch (Throwable throwable2) {
				_log.error(throwable2, throwable2);

				return throwable2;
			}
		}

		Class<?> clazz = throwable.getClass();

		String className = clazz.getName();

		if (className.equals(PortalException.class.getName())) {
			return new PortalException();
		}

		if (className.equals(SystemException.class.getName())) {
			return new SystemException();
		}

		if (className.equals("org.oep.core.datamgt.DictAttributeNameException")) {
			return new org.oep.core.datamgt.DictAttributeNameException();
		}

		if (className.equals("org.oep.core.datamgt.DictCollectionNameException")) {
			return new org.oep.core.datamgt.DictCollectionNameException();
		}

		if (className.equals(
				"org.oep.core.datamgt.DictCollectionStatusException")) {
			return new org.oep.core.datamgt.DictCollectionStatusException();
		}

		if (className.equals(
				"org.oep.core.datamgt.DictCollectionTitleException")) {
			return new org.oep.core.datamgt.DictCollectionTitleException();
		}

		if (className.equals(
				"org.oep.core.datamgt.DictCollectionVersionException")) {
			return new org.oep.core.datamgt.DictCollectionVersionException();
		}

		if (className.equals("org.oep.core.datamgt.DictDataDataCodeException")) {
			return new org.oep.core.datamgt.DictDataDataCodeException();
		}

		if (className.equals("org.oep.core.datamgt.DuplicateException")) {
			return new org.oep.core.datamgt.DuplicateException();
		}

		if (className.equals("org.oep.core.datamgt.NotFoundException")) {
			return new org.oep.core.datamgt.NotFoundException();
		}

		if (className.equals("org.oep.core.datamgt.NotIdentifiedException")) {
			return new org.oep.core.datamgt.NotIdentifiedException();
		}

		if (className.equals("org.oep.core.datamgt.NotPermissionException")) {
			return new org.oep.core.datamgt.NotPermissionException();
		}

		if (className.equals(
				"org.oep.core.datamgt.NoSuchDictAttributeException")) {
			return new org.oep.core.datamgt.NoSuchDictAttributeException();
		}

		if (className.equals(
				"org.oep.core.datamgt.NoSuchDictCollectionException")) {
			return new org.oep.core.datamgt.NoSuchDictCollectionException();
		}

		if (className.equals("org.oep.core.datamgt.NoSuchDictDataException")) {
			return new org.oep.core.datamgt.NoSuchDictDataException();
		}

		if (className.equals("org.oep.core.datamgt.NoSuchDictMetaDataException")) {
			return new org.oep.core.datamgt.NoSuchDictMetaDataException();
		}

		return throwable;
	}

	/** Wraps a remote DictAttribute model in a new {@link DictAttributeClp}. */
	public static Object translateOutputDictAttribute(BaseModel<?> oldModel) {
		DictAttributeClp newModel = new DictAttributeClp();

		newModel.setModelAttributes(oldModel.getModelAttributes());

		newModel.setDictAttributeRemoteModel(oldModel);

		return newModel;
	}

	/** Wraps a remote DictCollection model in a new {@link DictCollectionClp}. */
	public static Object translateOutputDictCollection(BaseModel<?> oldModel) {
		DictCollectionClp newModel = new DictCollectionClp();

		newModel.setModelAttributes(oldModel.getModelAttributes());

		newModel.setDictCollectionRemoteModel(oldModel);

		return newModel;
	}

	/** Wraps a remote DictData model in a new {@link DictDataClp}. */
	public static Object translateOutputDictData(BaseModel<?> oldModel) {
		DictDataClp newModel = new DictDataClp();

		newModel.setModelAttributes(oldModel.getModelAttributes());

		newModel.setDictDataRemoteModel(oldModel);

		return newModel;
	}

	/** Wraps a remote DictMetaData model in a new {@link DictMetaDataClp}. */
	public static Object translateOutputDictMetaData(BaseModel<?> oldModel) {
		DictMetaDataClp newModel = new DictMetaDataClp();

		newModel.setModelAttributes(oldModel.getModelAttributes());

		newModel.setDictMetaDataRemoteModel(oldModel);

		return newModel;
	}

	private static Log _log = LogFactoryUtil.getLog(ClpSerializer.class);
	private static String _servletContextName;
	private static boolean _useReflectionToTranslateThrowable = true;
}
package de.nikem.jebu.impl.websocket.server;

import static de.nikem.jebu.util.Closer.close;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;

import javax.json.Json;
import javax.json.JsonException;
import javax.json.stream.JsonParser;
import javax.json.stream.JsonParser.Event;
import javax.websocket.CloseReason;
import javax.websocket.CloseReason.CloseCodes;
import javax.websocket.EndpointConfig;
import javax.websocket.OnClose;
import javax.websocket.OnError;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import de.nikem.jebu.api.EventBus;
import de.nikem.jebu.api.Subscriber;
import de.nikem.jebu.impl.EventBusImpl;
import de.nikem.jebu.impl.websocket.JebuWebsocketEvent;
import de.nikem.jebu.impl.websocket.JebuWebsocketEvent.Action;

/**
 * WebSocket implementation for jebu (nikem event bus).<br>
 * A single endpoint serves two paths: {@code /eventbus} carries event-bus
 * traffic (binary Java-serialized or JSON text messages), while
 * {@code /manager} sessions receive a JSON status snapshot of the bus after
 * every state change.
 *
 * @author uawet0ju
 */
@ServerEndpoint(value = "/{path}", configurator = JebuServerConfigurator.class)
public class JebuServerEndpoint {
	// Path segment for event-bus client connections.
	public final static String PATH_EVENTBUS = "eventbus";
	// Path segment for monitoring/manager connections.
	public final static String PATH_MANAGER = "manager";
	// Applied to both binary and text buffers on every new session.
	public static final int MAX_MESSAGE_BUFFER_SIZE = 10 * 1024 * 1024;

	private final Logger log = LoggerFactory.getLogger(getClass());
	// The actual event bus; one instance per endpoint instance.
	private final EventBusImpl jebu = new EventBusImpl();
	// Manager sessions; synchronized wrapper since callbacks may run on
	// different container threads.
	private final Collection<Session> managerSessions = Collections.synchronizedCollection(new HashSet<Session>());

	public JebuServerEndpoint() {
		log.debug("JebuServerEndpoint instantiation");
	}

	/**
	 * Connection handler: raises the message buffer limits, publishes this
	 * endpoint's bus and manager-session collection into the shared server
	 * context (if the configurator provided one), registers manager sessions,
	 * and pushes a fresh status snapshot to all managers.
	 *
	 * @param session the newly opened websocket session
	 * @param config endpoint configuration carrying the server context
	 * @param path the path segment the client connected to
	 */
	@OnOpen
	public void onOpen(Session session, EndpointConfig config, @PathParam("path") String path) {
		log.debug("connect on {} {{}} from {}", this, path, session.getId());
		session.setMaxBinaryMessageBufferSize(MAX_MESSAGE_BUFFER_SIZE);
		session.setMaxTextMessageBufferSize(MAX_MESSAGE_BUFFER_SIZE);

		JebuServerContext serverContext = (JebuServerContext) config.getUserProperties().get(JebuServerContext.JEBU_SERVER_CONTEXT);
		if (serverContext != null) {
			serverContext.setJebu(getJebu());
			serverContext.setManagerSessions(getManagerSessions());
		}

		if (PATH_MANAGER.equals(path)) {
			getManagerSessions().add(session);
		}
		publishManagers();
	}

	/**
	 * Binary message entry point; only event-bus path messages are handled.
	 *
	 * @param message Java-serialized {@link JebuWebsocketEvent} bytes
	 * @param session the sending session
	 * @param path the path segment of the connection
	 */
	@OnMessage
	public void onMessage(byte[] message, Session session, @PathParam("path") String path) {
		log.trace("message from {}", session.getId());
		if (PATH_EVENTBUS.equals(path)) {
			onEventbusMessage(message, session);
		} else if (PATH_MANAGER.equals(path)) {
			log.debug("cannot handle manager message");
		}
	}

	/**
	 * Text message entry point; only event-bus path messages are handled.
	 *
	 * @param message JSON-encoded event (or the literal "ping")
	 * @param session the sending session
	 * @param path the path segment of the connection
	 */
	@OnMessage
	public void onMessage(String message, Session session, @PathParam("path") String path) {
		log.debug("String message: {}", message);
		if (PATH_EVENTBUS.equals(path)) {
			onEventbusMessage(message, session);
		} else if (PATH_MANAGER.equals(path)) {
			log.debug("cannot handle manager message");
		}
	}

	/**
	 * Deserializes a binary event and dispatches it to the bus.
	 *
	 * <p>SECURITY NOTE(review): this performs native Java deserialization of
	 * bytes received from a remote websocket peer. If peers are untrusted,
	 * this is a deserialization attack vector — consider an
	 * {@code ObjectInputFilter} or the JSON path instead.
	 */
	private void onEventbusMessage(byte[] message, Session session) {
		JebuWebsocketEvent event = null;
		InputStream is = null;
		ObjectInputStream ois = null;
		try {
			is = new ByteArrayInputStream(message);
			ois = new ObjectInputStream(is);
			event = (JebuWebsocketEvent) ois.readObject();
			onEventBusMessage(event, session, new JebuWebSocketSubscriber(session));
		} catch (IOException e) {
			log.error("message processing error", e);
		} catch (ClassNotFoundException e) {
			log.error("message processing error", e);
		} finally {
			// Closes the underlying stream before the wrapper; harmless for
			// in-memory streams, and close() is null-tolerant.
			close(is);
			close(ois);
		}
		publishManagers(event, session);
	}

	/**
	 * Parses a JSON text event ({@code action}, {@code eventName},
	 * {@code data} keys) and dispatches it to the bus. A bare "ping" message
	 * is acknowledged in the log and otherwise ignored (keep-alive).
	 */
	private void onEventbusMessage(String message, Session session) {
		JebuWebsocketEvent event = null;
		StringReader is = null;
		Action action = null;
		String eventName = null;
		String data = null;
		try {
			if ("ping".equals(message)) {
				log.debug("ping received {}", session.getId());
				return;
			}

			is = new StringReader(message);
			JsonParser parser = Json.createParser(is);
			// Streaming parse: for each recognized key, advance once to
			// consume its value token.
			while (parser.hasNext()) {
				Event e = parser.next();
				if (e == Event.KEY_NAME) {
					String key = parser.getString();
					if ("action".equals(key)) {
						parser.next();
						action = Action.valueOf(parser.getString());
					} else if ("eventName".equals(key)) {
						parser.next();
						eventName = parser.getString();
					} else if ("data".equals(key)) {
						parser.next();
						data = parser.getString();
					}
				}
			}

			event = new JebuWebsocketEvent(eventName, action, data);
			onEventBusMessage(event, session, new JebuWebSocketJsonSubscriber(session));
		} catch (JsonException e) {
			log.error("message processing error", e);
		} finally {
			close(is);
		}
		publishManagers(event, session);
	}

	/**
	 * Applies a decoded event to the bus: subscribe/unsubscribe the given
	 * subscriber or publish the event payload.
	 *
	 * @param event the decoded event (action must be non-null)
	 * @param session the originating session (used for logging only)
	 * @param subscriber the subscriber wrapper representing the session
	 */
	protected void onEventBusMessage(JebuWebsocketEvent event, Session session, final JebuWebSocketSubscriber subscriber) {
		log.trace("event from {}: {}", session.getId(), event);
		EventBus jebu = getJebu();
		switch (event.getAction()) {
		case subscribe:
			jebu.subscribe(event.getEventName(), subscriber);
			break;
		case unsubscribe:
			// Null event name means "unsubscribe from everything".
			if (event.getEventName() != null) {
				jebu.unsubscribe(event.getEventName(), subscriber);
			} else {
				jebu.unsubscribe(subscriber);
			}
			break;
		case publish:
			jebu.publish(event.getEventName(), event);
			break;
		default:
			log.error("unknown action {}", event.getAction());
			break;
		}
	}

	/**
	 * Close handler: manager sessions are removed from the manager set (and a
	 * status update is broadcast); event-bus sessions are unsubscribed from
	 * the bus entirely.
	 */
	@OnClose
	public void onClose(Session session, CloseReason reason, @PathParam("path") String path) {
		log.debug("Socket Closed: {}", reason);
		if (PATH_MANAGER.equals(path) && getManagerSessions() != null) {
			getManagerSessions().remove(session);
			publishManagers();
		} else if (PATH_EVENTBUS.equals(path)) {
			getJebu().unsubscribe(new JebuWebSocketSubscriber(session));
		}
	}

	/** Error handler: logs path and cause; no state is changed here. */
	@OnError
	public void onError(Throwable cause, @PathParam("path") String path) {
		log.error("websocket error {{}}", path);
		log.error("websocket error", cause);
	}

	/** Broadcasts the current bus status without an associated event. */
	private void publishManagers() {
		publishManagers(null, null);
	}

	/**
	 * Hand-writes a JSON status document ({@code subscriberMap},
	 * {@code managerSessions}, and optionally the triggering {@code event})
	 * and sends it asynchronously to every manager session.
	 *
	 * <p>The JSON is built manually with {@link #quote(String, Writer)}; the
	 * append order below defines the wire format, so do not reorder.
	 *
	 * @param event the event that triggered the update, or {@code null}
	 * @param session the sender session of that event, or {@code null}
	 */
	private void publishManagers(JebuWebsocketEvent event, Session session) {
		Writer w = new StringWriter();
		try {
			w.append("{");
			quote("subscriberMap", w).append(": {");
			boolean firstEvt = true;
			for (Map.Entry<String, Collection<Subscriber>> entry : getJebu().getSubscriberMap().entrySet()) {
				if (!firstEvt) {
					w.append(',');
				}
				firstEvt = false;
				quote(entry.getKey(), w).append(": [");
				boolean firstSub = true;
				for (Subscriber s : entry.getValue()) {
					if (!firstSub) {
						w.append(',');
					}
					firstSub = false;
					quote(s.getId(), w);
				}
				w.append(']');
			}
			w.append("},");
			quote("managerSessions", w).append(": [");

			boolean firstSub = true;
			for (Session managerSession : getManagerSessions()) {
				if (!firstSub) {
					w.append(',');
				}
				firstSub = false;
				quote(managerSession.getId(), w);
			}
			w.append(']');

			if (event != null) {
				String data = null;
				if (event.getData() != null) {
					data = event.getData().toString();
					// Truncate long payloads for the status view.
					if (data.length() > 100) {
						data = data.substring(0, 97) + "...";
					}
				}
				w.append(',');
				quote("event", w).append(": {");
				quote("sender", w).append(':');
				quote(session.getId(), w).append(',');
				quote("action", w).append(':');
				quote(event.getAction().toString(), w).append(',');
				quote("eventName", w).append(':');
				quote(event.getEventName(), w).append(',');
				quote("data", w).append(':');
				quote(data, w).append(',');
				quote("timestamp", w).append(':');
				quote(new Timestamp(System.currentTimeMillis()).toString(), w).append('}');
			}

			w.append('}');

			for (Session managerSession : getManagerSessions()) {
				log.trace("publish to manager session {}", managerSession);
				// Async send: failures surface via the container, not here.
				managerSession.getAsyncRemote().sendText(w.toString());
			}
			log.trace("current status");
			log.trace(w.toString());
		} catch (IOException e) {
			log.debug("JSON wirte error", e);
		}
	}

	protected Collection<Session> getManagerSessions() {
		return managerSessions;
	}

	protected EventBusImpl getJebu() {
		return jebu;
	}

	/**
	 * Best-effort shutdown of manager sessions.
	 *
	 * <p>NOTE(review): finalizers are deprecated and not guaranteed to run;
	 * relies on GC timing — consider an explicit lifecycle hook instead.
	 */
	@Override
	protected void finalize() throws Throwable {
		for (Session session : getManagerSessions()) {
			session.close(new CloseReason(CloseCodes.GOING_AWAY, "good night!"));
		}
	}

	/**
	 * Writes {@code string} to {@code w} as a JSON string literal, escaping
	 * quotes, backslashes, control characters, and the {@code </} sequence
	 * (to stay safe inside HTML script contexts).
	 *
	 * @param string the raw value; {@code null} or empty becomes {@code ""}
	 * @param w the destination writer
	 * @return {@code w}, for chaining
	 * @throws IOException if the writer fails
	 */
	public static Writer quote(String string, Writer w) throws IOException {
		if (string == null || string.length() == 0) {
			w.write("\"\"");
			return w;
		}

		char b;
		char c = 0;
		String hhhh;
		int i;
		int len = string.length();

		w.write('"');
		for (i = 0; i < len; i += 1) {
			b = c;
			c = string.charAt(i);
			switch (c) {
			case '\\':
				w.write('\\');
				w.write(c);
				break;
			case '"':
				w.write('\\');
				w.write(c);
				break;
			case '/':
				// Escape '/' only after '<' so "</" cannot terminate a
				// surrounding <script> block.
				if (b == '<') {
					w.write('\\');
				}
				w.write(c);
				break;
			case '\b':
				w.write("\\b");
				break;
			case '\t':
				w.write("\\t");
				break;
			case '\n':
				w.write("\\n");
				break;
			case '\f':
				w.write("\\f");
				break;
			case '\r':
				w.write("\\r");
				break;
			default:
				// Control chars and selected Unicode ranges go out as \\uXXXX,
				// zero-padded to four hex digits.
				if (c < ' ' || (c >= '\u0080' && c < '\u00a0')
						|| (c >= '\u2000' && c < '\u2100')) {
					w.write("\\u");
					hhhh = Integer.toHexString(c);
					w.write("0000", 0, 4 - hhhh.length());
					w.write(hhhh);
				} else {
					w.write(c);
				}
				break;
			}
		}
		w.write('"');
		return w;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.thrift;

import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;

import javassist.util.proxy.MethodHandler;
import javassist.util.proxy.Proxy;
import javassist.util.proxy.ProxyFactory;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.thrift.server.ThriftHsHaServer;
import org.apache.camel.component.thrift.server.ThriftMethodHandler;
import org.apache.camel.component.thrift.server.ThriftThreadPoolServer;
import org.apache.camel.support.DefaultConsumer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.thrift.TProcessor;
import org.apache.thrift.server.TServer;
import org.apache.thrift.transport.TNonblockingServerSocket;
import org.apache.thrift.transport.TSSLTransportFactory;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TZlibTransport;

/**
 * Represents Thrift server consumer implementation.
 *
 * <p>Depending on the endpoint configuration one of three server variants is
 * started: an SSL thread-pool server (sync + SSL), a zlib-compressed
 * thread-pool server (sync + ZLIB), or a non-blocking half-sync/half-async
 * server (the default).
 */
public class ThriftConsumer extends DefaultConsumer {
    // Transport for the non-blocking (HsHa) server variant.
    private TNonblockingServerSocket asyncServerTransport;
    // Transport for the synchronous (thread-pool) server variants.
    private TServerSocket syncServerTransport;
    private TServer server;
    private final ThriftConfiguration configuration;
    private final ThriftEndpoint endpoint;

    public ThriftConsumer(ThriftEndpoint endpoint, Processor processor, ThriftConfiguration configuration) {
        super(endpoint, processor);
        this.endpoint = endpoint;
        this.configuration = configuration;
    }

    public ThriftConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * Starts the consumer: builds the appropriate Thrift server (once) and
     * begins serving.
     *
     * @throws Exception if the server cannot be initialized or started
     */
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (server == null) {
            log.debug("Starting the Thrift server");
            initializeServer();
            server.serve();
            log.info("Thrift server started and listening on port: {}",
                     asyncServerTransport == null ? syncServerTransport.getServerSocket().getLocalPort() : asyncServerTransport.getPort());
        }
    }

    /**
     * Stops the server and closes whichever transport was opened.
     *
     * @throws Exception if shutdown fails
     */
    @Override
    protected void doStop() throws Exception {
        if (server != null) {
            log.debug("Terminating Thrift server");
            server.stop();
            if (ObjectHelper.isNotEmpty(asyncServerTransport)) {
                asyncServerTransport.close();
                asyncServerTransport = null;
            }
            if (ObjectHelper.isNotEmpty(syncServerTransport)) {
                syncServerTransport.close();
                syncServerTransport = null;
            }
            // Fix: server.stop() was previously invoked a second time here;
            // one stop before closing the transports is sufficient.
            server = null;
        }
        super.doStop();
    }

    /**
     * Creates the javassist proxy implementing the generated Thrift service
     * interface, wraps it in a processor, and instantiates the server matching
     * the configured negotiation/compression type.
     *
     * @throws TTransportException if a Thrift transport cannot be opened
     * @throws IllegalArgumentException if the proxy cannot be built, required
     *         SSL parameters are missing, the host is unknown, or no
     *         host/port was configured
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    protected void initializeServer() throws TTransportException {
        Class serverImplementationClass;
        Object serverImplementationInstance;
        Object serverProcessor;
        ProxyFactory serviceProxy = new ProxyFactory();
        MethodHandler methodHandler = new ThriftMethodHandler(endpoint, this);

        try {
            // The generated Iface/AsyncIface is proxied so every service call
            // is routed into the Camel exchange via the method handler.
            Class serverInterface = ThriftUtils.getServerInterface(endpoint.getServicePackage(), endpoint.getServiceName(), endpoint.isSynchronous(),
                                                                   endpoint.getCamelContext());
            serviceProxy.setInterfaces(new Class[] {serverInterface});
            serverImplementationClass = serviceProxy.createClass();
            serverImplementationInstance = serverImplementationClass.getConstructor().newInstance();
            ((Proxy)serverImplementationInstance).setHandler(methodHandler);

            serverProcessor = ThriftUtils.constructServerProcessor(endpoint.getServicePackage(), endpoint.getServiceName(), serverImplementationInstance,
                                                                   endpoint.isSynchronous(), endpoint.getCamelContext());
        } catch (IllegalArgumentException | InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException
            | SecurityException e) {
            // Fix: preserve the original cause instead of discarding it.
            throw new IllegalArgumentException("Unable to create server implementation proxy service for " + configuration.getService(), e);
        }

        if (configuration.getNegotiationType() == ThriftNegotiationType.SSL && endpoint.isSynchronous()) {
            // --- Variant 1: synchronous SSL thread-pool server ---
            SSLContextParameters sslParameters = configuration.getSslParameters();
            if (sslParameters == null) {
                throw new IllegalArgumentException("SSL parameters must be initialized if negotiation type is set to " + configuration.getNegotiationType());
            }

            ObjectHelper.notNull(sslParameters.getSecureSocketProtocol(), "Security protocol");
            ObjectHelper.notNull(sslParameters.getKeyManagers().getKeyStore().getResource(), "Keystore path");
            ObjectHelper.notNull(sslParameters.getKeyManagers().getKeyStore().getPassword(), "Keystore password");

            TSSLTransportFactory.TSSLTransportParameters sslParams;
            sslParams = new TSSLTransportFactory.TSSLTransportParameters(sslParameters.getSecureSocketProtocol(),
                                                                         sslParameters.getCipherSuites() == null
                                                                             ? null : sslParameters.getCipherSuites().getCipherSuite().stream().toArray(String[]::new));

            if (ObjectHelper.isNotEmpty(sslParameters.getKeyManagers().getKeyStore().getProvider())
                && ObjectHelper.isNotEmpty(sslParameters.getKeyManagers().getKeyStore().getType())) {
                sslParams.setKeyStore(sslParameters.getKeyManagers().getKeyStore().getResource(),
                                      sslParameters.getKeyManagers().getKeyStore().getPassword(),
                                      sslParameters.getKeyManagers().getKeyStore().getProvider(),
                                      sslParameters.getKeyManagers().getKeyStore().getType());
            } else {
                sslParams.setKeyStore(sslParameters.getKeyManagers().getKeyStore().getResource(),
                                      sslParameters.getKeyManagers().getKeyStore().getPassword());
            }

            try {
                syncServerTransport = TSSLTransportFactory.getServerSocket(configuration.getPort(), configuration.getClientTimeout(),
                                                                           InetAddress.getByName(configuration.getHost()), sslParams);
            } catch (UnknownHostException e) {
                throw new IllegalArgumentException("Unknown host defined: " + configuration.getHost(), e);
            }

            ThriftThreadPoolServer.Args args = new ThriftThreadPoolServer.Args(syncServerTransport);

            args.processor((TProcessor)serverProcessor);
            args.executorService(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newThreadPool(this, getEndpoint().getEndpointUri(), configuration.getPoolSize(), configuration.getMaxPoolSize()));
            args.startThreadPool(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newSingleThreadExecutor(this, "start-" + getEndpoint().getEndpointUri()));
            args.context(endpoint.getCamelContext());

            server = new ThriftThreadPoolServer(args);
        } else if (configuration.getCompressionType() == ThriftCompressionType.ZLIB && endpoint.isSynchronous()) {
            // --- Variant 2: synchronous zlib-compressed thread-pool server ---
            if (ObjectHelper.isNotEmpty(configuration.getHost()) && ObjectHelper.isNotEmpty(configuration.getPort())) {
                log.debug("Building sync Thrift server on {}:{}", configuration.getHost(), configuration.getPort());
                syncServerTransport = new TServerSocket(new InetSocketAddress(configuration.getHost(), configuration.getPort()),
                                                        configuration.getClientTimeout());
            } else if (ObjectHelper.isEmpty(configuration.getHost()) && ObjectHelper.isNotEmpty(configuration.getPort())) {
                log.debug("Building sync Thrift server on <any address>:{}", configuration.getPort());
                syncServerTransport = new TServerSocket(configuration.getPort(), configuration.getClientTimeout());
            } else {
                throw new IllegalArgumentException("No server start properties (host, port) specified");
            }

            ThriftThreadPoolServer.Args args = new ThriftThreadPoolServer.Args(syncServerTransport);

            args.processor((TProcessor)serverProcessor);
            args.transportFactory(new TZlibTransport.Factory());
            args.executorService(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newThreadPool(this, getEndpoint().getEndpointUri(), configuration.getPoolSize(), configuration.getMaxPoolSize()));
            args.startThreadPool(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newSingleThreadExecutor(this, "start-" + getEndpoint().getEndpointUri()));
            args.context(endpoint.getCamelContext());

            server = new ThriftThreadPoolServer(args);
        } else {
            // --- Variant 3 (default): non-blocking half-sync/half-async ---
            if (ObjectHelper.isNotEmpty(configuration.getHost()) && ObjectHelper.isNotEmpty(configuration.getPort())) {
                log.debug("Building Thrift server on {}:{}", configuration.getHost(), configuration.getPort());
                asyncServerTransport = new TNonblockingServerSocket(new InetSocketAddress(configuration.getHost(), configuration.getPort()),
                                                                    configuration.getClientTimeout());
            } else if (ObjectHelper.isEmpty(configuration.getHost()) && ObjectHelper.isNotEmpty(configuration.getPort())) {
                log.debug("Building Thrift server on <any address>:{}", configuration.getPort());
                asyncServerTransport = new TNonblockingServerSocket(configuration.getPort(), configuration.getClientTimeout());
            } else {
                throw new IllegalArgumentException("No server start properties (host, port) specified");
            }

            ThriftHsHaServer.Args args = new ThriftHsHaServer.Args(asyncServerTransport);

            args.processor((TProcessor)serverProcessor);
            args.executorService(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newThreadPool(this, getEndpoint().getEndpointUri(), configuration.getPoolSize(), configuration.getMaxPoolSize()));
            args.startThreadPool(getEndpoint().getCamelContext().getExecutorServiceManager()
                .newSingleThreadExecutor(this, "start-" + getEndpoint().getEndpointUri()));
            args.context(endpoint.getCamelContext());

            server = new ThriftHsHaServer(args);
        }
    }

    /**
     * Entry point used by the method handler to hand an exchange to Camel.
     *
     * @return {@code true} if the exchange was completed synchronously
     */
    public boolean process(Exchange exchange, AsyncCallback callback) {
        return doSend(exchange, callback);
    }

    /**
     * Processes the exchange asynchronously when the consumer is running;
     * otherwise discards it and completes the callback synchronously.
     */
    private boolean doSend(Exchange exchange, AsyncCallback callback) {
        if (isRunAllowed()) {
            getAsyncProcessor().process(exchange, doneSync -> {
                if (exchange.getException() != null) {
                    getExceptionHandler().handleException("Error processing exchange", exchange, exchange.getException());
                }
                callback.done(doneSync);
            });
            return false;
        } else {
            log.warn("Consumer not ready to process exchanges. The exchange {} will be discarded", exchange);
            callback.done(true);
            return true;
        }
    }
}
/* * Copyright (c) 2012 David Green * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package castledesigner; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.Graphics; import java.awt.Toolkit; import java.awt.datatransfer.StringSelection; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.image.BufferedImage; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.imageio.ImageIO; import javax.swing.BoxLayout; import javax.swing.GroupLayout; import javax.swing.JButton; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTextArea; import javax.swing.LayoutStyle.ComponentPlacement; import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; import javax.swing.border.EmptyBorder; import javax.swing.filechooser.FileFilter; import javax.swing.filechooser.FileNameExtensionFilter; /** * An application for the game "Stronghold Kingdoms" that helps players design * their castle layout in advance, and allows them to share their designs * through plain alphanumeric strings. 
* * @author David Green */ public class Editor { public static final String programVersion = "1.13"; public static Boolean nowFileSaving = false; private static LandPanel landPanel; private static JFrame frame; private static JFileChooser saveFileChooser; private static JFileChooser openFileChooser; private static File currentFile; private static JPanel errorPanel; private static final String FILE_EXTENSION = "png"; public static void main( String[] args ) { setLookAndFeel(); JPanel mainPanel = new JPanel(); landPanel = new LandPanel(); landPanel.getLandGrid().addDesignListener(new DesignListener() { public void designChanged() { errorPanel.removeAll(); List<String> designErrors = landPanel.getLandGrid().getCastle().getDesignErrors(); for (String designError : designErrors) { JLabel designErrorLabel = new JLabel(designError); designErrorLabel.setForeground(Color.red); designErrorLabel.setFont(new Font(designErrorLabel.getFont().getName(), Font.BOLD, designErrorLabel.getFont().getSize())); errorPanel.add(designErrorLabel); } errorPanel.revalidate(); } }); BuildingsPanel buildingsPanel = new BuildingsPanel(); buildingsPanel.setCastle(landPanel.getLandGrid().getCastle()); buildingsPanel.addPropertyChangeListener(BuildingsPanel.SELECTED_BUILDING, new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { landPanel.getLandGrid().setSelectedBuilding((BuildingType)evt.getNewValue()); } }); landPanel.getLandGrid().addDesignListener(buildingsPanel); MenuPanel menuPanel = new MenuPanel(); errorPanel = new JPanel(); errorPanel.setBorder(new EmptyBorder(5, 10, 0, 0)); errorPanel.setLayout(new BoxLayout(errorPanel, BoxLayout.Y_AXIS)); JPanel rightPanel = new JPanel(); GroupLayout layout = new GroupLayout(rightPanel); rightPanel.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(GroupLayout.Alignment.LEADING) .addComponent(menuPanel) .addComponent(buildingsPanel) .addComponent(errorPanel) ); layout.setVerticalGroup( 
layout.createSequentialGroup() .addComponent(menuPanel) .addComponent(buildingsPanel) .addPreferredGap(ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(errorPanel) ); JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, landPanel, rightPanel); // JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, new JScrollPane(landPanel), new JScrollPane(rightPanel)); mainPanel.add(splitPane); saveFileChooser = new JFileChooser(); saveFileChooser.setFileFilter(new FileNameExtensionFilter("Stronghold Kingdoms Castle Design", FILE_EXTENSION)); saveFileChooser.setAcceptAllFileFilterUsed(false); saveFileChooser.addPropertyChangeListener(JFileChooser.SELECTED_FILE_CHANGED_PROPERTY, new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { File file = saveFileChooser.getSelectedFile(); if (file != null) { String path = file.getAbsolutePath(); if (!path.endsWith('.' + FILE_EXTENSION)) { File newFile = new File(path + '.' + FILE_EXTENSION); saveFileChooser.setSelectedFile(newFile); } } } }); openFileChooser = new JFileChooser(); //It would be nice to use the following, but for backwards compatibility reasons we can't. 
//openFileChooser.setFileFilter(new FileNameExtensionFilter("Stronghold Kingdoms Castle Design", FILE_EXTENSION)); openFileChooser.setFileFilter(new FileFilter() { @Override public boolean accept(File file) { String[] s = file.getName().split("\\."); return file.isDirectory() || s.length <= 1 || s[s.length-1].equals(FILE_EXTENSION); } @Override public String getDescription() { return null; } }); JScrollPane mainScrollPane = new JScrollPane(mainPanel); frame = new JFrame("Stronghold Kingdoms Castle Designer"); //frame.setJMenuBar(createMenuBar()); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // frame.getContentPane().add(mainPanel); frame.getContentPane().add(mainScrollPane); frame.pack(); frame.setExtendedState(JFrame.MAXIMIZED_BOTH); frame.setVisible(true); } /** * Attempts to set the Look and Feel of the application to the native * platform. */ private static void setLookAndFeel() { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (ClassNotFoundException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } catch (InstantiationException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } catch (IllegalAccessException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } catch (UnsupportedLookAndFeelException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } } /** * Returns the menu bar for our application's main screen. 
* * @return the JMenuBar for our application's main screen */ // private static JMenuBar createMenuBar() // { // JMenuBar menuBar = new JMenuBar(); // // menuBar.add(createFileMenu()); // menuBar.add(createHelpMenu()); // // return menuBar; // } /** * Returns the File menu * * @return the File JMenu */ // private static JMenu createFileMenu() // { // JMenu fileMenu = new JMenu("File"); // // fileMenu.add(createOpenMenuItem()); // fileMenu.add(createSaveMenuItem()); // fileMenu.add(createSaveAsMenuItem()); // // fileMenu.add(new JPopupMenu.Separator()); // fileMenu.add(createExportMenuItem()); // fileMenu.add(createImportMenuItem()); // // fileMenu.add(new JPopupMenu.Separator()); // fileMenu.add(createClearMenuItem()); // // fileMenu.add(new JPopupMenu.Separator()); // fileMenu.add(createExitMenuItem()); // // return fileMenu; // } /** * Returns the menu item to open/load castles. * * @return the Open menu item */ public static JMenuItem createOpenMenuItem() { JMenuItem openMenuItem = new JMenuItem("Open"); openMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { int result = openFileChooser.showOpenDialog(frame); if (result == JFileChooser.APPROVE_OPTION) { File file = openFileChooser.getSelectedFile(); BufferedReader in = null; String errorMessage = null; try { String importString = null; if (file.getName().endsWith("." 
+ FILE_EXTENSION)) { BufferedImage bufferedImage = ImageIO.read(file); importString = Barcode.extractBarcode(bufferedImage); } else { in = new BufferedReader(new FileReader(file)); importString = in.readLine(); } importData(importString); currentFile = file; } catch (IOException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); errorMessage = ex.getLocalizedMessage(); } catch (InvalidBarcodeException ex) { errorMessage = "Choosing a random image is naughty!\n" + file.getAbsolutePath() + " is not a valid castle design image."; } catch (UnsupportedVersionException ex) { errorMessage = ex.getMessage(); } finally { if (in != null) { try { in.close(); } catch (IOException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } } if (errorMessage != null) { JOptionPane.showMessageDialog(frame, errorMessage, "Error Reading File", JOptionPane.ERROR_MESSAGE); } } } } }); return openMenuItem; } /** * Returns the menu item to save the castle. * * @return the Save menu item. */ public static JMenuItem createSaveMenuItem() { JMenuItem saveMenuItem = new JMenuItem("Save"); saveMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (currentFile == null || !currentFile.getName().endsWith('.' 
+ FILE_EXTENSION)) { // disable mouse overlay for image saving nowFileSaving = true; landPanel.getLandGrid().REpaint(); showSaveDialog(); nowFileSaving = false; } else { saveFile(currentFile); } } }); return saveMenuItem; } private static void showSaveDialog() { int result = saveFileChooser.showSaveDialog(frame); if (result == JFileChooser.APPROVE_OPTION) { saveFile(saveFileChooser.getSelectedFile()); } } private static void saveFile(File file) { PrintWriter out = null; try { BufferedImage bufferedImage = landPanel.getDesignImage(); BufferedImage newBufferedImage = new BufferedImage(bufferedImage.getWidth()+20,bufferedImage.getHeight()+20,BufferedImage.TYPE_INT_RGB ); Graphics graphic = newBufferedImage.getGraphics(); graphic.setColor(Color.white); graphic.fillRect(0,0,newBufferedImage.getWidth(),19); graphic.fillRect(0,0,19,newBufferedImage.getHeight()); graphic.drawImage(bufferedImage,19,19,null); Barcode.embedBarcode(newBufferedImage, generateExportString()); ImageIO.write(newBufferedImage, "png", file); currentFile = file; } catch (IOException ex) { Logger.getLogger(Editor.class.getName()).log(Level.SEVERE, null, ex); } finally { if (out != null) { out.flush(); out.close(); } } } /** * Returns the menu item to save the castle under a specific filename. * * @return the SaveAs menu item. */ public static JMenuItem createSaveAsMenuItem() { JMenuItem saveAsMenuItem = new JMenuItem("Save As"); saveAsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { //nowFileSaving == true; showSaveDialog(); //nowFileSaving == false; } }); return saveAsMenuItem; } /** * Returns the menu item responsible for showing a text string containing * all the inputted data. 
* * @return the JMenuItem for exporting */ public static JMenuItem createExportMenuItem() { JMenuItem exportMenuItem = new JMenuItem("Create Export Text"); exportMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { final String exportString = generateExportString(); JTextArea textArea = new JTextArea(exportString); textArea.setLineWrap(true); textArea.setEditable(false); JScrollPane scrollPane = new JScrollPane(textArea); scrollPane.setPreferredSize(new Dimension(450, 300)); JPanel panel = new JPanel(); panel.add(scrollPane); JButton clipboardButton = new JButton("Copy to Clipboard"); clipboardButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Toolkit.getDefaultToolkit().getSystemClipboard().setContents(new StringSelection(exportString), null); } }); panel.add(clipboardButton); JOptionPane.showMessageDialog(frame, panel); } }); return exportMenuItem; } /** * Returns the menu item responsible for allowing input of a string * that will populate our data. 
* * @return the JMenuItem for importing */ public static JMenuItem createImportMenuItem() { JMenuItem importMenuItem = new JMenuItem("Import Text"); importMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { final JTextArea textArea = new JTextArea(); textArea.setLineWrap(true); JScrollPane scrollPane = new JScrollPane(textArea); scrollPane.setPreferredSize(new Dimension(450, 300)); JPanel panel = new JPanel(); panel.add(scrollPane); JButton clipboardButton = new JButton("Import"); clipboardButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { try { importData(textArea.getText()); } catch (UnsupportedVersionException ex) { JOptionPane.showMessageDialog(frame, ex.getMessage(), "Error Importing Design", JOptionPane.ERROR_MESSAGE); } } }); panel.add(clipboardButton); JOptionPane.showMessageDialog(frame, panel); } }); return importMenuItem; } /** * Returns a menu item for exiting the application. * * @return the JMenuItem for exiting the application */ public static JMenuItem createExitMenuItem() { JMenuItem exitMenuItem = new JMenuItem("Exit"); exitMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { frame.dispose(); } }); return exitMenuItem; } /** * Returns a menu item for clearing the data. * * @return the JMenuItem for clearing the data */ public static JMenuItem createClearMenuItem() { JMenuItem exitMenuItem = new JMenuItem("Clear"); exitMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { //We don't want to accidently save over a previous design currentFile = null; landPanel.getLandGrid().clearData(); } }); return exitMenuItem; } /** * Returns the help menu. 
* * @return the JMenu for the Help menu */ public static JMenu createHelpMenu() { JMenu helpMenu = new JMenu("Help"); helpMenu.add(createAboutMenuItem()); helpMenu.add(createTipsMenuItem()); // JMenuItem aboutMenuItem = new JMenuItem("About"); // aboutMenuItem.addActionListener(new ActionListener() // { // public void actionPerformed(ActionEvent e) // { // JDialog about = new About(frame); // about.setLocationRelativeTo(frame); // about.setVisible(true); // } // }); // helpMenu.add(aboutMenuItem); return helpMenu; } /** * Returns About frame. * * @return the About menu item. */ public static JMenuItem createAboutMenuItem() { JMenuItem aboutMenuItem = new JMenuItem("About"); aboutMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JDialog about = new About(frame); about.setLocationRelativeTo(frame); about.setVisible(true); } }); return aboutMenuItem; } /** * Returns Tips frame. * * @return the Tips menu item. */ public static JMenuItem createTipsMenuItem() { JMenuItem tipsMenuItem = new JMenuItem("Tips"); tipsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JDialog tips = new Tips(frame); tips.setLocationRelativeTo(frame); tips.setVisible(true); } }); return tipsMenuItem; } public static JMenuItem createAllMoatMenuItem() { JMenuItem allMoatMenuItem = new JMenuItem("All moat"); allMoatMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JDialog allMoat = new AllMoat(frame); allMoat.setLocationRelativeTo(frame); allMoat.setVisible(true); } }); return allMoatMenuItem; } public static void all_Moat() { landPanel.getLandGrid().all_Moat(); } public static void delete_all_Moat() { landPanel.getLandGrid().delete_all_Moat(); } public static JMenuItem createStonePerimeterMenuItem() { JMenuItem stonePerimeterMenuItem = new JMenuItem("Stone perimeter"); stonePerimeterMenuItem.addActionListener(new ActionListener() { public void 
actionPerformed(ActionEvent e) { landPanel.getLandGrid().stone_perimeter(); } }); return stonePerimeterMenuItem; } private static String generateExportString() { return landPanel.getLandGrid().getCastle().getGridDataExport(); } private static void importData(String text) throws UnsupportedVersionException { if (text == null || text.length() == 0) return; landPanel.getLandGrid().importData(text); } }
/*
 * Copyright 2013 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.chris.video;

import java.io.File;
import java.io.IOException;

import android.app.Activity;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.Spinner;

/**
 * Play a movie from a file on disk.  Output goes to a TextureView.
 * <p>
 * Currently video-only.
 * <p>
 * Contrast with PlayMovieSurfaceActivity, which uses a SurfaceView.  Much of the code is
 * the same, but here we can handle the aspect ratio adjustment with a simple matrix,
 * rather than a custom layout.
 * <p>
 * TODO: investigate crash when screen is rotated while movie is playing (need
 * to have onPause() wait for playback to stop)
 */
public class PlayMovieActivity extends Activity implements OnItemSelectedListener,
        TextureView.SurfaceTextureListener, MoviePlayer.PlayerFeedback {
    private static final String TAG = PlayMovieActivity.class.getSimpleName();

    private TextureView mTextureView;
    // *.mp4 file names found on external storage; spinner data source.
    private String[] mMovieFiles;
    // Index into mMovieFiles of the currently selected movie.
    private int mSelectedMovie;
    // True while playback is active and the button should read "stop".
    private boolean mShowStopLabel;
    private MoviePlayer.PlayTask mPlayTask;
    // Play is disabled until the TextureView's SurfaceTexture is ready.
    private boolean mSurfaceTextureReady = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_play_movie);

        mTextureView = (TextureView) findViewById(R.id.movie_texture_view);
        mTextureView.setSurfaceTextureListener(this);

        // Populate file-selection spinner.
        Spinner spinner = (Spinner) findViewById(R.id.playMovieFile_spinner);
        // Need to create one of these fancy ArrayAdapter thingies, and specify the generic layout
        // for the widget itself.
        mMovieFiles = MiscUtils.getFiles(Environment.getExternalStorageDirectory(), "*.mp4");
        ArrayAdapter<String> adapter = new ArrayAdapter<String>(this,
                android.R.layout.simple_spinner_item, mMovieFiles);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        // Apply the adapter to the spinner.
        spinner.setAdapter(adapter);
        spinner.setOnItemSelectedListener(this);

        updateControls();
    }

    @Override
    protected void onResume() {
        Log.d(TAG, "PlayMovieActivity onResume");
        super.onResume();
    }

    @Override
    protected void onPause() {
        Log.d(TAG, "PlayMovieActivity onPause");
        super.onPause();
        // We're not keeping track of the state in static fields, so we need to shut the
        // playback down.  Ideally we'd preserve the state so that the player would continue
        // after a device rotation.
        //
        // We want to be sure that the player won't continue to send frames after we pause,
        // because we're tearing the view down.  So we wait for it to stop here.
        if (mPlayTask != null) {
            stopPlayback();
            mPlayTask.waitForStop();
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture st, int width, int height) {
        // There's a short delay between the start of the activity and the initialization
        // of the SurfaceTexture that backs the TextureView.  We don't want to try to
        // send a video stream to the TextureView before it has initialized, so we disable
        // the "play" button until this callback fires.
        Log.d(TAG, "SurfaceTexture ready (" + width + "x" + height + ")");
        mSurfaceTextureReady = true;
        updateControls();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture st, int width, int height) {
        // ignore
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture st) {
        mSurfaceTextureReady = false;
        // assume activity is pausing, so don't need to update controls
        return true;    // caller should release ST
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // ignore
    }

    /*
     * Called when the movie Spinner gets touched.
     */
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
        Spinner spinner = (Spinner) parent;
        mSelectedMovie = spinner.getSelectedItemPosition();

        Log.d(TAG, "onItemSelected: " + mSelectedMovie + " '" + mMovieFiles[mSelectedMovie] + "'");
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {}

    /**
     * onClick handler for "play"/"stop" button.
     */
    public void clickPlayStop(@SuppressWarnings("unused") View unused) {
        if (mShowStopLabel) {
            Log.d(TAG, "stopping movie");
            stopPlayback();
            // Don't update the controls here -- let the task thread do it after the movie has
            // actually stopped.
            //mShowStopLabel = false;
            //updateControls();
        } else {
            if (mPlayTask != null) {
                Log.w(TAG, "movie already playing");
                return;
            }

            Log.d(TAG, "starting movie");
            SpeedControlCallback callback = new SpeedControlCallback();
            if (((CheckBox) findViewById(R.id.locked60fps_checkbox)).isChecked()) {
                // TODO: consider changing this to be "free running" mode
                callback.setFixedPlaybackRate(60);
            }
            SurfaceTexture st = mTextureView.getSurfaceTexture();
            Surface surface = new Surface(st);
            MoviePlayer player = null;
            try {
                player = new MoviePlayer(
                        new File(Environment.getExternalStorageDirectory(),
                                mMovieFiles[mSelectedMovie]), surface, callback);
            } catch (IOException ioe) {
                Log.e(TAG, "Unable to play movie", ioe);
                // Release the surface so we don't leak it on failure.
                surface.release();
                return;
            }
            adjustAspectRatio(player.getVideoWidth(), player.getVideoHeight());

            mPlayTask = new MoviePlayer.PlayTask(player, this);
            if (((CheckBox) findViewById(R.id.loopPlayback_checkbox)).isChecked()) {
                mPlayTask.setLoopMode(true);
            }

            mShowStopLabel = true;
            updateControls();
            mPlayTask.execute();
        }
    }

    /**
     * Requests stoppage if a movie is currently playing.  Does not wait for it to stop.
     */
    private void stopPlayback() {
        if (mPlayTask != null) {
            mPlayTask.requestStop();
        }
    }

    @Override   // MoviePlayer.PlayerFeedback
    public void playbackStopped() {
        Log.d(TAG, "playback stopped");
        mShowStopLabel = false;
        mPlayTask = null;
        updateControls();
    }

    /**
     * Sets the TextureView transform to preserve the aspect ratio of the video.
     */
    private void adjustAspectRatio(int videoWidth, int videoHeight) {
        int viewWidth = mTextureView.getWidth();
        int viewHeight = mTextureView.getHeight();
        double aspectRatio = (double) videoHeight / videoWidth;

        int newWidth, newHeight;
        if (viewHeight > (int) (viewWidth * aspectRatio)) {
            // limited by narrow width; restrict height
            newWidth = viewWidth;
            newHeight = (int) (viewWidth * aspectRatio);
        } else {
            // limited by short height; restrict width
            newWidth = (int) (viewHeight / aspectRatio);
            newHeight = viewHeight;
        }
        int xoff = (viewWidth - newWidth) / 2;
        int yoff = (viewHeight - newHeight) / 2;
        Log.v(TAG, "video=" + videoWidth + "x" + videoHeight +
                " view=" + viewWidth + "x" + viewHeight +
                " newView=" + newWidth + "x" + newHeight +
                " off=" + xoff + "," + yoff);

        Matrix txform = new Matrix();
        mTextureView.getTransform(txform);
        txform.setScale((float) newWidth / viewWidth, (float) newHeight / viewHeight);
        //txform.postRotate(10);          // just for fun
        txform.postTranslate(xoff, yoff);
        mTextureView.setTransform(txform);
    }

    /**
     * Updates the on-screen controls to reflect the current state of the app.
     */
    private void updateControls() {
        Button play = (Button) findViewById(R.id.play_stop_button);
        if (mShowStopLabel) {
            play.setText(R.string.stop_button_text);
        } else {
            play.setText(R.string.play_button_text);
        }
        play.setEnabled(mSurfaceTextureReady);

        // We don't support changes mid-play, so dim these.
        CheckBox check = (CheckBox) findViewById(R.id.locked60fps_checkbox);
        check.setEnabled(!mShowStopLabel);
        check = (CheckBox) findViewById(R.id.loopPlayback_checkbox);
        check.setEnabled(!mShowStopLabel);
    }
}
package com.njdaeger.java.wrapper;

import java.io.File;
import java.util.UUID;

import com.njdaeger.java.Core;
import com.njdaeger.java.configuration.data.Database;
import com.njdaeger.java.configuration.data.Entry;
import com.njdaeger.java.configuration.data.LastLocation;
import com.njdaeger.java.configuration.data.LogoutLocation;
import com.njdaeger.java.configuration.data.OfflineHome;
import com.njdaeger.java.configuration.data.UserFile;
import com.njdaeger.java.configuration.enums.InternalDatabase;
import com.njdaeger.java.configuration.enums.PlayerPaths;
import com.njdaeger.java.configuration.interfaces.IOfflineHome;

/**
 * Wrapper around an offline player's persisted data, backed by the per-user
 * YAML file resolved from the player-data database.
 */
public final class OfflineUser implements IOfflineUser {

	// The user's configuration file.
	private UserFile userFile;
	// Whether the user file existed when this wrapper was created.
	private boolean exists;
	// The uuid of the offline user.
	private UUID uuid;
	// The name of the user.
	private String name;

	/**
	 * Resolves the player's UUID from the player-data database and binds this
	 * wrapper to their user file.
	 * <p>
	 * NOTE(review): if the name is not in the database the constructor returns
	 * early with every field left null; callers should check {@link #getId()}
	 * before using the instance.
	 *
	 * @param name the player's name
	 */
	public OfflineUser(String name) {
		Database database = (Database) Core.getDatabase(InternalDatabase.PLAYERDATA);
		Entry e = (Entry) database.getEntry(name);
		if (e == null) {
			return;
		}
		UUID id = UUID.fromString((String) e.getValue());
		this.name = name;
		this.uuid = id;
		this.userFile = new UserFile(id);
		this.exists = getUserFile().exists();
	}

	// Creates the backing config file if it did not exist on construction.
	// Centralizes the guard previously duplicated in every accessor.
	private void ensureConfig() {
		if (!exists) {
			userFile.createConfig();
		}
	}

	// Resolves this user's homes directory on disk.
	private File homesDir() {
		return new File("plugins" + File.separator + "EssentialCommands" + File.separator + "users"
				+ File.separator + getId() + File.separator + "homes");
	}

	@Override
	public UUID getId() {
		return uuid;
	}

	@Override
	public boolean isMuted() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.MUTED);
	}

	@Override
	public void setMuted(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.MUTED, value);
	}

	@Override
	public boolean isSpying() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.SOCIALSPY);
	}

	@Override
	public void setSpying(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.SOCIALSPY, value);
	}

	@Override
	public boolean isGod() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.GOD);
	}

	@Override
	public void setGod(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.GOD, value);
	}

	@Override
	public boolean isMessageable() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.MESSAGEABLE);
	}

	@Override
	public void setMessageable(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.MESSAGEABLE, value);
	}

	@Override
	public boolean isAfk() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.AFK);
	}

	@Override
	public void setAfk(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.AFK, value);
	}

	@Override
	public boolean isTeleportable() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.TPTOGGLED);
	}

	@Override
	public void setTeleportable(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.TPTOGGLED, value);
	}

	@Override
	public boolean isGroup(String group) {
		ensureConfig();
		return getGroup().equalsIgnoreCase(group);
	}

	@Override
	public String getGroup() {
		ensureConfig();
		return (String) userFile.getValue(PlayerPaths.RANK);
	}

	@Override
	public void setGroup(String group) {
		ensureConfig();
		userFile.setValue(PlayerPaths.RANK, group);
	}

	@Override
	public boolean hasNickname() {
		ensureConfig();
		// Fix: previously used String.matches(), which treated the nickname as
		// a regular expression and misbehaved on regex metacharacters.
		return getName().equals(getNickname());
	}

	@Override
	public boolean hasNickname(String nickname) {
		ensureConfig();
		// Fix: plain equality instead of regex matching (see hasNickname()).
		return getNickname().equals(nickname);
	}

	@Override
	public String getNickname() {
		ensureConfig();
		return (String) userFile.getValue(PlayerPaths.DISPLAYNAME);
	}

	@Override
	public void setNickname(String nickname) {
		ensureConfig();
		userFile.setValue(PlayerPaths.DISPLAYNAME, nickname);
	}

	@Override
	public boolean isFlying() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.FLYING);
	}

	@Override
	public void setFlying(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.FLYING, value);
	}

	@Override
	public double getFlyingSpeed() {
		ensureConfig();
		return (double) userFile.getValue(PlayerPaths.FLYSPEED);
	}

	@Override
	public void setFlyingSpeed(double value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.FLYSPEED, value);
	}

	@Override
	public int getWalkingSpeed() {
		ensureConfig();
		return (int) userFile.getValue(PlayerPaths.WALKSPEED);
	}

	@Override
	public void setWalkingSpeed(double value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.WALKSPEED, value);
	}

	@Override
	public boolean isOp() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.OPPED);
	}

	@Override
	public void setOp(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.OPPED, value);
	}

	@Override
	public Gamemode getGamemode() {
		ensureConfig();
		return Gamemode.getAliasUsed((String) userFile.getValue(PlayerPaths.GAMEMODE));
	}

	@Override
	public boolean isGamemode(Gamemode mode) {
		ensureConfig();
		return getGamemode().equals(mode);
	}

	@Override
	public void setGamemode(Gamemode mode) {
		ensureConfig();
		userFile.setValue(PlayerPaths.GAMEMODE, mode.name());
	}

	@Override
	public void setGamemode(String mode) {
		ensureConfig();
		userFile.setValue(PlayerPaths.GAMEMODE, Gamemode.getAliasUsed(mode).name());
	}

	@Override
	public boolean isBubbled() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.BUBBLED);
	}

	@Override
	public void setBubbled(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.BUBBLED, value);
	}

	@Override
	public boolean isHidden() {
		ensureConfig();
		return (boolean) userFile.getValue(PlayerPaths.HIDDEN);
	}

	@Override
	public void setHidden(boolean value) {
		ensureConfig();
		userFile.setValue(PlayerPaths.HIDDEN.getPath(), value);
	}

	@Override
	public void setLogoutTime() {
		ensureConfig();
		userFile.setValue(PlayerPaths.LOGOUT.getPath(), System.currentTimeMillis());
	}

	@Override
	public long getLogoutTime() {
		ensureConfig();
		Object a = userFile.getValue(PlayerPaths.LOGOUT.getPath());
		if (a == null) {
			return -1;
		}
		return (long) a;
	}

	@Override
	public void setLoginTime() {
		ensureConfig();
		// Fix: previously wrote to the LOGOUT path, clobbering the logout time
		// and leaving the LOGIN key (read by getLoginTime) never written.
		userFile.setValue(PlayerPaths.LOGIN.getPath(), System.currentTimeMillis());
	}

	@Override
	public long getLoginTime() {
		ensureConfig();
		Object a = userFile.getValue(PlayerPaths.LOGIN.getPath());
		if (a == null) {
			return -1;
		}
		return (long) a;
	}

	@Override
	public IOfflineHome getHome(String home) {
		return new OfflineHome(this, home);
	}

	@Override
	public UserFile getUserFile() {
		return userFile;
	}

	@Override
	public LastLocation getLast() {
		return new LastLocation(this);
	}

	@Override
	public LogoutLocation getLogout() {
		return new LogoutLocation(this);
	}

	@Override
	public String getName() {
		return name;
	}

	@Override
	public String[] getHomes() {
		// May be null if the homes directory does not exist yet.
		return homesDir().list();
	}

	@Override
	public String listHomes() {
		String[] homes = homesDir().list();
		if (homes == null) {
			return null;
		}
		// Join home names with ", ", stripping the ".yml" file extension.
		// Fix: names are joined directly rather than via a space-separated
		// string and replaceAll, which corrupted names containing spaces.
		StringBuilder sb = new StringBuilder();
		for (String home : homes) {
			if (sb.length() > 0) {
				sb.append(", ");
			}
			sb.append(home.replace(".yml", ""));
		}
		return sb.toString();
	}
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.psi.search;

import com.intellij.JavaTestUtil;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.todo.TodoConfiguration;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.projectRoots.impl.ProjectRootUtil;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.JavaPsiFacadeEx;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.cache.impl.todo.TodoIndex;
import com.intellij.psi.impl.cache.impl.todo.TodoIndexEntry;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.psi.search.*;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.testFramework.HeavyPlatformTestCase;
import com.intellij.testFramework.JavaPsiTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Heavy integration tests verifying that reference-search results and the TODO index
 * stay consistent as files, directories and project roots change — both through PSI
 * operations and through external (on-disk) modifications picked up via VFS refresh.
 */
@HeavyPlatformTestCase.WrapInCommand
public class UpdateCacheTest extends JavaPsiTestCase {
  // Loads the project from disk rather than building it in memory so that the
  // close-and-reopen scenario (testExternalFileModificationWhileProjectClosed) works.
  @Override
  protected void setUpProject() throws Exception {
    loadAndSetupProject(getProjectDirOrFile().toString());
  }

  // Opens the project at 'path', copies shared test data into it, and prepares module/JDK.
  private void loadAndSetupProject(String path) throws Exception {
    LocalFileSystem.getInstance().refreshIoFiles(myFilesToDelete);

    myProject = ProjectManager.getInstance().loadAndOpenProject(path);

    setUpModule();

    String root = JavaTestUtil.getJavaTestDataPath() + "/psi/search/updateCache";
    createTestProjectStructure(root);

    setUpJdk();

    ProjectManagerEx.getInstanceEx().openTestProject(myProject);
    runStartupActivities();
  }

  // A file created through PSI must immediately show up in usage search results.
  public void testFileCreation() {
    PsiDirectory root = ProjectRootUtil.getAllContentRoots(myProject)[0];

    PsiFile file = PsiFileFactory.getInstance(myProject).createFileFromText("New.java", JavaFileType.INSTANCE, "class A{ Object o;}");
    final PsiFile finalFile = file;
    file = WriteAction.compute(() -> (PsiFile)root.add(finalFile));
    assertNotNull(file);

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // A file written directly to disk (bypassing PSI) must be indexed after a VFS refresh.
  public void testExternalFileCreation() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    String newFilePath = root.getPresentableUrl() + File.separatorChar + "New.java";
    FileUtil.writeToFile(new File(newFilePath), "class A{ Object o;}".getBytes(StandardCharsets.UTF_8));
    VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(newFilePath.replace(File.separatorChar, '/'));
    assertNotNull(file);

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // Deleting a file must remove its usages from the index.
  public void testExternalFileDeletion() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("1.java");
    assertNotNull(file);
    delete(file);

    PsiClass stringClass = myJavaFacade.findClass("java.lang.String", GlobalSearchScope.allScope(getProject()));
    assertNotNull(stringClass);
    checkUsages(stringClass, ArrayUtilRt.EMPTY_STRING_ARRAY);
  }

  // Overwriting a file's text must re-index it.
  public void testExternalFileModification() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("1.java");
    assertNotNull(file);
    setFileText(file, "class A{ Object o;}");

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"1.java"});
  }

  // A file modified on disk while the project is closed must be re-indexed on reopen.
  public void testExternalFileModificationWhileProjectClosed() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, ArrayUtil.EMPTY_STRING_ARRAY);
    // Prime the TODO index before closing so staleness would be detectable.
    FileBasedIndex.getInstance().getContainingFiles(TodoIndex.NAME, new TodoIndexEntry("todo", true), GlobalSearchScope.allScope(getProject()));

    final String projectLocation = myProject.getPresentableUrl();
    assert projectLocation != null : myProject;
    PlatformTestUtil.saveProject(myProject);
    final VirtualFile content = ModuleRootManager.getInstance(getModule()).getContentRoots()[0];
    Project project = myProject;
    ProjectManagerEx.getInstanceEx().forceCloseProject(project);
    myProject = null;
    InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project);
    assertTrue("Project was not disposed", project.isDisposed());
    myModule = null;

    // Modify the file on disk while no project is open.
    final File file = new File(root.getPath(), "1.java");
    assertTrue(file.exists());

    FileUtil.writeToFile(file, "class A{ Object o;}".getBytes(StandardCharsets.UTF_8));
    root.refresh(false, true);

    LocalFileSystem.getInstance().refresh(false);

    // Reopen the same project location and rebuild the test fixture state.
    myProject = ProjectManager.getInstance().loadAndOpenProject(projectLocation);
    InjectedLanguageManagerImpl.pushInjectors(getProject());

    setUpModule();
    setUpJdk();
    ProjectManagerEx.getInstanceEx().openTestProject(myProject);
    UIUtil.dispatchAllInvocationEvents(); // startup activities
    runStartupActivities();

    PsiTestUtil.addSourceContentToRoots(getModule(), content);

    assertNotNull(myProject);
    myPsiManager = (PsiManagerImpl) PsiManager.getInstance(myProject);
    myJavaFacade = JavaPsiFacadeEx.getInstanceEx(myProject);

    objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"1.java"});
  }

  // A new directory plus file created externally must be picked up after refresh.
  public void testExternalDirCreation() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    String newFilePath = root.getPresentableUrl() + File.separatorChar + "dir" + File.separatorChar + "New.java";
    LOG.assertTrue(new File(newFilePath).getParentFile().mkdir());
    FileUtil.writeToFile(new File(newFilePath), "class A{ Object o;}".getBytes(StandardCharsets.UTF_8));
    VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(newFilePath.replace(File.separatorChar, '/'));
    assertNotNull(file);

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // Deleting a whole directory must drop the usages contained in it.
  public void testExternalDirDeletion() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("aDir");
    assertNotNull(file);
    delete(file);

    PsiClass threadClass = myJavaFacade.findClass("java.lang.Thread", GlobalSearchScope.allScope(getProject()));
    assertNotNull(threadClass);
    checkUsages(threadClass, ArrayUtilRt.EMPTY_STRING_ARRAY);
  }

  // Changing the configured TODO patterns must invalidate TODO results; the
  // finally block restores the global configuration for subsequent tests.
  public void testTodoConfigurationChange() {
    TodoPattern pattern = new TodoPattern("newtodo", TodoAttributesUtil.createDefault(), true);
    TodoPattern[] oldPatterns = TodoConfiguration.getInstance().getTodoPatterns();

    checkTodos(new String[]{"2.java"});

    TodoConfiguration.getInstance().setTodoPatterns(new TodoPattern[]{pattern});
    try {
      checkTodos(new String[]{"1.java"});
    }
    finally {
      TodoConfiguration.getInstance().setTodoPatterns(oldPatterns);
      checkTodos(new String[]{"2.java"});
    }
  }

  // Excluding a directory must remove its files from usage and TODO results.
  public void testAddExcludeRoot() {
    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];
    final VirtualFile dir = root.findChild("aDir");

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newFile = createChildData(dir, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTestUtil.addExcludedRoot(myModule, dir);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java"});
    checkTodos(ArrayUtil.EMPTY_STRING_ARRAY);
  }

  // Un-excluding a directory must bring its files back into usage and TODO results.
  public void testRemoveExcludeRoot() {
    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];
    final VirtualFile dir = root.findChild("aDir");
    PsiTestUtil.addExcludedRoot(myModule, dir);

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newFile = createChildData(dir, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches
    PsiTestUtil.removeExcludedRoot(myModule, dir);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Promoting a content root to a source root must index the files under it.
  public void testAddSourceRoot() throws Exception {
    File dir = createTempDirectory();

    final VirtualFile root = LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      PsiTestUtil.addContentRoot(myModule, root);

      VirtualFile newFile = createChildData(root, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches
    PsiTestUtil.addSourceRoot(myModule, root);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Removing the only source root: TODO results must survive; see disabled check below.
  public void testRemoveSourceRoot() {
    final VirtualFile root = ModuleRootManager.getInstance(myModule).getContentRoots()[0];

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newFile = createChildData(root, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches

    VirtualFile[] sourceRoots = ModuleRootManager.getInstance(myModule).getSourceRoots();
    LOG.assertTrue(sourceRoots.length == 1);
    PsiTestUtil.removeSourceRoot(myModule, sourceRoots[0]);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);

    // currently it actually finds usages by FQN due to Java PSI enabled for out-of-source java files
    // so the following check is disabled
    //checkUsages(exceptionClass, new String[]{});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Adding a new source root after caches were primed must index its files.
  public void testAddProjectRoot() throws Exception {
    File dir = createTempDirectory();

    final VirtualFile root = LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      PsiTestUtil.addSourceRoot(myModule, root);

      VirtualFile newFile = createChildData(root, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiSearchHelper.getInstance(myProject).processAllFilesWithWord("aaa", GlobalSearchScope.allScope(myProject), psiFile -> true, true); // to initialize caches

/*
    rootManager.startChange();
    rootManager.addRoot(root, ProjectRootType.PROJECT);
    rootManager.finishChange();
*/

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Regression test (SCR 6066): excluding the whole content root must empty both
  // usage and TODO results.
  public void testSCR6066() {
    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      VirtualFile newFile = createChildData(root, "New.java");
      setFileText(newFile, "class A{ Exception e;} //todo");
    });
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches
    PsiTestUtil.addExcludedRoot(myModule, root);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, ArrayUtil.EMPTY_STRING_ARRAY);
    checkTodos(ArrayUtil.EMPTY_STRING_ARRAY);
  }

  // Asserts that references to 'element' (project scope) occur in exactly the
  // expected files; both sides are sorted by name before element-wise comparison.
  private void checkUsages(PsiElement element, @NonNls String[] expectedFiles) {
    PsiReference[] refs = ReferencesSearch.search(element, GlobalSearchScope.projectScope(myProject), false).toArray(PsiReference.EMPTY_ARRAY);

    List<PsiFile> files = new ArrayList<>();
    for (PsiReference ref : refs) {
      PsiFile file = ref.getElement().getContainingFile();
      if (!files.contains(file)) {
        files.add(file);
      }
    }

    assertEquals(expectedFiles.length, files.size());

    Collections.sort(files, Comparator.comparing(PsiFileSystemItem::getName));
    Arrays.sort(expectedFiles);

    for (int i = 0; i < expectedFiles.length; i++) {
      String name = expectedFiles[i];
      PsiFile file = files.get(i);
      assertEquals(name, file.getName());
    }
  }

  // Asserts that exactly the expected files currently contain TODO items.
  // NOTE: sorts 'expectedFiles' in place, mutating the caller's array.
  private void checkTodos(@NonNls String[] expectedFiles) {
    PsiTodoSearchHelper helper = PsiTodoSearchHelper.SERVICE.getInstance(myProject);

    PsiFile[] files = helper.findFilesWithTodoItems();
    assertEquals(expectedFiles.length, files.length);

    Arrays.sort(files, Comparator.comparing(PsiFileSystemItem::getName));
    Arrays.sort(expectedFiles);

    for (int i = 0; i < expectedFiles.length; i++) {
      String name = expectedFiles[i];
      PsiFile file = files[i];
      assertEquals(name, file.getName());
    }
  }
}
package com.fincatto.documentofiscal.nfe400.classes.nota;

import com.fincatto.documentofiscal.DFPais;
import com.fincatto.documentofiscal.DFUnidadeFederativa;
import com.fincatto.documentofiscal.nfe400.FabricaDeObjetosFake;
import com.fincatto.documentofiscal.nfe400.classes.NFEndereco;
import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for {@link NFEndereco} (NF-e address element).
 *
 * Conventions used throughout:
 * - "naoDevePermitir...ComTamanhoInvalido" tests check BOTH length bounds with one
 *   test: the try-block calls the setter with a too-short value; its expected
 *   IllegalStateException is caught, and the catch-block then calls the setter with
 *   a too-long value, whose IllegalStateException satisfies @Test(expected = ...).
 *   If the too-short value did NOT throw, the catch never runs and the test fails.
 * - "naoDevePermitir...Nulo" tests populate every field EXCEPT one and expect
 *   toString() (which triggers validation) to throw.
 * - The long alphanumeric literals are boundary-length fixture values.
 */
public class NFEnderecoTest {

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirBairroComTamanhoInvalido() {
        try {
            new NFEndereco().setBairro("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU1");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirTelefoneComTamanhoInvalido() {
        try {
            new NFEndereco().setTelefone("12345");
        } catch (final IllegalStateException e) {
            new NFEndereco().setTelefone("123456789012345");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirNumeroComTamanhoInvalido() {
        try {
            new NFEndereco().setNumero("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y1");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirLogradouroComTamanhoInvalido() {
        try {
            new NFEndereco().setLogradouro("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setLogradouro("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y1");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirDescricaoMunicipioComTamanhoInvalido() {
        try {
            new NFEndereco().setDescricaoMunicipio("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setDescricaoMunicipio("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y1");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirComplementoComTamanhoInvalido() {
        try {
            new NFEndereco().setComplemento("");
        } catch (final IllegalStateException e) {
            new NFEndereco().setComplemento("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y1");
        }
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirCodigoMunicipioComTamanhoInvalido() {
        try {
            new NFEndereco().setCodigoMunicipio("999999");
        } catch (final IllegalStateException e) {
            new NFEndereco().setCodigoMunicipio("10000000");
        }
    }

    // --- simple getter/setter round-trip tests ---

    @Test
    public void deveObterCepComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCep("88095550");
        Assert.assertEquals("88095550", endereco.getCep());
    }

    @Test
    public void deveObterBairroComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("Principal");
        Assert.assertEquals("Principal", endereco.getBairro());
    }

    @Test
    public void deveObterCodigoMunicipioComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCodigoMunicipio("9999999");
        Assert.assertEquals("9999999", endereco.getCodigoMunicipio());
    }

    @Test
    public void deveObterCodigoPaisComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        // "1058" is the IBGE country code for Brazil; the setter apparently maps the
        // string onto a country object exposing getCodigo().
        endereco.setCodigoPais("1058");
        Assert.assertEquals(Integer.valueOf(1058), endereco.getCodigoPais().getCodigo());
    }

    @Test
    public void deveObterComplementoComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        Assert.assertEquals("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw", endereco.getComplemento());
    }

    @Test
    public void deveObterDescricaoMunicipioComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setDescricaoMunicipio("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        Assert.assertEquals("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw", endereco.getDescricaoMunicipio());
    }

    @Test
    public void deveObterLogradouroComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setLogradouro("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        Assert.assertEquals("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw", endereco.getLogradouro());
    }

    @Test
    public void deveObterNumeroComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        Assert.assertEquals("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y", endereco.getNumero());
    }

    @Test
    public void deveObterTelefoneComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setTelefone("12345678901324");
        Assert.assertEquals("12345678901324", endereco.getTelefone());
    }

    @Test
    public void deveObterUfComoFoiSetado() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setUf(DFUnidadeFederativa.SC);
        // NOTE(review): compares a String against getUf()'s return value — this only
        // passes if getUf() returns the state code as a String; verify against NFEndereco.
        Assert.assertEquals("SC", endereco.getUf());
    }

    // --- mandatory/optional field validation via toString() ---

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirBairroNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirCepNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        Assert.assertNotNull(endereco.toString());
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirCodigoMunicipioNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirCodigoPaisNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        Assert.assertNotNull(endereco.toString());
    }

    @Test
    public void devePermitirComplementoNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        // NOTE(review): no assertion here — unlike devePermitirCepNulo, this test only
        // checks toString() does not throw; consider wrapping in assertNotNull.
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirDescricaoMunicipioNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirLogradouroNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirNumeroNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.SC);
        endereco.toString();
    }

    @Test
    public void devePermitirTelefoneNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setUf(DFUnidadeFederativa.SC);
        // NOTE(review): no assertion — only checks toString() does not throw.
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirUFNulo() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais("999");
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.toString();
    }

    // --- country/UF cross-validation: UF "EX" (exterior) is only valid for a
    //     foreign country, and a foreign country requires UF "EX" ---

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirPaisBrasilUfEX() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais(DFPais.BRASIL);
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.EX);
        endereco.toString();
    }

    @Test
    public void devePermitirPaisUfEX() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais(DFPais.AFRICA_DO_SUL);
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        endereco.setUf(DFUnidadeFederativa.EX);
        // NOTE(review): no assertion — only checks toString() does not throw.
        endereco.toString();
    }

    @Test(expected = IllegalStateException.class)
    public void naoDevePermitirPaisUfEX() {
        final NFEndereco endereco = new NFEndereco();
        endereco.setBairro("67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU");
        endereco.setCep("88095550");
        endereco.setCodigoMunicipio("9999999");
        endereco.setCodigoPais(DFPais.AFRICA_DO_SUL);
        endereco.setComplemento("Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw");
        endereco.setDescricaoMunicipio("s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG");
        endereco.setLogradouro("NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV");
        endereco.setNumero("11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y");
        endereco.setTelefone("12345678901324");
        // Foreign country with a regular Brazilian UF (PR) must be rejected.
        endereco.setUf(DFUnidadeFederativa.PR);
        endereco.toString();
    }

    // Golden-master check: the serialized XML of the fake fixture must match the
    // NF-e layout exactly (element names xLgr, nro, xCpl, xBairro, cMun, xMun, UF,
    // CEP, cPais, fone).
    @Test
    public void deveGerarXMLDeAcordoComOPadraoEstabelecido() {
        final String xmlEsperado = "<NFEndereco><xLgr>NKwaAJ5ZJ49aQYmqBvxMhBzkGUqvtXnqusGEtjDzKCXPGwrEZCS8LGKHyBbV</xLgr><nro>11mzXHR8rZTgfE35EqfGhiShiIwQfLCAziFDXVgs3EjLSPkZkCvfGNLMEf5y</nro><xCpl>Fr3gSvoAeKbGpQD3r98KFeB50P3Gq14XBVsv5fpiaBvJ3HTOpREiwYGs20Xw</xCpl><xBairro>67LQFlXOBK0JqAE1rFi2CEyUGW5Z8QmmHhzmZ9GABVLKa9AbV0uFR0onl7nU</xBairro><cMun>9999999</cMun><xMun>s1Cr2hWP6bptQ80A9vWBuTaODR1U82LtKQi1DEm3LsAXu9AbkSeCtfXJVTKG</xMun><UF>RS</UF><CEP>88095550</CEP><cPais>1058</cPais><fone>12345678901324</fone></NFEndereco>";
        Assert.assertEquals(xmlEsperado, FabricaDeObjetosFake.getNFEndereco().toString());
    }
}
/* Driver.java - Copyright (c) 2014, David Paul Hentchel
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package bench;

import java.io.IOException;
import java.util.HashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import bench.Client.ClientState;
import bench.TestContext.TestStats;

/**
 * Run and Manage multi-client SQL benchmark tests. This class handles the complete distributed
 * benchmark test. Actual running of benchmark clients is done by the Client
 * class, and all DBMS and SQL management functions are in the Database class.
 * One running instance of this class is launched on each host running benchmark clients.
 * TODO Orchestrate multiple Drivers in a run
 *
 * @author dhentchel
 */
public class Driver implements Runnable, Client.ClientManager {
	static final long MAX_ERROR_COUNT = 10000;
	static final long MAX_STAGING_WAIT_MSECS = 30000;
	static final long WARMUP_TIME_SECS = 10;    // Time allowed for sessions to ramp up
	static final long MAX_TIME_SECS = 3600;
	static final long MAX_IGNORED_CLIENTS = 1;  // If this many clients are done or aborted, the test is terminated early
	static final String VERSION = "1.2";

	private TestContext _context;
	private int _numclients;
	private HashMap<Long, Client> _client;
	/**
	 * Thread synchronization fields and methods.
	 */
	private AtomicBoolean _runFlag;                                       // gates client execution: true while the run phase is active
	private HashMap<Long, ClientState> _clientState;                      // per-client lifecycle state; guarded by synchronized(_clientState)
	private AtomicInteger _clientReadyCount = new AtomicInteger(0);       // clients staged and waiting for the start signal
	private AtomicInteger _clientRunningCount = new AtomicInteger(0);     // clients actively executing transactions
	private AtomicInteger _clientFinishedCount = new AtomicInteger(0);    // clients that completed or aborted
	private ThreadGroup _allThreads;
	private long _startTime;   // wall-clock msec when monitoring began
	private long _endTime;     // wall-clock msec when shutdown began
	private TestStats _priorStats;

	/**
	 * Create a Driver for the given test context; reads "numclients" from the context.
	 */
	public Driver ( TestContext ctx ) {
		_context = ctx;
		_numclients = _context.getInt("numclients");
		_client = new HashMap<Long, Client>();
		_clientState = new HashMap<Long, ClientState>();
		_runFlag = new AtomicBoolean(false);
	}

	/**
	 * Pre-load the database using a single, unmanaged loader Client (thread 0, no manager).
	 */
	public void exec_load ( ) {
		System.err.println("Driver: Running load.");
		Client loader = new Client(_context, 0, null);
		loader.load();
	}

	/**
	 * Launch all client threads, wait for them to stage (reach READY), then release them
	 * and allow a warmup period before monitoring begins.
	 *
	 * @throws RuntimeException if any client is still in READY state after the warmup period
	 */
	public void exec_launch_clients( ) {
		System.err.println("Driver: Launching client threads.");
		if (_context.getEnumIndex("starter") == 1) {   // "starter" mode 1 == manual keyboard start
			System.err.println("Driver: Ready to start Client execution. Press <enter> key to continue:");
			try {
				System.in.read();
			} catch (IOException e) {
				;  // best-effort prompt; start anyway if stdin fails
			}
		}
		_allThreads = new ThreadGroup("AllThreads");
		_runFlag.set(false);
		System.err.println("Starting client test threads.");
		for (int threadNum=0; threadNum < _numclients; threadNum++) {
			long clientID = computeClientID(_context, threadNum);
			_clientState.put(clientID, ClientState.NONE);
			Client client = new Client(_context, threadNum, this);
			_client.put(clientID, client);
			Thread t = new Thread(_allThreads, client);
			t.start();
		}
		synchronized (_clientReadyCount) {  // now wait for the client ready count to reach criterion (100%)
			if (_clientReadyCount.get() < _numclients) {
				try {
					_clientReadyCount.wait(MAX_STAGING_WAIT_MSECS);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
			}
		}
		_runFlag.set(true);
		synchronized (this) {
			this.notifyAll();  // Activate client threads
		}
		// FIX: WARMUP_TIME_SECS is in seconds but Thread.sleep() takes milliseconds;
		// the original slept 10 ms instead of 10 s, so the READY check below could
		// fire before clients had any real chance to transition to RUNNING.
		try {Thread.sleep(WARMUP_TIME_SECS * 1000);} catch (InterruptedException e) { ; }
		if (_clientReadyCount.get() > 0)
			throw new RuntimeException("Driver FATAL: Client threads still in Ready state after warmup period");
	}

	/**
	 * Monitor running clients, printing per-interval throughput stats until the configured
	 * number of reporting intervals elapses, the error/time budget is exceeded, or too many
	 * clients finish early.
	 */
	public void exec_monitor_clients ( ) {
		System.err.println("Driver: Monitoring threads.");
		int period = 0;
		_startTime = System.currentTimeMillis();
		_priorStats = new TestStats();
		long intervalSecs = _context.getInt("rptintervalsecs");
		int numIntervals = _context.getInt("numrptintervals");
		System.out.println("\nPeriod\tOps/sec\tReads\tWrites\t#Errs");
		try {
			while (running()) {
				synchronized (_clientState) {
					if (_clientFinishedCount.get() > MAX_IGNORED_CLIENTS) {
						_runFlag.set(false);
						System.err.println("Driver: Terminating early because too many clients ended or aborted before all cycles completed.");
					} else {
						_clientState.wait(intervalSecs * 1000);  // woken early by setState() on premature completion
					}
				}
				if (period > 0 && !running())  // Premature termination; only print stats
					break;
				TestStats newStats = new TestStats();
				for (Client client : _client.values()) {
					newStats.tally(client.getStats());
				}
				System.out.println(String.format("%s\t%s\t%s\t%s\t%s",
						period,
						(newStats.statementCount - _priorStats.statementCount) / intervalSecs,
						newStats.queryCount,
						newStats.updateCount,
						newStats.errorCount));
				_priorStats = newStats;
				period++;
				if (newStats.errorCount >= MAX_ERROR_COUNT || (System.currentTimeMillis()) - _startTime > MAX_TIME_SECS * 1000) {
					System.err.println("Driver: FATAL - error count or max time exceeded, terminating test.");
					_runFlag.set(false);
					break;
				}
				if (period == numIntervals)
					_runFlag.set(false);
			}
		} catch (Exception e) {
			System.err.println("FATAL in Driver: " + e.getClass().getName() + "\n\t" + e.getMessage());
		}
	}

	/**
	 * Signal clients to stop, then wait (with a 100-second budget) for all threads to exit.
	 * Exits the JVM with -100 if threads fail to shut down in time.
	 */
	public void exec_terminate ( ) {
		System.err.println("Driver: Waiting for threads to complete.");
		_runFlag.set(false);  // tell client threads to stop
		_endTime = System.currentTimeMillis();
		try {Thread.sleep(10000);} catch (InterruptedException e) { ; }
		System.err.println("Driver: Cleaning up (press Ctl-C to abort).");
		_allThreads.interrupt();
		while (_allThreads.activeCount() > 0) {
			try {
				Thread.sleep(10000);
			} catch (InterruptedException e) {
				;
			}
			if (TestContext.isVerbose)
				System.err.print(".");
			// FIX: measure the shutdown timeout from when shutdown began (_endTime),
			// not from test start (_startTime) — the original made any benchmark run
			// longer than 100 seconds "time out" here immediately and exit.
			if ((System.currentTimeMillis() - _endTime)/1000 > 100) {
				System.err.println("Timed out waiting for thread shutdown");
				System.exit(-100);
			}
		}
	}

	/**
	 * Print the final summary report: elapsed time, operation counts, txn/sec and errors.
	 */
	public void exec_print_stats ( ) {
		long totalTime = (_endTime - _startTime) / 1000;
		if (totalTime <= 0)
			totalTime = 1;  // FIX: guard divide-by-zero for sub-second runs
		long totalTxns = _priorStats.updateCount + _priorStats.queryCount;
		System.err.println("Driver: Printing final stats.");
		System.out.println(String.format("\n\nBenchmark complete.\nTotal Time:\t%s\tseconds\nTotal Ops:\t%s\nTxnRate:\t%s\ttxn/sec\nErrorCount:\t%s",
				totalTime, totalTxns, totalTxns/totalTime, _priorStats.errorCount));
	}

	public static long MAX_CLIENT_THREADS = 100;  // NOTE(review): mutable public static; left as-is in case other files assign it — consider making final

	/**
	 * Determine a canonical, unique Client ID for a given Client thread.
	 * This combines the driverID number and threadID number to give a unique long value.
	 *
	 * @throws RuntimeException if threadID is at or beyond MAX_CLIENT_THREADS
	 */
	public static long computeClientID ( TestContext ctx, long threadID ) {
		if (threadID >= MAX_CLIENT_THREADS)
			throw new RuntimeException(String.format("Fatal: Cannot run Driver instance with more than %s clients.", MAX_CLIENT_THREADS));
		long driverID = ctx.getInt("driverid");
		return (driverID * MAX_CLIENT_THREADS + threadID);
	}

	/**
	 * A singleton Driver thread may be run to monitor status and orchestrate Clients.
	 * TODO move client run monitoring from main() to here.
	 */
	@Override
	public void run() {
		;  // intentionally empty — see TODO above
	}

	/**
	 * Client callback: record a client's lifecycle transition and maintain the ready/running/
	 * finished counters. Notifies staging waiters when all clients are READY, and the monitor
	 * when the finished-client threshold is reached.
	 *
	 * @throws RuntimeException on an invalid state transition
	 */
	@Override
	public void setState(long clientID, ClientState state) {
		synchronized (_clientState) {
			ClientState oldState = _clientState.get(clientID);
			if (oldState == ClientState.NONE) {
				if (state != ClientState.NEW)
					throw new RuntimeException(String.format("Driver: Invalid state transition: %s to %s", oldState, state));
			} else if (oldState == ClientState.NEW) {
				if (state == ClientState.READY) {
					long newReadyCount = _clientReadyCount.incrementAndGet();
					if (newReadyCount >= _numclients)  // trigger when all clients are ready
						synchronized(_clientReadyCount) {
							_clientReadyCount.notify();
						}
				} else
					throw new RuntimeException(String.format("Driver: Invalid state transition: %s to %s", oldState, state));
			} else if (oldState == ClientState.READY) {
				if (state == ClientState.RUNNING) {
					_clientReadyCount.decrementAndGet();
					_clientRunningCount.incrementAndGet();
				} else if (state == ClientState.ABORTED) {
					_clientReadyCount.decrementAndGet();
					long newFinishedCount = _clientFinishedCount.incrementAndGet();
					if (newFinishedCount >= MAX_IGNORED_CLIENTS)  // trigger premature termination if threshold for finished clients is hit
						synchronized(_clientFinishedCount) {
							_clientFinishedCount.notify();
						}
				} else
					throw new RuntimeException(String.format("Driver: Invalid state transition: %s to %s", oldState, state));
			} else if (oldState == ClientState.RUNNING) {
				if (state == ClientState.DONE || state == ClientState.ABORTED) {
					_clientRunningCount.decrementAndGet();
					long newFinishedCount = _clientFinishedCount.incrementAndGet();
					if (newFinishedCount > MAX_IGNORED_CLIENTS)  // trigger premature termination if threshold for finished clients is hit
						synchronized(_clientFinishedCount) {
							_clientFinishedCount.notify();
						}
				}
			} else {
				throw new RuntimeException(String.format("Driver: Invalid state transition: %s to %s", oldState, state));
			}
			_clientState.put(clientID, state);
		}
	}

	/** Client callback: true while the run phase is active. */
	@Override
	public boolean running() {
		return _runFlag.get();
	}

	/**
	 * Load the database and run the benchmark.
	 * This is a multi-threaded SQL benchmark. The __runFlag__ object is used to gate the starting and stopping of client threads.
	 * If the 'doload' flag is set, the database is pre-loaded, using the loadscriptfile; this script should recreate and load tables to ensure that
	 * the database is returned to a known state. Then the client threads are launched and notified to start executing
	 * SQL transactions. In the run phase, the clients execute the configured runscriptfile script.
	 * During execution, the program periodically prints the current TPS rate and the accumulated number of errors.
	 * Upon completion, a brief report is printed summarizing transaction and error counts, along with overall transactions per second.
	 * @param args [-help] [-debug] [host=VAL] [port=VAL] [dbname=VAL] [user=VAL] [password=VAL]
	 */
	public static void main(String[] args) {
		System.err.println("Driver: Setting up test context.");
		TestContext context = new TestContext();
		context.parse(args);
		if (TestContext.isVerbose)
			context.writeConfig(System.err);
		Driver driver = new Driver(context);
		if (context.getBool("doload")) {
			driver.exec_load();
		}
		driver.exec_launch_clients();
		driver.exec_monitor_clients();
		driver.exec_terminate();
		driver.exec_print_stats();
		System.err.println("\nBenchmark complete.");
	}
}
/** */
package hu.bme.aut.oogen;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>OO Language Specific Snippet</b></em>':
 * a snippet of source text ({@code code}) tagged with the target language ({@code lang}).
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link hu.bme.aut.oogen.OOLanguageSpecificSnippetImpl#getCode <em>Code</em>}</li>
 *   <li>{@link hu.bme.aut.oogen.OOLanguageSpecificSnippetImpl#getLang <em>Lang</em>}</li>
 * </ul>
 * <p>
 * NOTE(review): EMF-generated class. Members tagged {@code @generated} are rewritten by the
 * code generator; hand edits to them will be lost unless retagged {@code @generated NOT}.
 *
 * @generated
 */
public class OOLanguageSpecificSnippetImpl extends MinimalEObjectImpl.Container implements OOLanguageSpecificSnippet {
	/**
	 * The default value of the '{@link #getCode() <em>Code</em>}' attribute.
	 * @see #getCode()
	 * @generated
	 * @ordered
	 */
	protected static final String CODE_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getCode() <em>Code</em>}' attribute.
	 * @see #getCode()
	 * @generated
	 * @ordered
	 */
	protected String code = CODE_EDEFAULT;

	/**
	 * The default value of the '{@link #getLang() <em>Lang</em>}' attribute.
	 * @see #getLang()
	 * @generated
	 * @ordered
	 */
	protected static final OOLanguage LANG_EDEFAULT = OOLanguage.JAVA;

	/**
	 * The cached value of the '{@link #getLang() <em>Lang</em>}' attribute.
	 * @see #getLang()
	 * @generated
	 * @ordered
	 */
	protected OOLanguage lang = LANG_EDEFAULT;

	/**
	 * @generated
	 */
	protected OOLanguageSpecificSnippetImpl() {
		super();
	}

	/**
	 * Returns the static EClass metadata for this model object.
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return OogenPackage.Literals.OO_LANGUAGE_SPECIFIC_SNIPPET;
	}

	/**
	 * @return the snippet's source text (may be null, the default).
	 * @generated
	 */
	public String getCode() {
		return code;
	}

	/**
	 * Sets the snippet text and fires a SET notification if any adapters are attached.
	 * @generated
	 */
	public void setCode(String newCode) {
		String oldCode = code;
		code = newCode;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__CODE, oldCode, code));
	}

	/**
	 * @return the target language of this snippet (never null; defaults to JAVA).
	 * @generated
	 */
	public OOLanguage getLang() {
		return lang;
	}

	/**
	 * Sets the target language (null is coerced to the JAVA default, per EMF enum-attribute
	 * convention) and fires a SET notification if any adapters are attached.
	 * @generated
	 */
	public void setLang(OOLanguage newLang) {
		OOLanguage oldLang = lang;
		lang = newLang == null ? LANG_EDEFAULT : newLang;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__LANG, oldLang, lang));
	}

	/**
	 * Reflective feature read used by the EMF framework.
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__CODE:
				return getCode();
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__LANG:
				return getLang();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective feature write used by the EMF framework.
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__CODE:
				setCode((String)newValue);
				return;
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__LANG:
				setLang((OOLanguage)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Reflective reset-to-default used by the EMF framework.
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__CODE:
				setCode(CODE_EDEFAULT);
				return;
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__LANG:
				setLang(LANG_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * Reflective "is set to a non-default value" check used by the EMF framework.
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__CODE:
				return CODE_EDEFAULT == null ? code != null : !CODE_EDEFAULT.equals(code);
			case OogenPackage.OO_LANGUAGE_SPECIFIC_SNIPPET__LANG:
				return lang != LANG_EDEFAULT;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * Debug representation; falls back to the proxy URI form when this object is an EMF proxy.
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (code: ");
		result.append(code);
		result.append(", lang: ");
		result.append(lang);
		result.append(')');
		return result.toString();
	}

} //OOLanguageSpecificSnippetImpl
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.cloudkit;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * CKFetchDatabaseChangesOperation
 * <p>
 * This operation will fetch changes to record zones within a database.
 * <p>
 * If a change anchor from a previous @c CKFetchDatabaseChangesOperation is passed in, only the zones that have changed since that anchor will be returned.
 * This per-database @c serverChangeToken is not to be confused with the per-recordZone @c serverChangeToken from @c CKFetchRecordZoneChangesOperation.
 * If this is your first fetch or if you wish to re-fetch all zones, pass nil for the change token.
 * Change tokens are opaque; clients should not infer any behavior based on their content.
 * [@c] CKFetchDatabaseChangesOperation is supported in a @c privateCloudDatabase and @c sharedCloudDatabase
 * <p>
 * NOTE(review): generated MOE (Multi-OS Engine) Objective-C binding — every member is a
 * native stub whose @Selector/@ObjCBlock annotations define the binding contract.
 * Do not hand-edit declarations; regenerate from the CloudKit headers instead.
 */
@Generated
@Library("CloudKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class CKFetchDatabaseChangesOperation extends CKDatabaseOperation {
	static {
		// Registers this binding class with the NatJ runtime on first load.
		NatJ.register();
	}

	/** Wraps an existing native object at the given peer pointer. @generated */
	@Generated
	protected CKFetchDatabaseChangesOperation(Pointer peer) {
		super(peer);
	}

	// --- Standard NSObject class-method bindings (inherited Objective-C runtime surface) ---

	@Generated
	@Selector("accessInstanceVariablesDirectly")
	public static native boolean accessInstanceVariablesDirectly();

	@Generated
	@Owned
	@Selector("alloc")
	public static native CKFetchDatabaseChangesOperation alloc();

	@Owned
	@Generated
	@Selector("allocWithZone:")
	public static native CKFetchDatabaseChangesOperation allocWithZone(VoidPtr zone);

	@Generated
	@Selector("automaticallyNotifiesObserversForKey:")
	public static native boolean automaticallyNotifiesObserversForKey(String key);

	@Generated
	@Selector("cancelPreviousPerformRequestsWithTarget:")
	public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

	@Generated
	@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
	public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
			@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
			@Mapped(ObjCObjectMapper.class) Object anArgument);

	@Generated
	@Selector("classFallbacksForKeyedArchiver")
	public static native NSArray<String> classFallbacksForKeyedArchiver();

	@Generated
	@Selector("classForKeyedUnarchiver")
	public static native Class classForKeyedUnarchiver();

	@Generated
	@Selector("debugDescription")
	public static native String debugDescription_static();

	@Generated
	@Selector("description")
	public static native String description_static();

	@Generated
	@Selector("hash")
	@NUInt
	public static native long hash_static();

	@Generated
	@Selector("instanceMethodForSelector:")
	@FunctionPtr(name = "call_instanceMethodForSelector_ret")
	public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

	@Generated
	@Selector("instanceMethodSignatureForSelector:")
	public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

	@Generated
	@Selector("instancesRespondToSelector:")
	public static native boolean instancesRespondToSelector(SEL aSelector);

	@Generated
	@Selector("isSubclassOfClass:")
	public static native boolean isSubclassOfClass(Class aClass);

	@Generated
	@Selector("keyPathsForValuesAffectingValueForKey:")
	public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

	@Generated
	@Owned
	@Selector("new")
	public static native CKFetchDatabaseChangesOperation new_objc();

	@Generated
	@Selector("resolveClassMethod:")
	public static native boolean resolveClassMethod(SEL sel);

	@Generated
	@Selector("resolveInstanceMethod:")
	public static native boolean resolveInstanceMethod(SEL sel);

	@Generated
	@Selector("setVersion:")
	public static native void setVersion_static(@NInt long aVersion);

	@Generated
	@Selector("superclass")
	public static native Class superclass_static();

	@Generated
	@Selector("version")
	@NInt
	public static native long version_static();

	// --- Operation-specific properties and callback blocks ---

	/**
	 * Getter for the change-token-updated callback.
	 * Each @c CKOperation instance has a private serial queue; it is used for all callback block invocations.
	 */
	@Generated
	@Selector("changeTokenUpdatedBlock")
	@ObjCBlock(name = "call_changeTokenUpdatedBlock_ret")
	public native Block_changeTokenUpdatedBlock_ret changeTokenUpdatedBlock();

	/**
	 * When YES (the default), the operation keeps issuing server requests until all record zone
	 * changes have been fetched, invoking @c changeTokenUpdatedBlock periodically with updated
	 * tokens. When NO, the caller must issue follow-up operations while @c moreComing is YES
	 * in the @c fetchDatabaseChangesCompletionBlock invocation.
	 */
	@Generated
	@Selector("fetchAllChanges")
	public native boolean fetchAllChanges();

	/**
	 * Getter for the completion callback, called when the operation finishes.
	 * Clients should save the change token and pass it to the next @c CKFetchDatabaseChangesOperation.
	 * On @c CKErrorChangeTokenExpired, discard the local cache and re-fetch with a nil
	 * @c previousServerChangeToken. If @c moreComing is true, run another operation with the
	 * token from this one.
	 */
	@Generated
	@Selector("fetchDatabaseChangesCompletionBlock")
	@ObjCBlock(name = "call_fetchDatabaseChangesCompletionBlock_ret")
	public native Block_fetchDatabaseChangesCompletionBlock_ret fetchDatabaseChangesCompletionBlock();

	@Generated
	@Selector("init")
	public native CKFetchDatabaseChangesOperation init();

	/** Initializes with a token from a prior fetch; pass nil to fetch all zones. */
	@Generated
	@Selector("initWithPreviousServerChangeToken:")
	public native CKFetchDatabaseChangesOperation initWithPreviousServerChangeToken(
			CKServerChangeToken previousServerChangeToken);

	@Generated
	@Selector("previousServerChangeToken")
	public native CKServerChangeToken previousServerChangeToken();

	/** Getter for the per-zone "zone changed" callback. */
	@Generated
	@Selector("recordZoneWithIDChangedBlock")
	@ObjCBlock(name = "call_recordZoneWithIDChangedBlock_ret")
	public native Block_recordZoneWithIDChangedBlock_ret recordZoneWithIDChangedBlock();

	/** Getter for the per-zone "zone deleted" callback. */
	@Generated
	@Selector("recordZoneWithIDWasDeletedBlock")
	@ObjCBlock(name = "call_recordZoneWithIDWasDeletedBlock_ret")
	public native Block_recordZoneWithIDWasDeletedBlock_ret recordZoneWithIDWasDeletedBlock();

	@Generated
	@Selector("resultsLimit")
	@NUInt
	public native long resultsLimit();

	/**
	 * Setter for the change-token-updated callback.
	 * Each @c CKOperation instance has a private serial queue; it is used for all callback block invocations.
	 */
	@Generated
	@Selector("setChangeTokenUpdatedBlock:")
	public native void setChangeTokenUpdatedBlock(
			@ObjCBlock(name = "call_setChangeTokenUpdatedBlock") Block_setChangeTokenUpdatedBlock value);

	/** Setter counterpart of {@link #fetchAllChanges()}; @c fetchAllChanges is @c YES by default. */
	@Generated
	@Selector("setFetchAllChanges:")
	public native void setFetchAllChanges(boolean value);

	/** Setter counterpart of {@link #fetchDatabaseChangesCompletionBlock()}. */
	@Generated
	@Selector("setFetchDatabaseChangesCompletionBlock:")
	public native void setFetchDatabaseChangesCompletionBlock(
			@ObjCBlock(name = "call_setFetchDatabaseChangesCompletionBlock") Block_setFetchDatabaseChangesCompletionBlock value);

	@Generated
	@Selector("setPreviousServerChangeToken:")
	public native void setPreviousServerChangeToken(CKServerChangeToken value);

	@Generated
	@Selector("setRecordZoneWithIDChangedBlock:")
	public native void setRecordZoneWithIDChangedBlock(
			@ObjCBlock(name = "call_setRecordZoneWithIDChangedBlock") Block_setRecordZoneWithIDChangedBlock value);

	@Generated
	@Selector("setRecordZoneWithIDWasDeletedBlock:")
	public native void setRecordZoneWithIDWasDeletedBlock(
			@ObjCBlock(name = "call_setRecordZoneWithIDWasDeletedBlock") Block_setRecordZoneWithIDWasDeletedBlock value);

	@Generated
	@Selector("setResultsLimit:")
	public native void setResultsLimit(@NUInt long value);

	// --- Callback block functional interfaces (one per @ObjCBlock binding above) ---

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_changeTokenUpdatedBlock_ret {
		@Generated
		void call_changeTokenUpdatedBlock_ret(CKServerChangeToken arg0);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_fetchDatabaseChangesCompletionBlock_ret {
		@Generated
		void call_fetchDatabaseChangesCompletionBlock_ret(CKServerChangeToken arg0, boolean arg1, NSError arg2);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_recordZoneWithIDChangedBlock_ret {
		@Generated
		void call_recordZoneWithIDChangedBlock_ret(CKRecordZoneID arg0);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_recordZoneWithIDWasDeletedBlock_ret {
		@Generated
		void call_recordZoneWithIDWasDeletedBlock_ret(CKRecordZoneID arg0);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setChangeTokenUpdatedBlock {
		@Generated
		void call_setChangeTokenUpdatedBlock(CKServerChangeToken arg0);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setFetchDatabaseChangesCompletionBlock {
		@Generated
		void call_setFetchDatabaseChangesCompletionBlock(CKServerChangeToken arg0, boolean arg1, NSError arg2);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setRecordZoneWithIDChangedBlock {
		@Generated
		void call_setRecordZoneWithIDChangedBlock(CKRecordZoneID arg0);
	}

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setRecordZoneWithIDWasDeletedBlock {
		@Generated
		void call_setRecordZoneWithIDWasDeletedBlock(CKRecordZoneID arg0);
	}

	/**
	 * If set, called instead of @c recordZoneWithIDWasDeletedBlock when the user deleted the
	 * zone via the iCloud storage UI — local cached data should be wiped and not re-uploaded.
	 */
	@Generated
	@Selector("recordZoneWithIDWasPurgedBlock")
	@ObjCBlock(name = "call_recordZoneWithIDWasPurgedBlock_ret")
	public native Block_recordZoneWithIDWasPurgedBlock_ret recordZoneWithIDWasPurgedBlock();

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_recordZoneWithIDWasPurgedBlock_ret {
		@Generated
		void call_recordZoneWithIDWasPurgedBlock_ret(CKRecordZoneID arg0);
	}

	/** Setter counterpart of {@link #recordZoneWithIDWasPurgedBlock()}. */
	@Generated
	@Selector("setRecordZoneWithIDWasPurgedBlock:")
	public native void setRecordZoneWithIDWasPurgedBlock(
			@ObjCBlock(name = "call_setRecordZoneWithIDWasPurgedBlock") Block_setRecordZoneWithIDWasPurgedBlock value);

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setRecordZoneWithIDWasPurgedBlock {
		@Generated
		void call_setRecordZoneWithIDWasPurgedBlock(CKRecordZoneID arg0);
	}

	/**
	 * If set, called instead of @c recordZoneWithIDWasDeletedBlock when the user chose to reset
	 * all encrypted data for their account (account recovery) — local cached data should be
	 * re-uploaded to the server to minimize data loss.
	 */
	@Generated
	@Selector("recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock")
	@ObjCBlock(name = "call_recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock_ret")
	public native Block_recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock_ret recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock();

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock_ret {
		@Generated
		void call_recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock_ret(CKRecordZoneID arg0);
	}

	/** Setter counterpart of {@link #recordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock()}. */
	@Generated
	@Selector("setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock:")
	public native void setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock(
			@ObjCBlock(name = "call_setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock") Block_setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock value);

	@Runtime(ObjCRuntime.class)
	@Generated
	public interface Block_setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock {
		@Generated
		void call_setRecordZoneWithIDWasDeletedDueToUserEncryptedDataResetBlock(CKRecordZoneID arg0);
	}
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.keymap;

import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.util.*;

// Expectation data for keymap-conflict tests. The tables below are pure data; keep entries
// byte-identical to what the test harness expects.
public abstract class KeymapsTestCase extends KeymapsTestCaseBase {
  /**
   * This table lists known actions which use the same shortcut. Note that it's ok to add a new entry here only if you're sure that the
   * mentioned actions will never be enabled in the same context. Otherwise, users may get unexpected behavior when they try to use that
   * shortcut.
   */
  // Keyed by keymap name ("$default" applies to all keymaps); each row is { shortcut, actionId... }.
  // NOTE(review): double-brace initialization creates an anonymous HashMap subclass; a plain static
  // initializer or Map.ofEntries would be more conventional — behavior is unchanged either way.
  @NonNls protected static final Map<String, String[][]> DEFAULT_DUPLICATES = new HashMap<>(){{
    put("$default", new String[][] {
      { "ADD", "ExpandTreeNode", "Graph.ZoomIn"},
      { "DELETE", "$Delete", "DatabaseView.DropAction"},
      { "ENTER", "Console.Execute", "Console.TableResult.EditValue", "DirDiffMenu.SynchronizeDiff", "EditorChooseLookupItem", "EditorEnter", "NextTemplateVariable", "Git.Log.Branches.Change.Branch.Filter", "SplitChooser.Split"},
      { "F1", "ContextHelp", "org.jetbrains.r.actions.REditorHelpAction"},
      { "F2", "GotoNextError", "GuiDesigner.EditComponent", "GuiDesigner.EditGroup", "Console.TableResult.EditValue", "XDebugger.SetValue", "Git.Reword.Commit", "Git.Rename.Local.Branch", "ShelvedChanges.Rename", "ChangesView.Rename"},
      { "alt ENTER", "ShowIntentionActions", "Console.TableResult.EditValue"},
      { "F3", "FindNext", "PoToggleSelection"},
      { "F5", "Graph.RouteEdges", "CopyElement", "PoBrowserRefresh"},
      { "F7", "NextDiff", "StepInto"},
      { "INSERT", "EditorToggleInsertState", "UsageView.Include", "DomElementsTreeView.AddElement", "DomCollectionControl.Add", "XDebugger.NewWatch"},
      { "SUBTRACT", "CollapseTreeNode", "Graph.ZoomOut"},
      { "TAB", "EditorChooseLookupItemReplace", "NextTemplateVariable", "NextParameter", "EditorIndentSelection", "EditorTab", "ExpandLiveTemplateByTab", "BraceOrQuoteOut", "SplitChooser.NextWindow"},
      { "alt DOWN", "ShowContent", "MethodDown", "ShowSearchHistory"},
      { "alt F1", "SelectIn", "ProjectViewChangeView"},
      { "alt F6", "CommanderSyncViews", "EditPropertyValue"},
      { "alt INSERT", "FileChooser.NewFolder", "Generate", "NewElement"},
      { "control 1", "FileChooser.GotoHome", "GotoBookmark1", "DuplicatesForm.SendToLeft"},
      { "control 2", "FileChooser.GotoProject", "GotoBookmark2", "DuplicatesForm.SendToRight"},
      { "control 3", "GotoBookmark3", "FileChooser.GotoModule"},
      { "control ADD", "ExpandAll", "ExpandExpandableComponent", "ExpandRegion"},
      { "control DIVIDE", "CommentByLineComment", "Graph.ActualSize"},
      { "control DOWN", "EditorScrollDown", "EditorLookupDown", "MethodOverloadSwitchDown", "JupyterSelectCellBelowAction"},
      { "control ENTER", "Console.Execute.Multiline", "DirDiffMenu.SynchronizeDiff.All", "EditorSplitLine", "NotebookRunCellAction", "PyExecuteCellAction", "SplitChooser.Duplicate", "Terminal.SmartCommandExecution.Run", "ViewSource", "org.jetbrains.r.actions.RunSelection"},
      { "control EQUALS", "ExpandAll", "ExpandExpandableComponent", "ExpandRegion"},
      { "control F5", "Refresh", "Rerun"},
      { "control B", "GotoDeclaration", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "control C", "$Copy", "Terminal.CopySelectedText", "org.jetbrains.r.console.RConsoleView.RInterruptAction"},
      { "control D", "EditorDuplicate", "Diff.ShowDiff", "CompareTwoFiles", "Console.TableResult.CloneRow", "SendEOF", "FileChooser.GotoDesktop", "org.jetbrains.r.console.RConsoleView.REofAction"},
      { "control E", "RecentFiles", "SwitcherIterateItems", "SwitcherRecentEditedChangedToggleCheckBox", "Vcs.ShowMessageHistory"},
      { "control I", "ImplementMethods", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "control INSERT", "$Copy", "Terminal.CopySelectedText"},
      { "control M", "EditorScrollToCenter", "Vcs.ShowMessageHistory"},
      { "control N", "Console.TableResult.AddRow", "FileChooser.NewFolder", "GotoClass"},
      { "control P", "FileChooser.TogglePathShowing", "ParameterInfo"},
      { "control R", "Replace", "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "control SLASH", "CommentByLineComment", "Graph.ActualSize"},
      { "control SPACE", "CodeCompletion", "ChangesView.SetDefault"},
      { "control U", "GotoSuperMethod", "CommanderSwapPanels", "org.intellij.plugins.markdown.ui.actions.styling.InsertImageAction"},
      { "control UP", "EditorScrollUp", "EditorLookupUp", "MethodOverloadSwitchUp", "JupyterSelectCellAboveAction"},
      { "control SUBTRACT", "CollapseAll", "CollapseExpandableComponent", "CollapseRegion"},
      { "control V", "$Paste", "Terminal.Paste"},
      { "control alt A", "ChangesView.AddUnversioned", "Diagram.DeselectAll", "Diff.ApplyRightSide"},
      { "control alt B", "GotoImplementation", "org.jetbrains.r.rendering.chunk.RunChunksAboveAction"},
      { "control alt D", "Console.TableResult.SetDefault", "org.jetbrains.r.actions.RMarkdownInterruptAction"},
      { "control alt K", "Git.Commit.And.Push.Executor", "Hg.Commit.And.Push.Executor"},
      { "control alt E", "Console.History.Browse", "PerforceDirect.Edit", "ToggleFindInSelection"},
      { "control alt DOWN", "NextOccurence", "Console.TableResult.NextPage"},
      { "control alt G", "DatabaseView.SqlGenerator", "org.jetbrains.plugins.ruby.rails.actions.generators.GeneratorsPopupAction", "Mvc.RunTarget"},
      { "control alt I", "AutoIndentLines", "RMarkdownNewChunk"},
      { "control alt R", "Diff.ApplyLeftSide", "org.jetbrains.plugins.ruby.tasks.rake.actions.RakeTasksPopupAction", "Django.RunManageTaskAction"},
      { "control alt UP", "PreviousOccurence", "Console.TableResult.PreviousPage"},
      { "control alt N", "Inline", "Console.TableResult.SetNull"},
      { "control alt X", "SECOND_STROKE_SHORTCUT", "org.jetbrains.r.rendering.chunk.DebugChunkAction"},
      { "control alt shift F", "DatabaseView.FullTextSearch", "Console.TableResult.EditFilterCriteria", "GoFmtFileAction"},
      { "ctrl alt C", "IntroduceConstant", "org.jetbrains.r.rendering.chunk.RunChunkAction"},
      { "ctrl alt U", "ShowUmlDiagramPopup", "ChangesView.UnshelveSilently"},
      { "ctrl alt F", "IntroduceField", "ShowFilterPopup"},
      { "control MINUS", "CollapseAll", "CollapseExpandableComponent", "CollapseRegion"},
      { "control PERIOD", "EditorChooseLookupItemDot", "CollapseSelection"},
      { "shift DELETE", "$Cut", "Maven.Uml.Exclude"},
      { "shift ENTER", "CollapseExpandableComponent", "Console.TableResult.EditValueMaximized", "DatabaseView.PropertiesAction", "EditorStartNewLine", "ExpandExpandableComponent", "NotebookRunCellSelectBelowAction", "OpenInRightSplit", "RConsoleNextLineAction"},
      { "shift F4", "Debugger.EditTypeSource", "EditSourceInNewWindow"},
      { "shift F6", "RenameElement", "Git.Reword.Commit", "Git.Rename.Local.Branch", "ShelvedChanges.Rename", "ChangesView.Rename"},
      { "shift F7", "PreviousDiff", "SmartStepInto"},
      { "shift INSERT", "$Paste", "Terminal.Paste"},
      { "shift TAB", "PreviousTemplateVariable", "PrevParameter", "EditorUnindentSelection", "SplitChooser.PreviousWindow"},
      { "shift alt ENTER", "Console.TableResult.EditValueMaximized", "JupyterDebugAction", "SplitChooser"},
      { "shift alt A", "NotebookInsertCellAboveAction", "VcsHistory.ShowAllAffected"},
      { "shift alt B", "NotebookInsertCellBelowAction", "tasks.open.in.browser", "sql.SelectInDatabaseView"},
      { "shift alt G", "EditorAddCaretPerSelectedLine", "hg4idea.QGotoFromPatches"},
      { "shift alt M", "ChangesView.Move", "Vcs.MoveChangedLinesToChangelist"},
      { "shift ctrl C", "CopyPaths", "DatabaseView.CopyDdlAction", "org.intellij.plugins.markdown.ui.actions.styling.ToggleCodeSpanAction"},
      { "shift control D", "TagDocumentationNavigation", "Diff.ShowSettingsPopup", "Uml.ShowDiff", "Console.TableResult.CompareCells"},
      { "shift control ENTER", "EditorCompleteStatement", "Console.Jpa.GenerateSql", "org.jetbrains.r.actions.RRunAction", "Terminal.SmartCommandExecution.Debug"},
      { "shift control F10", "Console.Open", "RunClass", "RunTargetAction"},
      { "shift control F8", "ViewBreakpoints", "EditBreakpoint"},
      { "shift control G", "ClassTemplateNavigation", "GoToClass"},
      { "shift control H", "ChangesView.ShelveSilently", "MethodHierarchy"},
      { "shift control M", "EditorMatchBrace", "RInsertPipeAction"},
      { "shift control T", "GotoTest", "ServiceView.ShowServices"},
      { "shift control U", "ShelveChanges.UnshelveWithDialog", "EditorToggleCase", "org.intellij.plugins.markdown.ui.actions.styling.MarkdownCreateLinkAction"},
      { "shift control alt D", "UML.ShowChanges", "Console.TableResult.CloneColumn"},
      { "shift control alt P", "IntroduceFunctionalParameter", "ReformatWithPrettierAction" },
      { "shift control alt ENTER", "Console.Jpa.GenerateDDL", "Console.TableResult.SubmitAndCommit", "Console.Transaction.Commit", "NotebookRunAllAction", "org.jetbrains.r.actions.RDebugAction"},
      { "control alt Z", "Vcs.RollbackChangedLines", "ChangesView.Revert"},
      { "shift control TAB", "Switcher", "Diff.FocusOppositePane"},
      { "ctrl alt ENTER", "EditorStartNewLineBefore", "QuickActionPopup", "org.jetbrains.r.actions.DebugSelection"},
      { "alt button1", "EditorCreateRectangularSelectionOnMouseDrag", "QuickEvaluateExpression"},
      { "button2", "EditorPasteFromX11", "GotoDeclaration", "EditorCreateRectangularSelectionOnMouseDrag"},
      { "ESCAPE", "CloseGotItTooltip", "EditorEscape", "FocusEditor", "Terminal.SwitchFocusToEditor"},
      { "SPACE", "Console.TableResult.ColumnVisibility", "QuickPreview", "SplitChooser.SplitCenter"},
      { "ctrl L", "FindNext", "Terminal.ClearBuffer"},
      { "control alt button1", "GotoImplementation", "UiInspector"},
      { "alt shift RIGHT", "Diff.NextChange", "NextEditorTab"},
      { "alt shift LEFT", "Diff.PrevChange", "PreviousEditorTab"},
      { "shift ctrl alt DOWN", "ResizeToolWindowDown", "VcsShowNextChangeMarker"},
      { "shift ctrl alt UP", "ResizeToolWindowUp", "VcsShowPrevChangeMarker"},
      { "shift ctrl alt LEFT", "MoveElementLeft", "ResizeToolWindowLeft"},
      { "shift ctrl alt RIGHT", "MoveElementRight", "ResizeToolWindowRight"},
    });
    put("Mac OS X 10.5+", new String[][] {
      { "F1", "QuickJavaDoc", "org.jetbrains.r.actions.REditorHelpAction"},
      { "F3", "PoToggleSelection", "ToggleBookmark"},
      { "Force touch", "GotoDeclaration", "RunToCursor"},
      { "BACK_SPACE", "$Delete", "EditorBackSpace"},
      { "shift BACK_SPACE", "EditorBackSpace", "UsageView.Include"},
      { "meta BACK_SPACE", "EditorDeleteLine", "$Delete"},
      { "control D", "Debug", "org.jetbrains.r.console.RConsoleView.REofAction"},
      { "control M", "EditorMatchBrace", "Vcs.ShowMessageHistory"},
      { "control O", "ExportToTextFile", "OverrideMethods"},
      { "ctrl alt F", "EditorNextWord", "ShowFilterPopup"},
      { "control ENTER", "Generate", "NotebookRunCellAction", "NewElement", "PyExecuteCellAction"},
      { "control SPACE", "CodeCompletion", "ChangesView.SetDefault"},
      { "shift control TAB", "Switcher", "Diff.FocusOppositePane"},
      { "meta R", "Refresh", "Rerun", "Replace", "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "meta 1", "ActivateProjectToolWindow", "FileChooser.GotoHome", "DuplicatesForm.SendToLeft"},
      { "meta 2", "ActivateFavoritesToolWindow", "FileChooser.GotoProject", "DuplicatesForm.SendToRight"},
      { "meta 3", "ActivateFindToolWindow", "FileChooser.GotoModule"},
      { "meta I", "DatabaseView.PropertiesAction", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "meta N", "Console.TableResult.AddRow", "FileChooser.NewFolder", "Generate", "NewElement"},
      { "shift meta G", "ClassTemplateNavigation", "GoToClass", "FindPrevious"},
      { "shift meta M", "ChangesView.Move", "RInsertPipeAction", "Vcs.MoveChangedLinesToChangelist"},
      { "shift meta L", "Jdbc.OpenConsole.New", "rspec.LetIntroduce"},
      { "alt R", "Django.RunManageTaskAction", "org.jetbrains.plugins.ruby.tasks.rake.actions.RakeTasksPopupAction"},
      { "alt DOWN", "EditorUnSelectWord", "MethodOverloadSwitchDown", "ShowSearchHistory"},
      { "alt UP", "EditorSelectWord", "MethodOverloadSwitchUp"},
      { "ctrl alt DOWN", "GotoNextElementUnderCaretUsage", "ResizeToolWindowDown"},
      { "ctrl alt LEFT", "EmmetPreviousEditPoint", "ResizeToolWindowLeft"},
      { "ctrl alt UP", "GotoPrevElementUnderCaretUsage", "ResizeToolWindowUp"},
      { "ctrl alt RIGHT", "EmmetNextEditPoint", "ResizeToolWindowRight"},
      { "meta L", "GotoLine", "Terminal.ClearBuffer"},
      { "meta K", "CheckinProject", "Terminal.ClearBuffer"},
      { "control shift RIGHT", "Diff.NextChange", "NextEditorTab"},
      { "control shift LEFT", "Diff.PrevChange", "PreviousEditorTab"},
      { "shift ctrl DOWN", "MethodDown", "ShowContent"},
    });
    put("Mac OS X", new String[][] {
      { "Force touch", "GotoDeclaration", "RunToCursor"},
      { "BACK_SPACE", "$Delete", "EditorBackSpace"},
      { "control DOWN", "EditorLookupDown", "JupyterSelectCellBelowAction", "MethodDown"},
      { "control UP", "EditorLookupUp", "JupyterSelectCellAboveAction", "MethodUp"},
      { "control ENTER", "Generate", "NotebookRunCellAction", "NewElement", "PyExecuteCellAction"},
      { "control F5", "Refresh", "Rerun"},
      { "control N", "Generate", "NewElement"},
      { "shift control TAB", "Switcher", "Diff.FocusOppositePane"},
      { "meta 1", "ActivateProjectToolWindow", "FileChooser.GotoHome", "DuplicatesForm.SendToLeft"},
      { "meta 2", "ActivateFavoritesToolWindow", "FileChooser.GotoProject", "DuplicatesForm.SendToRight"},
      { "meta 3", "ActivateFindToolWindow", "FileChooser.GotoModule"},
      { "alt R", "Django.RunManageTaskAction", "org.jetbrains.plugins.ruby.tasks.rake.actions.RakeTasksPopupAction"},
      { "ctrl SPACE", "ChangesView.SetDefault", "CodeCompletion"},
      { "meta K", "CheckinProject", "Terminal.ClearBuffer"},
      { "control shift RIGHT", "Diff.NextChange", "NextEditorTab"},
      { "control shift LEFT", "Diff.PrevChange", "PreviousEditorTab"},
    });
    put("Emacs", new String[][] {
      { "TAB", "EditorChooseLookupItemReplace", "NextTemplateVariable", "NextParameter", "EditorIndentSelection", "EmacsStyleIndent", "ExpandLiveTemplateByTab", "BraceOrQuoteOut", "SplitChooser.NextWindow"},
      { "alt MINUS", "RInsertAssignmentAction", "SECOND_STROKE_SHORTCUT"},
      { "alt SLASH", "CodeCompletion", "HippieCompletion"},
      { "control B", "EditorLeft", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "control D", "$Delete", "Diff.ShowDiff", "CompareTwoFiles", "Console.TableResult.CloneRow", "SendEOF", "FileChooser.GotoDesktop", "org.jetbrains.r.console.RConsoleView.REofAction"},
      { "control K", "EditorCutLineEnd", "CheckinProject"},
      { "control M", "EditorEnter", "EditorChooseLookupItem", "NextTemplateVariable", "Console.Execute"},
      { "control N", "Console.TableResult.AddRow", "EditorDown", "FileChooser.NewFolder"},
      { "control P", "EditorUp", "FileChooser.TogglePathShowing"},
      { "control R", "org.jetbrains.plugins.ruby.rails.console.ReloadSources", "FindPrevious"},
      { "control SLASH", "$Undo", "Graph.ActualSize"},
      { "control SPACE", "EditorToggleStickySelection", "ChangesView.SetDefault"},
      { "control UP", "EditorBackwardParagraph", "EditorLookupUp", "JupyterSelectCellAboveAction", "MethodOverloadSwitchUp"},
      { "control DOWN", "EditorForwardParagraph", "EditorLookupDown", "JupyterSelectCellBelowAction", "MethodOverloadSwitchDown"},
      { "ctrl V", "EditorPageDown", "Terminal.Paste"},
      { "control alt A", "MethodUp", "ChangesView.AddUnversioned", "Diagram.DeselectAll"},
      { "control alt E", "Console.History.Browse", "MethodDown", "PerforceDirect.Edit", "ToggleFindInSelection"},
      { "control alt G", "GotoDeclaration", "org.jetbrains.plugins.ruby.rails.actions.generators.GeneratorsPopupAction", "Mvc.RunTarget"},
      { "control alt S", "ShowSettings", "Find"},
      { "shift alt S", "FindUsages", "context.save"},
      { "shift alt G", "GotoClass", "hg4idea.QGotoFromPatches"},
      { "shift alt P", "ParameterInfo", "hg4idea.QPushAction"},
      // SECOND_STROKE (bare identifier, no quotes) is presumably a constant inherited from
      // KeymapsTestCaseBase — TODO confirm its declaration in the base class.
      { "shift control X", SECOND_STROKE, "com.jetbrains.php.framework.FrameworkRunConsoleAction"},
      { "ESCAPE", SECOND_STROKE, "CloseGotItTooltip", "FocusEditor", "Terminal.SwitchFocusToEditor"},
      { "ctrl L", "EditorScrollToCenter", "Terminal.ClearBuffer"},
    });
    put("Visual Studio", new String[][] {
      { "F7", "CompileDirty", "NextDiff"},
      { "ctrl P", "FileChooser.TogglePathShowing", "Print"},
      { "ctrl F10", "RunToCursor", "UpdateRunningApplication"},
      { "F5", "Graph.RouteEdges", "Resume", "PoBrowserRefresh"},
      { "shift alt OPEN_BRACKET", "EditorCodeBlockStart", "EmmetPreviousEditPoint"},
      { "shift alt F11", "FindUsagesInFile", "HighlightUsagesInFile"},
      { "ctrl alt B", "ViewBreakpoints", "org.jetbrains.r.rendering.chunk.RunChunksAboveAction"},
      { "ctrl alt F", "ActivateStructureToolWindow", "ShowFilterPopup"},
      { "shift ctrl alt DOWN", "MoveStatementDown", "ResizeToolWindowDown"},
      { "ctrl R", SECOND_STROKE, "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "shift ctrl alt UP", "MoveStatementUp", "ResizeToolWindowUp"},
      { "ctrl SUBTRACT", "Back", "CollapseAll", "CollapseExpandableComponent"},
      { "ctrl R,R", "ChangesView.Rename", "Git.Rename.Local.Branch", "Git.Reword.Commit", "RenameElement", "ShelvedChanges.Rename"},
      { "shift alt LEFT", "Diff.PrevChange", "EditorLeftWithSelection"},
      { "shift ctrl alt K", "Git.Commit.And.Push.Executor", "Hg.Commit.And.Push.Executor"},
      { "ctrl E", SECOND_STROKE, "SwitcherIterateItems", "SwitcherRecentEditedChangedToggleCheckBox"},
      { "shift alt RIGHT", "Diff.NextChange", "EditorRightWithSelection"},
      { "ctrl N", "Console.TableResult.AddRow", "FileChooser.NewFolder", "NewElement"},
      { "shift F5", "Graph.ApplyCurrentLayout", "Stop"},
      { "ctrl alt W", "ActivateWebToolWindow", "Vcs.UpdateProject"},
      { "shift alt ENTER", "JupyterDebugAction", "ToggleFullScreen"},
      { "ctrl ENTER", "Console.Execute.Multiline", "DirDiffMenu.SynchronizeDiff.All", "EditorStartNewLineBefore", "NotebookRunCellAction", "PyExecuteCellAction", "ViewSource", "org.jetbrains.r.actions.RunSelection", "Terminal.SmartCommandExecution.Run", "SplitChooser.Duplicate"},
      { "ctrl MINUS", "Back", "CollapseAll", "CollapseExpandableComponent"},
      { "ctrl alt ENTER", "ReformatCode", "org.jetbrains.r.actions.DebugSelection"},
      { "shift ctrl H", "ChangesView.ShelveSilently", "ReplaceInPath"},
      { "shift ctrl alt P", "IntroduceFunctionalParameter", "ReformatWithPrettierAction", "VcsShowPrevChangeMarker"},
      { "ctrl L", "EditorDeleteLine", "Terminal.ClearBuffer"},
      { "alt O", "SwitchHeaderSource", "ExportToTextFile"},
      { "ctrl H", "Replace", "Vcs.ShowMessageHistory"},
      { "control alt button1", "EditorAddOrRemoveCaret", "UiInspector"},
      { "shift alt button1", "EditorCreateRectangularSelection", "EditorCreateRectangularSelectionOnMouseDrag"},
      { "ctrl PERIOD", "EditorChooseLookupItemDot", "ShowIntentionActions"},
      { "ctrl alt R", "Diff.ApplyLeftSide", "Refresh"},
    });
    put("Default for XWin", new String[][] {
      { "shift ctrl alt button1", "EditorAddRectangularSelectionOnMouseDrag", "QuickEvaluateExpression"},
      { "shift ctrl C", "CopyPaths", "DatabaseView.CopyDdlAction", "Terminal.CopySelectedText", "org.intellij.plugins.markdown.ui.actions.styling.ToggleCodeSpanAction"},
      { "shift ctrl V", "PasteMultiple", "Terminal.Paste"},
    });
    put("Default for GNOME", new String[][] {
      { "shift alt 1", "SelectIn", "ProjectViewChangeView"},
      { "shift alt 7", "IDEtalk.SearchUserHistory", "FindUsages"},
      { "shift alt LEFT", "PreviousEditorTab", "Diff.PrevChange", "Back"},
      { "shift alt RIGHT", "NextEditorTab", "Diff.NextChange", "Forward"},
    });
    put("Default for KDE", new String[][] {
      { "control 1", "FileChooser.GotoHome", "ShowErrorDescription", "DuplicatesForm.SendToLeft"},
      { "control 2", "FileChooser.GotoProject", "Stop", "DuplicatesForm.SendToRight"},
      { "control 3", "FindWordAtCaret", "FileChooser.GotoModule"},
      { "control 5", "Refresh", "Rerun"},
      { "shift alt 1", "SelectIn", "ProjectViewChangeView"},
      { "shift alt 7", "IDEtalk.SearchUserHistory", "FindUsages"},
      { "shift alt L", "ReformatCode", "context.load"},
    });
    put("Eclipse", new String[][] {
      { "F2", "Console.TableResult.EditValue", "QuickJavaDoc", "XDebugger.SetValue"},
      { "F3", "GotoDeclaration", "PoToggleSelection"},
      { "F5", "Graph.RouteEdges", "StepInto", "PoBrowserRefresh"},
      { "alt DOWN", "ShowContent", "MoveStatementDown", "ShowSearchHistory"},
      { "alt HOME", "ViewNavigationBar", "ShowNavBar"},
      { "ctrl 1", "DuplicatesForm.SendToLeft", "FileChooser.GotoHome", "ShowIntentionActions"},
      { "control F10", "ShowPopupMenu", "UpdateRunningApplication"},
      { "control B", "CompileDirty", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "control D", "EditorDeleteLine", "Diff.ShowDiff", "CompareTwoFiles", "Console.TableResult.CloneRow", "SendEOF", "FileChooser.GotoDesktop", "org.jetbrains.r.console.RConsoleView.REofAction"},
      { "control I", "AutoIndentLines", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "control N", "Console.TableResult.AddRow", "ShowPopupMenu", "FileChooser.NewFolder"},
      { "control P", "FileChooser.TogglePathShowing", "Print"},
      { "control R", "RunToCursor", "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "control U", "EvaluateExpression", "CommanderSwapPanels", "org.intellij.plugins.markdown.ui.actions.styling.InsertImageAction"},
      { "control alt DOWN", "Console.TableResult.NextPage", "EditorDuplicateLines"},
      { "control alt E", "Console.History.Browse", "ExecuteInPyConsoleAction", "PerforceDirect.Edit", "ToggleFindInSelection"},
      { "shift alt D", "hg4idea.QFold", "Debug"},
      { "shift alt G", "RerunTests", "hg4idea.QGotoFromPatches"},
      { "shift alt L", "IntroduceVariable", "context.load"},
      { "shift alt P", "hg4idea.QPushAction", "ImplementMethods"},
      { "shift alt R", "RenameElement", "Git.Reword.Commit", "Git.Rename.Local.Branch", "ShelvedChanges.Rename", "ChangesView.Rename"},
      { "shift alt S", "ShowPopupMenu", "context.save"},
      { "shift alt T", "ShowPopupMenu", "tasks.switch"},
      { "shift control DOWN", "ResizeToolWindowDown", "MethodDown"},
      { "shift control E", "RecentLocations", "EditSource"},
      { "shift control F6", "ChangeTypeSignature", "PreviousTab"},
      { "shift control G", "FindUsagesInFile", "ClassTemplateNavigation", "GoToClass"},
      { "shift control I", "QuickImplementations", "XDebugger.Inspect"},
      { "shift control K", "Vcs.Push", "FindPrevious"},
      { "shift control X", "EditorToggleCase", "com.jetbrains.php.framework.FrameworkRunConsoleAction"},
      { "shift ctrl alt RIGHT", "PreviousEditorTab", "Diff.PrevChange"},
      { "shift ctrl alt LEFT", "NextEditorTab", "Diff.NextChange"},
      { "ctrl L", "GotoLine", "Terminal.ClearBuffer"},
    });
    put("NetBeans 6.5", new String[][] {
      { "F4", "RunToCursor", "EditSource"},
      { "shift F5", "Graph.ApplyCurrentLayout", "Stop"},
      { "F5", "Debugger.ResumeThread", "Resume", "Graph.RouteEdges", "PoBrowserRefresh"},
      { "alt DOWN", "GotoNextElementUnderCaretUsage", "ShowContent", "ShowSearchHistory"},
      { "control 1", "ActivateProjectToolWindow", "DuplicatesForm.SendToLeft"},
      { "control 2", "ActivateProjectToolWindow", "FileChooser.GotoProject", "DuplicatesForm.SendToRight"},
      { "control 3", "ActivateProjectToolWindow", "FileChooser.GotoModule"},
      { "control BACK_SPACE", "EditorDeleteToWordStart", "ToggleDockMode"},
      { "control DIVIDE", "CollapseRegionRecursively", "Graph.ActualSize"},
      { "control M", "Vcs.ShowMessageHistory", "Move"},
      { "control N", "Console.TableResult.AddRow", "NewElement", "FileChooser.NewFolder"},
      { "control R", "RenameElement", "org.jetbrains.plugins.ruby.rails.console.ReloadSources", "Git.Reword.Commit", "Git.Rename.Local.Branch", "ShelvedChanges.Rename", "ChangesView.Rename"},
      { "control U", SECOND_STROKE, "CommanderSwapPanels", "org.intellij.plugins.markdown.ui.actions.styling.InsertImageAction"},
      { "control PERIOD", "GotoNextError", "EditorChooseLookupItemDot"},
      { "control alt DOWN", "MethodDown", "NextOccurence", "Console.TableResult.NextPage"},
      { "control alt UP", "MethodUp", "PreviousOccurence", "Console.TableResult.PreviousPage"},
      { "control alt shift P", "Print", "ReformatWithPrettierAction"},
      { "shift F4", "RecentFiles", "SwitcherIterateItems", "SwitcherRecentEditedChangedToggleCheckBox", "Debugger.EditTypeSource", "Vcs.ShowMessageHistory", "EditSourceInNewWindow"},
      { "shift alt F9", "ChooseDebugConfiguration", "ValidateXml", "ValidateJsp"},
      { "shift alt D", "ToggleFloatingMode", "hg4idea.QFold"},
      { "shift control F7", "HighlightUsagesInFile", "XDebugger.NewWatch"},
      { "shift ctrl C", "CommentByLineComment", "DatabaseView.CopyDdlAction"},
      { "shift control H", "ChangesView.ShelveSilently", "ReplaceInPath"},
      { "shift control K", "HippieCompletion", "Vcs.Push"},
      { "shift control M", "RInsertPipeAction", "ShowBookmarks"},
      { "control alt E", "Console.History.Browse", "ExecuteInPyConsoleAction", "PerforceDirect.Edit", "ToggleFindInSelection"},
      { "shift ctrl BACK_SLASH", "GotoUrlAction", "SmartTypeCompletion"},
      { "ctrl alt PAGE_DOWN", "Diff.NextChange", "NextEditorTab"},
      { "ctrl alt PAGE_UP", "Diff.PrevChange", "PreviousEditorTab"},
    });
    put("Eclipse (Mac OS X)", new String[][] {
      { "meta BACK_SPACE", "EditorDeleteToWordStart", "$Delete"},
      { "F2", "Console.TableResult.EditValue", "QuickJavaDoc", "XDebugger.SetValue", "Git.Reword.Commit", "Git.Rename.Local.Branch", "ShelvedChanges.Rename", "ChangesView.Rename"},
      { "F3", "GotoDeclaration", "EditSource", "PoToggleSelection"},
      { "F5", "StepInto", "Graph.RouteEdges", "PoBrowserRefresh"},
      { "alt DOWN", "MoveStatementDown", "MethodOverloadSwitchDown", "ShowSearchHistory"},
      { "alt UP", "MoveStatementUp", "MethodOverloadSwitchUp"},
      { "control PERIOD", "EditorChooseLookupItemDot", "HippieCompletion"},
      { "meta 1", "FileChooser.GotoHome", "ShowIntentionActions", "DuplicatesForm.SendToLeft"},
      { "meta 3", "FileChooser.GotoModule", "GotoAction"},
      { "meta B", "CompileDirty", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "meta D", "EditorDeleteLine", "Diff.ShowDiff", "CompareTwoFiles", "Console.TableResult.CloneRow", "SendEOF", "FileChooser.GotoDesktop"},
      { "meta I", "DatabaseView.PropertiesAction", "AutoIndentLines", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "meta P", "FileChooser.TogglePathShowing", "Print"},
      { "meta R", "org.jetbrains.plugins.ruby.rails.console.ReloadSources", "RunToCursor"},
      { "meta U", "CommanderSwapPanels", "EvaluateExpression", "org.intellij.plugins.markdown.ui.actions.styling.InsertImageAction"},
      { "meta W", "CloseContent", "CloseActiveTab"},
      { "meta alt B", "ShowNavBar", "org.jetbrains.r.rendering.chunk.RunChunksAboveAction"},
      { "meta alt C", "ChangeSignature", "org.jetbrains.r.rendering.chunk.RunChunkAction"},
      { "meta alt I", "Inline", "RMarkdownNewChunk"},
      { "meta alt DOWN", "Console.TableResult.NextPage", "EditorDuplicateLines"},
      { "shift meta G", "ClassTemplateNavigation", "GoToClass", "FindUsages"},
      { "shift meta K", "Vcs.Push", "FindPrevious"},
      { "shift meta X", "EditorToggleCase", "com.jetbrains.php.framework.FrameworkRunConsoleAction"},
      { "shift meta U", "FindUsagesInFile", "ShelveChanges.UnshelveWithDialog", "org.intellij.plugins.markdown.ui.actions.styling.MarkdownCreateLinkAction"},
      { "control shift alt Z", "Vcs.RollbackChangedLines", "ChangesView.Revert"},
      { "meta K", "FindNext", "Terminal.ClearBuffer"},
      { "shift ctrl DOWN", "EditorUnSelectWord", "ShowContent"},
    });
    put("Sublime Text", new String[][] {
      { "F2", "ChangesView.Rename", "Console.TableResult.EditValue", "Git.Reword.Commit", "Git.Rename.Local.Branch", "GotoNextBookmark", "GuiDesigner.EditComponent", "GuiDesigner.EditGroup", "ShelvedChanges.Rename", "XDebugger.SetValue"},
      { "alt MINUS", "Back", "RInsertAssignmentAction"},
      { "ctrl ADD", "EditorIncreaseFontSize", "ExpandAll", "ExpandExpandableComponent"},
      { "ctrl B", "Compile", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "ctrl D", "CompareTwoFiles", "Console.TableResult.CloneRow", "Diff.ShowDiff", "FileChooser.GotoDesktop", "SelectNextOccurrence", "SendEOF", "org.jetbrains.r.console.RConsoleView.REofAction"},
      { "ctrl I", "IncrementalSearch", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "ctrl ENTER", "Console.Execute.Multiline", "DirDiffMenu.SynchronizeDiff.All", "EditorStartNewLine", "NotebookRunCellAction", "PyExecuteCellAction", "ViewSource", "org.jetbrains.r.actions.RunSelection", "Terminal.SmartCommandExecution.Run", "SplitChooser.Duplicate"},
      { "ctrl EQUALS", "EditorIncreaseFontSize", "ExpandAll", "ExpandExpandableComponent"},
      { "ctrl M", "EditorMatchBrace", "Vcs.ShowMessageHistory"},
      { "ctrl MINUS", "CollapseAll", "CollapseExpandableComponent", "EditorDecreaseFontSize"},
      { "ctrl N", "Console.TableResult.AddRow", "FileChooser.NewFolder", "NewElement"},
      { "ctrl P", "FileChooser.TogglePathShowing", "GotoFile"},
      { "ctrl R", "FileStructurePopup", "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "ctrl SUBTRACT", "CollapseAll", "CollapseExpandableComponent", "EditorDecreaseFontSize"},
      { "ctrl V", "EditorPasteSimple", "Terminal.Paste"},
      { "ctrl alt DOWN", "Console.TableResult.NextPage", "EditorCloneCaretBelow"},
      { "ctrl alt UP", "Console.TableResult.PreviousPage", "EditorCloneCaretAbove"},
      { "shift ENTER", "CollapseExpandableComponent", "Console.TableResult.EditValueMaximized", "DatabaseView.PropertiesAction", "EditorSplitLine", "ExpandExpandableComponent", "NotebookRunCellSelectBelowAction", "OpenInRightSplit", "RConsoleNextLineAction"},
      { "shift INSERT", "EditorPasteSimple", "Terminal.Paste"},
      { "shift ctrl D", "Console.TableResult.CompareCells", "EditorDuplicate", "Uml.ShowDiff"},
      { "shift ctrl M", "EditorSelectWord", "RInsertPipeAction"},
      { "shift ctrl ENTER", "Console.Jpa.GenerateSql", "EditorStartNewLineBefore", "org.jetbrains.r.actions.RRunAction", "Terminal.SmartCommandExecution.Debug"},
      { "shift ctrl T", "ReopenClosedTab", "ServiceView.ShowServices"},
      { "ctrl L", "EditorSelectLine", "Terminal.ClearBuffer"},
    });
    put("Sublime Text (Mac OS X)", new String[][] {
      { "F2", "ChangesView.Rename", "Console.TableResult.EditValue", "Git.Reword.Commit", "Git.Rename.Local.Branch", "GotoNextBookmark", "GuiDesigner.EditComponent", "GuiDesigner.EditGroup", "ShelvedChanges.Rename", "XDebugger.SetValue"},
      { "meta ADD", "EditorIncreaseFontSize", "ExpandAll", "ExpandExpandableComponent"},
      { "meta B", "Compile", "org.intellij.plugins.markdown.ui.actions.styling.ToggleBoldAction"},
      { "meta BACK_SPACE", "EditorDeleteToLineStart", "$Delete"},
      { "meta D", "CompareTwoFiles", "Console.TableResult.CloneRow", "Diff.ShowDiff", "FileChooser.GotoDesktop", "SelectNextOccurrence", "SendEOF"},
      { "meta ENTER", "Console.Execute.Multiline", "DirDiffMenu.SynchronizeDiff.All", "EditorStartNewLine", "ViewSource", "org.jetbrains.r.actions.RunSelection", "Terminal.SmartCommandExecution.Run", "SplitChooser.Duplicate"},
      { "meta EQUALS", "EditorIncreaseFontSize", "ExpandAll", "ExpandExpandableComponent"},
      { "meta I", "DatabaseView.PropertiesAction", "IncrementalSearch", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "meta MINUS", "CollapseAll", "CollapseExpandableComponent", "EditorDecreaseFontSize"},
      { "meta P", "FileChooser.TogglePathShowing", "GotoFile"},
      { "meta R", "FileStructurePopup", "Refresh", "Rerun", "org.jetbrains.plugins.ruby.rails.console.ReloadSources"},
      { "meta SUBTRACT", "CollapseAll", "CollapseExpandableComponent", "EditorDecreaseFontSize"},
      { "meta V", "EditorPasteSimple", "Terminal.Paste"},
      { "meta alt DOWN", "Console.TableResult.NextPage", "GotoDeclaration"},
      { "meta alt G", "DatabaseView.SqlGenerator", "FindWordAtCaret", "org.jetbrains.plugins.ruby.rails.actions.generators.GeneratorsPopupAction"},
      { "shift ENTER", "CollapseExpandableComponent", "Console.TableResult.EditValueMaximized", "EditorSplitLine", "ExpandExpandableComponent", "NotebookRunCellSelectBelowAction", "OpenInRightSplit", "RConsoleNextLineAction"},
      { "shift meta D", "Console.TableResult.CompareCells", "EditorDuplicate", "Uml.ShowDiff"},
      { "shift meta ENTER", "Console.Jpa.GenerateSql", "EditorStartNewLineBefore", "org.jetbrains.r.actions.RRunAction", "Terminal.SmartCommandExecution.Debug"},
      { "shift meta T", "ReopenClosedTab", "ServiceView.ShowServices"},
      { "meta L", "EditorSelectLine", "Terminal.ClearBuffer"},
      { "meta K", "SECOND_STROKE_SHORTCUT", "Terminal.ClearBuffer"},
      { "shift ctrl DOWN", "EditorCloneCaretBelow", "ShowContent"},
    });
    put("macOS System Shortcuts", new String[][] {
      { "ctrl C", "Vcs.QuickListPopupAction", "org.jetbrains.r.console.RConsoleView.RInterruptAction"},
      { "meta I", "DatabaseView.PropertiesAction", "QuickJavaDoc", "org.intellij.plugins.markdown.ui.actions.styling.ToggleItalicAction"},
      { "shift meta T", "ReopenClosedTab", "ServiceView.ShowServices"},
      { "ctrl O", "EditorSplitLine", "ExportToTextFile"},
      { "meta DOWN", "EditSourceNotInEditor", "EditorTextEnd"},
      { "meta alt I", "Move", "RMarkdownNewChunk"},
      { "ctrl ENTER", "EditorSplitLine", "NotebookRunCellAction", "PyExecuteCellAction"},
      { "meta CLOSE_BRACKET", "Forward", "NextDiff"},
      { "meta OPEN_BRACKET", "Back", "PreviousDiff"},
      { "meta alt E", "Console.History.Browse", "GotoNextElementUnderCaretUsage", "PerforceDirect.Edit"},
      { "ctrl meta M", "ChangesView.Move", "Vcs.MoveChangedLinesToChangelist"},
      { "meta alt DOWN", "Console.TableResult.NextPage", "JupyterSelectCellBelowAction", "NextOccurence"},
      { "meta alt UP", "Console.TableResult.PreviousPage", "JupyterSelectCellAboveAction", "PreviousOccurence"},
      { "ctrl alt DOWN", "MethodDown", "ResizeToolWindowDown"},
      { "ctrl alt UP", "MethodUp", "ResizeToolWindowUp"},
    });
  }};
  // @formatter:on
  // NOTE(review): no matching "// @formatter:off" is visible in this chunk — it may precede the
  // table in the original file layout; confirm the pair is balanced.

  // Action ids that may appear in keymaps but are unknown to the test fixture (e.g. contributed by
  // other IDEs/plugins). The trailing empty-string entry is preserved as-is — confirm it is intended.
  @NonNls protected static final Set<String> DEFAULT_UNKNOWN_ACTION_IDS = Set.of(
    "ActivateCommitToolWindow", "ActivateVersionControlToolWindow", "ActivateFavoritesToolWindow", "ActivateCommanderToolWindow",
    "ActivateServicesToolWindow", "ActivateMessagesToolWindow", "ActivateProjectToolWindow", "ActivateStructureToolWindow",
    "ActivateProblemsViewToolWindow", "ActivateTODOToolWindow", "ActivateWebToolWindow", "ActivatePaletteToolWindow",
    "ActivateTerminalToolWindow",
    "IDEtalk.SearchUserHistory", "IDEtalk.Rename",
    "CIDR.Lang.Cpp.GenerateDefinitions",
    "ActivateErrorsInSolutionToolWindow", "ActivateUnitTestsToolWindow", "ActivateDatabaseToolWindow", "ActivateBuildToolWindow",
    "ActivateNuGetToolWindow", "RiderBackendAction-EncapsulateField", "SwitchHeaderSource", "BuildProject", "RebuildProject",
    "BuildSolutionAction", "RebuildSolutionAction", "ActivateInspectionResultsToolWindow",
    "GoFmtProjectAction", "GoGenerateFileAction", "GoShareInPlaygroundAction",
    ""
  );

  // Component-bound action ids (Swing List/Tree bindings) that are expected to exist.
  @NonNls protected static final Set<String> DEFAULT_BOUND_ACTIONS = Set.of(
    "EditorDelete",
    "List-selectLastRowExtendSelection", "List-selectLastRow", "List-selectFirstRowExtendSelection", "List-selectFirstRow",
    "Tree-selectLastExtendSelection", "Tree-selectLast", "Tree-selectFirstExtendSelection", "Tree-selectFirst"
  );

  // Action groups whose members may legitimately share shortcuts (returned by getConflictSafeGroups()).
  private static final List<String> DEFAULT_CONFLICT_SAFE_GROUPS = List.of(
    "Log.KeymapGroup",
    "UsageGroupingActionGroup",
    "UsageFilteringActionGroup",
    "SearchEverywhereActions",
    "Images.ThumbnailViewActions",
    "Images.ImageViewActions",
    "ArrangementRulesGroup",
    "ServiceViewTreeToolbar",
    "TodoViewGroupByGroup",
    "ChangesView.GroupBy"
  );

  /** Returns the fixed list of action groups that are exempt from shortcut-conflict reporting. */
  @Override
  protected Collection<String> getConflictSafeGroups() {
    return DEFAULT_CONFLICT_SAFE_GROUPS;
  }

  /**
   * Maps an action id unknown to the base fixture onto the conflict-safe group it should be treated
   * as belonging to, or returns null when no special-case mapping applies.
   */
  @Override
  protected String getGroupForUnknownAction(@NotNull String actionId) {
    if (actionId.equals("ChangesView.GroupBy.Module")) {
      return "ChangesView.GroupBy";
    }
    return null;
  }
}
import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.GraphicsEnvironment; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Image; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import javax.imageio.ImageIO; import javax.swing.BorderFactory; import javax.swing.DefaultListModel; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.ListSelectionModel; public class Driver implements ActionListener { JFrame boardFrame = new JFrame("Monopoly"); JFrame p1Frame = new JFrame("Player 1"); JFrame p2Frame = new JFrame("Player2"); JPanel p1Panel = new JPanel(); JPanel p2Panel = new JPanel(); BoardPanel boardPanel; DefaultListModel[] dList = new DefaultListModel[40]; JList[] myList = new JList[40]; JButton[] squares = new JButton[40]; GridBagConstraints c = new GridBagConstraints(); FlowLayout Layout = new FlowLayout(); JLabel p1Money = new JLabel("$1500"); JLabel p2Money = new JLabel("$1500"); JLabel p1Name = new JLabel(" Player 1 "); JLabel p2Name = new JLabel(" Player 2 "); JButton p1BuyHouse = new JButton(" Buy House/Hotel "); JButton p1BuyStreet = new JButton(" Buy Street "); JButton roll = new JButton("Roll"); JButton p2BuyHouse = new JButton(" Buy House/Hotel "); JButton p2BuyStreet = new JButton(" Buy Street "); JButton p1EndTurn = new JButton(" End Turn "); JButton p2EndTurn = new JButton(" End Turn "); JFrame p1Properties = new JFrame("P1 Properties"); JPanel p1PropertiesPanel = new JPanel(); JLabel p1PropLabel = new JLabel(" Player 1 Properties: "); ArrayList<JButton> p1Prop = new ArrayList(); MonopolyGame myGame = new MonopolyGame(); JFrame p2Properties = new JFrame("P2 Properties"); JPanel p2PropertiesPanel = new JPanel(); 
JLabel p2PropLabel = new JLabel(" Player 2 Properties: "); ArrayList<JButton> p2Prop = new ArrayList(); boolean canRoll = true; int turn = 0; int numP1 = 0; int numP2 = 0; public static void main(String[] args) { new Driver(); } public Driver(){ BufferedImage img = null; try { img = ImageIO.read(new File("monopoly.jpg")); } catch (IOException e) { e.printStackTrace(); } boardPanel = new BoardPanel(img); boardPanel.setLayout(new GridBagLayout()); c.gridx = 10; c.gridy = 10; c.ipady = 15; c.ipadx = 70; dList[0] = new DefaultListModel(); dList[0].addElement("p1"); dList[0].addElement("p2"); myList[0] = new JList(dList[0]); //myList[i].setPreferredSize(new Dimension(30, 45)); myList[0].setLayoutOrientation(JList.VERTICAL); myList[0].setSelectionMode(ListSelectionModel.SINGLE_INTERVAL_SELECTION); myList[0].setOpaque(false); myList[0].setBorder(BorderFactory.createLineBorder(Color.black)); for(int i = 1; i < 40;i++) { squares[i] = new JButton(""); squares[i].setPreferredSize(new Dimension(70, 60)); squares[i].setOpaque(false); squares[i].setContentAreaFilled(false); dList[i] = new DefaultListModel(); dList[i].addElement(" "); dList[i].addElement(" "); myList[i] = new JList(dList[i]); //myList[i].setPreferredSize(new Dimension(30, 45)); myList[i].setLayoutOrientation(JList.VERTICAL); myList[i].setOpaque(false); myList[i].setBorder(BorderFactory.createLineBorder(Color.black)); } //boardPanel.add(squares[0],c); boardPanel.add(myList[0],c); c.gridx = 9; c.gridy = 10; c.ipadx = 54; for(int i = 1; i <11; i++) { //boardPanel.add(squares[i],c); boardPanel.add(myList[i],c); c.gridx--; if(c.gridx == 0) { c.ipadx = 70; } } c.ipadx = 70; c.gridx = 0; c.gridy = 9; //boardPanel.add(squares[11],c); boardPanel.add(myList[11],c); c.gridx = 0; c.gridy = 8; //boardPanel.add(squares[12],c); boardPanel.add(myList[12],c); c.gridx = 0; c.gridy = 7; //boardPanel.add(squares[13],c); boardPanel.add(myList[13],c); c.gridx = 0; c.gridy = 6; //boardPanel.add(squares[14],c); 
boardPanel.add(myList[14],c); c.gridx = 0; c.gridy = 5; //boardPanel.add(squares[15],c); boardPanel.add(myList[15],c); c.gridx = 0; c.gridy = 4; //boardPanel.add(squares[16],c); boardPanel.add(myList[16],c); c.gridx = 0; c.gridy = 3; //boardPanel.add(squares[17],c); boardPanel.add(myList[17],c); c.gridx = 0; c.gridy = 2; //boardPanel.add(squares[18],c); boardPanel.add(myList[18],c); c.gridx = 0; c.gridy = 1; //boardPanel.add(squares[19],c); boardPanel.add(myList[19],c); c.gridx = 0; c.gridy = 0; for(int i = 20; i <30; i++) { boardPanel.add(myList[i],c); c.gridx++; if(c.gridx == 10) { c.ipadx =70; } else { c.ipadx = 54; } } //boardPanel.add(squares[30],c); boardPanel.add(myList[30],c); c.gridx = 10; c.gridy = 1; //boardPanel.add(squares[31],c); boardPanel.add(myList[31],c); c.gridx = 10; c.gridy = 2; //boardPanel.add(squares[32],c); boardPanel.add(myList[32],c); c.gridx = 10; c.gridy = 3; //boardPanel.add(squares[33],c); boardPanel.add(myList[33],c); c.gridx = 10; c.gridy = 4; //boardPanel.add(squares[34],c); boardPanel.add(myList[34],c); c.gridx = 10; c.gridy = 5; //boardPanel.add(squares[35],c); boardPanel.add(myList[35],c); c.gridx = 10; c.gridy = 6; //boardPanel.add(squares[36],c); boardPanel.add(myList[36],c); c.gridx = 10; c.gridy = 7; //boardPanel.add(squares[37],c); boardPanel.add(myList[37],c); c.gridx = 10; c.gridy = 8; //boardPanel.add(squares[38],c); boardPanel.add(myList[38],c); c.gridx = 10; c.gridy = 9; //boardPanel.add(squares[39],c); boardPanel.add(myList[39],c); c.gridx = 4; c.gridy = 5; c.gridwidth = 3; c.ipady = 25; int x = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getX()-650); int y = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getY()-310); roll.addActionListener(this); p1BuyHouse.addActionListener(this); p1BuyStreet.addActionListener(this); p2BuyHouse.addActionListener(this); p2BuyStreet.addActionListener(this); p1EndTurn.addActionListener(this); p2EndTurn.addActionListener(this); 
boardPanel.add(roll, c); boardFrame.add(boardPanel); p1Panel.add(p1Name); p1Panel.add(p1Money); p1Panel.add(p1BuyHouse); p1Panel.add(p1BuyStreet); p1Panel.add(p1EndTurn); p1Frame.add(p1Panel); p1Frame.setSize(250, 200); p1Frame.setLocation(x,y); p1Frame.setVisible(true); p1Frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); p1Frame.setResizable(false); p1PropertiesPanel.add(p1PropLabel); p1Properties.add(p1PropertiesPanel); x = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getX()-650); y = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getY()-100); p1Properties.setSize(250, 400); p1Properties.setLocation(x,y); p1Properties.setVisible(true); p1Properties.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); p1Properties.setResizable(false); p2PropertiesPanel.add(p2PropLabel); p2Properties.add(p2PropertiesPanel); p2Properties.setSize(250, 400); x = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getX()+400); y = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getY()-100); p2Properties.setLocation(x,y); p2Properties.setVisible(true); p2Properties.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); p2Properties.setResizable(false); p2Panel.add(p2Name); p2Panel.add(p2Money); p2Panel.add(p2BuyHouse); p2Panel.add(p2BuyStreet); p2Panel.add(p2EndTurn); x = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getX()+400); y = (int) (GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint().getY()-310); p2Frame.add(p2Panel); p2Frame.setSize(250, 200); p2Frame.setLocation(x,y); p2Frame.setVisible(true); p2Frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); p2Frame.setResizable(false); boardFrame.setSize(770,660); boardFrame.setLocationRelativeTo(null); boardFrame.setVisible(true); boardFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); boardFrame.setResizable(true); } public void actionPerformed(ActionEvent e) { if(e.getSource() ==roll ) { 
if(canRoll) { myGame.playRound(); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } if(myGame.getTurn() % 2 ==0 ) { if((myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation())).getOwner() == myGame.getPlayer(1) && ((myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation())).getOwner() != null)) { myGame.getPlayer(0).pay(((Street)myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation())).getRent()); myGame.getPlayer(1).addMoney(((Street)myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation())).getRent()); } } else if(myGame.getTurn() % 2 ==1 ) { if((myGame.getBoard().getSquare(myGame.getPlayer(1).getLocation())).getOwner() == myGame.getPlayer(0) && ((myGame.getBoard().getSquare(myGame.getPlayer(1).getLocation())).getOwner() != null)) { myGame.getPlayer(1).pay(((Street)myGame.getBoard().getSquare(myGame.getPlayer(1).getLocation())).getRent()); myGame.getPlayer(0).addMoney(((Street)myGame.getBoard().getSquare(myGame.getPlayer(1).getLocation())).getRent()); } } p1Money.setText("$" + myGame.getPlayer(0).getMoney()); p2Money.setText("$" + myGame.getPlayer(1).getMoney()); } } if(e.getSource() == p1EndTurn) { if(myGame.getTurn() %2 == 0) { myGame.endTurn(); canRoll = true; } if(((Jail)(myGame.getBoard().getJail())).getPrisoner() == myGame.getPlayer(1)) { if(myGame.getPlayer(0).getLocation() != 10) { dList[10].setElementAt("p2", 1); myList[10].setModel(dList[10]); } else { dList[10].setElementAt("p1", 0); dList[10].setElementAt("p2", 1); myList[10].setModel(dList[10]); } Object[] options = {"Roll", "Use get out card", "pay $50"}; int reply = JOptionPane.showOptionDialog(null,"You are in Jail. 
Pick a way to get out.","Jail",JOptionPane.YES_NO_CANCEL_OPTION,JOptionPane.QUESTION_MESSAGE, null, options, options[2]); if(reply == JOptionPane.YES_OPTION) { int roll = ((Jail)myGame.getBoard().getJail()).roll(); myGame.getPlayer(1).move(roll); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } } else if(reply == JOptionPane.NO_OPTION) { myGame.getPlayer(1).takeTurn(); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } } else if(reply == JOptionPane.CANCEL_OPTION) { myGame.getPlayer(1).pay(50); p2Money.setText("$"+myGame.getPlayer(1).getMoney()); myGame.playRound(); ((Jail)myGame.getBoard().getJail()).letOut(); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } } } } if(e.getSource() == p2EndTurn) { if(myGame.getTurn() %2 == 1) { myGame.endTurn(); canRoll = true; } if(((Jail)(myGame.getBoard().getJail())).getPrisoner() == myGame.getPlayer(0)) { if(myGame.getPlayer(1).getLocation() != 10) { dList[10].setElementAt("p1", 0); 
myList[10].setModel(dList[10]); } else { dList[10].setElementAt("p1", 0); dList[10].setElementAt("p2", 1); myList[10].setModel(dList[10]); } Object[] options = {"Roll", "Use get out card", "pay $50"}; int reply1 = JOptionPane.showOptionDialog(null,"You are in Jail. Pick a way to get out.","Jail",JOptionPane.YES_NO_CANCEL_OPTION,JOptionPane.QUESTION_MESSAGE, null, options, options[2]); if(reply1 == JOptionPane.YES_OPTION) { int roll1 = ((Jail)myGame.getBoard().getJail()).roll(); myGame.getPlayer(0).move(roll1); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } } else if(reply1 == JOptionPane.NO_OPTION) { myGame.getPlayer(0).takeTurn(); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } } else if(reply1 == JOptionPane.CANCEL_OPTION) { myGame.getPlayer(0).pay(50); p1Money.setText("$"+myGame.getPlayer(0).getMoney()); myGame.playRound(); ((Jail)myGame.getBoard().getJail()).letOut(); for(int i =0; i<40;i++) { if(myGame.getPlayer(0).getLocation() == i) { dList[i].setElementAt("p1", 0); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 0); myList[i].setModel(dList[i]); } if(myGame.getPlayer(1).getLocation() == i) { dList[i].setElementAt("p2", 1); myList[i].setModel(dList[i]); } else { dList[i].setElementAt(" ", 1); myList[i].setModel(dList[i]); } canRoll = false; } 
} } } if(e.getSource() == p1BuyStreet) { if(myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation()).getOwner() == null) { myGame.playerBuyLocation(myGame.getPlayer(0).getLocation()); p1Money.setText("$"+myGame.getPlayer(0).getMoney()); p1Prop.add(new JButton(" "+myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation()).getName()+" ")); p1Prop.get(numP1).addActionListener(this); p1PropertiesPanel.add(p1Prop.get(numP1)); myList[myGame.getPlayer(0).getLocation()].setBackground(Color.red); p1Properties.revalidate(); p1Properties.repaint(); numP1++; } } if(e.getSource() == p2BuyStreet) { if(myGame.getBoard().getSquare(myGame.getPlayer(0).getLocation()).getOwner() == null) { myGame.playerBuyLocation(myGame.getPlayer(1).getLocation()); p2Money.setText("$"+myGame.getPlayer(1).getMoney()); p2Prop.add(new JButton(" "+myGame.getBoard().getSquare(myGame.getPlayer(1).getLocation()).getName()+" ")); p2Prop.get(numP2).addActionListener(this); p2PropertiesPanel.add(p2Prop.get(numP2)); myList[myGame.getPlayer(1).getLocation()].setBackground(Color.blue); p2Properties.revalidate(); p2Properties.repaint(); numP2++; } } for(int i = 0; i < p1Prop.size(); i++) { if(e.getSource() == p1Prop.get(i)) { int reply2 = JOptionPane.showConfirmDialog(null,"Number of Houses: "+((Street) (myGame.getPlayer(0).getProperty(i))).getHouses()+ "\n Would you like to buy another house?","Houses",JOptionPane.YES_NO_OPTION); if(reply2 == JOptionPane.YES_OPTION) { myGame.getPlayer(0).buyHouse(i); p1Money.setText("$" + myGame.getPlayer(0).getMoney()); } } } for(int i = 0; i < p2Prop.size(); i++) { if(e.getSource() == p2Prop.get(i)) { int reply = JOptionPane.showConfirmDialog(null,"Number of Houses: "+((Street) (myGame.getPlayer(1).getProperty(i))).getHouses()+ "\n Would you like to buy another house?","Houses",JOptionPane.YES_NO_OPTION); if(reply == JOptionPane.YES_OPTION) { myGame.getPlayer(1).buyHouse(i); p2Money.setText("$" + myGame.getPlayer(1).getMoney()); } } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.functions.casting;

import org.apache.flink.table.planner.codegen.CodeGenUtils;
import org.apache.flink.table.types.logical.DistinctType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.utils.EncodingUtils;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import static org.apache.flink.table.planner.codegen.CodeGenUtils.className;
import static org.apache.flink.table.planner.codegen.CodeGenUtils.newName;
import static org.apache.flink.table.planner.codegen.CodeGenUtils.primitiveDefaultValue;
import static org.apache.flink.table.planner.codegen.CodeGenUtils.primitiveTypeTermForType;

/** This class contains a set of utilities to develop {@link CastRule}. */
final class CastRuleUtils {

    /** Java string literal {@code "null"} (quoted, for emitted code). */
    static final String NULL_STR_LITERAL = strLiteral("null");

    /** Java empty string literal (quoted, for emitted code). */
    static final String EMPTY_STR_LITERAL = "\"\"";

    /** Static utilities only; not instantiable. */
    private CastRuleUtils() {}

    /** Returns code for a static call {@code Clazz.methodName(args...)}. */
    static String staticCall(Class<?> clazz, String methodName, Object... args) {
        return methodCall(className(clazz), methodName, args);
    }

    /** Returns code for a static call to the given reflective method. */
    static String staticCall(Method staticMethod, Object... args) {
        return functionCall(CodeGenUtils.qualifyMethod(staticMethod), args);
    }

    /** Returns code for a constructor invocation {@code new Clazz(args...)}. */
    static String constructorCall(Class<?> clazz, Object... args) {
        return functionCall("new " + className(clazz), args);
    }

    /** Returns code for an instance call {@code instanceTerm.methodName(args...)}. */
    static String methodCall(String instanceTerm, String methodName, Object... args) {
        return functionCall(instanceTerm + "." + methodName, args);
    }

    private static String functionCall(String functionName, Object... args) {
        return functionName
                + "("
                + Arrays.stream(args).map(Object::toString).collect(Collectors.joining(", "))
                + ")"
                ;
    }

    /** Returns code for an array allocation {@code new innerType[arraySize]}. */
    static String newArray(String innerType, String arraySize) {
        return "new " + innerType + "[" + arraySize + "]";
    }

    /** Returns code concatenating the arguments with {@code +}. */
    static String stringConcat(Object... args) {
        return Arrays.stream(args).map(Object::toString).collect(Collectors.joining(" + "));
    }

    /** Returns code for a static field access {@code Clazz.fieldName}. */
    static String accessStaticField(Class<?> clazz, String fieldName) {
        return className(clazz) + "." + fieldName;
    }

    /** Returns code for a fully-parenthesized ternary expression. */
    static String ternaryOperator(String condition, String ifTrue, String ifFalse) {
        return "((" + condition + ") ? (" + ifTrue + ") : (" + ifFalse + "))";
    }

    /** Returns a quoted Java string literal with {@code str} escaped. */
    static String strLiteral(String str) {
        return "\"" + EncodingUtils.escapeJava(str) + "\"";
    }

    /** Returns code casting {@code expression} to {@code target}. */
    static String cast(String target, String expression) {
        return "((" + target + ")(" + expression + "))";
    }

    /** Returns code casting {@code expression} to the primitive term of {@code target}. */
    static String castToPrimitive(LogicalType target, String expression) {
        return cast(primitiveTypeTermForType(target), expression);
    }

    /**
     * Returns code extracting the primitive value from a boxed {@code term} of the
     * given logical type (e.g. {@code term.intValue()}). Types without a boxed
     * representation are returned unchanged; distinct types recurse on their source
     * type.
     */
    static String unbox(String term, LogicalType type) {
        switch (type.getTypeRoot()) {
            case BOOLEAN:
                return methodCall(term, "booleanValue");
            case TINYINT:
                return methodCall(term, "byteValue");
            case SMALLINT:
                return methodCall(term, "shortValue");
            case INTEGER:
            case DATE:
            case TIME_WITHOUT_TIME_ZONE:
            case INTERVAL_YEAR_MONTH:
                return methodCall(term, "intValue");
            case BIGINT:
            case INTERVAL_DAY_TIME:
                return methodCall(term, "longValue");
            case FLOAT:
                return methodCall(term, "floatValue");
            case DOUBLE:
                return methodCall(term, "doubleValue");
            case DISTINCT_TYPE:
                return unbox(term, ((DistinctType) type).getSourceType());
        }
        return term;
    }

    /**
     * Returns code boxing a primitive {@code term} of the given logical type
     * (e.g. {@code Integer.valueOf(term)}). Types without a boxed representation
     * are returned unchanged; distinct types recurse on their source type.
     */
    static String box(String term, LogicalType type) {
        switch (type.getTypeRoot()) {
                // ordered by type root definition
            case BOOLEAN:
                return staticCall(Boolean.class, "valueOf", term);
            case TINYINT:
                return staticCall(Byte.class, "valueOf", term);
            case SMALLINT:
                return staticCall(Short.class, "valueOf", term);
            case INTEGER:
            case DATE:
            case TIME_WITHOUT_TIME_ZONE:
            case INTERVAL_YEAR_MONTH:
                return staticCall(Integer.class, "valueOf", term);
            case BIGINT:
            case INTERVAL_DAY_TIME:
                return staticCall(Long.class, "valueOf", term);
            case FLOAT:
                return staticCall(Float.class, "valueOf", term);
            case DOUBLE:
                return staticCall(Double.class, "valueOf", term);
            case DISTINCT_TYPE:
                // BUG FIX: the recursive result was previously discarded (missing
                // "return"), so distinct-typed terms fell through unboxed.
                return box(term, ((DistinctType) type).getSourceType());
        }
        return term;
    }

    /**
     * Small fluent builder for emitting Java statement blocks (declarations,
     * assignments, {@code for}/{@code if} constructs) during cast code generation.
     */
    static final class CodeWriter {
        StringBuilder builder = new StringBuilder();

        /** Emits {@code varType varName = value;}. */
        public CodeWriter declStmt(String varType, String varName, String value) {
            return stmt(varType + " " + varName + " = " + value);
        }

        /** Emits a declaration typed with the class name of {@code clazz}. */
        public CodeWriter declStmt(Class<?> clazz, String varName, String value) {
            return declStmt(className(clazz), varName, value);
        }

        /** Emits a declaration typed with the primitive term of {@code logicalType}. */
        public CodeWriter declPrimitiveStmt(LogicalType logicalType, String varName, String value) {
            return declStmt(primitiveTypeTermForType(logicalType), varName, value);
        }

        /** Emits a primitive declaration initialized with the type's default value. */
        public CodeWriter declPrimitiveStmt(LogicalType logicalType, String varName) {
            return declStmt(
                    primitiveTypeTermForType(logicalType),
                    varName,
                    primitiveDefaultValue(logicalType));
        }

        /** Emits an uninitialized declaration {@code varType varName;}. */
        public CodeWriter declStmt(String varType, String varName) {
            return stmt(varType + " " + varName);
        }

        /** Emits an uninitialized declaration typed with the class name of {@code clazz}. */
        public CodeWriter declStmt(Class<?> clazz, String varName) {
            return declStmt(className(clazz), varName);
        }

        /** Emits {@code varName = value;}. */
        public CodeWriter assignStmt(String varName, String value) {
            return stmt(varName + " = " + value);
        }

        /** Emits {@code varName[index] = value;}. */
        public CodeWriter assignArrayStmt(String varName, String index, String value) {
            return stmt(varName + "[" + index + "] = " + value);
        }

        /** Emits the statement followed by {@code ;} and a newline. */
        public CodeWriter stmt(String stmt) {
            builder.append(stmt).append(';').append('\n');
            return this;
        }

        /**
         * Emits {@code for (int i = 0; i < upperBound; i++) { ... }} with a freshly
         * named index variable; the consumer writes the body given that index term.
         */
        public CodeWriter forStmt(
                String upperBound, BiConsumer<String, CodeWriter> bodyWriterConsumer) {
            final String indexTerm = newName("i");
            final CodeWriter innerWriter = new CodeWriter();
            builder.append("for (int ")
                    .append(indexTerm)
                    .append(" = 0; ")
                    .append(indexTerm)
                    .append(" < ")
                    .append(upperBound)
                    .append("; ")
                    .append(indexTerm)
                    .append("++) {\n");
            bodyWriterConsumer.accept(indexTerm, innerWriter);
            builder.append(innerWriter).append("}\n");
            return this;
        }

        /** Emits {@code if (condition) { ... }}. */
        public CodeWriter ifStmt(String condition, Consumer<CodeWriter> bodyWriterConsumer) {
            final CodeWriter innerWriter = new CodeWriter();
            builder.append("if (").append(condition).append(") {\n");
            bodyWriterConsumer.accept(innerWriter);
            builder.append(innerWriter).append("}\n");
            return this;
        }

        /** Emits {@code if (condition) { ... } else { ... }}. */
        public CodeWriter ifStmt(
                String condition,
                Consumer<CodeWriter> thenWriterConsumer,
                Consumer<CodeWriter> elseWriterConsumer) {
            final CodeWriter thenWriter = new CodeWriter();
            final CodeWriter elseWriter = new CodeWriter();
            builder.append("if (").append(condition).append(") {\n");
            thenWriterConsumer.accept(thenWriter);
            builder.append(thenWriter).append("} else {\n");
            elseWriterConsumer.accept(elseWriter);
            builder.append(elseWriter).append("}\n");
            return this;
        }

        /** Appends the code of a previously generated {@link CastCodeBlock}. */
        public CodeWriter append(CastCodeBlock codeBlock) {
            builder.append(codeBlock.getCode());
            return this;
        }

        /** Appends a raw code fragment verbatim. */
        public CodeWriter appendBlock(String codeBlock) {
            builder.append(codeBlock);
            return this;
        }

        @Override
        public String toString() {
            return builder.toString();
        }
    }
}