gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/**
 * Copyright 2014 NAVER Corp.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.profiler.interceptor.bci;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod;
import com.navercorp.pinpoint.bootstrap.interceptor.InterceptorExceptionHandler;
import com.navercorp.pinpoint.bootstrap.interceptor.InterceptorRegistry;
import com.navercorp.pinpoint.bootstrap.interceptor.SimpleAroundInterceptor;
import com.navercorp.pinpoint.bootstrap.interceptor.StaticAroundInterceptor;
import com.navercorp.pinpoint.profiler.util.JavaAssistUtils;

/**
 * Generates the Javassist source snippet that is woven into a target method to
 * invoke an interceptor's {@code before}/{@code after} callback.
 *
 * <p>The generated code has the shape:
 * <pre>
 * try {
 *     ((TYPE) InterceptorRegistry.get...Interceptor(ID)).METHOD(ARGS);
 * } catch (Throwable t) {
 *     InterceptorExceptionHandler.handleException(t);
 * }
 * </pre>
 * Javassist {@code $}-variables ({@code $args}, {@code $_}, {@code $e},
 * {@code $w}, {@code $1..$n}) are used to reference the intercepted method's
 * arguments, return value and thrown exception.
 *
 * @author Jongho Moon
 */
public class InterceptorInvokeCodeGenerator {
    private final int interceptorId;
    private final Class<?> interceptorClass;
    private final Method interceptorMethod;
    private final InstrumentClass targetClass;
    private final InstrumentMethod targetMethod;
    // true when the snippet is injected into a catch block; $e is then in scope
    // and must be rethrown after the interceptor runs.
    private final boolean inCatch;
    private final Type type;

    /** How the interceptor instance is looked up and which argument protocol it uses. */
    private enum Type {
        SIMPLE, STATIC, CUSTOM
    }

    /**
     * @param interceptorId     registry id used to look the interceptor up at runtime
     * @param interceptorClass  concrete interceptor type; decides SIMPLE/STATIC/CUSTOM dispatch
     * @param interceptorMethod the callback being woven ("before" or "after")
     * @param targetClass       class containing the instrumented method
     * @param targetMethod      the instrumented method (or constructor)
     * @param inCatch           whether the code is emitted inside a catch handler
     */
    public InterceptorInvokeCodeGenerator(int interceptorId, Class<?> interceptorClass, Method interceptorMethod,
            InstrumentClass targetClass, InstrumentMethod targetMethod, boolean inCatch) {
        this.interceptorId = interceptorId;
        this.interceptorClass = interceptorClass;
        this.interceptorMethod = interceptorMethod;
        this.targetClass = targetClass;
        this.targetMethod = targetMethod;
        this.inCatch = inCatch;

        if (SimpleAroundInterceptor.class.isAssignableFrom(interceptorClass)) {
            type = Type.SIMPLE;
        } else if (StaticAroundInterceptor.class.isAssignableFrom(interceptorClass)) {
            type = Type.STATIC;
        } else {
            type = Type.CUSTOM;
        }
    }

    /**
     * Builds the complete try/catch invocation snippet.
     *
     * @return Javassist-compatible source text
     */
    public String generate() {
        CodeBuilder builder = new CodeBuilder();

        builder.begin();

        // try {
        //     (($INTERCEPTOR_TYPE)InterceptorRegistry.findInterceptor($INTERCEPTOR_ID)).$INTERCEPTOR_METHOD_NAME($ARGUMENTS);
        // } catch (Throwable t) {
        //     InterceptorExceptionHandler.handleException(t);
        // }
        builder.append("try { ");
        appendInterceptorRetrieval(builder);
        builder.format(".%1$s(", interceptorMethod.getName());
        appendArguments(builder);
        builder.format("); } catch (java.lang.Throwable _$PINPOINT_EXCEPTION$_) { %1$s.handleException(_$PINPOINT_EXCEPTION$_); }",
                InterceptorExceptionHandler.class.getName());

        if (inCatch) {
            // preserve the original exception flow of the instrumented catch block
            builder.append(" throw $e;");
        }

        builder.end();

        return builder.toString();
    }

    /** Emits the expression that fetches (and, for CUSTOM, casts) the interceptor instance. */
    private void appendInterceptorRetrieval(CodeBuilder builder) {
        switch (type) {
        case SIMPLE:
            builder.format("%1$s.getSimpleInterceptor(%2$d)", InterceptorRegistry.class.getName(), interceptorId);
            break;
        case STATIC:
            builder.format("%1$s.getStaticInterceptor(%2$d)", InterceptorRegistry.class.getName(), interceptorId);
            break;
        case CUSTOM:
            // BUGFIX: the id is the 3rd format argument, so it must be %3$d.
            // The previous "%2$d" targeted the registry class name (a String) and
            // threw IllegalFormatConversionException at generation time.
            builder.format("((%1$s)%2$s.getStaticInterceptor(%3$d))",
                    interceptorClass.getName(), InterceptorRegistry.class.getName(), interceptorId);
            break;
        }
    }

    /** @return human-readable parameter list of the target method, e.g. "(int, java.lang.String)" */
    private String getParameterTypes() {
        String[] parameterTypes = targetMethod.getParameterTypes();
        return JavaAssistUtils.getParameterDescription(parameterTypes);
    }

    /** @return "this" for instance methods, "null" for static ones */
    private String getTarget() {
        return Modifier.isStatic(targetMethod.getModifiers()) ? "null" : "this";
    }

    /**
     * @return expression for the intercepted method's (boxed) return value; "null"
     *         inside a catch block, for constructors, and for void methods
     */
    private String getReturnValue() {
        if (inCatch) {
            return "null";
        }

        if (!targetMethod.isConstructor()) {
            if ("void".equals(targetMethod.getReturnType())) {
                return "null";
            }
        }

        // $w boxes a primitive $_ result
        return "($w)$_";
    }

    /** @return "$args" (the argument array) or "null" when the target takes no arguments */
    private String getArguments() {
        if (targetMethod.getParameterTypes().length == 0) {
            return "null";
        }

        return "$args";
    }

    /** @return "$e" inside a catch handler, otherwise "null" */
    private String getException() {
        if (inCatch) {
            return "$e";
        }

        return "null";
    }

    /** Dispatches to the argument-list builder matching callback name and interceptor type. */
    private void appendArguments(CodeBuilder builder) {
        boolean after = "after".equals(interceptorMethod.getName());

        if (after) {
            switch (type) {
            case SIMPLE:
                appendSimpleAfterArguments(builder);
                break;
            case STATIC:
                appendStaticAfterArguments(builder);
                break;
            case CUSTOM:
                appendCustomAfterArguments(builder);
                break;
            }
        } else {
            switch (type) {
            case SIMPLE:
                appendSimpleBeforeArguments(builder);
                break;
            case STATIC:
                appendStaticBeforeArguments(builder);
                break;
            case CUSTOM:
                appendCustomBeforeArguments(builder);
                break;
            }
        }
    }

    // SimpleAroundInterceptor.after(target, args, result, throwable)
    private void appendSimpleAfterArguments(CodeBuilder builder) {
        builder.format("%1$s, %2$s, %3$s, %4$s", getTarget(), getArguments(), getReturnValue(), getException());
    }

    // SimpleAroundInterceptor.before(target, args)
    private void appendSimpleBeforeArguments(CodeBuilder builder) {
        builder.format("%1$s, %2$s", getTarget(), getArguments());
    }

    // StaticAroundInterceptor.before(target, className, methodName, parameterDescription, args)
    private void appendStaticBeforeArguments(CodeBuilder builder) {
        builder.format("%1$s, \"%2$s\", \"%3$s\", \"%4$s\", %5$s",
                getTarget(), targetClass.getName(), targetMethod.getName(), getParameterTypes(), getArguments());
    }

    // StaticAroundInterceptor.after(target, className, methodName, parameterDescription, args, result, throwable)
    private void appendStaticAfterArguments(CodeBuilder builder) {
        builder.format("%1$s, \"%2$s\", \"%3$s\", \"%4$s\", %5$s, %6$s, %7$s",
                getTarget(), targetClass.getName(), targetMethod.getName(), getParameterTypes(), getArguments(),
                getReturnValue(), getException());
    }

    /**
     * Custom "before": first parameter is the target, remaining parameters are mapped
     * positionally to the intercepted method's arguments ($1..$n); unmatched interceptor
     * parameters are padded with null.
     */
    private void appendCustomBeforeArguments(CodeBuilder builder) {
        Class<?>[] paramTypes = interceptorMethod.getParameterTypes();

        if (paramTypes.length == 0) {
            return;
        }

        builder.append(getTarget());

        int i = 0;
        int argNum = targetMethod.getParameterTypes().length;
        int interceptorArgNum = paramTypes.length - 1;
        int matchNum = Math.min(argNum, interceptorArgNum);

        for (; i < matchNum; i++) {
            builder.append(", $" + (i + 1));
        }

        for (; i < interceptorArgNum; i++) {
            builder.append(", null");
        }
    }

    /**
     * Custom "after": target [, result [, throwable]] followed by positionally mapped
     * method arguments, padded with null like the "before" case.
     */
    private void appendCustomAfterArguments(CodeBuilder builder) {
        Class<?>[] paramTypes = interceptorMethod.getParameterTypes();

        if (paramTypes.length == 0) {
            return;
        }

        builder.append(getTarget());

        if (paramTypes.length >= 2) {
            builder.append(", ");
            builder.append(getReturnValue());
        }

        if (paramTypes.length >= 3) {
            builder.append(", ");
            builder.append(getException());
        }

        int i = 0;
        int argNum = targetMethod.getParameterTypes().length;
        int interceptorArgNum = paramTypes.length - 3;
        int matchNum = Math.min(argNum, interceptorArgNum);

        for (; i < matchNum; i++) {
            builder.append(", $" + (i + 1));
        }

        for (; i < interceptorArgNum; i++) {
            builder.append(", null");
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.samoa.moa.classifiers.core.driftdetection;

import org.apache.samoa.moa.AbstractMOAObject;

/**
 * ADaptive sliding WINdow method. This method is a change detector and estimator. It keeps a variable-length window of
 * recently seen items, with the property that the window has the maximal length statistically consistent with the
 * hypothesis "there has been no change in the average value inside the window".
 *
 * <p>Implementation notes: the window is stored as exponential histogram "buckets" — a doubly-linked
 * list of rows, where row i holds up to MAXBUCKETS+1 buckets that each summarize 2^i items by their
 * sum ({@code Total}) and variance. Insertion happens at row 0 (list head); overflowing rows are
 * merged pairwise into the next row. Deletion removes the oldest bucket from the last row (tail).
 *
 * @author Albert Bifet (abifet at cs dot waikato dot ac dot nz)
 * @version $Revision: 7 $
 */
public class ADWIN extends AbstractMOAObject {

  /** Doubly-linked list of bucket rows; head = row 0 (newest), tail = largest row (oldest data). */
  private class List extends AbstractMOAObject {

    protected int count;
    protected ListItem head;
    protected ListItem tail;

    public List() {
      // post: initializes the list to be empty, then adds row 0.
      clear();
      addToHead();
    }

    /* Interface Store Methods */
    public int size() {
      // post: returns the number of elements in the list.
      return this.count;
    }

    public ListItem head() {
      // post: returns the head (row 0) of the list.
      return this.head;
    }

    public ListItem tail() {
      // post: returns the tail (last row) of the list.
      return this.tail;
    }

    public boolean isEmpty() {
      // post: returns true iff the store is empty.
      return (this.size() == 0);
    }

    public void clear() {
      // post: clears the list so that it contains no elements.
      this.head = null;
      this.tail = null;
      this.count = 0;
    }

    /* Interface List Methods */
    public void addToHead() {
      // post: a new (empty) row is added at the beginning of the list.
      this.head = new ListItem(this.head, null);
      if (this.tail == null) {
        this.tail = this.head;
      }
      this.count++;
    }

    public void removeFromHead() {
      // pre: list is not empty
      // post: removes the first row from the list.
      this.head = this.head.next();
      if (this.head != null) {
        this.head.setPrevious(null);
      } else {
        this.tail = null;
      }
      this.count--;
    }

    public void addToTail() {
      // post: a new (empty) row is added at the end of the list.
      this.tail = new ListItem(null, this.tail);
      if (this.head == null) {
        this.head = this.tail;
      }
      this.count++;
    }

    public void removeFromTail() {
      // pre: list is not empty
      // post: the last row in the list is removed.
      this.tail = this.tail.previous();
      if (this.tail == null) {
        this.head = null;
      } else {
        this.tail.setNext(null);
      }
      this.count--;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
    }
  }

  /**
   * One row of the exponential histogram: up to MAXBUCKETS+1 buckets, each summarizing
   * the same number of items (2^row) by sum and internal variance.
   */
  private class ListItem extends AbstractMOAObject {

    protected ListItem next;
    protected ListItem previous;
    // number of buckets currently stored in this row
    protected int bucketSizeRow = 0;
    protected int MAXBUCKETS = ADWIN.MAXBUCKETS;
    // parallel arrays: bucketTotal[k] / bucketVariance[k] hold bucket k's sum and variance
    protected double bucketTotal[] = new double[MAXBUCKETS + 1];
    protected double bucketVariance[] = new double[MAXBUCKETS + 1];

    public ListItem() {
      // post: initializes the node to be a detached, empty row.
      this(null, null);
    }

    public void clear() {
      bucketSizeRow = 0;
      for (int k = 0; k <= MAXBUCKETS; k++) {
        clearBucket(k);
      }
    }

    private void clearBucket(int k) {
      setTotal(0, k);
      setVariance(0, k);
    }

    public ListItem(ListItem nextNode, ListItem previousNode) {
      // post: initializes the node, splicing it between the given neighbours.
      this.next = nextNode;
      this.previous = previousNode;
      if (nextNode != null) {
        nextNode.previous = this;
      }
      if (previousNode != null) {
        previousNode.next = this;
      }
      clear();
    }

    public void insertBucket(double Value, double Variance) {
      // appends a bucket at the end of this row
      int k = bucketSizeRow;
      bucketSizeRow++;
      // Insert new bucket
      setTotal(Value, k);
      setVariance(Variance, k);
    }

    public void RemoveBucket() {
      // removes the first (oldest) bucket of this row
      compressBucketsRow(1);
    }

    public void compressBucketsRow(int NumberItemsDeleted) {
      // shifts the row left, dropping its first NumberItemsDeleted buckets
      for (int k = NumberItemsDeleted; k <= MAXBUCKETS; k++) {
        bucketTotal[k - NumberItemsDeleted] = bucketTotal[k];
        bucketVariance[k - NumberItemsDeleted] = bucketVariance[k];
      }
      for (int k = 1; k <= NumberItemsDeleted; k++) {
        clearBucket(MAXBUCKETS - k + 1);
      }
      bucketSizeRow -= NumberItemsDeleted;
    }

    public ListItem previous() {
      // post: returns the previous node.
      return this.previous;
    }

    public void setPrevious(ListItem previous) {
      // post: sets the previous node to be the given node.
      this.previous = previous;
    }

    public ListItem next() {
      // post: returns the next node.
      return this.next;
    }

    public void setNext(ListItem next) {
      // post: sets the next node to be the given node.
      this.next = next;
    }

    public double Total(int k) {
      // post: returns the sum stored in bucket k.
      return bucketTotal[k];
    }

    public double Variance(int k) {
      // post: returns the variance stored in bucket k.
      return bucketVariance[k];
    }

    public void setTotal(double value, int k) {
      // post: sets the sum of bucket k.
      bucketTotal[k] = value;
    }

    public void setVariance(double value, int k) {
      // post: sets the variance of bucket k.
      bucketVariance[k] = value;
    }

    @Override
    public void getDescription(StringBuilder sb, int indent) {
    }
  }

  /** Default confidence parameter delta. */
  public static final double DELTA = .002; // .1;
  // minimum window length before any reduction is attempted
  private static final int mintMinimLongitudWindow = 10; // 10
  private double mdbldelta = .002; // .1;
  // number of items processed so far (logical clock)
  private int mintTime = 0;
  // a cut is only attempted every mintClock insertions
  private int mintClock = 32;
  private double mdblWidth = 0; // Mean of Width = mdblWidth/Number of items

  // BUCKET
  /** Maximum bucket index per row; each row holds at most MAXBUCKETS+1 buckets. */
  public static final int MAXBUCKETS = 5;

  // index of the last (largest) row in the list
  private int lastBucketRow = 0;
  // running sum, variance (times WIDTH), and length of the whole window
  private double TOTAL = 0;
  private double VARIANCE = 0;
  private int WIDTH = 0;
  private int BucketNumber = 0;
  // time of the most recent detection
  private int Detect = 0;
  private int numberDetections = 0;
  private int DetectTwice = 0;
  // true iff the last setInput call shrank the window
  private boolean blnBucketDeleted = false;
  private int BucketNumberMAX = 0;
  // minimum sub-window length considered when testing a cut point
  private int mintMinWinLength = 5;
  private List listRowBuckets;

  /** @return true iff the last {@link #setInput} call detected a change. */
  public boolean getChange() {
    return blnBucketDeleted;
  }

  public void resetChange() {
    blnBucketDeleted = false;
  }

  /** @return the maximum number of buckets ever held at once. */
  public int getBucketsUsed() {
    return BucketNumberMAX;
  }

  /** @return current window length. */
  public int getWidth() {
    return WIDTH;
  }

  public void setClock(int intClock) {
    mintClock = intClock;
  }

  public int getClock() {
    return mintClock;
  }

  /** ADWIN has no warning zone; always false. */
  public boolean getWarning() {
    return false;
  }

  /** @return true iff a change was detected at the current time step. */
  public boolean getDetect() {
    return (Detect == mintTime);
  }

  public int getNumberDetections() {
    return numberDetections;
  }

  /** @return sum of all items currently in the window. */
  public double getTotal() {
    return TOTAL;
  }

  /** @return current window mean (NaN/Infinity if the window is empty). */
  public double getEstimation() {
    return TOTAL / WIDTH;
  }

  public double getVariance() {
    return VARIANCE / WIDTH;
  }

  /** @return accumulated window widths (for averaging over time). */
  public double getWidthT() {
    return mdblWidth;
  }

  private void initBuckets() {
    // Init buckets: empty histogram, all running statistics zeroed.
    listRowBuckets = new List();
    lastBucketRow = 0;
    TOTAL = 0;
    VARIANCE = 0;
    WIDTH = 0;
    BucketNumber = 0;
  }

  /**
   * Adds one item to the window: new size-1 bucket at row 0, incremental update
   * of TOTAL/VARIANCE, then row compression if row 0 overflowed.
   */
  private void insertElement(double Value) {
    WIDTH++;
    insertElementBucket(0, Value, listRowBuckets.head());
    double incVariance = 0;
    if (WIDTH > 1) {
      // Welford-style incremental variance update
      incVariance = (WIDTH - 1) * (Value - TOTAL / (WIDTH - 1)) * (Value - TOTAL / (WIDTH - 1)) / WIDTH;
    }
    VARIANCE += incVariance;
    TOTAL += Value;
    compressBuckets();
  }

  private void insertElementBucket(double Variance, double Value, ListItem Node) {
    // Insert new bucket
    Node.insertBucket(Value, Variance);
    BucketNumber++;
    if (BucketNumber > BucketNumberMAX) {
      BucketNumberMAX = BucketNumber;
    }
  }

  /** @return number of items summarized by one bucket in the given row (2^Row). */
  private int bucketSize(int Row) {
    return (int) Math.pow(2, Row);
  }

  /**
   * Drops the oldest bucket (tail row) and updates the running statistics.
   *
   * @return the number of items removed from the window
   */
  public int deleteElement() {
    // LIST
    // Update statistics
    ListItem Node;
    Node = listRowBuckets.tail();
    int n1 = bucketSize(lastBucketRow);
    WIDTH -= n1;
    TOTAL -= Node.Total(0);
    double u1 = Node.Total(0) / n1;
    // remove the deleted bucket's contribution (internal + between-group variance)
    double incVariance = Node.Variance(0) + n1 * WIDTH * (u1 - TOTAL / WIDTH) * (u1 - TOTAL / WIDTH)
        / (n1 + WIDTH);
    VARIANCE -= incVariance;
    // Delete Bucket
    Node.RemoveBucket();
    BucketNumber--;
    if (Node.bucketSizeRow == 0) {
      listRowBuckets.removeFromTail();
      lastBucketRow--;
    }
    return n1;
  }

  /**
   * Traverses the rows in increasing order; whenever a row is full, merges its two
   * oldest buckets into one bucket of the next row.
   */
  public void compressBuckets() {
    int n1, n2;
    double u2, u1, incVariance;
    ListItem cursor;
    ListItem nextNode;
    cursor = listRowBuckets.head();
    int i = 0;
    do {
      // Find the number of buckets in a row
      int k = cursor.bucketSizeRow;
      // If the row is full, merge buckets
      if (k == MAXBUCKETS + 1) {
        nextNode = cursor.next();
        if (nextNode == null) {
          listRowBuckets.addToTail();
          nextNode = cursor.next();
          lastBucketRow++;
        }
        n1 = bucketSize(i);
        n2 = bucketSize(i);
        u1 = cursor.Total(0) / n1;
        u2 = cursor.Total(1) / n2;
        // between-bucket variance of the two merged buckets
        incVariance = n1 * n2 * (u1 - u2) * (u1 - u2) / (n1 + n2);
        nextNode.insertBucket(cursor.Total(0) + cursor.Total(1),
            cursor.Variance(0) + cursor.Variance(1) + incVariance);
        BucketNumber++;
        cursor.compressBucketsRow(2);
        if (nextNode.bucketSizeRow <= MAXBUCKETS) {
          break;
        }
      } else {
        break;
      }
      cursor = cursor.next();
      i++;
    } while (cursor != null);
  }

  public boolean setInput(double intEntrada) {
    return setInput(intEntrada, mdbldelta);
  }

  /**
   * Feeds one value into the detector.
   *
   * @param intEntrada the new observation
   * @param delta confidence parameter for this call
   * @return true iff the window was shrunk (a change was detected)
   */
  public boolean setInput(double intEntrada, double delta) {
    boolean blnChange = false;
    boolean blnExit;
    ListItem cursor;
    mintTime++;
    // 1,2) Increment window in one element
    insertElement(intEntrada);
    blnBucketDeleted = false;
    // 3) Reduce window: only every mintClock steps and only once the window is long enough
    if (mintTime % mintClock == 0 && getWidth() > mintMinimLongitudWindow) {
      boolean blnReduceWidth = true;
      while (blnReduceWidth) {
        blnReduceWidth = false;
        blnExit = false;
        // scan cut points from oldest to newest, keeping running stats of
        // the left sub-window (n0,u0,v0) and right sub-window (n1,u1,v1)
        int n0 = 0;
        int n1 = WIDTH;
        double u0 = 0;
        double u1 = getTotal();
        double v0 = 0;
        double v1 = VARIANCE;
        double n2;
        double u2;
        cursor = listRowBuckets.tail();
        int i = lastBucketRow;
        do {
          for (int k = 0; k <= (cursor.bucketSizeRow - 1); k++) {
            n2 = bucketSize(i);
            u2 = cursor.Total(k);
            // move bucket (i,k) from the right sub-window to the left one,
            // transferring its variance contributions
            if (n0 > 0) {
              v0 += cursor.Variance(k) + (double) n0 * n2 * (u0 / n0 - u2 / n2) * (u0 / n0 - u2 / n2)
                  / (n0 + n2);
            }
            if (n1 > 0) {
              v1 -= cursor.Variance(k) + (double) n1 * n2 * (u1 / n1 - u2 / n2) * (u1 / n1 - u2 / n2)
                  / (n1 + n2);
            }
            n0 += bucketSize(i);
            n1 -= bucketSize(i);
            u0 += cursor.Total(k);
            u1 -= cursor.Total(k);
            if (i == 0 && k == cursor.bucketSizeRow - 1) {
              // reached the newest bucket: no cut point left to test
              blnExit = true;
              break;
            }
            double absvalue = (u0 / n0) - (u1 / n1);
            // test the cut only when both sub-windows are long enough
            if ((n1 > mintMinWinLength + 1 && n0 > mintMinWinLength + 1)
                && blnCutexpression(n0, n1, u0, u1, v0, v1, absvalue, delta)) {
              blnBucketDeleted = true;
              Detect = mintTime;
              if (Detect == 0) {
                Detect = mintTime;
              } else if (DetectTwice == 0) {
                DetectTwice = mintTime;
              }
              blnReduceWidth = true;
              blnChange = true;
              if (getWidth() > 0) {
                // Reduce width of the window: drop the oldest bucket and rescan
                n0 -= deleteElement();
                blnExit = true;
                break;
              }
            }
          } // Next k
          cursor = cursor.previous();
          i--;
        } while (((!blnExit && cursor != null)));
      } // End While
    } // End if
    mdblWidth += getWidth();
    if (blnChange) {
      numberDetections++;
    }
    return blnChange;
  }

  /**
   * Hoeffding-style cut test: returns true when the difference of sub-window means
   * exceeds epsilon, i.e. the two sub-windows likely come from different distributions.
   */
  private boolean blnCutexpression(int n0, int n1, double u0, double u1, double v0, double v1, double absvalue,
      double delta) {
    int n = getWidth();
    // note: the ln(n) term belongs in the numerator ("Formula Gener 2008")
    double dd = Math.log(2 * Math.log(n) / delta);
    double v = getVariance();
    double m = ((double) 1 / ((n0 - mintMinWinLength + 1))) + ((double) 1 / ((n1 - mintMinWinLength + 1)));
    double epsilon = Math.sqrt(2 * m * v * dd) + (double) 2 / 3 * dd * m;
    return (Math.abs(absvalue) > epsilon);
  }

  /** Creates a detector with the default delta. */
  public ADWIN() {
    mdbldelta = DELTA;
    initBuckets();
    Detect = 0;
    numberDetections = 0;
    DetectTwice = 0;
  }

  /** Creates a detector with the given confidence parameter. */
  public ADWIN(double d) {
    mdbldelta = d;
    initBuckets();
    Detect = 0;
    numberDetections = 0;
    DetectTwice = 0;
  }

  /** Creates a detector with the default delta and the given clock period. */
  public ADWIN(int cl) {
    mdbldelta = DELTA;
    initBuckets();
    Detect = 0;
    numberDetections = 0;
    DetectTwice = 0;
    mintClock = cl;
  }

  public String getEstimatorInfo() {
    return "ADWIN;;";
  }

  // no-op: window length is managed internally
  public void setW(int W0) {
  }

  @Override
  public void getDescription(StringBuilder sb, int indent) {
  }
}
package it.softphone.rd.gwt.client.widget.base.social;

import it.softphone.rd.gwt.client.CommonWidgetsStyle;
import it.softphone.rd.gwt.client.resources.base.CommentBoxCss;
import it.softphone.rd.gwt.client.widget.base.HTMLLink;
import it.softphone.rd.model.shared.Feed;

import java.util.Date;
import java.util.logging.Logger;

import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.safehtml.client.SafeHtmlTemplates;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.safehtml.shared.SafeUri;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.InlineHTML;
import com.google.gwt.user.client.ui.ScrollPanel;

/**
 * <h1>A box containing a {@link Feed}</h1>
 *
 * This class contains a {@link Feed} and consists in:
 *
 * <ul>
 * <li>The text of the comment</li>
 * <li>The username</li>
 * <li>The insert date</li>
 * </ul>
 *
 * If a comment length is provided, the comment will be truncated and a link will be added to the box.
 * Clicking the link the entire comment will be shown.
 * If no length is provided, the default value of 200 will be used.
 *
 * @author Alessandro Casolla
 *
 * @param <T> the type of the class
 */
public class CommentBox<T extends Feed> extends SocialBox {

    private final static Logger logger = Logger.getLogger("");

    private final FlowPanel mainContainer = new FlowPanel();
    private final ScrollPanel scroll = new ScrollPanel();
    private final FlowPanel scrollContainer = new FlowPanel();
    private final HTMLLink more = new HTMLLink("More");

    // text currently shown (possibly truncated with "...")
    private String displayedText = "";
    private String user;
    private Date date;

    /**
     * The default comment length
     */
    private static final int DEFAULT_MAX_TEXT_LENGTH = 200;

    private final int textLength;
    // the full, untruncated comment text
    private String textExtended;
    private final DateTimeFormat dtf = DateTimeFormat.getFormat("MMMM yyyy HH:mm");
    private final FlowPanel infoContainer = new FlowPanel();
    private InlineHTML hComment = new InlineHTML("");
    private final HTML info = new HTML();
    private CommentBoxCss css;
    private boolean isExpanded = false;

    // toggles between the truncated and the full comment text
    private ClickHandler moreHandler = new ClickHandler() {
        @Override
        public void onClick(ClickEvent event) {
            if (isExpanded) {
                hComment.setHTML(displayedText);
                more.setText("More");
                isExpanded = false;
            } else {
                hComment.setHTML(textExtended);
                more.setText("Back");
                isExpanded = true;
            }
        }
    };

    private final static String infoTemplate =
            "<div class='{0}'>" +
            "<div>Entered by <b>{1}</b> on <b>{2}</b></div>" +
            "</div>";

    public interface Template extends SafeHtmlTemplates {
        @SafeHtmlTemplates.Template(infoTemplate)
        SafeHtml info(String styleContainer, SafeHtml user, SafeHtml date);
    }

    private static Template TEMPLATE_COMMENT = GWT.create(Template.class);

    /**
     * Constructs an empty CommentBox
     */
    public CommentBox() {
        this("", "", new Date(), DEFAULT_MAX_TEXT_LENGTH);
    }

    /**
     * Constructs a CommentBox by the given params
     *
     * @param comment the text of the comment
     * @param user the user name
     * @param date the insert date
     * @param commentLength the maximum comment length
     */
    public CommentBox(String comment, String user, Date date, int commentLength) {
        this(CommonWidgetsStyle.getTheme().getCommonsWidgetClientBundle().commentBox(),
                comment, user, date, null, true, commentLength);
    }

    /**
     * Constructs a CommentBox by the given params
     *
     * @param feed the feed
     * @param isReply the comment is a reply
     */
    public CommentBox(T feed, boolean isReply) {
        this(CommonWidgetsStyle.getTheme().getCommonsWidgetClientBundle().commentBox(),
                feed.getText(), feed.getUser(), feed.getDate(), feed.getIcon(), isReply,
                DEFAULT_MAX_TEXT_LENGTH);
    }

    /**
     * Constructs a CommentBox by the given params
     *
     * @param feed the feed
     * @param commentLength the maximum comment length
     */
    public CommentBox(T feed, int commentLength) {
        this(CommonWidgetsStyle.getTheme().getCommonsWidgetClientBundle().commentBox(),
                feed.getText(), feed.getUser(), feed.getDate(), feed.getIcon(), true,
                commentLength);
    }

    /**
     * Constructs a CommentBox by the given params
     *
     * @param css the css to use
     * @param comment the text of the comment
     * @param user the user name
     * @param date the insert date
     * @param icon optional icon URI; falls back to the bundled image when null
     * @param isUser if true, the customer agent image will be used
     * @param commentLength the maximum comment length
     */
    public CommentBox(CommentBoxCss css, String comment, String user, Date date,
            final String icon, boolean isUser, int commentLength) {
        this.textLength = commentLength;
        this.css = css;
        css.ensureInjected();

        setText(comment);
        setUser(user);
        setDate(date);

        mainContainer.setStylePrimaryName(css.commentBoxListItem());
        scroll.setStylePrimaryName(css.commentBoxContainer());
        infoContainer.setStylePrimaryName(css.commentBoxInfoContainer());

        SafeUri uri = null;
        if (icon != null) {
            uri = new SafeUri() {
                @Override
                public String asString() {
                    return icon;
                }
            };
        }

        if (isUser) {
            if (uri != null)
                setImage(css.commentBoxImageUser(), uri);
            else
                setImage(css.commentBoxImageUser(), resources.callCenterUser().getSafeUri());
        } else {
            if (uri != null)
                setImage(css.commentBoxImageCustomer(), uri);
            else
                setImage(css.commentBoxImageCustomer(), resources.customer().getSafeUri());
        }

        init();
        checkCommentLength();
    }

    // assembles the widget tree and renders the info line once
    private void init() {
        mainContainer.add(scroll);
        scroll.setWidget(scrollContainer);
        scrollContainer.add(hComment);
        more.addClickHandler(moreHandler);
        scrollContainer.add(infoContainer);
        // reuse the single rendering path instead of duplicating the template code here
        renderInfoContainer();
        infoContainer.add(info);
        setContent(mainContainer);
    }

    /**
     * Returns the username
     *
     * @return a string
     */
    public String getUser() {
        // NOTE(review): "ok" looks like a placeholder default — confirm intended fallback
        if (user == null || user.isEmpty())
            return "ok";
        return user;
    }

    /**
     * Returns the insert comment date
     *
     * @return a date
     */
    public Date getDate() {
        return date;
    }

    /**
     * Returns the insert comment date formatted
     *
     * @return a string, empty when no date is set
     */
    public String getStringDate() {
        if (date == null)
            return "";
        return dtf.format(getDate());
    }

    /**
     * Sets the comment
     *
     * @param feed the feed to set
     */
    public void setFeed(T feed) {
        setText(feed.getText());
        setUser(feed.getUser());
        setDate(feed.getDate());
    }

    // truncates the displayed text to textLength and shows the "More" link when needed
    private void checkCommentLength() {
        if (displayedText == null)
            return;
        if (textExtended == null) {
            // Strings are immutable: keep a reference, no defensive copy needed
            textExtended = displayedText;
        }
        boolean isTruncated = displayedText.length() > textLength;
        // Math.max guards against StringIndexOutOfBoundsException for commentLength < 3
        displayedText = isTruncated
                ? displayedText.substring(0, Math.max(0, textLength - 3)).concat("...")
                : displayedText;
        hComment.setHTML(displayedText);
        if (isTruncated) {
            infoContainer.add(more);
        }
    }

    private void setText(String text) {
        this.displayedText = text;
        textExtended = null;
        checkCommentLength();
    }

    private void setDate(Date date) {
        this.date = date;
        renderInfoContainer();
    }

    private void setUser(String user) {
        this.user = user;
        renderInfoContainer();
    }

    // (re)renders the "Entered by ... on ..." line via the SafeHtml template
    private void renderInfoContainer() {
        logger.info("user=" + getUser() + " date=" + getStringDate());
        info.setHTML(new SafeHtmlBuilder().append(
                TEMPLATE_COMMENT.info(css.commentBoxInfoContainer(),
                        SafeHtmlUtils.fromString(getUser()),
                        SafeHtmlUtils.fromString(getStringDate())))
                .toSafeHtml());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.synapse; import org.apache.axis2.AxisFault; import org.apache.axis2.Constants; import org.apache.axis2.deployment.DeploymentEngine; import org.apache.axis2.addressing.AddressingConstants; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.ConfigurationContextFactory; import org.apache.axis2.description.*; import org.apache.axis2.dispatchers.SOAPMessageBodyBasedDispatcher; import org.apache.axis2.engine.*; import org.apache.axis2.format.BinaryBuilder; import org.apache.axis2.format.PlainTextBuilder; import org.apache.axis2.phaseresolver.PhaseException; import org.apache.axis2.phaseresolver.PhaseMetadata; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.synapse.commons.beanstalk.enterprise.EnterpriseBeanstalkConstants; import org.apache.synapse.commons.beanstalk.enterprise.EnterpriseBeanstalkManager; import org.apache.synapse.commons.datasource.DataSourceRepositoryHolder; import org.apache.synapse.commons.util.RMIRegistryController; import org.apache.synapse.config.*; import org.apache.synapse.config.xml.MultiXMLConfigurationBuilder; import org.wso2.securevault.SecurityConstants; import 
org.wso2.securevault.secret.SecretCallbackHandler; import org.apache.synapse.commons.datasource.DataSourceInformationRepository; import org.apache.synapse.commons.datasource.DataSourceConstants; import org.apache.synapse.commons.jmx.JmxInformation; import org.apache.synapse.commons.jmx.JmxInformationFactory; import org.apache.synapse.core.SynapseEnvironment; import org.apache.synapse.core.axis2.*; import org.apache.synapse.deployers.ImportDeployer; import org.apache.synapse.deployers.LibraryArtifactDeployer; import org.apache.synapse.deployers.SynapseArtifactDeploymentStore; import org.apache.synapse.eventing.SynapseEventSource; import org.apache.synapse.libraries.imports.SynapseImport; import org.apache.synapse.task.*; import org.wso2.securevault.secret.handler.SharedSecretCallbackHandlerCache; import org.apache.synapse.util.xpath.ext.SynapseXpathFunctionContextProvider; import org.apache.synapse.util.xpath.ext.SynapseXpathVariableResolver; import org.apache.synapse.util.xpath.ext.XpathExtensionUtil; import java.io.File; import java.util.*; /** * Axis2 Based Synapse Controller. 
* * @see org.apache.synapse.SynapseController */ public class Axis2SynapseController implements SynapseController { private static final Log log = LogFactory.getLog(Axis2SynapseController.class); private static final String JMX_AGENT_NAME = "jmx.agent.name"; /** The Axis2 listener Manager */ private ListenerManager listenerManager; /** The Axis2 configuration context used by Synapse */ private ConfigurationContext configurationContext; /** Reference to the Synapse configuration */ protected SynapseConfiguration synapseConfiguration; /** Reference to the Synapse configuration */ protected SynapseEnvironment synapseEnvironment; /** Indicate initialization state */ private boolean initialized; /** ServerConfiguration Information */ protected ServerConfigurationInformation serverConfigurationInformation; /** Runtime information about the server */ protected ServerContextInformation serverContextInformation; /** JMX Adapter */ private JmxAdapter jmxAdapter; private TaskDescriptionRepository taskDescriptionRepository; private TaskScheduler taskScheduler; private TaskManager taskManagerImpl; /** * {@inheritDoc} * * @param serverConfigurationInformation ServerConfigurationInformation Instance * @param serverContextInformation Server Context if the Axis2 Based Server * Environment has been already set up. 
*/ public void init(ServerConfigurationInformation serverConfigurationInformation, ServerContextInformation serverContextInformation) { log.info("Initializing Synapse at : " + new Date()); if (serverConfigurationInformation == null) { throw new IllegalArgumentException("ServerConfigurationInformation cannot be null"); } if (serverContextInformation == null) { throw new IllegalArgumentException("ServerContextInformation cannot be null"); } this.serverConfigurationInformation = serverConfigurationInformation; this.serverContextInformation = serverContextInformation; /* If no system property for the JMX agent is specified from outside, use a default one to show all MBeans (including the Axis2-MBeans) within the Synapse tree */ if (System.getProperty(JMX_AGENT_NAME) == null) { System.setProperty(JMX_AGENT_NAME, "org.apache.synapse"); } if (serverContextInformation.getServerContext() == null || serverConfigurationInformation.isCreateNewInstance()) { if (log.isDebugEnabled()) { log.debug("Initializing Synapse in a new axis2 server environment instance"); } createNewInstance(serverConfigurationInformation); } else { Object context = serverContextInformation.getServerContext(); if (context instanceof ConfigurationContext) { if (log.isDebugEnabled()) { log.debug("Initializing Synapse in an already existing " + "axis2 server environment instance"); } configurationContext = (ConfigurationContext) context; configurationContext.setProperty( AddressingConstants.ADDR_VALIDATE_ACTION, Boolean.FALSE); } else { handleFatal("Synapse startup initialization failed : Provided server context is" + " invalid, expected an Axis2 ConfigurationContext instance"); } } // set the configuration context serverContextInformation.setServerContext(configurationContext); // set the ServerContextInformation as a parameter Parameter serverContextParameter = new Parameter( SynapseConstants.SYNAPSE_SERVER_CTX_INFO, serverContextInformation); // set the ServerConfiguration as a parameter Parameter 
serverConfigParameter = new Parameter( SynapseConstants.SYNAPSE_SERVER_CONFIG_INFO, serverConfigurationInformation); try { configurationContext.getAxisConfiguration().addParameter(serverContextParameter); configurationContext.getAxisConfiguration().addParameter(serverConfigParameter); } catch (AxisFault ignored) { log.fatal("Error adding the parameter to the Axis Configuration"); } // we retrieve these properties to initialize the task scheduler in the environment Object repo = serverContextInformation.getProperty(TaskConstants.TASK_DESCRIPTION_REPOSITORY); Object taskScheduler = serverContextInformation.getProperty(TaskConstants.TASK_SCHEDULER); if (repo != null && (repo instanceof TaskDescriptionRepository)) { this.taskDescriptionRepository = (TaskDescriptionRepository) repo; } if (taskScheduler != null && (taskScheduler instanceof TaskScheduler)) { this.taskScheduler = (TaskScheduler) taskScheduler; } addDefaultBuildersAndFormatters(configurationContext.getAxisConfiguration()); initDataSourceHelper(serverContextInformation); initSharedSecretCallbackHandlerCache(serverContextInformation); initEnterpriseBeanstalkHolder(serverContextInformation); initialized = true; } /** * {@inheritDoc} */ public void destroy() { try { // only if we have created the server if (serverConfigurationInformation.isCreateNewInstance()) { // destroy listener manager if (listenerManager != null) { listenerManager.destroy(); } stopJmxAdapter(); RMIRegistryController.getInstance().shutDown(); // we need to call this method to clean the temp files we created. 
if (configurationContext != null) { configurationContext.terminate(); } } initialized = false; } catch (Exception e) { log.error("Error stopping the Axis2 Based Server Environment", e); } } /** * {@inheritDoc} */ public boolean isInitialized() { return initialized; } /** * Adds the synapse handlers to the inflow Dispatch phase and starts the listener manager * if the axis2 instance is created by the Synapse */ public void start() { // add the Synapse handlers if (configurationContext != null) { List<Phase> inflowPhases = configurationContext.getAxisConfiguration().getInFlowPhases(); for (Phase inPhase : inflowPhases) { // we are interested about the Dispatch phase in the inflow if (PhaseMetadata.PHASE_DISPATCH.equals(inPhase.getPhaseName())) { try { inPhase.addHandler(prepareSynapseDispatcher()); inPhase.addHandler(prepareMustUnderstandHandler()); } catch (PhaseException e) { handleFatal("Couldn't start Synapse, " + "Cannot add the required Synapse handlers", e); } } } } else { handleFatal("Couldn't start Synapse, ConfigurationContext not found"); } // if the axis2 instance is created by us, then start the listener manager if (serverConfigurationInformation.isCreateNewInstance()) { if (listenerManager != null) { listenerManager.start(); } else { handleFatal("Couldn't start Synapse, ListenerManager not found"); } /* if JMX Adapter has been configured and started, output usage information rather at the end of the startup process to make it more obvious */ if (jmxAdapter != null && jmxAdapter.isRunning()) { log.info("Management using JMX available via: " + jmxAdapter.getJmxInformation().getJmxUrl()); } } } /** * {@inheritDoc} */ public void startMaintenance() { log.info("Putting transport listeners, senders and tasks into maintenance mode.."); // pause transport listeners and senders Axis2TransportHelper transportHelper = new Axis2TransportHelper(configurationContext); transportHelper.pauseListeners(); transportHelper.pauseSenders(); // put tasks on hold 
SynapseTaskManager synapseTaskManager = synapseEnvironment.getTaskManager(); if (synapseTaskManager.isInitialized()) { synapseTaskManager.pauseAll(); } log.info("Entered maintenance mode"); } /** * {@inheritDoc} */ public void endMaintenance() { log.info("Resuming transport listeners, senders and tasks from maintenance mode..."); // resume transport listeners and senders Axis2TransportHelper transportHelper = new Axis2TransportHelper(configurationContext); transportHelper.resumeListeners(); transportHelper.resumeSenders(); // resume tasks SynapseTaskManager synapseTaskManager = synapseEnvironment.getTaskManager(); if (synapseTaskManager.isInitialized()) { synapseTaskManager.resumeAll(); } log.info("Resumed normal operation from maintenance mode"); } /** * Cleanup the axis2 environment and stop the synapse environment. */ public void stop() { try { // stop tasks SynapseTaskManager synapseTaskManager = synapseEnvironment.getTaskManager(); if (synapseTaskManager.isInitialized()) { synapseTaskManager.cleanup(); } EnterpriseBeanstalkManager manager = (EnterpriseBeanstalkManager) serverContextInformation.getProperty(EnterpriseBeanstalkConstants.BEANSTALK_MANAGER_PROP_NAME); if (manager != null) { manager.destroy(); } // stop the listener manager if (listenerManager != null) { listenerManager.stop(); } // detach the synapse handlers if (configurationContext != null) { List<Phase> inflowPhases = configurationContext.getAxisConfiguration().getInFlowPhases(); for (Phase inPhase : inflowPhases) { // we are interested about the Dispatch phase in the inflow if (PhaseMetadata.PHASE_DISPATCH.equals(inPhase.getPhaseName())) { List<HandlerDescription> synapseHandlers = new ArrayList<HandlerDescription>(); for (Handler handler : inPhase.getHandlers()) { if (SynapseDispatcher.NAME.equals(handler.getName()) || SynapseMustUnderstandHandler.NAME.equals(handler.getName())) { synapseHandlers.add(handler.getHandlerDesc()); } } for (HandlerDescription handlerMD : synapseHandlers) { 
inPhase.removeHandler(handlerMD); } } } } else { handleException("Couldn't detach the Synapse handlers, " + "ConfigurationContext not found."); } // continue stopping the axis2 environment if we created it if (serverConfigurationInformation.isCreateNewInstance() && configurationContext != null && configurationContext.getAxisConfiguration() != null) { Map<String, AxisService> serviceMap = configurationContext.getAxisConfiguration().getServices(); for (AxisService svc : serviceMap.values()) { svc.setActive(false); } // stop all modules Map<String, AxisModule> moduleMap = configurationContext.getAxisConfiguration().getModules(); for (AxisModule mod : moduleMap.values()) { if (mod.getModule() != null && !"synapse".equals(mod.getName())) { mod.getModule().shutdown(configurationContext); } } } } catch (AxisFault e) { log.error("Error stopping the Axis2 Environment"); } } /** * Setup synapse in axis2 environment and return the created instance. * * @return SynapseEnvironment instance */ public SynapseEnvironment createSynapseEnvironment() { try { deployMediationLibraryArtifacts(); deployMediatorExtensions(); deploySynapseService(); deployProxyServices(); deployEventSources(); //deployMediatorExtensions(); } catch (AxisFault axisFault) { log.fatal("Synapse startup failed...", axisFault); throw new SynapseException("Synapse startup failed", axisFault); } synapseEnvironment = new Axis2SynapseEnvironment( configurationContext, synapseConfiguration, serverContextInformation); MessageContextCreatorForAxis2.setSynEnv(synapseEnvironment); Parameter synapseEnvironmentParameter = new Parameter( SynapseConstants.SYNAPSE_ENV, synapseEnvironment); try { configurationContext.getAxisConfiguration().addParameter(synapseEnvironmentParameter); } catch (AxisFault e) { handleFatal("Could not set parameter '" + SynapseConstants.SYNAPSE_ENV + "' to the Axis2 configuration : " + e.getMessage(), e); } synapseEnvironment.getTaskManager().init(taskDescriptionRepository, taskScheduler, 
synapseConfiguration.getTaskManager()); synapseConfiguration.init(synapseEnvironment); synapseEnvironment.setInitialized(true); return synapseEnvironment; } /** * The mediation library deployer will handling the process of deploying the * libararyArtifacts, this is required since the library specific artifacts * has to be initialized priorly for the cases like connectors * */ private void deployMediationLibraryArtifacts() { if (configurationContext == null || synapseConfiguration == null) { return; } DeploymentEngine deploymentEngine = (DeploymentEngine) configurationContext .getAxisConfiguration().getConfigurator(); String carbonRepoPath = configurationContext.getAxisConfiguration().getRepository() .getPath(); SynapseArtifactDeploymentStore deploymentStore = synapseConfiguration .getArtifactDeploymentStore(); String synapseImportDir = synapseConfiguration.getPathToConfigFile() + File.separator + MultiXMLConfigurationBuilder.SYNAPSE_IMPORTS_DIR; /*Registering Import Deployer is not required here.*/ //deploymentEngine.addDeployer(new ImportDeployer(), synapseImportDir, "xml"); String libsPath = carbonRepoPath + File.separator + "synapse-libs"; deploymentEngine.addDeployer(new LibraryArtifactDeployer(), libsPath, "zip"); } /** * Destroys the Synapse Environment by undeploying all Axis2 services. 
*/ public void destroySynapseEnvironment() { if (synapseEnvironment != null) { try { undeploySynapseService(); undeployProxyServices(); undeployEventSources(); } catch (AxisFault e) { handleFatal("Error while shutting down the Synapse environment", e); } synapseEnvironment.setInitialized(false); } } /** * {@inheritDoc} */ public SynapseConfiguration createSynapseConfiguration() { String synapseXMLLocation = serverConfigurationInformation.getSynapseXMLLocation(); Properties properties = SynapsePropertiesLoader.loadSynapseProperties(); if (serverConfigurationInformation.getResolveRoot() != null) { properties.put(SynapseConstants.RESOLVE_ROOT, serverConfigurationInformation.getResolveRoot()); } if (serverConfigurationInformation.getSynapseHome() != null) { properties.put(SynapseConstants.SYNAPSE_HOME, serverConfigurationInformation.getSynapseHome()); } if (synapseXMLLocation != null) { synapseConfiguration = SynapseConfigurationBuilder.getConfiguration( synapseXMLLocation, properties); } else { log.warn("System property or init-parameter '" + SynapseConstants.SYNAPSE_XML + "' is not specified. 
Using default configuration.."); synapseConfiguration = SynapseConfigurationBuilder.getDefaultConfiguration(); } Enumeration keys = properties.keys(); while (keys.hasMoreElements()) { String key = (String) keys.nextElement(); synapseConfiguration.setProperty(key, properties.getProperty(key)); } // Set the Axis2 ConfigurationContext to the SynapseConfiguration synapseConfiguration.setAxisConfiguration(configurationContext.getAxisConfiguration()); MessageContextCreatorForAxis2.setSynConfig(synapseConfiguration); // set the Synapse configuration into the Axis2 configuration Parameter synapseConfigurationParameter = new Parameter( SynapseConstants.SYNAPSE_CONFIG, synapseConfiguration); try { configurationContext.getAxisConfiguration().addParameter(synapseConfigurationParameter); } catch (AxisFault e) { handleFatal("Could not set parameters '" + SynapseConstants.SYNAPSE_CONFIG + "' to the Axis2 configuration : " + e.getMessage(), e); } addServerIPAndHostEntries(); return synapseConfiguration; } /** * {@inheritDoc} */ public void destroySynapseConfiguration() { if (synapseConfiguration != null) { synapseConfiguration.destroy(); synapseConfiguration = null; } } /** * Waits until it is safe to stop or the the specified end time has been reached. A delay * of <code>waitIntervalMillis</code> milliseconds is used between each subsequent check. * If the state "safeToStop" is reached before the specified <code>endTime</code>, * the return value is true. 
* * @param waitIntervalMillis the pause time (delay) in milliseconds between subsequent checks * @param endTime the time until which the checks need to finish successfully * * @return true, if a safe state is reached before the specified <code>endTime</code>, * otherwise false (forceful stop required) */ public boolean waitUntilSafeToStop(long waitIntervalMillis, long endTime) { boolean safeToStop = false; boolean forcefulStop = false; Axis2TransportHelper transportHelper = new Axis2TransportHelper(configurationContext); // wait until it is safe to shutdown (listeners and tasks are idle, no callbacks) while (!safeToStop && !forcefulStop) { int pendingListenerThreads = transportHelper.getPendingListenerThreadCount(); if (pendingListenerThreads > 0) { log.info(new StringBuilder("Waiting for: ").append(pendingListenerThreads) .append(" listener threads to complete").toString()); } int pendingSenderThreads = transportHelper.getPendingSenderThreadCount(); if (pendingSenderThreads > 0) { log.info(new StringBuilder("Waiting for: ").append(pendingSenderThreads) .append(" listener threads to complete").toString()); } int activeConnections = transportHelper.getActiveConnectionsCount(); if (activeConnections > 0) { log.info("Waiting for: " + activeConnections + " active connections to be closed.."); } int pendingTransportThreads = pendingListenerThreads + pendingSenderThreads; int pendingCallbacks = serverContextInformation.getCallbackCount(); if (pendingCallbacks > 0) { log.info("Waiting for: " + pendingCallbacks + " callbacks/replies.."); } int runningTasks = 0; SynapseTaskManager synapseTaskManager = synapseEnvironment.getTaskManager(); if (synapseTaskManager.isInitialized()) { runningTasks = synapseTaskManager.getTaskScheduler().getRunningTaskCount(); if (runningTasks > 0) { log.info("Waiting for : " + runningTasks + " tasks to complete.."); } } // it is safe to stop if all used listener threads, callbacks and tasks are zero safeToStop = ((pendingTransportThreads + 
pendingCallbacks + runningTasks) == 0); if (safeToStop) { log.info("All transport threads and tasks are idle and no pending callbacks.."); } else { if (System.currentTimeMillis() < endTime) { log.info(new StringBuilder("Waiting for a maximum of another ") .append((endTime - System.currentTimeMillis()) / 1000) .append(" seconds until transport threads and tasks become idle, ") .append("active connections to get closed,") .append(" and callbacks to be completed..").toString()); try { Thread.sleep(waitIntervalMillis); } catch (InterruptedException ignore) { // nothing to do here } } else { // maximum time to wait is over, do a forceful stop forcefulStop = true; } } } return !forcefulStop; } public Object getContext() { return configurationContext; } /** * Create a Axis2 Based Server Environment * * @param serverConfigurationInformation ServerConfigurationInformation instance */ private void createNewInstance(ServerConfigurationInformation serverConfigurationInformation) { try { configurationContext = ConfigurationContextFactory. createConfigurationContextFromFileSystem( serverConfigurationInformation.getAxis2RepoLocation(), serverConfigurationInformation.getAxis2Xml()); configurationContext.setProperty( AddressingConstants.ADDR_VALIDATE_ACTION, Boolean.FALSE); startJmxAdapter(); listenerManager = configurationContext.getListenerManager(); if (listenerManager == null) { // create and initialize the listener manager but do not start listenerManager = new ListenerManager(); listenerManager.init(configurationContext); } // do not use the listener manager shutdown hook, because it clashes with the // SynapseServer shutdown hook. listenerManager.setShutdownHookRequired(false); } catch (Throwable t) { handleFatal("Failed to create a new Axis2 instance...", t); } } /** * Adds Synapse Service to Axis2 configuration which enables the main message mediation. 
* * @throws AxisFault if an error occurs during Axis2 service initialization */ private void deploySynapseService() throws AxisFault { log.info("Deploying the Synapse service..."); // Dynamically initialize the Synapse Service and deploy it into Axis2 AxisConfiguration axisCfg = configurationContext.getAxisConfiguration(); AxisService synapseService = new AxisService(SynapseConstants.SYNAPSE_SERVICE_NAME); AxisOperation mediateOperation = new InOutAxisOperation( SynapseConstants.SYNAPSE_OPERATION_NAME); mediateOperation.setMessageReceiver(new SynapseMessageReceiver()); synapseService.addOperation(mediateOperation); List<String> transports = new ArrayList<String>(); transports.add(Constants.TRANSPORT_HTTP); transports.add(Constants.TRANSPORT_HTTPS); synapseService.setExposedTransports(transports); AxisServiceGroup synapseServiceGroup = new AxisServiceGroup(axisCfg); synapseServiceGroup.setServiceGroupName(SynapseConstants.SYNAPSE_SERVICE_NAME); synapseServiceGroup.addParameter(SynapseConstants.HIDDEN_SERVICE_PARAM, "true"); synapseServiceGroup.addService(synapseService); axisCfg.addServiceGroup(synapseServiceGroup); } /** * Removes the Synapse Service from the Axis2 configuration. * * @throws AxisFault if an error occurs during Axis2 service removal */ private void undeploySynapseService() throws AxisFault { log.info("Undeploying the Synapse service..."); configurationContext.getAxisConfiguration().removeService( SynapseConstants.SYNAPSE_SERVICE_NAME); } /** * Adds all Synapse proxy services to the Axis2 configuration. 
*/ private void deployProxyServices() { boolean failSafeProxyEnabled = SynapseConfigUtils.isFailSafeEnabled( SynapseConstants.FAIL_SAFE_MODE_PROXY_SERVICES); log.info("Deploying Proxy services..."); String thisServerName = serverConfigurationInformation.getServerName(); if (thisServerName == null || "".equals(thisServerName)) { thisServerName = serverConfigurationInformation.getHostName(); if (thisServerName == null || "".equals(thisServerName)) { thisServerName = "localhost"; } } for (ProxyService proxy : synapseConfiguration.getProxyServices()) { // start proxy service if either, pinned server name list is empty // or pinned server list has this server name List pinnedServers = proxy.getPinnedServers(); if (pinnedServers != null && !pinnedServers.isEmpty()) { if (!pinnedServers.contains(thisServerName)) { log.info("Server name not in pinned servers list." + " Not deploying Proxy service : " + proxy.getName()); continue; } } try { AxisService proxyService = proxy.buildAxisService(synapseConfiguration, configurationContext.getAxisConfiguration()); if (proxyService != null) { log.info("Deployed Proxy service : " + proxy.getName()); if (!proxy.isStartOnLoad()) { proxy.stop(synapseConfiguration); } } else { log.warn("The proxy service " + proxy.getName() + " will NOT be available"); } } catch (SynapseException e) { if (failSafeProxyEnabled) { log.warn("The proxy service " + proxy.getName() + " cannot be deployed - " + "Continue in Proxy Service fail-safe mode."); } else { handleException("The proxy service " + proxy.getName() + " : Deployment Error"); } } } } /** * Removes all Synapse proxy services from the Axis2 configuration. 
* * @throws AxisFault if an error occurs undeploying proxy services */ private void undeployProxyServices() throws AxisFault { log.info("Undeploying Proxy services..."); for (ProxyService proxy : synapseConfiguration.getProxyServices()) { configurationContext.getAxisConfiguration().removeService( proxy.getName()); } } /** * Deploys the mediators in the mediator extensions folder. */ private void deployMediatorExtensions() { log.info("Loading mediator extensions..."); AxisConfigurator configurator = configurationContext.getAxisConfiguration().getConfigurator(); if (configurator instanceof DeploymentEngine) { ((DeploymentEngine) configurator).getRepoListener().checkServices(); } else { log.warn("Unable to access the repository listener. Custom extensions will " + "not get loaded now!"); } } /** * Deploys all event sources. * * @throws AxisFault if an error occurs deploying the event sources. */ private void deployEventSources() throws AxisFault { log.info("Deploying EventSources..."); for (SynapseEventSource eventSource : synapseConfiguration.getEventSources()) { eventSource.buildService(configurationContext.getAxisConfiguration()); } } /** * Undeploys all event sources. * * @throws AxisFault if an error occurs undeploying the event sources. */ private void undeployEventSources() throws AxisFault { log.info("Undeploying EventSources..."); for (SynapseEventSource eventSource : synapseConfiguration.getEventSources()) { configurationContext.getAxisConfiguration().removeService(eventSource.getName()); } } /** * Initiating DataSourceRepositoryHolder with a new data source information repository or * reusing an existing repository. 
* * @param serverContextInformation ServerContextInformation instance */ private void initDataSourceHelper(ServerContextInformation serverContextInformation) { DataSourceRepositoryHolder repositoryHolder = DataSourceRepositoryHolder.getInstance(); Properties synapseProperties = SynapsePropertiesLoader.reloadSynapseProperties(); Object repo = serverContextInformation.getProperty( DataSourceConstants.DATA_SOURCE_INFORMATION_REPOSITORY); if (repo instanceof DataSourceInformationRepository) { repositoryHolder.init((DataSourceInformationRepository) repo, synapseProperties); } else { repositoryHolder.init(null, synapseProperties); } } /** * Initiating SharedSecretCallbackHandlerCache reusing an existing SecretCallbackHandler instance - * a SecretCallbackHandler passed when start synapse. * * @param information ServerContextInformation instance */ private void initSharedSecretCallbackHandlerCache(ServerContextInformation information) { SharedSecretCallbackHandlerCache cache = SharedSecretCallbackHandlerCache.getInstance(); Object handler = information.getProperty( SecurityConstants.PROP_SECRET_CALLBACK_HANDLER); if (handler instanceof SecretCallbackHandler) { cache.setSecretCallbackHandler((SecretCallbackHandler) handler); } } private synchronized void initEnterpriseBeanstalkHolder(ServerContextInformation serverContextInformation) { if (serverContextInformation.getProperty(EnterpriseBeanstalkConstants.BEANSTALK_MANAGER_PROP_NAME) == null) { EnterpriseBeanstalkManager beanstalkHolder = new EnterpriseBeanstalkManager(); Properties synapseProperties = SynapsePropertiesLoader.reloadSynapseProperties(); beanstalkHolder.init(synapseProperties); serverContextInformation.addProperty(EnterpriseBeanstalkConstants.BEANSTALK_MANAGER_PROP_NAME, beanstalkHolder); } } private void addDefaultBuildersAndFormatters(AxisConfiguration axisConf) { if (axisConf.getMessageBuilder("text/plain") == null) { axisConf.addMessageBuilder("text/plain", new PlainTextBuilder()); } if 
(axisConf.getMessageBuilder("application/octet-stream") == null) { axisConf.addMessageBuilder("application/octet-stream", new BinaryBuilder()); } } private void addServerIPAndHostEntries() { String hostName = serverConfigurationInformation.getHostName(); String ipAddress = serverConfigurationInformation.getIpAddress(); if (hostName != null && !"".equals(hostName)) { Entry entry = new Entry(SynapseConstants.SERVER_HOST); entry.setValue(hostName); synapseConfiguration.addEntry(SynapseConstants.SERVER_HOST, entry); } if (ipAddress != null && !"".equals(ipAddress)) { Entry entry = new Entry(SynapseConstants.SERVER_IP); entry.setValue(ipAddress); if (synapseConfiguration.getAxisConfiguration().getTransportsIn() != null) { Map<String, TransportInDescription> transportInConfigMap = synapseConfiguration.getAxisConfiguration().getTransportsIn(); if (transportInConfigMap != null) { TransportInDescription transportInDescription = transportInConfigMap.get("http"); if (transportInDescription != null) { Parameter bindAddressParam = transportInDescription.getParameter("bind-address"); if (bindAddressParam != null) { entry.setValue(bindAddressParam.getValue()); } } } } synapseConfiguration.addEntry(SynapseConstants.SERVER_IP, entry); } } private HandlerDescription prepareSynapseDispatcher() { HandlerDescription handlerMD = new HandlerDescription(SynapseDispatcher.NAME); // <order after="SOAPMessageBodyBasedDispatcher" phase="Dispatch"/> PhaseRule rule = new PhaseRule(PhaseMetadata.PHASE_DISPATCH); rule.setAfter(SOAPMessageBodyBasedDispatcher.NAME); handlerMD.setRules(rule); SynapseDispatcher synapseDispatcher = new SynapseDispatcher(); synapseDispatcher.initDispatcher(); handlerMD.setHandler(synapseDispatcher); return handlerMD; } private HandlerDescription prepareMustUnderstandHandler() { HandlerDescription handlerMD = new HandlerDescription(SynapseMustUnderstandHandler.NAME); // <order after="SynapseDispatcher" phase="Dispatch"/> PhaseRule rule = new 
PhaseRule(PhaseMetadata.PHASE_DISPATCH); rule.setAfter(SynapseDispatcher.NAME); handlerMD.setRules(rule); SynapseMustUnderstandHandler synapseMustUnderstandHandler = new SynapseMustUnderstandHandler(); synapseMustUnderstandHandler.init(handlerMD); handlerMD.setHandler(synapseMustUnderstandHandler); return handlerMD; } /** * Starts the JMX Adaptor. * * @throws SynapseException if the JMX configuration is erroneous and/or the connector server * cannot be started */ private void startJmxAdapter() { Properties synapseProperties = SynapsePropertiesLoader.loadSynapseProperties(); JmxInformation jmxInformation = JmxInformationFactory.createJmxInformation( synapseProperties, serverConfigurationInformation.getHostName()); // Start JMX Adapter only if at least a JMX JNDI port is configured if (jmxInformation.getJndiPort() != -1) { jmxAdapter = new JmxAdapter(jmxInformation); jmxAdapter.start(); } } /** * Stops the JMX Adaptor. */ private void stopJmxAdapter() { if (jmxAdapter != null) { jmxAdapter.stop(); } } private void handleFatal(String msg, Throwable e) { log.fatal(msg, e); throw new SynapseException(msg, e); } private void handleFatal(String msg) { log.fatal(msg); throw new SynapseException(msg); } private void handleException(String msg) { log.error(msg); throw new SynapseException(msg); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.repositories.gcs; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.AbstractInputStreamContent; import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpMethods; import com.google.api.client.http.HttpRequest; import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpResponseException; import com.google.api.client.http.LowLevelHttpRequest; import com.google.api.client.http.LowLevelHttpResponse; import com.google.api.client.http.MultipartContent; import com.google.api.client.json.JsonFactory; import com.google.api.client.testing.http.MockHttpTransport; import com.google.api.client.testing.http.MockLowLevelHttpRequest; import com.google.api.client.testing.http.MockLowLevelHttpResponse; import com.google.api.services.storage.Storage; import com.google.api.services.storage.model.Bucket; import com.google.api.services.storage.model.StorageObject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.rest.RestStatus; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; 
import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; import java.util.ArrayList; import java.util.concurrent.ConcurrentMap; import static org.mockito.Mockito.mock; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs * in a given concurrent map. */ class MockStorage extends Storage { /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */ private static final String DELETION_HEADER = "x-blob-to-delete"; private final String bucketName; private final ConcurrentMap<String, byte[]> blobs; MockStorage(final String bucket, final ConcurrentMap<String, byte[]> blobs) { super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class)); this.bucketName = bucket; this.blobs = blobs; } @Override public Buckets buckets() { return new MockBuckets(); } @Override public Objects objects() { return new MockObjects(); } class MockBuckets extends Buckets { @Override public Get get(String getBucket) { return new Get(getBucket) { @Override public Bucket execute() { if (bucketName.equals(getBucket())) { Bucket bucket = new Bucket(); bucket.setId(bucketName); return bucket; } else { return null; } } }; } } class MockObjects extends Objects { @Override public Get get(String getBucket, String getObject) { return new Get(getBucket, getObject) { @Override public StorageObject execute() throws IOException { if (bucketName.equals(getBucket()) == false) { throw newBucketNotFoundException(getBucket()); } if (blobs.containsKey(getObject()) == false) { throw newObjectNotFoundException(getObject()); } StorageObject storageObject = new StorageObject(); storageObject.setId(getObject()); return storageObject; } @Override public InputStream executeMediaAsInputStream() throws IOException { if (bucketName.equals(getBucket()) == false) { throw newBucketNotFoundException(getBucket()); } if (blobs.containsKey(getObject()) == false) { throw 
newObjectNotFoundException(getObject()); } return new ByteArrayInputStream(blobs.get(getObject())); } }; } @Override public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) { return new Insert(insertBucket, insertObject) { @Override public StorageObject execute() throws IOException { if (bucketName.equals(getBucket()) == false) { throw newBucketNotFoundException(getBucket()); } ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(insertStream.getInputStream(), out); blobs.put(getName(), out.toByteArray()); return null; } }; } @Override public List list(String listBucket) { return new List(listBucket) { @Override public com.google.api.services.storage.model.Objects execute() throws IOException { if (bucketName.equals(getBucket()) == false) { throw newBucketNotFoundException(getBucket()); } final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); final java.util.List<StorageObject> storageObjects = new ArrayList<>(); for (Entry<String, byte[]> blob : blobs.entrySet()) { if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) { StorageObject storageObject = new StorageObject(); storageObject.setId(blob.getKey()); storageObject.setName(blob.getKey()); storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); storageObjects.add(storageObject); } } objects.setItems(storageObjects); return objects; } }; } @Override public Delete delete(String deleteBucket, String deleteObject) { return new Delete(deleteBucket, deleteObject) { @Override public Void execute() throws IOException { if (bucketName.equals(getBucket()) == false) { throw newBucketNotFoundException(getBucket()); } if (blobs.containsKey(getObject()) == false) { throw newObjectNotFoundException(getObject()); } blobs.remove(getObject()); return null; } @Override public HttpRequest buildHttpRequest() throws IOException { HttpRequest httpRequest = 
super.buildHttpRequest(); httpRequest.getHeaders().put(DELETION_HEADER, getObject()); return httpRequest; } }; } @Override public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) { return new Copy(srcBucket, srcObject, destBucket, destObject, content) { @Override public StorageObject execute() throws IOException { if (bucketName.equals(getSourceBucket()) == false) { throw newBucketNotFoundException(getSourceBucket()); } if (bucketName.equals(getDestinationBucket()) == false) { throw newBucketNotFoundException(getDestinationBucket()); } final byte[] bytes = blobs.get(getSourceObject()); if (bytes == null) { throw newObjectNotFoundException(getSourceObject()); } blobs.put(getDestinationObject(), bytes); StorageObject storageObject = new StorageObject(); storageObject.setId(getDestinationObject()); return storageObject; } }; } } private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); return new GoogleJsonResponseException(builder, new GoogleJsonError()); } private static GoogleJsonResponseException newObjectNotFoundException(final String object) { HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); return new GoogleJsonResponseException(builder, new GoogleJsonError()); } /** * {@link MockedHttpTransport} extends the existing testing transport to analyze the content * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriates * blobs. We use this because {@link Storage#batch()} is final and there is no other way to * extend batch requests for testing purposes. 
*/ static class MockedHttpTransport extends MockHttpTransport { private final ConcurrentMap<String, byte[]> blobs; MockedHttpTransport(final ConcurrentMap<String, byte[]> blobs) { this.blobs = blobs; } @Override public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { // We analyze the content of the Batch request to detect our custom HTTP header, // and extract from it the name of the blob to delete. Then we reply a simple // batch response so that the client parser is happy. // // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the // format of the batch request body. if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) { return new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { final String contentType = new MultipartContent().getType(); final StringBuilder builder = new StringBuilder(); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { getStreamingContent().writeTo(out); Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { if (line != null && line.startsWith(DELETION_HEADER)) { builder.append("--__END_OF_PART__\r\n"); builder.append("Content-Type: application/http").append("\r\n"); builder.append("\r\n"); builder.append("HTTP/1.1 "); final String blobName = line.substring(line.indexOf(':') + 1).trim(); if (blobs.containsKey(blobName)) { builder.append(RestStatus.OK.getStatus()); blobs.remove(blobName); } else { builder.append(RestStatus.NOT_FOUND.getStatus()); } builder.append("\r\n"); builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n"); builder.append("Content-Length: 0").append("\r\n"); builder.append("\r\n"); } }); builder.append("\r\n"); builder.append("--__END_OF_PART__--"); } MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); response.setStatusCode(200); response.setContent(builder.toString()); response.setContentType(contentType); return response; } 
}; } else { return super.buildRequest(method, url); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.index.lucene; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Set; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.index.IndexEditor; import org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter; import org.apache.jackrabbit.oak.plugins.index.search.Aggregate; import org.apache.jackrabbit.oak.plugins.index.search.Aggregate.Matcher; import org.apache.jackrabbit.oak.plugins.index.search.IndexDefinition; import org.apache.jackrabbit.oak.plugins.index.search.PropertyDefinition; import org.apache.jackrabbit.oak.plugins.index.search.PropertyUpdateCallback; import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState; import org.apache.jackrabbit.oak.spi.commit.Editor; import org.apache.jackrabbit.oak.spi.filter.PathFilter; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.lucene.document.Document; 
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.jackrabbit.oak.commons.PathUtils.concat;

/**
 * {@link IndexEditor} implementation that is responsible for keeping the
 * {@link LuceneIndex} up to date
 *
 * @see LuceneIndex
 */
public class LuceneIndexEditor implements IndexEditor, Aggregate.AggregateRoot {

    private static final Logger log =
            LoggerFactory.getLogger(LuceneIndexEditor.class);

    // Shared per-commit context (writer, definition, counters); identical for
    // every editor in one traversal — child editors copy it from the parent.
    private final LuceneIndexEditorContext context;

    /** Name of this node, or {@code null} for the root node. */
    private final String name;

    /** Parent editor or {@code null} if this is the root editor. */
    private final LuceneIndexEditor parent;

    /** Path of this editor, built lazily in {@link #getPath()}. */
    private String path;

    // Set when a property relevant to the index changed on this node; checked
    // in leave() to decide whether the node must be (re)indexed.
    private boolean propertiesChanged = false;

    // Before-states of changed/deleted properties, passed to the document maker.
    private List<PropertyState> propertiesModified = Lists.newArrayList();

    /**
     * Flag indicating if the current tree being traversed has a deleted parent.
     */
    private final boolean isDeleted;

    // Indexing rule applicable to this node; null means "do not index this node".
    private IndexDefinition.IndexingRule indexingRule;

    // Aggregate matchers rooted at this node (created in enter()).
    private List<Matcher> currentMatchers = Collections.emptyList();

    // Matchers inherited from ancestors plus bookkeeping of which were affected.
    private final MatcherState matcherState;

    // Result of the index's path filter for this node's path (INCLUDE/TRAVERSE/EXCLUDE).
    private final PathFilter.Result pathFilterResult;

    /**
     * Creates the root editor for a traversal.
     */
    LuceneIndexEditor(LuceneIndexEditorContext context) throws CommitFailedException {
        this.parent = null;
        this.name = null;
        this.path = "/";
        this.context = context;
        this.isDeleted = false;
        this.matcherState = MatcherState.NONE;
        this.pathFilterResult = context.getDefinition().getPathFilter().filter(PathUtils.ROOT_PATH);
    }

    /**
     * Creates a child editor; the path is left null and computed lazily from
     * the parent in {@link #getPath()}.
     */
    private LuceneIndexEditor(LuceneIndexEditor parent, String name,
                              MatcherState matcherState,
                              PathFilter.Result pathFilterResult,
                              boolean isDeleted) {
        this.parent = parent;
        this.name = name;
        this.path = null;
        this.context = parent.context;
        this.isDeleted = isDeleted;
        this.matcherState = matcherState;
        this.pathFilterResult = pathFilterResult;
    }

    /** Returns the absolute path of this node, building it lazily from the parent chain. */
    public String getPath() {
        if (path == null) { // => parent != null
            path = concat(parent.getPath(), name);
        }
        return path;
    }

    @Override
    public void enter(NodeState before, NodeState after)
            throws CommitFailedException {
        // MISSING before-state at the root means the whole index is being (re)built.
        if (EmptyNodeState.MISSING_NODE == before && parent == null){
            context.enableReindexMode();
        }

        //Only check for indexing if the result is include.
        //In case like TRAVERSE nothing needs to be indexed for those
        //path
        if (pathFilterResult == PathFilter.Result.INCLUDE) {
            //For traversal in deleted sub tree before state has to be used
            NodeState current = after.exists() ? after : before;
            indexingRule = getDefinition().getApplicableIndexingRule(current);

            if (indexingRule != null) {
                currentMatchers = indexingRule.getAggregate().createMatchers(this);
            }
        }
    }

    @Override
    public void leave(NodeState before, NodeState after)
            throws CommitFailedException {
        // !before.exists() == newly added node: always index it.
        if (propertiesChanged || !before.exists()) {
            String path = getPath();
            if (addOrUpdate(path, after, before.exists())) {
                long indexed = context.incIndexedNodes();
                if (indexed % 1000 == 0) {
                    log.debug("[{}] => Indexed {} nodes...", getIndexName(), indexed);
                }
            }
        }

        // Propagate aggregate-relevant changes up to the matcher roots.
        for (Matcher m : matcherState.affectedMatchers){
            m.markRootDirty();
        }

        // Root editor leaves last: finish callbacks and flush/close the writer.
        if (parent == null) {
            PropertyUpdateCallback callback = context.getPropertyUpdateCallback();
            if (callback != null) {
                callback.done();
            }

            try {
                context.closeWriter();
            } catch (IOException e) {
                CommitFailedException ce = new CommitFailedException("Lucene", 4,
                        "Failed to close the Lucene index "
                                + context.getIndexingContext().getIndexPath(), e);
                context.getIndexingContext().indexUpdateFailed(ce);
                throw ce;
            }
            if (context.getIndexedNodes() > 0) {
                log.debug("[{}] => Indexed {} nodes, done.", getIndexName(), context.getIndexedNodes());
            }
        }
    }

    @Override
    public void propertyAdded(PropertyState after) {
        markPropertyChanged(after.getName());
        checkAggregates(after.getName());
        propertyUpdated(null, after);
    }

    @Override
    public void propertyChanged(PropertyState before, PropertyState after) {
        markPropertyChanged(before.getName());
        propertiesModified.add(before);
        checkAggregates(before.getName());
        propertyUpdated(before, after);
    }

    @Override
    public void propertyDeleted(PropertyState before) {
        markPropertyChanged(before.getName());
        propertiesModified.add(before);
        checkAggregates(before.getName());
        propertyUpdated(before, null);
    }

    @Override
    public Editor childNodeAdded(String name, NodeState after) {
        PathFilter.Result filterResult = getPathFilterResult(name);
        if (filterResult != PathFilter.Result.EXCLUDE) {
            return new LuceneIndexEditor(this, name, getMatcherState(name, after), filterResult, false);
        }
        return null;
    }

    @Override
    public Editor childNodeChanged(
            String name, NodeState before, NodeState after) {
        PathFilter.Result filterResult = getPathFilterResult(name);
        if (filterResult != PathFilter.Result.EXCLUDE) {
            return new LuceneIndexEditor(this, name, getMatcherState(name, after), filterResult, false);
        }
        return null;
    }

    @Override
    public Editor childNodeDeleted(String name, NodeState before)
            throws CommitFailedException {
        PathFilter.Result filterResult = getPathFilterResult(name);
        if (filterResult == PathFilter.Result.EXCLUDE) {
            return null;
        }

        if (!isDeleted) {
            // tree deletion is handled on the parent node
            String path = concat(getPath(), name);
            try {
                LuceneIndexWriter writer = context.getWriter();
                // Remove all index entries in the removed subtree
                writer.deleteDocuments(path);
                this.context.indexUpdate();
            } catch (IOException e) {
                // NOTE(review): message lacks a space before "for index".
                CommitFailedException ce = new CommitFailedException("Lucene", 5,
                        "Failed to remove the index entries of"
                                + " the removed subtree " + path + "for index " + context.getIndexingContext().getIndexPath(), e);
                context.getIndexingContext().indexUpdateFailed(ce);
                throw ce;
            }
        }

        // Still descend if inherited aggregate matchers care about this subtree.
        MatcherState ms = getMatcherState(name, before);
        if (!ms.isEmpty()){
            return new LuceneIndexEditor(this, name, ms, filterResult, true);
        }
        return null; // no need to recurse down the removed subtree
    }

    LuceneIndexEditorContext getContext() {
        return context;
    }

    /**
     * Builds the Lucene document for {@code path} and hands it to the writer.
     *
     * @return true if a document was actually written/updated
     * @throws CommitFailedException on writer I/O failure (code Lucene/3)
     */
    private boolean addOrUpdate(String path, NodeState state, boolean isUpdate)
            throws CommitFailedException {
        try {
            Document d = makeDocument(path, state, isUpdate);
            if (d != null) {
                if (log.isTraceEnabled()) {
                    log.trace("[{}] Indexed document for {} is {}", getIndexName(), path, d);
                }
                context.indexUpdate();
                context.getWriter().updateDocument(path, d);
                return true;
            }
        } catch (IOException e) {
            CommitFailedException ce = new CommitFailedException("Lucene", 3,
                    "Failed to index the node " + path, e);
            context.getIndexingContext().indexUpdateFailed(ce);
            throw ce;
        } catch (IllegalArgumentException ie) {
            // An invalid property value should not fail the whole commit.
            log.warn("Failed to index the node [{}]", path, ie);
        }
        return false;
    }

    /** Returns the Lucene document for the node, or null when the node is not indexable. */
    private Document makeDocument(String path, NodeState state, boolean isUpdate) throws IOException {
        if (!isIndexable()) {
            return null;
        }
        return context.newDocumentMaker(indexingRule, path).makeDocument(state, isUpdate, propertiesModified);
    }

    //~-------------------------------------------------------< Aggregate >

    @Override
    public void markDirty() {
        propertiesChanged = true;
    }

    /**
     * Advances the inherited and current matchers with the child {@code name}
     * and collapses the results into the child's MatcherState.
     */
    private MatcherState getMatcherState(String name, NodeState after) {
        List<Matcher> matched = Lists.newArrayList();
        List<Matcher> inherited = Lists.newArrayList();
        for (Matcher m : Iterables.concat(matcherState.inherited, currentMatchers)) {
            Matcher result = m.match(name, after);
            if (result.getStatus() == Matcher.Status.MATCH_FOUND){
                matched.add(result);
            }

            if (result.getStatus() != Matcher.Status.FAIL){
                inherited.addAll(result.nextSet());
            }
        }

        if (!matched.isEmpty() || !inherited.isEmpty()) {
            return new MatcherState(matched, inherited);
        }
        return MatcherState.NONE;
    }

    /**
     * Determines which all matchers are affected by this property change
     *
     * @param name modified property name
     */
    private void checkAggregates(String name) {
        for (Matcher m : matcherState.matched) {
            if (!matcherState.affectedMatchers.contains(m)
                    && m.aggregatesProperty(name)) {
                matcherState.affectedMatchers.add(m);
            }
        }
    }

    /** Immutable snapshot of aggregate matchers relevant to one editor. */
    private static class MatcherState {
        final static MatcherState NONE = new MatcherState(Collections.<Matcher>emptyList(),
                Collections.<Matcher>emptyList());

        final List<Matcher> matched;
        final List<Matcher> inherited;
        // Identity set — matchers are compared by instance, not equals().
        final Set<Matcher> affectedMatchers;

        public MatcherState(List<Matcher> matched,
                            List<Matcher> inherited){
            this.matched = matched;
            this.inherited = inherited;

            //Affected matches would only be used when there are
            //some matched matchers
            if (matched.isEmpty()){
                affectedMatchers = Collections.emptySet();
            } else {
                affectedMatchers = Sets.newIdentityHashSet();
            }
        }

        public boolean isEmpty() {
            return matched.isEmpty() && inherited.isEmpty();
        }
    }

    /** Flags this node for reindexing when an indexed property changed. */
    private void markPropertyChanged(String name) {
        if (isIndexable()
                && !propertiesChanged
                && indexingRule.isIndexed(name)) {
            propertiesChanged = true;
        }
    }

    /**
     * Notifies the optional PropertyUpdateCallback of a direct property change
     * on this node and of changes visible through matched aggregate includes.
     */
    private void propertyUpdated(PropertyState before, PropertyState after) {
        PropertyUpdateCallback callback = context.getPropertyUpdateCallback();

        //Avoid further work if no callback is present
        if (callback == null) {
            return;
        }

        String propertyName = before != null ? before.getName() : after.getName();

        if (isIndexable()) {
            PropertyDefinition pd = indexingRule.getConfig(propertyName);
            if (pd != null) {
                callback.propertyUpdated(getPath(), propertyName, pd, before, after);
            }
        }

        for (Matcher m : matcherState.matched) {
            if (m.aggregatesProperty(propertyName)) {
                Aggregate.Include i = m.getCurrentInclude();
                if (i instanceof Aggregate.PropertyInclude) {
                    PropertyDefinition pd = ((Aggregate.PropertyInclude) i).getPropertyDefinition();
                    String propertyRelativePath = PathUtils.concat(m.getMatchedPath(), propertyName);

                    callback.propertyUpdated(m.getRootPath(), propertyRelativePath, pd, before, after);
                }
            }
        }
    }

    private IndexDefinition getDefinition() {
        return context.getDefinition();
    }

    private boolean isIndexable(){
        return indexingRule != null;
    }

    private PathFilter.Result getPathFilterResult(String childNodeName) {
        return context.getDefinition().getPathFilter().filter(concat(getPath(), childNodeName));
    }

    private String getIndexName() {
        return context.getDefinition().getIndexName();
    }
}
/* * Copyright 2012 - 2013, Michael Schorn (me@mschorn.net). All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 */

package net.mschorn.sandbox.lwjgl.deferred.samples.shading;

import static org.lwjgl.opengl.GL11.GL_BLEND;
import static org.lwjgl.opengl.GL11.GL_COLOR_BUFFER_BIT;
import static org.lwjgl.opengl.GL11.GL_DEPTH_BUFFER_BIT;
import static org.lwjgl.opengl.GL11.GL_LINEAR;
import static org.lwjgl.opengl.GL11.GL_ONE;
import static org.lwjgl.opengl.GL11.GL_REPEAT;
import static org.lwjgl.opengl.GL11.GL_TEXTURE_MAG_FILTER;
import static org.lwjgl.opengl.GL11.GL_TEXTURE_MIN_FILTER;
import static org.lwjgl.opengl.GL11.GL_TEXTURE_WRAP_S;
import static org.lwjgl.opengl.GL11.GL_TEXTURE_WRAP_T;
import static org.lwjgl.opengl.GL11.glBlendFunc;
import static org.lwjgl.opengl.GL11.glClear;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glEnable;
import static org.lwjgl.opengl.GL15.GL_STATIC_DRAW;
import static org.lwjgl.opengl.GL20.GL_FRAGMENT_SHADER;
import static org.lwjgl.opengl.GL20.GL_VERTEX_SHADER;

import net.mschorn.sandbox.lwjgl.tools.buffer.IBO;
import net.mschorn.sandbox.lwjgl.tools.buffer.VBO;
import net.mschorn.sandbox.lwjgl.tools.geometry.Attribute;
import net.mschorn.sandbox.lwjgl.tools.geometry.Geometry;
import net.mschorn.sandbox.lwjgl.tools.geometry.Quad;
import net.mschorn.sandbox.lwjgl.tools.light.Light;
import net.mschorn.sandbox.lwjgl.tools.shader.Program;
import net.mschorn.sandbox.lwjgl.tools.shader.Shader;
import net.mschorn.sandbox.lwjgl.tools.state.VAO;
import net.mschorn.sandbox.lwjgl.tools.texture.Sampler;
import net.mschorn.sandbox.lwjgl.tools.texture.Texture;

/**
 * Lighting pass of a deferred renderer: draws one fullscreen quad per light,
 * sampling the four G-buffer textures (position, normal, diffuse, specular)
 * and additively blending each light's contribution into the framebuffer.
 */
public final class LightPass {

    // Classpath resources for the light-pass shader pair.
    private static final String LIGHT_VS = "light.vs.glsl";
    private static final String LIGHT_FS = "light.fs.glsl";

    // Uniform-buffer binding point shared by both lights; the active light is
    // rebound to it just before each fullscreen-quad draw in glDisplay().
    private static final int LIGHT_BINDING = 1;

    // One sampler object serving texture units 0-3 (the four G-buffer inputs).
    private final Sampler sampler;

    // G-buffer textures produced by the geometry pass (owned by the caller).
    private final Texture vBuffer;
    private final Texture vnBuffer;
    private final Texture diffuseBuffer;
    private final Texture specularBuffer;

    private final Shader vs;
    private final Shader fs;
    private final Program p;

    // Fullscreen quad geometry.
    private final IBO ibo;
    private final VBO vbo;
    private final VAO vao;

    private final Light[] lights;

    /**
     * Builds the light pass; GL objects are only described here and created
     * later in {@link #glInit()} on the GL thread.
     *
     * @param vBuffer        G-buffer position texture
     * @param vnBuffer       G-buffer normal texture
     * @param diffuseBuffer  G-buffer diffuse texture
     * @param specularBuffer G-buffer specular texture
     */
    public LightPass(final Texture vBuffer, final Texture vnBuffer, final Texture diffuseBuffer, final Texture specularBuffer) {

        this.vBuffer = vBuffer;
        this.vnBuffer = vnBuffer;
        this.diffuseBuffer = diffuseBuffer;
        this.specularBuffer = specularBuffer;

        sampler = createSampler();
        lights = createLights();

        vs = new Shader(GL_VERTEX_SHADER, getClass().getResourceAsStream(LIGHT_VS));
        fs = new Shader(GL_FRAGMENT_SHADER, getClass().getResourceAsStream(LIGHT_FS));
        p = new Program(vs, fs);

        final Geometry geometry = new Quad();

        ibo = new IBO(geometry.getIndices(), GL_STATIC_DRAW);
        vbo = new VBO(geometry.getAttributes(), GL_STATIC_DRAW);

        vao = new VAO(geometry.getMode().getGlMode(), geometry.getIndices().size(), ibo, vbo);
        vao.addVertexAttribute(0, geometry.getAttributeDescriptor(Attribute.V));
        vao.addVertexAttribute(1, geometry.getAttributeDescriptor(Attribute.VT));
        vao.addVertexAttribute(2, geometry.getAttributeDescriptor(Attribute.VN));

    }

    /** Creates the shared linear/repeat sampler for texture units 0-3. */
    private Sampler createSampler() {

        final Sampler sampler = new Sampler(0, 1, 2, 3);
        sampler.addParameter(GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        sampler.addParameter(GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        sampler.addParameter(GL_TEXTURE_WRAP_S, GL_REPEAT);
        sampler.addParameter(GL_TEXTURE_WRAP_T, GL_REPEAT);

        return sampler;

    }

    /** Creates the two scene lights; both target the same uniform binding point. */
    private Light[] createLights() {

        final Light[] lights = new Light[2];

        lights[0] = new Light(LIGHT_BINDING);
        lights[0].setAmbient(0.05f, 0.05f, 0.05f);
        lights[0].setDiffuse(0.6f, 0.5f, 0.5f);
        lights[0].setSpecular(0.6f, 0.3f, 0.3f);
        lights[0].setPosition(10, 0, 5);

        lights[1] = new Light(LIGHT_BINDING);
        lights[1].setAmbient(0.05f, 0.05f, 0.05f);
        lights[1].setDiffuse(0.5f, 0.5f, 0.6f);
        lights[1].setSpecular(0.3f, 0.3f, 0.6f);
        lights[1].setPosition(5, -20, 5);

        return lights;

    }

    /**
     * Creates all GL objects. Must be called on the GL thread before the first
     * {@link #glDisplay()}. The shader objects are disposed right after linking
     * since only the linked program is needed afterwards.
     */
    public void glInit() {

        sampler.glInit();

        vs.glInit();
        fs.glInit();
        p.glInit();
        // Detach/delete the shader objects now that the program is linked.
        fs.glDispose();
        vs.glDispose();

        ibo.glInit();
        vbo.glInit();
        vao.glInit();

        for (final Light light : lights)
            light.glInit();

    }

    /**
     * Renders one additively-blended fullscreen quad per light. Blending with
     * GL_ONE/GL_ONE accumulates light contributions; the depth buffer is
     * cleared before each light's draw. Blending is disabled again on exit.
     */
    public void glDisplay() {

        glClear(GL_COLOR_BUFFER_BIT);

        glEnable(GL_BLEND);
        glBlendFunc(GL_ONE, GL_ONE);

        p.glBind();
        sampler.glBind();

        vBuffer.glBind();
        vnBuffer.glBind();
        diffuseBuffer.glBind();
        specularBuffer.glBind();

        for (final Light light : lights) {

            glClear(GL_DEPTH_BUFFER_BIT);
            light.glBind();
            vao.glDraw();

        }

        glDisable(GL_BLEND);

    }

    /** Releases every GL object this pass created (not the G-buffer textures). */
    public void glDispose() {

        sampler.glDispose();

        p.glDispose();

        ibo.glDispose();
        vbo.glDispose();
        vao.glDispose();

        for (final Light light : lights)
            light.glDispose();

    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.spi; import java.util.Map; /** * Configuration use by {@link org.apache.camel.spi.RestConsumerFactory} and {@link org.apache.camel.spi.RestApiConsumerFactory} * for Camel components to support the Camel {@link org.apache.camel.model.rest.RestDefinition rest} DSL. 
*/ public class RestConfiguration { public static final String CORS_ACCESS_CONTROL_ALLOW_ORIGIN = "*"; public static final String CORS_ACCESS_CONTROL_ALLOW_METHODS = "GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS, CONNECT, PATCH"; public static final String CORS_ACCESS_CONTROL_MAX_AGE = "3600"; public static final String CORS_ACCESS_CONTROL_ALLOW_HEADERS = "Origin, Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers"; public enum RestBindingMode { auto, off, json, xml, json_xml } public enum RestHostNameResolver { allLocalIp, localIp, localHostName } private String component; private String apiComponent; private String producerComponent; private String producerApiDoc; private String scheme; private String host; private String apiHost; private int port; private String contextPath; private String apiContextPath; private String apiContextRouteId; private String apiContextIdPattern; private boolean apiContextListing; private boolean apiVendorExtension; private RestHostNameResolver hostNameResolver = RestHostNameResolver.allLocalIp; private RestBindingMode bindingMode = RestBindingMode.off; private boolean skipBindingOnErrorCode = true; private boolean enableCORS; private String jsonDataFormat; private String xmlDataFormat; private Map<String, Object> componentProperties; private Map<String, Object> endpointProperties; private Map<String, Object> consumerProperties; private Map<String, Object> dataFormatProperties; private Map<String, Object> apiProperties; private Map<String, String> corsHeaders; /** * Gets the name of the Camel component to use as the REST consumer * * @return the component name, or <tt>null</tt> to let Camel search the {@link Registry} to find suitable implementation */ public String getComponent() { return component; } /** * Sets the name of the Camel component to use as the REST consumer * * @param componentName the name of the component (such as restlet, spark-rest, etc.) 
*/
    public void setComponent(String componentName) {
        this.component = componentName;
    }

    /**
     * Gets the name of the Camel component to use as the REST API (such as swagger)
     *
     * @return the component name, or <tt>null</tt> to let Camel use the default name <tt>swagger</tt>
     */
    public String getApiComponent() {
        return apiComponent;
    }

    /**
     * Sets the name of the Camel component to use as the REST API (such as swagger)
     *
     * @param apiComponent the name of the component (such as swagger)
     */
    public void setApiComponent(String apiComponent) {
        this.apiComponent = apiComponent;
    }

    /**
     * Gets the name of the Camel component to use as the REST producer
     *
     * @return the component name, or <tt>null</tt> to let Camel search the {@link Registry} to find suitable implementation
     */
    public String getProducerComponent() {
        return producerComponent;
    }

    /**
     * Sets the name of the Camel component to use as the REST producer
     *
     * @param componentName the name of the component (such as restlet, jetty, etc.)
     */
    public void setProducerComponent(String componentName) {
        this.producerComponent = componentName;
    }

    /**
     * Gets the location of the api document (swagger api) the REST producer will use
     * to validate the REST uri and query parameters are valid accordingly to the api document.
     */
    public String getProducerApiDoc() {
        return producerApiDoc;
    }

    /**
     * Sets the location of the api document (swagger api) the REST producer will use
     * to validate the REST uri and query parameters are valid accordingly to the api document.
     * This requires adding camel-swagger-java to the classpath, and any misconfiguration
     * will let Camel fail on startup and report the error(s).
     * <p/>
     * The location of the api document is loaded from classpath by default, but you can use
     * <tt>file:</tt> or <tt>http:</tt> to refer to resources to load from file or http url.
     */
    public void setProducerApiDoc(String producerApiDoc) {
        this.producerApiDoc = producerApiDoc;
    }

    /**
     * Gets the hostname to use by the REST consumer
     *
     * @return the hostname, or <tt>null</tt> to use default hostname
     */
    public String getHost() {
        return host;
    }

    /**
     * Sets the hostname to use by the REST consumer
     *
     * @param host the hostname
     */
    public void setHost(String host) {
        this.host = host;
    }

    /**
     * Gets the specific hostname to use for the API documentation (eg swagger), if any.
     *
     * @return the API hostname, or <tt>null</tt> when the generated host is not overridden
     */
    public String getApiHost() {
        return apiHost;
    }

    /**
     * To use a specific hostname for the API documentation (eg swagger)
     * <p/>
     * This can be used to override the generated host with this configured hostname
     *
     * @param apiHost the hostname to use for the API documentation
     */
    public void setApiHost(String apiHost) {
        this.apiHost = apiHost;
    }

    /**
     * Gets the scheme to use by the REST consumer
     *
     * @return the scheme, or <tt>null</tt> to use default scheme
     */
    public String getScheme() {
        return scheme;
    }

    /**
     * Sets the scheme to use by the REST consumer
     *
     * @param scheme the scheme
     */
    public void setScheme(String scheme) {
        this.scheme = scheme;
    }

    /**
     * Gets the port to use by the REST consumer
     *
     * @return the port, or <tt>0</tt> or <tt>-1</tt> to use default port
     */
    public int getPort() {
        return port;
    }

    /**
     * Sets the port to use by the REST consumer
     *
     * @param port the port number
     */
    public void setPort(int port) {
        this.port = port;
    }

    /**
     * Gets the configured context-path
     *
     * @return the context path, or <tt>null</tt> if none configured.
     */
    public String getContextPath() {
        return contextPath;
    }

    /**
     * Sets a leading context-path the REST services will be using.
     * <p/>
     * This can be used when using components such as <tt>camel-servlet</tt> where the deployed web application
     * is deployed using a context-path. Or for components such as <tt>camel-jetty</tt> or <tt>camel-netty4-http</tt>
     * that includes a HTTP server.
     *
     * @param contextPath the context path
     */
    public void setContextPath(String contextPath) {
        this.contextPath = contextPath;
    }

    /**
     * Gets the configured API context-path.
     *
     * @return the API context path, or <tt>null</tt> if none configured.
     */
    public String getApiContextPath() {
        return apiContextPath;
    }

    /**
     * Sets a leading API context-path the REST API services will be using.
     * <p/>
     * This can be used when using components such as <tt>camel-servlet</tt> where the deployed web application
     * is deployed using a context-path.
     *
     * @param contextPath the API context path
     */
    public void setApiContextPath(String contextPath) {
        this.apiContextPath = contextPath;
    }

    /**
     * Gets the route id used for the route that services the REST API.
     *
     * @return the route id, or <tt>null</tt> when an auto assigned route id is used
     */
    public String getApiContextRouteId() {
        return apiContextRouteId;
    }

    /**
     * Sets the route id to use for the route that services the REST API.
     * <p/>
     * The route will by default use an auto assigned route id.
     *
     * @param apiContextRouteId the route id
     */
    public void setApiContextRouteId(String apiContextRouteId) {
        this.apiContextRouteId = apiContextRouteId;
    }

    /**
     * Gets the optional CamelContext id pattern used to filter which CamelContext's may expose their Rest APIs.
     *
     * @return the pattern, or <tt>null</tt> when no filtering is applied
     */
    public String getApiContextIdPattern() {
        return apiContextIdPattern;
    }

    /**
     * Optional CamelContext id pattern to only allow Rest APIs from rest services within CamelContext's which name matches the pattern.
     * <p/>
     * The pattern <tt>#name#</tt> refers to the CamelContext name, to match on the current CamelContext only.
     * For any other value, the pattern uses the rules from {@link org.apache.camel.util.EndpointHelper#matchPattern(String, String)}
     *
     * @param apiContextIdPattern the pattern
     */
    public void setApiContextIdPattern(String apiContextIdPattern) {
        this.apiContextIdPattern = apiContextIdPattern;
    }

    /**
     * Whether listing of all available CamelContext's with REST services in the JVM is enabled.
     */
    public boolean isApiContextListing() {
        return apiContextListing;
    }

    /**
     * Sets whether listing of all available CamelContext's with REST services in the JVM is enabled. If enabled it allows to discover
     * these contexts, if <tt>false</tt> then only the current CamelContext is in use.
*/ public void setApiContextListing(boolean apiContextListing) { this.apiContextListing = apiContextListing; } public boolean isApiVendorExtension() { return apiVendorExtension; } /** * Whether vendor extension is enabled in the Rest APIs. If enabled then Camel will include additional information * as vendor extension (eg keys starting with x-) such as route ids, class names etc. * Not all 3rd party API gateways and tools supports vendor-extensions when importing your API docs. */ public void setApiVendorExtension(boolean apiVendorExtension) { this.apiVendorExtension = apiVendorExtension; } /** * Gets the resolver to use for resolving hostname * * @return the resolver * @deprecated use getHostNameResolver */ @Deprecated public RestHostNameResolver getRestHostNameResolver() { return getHostNameResolver(); } /** * Sets the resolver to use for resolving hostname * * @param restHostNameResolver the resolver * @deprecated use setHostNameResolver */ @Deprecated public void setRestHostNameResolver(RestHostNameResolver restHostNameResolver) { setHostNameResolver(restHostNameResolver); } /** * Sets the resolver to use for resolving hostname * * @param restHostNameResolver the resolver * @deprecated use setHostNameResolver */ @Deprecated public void setRestHostNameResolver(String restHostNameResolver) { settHostNameResolver(restHostNameResolver); } /** * Gets the resolver to use for resolving hostname * * @return the resolver */ public RestHostNameResolver getHostNameResolver() { return hostNameResolver; } /** * Sets the resolver to use for resolving hostname * * @param hostNameResolver the resolver */ public void setHostNameResolver(RestHostNameResolver hostNameResolver) { this.hostNameResolver = hostNameResolver; } /** * Sets the resolver to use for resolving hostname * * @param hostNameResolver the resolver */ public void settHostNameResolver(String hostNameResolver) { this.hostNameResolver = RestHostNameResolver.valueOf(hostNameResolver); } /** * Gets the binding mode 
used by the REST consumer
 *
 * @return the binding mode
 */
    public RestBindingMode getBindingMode() {
        return bindingMode;
    }

    /**
     * Sets the binding mode to be used by the REST consumer
     *
     * @param bindingMode the binding mode
     */
    public void setBindingMode(RestBindingMode bindingMode) {
        this.bindingMode = bindingMode;
    }

    /**
     * Sets the binding mode to be used by the REST consumer
     *
     * @param bindingMode the binding mode name, resolved via {@link RestBindingMode#valueOf(String)}
     */
    public void setBindingMode(String bindingMode) {
        this.bindingMode = RestBindingMode.valueOf(bindingMode);
    }

    /**
     * Whether to skip binding output if there is a custom HTTP error code, and instead use the response body as-is.
     * <p/>
     * This option is default <tt>true</tt>.
     *
     * @return whether to skip binding on error code
     */
    public boolean isSkipBindingOnErrorCode() {
        return skipBindingOnErrorCode;
    }

    /**
     * Whether to skip binding output if there is a custom HTTP error code, and instead use the response body as-is.
     * <p/>
     * This option is default <tt>true</tt>.
     *
     * @param skipBindingOnErrorCode whether to skip binding on error code
     */
    public void setSkipBindingOnErrorCode(boolean skipBindingOnErrorCode) {
        this.skipBindingOnErrorCode = skipBindingOnErrorCode;
    }

    /**
     * To specify whether to enable CORS which means Camel will automatic include CORS in the HTTP headers in the response.
     * <p/>
     * This option is default <tt>false</tt>
     *
     * @return whether CORS is enabled or not
     */
    public boolean isEnableCORS() {
        return enableCORS;
    }

    /**
     * To specify whether to enable CORS which means Camel will automatic include CORS in the HTTP headers in the response.
     * <p/>
     * This option is default <tt>false</tt>
     *
     * @param enableCORS <tt>true</tt> to enable CORS
     */
    public void setEnableCORS(boolean enableCORS) {
        this.enableCORS = enableCORS;
    }

    /**
     * Gets the name of the json data format.
     * <p/>
     * <b>Important:</b> This option is only for setting a custom name of the data format, not to refer to an existing data format instance.
     *
     * @return the name, or <tt>null</tt> to use default
     */
    public String getJsonDataFormat() {
        return jsonDataFormat;
    }

    /**
     * Sets a custom json data format to be used
     * <p/>
     * <b>Important:</b> This option is only for setting a custom name of the data format, not to refer to an existing data format instance.
     *
     * @param name name of the data format
     */
    public void setJsonDataFormat(String name) {
        this.jsonDataFormat = name;
    }

    /**
     * Gets the name of the xml data format.
     * <p/>
     * <b>Important:</b> This option is only for setting a custom name of the data format, not to refer to an existing data format instance.
     *
     * @return the name, or <tt>null</tt> to use default
     */
    public String getXmlDataFormat() {
        return xmlDataFormat;
    }

    /**
     * Sets a custom xml data format to be used.
     * <p/>
     * <b>Important:</b> This option is only for setting a custom name of the data format, not to refer to an existing data format instance.
     *
     * @param name name of the data format
     */
    public void setXmlDataFormat(String name) {
        this.xmlDataFormat = name;
    }

    /**
     * Gets additional options on component level
     *
     * @return additional options
     */
    public Map<String, Object> getComponentProperties() {
        return componentProperties;
    }

    /**
     * Sets additional options on component level
     *
     * @param componentProperties the options
     */
    public void setComponentProperties(Map<String, Object> componentProperties) {
        this.componentProperties = componentProperties;
    }

    /**
     * Gets additional options on endpoint level
     *
     * @return additional options
     */
    public Map<String, Object> getEndpointProperties() {
        return endpointProperties;
    }

    /**
     * Sets additional options on endpoint level
     *
     * @param endpointProperties the options
     */
    public void setEndpointProperties(Map<String, Object> endpointProperties) {
        this.endpointProperties = endpointProperties;
    }

    /**
     * Gets additional options on consumer level
     *
     * @return additional options
     */
    public Map<String, Object> getConsumerProperties() {
        return consumerProperties;
    }

    /**
     * Sets additional options on consumer level
     *
     * @param consumerProperties the options
     */
    public void setConsumerProperties(Map<String, Object> consumerProperties) {
        this.consumerProperties = consumerProperties;
    }

    /**
     * Gets additional options on data format level
     *
     * @return additional options
     */
    public Map<String, Object> getDataFormatProperties() {
        return dataFormatProperties;
    }

    /**
     * Sets additional options on data format level
     *
     * @param dataFormatProperties the options
     */
    public void setDataFormatProperties(Map<String, Object> dataFormatProperties) {
        this.dataFormatProperties = dataFormatProperties;
    }

    /**
     * Gets additional options on api level
     *
     * @return additional options
     */
    public Map<String, Object> getApiProperties() {
        return apiProperties;
    }

    /**
     * Sets additional options on api level
     *
     * @param apiProperties the options
     */
    public void setApiProperties(Map<String, Object> apiProperties) {
        this.apiProperties = apiProperties;
    }

    /**
     * Gets the CORS headers to use if CORS has been enabled.
     *
     * @return the CORS headers
     */
    public Map<String, String> getCorsHeaders() {
        return corsHeaders;
    }

    /**
     * Sets the CORS headers to use if CORS has been enabled.
     *
     * @param corsHeaders the CORS headers
     */
    public void setCorsHeaders(Map<String, String> corsHeaders) {
        this.corsHeaders = corsHeaders;
    }
}
/* * $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//httpclient/src/java/org/apache/commons/httpclient/ChunkedOutputStream.java,v 1.16 2004/05/13 04:03:25 mbecke Exp $ * $Revision: 480424 $ * $Date: 2006-11-29 06:56:49 +0100 (Wed, 29 Nov 2006) $ * * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package hudson.util; import java.io.IOException; import java.io.OutputStream; /** * Implements HTTP chunking support. Writes are buffered to an internal buffer * (2048 default size). Chunks are guaranteed to be at least as large as the * buffer size (except for the last chunk). * * @author Mohammad Rezaei, Goldman, Sachs & Co. 
*/ public class ChunkedOutputStream extends OutputStream { // ------------------------------------------------------- Static Variables private static final byte CRLF[] = new byte[]{(byte) 13, (byte) 10}; /** * End chunk */ private static final byte ENDCHUNK[] = CRLF; /** * 0 */ private static final byte ZERO[] = new byte[]{(byte) '0'}; // ----------------------------------------------------- Instance Variables private OutputStream stream = null; private byte[] cache; private int cachePosition = 0; private boolean wroteLastChunk = false; // ----------------------------------------------------------- Constructors /** * Wraps a stream and chunks the output. * * @param stream to wrap * @param bufferSize minimum chunk size (excluding last chunk) * @throws IOException * * @since 3.0 */ public ChunkedOutputStream(OutputStream stream, int bufferSize) throws IOException { this.cache = new byte[bufferSize]; this.stream = stream; } /** * Wraps a stream and chunks the output. The default buffer size of 2048 was * chosen because the chunk overhead is less than 0.5% * * @param stream * @throws IOException */ public ChunkedOutputStream(OutputStream stream) throws IOException { this(stream, 2048); } // ----------------------------------------------------------- Internal methods /** * Writes the cache out onto the underlying stream * * @throws IOException * * @since 3.0 */ protected void flushCache() throws IOException { if (cachePosition > 0) { byte chunkHeader[] = (Integer.toHexString(cachePosition) + "\r\n").getBytes("US-ASCII"); stream.write(chunkHeader, 0, chunkHeader.length); stream.write(cache, 0, cachePosition); stream.write(ENDCHUNK, 0, ENDCHUNK.length); cachePosition = 0; } } /** * Writes the cache and bufferToAppend to the underlying stream as one large * chunk * * @param bufferToAppend * @param off * @param len * @throws IOException * * @since 3.0 */ protected void flushCacheWithAppend(byte bufferToAppend[], int off, int len) throws IOException { byte chunkHeader[] = 
(Integer.toHexString(cachePosition + len) + "\r\n").getBytes("US-ASCII"); stream.write(chunkHeader, 0, chunkHeader.length); stream.write(cache, 0, cachePosition); stream.write(bufferToAppend, off, len); stream.write(ENDCHUNK, 0, ENDCHUNK.length); cachePosition = 0; } protected void writeClosingChunk() throws IOException { // Write the final chunk. stream.write(ZERO, 0, ZERO.length); stream.write(CRLF, 0, CRLF.length); stream.write(ENDCHUNK, 0, ENDCHUNK.length); } // ----------------------------------------------------------- Public Methods /** * Must be called to ensure the internal cache is flushed and the closing * chunk is written. * * @throws IOException * * @since 3.0 */ public void finish() throws IOException { if (!wroteLastChunk) { flushCache(); writeClosingChunk(); wroteLastChunk = true; } } // -------------------------------------------- OutputStream Methods /** * Write the specified byte to our output stream. * * Note: Avoid this method as it will cause an inefficient single byte * chunk. Use write (byte[], int, int) instead. * * @param b The byte to be written * @throws IOException if an input/output error occurs */ public void write(int b) throws IOException { cache[cachePosition] = (byte) b; cachePosition++; if (cachePosition == cache.length) { flushCache(); } } /** * Writes the array. If the array does not fit within the buffer, it is not * split, but rather written out as one large chunk. * * @param b * @throws IOException * * @since 3.0 */ @Override public void write(byte b[]) throws IOException { this.write(b, 0, b.length); } @Override public void write(byte src[], int off, int len) throws IOException { if (len >= cache.length - cachePosition) { flushCacheWithAppend(src, off, len); } else { System.arraycopy(src, off, cache, cachePosition, len); cachePosition += len; } } /** * Flushes the underlying stream, but leaves the internal buffer alone. 
* * @throws IOException */ @Override public void flush() throws IOException { flushCache(); // Kohsuke: flush should flush the cache stream.flush(); } /** * Finishes writing to the underlying stream, but does NOT close the * underlying stream. * * @throws IOException */ @Override public void close() throws IOException { finish(); super.close(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.InetSocketAddress; import java.util.EnumSet; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.HardLink; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.RemoteException; import org.junit.Assert; import 
org.junit.Test; /** * This class tests the building blocks that are needed to * support HDFS appends. */ public class TestFileAppend{ final boolean simulatedStorage = false; private static byte[] fileContents = null; // // writes to file but does not close it // private void writeFile(FSDataOutputStream stm) throws IOException { byte[] buffer = AppendTestUtil.initBuffer(AppendTestUtil.FILE_SIZE); stm.write(buffer); } // // verify that the data written to the full blocks are sane // private void checkFile(DistributedFileSystem fileSys, Path name, int repl) throws IOException { boolean done = false; // wait till all full blocks are confirmed by the datanodes. while (!done) { try { Thread.sleep(1000); } catch (InterruptedException e) {;} done = true; BlockLocation[] locations = fileSys.getFileBlockLocations( fileSys.getFileStatus(name), 0, AppendTestUtil.FILE_SIZE); if (locations.length < AppendTestUtil.NUM_BLOCKS) { System.out.println("Number of blocks found " + locations.length); done = false; continue; } for (int idx = 0; idx < AppendTestUtil.NUM_BLOCKS; idx++) { if (locations[idx].getHosts().length < repl) { System.out.println("Block index " + idx + " not yet replciated."); done = false; break; } } } byte[] expected = new byte[AppendTestUtil.NUM_BLOCKS * AppendTestUtil.BLOCK_SIZE]; if (simulatedStorage) { LocatedBlocks lbs = fileSys.getClient().getLocatedBlocks(name.toString(), 0, AppendTestUtil.FILE_SIZE); DFSTestUtil.fillExpectedBuf(lbs, expected); } else { System.arraycopy(fileContents, 0, expected, 0, expected.length); } // do a sanity check. Read the file // do not check file status since the file is not yet closed. AppendTestUtil.checkFullFile(fileSys, name, AppendTestUtil.NUM_BLOCKS * AppendTestUtil.BLOCK_SIZE, expected, "Read 1", false); } /** * Test a simple flush on a simple HDFS file. 
* @throws IOException an exception might be thrown */ @Test public void testSimpleFlush() throws IOException { Configuration conf = new HdfsConfiguration(); if (simulatedStorage) { SimulatedFSDataset.setFactory(conf); } fileContents = AppendTestUtil.initBuffer(AppendTestUtil.FILE_SIZE); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); DistributedFileSystem fs = cluster.getFileSystem(); try { // create a new file. Path file1 = new Path("/simpleFlush.dat"); FSDataOutputStream stm = AppendTestUtil.createFile(fs, file1, 1); System.out.println("Created file simpleFlush.dat"); // write to file int mid = AppendTestUtil.FILE_SIZE /2; stm.write(fileContents, 0, mid); stm.hflush(); System.out.println("Wrote and Flushed first part of file."); // write the remainder of the file stm.write(fileContents, mid, AppendTestUtil.FILE_SIZE - mid); System.out.println("Written second part of file"); stm.hflush(); stm.hflush(); System.out.println("Wrote and Flushed second part of file."); // verify that full blocks are sane checkFile(fs, file1, 1); stm.close(); System.out.println("Closed file."); // verify that entire file is good AppendTestUtil.checkFullFile(fs, file1, AppendTestUtil.FILE_SIZE, fileContents, "Read 2"); } catch (IOException e) { System.out.println("Exception :" + e); throw e; } catch (Throwable e) { System.out.println("Throwable :" + e); e.printStackTrace(); throw new IOException("Throwable : " + e); } finally { fs.close(); cluster.shutdown(); } } /** * Test that file data can be flushed. * @throws IOException an exception might be thrown */ @Test public void testComplexFlush() throws IOException { Configuration conf = new HdfsConfiguration(); if (simulatedStorage) { SimulatedFSDataset.setFactory(conf); } fileContents = AppendTestUtil.initBuffer(AppendTestUtil.FILE_SIZE); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); DistributedFileSystem fs = cluster.getFileSystem(); try { // create a new file. 
Path file1 = new Path("/complexFlush.dat"); FSDataOutputStream stm = AppendTestUtil.createFile(fs, file1, 1); System.out.println("Created file complexFlush.dat"); int start = 0; for (start = 0; (start + 29) < AppendTestUtil.FILE_SIZE; ) { stm.write(fileContents, start, 29); stm.hflush(); start += 29; } stm.write(fileContents, start, AppendTestUtil.FILE_SIZE -start); // need to make sure we completely write out all full blocks before // the checkFile() call (see FSOutputSummer#flush) stm.flush(); // verify that full blocks are sane checkFile(fs, file1, 1); stm.close(); // verify that entire file is good AppendTestUtil.checkFullFile(fs, file1, AppendTestUtil.FILE_SIZE, fileContents, "Read 2"); } catch (IOException e) { System.out.println("Exception :" + e); throw e; } catch (Throwable e) { System.out.println("Throwable :" + e); e.printStackTrace(); throw new IOException("Throwable : " + e); } finally { fs.close(); cluster.shutdown(); } } /** * FileNotFoundException is expected for appending to a non-exisiting file * * @throws FileNotFoundException as the result */ @Test(expected = FileNotFoundException.class) public void testFileNotFound() throws IOException { Configuration conf = new HdfsConfiguration(); if (simulatedStorage) { SimulatedFSDataset.setFactory(conf); } MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); FileSystem fs = cluster.getFileSystem(); try { Path file1 = new Path("/nonexistingfile.dat"); fs.append(file1); } finally { fs.close(); cluster.shutdown(); } } /** Test two consecutive appends on a file with a full block. 
*/ @Test public void testAppendTwice() throws Exception { Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); final FileSystem fs1 = cluster.getFileSystem(); final FileSystem fs2 = AppendTestUtil.createHdfsWithDifferentUsername(conf); try { final Path p = new Path("/testAppendTwice/foo"); final int len = 1 << 16; final byte[] fileContents = AppendTestUtil.initBuffer(len); { // create a new file with a full block. FSDataOutputStream out = fs2.create(p, true, 4096, (short)1, len); out.write(fileContents, 0, len); out.close(); } //1st append does not add any data so that the last block remains full //and the last block in INodeFileUnderConstruction is a BlockInfo //but does not have a BlockUnderConstructionFeature. fs2.append(p); //2nd append should get AlreadyBeingCreatedException fs1.append(p); Assert.fail(); } catch(RemoteException re) { AppendTestUtil.LOG.info("Got an exception:", re); Assert.assertEquals(AlreadyBeingCreatedException.class.getName(), re.getClassName()); } finally { fs2.close(); fs1.close(); cluster.shutdown(); } } /** Test two consecutive appends on a file with a full block. */ @Test public void testAppend2Twice() throws Exception { Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); final DistributedFileSystem fs1 = cluster.getFileSystem(); final FileSystem fs2 = AppendTestUtil.createHdfsWithDifferentUsername(conf); try { final Path p = new Path("/testAppendTwice/foo"); final int len = 1 << 16; final byte[] fileContents = AppendTestUtil.initBuffer(len); { // create a new file with a full block. FSDataOutputStream out = fs2.create(p, true, 4096, (short)1, len); out.write(fileContents, 0, len); out.close(); } //1st append does not add any data so that the last block remains full //and the last block in INodeFileUnderConstruction is a BlockInfo //but does not have a BlockUnderConstructionFeature. 
((DistributedFileSystem) fs2).append(p, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null); // 2nd append should get AlreadyBeingCreatedException fs1.append(p); Assert.fail(); } catch(RemoteException re) { AppendTestUtil.LOG.info("Got an exception:", re); Assert.assertEquals(AlreadyBeingCreatedException.class.getName(), re.getClassName()); } finally { fs2.close(); fs1.close(); cluster.shutdown(); } } /** Tests appending after soft-limit expires. */ @Test public void testAppendAfterSoftLimit() throws IOException, InterruptedException { Configuration conf = new HdfsConfiguration(); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); //Set small soft-limit for lease final long softLimit = 1L; final long hardLimit = 9999999L; MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1) .build(); cluster.setLeasePeriod(softLimit, hardLimit); cluster.waitActive(); FileSystem fs = cluster.getFileSystem(); FileSystem fs2 = new DistributedFileSystem(); fs2.initialize(fs.getUri(), conf); final Path testPath = new Path("/testAppendAfterSoftLimit"); final byte[] fileContents = AppendTestUtil.initBuffer(32); // create a new file without closing FSDataOutputStream out = fs.create(testPath); out.write(fileContents); //Wait for > soft-limit Thread.sleep(250); try { FSDataOutputStream appendStream2 = fs2.append(testPath); appendStream2.write(fileContents); appendStream2.close(); assertEquals(fileContents.length, fs.getFileStatus(testPath).getLen()); } finally { fs.close(); fs2.close(); cluster.shutdown(); } } /** Tests appending after soft-limit expires. 
*/ @Test public void testAppend2AfterSoftLimit() throws Exception { Configuration conf = new HdfsConfiguration(); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); //Set small soft-limit for lease final long softLimit = 1L; final long hardLimit = 9999999L; MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1) .build(); cluster.setLeasePeriod(softLimit, hardLimit); cluster.waitActive(); DistributedFileSystem fs = cluster.getFileSystem(); DistributedFileSystem fs2 = new DistributedFileSystem(); fs2.initialize(fs.getUri(), conf); final Path testPath = new Path("/testAppendAfterSoftLimit"); final byte[] fileContents = AppendTestUtil.initBuffer(32); // create a new file without closing FSDataOutputStream out = fs.create(testPath); out.write(fileContents); //Wait for > soft-limit Thread.sleep(250); try { FSDataOutputStream appendStream2 = fs2.append(testPath, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null); appendStream2.write(fileContents); appendStream2.close(); assertEquals(fileContents.length, fs.getFileStatus(testPath).getLen()); // make sure we now have 1 block since the first writer was revoked LocatedBlocks blks = fs.getClient().getLocatedBlocks(testPath.toString(), 0L); assertEquals(1, blks.getLocatedBlocks().size()); for (LocatedBlock blk : blks.getLocatedBlocks()) { assertEquals(fileContents.length, blk.getBlockSize()); } } finally { fs.close(); fs2.close(); cluster.shutdown(); } } /** * Old replica of the block should not be accepted as valid for append/read */ @Test public void testFailedAppendBlockRejection() throws Exception { Configuration conf = new HdfsConfiguration(); conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "false"); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3) .build(); DistributedFileSystem fs = null; try { fs = cluster.getFileSystem(); Path path = new Path("/test"); FSDataOutputStream out = fs.create(path); out.writeBytes("hello\n"); out.close(); // stop 
one datanode DataNodeProperties dnProp = cluster.stopDataNode(0); String dnAddress = dnProp.datanode.getXferAddress().toString(); if (dnAddress.startsWith("/")) { dnAddress = dnAddress.substring(1); } // append again to bump genstamps for (int i = 0; i < 2; i++) { out = fs.append(path); out.writeBytes("helloagain\n"); out.close(); } // re-open and make the block state as underconstruction out = fs.append(path); cluster.restartDataNode(dnProp, true); // wait till the block report comes Thread.sleep(2000); // check the block locations, this should not contain restarted datanode BlockLocation[] locations = fs.getFileBlockLocations(path, 0, Long.MAX_VALUE); String[] names = locations[0].getNames(); for (String node : names) { if (node.equals(dnAddress)) { fail("Failed append should not be present in latest block locations."); } } out.close(); } finally { IOUtils.closeStream(fs); cluster.shutdown(); } } /** * Old replica of the block should not be accepted as valid for append/read */ @Test public void testMultiAppend2() throws Exception { Configuration conf = new HdfsConfiguration(); conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "false"); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3) .build(); DistributedFileSystem fs = null; final String hello = "hello\n"; try { fs = cluster.getFileSystem(); Path path = new Path("/test"); FSDataOutputStream out = fs.create(path); out.writeBytes(hello); out.close(); // stop one datanode DataNodeProperties dnProp = cluster.stopDataNode(0); String dnAddress = dnProp.datanode.getXferAddress().toString(); if (dnAddress.startsWith("/")) { dnAddress = dnAddress.substring(1); } // append again to bump genstamps for (int i = 0; i < 2; i++) { out = fs.append(path, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null); out.writeBytes(hello); out.close(); } // re-open and make the block state as underconstruction out = fs.append(path, EnumSet.of(CreateFlag.APPEND, 
CreateFlag.NEW_BLOCK), 4096, null); cluster.restartDataNode(dnProp, true); // wait till the block report comes Thread.sleep(2000); out.writeBytes(hello); out.close(); // check the block locations LocatedBlocks blocks = fs.getClient().getLocatedBlocks(path.toString(), 0L); // since we append the file 3 time, we should be 4 blocks assertEquals(4, blocks.getLocatedBlocks().size()); for (LocatedBlock block : blocks.getLocatedBlocks()) { assertEquals(hello.length(), block.getBlockSize()); } StringBuilder sb = new StringBuilder(); for (int i = 0; i < 4; i++) { sb.append(hello); } final byte[] content = sb.toString().getBytes(); AppendTestUtil.checkFullFile(fs, path, content.length, content, "Read /test"); // restart namenode to make sure the editlog can be properly applied cluster.restartNameNode(true); cluster.waitActive(); AppendTestUtil.checkFullFile(fs, path, content.length, content, "Read /test"); blocks = fs.getClient().getLocatedBlocks(path.toString(), 0L); // since we append the file 3 time, we should be 4 blocks assertEquals(4, blocks.getLocatedBlocks().size()); for (LocatedBlock block : blocks.getLocatedBlocks()) { assertEquals(hello.length(), block.getBlockSize()); } } finally { IOUtils.closeStream(fs); cluster.shutdown(); } } @Test(timeout = 10000) public void testAppendCorruptedBlock() throws Exception { Configuration conf = new HdfsConfiguration(); conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); conf.setInt("dfs.min.replication", 1); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1) .build(); try { DistributedFileSystem fs = cluster.getFileSystem(); Path fileName = new Path("/appendCorruptBlock"); DFSTestUtil.createFile(fs, fileName, 512, (short) 1, 0); DFSTestUtil.waitReplication(fs, fileName, (short) 1); Assert.assertTrue("File not created", fs.exists(fileName)); ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, fileName); cluster.corruptBlockOnDataNodes(block); 
DFSTestUtil.appendFile(fs, fileName, "appendCorruptBlock"); } finally { cluster.shutdown(); } } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.execution;

import io.airlift.stats.CounterStat;
import io.airlift.stats.DistributionStat;
import io.airlift.stats.TimeStat;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;

import java.util.concurrent.atomic.AtomicInteger;

import static com.facebook.presto.spi.StandardErrorCode.ABANDONED_QUERY;
import static com.facebook.presto.spi.StandardErrorCode.USER_CANCELED;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Aggregated, JMX-exported statistics for the query manager: lifecycle
 * counters (started/completed/failed/abandoned/canceled), failure breakdowns
 * by error type, consumed input/CPU totals, and input-rate distributions.
 * Updated from {@link #queryStarted()}, {@link #queryStopped()} and
 * {@link #queryFinished(QueryInfo)}.
 */
public class SqlQueryManagerStats
{
    private final AtomicInteger runningQueries = new AtomicInteger();
    private final CounterStat startedQueries = new CounterStat();
    private final CounterStat completedQueries = new CounterStat();
    private final CounterStat failedQueries = new CounterStat();
    private final CounterStat abandonedQueries = new CounterStat();
    private final CounterStat canceledQueries = new CounterStat();
    private final CounterStat userErrorFailures = new CounterStat();
    private final CounterStat internalFailures = new CounterStat();
    private final CounterStat externalFailures = new CounterStat();
    private final CounterStat insufficientResourcesFailures = new CounterStat();
    private final CounterStat consumedInputRows = new CounterStat();
    private final CounterStat consumedInputBytes = new CounterStat();
    private final CounterStat consumedCpuTimeSecs = new CounterStat();
    private final TimeStat executionTime = new TimeStat(MILLISECONDS);
    private final DistributionStat wallInputBytesRate = new DistributionStat();
    private final DistributionStat cpuInputByteRate = new DistributionStat();

    /**
     * Records that a query has started executing.
     */
    public void queryStarted()
    {
        startedQueries.update(1);
        runningQueries.incrementAndGet();
    }

    /**
     * Records that a previously started query is no longer running.
     */
    public void queryStopped()
    {
        runningQueries.decrementAndGet();
    }

    /**
     * Records final statistics for a completed query: completion counter,
     * consumed CPU/input totals, execution-time distribution, input-rate
     * distributions, and (when the query failed) the failure breakdown.
     *
     * @param info final state of the finished query
     */
    public void queryFinished(QueryInfo info)
    {
        completedQueries.update(1);

        long rawInputBytes = info.getQueryStats().getRawInputDataSize().toBytes();

        consumedCpuTimeSecs.update((long) info.getQueryStats().getTotalCpuTime().getValue(SECONDS));
        // Reuse the value computed above instead of re-deriving it from the stats
        consumedInputBytes.update(rawInputBytes);
        consumedInputRows.update(info.getQueryStats().getRawInputPositions());
        executionTime.add(info.getQueryStats().getExecutionTime());

        // Rates are bytes/second; only recorded when the elapsed time is non-zero
        long executionWallMillis = info.getQueryStats().getExecutionTime().toMillis();
        if (executionWallMillis > 0) {
            wallInputBytesRate.add(rawInputBytes * 1000 / executionWallMillis);
        }

        long executionCpuMillis = info.getQueryStats().getTotalCpuTime().toMillis();
        if (executionCpuMillis > 0) {
            cpuInputByteRate.add(rawInputBytes * 1000 / executionCpuMillis);
        }

        if (info.getErrorCode() != null) {
            // Break failures down by broad error type...
            switch (info.getErrorCode().getType()) {
                case USER_ERROR:
                    userErrorFailures.update(1);
                    break;
                case INTERNAL_ERROR:
                    internalFailures.update(1);
                    break;
                case INSUFFICIENT_RESOURCES:
                    insufficientResourcesFailures.update(1);
                    break;
                case EXTERNAL:
                    externalFailures.update(1);
                    break;
            }

            // ...and additionally flag abandoned/canceled queries by error code
            if (info.getErrorCode().getCode() == ABANDONED_QUERY.toErrorCode().getCode()) {
                abandonedQueries.update(1);
            }
            else if (info.getErrorCode().getCode() == USER_CANCELED.toErrorCode().getCode()) {
                canceledQueries.update(1);
            }
            failedQueries.update(1);
        }
    }

    @Managed
    public long getRunningQueries()
    {
        // This is not startedQueries - completeQueries, since queries can finish without ever starting (cancelled before started, for example)
        return runningQueries.get();
    }

    @Managed
    @Nested
    public CounterStat getStartedQueries()
    {
        return startedQueries;
    }

    @Managed
    @Nested
    public CounterStat getCompletedQueries()
    {
        return completedQueries;
    }

    @Managed
    @Nested
    public CounterStat getFailedQueries()
    {
        return failedQueries;
    }

    @Managed
    @Nested
    public CounterStat getConsumedInputRows()
    {
        return consumedInputRows;
    }

    @Managed
    @Nested
    public CounterStat getConsumedInputBytes()
    {
        return consumedInputBytes;
    }

    @Managed
    @Nested
    public CounterStat getConsumedCpuTimeSecs()
    {
        return consumedCpuTimeSecs;
    }

    @Managed
    @Nested
    public TimeStat getExecutionTime()
    {
        return executionTime;
    }

    @Managed
    @Nested
    public CounterStat getUserErrorFailures()
    {
        return userErrorFailures;
    }

    @Managed
    @Nested
    public CounterStat getInternalFailures()
    {
        return internalFailures;
    }

    @Managed
    @Nested
    public CounterStat getAbandonedQueries()
    {
        return abandonedQueries;
    }

    @Managed
    @Nested
    public CounterStat getCanceledQueries()
    {
        return canceledQueries;
    }

    @Managed
    @Nested
    public CounterStat getExternalFailures()
    {
        return externalFailures;
    }

    @Managed
    @Nested
    public CounterStat getInsufficientResourcesFailures()
    {
        return insufficientResourcesFailures;
    }

    @Managed(description = "Distribution of query input data rates (wall)")
    @Nested
    public DistributionStat getWallInputBytesRate()
    {
        return wallInputBytesRate;
    }

    @Managed(description = "Distribution of query input data rates (cpu)")
    @Nested
    public DistributionStat getCpuInputByteRate()
    {
        return cpuInputByteRate;
    }
}
/*
 * Copyright 2005-2006 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kfs.fp.document.web.struts;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang.StringUtils;
import org.kuali.kfs.coa.businessobject.BalanceType;
import org.kuali.kfs.coa.service.BalanceTypeService;
import org.kuali.kfs.fp.document.JournalVoucherDocument;
import org.kuali.kfs.sys.KFSConstants;
import org.kuali.kfs.sys.businessobject.SourceAccountingLine;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.rice.krad.util.ObjectUtils;

/**
 * This class is the Struts specific form object that works in conjunction with the pojo utilities to build the UI for the Journal
 * Voucher Document. This class is unique in that it leverages a helper data structure called the VoucherAccountingLineHelper
 * because the Journal Voucher, under certain conditions, presents the user with a debit and credit column for amount entry. In
 * addition, this form class must keep track of the changes between the old and new balance type selection so that the corresponding
 * action class can make decisions based upon the differences. New accounting lines use specific credit and debit amount fields b/c
 * the new line is explicitly known; however, already existing accounting lines need to exist within a list with ordering that
 * matches the accounting lines source list.
 */
public class JournalVoucherForm extends VoucherForm {
    // all valid balance types, rendered as a dropdown on the page
    protected List balanceTypes;
    // balance type code as it was before the user's most recent selection change
    protected String originalBalanceType;
    // balance type currently selected on the form
    protected BalanceType selectedBalanceType;

    /**
     * Constructs a JournalVoucherForm instance. Defaults the selected balance type to "Actual".
     */
    public JournalVoucherForm() {
        super();
        selectedBalanceType = new BalanceType(KFSConstants.BALANCE_TYPE_ACTUAL);
        originalBalanceType = "";
    }

    @Override
    protected String getDefaultDocumentTypeName() {
        return "JV";
    }

    /**
     * Overrides the parent to call super.populate and then to call the two methods that are specific to loading the two select
     * lists on the page. In addition, this also makes sure that the credit and debit amounts are filled in for situations where
     * validation errors occur and the page reposts.
     *
     * @see org.kuali.rice.kns.web.struts.pojo.PojoForm#populate(javax.servlet.http.HttpServletRequest)
     */
    @Override
    public void populate(HttpServletRequest request) {
        super.populate(request);
        populateBalanceTypeListForRendering();
    }

    /**
     * Override the parent, to push the chosen accounting period and balance type down into the source accounting line object. In
     * addition, check the balance type to see if it's the "External Encumbrance" balance and alter the encumbrance update code on
     * the accounting line appropriately.
     *
     * @see org.kuali.rice.kns.web.struts.form.KualiTransactionalDocumentFormBase#populateSourceAccountingLine(org.kuali.rice.krad.bo.SourceAccountingLine)
     */
    @Override
    public void populateSourceAccountingLine(SourceAccountingLine sourceLine, String accountingLinePropertyName, Map parameterMap) {
        super.populateSourceAccountingLine(sourceLine, accountingLinePropertyName, parameterMap);
        populateSourceAccountingLineEncumbranceCode(sourceLine);
    }

    /**
     * Sets the encumbrance code of the line based on the balance type.
     *
     * @param sourceLine - line to set code on
     */
    protected void populateSourceAccountingLineEncumbranceCode(SourceAccountingLine sourceLine) {
        BalanceType balanceType = getSelectedBalanceType();
        if (ObjectUtils.isNotNull(balanceType)) {
            balanceType.refresh();

            sourceLine.setBalanceTyp(balanceType);
            sourceLine.setBalanceTypeCode(balanceType.getCode());

            // set the encumbrance update code appropriately
            // KFSMI-5565 remove the default encumbrance code
            // no more default encumbrance code
            // if (KFSConstants.BALANCE_TYPE_EXTERNAL_ENCUMBRANCE.equals(balanceType.getCode())) {
            //     sourceLine.setEncumbranceUpdateCode(KFSConstants.JOURNAL_VOUCHER_ENCUMBRANCE_UPDATE_CODE_BALANCE_TYPE_EXTERNAL_ENCUMBRANCE);
            // }
            // else {
            //     sourceLine.setEncumbranceUpdateCode(null);
            // }
        }
        else {
            // it's the first time in, the form will be empty the first time in
            // set up default selection value
            balanceType = getPopulatedBalanceTypeInstance(KFSConstants.BALANCE_TYPE_ACTUAL);
            setSelectedBalanceType(balanceType);
            setOriginalBalanceType(balanceType.getCode());

            sourceLine.setEncumbranceUpdateCode(null);
        }
    }

    /**
     * This method retrieves the list of valid balance types to display.
     *
     * @return List
     */
    public List getBalanceTypes() {
        return balanceTypes;
    }

    /**
     * This method retrieves the selected balance type.
     *
     * @return BalanceTyp
     */
    public BalanceType getSelectedBalanceType() {
        return selectedBalanceType;
    }

    /**
     * This method sets the selected balance type.
     *
     * @param selectedBalanceType
     */
    public void setSelectedBalanceType(BalanceType selectedBalanceType) {
        this.selectedBalanceType = selectedBalanceType;
    }

    /**
     * This method sets the list of valid balance types to display.
     *
     * @param balanceTypes
     */
    public void setBalanceTypes(List balanceTypes) {
        this.balanceTypes = balanceTypes;
    }

    /**
     * This method returns the journal voucher document associated with this form.
     *
     * @return Returns the journalVoucherDocument.
     */
    public JournalVoucherDocument getJournalVoucherDocument() {
        return (JournalVoucherDocument) getTransactionalDocument();
    }

    /**
     * This method sets the journal voucher document associated with this form.
     *
     * @param journalVoucherDocument The journalVoucherDocument to set.
     */
    public void setJournalVoucherDocument(JournalVoucherDocument journalVoucherDocument) {
        setDocument(journalVoucherDocument);
    }

    /**
     * This method retrieves the originalBalanceType attribute.
     *
     * @return String
     */
    public String getOriginalBalanceType() {
        return originalBalanceType;
    }

    /**
     * This method sets the originalBalanceType attribute.
     *
     * @param changedBalanceType
     */
    public void setOriginalBalanceType(String changedBalanceType) {
        this.originalBalanceType = changedBalanceType;
    }

    /**
     * Using the selected accounting period to determine university fiscal year and look up all the encumbrance
     * balance type - check if the selected balance type is for encumbrance
     *
     * @return true/false - true if it is an encumbrance balance type
     */
    public boolean getIsEncumbranceBalanceType(){
        // get encumbrance balance type list for the fiscal year of the selected posting year
        BalanceTypeService balanceTypeService = SpringContext.getBean(BalanceTypeService.class);
        List<String> encumbranceBalanceTypes = balanceTypeService.getEncumbranceBalanceTypes(getSelectedPostingYear());

        return encumbranceBalanceTypes.contains(selectedBalanceType.getCode());
    }

    /**
     * This method retrieves all of the balance types in the system and prepares them to be rendered in a dropdown UI component.
     */
    protected void populateBalanceTypeListForRendering() {
        // grab the list of valid balance types (local renamed so it does not shadow the balanceTypes field)
        ArrayList<BalanceType> balanceTypeList = new ArrayList<BalanceType>(SpringContext.getBean(BalanceTypeService.class).getAllBalanceTypes());

        // set into the form for rendering
        this.setBalanceTypes(balanceTypeList);

        // default to "Actual" when no balance type has been chosen yet
        String selectedBalanceTypeCode = getSelectedBalanceType().getCode();
        if (StringUtils.isBlank(selectedBalanceTypeCode)) {
            selectedBalanceTypeCode = KFSConstants.BALANCE_TYPE_ACTUAL;
        }

        setSelectedBalanceType(getPopulatedBalanceTypeInstance(selectedBalanceTypeCode));
        getJournalVoucherDocument().setBalanceTypeCode(selectedBalanceTypeCode);
    }

    /**
     * This method will fully populate a balance type given the passed in code, by calling the business object service that
     * retrieves the rest of the instances' information.
     *
     * @param balanceTypeCode
     * @return BalanceTyp
     */
    protected BalanceType getPopulatedBalanceTypeInstance(String balanceTypeCode) {
        // now we have to get the code and the name of the original and new balance types
        BalanceTypeService bts = SpringContext.getBean(BalanceTypeService.class);
        BalanceType balanceType = bts.getBalanceTypeByCode(balanceTypeCode);
        balanceType.setCode(balanceTypeCode);
        return balanceType;
    }

    /**
     * If the balance type is an offset generation balance type, then the user is able to enter the amount as either a debit or a
     * credit, otherwise, they only need to deal with the amount field in this case we always need to update the underlying bo so
     * that the debit/credit code along with the amount, is properly set.
     */
    protected void populateCreditAndDebitAmounts() {
        if (isSelectedBalanceTypeFinancialOffsetGenerationIndicator()) {
            super.populateCreditAndDebitAmounts();
        }
    }

    /**
     * This is a convenience helper method that is used several times throughout this action class to determine if the selected
     * balance type contained within the form instance is a financial offset generation balance type or not.
     *
     * @return boolean True if it is an offset generation balance type, false otherwise.
     */
    protected boolean isSelectedBalanceTypeFinancialOffsetGenerationIndicator() {
        return getPopulatedBalanceTypeInstance(getSelectedBalanceType().getCode()).isFinancialOffsetGenerationIndicator();
    }
}
package edu.mit.civil.blastassessment.calculation;

import java.util.HashMap;
import java.util.Map;

/**
 * Lookup of peak dynamic pressure qo (psi) as a function of peak incident
 * overpressure Pso (psi), digitized at whole-psi increments from
 * UFC 3-340, Figure 2-3 pg79. The curve covers Pso in [10, 400] psi.
 *
 * @author koleary UFC 3-340, Figure 2-3 pg79
 */
public class PeakDynamicPressureQo {

    // Smallest / largest whole-psi Pso reading covered by the digitized curve.
    private static final int MIN_PSO = 10;
    private static final int MAX_PSO = 400;

    /**
     * Digitized qo readings; QO_VALUES[i] is the peak dynamic pressure at
     * Pso = MIN_PSO + i. Values are kept exactly as originally digitized
     * (truncated decimals included) so results are unchanged.
     */
    private static final double[] QO_VALUES = {
            /* 10 */ 2.25, 2.583333333, 2.916666667, 3.25, 4, 4.4, 4.8, 5.8, 6.3, 6.8,
            /* 20 */ 8, 8.625, 9.25, 10, 11.07142857, 12.14285714, 13.21428571, 14.28571429, 15.35714286, 16.42857143,
            /* 30 */ 17.5, 18.375, 19.25, 20.125, 21, 22.08333333, 23.16666667, 24.25, 25.33333333, 26.41666667,
            /* 40 */ 27.5, 28.66666667, 29.83333333, 31, 32.28571429, 33.57142857, 34.85714286, 36.14285714, 37.42857143, 38.71428571,
            /* 50 */ 40, 41.28571429, 42.57142857, 43.85714286, 45.14285714, 46.42857143, 47.71428571, 49, 51, 53,
            /* 60 */ 55, 56.36363636, 57.72727273, 59.09090909, 60.45454545, 61.81818182, 63.18181818, 64.54545455, 65.90909091, 67.27272727,
            /* 70 */ 68.63636364, 70, 71.66666667, 73.33333333, 75, 76.66666667, 78.33333333, 80, 81.66666667, 83.33333333,
            /* 80 */ 85, 87, 89, 91, 93, 95, 97, 99, 101, 103,
            /* 90 */ 105, 107.5, 110, 112.5, 115, 117.5, 120, 122.5, 125, 127.5,
            /* 100 */ 130, 131.75, 133.5, 135.25, 137, 138.75, 140.5, 142.25, 144, 145.75,
            /* 110 */ 147.5, 149.25, 151, 152.75, 154.5, 156.25, 158, 159.75, 161.5, 163.25,
            /* 120 */ 165, 166.75, 168.5, 170.25, 172, 173.75, 175.5, 177.25, 179, 180.75,
            /* 130 */ 182.5, 184.25, 186, 187.75, 189.5, 191.25, 193, 194.75, 196.5, 198.25,
            /* 140 */ 200, 202.5, 205, 207.5, 210, 212.5, 215, 217.5, 220, 222.5,
            /* 150 */ 225, 227.5, 230, 232.5, 235, 237.5, 240, 242.5, 245, 247.5,
            /* 160 */ 250, 252.5, 255, 257.5, 260, 262.5, 265, 267.5, 270, 272.5,
            /* 170 */ 275, 277.5, 280, 282.5, 285, 287.5, 290, 292.5, 295, 297.5,
            /* 180 */ 300, 302.5, 305, 307.5, 310, 312.5, 315, 317.5, 320, 322.5,
            /* 190 */ 325, 327.5, 330, 332.5, 335, 337.5, 340, 342.5, 345, 347.5,
            /* 200 */ 350, 352, 354, 356, 358, 360, 362, 364, 366, 368,
            /* 210 */ 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
            /* 220 */ 390, 392, 394, 396, 398, 400, 402.8571429, 405.7142857, 408.5714286, 411.4285714,
            /* 230 */ 414.2857143, 417.1428571, 420, 422.8571429, 425.7142857, 428.5714286, 431.4285714, 434.2857143, 437.1428571, 440,
            /* 240 */ 442.8571429, 445.7142857, 448.5714286, 451.4285714, 454.2857143, 457.1428571, 460, 462.8571429, 465.7142857, 468.5714286,
            /* 250 */ 471.4285714, 474.2857143, 477.1428571, 480, 482.8571429, 485.7142857, 488.5714286, 491.4285714, 494.2857143, 497.1428571,
            /* 260 */ 500, 502.5641026, 505.1282051, 507.6923077, 510.2564103, 512.8205128, 515.3846154, 517.9487179, 520.5128205, 523.0769231,
            /* 270 */ 525.6410256, 528.2051282, 530.7692308, 533.3333333, 535.8974359, 538.4615385, 541.025641, 543.5897436, 546.1538462, 548.7179487,
            /* 280 */ 551.2820513, 553.8461538, 556.4102564, 558.974359, 561.5384615, 564.1025641, 566.6666667, 569.2307692, 571.7948718, 574.3589744,
            /* 290 */ 576.9230769, 579.4871795, 582.0512821, 584.6153846, 587.1794872, 589.7435897, 592.3076923, 594.8717949, 597.4358974, 600,
            /* 300 */ 602.4390244, 604.8780488, 607.3170732, 609.7560976, 612.195122, 614.6341463, 617.0731707, 619.5121951, 621.9512195, 624.3902439,
            /* 310 */ 626.8292683, 629.2682927, 631.7073171, 634.1463415, 636.5853659, 639.0243902, 641.4634146, 643.902439, 646.3414634, 648.7804878,
            /* 320 */ 651.2195122, 653.6585366, 656.097561, 658.5365854, 660.9756098, 663.4146341, 665.8536585, 668.2926829, 670.7317073, 673.1707317,
            /* 330 */ 675.6097561, 678.0487805, 680.4878049, 682.9268293, 685.3658537, 687.804878, 690.2439024, 692.6829268, 695.1219512, 700,
            /* 340 */ 702.7777778, 705.5555556, 708.3333333, 711.1111111, 713.8888889, 716.6666667, 719.4444444, 722.2222222, 725, 727.7777778,
            /* 350 */ 730.5555556, 733.3333333, 736.1111111, 738.8888889, 741.6666667, 744.4444444, 747.2222222, 750, 752.7777778, 755.5555556,
            /* 360 */ 758.3333333, 761.1111111, 763.8888889, 766.6666667, 769.4444444, 772.2222222, 775, 777.7777778, 780.5555556, 783.3333333,
            /* 370 */ 786.1111111, 788.8888889, 791.6666667, 794.4444444, 797.2222222, 800, 802, 804, 806, 808,
            /* 380 */ 810, 812, 814, 816, 818, 820, 822, 824, 826, 828,
            /* 390 */ 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
            /* 400 */ 850
    };

    // Same map as before (one entry per whole psi in [10, 400]); retained so
    // any code that relied on the map-backed lookup behaves identically.
    private static Map<Double, Double> graph = new HashMap<Double, Double>();

    static {
        // Populate the lookup map from the digitized table above.
        for (int pso = MIN_PSO; pso <= MAX_PSO; pso++) {
            graph.put((double) pso, QO_VALUES[pso - MIN_PSO]);
        }
    }

    /**
     * Returns the peak dynamic pressure qo for the given peak incident
     * overpressure Pso, rounding Pso to the nearest whole psi before lookup.
     *
     * @param pso peak incident overpressure in psi; must round into [10, 400]
     * @return peak dynamic pressure qo in psi
     * @throws RuntimeException if the rounded Pso falls outside the digitized
     *         curve (previously such inputs caused a NullPointerException on
     *         unboxing the missing map entry)
     */
    public static double calculatQoWithPso(final double pso) {
        if (pso > 0) {
            // http://stackoverflow.com/questions/5419123/how-to-round-a-float-to-the-nearest-quarter
            double nearestWholeValue = Math.round(pso);
            Double qo = graph.get(nearestWholeValue);
            if (qo != null) {
                return qo;
            }
        }
        // TODO add logging here
        throw new RuntimeException("invalid Pso value - valid range is 10psi to 400psi");
    }
}
/* * Copyright 2010 Henry Coles * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.pitest.mutationtest.config; import static org.pitest.functional.prelude.Prelude.not; import static org.pitest.functional.prelude.Prelude.or; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import org.pitest.classpath.ClassFilter; import org.pitest.classpath.ClassPath; import org.pitest.classpath.ClassPathRoot; import org.pitest.classpath.PathFilter; import org.pitest.classpath.ProjectClassPaths; import org.pitest.coverage.execute.CoverageOptions; import org.pitest.execute.Pitest; import org.pitest.functional.F; import org.pitest.functional.FCollection; import org.pitest.functional.Option; import org.pitest.functional.predicate.Predicate; import org.pitest.functional.prelude.Prelude; import org.pitest.help.Help; import org.pitest.help.PitHelpError; import org.pitest.mutationtest.build.PercentAndConstantTimeoutStrategy; import org.pitest.mutationtest.incremental.FileWriterFactory; import org.pitest.mutationtest.incremental.NullWriterFactory; import org.pitest.mutationtest.incremental.WriterFactory; import org.pitest.testapi.Configuration; import org.pitest.testapi.TestGroupConfig; import org.pitest.util.Glob; import 
org.pitest.util.ResultOutputStrategy;
import org.pitest.util.Unchecked;

// FIXME move all logic to SettingsFactory and turn into simple bean

/**
 * Big ball of user supplied options to configure various aspects of mutation
 * testing.
 *
 */
public class ReportOptions {

  /** Logging frameworks that are excluded from mutation/coverage by default. */
  public final static Collection<String> LOGGING_CLASSES = Arrays
      .asList(
          "java.util.logging",
          "org.apache.log4j",
          "org.slf4j",
          "org.apache.commons.logging");

  private Configuration config;

  // Glob predicates selecting which classes to mutate / exclude.
  private Collection<Predicate<String>> targetClasses;
  private Collection<Predicate<String>> excludedMethods = Collections.emptyList();
  private Collection<Predicate<String>> excludedClasses = Collections.emptyList();

  private Collection<String> codePaths;

  private String reportDir;

  // Optional locations for incremental-analysis history files.
  private File historyInputLocation;
  private File historyOutputLocation;

  private Collection<File> sourceDirs;
  private Collection<String> classPathElements;
  private Collection<String> mutators;

  private int dependencyAnalysisMaxDistance;
  private boolean mutateStaticInitializers = false;

  private final List<String> jvmArgs = new ArrayList<String>();
  private int numberOfThreads = 0;
  private float timeoutFactor = PercentAndConstantTimeoutStrategy.DEFAULT_FACTOR;
  private long timeoutConstant = PercentAndConstantTimeoutStrategy.DEFAULT_CONSTANT;

  private Collection<Predicate<String>> targetTests;

  private Collection<String> loggingClasses = new ArrayList<String>();

  private int maxMutationsPerClass;

  private boolean verbose = false;
  private boolean failWhenNoMutations = false;

  private final Collection<String> outputs = new LinkedHashSet<String>();

  private TestGroupConfig groupConfig;

  private int mutationUnitSize;
  private boolean shouldCreateTimestampedReports = true;
  private boolean detectInlinedCode = false;
  private boolean exportLineCoverage = false;
  private int mutationThreshold;
  private int coverageThreshold;

  private String mutationEngine = "gregor";

  private String javaExecutable;

  private boolean includeLaunchClasspath = true;

  public boolean isVerbose() {
    return this.verbose;
  }

  /**
   * @return the reportDir
   */
  public String getReportDir() {
    return this.reportDir;
  }

  /**
   * @param reportDir
   *          the reportDir to set
   */
  public void setReportDir(final String reportDir) {
    this.reportDir = reportDir;
  }

  /**
   * @return the sourceDirs
   */
  public Collection<File> getSourceDirs() {
    return this.sourceDirs;
  }

  public Collection<String> getClassPathElements() {
    return this.classPathElements;
  }

  public void setClassPathElements(final Collection<String> classPathElements) {
    this.classPathElements = classPathElements;
  }

  /**
   * @param sourceDirs
   *          the sourceDirs to set
   */
  public void setSourceDirs(final Collection<File> sourceDirs) {
    this.sourceDirs = sourceDirs;
  }

  /**
   * @return the mutators
   */
  public Collection<String> getMutators() {
    return this.mutators;
  }

  /**
   * @param mutators
   *          the mutators to set
   */
  public void setMutators(final Collection<String> mutators) {
    this.mutators = mutators;
  }

  /**
   * @return the dependencyAnalysisMaxDistance
   */
  public int getDependencyAnalysisMaxDistance() {
    return this.dependencyAnalysisMaxDistance;
  }

  /**
   * @param dependencyAnalysisMaxDistance
   *          the dependencyAnalysisMaxDistance to set
   */
  public void setDependencyAnalysisMaxDistance(
      final int dependencyAnalysisMaxDistance) {
    this.dependencyAnalysisMaxDistance = dependencyAnalysisMaxDistance;
  }

  public List<String> getJvmArgs() {
    return this.jvmArgs;
  }

  public void addChildJVMArgs(final List<String> args) {
    this.jvmArgs.addAll(args);
  }

  /**
   * Builds a ClassPath from the configured elements, or an empty/default
   * ClassPath when none have been supplied.
   */
  public ClassPath getClassPath() {
    if (this.classPathElements != null) {
      return createClassPathFromElements();
    } else {
      return new ClassPath();
    }
  }

  private ClassPath createClassPathFromElements() {
    return new ClassPath(
        FCollection.map(this.classPathElements, stringToFile()));
  }

  private static F<String, File> stringToFile() {
    return new F<String, File>() {
      public File apply(final String a) {
        return new File(a);
      }
    };
  }

  public Collection<Predicate<String>> getTargetClasses() {
    return this.targetClasses;
  }

  /**
   * Combined predicate: matches any target class glob while rejecting
   * excluded classes. Fails fast if the filter would mutate pitest itself.
   */
  @SuppressWarnings("unchecked")
  public Predicate<String> getTargetClassesFilter() {
    final Predicate<String> filter = Prelude.and(or(this.targetClasses),
        not(isBlackListed(ReportOptions.this.excludedClasses)));
    checkNotTryingToMutateSelf(filter);
    return filter;
  }

  private void checkNotTryingToMutateSelf(final Predicate<String> filter) {
    if (filter.apply(Pitest.class.getName())) {
      throw new PitHelpError(Help.BAD_FILTER);
    }
  }

  public void setTargetClasses(final Collection<Predicate<String>> targetClasses) {
    this.targetClasses = targetClasses;
  }

  public void setTargetTests(
      final Collection<Predicate<String>> targetTestsPredicates) {
    this.targetTests = targetTestsPredicates;
  }

  public boolean isMutateStaticInitializers() {
    return this.mutateStaticInitializers;
  }

  public void setMutateStaticInitializers(final boolean mutateStaticInitializers) {
    this.mutateStaticInitializers = mutateStaticInitializers;
  }

  public int getNumberOfThreads() {
    return this.numberOfThreads;
  }

  public void setNumberOfThreads(final int numberOfThreads) {
    this.numberOfThreads = numberOfThreads;
  }

  public float getTimeoutFactor() {
    return this.timeoutFactor;
  }

  public long getTimeoutConstant() {
    return this.timeoutConstant;
  }

  public void setTimeoutConstant(final long timeoutConstant) {
    this.timeoutConstant = timeoutConstant;
  }

  public void setTimeoutFactor(final float timeoutFactor) {
    this.timeoutFactor = timeoutFactor;
  }

  public Collection<Predicate<String>> getTargetTests() {
    return this.targetTests;
  }

  /**
   * Filter for test classes. When no explicit test filter has been
   * configured, the target classes filter is assumed to cover both
   * production and test classes.
   */
  @SuppressWarnings("unchecked")
  public Predicate<String> getTargetTestsFilter() {
    if ((this.targetTests == null) || this.targetTests.isEmpty()) {
      return this.getTargetClassesFilter(); // if no tests specified assume the
                                            // target classes filter covers both
    } else {
      return Prelude.and(or(this.targetTests),
          not(isBlackListed(ReportOptions.this.excludedClasses)));
    }
  }

  private static Predicate<String> isBlackListed(
      final Collection<Predicate<String>> excludedClasses) {
    return new Predicate<String>() {
      public Boolean apply(final String a) {
        return or(excludedClasses).apply(a);
      }
    };
  }

  /**
   * Returns the user supplied logging classes, falling back to the default
   * set of common logging frameworks when none were configured.
   */
  public Collection<String> getLoggingClasses() {
    if (this.loggingClasses.isEmpty()) {
      return LOGGING_CLASSES;
    } else {
      return this.loggingClasses;
    }
  }

  public void setLoggingClasses(final Collection<String> loggingClasses) {
    this.loggingClasses = loggingClasses;
  }

  public Collection<Predicate<String>> getExcludedMethods() {
    return this.excludedMethods;
  }

  public void setExcludedMethods(
      final Collection<Predicate<String>> excludedMethods) {
    this.excludedMethods = excludedMethods;
  }

  public int getMaxMutationsPerClass() {
    return this.maxMutationsPerClass;
  }

  public void setMaxMutationsPerClass(final int maxMutationsPerClass) {
    this.maxMutationsPerClass = maxMutationsPerClass;
  }

  public void setVerbose(final boolean verbose) {
    this.verbose = verbose;
  }

  public void setExcludedClasses(
      final Collection<Predicate<String>> excludedClasses) {
    this.excludedClasses = excludedClasses;
  }

  public void addOutputFormats(final Collection<String> formats) {
    this.outputs.addAll(formats);
  }

  public Collection<String> getOutputFormats() {
    return this.outputs;
  }

  public Collection<Predicate<String>> getExcludedClasses() {
    return this.excludedClasses;
  }

  public boolean shouldFailWhenNoMutations() {
    return this.failWhenNoMutations;
  }

  public void setFailWhenNoMutations(final boolean failWhenNoMutations) {
    this.failWhenNoMutations = failWhenNoMutations;
  }

  /**
   * Creates coverage options from this configuration, additionally excluding
   * JDK/JUnit/pitest internals from coverage instrumentation.
   */
  @SuppressWarnings("unchecked")
  public CoverageOptions createCoverageOptions() {
    return new CoverageOptions(Prelude.and(this.getTargetClassesFilter(),
        not(commonClasses())), this.config, this.isVerbose(),
        this.getDependencyAnalysisMaxDistance());
  }

  // Classes that should never be instrumented for coverage.
  private static F<String, Boolean> commonClasses() {
    return new F<String, Boolean>() {
      public Boolean apply(final String name) {
        return name.startsWith("java") || name.startsWith("sun/")
            || name.startsWith("org/junit") || name.startsWith("junit")
            || name.startsWith("org/pitest/coverage")
            || name.startsWith("org/pitest/reloc")
            || name.startsWith("org/pitest/boot");
      }
    };
  }

  public ProjectClassPaths getMutationClassPaths() {
    return new ProjectClassPaths(this.getClassPath(), createClassesFilter(),
        createPathFilter());
  }

  public ClassFilter createClassesFilter() {
    return new ClassFilter(this.getTargetTestsFilter(),
        this.getTargetClassesFilter());
  }

  private PathFilter createPathFilter() {
    return new PathFilter(createCodePathFilter(),
        not(new DefaultDependencyPathPredicate()));
  }

  // User supplied code path globs win; otherwise use the default heuristic.
  private Predicate<ClassPathRoot> createCodePathFilter() {
    if ((this.codePaths != null) && !this.codePaths.isEmpty()) {
      return new PathNamePredicate(Prelude.or(Glob
          .toGlobPredicates(this.codePaths)));
    } else {
      return new DefaultCodePathPredicate();
    }
  }

  public Collection<String> getCodePaths() {
    return this.codePaths;
  }

  public void setCodePaths(final Collection<String> codePaths) {
    this.codePaths = codePaths;
  }

  public void setConfiguration(final Configuration configuration) {
    this.config = configuration;
  }

  public void setGroupConfig(final TestGroupConfig groupConfig) {
    this.groupConfig = groupConfig;
  }

  public TestGroupConfig getGroupConfig() {
    return this.groupConfig;
  }

  public int getMutationUnitSize() {
    return this.mutationUnitSize;
  }

  public void setMutationUnitSize(final int size) {
    this.mutationUnitSize = size;
  }

  public ResultOutputStrategy getReportDirectoryStrategy() {
    return new DirectoryResultOutputStrategy(getReportDir(),
        pickDirectoryStrategy());
  }

  public void setShouldCreateTimestampedReports(
      final boolean shouldCreateTimestampedReports) {
    this.shouldCreateTimestampedReports = shouldCreateTimestampedReports;
  }

  // Timestamped reports get a fresh dated subdirectory per run.
  private ReportDirCreationStrategy pickDirectoryStrategy() {
    if (this.shouldCreateTimestampedReports) {
      return new DatedDirectoryReportDirCreationStrategy();
    } else {
      return new UndatedReportDirCreationStrategy();
    }
  }

  public boolean shouldCreateTimeStampedReports() {
    return this.shouldCreateTimestampedReports;
  }

  public boolean isDetectInlinedCode() {
    return this.detectInlinedCode;
  }

  public void setDetectInlinedCode(final boolean b) {
    this.detectInlinedCode = b;
  }

  /**
   * Writer for incremental-analysis history; a no-op writer when no output
   * location is configured.
   */
  public WriterFactory createHistoryWriter() {
    if (this.historyOutputLocation == null) {
      return new NullWriterFactory();
    }

    return new FileWriterFactory(this.historyOutputLocation);
  }

  /**
   * Reader for incremental-analysis history. Returns none when no input
   * location is configured or the file is missing/empty.
   */
  public Option<Reader> createHistoryReader() {
    if (this.historyInputLocation == null) {
      return Option.none();
    }

    try {
      if (this.historyInputLocation.exists()
          && (this.historyInputLocation.length() > 0)) {
        return Option.<Reader> some(new InputStreamReader(new FileInputStream(
            this.historyInputLocation), "UTF-8"));
      }
      return Option.none();
    } catch (final IOException ex) {
      throw Unchecked.translateCheckedException(ex);
    }
  }

  public void setHistoryInputLocation(final File historyInputLocation) {
    this.historyInputLocation = historyInputLocation;
  }

  public void setHistoryOutputLocation(final File historyOutputLocation) {
    this.historyOutputLocation = historyOutputLocation;
  }

  public File getHistoryInputLocation() {
    return this.historyInputLocation;
  }

  public File getHistoryOutputLocation() {
    return this.historyOutputLocation;
  }

  public void setExportLineCoverage(final boolean value) {
    this.exportLineCoverage = value;
  }

  public boolean shouldExportLineCoverage() {
    return this.exportLineCoverage;
  }

  public int getMutationThreshold() {
    return this.mutationThreshold;
  }

  public void setMutationThreshold(final int value) {
    this.mutationThreshold = value;
  }

  public String getMutationEngine() {
    return this.mutationEngine;
  }

  public void setMutationEngine(final String mutationEngine) {
    this.mutationEngine = mutationEngine;
  }

  public int getCoverageThreshold() {
    return this.coverageThreshold;
  }

  public void setCoverageThreshold(final int coverageThreshold) {
    this.coverageThreshold = coverageThreshold;
  }

  public String getJavaExecutable() {
    return this.javaExecutable;
  }

  public void setJavaExecutable(final String javaExecutable) {
    this.javaExecutable = javaExecutable;
  }

  public void setIncludeLaunchClasspath(final boolean b) {
    this.includeLaunchClasspath = b;
  }

  public boolean isIncludeLaunchClasspath() {
    return this.includeLaunchClasspath;
  }

  @Override
  public String toString() {
    return "ReportOptions [config=" + this.config + ", targetClasses="
        + this.targetClasses + ", excludedMethods=" + this.excludedMethods
        + ", excludedClasses=" + this.excludedClasses + ", codePaths="
        + this.codePaths + ", reportDir=" + this.reportDir
        + ", historyInputLocation=" + this.historyInputLocation
        + ", historyOutputLocation=" + this.historyOutputLocation
        + ", sourceDirs=" + this.sourceDirs + ", classPathElements="
        + this.classPathElements + ", mutators=" + this.mutators
        + ", dependencyAnalysisMaxDistance="
        + this.dependencyAnalysisMaxDistance + ", mutateStaticInitializers="
        + this.mutateStaticInitializers + ", jvmArgs=" + this.jvmArgs
        + ", numberOfThreads=" + this.numberOfThreads + ", timeoutFactor="
        + this.timeoutFactor + ", timeoutConstant=" + this.timeoutConstant
        + ", targetTests=" + this.targetTests + ", loggingClasses="
        + this.loggingClasses + ", maxMutationsPerClass="
        + this.maxMutationsPerClass + ", verbose=" + this.verbose
        + ", failWhenNoMutations=" + this.failWhenNoMutations + ", outputs="
        + this.outputs + ", groupConfig=" + this.groupConfig
        + ", mutationUnitSize=" + this.mutationUnitSize
        + ", shouldCreateTimestampedReports="
        + this.shouldCreateTimestampedReports + ", detectInlinedCode="
        + this.detectInlinedCode + ", exportLineCoverage="
        + this.exportLineCoverage + ", mutationThreshold="
        + this.mutationThreshold + ", coverageThreshold="
        + this.coverageThreshold + ", mutationEngine=" + this.mutationEngine
        + ", javaExecutable=" + this.javaExecutable + "]";
  }

}
/* * Copyright (c) 2015. Rick Hightower, Geoff Chandler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! */ package io.advantageous.qbit.vertx.http.server; import io.advantageous.boon.core.Str; import io.advantageous.boon.core.Sys; import io.advantageous.boon.core.reflection.BeanUtils; import io.advantageous.qbit.GlobalConstants; import io.advantageous.qbit.http.HttpContentTypes; import io.advantageous.qbit.http.config.HttpServerOptions; import io.advantageous.qbit.http.request.HttpRequest; import io.advantageous.qbit.http.request.HttpResponseCreator; import io.advantageous.qbit.http.request.decorator.HttpResponseDecorator; import io.advantageous.qbit.http.server.HttpServer; import io.advantageous.qbit.http.server.RequestContinuePredicate; import io.advantageous.qbit.http.server.impl.SimpleHttpServer; import io.advantageous.qbit.http.server.websocket.WebSocketMessage; import io.advantageous.qbit.http.websocket.WebSocket; import io.advantageous.qbit.service.discovery.ServiceDiscovery; import io.advantageous.qbit.service.health.HealthServiceAsync; import io.advantageous.qbit.system.QBitSystemManager; import io.advantageous.qbit.util.Timer; import io.vertx.core.Vertx; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.HttpServerRequest; import io.vertx.core.http.ServerWebSocket; import io.vertx.core.net.JksOptions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; 
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Predicate;

/**
 * Vert.x backed implementation of the QBit {@link HttpServer} interface.
 * Delegates request/WebSocket handling to a {@link SimpleHttpServer} and
 * adapts Vert.x server objects into QBit requests via {@link VertxServerUtils}.
 */
public class HttpServerVertx implements HttpServer {

    private final Logger logger = LoggerFactory.getLogger(HttpServerVertx.class);
    private final boolean debug = GlobalConstants.DEBUG || logger.isDebugEnabled();
    private final QBitSystemManager systemManager;
    private final SimpleHttpServer simpleHttpServer;
    private final int port;
    private final String host;
    private final Vertx vertx;
    private final HttpServerOptions options;
    private final VertxServerUtils vertxUtils = new VertxServerUtils();
    // True when this instance started the Vertx instance and so owns its shutdown.
    private final boolean startedVertx;
    // Underlying Vert.x server; created lazily in startWithNotify.
    private io.vertx.core.http.HttpServer httpServer;

    /**
     * Holds on to Boon cache so we don't have to recreate reflected gak.
     */
    private Object context = Sys.contextToHold();

    /**
     * For Metrics.
     */
    private volatile int exceptionCount;
    /**
     * For Metrics.
     */
    private volatile int closeCount;

    public HttpServerVertx(final boolean startedVertx,
                           final Vertx vertx,
                           final String endpointName,
                           final HttpServerOptions options,
                           final QBitSystemManager systemManager,
                           final ServiceDiscovery serviceDiscovery,
                           final HealthServiceAsync healthServiceAsync,
                           final int serviceDiscoveryTtl,
                           final TimeUnit serviceDiscoveryTtlTimeUnit,
                           final CopyOnWriteArrayList<HttpResponseDecorator> decorators,
                           final HttpResponseCreator httpResponseCreator,
                           final RequestContinuePredicate requestBodyContinuePredicate) {

        this.startedVertx = startedVertx;
        this.simpleHttpServer = new SimpleHttpServer(endpointName, systemManager,
                options.getFlushInterval(), options.getHost(), options.getPort(),
                serviceDiscovery, healthServiceAsync, serviceDiscoveryTtl,
                serviceDiscoveryTtlTimeUnit, decorators, httpResponseCreator,
                requestBodyContinuePredicate);
        this.vertx = vertx;
        this.systemManager = systemManager;
        this.port = options.getPort();
        this.host = options.getHost();
        // Defensive copy so later mutation of the caller's options has no effect.
        this.options = BeanUtils.copy(options);
        // Install no-op idle consumers so the timer-refresh wrappers are always active.
        this.setWebSocketIdleConsume(aVoid -> {
        });
        this.setHttpRequestsIdleConsumer(aVoid -> {
        });
    }

    @Override
    public void setShouldContinueHttpRequest(final Predicate<HttpRequest> shouldContinueHttpRequest) {
        this.simpleHttpServer.setShouldContinueHttpRequest(shouldContinueHttpRequest);
    }

    @Override
    public void setWebSocketMessageConsumer(final Consumer<WebSocketMessage> webSocketMessageConsumer) {
        this.simpleHttpServer.setWebSocketMessageConsumer(webSocketMessageConsumer);
    }

    @Override
    public void setWebSocketCloseConsumer(final Consumer<WebSocketMessage> webSocketMessageConsumer) {
        this.simpleHttpServer.setWebSocketCloseConsumer(webSocketMessageConsumer);
    }

    @Override
    public void setHttpRequestConsumer(final Consumer<HttpRequest> httpRequestConsumer) {
        this.simpleHttpServer.setHttpRequestConsumer(httpRequestConsumer);
    }

    @Override
    public void setHttpRequestsIdleConsumer(final Consumer<Void> idleRequestConsumer) {
        // Wrap the consumer so every idle callback also refreshes the shared timer.
        this.simpleHttpServer.setHttpRequestsIdleConsumer(
                aVoid -> {
                    idleRequestConsumer.accept(null);
                    vertxUtils.setTime(Timer.timer().now());
                }
        );
    }

    @Override
    public void setWebSocketIdleConsume(final Consumer<Void> idleWebSocketConsumer) {
        // Wrap the consumer so every idle callback also refreshes the shared timer.
        this.simpleHttpServer.setWebSocketIdleConsume(
                aVoid -> {
                    idleWebSocketConsumer.accept(null);
                    vertxUtils.setTime(Timer.timer().now());
                }
        );
    }

    @Override
    public void start() {
        startWithNotify(null);
    }

    /**
     * Starts the underlying Vert.x HTTP server, running {@code runnable}
     * (when non-null) once the listen succeeds.
     */
    @Override
    public void startWithNotify(final Runnable runnable) {

        simpleHttpServer.start();

        if (debug) {
            // Periodically report the metric counters while debugging.
            vertx.setPeriodic(10_000, event -> logger.info("Exception Count {} Close Count {}", exceptionCount, closeCount));
        }

        // Translate QBit options into Vert.x server options.
        final io.vertx.core.http.HttpServerOptions vertxOptions = new io.vertx.core.http.HttpServerOptions();
        vertxOptions.setTcpNoDelay(options.isTcpNoDelay());
        vertxOptions.setSoLinger(options.getSoLinger());
        vertxOptions.setUsePooledBuffers(options.isUsePooledBuffers());
        vertxOptions.setReuseAddress(options.isReuseAddress());
        vertxOptions.setAcceptBacklog(options.getAcceptBackLog());
        vertxOptions.setTcpKeepAlive(options.isKeepAlive());
        vertxOptions.setCompressionSupported(options.isCompressionSupport());
        vertxOptions.setMaxWebsocketFrameSize(options.getMaxWebSocketFrameSize());
        vertxOptions.setSsl(options.isSsl());
        // NOTE(review): trust store options are always set, even when SSL is
        // disabled — presumably harmless, but confirm against Vert.x docs.
        final JksOptions jksOptions = new JksOptions();
        jksOptions.setPath(options.getTrustStorePath());
        jksOptions.setPassword(options.getTrustStorePassword());
        vertxOptions.setTrustStoreOptions(jksOptions);

        httpServer = vertx.createHttpServer(vertxOptions);

        httpServer.websocketHandler(this::handleWebSocketMessage);
        httpServer.requestHandler(this::handleHttpRequest);

        if (Str.isEmpty(host)) {
            httpServer.listen(port, event -> {
                if (event.failed()) {
                    logger.error("HTTP SERVER unable to start on port " + port + " default host ");
                    simpleHttpServer.getErrorHandler().accept(event.cause());
                } else {
                    if (runnable != null) {
                        runnable.run();
                    }
                    logger.info("HTTP SERVER started on port " + port + " default host ");
                    simpleHttpServer.getOnStart().run();
                }
            });
        } else {
            httpServer.listen(port, host, event -> {
                if (event.failed()) {
                    logger.error("HTTP SERVER UNABLE to START on port " + port + " host " + host);
                    simpleHttpServer.getErrorHandler().accept(event.cause());
                } else {
                    if (runnable != null) runnable.run();
                    logger.info("HTTP SERVER started on port " + port + " host " + host);
                    simpleHttpServer.getOnStart().run();
                }
            });
        }
    }

    /**
     * Starts the server and blocks (up to five seconds) until the listen
     * callback fires, then returns this server.
     */
    @Override
    public HttpServer startServerAndWait() {
        final CountDownLatch latch = new CountDownLatch(1);
        this.startWithNotify(() -> latch.countDown());
        try {
            latch.await(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.interrupted();
        }
        return this;
    }

    @Override
    public void stop() {

        simpleHttpServer.stop();
        try {
            if (httpServer != null) {
                httpServer.close();
            }
            // Only close Vertx when this instance created it.
            if (startedVertx && vertx != null) {
                vertx.close();
            }
        } catch (Exception ex) {
            logger.info("HTTP SERVER unable to close " + port + " host " + host);
        }
        if (systemManager != null) systemManager.serviceShutDown();
    }

    /**
     * Handle a vertx request by converting it into a QBit request.
     *
     * @param request request
     */
    private void handleHttpRequest(final HttpServerRequest request) {

        if (debug) {
            setupMetrics(request);
            logger.debug("HttpServerVertx::handleHttpRequest::{}:{}", request.method(), request.uri());
        }

        // Dispatch on HTTP verb: verbs that may carry a body read it first.
        switch (request.method().toString()) {

            case "PUT":
            case "POST":
            case "OPTIONS":
            case "TRACE":
            case "DELETE":
            case "CONNECT":
                handleRequestWithBody(request);
                break;

            case "HEAD":
            case "GET":
                handleRequestWithNoBody(request);
                break;

            default:
                throw new IllegalStateException("method not supported yet " + request.method());

        }

    }

    private void handleRequestWithNoBody(HttpServerRequest request) {
        final HttpRequest getRequest;
        getRequest = vertxUtils.createRequest(request, null, new HashMap<>(),
                simpleHttpServer.getDecorators(), simpleHttpServer.getHttpResponseCreator());
        simpleHttpServer.handleRequest(getRequest);
    }

    private void handleRequestWithBody(HttpServerRequest request) {
        final String contentType = request.headers().get("Content-Type");

        if (HttpContentTypes.isFormContentType(contentType)) {
            request.setExpectMultipart(true);
        }

        // One-element array so the body handler lambda can set the buffer
        // that the request's body supplier later reads.
        final Buffer[] bufferHolder = new Buffer[1];
        final HttpRequest bodyHttpRequest = vertxUtils.createRequest(request, () -> bufferHolder[0],
                new HashMap<>(), simpleHttpServer.getDecorators(), simpleHttpServer.getHttpResponseCreator());

        if (simpleHttpServer.getShouldContinueReadingRequestBody().test(bodyHttpRequest)) {
            request.bodyHandler((buffer) -> {
                bufferHolder[0] = buffer;
                simpleHttpServer.handleRequest(bodyHttpRequest);
            });
        } else {
            logger.info("Request body rejected {} {}", request.method(), request.absoluteURI());
        }
    }

    // Attach debug-only exception/end handlers that bump the metric counters.
    private void setupMetrics(final HttpServerRequest request) {

        request.exceptionHandler(event -> {
            if (debug) {
                exceptionCount++;
            }
            logger.info("EXCEPTION", event);
        });

        request.endHandler(event -> {
            if (debug) {
                closeCount++;
            }
            logger.info("REQUEST OVER");
        });
    }

    private void handleWebSocketMessage(final ServerWebSocket webSocket) {
        simpleHttpServer.handleOpenWebSocket(vertxUtils.createWebSocket(webSocket));
    }

    @Override
    public void setWebSocketOnOpenConsumer(Consumer<WebSocket> onOpenConsumer) {
        this.simpleHttpServer.setWebSocketOnOpenConsumer(onOpenConsumer);
    }
}
/* * Copyright (c) 2006-2017 DMDirc Developers * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the * Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.dmdirc.updater.checking; import com.dmdirc.config.binding.ConfigBinding; import com.dmdirc.config.GlobalConfig; import com.dmdirc.config.provider.AggregateConfigProvider; import com.dmdirc.updater.UpdateChannel; import com.dmdirc.updater.UpdateComponent; import com.dmdirc.updater.Version; import com.dmdirc.util.LogUtils; import com.dmdirc.util.io.Downloader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; /** * A strategy which sends a request to the DMDirc update service for information. 
 */
public class DMDircCheckStrategy implements UpdateCheckStrategy {

    private static final Logger LOG = LoggerFactory.getLogger(DMDircCheckStrategy.class);
    /** The URL to request to check for updates. */
    private static final String UPDATE_URL = "https://updates.dmdirc.com/";
    /** The update channel to check for updates on. */
    // NOTE(review): remains null if the config binding never fires or supplies
    // an unknown channel; getPayload would then NPE — confirm binding behaviour.
    private UpdateChannel channel;
    /** Downloader to download files. */
    private final Downloader downloader;

    /**
     * Creates a new instance of {@link DMDircCheckStrategy}.
     *
     * @param configProvider The provider to use to retrieve update channel information.
     * @param downloader     Used to download files
     */
    @Inject
    public DMDircCheckStrategy(@GlobalConfig final AggregateConfigProvider configProvider,
            final Downloader downloader) {
        configProvider.getBinder().bind(this, DMDircCheckStrategy.class);
        this.downloader = downloader;
    }

    /**
     * Sets the channel which will be used by the {@link DMDircCheckStrategy}.
     *
     * @param channel The new channel to use
     */
    @ConfigBinding(domain = "updater", key = "channel")
    public void setChannel(final String channel) {
        LOG.info("Changing channel to {}", channel);

        try {
            this.channel = UpdateChannel.valueOf(channel.toUpperCase());
        } catch (IllegalArgumentException ex) {
            // Unknown channel names are logged and ignored; the previous
            // channel (possibly null) stays in effect.
            LOG.warn("Unknown channel {}", channel, ex);
        }
    }

    /**
     * Posts the component list to the update server and parses each response
     * line into a per-component {@link UpdateCheckResult}.
     */
    @Override
    public Map<UpdateComponent, UpdateCheckResult> checkForUpdates(
            final Collection<UpdateComponent> components) {
        final Map<UpdateComponent, UpdateCheckResult> res = new HashMap<>();
        final Map<String, UpdateComponent> names = getComponentsByName(components);

        try {
            final List<String> response = downloader.getPage(UPDATE_URL, getPayload(components));
            LOG.trace("Response from update server: {}", response);

            for (String line : response) {
                final UpdateComponent component = names.get(getComponent(line));

                if (component == null) {
                    LOG.warn("Unable to extract component from line: {}", line);
                    continue;
                }

                final UpdateCheckResult result = parseResponse(component, line);

                if (result != null) {
                    res.put(component, result);
                }
            }
        } catch (IOException ex) {
            LOG.warn("I/O exception when checking for updates", ex);
        }

        return res;
    }

    /**
     * Builds the data payload which will be sent to the update server. Specifically, iterates over
     * each component and appends their name, the channel name, and the component's version number.
     *
     * @param components The components to be added to the payload
     *
     * @return A string which can be posted to the DMDirc update server
     */
    private String getPayload(final Collection<UpdateComponent> components) {
        final StringBuilder data = new StringBuilder("data=");

        for (UpdateComponent component : components) {
            LOG.trace("Adding payload info for component {} (version {})",
                    component.getName(), component.getVersion());
            data.append(component.getName());
            data.append(',');
            data.append(channel.name());
            data.append(',');
            data.append(component.getVersion());
            data.append(';');
        }

        LOG.debug("Constructed update payload: {}", data);
        return data.toString();
    }

    /**
     * Extracts the name of the component a given response line contains.
     * For "outofdate" lines the component is the second token; otherwise
     * the third token (or null when the line is too short).
     *
     * @param line The line to be parsed
     *
     * @return The name of the component extracted from the given line
     */
    private String getComponent(final String line) {
        final String[] parts = line.split(" ");

        if (parts.length >= 2 && "outofdate".equals(parts[0])) {
            return parts[1];
        }

        return parts.length >= 3 ? parts[2] : null;
    }

    /**
     * Checks the specified line to determine the message from the update server.
     *
     * @param component The component the line refers to
     * @param line      The line to be checked
     *
     * @return The parsed result, or null for errors/unknown responses
     */
    private UpdateCheckResult parseResponse(final UpdateComponent component, final String line) {
        final String[] parts = line.split(" ");
        switch (parts[0]) {
            case "outofdate":
                return parseOutOfDateResponse(component, parts);
            case "uptodate":
                return new BaseCheckResult(component);
            case "error":
                LOG.warn("Error received from update server: {}", line);
                break;
            default:
                LOG.error("Unknown update line received from server: {}", line);
                break;
        }

        return null;
    }

    /**
     * Parses an "outofdate" response from the server. Extracts the URL, remote version and remote
     * friendly version into a {@link BaseDownloadableResult}.
     *
     * @param parts The tokenised parts of the response line
     *
     * @return A corresponding {@link UpdateCheckResult} or null on failure
     */
    private UpdateCheckResult parseOutOfDateResponse(
            final UpdateComponent component, final String[] parts) {
        try {
            // Expected token layout: outofdate <name> <?> <version> <friendly> <url>
            // (indices 3-5 used below) — inferred from usage; confirm against server protocol.
            return new BaseDownloadableResult(component, new URL(parts[5]),
                    parts[4], new Version(parts[3]));
        } catch (MalformedURLException ex) {
            LOG.error(LogUtils.APP_ERROR, "Unable to construct URL for update. Parts: {}",
                    parts, ex);
            return null;
        }
    }

    /**
     * Builds a mapping of components' names to their actual component objects.
     *
     * @param components A collection of components to be mapped
     *
     * @return A corresponding Map containing a single entry for each component, which the
     *         component's name as a key and the component itself as a value.
     */
    private Map<String, UpdateComponent> getComponentsByName(
            final Collection<UpdateComponent> components) {
        final Map<String, UpdateComponent> res = new HashMap<>();

        for (UpdateComponent component : components) {
            res.put(component.getName(), component);
        }

        return res;
    }

}
/* * Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */

package sun.io;

import sun.nio.cs.ext.JIS_X_0208_Solaris_Decoder;
import sun.nio.cs.ext.JIS_X_0212_Solaris_Decoder;

/**
 *
 * @author Limin Shi
 * @author Ian Little
 *
 * EUC_JP variant converter for Solaris with vendor defined chars
 * added (4765370)
 */
public class ByteToCharEUC_JP_Solaris extends ByteToCharEUC_JP {
    // Second byte of a JIS0212 sequence that was split across convert() calls.
    private byte savedSecond = 0;

    ByteToCharJIS0201 bcJIS0201 = new ByteToCharJIS0201();
    ByteToCharJIS0212_Solaris bcJIS0212 = new ByteToCharJIS0212_Solaris();

    // Solaris vendor-extension lookup tables for the JIS0208 plane.
    short[] j0208Index1 = JIS_X_0208_Solaris_Decoder.getIndex1();
    String[] j0208Index2 = JIS_X_0208_Solaris_Decoder.getIndex2();
    ByteToCharJIS0212_Solaris j0212Decoder = new ByteToCharJIS0212_Solaris();

    public ByteToCharEUC_JP_Solaris() {
        super();
        start = 0xA1;
        end = 0xFE;
        savedSecond = 0;
    }

    // Flushing with a pending partial multibyte sequence is a malformed input.
    public int flush(char[] output, int outStart, int outEnd)
        throws MalformedInputException
    {
        if (savedSecond != 0) {
            reset();
            throw new MalformedInputException();
        }
        reset();
        return 0;
    }

    /**
     * Resets the converter.
     * Call this method to reset the converter to its initial state
     */
    public void reset() {
        super.reset();
        savedSecond = 0;
    }

    public String getCharacterEncoding() {
        return "eucJP-open";
    }

    // ASCII range decodes through the JIS0201 single-byte table.
    protected char convSingleByte(int b) {
        if (b < 0 || b > 0x7F)
            return REPLACE_CHAR;
        return bcJIS0201.getUnicode(b);
    }

    protected char getUnicode(int byte1, int byte2) {
        if (byte1 == 0x8E) {
            return bcJIS0201.getUnicode(byte2 - 256);
        }
        // Fix for bug 4121358 - similar fix for bug 4117820 put
        // into ByteToCharDoubleByte.getUnicode()
        if (((byte1 < 0) || (byte1 > j0208Index1.length))
            || ((byte2 < start) || (byte2 > end)))
            return REPLACE_CHAR;

        // Try the base JIS0208 mapping first; fall back to the Solaris
        // vendor-extension tables for characters the base table lacks.
        char result = super.getUnicode(byte1, byte2);
        if (result != '\uFFFD') {
            return result;
        } else {
            int n = (j0208Index1[byte1 - 0x80] & 0xf) * (end - start + 1)
                    + (byte2 - start);
            return j0208Index2[j0208Index1[byte1 - 0x80] >> 4].charAt(n);
        }
    }

    protected char decode0212(int byte1, int byte2) {
        return j0212Decoder.getUnicode(byte1, byte2);
    }

    /**
     * Converts sequences of bytes to characters.
     * Conversions that result in Exceptions can be restarted by calling
     * convert again, with appropriately modified parameters.
     * @return the characters written to output.
     * @param input byte array containing text in Double/single Byte
     * @param inOff offset of first byte to be converted
     * @param inEnd offset after the last byte to be converted
     * @param output character array to receive conversion result
     * @param outOff starting offset
     * @param outEnd offset after the last char to be written
     * @throws UnknownCharacterException for any bytes
     * that cannot be converted to the external character set.
     */
    public int convert(byte[] input, int inOff, int inEnd,
                       char[] output, int outOff, int outEnd)
        throws UnknownCharacterException,
               ConversionBufferFullException
    {
        char outputChar = REPLACE_CHAR;
        int inputSize = 0;                 // Size of input

        // Record beginning offsets
        charOff = outOff;
        byteOff = inOff;

        // Loop until we hit the end of the input
        while (byteOff < inEnd) {
            int byte1, byte2;

            // Resume from a lead byte saved by a previous call, if any.
            if (savedByte == 0) {
                byte1 = input[byteOff];
                inputSize = 1;
            } else {
                byte1 = savedByte;
                savedByte = 0;
                inputSize = 0;
            }

            outputChar = convSingleByte(byte1);

            if (outputChar == REPLACE_CHAR) {  // Multibyte char
                if ((byte1 & 0xff) == 0x8F) {  // JIS0212 (3-byte sequence)
                    if (byteOff + inputSize + 1 >= inEnd) {
                        // split in the middle of a character
                        // save the first 2 bytes for next time around
                        savedByte = (byte) byte1;
                        byteOff += inputSize;
                        if (byteOff < inEnd) {
                            savedSecond = input[byteOff];
                            byteOff++;
                        }
                        break;
                    }
                    if (savedSecond != 0) {
                        byte1 = savedSecond & 0xff;
                        savedSecond = 0;
                    } else {
                        byte1 = input[byteOff + inputSize] & 0xff;
                        inputSize++;
                    }
                    byte2 = input[byteOff + inputSize] & 0xff;
                    inputSize++;
                    outputChar = bcJIS0212.getUnicode(byte1-0x80, byte2-0x80);
                } else { // JIS0208 (2-byte sequence)
                    if (byteOff + inputSize >= inEnd) {
                        // split in the middle of a character
                        // save the first byte for next time around
                        savedByte = (byte) byte1;
                        byteOff += inputSize;
                        break;
                    }
                    byte1 &= 0xff;
                    byte2 = input[byteOff + inputSize] & 0xff;
                    inputSize++;
                    outputChar = getUnicode(byte1, byte2);
                }
            }

            if (outputChar == REPLACE_CHAR) {
                // Either substitute (subMode) or report the undecodable bytes.
                if (subMode)
                    outputChar = subChars[0];
                else {
                    badInputLength = inputSize;
                    throw new UnknownCharacterException();
                }
            }

            if (charOff >= outEnd)
                throw new ConversionBufferFullException();

            output[charOff++] = outputChar;
            byteOff += inputSize;
        }

        return charOff - outOff;
    }

}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.medlog.webservice.util; import com.google.gson.*; import static com.medlog.webservice.CONST.SETTINGS.*; import java.io.*; import java.text.*; import java.util.*; import java.util.logging.*; import java.util.regex.*; /** * * @author (c)2016 Guiding Technologies */ public class StrUtl { private static final String REGEX_HTML_MARKUP_CHARS = "\r\n|\r|\n|\t|<sty.*<.*>|<scr.*/script>|</?[a-z][a-z0-9]*[^<>]*>|<!--.*?-->|\\f|;"; /** * No instantiation. */ private StrUtl() { } /** * get Json * * @param state * @param msg * @return */ public static String getJSONMsg(String state, String msg) { return StrUtl.getJSONMsg(state, msg, null); } /** * Get JSON * * @param state info error * @param msg * @return */ public static String getJSONMsg(String state, String msg, Integer val) { JsonObject json = new JsonObject(); json.addProperty("state", StrUtl.toS(state)); json.addProperty("message", StrUtl.toS(msg, state.equals("error") ? "Something went wrong!" : "Unknown")); if (val != null) { json.addProperty("id", val); } return json.toString(); } /** * Safe toString method that converts nulls to empty strings. * * @param source * @return source or "" */ public static String toS(final Object source) { return source == null ? "" : source.toString().trim(); } /** * Safe toString method with default value. * * @param source input * @param defaultVal value to return if source is null * @return input or default value if input is null */ public static String toS(final Object source, String defaultVal) { try { return (source == null || source.toString().isEmpty()) ? 
toS(defaultVal) : toS(source.toString()).trim(); } catch (Exception e) { return defaultVal; } } /** * Converts int to String (if value is 0, returns empty string) * * @param source * @return */ public static String toS(final int source) { return source == 0 ? "" : String.valueOf(source); } /** * Removes all html tags from string * * @param val * @return */ public static String removeHtmlMarkups(String val) { String clean = ""; try { Pattern pattern = Pattern.compile(REGEX_HTML_MARKUP_CHARS, Pattern.DOTALL | Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE); Matcher matcher = pattern.matcher(val); try { clean = matcher.replaceAll(""); } catch (IllegalArgumentException ex) { } catch (Exception ex) { } } catch (Exception ex) { }// return toS(clean); } /** * HTML Encodes markup characters. * * @param src * @return */ public static String replaceEntities(String src) { src = src.replace("&", "&amp;"); src = src.replace("<", "&lt;"); src = src.replace(">", "&gt;"); src = src.replace("\"", "&quot;"); return src; } /** * Find first non-null and non-empty item * * @param items * @return First matched String or else empty string */ public static String coalesce(Object... items) { if (items == null) { return ""; } for (Object item : items) { if (item != null && ((String) item).length() > 0) { return item.toString(); } } return ""; } /** * Check if x is in the set vals * * @param x val to look for * @param vals a csv param * @return true if found, uot found or error = false */ public static boolean matchOR(int x, int... vals) { if (vals == null) { return false; } for (int i : vals) { if (x == i) { return true; } } return false; } /** * * @param valueToMatch * @param vals * @return */ public static boolean matchOR(Boolean valueToMatch, boolean... vals) { if (vals == null || valueToMatch == null) { return false; } for (boolean i : vals) { if (valueToMatch == i) { return true; } } return false; } public static boolean matchOR(String x, String... 
vals) { if (vals == null || x == null) { return false; } for (String i : vals) { if (x.equalsIgnoreCase(StrUtl.toS(i))) { return true; } } return false; } /** * Stores stack track as String * * @param exception Thrown * @return String containing stack trace. */ public static final String throwableStackTraceToString(final Throwable exception) { if (exception != null) { final StringWriter stringWriter = new StringWriter(); exception.printStackTrace(new PrintWriter(stringWriter)); return stringWriter.toString(); } else { LOG.warning("Exception null printing stack track"); return ""; } } /** * * @param e * @return */ public static final String stackStraceAsStringDetails(final Throwable e) { StringBuilder sb = new StringBuilder(""); if (e != null) { final StringWriter stringWriter = new StringWriter(); try { e.printStackTrace(new PrintWriter(stringWriter)); sb.append(stringWriter.toString()); sb.append("\nCause:"); sb.append(e); Throwable t = e.getCause(); while (t != null) { sb.append(t); sb.append("\n"); t = t.getCause(); } } catch (Exception exc) { sb.append(e.getMessage()); } finally { return StrUtl.toS(sb.toString(), "Sorry.. full stack trace not available"); } } else { LOG.warning("Exception null printing stack track"); return "Null exception"; } } private static final Logger LOG = Logger.getLogger(StrUtl.class.getName()); /** * * @param input * @param length * @return */ public static String truncateAtWord(String input, int length) { int offset; int iNextSpace; offset = 1; if (input == null || input.length() < (length - offset)) { return StrUtl.toS(input); } iNextSpace = input.lastIndexOf(" ", length); String trunc = input; try { trunc = String.format(input .substring(0, (iNextSpace > 0) ? iNextSpace : (length - offset)) .trim()); return trunc; } catch (Exception e) { return StrUtl.toS(trunc).trim(); } } /** * * @param REGEX String expression * @param valvalue to check * @param partialMatch allow partial match. 
* @return found */ public static boolean regexTest(String REGEX, String val, boolean partialMatch) { boolean foundMatch = false; try { Pattern regex = Pattern.compile(REGEX, Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE | Pattern.COMMENTS); Matcher regexMatcher = regex.matcher(StrUtl.toS(val)); foundMatch = partialMatch ? regexMatcher.find() : regexMatcher.matches(); } catch (PatternSyntaxException e) { if (DEBUG) { e.printStackTrace(); } } catch (IllegalArgumentException e) { if (DEBUG) { e.printStackTrace(); } } return foundMatch; } /** * Return format date with specified format. * * @param n date * @param format date format * @return */ public static String getDateWithFormat(Date n, String format) { if (n == null) { if (DEBUG) { System.out.println("com.medlog.webservice.util.StrUtl.getDateWithFormat() : Null Date"); return ""; } } DateFormat d = new SimpleDateFormat(format); d.setTimeZone(TimeZone.getTimeZone("EST")); return d.format(n); } /** * Format date as string * * @param n datte * @return date format yyyy--MM--dd */ public static String getDateWithFormat(Date n) { return StrUtl.getDateWithFormat(n, "yyyy-MM-dd"); } }
package jnetman.network;

import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;

/**
 * This class represents the NetworkManager abstraction for a node. A node could
 * be a router as well as a simple host. A node is defined by its name and its
 * network interfaces.
 *
 * @author Carmelo Cascone
 *
 */
public class Node extends NetworkDevice {

	/** Network interfaces of this node, keyed by interface name. */
	private Map<String, IfCard> ifCards;

	/** Lazily created agent for this node; see {@link #getAgent()}. */
	protected NodeAgent agent;

	public Node(String name, Network network) {
		super(name, network);
		this.ifCards = new HashMap<String, IfCard>();
	}

	/**
	 * Returns the IP address of the node. If no address is explicitly set the
	 * address of a random interface will be returned (and cached on the node).
	 *
	 * @return IP address of the node in InetAddress format
	 * @throws AddressException
	 *             If no address is explicitly set and there are no interfaces
	 *             declared for the node
	 */
	public InetAddress getAddress() throws AddressException {
		if (super.getAddress() != null)
			return super.getAddress();
		else if (!this.ifCards.isEmpty()) {
			for (IfCard ifCard : this.ifCards.values())
				if (ifCard.getAddress() != null) {
					super.setAddress(ifCard.getAddress());
					logger.debug("No explicit address declared, using "
							+ super.getAddress().getHostAddress()
							+ " from interface " + ifCard.getName());
					return super.getAddress();
				}
		}
		throw new AddressException(
				"Unable to get the IP address of "
						+ this.getName()
						+ ", there are no InterfaceCards specified for this node. "
						+ "You need to add at least one IfCard to get a valid IP address.");
	}

	/**
	 * Returns a network interface defined for this node
	 *
	 * @param name
	 *            The name of the network interface to get
	 * @return The network interface as an IfCard object, or null if absent
	 */
	public IfCard getInterfaceCard(String name) {
		return this.ifCards.get(name);
	}

	/**
	 * Add a network interface to the node. This method is protected and so
	 * should be used only inside the NetworkManager package. See also
	 * createInterfaceCard.
	 *
	 * @param ifCard
	 *            IfCard to be added
	 * @throws DuplicateElementException
	 *             If a network interface with the same name already exists for
	 *             this node
	 */
	protected void addInterfaceCard(IfCard ifCard)
			throws DuplicateElementException {
		if (this.ifCards.containsKey(ifCard.getName()))
			throw new DuplicateElementException("A network interface named '"
					+ ifCard.getName() + "' already exist for the node "
					+ this.getName());
		ifCard.setRouter(this);
		this.ifCards.put(ifCard.getName(), ifCard);
		logger.debug("New interface card added >> " + ifCard.getName());
	}

	/**
	 * Create a network interface for the node.
	 *
	 * @param name
	 *            Name of the network interface to be created
	 * @return The network interface created as an IfCard object
	 * @throws DuplicateElementException
	 *             If a network interface with the same name already exists for
	 *             this node
	 */
	public IfCard createInterfaceCard(String name)
			throws DuplicateElementException {
		if (this.ifCards.containsKey(name))
			throw new DuplicateElementException("An interfaceCard named '"
					+ name + "' already exist");
		IfCard newIfCard = new IfCard(name, this);
		this.addInterfaceCard(newIfCard);
		return newIfCard;
	}

	/**
	 * Returns all the network interfaces of this node
	 *
	 * @return A Collection of network interfaces as IfCard objects
	 */
	public Collection<IfCard> getIfCards() {
		return this.ifCards.values();
	}

	/**
	 * Check if this node is connected with the specified node. A node is
	 * connected with another node when at least one link exists between the two
	 * nodes and it's connected to the network interfaces of both nodes.
	 *
	 * @param node
	 *            The node for which to check the connection
	 * @return true if it's connected, false elsewhere.
	 */
	public boolean isConnectedWith(Node node) {
		if (this.ifCards.isEmpty())
			return false;
		else
			for (IfCard ifCard : this.getIfCards())
				if (ifCard.getLinkEndpoint() != null
						&& ifCard.getLinkEndpoint().getNode() == node)
					return true;
		return false;
	}

	/**
	 * Returns all the nodes connected with this node
	 *
	 * @return a Collection of nodes connected with this node as Node objects
	 */
	public Collection<Node> getNeighbours() {
		// ArrayList instead of the legacy synchronized Vector; the declared
		// return type (Collection) is unchanged for callers.
		ArrayList<Node> neighbours = new ArrayList<Node>();
		if (!this.ifCards.isEmpty()) {
			for (IfCard ifCard : this.ifCards.values()) {
				if (ifCard.hasLink() && ifCard.getLinkEndpoint() != null) {
					neighbours.add(ifCard.getLinkEndpoint().getNode());
				}
			}
		}
		return neighbours;
	}

	/**
	 * Returns all the links that connect this node with the specified node
	 *
	 * @param node
	 *            The other endpoint of the links
	 * @return A Collection of links as Link objects.
	 * @throws NotConnectedException
	 *             If this node is not connected with the specified node (no
	 *             links exist between the two nodes)
	 */
	public Collection<Link> getAllLinks(Node node) throws NotConnectedException {
		if (this.isConnectedWith(node)) {
			ArrayList<Link> links = new ArrayList<Link>();
			for (IfCard ifCard : this.getIfCards()) {
				if (ifCard.getLinkEndpoint() != null
						&& ifCard.getLinkEndpoint().getNode() == node)
					links.add(ifCard.getLink());
			}
			return links;
		} else
			throw new NotConnectedException();
	}

	/**
	 * Returns a textual description of the node, intended as the node name and
	 * a list of its network interfaces
	 *
	 * @return a description of the node in a string format
	 */
	public String getDescription() {
		// StringBuilder avoids O(n^2) string concatenation in the loop.
		StringBuilder text = new StringBuilder(this.getName()).append(" : ");
		if (!this.ifCards.isEmpty())
			for (IfCard ifCard : ifCards.values())
				text.append("\n- ").append(ifCard.getDescription());
		return text.toString();
	}

	/**
	 * Returns the (lazily created) agent for this node.
	 *
	 * @return the NodeAgent for this node
	 * @throws AddressException
	 *             propagated from NodeAgent construction
	 */
	public synchronized NodeAgent getAgent() throws AddressException {
		if (this.agent == null)
			this.agent = new NodeAgent(this);
		return this.agent;
	}
}
package org.apache.cassandra.db.transaction;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.cassandra.db.*;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.FastByteArrayInputStream;
import org.apache.cassandra.net.ICompletable;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.net.MessageProducer;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.thrift.Mutation;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.LamportClock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Coordinator side of a batch-mutate transaction: drives a two-phase
 * commit across cohort replicas, tracking notifications, dependency
 * checks, yes-votes and acks until local state can be cleaned up.
 *
 * State machine (per the original notes below): INIT -> (local receive)
 * WAIT_FOR_VOTES, or (replicated receive) WAIT_FOR_DEPS_AND_NOTIFICATIONS
 * -> SEND_PREPARES -> WAIT_FOR_VOTES -> COMMIT -> WAIT_FOR_ACKS -> done.
 *
 * NOTE(review): several code paths synchronize on boxed fields
 * (Boolean/Integer) that are reassigned inside the synchronized block;
 * after reassignment concurrent callers may lock different objects, and
 * small boxed values are interned JVM-wide. Looks fragile — confirm the
 * intended locking discipline before relying on it.
 */
public class BatchMutateTransactionCoordinator implements MessageProducer
{
    // coordinator:
    // INIT --receive data locally --> WAIT_FOR_PREPARES
    //      --receive data remotely --> --send out dep_checks-->
    // WAIT_FOR_DEPS_AND_NOTIFICATIONS --> receive dep_check_response --> self
    //                                 --> receive notify --> self
    //                                 --> receive last notify and last dep_check_response -->
    // SEND_PREPARES -- send prepare to all cohorts -->
    // WAIT_FOR_VOTES -- receive all yes-votes -->
    // COMMIT --set lvt on local keys --> choose commit time --> commit locally --> send commit to cohorts -->
    // WAIT_FOR_ACKS -- receive all acks --> cleanup local state

    private static BatchMutateTransactionCoordinatorSerializer serializer_ = new BatchMutateTransactionCoordinatorSerializer();

    public static BatchMutateTransactionCoordinatorSerializer serializer()
    {
        return serializer_;
    }

    private static enum CoordinatorState {
        INIT,
        WAIT_FOR_DEPS_AND_NOTIFICATIONS,
        SEND_PREPARES,
        WAIT_FOR_VOTES,
        COMMIT,
        WAIT_FOR_ACKS
    };

    private static final Logger static_logger = LoggerFactory.getLogger(BatchMutateTransactionCoordinator.class);
    private final Logger logger = LoggerFactory.getLogger(BatchMutateTransactionCoordinator.class);

    // TODO: Switch to ConcurrentHashMap?
    // Live coordinators by transaction id; entries removed in cleanup().
    private static Map<Long, BatchMutateTransactionCoordinator> idToCoordinator = Collections.synchronizedMap(new HashMap<Long, BatchMutateTransactionCoordinator>());

    // Counts buffered for transactions whose coordinator does not exist yet.
    private static Map<Long, Integer> queuedNotifications = new HashMap<Long, Integer>();
    private static Map<Long, Integer> queuedYesVotes = new HashMap<Long, Integer>();

    public static BatchMutateTransactionCoordinator findCoordinator(Long transactionId)
    {
        return idToCoordinator.get(transactionId);
    }

    /** Delivers the notifications to a coordinator if it exists, and otherwise queues it up for later delivery
     *
     * @param transactionId
     * @param notificationCount
     */
    public static void deliverNotification(Long transactionId, int notificationCount)
    {
        if (static_logger.isTraceEnabled()) {
            static_logger.trace("deliverNotification({}, {})", new Object[]{transactionId, notificationCount});
        }

        // Notification delivery & coordinator finding are synchronized together to avoid a race between:
        //   findCoordinator then queue notification and
        //   create coordinator then get past where we check for queued notifications
        // If a coordinator is created after we have the lock, it must
        // wait for us to release the lock before it can check for
        // queued notifications and is therefore guaranteed to see them
        BatchMutateTransactionCoordinator coordinator = null;
        synchronized(queuedNotifications) {
            coordinator = findCoordinator(transactionId);
            if (coordinator == null) {
                Integer previousCount = queuedNotifications.get(transactionId);
                if (previousCount != null) {
                    queuedNotifications.put(transactionId, previousCount + notificationCount);
                } else {
                    queuedNotifications.put(transactionId, notificationCount);
                }
            }
        }

        // Send the notify outside the synchronized block so other
        // notifications can be delivered to other coordinators
        if (coordinator != null) {
            coordinator.receiveNotify(notificationCount);
        }
    }

    /** Delivers the yesVotes to a coordinator if it exists, and otherwise queues it up for later delivery
     *
     * @param transactionId
     * @param yesVoteCount
     */
    public static void deliverYesVotes(Long transactionId, int yesVoteCount)
    {
        if (static_logger.isTraceEnabled()) {
            static_logger.trace("deliverYesVotes({}, {})", new Object[]{transactionId, yesVoteCount});
        }

        // Yesvote delivery & coordinator finding are synchronized together to avoid a race
        // See deliverNotify for reasoning as why this is correct
        BatchMutateTransactionCoordinator coordinator = null;
        synchronized(queuedYesVotes) {
            coordinator = findCoordinator(transactionId);
            if (coordinator == null) {
                Integer previousCount = queuedYesVotes.get(transactionId);
                if (previousCount != null) {
                    queuedYesVotes.put(transactionId, previousCount + yesVoteCount);
                } else {
                    queuedYesVotes.put(transactionId, yesVoteCount);
                }
            }
        }

        if (coordinator != null) {
            coordinator.receiveYesVotes(yesVoteCount);
        }
    }

    // --- Per-transaction state ---
    private CoordinatorState state;
    // NOTE(review): boxed Integer counters, also used as lock objects below.
    private Integer yesVotesReceived = 0;
    private Integer acksReceived = 0;
    private Integer notifiesReceived = 0;
    private String keyspace;
    private List<IMutation> mutations;
    private Set<Dependency> deps;
    private Set<ByteBuffer> allKeys;
    private List<ByteBuffer> remoteKeys;
    private long transactionId;
    private ByteBuffer coordinatorKey;
    // Non-null only for local transactions; commit() posts the timestamp here.
    private LinkedBlockingQueue<Long> completionBlockingQueue;
    private boolean local;
    private boolean depChecksReturned;
    private Long timestamp;
    //localCommitTime in accepting datacenter
    private Long localCommitTime;
    private final int waitForLocalCommitTimeoutSecs = 5;

    // Ensure non-message-receiving functions are only called once
    private final boolean sanityCheckFireOnces = true; // also change in Cohort
    private Boolean receivedTransaction = false;
    private Boolean checkedQueuedNotifications = false;
    private Boolean sentPrepares = false;
    private Boolean checkedQueuedYesVotes = false;
    private Boolean committed = false;
    private Boolean cleanedUp = false;

    public BatchMutateTransactionCoordinator()
    {
        if (logger.isTraceEnabled()) {
            logger.trace("BatchMutateTransactionCoordinator()", new Object[]{});
        }
        state = CoordinatorState.INIT;
    }

    /**
     * Common setup shared by local and replicated receives: stores the
     * transaction data and computes remoteKeys = allKeys minus the keys of
     * the local mutations.
     */
    private void receiveTransaction(String keyspace, List<IMutation> mutations, Set<Dependency> deps, ByteBuffer coordinator_key, Set<ByteBuffer> all_keys, long transaction_id)
    {
        if (sanityCheckFireOnces) {
            synchronized(receivedTransaction) {
                assert receivedTransaction == false : "May only receive transaction " + transaction_id + " once";
                receivedTransaction = true;
            }
        }

        this.keyspace = keyspace;
        this.mutations = mutations;
        this.deps = deps;
        this.allKeys = all_keys;
        this.transactionId = transaction_id;
        this.coordinatorKey = coordinator_key;
        this.depChecksReturned = false;

        Set<ByteBuffer> localKeys = new HashSet<ByteBuffer>();
        for (IMutation mutation : mutations) {
            localKeys.add(mutation.key());
        }

        remoteKeys = new ArrayList<ByteBuffer>(allKeys.size());
        for (ByteBuffer key : allKeys) {
            if (!localKeys.contains(key)) {
                remoteKeys.add(key);
            }
        }
    }

    /**
     * Entry point for a transaction accepted in this datacenter: converts
     * the thrift mutation map, registers the coordinator and jumps straight
     * to WAIT_FOR_VOTES (no dep-check/notification round locally).
     */
    public void receiveLocalTransaction(String keyspace, Map<ByteBuffer, Map<String, List<Mutation>>> mutation_map, Set<Dependency> deps, ByteBuffer coordinator_key, Set<ByteBuffer> all_keys, long transaction_id)
    throws InvalidRequestException, IOException, TimeoutException
    {
        assert state == CoordinatorState.INIT : state + " != " + CoordinatorState.INIT + " on " + transaction_id;
        if (logger.isTraceEnabled()) {
            logger.trace("receiveLocalTransaction({}, {}, {}, {}, {}, {})", new Object[]{keyspace, mutation_map, deps, coordinator_key, all_keys, transaction_id});
        }

        List<IMutation> mutations = BatchMutateTransactionUtil.convertToInternalMutations(keyspace, mutation_map, coordinator_key);
        receiveTransaction(keyspace, mutations, deps, coordinator_key, all_keys, transaction_id);

        this.completionBlockingQueue = new LinkedBlockingQueue<Long>();
        this.local = true;

        // Register this coordinator only after we're done constructing it
        idToCoordinator.put(transactionId, this);
        BatchMutateTransactionUtil.registerCoordinatorKey(coordinator_key, transaction_id);

        state = CoordinatorState.WAIT_FOR_VOTES;
        checkQueuedYesVotes();
    }

    /**
     * Entry point for a transaction replicated from another datacenter:
     * fires dependency checks and waits for them plus cohort notifications
     * before preparing.
     */
    public void receiveReplicatedTransaction(String keyspace, List<IMutation> mutations, Set<Dependency> deps, ByteBuffer coordinator_key, Set<ByteBuffer> all_keys, long transaction_id, long timestamp)
    {
        assert state == CoordinatorState.INIT : state + " != " + CoordinatorState.INIT + " on " + transaction_id;
        if (logger.isTraceEnabled()) {
            logger.trace("receiveReplicatedTransaction({}, {}, {}, {}, {}, {}, {})", new Object[]{keyspace, mutations, deps, coordinator_key, all_keys, transaction_id, timestamp});
        }

        receiveTransaction(keyspace, mutations, deps, coordinator_key, all_keys, transaction_id);

        this.timestamp = timestamp;
        this.completionBlockingQueue = null;
        this.local = false;

        //mark as pending for depChecks
        AppliedOperations.addPendingOp(coordinator_key, timestamp);

        // Register this coordinator only after we're done constructing it
        idToCoordinator.put(transactionId, this);
        BatchMutateTransactionUtil.registerCoordinatorKey(coordinator_key, transaction_id);

        state = CoordinatorState.WAIT_FOR_DEPS_AND_NOTIFICATIONS;
        StorageProxy.checkDependencies(keyspace, coordinatorKey, timestamp, deps, new TransactionDepCheckCompletable());
        checkQueuedNotifications();
    }

    /** Callback invoked when the dependency checks complete; counts as a zero-key notify. */
    private class TransactionDepCheckCompletable implements ICompletable
    {
        @Override
        public void complete()
        {
            depChecksReturned = true;
            receiveNotify(0);
        }
    }

    /** Drains notifications that arrived before this coordinator existed. */
    private void checkQueuedNotifications()
    {
        if (sanityCheckFireOnces) {
            synchronized(checkedQueuedNotifications) {
                assert checkedQueuedNotifications == false : "May only checkQueuedNotifications for " + transactionId + " once";
                checkedQueuedNotifications = true;
            }
        }

        if (logger.isTraceEnabled()) {
            logger.trace("checkQueuedNotifications() on {}", new Object[]{transactionId});
        }

        Integer queuedCount;
        synchronized(queuedNotifications) {
            queuedCount = queuedNotifications.get(transactionId);
            if (queuedCount != null) {
                queuedNotifications.remove(transactionId);
            }
        }

        //call receiveNotify even if none are queued in case all notification came in when we were in the INIT state
        if (queuedCount == null) {
            receiveNotify(0);
        } else {
            receiveNotify(queuedCount);
        }
    }

    /**
     * Accumulates cohort notifications; once all remote keys have notified
     * (and we are in WAIT_FOR_DEPS_AND_NOTIFICATIONS) advances to
     * SEND_PREPARES.
     */
    public synchronized void receiveNotify(int keysNotified)
    {
        CoordinatorState stateWhenInvoked = state;
        //assert stateWhenInvoked == CoordinatorState.INIT || stateWhenInvoked == CoordinatorState.WAIT_FOR_DEPS_AND_NOTIFICATIONS : stateWhenInvoked + " on " + transactionId;
        if (logger.isTraceEnabled()) {
            logger.trace("receiveNotify({} out of {} - {}) on {} @ ", new Object[]{keysNotified, remoteKeys.size(), notifiesReceived, transactionId, stateWhenInvoked});
        }

        boolean shouldSendPrepares = false;
        // NOTE(review): locks a boxed Integer that is reassigned inside the
        // block (+= rebinds the field) — confirm this is safe here.
        synchronized(notifiesReceived) {
            notifiesReceived += keysNotified;
            assert notifiesReceived <= remoteKeys.size() : notifiesReceived + " ? " + remoteKeys.size() + " on " + transactionId;

            if (notifiesReceived == remoteKeys.size() && stateWhenInvoked == CoordinatorState.WAIT_FOR_DEPS_AND_NOTIFICATIONS) {
                //defer sending prepares unless we release notifiesReceived
                shouldSendPrepares = true;
            }
        }

        if (shouldSendPrepares) {
            state = CoordinatorState.SEND_PREPARES;
            sendPrepares();
        }
    }

    /** Sends prepare messages to all cohorts and moves to WAIT_FOR_VOTES. */
    private void sendPrepares()
    {
        if (sanityCheckFireOnces) {
            synchronized(sentPrepares) {
                assert sentPrepares == false : "May only sendPrepares for " + transactionId + " once";
                sentPrepares = true;
            }
        }

        assert state == CoordinatorState.SEND_PREPARES : state + " != " + CoordinatorState.SEND_PREPARES + " on " + transactionId;
        if (logger.isTraceEnabled()) {
            logger.trace("sendPrepares() on {}", new Object[]{transactionId});
        }

        state = CoordinatorState.WAIT_FOR_VOTES;
        try {
            TransactionProxy.sendPrepares(keyspace, remoteKeys, transactionId);
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        // In case there are no cohorts, we receive 0 votes immediately
        if (remoteKeys.size() == 0) {
            receiveYesVotes(0);
        }
    }

    /* only needs to be called for local transactions, replicated transactions
     * will always have a coordinator created and go through the first round
     * before yesVotes start coming in */
    private void checkQueuedYesVotes()
    {
        if (sanityCheckFireOnces) {
            synchronized(checkedQueuedYesVotes) {
                assert checkedQueuedYesVotes == false : "May only checkQueuedYesVotes for " + transactionId + " once";
                checkedQueuedYesVotes = true;
            }
        }

        if (logger.isTraceEnabled()) {
            logger.trace("checkQueuedYesVotes() on {}", new Object[]{transactionId});
        }

        Integer queuedCount;
        synchronized(queuedYesVotes) {
            queuedCount = queuedYesVotes.get(transactionId);
            if (queuedCount != null) {
                queuedYesVotes.remove(transactionId);
            }
        }

        if (queuedCount == null) {
            queuedCount = 0;
        }
        receiveYesVotes(queuedCount);
    }

    /**
     * Accumulates yes-votes from cohorts; when all remote keys have voted
     * (and we are in WAIT_FOR_VOTES) advances to COMMIT.
     */
    public synchronized void receiveYesVotes(int keysPrepared)
    {
        CoordinatorState stateWhenInvoked = state;
        if (keysPrepared == 0 && stateWhenInvoked != CoordinatorState.WAIT_FOR_VOTES) {
            return;
        }
        assert stateWhenInvoked == CoordinatorState.WAIT_FOR_VOTES || stateWhenInvoked == CoordinatorState.INIT : stateWhenInvoked + " != " + CoordinatorState.WAIT_FOR_VOTES + " on " + transactionId;
        if (logger.isTraceEnabled()) {
            logger.trace("receiveYesVotes({} out of {} - {}) on {} @ {}", new Object[]{keysPrepared, remoteKeys.size(), yesVotesReceived, transactionId, stateWhenInvoked});
        }

        yesVotesReceived += keysPrepared;
        assert yesVotesReceived <= remoteKeys.size() : yesVotesReceived + " ? " + remoteKeys.size() + " on " + transactionId;

        if (yesVotesReceived == remoteKeys.size() && stateWhenInvoked == CoordinatorState.WAIT_FOR_VOTES) {
            state = CoordinatorState.COMMIT;
            commit();
        }
    }

    /**
     * Commit phase: marks the transaction pending, chooses the commit time
     * (Lamport clock for local transactions), notifies cohorts, applies the
     * mutations locally, replicates to other datacenters when local, and
     * moves to WAIT_FOR_ACKS.
     */
    private void commit()
    {
        if (sanityCheckFireOnces) {
            synchronized(committed) {
                assert committed == false : "May only commit " + transactionId + " once";
                committed = true;
            }
        }

        assert state == CoordinatorState.COMMIT : state + " != " + CoordinatorState.COMMIT + " on " + transactionId;
        if (logger.isTraceEnabled()) {
            logger.trace("commit() on {}", new Object[]{transactionId});
        }

        //set latestValidTime on local keys
        //NOTE: Need to set a "don't" update flag here
        try {
            BatchMutateTransactionUtil.markTransactionPending(keyspace, mutations, transactionId);
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        //commit now
        localCommitTime = LamportClock.getVersion();
        if (local) {
            timestamp = localCommitTime;
        }

        //send commit to cohorts
        try {
            TransactionProxy.sendCommit(keyspace, remoteKeys, transactionId, localCommitTime);
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        //apply locally
        try {
            BatchMutateTransactionUtil.applyTransaction(keyspace, mutations, timestamp, localCommitTime, coordinatorKey);
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        //mark as applied for depChecks
        AppliedOperations.addAppliedOp(coordinatorKey, timestamp);

        if (local) {
            try {
                TransactionProxy.replicateCoordinatorToOtherDatacenters(this, keyspace, coordinatorKey);
            } catch (IOException e) {
                e.printStackTrace();
                throw new RuntimeException(e);
            }
        }

        if (local) {
            completionBlockingQueue.add(timestamp);
        }

        state = CoordinatorState.WAIT_FOR_ACKS;
        receiveAck(0);
    }

    /**
     * Accumulates acks from cohorts; when all remote keys have acked (and we
     * are in WAIT_FOR_ACKS) the coordinator state is torn down.
     */
    public synchronized void receiveAck(int keysAcked)
    {
        CoordinatorState stateWhenInvoked = state;
        // null state is okay if this is called with (0) and a previously received ack caused the cleanup
        assert stateWhenInvoked == CoordinatorState.WAIT_FOR_ACKS || stateWhenInvoked == CoordinatorState.COMMIT || stateWhenInvoked == null : stateWhenInvoked + " on " + transactionId;
        if (logger.isTraceEnabled()) {
            logger.trace("receiveAck({} out of {} - {}) on {} @ {}", new Object[]{keysAcked, remoteKeys.size(), acksReceived, transactionId, stateWhenInvoked});
        }

        acksReceived += keysAcked;
        assert acksReceived <= remoteKeys.size() : acksReceived + " ? " + remoteKeys.size() + " on " + transactionId;

        if (acksReceived == remoteKeys.size() && stateWhenInvoked == CoordinatorState.WAIT_FOR_ACKS) {
            state = null;
            cleanup();
        }
    }

    /** Unregisters the coordinator key and removes this coordinator from the id map. */
    private void cleanup()
    {
        if (sanityCheckFireOnces) {
            synchronized(cleanedUp) {
                assert cleanedUp == false : "May only cleanUp for " + transactionId + " once";
                cleanedUp = true;
            }
        }

        if (logger.isTraceEnabled()) {
            logger.trace("cleanup() on {}", new Object[]{transactionId});
        }

        BatchMutateTransactionUtil.unregisterCoordinatorKey(coordinatorKey, transactionId, localCommitTime);
        idToCoordinator.remove(transactionId);
    }

    /**
     * Blocks (ignoring interrupts) until the local commit time is posted by
     * commit(); throws RuntimeException after the poll timeout, logging the
     * full coordinator state for diagnosis.
     *
     * @return the commit timestamp of the local transaction
     */
    public long waitForLocalCommitNoInterruption()
    {
        assert completionBlockingQueue != null : "can only wait on local commit";

        while (true) {
            try {
                Long completionTime = completionBlockingQueue.poll(waitForLocalCommitTimeoutSecs, TimeUnit.SECONDS);
                if (completionTime != null) {
                    return completionTime;
                } else {
                    logger.warn("BatchMutateTransaction timedout, transactionId = {}, state = {}, yes = {}, acks = {}, notifies = {}, remoteKeys.size() = {}, timestamp = {}, localCommitTime = {}, " +
                            "rT = {}, cQN = {}, sP = {}, cQYV = {}, committed = {}, cleanedUp = {}",
                            new Object[]{transactionId, state, yesVotesReceived, acksReceived, notifiesReceived, remoteKeys.size(), timestamp, localCommitTime,
                                         receivedTransaction, checkedQueuedNotifications, sentPrepares, checkedQueuedYesVotes, committed, cleanedUp});
                    throw new RuntimeException();
                }
            } catch (InterruptedException e) {
                //ignore, we want a response no matter what
                if (logger.isDebugEnabled()) {
                    e.printStackTrace();
                }
                continue;
            }
        }
    }

    public Long localCommitTime()
    {
        return localCommitTime;
    }

    /**
     * Wire format: keyspace, mutation count + tagged mutations
     * (row/counter flag byte), dependencies, coordinator key, all keys,
     * transaction id, timestamp. deserialize() rebuilds the coordinator via
     * receiveReplicatedTransaction.
     */
    private static class BatchMutateTransactionCoordinatorSerializer implements IVersionedSerializer<BatchMutateTransactionCoordinator>
    {
        final static int ROW_MUTATION_FLAG = 0x01;
        final static int COUNTER_MUTATION_FLAG = 0x02;

        @Override
        public void serialize(BatchMutateTransactionCoordinator coordinator, DataOutput dos, int version) throws IOException
        {
            dos.writeUTF(coordinator.keyspace);
            dos.writeInt(coordinator.mutations.size());
            for (IMutation mutation : coordinator.mutations) {
                if (mutation instanceof RowMutation) {
                    dos.writeByte(ROW_MUTATION_FLAG);
                    RowMutation.serializer().serialize((RowMutation) mutation, dos, version);
                } else {
                    assert mutation instanceof CounterMutation;
                    dos.writeByte(COUNTER_MUTATION_FLAG);
                    CounterMutation.serializer().serialize((CounterMutation) mutation, dos, version);
                }
            }
            dos.writeInt(coordinator.deps.size());
            for (Dependency dep : coordinator.deps) {
                Dependency.serializer().serialize(dep, dos);
            }
            ByteBufferUtil.writeWithShortLength(coordinator.coordinatorKey, dos);
            dos.writeInt(coordinator.allKeys.size());
            for (ByteBuffer key : coordinator.allKeys) {
                ByteBufferUtil.writeWithShortLength(key, dos);
            }
            dos.writeLong(coordinator.transactionId);
            dos.writeLong(coordinator.timestamp);
        }

        @Override
        public BatchMutateTransactionCoordinator deserialize(DataInput dis, int version) throws IOException
        {
            String keyspace = dis.readUTF();
            int mutationsLength = dis.readInt();
            List<IMutation> mutations = new ArrayList<IMutation>(mutationsLength);
            for (int i = 0; i < mutationsLength; ++i) {
                int typeByte = dis.readUnsignedByte();
                if ((typeByte & ROW_MUTATION_FLAG) != 0) {
                    assert (typeByte & COUNTER_MUTATION_FLAG) == 0;
                    RowMutation rm = RowMutation.serializer().deserialize(dis, version);
                    mutations.add(rm);
                } else {
                    assert (typeByte & COUNTER_MUTATION_FLAG) != 0;
                    assert (typeByte & ROW_MUTATION_FLAG) == 0;
                    CounterMutation cm = CounterMutation.serializer().deserialize(dis, version);
                    mutations.add(cm);
                }
            }
            int depsLength = dis.readInt();
            Set<Dependency> deps = new HashSet<Dependency>(depsLength);
            for (int i = 0; i < depsLength; ++i) {
                deps.add(Dependency.serializer().deserialize(dis));
            }
            ByteBuffer coordinatorKey = ByteBufferUtil.readWithShortLength(dis);
            int allKeysLength = dis.readInt();
            Set<ByteBuffer> allKeys = new HashSet<ByteBuffer>(allKeysLength);
            for (int i = 0; i < allKeysLength; ++i) {
                allKeys.add(ByteBufferUtil.readWithShortLength(dis));
            }
            long transactionId = dis.readLong();
            long timestamp = dis.readLong();

            BatchMutateTransactionCoordinator coordinator = new BatchMutateTransactionCoordinator();
            coordinator.receiveReplicatedTransaction(keyspace, mutations, deps, coordinatorKey, allKeys, transactionId, timestamp);
            return coordinator;
        }

        @Override
        public long serializedSize(BatchMutateTransactionCoordinator coordinator, int version)
        {
            int size = 0;
            size += DBConstants.shortSize + FBUtilities.encodedUTF8Length(coordinator.keyspace);
            size += DBConstants.intSize;
            for (IMutation mutation : coordinator.mutations) {
                if (mutation instanceof RowMutation) {
                    size += 1;
                    size += RowMutation.serializer().serializedSize((RowMutation) mutation, version);
                } else {
                    assert mutation instanceof CounterMutation;
                    size += 1;
                    size += CounterMutation.serializer().serializedSize((CounterMutation) mutation, version);
                }
            }
            size += DBConstants.intSize;
            for (Dependency dep : coordinator.deps) {
                size += Dependency.serializer().serializedSize(dep);
            }
            size += DBConstants.shortSize + coordinator.coordinatorKey.remaining();
            size += DBConstants.intSize;
            for (ByteBuffer key : coordinator.allKeys) {
                size += DBConstants.shortSize + key.remaining();
            }
            size += DBConstants.longSize;
            size += DBConstants.longSize;
            return size;
        }
    }

    // Per-version cache of this coordinator's serialized form.
    private final Map<Integer, byte[]> preserializedBuffers = new HashMap<Integer, byte[]>();

    public synchronized byte[] getSerializedBuffer(int version) throws IOException
    {
        byte[] bytes = preserializedBuffers.get(version);
        if (bytes == null) {
            bytes = FBUtilities.serialize(this, serializer(), version);
            preserializedBuffers.put(version, bytes);
        }
        return bytes;
    }

    public static BatchMutateTransactionCoordinator fromBytes(byte[] raw, int version) throws IOException
    {
        return serializer().deserialize(new DataInputStream(new FastByteArrayInputStream(raw)), version);
    }

    @Override
    public Message getMessage(Integer version) throws IOException
    {
        return new Message(FBUtilities.getBroadcastAddress(), StorageService.Verb.TRANSACTION_COORDINATOR, getSerializedBuffer(version), version);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id: ImageManager.java 924666 2010-03-18 08:26:30Z jeremias $ */ package org.apache.xmlgraphics.image.loader; import java.io.IOException; import java.util.Iterator; import java.util.Map; import javax.xml.transform.Source; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.image.loader.cache.ImageCache; import org.apache.xmlgraphics.image.loader.pipeline.ImageProviderPipeline; import org.apache.xmlgraphics.image.loader.pipeline.PipelineFactory; import org.apache.xmlgraphics.image.loader.spi.ImageImplRegistry; import org.apache.xmlgraphics.image.loader.spi.ImagePreloader; import org.apache.xmlgraphics.image.loader.util.ImageUtil; import org.apache.xmlgraphics.image.loader.util.Penalty; /** * ImageManager is the central starting point for image access. 
 */
public class ImageManager {

    /** logger */
    protected static Log log = LogFactory.getLog(ImageManager.class);

    /** Holds all registered interface implementations for the image package */
    private ImageImplRegistry registry;

    /** Provides session-independent information */
    private ImageContext imageContext;

    /** The image cache for this instance */
    private ImageCache cache = new ImageCache();

    /** Builds conversion pipelines on behalf of this manager. */
    private PipelineFactory pipelineFactory = new PipelineFactory(this);

    /**
     * Main constructor.
     * @param context the session-independent context information
     */
    public ImageManager(ImageContext context) {
        // Delegates to the full constructor using the shared default registry.
        this(ImageImplRegistry.getDefaultInstance(), context);
    }

    /**
     * Constructor for testing purposes.
     * @param registry the implementation registry with all plug-ins
     * @param context the session-independent context information
     */
    public ImageManager(ImageImplRegistry registry, ImageContext context) {
        this.registry = registry;
        this.imageContext = context;
    }

    /**
     * Returns the ImageImplRegistry in use by the ImageManager.
     * @return the ImageImplRegistry
     */
    public ImageImplRegistry getRegistry() {
        return this.registry;
    }

    /**
     * Returns the ImageContext in use by the ImageManager.
     * @return the ImageContext
     */
    public ImageContext getImageContext() {
        return this.imageContext;
    }

    /**
     * Returns the ImageCache in use by the ImageManager.
     * @return the ImageCache
     */
    public ImageCache getCache() {
        return this.cache;
    }

    /**
     * Returns the PipelineFactory in use by the ImageManager.
     * @return the PipelineFactory
     */
    public PipelineFactory getPipelineFactory() {
        return this.pipelineFactory;
    }

    /**
     * Returns an ImageInfo object containing its intrinsic size for a given URI. The ImageInfo
     * is retrieved from an image cache if it has been requested before.
     * @param uri the URI of the image
     * @param session the session context through which to resolve the URI if the image is not in
     *                  the cache
     * @return the ImageInfo object created from the image
     * @throws ImageException If no suitable ImagePreloader can be found to load the image or
     *          if an error occurred while preloading the image.
     * @throws IOException If an I/O error occurs while preloading the image
     */
    public ImageInfo getImageInfo(String uri, ImageSessionContext session)
            throws ImageException, IOException {
        // Prefer the cache when one is configured; the cache itself calls back
        // into this manager (via needImageInfo) on a miss.
        if (getCache() != null) {
            return getCache().needImageInfo(uri, session, this);
        } else {
            return preloadImage(uri, session);
        }
    }

    /**
     * Preloads an image, i.e. the format of the image is identified and some basic information
     * (MIME type, intrinsic size and possibly other values) are loaded and returned as an
     * ImageInfo object. Note that the image is not fully loaded normally. Only with certain formats
     * the image is already fully loaded and references added to the ImageInfo's custom objects
     * (see {@link ImageInfo#getOriginalImage()}).
     * <p>
     * The reason for the preloading: Apache FOP, for example, only needs the image's intrinsic
     * size during layout. Only when the document is rendered to the final format does FOP need
     * to load the full image. Like this a lot of memory can be saved.
     * @param uri the original URI of the image
     * @param session the session context through which to resolve the URI
     * @return the ImageInfo object created from the image
     * @throws ImageException If no suitable ImagePreloader can be found to load the image or
     *          if an error occurred while preloading the image.
     * @throws IOException If an I/O error occurs while preloading the image
     */
    public ImageInfo preloadImage(String uri, ImageSessionContext session)
            throws ImageException, IOException {
        // Borrow the Source from the session for the duration of the preload,
        // then hand it back so the session can reuse/release it.
        Source src = session.needSource(uri);
        ImageInfo info = preloadImage(uri, src);
        session.returnSource(uri, src);
        return info;
    }

    /**
     * Preloads an image, i.e.
the format of the image is identified and some basic information * (MIME type, intrinsic size and possibly other values) are loaded and returned as an * ImageInfo object. Note that the image is not fully loaded normally. Only with certain formats * the image is already fully loaded and references added to the ImageInfo's custom objects * (see {@link ImageInfo#getOriginalImage()}). * <p> * The reason for the preloading: Apache FOP, for example, only needs the image's intrinsic * size during layout. Only when the document is rendered to the final format does FOP need * to load the full image. Like this a lot of memory can be saved. * @param uri the original URI of the image * @param src the Source object to load the image from * @return the ImageInfo object created from the image * @throws ImageException If no suitable ImagePreloader can be found to load the image or * if an error occurred while preloading the image. * @throws IOException If an I/O error occurs while preloading the image */ public ImageInfo preloadImage(String uri, Source src) throws ImageException, IOException { Iterator iter = registry.getPreloaderIterator(); while (iter.hasNext()) { ImagePreloader preloader = (ImagePreloader)iter.next(); ImageInfo info = preloader.preloadImage(uri, src, imageContext); if (info != null) { return info; } } throw new ImageException("The file format is not supported. No ImagePreloader found for " + uri); } private Map prepareHints(Map hints, ImageSessionContext sessionContext) { Map newHints = new java.util.HashMap(); if (hints != null) { newHints.putAll(hints); //Copy in case an unmodifiable map is passed in } if (!newHints.containsKey(ImageProcessingHints.IMAGE_SESSION_CONTEXT) && sessionContext != null) { newHints.put(ImageProcessingHints.IMAGE_SESSION_CONTEXT, sessionContext); } if (!newHints.containsKey(ImageProcessingHints.IMAGE_MANAGER)) { newHints.put(ImageProcessingHints.IMAGE_MANAGER, this); } return newHints; } /** * Loads an image. 
The caller can indicate what kind of image flavor is requested. When this
     * method is called the code looks for a suitable ImageLoader and, if necessary, builds
     * a conversion pipeline so it can return the image in exactly the form the caller needs.
     * <p>
     * Optionally, it is possible to pass in Map of hints. These hints may be used by ImageLoaders
     * and ImageConverters to act on the image. See {@link ImageProcessingHints} for common hints
     * used by the bundled implementations. You can, of course, define your own hints.
     * @param info the ImageInfo instance for the image (obtained by
     *              {@link #getImageInfo(String, ImageSessionContext)})
     * @param flavor the requested image flavor.
     * @param hints a Map of hints to any of the background components or null
     * @param session the session context
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image getImage(ImageInfo info, ImageFlavor flavor, Map hints, ImageSessionContext session)
            throws ImageException, IOException {
        hints = prepareHints(hints, session);

        Image img = null;
        // Build a loader(+converter) pipeline able to produce the requested flavor.
        ImageProviderPipeline pipeline = getPipelineFactory().newImageConverterPipeline(
                info, flavor);
        if (pipeline != null) {
            img = pipeline.execute(info, hints, session);
        }
        if (img == null) {
            throw new ImageException(
                    "Cannot load image (no suitable loader/converter combination available) for "
                        + info);
        }
        // Loading is complete; quietly close the session's Source for this URI.
        ImageUtil.closeQuietly(session.getSource(info.getOriginalURI()));
        return img;
    }

    /**
     * Loads an image. The caller can indicate what kind of image flavors are requested. When this
     * method is called the code looks for a suitable ImageLoader and, if necessary, builds
     * a conversion pipeline so it can return the image in exactly the form the caller needs.
     * The array of image flavors is ordered, so the first image flavor is given highest priority.
     * <p>
     * Optionally, it is possible to pass in Map of hints. These hints may be used by ImageLoaders
     * and ImageConverters to act on the image. See {@link ImageProcessingHints} for common hints
     * used by the bundled implementations. You can, of course, define your own hints.
     * @param info the ImageInfo instance for the image (obtained by
     *              {@link #getImageInfo(String, ImageSessionContext)})
     * @param flavors the requested image flavors (in preferred order).
     * @param hints a Map of hints to any of the background components or null
     * @param session the session context
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image getImage(ImageInfo info, ImageFlavor[] flavors, Map hints,
            ImageSessionContext session) throws ImageException, IOException {
        hints = prepareHints(hints, session);

        Image img = null;
        // Gather one candidate pipeline per acceptable flavor, then pick the
        // cheapest one (see choosePipeline).
        ImageProviderPipeline[] candidates = getPipelineFactory().determineCandidatePipelines(
                info, flavors);
        ImageProviderPipeline pipeline = choosePipeline(candidates);

        if (pipeline != null) {
            img = pipeline.execute(info, hints, session);
        }
        if (img == null) {
            throw new ImageException(
                    "Cannot load image (no suitable loader/converter combination available) for "
                        + info);
        }
        // Loading is complete; quietly close the session's Source for this URI.
        ImageUtil.closeQuietly(session.getSource(info.getOriginalURI()));
        return img;
    }

    /**
     * Loads an image with no hints. See
     * {@link #getImage(ImageInfo, ImageFlavor, Map, ImageSessionContext)} for more
     * information.
     * @param info the ImageInfo instance for the image (obtained by
     *              {@link #getImageInfo(String, ImageSessionContext)})
     * @param flavor the requested image flavor.
     * @param session the session context
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image getImage(ImageInfo info, ImageFlavor flavor, ImageSessionContext session)
            throws ImageException, IOException {
        return getImage(info, flavor, ImageUtil.getDefaultHints(session), session);
    }

    /**
     * Loads an image with no hints. See
     * {@link #getImage(ImageInfo, ImageFlavor[], Map, ImageSessionContext)} for more
     * information.
     * @param info the ImageInfo instance for the image (obtained by
     *              {@link #getImageInfo(String, ImageSessionContext)})
     * @param flavors the requested image flavors (in preferred order).
     * @param session the session context
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image getImage(ImageInfo info, ImageFlavor[] flavors, ImageSessionContext session)
            throws ImageException, IOException {
        return getImage(info, flavors, ImageUtil.getDefaultHints(session), session);
    }

    /**
     * Converts an image. The caller can indicate what kind of image flavors are requested. When
     * this method is called the code looks for a suitable combination of ImageConverters so it
     * can return the image in exactly the form the caller needs.
     * The array of image flavors is ordered, so the first image flavor is given highest priority.
     * <p>
     * Optionally, it is possible to pass in Map of hints. These hints may be used by
     * ImageConverters to act on the image. See {@link ImageProcessingHints} for common hints
     * used by the bundled implementations. You can, of course, define your own hints.
     * @param image the image to convert
     * @param flavors the requested image flavors (in preferred order).
     * @param hints a Map of hints to any of the background components or null
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image convertImage(Image image, ImageFlavor[] flavors, Map hints)
            throws ImageException, IOException {
        // No session context here: conversion operates on an already-loaded image.
        hints = prepareHints(hints, null);
        ImageInfo info = image.getInfo();

        Image img = null;
        int count = flavors.length;
        for (int i = 0; i < count; i++) {
            if (image.getFlavor().equals(flavors[i])) {
                //Shortcut (the image is already in one of the requested formats)
                return image;
            }
        }

        ImageProviderPipeline[] candidates = getPipelineFactory().determineCandidatePipelines(
                image, flavors);
        ImageProviderPipeline pipeline = choosePipeline(candidates);

        if (pipeline != null) {
            img = pipeline.execute(info, image, hints, null);
        }
        if (img == null) {
            throw new ImageException(
                    "Cannot convert image " + image
                        + " (no suitable converter combination available)");
        }
        return img;
    }

    /**
     * Converts an image with no hints. See
     * {@link #convertImage(Image, ImageFlavor[], Map)} for more
     * information.
     * @param image the image to convert
     * @param flavors the requested image flavors (in preferred order).
     * @return the fully loaded image
     * @throws ImageException If no suitable loader/converter combination is available to fulfill
     *          the request or if an error occurred while loading the image.
     * @throws IOException If an I/O error occurs
     */
    public Image convertImage(Image image, ImageFlavor[] flavors)
            throws ImageException, IOException {
        return convertImage(image, flavors, null);
    }

    /**
     * Chooses the best {@link ImageProviderPipeline} from a set of candidates.
     * @param candidates the candidates
     * @return the best pipeline
     */
    public ImageProviderPipeline choosePipeline(ImageProviderPipeline[] candidates) {
        ImageProviderPipeline pipeline = null;
        int minPenalty = Integer.MAX_VALUE;
        int count = candidates.length;
        if (log.isTraceEnabled()) {
            log.trace("Candidate Pipelines:");
            for (int i = 0; i < count; i++) {
                if (candidates[i] == null) {
                    continue;
                }
                log.trace("  " + i + ": " + candidates[i].getConversionPenalty(getRegistry())
                        + " for " + candidates[i]);
            }
        }
        // Iterate backwards with a "<=" comparison so that, on equal penalty,
        // the candidate with the lowest index (the preferred flavor) wins.
        for (int i = count - 1; i >= 0; i--) {
            if (candidates[i] == null) {
                continue;
            }
            Penalty penalty = candidates[i].getConversionPenalty(getRegistry());
            if (penalty.isInfinitePenalty()) {
                continue; //Exclude candidate on infinite penalty
            }
            if (penalty.getValue() <= minPenalty) {
                pipeline = candidates[i];
                minPenalty = penalty.getValue();
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("Chosen pipeline: " + pipeline);
        }
        return pipeline;
    }

}
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.graphics;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.zip.CRC32;
import java.util.zip.CheckedOutputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Pixmap.Format;
import com.badlogic.gdx.utils.ByteArray;
import com.badlogic.gdx.utils.Disposable;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.StreamUtils;

/** Writes Pixmaps to various formats.
 * @author mzechner
 * @author Nathan Sweet */
public class PixmapIO {
	/** Writes the {@link Pixmap} to the given file using a custom compression scheme. First three integers define the width, height
	 * and format, remaining bytes are zlib compressed pixels. To be able to load the Pixmap to a Texture, use ".cim" as the file
	 * suffix. Throws a GdxRuntimeException in case the Pixmap couldn't be written to the file.
	 * @param file the file to write the Pixmap to */
	static public void writeCIM (FileHandle file, Pixmap pixmap) {
		CIM.write(file, pixmap);
	}

	/** Reads the {@link Pixmap} from the given file, assuming the Pixmap was written with the
	 * {@link PixmapIO#writeCIM(FileHandle, Pixmap)} method. Throws a GdxRuntimeException in case the file couldn't be read.
	 * @param file the file to read the Pixmap from */
	static public Pixmap readCIM (FileHandle file) {
		return CIM.read(file);
	}

	/** Writes the pixmap as a PNG with compression. See {@link PNG} to configure the compression level, more efficiently flip the
	 * pixmap vertically, and to write out multiple PNGs with minimal allocation. */
	static public void writePNG (FileHandle file, Pixmap pixmap) {
		try {
			// Initial buffer size is a guess at the deflated pixel data size.
			PNG writer = new PNG((int)(pixmap.getWidth() * pixmap.getHeight() * 1.5f)); // Guess at deflated size.
			try {
				writer.setFlipY(false);
				writer.write(file, pixmap);
			} finally {
				writer.dispose();
			}
		} catch (IOException ex) {
			throw new GdxRuntimeException("Error writing PNG: " + file, ex);
		}
	}

	/** Reader/writer for the custom ".cim" format: three ints (width, height,
	 * gdx2d format code) followed by the raw pixel bytes, the whole stream
	 * wrapped in zlib deflate.
	 * @author mzechner */
	static private class CIM {
		static private final int BUFFER_SIZE = 32000;
		// Shared scratch buffers; access is serialized via the synchronized
		// blocks in write() and read().
		static private final byte[] writeBuffer = new byte[BUFFER_SIZE];
		static private final byte[] readBuffer = new byte[BUFFER_SIZE];

		static public void write (FileHandle file, Pixmap pixmap) {
			DataOutputStream out = null;

			try {
				// long start = System.nanoTime();
				DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(file.write(false));
				out = new DataOutputStream(deflaterOutputStream);
				// Header: width, height, format code (all deflated together
				// with the pixel data).
				out.writeInt(pixmap.getWidth());
				out.writeInt(pixmap.getHeight());
				out.writeInt(Format.toGdx2DPixmapFormat(pixmap.getFormat()));

				ByteBuffer pixelBuf = pixmap.getPixels();
				pixelBuf.position(0);
				pixelBuf.limit(pixelBuf.capacity());

				// Copy the pixel buffer out in BUFFER_SIZE chunks, then the
				// remainder.
				int remainingBytes = pixelBuf.capacity() % BUFFER_SIZE;
				int iterations = pixelBuf.capacity() / BUFFER_SIZE;

				synchronized (writeBuffer) {
					for (int i = 0; i < iterations; i++) {
						pixelBuf.get(writeBuffer);
						out.write(writeBuffer);
					}

					pixelBuf.get(writeBuffer, 0, remainingBytes);
					out.write(writeBuffer, 0, remainingBytes);
				}

				// Restore the buffer's position/limit for the caller.
				pixelBuf.position(0);
				pixelBuf.limit(pixelBuf.capacity());
				// Gdx.app.log("PixmapIO", "write (" + file.name() + "):" + (System.nanoTime() - start) / 1000000000.0f + ", " +
				// Thread.currentThread().getName());
			} catch (Exception e) {
				throw new GdxRuntimeException("Couldn't write Pixmap to file '" + file + "'", e);
			} finally {
				StreamUtils.closeQuietly(out);
			}
		}

		static public Pixmap read (FileHandle file) {
			DataInputStream in = null;

			try {
				// long start = System.nanoTime();
				in = new DataInputStream(new InflaterInputStream(new BufferedInputStream(file.read())));
				// Header written by write(): width, height, format code.
				int width = in.readInt();
				int height = in.readInt();
				Format format = Format.fromGdx2DPixmapFormat(in.readInt());
				Pixmap pixmap = new Pixmap(width, height, format);

				ByteBuffer pixelBuf = pixmap.getPixels();
				pixelBuf.position(0);
				pixelBuf.limit(pixelBuf.capacity());

				synchronized (readBuffer) {
					int readBytes = 0;
					// Stream the inflated bytes straight into the pixmap's
					// pixel buffer until EOF.
					while ((readBytes = in.read(readBuffer)) > 0) {
						pixelBuf.put(readBuffer, 0, readBytes);
					}
				}

				// Restore the buffer's position/limit for the caller.
				pixelBuf.position(0);
				pixelBuf.limit(pixelBuf.capacity());
				// Gdx.app.log("PixmapIO", "read:" + (System.nanoTime() - start) / 1000000000.0f);
				return pixmap;
			} catch (Exception e) {
				throw new GdxRuntimeException("Couldn't read Pixmap from file '" + file + "'", e);
			} finally {
				StreamUtils.closeQuietly(in);
			}
		}
	}

	/** PNG encoder with compression. An instance can be reused to encode multiple PNGs with minimal allocation.
	 *
	 * <pre>
	 * Copyright (c) 2007 Matthias Mann - www.matthiasmann.de
	 * Copyright (c) 2014 Nathan Sweet
	 *
	 * Permission is hereby granted, free of charge, to any person obtaining a copy
	 * of this software and associated documentation files (the "Software"), to deal
	 * in the Software without restriction, including without limitation the rights
	 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
	 * copies of the Software, and to permit persons to whom the Software is
	 * furnished to do so, subject to the following conditions:
	 *
	 * The above copyright notice and this permission notice shall be included in
	 * all copies or substantial portions of the Software.
	 *
	 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
	 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
	 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
	 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
	 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
	 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
	 * THE SOFTWARE.
	 * </pre>
	 * @author Matthias Mann
	 * @author Nathan Sweet */
	static public class PNG implements Disposable {
		// PNG file signature bytes followed by the chunk type codes used here
		// (IHDR = header, IDAT = image data, IEND = end of file).
		static private final byte[] SIGNATURE = {(byte)137, 80, 78, 71, 13, 10, 26, 10};
		static private final int IHDR = 0x49484452, IDAT = 0x49444154, IEND = 0x49454E44;
		static private final byte COLOR_ARGB = 6;
		static private final byte COMPRESSION_DEFLATE = 0;
		static private final byte FILTER_NONE = 0;
		static private final byte INTERLACE_NONE = 0;
		// Scanline filter type byte written before every row (Paeth filter).
		static private final byte PAETH = 4;

		private final ChunkBuffer buffer;
		private final DeflaterOutputStream deflaterOutput;
		private final Deflater deflater;
		// Reusable scanline buffers (filtered output, current row, previous row).
		private ByteArray lineOutBytes, curLineBytes, prevLineBytes;
		private boolean flipY = true;
		private int lastLineLen;

		public PNG () {
			this(128 * 128);
		}

		public PNG (int initialBufferSize) {
			buffer = new ChunkBuffer(initialBufferSize);
			deflater = new Deflater();
			deflaterOutput = new DeflaterOutputStream(buffer, deflater);
		}

		/** If true, the resulting PNG is flipped vertically. Default is true. */
		public void setFlipY (boolean flipY) {
			this.flipY = flipY;
		}

		/** Sets the deflate compression level. Default is {@link Deflater#DEFAULT_COMPRESSION}. */
		public void setCompression (int level) {
			deflater.setLevel(level);
		}

		public void write (FileHandle file, Pixmap pixmap) throws IOException {
			OutputStream output = file.write(false);
			try {
				write(output, pixmap);
			} finally {
				StreamUtils.closeQuietly(output);
			}
		}

		/** Writes the pixmap to the stream without closing the stream. */
		public void write (OutputStream output, Pixmap pixmap) throws IOException {
			DataOutputStream dataOutput = new DataOutputStream(output);
			dataOutput.write(SIGNATURE);

			// IHDR chunk: dimensions plus fixed encoding parameters.
			buffer.writeInt(IHDR);
			buffer.writeInt(pixmap.getWidth());
			buffer.writeInt(pixmap.getHeight());
			buffer.writeByte(8); // 8 bits per component.
			buffer.writeByte(COLOR_ARGB);
			buffer.writeByte(COMPRESSION_DEFLATE);
			buffer.writeByte(FILTER_NONE);
			buffer.writeByte(INTERLACE_NONE);
			buffer.endChunk(dataOutput);

			// IDAT chunk: deflated, Paeth-filtered scanlines.
			buffer.writeInt(IDAT);
			deflater.reset();

			int lineLen = pixmap.getWidth() * 4; // 4 bytes per RGBA pixel.
			byte[] lineOut, curLine, prevLine;
			if (lineOutBytes == null) {
				lineOut = (lineOutBytes = new ByteArray(lineLen)).items;
				curLine = (curLineBytes = new ByteArray(lineLen)).items;
				prevLine = (prevLineBytes = new ByteArray(lineLen)).items;
			} else {
				lineOut = lineOutBytes.ensureCapacity(lineLen);
				curLine = curLineBytes.ensureCapacity(lineLen);
				prevLine = prevLineBytes.ensureCapacity(lineLen);
				// Clear the previous-line bytes left over from the last image.
				for (int i = 0, n = lastLineLen; i < n; i++)
					prevLine[i] = 0;
			}
			lastLineLen = lineLen;

			ByteBuffer pixels = pixmap.getPixels();
			int oldPosition = pixels.position();
			boolean rgba8888 = pixmap.getFormat() == Format.RGBA8888;
			for (int y = 0, h = pixmap.getHeight(); y < h; y++) {
				int py = flipY ? (h - y - 1) : y;
				if (rgba8888) {
					// Fast path: copy the row straight out of the pixel buffer.
					pixels.position(py * lineLen);
					pixels.get(curLine, 0, lineLen);
				} else {
					// Slow path: convert pixel by pixel to RGBA byte order.
					for (int px = 0, x = 0; px < pixmap.getWidth(); px++) {
						int pixel = pixmap.getPixel(px, py);
						curLine[x++] = (byte)((pixel >> 24) & 0xff);
						curLine[x++] = (byte)((pixel >> 16) & 0xff);
						curLine[x++] = (byte)((pixel >> 8) & 0xff);
						curLine[x++] = (byte)(pixel & 0xff);
					}
				}

				// First pixel has no left neighbor: delta against the row above only.
				lineOut[0] = (byte)(curLine[0] - prevLine[0]);
				lineOut[1] = (byte)(curLine[1] - prevLine[1]);
				lineOut[2] = (byte)(curLine[2] - prevLine[2]);
				lineOut[3] = (byte)(curLine[3] - prevLine[3]);

				// Paeth predictor: a = left, b = above, c = upper-left; the
				// neighbor closest to p = a + b - c is used as the predictor.
				for (int x = 4; x < lineLen; x++) {
					int a = curLine[x - 4] & 0xff;
					int b = prevLine[x] & 0xff;
					int c = prevLine[x - 4] & 0xff;
					int p = a + b - c;
					int pa = p - a;
					if (pa < 0) pa = -pa;
					int pb = p - b;
					if (pb < 0) pb = -pb;
					int pc = p - c;
					if (pc < 0) pc = -pc;
					if (pa <= pb && pa <= pc)
						c = a;
					else if (pb <= pc) //
						c = b;
					lineOut[x] = (byte)(curLine[x] - c);
				}

				deflaterOutput.write(PAETH);
				deflaterOutput.write(lineOut, 0, lineLen);

				// Swap buffers: current row becomes "previous" for the next row.
				byte[] temp = curLine;
				curLine = prevLine;
				prevLine = temp;
			}
			pixels.position(oldPosition);
			deflaterOutput.finish();
			buffer.endChunk(dataOutput);

			// Empty IEND chunk terminates the file.
			buffer.writeInt(IEND);
			buffer.endChunk(dataOutput);

			output.flush();
		}

		/** Disposal will happen automatically in {@link #finalize()} but can be done explicitly if desired. */
		public void dispose () {
			deflater.end();
		}

		/** Buffers one PNG chunk (type + data) and, on endChunk, emits it as
		 * length, payload, CRC32 — the standard PNG chunk layout. */
		static class ChunkBuffer extends DataOutputStream {
			final ByteArrayOutputStream buffer;
			final CRC32 crc;

			ChunkBuffer (int initialSize) {
				this(new ByteArrayOutputStream(initialSize), new CRC32());
			}

			private ChunkBuffer (ByteArrayOutputStream buffer, CRC32 crc) {
				// CheckedOutputStream keeps the CRC up to date as bytes are buffered.
				super(new CheckedOutputStream(buffer, crc));
				this.buffer = buffer;
				this.crc = crc;
			}

			public void endChunk (DataOutputStream target) throws IOException {
				flush();
				// The length field excludes the 4-byte chunk type, hence "- 4".
				target.writeInt(buffer.size() - 4);
				buffer.writeTo(target);
				target.writeInt((int)crc.getValue());
				buffer.reset();
				crc.reset();
			}
		}
	}
}
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.tests; import java.util.ArrayList; import java.util.List; import android.graphics.Color; import android.widget.Spinner; import android.widget.SpinnerAdapter; import android.widget.TextView; import com.facebook.react.bridge.BaseJavaModule; import com.facebook.react.testing.ReactInstanceSpecForTest; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.JavaScriptModule; import com.facebook.react.views.picker.ReactDialogPickerManager; import com.facebook.react.views.picker.ReactDropdownPickerManager; import com.facebook.react.views.picker.ReactPicker; import com.facebook.react.views.picker.ReactPickerManager; import com.facebook.react.testing.ReactAppInstrumentationTestCase; /** * Integration test for {@link ReactDialogPickerManager} and {@link ReactDropdownPickerManager} * (and, implicitly, {@link ReactPickerManager}). Tests basic properties, events and switching * between spinner modes (which changes the used manager). 
 */
public class ReactPickerTestCase extends ReactAppInstrumentationTestCase {

  // JS-side test module: lets the test drive the JS app (selection, mode,
  // color changes) over the bridge.
  private static interface PickerAndroidTestModule extends JavaScriptModule {
    public void selectItem(int position);
    public void setMode(String mode);
    public void setPrimaryColor(String color);
  }

  // Native module that records selection events sent back from JS.
  public static class PickerAndroidRecordingModule extends BaseJavaModule {
    private final List<Integer> mSelections = new ArrayList<Integer>();

    @Override
    public String getName() {
      return "PickerAndroidRecordingModule";
    }

    @ReactMethod
    public void recordSelection(int position) {
      mSelections.add(position);
    }

    // Returns a defensive copy so callers cannot mutate the recorded history.
    public List<Integer> getSelections() {
      return new ArrayList<Integer>(mSelections);
    }
  }

  private PickerAndroidRecordingModule mRecordingModule;

  @Override
  protected String getReactApplicationKeyUnderTest() {
    return "PickerAndroidTestApp";
  }

  @Override
  protected ReactInstanceSpecForTest createReactInstanceSpecForTest() {
    mRecordingModule = new PickerAndroidRecordingModule();
    return super.createReactInstanceSpecForTest()
        .addNativeModule(mRecordingModule);
  }

  // Verifies mode, prompt, items, initial selection and per-item text colors,
  // then changes the primary color from JS and re-checks the colors.
  public void testBasicProperties() {
    ReactPicker spinner = getViewAtPath(0, 0);
    SpinnerAdapter adapter = spinner.getAdapter();

    assertEquals(Spinner.MODE_DIALOG, spinner.getMode());
    assertEquals("prompt", spinner.getPrompt());
    assertNotNull(adapter);
    assertEquals(3, adapter.getCount());
    assertEquals("item1", ((TextView) adapter.getView(0, null, null)).getText());
    assertEquals("item2", ((TextView) adapter.getView(1, null, null)).getText());
    assertEquals("item3", ((TextView) adapter.getView(2, null, null)).getText());
    assertEquals(1, spinner.getSelectedItemPosition());

    // test colors
    assertEquals(Color.RED, ((TextView) adapter.getView(0, null, null)).getCurrentTextColor());
    assertEquals(Color.GREEN, ((TextView) adapter.getView(1, null, null)).getCurrentTextColor());
    assertEquals(Color.BLUE, ((TextView) adapter.getView(2, null, null)).getCurrentTextColor());
    assertEquals(
        Color.RED,
        ((TextView) adapter.getDropDownView(0, null, null)).getCurrentTextColor());
    assertEquals(
        Color.GREEN,
        ((TextView) adapter.getDropDownView(1, null, null)).getCurrentTextColor());
    assertEquals(
        Color.BLUE,
        ((TextView) adapter.getDropDownView(2, null, null)).getCurrentTextColor());

    getTestModule().setPrimaryColor("black");
    waitForBridgeAndUIIdle();

    // After the change only the collapsed views switch to black; the dropdown
    // views keep their per-item colors.
    assertEquals(Color.BLACK, ((TextView) adapter.getView(0, null, null)).getCurrentTextColor());
    assertEquals(Color.BLACK, ((TextView) adapter.getView(1, null, null)).getCurrentTextColor());
    assertEquals(Color.BLACK, ((TextView) adapter.getView(2, null, null)).getCurrentTextColor());
    assertEquals(
        Color.RED,
        ((TextView) adapter.getDropDownView(0, null, null)).getCurrentTextColor());
    assertEquals(
        Color.GREEN,
        ((TextView) adapter.getDropDownView(1, null, null)).getCurrentTextColor());
    assertEquals(
        Color.BLUE,
        ((TextView) adapter.getDropDownView(2, null, null)).getCurrentTextColor());
  }

  public void testDropdownPicker() {
    ReactPicker spinner = getViewAtPath(0, 1);
    assertEquals(Spinner.MODE_DROPDOWN, spinner.getMode());
  }

  public void testDisabledPicker() {
    ReactPicker spinner = getViewAtPath(0, 2);
    assertFalse(spinner.isEnabled());
  }

  // Drives a selection change from JS and checks the native spinner follows.
  public void testUpdateSelectedItem() {
    ReactPicker spinner = getViewAtPath(0, 0);
    assertEquals(1, spinner.getSelectedItemPosition());

    getTestModule().selectItem(2);
    waitForBridgeAndUIIdle();
    getInstrumentation().waitForIdleSync();

    assertEquals(2, spinner.getSelectedItemPosition());
  }

  public void testUpdateMode() {
    ReactPicker spinner = getViewAtPath(0, 1);
    assertEquals(Spinner.MODE_DROPDOWN, spinner.getMode());

    getTestModule().setMode("dialog");
    waitForBridgeAndUIIdle();
    getInstrumentation().waitForIdleSync();

    // changing the spinner mode in JS actually creates a new component on the native side, as
    // there's no way to change the mode once you have constructed a Spinner.
    ReactPicker newPicker = getViewAtPath(0, 1);
    assertTrue(spinner != newPicker);
    assertEquals(Spinner.MODE_DIALOG, newPicker.getMode());
  }

  // Performs a native selection on the UI thread and verifies it is reported
  // back to the recording module exactly once.
  public void testOnSelect() throws Throwable {
    runTestOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            ReactPicker spinner = getViewAtPath(0, 0);
            spinner.setSelection(2);
          }
        });
    getInstrumentation().waitForIdleSync();
    waitForBridgeAndUIIdle();

    List<Integer> selections = mRecordingModule.getSelections();
    assertEquals(1, selections.size());
    assertEquals(2, (int) selections.get(0));
  }

  // Repeats the round-trip several times to exercise alternating selections.
  public void testOnSelectSequence() throws Throwable {
    updateFirstSpinnerAndCheckLastSpinnerMatches(0);
    updateFirstSpinnerAndCheckLastSpinnerMatches(2);
    updateFirstSpinnerAndCheckLastSpinnerMatches(0);
    updateFirstSpinnerAndCheckLastSpinnerMatches(2);
  }

  private void updateFirstSpinnerAndCheckLastSpinnerMatches(
      final int indexToSelect
  ) throws Throwable {
    // The last spinner has the same selected value as the first one.
    // Test that user selection is propagated correctly to JS, to setState, and to Spinners.
    runTestOnUiThread(
        new Runnable() {
          @Override
          public void run() {
            ReactPicker spinner = getViewAtPath(0, 0);
            spinner.setSelection(indexToSelect);
          }
        });
    getInstrumentation().waitForIdleSync();
    waitForBridgeAndUIIdle();

    ReactPicker spinnerInSync = getViewAtPath(0, 3);
    assertEquals(
        "Picker selection was not updated correctly via setState.",
        indexToSelect,
        spinnerInSync.getSelectedItemPosition());
  }

  private PickerAndroidTestModule getTestModule() {
    return getReactContext().getCatalystInstance().getJSModule(PickerAndroidTestModule.class);
  }
}
package com.xeiam.xchange.coinbase;

import java.io.IOException;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;

import si.mazi.rescu.ParamsDigest;

import com.xeiam.xchange.coinbase.dto.CoinbaseBaseResponse;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseAccountChanges;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseAddress;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseAddressCallback;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseAddresses;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseContacts;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseRecurringPayment;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseRecurringPayments;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseTransaction;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseTransactions;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseUser;
import com.xeiam.xchange.coinbase.dto.account.CoinbaseUsers;
import com.xeiam.xchange.coinbase.dto.marketdata.CoinbaseMoney;
import com.xeiam.xchange.coinbase.dto.merchant.CoinbaseButton;
import com.xeiam.xchange.coinbase.dto.merchant.CoinbaseOrder;
import com.xeiam.xchange.coinbase.dto.merchant.CoinbaseOrders;
import com.xeiam.xchange.coinbase.dto.merchant.CoinbaseSubscription;
import com.xeiam.xchange.coinbase.dto.merchant.CoinbaseSubscriptions;
import com.xeiam.xchange.coinbase.dto.trade.CoinbaseTransfer;
import com.xeiam.xchange.coinbase.dto.trade.CoinbaseTransfers;

/**
 * JAX-RS proxy interface for the authenticated Coinbase v1 REST API (rooted at {@code api/v1},
 * consuming JSON responses).
 *
 * <p>Every method carries the same three authentication headers:
 * <ul>
 *   <li>{@code ACCESS_KEY} — the caller's API key;</li>
 *   <li>{@code ACCESS_SIGNATURE} — a {@link ParamsDigest} that signs the request;</li>
 *   <li>{@code ACCESS_NONCE} — a monotonically increasing nonce.</li>
 * </ul>
 * All methods throw {@link IOException} on transport failure.
 *
 * @author jamespedwards42
 */
@Path("api/v1")
@Produces(MediaType.APPLICATION_JSON)
public interface CoinbaseAuthenticated extends Coinbase {

  /** GET {@code users} — retrieves the authenticated user(s). */
  @GET
  @Path("users")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseUsers getUsers(@HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** PUT {@code users/{userId}} — updates the given user; the {@code user} body is sent as JSON. */
  @PUT
  @Path("users/{userId}")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseUser updateUser(@PathParam("userId") String userId, CoinbaseUser user, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code tokens/redeem} — redeems the token identified by the {@code token_id} query parameter. */
  @POST
  @Path("tokens/redeem")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseBaseResponse redeemToken(@QueryParam("token_id") String tokenId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code account/balance} — the account balance as a {@link CoinbaseMoney} amount. */
  @GET
  @Path("account/balance")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseMoney getBalance(@HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code account/receive_address} — the account's current receive address. */
  @GET
  @Path("account/receive_address")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseAddress getReceiveAddress(@HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /**
   * POST {@code account/generate_receive_address} — generates a fresh receive address; the JSON body
   * optionally registers a payment-notification callback URL.
   */
  @POST
  @Path("account/generate_receive_address")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseAddress generateReceiveAddress(CoinbaseAddressCallback callbackUrl, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code account_changes} — a page of account changes; {@code page} may be null for the default page. */
  @GET
  @Path("account_changes")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseAccountChanges getAccountChanges(@QueryParam("page") Integer page, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code addresses} — lists receive addresses, optionally paged, limited and filtered by {@code query}. */
  @GET
  @Path("addresses")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseAddresses getAddresses(@QueryParam("page") Integer page, @QueryParam("limit") Integer limit, @QueryParam("query") String query, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /**
   * GET {@code contacts} — lists contacts.
   * NOTE(review): the second parameter is named {@code limit} locally but is sent on the wire as
   * {@code num_pages} — confirm against the Coinbase v1 API whether this mismatch is intentional.
   */
  @GET
  @Path("contacts")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseContacts getContacts(@QueryParam("page") Integer page, @QueryParam("num_pages") Integer limit, @QueryParam("query") String query, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code transfers} — lists buys/sells (transfers), optionally paged and limited. */
  @GET
  @Path("transfers")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransfers getTransfers(@QueryParam("page") Integer page, @QueryParam("limit") Integer limit, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code transactions} — lists transactions, optionally paged. */
  @GET
  @Path("transactions")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransactions getTransactions(@QueryParam("page") Integer page, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code transactions/{transactionId}} — details of a single transaction. */
  @GET
  @Path("transactions/{transactionId}")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransaction getTransactionDetails(@PathParam("transactionId") String transactionId, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code transactions/request_money} — requests money; the request details are sent as JSON. */
  @POST
  @Path("transactions/request_money")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseTransaction requestMoney(CoinbaseTransaction transactionRequest, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code transactions/send_money} — sends money; the transaction details are sent as JSON. */
  @POST
  @Path("transactions/send_money")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseTransaction sendMoney(CoinbaseTransaction transactionRequest, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** PUT {@code transactions/{transactionId}/resend_request} — re-sends a money request notification. */
  @PUT
  @Path("transactions/{transactionId}/resend_request")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseBaseResponse resendRequest(@PathParam("transactionId") String transactionId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** PUT {@code transactions/{transactionId}/complete_request} — completes (pays) a pending money request. */
  @PUT
  @Path("transactions/{transactionId}/complete_request")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransaction completeRequest(@PathParam("transactionId") String transactionId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** DELETE {@code transactions/{transactionId}/cancel_request} — cancels a pending money request. */
  @DELETE
  @Path("transactions/{transactionId}/cancel_request")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseBaseResponse cancelRequest(@PathParam("transactionId") String transactionId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code buttons} — creates a payment button; the button definition is sent as JSON. */
  @POST
  @Path("buttons")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseButton createButton(CoinbaseButton button, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code orders} — lists merchant orders, optionally paged. */
  @GET
  @Path("orders")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseOrders getOrders(@QueryParam("page") Integer page, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code orders/{orderId}} — details of a single order. */
  @GET
  @Path("orders/{orderId}")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseOrder getOrder(@PathParam("orderId") String orderId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code buttons/{code}/create_order} — creates an order for an existing button, identified by its code. */
  @POST
  @Path("buttons/{code}/create_order")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseOrder createOrder(@PathParam("code") String code, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code orders} — creates an order directly from an inline button definition (JSON body). */
  @POST
  @Path("orders")
  @Consumes(MediaType.APPLICATION_JSON)
  CoinbaseOrder createOrder(CoinbaseButton button, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code recurring_payments} — lists recurring payments, optionally paged and limited. */
  @GET
  @Path("recurring_payments")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseRecurringPayments getRecurringPayments(@QueryParam("page") Integer page, @QueryParam("limit") Integer limit, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** GET {@code recurring_payments/{recurringPaymentId}} — details of a single recurring payment. */
  @GET
  @Path("recurring_payments/{recurringPaymentId}")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseRecurringPayment getRecurringPayment(@PathParam("recurringPaymentId") String recurringPaymentId, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /**
   * GET {@code subscribers} — lists merchant subscriptions, optionally paged and limited.
   * NOTE(review): method name carries a "gets" typo, but renaming would break existing callers of
   * this public interface, so it is left as-is.
   */
  @GET
  @Path("subscribers")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseSubscriptions getsSubscriptions(@QueryParam("page") Integer page, @QueryParam("limit") Integer limit, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /**
   * GET {@code subscribers/{subscriptionId}} — details of a single subscription.
   * NOTE(review): same "gets" naming typo as above; kept for interface compatibility.
   */
  @GET
  @Path("subscribers/{subscriptionId}")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseSubscription getsSubscription(@PathParam("subscriptionId") String subscriptionId, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /**
   * POST {@code buys} — buys the given quantity of bitcoin; {@code agree_btc_amount_varies}
   * acknowledges that the filled BTC amount may differ from the requested quantity.
   */
  @POST
  @Path("buys")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransfer buy(@QueryParam("qty") String quantity, @QueryParam("agree_btc_amount_varies") boolean agreeBTCAmountVaries, @HeaderParam("ACCESS_KEY") String apiKey,
      @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer, @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;

  /** POST {@code sells} — sells the given quantity of bitcoin. */
  @POST
  @Path("sells")
  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
  CoinbaseTransfer sell(@QueryParam("qty") String quantity, @HeaderParam("ACCESS_KEY") String apiKey, @HeaderParam("ACCESS_SIGNATURE") ParamsDigest signer,
      @HeaderParam("ACCESS_NONCE") long nonce) throws IOException;
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/
package org.unitime.timetable.onlinesectioning.server;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.cpsolver.coursett.constraint.GroupConstraint;
import org.cpsolver.coursett.constraint.IgnoreStudentConflictsConstraint;
import org.unitime.timetable.gwt.server.SectioningServlet;
import org.unitime.timetable.gwt.shared.SectioningException;
import org.unitime.timetable.model.Class_;
import org.unitime.timetable.model.CourseDemand;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.DistributionPref;
import org.unitime.timetable.model.InstructionalOffering;
import org.unitime.timetable.model.PreferenceLevel;
import org.unitime.timetable.model.Student;
import org.unitime.timetable.model.dao.CourseOfferingDAO;
import org.unitime.timetable.model.dao.InstructionalOfferingDAO;
import org.unitime.timetable.model.dao.StudentDAO;
import org.unitime.timetable.onlinesectioning.OnlineSectioningServerContext;
import org.unitime.timetable.onlinesectioning.match.CourseMatcher;
import org.unitime.timetable.onlinesectioning.match.StudentMatcher;
import org.unitime.timetable.onlinesectioning.model.XCourse;
import org.unitime.timetable.onlinesectioning.model.XCourseId;
import org.unitime.timetable.onlinesectioning.model.XCourseRequest;
import org.unitime.timetable.onlinesectioning.model.XDistribution;
import org.unitime.timetable.onlinesectioning.model.XDistributionType;
import org.unitime.timetable.onlinesectioning.model.XEnrollment;
import org.unitime.timetable.onlinesectioning.model.XExpectations;
import org.unitime.timetable.onlinesectioning.model.XOffering;
import org.unitime.timetable.onlinesectioning.model.XStudent;
import org.unitime.timetable.onlinesectioning.updates.ReloadAllData;

/**
 * Online-sectioning server implementation that keeps no in-memory model: every lookup goes
 * straight to the database via Hibernate and is converted into the X* snapshot objects on the
 * fly. Because nothing is cached, all of the update/remove/clear callbacks below are no-ops.
 *
 * @author Tomas Muller
 */
public class DatabaseServer extends AbstractLockingServer {

	public DatabaseServer(OnlineSectioningServerContext context) throws SectioningException {
		super(context);
	}

	/**
	 * Free-text course search for the current academic session.
	 * Matches on "SUBJ NBR" / "SUBJ NBR - Title" prefixes; once the query is longer than two
	 * characters it additionally matches anywhere in the course title. Exact course-name prefix
	 * matches are ordered first. Results are filtered by the optional matcher and truncated at
	 * the optional limit.
	 */
	@Override
	public Collection<XCourseId> findCourses(String query, Integer limit, CourseMatcher matcher) {
		if (matcher != null) matcher.setServer(this);
		Collection<XCourseId> ret = new ArrayList<XCourseId>();
		// Hibernate's pre-generics Query API returns a raw List, hence the unchecked cast.
		for (CourseOffering c: (List<CourseOffering>)getCurrentHelper().getHibSession().createQuery(
				"select c from CourseOffering c where " +
				"c.subjectArea.session.uniqueId = :sessionId and c.instructionalOffering.notOffered = false and (" +
				"(lower(c.subjectArea.subjectAreaAbbreviation || ' ' || c.courseNbr) like :q || '%' or lower(c.subjectArea.subjectAreaAbbreviation || ' ' || c.courseNbr || ' - ' || c.title) like :q || '%') " +
				(query.length() > 2 ? "or lower(c.title) like '%' || :q || '%'" : "") + ") " +
				"order by case " +
				"when lower(c.subjectArea.subjectAreaAbbreviation || ' ' || c.courseNbr) like :q || '%' then 0 else 1 end," + // matches on course name first
				"c.subjectArea.subjectAreaAbbreviation, c.courseNbr")
				.setString("q", query.toLowerCase())
				.setLong("sessionId", getAcademicSession().getUniqueId())
				.setCacheable(true).list()) {
			XCourse course = new XCourse(c);
			if (matcher == null || matcher.match(course)) ret.add(course);
			if (limit != null && ret.size() >= limit) break;
		}
		return ret;
	}

	/**
	 * Returns all offered courses of the current academic session (ordered by subject area and
	 * course number), filtered by the optional matcher. No limit is applied.
	 */
	@Override
	public Collection<XCourseId> findCourses(CourseMatcher matcher) {
		if (matcher != null) matcher.setServer(this);
		Collection<XCourseId> ret = new ArrayList<XCourseId>();
		for (CourseOffering c: (List<CourseOffering>)getCurrentHelper().getHibSession().createQuery(
				"select c from CourseOffering c where " +
				"c.subjectArea.session.uniqueId = :sessionId and c.instructionalOffering.notOffered = false " +
				"order by c.subjectArea.subjectAreaAbbreviation, c.courseNbr")
				.setLong("sessionId", getAcademicSession().getUniqueId())
				.setCacheable(true).list()) {
			XCourse course = new XCourse(c);
			if (matcher == null || matcher.match(course)) ret.add(course);
		}
		return ret;
	}

	/**
	 * Returns all students of the current academic session, filtered by the optional matcher.
	 * The query eagerly fetches the associations needed to build an {@link XStudent} (demands,
	 * requests, wait-lists, enrollments, academic areas, majors, groups) in one round trip.
	 */
	@Override
	public Collection<XStudent> findStudents(StudentMatcher matcher) {
		if (matcher != null) matcher.setServer(this);
		Collection<XStudent> ret = new ArrayList<XStudent>();
		for (Student s: (List<Student>)getCurrentHelper().getHibSession().createQuery(
				"select distinct s from Student s " +
				"left join fetch s.courseDemands as cd " +
				"left join fetch cd.courseRequests as cr " +
				"left join fetch cr.classWaitLists as cwl " +
				"left join fetch s.classEnrollments as e " +
				"left join fetch s.academicAreaClassifications as a " +
				"left join fetch s.posMajors as mj " +
				"left join fetch s.waitlists as w " +
				"left join fetch s.groups as g " +
				"where s.session.uniqueId = :sessionId")
				.setLong("sessionId", getAcademicSession().getUniqueId())
				.setCacheable(true).list()) {
			XStudent student = new XStudent(s, getCurrentHelper(), getAcademicSession().getFreeTimePattern());
			if (matcher == null || matcher.match(student)) ret.add(student);
		}
		return ret;
	}

	/** Loads a single course by id; returns null when it does not exist or is not offered. */
	@Override
	public XCourse getCourse(Long courseId) {
		CourseOffering c = CourseOfferingDAO.getInstance().get(courseId, getCurrentHelper().getHibSession());
		return c == null || c.getInstructionalOffering().isNotOffered() ? null : new XCourse(c);
	}

	/** Looks a course up by name (delegating to {@link SectioningServlet#lookupCourse}); null when not found. */
	@Override
	public XCourseId getCourse(String course) {
		CourseOffering c = SectioningServlet.lookupCourse(getCurrentHelper().getHibSession(), getAcademicSession().getUniqueId(), null, course, null);
		return c == null ? null : new XCourseId(c);
	}

	/** Loads a single student by id; null when the student does not exist. */
	@Override
	public XStudent getStudent(Long studentId) {
		Student s = StudentDAO.getInstance().get(studentId, getCurrentHelper().getHibSession());
		return s == null ? null : new XStudent(s, getCurrentHelper(), getAcademicSession().getFreeTimePattern());
	}

	/**
	 * Loads an offering by id together with the required LINKED_SECTIONS and
	 * NO_CONFLICT (ignore student conflicts) distribution preferences that touch its classes
	 * or scheduling subparts; returns null when the offering does not exist.
	 */
	@Override
	public XOffering getOffering(Long offeringId) {
		Collection<XDistribution> distributions = new ArrayList<XDistribution>();
		List<DistributionPref> distPrefs = getCurrentHelper().getHibSession().createQuery(
				"select distinct p from DistributionPref p inner join p.distributionObjects o, Department d, " +
				"Class_ c inner join c.schedulingSubpart.instrOfferingConfig.instructionalOffering io " +
				"where p.distributionType.reference in (:ref1, :ref2) and d.session.uniqueId = :sessionId " +
				"and io.uniqueId = :offeringId and (o.prefGroup = c or o.prefGroup = c.schedulingSubpart) " +
				"and p.owner = d and p.prefLevel.prefProlog = :pref")
				.setString("ref1", GroupConstraint.ConstraintType.LINKED_SECTIONS.reference())
				.setString("ref2", IgnoreStudentConflictsConstraint.REFERENCE)
				.setString("pref", PreferenceLevel.sRequired)
				.setLong("sessionId", getAcademicSession().getUniqueId())
				.setLong("offeringId", offeringId)
				.setCacheable(true)
				.list();
		if (!distPrefs.isEmpty()) {
			for (DistributionPref pref: distPrefs) {
				int variant = 0;
				for (Collection<Class_> sections: ReloadAllData.getSections(pref)) {
					// IngoreConflicts is the (misspelled) enum constant declared in XDistributionType.
					XDistributionType type = XDistributionType.IngoreConflicts;
					if (GroupConstraint.ConstraintType.LINKED_SECTIONS.reference().equals(pref.getDistributionType().getReference()))
						type = XDistributionType.LinkedSections;
					distributions.add(new XDistribution(type, pref.getUniqueId(), variant++, sections));
				}
			}
		}
		InstructionalOffering o = InstructionalOfferingDAO.getInstance().get(offeringId, getCurrentHelper().getHibSession());
		return o == null ? null : new XOffering(o, distributions, getCurrentHelper());
	}

	/** Returns the course requests (as whole course demands) targeting the given offering. */
	@Override
	public Collection<XCourseRequest> getRequests(Long offeringId) {
		Collection<XCourseRequest> ret = new ArrayList<XCourseRequest>();
		for (CourseDemand d: (List<CourseDemand>)getCurrentHelper().getHibSession().createQuery(
				"select distinct d from CourseRequest r inner join r.courseDemand d where r.courseOffering.instructionalOffering = :offeringId")
				.setLong("offeringId", offeringId).setCacheable(true).list()) {
			ret.add(new XCourseRequest(d, getCurrentHelper()));
		}
		return ret;
	}

	/** Returns the expected-student counts (class id -> expectation) for the given offering. */
	@Override
	public XExpectations getExpectations(Long offeringId) {
		Map<Long, Double> expectations = new HashMap<Long, Double>();
		for (Object[] info: (List<Object[]>)getCurrentHelper().getHibSession().createQuery(
				"select i.clazz.uniqueId, i.nbrExpectedStudents from SectioningInfo i where i.clazz.schedulingSubpart.instrOfferingConfig.instructionalOffering = :offeringId").
				setLong("offeringId", offeringId).
				setCacheable(true).list()) {
			expectations.put((Long)info[0], (Double)info[1]);
		}
		return new XExpectations(offeringId, expectations);
	}

	// The following mutators are intentionally empty: this server holds no in-memory model to
	// update -- every read goes directly to the database, so there is no cached state to keep
	// in sync.

	@Override
	public void update(XExpectations expectations) {
	}

	@Override
	public void remove(XStudent student) {
	}

	@Override
	public void update(XStudent student, boolean updateRequests) {
	}

	@Override
	public void remove(XOffering offering) {
	}

	@Override
	public void update(XOffering offering) {
	}

	@Override
	public void clearAll() {
	}

	@Override
	public void clearAllStudents() {
	}

	/** Records the enrollment on the (detached) request snapshot and returns it; no database write. */
	@Override
	public XCourseRequest assign(XCourseRequest request, XEnrollment enrollment) {
		request.setEnrollment(enrollment);
		return request;
	}

	/** Records the wait-list flag on the (detached) request snapshot and returns it; no database write. */
	@Override
	public XCourseRequest waitlist(XCourseRequest request, boolean waitlist) {
		request.setWaitlist(waitlist);
		return request;
	}
}
/**
 */
package org_sl_planet_bgfSimplified.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

import org_sl_planet_bgfSimplified.Expression;
import org_sl_planet_bgfSimplified.Org_sl_planet_bgfSimplifiedPackage;
import org_sl_planet_bgfSimplified.Selectable;

// NOTE: EMF-generated model implementation. Methods tagged @generated are rewritten by the
// code generator; hand edits to them will be lost unless the tag is changed to "@generated NOT".
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Selectable</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org_sl_planet_bgfSimplified.impl.SelectableImpl#getSelector <em>Selector</em>}</li>
 *   <li>{@link org_sl_planet_bgfSimplified.impl.SelectableImpl#getExpression <em>Expression</em>}</li>
 * </ul>
 *
 * @generated
 */
public class SelectableImpl extends MinimalEObjectImpl.Container implements Selectable {
	/**
	 * The default value of the '{@link #getSelector() <em>Selector</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSelector()
	 * @generated
	 * @ordered
	 */
	protected static final String SELECTOR_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getSelector() <em>Selector</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getSelector()
	 * @generated
	 * @ordered
	 */
	protected String selector = SELECTOR_EDEFAULT;

	/**
	 * The cached value of the '{@link #getExpression() <em>Expression</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getExpression()
	 * @generated
	 * @ordered
	 */
	protected Expression expression;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected SelectableImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return Org_sl_planet_bgfSimplifiedPackage.Literals.SELECTABLE;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getSelector() {
		return selector;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setSelector(String newSelector) {
		String oldSelector = selector;
		selector = newSelector;
		// Fire a SET notification only when someone is actually listening.
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__SELECTOR, oldSelector, selector));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Expression getExpression() {
		return expression;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Low-level containment setter: swaps the reference and chains the change notification
	// onto msgs without dispatching it (callers dispatch the accumulated chain themselves).
	public NotificationChain basicSetExpression(Expression newExpression, NotificationChain msgs) {
		Expression oldExpression = expression;
		expression = newExpression;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION, oldExpression, newExpression);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// Public containment setter: detaches the old child, attaches the new one (updating the
	// inverse container references), then dispatches the combined notification chain.
	public void setExpression(Expression newExpression) {
		if (newExpression != expression) {
			NotificationChain msgs = null;
			if (expression != null)
				msgs = ((InternalEObject)expression).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION, null, msgs);
			if (newExpression != null)
				msgs = ((InternalEObject)newExpression).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION, null, msgs);
			msgs = basicSetExpression(newExpression, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			// Setting the same value still produces a "touch" notification.
			eNotify(new ENotificationImpl(this, Notification.SET, Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION, newExpression, newExpression));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION:
				return basicSetExpression(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__SELECTOR:
				return getSelector();
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION:
				return getExpression();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__SELECTOR:
				setSelector((String)newValue);
				return;
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION:
				setExpression((Expression)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__SELECTOR:
				setSelector(SELECTOR_EDEFAULT);
				return;
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION:
				setExpression((Expression)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__SELECTOR:
				return SELECTOR_EDEFAULT == null ? selector != null : !SELECTOR_EDEFAULT.equals(selector);
			case Org_sl_planet_bgfSimplifiedPackage.SELECTABLE__EXPRESSION:
				return expression != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (selector: ");
		result.append(selector);
		result.append(')');
		return result.toString();
	}

} //SelectableImpl
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bookkeeper.mledger.impl;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.bookkeeper.common.util.OrderedScheduler;
import org.apache.bookkeeper.mledger.Entry;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.ManagedLedgerFactoryConfig;
import org.apache.bookkeeper.test.MockedBookKeeperTestCase;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
 * Tests for {@code EntryCacheManager}: size accounting across per-ledger caches,
 * watermark-driven eviction, the disabled-cache mode, hit/miss statistics exposed
 * through the factory MBean, and time-based eviction.
 */
@Test
public class EntryCacheManagerTest extends MockedBookKeeperTestCase {

    // Mocked ledgers used only to obtain distinct, named EntryCache instances.
    ManagedLedgerImpl ml1;
    ManagedLedgerImpl ml2;

    @BeforeClass
    void setup() throws Exception {
        // Single-thread scheduler shared by both mocks; the cache manager runs
        // its background eviction on the ledger's scheduled executor.
        // NOTE(review): this executor is never shut down — presumably acceptable
        // for a test fixture; confirm against MockedBookKeeperTestCase teardown.
        OrderedScheduler executor = OrderedScheduler.newSchedulerBuilder().numThreads(1).build();

        ml1 = mock(ManagedLedgerImpl.class);
        when(ml1.getScheduledExecutor()).thenReturn(executor);
        when(ml1.getName()).thenReturn("cache1");

        ml2 = mock(ManagedLedgerImpl.class);
        when(ml2.getScheduledExecutor()).thenReturn(executor);
        when(ml2.getName()).thenReturn("cache2");
    }

    /**
     * Basic size accounting: inserts are reflected in per-cache and global sizes,
     * crossing the max size triggers eviction down to the watermark (0.8 * 10 = 8,
     * which lands at 7 because entries are evicted whole), removing a cache and
     * invalidating entries both release their weight.
     */
    @Test
    void simple() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(10);
        config.setCacheEvictionWatermark(0.8);

        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);
        EntryCache cache2 = cacheManager.getEntryCache(ml2);

        // Entry sizes are the payload lengths: 4 + 3 = 7 bytes in cache1.
        cache1.insert(EntryImpl.create(1, 1, new byte[4]));
        cache1.insert(EntryImpl.create(1, 0, new byte[3]));

        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 7);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        cache2.insert(EntryImpl.create(2, 0, new byte[1]));
        cache2.insert(EntryImpl.create(2, 1, new byte[1]));
        cache2.insert(EntryImpl.create(2, 2, new byte[1]));

        assertEquals(cache2.getSize(), 3);
        assertEquals(cacheManager.getSize(), 10);

        // Next insert should trigger a cache eviction to force the size to 8
        // The algorithm should evict entries from cache1
        cache2.insert(EntryImpl.create(2, 3, new byte[1]));

        // Wait for eviction to be completed in background
        Thread.sleep(100);
        assertEquals(cacheManager.getSize(), 7);
        assertEquals(cache1.getSize(), 4);
        assertEquals(cache2.getSize(), 3);

        cacheManager.removeEntryCache("cache1");
        assertEquals(cacheManager.getSize(), 3);
        assertEquals(cache2.getSize(), 3);

        // Should remove 1 entry
        cache2.invalidateEntries(new PositionImpl(2, 1));
        assertEquals(cacheManager.getSize(), 2);
        assertEquals(cache2.getSize(), 2);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 2);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 1);
    }

    /**
     * Inserting an entry for a (ledger, entry) position already present must be
     * rejected (insert returns false) and must not change any size counters.
     */
    @Test
    void doubleInsert() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(10);
        config.setCacheEvictionWatermark(0.8);

        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);

        assertTrue(cache1.insert(EntryImpl.create(1, 1, new byte[4])));
        assertTrue(cache1.insert(EntryImpl.create(1, 0, new byte[3])));

        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);

        // Same position (1, 0) again, even with a different payload size: rejected.
        assertFalse(cache1.insert(EntryImpl.create(1, 0, new byte[5])));

        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);
    }

    /**
     * With max cache size 0 the manager hands out the no-op EntryCacheDisabled
     * implementation: all inserts are dropped and every counter stays at zero.
     */
    @Test
    void cacheDisabled() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(0);
        config.setCacheEvictionWatermark(0.8);

        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);
        EntryCache cache2 = cacheManager.getEntryCache(ml2);

        assertTrue(cache1 instanceof EntryCacheManager.EntryCacheDisabled);
        assertTrue(cache2 instanceof EntryCacheManager.EntryCacheDisabled);

        cache1.insert(EntryImpl.create(1, 1, new byte[4]));
        cache1.insert(EntryImpl.create(1, 0, new byte[3]));

        assertEquals(cache1.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        cache2.insert(EntryImpl.create(2, 0, new byte[1]));
        cache2.insert(EntryImpl.create(2, 1, new byte[1]));
        cache2.insert(EntryImpl.create(2, 2, new byte[1]));

        assertEquals(cache2.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);
    }

    /**
     * Entries appended to a ledger with no active consumers must not be cached
     * (there is nobody to serve them to from cache).
     */
    @Test
    void verifyNoCacheIfNoConsumer() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(7 * 10);
        config.setCacheEvictionWatermark(0.8);

        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("ledger");
        EntryCache cache1 = ledger.entryCache;

        for (int i = 0; i < 10; i++) {
            ledger.addEntry(("entry-" + i).getBytes());
        }

        assertEquals(cache1.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 7 * 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
    }

    /**
     * Hit/miss statistics: cached reads count as hits (rate and byte throughput),
     * deactivating a cursor resets the window, and discarding entries up to a
     * position releases their weight. Each "entry-N" payload is 7 bytes, hence
     * the 70-byte figures below.
     */
    @Test
    void verifyHitsMisses() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(7 * 10);
        config.setCacheEvictionWatermark(0.8);
        config.setCacheEvictionFrequency(1);

        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("ledger");

        ManagedCursorImpl c1 = (ManagedCursorImpl) ledger.openCursor("c1");
        ManagedCursorImpl c2 = (ManagedCursorImpl) ledger.openCursor("c2");

        for (int i = 0; i < 10; i++) {
            ledger.addEntry(("entry-" + i).getBytes());
        }

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        // All 10 entries are cached: 10 hits over the 1-second window, 70 bytes served.
        List<Entry> entries = c1.readEntries(10);
        assertEquals(entries.size(), 10);
        entries.forEach(e -> e.release());

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 10.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 70.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        // Deactivating the cursor produces no new traffic: rates fall back to zero.
        ledger.deactivateCursor(c1);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        entries = c2.readEntries(10);
        assertEquals(entries.size(), 10);

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 10.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 70.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);

        // Discard everything read by c2 up to the last position: 9 of the 10
        // entries are released, leaving a single 7-byte entry in cache.
        PositionImpl pos = (PositionImpl) entries.get(entries.size() - 1).getPosition();
        c2.setReadPosition(pos);
        ledger.discardEntriesFromCache(c2, pos);
        entries.forEach(e -> e.release());

        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 7);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
    }

    /**
     * Time-based eviction: with a 100 ms age threshold and frequent eviction runs,
     * entries added with active cursors are purged once they age out, independent
     * of the size watermark.
     */
    @Test
    void verifyTimeBasedEviction() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(1000);
        config.setCacheEvictionFrequency(100);
        config.setCacheEvictionTimeThresholdMillis(100);

        // Local factory (shadows the inherited field) so it can be shut down at the end.
        ManagedLedgerFactoryImpl factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("test");
        ManagedCursor c1 = ledger.openCursor("c1");
        c1.setActive();
        ManagedCursor c2 = ledger.openCursor("c2");
        c2.setActive();

        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        assertEquals(cacheManager.getSize(), 0);

        EntryCache cache = cacheManager.getEntryCache(ledger);
        assertEquals(cache.getSize(), 0);

        ledger.addEntry(new byte[4]);
        ledger.addEntry(new byte[3]);

        // Cache eviction should happen every 10 millis and clean all the entries older that 100ms
        Thread.sleep(1000);

        c1.close();
        c2.close();

        assertEquals(cacheManager.getSize(), 0);
        assertEquals(cache.getSize(), 0);
        factory.shutdown();
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.todo.nodes;

import com.intellij.ide.IdeBundle;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.projectView.ProjectViewNode;
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.projectView.impl.nodes.PackageElement;
import com.intellij.ide.projectView.impl.nodes.PackageElementNode;
import com.intellij.ide.todo.HighlightedRegionProvider;
import com.intellij.ide.todo.TodoFileDirAndModuleComparator;
import com.intellij.ide.todo.TodoTreeBuilder;
import com.intellij.ide.todo.TodoTreeStructure;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.HighlightedRegion;
import com.intellij.usageView.UsageTreeColors;
import com.intellij.usageView.UsageTreeColorsScheme;
import com.intellij.util.ArrayUtil;
import org.consulo.psi.PsiPackage;
import org.consulo.psi.PsiPackageManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.awt.*;
import java.util.*;

/**
 * TODO-view tree node representing a package: shows the package name together
 * with the number of TODO items and files beneath it, and builds child nodes
 * (files directly in the package, plus compacted sub-package nodes).
 */
public final class TodoPackageNode extends PackageElementNode implements HighlightedRegionProvider {
  // Regions painted over the node text: [0, name) in normal/cut color,
  // [name, end) in the "number of usages" attributes. Rebuilt on every update().
  private final ArrayList<HighlightedRegion> myHighlightedRegions;
  private final TodoTreeBuilder myBuilder;
  // Display name override; when null the package's own (possibly qualified) name is used.
  @Nullable private final String myPresentationName;

  public TodoPackageNode(@NotNull Project project, PackageElement element, TodoTreeBuilder builder) {
    this(project, element, builder, null);
  }

  public TodoPackageNode(@NotNull Project project,
                         PackageElement element,
                         TodoTreeBuilder builder,
                         @Nullable String name) {
    super(project, element, ViewSettings.DEFAULT);
    myBuilder = builder;
    myHighlightedRegions = new ArrayList<HighlightedRegion>(2);
    if (element != null && name == null) {
      // No explicit name given: fall back to the short package name.
      final PsiPackage aPackage = element.getPackage();
      myPresentationName = aPackage.getName();
    }
    else {
      myPresentationName = name;
    }
  }

  @Override
  public ArrayList<HighlightedRegion> getHighlightedRegions() {
    return myHighlightedRegions;
  }

  /**
   * Refreshes the presentable text ("&lt;name&gt; (N items in M files)") and its
   * highlighting. Invalidates the node (setValue(null)) when the package is gone
   * or contains no accepted files.
   */
  @Override
  protected void update(PresentationData data) {
    super.update(data);
    final PackageElement packageElement = getValue();

    try {
      if (packageElement == null || !packageElement.getPackage().isValid()) {
        setValue(null);
        return;
      }

      int fileCount = getFileCount(packageElement);
      if (fileCount == 0) {
        // Nothing to show under this package: drop the node.
        setValue(null);
        return;
      }

      PsiPackage aPackage = packageElement.getPackage();
      String newName;
      if (getStructure().areFlattenPackages()) {
        newName = aPackage.getQualifiedName();
      }
      else {
        newName = myPresentationName != null ? myPresentationName : "";
      }

      // Offset where the "(N items ...)" suffix starts, for the second highlight region.
      int nameEndOffset = newName.length();
      int todoItemCount = getTodoItemCount(packageElement);
      newName = IdeBundle.message("node.todo.group", newName, todoItemCount, fileCount);

      myHighlightedRegions.clear();

      TextAttributes textAttributes = new TextAttributes();
      Color newColor = null;

      // Gray out the name while the package sits on the cut clipboard.
      if (CopyPasteManager.getInstance().isCutElement(packageElement)) {
        newColor = CopyPasteManager.CUT_COLOR;
      }
      textAttributes.setForegroundColor(newColor);
      myHighlightedRegions.add(new HighlightedRegion(0, nameEndOffset, textAttributes));

      EditorColorsScheme colorsScheme = UsageTreeColorsScheme.getInstance().getScheme();
      myHighlightedRegions.add(
        new HighlightedRegion(nameEndOffset, newName.length(),
                              colorsScheme.getAttributes(UsageTreeColors.NUMBER_OF_USAGES)));

      data.setPresentableText(newName);
    }
    catch (IndexNotReadyException e) {
      // Indexing in progress: show a placeholder instead of failing the tree update.
      ProjectViewNode.LOG.info(e);
      data.setPresentableText("N/A");
    }
  }

  @Override
  public void apply(@NotNull Map<String, String> info) {
    info.put("toDoFileCount", String.valueOf(getFileCount(getValue())));
    info.put("toDoItemCount", String.valueOf(getTodoItemCount(getValue())));
  }

  /**
   * Counts files under the package that the TODO tree structure accepts.
   * In flatten-packages mode only the package's own directories are scanned
   * (non-recursive); otherwise all files gathered by {@link #getFiles} count.
   */
  private int getFileCount(final PackageElement packageElement) {
    int count = 0;
    if (getSettings().isFlattenPackages()) {
      final PsiPackage aPackage = packageElement.getPackage();
      final Module module = packageElement.getModule();
      // Restrict to the owning module when present, else the whole project.
      final GlobalSearchScope scope =
        module != null ? GlobalSearchScope.moduleScope(module) : GlobalSearchScope.projectScope(aPackage.getProject());
      final PsiDirectory[] directories = aPackage.getDirectories(scope);
      for (PsiDirectory directory : directories) {
        Iterator<PsiFile> iterator = myBuilder.getFilesUnderDirectory(directory);
        while (iterator.hasNext()) {
          PsiFile psiFile = iterator.next();
          if (getStructure().accept(psiFile)) count++;
        }
      }
    }
    else {
      Iterator<PsiFile> iterator = getFiles(packageElement);
      while (iterator.hasNext()) {
        PsiFile psiFile = iterator.next();
        if (getStructure().accept(psiFile)) {
          count++;
        }
      }
    }
    return count;
  }

  /**
   * Sums TODO item counts over the package's files; same directory-selection
   * logic as {@link #getFileCount} but counting items rather than files.
   */
  public int getTodoItemCount(PackageElement packageElement) {
    int count = 0;
    if (getSettings().isFlattenPackages()) {
      final PsiPackage aPackage = packageElement.getPackage();
      final Module module = packageElement.getModule();
      GlobalSearchScope scope =
        module != null ? GlobalSearchScope.moduleScope(module) : GlobalSearchScope.projectScope(aPackage.getProject());
      final PsiDirectory[] directories = aPackage.getDirectories(scope);
      for (PsiDirectory directory : directories) {
        Iterator<PsiFile> iterator = myBuilder.getFilesUnderDirectory(directory);
        while (iterator.hasNext()) {
          PsiFile psiFile = iterator.next();
          count += getStructure().getTodoItemCount(psiFile);
        }
      }
    }
    else {
      Iterator<PsiFile> iterator = getFiles(packageElement);
      while (iterator.hasNext()) {
        PsiFile psiFile = iterator.next();
        count += getStructure().getTodoItemCount(psiFile);
      }
    }
    return count;
  }

  private TodoTreeStructure getStructure() {
    return myBuilder.getTodoTreeStructure();
  }

  /**
   * Builds the children: file nodes for files directly in this package, and
   * package nodes for sub-packages (compacting empty middle packages into a
   * dotted name). In flatten mode only direct files are added.
   */
  @Override
  @NotNull
  public Collection<AbstractTreeNode> getChildren() {
    ArrayList<AbstractTreeNode> children = new ArrayList<AbstractTreeNode>();
    final Project project = getProject();
    final ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    final PsiPackage psiPackage = getValue().getPackage();
    final Module module = getValue().getModule();
    // NOTE(review): this branch is also taken when psiPackage == null, yet
    // psiPackage is dereferenced below (getDirectories(), equals()) — looks
    // like an NPE risk; confirm whether getPackage() can actually be null here.
    if (!getStructure().getIsFlattenPackages() || psiPackage == null) {
      final Iterator<PsiFile> iterator = getFiles(getValue());
      while (iterator.hasNext()) {
        final PsiFile psiFile = iterator.next();
        final Module psiFileModule = projectFileIndex.getModuleForFile(psiFile.getVirtualFile());
        //group by module
        if (module != null && psiFileModule != null && !module.equals(psiFileModule)) {
          continue;
        }
        // Add files
        final PsiDirectory containingDirectory = psiFile.getContainingDirectory();
        TodoFileNode todoFileNode = new TodoFileNode(project, psiFile, myBuilder, false);
        if (ArrayUtil.find(psiPackage.getDirectories(), containingDirectory) > -1 && !children.contains(todoFileNode)) {
          children.add(todoFileNode);
          continue;
        }
        // Add packages: walk up from the file's directory until we find the
        // sub-package whose parent is this package, then add one node for it.
        PsiDirectory _dir = psiFile.getContainingDirectory();
        while (_dir != null) {
          final PsiDirectory parentDirectory = _dir.getParentDirectory();
          if (parentDirectory != null) {
            PsiPackage _package = PsiPackageManager.getInstance(_dir.getProject()).findAnyPackage(_dir);
            if (_package != null && _package.getParentPackage() != null && psiPackage.equals(_package.getParentPackage())) {
              final GlobalSearchScope scope =
                module != null ? GlobalSearchScope.moduleScope(module) : GlobalSearchScope.projectScope(project);
              _package = TodoTreeHelper.findNonEmptyPackage(_package, module, project, myBuilder, scope);
              //compact empty middle packages
              final String name = _package.getParentPackage().equals(psiPackage)
                                  ? null //non compacted
                                  : _package.getQualifiedName().substring(psiPackage.getQualifiedName().length() + 1);
              TodoPackageNode todoPackageNode =
                new TodoPackageNode(project, new PackageElement(module, _package, false), myBuilder, name);
              if (!children.contains(todoPackageNode)) {
                children.add(todoPackageNode);
                break;
              }
            }
          }
          _dir = parentDirectory;
        }
      }
    }
    else { // flatten packages
      final Iterator<PsiFile> iterator = getFiles(getValue());
      while (iterator.hasNext()) {
        final PsiFile psiFile = iterator.next();
        //group by module
        final Module psiFileModule = projectFileIndex.getModuleForFile(psiFile.getVirtualFile());
        if (module != null && psiFileModule != null && !module.equals(psiFileModule)) {
          continue;
        }
        final PsiDirectory _dir = psiFile.getContainingDirectory();
        // Add files
        TodoFileNode todoFileNode = new TodoFileNode(getProject(), psiFile, myBuilder, false);
        if (ArrayUtil.find(psiPackage.getDirectories(), _dir) > -1 && !children.contains(todoFileNode)) {
          children.add(todoFileNode);
          continue;
        }
      }
    }
    Collections.sort(children, TodoFileDirAndModuleComparator.INSTANCE);
    return children;
  }

  /**
   * @return read-only iterator of all valid PSI files that can have T.O.D.O items
   * and which are located under specified <code>psiDirctory</code>.
   */
  public Iterator<PsiFile> getFiles(PackageElement packageElement) {
    ArrayList<PsiFile> psiFileList = new ArrayList<PsiFile>();
    GlobalSearchScope scope = packageElement.getModule() != null
                              ? GlobalSearchScope.moduleScope(packageElement.getModule())
                              : GlobalSearchScope.projectScope(myProject);
    final PsiDirectory[] directories = packageElement.getPackage().getDirectories(scope);
    for (PsiDirectory directory : directories) {
      // Recursive collection (subdirsIncluded == false refers to module content borders here)
      // NOTE(review): semantics of the boolean flag come from TodoTreeBuilder.getFiles — confirm.
      Iterator<PsiFile> files = myBuilder.getFiles(directory, false);
      for (; files.hasNext(); ) {
        psiFileList.add(files.next());
      }
    }
    return psiFileList.iterator();
  }

  // Sort weight relative to other TODO node types (files/dirs/modules).
  @Override
  public int getWeight() {
    return 3;
  }
}
/*******************************************************************************
 * (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 *******************************************************************************/
package io.cloudslang.worker.management.services;

import io.cloudslang.orchestrator.entities.Message;
import io.cloudslang.orchestrator.services.OrchestratorDispatcherService;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatcher;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import static org.mockito.Matchers.argThat;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;

/**
 * Tests for the worker's outbound message buffer: bulk aggregation, producer
 * blocking when full, consumer blocking when empty, a short longevity run with
 * concurrent producers/drainer, and state reset on recovery.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class OutboundBufferTest {
    private final Logger logger = Logger.getLogger(getClass());

    // Buffer capacity in weight units (see config static block: set via system properties
    // before the Spring context instantiates the buffer bean).
    private static final int MAX_BUFFER_WEIGHT = 10;
    private static final int MAX_BULK_WEIGHT = 3;

    @Autowired
    private WorkerRecoveryManager recoveryManager;

    @Autowired
    private OutboundBuffer buffer;

    @Autowired
    private OrchestratorDispatcherService dispatcherService;

    @Before
    public void setUp() {
        // Empty the (shared, context-scoped) buffer so tests start from a clean slate.
        ((WorkerRecoveryListener) buffer).doRecovery();
        reset(recoveryManager, dispatcherService);
    }

    /**
     * Makes sure the buffer aggregates messages and dispatches them in bulk
     */
    @Test
    public void testAggregation() throws InterruptedException {
        List<DummyMsg1> messages = Arrays.asList(new DummyMsg1(), new DummyMsg1());
        for (DummyMsg1 message : messages) {
            buffer.put(message);
        }
        buffer.drain();
        // One dispatch call carrying a list of the same size as what was put in.
        verify(dispatcherService).dispatch((List<? extends Serializable>) argThat(new MessagesSizeMatcher(messages)),
                                           anyString(), anyString(), anyString());
    }

    /**
     * checks that when inserting messages to a full buffer,
     * the inserting thread will block until the buffer is emptied
     *
     * @throws InterruptedException
     */
    @Test
    public void testProducerBlocking() throws InterruptedException {
        // buffer capacity is 10, put messages in it until it is full
        while (buffer.getWeight() < MAX_BUFFER_WEIGHT) {
            buffer.put(new DummyMsg1());
        }

        // the next insert to buffer will block because it's full, do it on a different thread
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    buffer.put(new DummyMsg1());
                } catch (InterruptedException e) {
                    //ignore
                }
            }
        });
        thread.start();

        // wait for that thread to block
        waitForThreadStateToBe(thread, Thread.State.WAITING);
        Assert.assertEquals("inserting thread should be in a waiting state when inserting to full buffer",
                            Thread.State.WAITING, thread.getState());

        // drain the buffer -> will send the first 10 messages and release the blocking thread
        buffer.drain();
        waitForThreadStateToBe(thread, Thread.State.TERMINATED);
        Assert.assertEquals("inserting thread should be in a terminated state after inserting to buffer",
                            Thread.State.TERMINATED, thread.getState());
        thread.join();
    }

    /**
     * Checks that the buffer will block when having no messages and continues when the first message arrives
     *
     * @throws InterruptedException
     */
    @Test
    public void testConsumerBlocking() throws InterruptedException {
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                buffer.drain();
            }
        });
        thread.start();

        // draining the buffer should block since it is empty
        waitForThreadStateToBe(thread, Thread.State.WAITING);
        Assert.assertEquals("reading thread should be in a waiting state when inserting to full buffer",
                            Thread.State.WAITING, thread.getState());

        // insert 2 new messages
        Message[] messages = new Message[]{new DummyMsg1(), new DummyMsg2()};
        buffer.put(messages);

        // thread should be released now
        waitForThreadStateToBe(thread, Thread.State.TERMINATED);
        Assert.assertEquals("reading thread should be in a terminated after a message was inserted to the buffer",
                            Thread.State.TERMINATED, thread.getState());
        thread.join();

        verify(dispatcherService).dispatch((List<? extends Serializable>) argThat(new MessagesSizeMatcher(Arrays.asList(messages))),
                                           anyString(), anyString(), anyString());
    }

    // Bounded poll (~1s max: 20 * 50ms) for the thread to reach the expected state;
    // the caller re-asserts the state afterwards, so a timeout surfaces as a failure there.
    private void waitForThreadStateToBe(Thread thread, Thread.State state) throws InterruptedException {
        int waitCount = 0;
        while (!thread.getState().equals(state) && waitCount <= 20) {
            Thread.sleep(50);
            waitCount++;
        }
    }

    /**
     * Stress run: several producer threads feed the buffer while one drainer
     * empties it, for a few seconds; collects bulk-size statistics through a
     * stubbed dispatcher and prints them at the end.
     */
    @Test
    public void longevityTest() throws InterruptedException {
        int THREADS_NUM = 5;
        long CHECK_DURATION = 5 * 1000L;
        long INFO_FREQUENCY = 2 * 1000L;

        final AtomicBoolean run = new AtomicBoolean(true);
        final CountDownLatch latch = new CountDownLatch(THREADS_NUM + 1);

        for (int i = 1; i <= THREADS_NUM; i++) {
            final int index = i;
            new Thread(new Runnable() {
                // Alternate message types between producer threads (weights 1 and 2).
                private final Class<? extends Message> messageClass = (index % 2) != 0 ? DummyMsg1.class : DummyMsg2.class;

                @Override
                public void run() {
                    int counter = 0;
                    try {
                        logger.debug("started, will generate messages of " + messageClass.getSimpleName());
                        while (run.get()) {
                            buffer.put(messageClass.newInstance());
                            counter++;
                            Thread.sleep(5L);
                        }
                        logger.debug("thread finished. processed " + counter + " messages");
                    } catch (Exception ex) {
                        logger.error("thread finished", ex);
                    } finally {
                        latch.countDown();
                    }
                }
            }, "T-" + i).start();
        }

        final DrainStatistics statistics = new DrainStatistics();
        //noinspection unchecked
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                // Record size and total weight of each dispatched bulk.
                @SuppressWarnings("unchecked")
                List<Message> messages = (List<Message>) invocation.getArguments()[0];
                int weight = 0;
                for (Message message : messages) weight += message.getWeight();
                statistics.add(messages.size(), weight);
                return null;
            }
        }).when(dispatcherService).dispatch(anyList(), anyString(), anyString(), anyString());

        // Drainer thread: keeps emptying the buffer, then flushes whatever remains once stopped.
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    logger.debug("started");
                    while (run.get()) {
                        buffer.drain();
                        Thread.sleep(30L);
                    }
                    while (buffer.getSize() > 0) buffer.drain();
                } catch (Exception ex) {
                    logger.error("thread finished", ex);
                } finally {
                    latch.countDown();
                }
            }
        }, "T-D").start();

        long t = System.currentTimeMillis();
        while (System.currentTimeMillis() - t < CHECK_DURATION) {
            Thread.sleep(INFO_FREQUENCY);
            logger.debug(buffer.getStatus());
        }
        run.set(false);
        latch.await();
        System.out.println("Drain statistics: " + statistics.report());
    }

    /**
     * Makes sure the recovery clears worker state
     */
    @Test
    public void testRecovery() throws InterruptedException {
        List<DummyMsg1> messages = Arrays.asList(new DummyMsg1(), new DummyMsg1());
        for (DummyMsg1 message : messages) {
            buffer.put(message);
        }
        Assert.assertEquals(2, buffer.getSize());
        Assert.assertEquals(2, buffer.getWeight());
        ((WorkerRecoveryListener) buffer).doRecovery();
        Assert.assertEquals(0, buffer.getSize());
        Assert.assertEquals(0, buffer.getWeight());
    }

    // NOTE(review): despite the name, this matcher only compares list SIZES,
    // not message contents or order.
    private class MessagesSizeMatcher extends ArgumentMatcher {
        List messages;

        public MessagesSizeMatcher(List val) {
            messages = val;
        }

        @Override
        public boolean matches(Object argument) {
            if (argument instanceof List) {
                List listConverted = (List) argument;
                if (listConverted.size() == messages.size()) {
                    return true;
                }
            }
            return false;
        }
    }

    // Accumulates per-drain bulk counts for the longevity test report.
    static class DrainStatistics {
        private int counter;  // number of bulks dispatched
        private int size;     // total messages across all bulks
        private int weight;   // total weight across all bulks

        public void add(int size, int weight) {
            counter++;
            this.size += size;
            this.weight += weight;
        }

        public String report() {
            return "Buffer has sent " + counter + " bulks, avg(size): " + size / counter +
                   ", avg(weight): " + weight / counter + ", total messages: " + size;
        }
    }

    // Minimal message of weight 1.
    static class DummyMsg1 implements Message {
        public int getWeight() {
            return 1;
        }

        public String getId() {
            return "";
        }

        public List<Message> shrink(List<Message> messages) {
            return messages;
        }

        @Override
        public String getExceptionMessage() {
            return null;
        }

        @Override
        public void setExceptionMessage(String msg) {
            //do nothing
        }
    }

    // Minimal message of weight 2.
    static class DummyMsg2 implements Message {
        public int getWeight() {
            return 2;
        }

        public String getId() {
            return "";
        }

        public List<Message> shrink(List<Message> messages) {
            return messages;
        }

        @Override
        public String getExceptionMessage() {
            return null;
        }

        @Override
        public void setExceptionMessage(String msg) {
            //do nothing
        }
    }

    // Test context: system properties must be set BEFORE the buffer bean is
    // created, hence the static initializer.
    @Configuration
    static class config {
        static {
            System.setProperty("out.buffer.max.buffer.weight", String.valueOf(MAX_BUFFER_WEIGHT));
            System.setProperty("out.buffer.max.bulk.weight", String.valueOf(MAX_BULK_WEIGHT));
        }

        @Bean
        public WorkerRecoveryManager workerRecoveryManager() {
            return mock(WorkerRecoveryManager.class);
        }

        @Bean
        OrchestratorDispatcherService orchestratorDispatcherService() {
            return mock(OrchestratorDispatcherService.class);
        }

        @Bean
        SynchronizationManager synchronizationManager() {
            return new SynchronizationManagerImpl();
        }

        // NOTE(review): RetryTemplate is not imported above — presumably it lives
        // in this same package; verify.
        @Bean
        public RetryTemplate retryTemplate() {
            return new RetryTemplate();
        }

        @Bean
        public OutboundBuffer outboundBuffer() {
            return new OutboundBufferImpl();
        }

        @Bean
        String workerUuid() {
            return "1234";
        }
    }
}
/*
 *
 *  * Copyright 2015 Skymind,Inc.
 *  *
 *  *    Licensed under the Apache License, Version 2.0 (the "License");
 *  *    you may not use this file except in compliance with the License.
 *  *    You may obtain a copy of the License at
 *  *
 *  *        http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *    Unless required by applicable law or agreed to in writing, software
 *  *    distributed under the License is distributed on an "AS IS" BASIS,
 *  *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *    See the License for the specific language governing permissions and
 *  *    limitations under the License.
 *
 */

package org.canova.nlp.annotator;

import opennlp.tools.postag.POSModel;
import opennlp.tools.postag.POSTaggerME;
import opennlp.uima.postag.POSModelResource;
import opennlp.uima.postag.POSModelResourceImpl;
import opennlp.uima.util.AnnotationComboIterator;
import opennlp.uima.util.AnnotationIteratorPair;
import opennlp.uima.util.AnnotatorUtil;
import opennlp.uima.util.UimaUtil;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeSystem;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.fit.component.CasAnnotator_ImplBase;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.fit.factory.ExternalResourceFactory;
import org.apache.uima.resource.ResourceAccessException;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.util.Level;
import org.apache.uima.util.Logger;
import org.canova.nlp.movingwindow.Util;
import org.cleartk.token.type.Sentence;
import org.cleartk.token.type.Token;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * UIMA CAS annotator that assigns OpenNLP part-of-speech tags to token
 * annotations, sentence by sentence. The POS tag is written to the configured
 * (String-typed) POS feature; when a probability feature is configured, the
 * per-tag probability is written to it as well.
 */
public class PoStagger extends CasAnnotator_ImplBase {

    static {
        //UIMA logging
        Util.disableLogging();
    }

    // Shared tagger instance; process(CAS) is synchronized because POSTaggerME
    // is not thread-safe.
    private POSTaggerME posTagger;

    private Type sentenceType;

    private Type tokenType;

    // String-typed feature on the token type that receives the POS tag.
    private Feature posFeature;

    // Optional double-typed feature that receives the tag probability; null when
    // not configured.
    private Feature probabilityFeature;

    private UimaContext context;

    private Logger logger;

    /**
     * Initializes a new instance.
     *
     * Note: Use {@link #initialize(org.apache.uima.UimaContext) } to initialize this instance. Not use the
     * constructor.
     */
    public PoStagger() {
        // must not be implemented !
    }

    /**
     * Initializes the current instance with the given context: loads the POS
     * model from the {@code UimaUtil.MODEL_PARAMETER} resource and creates the
     * tagger with the configured (or default) beam size.
     *
     * Note: Do all initialization in this method, do not use the constructor.
     */
    @Override
    public void initialize(UimaContext context) throws ResourceInitializationException {
        super.initialize(context);

        this.context = context;
        this.logger = context.getLogger();

        if (this.logger.isLoggable(Level.INFO)) {
            this.logger.log(Level.INFO, "Initializing the OpenNLP " + "Part of Speech annotator.");
        }

        POSModel model;
        try {
            POSModelResource modelResource = (POSModelResource) context
                    .getResourceObject(UimaUtil.MODEL_PARAMETER);
            model = modelResource.getModel();
        } catch (ResourceAccessException e) {
            // Preserve the cause so model-loading failures stay diagnosable.
            throw new ResourceInitializationException(e);
        }

        Integer beamSize = AnnotatorUtil.getOptionalIntegerParameter(context, UimaUtil.BEAM_SIZE_PARAMETER);
        if (beamSize == null)
            beamSize = POSTaggerME.DEFAULT_BEAM_SIZE;

        this.posTagger = new POSTaggerME(model, beamSize, 0);
    }

    /**
     * Initializes the type system: resolves the sentence/token types, the
     * required String-typed POS feature, and the optional double-typed
     * probability feature.
     */
    @Override
    public void typeSystemInit(TypeSystem typeSystem) throws AnalysisEngineProcessException {
        // sentence type
        this.sentenceType = AnnotatorUtil.getRequiredTypeParameter(this.context, typeSystem,
                UimaUtil.SENTENCE_TYPE_PARAMETER);

        // token type
        this.tokenType = AnnotatorUtil.getRequiredTypeParameter(this.context, typeSystem,
                UimaUtil.TOKEN_TYPE_PARAMETER);

        // pos feature
        this.posFeature = AnnotatorUtil.getRequiredFeatureParameter(this.context, this.tokenType,
                UimaUtil.POS_FEATURE_PARAMETER, CAS.TYPE_NAME_STRING);

        this.probabilityFeature = AnnotatorUtil.getOptionalFeatureParameter(this.context, this.tokenType,
                UimaUtil.PROBABILITY_FEATURE_PARAMETER, CAS.TYPE_NAME_DOUBLE);
    }

    /**
     * Performs pos-tagging on the given tcas object: for each sentence, collects
     * the covered token texts, tags them in one call, then writes the tag (and,
     * if configured, its probability) back onto each token annotation.
     */
    @Override
    public synchronized void process(CAS tcas) {
        final AnnotationComboIterator comboIterator = new AnnotationComboIterator(tcas, this.sentenceType,
                this.tokenType);

        for (AnnotationIteratorPair annotationIteratorPair : comboIterator) {

            final List<AnnotationFS> sentenceTokenAnnotationList = new LinkedList<AnnotationFS>();

            final List<String> sentenceTokenList = new LinkedList<String>();

            for (AnnotationFS tokenAnnotation : annotationIteratorPair.getSubIterator()) {

                sentenceTokenAnnotationList.add(tokenAnnotation);

                sentenceTokenList.add(tokenAnnotation.getCoveredText());
            }

            final List<String> posTags = this.posTagger.tag(sentenceTokenList);

            double posProbabilities[] = null;

            if (this.probabilityFeature != null) {
                posProbabilities = this.posTagger.probs();
            }

            final Iterator<String> posTagIterator = posTags.iterator();
            final Iterator<AnnotationFS> sentenceTokenIterator = sentenceTokenAnnotationList.iterator();

            int index = 0;
            while (posTagIterator.hasNext() && sentenceTokenIterator.hasNext()) {
                final String posTag = posTagIterator.next();
                final AnnotationFS tokenAnnotation = sentenceTokenIterator.next();

                tokenAnnotation.setStringValue(this.posFeature, posTag);

                if (posProbabilities != null) {
                    // BUG FIX: this previously wrote to this.posFeature, i.e. a
                    // double into the String-typed POS feature (overwriting the
                    // tag and violating the feature's range type). The
                    // probability belongs in the double-typed probabilityFeature,
                    // which is guaranteed non-null here because posProbabilities
                    // is only populated when it is configured.
                    tokenAnnotation.setDoubleValue(this.probabilityFeature, posProbabilities[index]);
                }

                index++;
            }

            // log tokens with pos
            if (this.logger.isLoggable(Level.FINER)) {

                final StringBuilder sentenceWithPos = new StringBuilder();

                sentenceWithPos.append("\"");

                for (final Iterator<AnnotationFS> it = sentenceTokenAnnotationList.iterator(); it.hasNext();) {
                    final AnnotationFS token = it.next();
                    sentenceWithPos.append(token.getCoveredText());
                    sentenceWithPos.append('\\');
                    sentenceWithPos.append(token.getStringValue(this.posFeature));
                    sentenceWithPos.append(' ');
                }

                // delete last whitespace
                if (sentenceWithPos.length() > 1) // not 0 because it contains already the " char
                    sentenceWithPos.setLength(sentenceWithPos.length() - 1);

                sentenceWithPos.append("\"");

                this.logger.log(Level.FINER, sentenceWithPos.toString());
            }
        }
    }

    /**
     * Releases allocated resources.
     */
    @Override
    public void destroy() {
        this.posTagger = null;
    }

    /**
     * Builds an analysis engine description for this annotator using the
     * bundled maxent POS model for the given language code.
     *
     * @param languageCode two-letter language code used to locate
     *                     {@code /models/<code>-pos-maxent.bin} on the classpath
     * @throws ResourceInitializationException if the engine cannot be created,
     *         or if the model resource is missing from the classpath (previously
     *         this surfaced as a bare NullPointerException)
     */
    public static AnalysisEngineDescription getDescription(String languageCode) throws ResourceInitializationException {
        String modelPath = String.format("/models/%s-pos-maxent.bin", languageCode);
        java.net.URL modelUrl = PoStagger.class.getResource(modelPath);
        if (modelUrl == null) {
            // Fail with a descriptive message instead of an NPE on .toString().
            throw new ResourceInitializationException(
                    new IllegalArgumentException("POS model not found on classpath: " + modelPath));
        }
        return AnalysisEngineFactory.createEngineDescription(
                PoStagger.class,
                UimaUtil.MODEL_PARAMETER,
                ExternalResourceFactory.createExternalResourceDescription(
                        POSModelResourceImpl.class,
                        modelUrl.toString()),
                UimaUtil.SENTENCE_TYPE_PARAMETER,
                Sentence.class.getName(),
                UimaUtil.TOKEN_TYPE_PARAMETER,
                Token.class.getName(),
                UimaUtil.POS_FEATURE_PARAMETER,
                "pos");
    }
}
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.util; import com.thoughtworks.go.server.Jetty9Server; import com.thoughtworks.go.server.config.GoSSLConfig; import com.thoughtworks.go.util.SystemEnvironment; import org.eclipse.jetty.server.*; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.mockito.Mock; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.function.Predicate; import static org.hamcrest.Matchers.*; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; public class GoSslSocketConnectorTest { @Rule public final TemporaryFolder folder = new TemporaryFolder(); private File truststore; private File keystore; private GoSslSocketConnector sslSocketConnector; @Mock private GoSSLConfig goSSLConfig; @Mock private Jetty9Server jettyServer; @Mock private SystemEnvironment systemEnvironment; @Before public void setUp() throws Exception { initMocks(this); keystore = folder.newFile("keystore"); truststore = folder.newFile("truststore"); String[] cipherSuitesToBeIncluded = {"FOO"}; when(goSSLConfig.getCipherSuitesToBeIncluded()).thenReturn(cipherSuitesToBeIncluded); 
when(systemEnvironment.getSslServerPort()).thenReturn(1234); when(systemEnvironment.keystore()).thenReturn(keystore); when(systemEnvironment.truststore()).thenReturn(truststore); when(systemEnvironment.get(SystemEnvironment.RESPONSE_BUFFER_SIZE)).thenReturn(100); when(systemEnvironment.get(SystemEnvironment.IDLE_TIMEOUT)).thenReturn(200); when(systemEnvironment.getListenHost()).thenReturn("foo"); when(jettyServer.getServer()).thenReturn(new Server()); when(systemEnvironment.get(SystemEnvironment.GO_SSL_CONFIG_CLEAR_JETTY_DEFAULT_EXCLUSIONS)).thenReturn(false); when(systemEnvironment.get(SystemEnvironment.GO_SSL_CONFIG_JETTY_WANT_CLIENT_AUTH)).thenReturn(false); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); } @Test public void shouldCreateSslConnectorWithRelevantPortAndTimeout() { assertThat(sslSocketConnector.getConnector() instanceof ServerConnector, is(true)); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); assertThat(connector.getPort(), is(1234)); assertThat(connector.getHost(), is("foo")); assertThat(connector.getIdleTimeout(), is(200l)); } @Test public void shouldSetupSslContextWithKeystoreAndTruststore() throws IOException { ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getKeyStorePath(), is(keystore.getCanonicalFile().toPath().toAbsolutePath().toUri().toString())); assertThat(sslContextFactory.getTrustStorePath(), is(truststore.getCanonicalFile().toPath().toAbsolutePath().toUri().toString())); assertThat(sslContextFactory.getWantClientAuth(), is(false)); } @Test public void shouldSetupCipherSuitesToBeIncluded() { ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> 
connectionFactories = connector.getConnectionFactories(); SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); List<String> includedCipherSuites = new ArrayList<>(Arrays.asList(sslContextFactory.getIncludeCipherSuites())); assertThat(includedCipherSuites.size(), is(1)); assertThat(includedCipherSuites.contains("FOO"), is(true)); } @Test public void shouldSetupHttpConnectionFactory() { ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); HttpConnectionFactory httpConnectionFactory = getHttpConnectionFactory(connectionFactories); assertThat(httpConnectionFactory.getHttpConfiguration().getOutputBufferSize(), is(100)); assertThat(httpConnectionFactory.getHttpConfiguration().getCustomizers().size(), is(2)); assertThat(httpConnectionFactory.getHttpConfiguration().getCustomizers().get(0), instanceOf(SecureRequestCustomizer.class)); assertThat(httpConnectionFactory.getHttpConfiguration().getCustomizers().get(1), instanceOf(ForwardedRequestCustomizer.class)); } @Test public void shouldNotSendAServerHeaderForSecurityReasons() throws Exception { HttpConnectionFactory httpConnectionFactory = getHttpConnectionFactory(sslSocketConnector.getConnector().getConnectionFactories()); HttpConfiguration configuration = httpConnectionFactory.getHttpConfiguration(); assertThat(configuration.getSendServerVersion(), is(false)); } @Test public void shouldLeaveTheDefaultCipherSuiteInclusionAndExclusionListUnTouchedIfNotOverridden() { when(goSSLConfig.getCipherSuitesToBeIncluded()).thenReturn(null); when(goSSLConfig.getCipherSuitesToBeExcluded()).thenReturn(null); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); SslContextFactory 
sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getExcludeCipherSuites(), is(arrayWithSize(5))); assertThat(sslContextFactory.getExcludeCipherSuites(), is(arrayContainingInAnyOrder("^.*_(MD5|SHA|SHA1)$", "^TLS_RSA_.*$", "^SSL_.*$", "^.*_NULL_.*$", "^.*_anon_.*$"))); assertThat(sslContextFactory.getIncludeCipherSuites(), is(emptyArray())); } @Test public void shouldClearOutDefaultProtocolsAndCipherSetByJettyIfFlagIsSet() { when(systemEnvironment.get(SystemEnvironment.GO_SSL_CONFIG_CLEAR_JETTY_DEFAULT_EXCLUSIONS)).thenReturn(true); when(goSSLConfig.getProtocolsToBeExcluded()).thenReturn(null); when(goSSLConfig.getProtocolsToBeIncluded()).thenReturn(null); when(goSSLConfig.getCipherSuitesToBeIncluded()).thenReturn(null); when(goSSLConfig.getCipherSuitesToBeExcluded()).thenReturn(null); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getExcludeProtocols().length, is(0)); assertThat(sslContextFactory.getIncludeProtocols().length, is(0)); assertThat(sslContextFactory.getExcludeCipherSuites().length, is(0)); assertThat(sslContextFactory.getIncludeCipherSuites().length, is(0)); } @Test public void shouldOverrideTheDefaultCipherSuiteExclusionListIfConfigured() { when(goSSLConfig.getCipherSuitesToBeExcluded()).thenReturn(new String[]{"*MD5*"}); when(goSSLConfig.getCipherSuitesToBeIncluded()).thenReturn(new String[]{"*ECDHE*"}); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); 
SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getExcludeCipherSuites().length, is(1)); assertThat(sslContextFactory.getExcludeCipherSuites()[0], is("*MD5*")); assertThat(sslContextFactory.getIncludeCipherSuites().length, is(1)); assertThat(sslContextFactory.getIncludeCipherSuites()[0], is("*ECDHE*")); } @Test public void shouldLeaveTheDefaultProtocolInclusionAndExclusionListUnTouchedIfNotOverridden() { when(goSSLConfig.getProtocolsToBeIncluded()).thenReturn(null); when(goSSLConfig.getProtocolsToBeExcluded()).thenReturn(null); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getExcludeProtocols().length, is(4)); assertThat(Arrays.asList(sslContextFactory.getExcludeProtocols()).containsAll(Arrays.asList("SSL", "SSLv2", "SSLv2Hello", "SSLv3")), is(true)); assertThat(sslContextFactory.getIncludeProtocols().length, is(0)); } @Test public void shouldOverrideTheDefaultProtocolExclusionListIfConfigured() { when(goSSLConfig.getProtocolsToBeExcluded()).thenReturn(new String[]{"SSL", "TLS1.0", "TLS1.1"}); when(goSSLConfig.getProtocolsToBeIncluded()).thenReturn(new String[]{"TLS1.2"}); sslSocketConnector = new GoSslSocketConnector(jettyServer, "password", systemEnvironment, goSSLConfig); ServerConnector connector = (ServerConnector) sslSocketConnector.getConnector(); Collection<ConnectionFactory> connectionFactories = connector.getConnectionFactories(); SslContextFactory sslContextFactory = findSslContextFactory(connectionFactories); assertThat(sslContextFactory.getExcludeProtocols().length, is(3)); 
assertThat(Arrays.asList(sslContextFactory.getExcludeProtocols()).containsAll(Arrays.asList("SSL", "TLS1.0", "TLS1.1")), is(true)); assertThat(sslContextFactory.getIncludeProtocols().length, is(1)); assertThat(sslContextFactory.getIncludeProtocols()[0], is("TLS1.2")); } private HttpConnectionFactory getHttpConnectionFactory(Collection<ConnectionFactory> connectionFactories) { return (HttpConnectionFactory) getConnectionFactoryOfType(connectionFactories, HttpConnectionFactory.class); } private SslContextFactory findSslContextFactory(Collection<ConnectionFactory> connectionFactories) { return ((SslConnectionFactory) getConnectionFactoryOfType(connectionFactories, SslConnectionFactory.class)).getSslContextFactory(); } private ConnectionFactory getConnectionFactoryOfType(Collection<ConnectionFactory> connectionFactories, final Class<?> aClass) { return connectionFactories.stream().filter(new Predicate<ConnectionFactory>() { @Override public boolean test(ConnectionFactory item) { return aClass.isInstance(item); } }).findFirst().orElse(null); } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.introduce.inplace; import com.intellij.codeInsight.highlighting.HighlightManager; import com.intellij.codeInsight.template.TextResult; import com.intellij.codeInsight.template.impl.TemplateManagerImpl; import com.intellij.codeInsight.template.impl.TemplateState; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.Result; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.command.impl.StartMarkAction; import com.intellij.openapi.command.undo.UndoUtil; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.event.DocumentAdapter; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.Balloon; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.ReadonlyStatusHandler; 
import com.intellij.psi.*; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.refactoring.RefactoringActionHandler; import com.intellij.refactoring.listeners.RefactoringEventData; import com.intellij.refactoring.listeners.RefactoringEventListener; import com.intellij.refactoring.rename.inplace.InplaceRefactoring; import com.intellij.ui.DottedBorder; import com.intellij.util.ui.PositionTracker; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.EmptyBorder; import javax.swing.border.LineBorder; import java.awt.*; import java.util.*; import java.util.List; /** * User: anna * Date: 3/15/11 */ public abstract class AbstractInplaceIntroducer<V extends PsiNameIdentifierOwner, E extends PsiElement> extends InplaceVariableIntroducer<E> { protected V myLocalVariable; protected RangeMarker myLocalMarker; protected final String myExprText; private final String myLocalName; public static final Key<AbstractInplaceIntroducer> ACTIVE_INTRODUCE = Key.create("ACTIVE_INTRODUCE"); private EditorEx myPreview; private final JComponent myPreviewComponent; private DocumentAdapter myDocumentAdapter; protected final JPanel myWholePanel; protected boolean myFinished = false; public AbstractInplaceIntroducer(Project project, Editor editor, @Nullable E expr, @Nullable V localVariable, E[] occurrences, String title, final FileType languageFileType) { super(null, editor, project, title, occurrences, expr); myLocalVariable = localVariable; if (localVariable != null) { final PsiElement nameIdentifier = localVariable.getNameIdentifier(); if (nameIdentifier != null) { myLocalMarker = createMarker(nameIdentifier); } } else { myLocalMarker = null; } myExprText = getExpressionText(expr); myLocalName = localVariable != null ? 
localVariable.getName() : null; Document document = EditorFactory.getInstance().createDocument(""); UndoUtil.disableUndoFor(document); myPreview = (EditorEx)EditorFactory.getInstance().createEditor(document, project, languageFileType, true); myPreview.setOneLineMode(true); final EditorSettings settings = myPreview.getSettings(); settings.setAdditionalLinesCount(0); settings.setAdditionalColumnsCount(1); settings.setRightMarginShown(false); settings.setFoldingOutlineShown(false); settings.setLineNumbersShown(false); settings.setLineMarkerAreaShown(false); settings.setIndentGuidesShown(false); settings.setVirtualSpace(false); myPreview.setHorizontalScrollbarVisible(false); myPreview.setVerticalScrollbarVisible(false); myPreview.setCaretEnabled(false); settings.setLineCursorWidth(1); final Color bg = myPreview.getColorsScheme().getColor(EditorColors.CARET_ROW_COLOR); myPreview.setBackgroundColor(bg); myPreview.setBorder(BorderFactory.createCompoundBorder(new DottedBorder(Color.gray), new LineBorder(bg, 2))); myPreviewComponent = new JPanel(new BorderLayout()); myPreviewComponent.add(myPreview.getComponent(), BorderLayout.CENTER); myPreviewComponent.setBorder(new EmptyBorder(2, 2, 6, 2)); myWholePanel = new JPanel(new GridBagLayout()); myWholePanel.setBorder(null); showDialogAdvertisement(getActionName()); } @Nullable protected String getExpressionText(E expr) { return expr != null ? 
expr.getText() : null; } protected final void setPreviewText(final String text) { if (myPreview == null) return; //already disposed ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { myPreview.getDocument().replaceString(0, myPreview.getDocument().getTextLength(), text); } }); } protected final JComponent getPreviewComponent() { return myPreviewComponent; } protected final Editor getPreviewEditor() { return myPreview; } @Override protected StartMarkAction startRename() throws StartMarkAction.AlreadyStartedException { return StartMarkAction.start(myEditor, myProject, getCommandName()); } /** * Returns ID of the action the shortcut of which is used to show the non-in-place refactoring dialog. * * @return action ID */ protected abstract String getActionName(); /** * Creates an initial version of the declaration for the introduced element. Note that this method is not called in a write action * and most likely needs to create one itself. * * @param replaceAll whether all occurrences are going to be replaced * @param names the suggested names for the declaration * @return the declaration */ @Nullable protected abstract V createFieldToStartTemplateOn(boolean replaceAll, String[] names); /** * Returns the suggested names for the introduced element. * * @param replaceAll whether all occurrences are going to be replaced * @param variable introduced element declaration, if already created. 
* @return the suggested names */ protected abstract String[] suggestNames(boolean replaceAll, @Nullable V variable); protected abstract void performIntroduce(); protected void performPostIntroduceTasks() {} public abstract boolean isReplaceAllOccurrences(); public abstract void setReplaceAllOccurrences(boolean allOccurrences); @Override protected abstract JComponent getComponent(); protected abstract void saveSettings(@NotNull V variable); @Override protected abstract V getVariable(); public abstract E restoreExpression(PsiFile containingFile, V variable, RangeMarker marker, String exprText); /** * Begins the in-place refactoring operation. * * @return true if the in-place refactoring was successfully started, false if it failed to start and a dialog should be shown instead. */ public boolean startInplaceIntroduceTemplate() { final boolean replaceAllOccurrences = isReplaceAllOccurrences(); final Ref<Boolean> result = new Ref<Boolean>(); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { @Override public void run() { final String[] names = suggestNames(replaceAllOccurrences, getLocalVariable()); final V variable = createFieldToStartTemplateOn(replaceAllOccurrences, names); boolean started = false; if (variable != null) { int caretOffset = getCaretOffset(); myEditor.getCaretModel().moveToOffset(caretOffset); myEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); final LinkedHashSet<String> nameSuggestions = new LinkedHashSet<String>(); nameSuggestions.add(variable.getName()); nameSuggestions.addAll(Arrays.asList(names)); initOccurrencesMarkers(); setElementToRename(variable); updateTitle(getVariable()); started = AbstractInplaceIntroducer.super.performInplaceRefactoring(nameSuggestions); if (started) { onRenameTemplateStarted(); myDocumentAdapter = new DocumentAdapter() { @Override public void documentChanged(DocumentEvent e) { if (myPreview == null) return; final TemplateState templateState = 
TemplateManagerImpl.getTemplateState(myEditor); if (templateState != null) { final TextResult value = templateState.getVariableValue(InplaceRefactoring.PRIMARY_VARIABLE_NAME); if (value != null) { updateTitle(getVariable(), value.getText()); } } } }; myEditor.getDocument().addDocumentListener(myDocumentAdapter); updateTitle(getVariable()); if (TemplateManagerImpl.getTemplateState(myEditor) != null) { myEditor.putUserData(ACTIVE_INTRODUCE, AbstractInplaceIntroducer.this); } } } result.set(started); if (!started) { finish(true); } } }, getCommandName(), getCommandName()); return result.get(); } protected void onRenameTemplateStarted() {} protected int getCaretOffset() { RangeMarker r; if (myLocalMarker != null) { final PsiReference reference = myExpr != null ? myExpr.getReference() : null; if (reference != null && reference.resolve() == myLocalVariable) { r = myExprMarker; } else { r = myLocalMarker; } } else { r = myExprMarker; } return r != null ? r.getStartOffset() : 0; } protected void updateTitle(@Nullable V variable, String value) { if (variable == null) return; final String variableText = variable.getText(); final PsiElement identifier = variable.getNameIdentifier(); if (identifier != null) { final int startOffsetInParent = identifier.getStartOffsetInParent(); setPreviewText(variableText.substring(0, startOffsetInParent) + value + variableText.substring(startOffsetInParent + identifier.getTextLength())); } else { setPreviewText(variableText.replaceFirst(variable.getName(), value)); } revalidate(); } protected void updateTitle(@Nullable V variable) { if (variable == null) return; setPreviewText(variable.getText()); revalidate(); } protected void revalidate() { myWholePanel.revalidate(); if (myTarget != null) { myBalloon.revalidate(new PositionTracker.Static<Balloon>(myTarget)); } } private boolean myShouldSelect = true; @Override protected boolean shouldSelectAll() { return myShouldSelect; } public void restartInplaceIntroduceTemplate() { Runnable 
restartTemplateRunnable = new Runnable() { @Override public void run() { final TemplateState templateState = TemplateManagerImpl.getTemplateState(myEditor); if (templateState != null) { myEditor.putUserData(INTRODUCE_RESTART, true); try { final TextRange range = templateState.getCurrentVariableRange(); if (range != null) { final TextResult inputText = templateState.getVariableValue(PRIMARY_VARIABLE_NAME); final String inputName = inputText != null ? inputText.getText() : null; final V variable = getVariable(); if (inputName == null || variable == null || !isIdentifier(inputName, variable.getLanguage())) { final String[] names = suggestNames(isReplaceAllOccurrences(), getLocalVariable()); ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { myEditor.getDocument().replaceString(range.getStartOffset(), range.getEndOffset(), names[0]); } }); } } templateState.gotoEnd(true); try { myShouldSelect = false; startInplaceIntroduceTemplate(); } finally { myShouldSelect = true; } } finally { myEditor.putUserData(INTRODUCE_RESTART, false); } } updateTitle(getVariable()); } }; CommandProcessor.getInstance().executeCommand(myProject, restartTemplateRunnable, getCommandName(), getCommandName()); } @Override protected void restoreSelection() { if (!shouldSelectAll()) { myEditor.getSelectionModel().removeSelection(); } } public String getInputName() { return myInsertedName; } @Override public void finish(boolean success) { myFinished = true; final TemplateState templateState = TemplateManagerImpl.getTemplateState(myEditor); if (templateState != null) { myEditor.putUserData(ACTIVE_INTRODUCE, null); } if (myDocumentAdapter != null) { myEditor.getDocument().removeDocumentListener(myDocumentAdapter); } if (myBalloon == null) { releaseIfNotRestart(); } super.finish(success); if (success) { PsiDocumentManager.getInstance(myProject).commitAllDocuments(); final V variable = getVariable(); if (variable == null) { return; } restoreState(variable); 
} } @Override protected void releaseResources() { super.releaseResources(); if (myPreview == null) return; EditorFactory.getInstance().releaseEditor(myPreview); myPreview = null; } @Override protected void addReferenceAtCaret(Collection<PsiReference> refs) { final V variable = getLocalVariable(); if (variable != null) { for (PsiReference reference : ReferencesSearch.search(variable)) { refs.add(reference); } } else { refs.clear(); } } @Override protected void collectAdditionalElementsToRename(List<Pair<PsiElement, TextRange>> stringUsages) { if (isReplaceAllOccurrences()) { for (E expression : getOccurrences()) { LOG.assertTrue(expression.isValid(), expression.getText()); stringUsages.add(Pair.<PsiElement, TextRange>create(expression, new TextRange(0, expression.getTextLength()))); } } else if (getExpr() != null) { correctExpression(); final E expr = getExpr(); LOG.assertTrue(expr.isValid(), expr.getText()); stringUsages.add(Pair.<PsiElement, TextRange>create(expr, new TextRange(0, expr.getTextLength()))); } final V localVariable = getLocalVariable(); if (localVariable != null) { final PsiElement nameIdentifier = localVariable.getNameIdentifier(); if (nameIdentifier != null) { int length = nameIdentifier.getTextLength(); stringUsages.add(Pair.<PsiElement, TextRange>create(nameIdentifier, new TextRange(0, length))); } } } protected void correctExpression() {} @Override protected void addHighlights(@NotNull Map<TextRange, TextAttributes> ranges, @NotNull Editor editor, @NotNull Collection<RangeHighlighter> highlighters, @NotNull HighlightManager highlightManager) { final TextAttributes attributes = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES); final V variable = getVariable(); if (variable != null) { final String name = variable.getName(); LOG.assertTrue(name != null, variable); final int variableNameLength = name.length(); if (isReplaceAllOccurrences()) { for (RangeMarker marker : getOccurrenceMarkers()) { 
final int startOffset = marker.getStartOffset(); highlightManager.addOccurrenceHighlight(editor, startOffset, startOffset + variableNameLength, attributes, 0, highlighters, null); } } else if (getExpr() != null) { final int startOffset = getExprMarker().getStartOffset(); highlightManager.addOccurrenceHighlight(editor, startOffset, startOffset + variableNameLength, attributes, 0, highlighters, null); } } for (RangeHighlighter highlighter : highlighters) { highlighter.setGreedyToLeft(true); highlighter.setGreedyToRight(true); } } protected void restoreState(@NotNull final V psiField) { if (!ReadonlyStatusHandler.ensureDocumentWritable(myProject, InjectedLanguageUtil.getTopLevelEditor(myEditor).getDocument())) return; ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { final PsiFile containingFile = psiField.getContainingFile(); final RangeMarker exprMarker = getExprMarker(); if (exprMarker != null) { myExpr = restoreExpression(containingFile, psiField, exprMarker, myExprText); } if (myLocalMarker != null) { final PsiElement refVariableElement = containingFile.findElementAt(myLocalMarker.getStartOffset()); if (refVariableElement != null) { final PsiElement parent = refVariableElement.getParent(); if (parent instanceof PsiNamedElement) { ((PsiNamedElement)parent).setName(myLocalName); } } final V localVariable = getLocalVariable(); if (localVariable != null && localVariable.isPhysical()) { myLocalVariable = localVariable; final PsiElement nameIdentifier = localVariable.getNameIdentifier(); if (nameIdentifier != null) { myLocalMarker = createMarker(nameIdentifier); } } } final List<RangeMarker> occurrenceMarkers = getOccurrenceMarkers(); for (int i = 0, occurrenceMarkersSize = occurrenceMarkers.size(); i < occurrenceMarkersSize; i++) { RangeMarker marker = occurrenceMarkers.get(i); if (getExprMarker() != null && marker.getStartOffset() == getExprMarker().getStartOffset() && myExpr != null) { myOccurrences[i] = myExpr; 
continue; } final E psiExpression = restoreExpression(containingFile, psiField, marker, getLocalVariable() != null ? myLocalName : myExprText); if (psiExpression != null) { myOccurrences[i] = psiExpression; } } if (myExpr != null && myExpr.isPhysical()) { myExprMarker = createMarker(myExpr); } myOccurrenceMarkers = null; deleteTemplateField(psiField); } }); } protected void deleteTemplateField(V psiField) { if (psiField.isValid()) { psiField.delete(); } } @Override protected boolean performRefactoring() { if (!ensureValid()) return false; WriteCommandAction.runWriteCommandAction(myProject, getCommandName(), getCommandName(), new Runnable() { @Override public void run() { final String refactoringId = getRefactoringId(); if (refactoringId != null) { final RefactoringEventData beforeData = new RefactoringEventData(); final V localVariable = getLocalVariable(); if (localVariable != null) { beforeData.addElement(localVariable); } else { final E beforeExpr = getBeforeExpr(); if (beforeExpr != null) { beforeData.addElement(beforeExpr); } } myProject.getMessageBus() .syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC).refactoringStarted(refactoringId, beforeData); } performIntroduce(); } }); V variable = getVariable(); if (variable != null) { saveSettings(variable); } return false; } protected E getBeforeExpr() { return getExpr(); } protected boolean ensureValid() { final String newName = getInputName(); if (getLocalVariable() == null && myExpr == null || newName == null || getLocalVariable() != null && !getLocalVariable().isValid() || myExpr != null && !myExpr.isValid()) { super.moveOffsetAfter(false); return false; } if (getLocalVariable() != null) { new WriteCommandAction(myProject, getCommandName(), getCommandName()) { @Override protected void run(@NotNull Result result) throws Throwable { getLocalVariable().setName(myLocalName); } }.execute(); } if (!isIdentifier(newName, myExpr != null ? 
myExpr.getLanguage() : getLocalVariable().getLanguage())) return false; return true; } @Override protected void moveOffsetAfter(boolean success) { if (getLocalVariable() != null && getLocalVariable().isValid()) { myEditor.getCaretModel().moveToOffset(getLocalVariable().getTextOffset()); myEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); } else if (getExprMarker() != null) { final RangeMarker exprMarker = getExprMarker(); if (exprMarker.isValid()) { myEditor.getCaretModel().moveToOffset(exprMarker.getStartOffset()); myEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); } } super.moveOffsetAfter(success); if (myLocalMarker != null && !isRestart()) { myLocalMarker.dispose(); } if (success) { performPostIntroduceTasks(); final String refactoringId = getRefactoringId(); if (refactoringId != null) { final RefactoringEventData afterData = new RefactoringEventData(); afterData.addElement(getVariable()); myProject.getMessageBus() .syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC).refactoringDone(refactoringId, afterData); } } } protected String getRefactoringId() { return null; } @Override protected boolean startsOnTheSameElement(RefactoringActionHandler handler, PsiElement element) { return super.startsOnTheSameElement(handler, element) || getLocalVariable() == element; } public V getLocalVariable() { if (myLocalVariable != null && myLocalVariable.isValid()) { return myLocalVariable; } if (myLocalMarker != null) { V variable = getVariable(); PsiFile containingFile; if (variable != null) { containingFile = variable.getContainingFile(); } else { containingFile = PsiDocumentManager.getInstance(myProject).getPsiFile(myEditor.getDocument()); } PsiNameIdentifierOwner identifierOwner = PsiTreeUtil.getParentOfType(containingFile.findElementAt(myLocalMarker.getStartOffset()), PsiNameIdentifierOwner.class, false); return identifierOwner != null && identifierOwner.getClass() == myLocalVariable.getClass() ? 
(V)identifierOwner : null;
    }
    return myLocalVariable;
  }

  /** Ends any live inplace template session in {@code editor} by jumping it to its end state. */
  public void stopIntroduce(Editor editor) {
    final TemplateState templateState = TemplateManagerImpl.getTemplateState(editor);
    if (templateState != null) {
      final Runnable runnable = new Runnable() {
        @Override
        public void run() {
          templateState.gotoEnd(true);
        }
      };
      // Run inside a command so the template shutdown is a single undoable unit.
      CommandProcessor.getInstance().executeCommand(myProject, runnable, getCommandName(), getCommandName());
    }
  }

  @Override
  protected void navigateToAlreadyStarted(Document oldDocument, int exitCode) {
    // Finish this session before the base class navigates to the already-started one.
    finish(true);
    super.navigateToAlreadyStarted(oldDocument, exitCode);
  }

  @Override
  protected void showBalloon() {
    // Suppress the balloon once the refactoring has finished.
    if (myFinished) return;
    super.showBalloon();
  }

  /**
   * Returns true when this introducer session was started on the given
   * expression or local variable (compared by range-marker start offset),
   * or when the session is a restart.
   */
  public boolean startsOnTheSameElement(E expr, V localVariable) {
    if (myExprMarker != null && myExprMarker.isValid() && expr != null
        && myExprMarker.getStartOffset() == expr.getTextOffset()) {
      return true;
    }

    if (myLocalMarker != null && myLocalMarker.isValid() && localVariable != null
        && myLocalMarker.getStartOffset() == localVariable.getTextOffset()) {
      return true;
    }
    return isRestart();
  }

  /** Returns the introducer currently active in {@code editor}, or null if none. */
  @Nullable
  public static AbstractInplaceIntroducer getActiveIntroducer(@Nullable Editor editor) {
    if (editor == null) return null;
    return editor.getUserData(ACTIVE_INTRODUCE);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.net.InetSocketAddress; import java.net.URI; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.filecache.TaskDistributedCacheManager; import org.apache.hadoop.filecache.TrackerDistributedCacheManager; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.mapreduce.server.tasktracker.Localizer; import org.apache.hadoop.fs.FSError; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.mapreduce.JobContext; /** Base class that runs a task in a 
separate process. Tasks are run in a * separate process in order to isolate the map/reduce system code from bugs in * user supplied map and reduce functions. */ abstract class TaskRunner extends Thread { public static final Log LOG = LogFactory.getLog(TaskRunner.class); volatile boolean killed = false; private TaskTracker.TaskInProgress tip; private Task t; private Object lock = new Object(); private volatile boolean done = false; private int exitCode = -1; private boolean exitCodeSet = false; private static String SYSTEM_PATH_SEPARATOR = System.getProperty("path.separator"); private TaskTracker tracker; private TaskDistributedCacheManager taskDistributedCacheManager; protected JobConf conf; JvmManager jvmManager; /** * for cleaning up old map outputs */ protected MapOutputFile mapOutputFile; public TaskRunner(TaskTracker.TaskInProgress tip, TaskTracker tracker, JobConf conf) { this.tip = tip; this.t = tip.getTask(); this.tracker = tracker; this.conf = conf; this.mapOutputFile = new MapOutputFile(); this.mapOutputFile.setConf(conf); this.jvmManager = tracker.getJvmManagerInstance(); } public Task getTask() { return t; } public TaskTracker.TaskInProgress getTaskInProgress() { return tip; } public TaskTracker getTracker() { return tracker; } /** Called to assemble this task's input. This method is run in the parent * process before the child is spawned. It should not execute user code, * only system code. */ public boolean prepare() throws IOException { return true; } /** Called when this task's output is no longer needed. * This method is run in the parent process after the child exits. It should * not execute user code, only system code. */ public void close() throws IOException {} /** * Get the java command line options for the child map/reduce tasks. 
* @param jobConf job configuration * @param defaultValue default value * @return the java command line options for child map/reduce tasks * @deprecated Use command line options specific to map or reduce tasks set * via {@link JobConf#MAPRED_MAP_TASK_JAVA_OPTS} or * {@link JobConf#MAPRED_REDUCE_TASK_JAVA_OPTS} */ @Deprecated public String getChildJavaOpts(JobConf jobConf, String defaultValue) { return jobConf.get(JobConf.MAPRED_TASK_JAVA_OPTS, defaultValue); } /** * Get the maximum virtual memory of the child map/reduce tasks. * @param jobConf job configuration * @return the maximum virtual memory of the child task or <code>-1</code> if * none is specified * @deprecated Use limits specific to the map or reduce tasks set via * {@link JobConf#MAPRED_MAP_TASK_ULIMIT} or * {@link JobConf#MAPRED_REDUCE_TASK_ULIMIT} */ @Deprecated public int getChildUlimit(JobConf jobConf) { return jobConf.getInt(JobConf.MAPRED_TASK_ULIMIT, -1); } /** * Get the environment variables for the child map/reduce tasks. * @param jobConf job configuration * @return the environment variables for the child map/reduce tasks or * <code>null</code> if unspecified * @deprecated Use environment variables specific to the map or reduce tasks * set via {@link JobConf#MAPRED_MAP_TASK_ENV} or * {@link JobConf#MAPRED_REDUCE_TASK_ENV} */ public String getChildEnv(JobConf jobConf) { return jobConf.get(JobConf.MAPRED_TASK_ENV); } @Override public final void run() { String errorInfo = "Child Error"; try { //before preparing the job localize //all the archives TaskAttemptID taskid = t.getTaskID(); final LocalDirAllocator lDirAlloc = new LocalDirAllocator("mapred.local.dir"); final File workDir = formWorkDir(lDirAlloc, taskid, t.isTaskCleanupTask(), conf); // We don't create any symlinks yet, so presence/absence of workDir // actually on the file system doesn't matter. 
tip.getUGI().doAs(new PrivilegedExceptionAction<Void>() {
      // Localize distributed-cache files as the job's user so cache setup
      // runs with the task owner's credentials.
      public Void run() throws IOException {
        taskDistributedCacheManager =
            tracker.getTrackerDistributedCacheManager()
                .newTaskDistributedCacheManager(conf);
        taskDistributedCacheManager.setup(lDirAlloc, workDir,
            TaskTracker.getPrivateDistributedCacheDir(conf.getUser()),
            TaskTracker.getPublicDistributedCacheDir());
        return null;
      }
    });

    // Set up the child task's configuration. After this call, no localization
    // of files should happen in the TaskTracker's process space. Any changes to
    // the conf object after this will NOT be reflected to the child.
    setupChildTaskConfiguration(lDirAlloc);

    // prepare() is a subclass hook for assembling task input; a false return
    // aborts the launch quietly (no child JVM is started).
    if (!prepare()) {
      return;
    }

    // Accumulates class paths for child.
    List<String> classPaths =
        getClassPaths(conf, workDir, taskDistributedCacheManager);

    // Cap on the child task's log size, later passed to the JVM via log4j props.
    long logSize = TaskLog.getTaskLogLength(conf);

    // Build exec child JVM args.
    Vector<String> vargs = getVMArgs(taskid, workDir, classPaths, logSize);

    tracker.addToMemoryManager(t.getTaskID(), t.isMapTask(), conf);

    // set memory limit using ulimit if feasible and necessary ...
List<String> setup = getVMSetupCmd(); // Set up the redirection of the task's stdout and stderr streams File[] logFiles = prepareLogFiles(taskid, t.isTaskCleanupTask()); File stdout = logFiles[0]; File stderr = logFiles[1]; tracker.getTaskTrackerInstrumentation().reportTaskLaunch(taskid, stdout, stderr); Map<String, String> env = new HashMap<String, String>(); errorInfo = getVMEnvironment(errorInfo, workDir, conf, env, taskid, logSize); launchJvmAndWait(setup, vargs, stdout, stderr, logSize, workDir, env); tracker.getTaskTrackerInstrumentation().reportTaskEnd(t.getTaskID()); if (exitCodeSet) { if (!killed && exitCode != 0) { if (exitCode == 65) { tracker.getTaskTrackerInstrumentation().taskFailedPing(t.getTaskID()); } throw new IOException("Task process exit with nonzero status of " + exitCode + "."); } } } catch (FSError e) { LOG.fatal("FSError", e); try { tracker.fsError(t.getTaskID(), e.getMessage()); } catch (IOException ie) { LOG.fatal(t.getTaskID()+" reporting FSError", ie); } } catch (Throwable throwable) { LOG.warn(t.getTaskID() + " : " + errorInfo, throwable); Throwable causeThrowable = new Throwable(errorInfo, throwable); ByteArrayOutputStream baos = new ByteArrayOutputStream(); causeThrowable.printStackTrace(new PrintStream(baos)); try { tracker.reportDiagnosticInfo(t.getTaskID(), baos.toString()); } catch (IOException e) { LOG.warn(t.getTaskID()+" Reporting Diagnostics", e); } } finally { try{ if (taskDistributedCacheManager != null) { taskDistributedCacheManager.release(); } }catch(IOException ie){ LOG.warn("Error releasing caches : Cache files might not have been cleaned up"); } // It is safe to call TaskTracker.TaskInProgress.reportTaskFinished with // *false* since the task has either // a) SUCCEEDED - which means commit has been done // b) FAILED - which means we do not need to commit tip.reportTaskFinished(false); } } void launchJvmAndWait(List<String> setup, Vector<String> vargs, File stdout, File stderr, long logSize, File workDir, Map<String, 
String> env) throws InterruptedException { jvmManager.launchJvm(this, jvmManager.constructJvmEnv(setup, vargs, stdout, stderr, logSize, workDir, env, conf)); synchronized (lock) { while (!done) { lock.wait(); } } } /** * Prepare the log files for the task * * @param taskid * @param isCleanup * @return an array of files. The first file is stdout, the second is stderr. * @throws IOException */ File[] prepareLogFiles(TaskAttemptID taskid, boolean isCleanup) throws IOException { File[] logFiles = new File[2]; logFiles[0] = TaskLog.getTaskLogFile(taskid, isCleanup, TaskLog.LogName.STDOUT); logFiles[1] = TaskLog.getTaskLogFile(taskid, isCleanup, TaskLog.LogName.STDERR); File logDir = logFiles[0].getParentFile(); boolean b = logDir.mkdirs(); if (!b) { LOG.warn("mkdirs failed. Ignoring"); } else { Localizer.PermissionsHandler.setPermissions(logDir, Localizer.PermissionsHandler.sevenZeroZero); } return logFiles; } /** * Write the child's configuration to the disk and set it in configuration so * that the child can pick it up from there. * * @param lDirAlloc * @throws IOException */ void setupChildTaskConfiguration(LocalDirAllocator lDirAlloc) throws IOException { Path localTaskFile = lDirAlloc.getLocalPathForWrite(TaskTracker.getTaskConfFile( t.getUser(), t.getJobID().toString(), t.getTaskID().toString(), t .isTaskCleanupTask()), conf); // write the child's task configuration file to the local disk writeLocalTaskFile(localTaskFile.toString(), conf); // Set the final job file in the task. The child needs to know the correct // path to job.xml. So set this path accordingly. 
t.setJobFile(localTaskFile.toString()); } /** * @return */ private List<String> getVMSetupCmd() { String[] ulimitCmd = Shell.getUlimitMemoryCommand(getChildUlimit(conf)); List<String> setup = null; if (ulimitCmd != null) { setup = new ArrayList<String>(); for (String arg : ulimitCmd) { setup.add(arg); } } return setup; } /** * @param taskid * @param workDir * @param classPaths * @param logSize * @return * @throws IOException */ private Vector<String> getVMArgs(TaskAttemptID taskid, File workDir, List<String> classPaths, long logSize) throws IOException { Vector<String> vargs = new Vector<String>(8); File jvm = // use same jvm as parent new File(new File(System.getProperty("java.home"), "bin"), "java"); vargs.add(jvm.toString()); // Add child (task) java-vm options. // // The following symbols if present in mapred.{map|reduce}.child.java.opts // value are replaced: // + @taskid@ is interpolated with value of TaskID. // Other occurrences of @ will not be altered. // // Example with multiple arguments and substitutions, showing // jvm GC logging, and start of a passwordless JVM JMX agent so can // connect with jconsole and the likes to watch child memory, threads // and get thread dumps. // // <property> // <name>mapred.map.child.java.opts</name> // <value>-Xmx 512M -verbose:gc -Xloggc:/tmp/@taskid@.gc \ // -Dcom.sun.management.jmxremote.authenticate=false \ // -Dcom.sun.management.jmxremote.ssl=false \ // </value> // </property> // // <property> // <name>mapred.reduce.child.java.opts</name> // <value>-Xmx 1024M -verbose:gc -Xloggc:/tmp/@taskid@.gc \ // -Dcom.sun.management.jmxremote.authenticate=false \ // -Dcom.sun.management.jmxremote.ssl=false \ // </value> // </property> // String javaOpts = getChildJavaOpts(conf, JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS); javaOpts = javaOpts.replace("@taskid@", taskid.toString()); String [] javaOptsSplit = javaOpts.split(" "); // Add java.library.path; necessary for loading native libraries. // // 1. 
To support native-hadoop library i.e. libhadoop.so, we add the // parent processes' java.library.path to the child. // 2. We also add the 'cwd' of the task to it's java.library.path to help // users distribute native libraries via the DistributedCache. // 3. The user can also specify extra paths to be added to the // java.library.path via mapred.{map|reduce}.child.java.opts. // String libraryPath = System.getProperty("java.library.path"); if (libraryPath == null) { libraryPath = workDir.getAbsolutePath(); } else { libraryPath += SYSTEM_PATH_SEPARATOR + workDir; } boolean hasUserLDPath = false; for(int i=0; i<javaOptsSplit.length ;i++) { if(javaOptsSplit[i].startsWith("-Djava.library.path=")) { javaOptsSplit[i] += SYSTEM_PATH_SEPARATOR + libraryPath; hasUserLDPath = true; break; } } if(!hasUserLDPath) { vargs.add("-Djava.library.path=" + libraryPath); } for (int i = 0; i < javaOptsSplit.length; i++) { vargs.add(javaOptsSplit[i]); } Path childTmpDir = createChildTmpDir(workDir, conf); vargs.add("-Djava.io.tmpdir=" + childTmpDir); // Add classpath. 
vargs.add("-classpath"); String classPath = StringUtils.join(SYSTEM_PATH_SEPARATOR, classPaths); vargs.add(classPath); // Setup the log4j prop setupLog4jProperties(vargs, taskid, logSize); if (conf.getProfileEnabled()) { if (conf.getProfileTaskRange(t.isMapTask() ).isIncluded(t.getPartition())) { File prof = TaskLog.getTaskLogFile(taskid, t.isTaskCleanupTask(), TaskLog.LogName.PROFILE); vargs.add(String.format(conf.getProfileParams(), prof.toString())); } } // Add main class and its arguments vargs.add(Child.class.getName()); // main of Child // pass umbilical address InetSocketAddress address = tracker.getTaskTrackerReportAddress(); vargs.add(address.getAddress().getHostAddress()); vargs.add(Integer.toString(address.getPort())); vargs.add(taskid.toString()); // pass task identifier // pass task log location vargs.add(TaskLog.getAttemptDir(taskid, t.isTaskCleanupTask()).toString()); return vargs; } private void setupLog4jProperties(Vector<String> vargs, TaskAttemptID taskid, long logSize) { vargs.add("-Dhadoop.log.dir=" + new File(System.getProperty("hadoop.log.dir")).getAbsolutePath()); vargs.add("-Dhadoop.root.logger=INFO,TLA"); vargs.add("-Dhadoop.tasklog.taskid=" + taskid); vargs.add("-Dhadoop.tasklog.iscleanup=" + t.isTaskCleanupTask()); vargs.add("-Dhadoop.tasklog.totalLogFileSize=" + logSize); } /** * @param taskid * @param workDir * @return * @throws IOException */ static Path createChildTmpDir(File workDir, JobConf conf) throws IOException { // add java.io.tmpdir given by mapred.child.tmp String tmp = conf.get("mapred.child.tmp", "./tmp"); Path tmpDir = new Path(tmp); // if temp directory path is not absolute, prepend it with workDir. 
if (!tmpDir.isAbsolute()) { tmpDir = new Path(workDir.toString(), tmp); FileSystem localFs = FileSystem.getLocal(conf); if (!localFs.mkdirs(tmpDir) && !localFs.getFileStatus(tmpDir).isDir()) { throw new IOException("Mkdirs failed to create " + tmpDir.toString()); } } return tmpDir; } /** */ private static List<String> getClassPaths(JobConf conf, File workDir, TaskDistributedCacheManager taskDistributedCacheManager) throws IOException { // Accumulates class paths for child. List<String> classPaths = new ArrayList<String>(); // start with same classpath as parent process appendSystemClasspaths(classPaths); // include the user specified classpath appendJobJarClasspaths(conf.getJar(), classPaths); // Distributed cache paths classPaths.addAll(taskDistributedCacheManager.getClassPaths()); // Include the working dir too classPaths.add(workDir.toString()); return classPaths; } /** * @param errorInfo * @param workDir * @param env * @return * @throws Throwable */ private String getVMEnvironment(String errorInfo, File workDir, JobConf conf, Map<String, String> env, TaskAttemptID taskid, long logSize) throws Throwable { StringBuffer ldLibraryPath = new StringBuffer(); ldLibraryPath.append(workDir.toString()); String oldLdLibraryPath = null; oldLdLibraryPath = System.getenv("LD_LIBRARY_PATH"); if (oldLdLibraryPath != null) { ldLibraryPath.append(SYSTEM_PATH_SEPARATOR); ldLibraryPath.append(oldLdLibraryPath); } env.put("LD_LIBRARY_PATH", ldLibraryPath.toString()); String jobTokenFile = conf.get(TokenCache.JOB_TOKENS_FILENAME); LOG.debug("putting jobToken file name into environment fn=" + jobTokenFile); env.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION, jobTokenFile); // for the child of task jvm, set hadoop.root.logger env.put("HADOOP_ROOT_LOGGER","INFO,TLA"); String hadoopClientOpts = System.getenv("HADOOP_CLIENT_OPTS"); if (hadoopClientOpts == null) { hadoopClientOpts = ""; } else { hadoopClientOpts = hadoopClientOpts + " "; } hadoopClientOpts = hadoopClientOpts + 
"-Dhadoop.tasklog.taskid=" + taskid + " -Dhadoop.tasklog.iscleanup=" + t.isTaskCleanupTask() + " -Dhadoop.tasklog.totalLogFileSize=" + logSize; env.put("HADOOP_CLIENT_OPTS", "\"" + hadoopClientOpts + "\""); // add the env variables passed by the user String mapredChildEnv = getChildEnv(conf); if (mapredChildEnv != null && mapredChildEnv.length() > 0) { String childEnvs[] = mapredChildEnv.split(","); for (String cEnv : childEnvs) { try { String[] parts = cEnv.split("="); // split on '=' String value = env.get(parts[0]); if (value != null) { // replace $env with the child's env constructed by tt's // example LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp value = parts[1].replace("$" + parts[0], value); } else { // this key is not configured by the tt for the child .. get it // from the tt's env // example PATH=$PATH:/tmp value = System.getenv(parts[0]); if (value != null) { // the env key is present in the tt's env value = parts[1].replace("$" + parts[0], value); } else { // the env key is note present anywhere .. simply set it // example X=$X:/tmp or X=/tmp value = parts[1].replace("$" + parts[0], ""); } } env.put(parts[0], value); } catch (Throwable t) { // set the error msg errorInfo = "Invalid User environment settings : " + mapredChildEnv + ". Failed to parse user-passed environment param." + " Expecting : env1=value1,env2=value2..."; LOG.warn(errorInfo); throw t; } } } return errorInfo; } /** * Write the task specific job-configuration file. * * @param localFs * @throws IOException */ private static void writeLocalTaskFile(String jobFile, JobConf conf) throws IOException { Path localTaskFile = new Path(jobFile); FileSystem localFs = FileSystem.getLocal(conf); localFs.delete(localTaskFile, true); OutputStream out = localFs.create(localTaskFile); try { conf.writeXml(out); } finally { out.close(); } } /** * Prepare the mapred.local.dir for the child. The child is sand-boxed now. 
* Whenever it uses LocalDirAllocator from now on inside the child, it will * only see files inside the attempt-directory. This is done in the Child's * process space. */ static void setupChildMapredLocalDirs(Task t, JobConf conf) { String[] localDirs = conf.getStrings(JobConf.MAPRED_LOCAL_DIR_PROPERTY); String jobId = t.getJobID().toString(); String taskId = t.getTaskID().toString(); boolean isCleanup = t.isTaskCleanupTask(); String user = t.getUser(); StringBuffer childMapredLocalDir = new StringBuffer(localDirs[0] + Path.SEPARATOR + TaskTracker.getLocalTaskDir(user, jobId, taskId, isCleanup)); for (int i = 1; i < localDirs.length; i++) { childMapredLocalDir.append("," + localDirs[i] + Path.SEPARATOR + TaskTracker.getLocalTaskDir(user, jobId, taskId, isCleanup)); } LOG.debug("mapred.local.dir for child : " + childMapredLocalDir); conf.set("mapred.local.dir", childMapredLocalDir.toString()); } /** Creates the working directory pathname for a task attempt. */ static File formWorkDir(LocalDirAllocator lDirAlloc, TaskAttemptID task, boolean isCleanup, JobConf conf) throws IOException { Path workDir = lDirAlloc.getLocalPathToRead(TaskTracker.getTaskWorkDir( conf.getUser(), task.getJobID().toString(), task.toString(), isCleanup), conf); return new File(workDir.toString()); } private static void appendSystemClasspaths(List<String> classPaths) { for (String c : System.getProperty("java.class.path").split( SYSTEM_PATH_SEPARATOR)) { classPaths.add(c); } } /** * Given a "jobJar" (typically retrieved via {@link Configuration.getJar()}), * appends classpath entries for it, as well as its lib/ and classes/ * subdirectories. 
 *
 * @param jobJar Job jar from configuration
 * @param classPaths Accumulator for class paths
 */
static void appendJobJarClasspaths(String jobJar, List<String> classPaths) {
  if (jobJar == null) {
    return;
  }
  // The job jar was localized into the job cache directory; add its lib/
  // jars, its classes/ directory, and the cache directory itself.
  File jobCacheDir = new File(new Path(jobJar).getParent().toString());
  File[] libs = new File(jobCacheDir, "lib").listFiles();
  if (libs != null) {
    for (File l : libs) {
      classPaths.add(l.toString());
    }
  }
  classPaths.add(new File(jobCacheDir, "classes").toString());
  classPaths.add(jobCacheDir.toString());
}

/**
 * Creates distributed cache symlinks and tmp directory, as appropriate.
 * Note that when we setup the distributed
 * cache, we didn't create the symlinks. This is done on a per task basis
 * by the currently executing task.
 *
 * @param conf The job configuration.
 * @param workDir Working directory, which is completely deleted.
 */
public static void setupWorkDir(JobConf conf, File workDir)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Fully deleting contents of " + workDir);
  }

  /** delete only the contents of workDir leaving the directory empty. We
   * can't delete the workDir as it is the current working directory.
*/ FileUtil.fullyDeleteContents(workDir); if (DistributedCache.getSymlink(conf)) { URI[] archives = DistributedCache.getCacheArchives(conf); URI[] files = DistributedCache.getCacheFiles(conf); Path[] localArchives = DistributedCache.getLocalCacheArchives(conf); Path[] localFiles = DistributedCache.getLocalCacheFiles(conf); if (archives != null) { for (int i = 0; i < archives.length; i++) { String link = archives[i].getFragment(); String target = localArchives[i].toString(); symlink(workDir, target, link); } } if (files != null) { for (int i = 0; i < files.length; i++) { String link = files[i].getFragment(); String target = localFiles[i].toString(); symlink(workDir, target, link); } } } if (conf.getJar() != null) { File jobCacheDir = new File( new Path(conf.getJar()).getParent().toString()); // create symlinks for all the files in job cache dir in current // workingdir for streaming try{ TrackerDistributedCacheManager.createAllSymlink(conf, jobCacheDir, workDir); } catch(IOException ie){ // Do not exit even if symlinks have not been created. LOG.warn(StringUtils.stringifyException(ie)); } } createChildTmpDir(workDir, conf); } /** * Utility method for creating a symlink and warning on errors. * * If link is null, does nothing. */ private static void symlink(File workDir, String target, String link) throws IOException { if (link != null) { link = workDir.toString() + Path.SEPARATOR + link; File flink = new File(link); if (!flink.exists()) { LOG.info(String.format("Creating symlink: %s <- %s", target, link)); if (0 != FileUtil.symLink(target, link)) { LOG.warn(String.format("Failed to create symlink: %s <- %s", target, link)); } } } } /** * Kill the child process */ public void kill() { killed = true; jvmManager.taskKilled(this); signalDone(); } public void signalDone() { synchronized (lock) { done = true; lock.notify(); } } public void setExitCode(int exitCode) { this.exitCodeSet = true; this.exitCode = exitCode; } }
/**************************************************************************** * Copyright (c) 2011, Monnet Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Monnet Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE MONNET PROJECT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
********************************************************************************/ package eu.monnetproject.lemon.impl; import eu.monnetproject.lemon.LemonModel; import eu.monnetproject.lemon.LinguisticOntology; import eu.monnetproject.lemon.impl.io.ReaderAccepter; import eu.monnetproject.lemon.impl.io.UnactualizedAccepter; import eu.monnetproject.lemon.model.LexicalEntry; import java.net.URI; import java.util.HashMap; import java.util.Map; /** * * @author John McCrae */ public class AccepterFactory { final HashMap<Object, ReaderAccepter> accepters; final LemonModelImpl model; final LinguisticOntology lingOnto; LexicalEntryImpl firstEntry = null; final boolean ignoreErrors; public AccepterFactory(HashMap<Object, ReaderAccepter> accepters, LinguisticOntology lingOnto, LemonModelImpl model, boolean ignoreErrors) { this.accepters = accepters; this.lingOnto = lingOnto; this.model = model; this.ignoreErrors = ignoreErrors; } private void addAccepter(Object value, ReaderAccepter accept) { if (accept != null) { if (!accepters.containsKey(value)) { accepters.put(value, accept); } else if (accepters.get(value) instanceof UnactualizedAccepter && !(accept instanceof UnactualizedAccepter)) { final Map<Object, ReaderAccepter> actualizedAs = ((UnactualizedAccepter) accepters.get(value)).actualizedAs(accept, lingOnto, this); for (Map.Entry<Object, ReaderAccepter> entry : actualizedAs.entrySet()) { addAccepter(entry.getKey(), entry.getValue()); } accepters.put(value, accept); } else { accepters.get(value).merge(accept, lingOnto, this); } } } private <E extends ReaderAccepter> E get(Class<E> clazz, URI uri) { if (accepters.containsKey(uri)) { if (clazz.isInstance(accepters.get(uri))) { return (E) accepters.get(uri); } else if (accepters.get(uri) instanceof UnactualizedAccepter) { final E accepter = make(clazz, uri); addAccepter(uri, accepter); return accepter; } else { if(!ignoreErrors) { throw new IllegalStateException("Model already contains object of type " + 
accepters.get(uri).getClass().getName() + " for URI: " + uri + " but was attempted to create as " + clazz.getName()); } else { System.err.println("Model already contains object of type " + accepters.get(uri).getClass().getName() + " for URI: " + uri + " but was attempted to create as " + clazz.getName()); return make(clazz, uri); } } } else { return make(clazz, uri); } } private <E extends ReaderAccepter> E get(Class<E> clazz, String bNode) { if (accepters.containsKey(bNode)) { if (clazz.isInstance(accepters.get(bNode))) { return (E) accepters.get(bNode); } else if (accepters.get(bNode) instanceof UnactualizedAccepter) { final E accepter = make(clazz, bNode); addAccepter(bNode, accepter); return accepter; } else { throw new IllegalStateException("Model already contains object of type " + accepters.get(bNode).getClass().getName() + " for BNode: " + bNode + " but was attempted to create as " + clazz.getName()); } } else { return make(clazz, bNode); } } public <E extends ReaderAccepter> E make(Class<E> clazz, URI uri) { if (clazz.equals(ArgumentImpl.class)) { return (E) new ArgumentImpl(uri,model); } else if (clazz.equals(ComponentImpl.class)) { return (E) new ComponentImpl(uri,model); } else if (clazz.equals(ConditionImpl.class)) { return (E) new ConditionImpl(uri,model); } else if (clazz.equals(ConstituentImpl.class)) { return (E) new ConstituentImpl(uri,model); } else if (clazz.equals(ContextImpl.class)) { return (E) new ContextImpl(uri,model); } else if (clazz.equals(DefinitionImpl.class)) { return (E) new DefinitionImpl(uri,model); } else if (clazz.equals(ExampleImpl.class)) { return (E) new ExampleImpl(uri,model); } else if (clazz.equals(FormImpl.class)) { return (E) new FormImpl(uri,model); } else if (clazz.equals(FrameImpl.class)) { return (E) new FrameImpl(uri,model); } else if (clazz.equals(LexicalEntryImpl.class)) { final LexicalEntryImpl name = new LexicalEntryImpl(uri,model); if(firstEntry == null) { firstEntry = (LexicalEntryImpl)name; } return (E)name; 
} else if (clazz.equals(LexicalSenseImpl.class)) { return (E) new LexicalSenseImpl(uri,model); } else if (clazz.equals(MorphPatternImpl.class)) { return (E) new MorphPatternImpl(uri,model); } else if (clazz.equals(MorphTransformImpl.class)) { return (E) new MorphTransformImpl(uri,model); } else if (clazz.equals(NodeImpl.class)) { return (E) new NodeImpl(uri,model); } else if (clazz.equals(PartImpl.class)) { return (E) new PartImpl(uri,model); } else if (clazz.equals(PhraseImpl.class)) { return (E) new PhraseImpl(uri,model); } else if (clazz.equals(PrototypeImpl.class)) { return (E) new PrototypeImpl(uri,model); } else if (clazz.equals(TopicImpl.class)) { return (E) new TopicImpl(uri,model); } else if (clazz.equals(WordImpl.class)) { return (E) new WordImpl(uri,model); } else { //return null; throw new RuntimeException("Unknown type"); } } public <E extends ReaderAccepter> E make(Class<E> clazz, String bNode) { if (clazz.equals(ArgumentImpl.class)) { return (E) new ArgumentImpl(bNode,model); } else if (clazz.equals(ComponentImpl.class)) { return (E) new ComponentImpl(bNode,model); } else if (clazz.equals(ConditionImpl.class)) { return (E) new ConditionImpl(bNode,model); } else if (clazz.equals(ConstituentImpl.class)) { return (E) new ConstituentImpl(bNode,model); } else if (clazz.equals(ContextImpl.class)) { return (E) new ContextImpl(bNode,model); } else if (clazz.equals(DefinitionImpl.class)) { return (E) new DefinitionImpl(bNode,model); } else if (clazz.equals(ExampleImpl.class)) { return (E) new ExampleImpl(bNode,model); } else if (clazz.equals(FormImpl.class)) { return (E) new FormImpl(bNode,model); } else if (clazz.equals(FrameImpl.class)) { return (E) new FrameImpl(bNode,model); } else if (clazz.equals(LexicalEntryImpl.class)) { return (E) new LexicalEntryImpl(bNode,model); } else if (clazz.equals(LexicalSenseImpl.class)) { return (E) new LexicalSenseImpl(bNode,model); } else if (clazz.equals(MorphPatternImpl.class)) { final MorphPatternImpl pattern = new 
MorphPatternImpl(bNode,model); model.addPattern(pattern); return (E) pattern; } else if (clazz.equals(MorphTransformImpl.class)) { return (E) new MorphTransformImpl(bNode,model); } else if (clazz.equals(NodeImpl.class)) { return (E) new NodeImpl(bNode,model); } else if (clazz.equals(PartImpl.class)) { return (E) new PartImpl(bNode,model); } else if (clazz.equals(PhraseImpl.class)) { return (E) new PhraseImpl(bNode,model); } else if (clazz.equals(PrototypeImpl.class)) { return (E) new PrototypeImpl(bNode,model); } else if (clazz.equals(TopicImpl.class)) { return (E) new TopicImpl(bNode,model); } else if (clazz.equals(WordImpl.class)) { return (E) new WordImpl(bNode,model); } else { //return null; throw new RuntimeException("Unknown type"); } } public ArgumentImpl getArgumentImpl(URI uri) { return get(ArgumentImpl.class, uri); } public ArgumentImpl getArgumentImpl(String bNode) { return get(ArgumentImpl.class, bNode); } public ComponentImpl getComponentImpl(URI uri) { return get(ComponentImpl.class, uri); } public ComponentImpl getComponentImpl(String bNode) { return get(ComponentImpl.class, bNode); } public ConditionImpl getConditionImpl(URI uri) { return get(ConditionImpl.class, uri); } public ConditionImpl getConditionImpl(String bNode) { return get(ConditionImpl.class, bNode); } public ConstituentImpl getConstituentImpl(URI uri) { return get(ConstituentImpl.class, uri); } public ConstituentImpl getConstituentImpl(String bNode) { return get(ConstituentImpl.class, bNode); } public ContextImpl getContextImpl(URI uri) { return get(ContextImpl.class, uri); } public ContextImpl getContextImpl(String bNode) { return get(ContextImpl.class, bNode); } public DefinitionImpl getDefinitionImpl(URI uri) { return get(DefinitionImpl.class, uri); } public DefinitionImpl getDefinitionImpl(String bNode) { return get(DefinitionImpl.class, bNode); } public ExampleImpl getExampleImpl(URI uri) { return get(ExampleImpl.class, uri); } public ExampleImpl getExampleImpl(String bNode) { 
return get(ExampleImpl.class, bNode); } public FormImpl getFormImpl(URI uri) { return get(FormImpl.class, uri); } public FormImpl getFormImpl(String bNode) { return get(FormImpl.class, bNode); } public FrameImpl getFrameImpl(URI uri) { return get(FrameImpl.class, uri); } public FrameImpl getFrameImpl(String bNode) { return get(FrameImpl.class, bNode); } public LexicalEntryImpl getLexicalEntryImpl(URI uri) { return get(LexicalEntryImpl.class, uri); } public LexicalEntryImpl getLexicalEntryImpl(String bNode) { return get(LexicalEntryImpl.class, bNode); } public LexicalSenseImpl getLexicalSenseImpl(URI uri) { return get(LexicalSenseImpl.class, uri); } public LexicalSenseImpl getLexicalSenseImpl(String bNode) { return get(LexicalSenseImpl.class, bNode); } public MorphPatternImpl getMorphPatternImpl(URI uri) { if (accepters.containsKey(uri)) { if(accepters.get(uri) instanceof LexiconImpl) { return (MorphPatternImpl) accepters.get(uri); } else { final MorphPatternImpl actual = new MorphPatternImpl(uri,model); addAccepter(uri, actual); return actual; } } else { final MorphPatternImpl patternImpl = new MorphPatternImpl(uri, model); model.addPattern(patternImpl); return patternImpl; } } public MorphPatternImpl getMorphPatternImpl(String bNode) { if (accepters.containsKey(bNode)) { return (MorphPatternImpl) accepters.get(bNode); } else { final MorphPatternImpl MorphPatternImpl = new MorphPatternImpl(bNode, model); model.addPattern(MorphPatternImpl); return MorphPatternImpl; } } public MorphTransformImpl getMorphTransformImpl(URI uri) { return get(MorphTransformImpl.class, uri); } public MorphTransformImpl getMorphTransformImpl(String bNode) { return get(MorphTransformImpl.class, bNode); } public NodeImpl getNodeImpl(URI uri) { return get(NodeImpl.class, uri); } public NodeImpl getNodeImpl(String bNode) { return get(NodeImpl.class, bNode); } public PartImpl getPartImpl(URI uri) { return get(PartImpl.class, uri); } public PartImpl getPartImpl(String bNode) { return 
get(PartImpl.class, bNode); } public PhraseImpl getPhraseImpl(URI uri) { return get(PhraseImpl.class, uri); } public PhraseImpl getPhraseImpl(String bNode) { return get(PhraseImpl.class, bNode); } public PrototypeImpl getPrototypeImpl(URI uri) { return get(PrototypeImpl.class, uri); } public PrototypeImpl getPrototypeImpl(String bNode) { return get(PrototypeImpl.class, bNode); } public TopicImpl getTopicImpl(URI uri) { return get(TopicImpl.class, uri); } public TopicImpl getTopicImpl(String bNode) { return get(TopicImpl.class, bNode); } public WordImpl getWordImpl(URI uri) { return get(WordImpl.class, uri); } public WordImpl getWordImpl(String bNode) { return get(WordImpl.class, bNode); } public LexiconImpl getLexiconImpl(URI uri) { if (accepters.containsKey(uri)) { if(accepters.get(uri) instanceof LexiconImpl) { return (LexiconImpl) accepters.get(uri); } else { final LexiconImpl actual = new LexiconImpl(uri,model); addAccepter(uri, actual); return actual; } } else { final LexiconImpl lexiconImpl = new LexiconImpl(uri, model); model.addLexicon(lexiconImpl); return lexiconImpl; } } public LexiconImpl getLexiconImpl(String bNode) { if (accepters.containsKey(bNode)) { if(accepters.get(bNode) instanceof LexiconImpl) { return (LexiconImpl) accepters.get(bNode); } else { final LexiconImpl actual = new LexiconImpl(bNode,model); model.addLexicon(actual); addAccepter(bNode, actual); return actual; } } else { final LexiconImpl lexiconImpl = new LexiconImpl(bNode, model); model.addLexicon(lexiconImpl); return lexiconImpl; } } public LemonModel getModel() { return model; } public LexicalEntry getEntry() { return firstEntry; } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Describes the launch specification for a Scheduled Instance.
 * <p>
 * If you are launching the Scheduled Instance in EC2-VPC, you must specify the ID of the subnet. You can specify the
 * subnet using either <code>SubnetId</code> or <code>NetworkInterface</code>.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ScheduledInstancesLaunchSpecification"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ScheduledInstancesLaunchSpecification implements Serializable, Cloneable {

    /** The ID of the Amazon Machine Image (AMI). */
    private String imageId;
    /** The name of the key pair. */
    private String keyName;
    /** The IDs of one or more security groups. */
    private com.amazonaws.internal.SdkInternalList<String> securityGroupIds;
    /** The base64-encoded MIME user data. */
    private String userData;
    /** The placement information. */
    private ScheduledInstancesPlacement placement;
    /** The ID of the kernel. */
    private String kernelId;
    /** The instance type. */
    private String instanceType;
    /** The ID of the RAM disk. */
    private String ramdiskId;
    /** One or more block device mapping entries. */
    private com.amazonaws.internal.SdkInternalList<ScheduledInstancesBlockDeviceMapping> blockDeviceMappings;
    /** Enable or disable monitoring for the instances. */
    private ScheduledInstancesMonitoring monitoring;
    /** The ID of the subnet in which to launch the instances. */
    private String subnetId;
    /** One or more network interfaces. */
    private com.amazonaws.internal.SdkInternalList<ScheduledInstancesNetworkInterface> networkInterfaces;
    /** The IAM instance profile. */
    private ScheduledInstancesIamInstanceProfile iamInstanceProfile;
    /**
     * Indicates whether the instances are optimized for EBS I/O. Additional usage charges apply when using an
     * EBS-optimized instance. Default: <code>false</code>.
     */
    private Boolean ebsOptimized;

    /** @param imageId the ID of the Amazon Machine Image (AMI) */
    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    /** @return the ID of the Amazon Machine Image (AMI) */
    public String getImageId() {
        return this.imageId;
    }

    /**
     * @param imageId the ID of the Amazon Machine Image (AMI)
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withImageId(String imageId) {
        setImageId(imageId);
        return this;
    }

    /** @param keyName the name of the key pair */
    public void setKeyName(String keyName) {
        this.keyName = keyName;
    }

    /** @return the name of the key pair */
    public String getKeyName() {
        return this.keyName;
    }

    /**
     * @param keyName the name of the key pair
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withKeyName(String keyName) {
        setKeyName(keyName);
        return this;
    }

    /**
     * Returns the IDs of one or more security groups, lazily initializing the backing list so this never returns
     * <code>null</code>.
     *
     * @return the IDs of one or more security groups
     */
    public java.util.List<String> getSecurityGroupIds() {
        if (securityGroupIds == null) {
            securityGroupIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return securityGroupIds;
    }

    /** @param securityGroupIds the IDs of one or more security groups (copied; <code>null</code> clears the list) */
    public void setSecurityGroupIds(java.util.Collection<String> securityGroupIds) {
        if (securityGroupIds == null) {
            this.securityGroupIds = null;
            return;
        }
        this.securityGroupIds = new com.amazonaws.internal.SdkInternalList<String>(securityGroupIds);
    }

    /**
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSecurityGroupIds(java.util.Collection)} or {@link #withSecurityGroupIds(java.util.Collection)} if you
     * want to override the existing values.
     *
     * @param securityGroupIds the IDs of one or more security groups
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withSecurityGroupIds(String... securityGroupIds) {
        if (this.securityGroupIds == null) {
            setSecurityGroupIds(new com.amazonaws.internal.SdkInternalList<String>(securityGroupIds.length));
        }
        for (String ele : securityGroupIds) {
            this.securityGroupIds.add(ele);
        }
        return this;
    }

    /**
     * @param securityGroupIds the IDs of one or more security groups
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withSecurityGroupIds(java.util.Collection<String> securityGroupIds) {
        setSecurityGroupIds(securityGroupIds);
        return this;
    }

    /** @param userData the base64-encoded MIME user data */
    public void setUserData(String userData) {
        this.userData = userData;
    }

    /** @return the base64-encoded MIME user data */
    public String getUserData() {
        return this.userData;
    }

    /**
     * @param userData the base64-encoded MIME user data
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withUserData(String userData) {
        setUserData(userData);
        return this;
    }

    /** @param placement the placement information */
    public void setPlacement(ScheduledInstancesPlacement placement) {
        this.placement = placement;
    }

    /** @return the placement information */
    public ScheduledInstancesPlacement getPlacement() {
        return this.placement;
    }

    /**
     * @param placement the placement information
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withPlacement(ScheduledInstancesPlacement placement) {
        setPlacement(placement);
        return this;
    }

    /** @param kernelId the ID of the kernel */
    public void setKernelId(String kernelId) {
        this.kernelId = kernelId;
    }

    /** @return the ID of the kernel */
    public String getKernelId() {
        return this.kernelId;
    }

    /**
     * @param kernelId the ID of the kernel
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withKernelId(String kernelId) {
        setKernelId(kernelId);
        return this;
    }

    /** @param instanceType the instance type */
    public void setInstanceType(String instanceType) {
        this.instanceType = instanceType;
    }

    /** @return the instance type */
    public String getInstanceType() {
        return this.instanceType;
    }

    /**
     * @param instanceType the instance type
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withInstanceType(String instanceType) {
        setInstanceType(instanceType);
        return this;
    }

    /** @param ramdiskId the ID of the RAM disk */
    public void setRamdiskId(String ramdiskId) {
        this.ramdiskId = ramdiskId;
    }

    /** @return the ID of the RAM disk */
    public String getRamdiskId() {
        return this.ramdiskId;
    }

    /**
     * @param ramdiskId the ID of the RAM disk
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withRamdiskId(String ramdiskId) {
        setRamdiskId(ramdiskId);
        return this;
    }

    /**
     * Returns one or more block device mapping entries, lazily initializing the backing list so this never returns
     * <code>null</code>.
     *
     * @return one or more block device mapping entries
     */
    public java.util.List<ScheduledInstancesBlockDeviceMapping> getBlockDeviceMappings() {
        if (blockDeviceMappings == null) {
            blockDeviceMappings = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesBlockDeviceMapping>();
        }
        return blockDeviceMappings;
    }

    /** @param blockDeviceMappings one or more block device mapping entries (copied; <code>null</code> clears the list) */
    public void setBlockDeviceMappings(java.util.Collection<ScheduledInstancesBlockDeviceMapping> blockDeviceMappings) {
        if (blockDeviceMappings == null) {
            this.blockDeviceMappings = null;
            return;
        }
        this.blockDeviceMappings = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesBlockDeviceMapping>(blockDeviceMappings);
    }

    /**
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setBlockDeviceMappings(java.util.Collection)} or {@link #withBlockDeviceMappings(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param blockDeviceMappings one or more block device mapping entries
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withBlockDeviceMappings(ScheduledInstancesBlockDeviceMapping... blockDeviceMappings) {
        if (this.blockDeviceMappings == null) {
            setBlockDeviceMappings(new com.amazonaws.internal.SdkInternalList<ScheduledInstancesBlockDeviceMapping>(blockDeviceMappings.length));
        }
        for (ScheduledInstancesBlockDeviceMapping ele : blockDeviceMappings) {
            this.blockDeviceMappings.add(ele);
        }
        return this;
    }

    /**
     * @param blockDeviceMappings one or more block device mapping entries
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withBlockDeviceMappings(java.util.Collection<ScheduledInstancesBlockDeviceMapping> blockDeviceMappings) {
        setBlockDeviceMappings(blockDeviceMappings);
        return this;
    }

    /** @param monitoring enable or disable monitoring for the instances */
    public void setMonitoring(ScheduledInstancesMonitoring monitoring) {
        this.monitoring = monitoring;
    }

    /** @return the monitoring setting for the instances */
    public ScheduledInstancesMonitoring getMonitoring() {
        return this.monitoring;
    }

    /**
     * @param monitoring enable or disable monitoring for the instances
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withMonitoring(ScheduledInstancesMonitoring monitoring) {
        setMonitoring(monitoring);
        return this;
    }

    /** @param subnetId the ID of the subnet in which to launch the instances */
    public void setSubnetId(String subnetId) {
        this.subnetId = subnetId;
    }

    /** @return the ID of the subnet in which to launch the instances */
    public String getSubnetId() {
        return this.subnetId;
    }

    /**
     * @param subnetId the ID of the subnet in which to launch the instances
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withSubnetId(String subnetId) {
        setSubnetId(subnetId);
        return this;
    }

    /**
     * Returns one or more network interfaces, lazily initializing the backing list so this never returns
     * <code>null</code>.
     *
     * @return one or more network interfaces
     */
    public java.util.List<ScheduledInstancesNetworkInterface> getNetworkInterfaces() {
        if (networkInterfaces == null) {
            networkInterfaces = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesNetworkInterface>();
        }
        return networkInterfaces;
    }

    /** @param networkInterfaces one or more network interfaces (copied; <code>null</code> clears the list) */
    public void setNetworkInterfaces(java.util.Collection<ScheduledInstancesNetworkInterface> networkInterfaces) {
        if (networkInterfaces == null) {
            this.networkInterfaces = null;
            return;
        }
        this.networkInterfaces = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesNetworkInterface>(networkInterfaces);
    }

    /**
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setNetworkInterfaces(java.util.Collection)} or {@link #withNetworkInterfaces(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param networkInterfaces one or more network interfaces
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withNetworkInterfaces(ScheduledInstancesNetworkInterface... networkInterfaces) {
        if (this.networkInterfaces == null) {
            setNetworkInterfaces(new com.amazonaws.internal.SdkInternalList<ScheduledInstancesNetworkInterface>(networkInterfaces.length));
        }
        for (ScheduledInstancesNetworkInterface ele : networkInterfaces) {
            this.networkInterfaces.add(ele);
        }
        return this;
    }

    /**
     * @param networkInterfaces one or more network interfaces
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withNetworkInterfaces(java.util.Collection<ScheduledInstancesNetworkInterface> networkInterfaces) {
        setNetworkInterfaces(networkInterfaces);
        return this;
    }

    /** @param iamInstanceProfile the IAM instance profile */
    public void setIamInstanceProfile(ScheduledInstancesIamInstanceProfile iamInstanceProfile) {
        this.iamInstanceProfile = iamInstanceProfile;
    }

    /** @return the IAM instance profile */
    public ScheduledInstancesIamInstanceProfile getIamInstanceProfile() {
        return this.iamInstanceProfile;
    }

    /**
     * @param iamInstanceProfile the IAM instance profile
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withIamInstanceProfile(ScheduledInstancesIamInstanceProfile iamInstanceProfile) {
        setIamInstanceProfile(iamInstanceProfile);
        return this;
    }

    /**
     * @param ebsOptimized whether the instances are optimized for EBS I/O (additional usage charges apply when using
     *        an EBS-optimized instance; default <code>false</code>)
     */
    public void setEbsOptimized(Boolean ebsOptimized) {
        this.ebsOptimized = ebsOptimized;
    }

    /** @return whether the instances are optimized for EBS I/O (default <code>false</code>) */
    public Boolean getEbsOptimized() {
        return this.ebsOptimized;
    }

    /**
     * @param ebsOptimized whether the instances are optimized for EBS I/O (additional usage charges apply when using
     *        an EBS-optimized instance; default <code>false</code>)
     * @return this object, so that method calls can be chained together
     */
    public ScheduledInstancesLaunchSpecification withEbsOptimized(Boolean ebsOptimized) {
        setEbsOptimized(ebsOptimized);
        return this;
    }

    /** @return whether the instances are optimized for EBS I/O (default <code>false</code>) */
    public Boolean isEbsOptimized() {
        return this.ebsOptimized;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getImageId() != null)
            sb.append("ImageId: ").append(getImageId()).append(",");
        if (getKeyName() != null)
            sb.append("KeyName: ").append(getKeyName()).append(",");
        if (getSecurityGroupIds() != null)
            sb.append("SecurityGroupIds: ").append(getSecurityGroupIds()).append(",");
        if (getUserData() != null)
            sb.append("UserData: ").append(getUserData()).append(",");
        if (getPlacement() != null)
            sb.append("Placement: ").append(getPlacement()).append(",");
        if (getKernelId() != null)
            sb.append("KernelId: ").append(getKernelId()).append(",");
        if (getInstanceType() != null)
            sb.append("InstanceType: ").append(getInstanceType()).append(",");
        if (getRamdiskId() != null)
            sb.append("RamdiskId: ").append(getRamdiskId()).append(",");
        if (getBlockDeviceMappings() != null)
            sb.append("BlockDeviceMappings: ").append(getBlockDeviceMappings()).append(",");
        if (getMonitoring() != null)
            sb.append("Monitoring: ").append(getMonitoring()).append(",");
        if (getSubnetId() != null)
            sb.append("SubnetId: ").append(getSubnetId()).append(",");
        if (getNetworkInterfaces() != null)
            sb.append("NetworkInterfaces: ").append(getNetworkInterfaces()).append(",");
        if (getIamInstanceProfile() != null)
            sb.append("IamInstanceProfile: ").append(getIamInstanceProfile()).append(",");
        if (getEbsOptimized() != null)
            sb.append("EbsOptimized: ").append(getEbsOptimized());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ScheduledInstancesLaunchSpecification))
            return false;
        ScheduledInstancesLaunchSpecification other = (ScheduledInstancesLaunchSpecification) obj;
        // Compare via the getters so that the lazily-initialized list fields behave the same on
        // both sides (a null list field and an empty list compare equal), as in the original
        // generated implementation.
        return java.util.Objects.equals(getImageId(), other.getImageId())
                && java.util.Objects.equals(getKeyName(), other.getKeyName())
                && java.util.Objects.equals(getSecurityGroupIds(), other.getSecurityGroupIds())
                && java.util.Objects.equals(getUserData(), other.getUserData())
                && java.util.Objects.equals(getPlacement(), other.getPlacement())
                && java.util.Objects.equals(getKernelId(), other.getKernelId())
                && java.util.Objects.equals(getInstanceType(), other.getInstanceType())
                && java.util.Objects.equals(getRamdiskId(), other.getRamdiskId())
                && java.util.Objects.equals(getBlockDeviceMappings(), other.getBlockDeviceMappings())
                && java.util.Objects.equals(getMonitoring(), other.getMonitoring())
                && java.util.Objects.equals(getSubnetId(), other.getSubnetId())
                && java.util.Objects.equals(getNetworkInterfaces(), other.getNetworkInterfaces())
                && java.util.Objects.equals(getIamInstanceProfile(), other.getIamInstanceProfile())
                && java.util.Objects.equals(getEbsOptimized(), other.getEbsOptimized());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (initial value 1) and the same field
        // order as the original hand-rolled chain, so the hash values are unchanged.
        return java.util.Objects.hash(getImageId(), getKeyName(), getSecurityGroupIds(), getUserData(), getPlacement(),
                getKernelId(), getInstanceType(), getRamdiskId(), getBlockDeviceMappings(), getMonitoring(),
                getSubnetId(), getNetworkInterfaces(), getIamInstanceProfile(), getEbsOptimized());
    }

    @Override
    public ScheduledInstancesLaunchSpecification clone() {
        try {
            return (ScheduledInstancesLaunchSpecification) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
package edu.princeton.safe.internal.cytoscape; import java.awt.Color; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Font; import java.awt.FontFormatException; import java.awt.GraphicsEnvironment; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.IOException; import java.io.InputStream; import java.util.Iterator; import java.util.List; import java.util.OptionalInt; import java.util.stream.IntStream; import java.util.stream.Stream; import javax.swing.BorderFactory; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JSeparator; import javax.swing.SwingConstants; import javax.swing.border.Border; import org.cytoscape.model.CyColumn; import org.cytoscape.model.CyNetwork; import org.cytoscape.model.CyTable; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.hppc.cursors.LongCursor; import edu.princeton.safe.Identifiable; import edu.princeton.safe.internal.cytoscape.controller.ExpanderController; import edu.princeton.safe.internal.cytoscape.controller.ExpansionChangeListener; import edu.princeton.safe.internal.cytoscape.model.Factory; import edu.princeton.safe.internal.cytoscape.model.NameValuePair; public class SafeUtil { public static final String SEARCH_ICON = "\uf002"; public static final String CARET_DOWN_ICON = "\uf0d7"; public static final String CARET_LEFT_ICON = "\uf0da"; static Font iconFont; public static void checkSafeColumns(CyTable table) { CyColumn column = table.getColumn(StyleFactory.HIGHLIGHT_COLUMN); if (column == null) { table.createColumn(StyleFactory.HIGHLIGHT_COLUMN, Double.class, false, 0D); } column = table.getColumn(StyleFactory.COLOR_COLUMN); if (column == null) { table.createColumn(StyleFactory.COLOR_COLUMN, String.class, false, null); } column = table.getColumn(StyleFactory.BRIGHTNESSS_COLUMN); if (column == null) { 
table.createColumn(StyleFactory.BRIGHTNESSS_COLUMN, Double.class, false, 0D); } } public static void addSection(JPanel panel, String title) { addSection(panel, title, "center"); } public static void addSubsection(JPanel panel, String title) { addSection(panel, title, "leading"); } public static void addSection(JPanel panel, String title, String alignment) { JLabel label = new JLabel(title); Font boldFont = label.getFont() .deriveFont(Font.BOLD); label.setFont(boldFont); panel.add(label, "alignx " + alignment + ", wrap"); } public static Border createEmptyBorder(int width) { return BorderFactory.createEmptyBorder(width, width, width, width); } public static JLabel createStatusLabel(String initialText) { JLabel label = new JLabel(initialText); Font boldFont = label.getFont() .deriveFont(Font.BOLD); label.setFont(boldFont); label.setBackground(StyleFactory.NEGATIVE); label.setForeground(Color.white); label.setBorder(createEmptyBorder(2)); label.setOpaque(true); label.setHorizontalAlignment(SwingConstants.CENTER); label.setVerticalAlignment(SwingConstants.CENTER); return label; } public static ExpanderController addExpandingSection(JPanel parent, String title, Component section, ExpansionChangeListener expansionListener, String layoutOptions) { JLabel label = new JLabel(title); Font boldFont = label.getFont() .deriveFont(Font.BOLD); label.setFont(boldFont); ExpanderController controller = new ExpanderController(label); JComponent expander = controller.getExpander(); label.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { if (e.getButton() != MouseEvent.BUTTON1) { return; } if (!controller.isEnabled()) { return; } controller.toggle(); } }); controller.addExpandListener(isExpanded -> { section.setVisible(isExpanded); if (expansionListener != null) { expansionListener.expansionChanged(isExpanded); } }); controller.addEnableListener(isEnabled -> { label.setEnabled(isEnabled); expander.setEnabled(isEnabled); }); parent.add(expander, 
"grow 0, split 2"); parent.add(label, "wrap"); parent.add(section, layoutOptions); return controller; } public static JLabel createIconLabel(String icon) { int size = 15; JLabel label = new JLabel(icon); label.setFont(getIconFont(size)); label.setHorizontalAlignment(SwingConstants.CENTER); label.setPreferredSize(new Dimension(size, size)); return label; } public static Font getIconFont(float size) { if (iconFont == null) { InputStream stream = SafeUtil.class.getResourceAsStream("/fonts/fontawesome-webfont.ttf"); try { iconFont = Font.createFont(Font.TRUETYPE_FONT, stream); GraphicsEnvironment environment = GraphicsEnvironment.getLocalGraphicsEnvironment(); environment.registerFont(iconFont); } catch (FontFormatException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } } return iconFont.deriveFont(size); } public static void addSeparator(JPanel panel) { panel.add(new JSeparator(), "span, growx, hmin 10, wrap"); } public static <T> void setSelected(JComboBox<NameValuePair<T>> comboBox, T value) { if (value == null) { int defaultIndex = comboBox.getItemCount() > 0 ? 0 : -1; comboBox.setSelectedIndex(defaultIndex); return; } OptionalInt index = IntStream.range(0, comboBox.getItemCount()) .filter(i -> isEqual(comboBox.getItemAt(i) .getValue(), value)) .findFirst(); if (index.isPresent()) { comboBox.setSelectedIndex(index.getAsInt()); } } static boolean isEqual(Object o1, Object o2) { if (o1 == null || o2 == null) { return o1 == o2; } return o1.equals(o2); } public static <T> void setSelected(JComboBox<NameValuePair<Factory<T>>> comboBox, Identifiable value) { String id = value == null ? null : value.getId(); OptionalInt index = IntStream.range(0, comboBox.getItemCount()) .filter(i -> isEqual(comboBox.getItemAt(i) .getValue() .getId(), id)) .findFirst(); if (index.isPresent()) { comboBox.setSelectedIndex(index.getAsInt()); } else { int defaultIndex = comboBox.getItemCount() > 0 ? 
0 : -1; comboBox.setSelectedIndex(defaultIndex); } } public static void updateLayout(Component component) { component.invalidate(); Container parent = component.getParent(); while (parent != null) { if (parent instanceof JFrame) { JFrame frame = (JFrame) parent; frame.validate(); frame.repaint(); } parent = parent.getParent(); } } public static void clearSelection(CyTable table) { table.getMatchingRows(CyNetwork.SELECTED, true) .stream() .forEach(row -> row.set(CyNetwork.SELECTED, false)); } public static Stream<String> getStringColumnNames(CyTable table) { return table.getColumns() .stream() .filter(c -> isStringColumn(c)) .map(c -> c.getName()) .sorted(String.CASE_INSENSITIVE_ORDER); } static boolean isStringColumn(CyColumn column) { Class<?> type = column.getType(); if (type.equals(String.class)) { return true; } if (type.equals(List.class) && column.getListElementType() .equals(String.class)) { return true; } return false; } public static boolean hasIntersection(LongSet set1, LongSet set2) { Iterator<LongCursor> iterator = set1.iterator(); while (iterator.hasNext()) { LongCursor cursor = iterator.next(); if (set2.contains(cursor.value)) { return true; } } return false; } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.authc;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.license.XPackLicenseState.AllowedRealmType;
import org.elasticsearch.xpack.core.security.authc.Realm;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.core.security.authc.RealmSettings;
import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings;
import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings;

/**
 * Serves as a realms registry (also responsible for ordering the realms appropriately)
 */
public class Realms extends AbstractComponent implements Iterable<Realm> {

    private final Environment env;
    private final Map<String, Realm.Factory> factories;
    private final XPackLicenseState licenseState;
    private final ThreadContext threadContext;
    // The reserved realm is never built from settings; it is injected and
    // always placed first in every realm list (see constructor).
    private final ReservedRealm reservedRealm;

    // All configured realms, in authentication order (reserved realm first).
    protected List<Realm> realms;
    // a list of realms that are considered standard in that they are provided by x-pack and
    // interact with a 3rd party source on a limited basis
    List<Realm> standardRealmsOnly;
    // a list of realms that are considered native, that is they only interact with x-pack and no 3rd party auth sources
    List<Realm> nativeRealmsOnly;

    /**
     * Builds the registry: initializes realms from settings, then pre-computes
     * the license-restricted views (standard-only and native-only) so that
     * iteration under a restricted license is cheap.
     */
    public Realms(Settings settings, Environment env, Map<String, Realm.Factory> factories, XPackLicenseState licenseState,
                  ThreadContext threadContext, ReservedRealm reservedRealm) throws Exception {
        super(settings);
        this.env = env;
        this.factories = factories;
        this.licenseState = licenseState;
        this.threadContext = threadContext;
        this.reservedRealm = reservedRealm;
        // The reserved realm must never be registered as a factory.
        assert factories.get(ReservedRealm.TYPE) == null;
        this.realms = initRealms();
        // pre-computing a list of internal only realms allows us to have much cheaper iteration than a custom iterator
        // and is also simpler in terms of logic. These lists are small, so the duplication should not be a real issue here
        List<Realm> standardRealms = new ArrayList<>();
        List<Realm> nativeRealms = new ArrayList<>();
        for (Realm realm : realms) {
            // don't add the reserved realm here otherwise we end up with only this realm...
            if (InternalRealms.isStandardRealm(realm.type())) {
                standardRealms.add(realm);
            }

            if (FileRealmSettings.TYPE.equals(realm.type()) || NativeRealmSettings.TYPE.equals(realm.type())) {
                nativeRealms.add(realm);
            }
        }

        for (List<Realm> realmList : Arrays.asList(standardRealms, nativeRealms)) {
            // If no realm of this category was configured, fall back to the
            // default file/native realms so the view is never empty.
            if (realmList.isEmpty()) {
                addNativeRealms(realmList);
            }

            // The reserved realm is always consulted first.
            assert realmList.contains(reservedRealm) == false;
            realmList.add(0, reservedRealm);
            assert realmList.get(0) == reservedRealm;
        }

        this.standardRealmsOnly = Collections.unmodifiableList(standardRealms);
        this.nativeRealmsOnly = Collections.unmodifiableList(nativeRealms);
        realms.forEach(r -> r.initialize(this, licenseState));
    }

    /**
     * Iterates the realms permitted by the current license level; empty when
     * authentication is not allowed at all.
     */
    @Override
    public Iterator<Realm> iterator() {
        if (licenseState.isAuthAllowed() == false) {
            return Collections.emptyIterator();
        }

        AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
        switch (allowedRealmType) {
            case ALL:
                return realms.iterator();
            case DEFAULT:
                return standardRealmsOnly.iterator();
            case NATIVE:
                return nativeRealmsOnly.iterator();
            default:
                throw new IllegalStateException("authentication should not be enabled");
        }
    }

    /** Streams the realms permitted by the current license level. */
    public Stream<Realm> stream() {
        return StreamSupport.stream(this.spliterator(), false);
    }

    /**
     * Returns an unmodifiable list of the realms permitted by the current
     * license level; empty when authentication is not allowed.
     */
    public List<Realm> asList() {
        if (licenseState.isAuthAllowed() == false) {
            return Collections.emptyList();
        }

        AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
        switch (allowedRealmType) {
            case ALL:
                return Collections.unmodifiableList(realms);
            case DEFAULT:
                return Collections.unmodifiableList(standardRealmsOnly);
            case NATIVE:
                return Collections.unmodifiableList(nativeRealmsOnly);
            default:
                throw new IllegalStateException("authentication should not be enabled");
        }
    }

    /** Looks up a realm by its configured name, or null if none matches. */
    public Realm realm(String name) {
        for (Realm realm : realms) {
            if (name.equals(realm.name())) {
                return realm;
            }
        }
        return null;
    }

    /** Returns the registered factory for the given realm type, or null. */
    public Realm.Factory realmFactory(String type) {
        return factories.get(type);
    }

    /**
     * Builds the realm list from the node settings, enforcing singleton
     * constraints (file/native/kerberos), skipping disabled realms, sorting
     * by realm order, and always prepending the reserved realm.
     */
    protected List<Realm> initRealms() throws Exception {
        Settings realmsSettings = RealmSettings.get(settings);
        Set<String> internalTypes = new HashSet<>();
        List<Realm> realms = new ArrayList<>();
        List<String> kerberosRealmNames = new ArrayList<>();
        for (String name : realmsSettings.names()) {
            Settings realmSettings = realmsSettings.getAsSettings(name);
            String type = realmSettings.get("type");
            if (type == null) {
                throw new IllegalArgumentException("missing realm type for [" + name + "] realm");
            }
            Realm.Factory factory = factories.get(type);
            if (factory == null) {
                throw new IllegalArgumentException("unknown realm type [" + type + "] set for realm [" + name + "]");
            }
            RealmConfig config = new RealmConfig(name, realmSettings, settings, env, threadContext);
            if (!config.enabled()) {
                if (logger.isDebugEnabled()) {
                    logger.debug("realm [{}/{}] is disabled", type, name);
                }
                continue;
            }
            if (FileRealmSettings.TYPE.equals(type) || NativeRealmSettings.TYPE.equals(type)) {
                // this is an internal realm factory, let's make sure we didn't already registered one
                // (there can only be one instance of an internal realm)
                if (internalTypes.contains(type)) {
                    throw new IllegalArgumentException("multiple [" + type + "] realms are configured. [" + type +
                            "] is an internal realm and therefore there can only be one such realm configured");
                }
                internalTypes.add(type);
            }
            if (KerberosRealmSettings.TYPE.equals(type)) {
                // At most one kerberos realm is supported per node.
                kerberosRealmNames.add(name);
                if (kerberosRealmNames.size() > 1) {
                    throw new IllegalArgumentException("multiple realms " + kerberosRealmNames.toString() + " configured of type [" + type +
                            "], [" + type + "] can only have one such realm configured");
                }
            }
            realms.add(factory.create(config));
        }

        if (!realms.isEmpty()) {
            Collections.sort(realms);
        } else {
            // there is no "realms" configuration, add the defaults
            addNativeRealms(realms);
        }
        // always add built in first!
        realms.add(0, reservedRealm);
        return realms;
    }

    /**
     * Asynchronously aggregates per-realm usage statistics into a single map
     * keyed by realm type. Each type entry also carries "enabled" and
     * "available" flags derived from the license state. The reserved realm is
     * excluded. The first realm failure aborts the whole request.
     */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        Map<String, Object> realmMap = new HashMap<>();
        final AtomicBoolean failed = new AtomicBoolean(false);
        final List<Realm> realmList = asList().stream()
                .filter(r -> ReservedRealm.TYPE.equals(r.type()) == false)
                .collect(Collectors.toList());
        final CountDown countDown = new CountDown(realmList.size());
        final Runnable doCountDown = () -> {
            // Runs the final aggregation exactly once, after the last realm
            // has reported (or immediately when there are no realms).
            if ((realmList.isEmpty() || countDown.countDown()) && failed.get() == false) {
                final AllowedRealmType allowedRealmType = licenseState.allowedRealmType();
                // iterate over the factories so we can add enabled & available info
                for (String type : factories.keySet()) {
                    assert ReservedRealm.TYPE.equals(type) == false;
                    realmMap.compute(type, (key, value) -> {
                        if (value == null) {
                            return MapBuilder.<String, Object>newMapBuilder()
                                    .put("enabled", false)
                                    .put("available", isRealmTypeAvailable(allowedRealmType, type))
                                    .map();
                        }

                        assert value instanceof Map;
                        Map<String, Object> realmTypeUsage = (Map<String, Object>) value;
                        realmTypeUsage.put("enabled", true);
                        // the realms iterator returned this type so it must be enabled
                        assert isRealmTypeAvailable(allowedRealmType, type);
                        realmTypeUsage.put("available", true);
                        return value;
                    });
                }
                listener.onResponse(realmMap);
            }
        };

        if (realmList.isEmpty()) {
            doCountDown.run();
        } else {
            for (Realm realm : realmList) {
                realm.usageStats(ActionListener.wrap(stats -> {
                    if (failed.get() == false) {
                        // realmMap is shared by concurrent realm callbacks.
                        synchronized (realmMap) {
                            realmMap.compute(realm.type(), (key, value) -> {
                                if (value == null) {
                                    Object realmTypeUsage = convertToMapOfLists(stats);
                                    return realmTypeUsage;
                                }
                                assert value instanceof Map;
                                combineMaps((Map<String, Object>) value, stats);
                                return value;
                            });
                        }
                        doCountDown.run();
                    }
                }, e -> {
                    // Only the first failure is propagated.
                    if (failed.compareAndSet(false, true)) {
                        listener.onFailure(e);
                    }
                }));
            }
        }
    }

    /**
     * Appends default file and native realms (when their factories are
     * registered) to the given list.
     */
    private void addNativeRealms(List<Realm> realms) throws Exception {
        Realm.Factory fileRealm = factories.get(FileRealmSettings.TYPE);
        if (fileRealm != null) {
            realms.add(fileRealm.create(new RealmConfig("default_" + FileRealmSettings.TYPE, Settings.EMPTY, settings, env,
                    threadContext)));
        }
        Realm.Factory indexRealmFactory = factories.get(NativeRealmSettings.TYPE);
        if (indexRealmFactory != null) {
            realms.add(indexRealmFactory.create(new RealmConfig("default_" + NativeRealmSettings.TYPE, Settings.EMPTY, settings, env,
                    threadContext)));
        }
    }

    /**
     * Merges mapB's values into mapA in place, where mapA's values are lists
     * accumulating one entry per realm of the same type.
     */
    private static void combineMaps(Map<String, Object> mapA, Map<String, Object> mapB) {
        for (Entry<String, Object> entry : mapB.entrySet()) {
            mapA.compute(entry.getKey(), (key, value) -> {
                if (value == null) {
                    return new ArrayList<>(Collections.singletonList(entry.getValue()));
                }

                assert value instanceof List;
                ((List) value).add(entry.getValue());
                return value;
            });
        }
    }

    /** Wraps each value of the map into a fresh single-element list. */
    private static Map<String, Object> convertToMapOfLists(Map<String, Object> map) {
        Map<String, Object> converted = new HashMap<>(map.size());
        for (Entry<String, Object> entry : map.entrySet()) {
            converted.put(entry.getKey(), new ArrayList<>(Collections.singletonList(entry.getValue())));
        }
        return converted;
    }

    /** Returns whether a realm type is usable under the given license level. */
    public static boolean isRealmTypeAvailable(AllowedRealmType enabledRealmType, String type) {
        switch (enabledRealmType) {
            case ALL:
                return true;
            case NONE:
                return false;
            case NATIVE:
                return FileRealmSettings.TYPE.equals(type) || NativeRealmSettings.TYPE.equals(type);
            case DEFAULT:
                return InternalRealms.isStandardRealm(type) || ReservedRealm.TYPE.equals(type);
            default:
                throw new IllegalStateException("unknown enabled realm type [" + enabledRealmType + "]");
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.tez.tools; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.PriorityQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.io.BinaryComparable; import org.apache.tez.runtime.api.Input; import org.apache.tez.runtime.library.api.KeyValuesReader; /** * A KeyValuesReader implementation that returns a sorted stream of key-values * by doing a sorted merge of the key-value in LogicalInputs. * Tags are in the last byte of the key, so no special handling for tags is required. * Uses a priority queue to pick the KeyValuesReader of the input that is next in * sort order. 
*/ public class KeyValuesInputMerger extends KeyValuesReader { private class KeyValuesIterable implements Iterable<Object> { KeyValuesIterator currentIterator = null; KeyValuesIterable(int size) { currentIterator = new KeyValuesIterator(size); } @Override public Iterator<Object> iterator() { return currentIterator; } public void init(List<KeyValuesReader> readerList) { currentIterator.init(readerList); } } private class KeyValuesIterator implements Iterator<Object> { KeyValuesReader[] readerArray = null; Iterator<Object> currentIterator = null; int currentIndex = 0; int loadedSize = 0; KeyValuesIterator(int size) { readerArray = new KeyValuesReader[size]; } public void init(List<KeyValuesReader> readerList) { for (int i = 0; i < readerList.size(); i++) { readerArray[i] = null; } loadedSize = 0; for (KeyValuesReader kvsReader : readerList) { readerArray[loadedSize] = kvsReader; loadedSize++; } currentIterator = null; currentIndex = 0; } @Override public boolean hasNext() { if ((currentIterator == null) || (currentIterator.hasNext() == false)) { if (currentIndex == loadedSize) { return false; } try { if (readerArray[currentIndex] == null) { return false; } currentIterator = readerArray[currentIndex].getCurrentValues().iterator(); currentIndex++; return currentIterator.hasNext(); } catch (IOException e) { return false; } } return true; } @Override public Object next() { return currentIterator.next(); } @Override public void remove() { // nothing to do } } public static final Logger l4j = LoggerFactory.getLogger(KeyValuesInputMerger.class); private PriorityQueue<KeyValuesReader> pQueue = null; private final List<KeyValuesReader> nextKVReaders = new ArrayList<KeyValuesReader>(); KeyValuesIterable kvsIterable = null; public KeyValuesInputMerger(List<? 
extends Input> shuffleInputs) throws Exception { //get KeyValuesReaders from the LogicalInput and add them to priority queue int initialCapacity = shuffleInputs.size(); kvsIterable = new KeyValuesIterable(initialCapacity); pQueue = new PriorityQueue<KeyValuesReader>(initialCapacity, new KVReaderComparator()); for(Input input : shuffleInputs){ addToQueue((KeyValuesReader)input.getReader()); } } /** * Add KeyValuesReader to queue if it has more key-values * @param kvsReadr * @throws IOException */ private void addToQueue(KeyValuesReader kvsReadr) throws IOException{ if(kvsReadr.next()){ pQueue.add(kvsReadr); } } /** * @return true if there are more key-values and advances to next key-values * @throws IOException */ @Override public boolean next() throws IOException { //add the previous nextKVReader back to queue if (!nextKVReaders.isEmpty()) { for (KeyValuesReader kvReader : nextKVReaders) { addToQueue(kvReader); } nextKVReaders.clear(); } KeyValuesReader nextKVReader = null; //get the new nextKVReader with lowest key nextKVReader = pQueue.poll(); if (nextKVReader != null) { nextKVReaders.add(nextKVReader); } while (pQueue.peek() != null) { KeyValuesReader equalValueKVReader = pQueue.poll(); if (pQueue.comparator().compare(nextKVReader, equalValueKVReader) == 0) { nextKVReaders.add(equalValueKVReader); } else { pQueue.add(equalValueKVReader); break; } } return !(nextKVReaders.isEmpty()); } @Override public Object getCurrentKey() throws IOException { // return key from any of the readers return nextKVReaders.get(0).getCurrentKey(); } @Override public Iterable<Object> getCurrentValues() throws IOException { kvsIterable.init(nextKVReaders); return kvsIterable; } /** * Comparator that compares KeyValuesReader on their current key */ class KVReaderComparator implements Comparator<KeyValuesReader> { @Override public int compare(KeyValuesReader kvReadr1, KeyValuesReader kvReadr2) { try { BinaryComparable key1 = (BinaryComparable) kvReadr1.getCurrentKey(); BinaryComparable 
key2 = (BinaryComparable) kvReadr2.getCurrentKey(); return key1.compareTo(key2); } catch (IOException e) { l4j.error("Caught exception while reading shuffle input", e); //die! throw new RuntimeException(e); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.hyracks.bootstrap; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.LinkedBlockingQueue; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.asterix.common.api.IClusterEventsSubscriber; import org.apache.asterix.common.api.IClusterManagementWork; import org.apache.asterix.common.api.IClusterManagementWorkResponse; import org.apache.asterix.common.api.IClusterManagementWorkResponse.Status; import org.apache.asterix.common.config.ClusterProperties; import org.apache.asterix.common.exceptions.AsterixException; import org.apache.asterix.event.schema.cluster.Node; import org.apache.asterix.metadata.MetadataManager; import org.apache.asterix.metadata.cluster.AddNodeWork; import org.apache.asterix.metadata.cluster.AddNodeWorkResponse; import org.apache.asterix.metadata.cluster.ClusterManagerProvider; import org.apache.asterix.metadata.cluster.RemoveNodeWork; import org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse; import org.apache.asterix.runtime.utils.CcApplicationContext; import 
org.apache.asterix.runtime.utils.ClusterStateManager; import org.apache.hyracks.api.application.IClusterLifecycleListener; import org.apache.hyracks.api.config.IOption; import org.apache.hyracks.api.exceptions.HyracksException; public class ClusterLifecycleListener implements IClusterLifecycleListener { private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName()); private final CcApplicationContext appCtx; private final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<>(); private final ClusterWorkExecutor eventHandler; private final List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<>(); public ClusterLifecycleListener(CcApplicationContext appCtx) { this.appCtx = appCtx; eventHandler = new ClusterWorkExecutor(appCtx, workRequestQueue); Thread t = new Thread(eventHandler); if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("Starting cluster event handler"); } t.start(); } @Override public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("NC: " + nodeId + " joined"); } ClusterStateManager.INSTANCE.addNCConfiguration(nodeId, ncConfiguration); //if metadata node rejoining, we need to rebind the proxy connection when it is active again. 
if (!ClusterStateManager.INSTANCE.isMetadataNodeActive()) { MetadataManager.INSTANCE.rebindMetadataNode(); } Set<String> nodeAddition = new HashSet<>(); nodeAddition.add(nodeId); updateProgress(ClusterEventType.NODE_JOIN, nodeAddition); Set<IClusterEventsSubscriber> subscribers = ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers(); Set<IClusterManagementWork> work = new HashSet<>(); for (IClusterEventsSubscriber sub : subscribers) { Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId); work.addAll(workRequest); } if (!work.isEmpty()) { executeWorkSet(work); } } @Override public void notifyNodeFailure(Collection<String> deadNodeIds) throws HyracksException { for (String deadNode : deadNodeIds) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("NC: " + deadNode + " left"); } ClusterStateManager.INSTANCE.removeNCConfiguration(deadNode); //if metadata node failed, we need to rebind the proxy connection when it is active again if (!ClusterStateManager.INSTANCE.isMetadataNodeActive()) { MetadataManager.INSTANCE.rebindMetadataNode(); } } updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds); Set<IClusterEventsSubscriber> subscribers = ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers(); Set<IClusterManagementWork> work = new HashSet<>(); for (IClusterEventsSubscriber sub : subscribers) { Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds); work.addAll(workRequest); } if (!work.isEmpty()) { executeWorkSet(work); } } private void updateProgress(ClusterEventType eventType, Collection<String> nodeIds) { List<IClusterManagementWorkResponse> completedResponses = new ArrayList<>(); boolean isComplete = false; for (IClusterManagementWorkResponse resp : pendingWorkResponses) { switch (eventType) { case NODE_FAILURE: isComplete = ((RemoveNodeWorkResponse) resp).updateProgress(nodeIds); if (isComplete) { resp.setStatus(Status.SUCCESS); 
resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp); completedResponses.add(resp); } break; case NODE_JOIN: isComplete = ((AddNodeWorkResponse) resp).updateProgress(nodeIds.iterator().next()); if (isComplete) { resp.setStatus(Status.SUCCESS); resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp); completedResponses.add(resp); } break; } } pendingWorkResponses.removeAll(completedResponses); } private void executeWorkSet(Set<IClusterManagementWork> workSet) { int nodesToAdd = 0; Set<String> nodesToRemove = new HashSet<>(); Set<AddNodeWork> nodeAdditionRequests = new HashSet<>(); Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>(); for (IClusterManagementWork w : workSet) { switch (w.getClusterManagementWorkType()) { case ADD_NODE: if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) { nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested(); } nodeAdditionRequests.add((AddNodeWork) w); break; case REMOVE_NODE: nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved()); nodeRemovalRequests.add(w); RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS); pendingWorkResponses.add(response); break; } } List<String> addedNodes = new ArrayList<>(); String asterixInstanceName = ClusterProperties.INSTANCE.getCluster().getInstanceName(); for (int i = 0; i < nodesToAdd; i++) { Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode(); if (node != null) { try { ClusterManagerProvider.getClusterManager().addNode(appCtx, node); addedNodes.add(asterixInstanceName + "_" + node.getId()); if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("Added NC at:" + node.getId()); } } catch (AsterixException e) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Unable to add NC at:" + node.getId()); } e.printStackTrace(); } } else { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Unable to add NC: no more available nodes"); } } } for (AddNodeWork w : 
nodeAdditionRequests) { int n = w.getNumberOfNodesRequested(); List<String> nodesToBeAddedForWork = new ArrayList<>(); for (int i = 0; i < n && i < addedNodes.size(); i++) { nodesToBeAddedForWork.add(addedNodes.get(i)); } if (nodesToBeAddedForWork.isEmpty()) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.info("Unable to satisfy request by " + w); } AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork); response.setStatus(Status.FAILURE); w.getSourceSubscriber().notifyRequestCompletion(response); } else { AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork); pendingWorkResponses.add(response); } } } }
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.apps.svgbrowser;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

import org.apache.batik.dom.GenericDOMImplementation;
import org.apache.batik.dom.util.DOMUtilities;
import org.apache.batik.dom.util.DocumentFactory;
import org.apache.batik.dom.util.SAXDocumentFactory;
import org.apache.batik.util.PreferenceManager;
import org.apache.batik.util.XMLResourceDescriptor;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/**
 * An extension of {@link PreferenceManager} which store the preference
 * in XML.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id$
 */
public class XMLPreferenceManager extends PreferenceManager {

    /**
     * The XML parser
     */
    protected String xmlParserClassName;

    /**
     * The XML encoding used to store properties
     */
    public static final String PREFERENCE_ENCODING = "8859_1";

    /**
     * Creates a preference manager.
     * @param prefFileName the name of the preference file.
     */
    public XMLPreferenceManager(String prefFileName){
        this(prefFileName, null, XMLResourceDescriptor.getXMLParserClassName());
    }

    /**
     * Creates a preference manager.
     * @param prefFileName the name of the preference file.
     * @param defaults where to get defaults value if the value is
     *                 not specified in the file.
     */
    public XMLPreferenceManager(String prefFileName, Map defaults){
        this(prefFileName, defaults, XMLResourceDescriptor.getXMLParserClassName());
    }

    /**
     * Creates a preference manager.
     * @param prefFileName the name of the preference file.
     * @param parser The XML parser class name.
     */
    public XMLPreferenceManager(String prefFileName, String parser) {
        this(prefFileName, null, parser);
    }

    /**
     * Creates a preference manager with a default values
     * initialization map.
     * @param prefFileName the name of the preference file.
     * @param defaults where to get defaults value if the value is
     *                 not specified in the file.
     * @param parser The XML parser class name.
     */
    public XMLPreferenceManager(String prefFileName, Map defaults, String parser) {
        super(prefFileName, defaults);
        // Replace the inherited Properties store with the XML-backed one;
        // "internal" is the PreferenceManager field holding the backing store.
        internal = new XMLProperties();
        xmlParserClassName = parser;
    }

    /**
     * To store the preferences.
     * Overrides load/store of java.util.Properties so preferences are
     * persisted as a small XML document instead of key=value lines.
     * Note: raw Map types are used throughout — this is pre-generics code.
     */
    protected class XMLProperties extends Properties {

        /**
         * Reads a property list (key and element pairs) from the input stream.
         * The stream is assumed to be using the ISO 8859-1 character encoding.
         */
        public synchronized void load(InputStream is) throws IOException {
            BufferedReader r;
            r = new BufferedReader(new InputStreamReader(is, PREFERENCE_ENCODING));
            DocumentFactory df = new SAXDocumentFactory
                (GenericDOMImplementation.getDOMImplementation(),
                 xmlParserClassName);
            Document doc = df.createDocument("http://xml.apache.org/batik/preferences",
                                             "preferences",
                                             null,
                                             r);
            Element elt = doc.getDocumentElement();
            // Each <property name="..."> child holds the value as leading
            // text nodes; a non-text child terminates the value.
            for (Node n = elt.getFirstChild(); n != null; n = n.getNextSibling()) {
                if (n.getNodeType() == Node.ELEMENT_NODE) {
                    if (n.getNodeName().equals("property")) {
                        String name = ((Element)n).getAttributeNS(null, "name");

                        StringBuffer cont = new StringBuffer();
                        for (Node c = n.getFirstChild(); c != null; c = c.getNextSibling()) {
                            if (c.getNodeType() == Node.TEXT_NODE) {
                                cont.append(c.getNodeValue());
                            } else {
                                break;
                            }
                        }
                        String val = cont.toString();
                        put(name, val);
                    }
                }
            }
        }

        /**
         * Writes this property list (key and element pairs) in this
         * <code>Properties</code> table to the output stream in a format suitable
         * for loading into a <code>Properties</code> table using the
         * <code>load</code> method.
         * The stream is written using the ISO 8859-1 character encoding.
         */
        public synchronized void store(OutputStream os, String header)
            throws IOException {
            BufferedWriter w;
            w = new BufferedWriter(new OutputStreamWriter(os, PREFERENCE_ENCODING));

            Map m = new HashMap();
            enumerate(m);

            w.write("<preferences xmlns=\"http://xml.apache.org/batik/preferences\">\n");

            Iterator it = m.keySet().iterator();
            while (it.hasNext()) {
                String n = (String)it.next();
                String v = (String)m.get(n);

                w.write("<property name=\"" + n + "\">");
                try {
                    w.write(DOMUtilities.contentToString(v, false));
                } catch (IOException ex) {
                    // unlikely to happen
                }
                w.write("</property>\n");
            }

            w.write("</preferences>\n");
            w.flush();
        }

        /**
         * Enumerates all key/value pairs in the specified m.
* @param m the map */ private synchronized void enumerate(Map m) { if (defaults != null) { Iterator it = m.keySet().iterator(); while (it.hasNext()) { Object k = it.next(); m.put(k, defaults.get(k)); } } Iterator it = keySet().iterator(); while (it.hasNext()) { Object k = it.next(); m.put(k, get(k)); } } } }
/*
 * Copyright 2014-2016 the original author or authors.
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.stream.app.websocket.sink;

import static io.netty.handler.codec.http.HttpHeaders.Names.HOST;
import static io.netty.handler.codec.http.HttpMethod.GET;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.boot.actuate.trace.TraceRepository;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.websocketx.CloseWebSocketFrame;
import io.netty.handler.codec.http.websocketx.PingWebSocketFrame;
import io.netty.handler.codec.http.websocketx.PongWebSocketFrame;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketServerHandshaker;
import io.netty.handler.codec.http.websocketx.WebSocketServerHandshakerFactory;
import io.netty.util.CharsetUtil;

/**
 * Handles handshakes and messages. Based on the Netty <a href="https://bit.ly/1jVBj5T">websocket examples</a>.
 *
 * @author Netty Project
 * @author Oliver Moser
 * @author Gary Russell
 */
class WebsocketSinkServerHandler extends SimpleChannelInboundHandler<Object> {

	private static final Log logger = LogFactory.getLog(WebsocketSinkServerHandler.class);

	private final boolean traceEnabled;

	private final TraceRepository websocketTraceRepository;

	private final WebsocketSinkProperties properties;

	/**
	 * Handshaker for the current channel; created during the HTTP upgrade
	 * request and used afterwards to close the websocket cleanly.
	 */
	private WebSocketServerHandshaker handshaker;

	WebsocketSinkServerHandler(TraceRepository websocketTraceRepository, WebsocketSinkProperties properties,
			boolean traceEnabled) {
		this.websocketTraceRepository = websocketTraceRepository;
		this.properties = properties;
		this.traceEnabled = traceEnabled;
	}

	@Override
	public void channelRead0(ChannelHandlerContext ctx, Object msg) {
		// A plain HTTP request is the websocket upgrade handshake; anything
		// after the handshake arrives as WebSocketFrames.
		if (msg instanceof FullHttpRequest) {
			handleHttpRequest(ctx, (FullHttpRequest) msg);
		}
		else if (msg instanceof WebSocketFrame) {
			handleWebSocketFrame(ctx, (WebSocketFrame) msg);
		}
	}

	@Override
	public void channelReadComplete(ChannelHandlerContext ctx) {
		ctx.flush();
	}

	/**
	 * Validates the upgrade request (well-formed, GET only), lets subclasses
	 * veto it, then performs the websocket handshake and registers the
	 * channel with the server's channel group.
	 */
	private void handleHttpRequest(ChannelHandlerContext ctx, FullHttpRequest req) {
		// Handle a bad request.
		if (!req.getDecoderResult().isSuccess()) {
			logger.warn(String.format("Bad request: %s", req.getUri()));
			sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1, BAD_REQUEST));
			return;
		}

		// Allow only GET methods.
		if (req.getMethod() != GET) {
			logger.warn(String.format("Unsupported HTTP method: %s", req.getMethod()));
			sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1, FORBIDDEN));
			return;
		}

		// enable subclasses to do additional processing
		if (!additionalHttpRequestHandler(ctx, req)) {
			return;
		}

		// Handshake
		WebSocketServerHandshakerFactory wsFactory = new WebSocketServerHandshakerFactory(
				getWebSocketLocation(req), null, true);
		handshaker = wsFactory.newHandshaker(req);
		if (handshaker == null) {
			// null means the client requested an unsupported protocol version
			WebSocketServerHandshakerFactory.sendUnsupportedVersionResponse(ctx.channel());
		}
		else {
			handshaker.handshake(ctx.channel(), req);
			WebsocketSinkServer.channels.add(ctx.channel());
		}
	}

	/**
	 * Dispatches websocket frames: close and ping are answered per protocol,
	 * text frames are traced and echoed; all other frame types are rejected.
	 */
	private void handleWebSocketFrame(ChannelHandlerContext ctx, WebSocketFrame frame) {
		// Check for closing frame
		if (frame instanceof CloseWebSocketFrame) {
			addTraceForFrame(frame, "close");
			handshaker.close(ctx.channel(), (CloseWebSocketFrame) frame.retain());
			return;
		}
		if (frame instanceof PingWebSocketFrame) {
			addTraceForFrame(frame, "ping");
			ctx.channel().write(new PongWebSocketFrame(frame.content().retain()));
			return;
		}
		if (!(frame instanceof TextWebSocketFrame)) {
			throw new UnsupportedOperationException(String.format("%s frame types not supported", frame.getClass()
					.getName()));
		}

		// todo [om] think about BinaryWebsocketFrame
		handleTextWebSocketFrameInternal((TextWebSocketFrame) frame, ctx);
	}

	/**
	 * Hook for subclasses to perform extra processing on the upgrade request.
	 *
	 * @return {@code true} to continue processing, {@code false} to abort
	 */
	private boolean additionalHttpRequestHandler(ChannelHandlerContext ctx, FullHttpRequest req) {
		// implement other HTTP request logic
		return true; // continue processing
	}

	// simple echo implementation
	private void handleTextWebSocketFrameInternal(TextWebSocketFrame frame, ChannelHandlerContext ctx) {
		if (logger.isTraceEnabled()) {
			logger.trace(String.format("%s received %s", ctx.channel(), frame.text()));
		}
		addTraceForFrame(frame, "text");
		ctx.channel().write(new TextWebSocketFrame("Echo: " + frame.text()));
	}

	/**
	 * Records trace information for a received frame in the trace repository.
	 * No-op when tracing is disabled.
	 */
	private void addTraceForFrame(WebSocketFrame frame, String type) {
		if (!this.traceEnabled) {
			// Skip building the trace map entirely when tracing is off;
			// the original allocated it unconditionally and only checked
			// the flag before the repository add.
			return;
		}
		Map<String, Object> trace = new LinkedHashMap<>();
		trace.put("type", type);
		trace.put("direction", "in");
		if (frame instanceof TextWebSocketFrame) {
			trace.put("payload", ((TextWebSocketFrame) frame).text());
		}
		this.websocketTraceRepository.add(trace);
	}

	private void sendHttpResponse(ChannelHandlerContext ctx, FullHttpRequest req, FullHttpResponse res) {
		// Generate an error page if response getStatus code is not OK (200).
		if (res.getStatus().code() != 200) {
			ByteBuf buf = Unpooled.copiedBuffer(res.getStatus().toString(), CharsetUtil.UTF_8);
			res.content().writeBytes(buf);
			buf.release();
			HttpHeaders.setContentLength(res, res.content().readableBytes());
		}

		// Send the response and close the connection if necessary.
		ChannelFuture f = ctx.channel().writeAndFlush(res);
		if (!HttpHeaders.isKeepAlive(req) || res.getStatus().code() != 200) {
			f.addListener(ChannelFutureListener.CLOSE);
		}
	}

	@Override
	public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
		// BUG FIX: the original also called cause.printStackTrace(),
		// duplicating the stack trace on stderr; logging it once through
		// the framework logger is sufficient.
		logger.error("Websocket error", cause);
		ctx.close();
	}

	private String getWebSocketLocation(FullHttpRequest req) {
		String location = req.headers().get(HOST) + properties.getPath();
		if (properties.isSsl()) {
			return "wss://" + location;
		}
		else {
			return "ws://" + location;
		}
	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.connector.jdbc.table;

import org.apache.flink.connector.jdbc.JdbcDataTestBase;
import org.apache.flink.connector.jdbc.JdbcTestFixture;
import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
import org.apache.flink.connector.jdbc.internal.options.JdbcOptions;
import org.apache.flink.connector.jdbc.split.JdbcGenericParameterValuesProvider;
import org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider;
import org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider;
import org.apache.flink.core.io.InputSplit;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;

import org.junit.After;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.io.Serializable;
import java.sql.ResultSet;
import java.util.Arrays;

import static org.apache.flink.connector.jdbc.JdbcTestFixture.DERBY_EBOOKSHOP_DB;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.INPUT_TABLE;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.SELECT_ALL_BOOKS;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.SELECT_ALL_BOOKS_SPLIT_BY_ID;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.SELECT_EMPTY;
import static org.apache.flink.connector.jdbc.JdbcTestFixture.TEST_DATA;

/**
 * Test suite for {@link JdbcRowDataInputFormat}.
 */
public class JdbcRowDataInputFormatTest extends JdbcDataTestBase {

	private JdbcRowDataInputFormat inputFormat;
	private static String[] fieldNames = new String[]{"id", "title", "author", "price", "qty"};
	private static DataType[] fieldDataTypes = new DataType[]{
		DataTypes.INT(),
		DataTypes.STRING(),
		DataTypes.STRING(),
		DataTypes.DOUBLE(),
		DataTypes.INT()};
	final JdbcDialect dialect = JdbcOptions.builder()
		.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
		.setTableName(INPUT_TABLE)
		.build()
		.getDialect();
	final RowType rowType = RowType.of(
		Arrays.stream(fieldDataTypes)
			.map(DataType::getLogicalType)
			.toArray(LogicalType[]::new),
		fieldNames);

	@After
	public void tearDown() throws IOException {
		// Close both the split-level and format-level resources of whatever
		// the test left open; nulling prevents double-close across tests.
		if (inputFormat != null) {
			inputFormat.close();
			inputFormat.closeInputFormat();
		}
		inputFormat = null;
	}

	@Test(expected = IllegalArgumentException.class)
	public void testUntypedRowInfo() throws IOException {
		// BUG FIX: this test was a byte-for-byte copy of testInvalidDriver
		// (bogus driver name), so the missing-row-converter path was never
		// exercised. Use the valid driver here so the expected
		// IllegalArgumentException can only come from the absent row
		// converter / type information.
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS)
			.build();
		inputFormat.openInputFormat();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testInvalidDriver() throws IOException {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername("org.apache.derby.jdbc.idontexist")
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS)
			.build();
		inputFormat.openInputFormat();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testInvalidURL() throws IOException {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl("jdbc:der:iamanerror:mory:ebookshop")
			.setQuery(SELECT_ALL_BOOKS)
			.build();
		inputFormat.openInputFormat();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testInvalidQuery() throws IOException {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery("iamnotsql")
			.build();
		inputFormat.openInputFormat();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testIncompleteConfiguration() throws IOException {
		// No DB URL: the builder itself must reject the configuration.
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setQuery(SELECT_ALL_BOOKS)
			.build();
	}

	@Test(expected = IllegalArgumentException.class)
	public void testInvalidFetchSize() {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS)
			.setFetchSize(-7)
			.build();
	}

	@Test
	public void testValidFetchSizeIntegerMin() {
		// Integer.MIN_VALUE is a legal fetch size: it is the MySQL-style
		// streaming-results sentinel and must not be rejected.
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS)
			.setFetchSize(Integer.MIN_VALUE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();
	}

	@Test
	public void testJdbcInputFormatWithoutParallelism() throws IOException {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();
		//this query does not exploit parallelism
		Assert.assertEquals(1, inputFormat.createInputSplits(1).length);
		inputFormat.openInputFormat();
		inputFormat.open(null);
		RowData row = new GenericRowData(5);
		int recordCount = 0;
		while (!inputFormat.reachedEnd()) {
			RowData next = inputFormat.nextRecord(row);

			assertEquals(TEST_DATA[recordCount], next);
			recordCount++;
		}
		inputFormat.close();
		inputFormat.closeInputFormat();
		Assert.assertEquals(TEST_DATA.length, recordCount);
	}

	@Test
	public void testJdbcInputFormatWithParallelismAndNumericColumnSplitting() throws IOException {
		final int fetchSize = 1;
		final long min = TEST_DATA[0].id;
		final long max = TEST_DATA[TEST_DATA.length - fetchSize].id;
		JdbcParameterValuesProvider paramProvider =
			new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
			.setParametersProvider(paramProvider)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();

		inputFormat.openInputFormat();
		InputSplit[] splits = inputFormat.createInputSplits(1);
		//this query exploit parallelism (1 split for every id)
		Assert.assertEquals(TEST_DATA.length, splits.length);
		int recordCount = 0;
		RowData row = new GenericRowData(5);
		for (InputSplit split : splits) {
			inputFormat.open(split);
			while (!inputFormat.reachedEnd()) {
				RowData next = inputFormat.nextRecord(row);

				assertEquals(TEST_DATA[recordCount], next);
				recordCount++;
			}
			inputFormat.close();
		}
		inputFormat.closeInputFormat();
		Assert.assertEquals(TEST_DATA.length, recordCount);
	}

	@Test
	public void testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting() throws IOException {
		final long min = TEST_DATA[0].id;
		final long max = TEST_DATA[TEST_DATA.length - 1].id;
		final long fetchSize = max + 1; //generate a single split
		JdbcParameterValuesProvider paramProvider =
			new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
			.setParametersProvider(paramProvider)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();

		inputFormat.openInputFormat();
		InputSplit[] splits = inputFormat.createInputSplits(1);
		//assert that a single split was generated
		Assert.assertEquals(1, splits.length);
		int recordCount = 0;
		RowData row = new GenericRowData(5);
		for (InputSplit split : splits) {
			inputFormat.open(split);
			while (!inputFormat.reachedEnd()) {
				RowData next = inputFormat.nextRecord(row);

				assertEquals(TEST_DATA[recordCount], next);
				recordCount++;
			}
			inputFormat.close();
		}
		inputFormat.closeInputFormat();
		Assert.assertEquals(TEST_DATA.length, recordCount);
	}

	@Test
	public void testJdbcInputFormatWithParallelismAndGenericSplitting() throws IOException {
		Serializable[][] queryParameters = new String[2][1];
		queryParameters[0] = new String[]{TEST_DATA[3].author};
		queryParameters[1] = new String[]{TEST_DATA[0].author};
		JdbcParameterValuesProvider paramProvider = new JdbcGenericParameterValuesProvider(queryParameters);
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR)
			.setParametersProvider(paramProvider)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();

		inputFormat.openInputFormat();
		InputSplit[] splits = inputFormat.createInputSplits(1);
		//this query exploit parallelism (1 split for every queryParameters row)
		Assert.assertEquals(queryParameters.length, splits.length);

		verifySplit(splits[0], TEST_DATA[3].id);
		verifySplit(splits[1], TEST_DATA[0].id + TEST_DATA[1].id);

		inputFormat.closeInputFormat();
	}

	/**
	 * Reads every row of the given split and checks it against TEST_DATA
	 * (ids start at 1001), asserting that the ids sum to the expected value.
	 */
	private void verifySplit(InputSplit split, int expectedIDSum) throws IOException {
		int sum = 0;

		RowData row = new GenericRowData(5);
		inputFormat.open(split);
		RowData.FieldGetter idFieldGetter = RowData.createFieldGetter(new IntType(), 0);
		while (!inputFormat.reachedEnd()) {
			row = inputFormat.nextRecord(row);

			int id = (int) idFieldGetter.getFieldOrNull(row);
			int testDataIndex = id - 1001;

			assertEquals(TEST_DATA[testDataIndex], row);
			sum += id;
		}

		Assert.assertEquals(expectedIDSum, sum);
	}

	@Test
	public void testEmptyResults() throws IOException {
		inputFormat = JdbcRowDataInputFormat.builder()
			.setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
			.setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
			.setQuery(SELECT_EMPTY)
			.setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
			.setRowConverter(dialect.getRowConverter(rowType))
			.build();

		try {
			inputFormat.openInputFormat();
			inputFormat.open(null);
			Assert.assertTrue(inputFormat.reachedEnd());
		} finally {
			inputFormat.close();
			inputFormat.closeInputFormat();
		}
	}

	/**
	 * Field-by-field comparison between an expected fixture entry and an
	 * actual RowData row, mapping SQL NULL to Java null for each column.
	 */
	private static void assertEquals(JdbcTestFixture.TestEntry expected, RowData actual) {
		Assert.assertEquals(expected.id, actual.isNullAt(0) ? null : Integer.valueOf(actual.getInt(0)));
		Assert.assertEquals(expected.title, actual.isNullAt(1) ? null : actual.getString(1).toString());
		Assert.assertEquals(expected.author, actual.isNullAt(2) ? null : actual.getString(2).toString());
		Assert.assertEquals(expected.price, actual.isNullAt(3) ? null : Double.valueOf(actual.getDouble(3)));
		Assert.assertEquals(expected.qty, actual.isNullAt(4) ? null : Integer.valueOf(actual.getInt(4)));
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.model.document.util; import junit.framework.TestCase; import org.waveprotocol.wave.model.document.MutableDocument; import org.waveprotocol.wave.model.document.operation.automaton.DocumentSchema; import org.waveprotocol.wave.model.document.operation.impl.AttributesImpl; import org.waveprotocol.wave.model.document.raw.impl.Element; import org.waveprotocol.wave.model.document.raw.impl.Node; import org.waveprotocol.wave.model.document.raw.impl.Text; import org.waveprotocol.wave.model.document.util.ContextProviders.TestDocumentContext; import org.waveprotocol.wave.model.document.util.LineContainers.RoundDirection; import org.waveprotocol.wave.model.document.util.LineContainers.Rounding; import org.waveprotocol.wave.model.schema.AbstractXmlSchemaConstraints; import java.util.Collections; import java.util.List; /** * @author danilatos@google.com (Daniel Danilatos) */ public class LineContainersTest extends TestCase { private final DocumentSchema SCHEMA = new AbstractXmlSchemaConstraints() { { addChildren(null, "body"); addChildren("body", "line", "input"); containsBlipText("body"); containsBlipText("input"); addRequiredInitial("body", Collections.singletonList("line")); 
addAttrWithValues("line", "t", "h1", "h2", "h3", "h4", "li"); } }; private TestDocumentContext<Node, Element, Text> cxt; private MutableDocument<Node, Element, Text> doc; @Override protected void setUp() throws Exception { LineContainers.setTopLevelContainerTagname("body"); } public void testTopLevelContainerTagName() { LineContainers.setTopLevelContainerTagname("blah"); assertEquals("blah", LineContainers.topLevelContainerTagname()); LineContainers.setTopLevelContainerTagname("body"); assertEquals("body", LineContainers.topLevelContainerTagname()); try { LineContainers.setTopLevelContainerTagname(null); fail("null should be rejected"); } catch (RuntimeException e) { // ok } } public void testWrappers() { assertEquals("<line></line>foo", LineContainers.debugLineWrap("foo")); assertEquals("<body></body>", LineContainers.debugContainerWrap()); assertEquals("<body><line></line>foo</body>", LineContainers.debugContainerWrap("foo")); assertEquals("<body><line></line>foo<line></line>bar</body>", LineContainers.debugContainerWrap("foo", "bar")); } private class LineTestState { { getDocWithoutSchema("<body><line/>abc<x>def<line id=\"bad\"/></x>." 
+ "<line id=\"2\"/><line id=\"3\"/>ghi</body>jkl"); } Element lc = DocHelper.getElementWithTagName(doc, "body"); Point<Node> beforeLine = Point.before(doc, DocHelper.getElementWithTagName(doc, "line")); Point<Node> afterLine = Point.after(doc, DocHelper.getElementWithTagName(doc, "line")); Point<Node> inAbc = Point.inText(afterLine.getNodeAfter(), 2); Element x = DocHelper.getElementWithTagName(doc, "x"); Point<Node> inX = Point.start(doc, x); Point<Node> afterX = Point.after(doc, x); Point<Node> beforeInvalidLine = Point.before(doc, DocHelper.findElementById(doc, "bad")); Point<Node> beforeLine2 = Point.before(doc, DocHelper.findElementById(doc, "2")); Point<Node> beforeLine3 = Point.before(doc, DocHelper.findElementById(doc, "3")); Point<Node> endLc = Point.<Node>end(lc); Point<Node> inGhi = Point.inText(endLc.getContainer().getLastChild(), 2); Point<Node> endGhi = Point.inText(endLc.getContainer().getLastChild(), 3); } public void testEndOfLineCheck() { LineTestState s = new LineTestState(); // Consider having this be satisfied: // assertFalse(LineContainers.isAtLineEnd(doc, s.beforeLine)); assertTrue(LineContainers.isAtLineEnd(doc, s.beforeLine2)); assertTrue(LineContainers.isAtLineEnd(doc, s.beforeLine3)); assertFalse(LineContainers.isAtLineEnd(doc, s.inAbc)); assertFalse(LineContainers.isAtLineEnd(doc, s.inX)); s.afterX = doc.deleteRange(s.afterX, s.beforeLine2).getSecond(); assertTrue(LineContainers.isAtLineEnd(doc, s.afterX)); assertFalse(LineContainers.isAtLineEnd(doc, s.inGhi)); assertTrue(LineContainers.isAtLineEnd(doc, s.endGhi)); } public void testStartOfLineCheck() { LineTestState s = new LineTestState(); assertTrue(LineContainers.isAtLineStart(doc, s.afterLine)); assertFalse(LineContainers.isAtLineStart(doc, s.beforeLine2)); assertTrue(LineContainers.isAtLineStart(doc, s.beforeLine3)); assertFalse(LineContainers.isAtLineStart(doc, s.inAbc)); assertFalse(LineContainers.isAtLineStart(doc, s.inX)); } public void testEmptyLineCheck() { LineTestState s = 
new LineTestState(); assertFalse(LineContainers.isAtEmptyLine(doc, s.afterLine)); assertFalse(LineContainers.isAtEmptyLine(doc, s.beforeLine2)); assertTrue(LineContainers.isAtEmptyLine(doc, s.beforeLine3)); assertFalse(LineContainers.isAtEmptyLine(doc, s.inAbc)); assertFalse(LineContainers.isAtEmptyLine(doc, s.inX)); } public void testRounding() { LineTestState s = new LineTestState(); // at node boundaries assertSame(s.beforeLine, LineContainers.roundLocation( doc, Rounding.NONE, s.beforeLine, RoundDirection.RIGHT)); assertSame(s.afterLine, LineContainers.roundLocation( doc, Rounding.NONE, s.afterLine, RoundDirection.RIGHT)); // in text assertSame(s.inAbc, LineContainers.roundLocation( doc, Rounding.NONE, s.inAbc, RoundDirection.RIGHT)); // in a nested element, check it does not jump out assertSame(s.inX, LineContainers.roundLocation( doc, Rounding.NONE, s.inX, RoundDirection.RIGHT)); try { LineContainers.roundLocation(doc, Rounding.WORD, s.afterLine, RoundDirection.RIGHT); fail("You forgot to write a unit test when implementing word rounding!"); } catch (UnsupportedOperationException e) { // ok } try { LineContainers.roundLocation(doc, Rounding.SENTENCE, s.afterLine, RoundDirection.RIGHT); fail("You forgot to write a unit test when implementing sentence rounding!"); } catch (UnsupportedOperationException e) { // ok } // just before the first line assertEquals(s.beforeLine, LineContainers.roundLocation( doc, Rounding.LINE, s.beforeLine, RoundDirection.RIGHT)); // just after the preceding line assertEquals(s.beforeLine2, LineContainers.roundLocation( doc, Rounding.LINE, s.afterLine, RoundDirection.RIGHT)); // in text assertEquals(s.beforeLine2, LineContainers.roundLocation( doc, Rounding.LINE, s.inAbc, RoundDirection.RIGHT)); // *does* jump out assertEquals(s.beforeLine2, LineContainers.roundLocation( doc, Rounding.LINE, s.inX, RoundDirection.RIGHT)); // ignore lines not direct children of the line container assertEquals(s.beforeLine2, LineContainers.roundLocation( 
doc, Rounding.LINE, s.beforeInvalidLine, RoundDirection.RIGHT)); // just before a line with preceding content assertEquals(s.beforeLine2, LineContainers.roundLocation( doc, Rounding.LINE, s.beforeLine2, RoundDirection.RIGHT)); // just before a line with an immediately preceeding line assertEquals(s.beforeLine3, LineContainers.roundLocation( doc, Rounding.LINE, s.beforeLine3, RoundDirection.RIGHT)); // inside text at the end assertEquals(s.endLc, LineContainers.roundLocation( doc, Rounding.LINE, s.inGhi, RoundDirection.RIGHT)); // just before the end assertEquals(s.endLc, LineContainers.roundLocation( doc, Rounding.LINE, s.endLc, RoundDirection.RIGHT)); // outside of line container is invalid assertNull(LineContainers.roundLocation( doc, Rounding.LINE, Point.before(doc, s.lc), RoundDirection.RIGHT)); assertNull(LineContainers.roundLocation( doc, Rounding.LINE, Point.after(doc, s.lc), RoundDirection.RIGHT)); assertNull(LineContainers.roundLocation( doc, Rounding.LINE, Point.inText(s.lc.getNextSibling(), 2), RoundDirection.RIGHT)); } public void testInsertLine() { LineTestState s; // at node boundaries s = new LineTestState(); checkInsertLine(s.beforeLine, Rounding.NONE, s.beforeLine); s = new LineTestState(); checkInsertLine(s.afterLine, Rounding.NONE, s.afterLine); // in text s = new LineTestState(); checkInsertLine(s.inAbc, Rounding.NONE, s.inAbc); // in a nested element, check it *does* jump out s = new LineTestState(); checkInsertLine(s.afterX, Rounding.NONE, s.inX); // just before the first line s = new LineTestState(); checkInsertLine(s.beforeLine, Rounding.LINE, s.beforeLine); // just after the preceding line s = new LineTestState(); checkInsertLine(s.beforeLine2, Rounding.LINE, s.afterLine); // in text s = new LineTestState(); checkInsertLine(s.beforeLine2, Rounding.LINE, s.inAbc); // *does* jump out s = new LineTestState(); checkInsertLine(s.beforeLine2, Rounding.LINE, s.inX); // ignore lines not direct children of the line container s = new LineTestState(); 
checkInsertLine(s.beforeLine2, Rounding.LINE, s.beforeInvalidLine); // just before a line with preceding content s = new LineTestState(); checkInsertLine(s.beforeLine2, Rounding.LINE, s.beforeLine2); // just before a line with an immediately preceeding line s = new LineTestState(); checkInsertLine(s.beforeLine3, Rounding.LINE, s.beforeLine3); // inside text at the end s = new LineTestState(); checkInsertLine(s.endLc, Rounding.LINE, s.inGhi); // just before the end s = new LineTestState(); checkInsertLine(s.endLc, Rounding.LINE, s.endLc); // append s = new LineTestState(); checkAppendLine(s.endLc, null); s = new LineTestState(); checkAppendLine(s.endLc, XmlStringBuilder.createText("blah").wrap("y")); getDocWithoutSchema(""); LineContainers.appendLine(doc, XmlStringBuilder.createText("blah").wrap("y")); assertEquals("<body><line></line><y>blah</y></body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithoutSchema("<x>abc</x>"); LineContainers.appendLine(doc, XmlStringBuilder.createText("blah").wrap("y")); assertEquals("<x>abc</x><body><line></line><y>blah</y></body>", XmlStringBuilder.innerXml(doc).toString()); // outside of line container is invalid try { s = new LineTestState(); LineContainers.insertLine(doc, Rounding.LINE, Point.before(doc, s.lc)); fail("Expected invalid location exception"); } catch (IllegalArgumentException iae) { // ok } try { s = new LineTestState(); LineContainers.insertLine(doc, Rounding.LINE, Point.after(doc, s.lc)); fail("Expected invalid location exception"); } catch (IllegalArgumentException iae) { // ok } try { s = new LineTestState(); LineContainers.insertLine(doc, Rounding.LINE, Point.inText(s.lc.getNextSibling(), 2)); fail("Expected invalid location exception"); } catch (IllegalArgumentException iae) { // ok } } public void testAppendLineWithAttributes() { getDocWithSchema(""); LineContainers.appendLine(doc, XmlStringBuilder.createText("hi"), new AttributesImpl("t", "h2")); assertEquals("h2", 
doc.getAttribute(DocHelper.getElementWithTagName(doc, "line"), "t")); getDocWithSchema("<body><line/>abc</body>"); LineContainers.appendLine(doc, XmlStringBuilder.createText("hi"), new AttributesImpl("id", "2", "t", "h2")); assertEquals("h2", doc.getAttribute(DocHelper.findElementById(doc, "2"), "t")); } public void testAppendObeysSchema() { getDocWithSchema(""); LineContainers.appendLine(doc, XmlStringBuilder.createText("hi")); } public void testInsertInto() { getDocWithSchema("<body><line/>abc</body>"); Point<Node> afterLine = Point.after(doc, DocHelper.getElementWithTagName(doc, "line")); LineContainers.insertInto(doc, afterLine, XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line><input>blah</input>abc</body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithSchema("<body><line/>abc</body>"); Point<Node> beforeLine = Point.before(doc, DocHelper.getElementWithTagName(doc, "line")); LineContainers.insertInto(doc, beforeLine, XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line><input>blah</input><line></line>abc</body>", XmlStringBuilder.innerXml(doc).toString()); } public void testInsertContentIntoLineStart() { getDocWithSchema("<body><line/>abc</body>"); LineContainers.insertContentIntoLineStart(doc, DocHelper.getElementWithTagName(doc, "line"), XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line><input>blah</input>abc</body>", XmlStringBuilder.innerXml(doc).toString()); } public void testInsertContentIntoLineEnd() { getDocWithSchema("<body><line/>abc</body>"); LineContainers.insertContentIntoLineEnd(doc, DocHelper.getElementWithTagName(doc, "line"), XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line>abc<input>blah</input></body>", XmlStringBuilder.innerXml(doc).toString()); } public void testAppendToLastLine() { getDocWithSchema("<body><line/>abc</body>"); LineContainers.appendToLastLine(doc, 
XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line>abc<input>blah</input></body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithoutSchema(""); LineContainers.appendToLastLine(doc, XmlStringBuilder.createText("blah").wrap("input")); assertEquals("<body><line></line><input>blah</input></body>", XmlStringBuilder.innerXml(doc).toString()); } public void testGetRelatedLineElement() { LineTestState s = new LineTestState(); Element line1 = DocHelper.getElementWithTagName(doc, "line"); Element line2 = DocHelper.findElementById(doc, "2"); Element line3 = DocHelper.findElementById(doc, "3"); getDocWithoutSchema("<lc><line/>abc<x>def<line id=\"bad\"/></x>." + "<line id=\"2\"/><line id=\"3\"/>ghi</lc>jkl"); assertNull(LineContainers.getRelatedLineElement(doc, Point.start(doc, s.lc))); assertNull(LineContainers.getRelatedLineElement(doc, s.beforeLine)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.afterLine)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.inAbc)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.inX)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.afterX)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.beforeInvalidLine)); assertSame(line1, LineContainers.getRelatedLineElement(doc, s.beforeLine2)); assertSame(line2, LineContainers.getRelatedLineElement(doc, s.beforeLine3)); assertSame(line3, LineContainers.getRelatedLineElement(doc, s.inGhi)); assertSame(line3, LineContainers.getRelatedLineElement(doc, Point.end((Node)s.lc))); } public void testGetLineRanges() { getDocWithSchema(""); List<Range> ranges = LineContainers.getLineRanges(doc); assertEquals(0, ranges.size()); getDocWithSchema("<body><line/></body>"); checkLineRanges(""); getDocWithSchema("<body><line/>abc</body>"); checkLineRanges("abc"); getDocWithSchema("<body><line/>abc<input/>def</body>"); checkLineRanges("abcdef"); 
getDocWithSchema("<body><line/>abc<input>_</input>def</body>"); checkLineRanges("abc_def"); getDocWithSchema("<body><line/>abc<line/>def</body>"); checkLineRanges("abc", "def"); getDocWithSchema("<body><line/>a day<line/> late and <line/>a dollar<line/> short</body>"); checkLineRanges("a day", " late and ", "a dollar", " short"); } public void testDeleteLine() { getDocWithSchema("<body><line/>foo</body>"); LineContainers.deleteLine(doc, DocHelper.getElementWithTagName(doc, "line")); assertEquals("<body><line></line></body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithoutSchema("<body><line><x/></line>foo</body>"); LineContainers.deleteLine(doc, DocHelper.getElementWithTagName(doc, "line")); assertEquals("<body><line></line></body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithSchema("<body><line/>foo<line/>bar</body>"); LineContainers.deleteLine(doc, DocHelper.getElementWithTagName(doc, "line")); assertEquals("<body><line></line>bar</body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithSchema("<body><line/>foo<line id=\"2\"/>bar<line/>baz</body>"); LineContainers.deleteLine(doc, DocHelper.findElementById(doc, "2")); assertEquals("<body><line></line>foo<line></line>baz</body>", XmlStringBuilder.innerXml(doc).toString()); getDocWithoutSchema("<x/><body><line/>foo</body>"); try { LineContainers.deleteLine(doc, DocHelper.getElementWithTagName(doc, "x")); fail("Did not reject non-line element"); } catch (IllegalArgumentException e) { // ok } assertEquals("<x></x><body><line></line>foo</body>", XmlStringBuilder.innerXml(doc).toString()); } private void checkLineRanges(String ... 
expectedLines) { List<Range> ranges = LineContainers.getLineRanges(doc); assertEquals(expectedLines.length, ranges.size()); int i = 0; for (Range r : ranges) { Point<Node> start = doc.locate(r.getStart()); Point<Node> end = doc.locate(r.getStart()); String expectedLine = expectedLines[i++]; assertEquals(expectedLine, DocHelper.getText(doc, doc, r.getStart(), r.getEnd())); } } // Delete this after 2009/09/15 public void testRejectsParagraphs() { getDocWithoutSchema("<p>blah</p>"); try { LineContainers.appendLine(doc, XmlStringBuilder.createText("yep")); fail("Did not reject paragraph doc"); } catch (IllegalArgumentException e) { // ok } } private void checkInsertLine(Point<Node> expectedLocation, Rounding rounding, Point<Node> location) { checkInsertLineish(expectedLocation, rounding, location, false, 0); } private void checkInsertLineish(Point<Node> expectedPoint, Rounding rounding, Point<Node> location, boolean useParagraphs, int add) { int expectedLocation = doc.getLocation(expectedPoint) + add; Element el = LineContainers.insertLine(doc, rounding, location); assertTrue(LineContainers.isLineElement(doc, el)); assertEquals(expectedLocation, doc.getLocation(el)); } private void checkAppendLine(Point<Node> expectedPoint, XmlStringBuilder content) { checkAppendLineish(expectedPoint, content, false); } private void checkAppendLineish(Point<Node> expectedPoint, XmlStringBuilder content, boolean useParagraphs) { int expectedLocation = doc.getLocation(expectedPoint); Element el = LineContainers.appendLine(doc, content); if (content == null) { content = XmlStringBuilder.createEmpty(); } assertTrue(LineContainers.isLineElement(doc, el)); XmlStringBuilderDoc<Node, Element, Text> siblingContent = XmlStringBuilder.createEmpty(doc); for (Node n = el.getNextSibling(); n != null; n = n.getNextSibling()) { siblingContent.appendNode(n); } assertEquals(content, siblingContent); assertEquals(expectedLocation, doc.getLocation(el)); } private MutableDocument<Node, Element, Text> 
getDocWithoutSchema(String innerXml) { cxt = ContextProviders.createTestPojoContext(innerXml, null, null, null, DocumentSchema.NO_SCHEMA_CONSTRAINTS); return doc = cxt.document(); } private MutableDocument<Node, Element, Text> getDocWithSchema(String innerXml) { cxt = ContextProviders.createTestPojoContext(innerXml, null, null, null, new DocumentSchema() { @Override public List<String> getRequiredInitialChildren(String typeOrNull) { return SCHEMA.getRequiredInitialChildren(typeOrNull); } @Override public boolean permitsAttribute(String type, String attributeName) { return (type.equals("line") && attributeName.equals("id")) || SCHEMA.permitsAttribute(type, attributeName); } @Override public boolean permitsAttribute(String type, String attributeName, String attributeValue) { return (type.equals("line") && attributeName.equals("id")) || SCHEMA.permitsAttribute(type, attributeName, attributeValue); } @Override public boolean permitsChild(String parentTypeOrNull, String childType) { return SCHEMA.permitsChild(parentTypeOrNull, childType); } @Override public PermittedCharacters permittedCharacters(String typeOrNull) { return SCHEMA.permittedCharacters(typeOrNull); } }); return doc = cxt.document(); } }
// Copyright (C) 2010 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.query.change;

import com.google.gerrit.reviewdb.Change;
import com.google.gerrit.reviewdb.PatchSet;
import com.google.gerrit.reviewdb.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.events.ChangeAttribute;
import com.google.gerrit.server.events.EventFactory;
import com.google.gerrit.server.events.PatchSetAttribute;
import com.google.gerrit.server.events.QueryStats;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryParseException;
import com.google.gson.Gson;
import com.google.gwtorm.client.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;

import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.List;

/**
 * Executes a change query on behalf of the current user and renders the
 * matching changes to an output stream, either as indented plain text or as
 * one JSON object per change.
 */
public class QueryProcessor {
  private static final Logger log =
      LoggerFactory.getLogger(QueryProcessor.class);

  /** Wire formats supported by {@link #setOutput(OutputStream, OutputFormat)}. */
  public static enum OutputFormat {
    TEXT, JSON;
  }

  private final Gson gson = new Gson();
  // NOTE(review): SimpleDateFormat is not thread-safe; this is safe only as
  // long as each QueryProcessor instance is used from a single thread.
  private final SimpleDateFormat sdf =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss zzz");

  private final EventFactory eventFactory;
  private final ChangeQueryBuilder queryBuilder;
  private final ChangeQueryRewriter queryRewriter;
  private final Provider<ReviewDb> db;

  // Upper bound on result size when the query itself carries no limit.
  private int defaultLimit = 500;
  private OutputFormat outputFormat = OutputFormat.TEXT;
  private boolean includePatchSets;
  private boolean includeCurrentPatchSet;
  private boolean includeApprovals;

  private OutputStream outputStream = DisabledOutputStream.INSTANCE;
  private PrintWriter out;

  @Inject
  QueryProcessor(EventFactory eventFactory,
      ChangeQueryBuilder.Factory queryBuilder, CurrentUser currentUser,
      ChangeQueryRewriter queryRewriter, Provider<ReviewDb> db) {
    this.eventFactory = eventFactory;
    this.queryBuilder = queryBuilder.create(currentUser);
    this.queryRewriter = queryRewriter;
    this.db = db;
  }

  /** Include the full patch set list on each emitted change. */
  public void setIncludePatchSets(boolean on) {
    includePatchSets = on;
  }

  /** Include only the current patch set on each emitted change. */
  public void setIncludeCurrentPatchSet(boolean on) {
    includeCurrentPatchSet = on;
  }

  /** Include approval (review score) details with the patch sets. */
  public void setIncludeApprovals(boolean on) {
    includeApprovals = on;
  }

  /** Direct all output produced by {@link #query(String)} to {@code out}. */
  public void setOutput(OutputStream out, OutputFormat fmt) {
    this.outputStream = out;
    this.outputFormat = fmt;
  }

  /**
   * Parse, rewrite and execute {@code queryString}, writing the matching
   * changes (newest sort key first, truncated at the effective limit) followed
   * by a stats record. Query errors are reported as an {@code ErrorMessage}
   * record instead of being thrown.
   *
   * @throws IOException the underlying output stream cannot be written.
   */
  public void query(String queryString) throws IOException {
    out = new PrintWriter( //
        new BufferedWriter( //
            new OutputStreamWriter(outputStream, "UTF-8")));
    try {
      try {
        final QueryStats stats = new QueryStats();
        stats.runTimeMilliseconds = System.currentTimeMillis();

        final Predicate<ChangeData> visibleToMe = queryBuilder.is_visible();
        Predicate<ChangeData> s = compileQuery(queryString, visibleToMe);
        List<ChangeData> results = new ArrayList<ChangeData>();
        HashSet<Change.Id> want = new HashSet<Change.Id>();
        for (ChangeData d : ((ChangeDataSource) s).read()) {
          if (d.hasChange()) {
            // Checking visibleToMe here should be unnecessary, the
            // query should have already performed it. But we don't
            // want to trust the query rewriter that much yet.
            //
            if (visibleToMe.match(d)) {
              results.add(d);
            }
          } else {
            want.add(d.getId());
          }
        }

        // Load any changes the source returned only by id, then filter
        // them by visibility ourselves since the query never saw them.
        if (!want.isEmpty()) {
          for (Change c : db.get().changes().get(want)) {
            ChangeData d = new ChangeData(c);
            if (visibleToMe.match(d)) {
              results.add(d);
            }
          }
        }

        // Sort descending by sort key, i.e. most recently updated first.
        Collections.sort(results, new Comparator<ChangeData>() {
          @Override
          public int compare(ChangeData a, ChangeData b) {
            return b.getChange().getSortKey().compareTo(
                a.getChange().getSortKey());
          }
        });

        int limit = limit(s);
        if (limit < results.size()) {
          results = results.subList(0, limit);
        }

        for (ChangeData d : results) {
          ChangeAttribute c = eventFactory.asChangeAttribute(d.getChange());
          eventFactory.extend(c, d.getChange());
          eventFactory.addTrackingIds(c, d.trackingIds(db));

          if (includePatchSets) {
            eventFactory.addPatchSets(c, d.patches(db),
                includeApprovals ? d.approvalsMap(db) : null);
          }

          if (includeCurrentPatchSet) {
            PatchSet current = d.currentPatchSet(db);
            if (current != null) {
              c.currentPatchSet = eventFactory.asPatchSetAttribute(current);
              eventFactory.addApprovals(c.currentPatchSet, //
                  d.approvalsFor(db, current.getId()));
            }
          }

          show(c);
        }

        stats.rowCount = results.size();
        stats.runTimeMilliseconds =
            System.currentTimeMillis() - stats.runTimeMilliseconds;
        show(stats);
      } catch (OrmException err) {
        log.error("Cannot execute query: " + queryString, err);

        ErrorMessage m = new ErrorMessage();
        m.message = "cannot query database";
        show(m);
      } catch (QueryParseException e) {
        ErrorMessage m = new ErrorMessage();
        m.message = e.getMessage();
        show(m);
      }
    } finally {
      try {
        out.flush();
      } finally {
        out = null;
      }
    }
  }

  /** Effective result limit: the query's own limit, or {@link #defaultLimit}. */
  private int limit(Predicate<ChangeData> s) {
    return queryBuilder.hasLimit(s) ? queryBuilder.getLimit(s) : defaultLimit;
  }

  /**
   * Parse {@code queryString}, attach default limit/sortkey/visibility
   * predicates when absent, and rewrite it into an executable
   * {@link ChangeDataSource}.
   *
   * @throws QueryParseException the query is malformed or cannot be rewritten
   *         into an executable source.
   */
  @SuppressWarnings("unchecked")
  private Predicate<ChangeData> compileQuery(String queryString,
      final Predicate<ChangeData> visibleToMe) throws QueryParseException {
    Predicate<ChangeData> q = queryBuilder.parse(queryString);
    if (!queryBuilder.hasLimit(q)) {
      q = Predicate.and(q, queryBuilder.limit(defaultLimit));
    }
    if (!queryBuilder.hasSortKey(q)) {
      // Scan from the most recent sort key when none is given.
      q = Predicate.and(q, queryBuilder.sortkey_before("z"));
    }
    q = Predicate.and(q, visibleToMe);

    Predicate<ChangeData> s = queryRewriter.rewrite(q);
    if (!(s instanceof ChangeDataSource)) {
      // Retry with an implicit status:open restriction, which often
      // allows the rewriter to pick an indexed execution plan.
      s = queryRewriter.rewrite(Predicate.and(queryBuilder.status_open(), q));
    }
    if (!(s instanceof ChangeDataSource)) {
      throw new QueryParseException("cannot execute query: " + s);
    }
    return s;
  }

  /** Emit one record in the configured output format. */
  private void show(Object data) {
    switch (outputFormat) {
      default:
      case TEXT:
        if (data instanceof ChangeAttribute) {
          out.print("change ");
          out.print(((ChangeAttribute) data).id);
          out.print("\n");
          showText(data, 1);
        } else {
          showText(data, 0);
        }
        out.print('\n');
        break;

      case JSON:
        out.print(gson.toJson(data));
        out.print('\n');
        break;
    }
  }

  /**
   * Reflectively print each accessible, non-null field of {@code data},
   * indented {@code depth} levels. Date fields (seconds since epoch) are
   * rendered through {@link #sdf}.
   */
  private void showText(Object data, int depth) {
    for (Field f : fieldsOf(data.getClass())) {
      Object val;
      try {
        val = f.get(data);
      } catch (IllegalArgumentException err) {
        continue;
      } catch (IllegalAccessException err) {
        // Non-public fields are silently skipped.
        continue;
      }
      if (val == null) {
        continue;
      }

      indent(depth);
      out.print(f.getName());
      out.print(":");
      if (val instanceof Long && isDateField(f.getName())) {
        out.print(' ');
        out.print(sdf.format(new Date(((Long) val) * 1000L)));
        out.print('\n');
      } else {
        showTextValue(val, depth);
      }
    }
  }

  private void indent(int depth) {
    for (int i = 0; i < depth; i++) {
      out.print("  ");
    }
  }

  /** Print a single field value: inline for primitives, nested otherwise. */
  @SuppressWarnings( {"cast", "unchecked"})
  private void showTextValue(Object value, int depth) {
    if (isPrimitive(value)) {
      out.print(' ');
      out.print(value);
      out.print('\n');

    } else if (value instanceof Collection) {
      out.print('\n');
      for (Object thing : ((Collection) value)) {
        if (isPrimitive(thing)) {
          out.print(' ');
          // Bug fix: print the element, not the whole collection.
          out.print(thing);
          out.print('\n');
        } else {
          showText(thing, depth + 1);
          out.print('\n');
        }
      }
    } else {
      out.print('\n');
      showText(value, depth + 1);
    }
  }

  @SuppressWarnings("unchecked")
  private static boolean isPrimitive(Object value) {
    return value instanceof String //
        || value instanceof Number //
        || value instanceof Boolean //
        || value instanceof Enum;
  }

  private static boolean isDateField(String name) {
    return "lastUpdated".equals(name) //
        || "grantedOn".equals(name);
  }

  /** All declared fields of {@code type} and its superclasses, supers first. */
  private List<Field> fieldsOf(Class<?> type) {
    List<Field> r = new ArrayList<Field>();
    if (type.getSuperclass() != null) {
      r.addAll(fieldsOf(type.getSuperclass()));
    }
    r.addAll(Arrays.asList(type.getDeclaredFields()));
    return r;
  }

  /** Record emitted in place of results when the query fails. */
  static class ErrorMessage {
    public final String type = "error";
    public String message;
  }
}
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.siddhi.core.managment; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.wso2.siddhi.core.ExecutionPlanRuntime; import org.wso2.siddhi.core.SiddhiManager; import org.wso2.siddhi.core.event.Event; import org.wso2.siddhi.core.exception.NoPersistenceStoreException; import org.wso2.siddhi.core.query.output.callback.QueryCallback; import org.wso2.siddhi.core.stream.input.InputHandler; import org.wso2.siddhi.core.util.EventPrinter; import org.wso2.siddhi.core.util.persistence.InMemoryPersistenceStore; import org.wso2.siddhi.core.util.persistence.PersistenceStore; public class PersistenceTestCase { static final Logger log = Logger.getLogger(PersistenceTestCase.class); private int count; private boolean eventArrived; private long firstValue; private long lastValue; @Before public void init() { count = 0; eventArrived = false; firstValue = 0; lastValue = 0; } @Test public void persistenceTest1() throws InterruptedException { log.info("persistence test 1 - window query"); PersistenceStore persistenceStore = new InMemoryPersistenceStore(); String revision; SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setPersistenceStore(persistenceStore); String executionPlan = "" + "@plan:name('Test') " + "" + "define stream StockStream ( symbol string, price float, volume int 
);" + "" + "@info(name = 'query1')" + "from StockStream[price>10]#window.length(10) " + "select symbol, price, sum(volume) as totalVol " + "insert into OutStream "; QueryCallback queryCallback = new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event inEvent : inEvents) { count++; Assert.assertTrue("IBM".equals(inEvent.getData(0)) || "WSO2".equals(inEvent.getData(0))); lastValue = (Long) inEvent.getData(2); } } }; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); InputHandler inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); Thread.sleep(100); Assert.assertTrue(eventArrived); Assert.assertEquals(2, count); //persisting Thread.sleep(500); revision = executionPlanRuntime.persist(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); //restarting execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); //loading executionPlanRuntime.restoreLastRevision(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); //shutdown execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); Assert.assertTrue(count <= 6); Assert.assertEquals(400, lastValue); Assert.assertEquals(true, eventArrived); } @Test public void persistenceTest2() throws InterruptedException { 
log.info("persistence test 2 - pattern count query"); PersistenceStore persistenceStore = new InMemoryPersistenceStore(); String revision; SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setPersistenceStore(persistenceStore); String executionPlan = "" + "@plan:name('Test') " + "" + "define stream Stream1 (symbol string, price float, volume int); " + "define stream Stream2 (symbol string, price float, volume int); " + "" + "@info(name = 'query1') " + "from e1=Stream1[price>20] <2:5> -> e2=Stream2[price>20] " + "select e1[0].price as price1_0, e1[1].price as price1_1, e1[2].price as price1_2, " + " e1[3].price as price1_3, e2.price as price2 " + "insert into OutputStream ;"; QueryCallback queryCallback = new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event inEvent : inEvents) { count++; Assert.assertArrayEquals(new Object[]{25.6f, 47.6f, null, null, 45.7f}, inEvent.getData()); } } }; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); InputHandler stream1 = executionPlanRuntime.getInputHandler("Stream1"); InputHandler stream2 = executionPlanRuntime.getInputHandler("Stream2"); executionPlanRuntime.start(); stream1.send(new Object[]{"WSO2", 25.6f, 100}); Thread.sleep(100); stream1.send(new Object[]{"GOOG", 47.6f, 100}); Thread.sleep(100); stream1.send(new Object[]{"GOOG", 13.7f, 100}); Thread.sleep(100); Assert.assertEquals("Number of success events", 0, count); Assert.assertEquals("Event arrived", false, eventArrived); //persisting Thread.sleep(500); revision = executionPlanRuntime.persist(); //restarting execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", 
queryCallback); stream1 = executionPlanRuntime.getInputHandler("Stream1"); stream2 = executionPlanRuntime.getInputHandler("Stream2"); executionPlanRuntime.start(); //loading executionPlanRuntime.restoreLastRevision(); stream2.send(new Object[]{"IBM", 45.7f, 100}); Thread.sleep(500); stream1.send(new Object[]{"GOOG", 47.8f, 100}); Thread.sleep(500); stream2.send(new Object[]{"IBM", 55.7f, 100}); Thread.sleep(500); //shutdown execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); Assert.assertEquals("Number of success events", 1, count); Assert.assertEquals("Event arrived", true, eventArrived); } @Test(expected = NoPersistenceStoreException.class) public void persistenceTest3() throws InterruptedException { log.info("persistence test 3 - no store defined"); String revision; SiddhiManager siddhiManager = new SiddhiManager(); String executionPlan = "" + "@plan:name('Test') " + "" + "define stream Stream1 (symbol string, price float, volume int); " + "define stream Stream2 (symbol string, price float, volume int); " + "" + "@info(name = 'query1') " + "from e1=Stream1[price>20] <2:5> -> e2=Stream2[price>20] " + "select e1[0].price as price1_0, e1[1].price as price1_1, e1[2].price as price1_2, " + " e1[3].price as price1_3, e2.price as price2 " + "insert into OutputStream ;"; QueryCallback queryCallback = new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event inEvent : inEvents) { count++; Assert.assertArrayEquals(new Object[]{25.6f, 47.6f, null, null, 45.7f}, inEvent.getData()); } } }; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); InputHandler stream1 = executionPlanRuntime.getInputHandler("Stream1"); InputHandler stream2 = executionPlanRuntime.getInputHandler("Stream2"); executionPlanRuntime.start(); 
stream1.send(new Object[]{"WSO2", 25.6f, 100}); Thread.sleep(100); stream1.send(new Object[]{"GOOG", 47.6f, 100}); Thread.sleep(100); stream1.send(new Object[]{"GOOG", 13.7f, 100}); Thread.sleep(100); Assert.assertEquals("Number of success events", 0, count); Assert.assertEquals("Event arrived", false, eventArrived); //persisting Thread.sleep(500); revision = executionPlanRuntime.persist(); //restarting execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); } @Test public void persistenceTest4() throws InterruptedException { log.info("persistence test 4 - window restart"); PersistenceStore persistenceStore = new InMemoryPersistenceStore(); String revision; SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setPersistenceStore(persistenceStore); String executionPlan = "" + "@plan:name('Test') " + "" + "define stream StockStream ( symbol string, price float, volume int );" + "" + "@info(name = 'query1')" + "from StockStream[price>10]#window.time(10 sec) " + "select symbol, price, sum(volume) as totalVol " + "insert into OutStream "; QueryCallback queryCallback = new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; for (Event inEvent : inEvents) { count++; Assert.assertTrue("IBM".equals(inEvent.getData(0)) || "WSO2".equals(inEvent.getData(0))); lastValue = (Long) inEvent.getData(2); } } }; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); InputHandler inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); Thread.sleep(100); Assert.assertTrue(eventArrived); Assert.assertEquals(2, count); //persisting Thread.sleep(500); revision = 
executionPlanRuntime.persist(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); //restarting execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); //loading executionPlanRuntime.restoreLastRevision(); //shutdown execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); Assert.assertEquals(4, count); Assert.assertEquals(400, lastValue); Assert.assertEquals(true, eventArrived); } @Test public void persistenceTest5() throws InterruptedException { log.info("persistence test 5 - window restart expired event "); PersistenceStore persistenceStore = new InMemoryPersistenceStore(); String revision; SiddhiManager siddhiManager = new SiddhiManager(); siddhiManager.setPersistenceStore(persistenceStore); String executionPlan = "" + "@plan:name('Test') " + "" + "define stream StockStream ( symbol string, price float, volume int );" + "" + "@info(name = 'query1')" + "from StockStream[price>10]#window.time(10 sec) " + "select symbol, price, sum(volume) as totalVol " + "insert all events into OutStream "; QueryCallback queryCallback = new QueryCallback() { @Override public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) { EventPrinter.print(timeStamp, inEvents, removeEvents); eventArrived = true; if (inEvents != null) { for (Event inEvent : inEvents) { count++; Assert.assertTrue("IBM".equals(inEvent.getData(0)) || "WSO2".equals(inEvent.getData(0))); firstValue = (Long) inEvent.getData(2); } } if (removeEvents != null) { for (Event removeEvent : removeEvents) { count++; lastValue = (Long) removeEvent.getData(2); } } } }; ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); 
executionPlanRuntime.addCallback("query1", queryCallback); InputHandler inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); Thread.sleep(100); Assert.assertTrue(eventArrived); Assert.assertEquals(2, count); //persisting Thread.sleep(500); revision = executionPlanRuntime.persist(); inputHandler.send(new Object[]{"IBM", 75.6f, 100}); Thread.sleep(10); inputHandler.send(new Object[]{"WSO2", 75.6f, 100}); //restarting execution plan Thread.sleep(500); executionPlanRuntime.shutdown(); executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan); executionPlanRuntime.addCallback("query1", queryCallback); inputHandler = executionPlanRuntime.getInputHandler("StockStream"); executionPlanRuntime.start(); //loading executionPlanRuntime.restoreLastRevision(); //shutdown execution plan Thread.sleep(15000); executionPlanRuntime.shutdown(); Assert.assertEquals(6, count); Assert.assertEquals(400, firstValue); Assert.assertEquals(0, lastValue); Assert.assertEquals(true, eventArrived); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @author Vadim L. Bogdanov
 * @version $Revision$
 */
package javax.swing;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.FlowLayout;
import java.awt.Frame;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsEnvironment;
import java.awt.IllegalComponentStateException;
import java.awt.KeyboardFocusManager;
import java.awt.LayoutManager;
import java.awt.Window;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.accessibility.AccessibleContext;
import javax.accessibility.AccessibleRole;

/**
 * Unit tests for {@link JWindow}: constructors, root-pane management,
 * root-pane-checking behavior, and the content/layered/glass pane accessors.
 */
public class JWindowTest extends SwingTestCase {
    /*
     * This class is used to test protected methods
     */
    // Exposes JWindow's protected members and records (via static flags) whether
    // createRootPane()/setRootPane() were invoked from the constructor.
    static private class TestWindow extends JWindow {
        private static final long serialVersionUID = 1L;

        // Static so the flags survive construction and can be read by tests;
        // reset between tests via initStaticVars().
        public static boolean createRootPaneCalled = false;

        public static boolean setRootPaneCalled = false;

        @Override
        public JRootPane createRootPane() {
            createRootPaneCalled = true;
            return super.createRootPane();
        }

        @Override
        public void setRootPane(final JRootPane root) {
            setRootPaneCalled = true;
            super.setRootPane(root);
        }

        @Override
        public void setRootPaneCheckingEnabled(final boolean enabled) {
            super.setRootPaneCheckingEnabled(enabled);
        }

        @Override
        public boolean isRootPaneCheckingEnabled() {
            return super.isRootPaneCheckingEnabled();
        }

        @Override
        public void addImpl(final Component comp, final Object constraints, final int index) {
            super.addImpl(comp, constraints, index);
        }

        // Resets the call-tracking flags; invoked from setUp() before each test.
        public static void initStaticVars() {
            createRootPaneCalled = false;
            setRootPaneCalled = false;
        }

        @Override
        public String paramString() {
            return super.paramString();
        }
    }

    /*
     * This class is used to test that some property is (or is not) a bound property
     */
    private class MyPropertyChangeListener implements PropertyChangeListener {
        // Set to true when any property-change event is received.
        public boolean ok;

        MyPropertyChangeListener() {
            ok = false;
        }

        public void propertyChange(final PropertyChangeEvent e) {
            ok = true;
        }
    }

    // Fresh window under test, recreated in setUp() for every test method.
    private JWindow window;

    /*
     * @see TestCase#setUp()
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        window = new JWindow();
        TestWindow.initStaticVars();
    }

    /*
     * @see TestCase#tearDown()
     */
    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Constructor for JWindowTest.
     * @param name
     */
    public JWindowTest(final String name) {
        super(name);
    }

    /*
     * Class under test for void JWindow()
     */
    public void testJWindow() {
        window = new JWindow();
        assertTrue("owner is not null", window.getOwner() != null);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        assertTrue(window.getContentPane().getLayout() instanceof BorderLayout);
    }

    /*
     * Class under test for void windowInit()
     */
    public void testWindowInit() {
        TestWindow window = new TestWindow();
        assertTrue("rootPaneCheckingEnabled is true", window.isRootPaneCheckingEnabled());
        assertTrue("layout is not null", window.getLayout() != null);
        assertTrue("rootPane is not null", window.getRootPane() != null);
        assertTrue("locale is set", window.getLocale() == JComponent.getDefaultLocale());
        assertTrue("rootPane.windowDecorationStyle is NONE", window.getRootPane()
                .getWindowDecorationStyle() == JRootPane.NONE);
        // test that defaultFocusTraversalPolicy is set
        assertTrue("focusTraversalPolicy is set correctly",
                window.getFocusTraversalPolicy() == KeyboardFocusManager
                        .getCurrentKeyboardFocusManager().getDefaultFocusTraversalPolicy());
        assertTrue("focusTraversalPolicy is set", window.isFocusTraversalPolicySet());
        assertTrue(window.isFocusCycleRoot());
        assertFalse(window.isFocusTraversalPolicyProvider());
    }

    /*
     * Class under test for
     *     void setRootPaneCheckingEnabled(boolean enabled)
     *     boolean isRootPaneCheckingEnabled()
     */
    public void testSetIsRootPaneCheckingEnabled() {
        TestWindow window = new TestWindow();
        assertTrue("rootPaneCheckingEnabled is true by default", window
                .isRootPaneCheckingEnabled());
        window.setRootPaneCheckingEnabled(false);
        assertFalse("rootPaneCheckingEnabled is set to false", window
                .isRootPaneCheckingEnabled());
    }

    /*
     * Class under test for void JWindow(Window, GraphicsConfiguration)
     */
    public void testJWindowWindowGraphicsConfiguration() {
        GraphicsConfiguration gc = GraphicsEnvironment.getLocalGraphicsEnvironment()
                .getDefaultScreenDevice().getDefaultConfiguration();
        Window owner = new JWindow();
        // test with valid owner and valid gc
        // would be nice to test non-default gc here
        window = new JWindow(owner, gc);
        assertTrue("owner is set", window.getOwner() == owner);
        assertTrue(window.getGraphicsConfiguration() == gc);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        // test with valid owner and gc == null
        window = new JWindow(owner, (GraphicsConfiguration) null);
        assertTrue("owner is set", window.getOwner() == owner);
        assertTrue(window.getGraphicsConfiguration() == gc);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        // test with owner == null and valid gc
        window = new JWindow(null, gc);
        assertTrue("owner is not null", window.getOwner() != null);
        assertTrue(window.getGraphicsConfiguration() == gc);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        // test with owner == null and gc == null
        window = new JWindow(null, null);
        assertTrue("owner is not null", window.getOwner() != null);
        assertTrue(window.getGraphicsConfiguration() == window.getOwner()
                .getGraphicsConfiguration());
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
    }

    /*
     * Class under test for void JWindow(Window)
     */
    public void testJWindowWindow() {
        Window owner = new JWindow();
        window = new JWindow(owner);
        // test with the correct owner
        assertTrue("owner is set", window.getOwner() == owner);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        // test with owner = null
        window = new JWindow((Window) null);
        assertTrue("owner is not null", window.getOwner() != null);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
    }

    /*
     * Class under test for void JWindow(GraphicsConfiguration)
     */
    public void testJWindowGraphicsConfiguration() {
        GraphicsConfiguration gc = GraphicsEnvironment.getLocalGraphicsEnvironment()
                .getDefaultScreenDevice().getDefaultConfiguration();
        // test with valid gc
        // would be nice to test non-default gc here
        window = new JWindow(gc);
        assertTrue("owner is not null", window.getOwner() != null);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        assertTrue(window.getGraphicsConfiguration() == gc);
        // test with gc == null
        window = new JWindow((GraphicsConfiguration) null);
        assertTrue("owner is not null", window.getOwner() != null);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        assertTrue(window.getGraphicsConfiguration() == gc);
    }

    /*
     * Class under test for void JWindow(Frame)
     */
    public void testJWindowFrame() {
        Frame owner = new Frame();
        window = new JWindow(owner);
        // test with the correct owner
        assertTrue("owner is set", window.getOwner() == owner);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
        // test with owner = null
        window = new JWindow((Frame) null);
        assertTrue("owner is not null", window.getOwner() != null);
        assertFalse("JWindow is invisible by default", window.isVisible());
        assertTrue(window.getLocale() == JComponent.getDefaultLocale());
        assertFalse("window is not focusable", window.isFocusableWindow());
    }

    /*
     * Class under test for void addImpl(Component, Object, int)
     */
    public void testAddImpl() {
        TestWindow window = new TestWindow();
        JComponent comp = new JPanel();
        // rootPaneCheckingEnabled is true, no exception since 1.5
        window.setRootPaneCheckingEnabled(true);
        boolean ok = false;
        try {
            window.addImpl(comp, null, 0);
        } catch (Error e) {
            ok = true;
        } finally {
            assertFalse("no exception", ok);
            assertTrue("The component is added to contentPane", comp.getParent() == window
                    .getContentPane());
        }
        // rootPaneCheckingEnabled is false, no exception
        window.setRootPaneCheckingEnabled(false);
        ok = false;
        try {
            window.addImpl(comp, null, 0);
        } catch (Error e) {
            ok = true;
        } finally {
            assertFalse("no exception", ok);
            assertTrue("the component is added to JWindow", comp.getParent() == window);
            assertTrue("index of the component is 0", window.getComponent(0) == comp);
        }
    }

    /*
     * Class under test for
     *     void setRootPane(JRootPane)
     *     JRootPane getRootPane()
     */
    public void testSetGetRootPane() {
        TestWindow window = new TestWindow();
        assertTrue("setRootPane() is called from the constructor", TestWindow.setRootPaneCalled);
        MyPropertyChangeListener listener = new MyPropertyChangeListener();
        window.addPropertyChangeListener("rootPane", listener);
        JRootPane root = new JRootPane();
        window.setRootPane(root);
        assertTrue(window.getRootPane() == root);
        assertFalse("rootPane is not a bound property", listener.ok);
        // test setting rootPane to null
        window.setRootPane(null);
        assertNull(window.getRootPane());
        assertTrue("rootPane is removed from the container", window.getComponentCount() == 0);
    }

    /*
     * Class under test for JRootPane createRootPane()
     */
    public void testCreateRootPane() {
        TestWindow frame = new TestWindow();
        assertTrue("createRootPane() is called from the constructor",
                TestWindow.createRootPaneCalled);
        JRootPane root = frame.createRootPane();
        assertTrue("createRootPane() cannot return null", root != null);
    }

    /*
     * Class under test for
     *     void setLayeredPane(JLayeredPane)
     *     JLayeredPane getLayeredPane()
     */
    public void testSetGetLayeredPane() {
        MyPropertyChangeListener listener = new MyPropertyChangeListener();
        window.addPropertyChangeListener("layeredPane", listener);
        JLayeredPane pane = new JLayeredPane();
        window.setLayeredPane(pane);
        assertTrue(window.getLayeredPane() == pane);
        assertFalse("layeredPane is not a bound property", listener.ok);
        // test throwing exception if the parameter is null
        boolean ok = false;
        try {
            window.setLayeredPane(null);
        } catch (IllegalComponentStateException e) {
            ok = true;
        } finally {
            assertTrue(ok);
        }
        // layeredPane cannot be null, even after setLayeredPane(null)
        assertTrue(window.getLayeredPane() != null);
        // setLayeredPane() method is not called by the constructor
        // (seems that there is an error in docs)
    }

    /*
     * Class under test for AccessibleContext getAccessibleContext()
     */
    public void testGetAccessibleContext() {
        AccessibleContext c = window.getAccessibleContext();
        assertTrue("instance of AccessibleJWindow", c instanceof JWindow.AccessibleJWindow);
        assertTrue("AccessibleRole is ok", c.getAccessibleRole() == AccessibleRole.WINDOW);
        assertNull("AccessibleName is ok", c.getAccessibleName());
        assertNull("AccessibleDescription is ok", c.getAccessibleDescription());
        assertTrue("AccessibleChildrenCount == 1", c.getAccessibleChildrenCount() == 1);
    }

    /*
     * Class under test for String paramString()
     */
    public void testParamString() {
        TestWindow window = new TestWindow();
        assertTrue("paramString() cannot return null", window.paramString() != null);
    }

    /*
     * Class under test for void setLayout(LayoutManager)
     */
    public void testSetLayout() {
        TestWindow window = new TestWindow();
        LayoutManager contentLayout = window.getContentPane().getLayout();
        LayoutManager frameLayout = window.getLayout();
        // rootPaneCheckingEnabled is true, no exception since 1.5
        window.setRootPaneCheckingEnabled(true);
        boolean ok = false;
        try {
            window.setLayout(new FlowLayout());
        } catch (Error e) {
            ok = true;
        } finally {
            assertFalse("no exception since 1.5", ok);
            assertTrue("contentPane layout is changed",
                    window.getContentPane().getLayout() != contentLayout);
            assertTrue("Window layout shouldn't be changed", window.getLayout() == frameLayout);
            window.getContentPane().setLayout(contentLayout);
        }
        // rootPaneCheckingEnabled is false
        window.setRootPaneCheckingEnabled(false);
        ok = false;
        try {
            window.setLayout(new FlowLayout());
        } catch (Error e) {
            ok = true;
        } finally {
            assertFalse("no exception", ok);
            assertTrue("contentPane layout shouldn't be changed", window.getContentPane()
                    .getLayout() == contentLayout);
            assertTrue("Window layout is changed)", window.getLayout() != frameLayout);
        }
    }

    /*
     * Class under test for void update(Graphics)
     */
    public void testUpdate() {
        // Note: painting code, cannot test
    }

    /*
     * Class under test for
     *     void setContentPane(Container)
     *     Container getContentPane()
     */
    public void testSetGetContentPane() {
        MyPropertyChangeListener listener = new MyPropertyChangeListener();
        window.addPropertyChangeListener("contentPane", listener);
        JPanel pane = new JPanel();
        window.setContentPane(pane);
        assertTrue(window.getContentPane() == pane);
        assertFalse("contentPane is not a bound property", listener.ok);
        // test throwing exception if the parameter is null
        boolean ok = false;
        try {
            window.setContentPane(null);
        } catch (IllegalComponentStateException e) {
            ok = true;
        } finally {
            assertTrue(ok);
        }
        // contentPane cannot be null, even after setContentPane(null)
        assertTrue(window.getContentPane() != null);
        // setContentPane() method is not called by the constructor
        // (seems that there is an error in docs)
    }

    /*
     * Class under test for
     *     void setGlassPane(Component)
     *     Component getGlassPane()
     */
    public void testSetGetGlassPane() {
        MyPropertyChangeListener listener = new MyPropertyChangeListener();
        window.addPropertyChangeListener("glassPane", listener);
        JPanel pane = new JPanel();
        window.setGlassPane(pane);
        assertTrue(window.getGlassPane() == pane);
        assertFalse("glassPane is not a bound property", listener.ok);
        // test throwing exception if the parameter is null
        boolean ok = false;
        try {
            window.setGlassPane(null);
        } catch (NullPointerException e) {
            ok = true;
        } finally {
            assertTrue(ok);
        }
        // glassPane cannot be null, even after setGlassPane(null)
        assertTrue(window.getGlassPane() != null);
        // setGlassPane() method is not called by the constructor
        // (seems that there is an error in docs)
    }

    /*
     * Class under test for void remove(Component)
     */
    public void testRemove() {
        JComponent comp = new JPanel();
        window.getContentPane().add(comp);
        assertTrue("label is in contentPane", window.isAncestorOf(comp));
        window.remove(comp);
        assertFalse("label is removed from contentPane", window.isAncestorOf(comp));
        ((JPanel) window.getGlassPane()).add(comp);
        window.remove(comp);
        assertTrue("label is not removed from glassPane", window.isAncestorOf(comp));
        // test removing directly from the container
        window.setRootPaneCheckingEnabled(false);
        window.add(comp, BorderLayout.EAST);
        assertTrue("added", comp.getParent() == window);
        window.remove(comp);
        assertTrue("not removed", comp.getParent() == window);
        // test removing null
        //        boolean ok = false;
        //        try {
        //            window.remove((Component)null);
        //        } catch (NullPointerException e) {
        //            ok = true;
        //        } finally {
        //            assertTrue("exception", ok);
        //        }
        // test removing rootPane
        assertTrue(window.isAncestorOf(window.getRootPane()));
        window.remove(window.getRootPane());
        // rootPane is removed from the container
        assertFalse(window.isAncestorOf(window.getRootPane()));
        // but getRootPane() still returns it
        assertTrue(window.getRootPane() != null);
    }
}
package com.example.photogalleryactivity;

import java.util.ArrayList;

import android.annotation.TargetApi;
import android.app.Activity;
import android.app.SearchManager;
import android.app.SearchableInfo;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.SearchView;

/**
 * Fragment that shows a grid of Flickr photos. Fetches the item list with an
 * {@link AsyncTask} (optionally filtered by the stored search query) and loads
 * thumbnails on a background {@code ThumbnailDowanloader} HandlerThread.
 */
public class PhotoGalleryFragment extends VisibleFragment {
    private static final String TAG = "PhotoGalleryFragment";

    GridView mGridView;
    ArrayList<GalleryItem> mItems;
    ThumbnailDowanloader<ImageView> mThumbnailThread;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Retain across configuration changes so the fetch task and the
        // downloader thread survive rotation.
        setRetainInstance(true);
        setHasOptionsMenu(true);
        updateItems();
        // Hand the downloader a Handler bound to the main thread so decoded
        // bitmaps can be delivered back to the UI.
        mThumbnailThread = new ThumbnailDowanloader<ImageView>(new Handler());
        mThumbnailThread.setListener(new ThumbnailDowanloader.Listener<ImageView>() {
            @Override
            public void onThumbnailDownloaded(ImageView imageView, Bitmap thumnail) {
                // Guard against delivering into a torn-down view hierarchy.
                if (isVisible()) {
                    imageView.setImageBitmap(thumnail);
                }
            }
        });
        mThumbnailThread.start();
        // Block until the downloader's Looper exists, so messages queued
        // immediately afterwards are not lost.
        mThumbnailThread.getLooper();
        Log.i(TAG, "Background thread started");
    }

    /**
     * Kicks off a background fetch of gallery items.
     *
     * @return the started task, so callers can observe or cancel it
     */
    public AsyncTask<Void, Void, ArrayList<GalleryItem>> updateItems() {
        return new FetchItemsTask().execute();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_photo_gallery, container, false);
        mGridView = (GridView) v.findViewById(R.id.gridView);
        setupAdapter();
        mGridView.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Open the tapped photo's page in the in-app viewer.
                GalleryItem item = mItems.get(position);
                Uri photoPageUri = Uri.parse(item.getPhotoPageUrl());
                Intent i = new Intent(getActivity(), PhotoPageActivity.class);
                i.setData(photoPageUri);
                startActivity(i);
            }
        });
        return v;
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Pending downloads target ImageViews that are about to be destroyed.
        mThumbnailThread.clearQueue();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mThumbnailThread.quit();
        Log.i(TAG, "Background thread destroyed");
    }

    /** Installs (or clears) the grid adapter; safe to call before the view exists. */
    void setupAdapter() {
        if (getActivity() == null || mGridView == null)
            return;
        if (mItems != null) {
            mGridView.setAdapter(new GalleryItemAdapter(mItems));
        } else {
            mGridView.setAdapter(null);
        }
    }

    /** Fetches gallery items off the main thread, honoring the saved search query. */
    private class FetchItemsTask extends AsyncTask<Void, Void, ArrayList<GalleryItem>> {
        @Override
        protected ArrayList<GalleryItem> doInBackground(Void... params) {
            Activity activity = getActivity();
            if (activity == null)
                return new ArrayList<GalleryItem>();
            String query = PreferenceManager.getDefaultSharedPreferences(activity)
                    .getString(FlickrFetchr.PREF_SEARCH_QUERY, null);
            if (query != null) {
                return new FlickrFetchr().search(query);
            } else {
                return new FlickrFetchr().fetchItems();
            }
        }

        @Override
        protected void onPostExecute(ArrayList<GalleryItem> items) {
            mItems = items;
            setupAdapter();
        }
    }

    /** Adapter that shows a placeholder and queues the real thumbnail download. */
    private class GalleryItemAdapter extends ArrayAdapter<GalleryItem> {
        public GalleryItemAdapter(ArrayList<GalleryItem> items) {
            super(getActivity(), 0, items);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = getActivity().getLayoutInflater()
                        .inflate(R.layout.gallery_item, parent, false);
            }
            ImageView imageView = (ImageView) convertView
                    .findViewById(R.id.gallery_item_imageView);
            // Placeholder until the background thread delivers the bitmap.
            imageView.setImageResource(R.drawable.brian_up_close);
            GalleryItem item = getItem(position);
            mThumbnailThread.queueThumbnail(imageView, item.getUrl());
            return convertView;
        }
    }

    @TargetApi(11)
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.fragment_photo_gallery, menu);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            // Wire the SearchView to this activity's searchable configuration.
            MenuItem searchItem = menu.findItem(R.id.menu_item_search);
            SearchView searchView = (SearchView) searchItem.getActionView();
            SearchManager searchManager = (SearchManager) getActivity()
                    .getSystemService(Context.SEARCH_SERVICE);
            ComponentName name = getActivity().getComponentName();
            SearchableInfo searchInfo = searchManager.getSearchableInfo(name);
            searchView.setSearchableInfo(searchInfo);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menu_item_search:
                getActivity().onSearchRequested();
                return true;
            case R.id.menu_item_clear:
                // apply() writes asynchronously; the original commit() did
                // synchronous disk I/O on the UI thread and its result was ignored.
                PreferenceManager.getDefaultSharedPreferences(getActivity())
                        .edit()
                        .putString(FlickrFetchr.PREF_SEARCH_QUERY, null)
                        .apply();
                return true;
            case R.id.menu_item_toggle_polling:
                boolean shouldStartAlarm = !PollService.isServiceAlaremOn(getActivity());
                PollService.setServiceAlarm(getActivity(), shouldStartAlarm);
                // Refresh the menu so the toggle label matches the new state.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
                    getActivity().invalidateOptionsMenu();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public void onPrepareOptionsMenu(Menu menu) {
        super.onPrepareOptionsMenu(menu);
        MenuItem toggleItem = menu.findItem(R.id.menu_item_toggle_polling);
        if (PollService.isServiceAlaremOn(getActivity())) {
            toggleItem.setTitle(R.string.stop_polling);
        } else {
            toggleItem.setTitle(R.string.start_polling);
        }
    }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.UnmodifiableIterator; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.*; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.gateway.Gateway; import org.elasticsearch.index.*; import org.elasticsearch.index.aliases.IndexAliasesServiceModule; import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import 
org.elasticsearch.index.cache.IndexCacheModule; import org.elasticsearch.index.codec.CodecModule; import org.elasticsearch.index.engine.IndexEngine; import org.elasticsearch.index.engine.IndexEngineModule; import org.elasticsearch.index.fielddata.IndexFieldDataModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.gateway.IndexGateway; import org.elasticsearch.index.gateway.IndexGatewayModule; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceModule; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.query.IndexQueryParserModule; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.service.IndexService; import org.elasticsearch.index.service.InternalIndexService; import org.elasticsearch.index.settings.IndexSettingsModule; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.service.IndexShard; import org.elasticsearch.index.similarity.SimilarityModule; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreModule; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.plugins.IndexPluginsModule; import org.elasticsearch.plugins.PluginsService; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import static 
com.google.common.collect.Maps.newHashMap; import static com.google.common.collect.Sets.newHashSet; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; /** * */ public class InternalIndicesService extends AbstractLifecycleComponent<IndicesService> implements IndicesService { private final InternalIndicesLifecycle indicesLifecycle; private final IndicesAnalysisService indicesAnalysisService; private final IndicesStore indicesStore; private final Injector injector; private final PluginsService pluginsService; private final Map<String, Injector> indicesInjectors = new HashMap<String, Injector>(); private volatile ImmutableMap<String, IndexService> indices = ImmutableMap.of(); private final OldShardsStats oldShardsStats = new OldShardsStats(); @Inject public InternalIndicesService(Settings settings, IndicesLifecycle indicesLifecycle, IndicesAnalysisService indicesAnalysisService, IndicesStore indicesStore, Injector injector) { super(settings); this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle; this.indicesAnalysisService = indicesAnalysisService; this.indicesStore = indicesStore; this.injector = injector; this.pluginsService = injector.getInstance(PluginsService.class); this.indicesLifecycle.addListener(oldShardsStats); } @Override protected void doStart() throws ElasticSearchException { } @Override protected void doStop() throws ElasticSearchException { ImmutableSet<String> indices = ImmutableSet.copyOf(this.indices.keySet()); final CountDownLatch latch = new CountDownLatch(indices.size()); final ExecutorService indicesStopExecutor = Executors.newFixedThreadPool(5, EsExecutors.daemonThreadFactory("indices_shutdown")); final ExecutorService shardsStopExecutor = 
Executors.newFixedThreadPool(5, EsExecutors.daemonThreadFactory("shards_shutdown")); for (final String index : indices) { indicesStopExecutor.execute(new Runnable() { @Override public void run() { try { removeIndex(index, "shutdown", shardsStopExecutor); } catch (Throwable e) { logger.warn("failed to delete index on stop [" + index + "]", e); } finally { latch.countDown(); } } }); } try { latch.await(); } catch (InterruptedException e) { // ignore } finally { shardsStopExecutor.shutdown(); indicesStopExecutor.shutdown(); } } @Override protected void doClose() throws ElasticSearchException { injector.getInstance(RecoverySettings.class).close(); indicesStore.close(); indicesAnalysisService.close(); } @Override public IndicesLifecycle indicesLifecycle() { return this.indicesLifecycle; } @Override public NodeIndicesStats stats(boolean includePrevious) { return stats(true, new CommonStatsFlags().all()); } @Override public NodeIndicesStats stats(boolean includePrevious, CommonStatsFlags flags) { CommonStats stats = new CommonStats(flags); if (includePrevious) { Flag[] setFlags = flags.getFlags(); for (Flag flag : setFlags) { switch (flag) { case Get: stats.get.add(oldShardsStats.getStats); break; case Indexing: stats.indexing.add(oldShardsStats.indexingStats); break; case Search: stats.search.add(oldShardsStats.searchStats); break; case Merge: stats.merge.add(oldShardsStats.mergeStats); break; case Refresh: stats.refresh.add(oldShardsStats.refreshStats); break; case Flush: stats.flush.add(oldShardsStats.flushStats); break; } } } for (IndexService indexService : indices.values()) { for (IndexShard indexShard : indexService) { CommonStats indexStas = new CommonStats(indexShard, flags); stats.add(indexStas); } } return new NodeIndicesStats(stats); } public ShardStats[] shardStats(CommonStatsFlags flags) { // TODO: Do we want to upgrade this to the IndicesService level List<ShardStats> shardStats = Lists.newArrayList(); for (String index : indices()) { IndexService 
// NOTE(review): this span starts mid-method — the enclosing stats-gathering loop begins
// before this chunk. Only comments are added below; code tokens are unchanged.
indexService = indexService(index);
if (indexService == null) {
    continue; // something changed, move along
}
// collect per-shard stats for every shard this index service currently holds
for (int shardId : indexService.shardIds()) {
    IndexShard indexShard = indexService.shard(shardId);
    if (indexShard == null) {
        continue; // shard removed concurrently; skip it
    }
    shardStats.add(new ShardStats(indexShard, flags));
}
}
return shardStats.toArray(new ShardStats[shardStats.size()]);
}

/**
 * Returns <tt>true</tt> if changes (adding / removing) indices, shards and so on are allowed.
 */
public boolean changesAllowed() {
    // we check on stop here since we defined stop when we delete the indices
    return lifecycle.started();
}

// Iterates over the currently-known index services (backed by an immutable map snapshot).
@Override
public UnmodifiableIterator<IndexService> iterator() {
    return indices.values().iterator();
}

// True if an index service with the given name is registered.
public boolean hasIndex(String index) {
    return indices.containsKey(index);
}

// Snapshot copy of the registered index names (mutations to the returned set are safe).
public Set<String> indices() {
    return newHashSet(indices.keySet());
}

// Returns the index service for the given name, or null if not registered.
public IndexService indexService(String index) {
    return indices.get(index);
}

/**
 * Like {@link #indexService(String)} but throws {@code IndexMissingException}
 * instead of returning {@code null} when the index is unknown.
 */
@Override
public IndexService indexServiceSafe(String index) throws IndexMissingException {
    IndexService indexService = indexService(index);
    if (indexService == null) {
        throw new IndexMissingException(new Index(index));
    }
    return indexService;
}

/**
 * Creates a new index: builds its settings, wires all per-index Guice modules into a
 * child injector, registers the injector and the resulting {@code IndexService}, and
 * publishes the updated (immutable) index map. Synchronized so concurrent create/remove
 * calls cannot interleave.
 */
public synchronized IndexService createIndex(String sIndexName, Settings settings, String localNodeId) throws ElasticSearchException {
    if (!lifecycle.started()) {
        throw new ElasticSearchIllegalStateException("Can't create an index [" + sIndexName + "], node is closed");
    }
    Index index = new Index(sIndexName);
    if (indicesInjectors.containsKey(index.name())) {
        throw new IndexAlreadyExistsException(index);
    }
    indicesLifecycle.beforeIndexCreated(index);
    logger.debug("creating Index [{}], shards [{}]/[{}]", sIndexName, settings.get(SETTING_NUMBER_OF_SHARDS), settings.get(SETTING_NUMBER_OF_REPLICAS));
    // layer the index-specific settings over the node-level settings
    Settings indexSettings = settingsBuilder()
            .put(this.settings)
            .put(settings)
            .classLoader(settings.getClassLoader())
            .build();
    ModulesBuilder modules = new ModulesBuilder();
    modules.add(new IndexNameModule(index));
    modules.add(new LocalNodeIdModule(localNodeId));
    modules.add(new IndexSettingsModule(index, indexSettings));
    modules.add(new IndexPluginsModule(indexSettings, pluginsService));
    modules.add(new IndexStoreModule(indexSettings));
    modules.add(new IndexEngineModule(indexSettings));
    modules.add(new AnalysisModule(indexSettings, indicesAnalysisService));
    modules.add(new SimilarityModule(indexSettings));
    modules.add(new IndexCacheModule(indexSettings));
    modules.add(new IndexFieldDataModule(indexSettings));
    modules.add(new CodecModule(indexSettings));
    modules.add(new MapperServiceModule());
    modules.add(new IndexQueryParserModule(indexSettings));
    modules.add(new IndexAliasesServiceModule());
    modules.add(new IndexGatewayModule(indexSettings, injector.getInstance(Gateway.class)));
    modules.add(new IndexModule(indexSettings));
    Injector indexInjector;
    try {
        indexInjector = modules.createChildInjector(injector);
    } catch (CreationException e) {
        // unwrap Guice's aggregate failure so the first real cause is reported
        throw new IndexCreationException(index, Injectors.getFirstErrorFailure(e));
    } catch (Throwable e) {
        throw new IndexCreationException(index, e);
    }
    indicesInjectors.put(index.name(), indexInjector);
    IndexService indexService = indexInjector.getInstance(IndexService.class);
    indicesLifecycle.afterIndexCreated(indexService);
    // copy-on-write publish of the new immutable index map
    indices = newMapBuilder(indices).put(index.name(), indexService).immutableMap();
    return indexService;
}

/** Removes an index without an explicit executor (close work runs inline). */
@Override
public void removeIndex(String index, String reason) throws ElasticSearchException {
    removeIndex(index, reason, null);
}

/**
 * Removes the given index: unregisters its injector, publishes the shrunken index map,
 * fires lifecycle callbacks, and closes all per-index components in dependency order.
 * Synchronized to serialize against {@code createIndex}.
 */
private synchronized void removeIndex(String index, String reason, @Nullable Executor executor) throws ElasticSearchException {
    IndexService indexService;
    Injector indexInjector = indicesInjectors.remove(index);
    if (indexInjector == null) {
        return; // index unknown (already removed); nothing to do
    }
    Map<String, IndexService> tmpMap = newHashMap(indices);
    indexService = tmpMap.remove(index);
    indices = ImmutableMap.copyOf(tmpMap);
    indicesLifecycle.beforeIndexClosed(indexService);
    // close plugin-provided index services first
    for (Class<? extends CloseableIndexComponent> closeable : pluginsService.indexServices()) {
        indexInjector.getInstance(closeable).close();
    }
    ((InternalIndexService) indexService).close(reason, executor);
    indexInjector.getInstance(IndexCache.class).close();
    indexInjector.getInstance(IndexFieldDataService.class).clear();
    indexInjector.getInstance(AnalysisService.class).close();
    indexInjector.getInstance(IndexEngine.class).close();
    indexInjector.getInstance(IndexGateway.class).close();
    indexInjector.getInstance(MapperService.class).close();
    indexInjector.getInstance(IndexQueryParserService.class).close();
    indexInjector.getInstance(IndexStore.class).close();
    // NOTE(review): this closes the field named `injector` (the parent), not `indexInjector`
    // — looks suspicious; confirm against upstream before changing.
    Injectors.close(injector);
    indicesLifecycle.afterIndexClosed(indexService.index());
}

/**
 * Accumulates stats of shards that have been closed, so node-level stats can still
 * include the work done by shards that no longer exist.
 */
static class OldShardsStats extends IndicesLifecycle.Listener {

    final SearchStats searchStats = new SearchStats();
    final GetStats getStats = new GetStats();
    final IndexingStats indexingStats = new IndexingStats();
    final MergeStats mergeStats = new MergeStats();
    final RefreshStats refreshStats = new RefreshStats();
    final FlushStats flushStats = new FlushStats();

    // Synchronized: shards may close concurrently and all add into the shared totals.
    @Override
    public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard) {
        if (indexShard != null) {
            getStats.add(indexShard.getStats());
            indexingStats.add(indexShard.indexingStats(), false);
            searchStats.add(indexShard.searchStats(), false);
            mergeStats.add(indexShard.mergeStats());
            refreshStats.add(indexShard.refreshStats());
            flushStats.add(indexShard.flushStats());
        }
    }
}
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.ml.datafeed; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.Node; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction; import org.elasticsearch.xpack.ml.annotations.AnnotationPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.RestartTimeInfo; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.junit.Before; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.test.NodeRoles.nonRemoteClusterClientNode; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class DatafeedJobBuilderTests extends ESTestCase { private Client client; private AnomalyDetectionAuditor auditor; private AnnotationPersister annotationPersister; private JobResultsPersister jobResultsPersister; private ClusterService clusterService; private DatafeedJobBuilder datafeedJobBuilder; @Before @SuppressWarnings("unchecked") public void init() { client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.settings()).thenReturn(Settings.EMPTY); auditor = mock(AnomalyDetectionAuditor.class); annotationPersister = mock(AnnotationPersister.class); jobResultsPersister = mock(JobResultsPersister.class); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(MachineLearning.DELAYED_DATA_CHECK_FREQ, MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING, ClusterService.USER_DEFINED_METADATA, ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING))); clusterService = new ClusterService( Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "test_node").build(), clusterSettings, threadPool ); datafeedJobBuilder = new DatafeedJobBuilder( client, xContentRegistry(), auditor, annotationPersister, System::currentTimeMillis, jobResultsPersister, Settings.EMPTY, clusterService ); } public void testBuild_GivenScrollDatafeedAndNewJob() throws Exception { DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeField("time"); Job.Builder jobBuilder = DatafeedRunnerTests.createDatafeedJob(); jobBuilder.setDataDescription(dataDescription); jobBuilder.setCreateTime(new Date()); 
DatafeedConfig.Builder datafeed = DatafeedRunnerTests.createDatafeedConfig("datafeed1", jobBuilder.getId()); AtomicBoolean wasHandlerCalled = new AtomicBoolean(false); ActionListener<DatafeedJob> datafeedJobHandler = ActionListener.wrap( datafeedJob -> { assertThat(datafeedJob.isRunning(), is(true)); assertThat(datafeedJob.isIsolated(), is(false)); assertThat(datafeedJob.lastEndTimeMs(), is(nullValue())); wasHandlerCalled.compareAndSet(false, true); }, e -> fail() ); DatafeedContext datafeedContext = DatafeedContext.builder() .setDatafeedConfig(datafeed.build()) .setJob(jobBuilder.build()) .setRestartTimeInfo(new RestartTimeInfo(null, null, false)) .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) .build(); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); datafeedJobBuilder.build(datafeedTask, datafeedContext, datafeedJobHandler); assertBusy(() -> wasHandlerCalled.get()); } public void testBuild_GivenScrollDatafeedAndOldJobWithLatestRecordTimestampAfterLatestBucket() throws Exception { DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeField("time"); Job.Builder jobBuilder = DatafeedRunnerTests.createDatafeedJob(); jobBuilder.setDataDescription(dataDescription); jobBuilder.setCreateTime(new Date()); DatafeedConfig.Builder datafeed = DatafeedRunnerTests.createDatafeedConfig("datafeed1", jobBuilder.getId()); AtomicBoolean wasHandlerCalled = new AtomicBoolean(false); ActionListener<DatafeedJob> datafeedJobHandler = ActionListener.wrap( datafeedJob -> { assertThat(datafeedJob.isRunning(), is(true)); assertThat(datafeedJob.isIsolated(), is(false)); assertThat(datafeedJob.lastEndTimeMs(), equalTo(7_200_000L)); wasHandlerCalled.compareAndSet(false, true); }, e -> fail() ); DatafeedContext datafeedContext = DatafeedContext.builder() .setDatafeedConfig(datafeed.build()) .setJob(jobBuilder.build()) .setRestartTimeInfo(new RestartTimeInfo(3_600_000L, 7_200_000L, false)) 
.setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) .build(); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); datafeedJobBuilder.build(datafeedTask, datafeedContext, datafeedJobHandler); assertBusy(() -> wasHandlerCalled.get()); } public void testBuild_GivenScrollDatafeedAndOldJobWithLatestBucketAfterLatestRecordTimestamp() throws Exception { DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeField("time"); Job.Builder jobBuilder = DatafeedRunnerTests.createDatafeedJob(); jobBuilder.setDataDescription(dataDescription); jobBuilder.setCreateTime(new Date()); DatafeedConfig.Builder datafeed = DatafeedRunnerTests.createDatafeedConfig("datafeed1", jobBuilder.getId()); AtomicBoolean wasHandlerCalled = new AtomicBoolean(false); ActionListener<DatafeedJob> datafeedJobHandler = ActionListener.wrap( datafeedJob -> { assertThat(datafeedJob.isRunning(), is(true)); assertThat(datafeedJob.isIsolated(), is(false)); assertThat(datafeedJob.lastEndTimeMs(), equalTo(7_199_999L)); wasHandlerCalled.compareAndSet(false, true); }, e -> fail() ); DatafeedContext datafeedContext = DatafeedContext.builder() .setDatafeedConfig(datafeed.build()) .setJob(jobBuilder.build()) .setRestartTimeInfo(new RestartTimeInfo(3_800_000L, 3_600_000L, false)) .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) .build(); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); datafeedJobBuilder.build(datafeedTask, datafeedContext, datafeedJobHandler); assertBusy(() -> wasHandlerCalled.get()); } public void testBuildGivenRemoteIndicesButNoRemoteSearching() throws Exception { datafeedJobBuilder = new DatafeedJobBuilder( client, xContentRegistry(), auditor, annotationPersister, System::currentTimeMillis, jobResultsPersister, nonRemoteClusterClientNode(), clusterService ); DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeField("time"); 
Job.Builder jobBuilder = DatafeedRunnerTests.createDatafeedJob(); jobBuilder.setDataDescription(dataDescription); jobBuilder.setCreateTime(new Date()); DatafeedConfig.Builder datafeed = DatafeedRunnerTests.createDatafeedConfig("datafeed1", jobBuilder.getId()); datafeed.setIndices(Collections.singletonList("remotecluster:index-*")); AtomicBoolean wasHandlerCalled = new AtomicBoolean(false); ActionListener<DatafeedJob> datafeedJobHandler = ActionListener.wrap( datafeedJob -> fail("datafeed builder did not fail when remote index was given and remote clusters were not enabled"), e -> { assertThat(e.getMessage(), equalTo(Messages.getMessage(Messages.DATAFEED_NEEDS_REMOTE_CLUSTER_SEARCH, "datafeed1", "[remotecluster:index-*]", "test_node"))); wasHandlerCalled.compareAndSet(false, true); } ); DatafeedContext datafeedContext = DatafeedContext.builder() .setDatafeedConfig(datafeed.build()) .setJob(jobBuilder.build()) .setRestartTimeInfo(new RestartTimeInfo(null, null, false)) .setTimingStats(new DatafeedTimingStats(jobBuilder.getId())) .build(); TransportStartDatafeedAction.DatafeedTask datafeedTask = newDatafeedTask("datafeed1"); datafeedJobBuilder.build(datafeedTask, datafeedContext, datafeedJobHandler); assertBusy(() -> wasHandlerCalled.get()); } private static TransportStartDatafeedAction.DatafeedTask newDatafeedTask(String datafeedId) { TransportStartDatafeedAction.DatafeedTask task = mock(TransportStartDatafeedAction.DatafeedTask.class); when(task.getDatafeedId()).thenReturn(datafeedId); TaskId parentTaskId = new TaskId(""); when(task.getParentTaskId()).thenReturn(parentTaskId); return task; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ambari.server.api.services.stackadvisor.commands;

import static java.util.Collections.emptyMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import org.apache.ambari.server.api.resources.ResourceInstance;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.api.services.Request;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequestException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
import org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.StackAdvisorData;
import org.apache.ambari.server.controller.internal.AmbariServerConfigurationHandler;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.commons.io.FileUtils;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import com.google.common.collect.Lists;

/**
 * StackAdvisorCommand unit tests.
 *
 * Exercises the invoke() pipeline (validate → gather host/service info → adjust →
 * run advisor script → read result file), JSON enrichment helpers
 * (populateStackHierarchy / populateAmbariServerInfo / populateAmbariConfiguration),
 * and the per-host info cache.
 */
@RunWith(MockitoJUnitRunner.class)
public class StackAdvisorCommandTest {
  // Canned /api/v1/hosts responses; %1$s / %2$s are host names.
  private static final String SINGLE_HOST_RESPONSE = "{\"href\":\"/api/v1/hosts?fields=Hosts/*&Hosts/host_name.in(%1$s)\",\"items\":[{\"href\":\"/api/v1/hosts/%1$s\",\"Hosts\":{\"host_name\":\"%1$s\"}}]}";
  private static final String TWO_HOST_RESPONSE = "{\"href\":\"/api/v1/hosts?fields=Hosts/*&Hosts/host_name.in(%1$s,%2$s)\",\"items\":[{\"href\":\"/api/v1/hosts/%1$s\",\"Hosts\":{\"host_name\":\"%1$s\"}},{\"href\":\"/api/v1/hosts/%2$s\",\"Hosts\":{\"host_name\":\"%2$s\"}}]}";

  // Managed manually in setUp/tearDown (not used as a @Rule).
  private TemporaryFolder temp = new TemporaryFolder();

  // Injected into TestStackAdvisorCommand; stubbed per-test for ambari-configuration cases.
  @Mock
  AmbariServerConfigurationHandler ambariServerConfigurationHandler;

  @Before
  public void setUp() throws IOException {
    temp.create();
  }

  @After
  public void tearDown() throws IOException {
    temp.delete();
  }

  /** A failing validate() must abort invoke() with StackAdvisorException. */
  @Test(expected = StackAdvisorException.class)
  public void testInvoke_invalidRequest_throwsException() throws StackAdvisorException {
    File recommendationsDir = temp.newFolder("recommendationDir");
    String recommendationsArtifactsLifetime = "1w";
    int requestId = 0;
    StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
    StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo, null));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
        .build();

    doThrow(new StackAdvisorException("message")).when(command).validate(request);
    command.invoke(request, ServiceInfo.ServiceAdvisorType.PYTHON);

    assertTrue(false); // unreachable; the expected exception must have been thrown above
  }

  /** A script-runner failure must surface as StackAdvisorException from invoke(). */
  @Test(expected = StackAdvisorException.class)
  public void testInvoke_saRunnerNotSucceed_throwsException() throws StackAdvisorException {
    File recommendationsDir = temp.newFolder("recommendationDir");
    String recommendationsArtifactsLifetime = "1w";
    int requestId = 0;
    StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
    StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo, null));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
        .build();

    String hostsJSON = "{\"hosts\" : \"localhost\"";
    String servicesJSON = "{\"services\" : \"HDFS\"";
    StackAdvisorData data = new StackAdvisorData(hostsJSON, servicesJSON);
    doReturn(hostsJSON).when(command).getHostsInformation(request);
    doReturn(servicesJSON).when(command).getServicesInformation(request);
    doReturn(data).when(command)
        .adjust(any(StackAdvisorData.class), any(StackAdvisorRequest.class));

    doThrow(new StackAdvisorRequestException("error")).when(saRunner)
        .runScript(any(ServiceInfo.ServiceAdvisorType.class), any(StackAdvisorCommandType.class), any(File.class));
    command.invoke(request, ServiceInfo.ServiceAdvisorType.PYTHON);

    assertTrue(false); // unreachable; the expected exception must have been thrown above
  }

  /** A WebApplicationException from adjust() must propagate unchanged. */
  @Test(expected = WebApplicationException.class)
  public void testInvoke_adjustThrowsException_throwsException() throws StackAdvisorException {
    File recommendationsDir = temp.newFolder("recommendationDir");
    String recommendationsArtifactsLifetime = "1w";
    int requestId = 0;
    StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
    StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo, null));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
        .build();

    doReturn("{\"hosts\" : \"localhost\"").when(command).getHostsInformation(request);
    doReturn("{\"services\" : \"HDFS\"").when(command).getServicesInformation(request);
    doThrow(new WebApplicationException()).when(command).adjust(any(StackAdvisorData.class),
        any(StackAdvisorRequest.class));

    doThrow(new StackAdvisorException("error")).when(saRunner)
        .runScript(any(ServiceInfo.ServiceAdvisorType.class), any(StackAdvisorCommandType.class), any(File.class));
    command.invoke(request, ServiceInfo.ServiceAdvisorType.PYTHON);

    assertTrue(false); // unreachable; the expected exception must have been thrown above
  }

  /**
   * Happy path: the (stubbed) script run writes a result JSON file, and invoke()
   * deserializes it into the TestResource with the request id set.
   */
  @Test
  public void testInvoke_success() throws StackAdvisorException {
    String expected = "success";
    final String testResourceString = String.format("{\"type\": \"%s\"}", expected);
    final File recommendationsDir = temp.newFolder("recommendationDir");
    String recommendationsArtifactsLifetime = "1w";
    final int requestId = 2;
    StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
    final StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(
        recommendationsDir, recommendationsArtifactsLifetime, ServiceInfo.ServiceAdvisorType.PYTHON, requestId, saRunner, metaInfo, null));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
        .build();

    String hostsJSON = "{\"hosts\" : \"localhost\"";
    String servicesJSON = "{\"services\" : \"HDFS\"";
    StackAdvisorData data = new StackAdvisorData(hostsJSON, servicesJSON);
    doReturn(hostsJSON).when(command).getHostsInformation(request);
    doReturn(servicesJSON).when(command).getServicesInformation(request);
    doReturn(data).when(command)
        .adjust(any(StackAdvisorData.class), any(StackAdvisorRequest.class));

    // Simulate a successful advisor script by writing the result file invoke() will read.
    doAnswer(invocation -> {
      String resultFilePath = String.format("%s/%s", requestId, command.getResultFileName());
      File resultFile = new File(recommendationsDir, resultFilePath);
      resultFile.getParentFile().mkdirs();
      FileUtils.writeStringToFile(resultFile, testResourceString, Charset.defaultCharset());
      return null;
    }).when(saRunner).runScript(any(ServiceInfo.ServiceAdvisorType.class),
        any(StackAdvisorCommandType.class), any(File.class));

    TestResource result = command.invoke(request, ServiceInfo.ServiceAdvisorType.PYTHON);

    assertEquals(expected, result.getType());
    assertEquals(requestId, result.getId());
  }

  /** Parent stack versions must be embedded under Versions/stack_hierarchy. */
  @Test
  public void testPopulateStackHierarchy() throws Exception {
    File file = mock(File.class);
    String recommendationsArtifactsLifetime = "1w";
    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
    StackAdvisorCommand<TestResource> cmd = new TestStackAdvisorCommand(file, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, 1, stackAdvisorRunner, ambariMetaInfo, null);
    ObjectNode objectNode = (ObjectNode) cmd.mapper.readTree("{\"Versions\": " +
        "{\"stack_name\": \"stack\", \"stack_version\":\"1.0.0\"}}");

    doReturn(Arrays.asList("0.9", "0.8")).when(ambariMetaInfo).getStackParentVersions("stack", "1.0.0");

    cmd.populateStackHierarchy(objectNode);

    JsonNode stackHierarchy = objectNode.get("Versions").get("stack_hierarchy");
    assertNotNull(stackHierarchy);
    JsonNode stackName = stackHierarchy.get("stack_name");
    assertNotNull(stackName);
    assertEquals("stack", stackName.asText());
    ArrayNode stackVersions = (ArrayNode) stackHierarchy.get("stack_versions");
    assertNotNull(stackVersions);
    assertEquals(2, stackVersions.size());
    Iterator<JsonNode> stackVersionsElements = stackVersions.getElements();
    assertEquals("0.9", stackVersionsElements.next().asText());
    assertEquals("0.8", stackVersionsElements.next().asText());
  }

  /** Ambari server properties must be embedded under "ambari-server-properties". */
  @Test
  public void testPopulateAmbariServerProperties() throws Exception {
    File file = mock(File.class);
    String recommendationsArtifactsLifetime = "1w";
    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
    StackAdvisorCommand<TestResource> cmd = new TestStackAdvisorCommand(file, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, 1, stackAdvisorRunner, ambariMetaInfo, null);
    ObjectNode objectNode = (ObjectNode) cmd.mapper.readTree("{\"Versions\": " +
        "{\"stack_name\": \"stack\", \"stack_version\":\"1.0.0\"}}");

    Map<String, String> props = Collections.singletonMap("a", "b");
    doReturn(props).when(ambariMetaInfo).getAmbariServerProperties();

    cmd.populateAmbariServerInfo(objectNode);

    JsonNode serverProperties = objectNode.get("ambari-server-properties");
    assertNotNull(serverProperties);
    assertEquals("b", serverProperties.iterator().next().getTextValue());
  }

  /** With no parent versions, stack_hierarchy is present but stack_versions is empty. */
  @Test
  public void testPopulateStackHierarchy_noParents() throws Exception {
    File file = mock(File.class);
    String recommendationsArtifactsLifetime = "1w";
    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
    StackAdvisorCommand<TestResource> cmd = new TestStackAdvisorCommand(file, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, 1, stackAdvisorRunner, ambariMetaInfo, null);
    ObjectNode objectNode = (ObjectNode) cmd.mapper.readTree("{\"Versions\": " +
        "{\"stack_name\": \"stack\", \"stack_version\":\"1.0.0\"}}");

    doReturn(Collections.emptyList()).when(ambariMetaInfo).getStackParentVersions("stack", "1.0.0");

    cmd.populateStackHierarchy(objectNode);

    JsonNode stackHierarchy = objectNode.get("Versions").get("stack_hierarchy");
    assertNotNull(stackHierarchy);
    JsonNode stackName = stackHierarchy.get("stack_name");
    assertNotNull(stackName);
    assertEquals("stack", stackName.asText());
    ArrayNode stackVersions = (ArrayNode) stackHierarchy.get("stack_versions");
    assertNotNull(stackVersions);
    assertEquals(0, stackVersions.size());
  }

  /** Stored LDAP configuration must be copied under "ambari-server-configuration". */
  @Test
  public void testPopulateLdapConfig() throws Exception {
    Map<String, Map<String, String>> storedConfig = Collections.singletonMap("ldap-configuration",
        Collections.singletonMap("authentication.ldap.secondaryUrl", "localhost:333"));
    TestStackAdvisorCommand command = new TestStackAdvisorCommand(
        temp.newFolder("recommendationDir"),
        "1w",
        ServiceInfo.ServiceAdvisorType.PYTHON,
        0,
        mock(StackAdvisorRunner.class),
        mock(AmbariMetaInfo.class),
        null);

    when(ambariServerConfigurationHandler.getConfigurations()).thenReturn(storedConfig);

    JsonNode servicesRootNode = json("{}");
    command.populateAmbariConfiguration((ObjectNode)servicesRootNode);

    JsonNode expectedLdapConfig = json("{\"ambari-server-configuration\":{\"ldap-configuration\":{\"authentication.ldap.secondaryUrl\":\"localhost:333\"}}}");

    assertEquals(expectedLdapConfig, servicesRootNode);
  }

  /** With no stored configuration, "ambari-server-configuration" is an empty object. */
  @Test
  public void testPopulateLdapConfig_NoConfigs() throws Exception {
    TestStackAdvisorCommand command = new TestStackAdvisorCommand(
        temp.newFolder("recommendationDir"),
        "1w",
        ServiceInfo.ServiceAdvisorType.PYTHON,
        0,
        mock(StackAdvisorRunner.class),
        mock(AmbariMetaInfo.class),
        null);

    when(ambariServerConfigurationHandler.getConfigurations()).thenReturn(emptyMap());

    JsonNode servicesRootNode = json("{}");
    command.populateAmbariConfiguration((ObjectNode)servicesRootNode);

    JsonNode expectedLdapConfig = json("{\"ambari-server-configuration\":{}}");

    assertEquals(expectedLdapConfig, servicesRootNode);
  }

  /**
   * Try to retrieve host info twice. The inner cache should be populated with first usage (with handleRequest method calling).
   * And for next info retrieving for the same host the saved value should be used.
   */
  @Test
  public void testHostInfoCachingSingleHost() throws StackAdvisorException {
    File file = mock(File.class);
    String recommendationsArtifactsLifetime = "1w";
    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
    Map<String, JsonNode> hostInfoCache = new HashMap<>();
    TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(file, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, 1, stackAdvisorRunner, ambariMetaInfo, hostInfoCache));

    // in second handling case NPE will be fired during result processing
    doReturn(Response.status(200).entity(String.format(SINGLE_HOST_RESPONSE, "hostName1")).build())
        .doReturn(null)
        .when(command).handleRequest(any(HttpHeaders.class), any(String.class), any(UriInfo.class), any(Request.Type.class),
        any(MediaType.class), any(ResourceInstance.class));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.
        forStack(null, null).ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS).
        forHosts(Arrays.asList(new String[]{"hostName1"})).
        build();
    String firstResponse = command.getHostsInformation(request);
    assertEquals(String.format(SINGLE_HOST_RESPONSE, "hostName1"), firstResponse);

    // second call must be served from the cache (handleRequest would return null → NPE)
    String secondResponse = command.getHostsInformation(request);
    assertEquals(String.format(SINGLE_HOST_RESPONSE, "hostName1"), secondResponse);
  }

  /**
   * Try to retrieve multiple hosts info twice. The inner cache should be populated with first usage for first host (hostName1).
   * For the next usage with the both hosts handleRequest should be used for second host only.
   */
  @Test
  public void testHostInfoCachingTwoHost() throws StackAdvisorException {
    File file = mock(File.class);
    String recommendationsArtifactsLifetime = "1w";
    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
    Map<String, JsonNode> hostInfoCache = new HashMap<>();
    TestStackAdvisorCommand command = spy(new TestStackAdvisorCommand(file, recommendationsArtifactsLifetime,
        ServiceInfo.ServiceAdvisorType.PYTHON, 1, stackAdvisorRunner, ambariMetaInfo, hostInfoCache));

    doReturn(Response.status(200).entity(String.format(SINGLE_HOST_RESPONSE, "hostName1")).build())
        .doReturn(Response.status(200).entity(String.format(SINGLE_HOST_RESPONSE, "hostName2")).build())
        .doReturn(null)
        .when(command).handleRequest(any(HttpHeaders.class), any(String.class), any(UriInfo.class), any(Request.Type.class),
        any(MediaType.class), any(ResourceInstance.class));

    StackAdvisorRequest request = StackAdvisorRequestBuilder.
        forStack(null, null).ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS).
        forHosts(Arrays.asList(new String[]{"hostName1"})).
        build();
    String firstResponse = command.getHostsInformation(request);
    assertEquals(String.format(SINGLE_HOST_RESPONSE, "hostName1"), firstResponse);

    request = StackAdvisorRequestBuilder.
        forStack(null, null).ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS).
        forHosts(Arrays.asList(new String[]{"hostName1", "hostName2"})).
        build();
    // hostName1 comes from cache; only hostName2 triggers a second handleRequest call
    String secondResponse = command.getHostsInformation(request);
    assertEquals(String.format(TWO_HOST_RESPONSE, "hostName1", "hostName2"), secondResponse);
  }

  // Serializes an object to a JSON string (helper; currently unused by the tests above).
  private static String jsonString(Object obj) throws IOException {
    return new ObjectMapper().writeValueAsString(obj);
  }

  // Converts an arbitrary object to a JsonNode tree.
  private static JsonNode json(Object obj) throws IOException {
    return new ObjectMapper().convertValue(obj, JsonNode.class);
  }

  // Parses a JSON string into a JsonNode tree.
  private static JsonNode json(String jsonString) throws IOException {
    return new ObjectMapper().readTree(jsonString);
  }

  // Varargs list helper (currently unused by the tests above).
  private static List<Object> list(Object... items) {
    return Lists.newArrayList(items);
  }

  // Builds a map from alternating key/value arguments (currently unused by the tests above).
  private static Map<String, Object> map(Object... keysAndValues) {
    Map<String, Object> map = new HashMap<>();
    Iterator<Object> iterator = Arrays.asList(keysAndValues).iterator();
    while (iterator.hasNext()) {
      map.put(iterator.next().toString(), iterator.next());
    }
    return map;
  }

  /**
   * Minimal concrete StackAdvisorCommand for testing: fixed result file name and
   * command type, no-op validate, pass-through updateResponse. Uses the outer
   * class's mocked AmbariServerConfigurationHandler.
   */
  class TestStackAdvisorCommand extends StackAdvisorCommand<TestResource> {
    public TestStackAdvisorCommand(File recommendationsDir, String recommendationsArtifactsLifetime, ServiceInfo.ServiceAdvisorType serviceAdvisorType,
                                   int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo, Map<String, JsonNode> hostInfoCache) {
      super(recommendationsDir, recommendationsArtifactsLifetime, serviceAdvisorType, requestId, saRunner, metaInfo,
          ambariServerConfigurationHandler, hostInfoCache);
    }

    @Override
    protected void validate(StackAdvisorRequest request) throws StackAdvisorException {
      // do nothing
    }

    @Override
    protected String getResultFileName() {
      return "result.json";
    }

    @Override
    protected StackAdvisorCommandType getCommandType() {
      return StackAdvisorCommandType.RECOMMEND_COMPONENT_LAYOUT;
    }

    @Override
    protected TestResource updateResponse(StackAdvisorRequest request, TestResource response) {
      return response;
    }

    // Overridden to ensure visibility in tests
    @Override
    public javax.ws.rs.core.Response handleRequest(HttpHeaders headers, String body,
                                                   UriInfo uriInfo, Request.Type requestType,
                                                   MediaType mediaType, ResourceInstance resource) {
      return super.handleRequest(headers, body, uriInfo, requestType, mediaType, resource);
    }
  }

  /** Simple response payload: a single "type" field deserialized from the result JSON. */
  public static class TestResource extends StackAdvisorResponse {
    @JsonProperty
    private String type;

    public String getType() {
      return type;
    }

    public void setType(String type) {
      this.type = type;
    }
  }
}
package sms; /* @(#)SerialParameters.java 1.5 98/07/17 SMI * * Copyright (c) 1998 Sun Microsystems, Inc. All Rights Reserved. * * Sun grants you ("Licensee") a non-exclusive, royalty free, license * to use, modify and redistribute this software in source and binary * code form, provided that i) this copyright notice and license appear * on all copies of the software; and ii) Licensee does not utilize the * software in a manner which is disparaging to Sun. * * This software is provided "AS IS," without a warranty of any kind. * ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, * INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND * ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY * LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE * SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS * BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, * INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, * HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING * OUT OF THE USE OF OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN * ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. * * This software is not designed or intended for use in on-line control * of aircraft, air traffic, aircraft navigation or aircraft * communications; or in the design, construction, operation or * maintenance of any nuclear facility. Licensee represents and * warrants that it will not use or redistribute the Software for such * purposes. */ import javax.comm.*; /** A class that stores parameters for serial ports. */ public class SerialParameters { private String portName; private int baudRate; private int flowControlIn; private int flowControlOut; private int databits; private int stopbits; private int parity; /** Default constructer. Sets parameters to no port, 9600 baud, no flow control, 8 data bits, 1 stop bit, no parity. 
*/ public SerialParameters () { this("", 9600, SerialPort.FLOWCONTROL_NONE, SerialPort.FLOWCONTROL_NONE, SerialPort.DATABITS_8, SerialPort.STOPBITS_1, SerialPort.PARITY_NONE ); } /** Paramaterized constructer. @param portName The name of the port. @param baudRate The baud rate. @param flowControlIn Type of flow control for receiving. @param flowControlOut Type of flow control for sending. @param databits The number of data bits. @param stopbits The number of stop bits. @param parity The type of parity. */ public SerialParameters(String portName, int baudRate, int flowControlIn, int flowControlOut, int databits, int stopbits, int parity) { this.portName = portName; this.baudRate = baudRate; this.flowControlIn = flowControlIn; this.flowControlOut = flowControlOut; this.databits = databits; this.stopbits = stopbits; this.parity = parity; } /** Sets port name. @param portName New port name. */ public void setPortName(String portName) { this.portName = portName; } /** Gets port name. @return Current port name. */ public String getPortName() { return portName; } /** Sets baud rate. @param baudRate New baud rate. */ public void setBaudRate(int baudRate) { this.baudRate = baudRate; } /** Sets baud rate. @param baudRate New baud rate. */ public void setBaudRate(String baudRate) { this.baudRate = Integer.parseInt(baudRate); } /** Gets baud rate as an <code>int</code>. @return Current baud rate. */ public int getBaudRate() { return baudRate; } /** Gets baud rate as a <code>String</code>. @return Current baud rate. */ public String getBaudRateString() { return Integer.toString(baudRate); } /** Sets flow control for reading. @param flowControlIn New flow control for reading type. */ public void setFlowControlIn(int flowControlIn) { this.flowControlIn = flowControlIn; } /** Sets flow control for reading. @param flowControlIn New flow control for reading type. 
*/ public void setFlowControlIn(String flowControlIn) { this.flowControlIn = stringToFlow(flowControlIn); } /** Gets flow control for reading as an <code>int</code>. @return Current flow control type. */ public int getFlowControlIn() { return flowControlIn; } /** Gets flow control for reading as a <code>String</code>. @return Current flow control type. */ public String getFlowControlInString() { return flowToString(flowControlIn); } /** Sets flow control for writing. @param flowControlIn New flow control for writing type. */ public void setFlowControlOut(int flowControlOut) { this.flowControlOut = flowControlOut; } /** Sets flow control for writing. @param flowControlIn New flow control for writing type. */ public void setFlowControlOut(String flowControlOut) { this.flowControlOut = stringToFlow(flowControlOut); } /** Gets flow control for writing as an <code>int</code>. @return Current flow control type. */ public int getFlowControlOut() { return flowControlOut; } /** Gets flow control for writing as a <code>String</code>. @return Current flow control type. */ public String getFlowControlOutString() { return flowToString(flowControlOut); } /** Sets data bits. @param databits New data bits setting. */ public void setDatabits(int databits) { this.databits = databits; } /** Sets data bits. @param databits New data bits setting. */ public void setDatabits(String databits) { if (databits.equals("5")) { this.databits = SerialPort.DATABITS_5; } if (databits.equals("6")) { this.databits = SerialPort.DATABITS_6; } if (databits.equals("7")) { this.databits = SerialPort.DATABITS_7; } if (databits.equals("8")) { this.databits = SerialPort.DATABITS_8; } } /** Gets data bits as an <code>int</code>. @return Current data bits setting. */ public int getDatabits() { return databits; } /** Gets data bits as a <code>String</code>. @return Current data bits setting. 
*/ public String getDatabitsString() { switch(databits) { case SerialPort.DATABITS_5: return "5"; case SerialPort.DATABITS_6: return "6"; case SerialPort.DATABITS_7: return "7"; case SerialPort.DATABITS_8: return "8"; default: return "8"; } } /** Sets stop bits. @param stopbits New stop bits setting. */ public void setStopbits(int stopbits) { this.stopbits = stopbits; } /** Sets stop bits. @param stopbits New stop bits setting. */ public void setStopbits(String stopbits) { if (stopbits.equals("1")) { this.stopbits = SerialPort.STOPBITS_1; } if (stopbits.equals("1.5")) { this.stopbits = SerialPort.STOPBITS_1_5; } if (stopbits.equals("2")) { this.stopbits = SerialPort.STOPBITS_2; } } /** Gets stop bits setting as an <code>int</code>. @return Current stop bits setting. */ public int getStopbits() { return stopbits; } /** Gets stop bits setting as a <code>String</code>. @return Current stop bits setting. */ public String getStopbitsString() { switch(stopbits) { case SerialPort.STOPBITS_1: return "1"; case SerialPort.STOPBITS_1_5: return "1.5"; case SerialPort.STOPBITS_2: return "2"; default: return "1"; } } /** Sets parity setting. @param parity New parity setting. */ public void setParity(int parity) { this.parity = parity; } /** Sets parity setting. @param parity New parity setting. */ public void setParity(String parity) { if (parity.equals("None")) { this.parity = SerialPort.PARITY_NONE; } if (parity.equals("Even")) { this.parity = SerialPort.PARITY_EVEN; } if (parity.equals("Odd")) { this.parity = SerialPort.PARITY_ODD; } } /** Gets parity setting as an <code>int</code>. @return Current parity setting. */ public int getParity() { return parity; } /** Gets parity setting as a <code>String</code>. @return Current parity setting. 
*/ public String getParityString() { switch(parity) { case SerialPort.PARITY_NONE: return "None"; case SerialPort.PARITY_EVEN: return "Even"; case SerialPort.PARITY_ODD: return "Odd"; default: return "None"; } } /** Converts a <code>String</code> describing a flow control type to an <code>int</code> type defined in <code>SerialPort</code>. @param flowControl A <code>string</code> describing a flow control type. @return An <code>int</code> describing a flow control type. */ private int stringToFlow(String flowControl) { if (flowControl.equals("None")) { return SerialPort.FLOWCONTROL_NONE; } if (flowControl.equals("Xon/Xoff Out")) { return SerialPort.FLOWCONTROL_XONXOFF_OUT; } if (flowControl.equals("Xon/Xoff In")) { return SerialPort.FLOWCONTROL_XONXOFF_IN; } if (flowControl.equals("RTS/CTS In")) { return SerialPort.FLOWCONTROL_RTSCTS_IN; } if (flowControl.equals("RTS/CTS Out")) { return SerialPort.FLOWCONTROL_RTSCTS_OUT; } return SerialPort.FLOWCONTROL_NONE; } /** Converts an <code>int</code> describing a flow control type to a <code>String</code> describing a flow control type. @param flowControl An <code>int</code> describing a flow control type. @return A <code>String</code> describing a flow control type. */ String flowToString(int flowControl) { switch(flowControl) { case SerialPort.FLOWCONTROL_NONE: return "None"; case SerialPort.FLOWCONTROL_XONXOFF_OUT: return "Xon/Xoff Out"; case SerialPort.FLOWCONTROL_XONXOFF_IN: return "Xon/Xoff In"; case SerialPort.FLOWCONTROL_RTSCTS_IN: return "RTS/CTS In"; case SerialPort.FLOWCONTROL_RTSCTS_OUT: return "RTS/CTS Out"; default: return "None"; } } }
package net.floodlightcontroller.debugevent.web;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.floodlightcontroller.debugevent.IDebugEventService.DebugEventInfo;
import net.floodlightcontroller.debugevent.IDebugEventService.EventType;

import org.restlet.resource.Get;
import org.restlet.resource.Post;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Web interface for Debug Events
 *
 * @author Saurav
 */
public class DebugEventResource extends DebugEventResourceBase {
    protected static Logger logger =
            LoggerFactory.getLogger(DebugEventResource.class);

    /**
     * The output JSON model that contains the counter information
     */
    public static class DebugEventInfoOutput {
        /**
         * JSON view of a single registered event plus its recorded history.
         * Non-static on purpose: instances are created via
         * {@code output.new DEInfo(...)}.
         */
        protected class DEInfo {
            private final boolean enabled;
            private final int bufferCapacity;
            private final EventType eventType;
            private final String eventDesc;
            private final String eventName;
            private final String moduleName;
            private final String[] metaData;
            private final List<Map<String, String>> eventHistory;

            DEInfo(DebugEventInfo dei) {
                this.moduleName = dei.getEventInfo().getModuleName();
                this.eventName = dei.getEventInfo().getEventName();
                this.eventDesc = dei.getEventInfo().getEventDesc();
                this.metaData = dei.getEventInfo().getMetaData();
                this.enabled = dei.getEventInfo().isEnabled();
                this.eventType = dei.getEventInfo().getEtype();
                this.bufferCapacity = dei.getEventInfo().getBufferCapacity();
                this.eventHistory = dei.getEvents();
            }

            public boolean isEnabled() {
                return enabled;
            }

            public int getBufferCapacity() {
                return bufferCapacity;
            }

            public String getEventDesc() {
                return eventDesc;
            }

            public String getEventName() {
                return eventName;
            }

            public String getModuleName() {
                return moduleName;
            }

            public String[] getMetaData() {
                return metaData;
            }

            public EventType getEventType() {
                return eventType;
            }

            public List<Map<String, String>> getEventHistory() {
                return eventHistory;
            }
        }

        // Exactly one of eventMap/names is populated, depending on whether
        // the request asked for event data or for a listing of names.
        public Map<String, DEInfo> eventMap = null;
        public List<String> names = null;
        public String error = null;

        DebugEventInfoOutput(boolean getList) {
            // Listing requests never carry event data, so the map is only
            // allocated for data requests.
            if (!getList) {
                eventMap = new HashMap<String, DEInfo>();
            }
        }

        public Map<String, DEInfo> getEventMap() {
            return eventMap;
        }

        public List<String> getNames() {
            return names;
        }

        public String getError() {
            return error;
        }
    }

    public enum Option {
        ALL, ONE_MODULE, ONE_MODULE_EVENT, ERROR_BAD_MODULE_NAME,
        ERROR_BAD_PARAM, ERROR_BAD_MODULE_EVENT_NAME
    }

    public static class DebugEventPost {
        public Boolean reset;

        public Boolean getReset() {
            return reset;
        }

        public void setReset(Boolean reset) {
            this.reset = reset;
        }
    }

    public static class ResetOutput {
        String error = null;

        public String getError() {
            return error;
        }

        public void setError(String error) {
            this.error = error;
        }
    }

    /**
     * Reset events.
     *
     * If using curl:
     * curl -X POST -d {\"reset\":true} -H "Content-Type: application/json" URL
     * where URL must be in one of the following forms for resetting registered events:
     * "http://{controller-hostname}:8080/wm/debugevent/
     * "http://{controller-hostname}:8080/wm/debugevent/{param1}
     * "http://{controller-hostname}:8080/wm/debugevent/{param1}/{param2}
     *
     * Omitting {param1} (or passing 'all') resets every registered event.
     * Passing a module name alone resets all of that module's events;
     * adding {param2} resets that single event of the module.
     */
    @Post
    public ResetOutput postHandler(DebugEventPost postData) {
        ResetOutput result = new ResetOutput();
        String moduleParam = (String) getRequestAttributes().get("param1");
        String eventParam = (String) getRequestAttributes().get("param2");

        if (postData.getReset() == null || !postData.getReset()) {
            // Nothing requested; return an empty (error-free) response.
            return result;
        }

        Option selection;
        if (moduleParam == null || moduleParam.equals("all")) {
            selection = Option.ALL;
        } else if (eventParam == null) {
            // Module-wide reset: only valid for a registered module.
            selection = debugEvent.containsModuleName(moduleParam)
                    ? Option.ONE_MODULE
                    : Option.ERROR_BAD_MODULE_NAME;
        } else {
            // differentiate between disabled and non-existing events
            selection = debugEvent.containsModuleEventName(moduleParam, eventParam)
                    ? Option.ONE_MODULE_EVENT
                    : Option.ERROR_BAD_MODULE_EVENT_NAME;
        }

        switch (selection) {
            case ALL:
                debugEvent.resetAllEvents();
                break;
            case ONE_MODULE:
                debugEvent.resetAllModuleEvents(moduleParam);
                break;
            case ONE_MODULE_EVENT:
                debugEvent.resetSingleEvent(moduleParam, eventParam);
                break;
            case ERROR_BAD_MODULE_NAME:
                result.error = "Module name has no corresponding registered events";
                break;
            case ERROR_BAD_MODULE_EVENT_NAME:
                result.error = "Event not registered";
                break;
            case ERROR_BAD_PARAM:
                result.error = "Bad param";
        }
        return result;
    }

    /**
     * Return the debug event data for the GET rest-api call.
     *
     * With no {param1} the list of registered module names is returned;
     * with only {param1} (a module name) that module's event names are
     * returned; with {param1}/{param2} the history of one specific event is
     * returned. "all" is disallowed for either param. An optional ?last=N
     * query parameter limits the history to the most recent N entries.
     */
    @Get("json")
    public DebugEventInfoOutput handleEventInfoQuery() {
        DebugEventInfoOutput output;

        // Parse optional ?last=N; absent or N < 1 means "no limit".
        String lastStr = getQueryValue("last");
        int limit = Integer.MAX_VALUE;
        try {
            if (lastStr != null) {
                limit = Integer.parseInt(lastStr);
            }
            if (limit < 1) {
                limit = Integer.MAX_VALUE;
            }
        } catch (NumberFormatException nfe) {
            output = new DebugEventInfoOutput(false);
            output.error = "Expected an integer requesting last X events;" +
                           " received " + lastStr;
            return output;
        }

        String moduleParam = (String) getRequestAttributes().get("param1");
        String eventParam = (String) getRequestAttributes().get("param2");

        if (moduleParam == null) {
            // No module given: list all registered module names.
            return listEvents(new DebugEventInfoOutput(true));
        } else if (moduleParam.equals("all")) {
            output = new DebugEventInfoOutput(false);
            output.error = "Cannot retrieve all events - please select a specific event";
            return output;
        }

        Option selection = Option.ERROR_BAD_PARAM;
        if (eventParam == null) {
            if (debugEvent.containsModuleName(moduleParam)) {
                // Module given without an event: list its event names.
                return listEvents(moduleParam, new DebugEventInfoOutput(true));
            }
            selection = Option.ERROR_BAD_MODULE_NAME;
        } else if (eventParam.equals("all")) {
            output = new DebugEventInfoOutput(false);
            output.error = "Cannot retrieve all events - please select a specific event";
            return output;
        } else {
            // differentiate between disabled and non-existing events
            selection = debugEvent.containsModuleEventName(moduleParam, eventParam)
                    ? Option.ONE_MODULE_EVENT
                    : Option.ERROR_BAD_MODULE_EVENT_NAME;
        }

        output = new DebugEventInfoOutput(false);
        switch (selection) {
            case ONE_MODULE:
                populateEvents(debugEvent.getModuleEventHistory(moduleParam),
                               output);
                break;
            case ONE_MODULE_EVENT:
                populateSingleEvent(
                        debugEvent.getSingleEventHistory(moduleParam, eventParam,
                                                         limit),
                        output);
                break;
            case ERROR_BAD_MODULE_NAME:
                output.error = "Module name has no corresponding registered events";
                break;
            case ERROR_BAD_MODULE_EVENT_NAME:
                output.error = "Event not registered";
                break;
            case ERROR_BAD_PARAM:
            default:
                output.error = "Bad param";
        }
        return output;
    }

    // Fills output.names with all registered module names.
    private DebugEventInfoOutput listEvents(DebugEventInfoOutput output) {
        output.names = debugEvent.getModuleList();
        return output;
    }

    // Fills output.names with the event names registered for one module.
    private DebugEventInfoOutput listEvents(String moduleName,
                                            DebugEventInfoOutput output) {
        output.names = debugEvent.getModuleEventList(moduleName);
        return output;
    }

    // Adds one event's history to the output map; a null history is a no-op.
    private void populateSingleEvent(DebugEventInfo singleEventHistory,
                                     DebugEventInfoOutput output) {
        if (singleEventHistory == null) {
            return;
        }
        output.eventMap.put(
                singleEventHistory.getEventInfo().getModuleEventName(),
                output.new DEInfo(singleEventHistory));
    }

    // Adds every event history in the list; a null list is a no-op.
    private void populateEvents(List<DebugEventInfo> eventHistory,
                                DebugEventInfoOutput output) {
        if (eventHistory == null) {
            return;
        }
        for (DebugEventInfo de : eventHistory) {
            populateSingleEvent(de, output);
        }
    }
}
/**
 * Package: MAG - VistA Imaging
 * WARNING: Per VHA Directive 2004-038, this routine should not be modified.
 * Date Created: October 19, 2005
 * Site Name: Washington OI Field Office, Silver Spring, MD
 * Developer: VHAISWPETERB
 * Description:
 *
 *  ;; +--------------------------------------------------------------------+
 *  ;; Property of the US Government.
 *  ;; No permission to copy or redistribute this software is given.
 *  ;; Use of unreleased versions of this software requires the user
 *  ;; to execute a written test agreement with the VistA Imaging
 *  ;; Development Office of the Department of Veterans Affairs,
 *  ;; telephone (301) 734-0100.
 *  ;;
 *  ;; The Food and Drug Administration classifies this software as
 *  ;; a Class II medical device. As such, it may not be changed
 *  ;; in any way. Modifications to this software may result in an
 *  ;; adulterated medical device under 21CFR820, the use of which
 *  ;; is considered to be a violation of US Federal Statutes.
 *  ;; +--------------------------------------------------------------------+
 */
package gov.va.med.imaging.dicom.common.interfaces;

import gov.va.med.imaging.exchange.business.dicom.DicomAE;
import gov.va.med.imaging.exchange.business.dicom.DicomCorrectInfo;
import gov.va.med.imaging.exchange.business.dicom.DicomMap;
import gov.va.med.imaging.exchange.business.dicom.DicomRequestParameters;
import gov.va.med.imaging.exchange.business.dicom.InstanceFile;
import gov.va.med.imaging.exchange.business.dicom.InstrumentConfig;
import gov.va.med.imaging.exchange.business.dicom.PatientRef;
import gov.va.med.imaging.exchange.business.dicom.PatientStudyInfo;
import gov.va.med.imaging.exchange.business.dicom.ProcedureRef;
import gov.va.med.imaging.exchange.business.dicom.SOPInstance;
import gov.va.med.imaging.exchange.business.dicom.Series;
import gov.va.med.imaging.exchange.business.dicom.StorageCommitElement;
import gov.va.med.imaging.exchange.business.dicom.Study;
import gov.va.med.imaging.exchange.business.dicom.exceptions.DicomException;
import gov.va.med.imaging.exchange.business.dicom.exceptions.IODViolationException;
import gov.va.med.imaging.exchange.business.dicom.exceptions.IllegalQueryDataException;
import gov.va.med.imaging.exchange.business.dicom.exceptions.UnknownSOPClassException;
import gov.va.med.imaging.exchange.business.dicom.exceptions.ValidateIODException;
import gov.va.med.imaging.exchange.business.dicom.exceptions.ValidateVRException;
import gov.va.med.imaging.exchange.business.dicom.rdsr.Dose;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

/**
 * Toolkit-neutral facade over a DICOM dataset. Implementations wrap a
 * vendor-specific dataset object and expose element access, business-object
 * extraction, validation, and query/move translation without leaking the
 * toolkit type into callers.
 *
 * @author William Peterson
 */
public interface IDicomDataSet {

    /**
     * Return (unwrap) the toolkit specific DicomDataSet.
     *
     * @return Object that must be cast to the toolkit-specific implementation.
     */
    public Object getDicomDataSet();

    /**
     * Set (wrap) the toolkit specific DicomDataSet.
     *
     * @param dicomDataSet passed as an Object; implementations cast it to the
     *                     toolkit-specific type.
     */
    public void setDicomDataSet(Object dicomDataSet);

    /**
     * Get the generic DicomElement for the given DICOM tag.
     *
     * @param dicomTag represented as a string, i.e. "0010,0010".
     * @return IDicomElement
     * @throws DicomException if the method fails to access the DICOM tag in the dataset.
     */
    public IDicomElement getDicomElement(String dicomTag) throws DicomException;

    /**
     * Get the value of an element, optionally nested inside a sequence.
     *
     * @param dicomTagName  the element tag, or the sequence tag when
     *                      {@code dicomTag2Name} is supplied, i.e. "0010,0010".
     * @param dicomTag2Name the tag of the element within the sequence, i.e. "0010,0010".
     * @return the element value as a String.
     * @throws DicomException if the method fails to access the DICOM tag in the dataset.
     */
    public String getDicomElementValue(String dicomTagName, String dicomTag2Name)
            throws DicomException;

    /**
     * Remove the DicomElement for the given DICOM tag, including when the
     * element is within a sequence.
     *
     * @param dicomTagName  the element tag (or enclosing sequence tag), i.e. "0010,0010".
     * @param dicomTag2Name the tag of the element within the sequence, i.e. "0010,0010".
     * @return true if the DicomElement was removed successfully.
     * @throws DicomException if the method fails to access the DICOM tag in the dataset.
     */
    public boolean removeDicomElement(String dicomTagName, String dicomTag2Name)
            throws DicomException;

    /**
     * Insert a DicomElement for a given DICOM tag, including when the element
     * is within a sequence.
     *
     * @param dicomTagName  the element tag (or enclosing sequence tag), i.e. "0010,0010".
     * @param dicomTag2Name the tag of the element within the sequence, i.e. "0010,0010".
     * @param value         value for this specific DICOM tag, as a string.
     * @return IDicomElement
     * @throws DicomException if the method fails to access the DICOM tag in the dataset.
     */
    public IDicomElement insertDicomElement(String dicomTagName, String dicomTag2Name,
            String value) throws DicomException;

    /**
     * Insert a new value and record the old one for a given DICOM tag,
     * including when the element is within a sequence. Insertion occurs only
     * if the new value differs from the existing value in the dataset, or the
     * {@code ignoreNew} flag is set to true. If insertion occurred, the old
     * value is recorded in an "original" DICOM sequence.
     *
     * @param tag       the element tag (or enclosing sequence tag), i.e. "0010,0010".
     * @param tag2      the tag of the element within the sequence, i.e. "0010,0010".
     * @param value     value for this specific DICOM tag, as a string.
     * @param ignoreNew if set, inserts the new and records the old value
     *                  regardless of matching.
     * @return true if insertion and recording occurred.
     */
    public boolean insertAndRecordNewValue(String tag, String tag2, String value,
            boolean ignoreNew);

    /**
     * Create and return a PatientRef business object using the values
     * contained in the DicomElements of this DicomDataSet.
     *
     * @return PatientRef
     */
    public PatientRef getPatientRef();

    /**
     * Create and return a ProcedureRef business object using the values
     * contained in the DicomElements of this DicomDataSet.
     *
     * @param instrument the instrument configuration that received the dataset.
     * @return ProcedureRef
     */
    public ProcedureRef getProcedureRef(InstrumentConfig instrument);

    /**
     * Create and return a Study business object using the values contained in
     * the DicomElements of this DicomDataSet.
     *
     * @return Study
     */
    public Study getStudy();

    /**
     * Create and return a Series business object using the values contained in
     * the DicomElements of this DicomDataSet.
     *
     * @param dicomAE    DICOM Application Entity object.
     * @param instrument an instrument from the instrument.dic file.
     * @return Series
     */
    public Series getSeries(DicomAE dicomAE, InstrumentConfig instrument);

    /**
     * Create and return a SOPInstance business object using the values
     * contained in the DicomElements of this DicomDataSet.
     *
     * @return SOPInstance
     */
    public SOPInstance getSOPInstance();

    /**
     * Create and return an InstanceFile business object using the values
     * contained in the DicomElements of this DicomDataSet.
     *
     * @return InstanceFile
     */
    public InstanceFile getInstanceFile();

    /**
     * Return the Study Instance UID.
     *
     * @return String representing the Study Instance UID.
     */
    public String getStudyInstanceUID();

    /**
     * Set the Study Instance UID.
     *
     * @param studyInstanceUID the new Study Instance UID.
     */
    public void setStudyInstanceUID(String studyInstanceUID);

    /**
     * Return the Series Instance UID.
     *
     * @return String representing the Series Instance UID.
     */
    public String getSeriesInstanceUID();

    /**
     * Set the Series Instance UID.
     *
     * @param seriesInstanceUID the new Series Instance UID.
     */
    public void setSeriesInstanceUID(String seriesInstanceUID);

    /**
     * Return the SOP Instance UID.
     *
     * @return String representing the SOP Instance UID.
     */
    public String getSOPInstanceUID();

    /**
     * Set the SOP Instance UID.
     *
     * @param sopInstanceUID the new SOP Instance UID.
     */
    public void setSOPInstanceUID(String sopInstanceUID);

    /**
     * NOTE(review): the original javadoc said "Set the Study Instance UID",
     * but this method takes a value and returns a boolean; the exact contract
     * (set-and-report vs. compare) is not evident from this declaration —
     * confirm against implementations before relying on it.
     *
     * @param studyInstanceUID a Study Instance UID value.
     * @return boolean
     */
    public boolean getStudyInstanceUID(String studyInstanceUID);

    /**
     * NOTE(review): see {@link #getStudyInstanceUID(String)} — same unclear
     * boolean contract; confirm against implementations.
     *
     * @param seriesInstanceUID a Series Instance UID value.
     * @return boolean
     */
    public boolean getSeriesInstanceUID(String seriesInstanceUID);

    /**
     * NOTE(review): see {@link #getStudyInstanceUID(String)} — same unclear
     * boolean contract; confirm against implementations.
     *
     * @param sopInstanceUID a SOP Instance UID value.
     * @return boolean
     */
    public boolean getSOPInstanceUID(String sopInstanceUID);

    /**
     * Get the SOP Class UID.
     *
     * @return the UID of the SOP Class as a string.
     */
    public String getSOPClass();

    /**
     * @return the Manufacturer's name.
     */
    public String getManufacturer();

    /**
     * @return the Model Name.
     */
    public String getModelName();

    /**
     * Get/Create a partial DicomCorrectInfo object. It populates various
     * fields in the object in case this object has to enter the DICOM Correct
     * mechanism.
     *
     * @return a partially populated DicomCorrectInfo object.
     */
    public DicomCorrectInfo getPartialDicomCorrectInfo();

    /**
     * Get the Acquisition Site.
     *
     * @return String representing the Acquisition Site.
     */
    public String getAcquisitionSite();

    /**
     * Set the Acquisition Site.
     *
     * @param acquisitionSite the new Acquisition Site.
     */
    public void setAcquisitionSite(String acquisitionSite);

    /**
     * Get the Patient/Study Info.
     *
     * @param instrument information about the listener that received this DicomDataSet.
     * @return PatientStudyInfo
     * @throws DicomException if the method fails to access this information in the dataset.
     */
    public PatientStudyInfo getPatientStudyInfo(InstrumentConfig instrument)
            throws DicomException;

    /**
     * Update the contained DicomDataSet with a set of HIS changes.
     *
     * @param HISChanges each change is in the form of &lt;(gggg,eeee)&gt; &lt;"change value"&gt;.
     * @throws DicomException if the method fails to access this information in the dataset.
     */
    public void updateHISChangesToDDS(HashMap<String, String> HISChanges)
            throws DicomException;

    /**
     * Convert the DICOM dataset to a byte array (a DICOM Part 10 file in memory).
     *
     * @param updateOnly true if no reconstitution (creating a new DICOM object) was done.
     * @return byte[] containing the Part 10 representation.
     */
    public byte[] part10Buffer(boolean updateOnly);

    /**
     * Validate the VRs in this DicomDataSet.
     *
     * @throws ValidateVRException if the VR validation process fails to run or complete.
     */
    public void validateVR() throws ValidateVRException;

    /**
     * Validate the IOD represented by this DicomDataSet.
     *
     * @return violation list that contains all errors and warnings discovered.
     * @throws ValidateIODException    if the IOD validation process fails to run or complete.
     * @throws UnknownSOPClassException if the SOP Class of the DicomDataSet is unknown.
     */
    public IIODViolationList validateIOD()
            throws ValidateIODException, UnknownSOPClassException;

    /**
     * Determine whether the DICOM tag exists in this DicomDataSet.
     *
     * @param dicomTagName  the element tag, i.e. "0010,0010".
     * @param dicomTag2Name null, or the tag within the sequence when
     *                      {@code dicomTagName} is a sequence tag.
     * @return true if the DICOM tag exists, false otherwise.
     */
    public boolean containsDicomElement(String dicomTagName, String dicomTag2Name);

    /**
     * Returns the translated DICOM Patient Root C-Find request. Currently,
     * this call results in an exception.
     *
     * @return DicomRequestParameters containing the map of DicomElements and
     *         their corresponding C-Find request values.
     * @throws IllegalQueryDataException if the dataset cannot be converted to a map.
     */
    public DicomRequestParameters createPatientQueryRequestParameters()
            throws IllegalQueryDataException;

    /**
     * Returns the translated DICOM Study Root C-Find request. Only Study
     * Root, Study level is accepted.
     *
     * @return DicomRequestParameters containing the map of DicomElements and
     *         their corresponding C-Find request values.
     * @throws IllegalQueryDataException if the dataset cannot be converted to a map.
     */
    public DicomRequestParameters createStudyQueryRequestParameters()
            throws IllegalQueryDataException;

    /**
     * Returns the translated DICOM C-Move request. Only Study Root, Study
     * level is accepted.
     *
     * @return DicomRequestParameters containing the map of DicomElements and
     *         their corresponding C-Move request values.
     * @throws IllegalQueryDataException if the dataset cannot be converted to a map.
     */
    public DicomRequestParameters createMoveRequestParameters()
            throws IllegalQueryDataException;

    /**
     * Get the Query Mapping Set. A mapping set contains the set of attributes,
     * without values, that are in the DicomDataSet and matched against a
     * static C-Find attribute table.
     *
     * <p>This set of attributes is used to translate the DicomDataSet to a
     * HashMap object, and later to verify that C-Find responses contain the
     * same attributes as the C-Find request. Only used for a C-Find DIMSE
     * message DicomDataSet.
     *
     * @return HashSet containing a list of DicomMap objects.
     */
    public HashSet<DicomMap> getQueryMappingSet();

    /**
     * Set the Query Mapping Set. A mapping set contains a set of attributes
     * without values; it can be used to translate and filter to only selected
     * attributes in the DicomDataSet. Only used for a C-Find DIMSE message
     * DicomDataSet.
     *
     * @param mappingSet containing a list of DicomMap objects.
     */
    public void setQueryMappingSet(HashSet<DicomMap> mappingSet);

    /**
     * Get the Source AE Title.
     *
     * @return the source AE Title as a string.
     */
    public String getSourceAET();

    /**
     * Set the Source AE Title.
     *
     * @param sourceAET the source AE Title as a string.
     */
    public void setSourceAET(String sourceAET);

    /**
     * Get the Transfer Syntax used to receive the DicomDataSet. This could
     * originate either from the listener or from a file.
     *
     * @return Transfer Syntax UID as a string.
     */
    public String getReceivedTransferSyntax();

    /**
     * Set the Transfer Syntax.
     *
     * @param receivedTransferSyntax Transfer Syntax UID as a string.
     */
    public void setReceivedTransferSyntax(String receivedTransferSyntax);

    /**
     * Get the Query/Retrieve Level element value from this DicomDataSet. This
     * is only populated when the DicomDataSet represents a C-Find or C-Move
     * DicomDataSet object.
     *
     * @return one of four possible string values: PATIENT, STUDY, SERIES, IMAGE.
     */
    public String getQueryRetrieveLevel();

    // CODECR #IMAG00000440 - SSN Format issue.
    /**
     * Adjusts the data presentation of the dataset (added for CR
     * #IMAG00000440, SSN format issue; exact behavior is
     * implementation-defined).
     */
    public void changeDataPresentation();

    /**
     * @return the name
     */
    public String getName();

    /**
     * @param name the name to set
     */
    public void setName(String name);

    public String getAffectedSOPClass();

    public void setAffectedSOPClass(String sopClass);

    /**
     * Set the AE Title's Default Service Type (RAD or CON).
     *
     * @param type represents the Service Type. Current values to use are RAD or CON.
     */
    public void setAETDefaultServiceType(String type);

    /**
     * Get the AE Title's Default Service Type (RAD or CON).
     *
     * @return Service Type.
     */
    public String getAETDefaultServiceType();

    /**
     * Inserts a DICOM element into the Original Attribute Sequence, which is
     * used to keep track of changes to Patient and Study data.
     *
     * @param tag      the element to store in the Original Attribute Sequence.
     * @param tagValue the element's value to store in the Original Attribute Sequence.
     * @param context  an identifier for the requester. Not stored; only used
     *                 in log output if the method fails.
     */
    public void insertToOriginalAttributeSequence(String tag, String tagValue,
            String context);

    /**
     * Inserts a DICOM element into the Original Attribute Sequence, which is
     * used to keep track of changes to Patient and Study data.
     *
     * @param tag     the element to store in the Original Attribute Sequence.
     * @param context an identifier for the requester. Not stored; only used
     *                in log output if the method fails.
     */
    public void insertToOriginalAttributeSequence(String tag, String context);

    /**
     * @return the transaction UID from the dataset if present, else null.
     */
    public String getTransactionUID();

    /**
     * Extracts DICOM SOP Class/Instance element pairs into a storage
     * commitment structure, used to track Storage Commitment requests until a
     * response is delivered to the sender.
     *
     * @return list of storage commitment elements.
     */
    public List<StorageCommitElement> getRefSOPUIDList();

    /**
     * Indicates whether or not the DICOM object is a radiation dose
     * structured report (RDSR).
     *
     * @return true if this is an RDSR.
     */
    public boolean isRadiationDoseStructuredReport();

    /**
     * Returns populated Dose subclass instances for known RDSR dose types;
     * returns null otherwise.
     *
     * @return a list of Dose subclasses if this is a known RDSR type; otherwise null.
     */
    public List<Dose> getDose();
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_METADATA_BLOCK; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE; import 
static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for the Get Index API: verifies that aliases, mappings and settings are
 * returned (or omitted) according to the requested {@link Feature}s, and that index-level
 * blocks behave as expected.
 *
 * <p>Suite-scoped cluster fixture: index "idx" (1 shard, alias "alias_idx", mapping "type1")
 * and an empty index "empty_idx".
 */
@ESIntegTestCase.SuiteScopeTestCase
public class GetIndexIT extends ESIntegTestCase {

    @Override
    protected void setupSuiteScopeCluster() throws Exception {
        assertAcked(prepareCreate("idx").addAlias(new Alias("alias_idx")).addMapping("type1", "{\"type1\":{}}", XContentType.JSON)
                .setSettings(Settings.builder().put("number_of_shards", 1)).get());
        ensureSearchable("idx");
        createIndex("empty_idx");
        // NOTE(review): "idx" was already made searchable above; the second call re-checks it
        // together with "empty_idx" — presumably harmless, verify if intentional.
        ensureSearchable("idx", "empty_idx");
    }

    /** Default request (no explicit features) must return aliases, mappings and settings. */
    public void testSimple() {
        GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("idx").get();
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("idx"));
        assertAliases(response, "idx");
        assertMappings(response, "idx");
        assertSettings(response, "idx");
    }

    /** Requesting a nonexistent index must fail with IndexNotFoundException. */
    public void testSimpleUnknownIndex() {
        try {
            client().admin().indices().prepareGetIndex().addIndices("missing_idx").get();
            fail("Expected IndexNotFoundException");
        } catch (IndexNotFoundException e) {
            assertThat(e.getMessage(), is("no such index"));
        }
    }

    /** An empty index still reports non-empty settings but no aliases and at most a default mapping. */
    public void testEmpty() {
        GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("empty_idx").get();
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("empty_idx"));
        assertEmptyAliases(response);
        assertEmptyOrOnlyDefaultMappings(response, "empty_idx");
        assertNonEmptySettings(response, "empty_idx");
    }

    /** Only MAPPINGS requested: aliases and settings sections must come back empty. */
    public void testSimpleMapping() {
        GetIndexResponse response = runWithRandomFeatureMethod(client().admin().indices().prepareGetIndex().addIndices("idx"),
                Feature.MAPPINGS);
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("idx"));
        assertMappings(response, "idx");
        assertEmptyAliases(response);
        assertEmptySettings(response);
    }

    /** Only ALIASES requested: mappings and settings sections must come back empty. */
    public void testSimpleAlias() {
        GetIndexResponse response = runWithRandomFeatureMethod(client().admin().indices().prepareGetIndex().addIndices("idx"),
                Feature.ALIASES);
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("idx"));
        assertAliases(response, "idx");
        assertEmptyMappings(response);
        assertEmptySettings(response);
    }

    /** Only SETTINGS requested: aliases and mappings sections must come back empty. */
    public void testSimpleSettings() {
        GetIndexResponse response = runWithRandomFeatureMethod(client().admin().indices().prepareGetIndex().addIndices("idx"),
                Feature.SETTINGS);
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("idx"));
        assertSettings(response, "idx");
        assertEmptyAliases(response);
        assertEmptyMappings(response);
    }

    /** Random non-empty subset of features on "idx": each section is populated iff requested. */
    public void testSimpleMixedFeatures() {
        int numFeatures = randomIntBetween(1, Feature.values().length);
        List<Feature> features = new ArrayList<Feature>(numFeatures);
        for (int i = 0; i < numFeatures; i++) {
            // duplicates are possible here; the API is expected to tolerate them
            features.add(randomFrom(Feature.values()));
        }
        GetIndexResponse response = runWithRandomFeatureMethod(client().admin().indices().prepareGetIndex().addIndices("idx"),
                features.toArray(new Feature[features.size()]));
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("idx"));
        if (features.contains(Feature.ALIASES)) {
            assertAliases(response, "idx");
        } else {
            assertEmptyAliases(response);
        }
        if (features.contains(Feature.MAPPINGS)) {
            assertMappings(response, "idx");
        } else {
            assertEmptyMappings(response);
        }
        if (features.contains(Feature.SETTINGS)) {
            assertSettings(response, "idx");
        } else {
            assertEmptySettings(response);
        }
    }

    /** Same as above but against "empty_idx", which never has aliases. */
    public void testEmptyMixedFeatures() {
        int numFeatures = randomIntBetween(1, Feature.values().length);
        List<Feature> features = new ArrayList<Feature>(numFeatures);
        for (int i = 0; i < numFeatures; i++) {
            features.add(randomFrom(Feature.values()));
        }
        GetIndexResponse response = runWithRandomFeatureMethod(client().admin().indices().prepareGetIndex().addIndices("empty_idx"),
                features.toArray(new Feature[features.size()]));
        String[] indices = response.indices();
        assertThat(indices, notNullValue());
        assertThat(indices.length, equalTo(1));
        assertThat(indices[0], equalTo("empty_idx"));
        assertEmptyAliases(response);
        if (features.contains(Feature.MAPPINGS)) {
            assertEmptyOrOnlyDefaultMappings(response, "empty_idx");
        } else {
            assertEmptyMappings(response);
        }
        if (features.contains(Feature.SETTINGS)) {
            assertNonEmptySettings(response, "empty_idx");
        } else {
            assertEmptySettings(response);
        }
    }

    /**
     * Read/write blocks must not prevent Get Index; a metadata block must make it fail
     * with the index-metadata cluster block.
     */
    public void testGetIndexWithBlocks() {
        for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY, SETTING_READ_ONLY_ALLOW_DELETE)) {
            try {
                enableIndexBlock("idx", block);
                GetIndexResponse response = client().admin().indices().prepareGetIndex().addIndices("idx")
                        .addFeatures(Feature.MAPPINGS, Feature.ALIASES).get();
                String[] indices = response.indices();
                assertThat(indices, notNullValue());
                assertThat(indices.length, equalTo(1));
                assertThat(indices[0], equalTo("idx"));
                assertMappings(response, "idx");
                assertAliases(response, "idx");
            } finally {
                // always restore the cluster state for the suite-scoped fixture
                disableIndexBlock("idx", block);
            }
        }
        try {
            enableIndexBlock("idx", SETTING_BLOCKS_METADATA);
            assertBlocked(client().admin().indices().prepareGetIndex().addIndices("idx").addFeatures(Feature.MAPPINGS, Feature.ALIASES),
                    INDEX_METADATA_BLOCK);
        } finally {
            disableIndexBlock("idx", SETTING_BLOCKS_METADATA);
        }
    }

    /** Randomly exercises both addFeatures() and setFeatures() so both code paths are covered. */
    private GetIndexResponse runWithRandomFeatureMethod(GetIndexRequestBuilder requestBuilder, Feature... features) {
        if (randomBoolean()) {
            return requestBuilder.addFeatures(features).get();
        } else {
            return requestBuilder.setFeatures(features).get();
        }
    }

    /** Asserts the settings section contains exactly indexName with the fixture's 1-shard setting. */
    private void assertSettings(GetIndexResponse response, String indexName) {
        ImmutableOpenMap<String, Settings> settings = response.settings();
        assertThat(settings, notNullValue());
        assertThat(settings.size(), equalTo(1));
        Settings indexSettings = settings.get(indexName);
        assertThat(indexSettings, notNullValue());
        assertThat(indexSettings.get("index.number_of_shards"), equalTo("1"));
    }

    /** Asserts the settings section contains indexName, without pinning any specific value. */
    private void assertNonEmptySettings(GetIndexResponse response, String indexName) {
        ImmutableOpenMap<String, Settings> settings = response.settings();
        assertThat(settings, notNullValue());
        assertThat(settings.size(), equalTo(1));
        Settings indexSettings = settings.get(indexName);
        assertThat(indexSettings, notNullValue());
    }

    /**
     * Asserts indexName carries the "type1" mapping; a "_default_" mapping may or may not
     * also be present (hence 1 or 2 entries).
     */
    private void assertMappings(GetIndexResponse response, String indexName) {
        ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = response.mappings();
        assertThat(mappings, notNullValue());
        assertThat(mappings.size(), equalTo(1));
        ImmutableOpenMap<String, MappingMetaData> indexMappings = mappings.get(indexName);
        assertThat(indexMappings, notNullValue());
        assertThat(indexMappings.size(), anyOf(equalTo(1), equalTo(2)));
        if (indexMappings.size() == 2) {
            MappingMetaData mapping = indexMappings.get("_default_");
            assertThat(mapping, notNullValue());
        }
        MappingMetaData mapping = indexMappings.get("type1");
        assertThat(mapping, notNullValue());
        assertThat(mapping.type(), equalTo("type1"));
    }

    /** Asserts indexName has no mappings, or only the auto-created "_default_" one. */
    private void assertEmptyOrOnlyDefaultMappings(GetIndexResponse response, String indexName) {
        ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = response.mappings();
        assertThat(mappings, notNullValue());
        assertThat(mappings.size(), equalTo(1));
        ImmutableOpenMap<String, MappingMetaData> indexMappings = mappings.get(indexName);
        assertThat(indexMappings, notNullValue());
        assertThat(indexMappings.size(), anyOf(equalTo(0), equalTo(1)));
        if (indexMappings.size() == 1) {
            MappingMetaData mapping = indexMappings.get("_default_");
            assertThat(mapping, notNullValue());
        }
    }

    /** Asserts indexName carries exactly the fixture alias "alias_idx". */
    private void assertAliases(GetIndexResponse response, String indexName) {
        ImmutableOpenMap<String, List<AliasMetaData>> aliases = response.aliases();
        assertThat(aliases, notNullValue());
        assertThat(aliases.size(), equalTo(1));
        List<AliasMetaData> indexAliases = aliases.get(indexName);
        assertThat(indexAliases, notNullValue());
        assertThat(indexAliases.size(), equalTo(1));
        AliasMetaData alias = indexAliases.get(0);
        assertThat(alias, notNullValue());
        assertThat(alias.alias(), equalTo("alias_idx"));
    }

    /** Asserts the settings section of the response is present but empty. */
    private void assertEmptySettings(GetIndexResponse response) {
        assertThat(response.settings(), notNullValue());
        assertThat(response.settings().isEmpty(), equalTo(true));
    }

    /** Asserts the mappings section of the response is present but empty. */
    private void assertEmptyMappings(GetIndexResponse response) {
        assertThat(response.mappings(), notNullValue());
        assertThat(response.mappings().isEmpty(), equalTo(true));
    }

    /** Asserts no index in the response carries any alias (per-index lists may exist but are empty). */
    private void assertEmptyAliases(GetIndexResponse response) {
        assertThat(response.aliases(), notNullValue());
        for (final ObjectObjectCursor<String, List<AliasMetaData>> entry : response.getAliases()) {
            assertTrue(entry.value.isEmpty());
        }
    }
}
/*
 * Copyright (C) 2015 Haruki Hasegawa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.h6ah4i.android.widget.advrecyclerview.event;

import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;

import java.util.ArrayList;
import java.util.List;

/**
 * Base class that maintains an ordered chain of listener objects and distributes
 * RecyclerView-related events to them. Not thread-safe; intended to be used from the
 * main thread. Subclasses attach concrete event sources via
 * {@link #onRecyclerViewAttached(RecyclerView)}.
 *
 * @param <T> type of the listener objects held in the chain
 */
public abstract class BaseRecyclerViewEventDistributor<T> {
    // true once release() has been called; all mutating methods are rejected afterwards
    protected boolean mReleased;
    protected RecyclerView mRecyclerView;
    // lazily allocated on first add(); may be null while the chain is empty
    protected List<T> mListeners;
    // re-entrancy guard: true while clear() is removing listeners
    protected boolean mPerformingClearMethod;

    public BaseRecyclerViewEventDistributor() {
    }

    /**
     * Gets attached {@link android.support.v7.widget.RecyclerView}
     *
     * @return The {@link android.support.v7.widget.RecyclerView} instance
     */
    public RecyclerView getRecyclerView() {
        return mRecyclerView;
    }

    /**
     * Release all references.
     * This method should be called to avoid memory leaks.
     * Safe to call multiple times; subsequent calls are no-ops.
     */
    public void release() {
        if (mReleased) {
            return;
        }
        mReleased = true;
        clear(true);
        onRelease();
    }

    /**
     * Indicates whether this distributor has been released
     *
     * @return True for this distributor has been released, otherwise false
     */
    public boolean isReleased() {
        return mReleased;
    }

    /**
     * Attaches {@link android.support.v7.widget.RecyclerView} instance.
     *
     * @param rv The {@link android.support.v7.widget.RecyclerView} instance
     * @throws IllegalArgumentException if rv is null
     * @throws IllegalStateException if already released or called during clear()
     */
    public void attachRecyclerView(RecyclerView rv) {
        final String METHOD_NAME = "attachRecyclerView()";

        if (rv == null) {
            throw new IllegalArgumentException("RecyclerView cannot be null");
        }

        verifyIsNotReleased(METHOD_NAME);
        verifyIsNotPerformingClearMethod(METHOD_NAME);

        onRecyclerViewAttached(rv);
    }

    /**
     * Add a listener to the tail of the chain.
     *
     * @param listener The listener instance
     * @return True if the listener object successfully added, otherwise false. Also returns true if have already been added.
     */
    public boolean add(T listener) {
        return add(listener, -1);
    }

    /**
     * Add a listener to the chain at the specified position.
     *
     * @param listener The listener instance
     * @param index    Position in the listener chain to insert this listener at. (&lt; 0: tail of the chain)
     * @return True if the listener object successfully added, otherwise false. Also returns true if have already been added.
     * @throws IllegalArgumentException if listener is null
     * @throws IllegalStateException if already released or called during clear()
     */
    public boolean add(@NonNull T listener, int index) {
        final String METHOD_NAME = "add()";

        if (listener == null) {
            throw new IllegalArgumentException("can not specify null for the listener");
        }

        verifyIsNotReleased(METHOD_NAME);
        verifyIsNotPerformingClearMethod(METHOD_NAME);

        if (mListeners == null) {
            // lazily allocate the backing list on first use
            mListeners = new ArrayList<>();
        }

        if (!mListeners.contains(listener)) {
            if (index < 0) {
                // append to the tail of the list
                mListeners.add(listener);
            } else {
                // insert to the specified position
                mListeners.add(index, listener);
            }

            // raise onAddedToEventDistributor() event
            if (listener instanceof RecyclerViewEventDistributorListener) {
                ((RecyclerViewEventDistributorListener) listener).onAddedToEventDistributor(this);
            }
        }

        return true;
    }

    /**
     * Remove a listener from the chain.
     *
     * @param listener Listener to remove
     * @return True for successfully removed the listener object, otherwise false
     * @throws IllegalArgumentException if listener is null
     * @throws IllegalStateException if already released or called during clear()
     */
    public boolean remove(@NonNull T listener) {
        final String METHOD_NAME = "remove()";

        if (listener == null) {
            throw new IllegalArgumentException("can not specify null for the listener");
        }

        verifyIsNotPerformingClearMethod(METHOD_NAME);
        verifyIsNotReleased(METHOD_NAME);

        if (mListeners == null) {
            return false;
        }

        final boolean removed = mListeners.remove(listener);

        if (removed) {
            // raise onRemovedFromEventDistributor() event
            if (listener instanceof RecyclerViewEventDistributorListener) {
                ((RecyclerViewEventDistributorListener) listener).onRemovedFromEventDistributor(this);
            }
        }

        return removed;
    }

    /**
     * Remove all listeners from the chain.
     */
    public void clear() {
        clear(false);
    }

    /**
     * Removes all listeners, notifying each {@link RecyclerViewEventDistributorListener}.
     *
     * @param calledFromRelease true when invoked from {@link #release()}, which skips the
     *                          released-state check (release() sets mReleased first)
     */
    protected void clear(boolean calledFromRelease) {
        final String METHOD_NAME = "clear()";

        if (!calledFromRelease) {
            verifyIsNotReleased(METHOD_NAME);
        }
        verifyIsNotPerformingClearMethod(METHOD_NAME);

        if (mListeners == null) {
            return;
        }

        try {
            mPerformingClearMethod = true;

            // iterate backwards so remove(i) does not shift pending indices
            final int n = mListeners.size();
            for (int i = n - 1; i >= 0; i--) {
                final T listener = mListeners.remove(i);

                // raise onRemovedFromEventDistributor() event
                if (listener instanceof RecyclerViewEventDistributorListener) {
                    ((RecyclerViewEventDistributorListener) listener).onRemovedFromEventDistributor(this);
                }
            }
        } finally {
            mPerformingClearMethod = false;
        }
    }

    /**
     * Gets the number of underlying listener objects.
     *
     * @return Number of underlying listener objects in the chain.
     */
    public int size() {
        if (mListeners != null) {
            return mListeners.size();
        } else {
            // BUGFIX: previously dereferenced the null mListeners here (guaranteed NPE);
            // an unallocated chain is simply empty
            return 0;
        }
    }

    /**
     * Gets whether the specified listener object is contained in the chain.
     *
     * @param listener Listener to check
     * @return True for the listener contains in the chain, otherwise false
     */
    public boolean contains(T listener) {
        if (mListeners != null) {
            return mListeners.contains(listener);
        } else {
            return false;
        }
    }

    /** Drops all internal references; called once from {@link #release()}. */
    protected void onRelease() {
        mRecyclerView = null;
        mListeners = null;
        mPerformingClearMethod = false;
    }

    /** Stores the attached RecyclerView; subclasses override to hook up event sources. */
    protected void onRecyclerViewAttached(RecyclerView rv) {
        mRecyclerView = rv;
    }

    /** @throws IllegalStateException if called while clear() is in progress */
    protected void verifyIsNotPerformingClearMethod(String methodName) {
        if (mPerformingClearMethod) {
            throw new IllegalStateException(methodName + " can not be called while performing the clear() method");
        }
    }

    /** @throws IllegalStateException if called after release() */
    protected void verifyIsNotReleased(String methodName) {
        if (mReleased) {
            throw new IllegalStateException(methodName + " can not be called after release() method called");
        }
    }
}
/************************************************************************
 * Copyright (c) 2016 IoT-Solutions e.U.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ************************************************************************/

package iot.jcypher.domain.internal;

import java.util.List;

import iot.jcypher.concurrency.Locking;
import iot.jcypher.database.IDBAccess;
import iot.jcypher.domain.IDomainAccess;
import iot.jcypher.domain.IGenericDomainAccess;
import iot.jcypher.domain.SyncInfo;
import iot.jcypher.domain.genericmodel.DOType;
import iot.jcypher.domain.genericmodel.DOTypeBuilderFactory;
import iot.jcypher.domain.genericmodel.DomainObject;
import iot.jcypher.domain.internal.DomainAccess.InternalDomainAccess;
import iot.jcypher.domain.internal.DomainAccessFactoryImpl.SyncType;
import iot.jcypher.domainquery.DomainQuery;
import iot.jcypher.domainquery.GDomainQuery;
import iot.jcypher.domainquery.QueryLoader;
import iot.jcypher.domainquery.QueryPersistor;
import iot.jcypher.query.result.JcError;
import iot.jcypher.transaction.ITransaction;

/**
 * Synchronized wrapper around {@link DomainAccess}: every state-touching operation
 * delegates to the wrapped instance inside a {@code synchronized} method, using this
 * wrapper object as the monitor. Query-construction and query-persistence methods are
 * intentionally left unsynchronized in the visible code
 * (NOTE(review): presumably because they do not mutate shared state — confirm against
 * DomainAccess internals).
 */
public class DomainAccessSync implements IDomainAccess, IIntDomainAccess {

    // NOTE(review): syncType is stored but never read within this class — possibly kept
    // for factory-side introspection; confirm before removing.
    private SyncType syncType;
    // the wrapped, non-thread-safe DomainAccess that performs all real work
    private DomainAccess delegate;
    // lazily created generic-model view over the same delegate
    private GenericDomainAccessSync genericDomainAccess;

    /**
     * @param dbAccess the graph database connection
     * @param domainName name of the domain to access
     * @param domainLabelUse how domain labels are applied to graph nodes
     * @param st synchronization type recorded for this wrapper
     */
    DomainAccessSync(IDBAccess dbAccess, String domainName, DomainLabelUse domainLabelUse, SyncType st) {
        super();
        this.syncType = st;
        this.delegate = new DomainAccess(dbAccess, domainName, domainLabelUse);
        getInternalDomainAccess(); // to initialize with syncObject
    }

    @Override
    public synchronized List<SyncInfo> getSyncInfos(List<Object> domainObjects) {
        return getDelegate().getSyncInfos(domainObjects);
    }

    @Override
    public synchronized SyncInfo getSyncInfo(Object domainObject) {
        return getDelegate().getSyncInfo(domainObject);
    }

    @Override
    public synchronized <T> List<T> loadByIds(Class<T> domainObjectClass, int resolutionDepth, long... ids) {
        return getDelegate().loadByIds(domainObjectClass, resolutionDepth, ids);
    }

    @Override
    public synchronized <T> T loadById(Class<T> domainObjectClass, int resolutionDepth, long id) {
        return getDelegate().loadById(domainObjectClass, resolutionDepth, id);
    }

    @Override
    public synchronized <T> List<T> loadByType(Class<T> domainObjectClass, int resolutionDepth, int offset, int count) {
        return getDelegate().loadByType(domainObjectClass, resolutionDepth, offset, count);
    }

    @Override
    public synchronized List<JcError> store(List<?> domainObjects) {
        return getDelegate().store(domainObjects);
    }

    @Override
    public synchronized List<JcError> store(Object domainObject) {
        return getDelegate().store(domainObject);
    }

    @Override
    public synchronized long numberOfInstancesOf(Class<?> type) {
        return getDelegate().numberOfInstancesOf(type);
    }

    @Override
    public synchronized List<Long> numberOfInstancesOf(List<Class<?>> types) {
        return getDelegate().numberOfInstancesOf(types);
    }

    // query construction — unsynchronized in the original code
    @Override
    public DomainQuery createQuery() {
        return getDelegate().createQuery();
    }

    @Override
    public List<String> getStoredQueryNames() {
        return getDelegate().getStoredQueryNames();
    }

    @Override
    public QueryPersistor createQueryPersistor(DomainQuery query) {
        return getDelegate().createQueryPersistor(query);
    }

    @Override
    public QueryLoader<DomainQuery> createQueryLoader(String queryName) {
        return getDelegate().createQueryLoader(queryName);
    }

    @Override
    public synchronized ITransaction beginTX() {
        return getDelegate().beginTX();
    }

    /** Sets the locking strategy on the delegate and returns this wrapper for chaining. */
    @Override
    public IDomainAccess setLockingStrategy(Locking locking) {
        getDelegate().setLockingStrategy(locking);
        return this;
    }

    /** Lazily creates the generic-model view; not synchronized in the visible code. */
    @Override
    public IGenericDomainAccess getGenericDomainAccess() {
        if (this.genericDomainAccess == null)
            this.genericDomainAccess = new GenericDomainAccessSync();
        return this.genericDomainAccess;
    }

    /** Exposes the delegate's internal access, re-binding its sync object to this wrapper. */
    @Override
    public InternalDomainAccess getInternalDomainAccess() {
        InternalDomainAccess ret = getDelegate().getInternalDomainAccess();
        ret.setSyncObject(this);
        return ret;
    }

    private DomainAccess getDelegate() {
        return this.delegate;
    }

    /**********************************************************************/
    /**
     * Generic-model counterpart of the outer wrapper: delegates through
     * {@code getDelegate().getGenericDomainAccess()} with the same synchronization
     * pattern. Inner (non-static) on purpose — it shares the outer instance as monitor
     * context via the enclosing-instance reference.
     */
    public class GenericDomainAccessSync implements IGenericDomainAccess, IIntDomainAccess {

        @Override
        public synchronized List<SyncInfo> getSyncInfos(List<DomainObject> domainObjects) {
            return getDelegate().getGenericDomainAccess().getSyncInfos(domainObjects);
        }

        @Override
        public synchronized SyncInfo getSyncInfo(DomainObject domainObject) {
            return getDelegate().getGenericDomainAccess().getSyncInfo(domainObject);
        }

        @Override
        public synchronized List<JcError> store(DomainObject domainObject) {
            return getDelegate().getGenericDomainAccess().store(domainObject);
        }

        @Override
        public synchronized List<JcError> store(List<DomainObject> domainObjects) {
            return getDelegate().getGenericDomainAccess().store(domainObjects);
        }

        @Override
        public synchronized List<DomainObject> loadByIds(String domainObjectClassName, int resolutionDepth, long... ids) {
            return getDelegate().getGenericDomainAccess().loadByIds(domainObjectClassName, resolutionDepth, ids);
        }

        @Override
        public synchronized DomainObject loadById(String domainObjectClassName, int resolutionDepth, long id) {
            return getDelegate().getGenericDomainAccess().loadById(domainObjectClassName, resolutionDepth, id);
        }

        @Override
        public synchronized List<DomainObject> loadByType(String domainObjectClassName, int resolutionDepth, int offset, int count) {
            return getDelegate().getGenericDomainAccess().loadByType(domainObjectClassName, resolutionDepth, offset, count);
        }

        @Override
        public synchronized long numberOfInstancesOf(String typeName) {
            return getDelegate().getGenericDomainAccess().numberOfInstancesOf(typeName);
        }

        @Override
        public synchronized List<Long> numberOfInstancesOf(List<String> typeNames) {
            return getDelegate().getGenericDomainAccess().numberOfInstancesOf(typeNames);
        }

        @Override
        public GDomainQuery createQuery() {
            return getDelegate().getGenericDomainAccess().createQuery();
        }

        // NOTE(review): unlike every sibling method, this delegates to the OUTER delegate
        // directly rather than through getGenericDomainAccess() — possibly intentional
        // (stored query names may be shared domain-wide); verify against DomainAccess.
        @Override
        public List<String> getStoredQueryNames() {
            return getDelegate().getStoredQueryNames();
        }

        @Override
        public QueryPersistor createQueryPersistor(GDomainQuery query) {
            return getDelegate().getGenericDomainAccess().createQueryPersistor(query);
        }

        @Override
        public QueryLoader<GDomainQuery> createQueryLoader(String queryName) {
            return getDelegate().getGenericDomainAccess().createQueryLoader(queryName);
        }

        @Override
        public synchronized ITransaction beginTX() {
            return getDelegate().getGenericDomainAccess().beginTX();
        }

        /** Sets the locking strategy on the generic delegate and returns this view for chaining. */
        @Override
        public IGenericDomainAccess setLockingStrategy(Locking locking) {
            getDelegate().getGenericDomainAccess().setLockingStrategy(locking);
            return this;
        }

        @Override
        public DOTypeBuilderFactory getTypeBuilderFactory() {
            return getDelegate().getGenericDomainAccess().getTypeBuilderFactory();
        }

        @Override
        public synchronized DOType getDomainObjectType(String typeName) {
            return getDelegate().getGenericDomainAccess().getDomainObjectType(typeName);
        }

        /** Returns the enclosing synchronized wrapper as the owning IDomainAccess. */
        @Override
        public IDomainAccess getDomainAccess() {
            return DomainAccessSync.this;
        }

        /** Same as the outer method, but binds the sync object to this inner view. */
        @Override
        public InternalDomainAccess getInternalDomainAccess() {
            InternalDomainAccess ret = getDelegate().getInternalDomainAccess();
            ret.setSyncObject(this);
            return ret;
        }
    }
}
/* * Copyright 2016 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.service; import com.thoughtworks.go.config.*; import com.thoughtworks.go.domain.*; import com.thoughtworks.go.domain.activity.AgentAssignment; import com.thoughtworks.go.domain.buildcause.BuildCause; import com.thoughtworks.go.i18n.LocalizedMessage; import com.thoughtworks.go.remote.AgentIdentifier; import com.thoughtworks.go.remote.work.InvalidAgentException; import com.thoughtworks.go.server.GoUnauthorizedException; import com.thoughtworks.go.server.dao.JobInstanceDao; import com.thoughtworks.go.server.dao.PipelineDao; import com.thoughtworks.go.server.dao.StageDao; import com.thoughtworks.go.server.domain.AgentInstances; import com.thoughtworks.go.server.domain.Username; import com.thoughtworks.go.server.perf.SchedulingPerformanceLogger; import com.thoughtworks.go.server.scheduling.PipelineScheduledMessage; import com.thoughtworks.go.server.scheduling.PipelineScheduledTopic; import com.thoughtworks.go.server.service.result.DefaultLocalizedOperationResult; import com.thoughtworks.go.server.service.result.LocalizedOperationResult; import com.thoughtworks.go.server.service.result.OperationResult; import com.thoughtworks.go.server.service.result.ServerHealthStateOperationResult; import com.thoughtworks.go.server.transaction.TransactionSynchronizationManager; import com.thoughtworks.go.server.transaction.TransactionTemplate; import 
com.thoughtworks.go.server.util.UserHelper; import com.thoughtworks.go.serverhealth.HealthStateScope; import com.thoughtworks.go.serverhealth.HealthStateType; import com.thoughtworks.go.serverhealth.ServerHealthService; import com.thoughtworks.go.serverhealth.ServerHealthState; import com.thoughtworks.go.util.TimeProvider; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionSynchronizationAdapter; import java.util.ArrayList; import java.util.List; import java.util.Map.Entry; import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY; @Service public class ScheduleService { private static final Logger LOGGER = Logger.getLogger(ScheduleService.class); private GoConfigService goConfigService; private PipelineService pipelineService; private StageService stageService; private SchedulingCheckerService schedulingChecker; private PipelineScheduledTopic pipelineScheduledTopic; private PipelineDao pipelineDao; private StageDao stageDao; private JobInstanceDao jobInstanceDao; private AgentAssignment agentAssignment; private StageOrderService stageOrderService; private SecurityService securityService; private PipelineScheduleQueue pipelineScheduleQueue; private JobInstanceService jobInstanceService; private EnvironmentConfigService environmentConfigService; private PipelineLockService pipelineLockService; private ServerHealthService serverHealthService; private AgentService agentService; private TransactionSynchronizationManager synchronizationManager; private TimeProvider timeProvider; private TransactionTemplate transactionTemplate; private final Object autoScheduleMutex 
= new Object(); private ConsoleActivityMonitor consoleActivityMonitor; private PipelinePauseService pipelinePauseService; private InstanceFactory instanceFactory; private SchedulingPerformanceLogger schedulingPerformanceLogger; private ElasticProfileService elasticProfileService; protected ScheduleService() { } @Autowired public ScheduleService(GoConfigService goConfigService, PipelineService pipelineService, StageService stageService, SchedulingCheckerService schedulingChecker, PipelineScheduledTopic pipelineScheduledTopic, PipelineDao pipelineDao, StageDao stageDao, StageOrderService stageOrderService, SecurityService securityService, PipelineScheduleQueue pipelineScheduleQueue, JobInstanceService jobInstanceService, JobInstanceDao jobInstanceDao, AgentAssignment agentAssignment, EnvironmentConfigService environmentConfigService, PipelineLockService pipelineLockService, ServerHealthService serverHealthService, TransactionTemplate transactionTemplate, AgentService agentService, TransactionSynchronizationManager synchronizationManager, TimeProvider timeProvider, ConsoleActivityMonitor consoleActivityMonitor, PipelinePauseService pipelinePauseService, InstanceFactory instanceFactory, SchedulingPerformanceLogger schedulingPerformanceLogger, ElasticProfileService elasticProfileService ) { this.goConfigService = goConfigService; this.pipelineService = pipelineService; this.stageService = stageService; this.schedulingChecker = schedulingChecker; this.pipelineScheduledTopic = pipelineScheduledTopic; this.pipelineDao = pipelineDao; this.stageDao = stageDao; this.stageOrderService = stageOrderService; this.securityService = securityService; this.pipelineScheduleQueue = pipelineScheduleQueue; this.jobInstanceService = jobInstanceService; this.jobInstanceDao = jobInstanceDao; this.agentAssignment = agentAssignment; this.environmentConfigService = environmentConfigService; this.pipelineLockService = pipelineLockService; this.serverHealthService = serverHealthService; 
this.transactionTemplate = transactionTemplate; this.agentService = agentService; this.synchronizationManager = synchronizationManager; this.timeProvider = timeProvider; this.consoleActivityMonitor = consoleActivityMonitor; this.pipelinePauseService = pipelinePauseService; this.instanceFactory = instanceFactory; this.schedulingPerformanceLogger = schedulingPerformanceLogger; this.elasticProfileService = elasticProfileService; } //Note: This is called from a Spring timer public void autoSchedulePipelinesFromRequestBuffer() { synchronized (autoScheduleMutex) { try { for (Entry<String, BuildCause> entry : pipelineScheduleQueue.toBeScheduled().entrySet()) { String pipelineName = entry.getKey(); BuildCause buildCause = entry.getValue(); LOGGER.info(String.format("[Pipeline Schedule] Scheduling pipeline %s with build cause %s", pipelineName, buildCause)); long schedulingStartTime = System.currentTimeMillis(); Pipeline pipeline = schedulePipeline(pipelineName, buildCause); long schedulingEndTime = System.currentTimeMillis(); if (pipeline != null) { pipelineScheduledTopic.post(new PipelineScheduledMessage(pipeline.getIdentifier())); schedulingPerformanceLogger.scheduledPipeline(pipelineName, pipelineScheduleQueue.toBeScheduled().size(), schedulingStartTime, schedulingEndTime); } } } catch (Throwable e) { LOGGER.error(String.format("[Pipeline Schedule] An exception occurred while scheduling the pipeline. 
%s", e)); } } } private Pipeline schedulePipeline(final String pipelineName, final BuildCause buildCause) { try { PipelineConfig pipelineConfig = goConfigService.pipelineConfigNamed(new CaseInsensitiveString(pipelineName)); if (canSchedule(pipelineConfig)) { final Pipeline pipelineInstance = pipelineScheduleQueue.createPipeline(buildCause, pipelineConfig, schedulingContext(buildCause.getApprover(), pipelineConfig, pipelineConfig.first()), goConfigService.getCurrentConfig().getMd5(), timeProvider); serverHealthService.update(stageSchedulingSuccessfulState(pipelineName, CaseInsensitiveString.str(pipelineConfig.get(0).name()))); return pipelineInstance; } } catch (PipelineNotFoundException e) { LOGGER.error("Could not find pipeline " + pipelineName, e); pipelineScheduleQueue.clearPipeline(pipelineName); } catch (CannotScheduleException e) { pipelineScheduleQueue.clearPipeline(pipelineName); serverHealthService.update(stageSchedulingFailedState(pipelineName, e)); } catch (Exception e) { LOGGER.error("Error while scheduling pipeline " + pipelineName, e); pipelineScheduleQueue.clearPipeline(pipelineName); } return null; } private ServerHealthState stageSchedulingFailedState(String pipelineName, CannotScheduleException e) { return ServerHealthState.failedToScheduleStage(HealthStateType.general(HealthStateScope.forStage(pipelineName, e.getStageName())), pipelineName, e.getStageName(), e.getMessage()); } private ServerHealthState stageSchedulingSuccessfulState(String pipelineName, String stageName) { return ServerHealthState.success(HealthStateType.general(HealthStateScope.forStage(pipelineName, stageName))); } /** * @deprecated ChrisS - Only used in tests */ public boolean rerunStage(Pipeline pipeline, StageConfig stageConfig, String approvedBy) { internalRerun(pipeline, CaseInsensitiveString.str(stageConfig.name()), approvedBy, new NewStageInstanceCreator(goConfigService), new ExceptioningErrorHandler()); return true; } private Stage internalRerun(Pipeline pipeline, 
String stageName, String approvedBy, final StageInstanceCreator creator, ErrorConditionHandler errorHandler) { ServerHealthStateOperationResult result = new ServerHealthStateOperationResult(); if (!schedulingChecker.canRerunStage(pipeline.getIdentifier(), stageName, approvedBy, result)) { errorHandler.cantSchedule("Cannot schedule: " + result.getServerHealthState().getDescription(), pipeline.getName(), stageName); } return scheduleStage(pipeline, stageName, approvedBy, creator, errorHandler); } public Stage scheduleStage(final Pipeline pipeline, final String stageName, final String username, final StageInstanceCreator creator, final ErrorConditionHandler errorHandler) { return (Stage) transactionTemplate.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { String pipelineName = pipeline.getName(); PipelineConfig pipelineConfig = goConfigService.pipelineConfigNamed(new CaseInsensitiveString(pipelineName)); StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName)); if (stageConfig == null) { throw new StageNotFoundException(pipelineName, stageName); } SchedulingContext context = schedulingContext(username, pipelineConfig, stageConfig); pipelineLockService.lockIfNeeded(pipeline); Stage instance = null; try { instance = creator.create(pipelineName, stageName, context); LOGGER.info(String.format("[Stage Schedule] Scheduling stage %s for pipeline %s", stageName, pipeline.getName())); } catch (CannotScheduleException e) { serverHealthService.update(stageSchedulingFailedState(pipelineName, e)); errorHandler.cantSchedule(e, pipelineName); } serverHealthService.update(stageSchedulingSuccessfulState(pipelineName, stageName)); stageService.save(pipeline, instance); return instance; } }); } private SchedulingContext schedulingContext(String username, PipelineConfig pipelineConfig, StageConfig stageConfig) { Agents availableAgents = environmentConfigService.agentsForPipeline(pipelineConfig.name()); 
SchedulingContext context = new DefaultSchedulingContext(username, availableAgents, elasticProfileService.allProfiles()); context = context.overrideEnvironmentVariables(pipelineConfig.getVariables()); context = context.overrideEnvironmentVariables(stageConfig.getVariables()); return context; } private boolean canSchedule(PipelineConfig pipelineConfig) { return schedulingChecker.canAutoTriggerConsumer(pipelineConfig); } public Stage rerunStage(String pipelineName, String counterOrLabel, String stageName) throws Exception { return lockAndRerunStage(pipelineName, counterOrLabel, stageName, new NewStageInstanceCreator(goConfigService), new ExceptioningErrorHandler()); } private Stage lockAndRerunStage(String pipelineName, String counterOrLabel, String stageName, StageInstanceCreator creator, final ErrorConditionHandler errorHandler) { synchronized (mutexForPipeline(pipelineName)) { OperationResult result = new ServerHealthStateOperationResult(); if (!schedulingChecker.canSchedule(result)) { errorHandler.cantSchedule(result.getServerHealthState().getDescription(), pipelineName, stageName); } String username = CaseInsensitiveString.str(UserHelper.getUserName().getUsername()); if (!securityService.hasOperatePermissionForStage(pipelineName, stageName, username)) { errorHandler.noOperatePermission(pipelineName, stageName); } Pipeline pipeline = pipelineService.fullPipelineByCounterOrLabel(pipelineName, counterOrLabel); if (pipeline == null) { errorHandler.nullPipeline(pipelineName, counterOrLabel, stageName); } if (!pipeline.hasStageBeenRun(stageName)) { if (goConfigService.hasPreviousStage(pipelineName, stageName)) { CaseInsensitiveString previousStageName = goConfigService.previousStage(pipelineName, stageName).name(); if (!pipeline.hasStageBeenRun(CaseInsensitiveString.str(previousStageName))) { errorHandler.previousStageNotRun(pipeline.getName(), stageName); } } } Stage stage = internalRerun(pipeline, stageName, username, creator, errorHandler); if (stage == null) { 
errorHandler.nullStage(); } return stage; } } /** * IMPORTANT: this method is only meant for TOP level usage(never use this within a transaction). It gobbles exception. */ public Stage rerunJobs(final Stage stage, final List<String> jobNames, final OperationResult result) { final StageIdentifier identifier = stage.getIdentifier(); if (jobNames == null || jobNames.isEmpty()) { String message = "No job was selected to re-run."; result.badRequest(message, message, HealthStateType.general(HealthStateScope.forStage(identifier.getPipelineName(), identifier.getStageName()))); return null; } try { return lockAndRerunStage(identifier.getPipelineName(), String.valueOf(identifier.getPipelineCounter()), identifier.getStageName(), new StageInstanceCreator() { public Stage create(String pipelineName, String stageName, SchedulingContext context) { StageConfig stageConfig = goConfigService.stageConfigNamed(identifier.getPipelineName(), identifier.getStageName()); String latestMd5 = goConfigService.getCurrentConfig().getMd5(); try { return instanceFactory.createStageForRerunOfJobs(stage, jobNames, context, stageConfig, timeProvider, latestMd5); } catch (CannotRerunJobException e) { result.notFound(e.getMessage(), e.getMessage(), HealthStateType.general(HealthStateScope.forStage(identifier.getPipelineName(), identifier.getStageName()))); throw e; } } }, new ResultUpdatingErrorHandler(result)); } catch (RuntimeException e) { if (result.canContinue()) { String message = String.format("Job rerun request for job(s) [%s] could not be completed because of unexpected failure. Cause: %s", StringUtils.join(jobNames.toArray(), ", "), e.getMessage()); LOGGER.error(message, e); result.badRequest(message, message, HealthStateType.general(HealthStateScope.forStage(identifier.getPipelineName(), identifier.getStageName())));//make this 500 while moving this to LocalizedOR. 
} return null; } } private String mutexForPipeline(String pipelineName) { String s = String.format("%s_forPipeline_%s", getClass().getName(), pipelineName); return s.intern(); // interned because we synchronize on it } private void triggerNextStageInPipeline(Pipeline pipeline, String stageName, String approvedBy) { StageConfig nextStage = stageOrderService.getNextStage(pipeline, stageName); if (nextStage == null) { return; } if (!nextStage.supportAutoApproval()) { return; } if (isStageActive(pipeline, nextStage)) { return; } scheduleStage(pipeline, CaseInsensitiveString.str(nextStage.name()), approvedBy, new NewStageInstanceCreator(goConfigService), new ExceptioningErrorHandler()); } //this method checks if specified stage is active in all pipelines private boolean isStageActive(Pipeline pipeline, StageConfig nextStage) { return stageDao.isStageActive(pipeline.getName(), CaseInsensitiveString.str(nextStage.name())); } public void automaticallyTriggerRelevantStagesFollowingCompletionOf(Stage stage) throws Exception { if (!stage.isCompleted()) { return; } try { Pipeline pipeline = pipelineDao.loadPipeline(stage.getPipelineId()); unlockIfLastStage(pipeline, stage); if (pipelinePauseService.isPaused(pipeline.getName())) { return; } // if there has been a newer successful run of the previous stage, we should trigger off this stage again // in the same pipeline instance that the newer successful run happened in. 
//TODO: ChrisS & LYH : This is only accidentally working // if (shouldTriggerThisStageInNewerPipeline(pipeline, stage)) { triggerCurrentStageInNewerPipeline(pipeline.getName(), stage); } // if this stage completed successfully, we should try to trigger the next stage in this pipeline if (stage.isCompletedAndPassed()) { triggerNextStageInPipeline(pipeline, stage.getName(), DEFAULT_APPROVED_BY); } } catch (Exception ex) { String message = String.format("Failed to trigger next stage for %s.", stage.getName()); LOGGER.error(message, ex); throw ex; } } public void unlockIfLastStage(Pipeline pipeline, Stage stage) { if (stageOrderService.getNextStage(pipeline, stage.getName()) == null) { pipelineLockService.unlock(pipeline.getName()); } } private boolean shouldTriggerThisStageInNewerPipeline(Pipeline pipeline, Stage stage) { return !goConfigService.isFirstStage(pipeline.getName(), stage.getName()) && !goConfigService.requiresApproval(new CaseInsensitiveString(pipeline.getName()), new CaseInsensitiveString(stage.getName())); } private void triggerCurrentStageInNewerPipeline(String pipelineName, Stage currentStage) { // get the most recent passed stage in this collection of pipeline id's StageConfig previousStage = goConfigService.previousStage(pipelineName, currentStage.getName()); Stage mostRecentPassed = stageService.mostRecentPassed(pipelineName, CaseInsensitiveString.str(previousStage.name())); if (mostRecentPassed != null && mostRecentPassed.getPipelineId() > currentStage.getPipelineId()) { Pipeline mostRecentEligiblePipeline = pipelineDao.loadPipeline(mostRecentPassed.getPipelineId()); if (!mostRecentEligiblePipeline.hasStageBeenRun(currentStage.getName())) { triggerNextStageInPipeline(mostRecentEligiblePipeline, mostRecentPassed.getName(), DEFAULT_APPROVED_BY); } } } public Stage cancelAndTriggerRelevantStages(String pipelineName, String stageName, Username userName, LocalizedOperationResult result) throws Exception { Stage stage = 
stageService.findLatestStage(pipelineName, stageName); if (stage == null) { String stageLocator = String.format("(pipeline name: %s, stage name %s)", pipelineName, stageName); LOGGER.warn("[Stage Cancellation] Failed to retrieve stage" + stageLocator); result.notFound(LocalizedMessage.string("STAGE_FOR_LOCATOR_NOT_FOUND", stageLocator), HealthStateType.general(HealthStateScope.GLOBAL)); return null; } return cancelAndTriggerRelevantStages(stage.getId(), userName, result); } // synchronized for updating job public Stage cancelAndTriggerRelevantStages(Long stageId, Username username, LocalizedOperationResult result) throws Exception { Stage stageForId; LocalizedOperationResult opResult = result == null ? new DefaultLocalizedOperationResult() : result; try { stageForId = stageService.stageById(stageId); } catch (Exception e) { LOGGER.error("[Stage Cancellation] Failed to retrieve stage identifier", e); opResult.notFound(LocalizedMessage.string("STAGE_FOR_LOCATOR_NOT_FOUND", stageId), HealthStateType.general(HealthStateScope.GLOBAL)); return null; } if (!stageForId.isActive()) { opResult.setMessage(LocalizedMessage.string("STAGE_IS_NOT_ACTIVE_FOR_CANCELLATION")); return stageForId; } String stageMutex = mutexForStageInstance(stageForId.getIdentifier()); synchronized (stageMutex) { // reload stage so we see committed state after acquiring mutex final Stage stage = stageService.stageById(stageId); String pipelineName = stage.getIdentifier().getPipelineName(); String stageName = stage.getIdentifier().getStageName(); String user = username == null ? 
null : username.getUsername().toString(); if (!securityService.hasOperatePermissionForStage(pipelineName, stageName, user)) { opResult.unauthorized(LocalizedMessage.string("UNAUTHORIZED_TO_OPERATE_STAGE", stageName), HealthStateType.unauthorised()); return null; } LOGGER.info("[Stage Cancellation] Cancelling stage " + stage.getIdentifier()); transactionTemplate.executeWithExceptionHandling(new com.thoughtworks.go.server.transaction.TransactionCallbackWithoutResult() { @Override public void doInTransactionWithoutResult(TransactionStatus status) throws Exception { stageService.cancelStage(stage); } }); transactionTemplate.executeWithExceptionHandling(new com.thoughtworks.go.server.transaction.TransactionCallbackWithoutResult() { @Override public void doInTransactionWithoutResult(TransactionStatus status) throws Exception { automaticallyTriggerRelevantStagesFollowingCompletionOf(stage); } }); opResult.setMessage(LocalizedMessage.string("STAGE_CANCELLED_SUCCESSFULLY")); return stage; } } public boolean canRun(PipelineIdentifier pipelineIdentifier, String stageName, String username, boolean hasPreviousStageBeenScheduled) { if (!goConfigService.hasStageConfigNamed(pipelineIdentifier.getName(), stageName)) { return false; } ServerHealthStateOperationResult result = new ServerHealthStateOperationResult(); schedulingChecker.canScheduleStage(pipelineIdentifier, stageName, username, result); return result.getServerHealthState().isSuccess() && hasPreviousStageBeenScheduled; } // synchronized for updating job public void updateJobStatus(final JobIdentifier jobIdentifier, final JobState jobState) throws Exception { // have to synchronize at stage-level because cancellation happens at stage-level final String stageMutex = mutexForStageInstance(jobIdentifier); synchronized (stageMutex) { synchronized (mutexForJob(jobIdentifier)) { final JobInstance job = jobInstanceService.buildByIdWithTransitions(jobIdentifier.getBuildId()); transactionTemplate.executeWithExceptionHandling(new 
com.thoughtworks.go.server.transaction.TransactionCallbackWithoutResult() { public void doInTransactionWithoutResult(TransactionStatus status) throws Exception { if (job.isNull() || job.getState() == JobState.Rescheduled || job.getResult() == JobResult.Cancelled) { return; } job.changeState(jobState); //TODO: #2318 JobInstance should contain identifier after it's loaded from database job.setIdentifier(jobIdentifier); jobInstanceService.updateStateAndResult(job); synchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCommit() { stageDao.clearCachedAllStages(jobIdentifier.getPipelineName(), jobIdentifier.getPipelineCounter(), jobIdentifier.getStageName()); } }); if (job.isCompleted()) { Stage stage = stageService.stageById(job.getStageId()); stageService.updateResult(stage); } } }); // this has to be in a separate transaction because the above should not fail due to errors when scheduling a the next stage // (e.g. CannotScheduleException thrown when there are no agents for run-on-all-agent jobs) transactionTemplate.executeWithExceptionHandling(new com.thoughtworks.go.server.transaction.TransactionCallbackWithoutResult() { @Override public void doInTransactionWithoutResult(TransactionStatus status) throws Exception { if (job.isCompleted()) { Stage stage = stageService.stageById(job.getStageId()); automaticallyTriggerRelevantStagesFollowingCompletionOf(stage); } } }); } } } private String mutexForStageInstance(StageIdentifier id) { return mutexForStageInstance(id.getPipelineName(), id.getPipelineCounter(), id.getStageName(), id.getStageCounter()); } private String mutexForStageInstance(JobIdentifier id) { return mutexForStageInstance(id.getPipelineName(), id.getPipelineCounter(), id.getStageName(), id.getStageCounter()); } private String mutexForStageInstance(String pipelineName, Integer pipelineCounter, String stageName, String stageCounter) { String s = String.format("%s_forStageInstance_%s_%s_%s_%s", 
getClass().getName(), pipelineName, pipelineCounter, stageName, stageCounter); return s.intern(); // interned because we synchronize on it } //Note: This is called from a Spring timer public void rescheduleHungJobs() { try { //TODO 2779 AgentInstances knownAgents = agentService.findRegisteredAgents(); List<String> liveAgentIdList = getLiveAgentUuids(knownAgents); if (!liveAgentIdList.isEmpty()) { JobInstances jobs = jobInstanceService.findHungJobs(liveAgentIdList); for (JobInstance buildId : jobs) { LOGGER.warn("Found hung job[id=" + buildId + "], rescheduling it"); rescheduleJob(buildId); } } } catch (Exception e) { LOGGER.error("Error occured during reschedule hung builds: ", e); } } // Note: This is also called from a spring timer (cancelHungJobs) public void cancelHungJobs() { try { consoleActivityMonitor.cancelUnresponsiveJobs(this); } catch (Exception e) { LOGGER.error("Error occurred during cancelling unresponsive job: ", e); } } private List<String> getLiveAgentUuids(AgentInstances knownAgents) { List<String> agents = new ArrayList<>(); for (AgentInstance agent : knownAgents) { if (agent.getStatus() != AgentStatus.LostContact) { agents.add(agent.agentConfig().getUuid()); } } return agents; } public void rescheduleAbandonedBuildIfNecessary(final AgentIdentifier identifier) { transactionTemplate.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { final JobInstance jobInstance = agentAssignment.latestActiveJobOnAgent(identifier.getUuid()); if (jobInstance != null) { LOGGER.warn(String.format("[Job Reschedule] Found latest incomplete job for agent %s [Job Instance: %s]", identifier, jobInstance)); rescheduleJob(jobInstance); } } }); } //synchronized for updating job public void rescheduleJob(final JobInstance toBeRescheduled) { final JobIdentifier jobIdentifier = toBeRescheduled.getIdentifier(); synchronized (mutexForStageInstance(jobIdentifier)) { synchronized 
(mutexForJob(jobIdentifier)) { transactionTemplate.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { LOGGER.warn(String.format("[Job Reschedule] Rescheduling and marking old job as ignored: %s", toBeRescheduled)); //Reloading it because we want to see the latest committed state after acquiring the mutex. JobInstance oldJob = jobInstanceService.buildById(toBeRescheduled.getId()); if (oldJob.isCompleted()) { return; } JobInstance newJob = oldJob.clone(); oldJob.changeState(JobState.Rescheduled); jobInstanceService.updateStateAndResult(oldJob); jobInstanceDao.ignore(oldJob); //Make a new Job newJob.reschedule(); jobInstanceService.save(oldJob.getIdentifier().getStageIdentifier(), oldJob.getStageId(), newJob); //Copy the plan for the old job since we don't load job plan with jobInstance by default JobPlan plan = jobInstanceDao.loadPlan(oldJob.getId()); jobInstanceDao.save(newJob.getId(), plan); LOGGER.info(String.format("[Job Reschedule] Scheduled new job: %s. 
Replacing old job: %s", newJob.getIdentifier(), oldJob.getIdentifier())); } }); } } } public void cancelJob(final JobInstance instance) { synchronized (mutexForStageInstance(instance.getIdentifier())) { stageService.cancelJob(instance); } } public void jobCompleting(JobIdentifier jobIdentifier, JobResult result, String agentUuid) { // have to synchronize at stage-level because cancellation happens at stage-level synchronized (mutexForStageInstance(jobIdentifier)) { synchronized (mutexForJob(jobIdentifier)) { JobInstance jobInstance = jobInstanceService.buildByIdWithTransitions(jobIdentifier.getBuildId()); if (jobInstance.isNull() || jobInstance.getResult() == JobResult.Cancelled || jobInstance.getState() == JobState.Rescheduled) { return; } //TODO: #2318 JobInstance should contain identifier after it's loaded from database jobInstance.setIdentifier(jobIdentifier); if (!StringUtils.equals(jobInstance.getAgentUuid(), agentUuid)) { LOGGER.error(String.format("Build Instance is using agent [%s] but status updating from agent [%s]", jobInstance.getAgentUuid(), agentUuid)); throw new InvalidAgentException("AgentUUID has changed in the middle of a job. 
AgentUUID:" + agentUuid + ", Build: " + jobInstance.toString()); } jobInstance.completing(result); jobInstanceService.updateStateAndResult(jobInstance); } } } public boolean updateAssignedInfo(String agentUuid, JobPlan job) { // have to synchronize at stage-level because cancellation happens at stage-level JobIdentifier jobIdentifier = job.getIdentifier(); synchronized (mutexForStageInstance(jobIdentifier)) { JobInstance instance = jobInstanceService.buildByIdWithTransitions(job.getJobId()); if (instance.getState() == JobState.Completed) { LOGGER.info(String.format("[Agent Assignment] Not assigning a completed job [%s] to agent %s", instance.getIdentifier(), agentUuid)); return true; } instance.assign(agentUuid, timeProvider.currentTime()); jobInstanceService.updateAssignedInfo(instance); return false; } } public String mutexForJob(JobIdentifier jobIdentifier) { return String.format("%s_forJobInstance_%s", getClass().getName(), jobIdentifier.buildLocator()).intern(); } public void cancelJob(JobIdentifier jobIdentifier) { cancelJob(jobInstanceService.buildById(jobIdentifier.getBuildId())); } public void failJob(JobInstance instance) { synchronized (mutexForStageInstance(instance.getIdentifier())) { stageService.failJob(instance); } } public interface StageInstanceCreator { Stage create(final String pipelineName, final String stageName, final SchedulingContext context); } public static class NewStageInstanceCreator implements StageInstanceCreator { private final GoConfigService goConfigService; public NewStageInstanceCreator(GoConfigService goConfigService) { this.goConfigService = goConfigService; } public Stage create(final String pipelineName, final String stageName, final SchedulingContext context) { return goConfigService.scheduleStage(pipelineName, stageName, context); } } public interface ErrorConditionHandler { void nullStage(); void cantSchedule(String description, String pipelineName, String stageName); void noOperatePermission(String pipelineName, String 
stageName); void nullPipeline(String pipelineName, String counterOrLabel, String stageName); void previousStageNotRun(String pipelineName, String stageName); void cantSchedule(CannotScheduleException e, String pipelineName); } public static class ExceptioningErrorHandler implements ErrorConditionHandler { public void nullStage() { throw new RuntimeException(); } public void cantSchedule(String description, String pipelineName, String stageName) { throw new RuntimeException(description); } public void noOperatePermission(String pipelineName, String stageName) { throw new GoUnauthorizedException(noOperatePermissionMessage(pipelineName, stageName)); } public void nullPipeline(String pipelineName, String counterOrLabel, String stageName) { throw new RuntimeException(String.format("Stage [%s/%s/%s] not found", pipelineName, counterOrLabel, stageName)); } public void previousStageNotRun(String pipelineName, String stageName) { throw new RuntimeException(previousStageNotRunMessage(pipelineName, stageName)); } protected String previousStageNotRunMessage(String pipelineName, String stageName) { return String.format("Can not run stage [%s] in pipeline [%s] because its previous stage has not been run.", stageName, pipelineName); } public void cantSchedule(CannotScheduleException e, String pipelineName) { throw e; } protected String noOperatePermissionMessage(String pipelineName, String stageName) { return String.format("User does not have operate permissions for stage [%s] of pipeline [%s]", stageName, pipelineName); } } private static class ResultUpdatingErrorHandler extends ExceptioningErrorHandler { private final OperationResult result; public ResultUpdatingErrorHandler(OperationResult result) { this.result = result; } @Override public void cantSchedule(String description, String pipelineName, String stageName) { result.conflict(description, description, stageScopedHealthState(pipelineName, stageName)); super.cantSchedule(description, pipelineName, stageName); } private 
HealthStateType stageScopedHealthState(String pipelineName, String stageName) { return HealthStateType.general(HealthStateScope.forStage(pipelineName, stageName)); } @Override public void previousStageNotRun(String pipelineName, String stageName) { String message = previousStageNotRunMessage(pipelineName, stageName); result.badRequest(message, message, stageScopedHealthState(pipelineName, stageName)); super.previousStageNotRun(pipelineName, stageName); } @Override public void noOperatePermission(String pipelineName, String stageName) { String message = noOperatePermissionMessage(pipelineName, stageName); result.unauthorized(message, message, stageScopedHealthState(pipelineName, stageName)); super.noOperatePermission(pipelineName, stageName); } @Override public void cantSchedule(CannotScheduleException e, String pipelineName) { result.conflict(e.getMessage(), e.getMessage(), stageScopedHealthState(pipelineName, e.getStageName())); super.cantSchedule(e, pipelineName); } } }
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.MouseEvent; import org.eclipse.draw2d.MouseMotionListener; import org.eclipse.draw2d.Shape; import org.eclipse.draw2d.StackLayout; import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPolicy; import org.eclipse.gef.Request; import org.eclipse.gef.commands.Command; import org.eclipse.gef.editpolicies.LayoutEditPolicy; import org.eclipse.gef.editpolicies.NonResizableEditPolicy; import org.eclipse.gef.palette.PaletteContainer; import org.eclipse.gef.palette.ToolEntry; import org.eclipse.gef.requests.CreateRequest; import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderItemEditPart; import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart; import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles; import org.eclipse.gmf.runtime.emf.type.core.IElementType; import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure; import org.eclipse.gmf.runtime.notation.View; import org.eclipse.swt.graphics.Color; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpointOutputConnectorEditPart; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.WestPointerShape; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.HTTPEndPointOutputConnectorItemSemanticEditPolicy; import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes; /** * @generated NOT */ public class HTTPEndPointOutputConnectorEditPart extends AbstractEndpointOutputConnectorEditPart { /** * @generated */ public static final int VISUAL_ID = 3711; /** * @generated */ protected IFigure contentPane; /** * @generated */ protected IFigure primaryShape; public final boolean isInput = false; public 
NodeFigure figure_; public NodeFigure getNodeFigureOutput() { return figure_; } /** * @generated */ public HTTPEndPointOutputConnectorEditPart(View view) { super(view); } /** * @generated NOT */ protected void createDefaultEditPolicies() { super.createDefaultEditPolicies(); installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, getPrimaryDragEditPolicy()); installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new HTTPEndPointOutputConnectorItemSemanticEditPolicy()); installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy()); // XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE); } /** * @generated */ protected LayoutEditPolicy createLayoutEditPolicy() { org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() { protected EditPolicy createChildEditPolicy(EditPart child) { EditPolicy result = child .getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE); if (result == null) { result = new NonResizableEditPolicy(); } return result; } protected Command getMoveChildrenCommand(Request request) { return null; } protected Command getCreateCommand(CreateRequest request) { return null; } }; return lep; } /** * @generated */ protected IFigure createNodeShape() { return primaryShape = new WestPointerFigure(); } /** * @generated */ public WestPointerFigure getPrimaryShape() { return (WestPointerFigure) primaryShape; } /** * @generated */ protected NodeFigure createNodePlate() { DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(12, 10); //FIXME: workaround for #154536 result.getBounds().setSize(result.getPreferredSize()); return result; } /** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model * so you may safely remove <i>generated</i> tag and modify it. 
* * @generated NOT */ protected NodeFigure createNodeFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShapeForward(); figure.add(shape); contentPane = setupContentPane(shape); figure_ = figure; createNodeShapeReverse(); return figure; } /** * Default implementation treats passed figure as content pane. * Respects layout one may have set for generated figure. * @param nodeShape instance of generated figure class * @generated */ protected IFigure setupContentPane(IFigure nodeShape) { return nodeShape; // use nodeShape itself as contentPane } /** * @generated */ public IFigure getContentPane() { if (contentPane != null) { return contentPane; } return super.getContentPane(); } /** * @generated */ protected void setForegroundColor(Color color) { if (primaryShape != null) { primaryShape.setForegroundColor(color); } } /** * @generated */ protected void setBackgroundColor(Color color) { if (primaryShape != null) { primaryShape.setBackgroundColor(color); } } /** * @generated */ protected void setLineWidth(int width) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineWidth(width); } } /** * @generated */ protected void setLineType(int style) { if (primaryShape instanceof Shape) { ((Shape) primaryShape).setLineStyle(style); } } /** * @generated */ public List<IElementType> getMARelTypesOnSource() { ArrayList<IElementType> types = new ArrayList<IElementType>(1); types.add(EsbElementTypes.EsbLink_4001); return types; } /** * @generated */ public List<IElementType> getMARelTypesOnSourceAndTarget( IGraphicalEditPart targetEditPart) { LinkedList<IElementType> types = new LinkedList<IElementType>(); if (targetEditPart instanceof ProxyInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ProxyFaultInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DropMediatorInputConnectorEditPart) { 
types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof PropertyMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ThrottleMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FilterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LogMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EnrichMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof XSLTMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SwitchMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SequenceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EventMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EntitlementMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ClassMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SpringMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ScriptMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FaultMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof XQueryMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CommandMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DBLookupMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof 
DBReportMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SmooksMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SendMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HeaderMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CloneMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CacheMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof IterateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CalloutMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof TransactionMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RMSequenceMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RuleMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof OAuthMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AggregateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof StoreMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BuilderMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof CallTemplateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof PayloadFactoryMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EnqueueMediatorInputConnectorEditPart) { 
types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof URLRewriteMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ValidateMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RouterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof ConditionalRouterMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BAMMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof BeanMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof EJBMediatorInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DefaultEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FailoverEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RecipientListEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof WSDLEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof NamedEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LoadBalanceEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressingEndpointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HTTPEndPointInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if 
(targetEditPart instanceof MessageInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof MergeNodeFirstInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof MergeNodeSecondInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof SequencesInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof DefaultEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof AddressEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof FailoverEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof RecipientListEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof WSDLEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof LoadBalanceEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof HTTPEndPointInputConnector2EditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } if (targetEditPart instanceof APIResourceFaultInputConnectorEditPart) { types.add(EsbElementTypes.EsbLink_4001); } return types; } /** * @generated */ public List<IElementType> getMATypesForTarget(IElementType relationshipType) { LinkedList<IElementType> types = new LinkedList<IElementType>(); if (relationshipType == EsbElementTypes.EsbLink_4001) { types.add(EsbElementTypes.ProxyInputConnector_3003); types.add(EsbElementTypes.ProxyFaultInputConnector_3489); types.add(EsbElementTypes.DropMediatorInputConnector_3008); types.add(EsbElementTypes.PropertyMediatorInputConnector_3033); 
types.add(EsbElementTypes.ThrottleMediatorInputConnector_3121); types.add(EsbElementTypes.FilterMediatorInputConnector_3010); types.add(EsbElementTypes.LogMediatorInputConnector_3018); types.add(EsbElementTypes.EnrichMediatorInputConnector_3036); types.add(EsbElementTypes.XSLTMediatorInputConnector_3039); types.add(EsbElementTypes.SwitchMediatorInputConnector_3042); types.add(EsbElementTypes.SequenceInputConnector_3049); types.add(EsbElementTypes.EventMediatorInputConnector_3052); types.add(EsbElementTypes.EntitlementMediatorInputConnector_3055); types.add(EsbElementTypes.ClassMediatorInputConnector_3058); types.add(EsbElementTypes.SpringMediatorInputConnector_3061); types.add(EsbElementTypes.ScriptMediatorInputConnector_3064); types.add(EsbElementTypes.FaultMediatorInputConnector_3067); types.add(EsbElementTypes.XQueryMediatorInputConnector_3070); types.add(EsbElementTypes.CommandMediatorInputConnector_3073); types.add(EsbElementTypes.DBLookupMediatorInputConnector_3076); types.add(EsbElementTypes.DBReportMediatorInputConnector_3079); types.add(EsbElementTypes.SmooksMediatorInputConnector_3082); types.add(EsbElementTypes.SendMediatorInputConnector_3085); types.add(EsbElementTypes.HeaderMediatorInputConnector_3100); types.add(EsbElementTypes.CloneMediatorInputConnector_3103); types.add(EsbElementTypes.CacheMediatorInputConnector_3106); types.add(EsbElementTypes.IterateMediatorInputConnector_3109); types.add(EsbElementTypes.CalloutMediatorInputConnector_3115); types.add(EsbElementTypes.TransactionMediatorInputConnector_3118); types.add(EsbElementTypes.RMSequenceMediatorInputConnector_3124); types.add(EsbElementTypes.RuleMediatorInputConnector_3127); types.add(EsbElementTypes.OAuthMediatorInputConnector_3130); types.add(EsbElementTypes.AggregateMediatorInputConnector_3112); types.add(EsbElementTypes.StoreMediatorInputConnector_3589); types.add(EsbElementTypes.BuilderMediatorInputConnector_3592); types.add(EsbElementTypes.CallTemplateMediatorInputConnector_3595); 
types.add(EsbElementTypes.PayloadFactoryMediatorInputConnector_3598); types.add(EsbElementTypes.EnqueueMediatorInputConnector_3601); types.add(EsbElementTypes.URLRewriteMediatorInputConnector_3621); types.add(EsbElementTypes.ValidateMediatorInputConnector_3624); types.add(EsbElementTypes.RouterMediatorInputConnector_3629); types.add(EsbElementTypes.ConditionalRouterMediatorInputConnector_3636); types.add(EsbElementTypes.BAMMediatorInputConnector_3681); types.add(EsbElementTypes.BeanMediatorInputConnector_3684); types.add(EsbElementTypes.EJBMediatorInputConnector_3687); types.add(EsbElementTypes.DefaultEndPointInputConnector_3021); types.add(EsbElementTypes.AddressEndPointInputConnector_3030); types.add(EsbElementTypes.FailoverEndPointInputConnector_3088); types.add(EsbElementTypes.RecipientListEndPointInputConnector_3693); types.add(EsbElementTypes.WSDLEndPointInputConnector_3092); types.add(EsbElementTypes.NamedEndpointInputConnector_3661); types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3095); types.add(EsbElementTypes.APIResourceEndpointInputConnector_3675); types.add(EsbElementTypes.AddressingEndpointInputConnector_3690); types.add(EsbElementTypes.HTTPEndPointInputConnector_3710); types.add(EsbElementTypes.MessageInputConnector_3046); types.add(EsbElementTypes.MergeNodeFirstInputConnector_3014); types.add(EsbElementTypes.MergeNodeSecondInputConnector_3015); types.add(EsbElementTypes.SequencesInputConnector_3616); types.add(EsbElementTypes.DefaultEndPointInputConnector_3644); types.add(EsbElementTypes.AddressEndPointInputConnector_3647); types.add(EsbElementTypes.FailoverEndPointInputConnector_3650); types.add(EsbElementTypes.RecipientListEndPointInputConnector_3697); types.add(EsbElementTypes.WSDLEndPointInputConnector_3654); types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3657); types.add(EsbElementTypes.HTTPEndPointInputConnector_3713); types.add(EsbElementTypes.APIResourceInputConnector_3670); 
types.add(EsbElementTypes.APIResourceFaultInputConnector_3672); } return types; } /** * @generated */ public class WestPointerFigure extends WestPointerShape { /** * @generated NOT */ public WestPointerFigure() { this.setBackgroundColor(THIS_BACK); this.setPreferredSize(new Dimension(getMapMode().DPtoLP(12), getMapMode().DPtoLP(10))); this.addMouseMotionListener(new MouseMotionListener() { public void mouseMoved(MouseEvent me) { // TODO Auto-generated method stub } public void mouseHover(MouseEvent me) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent me) { // TODO Auto-generated method stub getEditDomain().getPaletteViewer().setActiveTool(null); } public void mouseEntered(MouseEvent me) { // TODO Auto-generated method stub getEditDomain() .getPaletteViewer() .setActiveTool( (ToolEntry) (((PaletteContainer) getEditDomain() .getPaletteViewer() .getPaletteRoot().getChildren() .get(4)).getChildren().get(0))); } public void mouseDragged(MouseEvent me) { // TODO Auto-generated method stub } }); } } /** * @generated */ static final Color THIS_BACK = new Color(null, 50, 50, 50); }
package org.wikipedia.json; /* * Copyright (C) 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.util.Log; import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.JsonPrimitive; import com.google.gson.TypeAdapter; import com.google.gson.TypeAdapterFactory; import com.google.gson.internal.Streams; import com.google.gson.reflect.TypeToken; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; /** * Adapts values whose runtime type may differ from their declaration type. This * is necessary when a field's type is not the same type that GSON should create * when deserializing that field. For example, consider these types: * <pre> {@code * abstract class Shape { * int x; * int y; * } * class Circle extends Shape { * int radius; * } * class Rectangle extends Shape { * int width; * int height; * } * class Diamond extends Shape { * int width; * int height; * } * class Drawing { * Shape bottomShape; * Shape topShape; * } * }</pre> * <p>Without additional type information, the serialized JSON is ambiguous. Is * the bottom shape in this drawing a rectangle or a diamond? 
<pre> {@code * { * "bottomShape": { * "width": 10, * "height": 5, * "x": 0, * "y": 0 * }, * "topShape": { * "radius": 2, * "x": 4, * "y": 1 * } * }}</pre> * This class addresses this problem by adding type information to the * serialized JSON and honoring that type information when the JSON is * deserialized: <pre> {@code * { * "bottomShape": { * "type": "Diamond", * "width": 10, * "height": 5, * "x": 0, * "y": 0 * }, * "topShape": { * "type": "Circle", * "radius": 2, * "x": 4, * "y": 1 * } * }}</pre> * Both the type field name ({@code "type"}) and the type labels ({@code * "Rectangle"}) are configurable. * * <h3>Registering Types</h3> * Create a {@code RuntimeTypeAdapterFactory} by passing the base type and type field * name to the {@link #of} factory method. If you don't supply an explicit type * field name, {@code "type"} will be used. <pre> {@code * RuntimeTypeAdapterFactory<Shape> shapeAdapterFactory * = RuntimeTypeAdapterFactory.of(Shape.class, "type"); * }</pre> * Next register all of your subtypes. Every subtype must be explicitly * registered. This protects your application from injection attacks. If you * don't supply an explicit type label, the type's simple name will be used. 
 * <pre> {@code
 *   shapeAdapterFactory.registerSubtype(Rectangle.class, "Rectangle");
 *   shapeAdapterFactory.registerSubtype(Circle.class, "Circle");
 *   shapeAdapterFactory.registerSubtype(Diamond.class, "Diamond");
 * }</pre>
 * Finally, register the type adapter factory in your application's GSON builder:
 * <pre> {@code
 *   Gson gson = new GsonBuilder()
 *       .registerTypeAdapterFactory(shapeAdapterFactory)
 *       .create();
 * }</pre>
 * Like {@code GsonBuilder}, this API supports chaining: <pre> {@code
 *   RuntimeTypeAdapterFactory<Shape> shapeAdapterFactory = RuntimeTypeAdapterFactory.of(Shape.class)
 *       .registerSubtype(Rectangle.class)
 *       .registerSubtype(Circle.class)
 *       .registerSubtype(Diamond.class);
 * }</pre>
 *
 * <h3>Serialization and deserialization</h3>
 * In order to serialize and deserialize a polymorphic object,
 * you must specify the base type explicitly.
 * <pre> {@code
 *   Diamond diamond = new Diamond();
 *   String json = gson.toJson(diamond, Shape.class);
 * }</pre>
 * And then:
 * <pre> {@code
 *   Shape shape = gson.fromJson(json, Shape.class);
 * }</pre>
 *
 * <p>NOTE(review): unlike the upstream gson-extras version, this copy's {@code read} logs and
 * returns {@code null} when the type label is unregistered, instead of throwing
 * {@link JsonParseException} — see the comment inside {@code create} below.
 */
public final class RuntimeTypeAdapterFactory<T> implements TypeAdapterFactory {
    // Root of the polymorphic hierarchy this factory handles; create() only
    // fires for exactly this raw type.
    private final Class<?> baseType;
    // Name of the JSON member that carries the subtype label (e.g. "type").
    private final String typeFieldName;
    // Bidirectional label <-> subtype registry. LinkedHashMap keeps
    // registration order stable for the delegate maps built in create().
    private final Map<String, Class<?>> labelToSubtype = new LinkedHashMap<String, Class<?>>();
    private final Map<Class<?>, String> subtypeToLabel = new LinkedHashMap<Class<?>, String>();
    // When true, the type field is left in the JSON tree on read and assumed
    // already present on write; when false it is stripped on read and
    // injected (first) on write.
    private final boolean maintainType;

    private RuntimeTypeAdapterFactory(Class<?> baseType, String typeFieldName, boolean maintainType) {
        if (typeFieldName == null || baseType == null) {
            throw new NullPointerException();
        }
        this.baseType = baseType;
        this.typeFieldName = typeFieldName;
        this.maintainType = maintainType;
    }

    /**
     * Creates a new runtime type adapter for {@code baseType} using {@code
     * typeFieldName} as the type field name. Type field names are case sensitive.
     * {@code maintainType} flag decides if the type will be stored in the pojo or not.
     */
    public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType, String typeFieldName, boolean maintainType) {
        return new RuntimeTypeAdapterFactory<T>(baseType, typeFieldName, maintainType);
    }

    /**
     * Creates a new runtime type adapter for {@code baseType} using {@code
     * typeFieldName} as the type field name. Type field names are case sensitive.
     */
    public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType, String typeFieldName) {
        return new RuntimeTypeAdapterFactory<T>(baseType, typeFieldName, false);
    }

    /**
     * Creates a new runtime type adapter for {@code baseType} using {@code "type"} as
     * the type field name.
     */
    public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType) {
        return new RuntimeTypeAdapterFactory<T>(baseType, "type", false);
    }

    /**
     * Registers {@code type} identified by {@code label}. Labels are case
     * sensitive.
     *
     * @throws IllegalArgumentException if either {@code type} or {@code label}
     *     have already been registered on this type adapter.
     */
    public RuntimeTypeAdapterFactory<T> registerSubtype(Class<? extends T> type, String label) {
        if (type == null || label == null) {
            throw new NullPointerException();
        }
        // Both directions must be unique, since serialization looks up by class
        // and deserialization looks up by label.
        if (subtypeToLabel.containsKey(type) || labelToSubtype.containsKey(label)) {
            throw new IllegalArgumentException("types and labels must be unique");
        }
        labelToSubtype.put(label, type);
        subtypeToLabel.put(type, label);
        return this;
    }

    /**
     * Registers {@code type} identified by its {@link Class#getSimpleName simple
     * name}. Labels are case sensitive.
     *
     * @throws IllegalArgumentException if either {@code type} or its simple name
     *     have already been registered on this type adapter.
     */
    public RuntimeTypeAdapterFactory<T> registerSubtype(Class<? extends T> type) {
        return registerSubtype(type, type.getSimpleName());
    }

    /**
     * Builds the polymorphic adapter for {@code baseType}; returns null for any
     * other type so Gson falls through to its default handling.
     */
    public <R> TypeAdapter<R> create(Gson gson, TypeToken<R> type) {
        if (type.getRawType() != baseType) {
            return null;
        }

        // Snapshot a delegate adapter per registered subtype up front; the
        // anonymous adapter below closes over these maps, so subtypes
        // registered after create() are not picked up.
        final Map<String, TypeAdapter<?>> labelToDelegate = new LinkedHashMap<String, TypeAdapter<?>>();
        final Map<Class<?>, TypeAdapter<?>> subtypeToDelegate = new LinkedHashMap<Class<?>, TypeAdapter<?>>();
        for (Map.Entry<String, Class<?>> entry : labelToSubtype.entrySet()) {
            // getDelegateAdapter skips this factory to avoid infinite recursion.
            TypeAdapter<?> delegate = gson.getDelegateAdapter(this, TypeToken.get(entry.getValue()));
            labelToDelegate.put(entry.getKey(), delegate);
            subtypeToDelegate.put(entry.getValue(), delegate);
        }

        return new TypeAdapter<R>() {
            @Override
            public R read(JsonReader in) throws IOException {
                // Parse the whole value into a tree first so the type field can
                // be inspected (and possibly removed) before delegating.
                JsonElement jsonElement = Streams.parse(in);
                JsonElement labelJsonElement;
                if (maintainType) {
                    // Leave the type field in place for the delegate to see.
                    labelJsonElement = jsonElement.getAsJsonObject().get(typeFieldName);
                } else {
                    // Strip the synthetic type field before delegating.
                    labelJsonElement = jsonElement.getAsJsonObject().remove(typeFieldName);
                }
                if (labelJsonElement == null) {
                    throw new JsonParseException("cannot deserialize " + baseType
                            + " because it does not define a field named " + typeFieldName);
                }
                String label = labelJsonElement.getAsString();
                @SuppressWarnings("unchecked") // registration requires that subtype extends T
                TypeAdapter<R> delegate = (TypeAdapter<R>) labelToDelegate.get(label);
                if (delegate == null) {
                    // Local deviation from upstream gson-extras: an unknown
                    // label is logged and mapped to null instead of throwing,
                    // so callers must tolerate null elements.
                    Log.e("RuntimeTypeAdapter", "cannot deserialize " + baseType + " subtype named "
                            + label + "; did you forget to register a subtype? " + jsonElement);
                    return null;
                }
                return delegate.fromJsonTree(jsonElement);
            }

            @Override
            public void write(JsonWriter out, R value) throws IOException {
                Class<?> srcType = value.getClass();
                String label = subtypeToLabel.get(srcType);
                @SuppressWarnings("unchecked") // registration requires that subtype extends T
                TypeAdapter<R> delegate = (TypeAdapter<R>) subtypeToDelegate.get(srcType);
                if (delegate == null) {
                    throw new JsonParseException("cannot serialize " + srcType.getName()
                            + "; did you forget to register a subtype?");
                }
                JsonObject jsonObject = delegate.toJsonTree(value).getAsJsonObject();

                if (maintainType) {
                    // The pojo carries its own type field; emit the tree as-is.
                    Streams.write(jsonObject, out);
                    return;
                }

                // Rebuild the object so the type field comes first, refusing to
                // clobber a real field of the same name.
                JsonObject clone = new JsonObject();
                if (jsonObject.has(typeFieldName)) {
                    throw new JsonParseException("cannot serialize " + srcType.getName()
                            + " because it already defines a field named " + typeFieldName);
                }
                clone.add(typeFieldName, new JsonPrimitive(label));
                for (Map.Entry<String, JsonElement> e : jsonObject.entrySet()) {
                    clone.add(e.getKey(), e.getValue());
                }
                Streams.write(clone, out);
            }
        }.nullSafe();
    }
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.android.apps.forscience.whistlepunk.scalarchart;

import androidx.annotation.VisibleForTesting;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Label;
import com.google.android.apps.forscience.whistlepunk.sensorapi.StreamStat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

/**
 * Holds the time-ordered stream of (timestamp, value) points backing a scalar chart, along with
 * the label markers and stream stats displayed with it.
 *
 * <p>Points are assumed to be kept sorted by x (timestamp); the range queries and searches rely
 * on that ordering.
 */
public class ChartData {
  /** An immutable (x, y) sample: x is a timestamp, y is the sensor value. */
  public static class DataPoint {
    private final long x;
    private final double y;

    public DataPoint(long x, double y) {
      this.x = x;
      this.y = y;
    }

    public long getX() {
      return x;
    }

    public double getY() {
      return y;
    }

    /** For debugging only */
    @Override
    public String toString() {
      return String.format("(%d,%.3g)", x, y);
    }
  }

  // The number of indices that an approximate binary search may be off.
  // Larger numbers cause binary search to be faster at the risk of drawing unnecessary points.
  // TODO: Look into tweaking this number for utmost efficiency and memory usage!
  @VisibleForTesting private static final int DEFAULT_APPROX_RANGE = 8;

  // Minimum number of stale points accumulated before throwAwayBetween does a bulk delete.
  public static final int DEFAULT_THROWAWAY_THRESHOLD = 100;
  private int throwawayDataSizeThreshold;

  // Maximum x-span of stale data tolerated before a bulk delete. 2 minutes is plenty.
  public static final long DEFAULT_THROWAWAY_TIME_THRESHOLD = 1000 * 60 * 2;
  private long throwawayDataTimeThreshold = DEFAULT_THROWAWAY_TIME_THRESHOLD;

  // The chart's points, ordered by x (timestamp).
  private List<DataPoint> data = new ArrayList<>();

  // The list of data points at which a label should be displayed.
  private List<DataPoint> labels = new ArrayList<>();

  // The list of Label objects which are not yet converted into DataPoints and added to the
  // labels list. This happens when the Label is outside of the range for which we have data,
  // so we cannot calculate where that label should be drawn.
  private List<Label> unaddedLabels = new ArrayList<>();

  // The stats for this list.
  private List<StreamStat> stats = new ArrayList<>();

  private static final Comparator<? super DataPoint> DATA_POINT_COMPARATOR =
      new Comparator<DataPoint>() {
        @Override
        public int compare(DataPoint lhs, DataPoint rhs) {
          return Long.compare(lhs.getX(), rhs.getX());
        }
      };

  public ChartData() {
    this(DEFAULT_THROWAWAY_THRESHOLD, DEFAULT_THROWAWAY_TIME_THRESHOLD);
  }

  /**
   * @param throwawayDataSizeThreshold minimum point count before bulk throwaway kicks in
   * @param throwawayDataTimeThreshold minimum x-span before bulk throwaway kicks in
   */
  public ChartData(int throwawayDataSizeThreshold, long throwawayDataTimeThreshold) {
    this.throwawayDataSizeThreshold = throwawayDataSizeThreshold;
    this.throwawayDataTimeThreshold = throwawayDataTimeThreshold;
  }

  // This assumes the data point occurs after all previous data points.
  // Order is not checked.
  public void addPoint(DataPoint point) {
    data.add(point);
    if (unaddedLabels.size() > 0) {
      // A new point may have extended the data range far enough to place a pending label.
      // TODO to avoid extra work, only try again if new data might come in in the direction
      // of these labels...?
      Iterator<Label> unaddedLabelIterator = unaddedLabels.iterator();
      while (unaddedLabelIterator.hasNext()) {
        Label next = unaddedLabelIterator.next();
        if (tryAddingLabel(next)) {
          unaddedLabelIterator.remove();
        }
      }
    }
  }

  public List<DataPoint> getPoints() {
    return data;
  }

  // This assumes the List<DataPoint> is ordered by timestamp.
  public void setPoints(List<DataPoint> data) {
    this.data = data;
  }

  /** Appends a batch of points (already ordered among themselves) and re-sorts the list. */
  public void addOrderedGroupOfPoints(List<DataPoint> points) {
    if (points == null || points.size() == 0) {
      return;
    }
    data.addAll(points);
    Collections.sort(data, DATA_POINT_COMPARATOR);
  }

  /** Returns a view of the points from approximately xMin through the end of the data. */
  public List<DataPoint> getPointsInRangeToEnd(long xMin) {
    int startIndex = approximateBinarySearch(xMin, 0, true);
    return data.subList(startIndex, data.size());
  }

  /** Returns a view of the points approximately covering [xMin, xMax]. */
  public List<DataPoint> getPointsInRange(long xMin, long xMax) {
    int startIndex = approximateBinarySearch(xMin, 0, true);
    int endIndex = approximateBinarySearch(xMax, startIndex, false);
    if (startIndex > endIndex) {
      return Collections.emptyList();
    }
    return data.subList(startIndex, endIndex + 1);
  }

  /** Returns the point closest to the timestamp, or null when there is no data. */
  public DataPoint getClosestDataPointToTimestamp(long timestamp) {
    // Check emptiness before searching; the search is meaningless on an empty list.
    if (data.size() == 0) {
      return null;
    }
    int index = getClosestIndexToTimestamp(timestamp);
    return data.get(index);
  }

  // Searches for the closest index to a given timestamp, round up or down if the search
  // does not find an exact match, to the closest timestamp.
  public int getClosestIndexToTimestamp(long timestamp) {
    return exactBinarySearch(timestamp, 0);
  }

  /**
   * Searches for the index of the value that is equal to or just less than the search X value, in
   * the range of startSearchIndex to the end of the data array.
   *
   * @param searchX The X value to search for
   * @param startSearchIndex The index into the data where the search starts
   * @return The exact index of the value at or just below the search X value.
   */
  @VisibleForTesting
  int exactBinarySearch(long searchX, int startSearchIndex) {
    return approximateBinarySearch(searchX, startSearchIndex, data.size() - 1, true, 0);
  }

  /**
   * A helper function to search for the index of the point with the closest value to the searchX
   * provided, using the default approximate search range.
   *
   * @param searchX The value to search for
   * @param startSearchIndex The index into the data where the search starts
   * @param preferStart Whether the approximate result should prefer the start of a range or the end
   *     of a range. This can be used to make sure the range is not too short.
   * @return The index of an approximate X match in the array
   */
  private int approximateBinarySearch(long searchX, int startSearchIndex, boolean preferStart) {
    return approximateBinarySearch(
        searchX, startSearchIndex, data.size() - 1, preferStart, DEFAULT_APPROX_RANGE);
  }

  /**
   * Searches for the index of the point with the closest value to the searchX provided. Does not
   * try for an exact match, rather returns when the range is smaller than the
   * approximateSearchRange. Assumes points are ordered.
   *
   * @param searchX The value to search for
   * @param startIndex The index into the data where the search starts
   * @param endIndex The index where the search ends
   * @param preferStart Whether the approximate result should prefer the start of a range or the end
   *     of a range. This can be used to make sure the range is not too short.
   * @param searchRange The size of the range at which we can stop searching and just return
   *     something, either at the start of the current range if preferStart, or the end of the
   *     current range if preferEnd. This function is often used to find the approximate start and
   *     end indices of a known range, when erring on the outside of that range is ok but erring on
   *     the inside of the range causes points to be clipped.
   * @return The index of an approximate X match in the array
   */
  @VisibleForTesting
  int approximateBinarySearch(
      long searchX, int startIndex, int endIndex, boolean preferStart, int searchRange) {
    if (data.isEmpty()) {
      return 0;
    }

    // See if we're already done (need to do this before calculating distances below, in case
    // searchX is so big or small we're in danger of overflow).
    long startValue = data.get(startIndex).getX();
    if (searchX <= startValue) {
      return startIndex;
    }
    long endValue = data.get(endIndex).getX();
    if (searchX >= endValue) {
      return endIndex;
    }

    if (endIndex - startIndex <= searchRange) {
      return preferStart ? startIndex : endIndex;
    }
    if (searchRange == 0 && endIndex - startIndex == 1) {
      // Exact search down to two candidates: pick the nearer one, breaking ties by preference.
      long distanceToStart = searchX - startValue;
      long distanceToEnd = endValue - searchX;
      if (distanceToStart < distanceToEnd) {
        return startIndex;
      } else if (distanceToStart == distanceToEnd) {
        return preferStart ? startIndex : endIndex;
      } else {
        return endIndex;
      }
    }
    // Unsigned shift avoids int overflow when startIndex + endIndex exceeds Integer.MAX_VALUE
    // (the classic binary-search midpoint bug).
    int mid = (startIndex + endIndex) >>> 1;
    long midX = data.get(mid).getX();
    if (midX < searchX) {
      return approximateBinarySearch(searchX, mid, endIndex, preferStart, searchRange);
    } else if (midX > searchX) {
      return approximateBinarySearch(searchX, startIndex, mid, preferStart, searchRange);
    } else {
      return mid;
    }
  }

  public int getNumPoints() {
    return data.size();
  }

  public boolean isEmpty() {
    return data.isEmpty();
  }

  // Assume points are ordered
  public long getXMin() {
    return data.get(0).getX();
  }

  // Assume points are ordered
  public long getXMax() {
    return data.get(data.size() - 1).getX();
  }

  public void clear() {
    data.clear();
    labels.clear();
    unaddedLabels.clear();
  }

  public void setDisplayableLabels(List<Label> labels) {
    this.labels.clear();
    unaddedLabels.clear();
    for (Label label : labels) {
      if (!tryAddingLabel(label)) {
        unaddedLabels.add(label);
      }
    }
  }

  public void addLabel(Label label) {
    if (!tryAddingLabel(label)) {
      unaddedLabels.add(label);
    }
  }

  /**
   * Converts a Label into a display DataPoint if its timestamp falls inside the current data
   * range, interpolating y between the neighboring points.
   *
   * @return true if the label was placed; false if it is outside the data range and should be
   *     retried later (kept in unaddedLabels).
   */
  @VisibleForTesting
  boolean tryAddingLabel(Label label) {
    long timestamp = label.getTimeStamp();
    if (data.isEmpty() || timestamp < getXMin() || timestamp > getXMax()) {
      return false;
    }
    int indexPrev = exactBinarySearch(timestamp, 0);
    DataPoint start = data.get(indexPrev);
    if (timestamp == start.getX()) {
      labels.add(start);
      return true;
    } else if (indexPrev < data.size() - 1) {
      // Fixed off-by-one: the previous bound (data.size() - 2) wrongly rejected labels between
      // the last two points, even though indexPrev + 1 is a valid index there. Since the range
      // check above guarantees timestamp <= getXMax(), and the exact-match case was handled,
      // indexPrev is at most data.size() - 2 here.
      DataPoint end = data.get(indexPrev + 1);
      double weight = (timestamp - start.getX()) / (1.0 * end.getX() - start.getX());
      // Fixed inverted interpolation: weight is the fractional distance from start to end, so
      // the interpolated y must weight `end` by `weight` (the previous code did the opposite,
      // so the label's y drifted toward the wrong endpoint).
      labels.add(new DataPoint(timestamp, start.getY() * (1 - weight) + end.getY() * weight));
      return true;
    }
    return false;
  }

  public List<DataPoint> getLabelPoints() {
    return labels;
  }

  public void updateStats(List<StreamStat> stats) {
    this.stats = stats;
  }

  public List<StreamStat> getStats() {
    return stats;
  }

  public void throwAwayBefore(long throwawayThreshold) {
    throwAwayBetween(Long.MIN_VALUE, throwawayThreshold);
  }

  public void throwAwayAfter(long throwawayThreshold) {
    throwAwayBetween(throwawayThreshold, Long.MAX_VALUE);
  }

  /** Bulk-deletes points whose x falls in [throwAwayMinX, throwAwayMaxX), once thresholds hit. */
  public void throwAwayBetween(long throwAwayMinX, long throwAwayMaxX) {
    if (throwAwayMaxX <= throwAwayMinX) {
      return;
    }

    // This should be the index to the right of max
    int indexEnd = approximateBinarySearch(throwAwayMaxX, 0, data.size() - 1, false, 1);
    int indexStart = approximateBinarySearch(throwAwayMinX, 0, data.size() - 1, false, 1);

    // Only throw away in bulk once we reach a threshold, so that all the work is not done on
    // every iteration. Make sure to also throw out very far away old data to avoid
    // "path too long". So if the data is less than the size, and the range is not too long,
    // we can just "return" here.
    if (indexEnd - indexStart < throwawayDataSizeThreshold
        && (indexStart >= 0
            && indexEnd < data.size()
            && data.get(indexEnd).getX() - data.get(indexStart).getX()
                < throwawayDataTimeThreshold)) {
      return;
    }

    data.subList(indexStart, indexEnd).clear();
  }
}
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.acceptance.git;

import static com.google.common.truth.Truth.assertThat;

import com.google.gerrit.acceptance.GerritConfig;

import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.RefSpec;
import org.junit.Test;

/**
 * Acceptance tests for Gerrit's submodule subscription feature: pushes to a subscribed-to
 * project should update the gitlink recorded in the superproject, with an appropriate
 * auto-generated commit message.
 */
public class SubmoduleSubscriptionsIT extends AbstractSubmoduleSubscription {

  // A subscription created before the sub project has any commits still picks up the
  // first push to the subscribed branch.
  @Test
  public void testSubscriptionToEmptyRepo() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");
    ObjectId subHEAD = pushChangeTo(subRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project", subHEAD);
  }

  // Subscribing after the sub project already has history: only pushes made after the
  // subscription propagate to the superproject.
  @Test
  public void testSubscriptionToExistingRepo() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");
    ObjectId subHEAD = pushChangeTo(subRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEAD);
  }

  // With verboseSuperprojectUpdate disabled, every generated superproject commit carries
  // only the short "Updated git submodules" message — no per-project rev log.
  @Test
  @GerritConfig(name = "submodule.verboseSuperprojectUpdate", value = "false")
  public void testSubmoduleShortCommitMessage() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");

    // The first update doesn't include any commit messages
    ObjectId subRepoId = pushChangeTo(subRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subRepoId);
    expectToHaveCommitMessage(superRepo, "master",
        "Updated git submodules\n\n");

    // Any following update also has a short message
    subRepoId = pushChangeTo(subRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subRepoId);
    expectToHaveCommitMessage(superRepo, "master",
        "Updated git submodules\n\n");
  }

  // Default (verbose) mode: the generated commit names the updated project/branch/rev, and
  // subsequent updates append only the newest sub commit's message (not the whole log).
  @Test
  public void testSubmoduleCommitMessage() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");
    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");
    ObjectId subHEAD = pushChangeTo(subRepo, "master");

    // The first update doesn't include the rev log
    RevWalk rw = subRepo.getRevWalk();
    // NOTE(review): this first parseCommit result is never read — subCommitMsg is
    // reassigned below before its first use.
    RevCommit subCommitMsg = rw.parseCommit(subHEAD);
    expectToHaveCommitMessage(superRepo, "master",
        "Updated git submodules\n\n" +
        "Project: " + name("subscribed-to-project")
        + " master " + subHEAD.name() + "\n\n");

    // The next commit should generate only its commit message,
    // omitting previous commit logs
    subHEAD = pushChangeTo(subRepo, "master");
    subCommitMsg = rw.parseCommit(subHEAD);
    expectToHaveCommitMessage(superRepo, "master",
        "Updated git submodules\n\n" +
        "Project: " + name("subscribed-to-project")
        + " master " + subHEAD.name() + "\n\n" +
        subCommitMsg.getFullMessage() + "\n\n");
  }

  // After removing the subscription, the superproject's gitlink is frozen at its last
  // known state and no longer follows sub project pushes.
  @Test
  public void testSubscriptionUnsubscribe() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");

    pushChangeTo(subRepo, "master");
    ObjectId subHEADbeforeUnsubscribing = pushChangeTo(subRepo, "master");

    deleteAllSubscriptions(superRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEADbeforeUnsubscribing);

    pushChangeTo(superRepo, "refs/heads/master",
        "commit after unsubscribe", "");
    pushChangeTo(subRepo, "refs/heads/master",
        "commit after unsubscribe", "");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEADbeforeUnsubscribing);
  }

  // Same as above, but unsubscribing by deleting .gitmodules outright instead of just
  // emptying its contents.
  @Test
  public void testSubscriptionUnsubscribeByDeletingGitModules() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");

    pushChangeTo(subRepo, "master");
    ObjectId subHEADbeforeUnsubscribing = pushChangeTo(subRepo, "master");

    deleteGitModulesFile(superRepo, "master");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEADbeforeUnsubscribing);

    pushChangeTo(superRepo, "refs/heads/master",
        "commit after unsubscribe", "");
    pushChangeTo(subRepo, "refs/heads/master",
        "commit after unsubscribe", "");
    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEADbeforeUnsubscribing);
  }

  // The superproject's master can track a non-master branch ("foo") of the sub project;
  // pushes to the sub project's master must not affect it.
  @Test
  public void testSubscriptionToDifferentBranches() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "foo");
    ObjectId subFoo = pushChangeTo(subRepo, "foo");
    pushChangeTo(subRepo, "master");

    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subFoo);
  }

  // A mutual subscription (super -> sub and sub -> super) must not loop: the forward
  // direction updates, the reverse one is suppressed.
  @Test
  public void testCircularSubscriptionIsDetected() throws Exception {
    TestRepository<?> superRepo = createProjectWithPush("super-project");
    TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");

    pushChangeTo(subRepo, "master");
    createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");
    createSubmoduleSubscription(subRepo, "master", "super-project", "master");

    ObjectId subHEAD = pushChangeTo(subRepo, "master");
    pushChangeTo(superRepo, "master");

    expectToHaveSubmoduleState(superRepo, "master",
        "subscribed-to-project", subHEAD);
    assertThat(hasSubmodule(subRepo, "master", "super-project")).isFalse();
  }

  // Empties .gitmodules on the given branch and pushes the change upstream.
  private void deleteAllSubscriptions(TestRepository<?> repo, String branch)
      throws Exception {
    repo.git().fetch().setRemote("origin").call();
    repo.reset("refs/remotes/origin/" + branch);

    ObjectId expectedId = repo.branch("HEAD").commit().insertChangeId()
      .message("delete contents in .gitmodules")
      .add(".gitmodules", "") // Just remove the contents of the file!
      .create();
    repo.git().push().setRemote("origin").setRefSpecs(
        new RefSpec("HEAD:refs/heads/" + branch)).call();

    // NOTE(review): the verification below always inspects refs/heads/master even though
    // 'branch' is a parameter — all current callers pass "master"; confirm before reuse.
    ObjectId actualId = repo.git().fetch().setRemote("origin").call()
        .getAdvertisedRef("refs/heads/master").getObjectId();
    assertThat(actualId).isEqualTo(expectedId);
  }

  // Deletes the .gitmodules file on the given branch and pushes the change upstream.
  private void deleteGitModulesFile(TestRepository<?> repo, String branch)
      throws Exception {
    repo.git().fetch().setRemote("origin").call();
    repo.reset("refs/remotes/origin/" + branch);

    ObjectId expectedId = repo.branch("HEAD").commit().insertChangeId()
      .message("delete .gitmodules")
      .rm(".gitmodules")
      .create();
    repo.git().push().setRemote("origin").setRefSpecs(
        new RefSpec("HEAD:refs/heads/" + branch)).call();

    // NOTE(review): same hard-coded refs/heads/master as in deleteAllSubscriptions above.
    ObjectId actualId = repo.git().fetch().setRemote("origin").call()
        .getAdvertisedRef("refs/heads/master").getObjectId();
    assertThat(actualId).isEqualTo(expectedId);
  }

  // True when 'submodule' exists as a tree entry at the tip of 'branch'. TestRepository.get
  // raises AssertionError for a missing entry, hence the catch-for-control-flow here.
  private boolean hasSubmodule(TestRepository<?> repo, String branch,
      String submodule) throws Exception {

    ObjectId commitId = repo.git().fetch().setRemote("origin").call()
        .getAdvertisedRef("refs/heads/" + branch).getObjectId();

    RevWalk rw = repo.getRevWalk();
    RevCommit c = rw.parseCommit(commitId);
    rw.parseBody(c.getTree());

    RevTree tree = c.getTree();
    try {
      repo.get(tree, submodule);
      return true;
    } catch (AssertionError e) {
      return false;
    }
  }

  // Asserts the full commit message at the tip of 'branch' in the given repo.
  private void expectToHaveCommitMessage(TestRepository<?> repo,
      String branch, String expectedMessage) throws Exception {

    ObjectId commitId = repo.git().fetch().setRemote("origin").call()
        .getAdvertisedRef("refs/heads/" + branch).getObjectId();

    RevWalk rw = repo.getRevWalk();
    RevCommit c = rw.parseCommit(commitId);
    assertThat(c.getFullMessage()).isEqualTo(expectedMessage);
  }
}
package de.ctrlaltdel.jenkins.plugins.satellite.builder; import hudson.Extension; import hudson.Launcher; import hudson.model.BuildListener; import hudson.model.Result; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.util.List; import java.util.Map; import java.util.Properties; import javax.servlet.ServletException; import jenkins.model.Jenkins; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import com.jcraft.jsch.ChannelExec; import com.jcraft.jsch.JSch; import com.jcraft.jsch.Session; import de.ctrlaltdel.jenkins.plugins.satellite.SatelliteConnection; import de.ctrlaltdel.jenkins.plugins.satellite.PluginConfiguration; /** * RemoteScriptBuilder * @author ds */ public class RemoteScriptBuilder extends Builder { private final String systemGroup; private final String script; private final String user; private final boolean useSSH; private transient JSch jsch; @DataBoundConstructor public RemoteScriptBuilder(String systemGroup, String user, String script, boolean useSSH) { super(); this.systemGroup = systemGroup; this.user = user; this.script = script; this.useSSH = useSSH; } @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { logBuild(listener); String runtimeScript = setScriptVariables(listener.getLogger(), build.getBuildVariables()); if (useSSH) { for (String host : SatelliteConnection.create().forOneCall().listHosts(systemGroup)) { int result = executeSSH(host, listener.getLogger(), runtimeScript); if (result != 0) { 
build.setResult(Result.FAILURE); } } } else { SatelliteConnection.create().forOneCall().logger(listener).remoteScript(systemGroup, user, runtimeScript); } return true; } /** * setScriptVariables */ private String setScriptVariables(PrintStream ps, Map<String, String> vars) { StringBuilder sb = new StringBuilder(); for (String variable : vars.keySet()) { if (variable.startsWith("_")) { continue; } if (script.contains("$" + variable)) { ps.println("[INFO] insert '" + variable + '=' + vars.get(variable) + '\''); sb.append(variable + "=\"" + vars.get(variable) + "\"\n"); } } sb.append("\n"); sb.append(script); return sb.toString(); } public String getSystemGroup() { return systemGroup; } public String getUser() { return user; } public String getScript() { return script; } public boolean isUseSSH() { return useSSH; } /** * logCmd */ private void logBuild(BuildListener listener) { PrintStream ps = listener.getLogger(); ps.println("[INFO] ------------------------------------------------------------------------"); ps.println("[INFO] Run remote script on '" + systemGroup + '\''); ps.println("[INFO] ------------------------------------------------------------------------"); } /** * executeSSH stolen from SSHBuild-Plugin */ private int executeSSH(String hostname, PrintStream logger, String command) { logger.println("[SSH] connect " + hostname); PluginConfiguration configuration = (PluginConfiguration) Jenkins.getInstance().getDescriptorOrDie(PluginConfiguration.class); if (jsch == null) { jsch = new JSch(); } int port = 22; ChannelExec channel = null; Session session = null; int status = -1; try { session = jsch.getSession(configuration.getSshUser(), hostname, port); if (StringUtils.isNotEmpty(configuration.getSshKeyPath())) { jsch.addIdentity(configuration.getSshKeyPath(), configuration.getSshPassword()); } else { session.setPassword(configuration.getSshPassword()); } Properties config = new Properties(); config.put("StrictHostKeyChecking", "no"); session.setConfig(config); 
session.connect(); channel = (ChannelExec) session.openChannel("exec"); channel.setOutputStream(logger, true); channel.setExtOutputStream(logger, true); channel.setInputStream(null); // channel.setPty(pty == null ? Boolean.FALSE : pty ); logger.println("[SSH] execute script"); channel.setCommand(command); InputStream in = channel.getInputStream(); channel.connect(); byte[] tmp = new byte[1024]; int read = 0; while (true) { while ((read = in.read(tmp)) > 0) { logger.print(new String(tmp, 0, read)); } if (channel.isClosed()) { status = channel.getExitStatus(); logger.println("[SSH] exit-status: " + status); break; } try { Thread.sleep(1000); } catch (Exception ee) { // } } } catch (Exception e) { logger.println("[SSH] Exception:" + e.getMessage()); e.printStackTrace(logger); } finally { if (channel != null && channel.isConnected()) { channel.disconnect(); } if (session != null && session.isConnected()) { session.disconnect(); } } return status; } @Extension public static class DescriptorImpl extends BuildStepDescriptor<Builder> { public String getDisplayName() { return "Satellite Remote Script"; } @Override public RemoteScriptBuilder newInstance(StaplerRequest req, JSONObject formData) throws FormException { return req.bindJSON(RemoteScriptBuilder.class, formData); } public boolean isApplicable(Class<? extends AbstractProject> jobType) { return true; } public FormValidation doCheckUser(@QueryParameter String value) throws IOException, ServletException { if (StringUtils.isEmpty(value)) { return FormValidation.error("User required"); } if (value.equals("root")) { PluginConfiguration pluginConfiguration = (PluginConfiguration) Jenkins.getInstance().getDescriptorOrDie(PluginConfiguration.class); return pluginConfiguration.isRootAllowed() ? 
FormValidation.ok() : FormValidation.error("Root not allowed"); } return FormValidation.ok(); } public ListBoxModel doFillSystemGroupItems() { List<String> groups = SatelliteConnection.create().forOneCall().listGroups(); ListBoxModel listBoxModel = new ListBoxModel(); listBoxModel.add(""); for (String group : groups) { listBoxModel.add(group); } return listBoxModel; } } }
/**
 */
package org.afplib.afplib.impl;

import org.afplib.afplib.AfplibPackage;
import org.afplib.afplib.MPSRG;

import org.afplib.base.impl.TripletImpl;

import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.ecore.EClass;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>MPSRG</b></em>'.
 * NOTE(review): EMF-generated code — to change it, edit the Ecore model and regenerate;
 * hand edits outside the user-doc regions are overwritten by the generator.
 * Presumably an AFP MPS repeating-group triplet carrying a page-segment name — confirm
 * against the model definition.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link org.afplib.afplib.impl.MPSRGImpl#getReserved <em>Reserved</em>}</li>
 *   <li>{@link org.afplib.afplib.impl.MPSRGImpl#getPsegName <em>Pseg Name</em>}</li>
 * </ul>
 *
 * @generated
 */
public class MPSRGImpl extends TripletImpl implements MPSRG {
	/**
	 * The default value of the '{@link #getReserved() <em>Reserved</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getReserved()
	 * @generated
	 * @ordered
	 */
	protected static final Integer RESERVED_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getReserved() <em>Reserved</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getReserved()
	 * @generated
	 * @ordered
	 */
	protected Integer reserved = RESERVED_EDEFAULT;

	/**
	 * The default value of the '{@link #getPsegName() <em>Pseg Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPsegName()
	 * @generated
	 * @ordered
	 */
	protected static final String PSEG_NAME_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getPsegName() <em>Pseg Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPsegName()
	 * @generated
	 * @ordered
	 */
	protected String psegName = PSEG_NAME_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected MPSRGImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Returns the static EMF metaclass for this model object.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return AfplibPackage.eINSTANCE.getMPSRG();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Integer getReserved() {
		return reserved;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the attribute and fires a SET notification to registered adapters.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setReserved(Integer newReserved) {
		Integer oldReserved = reserved;
		reserved = newReserved;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.MPSRG__RESERVED, oldReserved, reserved));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getPsegName() {
		return psegName;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the attribute and fires a SET notification to registered adapters.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setPsegName(String newPsegName) {
		String oldPsegName = psegName;
		psegName = newPsegName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.MPSRG__PSEG_NAME, oldPsegName, psegName));
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective feature accessor used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case AfplibPackage.MPSRG__RESERVED:
				return getReserved();
			case AfplibPackage.MPSRG__PSEG_NAME:
				return getPsegName();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective feature mutator used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case AfplibPackage.MPSRG__RESERVED:
				setReserved((Integer)newValue);
				return;
			case AfplibPackage.MPSRG__PSEG_NAME:
				setPsegName((String)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Resets a feature to its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case AfplibPackage.MPSRG__RESERVED:
				setReserved(RESERVED_EDEFAULT);
				return;
			case AfplibPackage.MPSRG__PSEG_NAME:
				setPsegName(PSEG_NAME_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * True when the feature differs from its default.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case AfplibPackage.MPSRG__RESERVED:
				return RESERVED_EDEFAULT == null ? reserved != null : !RESERVED_EDEFAULT.equals(reserved);
			case AfplibPackage.MPSRG__PSEG_NAME:
				return PSEG_NAME_EDEFAULT == null ? psegName != null : !PSEG_NAME_EDEFAULT.equals(psegName);
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (Reserved: ");
		result.append(reserved);
		result.append(", PsegName: ");
		result.append(psegName);
		result.append(')');
		return result.toString();
	}

} //MPSRGImpl
/* * Copyright (c) 2015 FUJI Goro (gfx). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.gfx.android.orma.processor.model; import com.github.gfx.android.orma.annotation.Column; import com.github.gfx.android.orma.annotation.OnConflict; import com.github.gfx.android.orma.annotation.PrimaryKey; import com.github.gfx.android.orma.processor.ProcessingContext; import com.github.gfx.android.orma.processor.exception.ProcessingException; import com.github.gfx.android.orma.processor.util.Annotations; import com.github.gfx.android.orma.processor.util.SqlTypes; import com.github.gfx.android.orma.processor.util.Strings; import com.github.gfx.android.orma.processor.util.Types; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import androidx.annotation.Nullable; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.function.Supplier; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Name; import javax.lang.model.element.VariableElement; public class ColumnDefinition { public static final String kDefaultPrimaryKeyName = "_rowid_"; public final ProcessingContext context; public final 
SchemaDefinition schema; public final VariableElement element; public final String name; public final String columnName; public final TypeName type; public final boolean nullable; public final boolean explicitNonNull; public final boolean primaryKey; public final int primaryKeyOnConflict; public final boolean autoincrement; public final boolean autoId; public final boolean indexed; public final boolean unique; public final int uniqueOnConflict; public final String defaultExpr; public final Column.Collate collate; public final Column.ForeignKeyAction onDeleteAction; public final Column.ForeignKeyAction onUpdateAction; public final long helperFlags; public final TypeAdapterDefinition typeAdapter; private String storageType; public ExecutableElement getter; public ExecutableElement setter; public ColumnDefinition(SchemaDefinition schema, VariableElement element) { this.schema = schema; this.element = element; context = schema.context; // See https://www.sqlite.org/lang_createtable.html for full specification Column column = element.getAnnotation(Column.class); PrimaryKey primaryKeyAnnotation = element.getAnnotation(PrimaryKey.class); name = element.getSimpleName().toString(); columnName = columnName(column, element); type = ClassName.get(element.asType()); typeAdapter = schema.context.findTypeAdapter(element.asType()); storageType = (column != null && !Strings.isEmpty(column.storageType())) ? 
column.storageType() : null; if (primaryKeyAnnotation != null) { primaryKeyOnConflict = primaryKeyAnnotation.onConflict(); primaryKey = true; autoincrement = primaryKeyAnnotation.autoincrement(); autoId = primaryKeyAnnotation.auto() && Types.looksLikeIntegerType(type); } else { primaryKeyOnConflict = OnConflict.NONE; primaryKey = false; autoincrement = false; autoId = false; } if (column != null) { indexed = column.indexed(); uniqueOnConflict = column.uniqueOnConflict(); unique = uniqueOnConflict != OnConflict.NONE || column.unique(); collate = column.collate(); onDeleteAction = column.onDelete(); onUpdateAction = column.onUpdate(); defaultExpr = column.defaultExpr(); helperFlags = normalizeHelperFlags(primaryKey, indexed, autoincrement, autoId, column.helpers()); } else { indexed = false; uniqueOnConflict = OnConflict.NONE; unique = false; defaultExpr = null; collate = Column.Collate.BINARY; onDeleteAction = Column.ForeignKeyAction.NO_ACTION; onUpdateAction = Column.ForeignKeyAction.NO_ACTION; helperFlags = normalizeHelperFlags(primaryKey, indexed, autoincrement, autoId, Column.Helpers.AUTO); } nullable = hasNullableAnnotation(element); explicitNonNull = hasNonNullAnnotation(element); } // to create primary key columns private ColumnDefinition(SchemaDefinition schema) { this.schema = schema; context = schema.context; element = null; name = kDefaultPrimaryKeyName; columnName = kDefaultPrimaryKeyName; type = TypeName.LONG; nullable = false; explicitNonNull = false; primaryKey = true; primaryKeyOnConflict = OnConflict.NONE; autoincrement = false; autoId = true; indexed = false; unique = false; uniqueOnConflict = OnConflict.NONE; defaultExpr = ""; collate = Column.Collate.BINARY; onDeleteAction = Column.ForeignKeyAction.NO_ACTION; onUpdateAction = Column.ForeignKeyAction.NO_ACTION; helperFlags = normalizeHelperFlags(primaryKey, indexed, autoincrement, autoId, Column.Helpers.AUTO); typeAdapter = schema.context.getTypeAdapter(type); storageType = null; } public static 
ColumnDefinition createDefaultPrimaryKey(SchemaDefinition schema) { return new ColumnDefinition(schema); } static String columnName(Column column, Element element) { if (column != null && !column.value().equals("")) { return column.value(); } else { for (AnnotationMirror annotation : element.getAnnotationMirrors()) { Name annotationName = annotation.getAnnotationType().asElement().getSimpleName(); if (annotationName.contentEquals("SerializedName") // GSON || annotationName.contentEquals("JsonProperty") // Jackson ) { for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : annotation .getElementValues().entrySet()) { if (entry.getKey().getSimpleName().contentEquals("value")) { return entry.getValue().getValue().toString(); } } } } } return element.getSimpleName().toString(); } static boolean hasNullableAnnotation(Element element) { for (AnnotationMirror annotation : element.getAnnotationMirrors()) { // allow anything named "Nullable" if (annotation.getAnnotationType().asElement().getSimpleName().contentEquals("Nullable")) { return true; } } return false; } static boolean hasNonNullAnnotation(Element element) { for (AnnotationMirror annotation : element.getAnnotationMirrors()) { // allow anything named "NonNull" if (annotation.getAnnotationType().asElement().getSimpleName().contentEquals("NonNull")) { return true; } } return false; } @Column.Helpers static long normalizeHelperFlags(boolean primaryKey, boolean indexed, boolean autoincrement, boolean autoId, long flags) { if (flags == Column.Helpers.AUTO) { if (primaryKey) { return (autoincrement || !autoId) ? 
Column.Helpers.CONDITIONS | Column.Helpers.ORDERS : Column.Helpers.CONDITIONS; } else if (indexed) { return Column.Helpers.CONDITIONS | Column.Helpers.ORDERS | Column.Helpers.AGGREGATORS; } else { return Column.Helpers.AGGREGATORS; } } else { return flags; } } private static String extractStorageType(ProcessingContext context, TypeName type, Element element, TypeAdapterDefinition typeAdapter) { if (typeAdapter != null) { return SqlTypes.getSqliteType(typeAdapter.serializedType); } else if (Types.isSingleAssociation(type)) { return SqlTypes.getSqliteType(TypeName.LONG); } else if (Types.isDirectAssociation(context, type)) { return context.getSchemaDef(type).getPrimaryKey() .map(primaryKey -> SqlTypes.getSqliteType(primaryKey.getSerializedType())) .orElseGet(() -> { context.addError("Missing @PrimaryKey as foreign key", element); return "UNKNOWN"; }); } else { return SqlTypes.getSqliteType(type); } } public void initGetterAndSetter(ExecutableElement getter, ExecutableElement setter) { if (getter != null) { this.getter = getter; } if (setter != null) { this.setter = setter; } } public CharSequence getEscapedColumnName() { return context.sqlg.escapeIdentifier(columnName); } public TypeName getType() { return type; } public TypeName getBoxType() { return type.box(); } public TypeName getUnboxType() { return Types.asUnboxType(type); } public TypeName getSerializedType() { if (isAssociation()) { return getAssociatedSchema().getPrimaryKey() .map(ColumnDefinition::getSerializedType) .orElseGet(() -> Types.ByteArray); // dummy } else if (typeAdapter != null) { return Types.asUnboxType(typeAdapter.serializedType); } else { return getUnboxType(); } } public TypeName getSerializedBoxType() { return getSerializedType().box(); } public TypeName getStorableBoxType() { if (isSingleAssociation()) { return TypeName.LONG; } else if (isDirectAssociation()) { return getAssociatedSchema().getPrimaryKey().orElseThrow(new Supplier<RuntimeException>() { @Override public RuntimeException 
get() { return new RuntimeException("No primary key exists in " + getAssociatedSchema().getModelClassName()); } }).getBoxType(); } else { return getBoxType(); } }

    // NOTE(review): the fragment above is the tail of a method whose head lies outside
    // this chunk; it yields the associated schema's primary-key box type, raising when
    // the associated model declares no @PrimaryKey. Left untouched.

    /**
     * Returns the SQLite storage type for this column, resolving it lazily on first
     * use and caching the result in {@code storageType}.
     */
    public String getStorageType() {
        if (storageType == null) {
            storageType = extractStorageType(context, type, element, typeAdapter);
        }
        return storageType;
    }

    /** Whether this column may be NULL in the database schema. */
    public boolean isNullableInSQL() {
        return nullable;
    }

    /** Whether the Java-side value may be null (primitives never can be). */
    public boolean isNullableInJava() {
        return !type.isPrimitive() && nullable;
    }

    /**
     * @return A representation of {@code ColumnDef<T>}: an association def for direct
     * associations, a plain column def otherwise
     */
    public ParameterizedTypeName getColumnDefType() {
        if (isDirectAssociation()) {
            return Types.getAssociationDef(schema.getModelClassName(), getBoxType(), getAssociatedSchema().getSchemaClassName());
        } else {
            return Types.getColumnDef(schema.getModelClassName(), getBoxType());
        }
    }

    /** Emits either a setter call or a direct field assignment for this column. */
    public CodeBlock buildSetColumnExpr(CodeBlock rhsExpr) {
        if (setter != null) {
            return CodeBlock.of("$L($L)", setter.getSimpleName(), rhsExpr);
        } else {
            return CodeBlock.of("$L = $L", name, rhsExpr);
        }
    }

    public CodeBlock buildGetColumnExpr(String modelExpr) {
        return buildGetColumnExpr(CodeBlock.of("$L", modelExpr));
    }

    /** Emits either a getter call or a direct field read on {@code modelExpr}. */
    public CodeBlock buildGetColumnExpr(CodeBlock modelExpr) {
        return CodeBlock.of("$L.$L", modelExpr, getter != null ? getter.getSimpleName() + "()" : name);
    }

    public CodeBlock buildSerializedColumnExpr(String connectionExpr, String modelExpr) {
        return buildSerializedColumnExpr(connectionExpr, CodeBlock.of("$L", modelExpr));
    }

    /** Reads the column value from the model and serializes it for storage. */
    public CodeBlock buildSerializedColumnExpr(String connectionExpr, CodeBlock modelExpr) {
        CodeBlock getColumnExpr = buildGetColumnExpr(modelExpr);
        return buildSerializedExpr(connectionExpr, getColumnExpr, false);
    }

    public CodeBlock buildSerializedExpr(String connectionExpr, String valueExpr, boolean castIsRequired) {
        return buildSerializedExpr(connectionExpr, CodeBlock.of("$L", valueExpr), castIsRequired);
    }

    /**
     * Serializes {@code valueExpr} for storage: single associations store the id,
     * direct associations store the associated model's serialized primary key, and
     * everything else goes through {@link #applySerialization}.
     */
    public CodeBlock buildSerializedExpr(String connectionExpr, CodeBlock valueExpr, boolean castIsRequired) {
        CodeBlock typedValueExpr = castIsRequired ? CodeBlock.of("(($T) $L)", getBoxType(), valueExpr) : valueExpr;

        if (isSingleAssociation()) {
            return CodeBlock.of("$L.getId()", typedValueExpr);
        } else if (isDirectAssociation()) {
            // generated code falls back to a literal null when the associated model
            // has no @PrimaryKey; the comment marker survives into the output source
            return getAssociatedSchema().getPrimaryKey()
                    .map(primaryKey -> primaryKey.buildSerializedColumnExpr(connectionExpr, typedValueExpr))
                    .orElseGet(() -> CodeBlock.of("null /* missing @PrimaryKey */"));
        } else {
            return applySerialization(connectionExpr, typedValueExpr);
        }
    }

    public CodeBlock applySerialization(String connectionExpr, String valueExpr) {
        return applySerialization(connectionExpr, CodeBlock.of("$L", valueExpr));
    }

    /** Routes the value through its @StaticTypeAdapter serializer when one is required. */
    public CodeBlock applySerialization(String connectionExpr, CodeBlock valueExpr) {
        // TODO: parameter injection for static type serializers
        if (needsTypeAdapter()) {
            if (typeAdapter == null) {
                // associations must be handled by buildSerializedExpr(), never here
                if (isAssociation()) {
                    throw new AssertionError("[BUG] applySerialization() called for " + schema.getModelClassName() + "#" + name);
                }
                throw new ProcessingException("Missing @StaticTypeAdapter to serialize " + type, element);
            }
            return CodeBlock.of("$T.$L($L)", typeAdapter.typeAdapterImpl, typeAdapter.getSerializerName(), valueExpr);
        } else {
            return valueExpr;
        }
    }

    /** Inverse of {@link #applySerialization}: routes through the type-adapter deserializer. */
    public CodeBlock buildDeserializeExpr(CodeBlock valueExpr) {
        if (needsTypeAdapter()) {
            if (typeAdapter == null) {
                throw new ProcessingException("Missing @StaticTypeAdapter to deserialize " + type, element);
            }
            if (!typeAdapter.generic) {
                return CodeBlock.of("$T.$L($L)", typeAdapter.typeAdapterImpl, typeAdapter.getDeserializerName(), valueExpr);
            } else {
                // inject Class<T> if the deserializer takes more than one
                TypeName rawType = (type instanceof ParameterizedTypeName ? ((ParameterizedTypeName) type).rawType : type);
                return CodeBlock.of("$T.<$T>$L($L, $T.class)", typeAdapter.typeAdapterImpl, type, typeAdapter.getDeserializerName(), valueExpr, rawType);
            }
        } else {
            return valueExpr;
        }
    }

    public boolean needsTypeAdapter() {
        return Types.needsTypeAdapter(type);
    }

    /** Nullability annotations for generated members; primitives get none. */
    public Collection<AnnotationSpec> nullabilityAnnotations() {
        if (type.isPrimitive()) {
            return Collections.emptyList();
        }

        if (nullable) {
            return Collections.singletonList(Annotations.nullable());
        } else {
            return Collections.singletonList(Annotations.nonNull());
        }
    }

    /** @return the association descriptor, or null when this column is not an association */
    @Nullable
    public AssociationDefinition getAssociation() {
        if (Types.isSingleAssociation(type)) {
            return AssociationDefinition.createSingleAssociation(type);
        } else if (Types.isDirectAssociation(context, type)) {
            return AssociationDefinition.createDirectAssociation(type);
        }
        return null;
    }

    public boolean isAssociation() {
        return isDirectAssociation() || isSingleAssociation();
    }

    public boolean isDirectAssociation() {
        return Types.isDirectAssociation(context, type);
    }

    public boolean isSingleAssociation() {
        return Types.isSingleAssociation(type);
    }

    /** Schema of the associated model; must only be called on association columns (asserted). */
    public SchemaDefinition getAssociatedSchema() {
        AssociationDefinition r = getAssociation();
        assert r != null;
        return context.getSchemaDef(r.modelType);
    }

    // helperFlags is a bit set of Column.Helpers values
    public boolean hasConditionHelpers() {
        return (helperFlags & Column.Helpers.CONDITIONS) != 0;
    }

    public boolean hasOrderHelpers() {
        return (helperFlags & Column.Helpers.ORDERS) != 0;
    }

    public boolean hasAggregationHelpers() {
        return (helperFlags & Column.Helpers.AGGREGATORS) != 0;
    }

    /** True when every helper bit in {@code f} is enabled for this column. */
    public boolean hasHelper(@Column.Helpers long f) {
        // NONE/AUTO are not concrete helper bits and must not be queried directly
        assert f != Column.Helpers.NONE && f != Column.Helpers.AUTO;
        return (helperFlags & f) == f;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.io.sstable; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.net.InetAddress; import java.util.*; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.Config; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.service.ActiveRepairService; import org.apache.cassandra.streaming.*; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.OutputHandler; import org.apache.cassandra.utils.Pair; /** * Cassandra SSTable bulk loader. * Load an externally created sstable into a cluster. 
*/
public class SSTableLoader implements StreamEventHandler
{
    private final File directory;
    // keyspace is inferred from the parent directory name (expected layout: <keyspace>/<cf>/)
    private final String keyspace;
    private final Client client;
    private final int connectionsPerHost;
    private final OutputHandler outputHandler;
    private final Set<InetAddress> failedHosts = new HashSet<>();

    // populated as a side effect of openSSTables(); consumed by stream()
    private final List<SSTableReader> sstables = new ArrayList<>();
    private final Multimap<InetAddress, StreamSession.SSTableStreamingSections> streamingDetails = HashMultimap.create();

    static
    {
        // the loader runs outside a Cassandra daemon
        Config.setClientMode(true);
    }

    public SSTableLoader(File directory, Client client, OutputHandler outputHandler)
    {
        this(directory, client, outputHandler, 1);
    }

    public SSTableLoader(File directory, Client client, OutputHandler outputHandler, int connectionsPerHost)
    {
        this.directory = directory;
        this.keyspace = directory.getParentFile().getName();
        this.client = client;
        this.outputHandler = outputHandler;
        this.connectionsPerHost = connectionsPerHost;
    }

    /**
     * Opens every complete, non-temporary sstable found in {@link #directory} and
     * records, per endpoint, the file sections and estimated key counts to stream.
     * The FilenameFilter is used only for its side effects; it always returns false.
     */
    protected Collection<SSTableReader> openSSTables(final Map<InetAddress, Collection<Range<Token>>> ranges)
    {
        outputHandler.output("Opening sstables and calculating sections to stream");

        directory.list(new FilenameFilter()
        {
            public boolean accept(File dir, String name)
            {
                if (new File(dir, name).isDirectory())
                    return false;
                Pair<Descriptor, Component> p = SSTable.tryComponentFromFilename(dir, name);
                Descriptor desc = p == null ? null : p.left;
                // only react to the Data component of a finished sstable
                if (p == null || !p.right.equals(Component.DATA) || desc.type.isTemporary)
                    return false;

                if (!new File(desc.filenameFor(Component.PRIMARY_INDEX)).exists())
                {
                    outputHandler.output(String.format("Skipping file %s because index is missing", name));
                    return false;
                }

                CFMetaData metadata = client.getCFMetaData(keyspace, desc.cfname);
                if (metadata == null)
                {
                    outputHandler.output(String.format("Skipping file %s: column family %s.%s doesn't exist", name, keyspace, desc.cfname));
                    return false;
                }

                // optional components are included only if present on disk
                Set<Component> components = new HashSet<>();
                components.add(Component.DATA);
                components.add(Component.PRIMARY_INDEX);
                if (new File(desc.filenameFor(Component.SUMMARY)).exists())
                    components.add(Component.SUMMARY);
                if (new File(desc.filenameFor(Component.COMPRESSION_INFO)).exists())
                    components.add(Component.COMPRESSION_INFO);
                if (new File(desc.filenameFor(Component.STATS)).exists())
                    components.add(Component.STATS);

                try
                {
                    // To conserve memory, open SSTableReaders without bloom filters and discard
                    // the index summary after calculating the file sections to stream and the estimated
                    // number of keys for each endpoint. See CASSANDRA-5555 for details.
                    SSTableReader sstable = SSTableReader.openForBatch(desc, components, metadata, client.getPartitioner());
                    sstables.add(sstable);

                    // calculate the sstable sections to stream as well as the estimated number of
                    // keys per host
                    for (Map.Entry<InetAddress, Collection<Range<Token>>> entry : ranges.entrySet())
                    {
                        InetAddress endpoint = entry.getKey();
                        Collection<Range<Token>> tokenRanges = entry.getValue();

                        List<Pair<Long, Long>> sstableSections = sstable.getPositionsForRanges(tokenRanges);
                        long estimatedKeys = sstable.estimatedKeysForRanges(tokenRanges);

                        StreamSession.SSTableStreamingSections details = new StreamSession.SSTableStreamingSections(sstable, sstableSections, estimatedKeys, ActiveRepairService.UNREPAIRED_SSTABLE);
                        streamingDetails.put(endpoint, details);
                    }

                    // to conserve heap space when bulk loading
                    sstable.releaseSummary();
                }
                catch (IOException e)
                {
                    // best-effort: a broken sstable is reported and skipped, not fatal
                    outputHandler.output(String.format("Skipping file %s, error opening it: %s", name, e.getMessage()));
                }
                return false;
            }
        });
        return sstables;
    }

    public StreamResultFuture stream()
    {
        return stream(Collections.<InetAddress>emptySet());
    }

    /**
     * Streams the discovered sstables to every endpoint owning a range, except those
     * in {@code toIgnore}. References are acquired per sstable before transfer and
     * released in the finally block; transferFiles relies on that acquisition order.
     */
    public StreamResultFuture stream(Set<InetAddress> toIgnore, StreamEventHandler... listeners)
    {
        client.init(keyspace);
        outputHandler.output("Established connection to initial hosts");

        StreamPlan plan = new StreamPlan("Bulk Load", 0, connectionsPerHost).connectionFactory(client.getConnectionFactory());

        Map<InetAddress, Collection<Range<Token>>> endpointToRanges = client.getEndpointToRangesMap();
        openSSTables(endpointToRanges);
        if (sstables.isEmpty())
        {
            // return empty result
            return plan.execute();
        }
        // NOTE(review): "%sto" relies on names() emitting a trailing space per file
        outputHandler.output(String.format("Streaming relevant part of %sto %s", names(sstables), endpointToRanges.keySet()));

        for (Map.Entry<InetAddress, Collection<Range<Token>>> entry : endpointToRanges.entrySet())
        {
            InetAddress remote = entry.getKey();
            if (toIgnore.contains(remote))
                continue;

            List<StreamSession.SSTableStreamingSections> endpointDetails = new LinkedList<>();

            try
            {
                // transferSSTables assumes references have been acquired
                for (StreamSession.SSTableStreamingSections details : streamingDetails.get(remote))
                {
                    if (!details.sstable.acquireReference())
                        throw new IllegalStateException();
                    endpointDetails.add(details);
                }

                plan.transferFiles(remote, endpointDetails);
            }
            finally
            {
                for (StreamSession.SSTableStreamingSections details : endpointDetails)
                    details.sstable.releaseReference();
            }
        }
        plan.listeners(this, listeners);
        return plan.execute();
    }

    public void onSuccess(StreamState finalState) {}
    public void onFailure(Throwable t) {}

    /** Records peers whose streaming session ended unsuccessfully. */
    public void handleStreamEvent(StreamEvent event)
    {
        if (event.eventType == StreamEvent.Type.STREAM_COMPLETE)
        {
            StreamEvent.SessionCompleteEvent se = (StreamEvent.SessionCompleteEvent) event;
            if (!se.success)
                failedHosts.add(se.peer);
        }
    }

    // space-separated Data-file names, with a trailing space (see the %sto format above)
    private String names(Collection<SSTableReader> sstables)
    {
        StringBuilder builder = new StringBuilder();
        for (SSTableReader sstable : sstables)
            builder.append(sstable.descriptor.filenameFor(Component.DATA)).append(" ");
        return builder.toString();
    }

    public Set<InetAddress> getFailedHosts()
    {
        return failedHosts;
    }

    public static abstract class Client
    {
        private final Map<InetAddress, Collection<Range<Token>>> endpointToRanges = new HashMap<>();
        private IPartitioner partitioner;

        /**
         * Initialize the client.
         * Perform any step necessary so that after the call to this method:
         *   * partitioner is initialized
         *   * getEndpointToRangesMap() returns a correct map
         * This method is guaranteed to be called before any other method of a
         * client.
         */
        public abstract void init(String keyspace);

        /**
         * Stop the client.
         */
        public void stop() {}

        /**
         * Provides connection factory.
         * By default, it uses DefaultConnectionFactory.
         *
         * @return StreamConnectionFactory to use
         */
        public StreamConnectionFactory getConnectionFactory()
        {
            return new DefaultConnectionFactory();
        }

        /**
         * Validate that {@code keyspace} is an existing keyspace and {@code
         * cfName} one of its existing column family.
         */
        public abstract CFMetaData getCFMetaData(String keyspace, String cfName);

        public Map<InetAddress, Collection<Range<Token>>> getEndpointToRangesMap()
        {
            return endpointToRanges;
        }

        protected void setPartitioner(String partclass) throws ConfigurationException
        {
            setPartitioner(FBUtilities.newPartitioner(partclass));
        }

        protected void setPartitioner(IPartitioner partitioner)
        {
            this.partitioner = partitioner;
            // the following is still necessary since Range/Token reference partitioner through StorageService.getPartitioner
            DatabaseDescriptor.setPartitioner(partitioner);
        }

        public IPartitioner getPartitioner()
        {
            return partitioner;
        }

        protected void addRangeForEndpoint(Range<Token> range, InetAddress endpoint)
        {
            Collection<Range<Token>> ranges = endpointToRanges.get(endpoint);
            if (ranges == null)
            {
                ranges = new HashSet<>();
                endpointToRanges.put(endpoint, ranges);
            }
            ranges.add(range);
        }
    }
}
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.svn16; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vcs.*; import com.intellij.openapi.vcs.changes.*; import com.intellij.openapi.vcs.rollback.RollbackProgressListener; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.vcsUtil.VcsUtil; import org.jetbrains.idea.svn.Svn17TestCase; import org.jetbrains.idea.svn.SvnVcs; import org.jetbrains.idea.svn.ignore.FileGroupInfo; import org.jetbrains.idea.svn.ignore.SvnPropertyService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.tmatesoft.svn.core.SVNDepth; import org.tmatesoft.svn.core.SVNException; import org.tmatesoft.svn.core.SVNProperties; import org.tmatesoft.svn.core.SVNPropertyValue; import org.tmatesoft.svn.core.wc.ISVNPropertyValueProvider; import org.tmatesoft.svn.core.wc.SVNPropertyData; import org.tmatesoft.svn.core.wc.SVNRevision; import org.tmatesoft.svn.core.wc.SVNWCClient; import java.io.File; import java.util.*; /** * Created with IntelliJ IDEA. 
* User: Irina.Chernushina
* Date: 12/3/12
* Time: 1:28 PM
*
* Integration tests for SVN rollback: verifies that reverting selected changes
* restores working-copy state while preserving unversioned/ignored files and
* any changes explicitly allowed to remain.
*/
public class SvnRollbackTest extends Svn17TestCase {
  // acquired in setUp() from the test project
  private VcsDirtyScopeManager myDirtyScopeManager;
  private ChangeListManager myChangeListManager;
  private SvnVcs myVcs;

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    myDirtyScopeManager = VcsDirtyScopeManager.getInstance(myProject);
    myChangeListManager = ChangeListManager.getInstance(myProject);
    myVcs = SvnVcs.getInstance(myProject);
    // auto-confirm add/remove so file operations register silently with SVN
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
  }

  // rollback of a single edited file leaves no changes behind
  @Test
  public void testSimpleRollback() throws Exception {
    final VirtualFile a = createFileInCommand("a.txt", "test");
    checkin();
    VcsTestUtil.editFileInCommand(myProject, a, "tset");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = myChangeListManager.getChange(a);
    Assert.assertNotNull(change);

    rollbackIMpl(Collections.singletonList(change), Collections.<Change>emptyList());
  }

  /**
   * Rolls back {@code changes} and asserts that afterwards the change lists contain
   * exactly {@code allowedAfter} (each remaining change must match one allowed entry).
   */
  private void rollbackIMpl(List<Change> changes, final List<Change> allowedAfter) throws VcsException {
    final List<VcsException> exceptions = new ArrayList<>();
    myVcs.createRollbackEnvironment().rollbackChanges(changes, exceptions, RollbackProgressListener.EMPTY);
    if (! exceptions.isEmpty()) {
      throw exceptions.get(0);
    }
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    List<LocalChangeList> lists = myChangeListManager.getChangeLists();
    final HashSet<Change> afterCopy = new HashSet<>(allowedAfter);
    for (LocalChangeList list : lists) {
      final Collection<Change> listChanges = list.getChanges();
      if (! listChanges.isEmpty()) {
        for (Change change : listChanges) {
          final boolean removed = afterCopy.remove(change);
          Assert.assertTrue(removed);
        }
      }
    }
    Assert.assertTrue(afterCopy.isEmpty());
  }

  // rolling back a directory move restores the directory and its files
  @Test
  public void testRollbackMoveDir() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    VcsTestUtil.moveFileInCommand(myProject, tree.mySourceDir, tree.myTargetDir);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertMovedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);

    rollbackIMpl(Collections.singletonList(change), Collections.<Change>emptyList());
  }

  // moved dir contains an unversioned file: rollback must bring it back too
  @Test
  public void testRollbackMOveDirVariant() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    final VirtualFile unv = createFileInCommand(tree.mySourceDir, "unv.txt", "***");
    final File wasUnversioned = new File(unv.getPath());
    VcsTestUtil.moveFileInCommand(myProject, tree.mySourceDir, tree.myTargetDir);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertMovedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    Assert.assertTrue(unv != null);
    Assert.assertTrue(unv.isValid());
    Assert.assertTrue(!FileUtil.filesEqual(new File(unv.getPath()), wasUnversioned));
    Assert.assertTrue(! wasUnversioned.exists());

    rollbackIMpl(Arrays.asList(change, s2Change), Collections.<Change>emptyList());
    Assert.assertTrue(wasUnversioned.exists());
  }

  //IDEA-39943
  @Test
  public void testRollbackWithDeepUnversioned() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final VirtualFile inner = createDirInCommand(tree.mySourceDir, "inner");
    final VirtualFile innerFile = createFileInCommand(inner, "inInner.txt", "kdfjsdisdjiuewjfew wefn w");
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    final VirtualFile deepUnverioned = createFileInCommand(inner, "deepUnverioned.txt", "deepUnverioned");
    final File was = new File(deepUnverioned.getPath());
    checkin();
    runAndVerifyStatusSorted("? root" + File.separator + "source" + File.separator + "inner" + File.separator + deepUnverioned.getName());
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "newName");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    assertMovedChange(inner);
    assertMovedChange(innerFile);
    Assert.assertTrue(!FileUtil.filesEqual(new File(deepUnverioned.getPath()), was));
    Assert.assertTrue(! was.exists());

    rollbackIMpl(Arrays.asList(change), Collections.<Change>emptyList());
    Assert.assertTrue(was.exists());
  }

  // rollback of dir rename keeps a deep edit when it is listed as allowed-after
  @Test
  public void testRollbackDeepEdit() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final VirtualFile inner = createDirInCommand(tree.mySourceDir, "inner");
    final VirtualFile innerFile = createFileInCommand(inner, "inInner.txt", "kdfjsdisdjiuewjfew wefn w");
    checkin();
    runAndVerifyStatusSorted();
    VcsTestUtil.editFileInCommand(myProject, innerFile, "some content");
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "newName");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    assertMovedChange(inner);
    final Change innerChange = assertMovedChange(innerFile);

    rollbackIMpl(Arrays.asList(change),
                 Arrays.asList(new Change(innerChange.getBeforeRevision(), innerChange.getBeforeRevision(), FileStatus.MODIFIED)));
  }

  // nested renames + a deep unversioned file survive rollback of the outer rename
  @Test
  public void testRollbackDirRenameWithDeepRenamesAndUnverioned() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final VirtualFile inner = createDirInCommand(tree.mySourceDir, "inner");
    final VirtualFile inner1 = createDirInCommand(inner, "inner1");
    final VirtualFile inner2 = createDirInCommand(inner1, "inner2");
    final VirtualFile innerFile_ = createFileInCommand(inner1, "inInner38432.txt", "kdfjsdisdjiuewjfew wefn w");
    final VirtualFile inner3 = createDirInCommand(inner2, "inner3");
    final VirtualFile innerFile = createFileInCommand(inner3, "inInner.txt", "kdfjsdisdjiuewjfew wefn w");
    final VirtualFile innerFile1 = createFileInCommand(inner3, "inInner1.txt", "kdfjsdisdjiuewjfew wefn w");
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    final VirtualFile deepUNversioned = createFileInCommand(inner3, "deep.txt", "deep");
    final File wasU = new File(deepUNversioned.getPath());
    final File wasLowestDir = new File(inner3.getPath());
    final File wasInnerFile1 = new File(innerFile1.getPath());
    final File wasInnerFile = new File(innerFile.getPath());
    checkin();
    runAndVerifyStatusSorted("? root" + File.separator + "source" + File.separator + "inner" + File.separator + "inner1" + File.separator + "inner2" + File.separator + "inner3" + File.separator + "deep.txt");
    VcsTestUtil.editFileInCommand(myProject, innerFile, "some content");
    final File inner2Before = new File(inner2.getPath());
    VcsTestUtil.renameFileInCommand(myProject, inner2, "newName2");
    final File wasU2 = new File(deepUNversioned.getPath());
    final File inner2After = new File(inner2.getPath());
    final File wasInnerFileAfter = new File(innerFile.getPath());
    final File wasInnerFile1After = new File(innerFile1.getPath());
    final File wasLowestDirAfter = new File(inner3.getPath());
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "newNameSource");
    Assert.assertTrue(! wasU.exists());
    Assert.assertTrue(! wasU2.exists());
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    final Change inner2Change = assertMovedChange(inner2);
    assertMovedChange(inner);
    final Change innerChange = assertMovedChange(innerFile);
    // "fantom" entries describe moves that rollback is allowed to leave behind
    final Change fantomDelete1 = new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(wasLowestDir, true), "2"),
                                            new SimpleContentRevision("1", VcsUtil.getFilePath(wasLowestDirAfter, true), "2"));
    final Change fantomDelete2 = new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(wasInnerFile1, false), "2"),
                                            new SimpleContentRevision("1", VcsUtil.getFilePath(wasInnerFile1After, false), SVNRevision.WORKING.getName()));

    rollbackIMpl(Arrays.asList(change),
                 Arrays.asList(new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(wasInnerFile, false), "2"),
                                          new SimpleContentRevision("1", VcsUtil.getFilePath(wasInnerFileAfter, false), SVNRevision.WORKING.getName())),
                               new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(inner2Before, true), "2"),
                                          new SimpleContentRevision("1", VcsUtil.getFilePath(inner2After, true), SVNRevision.WORKING.getName())),
                               fantomDelete1, fantomDelete2));
    Assert.assertTrue(wasU2.exists());
  }

  // an svn property set on a deep file must survive rollback of the outer rename
  @Test
  public void testKeepDeepProperty() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final VirtualFile inner = createDirInCommand(tree.mySourceDir, "inner");
    final VirtualFile innerFile = createFileInCommand(inner, "inInner.txt", "kdfjsdisdjiuewjfew wefn w");
    checkin();
    runAndVerifyStatus();
    final File fileBefore = new File(innerFile.getPath());
    setProperty(fileBefore, "abc", "cde");
    Assert.assertEquals("cde", getProperty(new File(innerFile.getPath()), "abc"));
    final File innerBefore = new File(inner.getPath());
    VcsTestUtil.renameFileInCommand(myProject, inner, "innerNew");
    final File innerAfter = new File(inner.getPath());
    final File fileAfter = new File(innerFile.getPath());
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "newName");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    assertMovedChange(inner);
    final Change innerChange = assertMovedChange(innerFile);
    Assert.assertEquals("cde", getProperty(new File(innerFile.getPath()), "abc"));

    rollbackIMpl(Arrays.asList(change),
                 Arrays.asList(new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(innerBefore, true), "2"),
                                          new SimpleContentRevision("1", VcsUtil.getFilePath(innerAfter, true), SVNRevision.WORKING.getName())),
                               new Change(new SimpleContentRevision("1", VcsUtil.getFilePath(fileBefore, false), "2"),
                                          new SimpleContentRevision("1", VcsUtil.getFilePath(fileAfter, false), SVNRevision.WORKING.getName()))));
    Assert.assertEquals("cde", getProperty(fileAfter, "abc"));
  }

  /** Reads a working-copy svn property via SVNKit; null when the property is absent. */
  private String getProperty(File file, String name) throws SVNException {
    final SVNWCClient client = myVcs.getSvnKitManager().createWCClient();
    final SVNPropertyData data = client.doGetProperty(file, name, SVNRevision.UNDEFINED, SVNRevision.WORKING);
    // NOTE(review): getBytes()/new String() use the platform charset — presumably
    // fine for the ASCII-only property values used in these tests; confirm otherwise
    return data == null ? null : new String(data.getValue().getBytes());
  }

  /** Sets a single working-copy svn property via SVNKit (depth EMPTY, force=true). */
  private void setProperty(final File file, final String name, final String value) throws SVNException {
    final SVNWCClient client = myVcs.getSvnKitManager().createWCClient();
    client.doSetProperty(file, new ISVNPropertyValueProvider() {
      @Override
      public SVNProperties providePropertyValues(File path, SVNProperties properties) throws SVNException {
        final SVNProperties result = new SVNProperties();
        result.put(name, SVNPropertyValue.create(value));
        return result;
      }
    }, true, SVNDepth.EMPTY, null, null);
  }

  // rollback of deletions restores a directory and an individual file
  @Test
  public void testRollbackDelete() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final FilePath fpSource = VcsUtil.getFilePath(new File(tree.mySourceDir.getPath()), true);
    final FilePath fpT11 = VcsUtil.getFilePath(new File(tree.myTargetFiles.get(0).getPath()), false);
    VcsTestUtil.deleteFileInCommand(myProject, tree.mySourceDir);
    VcsTestUtil.deleteFileInCommand(myProject, tree.myTargetFiles.get(0));
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertDeletedChange(fpSource);
    final Change t11Change = assertDeletedChange(fpT11);

    rollbackIMpl(Arrays.asList(change, t11Change), Collections.<Change>emptyList());
  }

  // a deletion change has no after-revision
  private Change assertDeletedChange(FilePath fpSource) {
    final Change change = myChangeListManager.getChange(fpSource);
    Assert.assertNotNull(change);
    Assert.assertNull(change.getAfterRevision());
    return change;
  }

  // rollback of additions; the un-rolled-back addition under newDir disappears with it
  @Test
  public void testRollbackAdd() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final VirtualFile newDir = createDirInCommand(tree.mySourceDir, "newDir");
    final VirtualFile inNewDir = createFileInCommand(newDir, "f.txt", "12345");
    final VirtualFile inSource = createFileInCommand(tree.myTargetDir, "newF.txt", "54321");
    Assert.assertTrue(newDir != null && inNewDir != null && inSource != null);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change change = assertCreatedChange(newDir);
    final Change inNewDirChange = assertCreatedChange(inNewDir);
    final Change inSourceChange = assertCreatedChange(inSource);

    rollbackIMpl(Arrays.asList(change, inSourceChange), Collections.<Change>emptyList());
  }

  // an addition change has no before-revision
  private Change assertCreatedChange(VirtualFile newDir) {
    final Change change = myChangeListManager.getChange(newDir);
    Assert.assertNotNull(change);
    Assert.assertNull(change.getBeforeRevision());
    return change;
  }

  // move directory with unversioned dir + check edit
  @Test
  public void testRollbackRenameDirWithUnversionedDir() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final String editedText = "s1 edited";
    VcsTestUtil.editFileInCommand(myProject, tree.myS1File, editedText);
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    final VirtualFile unverionedDir = createDirInCommand(tree.mySourceDir, "unverionedDir");
    final String unvText = "unv content";
    final VirtualFile unvFile = createFileInCommand(unverionedDir, "childFile", unvText);
    final File wasUnvDir = new File(unverionedDir.getPath());
    final File wasUnvFile = new File(unvFile.getPath());
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "renamed");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change dirChange = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    FileStatus status = myChangeListManager.getStatus(unverionedDir);
    // NOTE(review): assertNotNull on a boxed boolean is always true — likely meant assertTrue
    Assert.assertNotNull(FileStatus.UNKNOWN.equals(status));
    Assert.assertTrue(! wasUnvDir.exists());
    FileStatus fileStatus = myChangeListManager.getStatus(unvFile);
    Assert.assertNotNull(FileStatus.UNKNOWN.equals(fileStatus));
    Assert.assertTrue(! wasUnvFile.exists());

    rollbackIMpl(Collections.singletonList(dirChange),
                 Collections.singletonList(new Change(s1Change.getBeforeRevision(), s1Change.getBeforeRevision(), FileStatus.MODIFIED)));
    Assert.assertTrue(wasUnvDir.exists());
    Assert.assertTrue(wasUnvFile.exists());
  }

  // uncommitted svn:ignore: after rollback the file reverts to UNKNOWN status
  @Test
  public void testRollbackDirWithIgnored() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    VirtualFile ignored = createFileInCommand(tree.mySourceDir, "ign.txt", "ignored");
    final File wasIgnored = new File(ignored.getPath());
    final FileGroupInfo groupInfo = new FileGroupInfo();
    groupInfo.onFileEnabled(ignored);
    SvnPropertyService.doAddToIgnoreProperty(myVcs, myProject, false, new VirtualFile[]{ignored}, groupInfo);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    Assert.assertTrue(FileStatus.IGNORED.equals(myChangeListManager.getStatus(ignored)));
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "renamed");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change dirChange = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    Assert.assertTrue(! wasIgnored.exists());
    Assert.assertTrue(FileStatus.IGNORED.equals(myChangeListManager.getStatus(ignored)));

    rollbackIMpl(Collections.singletonList(dirChange), Collections.<Change>emptyList());
    ignored = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(wasIgnored);
    // ignored property was not committed
    Assert.assertTrue(FileStatus.UNKNOWN.equals(myChangeListManager.getStatus(ignored)));
    Assert.assertTrue(wasIgnored.exists());
  }

  // committed svn:ignore: the IGNORED status survives rollback
  @Test
  public void testRollbackDirWithCommittedIgnored() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    disableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    VirtualFile ignored = createFileInCommand(tree.mySourceDir, "ign.txt", "ignored");
    final File wasIgnored = new File(ignored.getPath());
    final FileGroupInfo groupInfo = new FileGroupInfo();
    groupInfo.onFileEnabled(ignored);
    SvnPropertyService.doAddToIgnoreProperty(myVcs, myProject, false, new VirtualFile[]{ignored}, groupInfo);
    checkin();
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    Assert.assertTrue(FileStatus.IGNORED.equals(myChangeListManager.getStatus(ignored)));
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "renamed");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change dirChange = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    Assert.assertTrue(! wasIgnored.exists());
    Assert.assertTrue(FileStatus.IGNORED.equals(myChangeListManager.getStatus(ignored)));

    rollbackIMpl(Collections.singletonList(dirChange), Collections.<Change>emptyList());
    ignored = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(wasIgnored);
    // ignored property was not committed
    Assert.assertTrue(FileStatus.IGNORED.equals(myChangeListManager.getStatus(ignored)));
    Assert.assertTrue(wasIgnored.exists());
  }

  // rolling back the dir change together with the file changes leaves nothing behind
  @Test
  public void testListAllChangesForRevert() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final String editedText = "s1 edited";
    VcsTestUtil.editFileInCommand(myProject, tree.myS1File, editedText);
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "renamed");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change dirChange = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);

    rollbackIMpl(Arrays.asList(dirChange, s1Change, s2Change), Collections.<Change>emptyList());
  }

  // only s1 is reverted; s2's edit remains as a plain modification under the old path
  @Test
  public void testKeepOneUnderRenamed() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final File was2 = new File(tree.myS2File.getPath());
    final String editedText = "s1 edited";
    VcsTestUtil.editFileInCommand(myProject, tree.myS1File, editedText);
    VcsTestUtil.editFileInCommand(myProject, tree.myS2File, "s2 edited");
    VcsTestUtil.renameFileInCommand(myProject, tree.mySourceDir, "renamed");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final Change dirChange = assertRenamedChange(tree.mySourceDir);
    final Change s1Change = assertMovedChange(tree.myS1File);
    final Change s2Change = assertMovedChange(tree.myS2File);
    final FilePath fp = VcsUtil.getFilePath(was2, false);

    rollbackIMpl(Arrays.asList(dirChange, s1Change),
                 Arrays.asList(new Change(new SimpleContentRevision("1", fp, "1"),
                                          new SimpleContentRevision("1", fp, SVNRevision.WORKING.getName()))));
  }

  // locally deleted (missing) file is restored by rollback of the deletion
  @Test
  public void testRollbackLocallyDeletedSimple() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    disableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
    final File wasFile = new File(tree.myS1File.getPath());
    VcsTestUtil.deleteFileInCommand(myProject, tree.myS1File);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final List<LocallyDeletedChange> deletedFiles = ((ChangeListManagerImpl)myChangeListManager).getDeletedFiles();
    Assert.assertNotNull(deletedFiles);
    Assert.assertTrue(deletedFiles.size() == 1);
    Assert.assertEquals(wasFile, deletedFiles.get(0).getPath().getIOFile());

    rollbackLocallyDeleted(Collections.singletonList(deletedFiles.get(0).getPath()), Collections.<FilePath>emptyList());
  }

  // deleting a dir reports the dir and both children; rolling back the dir restores all
  @Test
  public void testRollbackLocallyDeletedSimpleDir() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    disableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
    final File wasFile = new File(tree.mySourceDir.getPath());
    final File wasFileS1 = new File(tree.myS1File.getPath());
    final File wasFileS2 = new File(tree.myS2File.getPath());
    VcsTestUtil.deleteFileInCommand(myProject, tree.mySourceDir);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final List<LocallyDeletedChange> deletedFiles = ((ChangeListManagerImpl)myChangeListManager).getDeletedFiles();
    Assert.assertNotNull(deletedFiles);
    Assert.assertTrue(deletedFiles.size() == 3);
    final Set<File> files = new HashSet<>();
    files.add(wasFile);
    files.add(wasFileS1);
    files.add(wasFileS2);
    for (LocallyDeletedChange file : deletedFiles) {
      files.remove(file.getPath().getIOFile());
    }
    Assert.assertTrue(files.isEmpty());

    rollbackLocallyDeleted(Collections.<FilePath>singletonList(VcsUtil.getFilePath(wasFile, true)), Collections.<FilePath>emptyList());
  }

  // added-then-locally-deleted files also appear in the deleted list and roll back
  @Test
  public void testRollbackAddedLocallyDeleted() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    VirtualFile f1 = createFileInCommand(tree.mySourceDir, "f1", "4");
    VirtualFile dir = createDirInCommand(tree.mySourceDir, "dirrr");
    VirtualFile f2 = createFileInCommand(dir, "f2", "411");
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    assertCreatedChange(f1);
    assertCreatedChange(dir);
    assertCreatedChange(f2);
    disableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
    final File wasFile1 = new File(f1.getPath());
    final File wasFile2 = new File(dir.getPath());
    final File wasFile3 = new File(f2.getPath());
    VcsTestUtil.deleteFileInCommand(myProject, f1);
    VcsTestUtil.deleteFileInCommand(myProject, dir);
    myDirtyScopeManager.markEverythingDirty();
    myChangeListManager.ensureUpToDate(false);
    final List<LocallyDeletedChange> deletedFiles = ((ChangeListManagerImpl)myChangeListManager).getDeletedFiles();
    Assert.assertNotNull(deletedFiles);
    Assert.assertTrue(deletedFiles.size() == 3);
    final Set<File> files = new HashSet<>();
    files.add(wasFile1);
    files.add(wasFile2);
    files.add(wasFile3);
    Assert.assertTrue(files.contains(deletedFiles.get(0).getPath().getIOFile()));
    Assert.assertTrue(files.contains(deletedFiles.get(1).getPath().getIOFile()));
    Assert.assertTrue(files.contains(deletedFiles.get(2).getPath().getIOFile()));

    rollbackLocallyDeleted(Arrays.<FilePath>asList(VcsUtil.getFilePath(wasFile2, true), VcsUtil.getFilePath(wasFile1, false)), Collections.<FilePath>emptyList());
  }

  // a moved directory later deleted locally (method continues past this chunk)
  @Test
  public void testRollbackMovedDirectoryLocallyDeleted() throws Exception {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    final File wasInitially = new File(tree.mySourceDir.getPath());
    Assert.assertTrue(wasInitially.exists());
    VcsTestUtil.moveFileInCommand(myProject, tree.mySourceDir, tree.myTargetDir);
    Assert.assertTrue(!wasInitially.exists());
    myDirtyScopeManager.markEverythingDirty();
myChangeListManager.ensureUpToDate(false); final Change movedChange = assertMovedChange(tree.mySourceDir); final File was = new File(tree.mySourceDir.getPath()); Assert.assertNotSame(wasInitially, was); disableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE); VcsTestUtil.deleteFileInCommand(myProject, tree.mySourceDir); runAndVerifyStatusSorted( "! root" + File.separator + "target" + File.separator + "source", "! root" + File.separator + "target" + File.separator + "source" + File.separator + "s1.txt", "! root" + File.separator + "target" + File.separator + "source" + File.separator + "s2.txt", "D root" + File.separator + "source", "D root" + File.separator + "source" + File.separator + "s1.txt", "D root" + File.separator + "source" + File.separator + "s2.txt" ); rollbackLocallyDeleted(Collections.<FilePath>singletonList(VcsUtil.getFilePath(was, true)), Collections.<FilePath>emptyList()); runAndVerifyStatusSorted("D root" + File.separator + "source", "D root" + File.separator + "source" + File.separator + "s1.txt", "D root" + File.separator + "source" + File.separator + "s2.txt"); } private void rollbackLocallyDeleted(final List<FilePath> locally, final List<FilePath> allowed) { final List<VcsException> exceptions = new ArrayList<>(); myVcs.createRollbackEnvironment().rollbackMissingFileDeletion(locally, exceptions, RollbackProgressListener.EMPTY); Assert.assertTrue(exceptions.isEmpty()); myDirtyScopeManager.markEverythingDirty(); myChangeListManager.ensureUpToDate(false); final List<LocallyDeletedChange> deletedFiles = ((ChangeListManagerImpl)myChangeListManager).getDeletedFiles(); if (allowed == null || allowed.isEmpty()) { Assert.assertTrue(deletedFiles == null || deletedFiles.isEmpty()); } final ArrayList<FilePath> copy = new ArrayList<>(allowed); for (LocallyDeletedChange file : deletedFiles) { copy.remove(file.getPath()); } Assert.assertTrue(copy.isEmpty()); } private Change assertMovedChange(final VirtualFile file) { final Change change = 
myChangeListManager.getChange(file); Assert.assertNotNull(change); Assert.assertTrue(change.isMoved()); return change; } private Change assertRenamedChange(final VirtualFile file) { final Change change = myChangeListManager.getChange(file); Assert.assertNotNull(change); Assert.assertTrue(change.isRenamed()); return change; } }
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2020 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.ui.spoon.trans;

import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.extension.ExtensionPointHandler;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.i18n.GlobalMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransAdapter;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.RowAdapter;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaDataCombi;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.spoon.XulSpoonSettingsManager;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegate;
import org.pentaho.di.ui.xul.KettleXulLoader;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulLoader;
import org.pentaho.ui.xul.containers.XulToolbar;
import org.pentaho.ui.xul.impl.XulEventHandler;
import org.pentaho.ui.xul.swt.tags.SwtRadio;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;

import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;

/**
 * Spoon delegate that owns the transformation "Preview" tab: it captures row data per step
 * while a transformation runs and renders either a preview grid or the step's log text.
 */
public class TransPreviewDelegate extends SpoonDelegate implements XulEventHandler {
  private static Class<?> PKG = Spoon.class; // for i18n purposes, needed by Translator2!!

  private static final String XUL_FILE_TRANS_PREVIEW_TOOLBAR = "ui/trans-preview-toolbar.xul";

  private TransGraph transGraph;

  private CTabItem transPreviewTab;

  private XulToolbar toolbar;
  private Composite transPreviewComposite;

  // Per-step capture state: row meta, captured rows, and log text (keyed by StepMeta).
  protected Map<StepMeta, RowMetaInterface> previewMetaMap;
  protected Map<StepMeta, List<RowMetaAndData>> previewDataMap;
  protected Map<StepMeta, StringBuffer> previewLogMap;
  private Composite previewComposite;

  private Text logText;
  private TableView tableView;

  public enum PreviewMode {
    FIRST, LAST, OFF,
  }

  private PreviewMode previewMode;

  private StepMeta selectedStep;
  protected StepMeta lastSelectedStep;
  private SwtRadio firstRadio;
  private SwtRadio lastRadio;
  private SwtRadio offRadio;

  /**
   * @param spoon
   * @param transGraph
   */
  public TransPreviewDelegate( Spoon spoon, TransGraph transGraph ) {
    super( spoon );
    this.transGraph = transGraph;

    previewMetaMap = new HashMap<>();
    previewDataMap = new HashMap<>();
    previewLogMap = new HashMap<>();
    previewMode = PreviewMode.FIRST;
  }

  /** Toggles the preview tab: creates it when absent/disposed, otherwise disposes it. */
  public void showPreviewView() {
    if ( transPreviewTab == null || transPreviewTab.isDisposed() ) {
      addTransPreview();
    } else {
      transPreviewTab.dispose();
      transGraph.checkEmptyExtraView();
    }
  }

  /**
   * Add a grid with the execution metrics per step in a table view
   */
  public void addTransPreview() {
    // First, see if we need to add the extra view...
    //
    if ( transGraph.extraViewComposite == null || transGraph.extraViewComposite.isDisposed() ) {
      transGraph.addExtraView();
    } else {
      if ( transPreviewTab != null && !transPreviewTab.isDisposed() ) {
        // just set this one active and get out...
        //
        transGraph.extraViewTabFolder.setSelection( transPreviewTab );
        return;
      }
    }

    transPreviewTab = new CTabItem( transGraph.extraViewTabFolder, SWT.NONE );
    transPreviewTab.setImage( GUIResource.getInstance().getImagePreview() );
    transPreviewTab.setText( BaseMessages.getString( PKG, "Spoon.TransGraph.PreviewTab.Name" ) );

    transPreviewComposite = new Composite( transGraph.extraViewTabFolder, SWT.NONE );
    transPreviewComposite.setLayout( new FormLayout() );

    PropsUI.getInstance().setLook( transPreviewComposite, Props.WIDGET_STYLE_TOOLBAR );

    addToolBar();

    Control toolbarControl = (Control) toolbar.getManagedObject();

    toolbarControl.setLayoutData( new FormData() );
    FormData fd = new FormData();
    fd.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fd.top = new FormAttachment( 0, 0 );
    fd.right = new FormAttachment( 100, 0 );
    toolbarControl.setLayoutData( fd );

    toolbarControl.setParent( transPreviewComposite );

    previewComposite = new Composite( transPreviewComposite, SWT.NONE );
    previewComposite.setLayout( new FillLayout() );
    FormData fdPreview = new FormData();
    fdPreview.left = new FormAttachment( 0, 0 );
    fdPreview.right = new FormAttachment( 100, 0 );
    // platform-dependent vertical gap under the toolbar
    if ( Const.isLinux() ) {
      fdPreview.top = new FormAttachment( (Control) toolbar.getManagedObject(), 4 );
    } else {
      fdPreview.top = new FormAttachment( (Control) toolbar.getManagedObject(), 10 );
    }
    fdPreview.bottom = new FormAttachment( 100, 0 );
    previewComposite.setLayoutData( fdPreview );

    transPreviewTab.setControl( transPreviewComposite );

    transGraph.extraViewTabFolder.setSelection( transPreviewTab );

    transGraph.extraViewTabFolder.addSelectionListener( new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent e ) {
        refreshView();
      }
    } );
    TransPreviewExtension extension =
      new TransPreviewExtension( transPreviewComposite, toolbarControl, previewComposite );
    try {
      ExtensionPointHandler.callExtensionPoint( log, "TransPreviewCreated", extension );
    } catch ( KettleException ex ) {
      log.logError( "Extension point call failed.", ex );
    }
  }

  /** Loads the preview toolbar from XUL and wires up the first/last/off radio buttons. */
  private void addToolBar() {
    try {
      XulLoader loader = new KettleXulLoader();
      loader.setSettingsManager( XulSpoonSettingsManager.getInstance() );
      ResourceBundle bundle = GlobalMessages.getBundle( "org/pentaho/di/ui/spoon/messages/messages" );
      XulDomContainer xulDomContainer = loader.loadXul( XUL_FILE_TRANS_PREVIEW_TOOLBAR, bundle );
      xulDomContainer.addEventHandler( this );
      toolbar = (XulToolbar) xulDomContainer.getDocumentRoot().getElementById( "nav-toolbar" );

      ToolBar swtToolBar = (ToolBar) toolbar.getManagedObject();
      spoon.props.setLook( swtToolBar, Props.WIDGET_STYLE_TOOLBAR );
      swtToolBar.layout( true, true );
      swtToolBar.pack();

      firstRadio = (SwtRadio) xulDomContainer.getDocumentRoot().getElementById( "preview-first" );
      lastRadio = (SwtRadio) xulDomContainer.getDocumentRoot().getElementById( "preview-last" );
      offRadio = (SwtRadio) xulDomContainer.getDocumentRoot().getElementById( "preview-off" );

      PropsUI.getInstance().setLook( (Control) firstRadio.getManagedObject(), Props.WIDGET_STYLE_TOOLBAR );
      PropsUI.getInstance().setLook( (Control) lastRadio.getManagedObject(), Props.WIDGET_STYLE_TOOLBAR );
      PropsUI.getInstance().setLook( (Control) offRadio.getManagedObject(), Props.WIDGET_STYLE_TOOLBAR );
    } catch ( Throwable t ) {
      log.logError( toString(), Const.getStackTracker( t ) );
      new ErrorDialog( transPreviewComposite.getShell(),
        BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Title" ),
        BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Message", XUL_FILE_TRANS_PREVIEW_TOOLBAR ),
        new Exception( t ) );
    }
  }

  /**
   * This refresh is driven by outside influenced using listeners and so on.
   */
  public synchronized void refreshView() {
    if ( transGraph != null && transGraph.extraViewTabFolder != null ) {
      if ( transGraph.extraViewTabFolder.getSelection() != transPreviewTab ) {
        return;
      }
    }

    if ( previewComposite == null || previewComposite.isDisposed() ) {
      return;
    }

    // Which step do we preview...
    //
    StepMeta stepMeta = selectedStep; // copy to prevent race conditions and so on.
    if ( stepMeta == null ) {
      hidePreviewGrid();
      return;
    } else {
      lastSelectedStep = selectedStep;
    }

    // Do we have a log for this selected step?
    // This means the preview work is still running or it error-ed out.
    //
    boolean errorStep = false;
    if ( transGraph.trans != null ) {
      List<StepInterface> steps = transGraph.trans.findBaseSteps( stepMeta.getName() );
      if ( steps != null && steps.size() > 0 ) {
        errorStep = steps.get( 0 ).getErrors() > 0;
      }
    }

    // NOTE: renamed from "logText" which shadowed the Text widget field of the same name.
    StringBuffer stepLogText = previewLogMap.get( stepMeta );
    if ( errorStep && stepLogText != null && stepLogText.length() > 0 ) {
      showLogText( stepMeta, stepLogText.toString() );
      return;
    }

    // If the preview work is done we have row meta-data and data for each step.
    //
    RowMetaInterface rowMeta = previewMetaMap.get( stepMeta );
    if ( rowMeta != null ) {
      List<RowMetaAndData> rowData = previewDataMap.get( stepMeta );

      try {
        showPreviewGrid( transGraph.getManagedObject(), stepMeta, rowMeta, rowData );
      } catch ( Exception e ) {
        // Report through the Kettle log instead of stderr, and guard against a missing
        // log buffer for this step (previously a potential NullPointerException).
        log.logError( "Unexpected preview error", e );
        if ( stepLogText == null ) {
          stepLogText = new StringBuffer();
          previewLogMap.put( stepMeta, stepLogText );
        }
        stepLogText.append( Const.getStackTracker( e ) );
        showLogText( stepMeta, stepLogText.toString() );
      }
    }
  }

  /** Disposes the current preview grid, if any. */
  protected void hidePreviewGrid() {
    if ( tableView != null && !tableView.isDisposed() ) {
      tableView.dispose();
    }
  }

  /**
   * Builds a {@link TableView} for the captured rows of the given step and fills it with
   * the string-rendered cell values (binary-string storage is converted first).
   */
  protected void showPreviewGrid( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface rowMeta,
    List<RowMetaAndData> rowsData ) throws KettleException {
    clearPreviewComposite();

    ColumnInfo[] columnInfo = new ColumnInfo[rowMeta.size()];
    for ( int i = 0; i < columnInfo.length; i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      columnInfo[i] = new ColumnInfo( valueMeta.getName(), ColumnInfo.COLUMN_TYPE_TEXT, false, true );
      columnInfo[i].setValueMeta( valueMeta );
    }

    tableView =
      new TableView( transMeta, previewComposite, SWT.NONE, columnInfo, rowsData.size(), null, PropsUI.getInstance() );

    // Put data on it...
    //
    for ( int rowNr = 0; rowNr < rowsData.size(); rowNr++ ) {
      RowMetaAndData rowMetaAndData = rowsData.get( rowNr );
      RowMetaInterface dataRowMeta = rowMetaAndData.getRowMeta();
      Object[] rowData = rowMetaAndData.getData();
      TableItem item;
      if ( rowNr < tableView.table.getItemCount() ) {
        item = tableView.table.getItem( rowNr );
      } else {
        item = new TableItem( tableView.table, SWT.NONE );
      }
      for ( int colNr = 0; colNr < rowMeta.size(); colNr++ ) {
        // The data row may have a different field order; look up the column by name and
        // fall back to the positional index when the name is not found.
        int dataIndex = dataRowMeta.indexOfValue( rowMeta.getValueMeta( colNr ).getName() );
        dataIndex = dataIndex < 0 ? colNr : dataIndex;
        String string;
        ValueMetaInterface valueMetaInterface;
        try {
          valueMetaInterface = dataRowMeta.getValueMeta( dataIndex );
          if ( valueMetaInterface.isStorageBinaryString() ) {
            Object nativeType = valueMetaInterface.convertBinaryStringToNativeType( (byte[]) rowData[dataIndex] );
            string = valueMetaInterface.getStorageMetadata().getString( nativeType );
          } else {
            string = dataRowMeta.getString( rowData, dataIndex );
          }
        } catch ( Exception e ) {
          string = "Conversion error: " + e.getMessage();
        }

        if ( string == null ) {
          item.setText( colNr + 1, "<null>" );
          item.setForeground( colNr + 1, GUIResource.getInstance().getColorBlue() );
        } else {
          item.setText( colNr + 1, string );
        }
      }
    }

    tableView.setRowNums();
    tableView.setShowingConversionErrorsInline( true );
    tableView.optWidth( true );

    previewComposite.layout( true, true );
  }

  /** Replaces the preview area contents with a scrollable text widget showing the log. */
  protected void showLogText( StepMeta stepMeta, String loggingText ) {
    clearPreviewComposite();

    logText = new Text( previewComposite, SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL );
    logText.setText( loggingText );

    previewComposite.layout( true, true );
  }

  private void clearPreviewComposite() {
    // First clear out the preview composite, then put in a text field showing the log text
    //
    //
    for ( Control control : previewComposite.getChildren() ) {
      control.dispose();
    }
  }

  public CTabItem getTransGridTab() {
    return transPreviewTab;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#getData()
   */
  public Object getData() {
    // TODO Auto-generated method stub
    return null;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#getName()
   */
  public String getName() {
    return "transpreview";
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#getXulDomContainer()
   */
  public XulDomContainer getXulDomContainer() {
    // TODO Auto-generated method stub
    return null;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#setData(java.lang.Object)
   */
  public void setData( Object data ) {
    // TODO Auto-generated method stub
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#setName(java.lang.String)
   */
  public void setName( String name ) {
    // TODO Auto-generated method stub
  }

  /*
   * (non-Javadoc)
   *
   * @see org.pentaho.ui.xul.impl.XulEventHandler#setXulDomContainer(org.pentaho. ui.xul.XulDomContainer)
   */
  public void setXulDomContainer( XulDomContainer xulDomContainer ) {
    // TODO Auto-generated method stub
  }

  /**
   * @return the active
   */
  public boolean isActive() {
    return previewMode != PreviewMode.OFF;
  }

  public void setPreviewMode( PreviewMode previewMode ) {
    this.previewMode = previewMode;
  }

  /**
   * Attaches row listeners to the running transformation so the first (or last, depending on
   * the preview mode) N rows of each given step are captured for later display.
   */
  public void capturePreviewData( final Trans trans, List<StepMeta> stepMetas ) {
    final StringBuffer loggingText = new StringBuffer();

    // First clean out previous preview data. Otherwise this method leaks memory like crazy.
    //
    previewLogMap.clear();
    previewMetaMap.clear();
    previewDataMap.clear();

    final TransMeta transMeta = trans.getTransMeta();

    for ( final StepMeta stepMeta : stepMetas ) {
      try {
        final RowMetaInterface rowMeta = transMeta.getStepFields( stepMeta ).clone();
        previewMetaMap.put( stepMeta, rowMeta );
        final List<RowMetaAndData> rowsData;
        // LAST mode evicts from the head on every insert, so a LinkedList is used there.
        if ( previewMode == PreviewMode.LAST ) {
          rowsData = new LinkedList<>();
        } else {
          rowsData = new ArrayList<>();
        }
        previewDataMap.put( stepMeta, rowsData );
        previewLogMap.put( stepMeta, loggingText );

        StepInterface step = trans.findRunThread( stepMeta.getName() );

        if ( step != null ) {
          switch ( previewMode ) {
            case LAST:
              step.addRowListener( new RowAdapter() {
                @Override
                public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
                  try {
                    rowsData.add( new RowMetaAndData( rowMeta, rowMeta.cloneRow( row ) ) );
                    if ( rowsData.size() > PropsUI.getInstance().getDefaultPreviewSize() ) {
                      rowsData.remove( 0 );
                    }
                  } catch ( Exception e ) {
                    throw new KettleStepException( "Unable to clone row for metadata : " + rowMeta, e );
                  }
                }
              } );
              break;
            default:
              step.addRowListener( new RowAdapter() {
                @Override
                public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
                  if ( rowsData.size() < PropsUI.getInstance().getDefaultPreviewSize() ) {
                    try {
                      rowsData.add( new RowMetaAndData( rowMeta, rowMeta.cloneRow( row ) ) );
                    } catch ( Exception e ) {
                      throw new KettleStepException( "Unable to clone row for metadata : " + rowMeta, e );
                    }
                  }
                }
              } );
              break;
          }
        }
      } catch ( Exception e ) {
        loggingText.append( Const.getStackTracker( e ) );
      }
    }

    // In case there were errors during preview...
    //
    trans.addTransListener( new TransAdapter() {
      @Override
      public void transFinished( Trans trans ) throws KettleException {
        // Copy over the data from the previewDelegate...
        //
        if ( trans.getErrors() != 0 ) {
          // capture logging and store it...
          //
          for ( StepMetaDataCombi combi : trans.getSteps() ) {
            if ( combi.copy == 0 ) {
              StringBuffer logBuffer =
                KettleLogStore.getAppender().getBuffer( combi.step.getLogChannel().getLogChannelId(), false );
              previewLogMap.put( combi.stepMeta, logBuffer );
            }
          }
        }
      }
    } );
  }

  /** Stores externally captured preview rows/meta/log for a step. */
  public void addPreviewData( StepMeta stepMeta, RowMetaInterface rowMeta, List<Object[]> rowsData,
    StringBuffer buffer ) {
    previewLogMap.put( stepMeta, buffer );
    previewMetaMap.put( stepMeta, rowMeta );
    List<RowMetaAndData> rowsMetaAndData =
      rowsData.stream().map( data -> new RowMetaAndData( rowMeta, data ) ).collect( toList() );
    previewDataMap.put( stepMeta, rowsMetaAndData );
  }

  /**
   * @return the selectedStep
   */
  public StepMeta getSelectedStep() {
    return selectedStep;
  }

  /**
   * @param selectedStep
   *          the selectedStep to set
   */
  public void setSelectedStep( StepMeta selectedStep ) {
    this.selectedStep = selectedStep;
  }

  public PreviewMode getPreviewMode() {
    return previewMode;
  }

  public void first() {
    previewMode = PreviewMode.FIRST;
    firstRadio.setSelected( true );
    lastRadio.setSelected( false );
    offRadio.setSelected( false );
  }

  public void last() {
    previewMode = PreviewMode.LAST;
    firstRadio.setSelected( false );
    lastRadio.setSelected( true );
    offRadio.setSelected( false );
  }

  public void off() {
    previewMode = PreviewMode.OFF;
    firstRadio.setSelected( false );
    lastRadio.setSelected( false );
    offRadio.setSelected( true );
  }

  public Map<StepMeta, List<Object[]>> getPreviewDataMap() {
    // Note this method is unused, but since it's public, we will keep the original signature after change to type of
    // this map, just in case.
    return previewDataMap.keySet().stream().collect(
      toMap( identity(), key -> previewDataMap.get( key ).stream().map( RowMetaAndData::getData ).collect( toList() ) ) );
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.runtime.controlprogram.parfor;

import java.io.IOException;
import java.util.HashMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Counters.Group;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
import org.apache.sysml.runtime.controlprogram.ParForProgramBlock;
import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PartitionFormat;
import org.apache.sysml.runtime.controlprogram.caching.CacheStatistics;
import org.apache.sysml.runtime.controlprogram.caching.CacheableData;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
import org.apache.sysml.runtime.controlprogram.parfor.stat.Stat;
import org.apache.sysml.runtime.controlprogram.parfor.util.PairWritableBlock;
import org.apache.sysml.runtime.controlprogram.parfor.util.PairWritableCell;
import org.apache.sysml.runtime.instructions.cp.Data;
import org.apache.sysml.runtime.io.IOUtilFunctions;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
import org.apache.sysml.runtime.util.MapReduceTool;
import org.apache.sysml.utils.Statistics;
import org.apache.sysml.yarn.DMLAppMasterUtils;

/**
 * MR job class for submitting parfor remote MR jobs, controlling its execution and obtaining results.
 *
 *
 */
public class RemoteDPParForMR
{
	protected static final Log LOG = LogFactory.getLog(RemoteDPParForMR.class.getName());

	/**
	 * Configures, submits, and monitors a data-partitioning parfor MR job, then collects
	 * the result variables written by the reducers.
	 *
	 * @param pfid parfor id, appended to the job name
	 * @param itervar name of the iteration variable
	 * @param matrixvar name of the partitioned matrix variable
	 * @param program serialized CP program blocks executed in the reducers
	 * @param resultFile HDFS path for the result variables (deleted on exit)
	 * @param input partitioned input matrix
	 * @param dpf data partition format
	 * @param oi output info of the partitioning intermediates (binary block or cell)
	 * @param tSparseCol transpose sparse column flag (config param)
	 * @param enableCPCaching enable/disable CP caching in the reducers (config param)
	 * @param numReducers number of reducers (opt param)
	 * @param replication replication factor for results (opt param)
	 * @return job return object with success flag, task/iteration counts, and results
	 * @throws DMLRuntimeException if job configuration, execution, or cleanup fails
	 */
	public static RemoteParForJobReturn runJob(long pfid, String itervar, String matrixvar, String program,
			String resultFile, MatrixObject input, PartitionFormat dpf, OutputInfo oi,
			boolean tSparseCol, //config params
			boolean enableCPCaching, int numReducers, int replication)  //opt params
		throws DMLRuntimeException
	{
		RemoteParForJobReturn ret = null;
		String jobname = "ParFor-DPEMR";
		long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;

		JobConf job;
		job = new JobConf( RemoteDPParForMR.class );
		job.setJobName(jobname+pfid);

		//maintain dml script counters
		Statistics.incrementNoOfCompiledMRJobs();

		try
		{
			/////
			//configure the MR job

			//set arbitrary CP program blocks that will perform in the reducers
			MRJobConfiguration.setProgramBlocks(job, program);

			//enable/disable caching
			MRJobConfiguration.setParforCachingConfig(job, enableCPCaching);

			//setup input matrix
			Path path = new Path( input.getFileName() );
			long rlen = input.getNumRows();
			long clen = input.getNumColumns();
			int brlen = (int) input.getNumRowsPerBlock();
			int bclen = (int) input.getNumColumnsPerBlock();
			MRJobConfiguration.setPartitioningInfo(job, rlen, clen, brlen, bclen,
				InputInfo.BinaryBlockInputInfo, oi, dpf._dpf, dpf._N, input.getFileName(), itervar, matrixvar, tSparseCol);
			job.setInputFormat(InputInfo.BinaryBlockInputInfo.inputFormatClass);
			FileInputFormat.setInputPaths(job, path);

			//set mapper and reducers classes
			job.setMapperClass(DataPartitionerRemoteMapper.class);
			job.setReducerClass(RemoteDPParWorkerReducer.class);

			//set output format
			job.setOutputFormat(SequenceFileOutputFormat.class);

			//set output path
			MapReduceTool.deleteFileIfExistOnHDFS(resultFile);
			FileOutputFormat.setOutputPath(job, new Path(resultFile));

			//set the output key, value schema

			//parfor partitioning outputs (intermediates)
			job.setMapOutputKeyClass(LongWritable.class);
			if( oi == OutputInfo.BinaryBlockOutputInfo )
				job.setMapOutputValueClass(PairWritableBlock.class);
			else if( oi == OutputInfo.BinaryCellOutputInfo )
				job.setMapOutputValueClass(PairWritableCell.class);
			else
				throw new DMLRuntimeException("Unsupported intermediate output info: "+oi);
			//parfor exec output
			job.setOutputKeyClass(LongWritable.class);
			job.setOutputValueClass(Text.class);

			//////
			//set optimization parameters

			//set the number of mappers and reducers
			job.setNumReduceTasks( numReducers );

			//disable automatic tasks timeouts and speculative task exec
			job.setInt(MRConfigurationNames.MR_TASK_TIMEOUT, 0);
			job.setMapSpeculativeExecution(false);

			//set up preferred custom serialization framework for binary block format
			if( MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
				MRJobConfiguration.addBinaryBlockSerializationFramework( job );

			//set up map/reduce memory configurations (if in AM context)
			DMLConfig config = ConfigurationManager.getDMLConfig();
			DMLAppMasterUtils.setupMRJobRemoteMaxMemory(job, config);

			//set up custom map/reduce configurations
			MRJobConfiguration.setupCustomMRConfigurations(job, config);

			//disable JVM reuse
			job.setNumTasksToExecutePerJvm( 1 ); //-1 for unlimited

			//set the replication factor for the results
			job.setInt(MRConfigurationNames.DFS_REPLICATION, replication);

			//set the max number of retries per map task
			//note: currently disabled to use cluster config
			//job.setInt(MRConfigurationNames.MR_MAP_MAXATTEMPTS, max_retry);

			//set unique working dir
			MRJobConfiguration.setUniqueWorkingDir(job);

			/////
			// execute the MR job
			RunningJob runjob = JobClient.runJob(job);

			// Process different counters
			Statistics.incrementNoOfExecutedMRJobs();
			Group pgroup = runjob.getCounters().getGroup(ParForProgramBlock.PARFOR_COUNTER_GROUP_NAME);
			int numTasks = (int)pgroup.getCounter( Stat.PARFOR_NUMTASKS.toString() );
			int numIters = (int)pgroup.getCounter( Stat.PARFOR_NUMITERS.toString() );
			if( DMLScript.STATISTICS && !InfrastructureAnalyzer.isLocalMode() ) {
				Statistics.incrementJITCompileTime( pgroup.getCounter( Stat.PARFOR_JITCOMPILE.toString() ) );
				Statistics.incrementJVMgcCount( pgroup.getCounter( Stat.PARFOR_JVMGC_COUNT.toString() ) );
				Statistics.incrementJVMgcTime( pgroup.getCounter( Stat.PARFOR_JVMGC_TIME.toString() ) );
				Group cgroup = runjob.getCounters().getGroup(CacheableData.CACHING_COUNTER_GROUP_NAME.toString());
				CacheStatistics.incrementMemHits((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_HITS_MEM.toString() ));
				CacheStatistics.incrementFSBuffHits((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_HITS_FSBUFF.toString() ));
				CacheStatistics.incrementFSHits((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_HITS_FS.toString() ));
				CacheStatistics.incrementHDFSHits((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_HITS_HDFS.toString() ));
				CacheStatistics.incrementFSBuffWrites((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_WRITES_FSBUFF.toString() ));
				CacheStatistics.incrementFSWrites((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_WRITES_FS.toString() ));
				CacheStatistics.incrementHDFSWrites((int)cgroup.getCounter( CacheStatistics.Stat.CACHE_WRITES_HDFS.toString() ));
				CacheStatistics.incrementAcquireRTime(cgroup.getCounter( CacheStatistics.Stat.CACHE_TIME_ACQR.toString() ));
				CacheStatistics.incrementAcquireMTime(cgroup.getCounter( CacheStatistics.Stat.CACHE_TIME_ACQM.toString() ));
				CacheStatistics.incrementReleaseTime(cgroup.getCounter( CacheStatistics.Stat.CACHE_TIME_RLS.toString() ));
				CacheStatistics.incrementExportTime(cgroup.getCounter( CacheStatistics.Stat.CACHE_TIME_EXP.toString() ));
			}

			// read all files of result variables and prepare for return
			LocalVariableMap[] results = readResultFile(job, resultFile);

			ret = new RemoteParForJobReturn(runjob.isSuccessful(), numTasks, numIters, results);
		}
		catch(Exception ex) {
			throw new DMLRuntimeException(ex);
		}
		finally {
			// remove created files
			// NOTE(review): throwing from this finally block masks any exception thrown in the
			// try/catch above; kept as-is to preserve existing behavior.
			try {
				MapReduceTool.deleteFileIfExistOnHDFS(new Path(resultFile), job);
			}
			catch(IOException ex) {
				throw new DMLRuntimeException(ex);
			}
		}

		if( DMLScript.STATISTICS ){
			long t1 = System.nanoTime();
			Statistics.maintainCPHeavyHitters("MR-Job_"+jobname, t1-t0);
		}

		return ret;
	}

	/**
	 * Result file contains hierarchy of workerID-resultvar(incl filename). We deduplicate
	 * on the workerID. Without JVM reuse each task refers to a unique workerID, so we
	 * will not find any duplicates. With JVM reuse, however, each slot refers to a workerID,
	 * and there are duplicate filenames due to partial aggregation and overwrite of fname
	 * (the RemoteParWorkerMapper ensures uniqueness of those files independent of the
	 * runtime implementation).
	 *
	 * @param job job configuration
	 * @param fname file name
	 * @return array of local variable maps
	 * @throws DMLRuntimeException if DMLRuntimeException occurs
	 * @throws IOException if IOException occurs
	 */
	@SuppressWarnings("deprecation")
	public static LocalVariableMap [] readResultFile( JobConf job, String fname )
		throws DMLRuntimeException, IOException
	{
		HashMap<Long,LocalVariableMap> tmp = new HashMap<Long,LocalVariableMap>();

		Path path = new Path(fname);
		FileSystem fs = IOUtilFunctions.getFileSystem(path, job);
		LongWritable key = new LongWritable(); //workerID
		Text value = new Text();               //serialized var header (incl filename)

		int countAll = 0;
		for( Path lpath : IOUtilFunctions.getSequenceFilePaths(fs, path) ) {
			SequenceFile.Reader reader = new SequenceFile.Reader(fs,lpath,job);
			try {
				while( reader.next(key, value) ) {
					//deduplicate on workerID: merge all variables of one worker into one map
					if( !tmp.containsKey( key.get() ) )
						tmp.put(key.get(), new LocalVariableMap ());
					Object[] dat = ProgramConverter.parseDataObject( value.toString() );
					tmp.get( key.get() ).put((String)dat[0], (Data)dat[1]);
					countAll++;
				}
			}
			finally {
				IOUtilFunctions.closeSilently(reader);
			}
		}

		LOG.debug("Num remote worker results (before deduplication): "+countAll);
		LOG.debug("Num remote worker results: "+tmp.size());

		//create return array
		return tmp.values().toArray(new LocalVariableMap[0]);
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.lib.output; import java.io.FileNotFoundException; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import com.google.common.annotations.VisibleForTesting; /** An {@link OutputCommitter} that commits files specified * in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}. 
**/ @InterfaceAudience.Public @InterfaceStability.Stable public class FileOutputCommitter extends OutputCommitter { private static final Log LOG = LogFactory.getLog(FileOutputCommitter.class); /** * Name of directory where pending data is placed. Data that has not been * committed yet. */ public static final String PENDING_DIR_NAME = "_temporary"; /** * Temporary directory name * * The static variable to be compatible with M/R 1.x */ @Deprecated protected static final String TEMP_DIR_NAME = PENDING_DIR_NAME; public static final String SUCCEEDED_FILE_NAME = "_SUCCESS"; public static final String SUCCESSFUL_JOB_OUTPUT_DIR_MARKER = "mapreduce.fileoutputcommitter.marksuccessfuljobs"; public static final String FILEOUTPUTCOMMITTER_ALGORITHM_VERSION = "mapreduce.fileoutputcommitter.algorithm.version"; public static final int FILEOUTPUTCOMMITTER_ALGORITHM_VERSION_DEFAULT = 1; // Number of attempts when failure happens in commit job public static final String FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS = "mapreduce.fileoutputcommitter.failures.attempts"; // default value to be 1 to keep consistent with previous behavior public static final int FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS_DEFAULT = 1; private Path outputPath = null; private Path workPath = null; private final int algorithmVersion; /** * Create a file output committer * @param outputPath the job's output path, or null if you want the output * committer to act as a noop. * @param context the task's context * @throws IOException */ public FileOutputCommitter(Path outputPath, TaskAttemptContext context) throws IOException { this(outputPath, (JobContext)context); if (outputPath != null) { workPath = getTaskAttemptPath(context, outputPath); } } /** * Create a file output committer * @param outputPath the job's output path, or null if you want the output * committer to act as a noop. 
* @param context the task's context * @throws IOException */ @Private public FileOutputCommitter(Path outputPath, JobContext context) throws IOException { Configuration conf = context.getConfiguration(); algorithmVersion = conf.getInt(FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, FILEOUTPUTCOMMITTER_ALGORITHM_VERSION_DEFAULT); LOG.info("File Output Committer Algorithm version is " + algorithmVersion); if (algorithmVersion != 1 && algorithmVersion != 2) { throw new IOException("Only 1 or 2 algorithm version is supported"); } if (outputPath != null) { FileSystem fs = outputPath.getFileSystem(context.getConfiguration()); this.outputPath = fs.makeQualified(outputPath); } } /** * @return the path where final output of the job should be placed. This * could also be considered the committed application attempt path. */ private Path getOutputPath() { return this.outputPath; } /** * @return true if we have an output path set, else false. */ private boolean hasOutputPath() { return this.outputPath != null; } /** * @return the path where the output of pending job attempts are * stored. */ private Path getPendingJobAttemptsPath() { return getPendingJobAttemptsPath(getOutputPath()); } /** * Get the location of pending job attempts. * @param out the base output directory. * @return the location of pending job attempts. */ private static Path getPendingJobAttemptsPath(Path out) { return new Path(out, PENDING_DIR_NAME); } /** * Get the Application Attempt Id for this job * @param context the context to look in * @return the Application Attempt Id for a given job. */ private static int getAppAttemptId(JobContext context) { return context.getConfiguration().getInt( MRJobConfig.APPLICATION_ATTEMPT_ID, 0); } /** * Compute the path where the output of a given job attempt will be placed. * @param context the context of the job. This is used to get the * application attempt id. * @return the path to store job attempt data. 
*/ public Path getJobAttemptPath(JobContext context) { return getJobAttemptPath(context, getOutputPath()); } /** * Compute the path where the output of a given job attempt will be placed. * @param context the context of the job. This is used to get the * application attempt id. * @param out the output path to place these in. * @return the path to store job attempt data. */ public static Path getJobAttemptPath(JobContext context, Path out) { return getJobAttemptPath(getAppAttemptId(context), out); } /** * Compute the path where the output of a given job attempt will be placed. * @param appAttemptId the ID of the application attempt for this job. * @return the path to store job attempt data. */ protected Path getJobAttemptPath(int appAttemptId) { return getJobAttemptPath(appAttemptId, getOutputPath()); } /** * Compute the path where the output of a given job attempt will be placed. * @param appAttemptId the ID of the application attempt for this job. * @return the path to store job attempt data. */ private static Path getJobAttemptPath(int appAttemptId, Path out) { return new Path(getPendingJobAttemptsPath(out), String.valueOf(appAttemptId)); } /** * Compute the path where the output of pending task attempts are stored. * @param context the context of the job with pending tasks. * @return the path where the output of pending task attempts are stored. */ private Path getPendingTaskAttemptsPath(JobContext context) { return getPendingTaskAttemptsPath(context, getOutputPath()); } /** * Compute the path where the output of pending task attempts are stored. * @param context the context of the job with pending tasks. * @return the path where the output of pending task attempts are stored. */ private static Path getPendingTaskAttemptsPath(JobContext context, Path out) { return new Path(getJobAttemptPath(context, out), PENDING_DIR_NAME); } /** * Compute the path where the output of a task attempt is stored until * that task is committed. 
* * @param context the context of the task attempt. * @return the path where a task attempt should be stored. */ public Path getTaskAttemptPath(TaskAttemptContext context) { return new Path(getPendingTaskAttemptsPath(context), String.valueOf(context.getTaskAttemptID())); } /** * Compute the path where the output of a task attempt is stored until * that task is committed. * * @param context the context of the task attempt. * @param out The output path to put things in. * @return the path where a task attempt should be stored. */ public static Path getTaskAttemptPath(TaskAttemptContext context, Path out) { return new Path(getPendingTaskAttemptsPath(context, out), String.valueOf(context.getTaskAttemptID())); } /** * Compute the path where the output of a committed task is stored until * the entire job is committed. * @param context the context of the task attempt * @return the path where the output of a committed task is stored until * the entire job is committed. */ public Path getCommittedTaskPath(TaskAttemptContext context) { return getCommittedTaskPath(getAppAttemptId(context), context); } public static Path getCommittedTaskPath(TaskAttemptContext context, Path out) { return getCommittedTaskPath(getAppAttemptId(context), context, out); } /** * Compute the path where the output of a committed task is stored until the * entire job is committed for a specific application attempt. * @param appAttemptId the id of the application attempt to use * @param context the context of any task. * @return the path where the output of a committed task is stored. 
*/
  protected Path getCommittedTaskPath(int appAttemptId, TaskAttemptContext context) {
    // committed task dir is named after the TaskID (attempt number stripped)
    return new Path(getJobAttemptPath(appAttemptId),
        String.valueOf(context.getTaskAttemptID().getTaskID()));
  }

  private static Path getCommittedTaskPath(int appAttemptId, TaskAttemptContext context, Path out) {
    return new Path(getJobAttemptPath(appAttemptId, out),
        String.valueOf(context.getTaskAttemptID().getTaskID()));
  }

  // Accepts every child of the job attempt dir except the nested
  // pending (_temporary) directory.
  private static class CommittedTaskFilter implements PathFilter {
    @Override
    public boolean accept(Path path) {
      return !PENDING_DIR_NAME.equals(path.getName());
    }
  }

  /**
   * Get a list of all paths where output from committed tasks are stored.
   * @param context the context of the current job
   * @return the list of these Paths/FileStatuses.
   * @throws IOException
   */
  private FileStatus[] getAllCommittedTaskPaths(JobContext context)
      throws IOException {
    Path jobAttemptPath = getJobAttemptPath(context);
    FileSystem fs = jobAttemptPath.getFileSystem(context.getConfiguration());
    return fs.listStatus(jobAttemptPath, new CommittedTaskFilter());
  }

  /**
   * Get the directory that the task should write results into.
   * @return the work directory
   * @throws IOException
   */
  public Path getWorkPath() throws IOException {
    return workPath;
  }

  /**
   * Create the temporary directory that is the root of all of the task
   * work directories.
   * @param context the job's context
   */
  public void setupJob(JobContext context) throws IOException {
    if (hasOutputPath()) {
      Path jobAttemptPath = getJobAttemptPath(context);
      FileSystem fs = jobAttemptPath.getFileSystem(
          context.getConfiguration());
      if (!fs.mkdirs(jobAttemptPath)) {
        // logged, not thrown — tasks create dirs on demand anyway
        LOG.error("Mkdirs failed to create " + jobAttemptPath);
      }
    } else {
      LOG.warn("Output Path is null in setupJob()");
    }
  }

  /**
   * The job has completed, so do works in commitJobInternal().
   * Could retry on failure if using algorithm 2.
   * @param context the job's context
   */
  public void commitJob(JobContext context) throws IOException {
    // retries are only safe when the commit is repeatable (algorithm 2)
    int maxAttemptsOnFailure = isCommitJobRepeatable(context) ?
        context.getConfiguration().getInt(FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS,
            FILEOUTPUTCOMMITTER_FAILURE_ATTEMPTS_DEFAULT) : 1;
    int attempt = 0;
    boolean jobCommitNotFinished = true;
    while (jobCommitNotFinished) {
      try {
        commitJobInternal(context);
        jobCommitNotFinished = false;
      } catch (Exception e) {
        if (++attempt >= maxAttemptsOnFailure) {
          throw e;
        } else {
          LOG.warn("Exception get thrown in job commit, retry (" + attempt +
              ") time.", e);
        }
      }
    }
  }

  /**
   * The job has completed, so do following commit job, include:
   * Move all committed tasks to the final output dir (algorithm 1 only).
   * Delete the temporary directory, including all of the work directories.
   * Create a _SUCCESS file to make it as successful.
   * @param context the job's context
   */
  @VisibleForTesting
  protected void commitJobInternal(JobContext context) throws IOException {
    if (hasOutputPath()) {
      Path finalOutput = getOutputPath();
      FileSystem fs = finalOutput.getFileSystem(context.getConfiguration());

      // algorithm 2 merges task output directly to the final dir at task
      // commit time, so only algorithm 1 merges here
      if (algorithmVersion == 1) {
        for (FileStatus stat: getAllCommittedTaskPaths(context)) {
          mergePaths(fs, stat, finalOutput);
        }
      }

      // delete the _temporary folder and create a _done file in the o/p folder
      cleanupJob(context);

      // True if the job requires output.dir marked on successful job.
      // Note that by default it is set to true.
      if (context.getConfiguration().getBoolean(
          SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true)) {
        Path markerPath = new Path(outputPath, SUCCEEDED_FILE_NAME);
        // If job commit is repeatable and previous/another AM could write
        // mark file already, we need to set overwritten to be true explicitly
        // in case other FS implementations don't overwritten by default.
        if (isCommitJobRepeatable(context)) {
          fs.create(markerPath, true).close();
        } else {
          fs.create(markerPath).close();
        }
      }
    } else {
      LOG.warn("Output Path is null in commitJob()");
    }
  }

  /**
   * Merge two paths together.  Anything in from will be moved into to, if there
   * are any name conflicts while merging the files or directories in from win.
   * @param fs the File System to use
   * @param from the path data is coming from.
   * @param to the path data is going to.
   * @throws IOException on any error
   */
  private void mergePaths(FileSystem fs, final FileStatus from,
      final Path to) throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Merging data from " + from + " to " + to);
    }
    FileStatus toStat;
    try {
      toStat = fs.getFileStatus(to);
    } catch (FileNotFoundException fnfe) {
      // destination missing: a plain rename below will suffice
      toStat = null;
    }

    if (from.isFile()) {
      if (toStat != null) {
        // "from wins" on conflict: remove destination before rename
        if (!fs.delete(to, true)) {
          throw new IOException("Failed to delete " + to);
        }
      }

      if (!fs.rename(from.getPath(), to)) {
        throw new IOException("Failed to rename " + from + " to " + to);
      }
    } else if (from.isDirectory()) {
      if (toStat != null) {
        if (!toStat.isDirectory()) {
          // destination is a file: replace it with the source directory
          if (!fs.delete(to, true)) {
            throw new IOException("Failed to delete " + to);
          }
          renameOrMerge(fs, from, to);
        } else {
          //It is a directory so merge everything in the directories
          for (FileStatus subFrom : fs.listStatus(from.getPath())) {
            Path subTo = new Path(to, subFrom.getPath().getName());
            mergePaths(fs, subFrom, subTo);
          }
        }
      } else {
        renameOrMerge(fs, from, to);
      }
    }
  }

  // Algorithm 1 can use a cheap rename; algorithm 2 must merge recursively
  // because the destination may be written concurrently by other tasks.
  private void renameOrMerge(FileSystem fs, FileStatus from, Path to)
      throws IOException {
    if (algorithmVersion == 1) {
      if (!fs.rename(from.getPath(), to)) {
        throw new IOException("Failed to rename " + from + " to " + to);
      }
    } else {
      fs.mkdirs(to);
      for (FileStatus subFrom : fs.listStatus(from.getPath())) {
        Path subTo = new Path(to, subFrom.getPath().getName());
        mergePaths(fs, subFrom, subTo);
      }
    }
  }

  @Override
  @Deprecated
  public void cleanupJob(JobContext context) throws IOException {
    if (hasOutputPath()) {
      Path pendingJobAttemptsPath = getPendingJobAttemptsPath();
      FileSystem fs = pendingJobAttemptsPath
          .getFileSystem(context.getConfiguration());
      // if job allow repeatable commit and pendingJobAttemptsPath could be
      // deleted by previous AM, we should tolerate FileNotFoundException in
      // this case.
      try {
        fs.delete(pendingJobAttemptsPath, true);
      } catch (FileNotFoundException e) {
        if (!isCommitJobRepeatable(context)) {
          throw e;
        }
      }
    } else {
      LOG.warn("Output Path is null in cleanupJob()");
    }
  }

  /**
   * Delete the temporary directory, including all of the work directories.
   * @param context the job's context
   */
  @Override
  public void abortJob(JobContext context, JobStatus.State state)
      throws IOException {
    // delete the _temporary folder
    cleanupJob(context);
  }

  /**
   * No task setup required.
   */
  @Override
  public void setupTask(TaskAttemptContext context) throws IOException {
    // FileOutputCommitter's setupTask doesn't do anything. Because the
    // temporary task directory is created on demand when the
    // task is writing.
  }

  /**
   * Move the files from the work directory to the job output directory
   * @param context the task context
   */
  @Override
  public void commitTask(TaskAttemptContext context)
      throws IOException {
    commitTask(context, null);
  }

  @Private
  public void commitTask(TaskAttemptContext context, Path taskAttemptPath)
      throws IOException {

    TaskAttemptID attemptId = context.getTaskAttemptID();
    if (hasOutputPath()) {
      context.progress();
      if(taskAttemptPath == null) {
        taskAttemptPath = getTaskAttemptPath(context);
      }
      FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
      FileStatus taskAttemptDirStatus;
      try {
        taskAttemptDirStatus = fs.getFileStatus(taskAttemptPath);
      } catch (FileNotFoundException e) {
        // task produced no output directory; nothing to commit
        taskAttemptDirStatus = null;
      }

      if (taskAttemptDirStatus != null) {
        if (algorithmVersion == 1) {
          // v1: atomically rename attempt dir to the per-task committed dir;
          // the merge to the final output happens later in commitJobInternal
          Path committedTaskPath = getCommittedTaskPath(context);
          if (fs.exists(committedTaskPath)) {
            if (!fs.delete(committedTaskPath, true)) {
              throw new IOException("Could not delete " + committedTaskPath);
            }
          }
          if (!fs.rename(taskAttemptPath, committedTaskPath)) {
            throw new IOException("Could not rename " + taskAttemptPath + " to "
                + committedTaskPath);
          }
          LOG.info("Saved output of task '" + attemptId + "' to " +
              committedTaskPath);
        } else {
          // directly merge everything from taskAttemptPath to output directory
          mergePaths(fs, taskAttemptDirStatus, outputPath);
          LOG.info("Saved output of task '" + attemptId + "' to " +
              outputPath);
        }
      } else {
        LOG.warn("No Output found for " + attemptId);
      }
    } else {
      LOG.warn("Output Path is null in commitTask()");
    }
  }

  /**
   * Delete the work directory
   * @throws IOException
   */
  @Override
  public void abortTask(TaskAttemptContext context) throws IOException {
    abortTask(context, null);
  }

  @Private
  public void abortTask(TaskAttemptContext context, Path taskAttemptPath)
      throws IOException {
    if (hasOutputPath()) {
      context.progress();
      if(taskAttemptPath == null) {
        taskAttemptPath = getTaskAttemptPath(context);
      }
      FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
      if(!fs.delete(taskAttemptPath, true)) {
        // best-effort: a failed delete is logged, not fatal
        LOG.warn("Could not delete "+taskAttemptPath);
      }
    } else {
      LOG.warn("Output Path is null in abortTask()");
    }
  }

  /**
   * Did this task write any files in the work directory?
   * @param context the task's context
   */
  @Override
  public boolean needsTaskCommit(TaskAttemptContext context
  ) throws IOException {
    return needsTaskCommit(context, null);
  }

  @Private
  public boolean needsTaskCommit(TaskAttemptContext context, Path taskAttemptPath
  ) throws IOException {
    if(hasOutputPath()) {
      if(taskAttemptPath == null) {
        taskAttemptPath = getTaskAttemptPath(context);
      }
      FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
      // commit is needed iff the attempt dir exists (i.e. something was written)
      return fs.exists(taskAttemptPath);
    }
    return false;
  }

  @Override
  @Deprecated
  public boolean isRecoverySupported() {
    return true;
  }

  @Override
  public boolean isCommitJobRepeatable(JobContext context) throws IOException {
    // only algorithm 2's commit is idempotent and therefore repeatable
    return algorithmVersion == 2;
  }

  @Override
  public void recoverTask(TaskAttemptContext context)
      throws IOException {
    if(hasOutputPath()) {
      context.progress();
      TaskAttemptID attemptId = context.getTaskAttemptID();
      int previousAttempt = getAppAttemptId(context) - 1;
      if (previousAttempt < 0) {
        throw new IOException ("Cannot recover task output for first attempt...");
      }

      Path previousCommittedTaskPath = getCommittedTaskPath(
          previousAttempt, context);
      FileSystem fs = previousCommittedTaskPath.getFileSystem(context.getConfiguration());
      if (LOG.isDebugEnabled()) {
        LOG.debug("Trying to recover task from " + previousCommittedTaskPath);
      }
      if (algorithmVersion == 1) {
        if (fs.exists(previousCommittedTaskPath)) {
          Path committedTaskPath = getCommittedTaskPath(context);
          if (fs.exists(committedTaskPath)) {
            if (!fs.delete(committedTaskPath, true)) {
              throw new IOException("Could not delete "+committedTaskPath);
            }
          }
          //Rename can fail if the parent directory does not yet exist.
          Path committedParent = committedTaskPath.getParent();
          fs.mkdirs(committedParent);
          if (!fs.rename(previousCommittedTaskPath, committedTaskPath)) {
            throw new IOException("Could not rename " + previousCommittedTaskPath +
                " to " + committedTaskPath);
          }
        } else {
          LOG.warn(attemptId+" had no output to recover.");
        }
      } else {
        // essentially a no-op, but for backwards compatibility
        // after upgrade to the new fileOutputCommitter,
        // check if there are any output left in committedTaskPath
        if (fs.exists(previousCommittedTaskPath)) {
          LOG.info("Recovering task for upgrading scenario, moving files from "
              + previousCommittedTaskPath + " to " + outputPath);
          FileStatus from = fs.getFileStatus(previousCommittedTaskPath);
          mergePaths(fs, from, outputPath);
        }
        LOG.info("Done recovering task " + attemptId);
      }
    } else {
      LOG.warn("Output Path is null in recoverTask()");
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.twill.internal.appmaster;

import com.google.common.collect.Lists;
import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.Service;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.twill.api.RunId;
import org.apache.twill.internal.Constants;
import org.apache.twill.internal.ServiceMain;
import org.apache.twill.internal.TwillRuntimeSpecification;
import org.apache.twill.internal.json.TwillRuntimeSpecificationAdapter;
import org.apache.twill.internal.kafka.EmbeddedKafkaServer;
import org.apache.twill.internal.logging.Loggings;
import org.apache.twill.internal.yarn.VersionDetectYarnAMClientFactory;
import org.apache.twill.internal.yarn.YarnAMClient;
import org.apache.twill.zookeeper.OperationFuture;
import org.apache.twill.zookeeper.ZKClient;
import org.apache.twill.zookeeper.ZKClientService;
import org.apache.twill.zookeeper.ZKOperations;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;

/**
 * Main class for launching {@link ApplicationMasterService}.
 */
public final class ApplicationMasterMain extends ServiceMain {

  private static final Logger LOG = LoggerFactory.getLogger(ApplicationMasterMain.class);
  private final TwillRuntimeSpecification twillRuntimeSpec;

  /**
   * Starts the application master.
   */
  public static void main(String[] args) throws Exception {
    // the runtime spec is shipped inside the runtime config jar
    File twillSpec = new File(Constants.Files.RUNTIME_CONFIG_JAR, Constants.Files.TWILL_SPEC);
    TwillRuntimeSpecification twillRuntimeSpec = TwillRuntimeSpecificationAdapter.create().fromJson(twillSpec);

    new ApplicationMasterMain(twillRuntimeSpec).doMain();
  }

  private ApplicationMasterMain(TwillRuntimeSpecification twillRuntimeSpec) {
    this.twillRuntimeSpec = twillRuntimeSpec;
  }

  private void doMain() throws Exception {
    RunId runId = twillRuntimeSpec.getTwillAppRunId();

    ZKClientService zkClientService = createZKClient();
    Configuration conf = new YarnConfiguration(new HdfsConfiguration(new Configuration()));
    setRMSchedulerAddress(conf, twillRuntimeSpec.getRmSchedulerAddr());

    final YarnAMClient amClient = new VersionDetectYarnAMClientFactory(conf).create();
    ApplicationMasterService service =
      new ApplicationMasterService(runId, zkClientService, twillRuntimeSpec, amClient, conf,
                                   createAppLocation(conf, twillRuntimeSpec.getFsUser(),
                                                     twillRuntimeSpec.getTwillAppDir()));
    TrackerService trackerService = new TrackerService(service);

    // services that must be running before the AM service itself starts
    List<Service> prerequisites = Lists.newArrayList(
      new YarnAMClientService(amClient, trackerService),
      zkClientService,
      new AppMasterTwillZKPathService(zkClientService, runId)
    );

    if (twillRuntimeSpec.isLogCollectionEnabled()) {
      prerequisites.add(new ApplicationKafkaService(zkClientService, twillRuntimeSpec.getKafkaZKConnect()));
    } else {
      LOG.info("Log collection through kafka disabled");
    }

    new ApplicationMasterMain(twillRuntimeSpec)
      .doMain(
        service,
        prerequisites.toArray(new Service[prerequisites.size()])
      );
  }

  /**
   * Optionally sets the RM scheduler address based on the environment variable if it is not set in the cluster config.
   */
  private static void setRMSchedulerAddress(Configuration conf, String schedulerAddress) {
    if (schedulerAddress == null) {
      return;
    }

    // If the RM scheduler address is not in the config or it's from yarn-default.xml,
    // replace it with the one from the env, which is the same as the one client connected to.
    String[] sources = conf.getPropertySources(YarnConfiguration.RM_SCHEDULER_ADDRESS);
    if (sources == null || sources.length == 0 || "yarn-default.xml".equals(sources[sources.length - 1])) {
      conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, schedulerAddress);
    }
  }

  @Override
  protected String getHostname() {
    try {
      return InetAddress.getLocalHost().getCanonicalHostName();
    } catch (UnknownHostException e) {
      // hostname is informational only; fall back rather than fail
      return "unknown";
    }
  }

  @Override
  protected TwillRuntimeSpecification getTwillRuntimeSpecification() {
    return twillRuntimeSpec;
  }

  @Nullable
  @Override
  protected String getRunnableName() {
    // No runnable name for the AM
    return null;
  }

  /**
   * A service wrapper for starting/stopping {@link EmbeddedKafkaServer} and make sure the ZK path for
   * Kafka exists before starting the Kafka server.
*/ private static final class ApplicationKafkaService extends AbstractIdleService { private static final Logger LOG = LoggerFactory.getLogger(ApplicationKafkaService.class); private final ZKClient zkClient; private final EmbeddedKafkaServer kafkaServer; private final String kafkaZKPath; private ApplicationKafkaService(ZKClient zkClient, String kafkaZKConnect) { this.zkClient = zkClient; this.kafkaServer = new EmbeddedKafkaServer(generateKafkaConfig(kafkaZKConnect)); this.kafkaZKPath = kafkaZKConnect.substring(zkClient.getConnectString().length()); } @Override protected void startUp() throws Exception { // Create the ZK node for Kafka to use. If the node already exists, delete it to make sure there is // no left over content from previous AM attempt. LOG.info("Preparing Kafka ZK path {}{}", zkClient.getConnectString(), kafkaZKPath); ZKOperations.createDeleteIfExists(zkClient, kafkaZKPath, null, CreateMode.PERSISTENT, true).get(); kafkaServer.startAndWait(); } @Override protected void shutDown() throws Exception { // Flush all logs before shutting down Kafka server Loggings.forceFlush(); // Delay for 2 seconds to give clients chance to poll the last batch of log messages. 
try { TimeUnit.SECONDS.sleep(2); } catch (InterruptedException e) { // Ignore LOG.info("Kafka shutdown delay interrupted", e); } finally { kafkaServer.stopAndWait(); } } private Properties generateKafkaConfig(String kafkaZKConnect) { Properties prop = new Properties(); prop.setProperty("log.dir", new File("kafka-logs").getAbsolutePath()); prop.setProperty("broker.id", "1"); prop.setProperty("socket.send.buffer.bytes", "1048576"); prop.setProperty("socket.receive.buffer.bytes", "1048576"); prop.setProperty("socket.request.max.bytes", "104857600"); prop.setProperty("num.partitions", "1"); prop.setProperty("log.retention.hours", "24"); prop.setProperty("log.flush.interval.messages", "10000"); prop.setProperty("log.flush.interval.ms", "1000"); prop.setProperty("log.segment.bytes", "536870912"); prop.setProperty("zookeeper.connect", kafkaZKConnect); // Set the connection timeout to relatively short time (3 seconds). // It is only used by the org.I0Itec.zkclient.ZKClient inside KafkaServer // to block and wait for ZK connection goes into SyncConnected state. // However, due to race condition described in TWILL-139 in the ZK client library used by Kafka, // when ZK authentication is enabled, the ZK client may hang until connection timeout. // Setting it to lower value allow the AM to retry multiple times if race happens. prop.setProperty("zookeeper.connection.timeout.ms", "3000"); prop.setProperty("default.replication.factor", "1"); return prop; } } /** * A Service wrapper that starts {@link TrackerService} and {@link YarnAMClient}. It is needed because * the tracker host and url needs to be provided to {@link YarnAMClient} before it starts {@link YarnAMClient}. 
*/
private static final class YarnAMClientService extends AbstractIdleService {
  private final YarnAMClient yarnAMClient;
  private final TrackerService trackerService;

  private YarnAMClientService(YarnAMClient yarnAMClient, TrackerService trackerService) {
    this.yarnAMClient = yarnAMClient;
    this.trackerService = trackerService;
  }

  @Override
  protected void startUp() throws Exception {
    // The tracker must be started first so its bind address/URL can be handed
    // to the AM client before the AM client itself starts.
    trackerService.setHost(yarnAMClient.getHost());
    trackerService.startAndWait();

    yarnAMClient.setTracker(trackerService.getBindAddress(), trackerService.getUrl());
    try {
      yarnAMClient.startAndWait();
    } catch (Exception e) {
      // Roll back the tracker if the AM client fails to start, then rethrow.
      trackerService.stopAndWait();
      throw e;
    }
  }

  @Override
  protected void shutDown() throws Exception {
    // Stop in reverse start order; the finally guarantees the tracker is
    // stopped even when stopping the AM client fails.
    try {
      yarnAMClient.stopAndWait();
    } finally {
      trackerService.stopAndWait();
    }
  }
}

private static final class AppMasterTwillZKPathService extends TwillZKPathService {
  private static final Logger LOG = LoggerFactory.getLogger(AppMasterTwillZKPathService.class);

  private final ZKClient zkClient;

  AppMasterTwillZKPathService(ZKClient zkClient, RunId runId) {
    super(zkClient, runId);
    this.zkClient = zkClient;
  }

  @Override
  protected void shutDown() throws Exception {
    super.shutDown();

    // Deletes ZK nodes created for the application execution.
    // We don't have to worry about a race condition if another instance of the same app starts at the same time
    // as when removal is performed. This is because we always create nodes with "createParent == true",
    // which takes care of the parent node recreation if it is removed from here.

    // Try to delete the /instances path. It may throw NotEmptyException if there are other instances of the
    // same app running, which we can safely ignore and return.
    if (!delete(Constants.INSTANCES_PATH_PREFIX)) {
      return;
    }

    // Try to delete children under /discovery. It may fail with NotEmptyException if there are other instances
    // of the same app running that has discovery services running.
    List<String> children = getChildren(Constants.DISCOVERY_PATH_PREFIX);
    List<OperationFuture<?>> deleteFutures = new ArrayList<>();
    for (String child : children) {
      String path = Constants.DISCOVERY_PATH_PREFIX + "/" + child;
      LOG.info("Removing ZK path: {}{}", zkClient.getConnectString(), path);
      deleteFutures.add(zkClient.delete(path));
    }
    // Wait for all deletions to settle (successfulAsList never fails fast),
    // then inspect each future individually for the outcome.
    Futures.successfulAsList(deleteFutures).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    for (OperationFuture<?> future : deleteFutures) {
      try {
        future.get();
      } catch (ExecutionException e) {
        if (e.getCause() instanceof KeeperException.NotEmptyException) {
          // If any deletion of the service failed with not empty, it means there are other apps running,
          // hence just return
          return;
        }
        if (e.getCause() instanceof KeeperException.NoNodeException) {
          // If the service node is gone, it maybe deleted by another app instance that is also shutting down,
          // hence just keep going
          continue;
        }
        throw e;
      }
    }

    // Delete the /discovery. It may fail with NotEmptyException (due to race between apps),
    // which can safely ignore and return.
    if (!delete(Constants.DISCOVERY_PATH_PREFIX)) {
      return;
    }

    // Delete the ZK path for the app namespace.
    delete("/");
  }

  /**
   * Deletes the given ZK path.
   *
   * @param path path to delete
   * @return true if the path was deleted, false if failed to delete due to {@link KeeperException.NotEmptyException}.
   * @throws Exception if failed to delete the path
   */
  private boolean delete(String path) throws Exception {
    try {
      LOG.info("Removing ZK path: {}{}", zkClient.getConnectString(), path);
      zkClient.delete(path).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
      return true;
    } catch (ExecutionException e) {
      if (e.getCause() instanceof KeeperException.NotEmptyException) {
        return false;
      }
      if (e.getCause() instanceof KeeperException.NoNodeException) {
        // If the node to be deleted was not created or is already gone, it is the same as delete successfully.
        return true;
      }
      throw e;
    }
  }

  /**
   * Returns the list of children node under the given path.
   *
   * @param path path to get children
   * @return the list of children or empty list if the path doesn't exist.
   * @throws Exception if failed to get children
   */
  private List<String> getChildren(String path) throws Exception {
    try {
      return zkClient.getChildren(path).get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getChildren();
    } catch (ExecutionException e) {
      if (e.getCause() instanceof KeeperException.NoNodeException) {
        // If the node doesn't exists, return an empty list
        return Collections.emptyList();
      }
      throw e;
    }
  }
}
}
/* * Copyright (c) 2012-2015 The original author or authors * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package org.eclipse.moquette.server.netty; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelPipeline; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.handler.codec.MessageToMessageDecoder; import io.netty.handler.codec.MessageToMessageEncoder; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequestDecoder; import io.netty.handler.codec.http.HttpResponseEncoder; import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame; import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler; import io.netty.handler.ssl.SslHandler; import io.netty.handler.timeout.IdleStateHandler; import java.io.*; import java.net.URL; import java.security.*; import java.security.cert.CertificateException; import java.util.List; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import io.netty.util.concurrent.Future; import org.eclipse.moquette.commons.Constants; import 
org.eclipse.moquette.server.config.IConfig; import org.eclipse.moquette.spi.IMessaging; import org.eclipse.moquette.parser.netty.MQTTDecoder; import org.eclipse.moquette.parser.netty.MQTTEncoder; import org.eclipse.moquette.server.ServerAcceptor; import org.eclipse.moquette.server.netty.metrics.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author andrea */ public class NettyAcceptor implements ServerAcceptor { static class WebSocketFrameToByteBufDecoder extends MessageToMessageDecoder<BinaryWebSocketFrame> { @Override protected void decode(ChannelHandlerContext chc, BinaryWebSocketFrame frame, List<Object> out) throws Exception { //convert the frame to a ByteBuf ByteBuf bb = frame.content(); //System.out.println("WebSocketFrameToByteBufDecoder decode - " + ByteBufUtil.hexDump(bb)); bb.retain(); out.add(bb); } } static class ByteBufToWebSocketFrameEncoder extends MessageToMessageEncoder<ByteBuf> { @Override protected void encode(ChannelHandlerContext chc, ByteBuf bb, List<Object> out) throws Exception { //convert the ByteBuf to a WebSocketFrame BinaryWebSocketFrame result = new BinaryWebSocketFrame(); //System.out.println("ByteBufToWebSocketFrameEncoder encode - " + ByteBufUtil.hexDump(bb)); result.content().writeBytes(bb); out.add(result); } } abstract class PipelineInitializer { abstract void init(ChannelPipeline pipeline) throws Exception; } private static final Logger LOG = LoggerFactory.getLogger(NettyAcceptor.class); EventLoopGroup m_bossGroup; EventLoopGroup m_workerGroup; BytesMetricsCollector m_bytesMetricsCollector = new BytesMetricsCollector(); MessageMetricsCollector m_metricsCollector = new MessageMetricsCollector(); @Override public void initialize(IMessaging messaging, IConfig props) throws IOException { m_bossGroup = new NioEventLoopGroup(); m_workerGroup = new NioEventLoopGroup(); initializePlainTCPTransport(messaging, props); initializeWebSocketTransport(messaging, props); String sslTcpPortProp = 
props.getProperty(Constants.SSL_PORT_PROPERTY_NAME); String wssPortProp = props.getProperty(Constants.WSS_PORT_PROPERTY_NAME); if (sslTcpPortProp != null || wssPortProp != null) { SslHandlerFactory sslHandlerFactory = initSSLHandlerFactory(props); if (!sslHandlerFactory.canCreate()) { LOG.error("Can't initialize SSLHandler layer! Exiting, check your configuration of jks"); return; } initializeSSLTCPTransport(messaging, props, sslHandlerFactory); initializeWSSTransport(messaging, props, sslHandlerFactory); } } private void initFactory(String host, int port, final PipelineInitializer pipeliner) { ServerBootstrap b = new ServerBootstrap(); b.group(m_bossGroup, m_workerGroup) .channel(NioServerSocketChannel.class) .childHandler(new ChannelInitializer<SocketChannel>() { @Override public void initChannel(SocketChannel ch) throws Exception { ChannelPipeline pipeline = ch.pipeline(); try { pipeliner.init(pipeline); } catch (Throwable th) { LOG.error("Severe error during pipeline creation", th); throw th; } } }) .option(ChannelOption.SO_BACKLOG, 128) .option(ChannelOption.SO_REUSEADDR, true) .option(ChannelOption.TCP_NODELAY, true) .childOption(ChannelOption.SO_KEEPALIVE, true); try { // Bind and start to accept incoming connections. 
ChannelFuture f = b.bind(host, port); LOG.info("Server binded host: {}, port: {}", host, port); f.sync(); } catch (InterruptedException ex) { LOG.error(null, ex); } } private void initializePlainTCPTransport(IMessaging messaging, IConfig props) throws IOException { final NettyMQTTHandler handler = new NettyMQTTHandler(); final MoquetteIdleTimoutHandler timeoutHandler = new MoquetteIdleTimoutHandler(); handler.setMessaging(messaging); String host = props.getProperty(Constants.HOST_PROPERTY_NAME); int port = Integer.parseInt(props.getProperty(Constants.PORT_PROPERTY_NAME)); initFactory(host, port, new PipelineInitializer() { @Override void init(ChannelPipeline pipeline) { pipeline.addFirst("idleStateHandler", new IdleStateHandler(0, 0, Constants.DEFAULT_CONNECT_TIMEOUT)); pipeline.addAfter("idleStateHandler", "idleEventHandler", timeoutHandler); //pipeline.addLast("logger", new LoggingHandler("Netty", LogLevel.ERROR)); pipeline.addFirst("bytemetrics", new BytesMetricsHandler(m_bytesMetricsCollector)); pipeline.addLast("decoder", new MQTTDecoder()); pipeline.addLast("encoder", new MQTTEncoder()); pipeline.addLast("metrics", new MessageMetricsHandler(m_metricsCollector)); pipeline.addLast("handler", handler); } }); } private void initializeWebSocketTransport(IMessaging messaging, IConfig props) throws IOException { String webSocketPortProp = props.getProperty(Constants.WEB_SOCKET_PORT_PROPERTY_NAME); if (webSocketPortProp == null) { //Do nothing no WebSocket configured LOG.info("WebSocket is disabled"); return; } int port = Integer.parseInt(webSocketPortProp); final NettyMQTTHandler handler = new NettyMQTTHandler(); final MoquetteIdleTimoutHandler timeoutHandler = new MoquetteIdleTimoutHandler(); handler.setMessaging(messaging); String host = props.getProperty(Constants.HOST_PROPERTY_NAME); initFactory(host, port, new PipelineInitializer() { @Override void init(ChannelPipeline pipeline) { pipeline.addLast("httpEncoder", new HttpResponseEncoder()); 
pipeline.addLast("httpDecoder", new HttpRequestDecoder()); pipeline.addLast("aggregator", new HttpObjectAggregator(65536)); pipeline.addLast("webSocketHandler", new WebSocketServerProtocolHandler("/mqtt", "mqtt, mqttv3.1, mqttv3.1.1")); pipeline.addLast("ws2bytebufDecoder", new WebSocketFrameToByteBufDecoder()); pipeline.addLast("bytebuf2wsEncoder", new ByteBufToWebSocketFrameEncoder()); pipeline.addFirst("idleStateHandler", new IdleStateHandler(0, 0, Constants.DEFAULT_CONNECT_TIMEOUT)); pipeline.addAfter("idleStateHandler", "idleEventHandler", timeoutHandler); pipeline.addFirst("bytemetrics", new BytesMetricsHandler(m_bytesMetricsCollector)); pipeline.addLast("decoder", new MQTTDecoder()); pipeline.addLast("encoder", new MQTTEncoder()); pipeline.addLast("metrics", new MessageMetricsHandler(m_metricsCollector)); pipeline.addLast("handler", handler); } }); } private void initializeSSLTCPTransport(IMessaging messaging, IConfig props, final SslHandlerFactory sslHandlerFactory) throws IOException { String sslPortProp = props.getProperty(Constants.SSL_PORT_PROPERTY_NAME); if (sslPortProp == null) { //Do nothing no SSL configured LOG.info("SSL is disabled"); return; } int sslPort = Integer.parseInt(sslPortProp); LOG.info("Starting SSL on port {}", sslPort); final NettyMQTTHandler handler = new NettyMQTTHandler(); final MoquetteIdleTimoutHandler timeoutHandler = new MoquetteIdleTimoutHandler(); handler.setMessaging(messaging); String host = props.getProperty(Constants.HOST_PROPERTY_NAME); initFactory(host, sslPort, new PipelineInitializer() { @Override void init(ChannelPipeline pipeline) throws Exception { pipeline.addLast("ssl", sslHandlerFactory.create()); pipeline.addFirst("idleStateHandler", new IdleStateHandler(0, 0, Constants.DEFAULT_CONNECT_TIMEOUT)); pipeline.addAfter("idleStateHandler", "idleEventHandler", timeoutHandler); //pipeline.addLast("logger", new LoggingHandler("Netty", LogLevel.ERROR)); pipeline.addFirst("bytemetrics", new 
BytesMetricsHandler(m_bytesMetricsCollector)); pipeline.addLast("decoder", new MQTTDecoder()); pipeline.addLast("encoder", new MQTTEncoder()); pipeline.addLast("metrics", new MessageMetricsHandler(m_metricsCollector)); pipeline.addLast("handler", handler); } }); } private void initializeWSSTransport(IMessaging messaging, IConfig props, final SslHandlerFactory sslHandlerFactory) throws IOException { String sslPortProp = props.getProperty(Constants.WSS_PORT_PROPERTY_NAME); if (sslPortProp == null) { //Do nothing no SSL configured LOG.info("SSL is disabled"); return; } int sslPort = Integer.parseInt(sslPortProp); final NettyMQTTHandler handler = new NettyMQTTHandler(); final MoquetteIdleTimoutHandler timeoutHandler = new MoquetteIdleTimoutHandler(); handler.setMessaging(messaging); String host = props.getProperty(Constants.HOST_PROPERTY_NAME); initFactory(host, sslPort, new PipelineInitializer() { @Override void init(ChannelPipeline pipeline) throws Exception { pipeline.addLast("ssl", sslHandlerFactory.create()); pipeline.addLast("httpEncoder", new HttpResponseEncoder()); pipeline.addLast("httpDecoder", new HttpRequestDecoder()); pipeline.addLast("aggregator", new HttpObjectAggregator(65536)); pipeline.addLast("webSocketHandler", new WebSocketServerProtocolHandler("/mqtt", "mqtt mqttv3.1, mqttv3.1.1")); pipeline.addLast("ws2bytebufDecoder", new WebSocketFrameToByteBufDecoder()); pipeline.addLast("bytebuf2wsEncoder", new ByteBufToWebSocketFrameEncoder()); pipeline.addFirst("idleStateHandler", new IdleStateHandler(0, 0, Constants.DEFAULT_CONNECT_TIMEOUT)); pipeline.addAfter("idleStateHandler", "idleEventHandler", timeoutHandler); pipeline.addFirst("bytemetrics", new BytesMetricsHandler(m_bytesMetricsCollector)); pipeline.addLast("decoder", new MQTTDecoder()); pipeline.addLast("encoder", new MQTTEncoder()); pipeline.addLast("metrics", new MessageMetricsHandler(m_metricsCollector)); pipeline.addLast("handler", handler); } }); } public void close() { if (m_workerGroup == 
null) { throw new IllegalStateException("Invoked close on an Acceptor that wasn't initialized"); } if (m_bossGroup == null) { throw new IllegalStateException("Invoked close on an Acceptor that wasn't initialized"); } Future workerWaiter = m_workerGroup.shutdownGracefully(); Future bossWaiter = m_bossGroup.shutdownGracefully(); try { workerWaiter.await(100); } catch (InterruptedException iex) { throw new IllegalStateException(iex); } try { bossWaiter.await(100); } catch (InterruptedException iex) { throw new IllegalStateException(iex); } MessageMetrics metrics = m_metricsCollector.computeMetrics(); LOG.info("Msg read: {}, msg wrote: {}", metrics.messagesRead(), metrics.messagesWrote()); BytesMetrics bytesMetrics = m_bytesMetricsCollector.computeMetrics(); LOG.info(String.format("Bytes read: %d, bytes wrote: %d", bytesMetrics.readBytes(), bytesMetrics.wroteBytes())); } private SslHandlerFactory initSSLHandlerFactory(IConfig props) { SslHandlerFactory factory = new SslHandlerFactory(props); return factory.canCreate() ? 
factory : null; } private static class SslHandlerFactory { private SSLContext sslContext; public SslHandlerFactory(IConfig props) { this.sslContext = initSSLContext(props); } public boolean canCreate() { return this.sslContext != null; } private SSLContext initSSLContext(IConfig props) { final String jksPath = props.getProperty(Constants.JKS_PATH_PROPERTY_NAME); LOG.info("Starting SSL using keystore at {}", jksPath); if (jksPath == null || jksPath.isEmpty()) { //key_store_password or key_manager_password are empty LOG.warn("You have configured the SSL port but not the jks_path, SSL not started"); return null; } //if we have the port also the jks then keyStorePassword and keyManagerPassword //has to be defined final String keyStorePassword = props.getProperty(Constants.KEY_STORE_PASSWORD_PROPERTY_NAME); final String keyManagerPassword = props.getProperty(Constants.KEY_MANAGER_PASSWORD_PROPERTY_NAME); if (keyStorePassword == null || keyStorePassword.isEmpty()) { //key_store_password or key_manager_password are empty LOG.warn("You have configured the SSL port but not the key_store_password, SSL not started"); return null; } if (keyManagerPassword == null || keyManagerPassword.isEmpty()) { //key_manager_password or key_manager_password are empty LOG.warn("You have configured the SSL port but not the key_manager_password, SSL not started"); return null; } try { InputStream jksInputStream = jksDatastore(jksPath); SSLContext serverContext = SSLContext.getInstance("TLS"); final KeyStore ks = KeyStore.getInstance("JKS"); ks.load(jksInputStream, keyStorePassword.toCharArray()); final KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(ks, keyManagerPassword.toCharArray()); serverContext.init(kmf.getKeyManagers(), null, null); return serverContext; } catch (NoSuchAlgorithmException | UnrecoverableKeyException | CertificateException | KeyStoreException | KeyManagementException | IOException ex) { LOG.error("Can't start SSL 
layer!", ex); return null; } } private InputStream jksDatastore(String jksPath) throws FileNotFoundException { URL jksUrl = getClass().getClassLoader().getResource(jksPath); if (jksUrl != null) { LOG.info("Starting with jks at {}, jks normal {}", jksUrl.toExternalForm(), jksUrl); return getClass().getClassLoader().getResourceAsStream(jksPath); } LOG.info("jks not found in bundled resources, try on the filesystem"); File jksFile = new File(jksPath); if (jksFile.exists()) { LOG.info("Using {} ", jksFile.getAbsolutePath()); return new FileInputStream(jksFile); } LOG.warn("File {} doesn't exists", jksFile.getAbsolutePath()); return null; } public ChannelHandler create() { SSLEngine sslEngine = sslContext.createSSLEngine(); sslEngine.setUseClientMode(false); return new SslHandler(sslEngine); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.execution.buffer.PagesSerde; import com.facebook.presto.execution.buffer.PagesSerdeFactory; import com.facebook.presto.metadata.Split; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.UpdatablePageSource; import com.facebook.presto.spi.block.SortOrder; import com.facebook.presto.spi.type.Type; import com.facebook.presto.split.RemoteSplit; import com.facebook.presto.sql.gen.OrderingCompiler; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.google.common.io.Closer; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.net.URI; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.function.Supplier; import static com.facebook.presto.util.MergeSortedPages.mergeSortedPages; import static com.facebook.presto.util.MoreLists.mappedCopy; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static java.util.Objects.requireNonNull; public class MergeOperator implements SourceOperator, Closeable { public static class MergeOperatorFactory implements SourceOperatorFactory { private final int operatorId; private final PlanNodeId sourceId; private final 
ExchangeClientSupplier exchangeClientSupplier; private final PagesSerdeFactory serdeFactory; private final List<Type> types; private final List<Integer> outputChannels; private final List<Type> outputTypes; private final List<Integer> sortChannels; private final List<SortOrder> sortOrder; private final OrderingCompiler orderingCompiler; private boolean closed; public MergeOperatorFactory( int operatorId, PlanNodeId sourceId, ExchangeClientSupplier exchangeClientSupplier, PagesSerdeFactory serdeFactory, OrderingCompiler orderingCompiler, List<Type> types, List<Integer> outputChannels, List<Integer> sortChannels, List<SortOrder> sortOrder) { this.operatorId = operatorId; this.sourceId = requireNonNull(sourceId, "sourceId is null"); this.exchangeClientSupplier = requireNonNull(exchangeClientSupplier, "exchangeClientSupplier is null"); this.serdeFactory = requireNonNull(serdeFactory, "serdeFactory is null"); this.types = requireNonNull(types, "types is null"); this.outputChannels = requireNonNull(outputChannels, "outputChannels is null"); this.outputTypes = mappedCopy(outputChannels, types::get); this.sortChannels = requireNonNull(sortChannels, "sortChannels is null"); this.sortOrder = requireNonNull(sortOrder, "sortOrder is null"); this.orderingCompiler = requireNonNull(orderingCompiler, "mergeSortComparatorFactory is null"); } @Override public PlanNodeId getSourceId() { return sourceId; } @Override public SourceOperator createOperator(DriverContext driverContext) { checkState(!closed, "Factory is already closed"); OperatorContext operatorContext = driverContext.addOperatorContext(operatorId, sourceId, MergeOperator.class.getSimpleName()); return new MergeOperator( operatorContext, sourceId, exchangeClientSupplier, serdeFactory.createPagesSerde(), orderingCompiler.compilePageWithPositionComparator(types, sortChannels, sortOrder), outputChannels, outputTypes); } @Override public void noMoreOperators() { closed = true; } } private final OperatorContext operatorContext; 
private final PlanNodeId sourceId; private final ExchangeClientSupplier exchangeClientSupplier; private final PagesSerde pagesSerde; private final PageWithPositionComparator comparator; private final List<Integer> outputChannels; private final List<Type> outputTypes; private final SettableFuture<Void> blockedOnSplits = SettableFuture.create(); private final List<WorkProcessor<Page>> pageProducers = new ArrayList<>(); private final Closer closer = Closer.create(); private WorkProcessor<Page> mergedPages; private boolean closed; public MergeOperator( OperatorContext operatorContext, PlanNodeId sourceId, ExchangeClientSupplier exchangeClientSupplier, PagesSerde pagesSerde, PageWithPositionComparator comparator, List<Integer> outputChannels, List<Type> outputTypes) { this.operatorContext = requireNonNull(operatorContext, "operatorContext is null"); this.sourceId = requireNonNull(sourceId, "sourceId is null"); this.exchangeClientSupplier = requireNonNull(exchangeClientSupplier, "exchangeClientSupplier is null"); this.pagesSerde = requireNonNull(pagesSerde, "pagesSerde is null"); this.comparator = requireNonNull(comparator, "comparator is null"); this.outputChannels = requireNonNull(outputChannels, "outputChannels is null"); this.outputTypes = requireNonNull(outputTypes, "outputTypes is null"); } @Override public PlanNodeId getSourceId() { return sourceId; } @Override public Supplier<Optional<UpdatablePageSource>> addSplit(Split split) { requireNonNull(split, "split is null"); checkArgument(split.getConnectorSplit() instanceof RemoteSplit, "split is not a remote split"); checkState(!blockedOnSplits.isDone(), "noMoreSplits has been called already"); URI location = ((RemoteSplit) split.getConnectorSplit()).getLocation(); ExchangeClient exchangeClient = closer.register(exchangeClientSupplier.get(operatorContext.localSystemMemoryContext())); exchangeClient.addLocation(location); exchangeClient.noMoreLocations(); 
pageProducers.add(exchangeClient.pages().map(pagesSerde::deserialize)); return Optional::empty; } @Override public void noMoreSplits() { mergedPages = mergeSortedPages( pageProducers, comparator, outputChannels, outputTypes, (pageBuilder, pageWithPosition) -> pageBuilder.isFull(), false, operatorContext.aggregateUserMemoryContext(), operatorContext.getDriverContext().getYieldSignal()); blockedOnSplits.set(null); } @Override public OperatorContext getOperatorContext() { return operatorContext; } @Override public void finish() { close(); } @Override public boolean isFinished() { return closed || (mergedPages != null && mergedPages.isFinished()); } @Override public ListenableFuture<?> isBlocked() { if (!blockedOnSplits.isDone()) { return blockedOnSplits; } if (mergedPages.isBlocked()) { return mergedPages.getBlockedFuture(); } return NOT_BLOCKED; } @Override public boolean needsInput() { return false; } @Override public void addInput(Page page) { throw new UnsupportedOperationException(getClass().getName() + " can not take input"); } @Override public Page getOutput() { if (closed || mergedPages == null || !mergedPages.process() || mergedPages.isFinished()) { return null; } Page page = mergedPages.getResult(); operatorContext.recordGeneratedInput(page.getSizeInBytes(), page.getPositionCount()); return page; } @Override public void close() { try { closer.close(); closed = true; } catch (IOException e) { throw new UncheckedIOException(e); } } }
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hssf.record; import org.apache.poi.util.BitField; import org.apache.poi.util.BitFieldFactory; import org.apache.poi.util.LittleEndianOutput; /** * Title: Window Two Record<P> * Description: sheet window settings<P> * REFERENCE: PG 422 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)<P> * @author Andrew C. 
Oliver (acoliver at apache dot org) * @author Jason Height (jheight at chariot dot net dot au) * @version 2.0-pre */ public final class WindowTwoRecord extends StandardRecord { public final static short sid = 0x023E; // bitfields private static final BitField displayFormulas = BitFieldFactory.getInstance(0x01); private static final BitField displayGridlines = BitFieldFactory.getInstance(0x02); private static final BitField displayRowColHeadings = BitFieldFactory.getInstance(0x04); private static final BitField freezePanes = BitFieldFactory.getInstance(0x08); private static final BitField displayZeros = BitFieldFactory.getInstance(0x10); /** if false use color in field 4 if true use default foreground for headers */ private static final BitField defaultHeader = BitFieldFactory.getInstance(0x20); private static final BitField arabic = BitFieldFactory.getInstance(0x040); private static final BitField displayGuts = BitFieldFactory.getInstance(0x080); private static final BitField freezePanesNoSplit = BitFieldFactory.getInstance(0x100); private static final BitField selected = BitFieldFactory.getInstance(0x200); private static final BitField active = BitFieldFactory.getInstance(0x400); private static final BitField savedInPageBreakPreview = BitFieldFactory.getInstance(0x800); // 4-7 reserved // end bitfields private short field_1_options; private short field_2_top_row; private short field_3_left_col; private int field_4_header_color; private short field_5_page_break_zoom; private short field_6_normal_zoom; private int field_7_reserved; public WindowTwoRecord() { } public WindowTwoRecord(RecordInputStream in) { int size = in.remaining(); field_1_options = in.readShort(); field_2_top_row = in.readShort(); field_3_left_col = in.readShort(); field_4_header_color = in.readInt(); if (size > 10) { field_5_page_break_zoom = in.readShort(); field_6_normal_zoom = in.readShort(); } if (size > 14) { // there is a special case of this record that has only 14 bytes...undocumented! 
field_7_reserved = in.readInt(); } } /** * set the options bitmask or just use the bit setters. * @param options */ public void setOptions(short options) { field_1_options = options; } // option bitfields /** * set whether the window should display formulas * @param formulas or not */ public void setDisplayFormulas(boolean formulas) { field_1_options = displayFormulas.setShortBoolean(field_1_options, formulas); } /** * set whether the window should display gridlines * @param gridlines or not */ public void setDisplayGridlines(boolean gridlines) { field_1_options = displayGridlines.setShortBoolean(field_1_options, gridlines); } /** * set whether the window should display row and column headings * @param headings or not */ public void setDisplayRowColHeadings(boolean headings) { field_1_options = displayRowColHeadings.setShortBoolean(field_1_options, headings); } /** * set whether the window should freeze panes * @param freezepanes freeze panes or not */ public void setFreezePanes(boolean freezepanes) { field_1_options = freezePanes.setShortBoolean(field_1_options, freezepanes); } /** * set whether the window should display zero values * @param zeros or not */ public void setDisplayZeros(boolean zeros) { field_1_options = displayZeros.setShortBoolean(field_1_options, zeros); } /** * set whether the window should display a default header * @param header or not */ public void setDefaultHeader(boolean header) { field_1_options = defaultHeader.setShortBoolean(field_1_options, header); } /** * is this arabic? 
* @param isarabic arabic or not */ public void setArabic(boolean isarabic) { field_1_options = arabic.setShortBoolean(field_1_options, isarabic); } /** * set whether the outline symbols are displaed * @param guts symbols or not */ public void setDisplayGuts(boolean guts) { field_1_options = displayGuts.setShortBoolean(field_1_options, guts); } /** * freeze unsplit panes or not * @param freeze or not */ public void setFreezePanesNoSplit(boolean freeze) { field_1_options = freezePanesNoSplit.setShortBoolean(field_1_options, freeze); } /** * sheet tab is selected * @param sel selected or not */ public void setSelected(boolean sel) { field_1_options = selected.setShortBoolean(field_1_options, sel); } /** * is the sheet currently displayed in the window * @param p displayed or not */ public void setActive(boolean p) { field_1_options = active.setShortBoolean(field_1_options, p); } /** * was the sheet saved in page break view * @param p pagebreaksaved or not */ public void setSavedInPageBreakPreview(boolean p) { field_1_options = savedInPageBreakPreview.setShortBoolean(field_1_options, p); } // end of bitfields. 
/**
 * set the top row visible in the window
 * @param topRow top row visible
 */
public void setTopRow(short topRow) {
    field_2_top_row = topRow;
}

/**
 * set the leftmost column displayed in the window
 * @param leftCol leftmost column
 */
public void setLeftCol(short leftCol) {
    field_3_left_col = leftCol;
}

/**
 * set the palette index for the header color
 * @param color palette index for the header color
 */
public void setHeaderColor(int color) {
    field_4_header_color = color;
}

/**
 * zoom magnification in page break view
 * @param zoom magnification in page break view
 */
public void setPageBreakZoom(short zoom) {
    field_5_page_break_zoom = zoom;
}

/**
 * set the zoom magnification in normal view
 * @param zoom magnification in normal view
 */
public void setNormalZoom(short zoom) {
    field_6_normal_zoom = zoom;
}

/**
 * set the reserved (don't do this) value
 * @param reserved raw value written straight into the record
 */
public void setReserved(int reserved) {
    field_7_reserved = reserved;
}

/**
 * get the options bitmask or just use the bit setters.
 * @return options the raw packed flags word
 */
public short getOptions() {
    return field_1_options;
}

// option bitfields -- each getter below reads a single flag out of
// field_1_options via a shared static BitField.

/**
 * get whether the window should display formulas
 * @return formulas or not
 */
public boolean getDisplayFormulas() {
    return displayFormulas.isSet(field_1_options);
}

/**
 * get whether the window should display gridlines
 * @return gridlines or not
 */
public boolean getDisplayGridlines() {
    return displayGridlines.isSet(field_1_options);
}

/**
 * get whether the window should display row and column headings
 * @return headings or not
 */
public boolean getDisplayRowColHeadings() {
    return displayRowColHeadings.isSet(field_1_options);
}

/**
 * get whether the window should freeze panes
 * @return freeze panes or not
 */
public boolean getFreezePanes() {
    return freezePanes.isSet(field_1_options);
}

/**
 * get whether the window should display zero values
 * @return zeros or not
 */
public boolean getDisplayZeros() {
    return displayZeros.isSet(field_1_options);
}

/**
 * get whether the window should display a default header
 * @return header or not
 */
public boolean getDefaultHeader() {
    return defaultHeader.isSet(field_1_options);
}

/**
 * is this arabic (right-to-left display)?
 * @return arabic or not
 */
public boolean getArabic() {
    return arabic.isSet(field_1_options);
}

/**
 * get whether the outline symbols are displayed
 * @return symbols or not
 */
public boolean getDisplayGuts() {
    return displayGuts.isSet(field_1_options);
}

/**
 * freeze unsplit panes or not
 * @return freeze or not
 */
public boolean getFreezePanesNoSplit() {
    return freezePanesNoSplit.isSet(field_1_options);
}

/**
 * sheet tab is selected
 * @return selected or not
 */
public boolean getSelected() {
    return selected.isSet(field_1_options);
}

/**
 * is the sheet currently displayed in the window
 * @return displayed or not
 */
public boolean isActive() {
    return active.isSet(field_1_options);
}

/**
 * was the sheet saved in page break view
 * @return pagebreaksaved or not
 */
public boolean getSavedInPageBreakPreview() {
    return savedInPageBreakPreview.isSet(field_1_options);
}

// end of bitfields.

/**
 * get the top row visible in the window
 * @return toprow
 */
public short getTopRow() {
    return field_2_top_row;
}

/**
 * get the leftmost column displayed in the window
 * @return leftmost
 */
public short getLeftCol() {
    return field_3_left_col;
}

/**
 * get the palette index for the header color
 * @return color
 */
public int getHeaderColor() {
    return field_4_header_color;
}

/**
 * zoom magnification in page break view
 * @return zoom
 */
public short getPageBreakZoom() {
    return field_5_page_break_zoom;
}

/**
 * get the zoom magnification in normal view
 * @return zoom
 */
public short getNormalZoom() {
    return field_6_normal_zoom;
}

/**
 * get the reserved bits - why would you do this?
 * @return reserved stuff - probably garbage
 */
public int getReserved() {
    return field_7_reserved;
}

/** Renders every field (flags decoded individually) for debugging/dump output. */
public String toString() {
    StringBuffer buffer = new StringBuffer();

    buffer.append("[WINDOW2]\n");
    buffer.append(" .options = ")
        .append(Integer.toHexString(getOptions())).append("\n");
    buffer.append(" .dispformulas= ").append(getDisplayFormulas())
        .append("\n");
    buffer.append(" .dispgridlins= ").append(getDisplayGridlines())
        .append("\n");
    buffer.append(" .disprcheadin= ")
        .append(getDisplayRowColHeadings()).append("\n");
    buffer.append(" .freezepanes = ").append(getFreezePanes())
        .append("\n");
    buffer.append(" .displayzeros= ").append(getDisplayZeros())
        .append("\n");
    buffer.append(" .defaultheadr= ").append(getDefaultHeader())
        .append("\n");
    buffer.append(" .arabic = ").append(getArabic())
        .append("\n");
    buffer.append(" .displayguts = ").append(getDisplayGuts())
        .append("\n");
    buffer.append(" .frzpnsnosplt= ")
        .append(getFreezePanesNoSplit()).append("\n");
    buffer.append(" .selected = ").append(getSelected())
        .append("\n");
    buffer.append(" .active = ").append(isActive())
        .append("\n");
    buffer.append(" .svdinpgbrkpv= ")
        .append(getSavedInPageBreakPreview()).append("\n");
    buffer.append(" .toprow = ")
        .append(Integer.toHexString(getTopRow())).append("\n");
    buffer.append(" .leftcol = ")
        .append(Integer.toHexString(getLeftCol())).append("\n");
    buffer.append(" .headercolor = ")
        .append(Integer.toHexString(getHeaderColor())).append("\n");
    buffer.append(" .pagebreakzoom = ")
        .append(Integer.toHexString(getPageBreakZoom())).append("\n");
    buffer.append(" .normalzoom = ")
        .append(Integer.toHexString(getNormalZoom())).append("\n");
    buffer.append(" .reserved = ")
        .append(Integer.toHexString(getReserved())).append("\n");
    buffer.append("[/WINDOW2]\n");
    return buffer.toString();
}

/**
 * Writes the record payload in BIFF order. Field widths are
 * short+short+short+int+short+short+int = 18 bytes, matching getDataSize().
 */
public void serialize(LittleEndianOutput out) {
    out.writeShort(getOptions());
    out.writeShort(getTopRow());
    out.writeShort(getLeftCol());
    out.writeInt(getHeaderColor());
    out.writeShort(getPageBreakZoom());
    out.writeShort(getNormalZoom());
    out.writeInt(getReserved());
}

// 18 = payload size written by serialize() above
protected int getDataSize() {
    return 18;
}

public short getSid() {
    return sid;
}

/**
 * Field-by-field copy of all seven data fields.
 * NOTE(review): does not call super.clone(); assumes the record base class
 * carries no copyable state -- confirm against the (unseen) superclass.
 */
public Object clone() {
    WindowTwoRecord rec = new WindowTwoRecord();
    rec.field_1_options = field_1_options;
    rec.field_2_top_row = field_2_top_row;
    rec.field_3_left_col = field_3_left_col;
    rec.field_4_header_color = field_4_header_color;
    rec.field_5_page_break_zoom = field_5_page_break_zoom;
    rec.field_6_normal_zoom = field_6_normal_zoom;
    rec.field_7_reserved = field_7_reserved;
    return rec;
}
}
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.consistent.impl; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import java.util.Set; import org.apache.commons.lang3.tuple.Pair; import org.onosproject.store.service.UpdateOperation; import org.onosproject.store.service.Versioned; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import net.kuujo.copycat.state.Initializer; import net.kuujo.copycat.state.StateContext; /** * Default database state. 
*
* @param <K> key type
* @param <V> value type
*/
public class DefaultDatabaseState<K, V> implements DatabaseState<K, V> {

    // Version counter shared by all tables; incremented on every write so each
    // Versioned value carries a unique, monotonically increasing version.
    private Long nextVersion;
    // tableName -> (key -> versioned value). Lazily populated by getTableMap().
    private Map<String, Map<K, Versioned<V>>> tables;

    @Initializer
    @Override
    public void init(StateContext<DatabaseState<K, V>> context) {
        tables = context.get("tables");
        if (tables == null) {
            tables = new HashMap<>();
            context.put("tables", tables);
        }
        nextVersion = context.get("nextVersion");
        if (nextVersion == null) {
            // Autobox instead of the deprecated new Long(0) constructor.
            nextVersion = 0L;
            context.put("nextVersion", nextVersion);
        }
    }

    /** Returns the map backing {@code tableName}, creating it on first access. */
    private Map<K, Versioned<V>> getTableMap(String tableName) {
        Map<K, Versioned<V>> table = tables.get(tableName);
        if (table == null) {
            table = new HashMap<>();
            tables.put(tableName, table);
        }
        return table;
    }

    @Override
    public int size(String tableName) {
        return getTableMap(tableName).size();
    }

    @Override
    public boolean isEmpty(String tableName) {
        return getTableMap(tableName).isEmpty();
    }

    @Override
    public boolean containsKey(String tableName, K key) {
        return getTableMap(tableName).containsKey(key);
    }

    @Override
    public boolean containsValue(String tableName, V value) {
        return getTableMap(tableName).values().stream().anyMatch(v -> checkEquality(v.value(), value));
    }

    @Override
    public Versioned<V> get(String tableName, K key) {
        return getTableMap(tableName).get(key);
    }

    @Override
    public Versioned<V> put(String tableName, K key, V value) {
        return getTableMap(tableName).put(key, new Versioned<>(value, ++nextVersion));
    }

    @Override
    public Versioned<V> remove(String tableName, K key) {
        return getTableMap(tableName).remove(key);
    }

    @Override
    public void clear(String tableName) {
        getTableMap(tableName).clear();
    }

    @Override
    public Set<K> keySet(String tableName) {
        return ImmutableSet.copyOf(getTableMap(tableName).keySet());
    }

    @Override
    public Collection<Versioned<V>> values(String tableName) {
        return ImmutableList.copyOf(getTableMap(tableName).values());
    }

    @Override
    public Set<Entry<K, Versioned<V>>> entrySet(String tableName) {
        // Pair implements Map.Entry, so the copied set satisfies the return type
        // while being fully detached from the live table map.
        return ImmutableSet.copyOf(getTableMap(tableName)
                .entrySet()
                .stream()
                .map(entry -> Pair.of(entry.getKey(), entry.getValue()))
                .collect(Collectors.toSet()));
    }

    @Override
    public Versioned<V> putIfAbsent(String tableName, K key, V value) {
        Versioned<V> existingValue = getTableMap(tableName).get(key);
        return existingValue != null ? existingValue : put(tableName, key, value);
    }

    @Override
    public boolean remove(String tableName, K key, V value) {
        Versioned<V> existing = getTableMap(tableName).get(key);
        if (existing != null && checkEquality(existing.value(), value)) {
            getTableMap(tableName).remove(key);
            return true;
        }
        return false;
    }

    @Override
    public boolean remove(String tableName, K key, long version) {
        Versioned<V> existing = getTableMap(tableName).get(key);
        if (existing != null && existing.version() == version) {
            remove(tableName, key);
            return true;
        }
        return false;
    }

    @Override
    public boolean replace(String tableName, K key, V oldValue, V newValue) {
        Versioned<V> existing = getTableMap(tableName).get(key);
        if (existing != null && checkEquality(existing.value(), oldValue)) {
            put(tableName, key, newValue);
            return true;
        }
        return false;
    }

    @Override
    public boolean replace(String tableName, K key, long oldVersion, V newValue) {
        Versioned<V> existing = getTableMap(tableName).get(key);
        if (existing != null && existing.version() == oldVersion) {
            put(tableName, key, newValue);
            return true;
        }
        return false;
    }

    @Override
    public boolean batchUpdate(List<UpdateOperation<K, V>> updates) {
        // All-or-nothing: apply the batch only if every precondition holds.
        if (updates.stream().anyMatch(update -> !checkIfUpdateIsPossible(update))) {
            return false;
        } else {
            updates.stream().forEach(this::doUpdate);
            return true;
        }
    }

    /** Applies one update whose precondition has already been validated. */
    private void doUpdate(UpdateOperation<K, V> update) {
        String tableName = update.tableName();
        K key = update.key();
        switch (update.type()) {
        case PUT:
            put(tableName, key, update.value());
            return;
        case REMOVE:
            remove(tableName, key);
            return;
        case PUT_IF_ABSENT:
            putIfAbsent(tableName, key, update.value());
            return;
        case PUT_IF_VERSION_MATCH:
            // BUG FIX: previously passed update.currentValue() here, dispatching to
            // the value-based replace() overload even though the precondition checked
            // by checkIfUpdateIsPossible() for this type is version-based.
            replace(tableName, key, update.currentVersion(), update.value());
            return;
        case PUT_IF_VALUE_MATCH:
            // BUG FIX: previously passed update.currentVersion() here (the
            // version-based overload); a value-match update compares values.
            replace(tableName, key, update.currentValue(), update.value());
            return;
        case REMOVE_IF_VERSION_MATCH:
            remove(tableName, key, update.currentVersion());
            return;
        case REMOVE_IF_VALUE_MATCH:
            remove(tableName, key, update.currentValue());
            return;
        default:
            throw new IllegalStateException("Unsupported type: " + update.type());
        }
    }

    /** Evaluates an update's precondition against the current table state. */
    private boolean checkIfUpdateIsPossible(UpdateOperation<K, V> update) {
        Versioned<V> existingEntry = get(update.tableName(), update.key());
        switch (update.type()) {
        case PUT:
        case REMOVE:
            return true;
        case PUT_IF_ABSENT:
            return existingEntry == null;
        case PUT_IF_VERSION_MATCH:
            return existingEntry != null && existingEntry.version() == update.currentVersion();
        case PUT_IF_VALUE_MATCH:
            return existingEntry != null && checkEquality(existingEntry.value(), update.currentValue());
        case REMOVE_IF_VERSION_MATCH:
            // Removing an already-absent entry is treated as satisfiable.
            return existingEntry == null || existingEntry.version() == update.currentVersion();
        case REMOVE_IF_VALUE_MATCH:
            return existingEntry == null || checkEquality(existingEntry.value(), update.currentValue());
        default:
            throw new IllegalStateException("Unsupported type: " + update.type());
        }
    }

    /**
     * Equality that compares byte arrays by content rather than identity.
     * NOTE(review): assumes value1 is non-null; a null stored value would NPE
     * here -- confirm callers never store null.
     */
    private boolean checkEquality(V value1, V value2) {
        if (value1 instanceof byte[]) {
            return Arrays.equals((byte[]) value1, (byte[]) value2);
        }
        return value1.equals(value2);
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.debugger; import com.intellij.debugger.NoDataException; import com.intellij.debugger.PositionManager; import com.intellij.debugger.SourcePosition; import com.intellij.debugger.engine.CompoundPositionManager; import com.intellij.debugger.engine.DebugProcess; import com.intellij.debugger.engine.DebugProcessImpl; import com.intellij.debugger.engine.jdi.VirtualMachineProxy; import com.intellij.debugger.impl.DebuggerUtilsEx; import com.intellij.debugger.requests.ClassPrepareRequestor; import com.intellij.openapi.application.AccessToken; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.impl.scopes.ModuleWithDependenciesScope; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiClassUtil; import com.intellij.psi.util.PsiTreeUtil; import 
com.sun.jdi.AbsentInformationException;
import com.sun.jdi.Location;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.request.ClassPrepareRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyFileType;
import org.jetbrains.plugins.groovy.extensions.debugger.ScriptPositionManagerHelper;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFileBase;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.stubs.GroovyShortNamesCache;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

/**
 * Maps between Groovy source positions and JVM debugger locations: resolves
 * breakpoint lines to JDI {@link Location}s, JDI reference types back to PSI
 * files, and sets up class-prepare requests for not-yet-loaded classes.
 * Applies only to Groovy files (see getAcceptedFileTypes / checkGroovyFile);
 * everything else is rejected with NoDataException so other managers can try.
 */
public class GroovyPositionManager implements PositionManager {
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.PositionManagerImpl");

  private final DebugProcess myDebugProcess;
  private static final Set<FileType> ourFileTypes = Collections.singleton(GroovyFileType.GROOVY_FILE_TYPE);

  public GroovyPositionManager(DebugProcess debugProcess) {
    myDebugProcess = debugProcess;
  }

  public DebugProcess getDebugProcess() {
    return myDebugProcess;
  }

  /**
   * Returns the JDI locations of the given source line within {@code type}.
   * @throws NoDataException if the position is not in a Groovy file or the
   *         VM has no line info for it (so another PositionManager can try)
   */
  @Override
  @NotNull
  public List<Location> locationsOfLine(@NotNull ReferenceType type, @NotNull SourcePosition position) throws NoDataException {
    checkGroovyFile(position);
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("locationsOfLine: " + type + "; " + position);
      }
      // SourcePosition lines are 0-based; JDI lines are 1-based.
      int line = position.getLine() + 1;
      List<Location> locations =
        getDebugProcess().getVirtualMachineProxy().versionHigher("1.4")
        ? type.locationsOfLine(DebugProcess.JAVA_STRATUM, null, line)
        : type.locationsOfLine(line);
      if (locations == null || locations.isEmpty()) throw NoDataException.INSTANCE;
      return locations;
    }
    catch (AbsentInformationException e) {
      throw NoDataException.INSTANCE;
    }
  }

  /**
   * Finds the closest enclosing closure or type definition at the position --
   * the PSI element that corresponds to a distinct JVM class.
   */
  @Nullable
  private static GroovyPsiElement findReferenceTypeSourceImage(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof GroovyFileBase)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    if (element == null) return null;
    return PsiTreeUtil.getParentOfType(element, GrClosableBlock.class, GrTypeDefinition.class);
  }

  /**
   * Walks up from the position to the nearest named type definition, falling
   * back to the file's synthetic script class at the top level.
   * NOTE(review): if getParentOfType ever returns null (element not under a
   * GroovyFileBase ancestor), this loop would not terminate -- presumably the
   * instanceof GroovyFileBase guard above makes that unreachable; confirm.
   */
  @Nullable
  private static PsiClass findEnclosingTypeDefinition(SourcePosition position) {
    PsiFile file = position.getFile();
    if (!(file instanceof GroovyFileBase)) return null;
    PsiElement element = file.findElementAt(position.getOffset());
    while (true) {
      element = PsiTreeUtil.getParentOfType(element, GrTypeDefinition.class, GroovyFileBase.class);
      if (element instanceof GroovyFileBase) {
        return ((GroovyFileBase)element).getScriptClass();
      }
      else if (element instanceof GrTypeDefinition && !((GrTypeDefinition)element).isAnonymous()) {
        // Anonymous classes are skipped; keep climbing to a named owner.
        return (GrTypeDefinition)element;
      }
    }
  }

  /** Rejects non-Groovy positions so other position managers get a chance. */
  private static void checkGroovyFile(@NotNull SourcePosition position) throws NoDataException {
    if (!(position.getFile() instanceof GroovyFileBase)) {
      throw NoDataException.INSTANCE;
    }
  }

  /**
   * Registers a class-prepare request for the class at {@code position}.
   * When only an enclosing class name is known, watches "Outer$*" and filters
   * prepared classes by whether they actually contain the requested line.
   */
  @Override
  public ClassPrepareRequest createPrepareRequest(@NotNull final ClassPrepareRequestor requestor, @NotNull final SourcePosition position) throws NoDataException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("createPrepareRequest: " + position);
    }
    checkGroovyFile(position);

    String qName = getOuterClassName(position);
    if (qName != null) {
      return myDebugProcess.getRequestsManager().createClassPrepareRequest(requestor, qName);
    }

    qName = findEnclosingName(position);
    if (qName == null) throw NoDataException.INSTANCE;

    ClassPrepareRequestor waitRequestor = new ClassPrepareRequestor() {
      @Override
      public void processClassPrepare(DebugProcess debuggerProcess, ReferenceType referenceType) {
        final CompoundPositionManager positionManager = ((DebugProcessImpl)debuggerProcess).getPositionManager();
        // Forward only if the prepared class really contains this line.
        if (!positionManager.locationsOfLine(referenceType, position).isEmpty()) {
          requestor.processClassPrepare(debuggerProcess, referenceType);
        }
      }
    };
    // "$*" matches the enclosing class plus any of its nested/closure classes.
    return myDebugProcess.getRequestsManager().createClassPrepareRequest(waitRequestor, qName + "$*");
  }

  /**
   * JVM name of the nearest enclosing named type (or the script class).
   * Runs under a read action because it touches PSI.
   */
  @Nullable
  private static String findEnclosingName(@NotNull final SourcePosition position) {
    AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
    try {
      PsiClass typeDefinition = findEnclosingTypeDefinition(position);
      if (typeDefinition != null) {
        return getClassNameForJvm(typeDefinition);
      }
      return getScriptQualifiedName(position);
    }
    finally {
      accessToken.finish();
    }
  }

  /**
   * JVM name when the position maps directly to a type definition or to the
   * script top level; null when it sits inside a closure (the exact generated
   * class name is then unknown and a wildcard prepare request is used instead).
   */
  @Nullable
  private static String getOuterClassName(final SourcePosition position) {
    AccessToken accessToken = ApplicationManager.getApplication().acquireReadActionLock();
    try {
      GroovyPsiElement sourceImage = findReferenceTypeSourceImage(position);
      if (sourceImage instanceof GrTypeDefinition) {
        return getClassNameForJvm((GrTypeDefinition)sourceImage);
      }
      else if (sourceImage == null) {
        return getScriptQualifiedName(position);
      }
      return null;
    }
    finally {
      accessToken.finish();
    }
  }

  /**
   * Builds the runtime JVM class name: nested classes use '$' separators,
   * Groovy traits get the "$Trait$Helper" suffix, and script classes may be
   * renamed by ScriptPositionManagerHelper extensions.
   */
  @Nullable
  private static String getClassNameForJvm(@NotNull final PsiClass typeDefinition) {
    String suffix = typeDefinition instanceof GrTypeDefinition && ((GrTypeDefinition)typeDefinition).isTrait() ? "$Trait$Helper" : "";

    final PsiClass psiClass = typeDefinition.getContainingClass();
    if (psiClass != null) {
      String parent = getClassNameForJvm(psiClass);
      return parent == null ? null : parent + "$" + typeDefinition.getName() + suffix;
    }

    PsiFile file = typeDefinition.getContainingFile();
    if (file instanceof GroovyFile && ((GroovyFile)file).isScript()) {
      for (ScriptPositionManagerHelper helper : ScriptPositionManagerHelper.EP_NAME.getExtensions()) {
        String s = helper.isAppropriateScriptFile((GroovyFile)file) ? helper.customizeClassName(typeDefinition) : null;
        if (s != null) {
          return s;
        }
      }
    }

    String qname = typeDefinition.getQualifiedName();
    return qname == null ? null : qname + suffix;
  }

  /** Qualified name of the file's script class, or null for non-script files. */
  @Nullable
  private static String getScriptQualifiedName(@NotNull SourcePosition position) {
    PsiFile file = position.getFile();
    if (file instanceof GroovyFile) {
      return getScriptFQName((GroovyFile)file);
    }
    return null;
  }

  /**
   * Maps a JDI location back to a source position in the project.
   * @throws NoDataException when no matching Groovy source can be found
   */
  @Override
  public SourcePosition getSourcePosition(@Nullable final Location location) throws NoDataException {
    if (location == null) throw NoDataException.INSTANCE;
    if (LOG.isDebugEnabled()) {
      LOG.debug("getSourcePosition: " + location);
    }

    PsiFile psiFile = getPsiFileByLocation(getDebugProcess().getProject(), location);
    if (psiFile == null) throw NoDataException.INSTANCE;

    int lineNumber = calcLineIndex(location);
    if (lineNumber < 0) throw NoDataException.INSTANCE;
    return SourcePosition.createFromLine(psiFile, lineNumber);
  }

  /** 0-based source line for a location, or -1 when unavailable. */
  private int calcLineIndex(Location location) {
    LOG.assertTrue(myDebugProcess != null);
    if (location == null) return -1;
    return DebuggerUtilsEx.getLineNumber(location, true);
  }

  /**
   * Resolves the PSI file that declares the location's type. Strips the
   * "$nested" part of the runtime name, lets helpers de-mangle script names,
   * then searches progressively wider scopes; falls back to helper-provided
   * extra scripts when the indexes have nothing.
   */
  @Nullable
  private PsiFile getPsiFileByLocation(@NotNull final Project project, @Nullable final Location location) {
    if (location == null) return null;

    final ReferenceType refType = location.declaringType();
    if (refType == null) return null;

    final String originalQName = refType.name().replace('/', '.');
    // Search by the top-level class: everything after the first '$' is a
    // nested/generated class living in the same source file.
    int dollar = originalQName.indexOf('$');
    String runtimeName = dollar >= 0 ? originalQName.substring(0, dollar) : originalQName;
    String qName = getOriginalQualifiedName(refType, runtimeName);

    GlobalSearchScope searchScope = myDebugProcess.getSearchScope();
    GroovyShortNamesCache cache = GroovyShortNamesCache.getGroovyShortNamesCache(project);
    try {
      // Widen the search step by step: sources-only, then all, then whole
      // project, then module content.
      List<PsiClass> classes = cache.getClassesByFQName(qName, searchScope, true);
      if (classes.isEmpty()) {
        classes = cache.getClassesByFQName(qName, searchScope, false);
      }
      if (classes.isEmpty()) {
        classes = cache.getClassesByFQName(qName, GlobalSearchScope.projectScope(project), false);
      }
      if (classes.isEmpty()) {
        classes = cache.getClassesByFQName(qName, addModuleContent(searchScope), false);
      }

      if (!classes.isEmpty()) {
        classes.sort(PsiClassUtil.createScopeComparator(searchScope));
        PsiClass clazz = classes.get(0);
        if (clazz != null) return clazz.getContainingFile();
      }
    }
    catch (ProcessCanceledException | IndexNotReadyException e) {
      return null;
    }
    return getExtraScriptIfNotFound(project, refType, runtimeName, searchScope);
  }

  /** Last-resort lookup via ScriptPositionManagerHelper extensions. */
  @Nullable
  private static PsiFile getExtraScriptIfNotFound(@NotNull Project project,
                                                  @NotNull ReferenceType refType,
                                                  @NotNull String runtimeName,
                                                  @NotNull GlobalSearchScope searchScope) {
    for (ScriptPositionManagerHelper helper : ScriptPositionManagerHelper.EP_NAME.getExtensions()) {
      if (helper.isAppropriateRuntimeName(runtimeName)) {
        PsiFile file = helper.getExtraScriptIfNotFound(refType, runtimeName, project, searchScope);
        if (file != null) return file;
      }
    }
    return null;
  }

  /** Widens a module scope to also include its content (non-classpath) roots. */
  private static GlobalSearchScope addModuleContent(GlobalSearchScope scope) {
    if (scope instanceof ModuleWithDependenciesScope) {
      Module module = ((ModuleWithDependenciesScope)scope).getModule();
      if (!module.isDisposed()) {
        return scope.uniteWith(module.getModuleContentWithDependenciesScope());
      }
    }
    return scope;
  }

  /**
   * Lets helpers translate a runtime (possibly mangled) script class name back
   * to the original source name; returns the runtime name unchanged otherwise.
   */
  private static String getOriginalQualifiedName(@NotNull ReferenceType refType, @NotNull String runtimeName) {
    for (ScriptPositionManagerHelper helper : ScriptPositionManagerHelper.EP_NAME.getExtensions()) {
      if (helper.isAppropriateRuntimeName(runtimeName)) {
        String originalScriptName = helper.getOriginalScriptName(refType, runtimeName);
        if (originalScriptName != null) return originalScriptName;
      }
    }
    return runtimeName;
  }

  /**
   * All loaded JDI types that correspond to the code at {@code position}:
   * the named type itself, the script class, or -- for closures -- nested
   * classes of the enclosing type matched back to the source element.
   */
  @Override
  @NotNull
  public List<ReferenceType> getAllClasses(@NotNull final SourcePosition position) throws NoDataException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("getAllClasses: " + position);
    }
    checkGroovyFile(position);

    List<ReferenceType> result = ReadAction.compute(() -> {
      GroovyPsiElement sourceImage = findReferenceTypeSourceImage(position);

      if (sourceImage instanceof GrTypeDefinition && !((GrTypeDefinition)sourceImage).isAnonymous()) {
        String qName = getClassNameForJvm((GrTypeDefinition)sourceImage);
        if (qName != null) return myDebugProcess.getVirtualMachineProxy().classesByName(qName);
      }
      else if (sourceImage == null) {
        final String scriptName = getScriptQualifiedName(position);
        if (scriptName != null) return myDebugProcess.getVirtualMachineProxy().classesByName(scriptName);
      }
      else {
        // Closure (or anonymous class): scan nested types of the enclosing
        // class for the one generated from this source element.
        String enclosingName = findEnclosingName(position);
        if (enclosingName == null) return null;

        final List<ReferenceType> outers = myDebugProcess.getVirtualMachineProxy().classesByName(enclosingName);
        final List<ReferenceType> result1 = new ArrayList<>(outers.size());
        for (ReferenceType outer : outers) {
          final ReferenceType nested = findNested(outer, sourceImage, position);
          if (nested != null) {
            result1.add(nested);
          }
        }
        return result1;
      }
      return null;
    });

    if (LOG.isDebugEnabled()) {
      LOG.debug("getAllClasses = " + result);
    }
    if (result == null) throw NoDataException.INSTANCE;
    return result;
  }

  /** Fully qualified runtime name of a Groovy script class. */
  @Nullable
  private static String getScriptFQName(@NotNull GroovyFile groovyFile) {
    String packageName = groovyFile.getPackageName();
    String fileName = getRuntimeScriptName(groovyFile);
    return StringUtil.getQualifiedName(packageName, fileName);
  }

  /**
   * Simple (unqualified) runtime name of a script class; helpers may override
   * it, otherwise it defaults to the file name without extension.
   */
  @Nullable
  private static String getRuntimeScriptName(@NotNull GroovyFile groovyFile) {
    if (groovyFile.isScript()) {
      for (ScriptPositionManagerHelper helper : ScriptPositionManagerHelper.EP_NAME.getExtensions()) {
        if (helper.isAppropriateScriptFile(groovyFile)) {
          String runtimeScriptName = helper.getRuntimeScriptName(groovyFile);
          if (runtimeScriptName != null) return runtimeScriptName;
        }
      }
    }
    VirtualFile vFile = groovyFile.getVirtualFile();
    assert vFile != null;
    return vFile.getNameWithoutExtension();
  }

  /**
   * Depth-first search through nested types for the one generated from
   * {@code toFind}: matches either by the breakpoint line or by mapping the
   * type's first code location back to a PSI element.
   */
  @Nullable
  private ReferenceType findNested(ReferenceType fromClass, final GroovyPsiElement toFind, SourcePosition classPosition) {
    final VirtualMachineProxy vmProxy = myDebugProcess.getVirtualMachineProxy();
    if (fromClass.isPrepared()) {
      final List<ReferenceType> nestedTypes = vmProxy.nestedTypes(fromClass);

      for (ReferenceType nested : nestedTypes) {
        final ReferenceType found = findNested(nested, toFind, classPosition);
        if (found != null) {
          return found;
        }
      }

      try {
        final int lineNumber = classPosition.getLine() + 1;
        if (!fromClass.locationsOfLine(lineNumber).isEmpty()) {
          return fromClass;
        }
        //noinspection LoopStatementThatDoesntLoop
        for (Location location : fromClass.allLineLocations()) {
          final SourcePosition candidateFirstPosition = SourcePosition.createFromLine(toFind.getContainingFile(), location.lineNumber() - 1);
          if (toFind.equals(findReferenceTypeSourceImage(candidateFirstPosition))) {
            return fromClass;
          }
          break; // isApplicable only the first location
        }
      }
      catch (AbsentInformationException ignored) {
        // No debug info for this type -- fall through and report no match.
      }
    }
    return null;
  }

  @NotNull
  @Override
  public Set<? extends FileType> getAcceptedFileTypes() {
    return ourFileTypes;
  }
}
/* * Hibernate, Relational Persistence for Idiomatic Java * * Copyright (c) 2010, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU * Lesser General Public License, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this distribution; if not, write to: * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ package org.hibernate.mapping; import java.io.Serializable; import java.util.ArrayList; import java.util.Iterator; import org.hibernate.sql.Alias; import org.hibernate.engine.ExecuteUpdateResultCheckStyle; /** * @author Gavin King */ public class Join implements Serializable { private static final Alias PK_ALIAS = new Alias(15, "PK"); private ArrayList properties = new ArrayList(); private ArrayList declaredProperties = new ArrayList(); private Table table; private KeyValue key; private PersistentClass persistentClass; private boolean sequentialSelect; private boolean inverse; private boolean optional; // Custom SQL private String customSQLInsert; private boolean customInsertCallable; private ExecuteUpdateResultCheckStyle insertCheckStyle; private String customSQLUpdate; private boolean customUpdateCallable; private ExecuteUpdateResultCheckStyle updateCheckStyle; private String customSQLDelete; private boolean customDeleteCallable; private ExecuteUpdateResultCheckStyle 
deleteCheckStyle; public void addProperty(Property prop) { properties.add(prop); declaredProperties.add(prop); prop.setPersistentClass( getPersistentClass() ); } public void addMappedsuperclassProperty(Property prop) { properties.add(prop); prop.setPersistentClass( getPersistentClass() ); } public Iterator getDeclaredPropertyIterator() { return declaredProperties.iterator(); } public boolean containsProperty(Property prop) { return properties.contains(prop); } public Iterator getPropertyIterator() { return properties.iterator(); } public Table getTable() { return table; } public void setTable(Table table) { this.table = table; } public KeyValue getKey() { return key; } public void setKey(KeyValue key) { this.key = key; } public PersistentClass getPersistentClass() { return persistentClass; } public void setPersistentClass(PersistentClass persistentClass) { this.persistentClass = persistentClass; } public void createForeignKey() { getKey().createForeignKeyOfEntity( persistentClass.getEntityName() ); } public void createPrimaryKey() { //Primary key constraint PrimaryKey pk = new PrimaryKey(); pk.setTable(table); pk.setName( PK_ALIAS.toAliasString( table.getName() ) ); table.setPrimaryKey(pk); pk.addColumns( getKey().getColumnIterator() ); } public int getPropertySpan() { return properties.size(); } public void setCustomSQLInsert(String customSQLInsert, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) { this.customSQLInsert = customSQLInsert; this.customInsertCallable = callable; this.insertCheckStyle = checkStyle; } public String getCustomSQLInsert() { return customSQLInsert; } public boolean isCustomInsertCallable() { return customInsertCallable; } public ExecuteUpdateResultCheckStyle getCustomSQLInsertCheckStyle() { return insertCheckStyle; } public void setCustomSQLUpdate(String customSQLUpdate, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) { this.customSQLUpdate = customSQLUpdate; this.customUpdateCallable = callable; 
this.updateCheckStyle = checkStyle; } public String getCustomSQLUpdate() { return customSQLUpdate; } public boolean isCustomUpdateCallable() { return customUpdateCallable; } public ExecuteUpdateResultCheckStyle getCustomSQLUpdateCheckStyle() { return updateCheckStyle; } public void setCustomSQLDelete(String customSQLDelete, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) { this.customSQLDelete = customSQLDelete; this.customDeleteCallable = callable; this.deleteCheckStyle = checkStyle; } public String getCustomSQLDelete() { return customSQLDelete; } public boolean isCustomDeleteCallable() { return customDeleteCallable; } public ExecuteUpdateResultCheckStyle getCustomSQLDeleteCheckStyle() { return deleteCheckStyle; } public boolean isSequentialSelect() { return sequentialSelect; } public void setSequentialSelect(boolean deferred) { this.sequentialSelect = deferred; } public boolean isInverse() { return inverse; } public void setInverse(boolean leftJoin) { this.inverse = leftJoin; } public String toString() { return getClass().getName() + '(' + table.toString() + ')'; } public boolean isLazy() { Iterator iter = getPropertyIterator(); while ( iter.hasNext() ) { Property prop = (Property) iter.next(); if ( !prop.isLazy() ) return false; } return true; } public boolean isOptional() { return optional; } public void setOptional(boolean nullable) { this.optional = nullable; } }
package org.praisenter.ui.song;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.praisenter.Constants;
import org.praisenter.data.json.JsonIO;
import org.praisenter.data.song.Author;
import org.praisenter.data.song.Lyrics;
import org.praisenter.data.song.Section;
import org.praisenter.data.song.Song;
import org.praisenter.data.song.SongBook;
import org.praisenter.ui.Action;
import org.praisenter.ui.BulkEditConverter;
import org.praisenter.ui.BulkEditParseException;
import org.praisenter.ui.DataFormats;
import org.praisenter.ui.GlobalContext;
import org.praisenter.ui.document.DocumentContext;
import org.praisenter.ui.document.DocumentEditor;
import org.praisenter.ui.events.ActionStateChangedEvent;
import org.praisenter.ui.translations.Translations;
import org.praisenter.ui.undo.UndoManager;

import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.collections.ListChangeListener;
import javafx.css.PseudoClass;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Label;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.TextArea;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.input.Clipboard;
import javafx.scene.input.ClipboardContent;
import javafx.scene.input.DataFormat;
import javafx.scene.input.DragEvent;
import javafx.scene.input.Dragboard;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.TransferMode;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.StackPane;
import javafx.scene.layout.VBox;

//FEATURE (L-L) Implement import from PDF for song lyrics
//FEATURE (L-L) Implement upload of chord sheet for display on a teleprompter
//JAVABUG (L) 11/03/16 Dragging to the edge of a scrollable window doesn't scroll it and there's no good way to scroll it manually

/**
 * Editor for a {@link Song} document. Presents the song as a multi-select
 * {@link TreeView} (lyrics / authors / songbooks / sections) with context-menu
 * actions, clipboard copy/cut/paste, drag-and-drop reordering, and a text-based
 * "bulk edit" mode for whole-lyrics editing. All structural changes are routed
 * through the document's {@link UndoManager}.
 */
public final class SongEditor extends BorderPane implements DocumentEditor<Song> {
    private static final Logger LOGGER = LogManager.getLogger();

    // CSS style classes for the editor and its bulk-edit sub-panels
    private static final String SONG_EDITOR_CSS = "p-song-editor";
    private static final String SONG_EDITOR_BULK_CSS = "p-song-editor-bulk";
    private static final String SONG_EDITOR_BULK_BUTTONS_CSS = "p-song-editor-bulk-buttons";

    // pseudo classes used to highlight drop targets during drag-and-drop
    private static final PseudoClass DRAG_OVER_PARENT = PseudoClass.getPseudoClass("drag-over-parent");
    private static final PseudoClass DRAG_OVER_SIBLING_TOP = PseudoClass.getPseudoClass("drag-over-sibling-top");
    private static final PseudoClass DRAG_OVER_SIBLING_BOTTOM = PseudoClass.getPseudoClass("drag-over-sibling-bottom");

    // data
    private final GlobalContext context;
    private final DocumentContext<Song> document;

    // helpers
    private final Song song;
    private final UndoManager undoManager;

    // nodes
    private final TreeView<Object> treeView;

    // bulk-edit state: the text being edited and the last parse/apply error (empty = no error)
    private final StringProperty bulkEditModeValue;
    private final StringProperty bulkEditModeError;

    /**
     * Builds the editor UI for the given song document: the tree view with its
     * cell factory, drag handlers, context menu, and the overlaid bulk-edit pane.
     *
     * @param context the global application context
     * @param document the document being edited
     */
    public SongEditor(
            GlobalContext context,
            DocumentContext<Song> document) {
        this.getStyleClass().add(SONG_EDITOR_CSS);

        this.context = context;
        this.document = document;

        // set the helpers
        this.song = document.getDocument();
        this.undoManager = document.getUndoManager();

        this.bulkEditModeValue = new SimpleStringProperty();
        this.bulkEditModeError = new SimpleStringProperty();

        // the tree: root is the song itself
        SongTreeItem root = new SongTreeItem();
        root.setValue(this.song);

        this.treeView = new TreeView<Object>(root);
        this.treeView.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
        this.treeView.setCellFactory((view) -> {
            // every cell participates in drag-and-drop; handlers are this editor's methods
            SongTreeCell cell = new SongTreeCell();
            cell.setOnDragDetected(this::dragDetected);
            cell.setOnDragExited(this::dragExited);
            cell.setOnDragEntered(this::dragEntered);
            cell.setOnDragOver(this::dragOver);
            cell.setOnDragDropped(this::dragDropped);
            cell.setOnDragDone(this::dragDone);
            return cell;
        });

        // mirror tree selection into the document's selected-items list (nulls filtered out)
        this.treeView.getSelectionModel().getSelectedItems().addListener((ListChangeListener.Change<? extends TreeItem<Object>> change) -> {
            // set the selected items
            document.getSelectedItems().setAll(this.treeView
                    .getSelectionModel()
                    .getSelectedItems()
                    .stream().filter(i -> i != null && i.getValue() != null)
                    .map(i -> i.getValue())
                    .collect(Collectors.toList()));
        });

        ContextMenu menu = new ContextMenu();
        menu.getItems().addAll(
                this.createMenuItem(Action.BULK_EDIT_BEGIN),
                new SeparatorMenuItem(),
                this.createMenuItem(Action.NEW_LYRICS),
                this.createMenuItem(Action.NEW_AUTHOR),
                this.createMenuItem(Action.NEW_SONGBOOK),
                this.createMenuItem(Action.NEW_SECTION),
                new SeparatorMenuItem(),
                this.createMenuItem(Action.COPY),
                this.createMenuItem(Action.CUT),
                this.createMenuItem(Action.PASTE),
                new SeparatorMenuItem(),
                this.createMenuItem(Action.DELETE)
        );
        this.treeView.setContextMenu(menu);

        // when the menu is shown, update the enabled/disable state
        menu.showingProperty().addListener((obs, ov, nv) -> {
            if (nv) {
                // update the enable state
                for (MenuItem mnu : menu.getItems()) {
                    Action action = (Action)mnu.getUserData();
                    if (action != null) {
                        boolean isEnabled = this.isActionEnabled(action);
                        mnu.setDisable(!isEnabled);
                    }
                }
            }
        });

        // build the bulk edit UI
        TextArea textArea = new TextArea();
        textArea.setWrapText(false);
        textArea.textProperty().bindBidirectional(this.bulkEditModeValue);
        Button btnOk = new Button(Translations.get("ok"));
        btnOk.minWidthProperty().bind(btnOk.prefWidthProperty());
        Button btnCancel = new Button(Translations.get("cancel"));
        btnCancel.minWidthProperty().bind(btnCancel.prefWidthProperty());
        Label lblError = new Label();
        lblError.getStyleClass().add("p-error-label");
        lblError.setMaxWidth(Double.MAX_VALUE);
        // the error label only shows when there's an error message
        lblError.textProperty().bind(this.bulkEditModeError);
        lblError.visibleProperty().bind(this.bulkEditModeError.length().greaterThan(0));

        HBox bulkEditorButtons = new HBox(lblError, btnOk, btnCancel);
        bulkEditorButtons.getStyleClass().add(SONG_EDITOR_BULK_BUTTONS_CSS);
        HBox.setHgrow(lblError, Priority.ALWAYS);
        VBox bulkEditor = new VBox(
                textArea,
                bulkEditorButtons);
        bulkEditor.getStyleClass().add(SONG_EDITOR_BULK_CSS);
        VBox.setVgrow(textArea, Priority.ALWAYS);

        // the bulk editor and the tree view are mutually exclusive, toggled by the document's bulkEdit flag
        bulkEditor.visibleProperty().bind(document.bulkEditProperty());
        this.treeView.visibleProperty().bind(document.bulkEditProperty().not());

        StackPane editorStack = new StackPane(this.treeView, bulkEditor);

        // OK applies the bulk edit; on parse/apply failure the error is surfaced and the mode stays open
        btnOk.setOnAction(e -> {
            try {
                this.processBulkEdit();
                document.setBulkEdit(false);
                this.bulkEditModeValue.set(null);
                this.bulkEditModeError.set(null);
            } catch (Exception ex) {
                this.bulkEditModeError.set(ex.getMessage());
            }
        });

        // Cancel discards the pending text and leaves bulk-edit mode
        btnCancel.setOnAction(e -> {
            document.setBulkEdit(false);
            this.bulkEditModeValue.set(null);
            this.bulkEditModeError.set(null);
        });

        this.setCenter(editorStack);
    }

    /**
     * Builds a context-menu item for the given action: translated label, optional
     * graphic, and the action stashed in userData so the showing-listener can
     * enable/disable it.
     */
    private MenuItem createMenuItem(Action action) {
        MenuItem mnu = new MenuItem(Translations.get(action.getMessageKey()));
        if (action.getGraphicSupplier() != null) {
            mnu.setGraphic(action.getGraphicSupplier().get());
        }
        // NOTE: due to bug in JavaFX, we don't apply the accelerator here
        //mnu.setAccelerator(value);
        mnu.setOnAction(e -> this.executeAction(action));
        mnu.setUserData(action);
        return mnu;
    }

    @Override
    public DocumentContext<Song> getDocumentContext() {
        return this.document;
    }

    @Override
    public void setDefaultFocus() {
        this.treeView.requestFocus();
    }

    /**
     * Dispatches the given action to the matching internal handler; unknown
     * actions complete immediately as a no-op.
     */
    @Override
    public CompletableFuture<Void> executeAction(Action action) {
        switch (action) {
            case COPY:
                return this.copy(false);
            case PASTE:
                return this.paste();
            case CUT:
                return this.copy(true);
            case DELETE:
                return this.delete();
            case NEW_AUTHOR:
            case NEW_LYRICS:
            case NEW_SECTION:
            case NEW_SONGBOOK:
                return this.create(action);
            case BULK_EDIT_BEGIN:
                return this.beginBulkEdit();
            default:
                return CompletableFuture.completedFuture(null);
        }
    }

    /**
     * Determines whether an action applies to the current selection. "Container"
     * nodes are the synthetic grouping nodes in the tree (authors / songbooks /
     * sections); their element type is examined via {@code Container#getType()}.
     */
    @Override
    public boolean isActionEnabled(Action action) {
        DocumentContext<Song> ctx = this.document;
        final Object selected = ctx.getSelectedItem();
        Class<?> selectedType = ctx.getSelectedType();
        Class<?> containerType = null;
        if (selected != null && selectedType == Container.class) {
            containerType = ((Container) selected).getType();
        }
        switch (action) {
            case COPY:
                // only homogeneous, non-root, non-container selections can be copied
                return ctx.isSingleTypeSelected() && selectedType != Song.class && containerType == null;
            case CUT:
                return ctx.isSingleTypeSelected() && selectedType != Song.class && containerType == null;
            case PASTE:
                // enabled when the clipboard format matches what the selected node (or its container type) can accept
                return (selectedType == Song.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_LYRICS_ARRAY)) ||
                        (selectedType == Lyrics.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_LYRICS_ARRAY)) ||
                        (selectedType == Section.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SECTION_ARRAY)) ||
                        (selectedType == Author.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY)) ||
                        (selectedType == SongBook.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY)) ||
                        (selectedType == Lyrics.class && (
                                Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY) ||
                                Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY) ||
                                Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SECTION_ARRAY))) ||
                        ((containerType == Author.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY)) ||
                        (containerType == Section.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SECTION_ARRAY)) ||
                        (containerType == SongBook.class && Clipboard.getSystemClipboard().hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY)));
            case DELETE:
                return ctx.getSelectedCount() > 0 && selectedType != Song.class && containerType == null;
            case NEW_LYRICS:
                return ctx.getSelectedCount() == 1 && (selectedType == Song.class || selectedType == Lyrics.class);
            case NEW_AUTHOR:
                return ctx.getSelectedCount() == 1 && (selectedType == Lyrics.class || selectedType == Author.class || containerType == Author.class);
            case NEW_SECTION:
                return ctx.getSelectedCount() == 1 && (selectedType == Lyrics.class || selectedType == Section.class || (containerType == Section.class));
            case NEW_SONGBOOK:
                return ctx.getSelectedCount() == 1 && (selectedType == Lyrics.class || selectedType == SongBook.class || (containerType == SongBook.class));
            case SAVE:
                return ctx.hasUnsavedChanges();
            case REDO:
                return ctx.getUndoManager().isRedoAvailable();
            case UNDO:
                return ctx.getUndoManager().isUndoAvailable();
            case BULK_EDIT_BEGIN:
                // bulk edit works on a single lyrics node or a sections container
                return ctx.getSelectedCount() == 1 && (selectedType == Lyrics.class || (containerType == Section.class));
            default:
                return false;
        }
    }

    @Override
    public boolean isActionVisible(Action action) {
        // specifically show these actions
        switch (action) {
            case NEW_LYRICS:
            case NEW_SECTION:
            case NEW_SONGBOOK:
            case NEW_AUTHOR:
            case BULK_EDIT_BEGIN:
                return true;
            default:
                return false;
        }
    }

    // internal methods

    /**
     * Enters bulk-edit mode for the selected lyrics (or the lyrics owning the
     * selected sections container), seeding the text area with the serialized form.
     */
    private CompletableFuture<Void> beginBulkEdit() {
        DocumentContext<Song> ctx = this.document;
        final Object selected = ctx.getSelectedItem();
        Class<?> selectedType = ctx.getSelectedType();
        Class<?> containerType = null;
        if (selected != null && selectedType == Container.class) {
            containerType = ((Container) selected).getType();
        }
        if (selectedType == Lyrics.class) {
            BulkEditConverter<Lyrics> tx = new LyricsBulkEditConverter();
            this.bulkEditModeValue.set(tx.toString((Lyrics)selected));
            this.document.setBulkEdit(true);
        } else if (containerType == Section.class) {
            // a sections container's tree parent is its owning Lyrics node
            Object lyrics = this.treeView.getSelectionModel().getSelectedItem().getParent().getValue();
            BulkEditConverter<Lyrics> tx = new LyricsBulkEditConverter();
            this.bulkEditModeValue.set(tx.toString((Lyrics)lyrics));
            this.document.setBulkEdit(true);
        }
        return CompletableFuture.completedFuture(null);
    }
/**
 * Applies the bulk-edit text to the targeted lyrics: parses the text back into a
 * {@link Lyrics}, then replaces the title and sections inside a single undo batch.
 *
 * @throws BulkEditParseException if the text cannot be parsed
 */
private void processBulkEdit() throws BulkEditParseException {
    DocumentContext<Song> ctx = this.document;
    final Object selected = ctx.getSelectedItem();
    Class<?> selectedType = ctx.getSelectedType();
    Class<?> containerType = null;
    if (selected != null && selectedType == Container.class) {
        containerType = ((Container) selected).getType();
    }
    // resolve the lyrics being edited: either selected directly or the parent of a sections container
    Lyrics lyrics = null;
    if (selectedType == Lyrics.class) {
        lyrics = (Lyrics)selected;
    } else if (containerType == Section.class) {
        lyrics = (Lyrics)this.treeView.getSelectionModel().getSelectedItem().getParent().getValue();
    }
    if (lyrics != null) {
        String result = this.bulkEditModeValue.get();
        BulkEditConverter<Lyrics> converter = new LyricsBulkEditConverter();
        Lyrics edits = converter.fromString(result);
        // apply title + sections as one undoable operation
        UndoManager um = this.document.getUndoManager();
        um.beginBatch("LyricsBulkEdit");
        lyrics.setTitle(edits.getTitle());
        lyrics.getSections().setAll(edits.getSections());
        um.completeBatch();
    } else {
        LOGGER.error("Bulk edit failed to apply because the selected item wasn't what we expected (an instance of Lyrics)");
    }
}

/**
 * Deletes all selected items from their owners inside one undo batch; the batch
 * is discarded if any removal throws. Container nodes and the song root are
 * intentionally left alone.
 */
private CompletableFuture<Void> delete() {
    // snapshot the selection before clearing it (clearing mutates the observable list)
    List<TreeItem<Object>> selected = new ArrayList<>(this.treeView.getSelectionModel().getSelectedItems());
    this.treeView.getSelectionModel().clearSelection();
    this.undoManager.beginBatch("Delete");
    try {
        for (TreeItem<Object> item : selected) {
            Object value = item.getValue();
            TreeItem<Object> parentItem = item.getParent();
            if (parentItem != null) {
                Object parent = parentItem.getValue();
                if (parent != null) {
                    if (parent instanceof Lyrics) {
                        // do nothing
                    } else if (parent instanceof Container) {
                        // items under a container belong to the container's parent Lyrics
                        Lyrics lyrics = ((Lyrics) parentItem.getParent().getValue());
                        if (value instanceof Author) {
                            lyrics.getAuthors().remove(value);
                        } else if (value instanceof SongBook) {
                            lyrics.getSongBooks().remove(value);
                        } else if (value instanceof Section) {
                            lyrics.getSections().remove(value);
                        }
                    } else if (parent instanceof Song) {
                        ((Song) parent).getLyrics().remove(value);
                    }
                }
            }
        }
        this.undoManager.completeBatch();
    } catch (Exception ex) {
        LOGGER.error("Failed to delete the selected items", ex);
        this.undoManager.discardBatch();
    }
    return CompletableFuture.completedFuture(null);
}

/**
 * Creates a new lyrics/author/songbook/section based on the given NEW_* action.
 * For non-lyrics creations the target Lyrics is the closest ancestor of the
 * current selection.
 */
private CompletableFuture<Void> create(Action action) {
    TreeItem<Object> item = this.treeView.getSelectionModel().getSelectedItem();
    Lyrics lyrics = this.findClosestLyrics(item);
    switch (action) {
        case NEW_LYRICS:
            // new lyrics are seeded with the OS user as author and a default section
            Lyrics newLyrics = new Lyrics();
            newLyrics.setLanguage(Locale.getDefault().toLanguageTag());
            newLyrics.setOriginal(true);
            newLyrics.setTitle(this.song.getName());
            newLyrics.getAuthors().add(new Author(System.getProperty("user.name"), Author.TYPE_LYRICS));
            newLyrics.getSections().add(new Section(Translations.get("song.lyrics.section.name.default"), Translations.get("song.lyrics.section.text.default")));
            this.song.getLyrics().add(newLyrics);
            break;
        case NEW_AUTHOR:
            if (this.document.getSelectedCount() == 1 && lyrics != null) {
                lyrics.getAuthors().add(new Author(System.getProperty("user.name"), Author.TYPE_LYRICS));
            }
            break;
        case NEW_SONGBOOK:
            if (this.document.getSelectedCount() == 1 && lyrics != null) {
                lyrics.getSongBooks().add(new SongBook());
            }
            break;
        case NEW_SECTION:
            if (this.document.getSelectedCount() == 1 && lyrics != null) {
                lyrics.getSections().add(new Section(Translations.get("song.lyrics.section.name.default"), Translations.get("song.lyrics.section.text.default")));
            }
            break;
        default:
            break;
    }
    return CompletableFuture.completedFuture(null);
}

/**
 * Walks up the tree from the given item and returns the first {@link Lyrics}
 * value found, or null if none (e.g. starting at the song root).
 */
private Lyrics findClosestLyrics(TreeItem<Object> item) {
    if (item == null) return null;
    Object value = item.getValue();
    if (value != null && value instanceof Lyrics) {
        return (Lyrics)value;
    }
    return findClosestLyrics(item.getParent());
}

/**
 * Builds clipboard content for the current selection: a plain-text rendering plus
 * a typed JSON payload under the matching Praisenter data format.
 *
 * @param serializeData false for drag-and-drop, where the JSON payload is not needed
 * @throws Exception if JSON serialization fails
 */
// NOTE(review): if the selected type is none of Author/SongBook/Section/Lyrics,
// 'format' stays null and content.put(null, data) will throw — presumably callers
// guard via isActionEnabled/dragDetected; confirm.
private ClipboardContent getClipboardContentForSelection(boolean serializeData) throws Exception {
    List<TreeItem<Object>> items = this.treeView.getSelectionModel().getSelectedItems();
    List<Object> objectData = items.stream().map(i -> i.getValue()).collect(Collectors.toList());
    // in the case of Drag n' Drop, we don't need to serialize it
    String data = serializeData ? JsonIO.write(objectData) : "NA";
    List<String> textData = new ArrayList<>();
    DataFormat format = null;
    Class<?> clazz = this.document.getSelectedType();
    if (clazz == Author.class) {
        format = DataFormats.PRAISENTER_AUTHOR_ARRAY;
        textData = items.stream().map(b -> ((Author)b.getValue()).getName()).collect(Collectors.toList());
    } else if (clazz == SongBook.class) {
        format = DataFormats.PRAISENTER_SONGBOOK_ARRAY;
        textData = items.stream().map(c -> ((SongBook)c.getValue()).toString()).collect(Collectors.toList());
    } else if (clazz == Section.class) {
        format = DataFormats.PRAISENTER_SECTION_ARRAY;
        textData = items.stream().map(v -> ((Section)v.getValue()).getText()).collect(Collectors.toList());
    } else if (clazz == Lyrics.class) {
        format = DataFormats.PRAISENTER_LYRICS_ARRAY;
        textData = items.stream().map(v -> ((Lyrics)v.getValue()).getTitle()).collect(Collectors.toList());
    }
    ClipboardContent content = new ClipboardContent();
    content.putString(String.join(Constants.NEW_LINE, textData));
    content.put(format, data);
    return content;
}

/**
 * Copies (or cuts, when {@code isCut}) the current selection to the system
 * clipboard. Cut also removes the items from their owning Song/Lyrics, then an
 * {@link ActionStateChangedEvent} is fired so clipboard-dependent actions refresh.
 */
private CompletableFuture<Void> copy(boolean isCut) {
    Class<?> clazz = this.document.getSelectedType();
    // the song root itself is never copyable
    if (clazz != null && clazz != Song.class) {
        List<TreeItem<Object>> items = this.treeView.getSelectionModel().getSelectedItems();
        List<Object> objectData = items.stream().map(i -> i.getValue()).collect(Collectors.toList());
        try {
            ClipboardContent content = this.getClipboardContentForSelection(true);
            Clipboard clipboard = Clipboard.getSystemClipboard();
            clipboard.setContent(content);
            if (isCut) {
                // Lyrics hang off the Song (one tree level up); the others hang off a
                // Lyrics two levels up (their container node is in between)
                if (clazz == Lyrics.class) {
                    Object parent = items.get(0).getParent().getValue();
                    ((Song)parent).getLyrics().removeAll(objectData);
                } else if (clazz == Author.class) {
                    Object parent = items.get(0).getParent().getParent().getValue();
                    ((Lyrics)parent).getAuthors().removeAll(objectData);
                } else if (clazz == SongBook.class) {
                    Object parent = items.get(0).getParent().getParent().getValue();
                    ((Lyrics)parent).getSongBooks().removeAll(objectData);
                } else if (clazz == Section.class) {
                    Object parent = items.get(0).getParent().getParent().getValue();
                    ((Lyrics)parent).getSections().removeAll(objectData);
                }
            }
            // handle the selection state changing
            this.fireEvent(new ActionStateChangedEvent(this, this.treeView, ActionStateChangedEvent.CLIPBOARD));
        } catch (Exception ex) {
            LOGGER.warn("Failed to create ClipboardContent for current selection (copy/cut)", ex);
        }
    }
    return CompletableFuture.completedFuture(null);
}

/**
 * Pastes clipboard content relative to the single selected node: lyrics paste
 * onto the song; authors/songbooks/sections paste onto the closest Lyrics.
 */
// NOTE(review): the log message below is missing a closing ')' — runtime string,
// left unchanged here.
private CompletableFuture<Void> paste() {
    if (this.document.getSelectedCount() == 1) {
        Clipboard clipboard = Clipboard.getSystemClipboard();
        TreeItem<Object> selected = this.treeView.getSelectionModel().getSelectedItem();
        final Object value = selected.getValue();
        Class<?> selectedType = value.getClass();
        TreeItem<Object> item = this.treeView.getSelectionModel().getSelectedItem();
        Lyrics lyrics = this.findClosestLyrics(item);
        try {
            if ((selectedType == Song.class || selectedType == Lyrics.class) && clipboard.hasContent(DataFormats.PRAISENTER_LYRICS_ARRAY)) {
                Lyrics[] newLyrics = JsonIO.read((String)clipboard.getContent(DataFormats.PRAISENTER_LYRICS_ARRAY), Lyrics[].class);
                this.song.getLyrics().addAll(newLyrics);
            } else if (lyrics != null && clipboard.hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY)) {
                Author[] authors = JsonIO.read((String)clipboard.getContent(DataFormats.PRAISENTER_AUTHOR_ARRAY), Author[].class);
                lyrics.getAuthors().addAll(authors);
            } else if (lyrics != null && clipboard.hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY)) {
                SongBook[] songbooks = JsonIO.read((String)clipboard.getContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY), SongBook[].class);
                lyrics.getSongBooks().addAll(songbooks);
            } else if (lyrics != null && clipboard.hasContent(DataFormats.PRAISENTER_SECTION_ARRAY)) {
                Section[] sections = JsonIO.read((String)clipboard.getContent(DataFormats.PRAISENTER_SECTION_ARRAY), Section[].class);
                lyrics.getSections().addAll(sections);
            }
            // TODO select the pasted elements
        } catch (Exception ex) {
            LOGGER.warn("Failed to paste clipboard content (likely due to a JSON deserialization error", ex);
        }
    }
    return CompletableFuture.completedFuture(null);
}

/**
 * Starts a drag for a homogeneous, non-container selection; clipboard content is
 * built without the JSON payload (the objects themselves are moved on drop).
 */
private void dragDetected(MouseEvent e) {
    if (this.document.isSingleTypeSelected() && this.document.getSelectedType() != Container.class) {
        try {
            Dragboard db = ((Node)e.getSource()).startDragAndDrop(TransferMode.COPY_OR_MOVE);
            ClipboardContent content = this.getClipboardContentForSelection(false);
            db.setContent(content);
        } catch (Exception ex) {
            LOGGER.warn("Failed to create ClipboardContent for current selection (drag detected)", ex);
        }
    }
}

/** Clears all drop-target highlight pseudo-classes when the drag leaves a cell. */
private void dragExited(DragEvent e) {
    if (e.getSource() instanceof SongTreeCell) {
        SongTreeCell cell = (SongTreeCell)e.getSource();
        cell.pseudoClassStateChanged(DRAG_OVER_PARENT, false);
        cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_BOTTOM, false);
        cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_TOP, false);
    }
}

private void dragEntered(DragEvent e) {
    // nothing to do here
}

/**
 * Validates the hovered cell as a drop target. Accepts MOVE only for compatible
 * dragboard-format / target-type pairs, then highlights the cell as either a
 * parent target or a sibling target (top vs. bottom by cursor position).
 */
private void dragOver(DragEvent e) {
    if (!(e.getSource() instanceof SongTreeCell)) {
        return;
    }
    // don't allow drop onto itself
    SongTreeCell cell = (SongTreeCell)e.getSource();
    TreeItem<Object> item = cell.getTreeItem();
    if (this.treeView.getSelectionModel().getSelectedItems().contains(item)) {
        return;
    }
    // handle null item (happens when you drag onto blank area)
    if (item == null) {
        return;
    }
    // check for null data
    Object data = item.getValue();
    if (data == null) {
        return;
    }
    // don't allow drop onto incorrect locations
    boolean dragAuthors = e.getDragboard().hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY);
    boolean dragLyrics = e.getDragboard().hasContent(DataFormats.PRAISENTER_LYRICS_ARRAY);
    boolean dragSections = e.getDragboard().hasContent(DataFormats.PRAISENTER_SECTION_ARRAY);
    boolean dragSongBooks = e.getDragboard().hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY);
    boolean targetIsSong = data instanceof Song;
    boolean targetIsLyrics = data instanceof Lyrics;
    boolean targetIsContainer = data instanceof Container;
    boolean targetIsAuthor = data instanceof Author;
    boolean targetIsSongBook = data instanceof SongBook;
    boolean targetIsSection = data instanceof Section;
    boolean targetIsAuthorContainer = targetIsContainer && ((Container) data).getType() == Author.class;
    boolean targetIsSongBookContainer = targetIsContainer && ((Container) data).getType() == SongBook.class;
    boolean targetIsSectionContainer = targetIsContainer && ((Container) data).getType() == Section.class;
    boolean isAllowed =
            (dragAuthors && targetIsLyrics) ||
            (dragAuthors && targetIsAuthor) ||
            (dragAuthors && targetIsAuthorContainer) ||
            (dragLyrics && targetIsSong) ||
            (dragLyrics && targetIsLyrics) ||
            (dragSections && targetIsLyrics) ||
            (dragSections && targetIsSection) ||
            (dragSections && targetIsSectionContainer) ||
            (dragSongBooks && targetIsLyrics) ||
            (dragSongBooks && targetIsSongBook) ||
            (dragSongBooks && targetIsSongBookContainer);
    if (!isAllowed) {
        return;
    }
    // allow the transfer
    e.acceptTransferModes(TransferMode.MOVE);
    // parent target = dropping INTO the node; otherwise it's a sibling insertion
    boolean isParent =
            (dragAuthors && targetIsLyrics) ||
            (dragAuthors && targetIsAuthorContainer) ||
            (dragLyrics && targetIsSong) ||
            (dragSections && targetIsLyrics) ||
            (dragSections && targetIsSectionContainer) ||
            (dragSongBooks && targetIsLyrics) ||
            (dragSongBooks && targetIsSongBookContainer);
    if (isParent) {
        cell.pseudoClassStateChanged(DRAG_OVER_PARENT, true);
    } else {
        // top 75% of the cell = insert above; bottom 25% = insert below
        if (e.getY() < cell.getHeight() * 0.75) {
            cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_TOP, true);
            cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_BOTTOM, false);
        } else {
            cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_BOTTOM, true);
            cell.pseudoClassStateChanged(DRAG_OVER_SIBLING_TOP, false);
        }
    }
}

/**
 * Performs the move on drop: removes the dragged items from their previous
 * owners, then re-inserts them at the computed index under the target (as
 * children when dropping onto a parent, as siblings otherwise). The whole move
 * is one undo batch.
 */
private void dragDropped(DragEvent e) {
    // make sure the target is a valid target
    if (!(e.getGestureTarget() instanceof SongTreeCell)) {
        return;
    }
    // copy the selected items
    List<TreeItem<Object>> selected = new ArrayList<>(this.treeView.getSelectionModel().getSelectedItems());
    // check for null data
    SongTreeCell target = (SongTreeCell)e.getGestureTarget();
    TreeItem<Object> targetItem = target.getTreeItem();
    Object targetValue = targetItem.getValue();
    // are we dragging to a parent node?
    boolean dragAuthors = e.getDragboard().hasContent(DataFormats.PRAISENTER_AUTHOR_ARRAY);
    boolean dragLyrics = e.getDragboard().hasContent(DataFormats.PRAISENTER_LYRICS_ARRAY);
    boolean dragSections = e.getDragboard().hasContent(DataFormats.PRAISENTER_SECTION_ARRAY);
    boolean dragSongBooks = e.getDragboard().hasContent(DataFormats.PRAISENTER_SONGBOOK_ARRAY);
    boolean targetIsSong = targetValue instanceof Song;
    boolean targetIsLyrics = targetValue instanceof Lyrics;
    boolean targetIsContainer = targetValue instanceof Container;
    boolean targetIsAuthorContainer = targetIsContainer && ((Container) targetValue).getType() == Author.class;
    boolean targetIsSongBookContainer = targetIsContainer && ((Container) targetValue).getType() == SongBook.class;
    boolean targetIsSectionContainer = targetIsContainer && ((Container) targetValue).getType() == Section.class;
    boolean isParent =
            (dragAuthors && targetIsLyrics) ||
            (dragAuthors && targetIsAuthorContainer) ||
            (dragLyrics && targetIsSong) ||
            (dragSections && targetIsLyrics) ||
            (dragSections && targetIsSectionContainer) ||
            (dragSongBooks && targetIsLyrics) ||
            (dragSongBooks && targetIsSongBookContainer);
    this.undoManager.beginBatch("DragDrop");
    // remove the data from its previous location
    List<Object> items = new ArrayList<>();
    for (TreeItem<Object> item : selected) {
        Object child = item.getValue();
        Object parent = item.getParent().getValue();
        if (parent instanceof Container) {
            // skip over the synthetic container node to the real owner
            parent = item.getParent().getParent().getValue();
        }
        if (child instanceof Lyrics) {
            ((Song)parent).getLyrics().remove(child);
        } else if (child instanceof Author) {
            ((Lyrics)parent).getAuthors().remove(child);
        } else if (child instanceof Section) {
            ((Lyrics)parent).getSections().remove(child);
        } else if (child instanceof SongBook) {
            ((Lyrics)parent).getSongBooks().remove(child);
        }
        items.add(child);
    }
    // now add the data
    Object parent = isParent ? targetValue : targetItem.getParent().getValue();
    int size = targetItem.getChildren().size();
    if (targetIsLyrics) {
        // when dropping onto a Lyrics node, append to the matching model list's end
        if (dragAuthors) size = ((Lyrics)parent).getAuthors().size();
        if (dragSongBooks) size = ((Lyrics)parent).getSongBooks().size();
        if (dragSections) size = ((Lyrics)parent).getSections().size();
    }
    int index = isParent ? size : targetItem.getParent().getChildren().indexOf(targetItem);
    // dropping in the bottom 25% of a sibling inserts after it
    boolean after = e.getY() >= target.getHeight() * 0.75;
    if (!isParent && after) index++;
    if (parent instanceof Container) {
        // resolve through the container node(s) to the actual Lyrics owner
        TreeItem<?> item = targetItem.getParent();
        if (!isParent) item = item.getParent();
        parent = item.getValue();
    }
    if (dragLyrics) {
        ((Song)parent).getLyrics().addAll(index, items.stream().map(i -> (Lyrics)i).collect(Collectors.toList()));
    } else if (dragSections) {
        ((Lyrics)parent).getSections().addAll(index, items.stream().map(i -> (Section)i).collect(Collectors.toList()));
    } else if (dragSongBooks) {
        ((Lyrics)parent).getSongBooks().addAll(index, items.stream().map(i -> (SongBook)i).collect(Collectors.toList()));
    } else if (dragAuthors) {
        ((Lyrics)parent).getAuthors().addAll(index, items.stream().map(i -> (Author)i).collect(Collectors.toList()));
    }
    // TODO the selection doesn't seem consistent (probably because of the containers)
    // int row = (isParent && size > 0
    //     ? this.treeView.getRow(targetItem.getChildren().get(size - 1))
    //     : this.treeView.getRow(targetItem))
    //     + (!isParent && after ? 1 : -items.size());
    this.treeView.getSelectionModel().clearSelection();
    // this.treeView.getSelectionModel().selectRange(row, row + items.size());
    this.undoManager.completeBatch();
    e.setDropCompleted(true);
}

private void dragDone(DragEvent e) {
    // nothing to do
}
}
/* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.renderscript; /** * Class for exposing the native RenderScript byte2 type back to the Android system. * **/ public class Byte2 { public byte x; public byte y; public Byte2() { } public Byte2(byte initX, byte initY) { x = initX; y = initY; } /** @hide */ public Byte2(Byte2 source) { this.x = source.x; this.y = source.y; } /** @hide * Vector add * * @param a */ public void add(Byte2 a) { this.x += a.x; this.y += a.y; } /** @hide * Vector add * * @param a * @param b * @return */ public static Byte2 add(Byte2 a, Byte2 b) { Byte2 result = new Byte2(); result.x = (byte)(a.x + b.x); result.y = (byte)(a.y + b.y); return result; } /** @hide * Vector add * * @param value */ public void add(byte value) { x += value; y += value; } /** @hide * Vector add * * @param a * @param b * @return */ public static Byte2 add(Byte2 a, byte b) { Byte2 result = new Byte2(); result.x = (byte)(a.x + b); result.y = (byte)(a.y + b); return result; } /** @hide * Vector subtraction * * @param a */ public void sub(Byte2 a) { this.x -= a.x; this.y -= a.y; } /** @hide * Vector subtraction * * @param a * @param b * @return */ public static Byte2 sub(Byte2 a, Byte2 b) { Byte2 result = new Byte2(); result.x = (byte)(a.x - b.x); result.y = (byte)(a.y - b.y); return result; } /** @hide * Vector subtraction * * @param value */ public void sub(byte value) { x -= value; y -= value; } /** @hide * Vector 
subtraction * * @param a * @param b * @return */ public static Byte2 sub(Byte2 a, byte b) { Byte2 result = new Byte2(); result.x = (byte)(a.x - b); result.y = (byte)(a.y - b); return result; } /** @hide * Vector multiplication * * @param a */ public void mul(Byte2 a) { this.x *= a.x; this.y *= a.y; } /** @hide * Vector multiplication * * @param a * @param b * @return */ public static Byte2 mul(Byte2 a, Byte2 b) { Byte2 result = new Byte2(); result.x = (byte)(a.x * b.x); result.y = (byte)(a.y * b.y); return result; } /** @hide * Vector multiplication * * @param value */ public void mul(byte value) { x *= value; y *= value; } /** @hide * Vector multiplication * * @param a * @param b * @return */ public static Byte2 mul(Byte2 a, byte b) { Byte2 result = new Byte2(); result.x = (byte)(a.x * b); result.y = (byte)(a.y * b); return result; } /** @hide * Vector division * * @param a */ public void div(Byte2 a) { this.x /= a.x; this.y /= a.y; } /** @hide * Vector division * * @param a * @param b * @return */ public static Byte2 div(Byte2 a, Byte2 b) { Byte2 result = new Byte2(); result.x = (byte)(a.x / b.x); result.y = (byte)(a.y / b.y); return result; } /** @hide * Vector division * * @param value */ public void div(byte value) { x /= value; y /= value; } /** @hide * Vector division * * @param a * @param b * @return */ public static Byte2 div(Byte2 a, byte b) { Byte2 result = new Byte2(); result.x = (byte)(a.x / b); result.y = (byte)(a.y / b); return result; } /** @hide * get vector length * * @return */ public byte length() { return 2; } /** @hide * set vector negate */ public void negate() { this.x = (byte)(-x); this.y = (byte)(-y); } /** @hide * Vector dot Product * * @param a * @return */ public byte dotProduct(Byte2 a) { return (byte)((x * a.x) + (y * a.y)); } /** @hide * Vector dot Product * * @param a * @param b * @return */ public static byte dotProduct(Byte2 a, Byte2 b) { return (byte)((b.x * a.x) + (b.y * a.y)); } /** @hide * Vector add Multiple * * @param a * 
@param factor */ public void addMultiple(Byte2 a, byte factor) { x += a.x * factor; y += a.y * factor; } /** @hide * set vector value by Byte2 * * @param a */ public void set(Byte2 a) { this.x = a.x; this.y = a.y; } /** @hide * set the vector field value by Char * * @param a * @param b */ public void setValues(byte a, byte b) { this.x = a; this.y = b; } /** @hide * return the element sum of vector * * @return */ public byte elementSum() { return (byte)(x + y); } /** @hide * get the vector field value by index * * @param i * @return */ public byte get(int i) { switch (i) { case 0: return x; case 1: return y; default: throw new IndexOutOfBoundsException("Index: i"); } } /** @hide * set the vector field value by index * * @param i * @param value */ public void setAt(int i, byte value) { switch (i) { case 0: x = value; return; case 1: y = value; return; default: throw new IndexOutOfBoundsException("Index: i"); } } /** @hide * add the vector field value by index * * @param i * @param value */ public void addAt(int i, byte value) { switch (i) { case 0: x += value; return; case 1: y += value; return; default: throw new IndexOutOfBoundsException("Index: i"); } } /** @hide * copy the vector to Char array * * @param data * @param offset */ public void copyTo(byte[] data, int offset) { data[offset] = x; data[offset + 1] = y; } }
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.maven;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.support.PropertiesLoaderUtils;
import org.springframework.util.FileCopyUtils;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verification utility for use with maven-invoker-plugin verification scripts.
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 * @author Stephane Nicoll
 */
public final class Verify {

	/** Main class expected in most sample archives. */
	public static final String SAMPLE_APP = "org.test.SampleApplication";

	private Verify() {
	}

	/** Verifies a repackaged jar using the default sample main class. */
	public static void verifyJar(File file) throws Exception {
		new JarArchiveVerification(file, SAMPLE_APP).verify();
	}

	/** Verifies a repackaged (executable) jar with the given Start-Class. */
	public static void verifyJar(File file, String main, String... scriptContents)
			throws Exception {
		verifyJar(file, main, true, scriptContents);
	}

	/**
	 * Verifies a repackaged jar.
	 * @param file the archive
	 * @param main expected Start-Class manifest value
	 * @param executable whether a launch script is expected before the zip data
	 * @param scriptContents substrings expected inside the launch script
	 */
	public static void verifyJar(File file, String main, boolean executable,
			String... scriptContents) throws Exception {
		new JarArchiveVerification(file, main).verify(executable, scriptContents);
	}

	/** Verifies a repackaged war. */
	public static void verifyWar(File file) throws Exception {
		new WarArchiveVerification(file).verify();
	}

	/** Verifies a PropertiesLauncher-style zip archive. */
	public static void verifyZip(File file) throws Exception {
		new ZipArchiveVerification(file).verify();
	}

	/** Verifies a MODULE-layout archive (no launcher, libs under lib/). */
	public static void verifyModule(File file) throws Exception {
		new ModuleArchiveVerification(file).verify();
	}

	/**
	 * Loads a build-info properties file and asserts its coordinates.
	 * @return the loaded properties for further assertions by the caller
	 */
	public static Properties verifyBuildInfo(File file, String group, String artifact,
			String name, String version) throws IOException {
		FileSystemResource resource = new FileSystemResource(file);
		Properties properties = PropertiesLoaderUtils.loadProperties(resource);
		assertThat(properties.get("build.group")).isEqualTo(group);
		assertThat(properties.get("build.artifact")).isEqualTo(artifact);
		assertThat(properties.get("build.name")).isEqualTo(name);
		assertThat(properties.get("build.version")).isEqualTo(version);
		return properties;
	}

	/**
	 * Wraps a {@link ZipFile} and offers entry-level assertions. The full entry
	 * list is snapshotted into a name-keyed map at construction time.
	 */
	public static class ArchiveVerifier {

		private final ZipFile zipFile;

		private final Map<String, ZipEntry> content;

		public ArchiveVerifier(ZipFile zipFile) {
			this.zipFile = zipFile;
			Enumeration<? extends ZipEntry> entries = zipFile.entries();
			this.content = new HashMap<>();
			while (entries.hasMoreElements()) {
				ZipEntry zipEntry = entries.nextElement();
				this.content.put(zipEntry.getName(), zipEntry);
			}
		}

		/** Fails unless at least one entry name starts with the given prefix. */
		public void assertHasEntryNameStartingWith(String entry) {
			for (String name : this.content.keySet()) {
				if (name.startsWith(entry)) {
					return;
				}
			}
			throw new IllegalStateException("Expected entry starting with " + entry);
		}

		/** Fails if any entry name starts with the given prefix. */
		public void assertHasNoEntryNameStartingWith(String entry) {
			for (String name : this.content.keySet()) {
				if (name.startsWith(entry)) {
					throw new IllegalStateException("Entry starting with " + entry
							+ " should not have been found");
				}
			}
		}

		/** Asserts the matching entry is NOT flagged for unpacking. */
		public void assertHasNonUnpackEntry(String entryName) {
			assertThat(hasNonUnpackEntry(entryName))
					.as("Entry starting with " + entryName + " was an UNPACK entry")
					.isTrue();
		}

		/** Asserts the matching entry IS flagged for unpacking. */
		public void assertHasUnpackEntry(String entryName) {
			assertThat(hasUnpackEntry(entryName))
					.as("Entry starting with " + entryName + " was not an UNPACK entry")
					.isTrue();
		}

		private boolean hasNonUnpackEntry(String entryName) {
			return !hasUnpackEntry(entryName);
		}

		private boolean hasUnpackEntry(String entryName) {
			// The repackager records unpack requests in the zip entry comment.
			String comment = getEntryStartingWith(entryName).getComment();
			return comment != null && comment.startsWith("UNPACK:");
		}

		private ZipEntry getEntryStartingWith(String entryName) {
			for (Map.Entry<String, ZipEntry> entry : this.content.entrySet()) {
				if (entry.getKey().startsWith(entryName)) {
					return entry.getValue();
				}
			}
			throw new IllegalStateException(
					"Unable to find entry starting with " + entryName);
		}

		public boolean hasEntry(String entry) {
			return this.content.containsKey(entry);
		}

		public ZipEntry getEntry(String entry) {
			return this.content.get(entry);
		}

		/** Opens the content stream of an exact-named entry; fails if absent. */
		public InputStream getEntryContent(String entry) throws IOException {
			ZipEntry zipEntry = getEntry(entry);
			if (zipEntry == null) {
				throw new IllegalArgumentException("No entry with name [" + entry + "]");
			}
			return this.zipFile.getInputStream(zipEntry);
		}

	}

	/**
	 * Base archive verification: checks optional launch-script prefix, then opens
	 * the archive and delegates entry/manifest checks to subclasses.
	 */
	private static abstract class AbstractArchiveVerification {

		private final File file;

		AbstractArchiveVerification(File file) {
			this.file = file;
		}

		public void verify() throws Exception {
			verify(true);
		}

		public void verify(boolean executable, String... scriptContents)
				throws Exception {
			assertThat(this.file).exists().isFile();

			if (scriptContents.length > 0 && executable) {
				// Everything before the zip local-file-header magic (PK\003\004)
				// is the embedded launch script.
				String contents = new String(FileCopyUtils.copyToByteArray(this.file));
				contents = contents.substring(0, contents
						.indexOf(new String(new byte[] { 0x50, 0x4b, 0x03, 0x04 })));
				for (String content : scriptContents) {
					assertThat(contents).contains(content);
				}
			}

			if (!executable) {
				// A non-executable archive must begin directly with the zip magic.
				String contents = new String(FileCopyUtils.copyToByteArray(this.file));
				assertThat(contents).as("Is executable")
						.startsWith(new String(new byte[] { 0x50, 0x4b, 0x03, 0x04 }));
			}

			try (ZipFile zipFile = new ZipFile(this.file)) {
				ArchiveVerifier verifier = new ArchiveVerifier(zipFile);
				verifyZipEntries(verifier);
			}
		}

		protected void verifyZipEntries(ArchiveVerifier verifier) throws Exception {
			verifyManifest(verifier);
		}

		private void verifyManifest(ArchiveVerifier verifier) throws Exception {
			Manifest manifest = new Manifest(
					verifier.getEntryContent("META-INF/MANIFEST.MF"));
			verifyManifest(manifest);
		}

		protected abstract void verifyManifest(Manifest manifest) throws Exception;

	}

	/** Verification for executable jars (JarLauncher + BOOT-INF layout). */
	public static class JarArchiveVerification extends AbstractArchiveVerification {

		private final String main;

		public JarArchiveVerification(File file, String main) {
			super(file);
			this.main = main;
		}

		@Override
		protected void verifyZipEntries(ArchiveVerifier verifier) throws Exception {
			super.verifyZipEntries(verifier);
			verifier.assertHasEntryNameStartingWith("BOOT-INF/lib/spring-context");
			verifier.assertHasEntryNameStartingWith("BOOT-INF/lib/spring-core");
			verifier.assertHasEntryNameStartingWith("BOOT-INF/lib/javax.servlet-api-3");
			assertThat(verifier
					.hasEntry("org/springframework/boot/loader/JarLauncher.class"))
							.as("Unpacked launcher classes").isTrue();
			assertThat(verifier
					.hasEntry("BOOT-INF/classes/org/test/SampleApplication.class"))
							.as("Own classes").isTrue();
		}

		@Override
		protected void verifyManifest(Manifest manifest) throws Exception {
			assertThat(manifest.getMainAttributes().getValue("Main-Class"))
					.isEqualTo("org.springframework.boot.loader.JarLauncher");
			assertThat(manifest.getMainAttributes().getValue("Start-Class"))
					.isEqualTo(this.main);
			assertThat(manifest.getMainAttributes().getValue("Not-Used"))
					.isEqualTo("Foo");
		}

	}

	/** Verification for executable wars (WarLauncher + WEB-INF layout). */
	public static class WarArchiveVerification extends AbstractArchiveVerification {

		public WarArchiveVerification(File file) {
			super(file);
		}

		@Override
		protected void verifyZipEntries(ArchiveVerifier verifier) throws Exception {
			super.verifyZipEntries(verifier);
			verifier.assertHasEntryNameStartingWith("WEB-INF/lib/spring-context");
			verifier.assertHasEntryNameStartingWith("WEB-INF/lib/spring-core");
			verifier.assertHasEntryNameStartingWith(
					"WEB-INF/lib-provided/javax.servlet-api-3");
			assertThat(verifier
					.hasEntry("org/" + "springframework/boot/loader/JarLauncher.class"))
							.as("Unpacked launcher classes").isTrue();
			assertThat(verifier
					.hasEntry("WEB-INF/classes/org/" + "test/SampleApplication.class"))
							.as("Own classes").isTrue();
			assertThat(verifier.hasEntry("index.html")).as("Web content").isTrue();
		}

		@Override
		protected void verifyManifest(Manifest manifest) throws Exception {
			assertThat(manifest.getMainAttributes().getValue("Main-Class"))
					.isEqualTo("org.springframework.boot.loader.WarLauncher");
			assertThat(manifest.getMainAttributes().getValue("Start-Class"))
					.isEqualTo("org.test.SampleApplication");
			assertThat(manifest.getMainAttributes().getValue("Not-Used"))
					.isEqualTo("Foo");
		}

	}

	/** Verification for PropertiesLauncher zip archives (manifest check only). */
	private static class ZipArchiveVerification extends AbstractArchiveVerification {

		ZipArchiveVerification(File file) {
			super(file);
		}

		@Override
		protected void verifyManifest(Manifest manifest) throws Exception {
			assertThat(manifest.getMainAttributes().getValue("Main-Class"))
					.isEqualTo("org.springframework.boot.loader.PropertiesLauncher");
			assertThat(manifest.getMainAttributes().getValue("Start-Class"))
					.isEqualTo("org.test.SampleApplication");
			assertThat(manifest.getMainAttributes().getValue("Not-Used"))
					.isEqualTo("Foo");
		}

	}

	/** Verification for MODULE layout: libs at lib/, no launcher classes. */
	private static class ModuleArchiveVerification extends AbstractArchiveVerification {

		ModuleArchiveVerification(File file) {
			super(file);
		}

		@Override
		protected void verifyZipEntries(ArchiveVerifier verifier) throws Exception {
			super.verifyZipEntries(verifier);
			verifier.assertHasEntryNameStartingWith("lib/spring-context");
			verifier.assertHasEntryNameStartingWith("lib/spring-core");
			verifier.assertHasNoEntryNameStartingWith("lib/javax.servlet-api-3");
			assertThat(verifier
					.hasEntry("org/" + "springframework/boot/loader/JarLauncher.class"))
							.as("Unpacked launcher classes").isFalse();
			assertThat(verifier.hasEntry("org/" + "test/SampleModule.class"))
					.as("Own classes").isTrue();
		}

		@Override
		protected void verifyManifest(Manifest manifest) throws Exception {
			// MODULE layout archives make no manifest guarantees.
		}

	}

}
package graph;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import core.DisjointSet;

import java.util.PriorityQueue;

/**
 * Undirected weighted graph backed by an adjacency map
 * ({@code vertex -> (neighbour -> weight)}), with Kruskal and Prim MST
 * construction and a recursive depth-first traversal.
 *
 * <p>Not thread-safe. {@code search}/{@code dfs} permanently mark vertices
 * visited; there is no reset, so a second traversal will print nothing new.</p>
 */
public class UndirectedWeightedGraph {

    /** Vertex with the parent/key bookkeeping fields used by Prim's algorithm. */
    private class Vertex {
        public String name;
        public Vertex parent;
        int key;
        // Required for DFS
        public boolean visited = false;

        Vertex(String name) {
            this.name = name;
        }

        @Override
        public String toString() {
            return this.name + " " + Integer.toString(key) + " " + this.parent;
        }
    }

    /** Directed edge record ordered by weight (used for sorting in Kruskal). */
    private class Edge implements Comparable<Edge> {
        public String from;
        public String to;
        public int weight;

        Edge(String from, String to, int weight) {
            this.from = from;
            this.to = to;
            this.weight = weight;
        }

        public int compareTo(Edge edge) {
            return this.weight == edge.weight ? 0 : (this.weight < edge.weight ? -1 : 1);
        }

        @Override
        public String toString() {
            StringBuilder builder = new StringBuilder();
            builder.append(this.from).append(" ---> ").append(this.to).append("(").append(Integer.toString(this.weight))
                    .append(")");
            return builder.toString();
        }
    }

    Map<Vertex, HashMap<Vertex, Integer>> adjList;
    Map<String, Vertex> allVertices;
    // Holds BOTH directions of every undirected edge; harmless for Kruskal
    // because the second direction always fails the disjoint-set test.
    List<Edge> allEdges;
    int vertices;
    int edges;

    public UndirectedWeightedGraph() {
        adjList = new HashMap<Vertex, HashMap<Vertex, Integer>>();
        allVertices = new HashMap<String, Vertex>();
        allEdges = new ArrayList<Edge>();
        vertices = 0;
        edges = 0;
    }

    /** Adds an isolated vertex; no-op if the name already exists. */
    public void addVertex(String name) {
        Vertex v;
        if ((v = allVertices.get(name)) == null) {
            v = new Vertex(name);
            allVertices.put(name, v);
            adjList.put(v, new HashMap<Vertex, Integer>());
            vertices++;
        }
    }

    public boolean hasVertex(String name) {
        return allVertices.containsKey(name);
    }

    /**
     * Adds an undirected edge, creating missing endpoints on demand.
     * No-op if an edge between the endpoints already exists (regardless of weight).
     */
    public void addEdge(String from, String to, int weight) {
        if (hasEdge(from, to)) {
            return;
        }
        Vertex vf = null, vt = null;
        if (!allVertices.containsKey(from)) {
            vf = new Vertex(from);
            allVertices.put(from, vf);
            adjList.put(vf, new HashMap<Vertex, Integer>());
            vertices++;
        } else {
            vf = allVertices.get(from);
        }
        if (!allVertices.containsKey(to)) {
            vt = new Vertex(to);
            allVertices.put(to, vt);
            // Fix: the original executed this put twice; once is enough.
            adjList.put(vt, new HashMap<Vertex, Integer>());
            vertices++;
        } else {
            vt = allVertices.get(to);
        }
        adjList.get(vf).put(vt, weight);
        adjList.get(vt).put(vf, weight);
        allEdges.add(new Edge(from, to, weight));
        allEdges.add(new Edge(to, from, weight));
        edges++;
    }

    public boolean hasEdge(String from, String to) {
        if (!allVertices.containsKey(from) || !allVertices.containsKey(to)) {
            return false;
        }
        return adjList.get(allVertices.get(from)).containsKey(allVertices.get(to));
    }

    /** Prints each vertex followed by its neighbour(weight) list. */
    public void printAdjList() {
        if (allVertices.size() == 0) {
            System.err.println("NO VERTICES");
            return;
        }
        Iterator<Entry<String, Vertex>> itr = allVertices.entrySet().iterator();
        while (itr.hasNext()) {
            Entry<String, Vertex> pair = itr.next();
            System.out.print(pair.getKey() + "-->");
            HashMap<Vertex, Integer> list = adjList.get(pair.getValue());
            if (list.size() == 0) {
                System.out.println("END");
                continue;
            }
            Iterator<Entry<Vertex, Integer>> itr1 = list.entrySet().iterator();
            while (itr1.hasNext()) {
                Map.Entry<Vertex, Integer> pair1 = itr1.next();
                Vertex v = pair1.getKey();
                System.out.print(v.name + "(" + pair1.getValue() + ")" + "-->");
            }
            System.out.print("END");
            System.out.println();
        }
    }

    /**
     * Builds and prints a minimum spanning forest via Kruskal's algorithm
     * (sort edges, union endpoints that are in different sets).
     */
    public void makeKruskalMST() {
        ArrayList<Edge> mstForest = new ArrayList<Edge>();
        DisjointSet<Vertex> dsets = new DisjointSet<Vertex>();
        Iterator<Map.Entry<String, Vertex>> itr = allVertices.entrySet().iterator();
        while (itr.hasNext()) {
            Map.Entry<String, Vertex> entry = itr.next();
            dsets.makeSet(entry.getValue());
        }
        Collections.sort(this.allEdges);
        for (Edge edge : allEdges) {
            Vertex u = allVertices.get(edge.from);
            Vertex v = allVertices.get(edge.to);
            if (dsets.findSet(u) != dsets.findSet(v)) {
                mstForest.add(edge);
                dsets.union(u, v);
            }
        }
        System.out.println(mstForest);
    }

    /**
     * Builds and prints a minimum spanning tree via Prim's algorithm rooted at
     * {@code root}, with verbose tracing to stdout.
     *
     * <p>NOTE(review): mutating {@code v.key} while {@code v} sits inside the
     * {@link PriorityQueue} does not re-heapify it, and {@code queue.contains}
     * is O(n) — this matches the original behavior but is a known weakness of
     * this implementation; verify results on dense graphs.</p>
     */
    public void makePrimMST(String root) {
        if (!allVertices.containsKey(root)) {
            System.err.println("ROOT NOT PRESENT");
            return;
        }
        PriorityQueue<Vertex> queue = new PriorityQueue<Vertex>(allVertices.size(), new Comparator<Vertex>() {
            public int compare(Vertex v1, Vertex v2) {
                return v1.key == v2.key ? 0 : (v1.key < v2.key ? -1 : 1);
            }
        });
        ArrayList<Edge> primTree = new ArrayList<Edge>();
        Vertex r = null, u = null;
        Iterator<Entry<String, Vertex>> itr = allVertices.entrySet().iterator();
        while (itr.hasNext()) {
            Entry<String, Vertex> entry = itr.next();
            if (entry.getKey().equals(root)) {
                r = (Vertex) entry.getValue();
                continue;
            }
            u = (Vertex) entry.getValue();
            u.key = Integer.MAX_VALUE;
            u.parent = null;
        }
        r.key = 0;
        // Re-iterate to populate the queue
        for (Vertex vertex : allVertices.values()) {
            queue.offer(vertex);
        }
        while (queue.size() != 0) {
            System.out.println("Queue Right Now:: " + queue);
            u = queue.poll();
            System.out.println("Curret Vertex:: " + u);
            HashMap<Vertex, Integer> list = adjList.get(u);
            System.out.println("Processing Adjacency List of " + u);
            for (Map.Entry<Vertex, Integer> entry : list.entrySet()) {
                Vertex v = (Vertex) entry.getKey();
                Integer w = (Integer) entry.getValue();
                System.out.println("Current Neibhour:: " + v);
                System.out.println("Key:: " + v.key + " Edge Weight:: " + w.toString());
                if (queue.contains(v) && w.intValue() < v.key) {
                    System.out.println("Conditions true. Adding to primTree");
                    v.parent = u;
                    v.key = w;
                    primTree.add(new Edge(u.name, v.name, w.intValue()));
                }
            }
            System.out.println();
        }
        System.out.println(primTree);
    }

    // This method is a simplified version of DFS, and I
    // find it easier to implement and use
    /**
     * Depth-first traversal printing vertices from {@code root}.
     *
     * @throws RuntimeException if {@code root} is not a vertex of this graph
     */
    public void dfs(String root) {
        // Fix: the original guard was inverted — it threw "Root not present"
        // exactly when the root WAS present, and recursed on null otherwise.
        if (!hasVertex(root)) {
            throw new RuntimeException("Root not present in graph");
        }
        Vertex v = allVertices.get(root);
        search(v);
    }

    /** Recursive DFS helper; marks vertices visited (never reset). */
    private void search(Vertex root) {
        if (null == root) {
            return;
        }
        System.out.print(root + " ");
        root.visited = true;
        HashMap<Vertex, Integer> list = adjList.get(root);
        Iterator<Map.Entry<Vertex, Integer>> itr = list.entrySet().iterator();
        while (itr.hasNext()) {
            Vertex v = itr.next().getKey();
            if (v.visited == false) {
                search(v);
            }
        }
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Created by IntelliJ IDEA.
 * User: Anna.Kozlova
 * Date: 13-Jul-2006
 * Time: 12:07:39
 */
package com.intellij.openapi.wm.impl;

import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
import com.intellij.ui.content.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.InputEvent;
import java.beans.PropertyChangeListener;
import java.util.*;
import java.util.List;

/**
 * Headless (no-UI) {@link ToolWindowManagerEx} used in tests and headless runs.
 * All registration calls return {@link MockToolWindow} stubs; UI operations are
 * no-ops and most queries return empty/neutral values.
 */
@SuppressWarnings({"ConstantConditions"})
public class ToolWindowHeadlessManagerImpl extends ToolWindowManagerEx {
  // Registered tool windows by id; unregisterToolWindow removes from here.
  private final Map<String, ToolWindow> myToolWindows = new HashMap<String, ToolWindow>();
  private final Project myProject;

  public ToolWindowHeadlessManagerImpl(Project project) {
    myProject = project;
  }

  @Override
  public boolean canShowNotification(@NotNull String toolWindowId) {
    return false;
  }

  @Override
  public void notifyByBalloon(@NotNull final String toolWindowId, @NotNull final MessageType type, @NotNull final String htmlBody) {
  }

  /**
   * Creates and stores a {@link MockToolWindow} for the id. When a parent
   * disposable is supplied, the window is unregistered on its disposal.
   */
  private ToolWindow doRegisterToolWindow(final String id, @Nullable Disposable parentDisposable) {
    MockToolWindow tw = new MockToolWindow(myProject);
    myToolWindows.put(id, tw);
    if (parentDisposable != null) {
      Disposer.register(parentDisposable, new Disposable() {
        @Override
        public void dispose() {
          unregisterToolWindow(id);
        }
      });
    }
    return tw;
  }

  // All registerToolWindow overloads ignore component/anchor/flags and
  // delegate to doRegisterToolWindow.

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull String id,
                                       @NotNull JComponent component,
                                       @NotNull ToolWindowAnchor anchor,
                                       Disposable parentDisposable,
                                       boolean canWorkInDumbMode) {
    return doRegisterToolWindow(id, parentDisposable);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull String id, @NotNull JComponent component, @NotNull ToolWindowAnchor anchor) {
    return doRegisterToolWindow(id, null);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull String id,
                                       @NotNull JComponent component,
                                       @NotNull ToolWindowAnchor anchor,
                                       Disposable parentDisposable,
                                       boolean canWorkInDumbMode,
                                       boolean canCloseContents) {
    return doRegisterToolWindow(id, parentDisposable);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull String id,
                                       @NotNull JComponent component,
                                       @NotNull ToolWindowAnchor anchor,
                                       @NotNull Disposable parentDisposable) {
    return doRegisterToolWindow(id, parentDisposable);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor) {
    return doRegisterToolWindow(id, null);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor,
                                       final boolean secondary) {
    return doRegisterToolWindow(id, null);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor,
                                       final Disposable parentDisposable, final boolean dumbAware) {
    return doRegisterToolWindow(id, parentDisposable);
  }

  @NotNull
  @Override
  public ToolWindow registerToolWindow(@NotNull String id,
                                       boolean canCloseContent,
                                       @NotNull ToolWindowAnchor anchor,
                                       Disposable parentDisposable,
                                       boolean canWorkInDumbMode,
                                       boolean secondary) {
    return doRegisterToolWindow(id, parentDisposable);
  }

  @Override
  public void unregisterToolWindow(@NotNull String id) {
    myToolWindows.remove(id);
  }

  @Override
  public void activateEditorComponent() {
  }

  @Override
  public boolean isEditorComponentActive() {
    return false;
  }

  @NotNull
  @Override
  public String[] getToolWindowIds() {
    // NOTE(review): returns empty even for registered windows; only
    // getToolWindow(id) reflects registration in this mock.
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }

  @Override
  public String getActiveToolWindowId() {
    return null;
  }

  @Override
  public ToolWindow getToolWindow(String id) {
    return myToolWindows.get(id);
  }

  @Override
  public void invokeLater(@NotNull Runnable runnable) {
  }

  @NotNull
  @Override
  public IdeFocusManager getFocusManager() {
    return IdeFocusManagerHeadless.INSTANCE;
  }

  @Override
  public void notifyByBalloon(@NotNull final String toolWindowId,
                              @NotNull final MessageType type,
                              @NotNull final String text,
                              @Nullable final Icon icon,
                              @Nullable final HyperlinkListener listener) {
  }

  @Override
  public Balloon getToolWindowBalloon(String id) {
    return null;
  }

  @Override
  public boolean isMaximized(@NotNull ToolWindow wnd) {
    return false;
  }

  @Override
  public void setMaximized(@NotNull ToolWindow wnd, boolean maximized) {
  }

  @Override
  public void initToolWindow(@NotNull ToolWindowEP bean) {
  }

  @Override
  public void addToolWindowManagerListener(@NotNull ToolWindowManagerListener l) {
  }

  @Override
  public void addToolWindowManagerListener(@NotNull ToolWindowManagerListener l, @NotNull Disposable parentDisposable) {
  }

  @Override
  public void removeToolWindowManagerListener(@NotNull ToolWindowManagerListener l) {
  }

  @Override
  public String getLastActiveToolWindowId() {
    return null;
  }

  @Override
  public String getLastActiveToolWindowId(Condition<JComponent> condition) {
    return null;
  }

  @Override
  public DesktopLayout getLayout() {
    return new DesktopLayout();
  }

  @Override
  public void setLayoutToRestoreLater(DesktopLayout layout) {
  }

  @Override
  public DesktopLayout getLayoutToRestoreLater() {
    return new DesktopLayout();
  }

  @Override
  public void setLayout(@NotNull DesktopLayout layout) {
  }

  @Override
  public void clearSideStack() {
  }

  @Override
  public void hideToolWindow(@NotNull final String id, final boolean hideSide) {
  }

  @Override
  public List<String> getIdsOn(@NotNull final ToolWindowAnchor anchor) {
    return new ArrayList<String>();
  }

  /**
   * Stub {@link ToolWindowEx}: no UI, but carries a real (mock) content
   * manager whose lifetime is tied to the project via {@link Disposer}.
   */
  public static class MockToolWindow implements ToolWindowEx {
    ContentManager myContentManager = new MockContentManager();

    public MockToolWindow(@NotNull Project project) {
      Disposer.register(project, myContentManager);
    }

    @Override
    public boolean isActive() {
      return false;
    }

    @Override
    public void activate(@Nullable Runnable runnable) {
    }

    @Override
    public boolean isDisposed() {
      return false;
    }

    @Override
    public boolean isVisible() {
      return false;
    }

    @NotNull
    @Override
    public ActionCallback getReady(@NotNull Object requestor) {
      return new ActionCallback.Done();
    }

    @Override
    public void show(@Nullable Runnable runnable) {
    }

    @Override
    public void hide(@Nullable Runnable runnable) {
    }

    @Override
    public ToolWindowAnchor getAnchor() {
      return ToolWindowAnchor.BOTTOM;
    }

    @Override
    public void setAnchor(ToolWindowAnchor anchor, @Nullable Runnable runnable) {
    }

    @Override
    public boolean isSplitMode() {
      return false;
    }

    @Override
    public void setSplitMode(final boolean isSideTool, @Nullable final Runnable runnable) {
    }

    @Override
    public boolean isAutoHide() {
      return false;
    }

    @Override
    public void setAutoHide(boolean state) {
    }

    @Override
    public void setToHideOnEmptyContent(final boolean hideOnEmpty) {
    }

    @Override
    public boolean isToHideOnEmptyContent() {
      return false;
    }

    @Override
    public ToolWindowType getType() {
      return ToolWindowType.SLIDING;
    }

    @Override
    public void setType(ToolWindowType type, @Nullable Runnable runnable) {
    }

    @Override
    public Icon getIcon() {
      return null;
    }

    @Override
    public void setIcon(Icon icon) {
    }

    @Override
    public String getTitle() {
      return "";
    }

    @Override
    public void setTitle(String title) {
    }

    @NotNull
    @Override
    public String getStripeTitle() {
      return "";
    }

    @Override
    public void setStripeTitle(@NotNull String title) {
    }

    @Override
    public boolean isAvailable() {
      return false;
    }

    @Override
    public void setContentUiType(ToolWindowContentUiType type, @Nullable Runnable runnable) {
    }

    @Override
    public void setDefaultContentUiType(@NotNull ToolWindowContentUiType type) {
    }

    @Override
    public ToolWindowContentUiType getContentUiType() {
      return ToolWindowContentUiType.TABBED;
    }

    @Override
    public void setAvailable(boolean available, @Nullable Runnable runnable) {
    }

    @Override
    public void installWatcher(ContentManager contentManager) {
    }

    @Override
    public JComponent getComponent() {
      return null;
    }

    @Override
    public ContentManager getContentManager() {
      return myContentManager;
    }

    @Override
    public void setDefaultState(@Nullable final ToolWindowAnchor anchor,
                                @Nullable final ToolWindowType type,
                                @Nullable final Rectangle floatingBounds) {
    }

    @Override
    public void activate(@Nullable final Runnable runnable, final boolean autoFocusContents) {
    }

    @Override
    public void activate(@Nullable Runnable runnable, boolean autoFocusContents, boolean forced) {
    }

    @Override
    public void showContentPopup(InputEvent inputEvent) {
    }

    @Override
    public ActionCallback getActivation() {
      return new ActionCallback.Done();
    }

    @Override
    public void removePropertyChangeListener(PropertyChangeListener l) {
    }

    @Override
    public ToolWindowType getInternalType() {
      return ToolWindowType.DOCKED;
    }

    @Override
    public void stretchWidth(int value) {
    }

    @Override
    public void stretchHeight(int value) {
    }

    @Override
    public InternalDecorator getDecorator() {
      return null;
    }

    @Override
    public void setAdditionalGearActions(ActionGroup additionalGearActions) {
    }

    @Override
    public void setTitleActions(AnAction... actions) {
    }

    @Override
    public void setUseLastFocusedOnActivation(boolean focus) {
    }

    @Override
    public boolean isUseLastFocusedOnActivation() {
      return false;
    }
  }

  /**
   * In-memory {@link ContentManager}: keeps contents in a list, tracks a single
   * selection, and fires add/remove/selection events through an
   * {@link EventDispatcher}.
   */
  private static class MockContentManager implements ContentManager {
    private final EventDispatcher<ContentManagerListener> myDispatcher = EventDispatcher.create(ContentManagerListener.class);
    private final List<Content> myContents = new ArrayList<Content>();
    private Content mySelected;

    @NotNull
    @Override
    public ActionCallback getReady(@NotNull Object requestor) {
      return new ActionCallback.Done();
    }

    @Override
    public void addContent(@NotNull final Content content) {
      myContents.add(content);
      ContentManagerEvent e = new ContentManagerEvent(this, content, myContents.indexOf(content), ContentManagerEvent.ContentOperation.add);
      myDispatcher.getMulticaster().contentAdded(e);
      // First content added becomes the selection.
      if (mySelected == null) setSelectedContent(content);
    }

    @Override
    public void addContent(@NotNull Content content, int order) {
      myContents.add(order, content);
      ContentManagerEvent e = new ContentManagerEvent(this, content, myContents.indexOf(content), ContentManagerEvent.ContentOperation.add);
      myDispatcher.getMulticaster().contentAdded(e);
      if (mySelected == null) setSelectedContent(content);
    }

    @Override
    public void addContent(@NotNull final Content content, final Object constraints) {
      // Constraints are ignored in the headless mock.
      addContent(content);
    }

    @Override
    public void addSelectedContent(@NotNull final Content content) {
      addContent(content);
      setSelectedContent(content);
    }

    @Override
    public void addContentManagerListener(@NotNull final ContentManagerListener l) {
      // Prepend so newest listeners are notified first.
      myDispatcher.getListeners().add(0, l);
    }

    @Override
    public void addDataProvider(@NotNull final DataProvider provider) {
    }

    @Override
    public boolean canCloseAllContents() {
      return false;
    }

    @Override
    public boolean canCloseContents() {
      return false;
    }

    @Override
    public Content findContent(final String displayName) {
      for (Content each : myContents) {
        if (each.getDisplayName().equals(displayName)) return each;
      }
      return null;
    }

    @Override
    public List<AnAction> getAdditionalPopupActions(@NotNull final Content content) {
      return Collections.emptyList();
    }

    @NotNull
    @Override
    public String getCloseActionName() {
      return "close";
    }

    @NotNull
    @Override
    public String getCloseAllButThisActionName() {
      return "closeallbutthis";
    }

    @NotNull
    @Override
    public String getPreviousContentActionName() {
      return "previous";
    }

    @NotNull
    @Override
    public String getNextContentActionName() {
      return "next";
    }

    @NotNull
    @Override
    public JComponent getComponent() {
      return new JLabel();
    }

    @Override
    public Content getContent(final JComponent component) {
      Content[] contents = getContents();
      for (Content content : contents) {
        if (Comparing.equal(component, content.getComponent())) {
          return content;
        }
      }
      return null;
    }

    @Override
    @Nullable
    public Content getContent(final int index) {
      return myContents.get(index);
    }

    @Override
    public int getContentCount() {
      return myContents.size();
    }

    @Override
    @NotNull
    public Content[] getContents() {
      return myContents.toArray(new Content[myContents.size()]);
    }

    @Override
    public int getIndexOfContent(final Content content) {
      return myContents.indexOf(content);
    }

    @Override
    @Nullable
    public Content getSelectedContent() {
      return mySelected;
    }

    @Override
    @NotNull
    public Content[] getSelectedContents() {
      return mySelected != null ? new Content[]{mySelected} : new Content[0];
    }

    @Override
    public boolean isSelected(@NotNull final Content content) {
      return content == mySelected;
    }

    @Override
    public void removeAllContents(final boolean dispose) {
      for (Content content : getContents()) {
        removeContent(content, dispose);
      }
    }

    @Override
    public boolean removeContent(@NotNull final Content content, final boolean dispose) {
      boolean wasSelected = mySelected == content;
      int oldIndex = myContents.indexOf(content);
      if (wasSelected) {
        removeFromSelection(content);
      }
      boolean result = myContents.remove(content);
      if (dispose) Disposer.dispose(content);
      ContentManagerEvent e = new ContentManagerEvent(this, content, oldIndex, ContentManagerEvent.ContentOperation.remove);
      myDispatcher.getMulticaster().contentRemoved(e);
      // Selection moves to the first remaining content, if any.
      Content item = ContainerUtil.getFirstItem(myContents);
      if (item != null) setSelectedContent(item);
      return result;
    }

    @NotNull
    @Override
    public ActionCallback removeContent(@NotNull Content content, boolean dispose, boolean trackFocus, boolean implicitFocus) {
      removeContent(content, dispose);
      return new ActionCallback.Done();
    }

    @Override
    public void removeContentManagerListener(@NotNull final ContentManagerListener l) {
      myDispatcher.removeListener(l);
    }

    @Override
    public void removeFromSelection(@NotNull final Content content) {
      // Fires a selectionChanged(remove) event; note mySelected itself is
      // cleared/replaced by the callers, not here.
      ContentManagerEvent e = new ContentManagerEvent(this, content, myContents.indexOf(mySelected), ContentManagerEvent.ContentOperation.remove);
      myDispatcher.getMulticaster().selectionChanged(e);
    }

    @Override
    public ActionCallback selectNextContent() {
      return new ActionCallback.Done();
    }

    @Override
    public ActionCallback selectPreviousContent() {
      return new ActionCallback.Done();
    }

    @Override
    public void setSelectedContent(@NotNull final Content content) {
      if (mySelected != null) {
        removeFromSelection(mySelected);
      }
      mySelected = content;
      ContentManagerEvent e = new ContentManagerEvent(this, content, myContents.indexOf(content), ContentManagerEvent.ContentOperation.add);
      myDispatcher.getMulticaster().selectionChanged(e);
    }

    @NotNull
    @Override
    public ActionCallback setSelectedContentCB(@NotNull Content content) {
      setSelectedContent(content);
      return new ActionCallback.Done();
    }

    @Override
    public void setSelectedContent(@NotNull final Content content, final boolean requestFocus) {
      setSelectedContent(content);
    }

    @NotNull
    @Override
    public ActionCallback setSelectedContentCB(@NotNull final Content content, final boolean requestFocus) {
      return setSelectedContentCB(content);
    }

    @Override
    public void setSelectedContent(@NotNull Content content, boolean requestFocus, boolean forcedFocus) {
      setSelectedContent(content);
    }

    @NotNull
    @Override
    public ActionCallback setSelectedContentCB(@NotNull final Content content, final boolean requestFocus, final boolean forcedFocus) {
      return setSelectedContentCB(content);
    }

    @NotNull
    @Override
    public ActionCallback setSelectedContent(@NotNull Content content, boolean requestFocus, boolean forcedFocus, boolean implicit) {
      return setSelectedContentCB(content);
    }

    @NotNull
    @Override
    public ActionCallback requestFocus(@Nullable final Content content, final boolean forced) {
      return new ActionCallback.Done();
    }

    @Override
    public void dispose() {
      myContents.clear();
      mySelected = null;
      myDispatcher.getListeners().clear();
    }

    @Override
    public boolean isDisposed() {
      return false;
    }

    @Override
    public boolean isSingleSelection() {
      return true;
    }

    @Override
    @NotNull
    public ContentFactory getFactory() {
      return ServiceManager.getService(ContentFactory.class);
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.block;

import com.facebook.presto.metadata.FunctionManager;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.ByteArrayBlock;
import com.facebook.presto.spi.block.MapBlock;
import com.facebook.presto.spi.block.MapBlockBuilder;
import com.facebook.presto.spi.block.SingleMapBlock;
import com.facebook.presto.spi.type.MapType;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.type.TypeRegistry;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static com.facebook.presto.block.BlockAssertions.createLongsBlock;
import static com.facebook.presto.block.BlockAssertions.createStringsBlock;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.TinyintType.TINYINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
import static io.airlift.slice.Slices.utf8Slice;
import static java.util.Objects.requireNonNull;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;

/**
 * Unit tests for {@code MapBlock} / {@code MapBlockBuilder}: round-tripping
 * map<varchar, bigint> values through builders and key/value blocks, position
 * filtering, compactness, lazy hash-table construction over block regions,
 * strict entry closing, and data-size estimation for statistics.
 *
 * Expected values are represented as {@code Map<String, Long>[]} — one map per
 * block position, {@code null} array elements standing for null map positions.
 * Assertion plumbing (assertBlock, assertBlockFilteredPositions,
 * alternatingNullValues, createExpectedValue, testCompactBlock,
 * testIncompactBlock) is inherited from {@code AbstractTestBlock}.
 */
public class TestMapBlock
        extends AbstractTestBlock
{
    private static final TypeManager TYPE_MANAGER = new TypeRegistry();

    static {
        // associate TYPE_MANAGER with a function manager
        // (BlockEncodingManager resolves from this same package, com.facebook.presto.block)
        new FunctionManager(TYPE_MANAGER, new BlockEncodingManager(TYPE_MANAGER), new FeaturesConfig());
    }

    // Basic round-trip over 8 maps of varying entry counts (including empty maps).
    @Test
    public void test()
    {
        testWith(createTestMap(9, 3, 4, 0, 8, 0, 6, 5));
    }

    // Verifies that blocks built from exactly-sized key/value blocks report as
    // compact, and that over-sized underlying blocks report as incompact.
    @Test
    public void testCompactBlock()
    {
        Block emptyBlock = new ByteArrayBlock(0, Optional.empty(), new byte[0]);
        // 16-byte key/value blocks backed by exactly 16 bytes -> compact
        Block compactKeyBlock = new ByteArrayBlock(16, Optional.empty(), createExpectedValue(16).getBytes());
        Block compactValueBlock = new ByteArrayBlock(16, Optional.empty(), createExpectedValue(16).getBytes());
        // 16-position blocks backed by 17 bytes -> NOT compact
        Block inCompactKeyBlock = new ByteArrayBlock(16, Optional.empty(), createExpectedValue(17).getBytes());
        Block inCompactValueBlock = new ByteArrayBlock(16, Optional.empty(), createExpectedValue(17).getBytes());
        int[] offsets = {0, 1, 1, 2, 4, 8, 16};
        boolean[] mapIsNull = {false, true, false, false, false, false};

        testCompactBlock(mapType(TINYINT, TINYINT).createBlockFromKeyValue(0, Optional.empty(), new int[1], emptyBlock, emptyBlock));
        testCompactBlock(mapType(TINYINT, TINYINT).createBlockFromKeyValue(mapIsNull.length, Optional.of(mapIsNull), offsets, compactKeyBlock, compactValueBlock));
        // TODO: Add test case for a sliced MapBlock

        // underlying key/value block is not compact
        testIncompactBlock(mapType(TINYINT, TINYINT).createBlockFromKeyValue(mapIsNull.length, Optional.of(mapIsNull), offsets, inCompactKeyBlock, inCompactValueBlock));
    }

    // TODO: remove this test when we have a more unified testWith() using assertBlock()
    @Test
    public void testLazyHashTableBuildOverBlockRegion()
    {
        assertLazyHashTableBuildOverBlockRegion(createTestMap(9, 3, 4, 0, 8, 0, 6, 5));
        assertLazyHashTableBuildOverBlockRegion(alternatingNullValues(createTestMap(9, 3, 4, 0, 8, 0, 6, 5)));
    }

    // Hash tables are built lazily, on first keyed access to any region; once
    // built via ONE region they must be visible from the parent block and from
    // every other region. Exercised from a prefix, mid-section, and suffix region.
    private void assertLazyHashTableBuildOverBlockRegion(Map<String, Long>[] testValues)
    {
        // use prefix block to build the hash table
        {
            MapBlock block = createBlockWithValuesFromKeyValueBlock(testValues);
            BlockBuilder blockBuilder = createBlockBuilderWithValues(testValues);

            MapBlock prefix = (MapBlock) block.getRegion(0, 4);
            // no hash table before any keyed access
            assertFalse(block.isHashTablesPresent());
            assertFalse(prefix.isHashTablesPresent());

            // assertBlock performs keyed lookups, which triggers the lazy build
            assertBlock(prefix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 0, 4));
            assertTrue(block.isHashTablesPresent());
            assertTrue(prefix.isHashTablesPresent());

            // other regions see the table that the prefix built
            MapBlock midSection = (MapBlock) block.getRegion(2, 4);
            assertTrue(midSection.isHashTablesPresent());
            assertBlock(midSection, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 2, 6));

            MapBlock suffix = (MapBlock) block.getRegion(4, 4);
            assertTrue(suffix.isHashTablesPresent());
            assertBlock(suffix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 4, 8));
        }

        // use mid-section block to build the hash table
        {
            MapBlock block = createBlockWithValuesFromKeyValueBlock(testValues);
            BlockBuilder blockBuilder = createBlockBuilderWithValues(testValues);

            MapBlock midSection = (MapBlock) block.getRegion(2, 4);
            assertFalse(block.isHashTablesPresent());
            assertFalse(midSection.isHashTablesPresent());

            assertBlock(midSection, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 2, 6));
            assertTrue(block.isHashTablesPresent());
            assertTrue(midSection.isHashTablesPresent());

            MapBlock prefix = (MapBlock) block.getRegion(0, 4);
            assertTrue(prefix.isHashTablesPresent());
            assertBlock(prefix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 0, 4));

            MapBlock suffix = (MapBlock) block.getRegion(4, 4);
            assertTrue(suffix.isHashTablesPresent());
            assertBlock(suffix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 4, 8));
        }

        // use suffix block to build the hash table
        {
            MapBlock block = createBlockWithValuesFromKeyValueBlock(testValues);
            BlockBuilder blockBuilder = createBlockBuilderWithValues(testValues);

            MapBlock suffix = (MapBlock) block.getRegion(4, 4);
            assertFalse(block.isHashTablesPresent());
            assertFalse(suffix.isHashTablesPresent());

            assertBlock(suffix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 4, 8));
            assertTrue(block.isHashTablesPresent());
            assertTrue(suffix.isHashTablesPresent());

            MapBlock prefix = (MapBlock) block.getRegion(0, 4);
            assertTrue(prefix.isHashTablesPresent());
            assertBlock(prefix, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 0, 4));

            MapBlock midSection = (MapBlock) block.getRegion(2, 4);
            assertTrue(midSection.isHashTablesPresent());
            assertBlock(midSection, () -> blockBuilder.newBlockBuilderLike(null), Arrays.copyOfRange(testValues, 2, 6));
        }
    }

    // Builds one Map per requested entry count; keys are "key0".."keyN-1",
    // values encode row and entry number (rowNumber * 100 + entryNumber),
    // except entry 5 which is always null to exercise null-value handling.
    private Map<String, Long>[] createTestMap(int... entryCounts)
    {
        Map<String, Long>[] result = new Map[entryCounts.length];
        for (int rowNumber = 0; rowNumber < entryCounts.length; rowNumber++) {
            int entryCount = entryCounts[rowNumber];
            Map<String, Long> map = new HashMap<>();
            for (int entryNumber = 0; entryNumber < entryCount; entryNumber++) {
                map.put("key" + entryNumber, entryNumber == 5 ? null : rowNumber * 100L + entryNumber);
            }
            result[rowNumber] = map;
        }
        return result;
    }

    // Full round-trip battery: builder, built block, and key/value-constructed
    // block are all checked against expectedValues — whole and position-filtered,
    // with and without interleaved null maps.
    private void testWith(Map<String, Long>[] expectedValues)
    {
        BlockBuilder blockBuilder = createBlockBuilderWithValues(expectedValues);

        assertBlock(blockBuilder, () -> blockBuilder.newBlockBuilderLike(null), expectedValues);
        assertBlock(blockBuilder.build(), () -> blockBuilder.newBlockBuilderLike(null), expectedValues);
        assertBlockFilteredPositions(expectedValues, blockBuilder, () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 3, 4, 7);
        assertBlockFilteredPositions(expectedValues, blockBuilder.build(), () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 3, 4, 7);
        assertBlockFilteredPositions(expectedValues, blockBuilder, () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 5, 6);
        assertBlockFilteredPositions(expectedValues, blockBuilder.build(), () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 5, 6);

        Block block = createBlockWithValuesFromKeyValueBlock(expectedValues);

        assertBlock(block, () -> blockBuilder.newBlockBuilderLike(null), expectedValues);
        assertBlockFilteredPositions(expectedValues, block, () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 3, 4, 7);
        assertBlockFilteredPositions(expectedValues, block, () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 5, 6);

        // repeat with nulls interleaved between the original positions
        Map<String, Long>[] expectedValuesWithNull = alternatingNullValues(expectedValues);
        BlockBuilder blockBuilderWithNull = createBlockBuilderWithValues(expectedValuesWithNull);

        assertBlock(blockBuilderWithNull, () -> blockBuilder.newBlockBuilderLike(null), expectedValuesWithNull);
        assertBlock(blockBuilderWithNull.build(), () -> blockBuilder.newBlockBuilderLike(null), expectedValuesWithNull);
        assertBlockFilteredPositions(expectedValuesWithNull, blockBuilderWithNull, () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 5, 6, 7, 10, 11, 12, 15);
        assertBlockFilteredPositions(expectedValuesWithNull, blockBuilderWithNull.build(), () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 5, 6, 7, 10, 11, 12, 15);
        assertBlockFilteredPositions(expectedValuesWithNull, blockBuilderWithNull, () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 4, 9, 13, 14);
        assertBlockFilteredPositions(expectedValuesWithNull, blockBuilderWithNull.build(), () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 4, 9, 13, 14);

        Block blockWithNull = createBlockWithValuesFromKeyValueBlock(expectedValuesWithNull);

        assertBlock(blockWithNull, () -> blockBuilder.newBlockBuilderLike(null), expectedValuesWithNull);
        assertBlockFilteredPositions(expectedValuesWithNull, blockWithNull, () -> blockBuilder.newBlockBuilderLike(null), 0, 1, 5, 6, 7, 10, 11, 12, 15);
        assertBlockFilteredPositions(expectedValuesWithNull, blockWithNull, () -> blockBuilder.newBlockBuilderLike(null), 2, 3, 4, 9, 13, 14);
    }

    // Builds a map<varchar, bigint> block builder containing all given maps, in order.
    private BlockBuilder createBlockBuilderWithValues(Map<String, Long>[] maps)
    {
        MapType mapType = mapType(VARCHAR, BIGINT);
        BlockBuilder mapBlockBuilder = mapType.createBlockBuilder(null, 1);
        for (Map<String, Long> map : maps) {
            createBlockBuilderWithValues(map, mapBlockBuilder);
        }
        return mapBlockBuilder;
    }

    // Builds the same logical content as createBlockBuilderWithValues, but via
    // createBlockFromKeyValue: flattened key/value blocks plus offsets and null flags.
    private MapBlock createBlockWithValuesFromKeyValueBlock(Map<String, Long>[] maps)
    {
        List<String> keys = new ArrayList<>();
        List<Long> values = new ArrayList<>();
        int positionCount = maps.length;
        // offsets[i]..offsets[i+1] delimit map i's entries in the flattened blocks
        int[] offsets = new int[positionCount + 1];
        boolean[] mapIsNull = new boolean[positionCount];
        for (int i = 0; i < positionCount; i++) {
            Map<String, Long> map = maps[i];
            mapIsNull[i] = map == null;
            if (map == null) {
                // null map occupies zero entries
                offsets[i + 1] = offsets[i];
            }
            else {
                for (Map.Entry<String, Long> entry : map.entrySet()) {
                    keys.add(entry.getKey());
                    values.add(entry.getValue());
                }
                offsets[i + 1] = offsets[i] + map.size();
            }
        }
        return (MapBlock) mapType(VARCHAR, BIGINT).createBlockFromKeyValue(positionCount, Optional.of(mapIsNull), offsets, createStringsBlock(keys), createLongsBlock(values));
    }

    // Appends a single map (or a null position) to the given map block builder.
    private void createBlockBuilderWithValues(Map<String, Long> map, BlockBuilder mapBlockBuilder)
    {
        if (map == null) {
            mapBlockBuilder.appendNull();
        }
        else {
            BlockBuilder elementBlockBuilder = mapBlockBuilder.beginBlockEntry();
            for (Map.Entry<String, Long> entry : map.entrySet()) {
                VARCHAR.writeSlice(elementBlockBuilder, utf8Slice(entry.getKey()));
                if (entry.getValue() == null) {
                    elementBlockBuilder.appendNull();
                }
                else {
                    BIGINT.writeLong(elementBlockBuilder, entry.getValue());
                }
            }
            mapBlockBuilder.closeEntry();
        }
    }

    // Routes Map expected values to the map-specific assertion; everything else
    // falls through to the base class.
    @Override
    protected <T> void assertCheckedPositionValue(Block block, int position, T expectedValue)
    {
        if (expectedValue instanceof Map) {
            assertValue(block, position, (Map<String, Long>) expectedValue);
            return;
        }
        super.assertCheckedPositionValue(block, position, expectedValue);
    }

    // Same routing as assertCheckedPositionValue, for the unchecked/internal-position API.
    @Override
    protected <T> void assertPositionValueUnchecked(Block block, int internalPosition, T expectedValue)
    {
        if (expectedValue instanceof Map) {
            assertValueUnchecked(block, internalPosition, (Map<String, Long>) expectedValue);
            return;
        }
        super.assertPositionValueUnchecked(block, internalPosition, expectedValue);
    }

    // Asserts the map at `position` equals `map`, via both the hash-index
    // (seekKey) access path and the legacy iterative (key at 2i, value at 2i+1) path.
    private void assertValue(Block mapBlock, int position, Map<String, Long> map)
    {
        MapType mapType = mapType(VARCHAR, BIGINT);

        // null maps are handled by assertPositionValue
        requireNonNull(map, "map is null");

        assertFalse(mapBlock.isNull(position));
        SingleMapBlock elementBlock = (SingleMapBlock) mapType.getObject(mapBlock, position);
        // SingleMapBlock interleaves keys and values, hence 2 positions per entry
        assertEquals(elementBlock.getPositionCount(), map.size() * 2);

        // Test new/hash-index access: assert inserted keys
        for (Map.Entry<String, Long> entry : map.entrySet()) {
            int pos = elementBlock.seekKey(utf8Slice(entry.getKey()));
            assertNotEquals(pos, -1);
            if (entry.getValue() == null) {
                assertTrue(elementBlock.isNull(pos));
            }
            else {
                assertFalse(elementBlock.isNull(pos));
                assertEquals(BIGINT.getLong(elementBlock, pos), (long) entry.getValue());
            }
        }
        // Test new/hash-index access: assert non-existent keys
        for (int i = 0; i < 10; i++) {
            assertEquals(elementBlock.seekKey(utf8Slice("not-inserted-" + i)), -1);
        }
        // Test legacy/iterative access
        for (int i = 0; i < elementBlock.getPositionCount(); i += 2) {
            String actualKey = VARCHAR.getSlice(elementBlock, i).toStringUtf8();
            Long actualValue;
            if (elementBlock.isNull(i + 1)) {
                actualValue = null;
            }
            else {
                actualValue = BIGINT.getLong(elementBlock, i + 1);
            }
            assertTrue(map.containsKey(actualKey));
            assertEquals(actualValue, map.get(actualKey));
        }
    }

    // Unchecked-API twin of assertValue, offsetting positions by the element
    // block's base offset where the unchecked accessors require it.
    // NOTE(review): the legacy loop below mixes getOffset() and getOffsetBase()
    // (getSliceUnchecked/isNullUnchecked use getOffset(), getLongUnchecked uses
    // getOffsetBase()) — looks inconsistent; confirm against SingleMapBlock's contract.
    private void assertValueUnchecked(Block mapBlock, int internalPosition, Map<String, Long> map)
    {
        MapType mapType = mapType(VARCHAR, BIGINT);

        // null maps are handled by assertPositionValue
        requireNonNull(map, "map is null");

        assertFalse(mapBlock.isNullUnchecked((internalPosition)));
        SingleMapBlock elementBlock = (SingleMapBlock) mapType.getBlockUnchecked(mapBlock, (internalPosition));
        assertEquals(elementBlock.getPositionCount(), map.size() * 2);

        // Test new/hash-index access: assert inserted keys
        for (Map.Entry<String, Long> entry : map.entrySet()) {
            int pos = elementBlock.seekKey(utf8Slice(entry.getKey()));
            assertNotEquals(pos, -1);
            if (entry.getValue() == null) {
                assertTrue(elementBlock.isNullUnchecked(pos + elementBlock.getOffsetBase()));
            }
            else {
                assertFalse(elementBlock.isNullUnchecked(pos + elementBlock.getOffsetBase()));
                assertEquals(BIGINT.getLongUnchecked(elementBlock, pos + elementBlock.getOffsetBase()), (long) entry.getValue());
            }
        }
        // Test new/hash-index access: assert non-existent keys
        for (int i = 0; i < 10; i++) {
            assertEquals(elementBlock.seekKey(utf8Slice("not-inserted-" + i)), -1);
        }
        // Test legacy/iterative access
        for (int i = 0; i < elementBlock.getPositionCount(); i += 2) {
            String actualKey = VARCHAR.getSliceUnchecked(elementBlock, i + elementBlock.getOffset()).toStringUtf8();
            Long actualValue;
            if (elementBlock.isNullUnchecked(i + 1 + elementBlock.getOffset())) {
                actualValue = null;
            }
            else {
                actualValue = BIGINT.getLongUnchecked(elementBlock, i + 1 + elementBlock.getOffsetBase());
            }
            assertTrue(map.containsKey(actualKey));
            assertEquals(actualValue, map.get(actualKey));
        }
    }

    // Exercises closeEntryStrict() offset bookkeeping after many prior entries;
    // passes as long as no exception is thrown.
    @Test
    public void testCloseEntryStrict()
            throws Exception
    {
        MapType mapType = mapType(BIGINT, BIGINT);
        MapBlockBuilder mapBlockBuilder = (MapBlockBuilder) mapType.createBlockBuilder(null, 1);

        // Add 100 maps with only one entry but the same key
        for (int i = 0; i < 100; i++) {
            BlockBuilder entryBuilder = mapBlockBuilder.beginBlockEntry();
            BIGINT.writeLong(entryBuilder, 1);
            BIGINT.writeLong(entryBuilder, -1);
            mapBlockBuilder.closeEntry();
        }

        BlockBuilder entryBuilder = mapBlockBuilder.beginBlockEntry();
        // Add 50 keys so we get some chance to get hash conflict
        // The purpose of this test is to make sure offset is calculated correctly in MapBlockBuilder.closeEntryStrict()
        for (int i = 0; i < 50; i++) {
            BIGINT.writeLong(entryBuilder, i);
            BIGINT.writeLong(entryBuilder, -1);
        }
        mapBlockBuilder.closeEntryStrict();
    }

    // Estimated data size for stats must match the sum of key byte lengths plus
    // Long.BYTES per non-null value, and 0 for null maps.
    @Test
    public void testEstimatedDataSizeForStats()
    {
        Map<String, Long>[] expectedValues = alternatingNullValues(createTestMap(9, 3, 4, 0, 8, 0, 6, 5));
        BlockBuilder blockBuilder = createBlockBuilderWithValues(expectedValues);
        Block block = blockBuilder.build();
        assertEquals(block.getPositionCount(), expectedValues.length);
        for (int i = 0; i < block.getPositionCount(); i++) {
            int expectedSize = getExpectedEstimatedDataSize(expectedValues[i]);
            assertEquals(blockBuilder.getEstimatedDataSizeForStats(i), expectedSize);
            assertEquals(block.getEstimatedDataSizeForStats(i), expectedSize);
        }
    }

    // Reference implementation of the per-position size estimate used above.
    // NOTE(review): key size uses String.length() (chars), which equals the UTF-8
    // byte count only for ASCII keys like the "keyN" keys used in these tests.
    private static int getExpectedEstimatedDataSize(Map<String, Long> map)
    {
        if (map == null) {
            return 0;
        }
        int size = 0;
        for (Map.Entry<String, Long> entry : map.entrySet()) {
            size += entry.getKey().length();
            size += entry.getValue() == null ? 0 : Long.BYTES;
        }
        return size;
    }
}
/* * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.scim.provider.auth; import org.apache.axis2.AxisFault; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.ConfigurationContextFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.cxf.jaxrs.model.ClassResourceInfo; import org.apache.cxf.message.Message; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.identity.application.common.model.ProvisioningServiceProviderType; import org.wso2.carbon.identity.application.common.model.ThreadLocalProvisioningServiceProvider; import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil; import org.wso2.carbon.identity.oauth2.OAuth2TokenValidationService; import org.wso2.carbon.identity.oauth2.dto.OAuth2ClientApplicationDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationRequestDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationResponseDTO; import org.wso2.carbon.identity.scim.provider.util.SCIMProviderConstants; import org.wso2.carbon.user.core.service.RealmService; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import org.wso2.charon.core.schema.SCIMConstants; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; 
public class OAuthHandler implements SCIMAuthenticationHandler { private static Log log = LogFactory.getLog(BasicAuthHandler.class); /* constants specific to this authenticator */ private final String BEARER_AUTH_HEADER = "Bearer"; private final String LOCAL_PREFIX = "local"; private final int DEFAULT_PRIORITY = 10; private final String LOCAL_AUTH_SERVER = "local://services"; /* properties map to be initialized */ private Map<String, String> properties; /* properties specific to this authenticator */ private String remoteServiceURL; private int priority; private String userName; private String password; // Ideally this should be configurable. For the moment, hard code the priority. public int getPriority() { return priority; } public void setPriority(int priority) { this.priority = priority; } public void setDefaultPriority() { this.priority = DEFAULT_PRIORITY; } public void setDefaultAuthzServer() { this.remoteServiceURL = LOCAL_AUTH_SERVER; } public boolean canHandle(Message message, ClassResourceInfo classResourceInfo) { // check the "Authorization" header and if "Bearer" is there, can be handled. 
// get the map of protocol headers Map protocolHeaders = (TreeMap) message.get(Message.PROTOCOL_HEADERS); // get the value for Authorization Header List authzHeaders = (ArrayList) protocolHeaders .get(SCIMConstants.AUTHORIZATION_HEADER); if (authzHeaders != null) { // get the authorization header value, if provided String authzHeader = (String) authzHeaders.get(0); if (authzHeader != null && authzHeader.contains(BEARER_AUTH_HEADER)) { return true; } } return false; } public boolean isAuthenticated(Message message, ClassResourceInfo classResourceInfo) { // get the map of protocol headers Map protocolHeaders = (TreeMap) message.get(Message.PROTOCOL_HEADERS); // get the value for Authorization Header List authzHeaders = (ArrayList) protocolHeaders .get(SCIMConstants.AUTHORIZATION_HEADER); if (authzHeaders != null) { // get the authorization header value, if provided String authzHeader = (String) authzHeaders.get(0); // extract access token String accessToken = authzHeader.trim().substring(7).trim(); // validate access token try { OAuth2ClientApplicationDTO validationApp = this.validateAccessToken(accessToken); OAuth2TokenValidationResponseDTO validationResponse = null; if (validationApp != null) { validationResponse = validationApp.getAccessTokenValidationResponse(); } if (validationResponse != null && validationResponse.isValid()) { String userName = validationResponse.getAuthorizedUser(); authzHeaders.set(0, userName); // setup thread local variable to be consumed by the provisioning framework. 
RealmService realmService = (RealmService) PrivilegedCarbonContext .getThreadLocalCarbonContext().getOSGiService(RealmService.class); ThreadLocalProvisioningServiceProvider serviceProvider = new ThreadLocalProvisioningServiceProvider(); serviceProvider.setServiceProviderName(validationApp.getConsumerKey()); serviceProvider .setServiceProviderType(ProvisioningServiceProviderType.OAUTH); serviceProvider.setClaimDialect(SCIMProviderConstants.DEFAULT_SCIM_DIALECT); serviceProvider.setTenantDomain(MultitenantUtils.getTenantDomain(userName)); IdentityApplicationManagementUtil .setThreadLocalProvisioningServiceProvider(serviceProvider); PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext(); String tenantDomain = MultitenantUtils.getTenantDomain(userName); carbonContext.setUsername(MultitenantUtils.getTenantAwareUsername(userName)); carbonContext.setTenantId(realmService.getTenantManager().getTenantId(tenantDomain)); carbonContext.setTenantDomain(tenantDomain); return true; } } catch (Exception e) { String error = "Error in validating OAuth access token."; log.error(error, e); } } return false; } /** * To set the properties specific to each authenticator * * @param authenticatorProperties */ public void setProperties(Map<String, String> authenticatorProperties) { this.properties = authenticatorProperties; String priorityString = properties.get(SCIMProviderConstants.PROPERTY_NAME_PRIORITY); if (priorityString != null) { priority = Integer.parseInt(priorityString); } else { priority = DEFAULT_PRIORITY; } String remoteURLString = properties.get(SCIMProviderConstants.PROPERTY_NAME_AUTH_SERVER); if (remoteURLString != null) { remoteServiceURL = remoteURLString; } else { remoteServiceURL = LOCAL_AUTH_SERVER; } userName = properties.get(SCIMProviderConstants.PROPERTY_NAME_USERNAME); password = properties.get(SCIMProviderConstants.PROPERTY_NAME_PASSWORD); } private String getOAuthAuthzServerURL() { 
if (remoteServiceURL != null && !remoteServiceURL.endsWith("/")) { remoteServiceURL += "/"; } return remoteServiceURL; } private OAuth2ClientApplicationDTO validateAccessToken(String accessTokenIdentifier) throws Exception { // if it is specified to use local authz server (i.e: local://services) if (remoteServiceURL.startsWith(LOCAL_PREFIX)) { OAuth2TokenValidationRequestDTO oauthValidationRequest = new OAuth2TokenValidationRequestDTO(); OAuth2TokenValidationRequestDTO.OAuth2AccessToken accessToken = oauthValidationRequest.new OAuth2AccessToken(); accessToken.setTokenType(OAuthServiceClient.BEARER_TOKEN_TYPE); accessToken.setIdentifier(accessTokenIdentifier); oauthValidationRequest.setAccessToken(accessToken); OAuth2TokenValidationService oauthValidationService = new OAuth2TokenValidationService(); OAuth2ClientApplicationDTO oauthValidationResponse = oauthValidationService .findOAuthConsumerIfTokenIsValid(oauthValidationRequest); return oauthValidationResponse; } // else do a web service call to the remote authz server try { ConfigurationContext configContext = ConfigurationContextFactory .createConfigurationContextFromFileSystem(null, null); OAuthServiceClient oauthClient = new OAuthServiceClient(getOAuthAuthzServerURL(), userName, password, configContext); org.wso2.carbon.identity.oauth2.stub.dto.OAuth2ClientApplicationDTO validationResponse; validationResponse = oauthClient.findOAuthConsumerIfTokenIsValid(accessTokenIdentifier); OAuth2ClientApplicationDTO appDTO = new OAuth2ClientApplicationDTO(); appDTO.setConsumerKey(validationResponse.getConsumerKey()); OAuth2TokenValidationResponseDTO validationDto = new OAuth2TokenValidationResponseDTO(); validationDto.setAuthorizedUser(validationResponse.getAccessTokenValidationResponse() .getAuthorizedUser()); validationDto .setValid(validationResponse.getAccessTokenValidationResponse().getValid()); appDTO.setAccessTokenValidationResponse(validationDto); return appDTO; } catch (AxisFault axisFault) { throw axisFault; } 
catch (Exception exception) { throw exception; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.discovery.impl.cluster.voting; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import javax.jcr.Property; import javax.jcr.RepositoryException; import javax.jcr.ValueFormatException; import org.apache.sling.api.resource.ModifiableValueMap; import org.apache.sling.api.resource.PersistenceException; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolver; import org.apache.sling.api.resource.ValueMap; import org.apache.sling.discovery.commons.providers.util.ResourceHelper; import org.apache.sling.discovery.impl.Config; import org.apache.sling.discovery.impl.common.View; import org.apache.sling.discovery.impl.common.ViewHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * DAO for an ongoing voting, providing a few helper methods */ public class VotingView extends View { /** * use static logger to avoid frequent initialization as is potentially the * case with ClusterViewResource. 
**/ private final static Logger logger = LoggerFactory .getLogger(VotingView.class); /** * Create a new voting with the given list of instances, the given * voting/view id and the given slingid of the initiator. * @param newViewId the new voting/view id * @param initiatorId the slingid of the initiator * @param liveInstances the list of live instances to add to the voting * @return a DAO object representing the voting */ public static VotingView newVoting(final ResourceResolver resourceResolver, final Config config, final String newViewId, String initiatorId, final Set<String> liveInstances) throws PersistenceException { if (!liveInstances.contains(initiatorId)) { // SLING-4617 : a voting, on a single instance, was created without the local instance // this should in no case happen - the local instance should always be part of the live // instances. if that's not the case, then something's fishy and we should not create // the new voting - and instead rely on a retry later. logger.warn("newVoting: liveInstances does not include initiatorId (local instance) - not creating new, invalid, voting"); return null; } final Resource votingResource = ResourceHelper.getOrCreateResource( resourceResolver, config.getOngoingVotingsPath() + "/" + newViewId); final ModifiableValueMap votingMap = votingResource.adaptTo(ModifiableValueMap.class); votingMap.put("votingStart", Calendar.getInstance()); String clusterId = null; Calendar clusterIdDefinedAt = null; String clusterIdDefinedBy = null; final View currentlyEstablishedView = ViewHelper.getEstablishedView(resourceResolver, config); if (currentlyEstablishedView != null) { final ValueMap establishedViewValueMap = currentlyEstablishedView.getResource().adaptTo(ValueMap.class); clusterId = establishedViewValueMap.get(VIEW_PROPERTY_CLUSTER_ID, String.class); if (clusterId == null || clusterId.length() == 0) { clusterId = currentlyEstablishedView.getResource().getName(); } Date date = 
establishedViewValueMap.get(VIEW_PROPERTY_CLUSTER_ID_DEFINED_AT, Date.class); if (date!=null) { clusterIdDefinedAt = Calendar.getInstance(); clusterIdDefinedAt.setTime(date); } clusterIdDefinedBy = establishedViewValueMap.get(VIEW_PROPERTY_CLUSTER_ID_DEFINED_BY, String.class); } if (clusterId == null || clusterId.length() == 0) { clusterId = newViewId; clusterIdDefinedAt = Calendar.getInstance(); } votingMap.put(VIEW_PROPERTY_CLUSTER_ID, clusterId); if (clusterIdDefinedAt != null) { votingMap.put(VIEW_PROPERTY_CLUSTER_ID_DEFINED_AT, clusterIdDefinedAt); } if (clusterIdDefinedBy == null || clusterIdDefinedBy.length() == 0) { clusterIdDefinedBy = initiatorId; } votingMap.put(VIEW_PROPERTY_CLUSTER_ID_DEFINED_BY, clusterIdDefinedBy); final Resource membersResource = resourceResolver.create(votingResource, "members", null); final Iterator<String> it = liveInstances.iterator(); while (it.hasNext()) { String memberId = it.next(); Map<String, Object> properties = new HashMap<String, Object>(); if (memberId.equals(initiatorId)) { properties.put("initiator", true); properties.put("vote", true); properties.put("votedAt", Calendar.getInstance()); } Resource instanceResource = ResourceHelper.getOrCreateResource( resourceResolver, config.getClusterInstancesPath() + "/" + memberId); String leaderElectionId = instanceResource.adaptTo(ValueMap.class) .get("leaderElectionId", String.class); properties.put("leaderElectionId", leaderElectionId); resourceResolver.create(membersResource, memberId, properties); } logger.debug("newVoting: committing new voting: newViewId="+newViewId+", initiatorId="+initiatorId+", resource="+votingResource+", #members: "+liveInstances.size()+", members: "+liveInstances); resourceResolver.commit(); logger.info("newVoting: new voting started: newViewId="+newViewId+", initiatorId="+initiatorId+", resource="+votingResource+", #members: "+liveInstances.size()+", members: "+liveInstances); return new VotingView(votingResource); } /** * Construct a voting view 
 * based on the given resource.
 * @param viewResource the resource which is the place the voting is kept
 */
public VotingView(final Resource viewResource) {
    super(viewResource);
}

/** Returns the id of this voting, which is the name of the underlying resource. */
public String getVotingId() {
    return getResource().getName();
}

@Override
public String toString() {
    // Best-effort description: never throw from toString(), the resource
    // tree may be concurrently modified/deleted by another instance.
    try {
        final Resource members = getResource().getChild("members");
        String initiatorId = null;
        final StringBuilder sb = new StringBuilder();
        if (members != null) {
            Iterator<Resource> it = members.getChildren().iterator();
            while (it.hasNext()) {
                Resource r = it.next();
                if (sb.length() != 0) {
                    sb.append(", ");
                }
                sb.append(r.getName());
                ValueMap properties = r.adaptTo(ValueMap.class);
                if (properties != null) {
                    // the member that carries initiator=true started this voting
                    Boolean initiator = properties.get("initiator", Boolean.class);
                    if (initiator != null && initiator) {
                        initiatorId = r.getName();
                    }
                }
            }
        }
        return "a VotingView[viewId=" + getViewId() + ", id=" + getResource().getName() + ", initiator="
                + initiatorId + ", members=" + sb + "]";
    } catch(Exception e) {
        // fall back to the superclass description when the resource is unreadable
        return "a VotingView["+super.toString()+"]";
    }
}

/**
 * Checks whether this voting is still ongoing - that is, whether
 * a valid votingStart is set and whether that's within the heartbeat timeout configured
 * @param config the config providing the heartbeat timeout
 * @return true if the voting started less than the heartbeat timeout ago
 */
public boolean isOngoingVoting(final Config config) {
    final long votingStart = getVotingStartTime();
    if (votingStart==-1) {
        // no/invalid votingStart property -> not a valid, ongoing voting
        return false;
    }
    final long now = System.currentTimeMillis();
    final long diff = now - votingStart;
    return diff < config.getHeartbeatTimeoutMillis();
}

/**
 * Checks whether this voting has timed out - that is, whether
 * there is a valid votingStart set and whether that has timed out.
 * NOTE(review): uses strict '>' while isOngoingVoting uses strict '<', so at
 * diff == timeout a voting is neither ongoing nor timed out — confirm intended.
 * @param config the config providing the heartbeat timeout
 * @return true if the voting started more than the heartbeat timeout ago
 */
public boolean isTimedoutVoting(final Config config) {
    final long votingStart = getVotingStartTime();
    if (votingStart==-1) {
        return false;
    }
    final long now = System.currentTimeMillis();
    final long diff = now - votingStart;
    return diff > config.getHeartbeatTimeoutMillis();
}

/**
 * Get the value of the votingStart property - or -1 if anything goes wrong
 * reading that (no properties, no votingStart yet, or the resource vanished).
 */
private long getVotingStartTime() {
    ValueMap properties = null;
    try{
        properties = getResource().adaptTo(ValueMap.class);
    } catch(RuntimeException e) {
        // resource likely still being created by another instance
        logger.info("getVotingStartTime: could not get properties of "+getResource()+". Likely in creation: "+e, e);
        return -1;
    }
    if (properties == null) {
        // no properties, odd. then it's not a valid voting.
        return -1;
    }
    final Date votingStartDate = properties.get("votingStart", Date.class);
    if (votingStartDate == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("getVotingStartTime: got a voting without votingStart. Likely in creation: "
                    + getResource());
        }
        return -1;
    }
    final long votingStart = votingStartDate.getTime();
    return votingStart;
}

/**
 * Checks whether there are any no votes on this voting
 * @return true if any member has an explicit vote=false entry
 */
public boolean hasNoVotes() {
    Resource m = getResource().getChild("members");
    if (m==null) {
        // the vote is being created. wait.
        return false;
    }
    final Iterator<Resource> it = m.getChildren()
            .iterator();
    while (it.hasNext()) {
        Resource aMemberRes = it.next();
        ValueMap properties = aMemberRes.adaptTo(ValueMap.class);
        if (properties == null) {
            // member without properties: cannot have voted no, skip it
            continue;
        }
        Boolean vote = properties.get("vote", Boolean.class);
        if (vote != null && !vote) {
            return true;
        }
    }
    return false;
}

/**
 * Checks whether the given slingId has voted yes for this voting
 * @param slingId the sling id to check for
 * @return true if the given slingId has voted yes for this voting
 */
public boolean hasVotedYes(final String slingId) {
    final Boolean vote = getVote(slingId);
    return vote != null && vote;
}

/**
 * Get the vote of the instance with the given slingId
 * @param slingId the instance to look up under the members child resource
 * @return null if that instance did not vote yet (or the structure
 * is faulty), true if the instance voted yes, false if it voted no
 */
public Boolean getVote(String slingId) {
    Resource members = getResource().getChild("members");
    if (members==null) {
        return null;
    }
    final Resource memberResource = members.getChild(
            slingId);
    if (memberResource == null) {
        return null;
    }
    final ValueMap properties = memberResource.adaptTo(ValueMap.class);
    if (properties == null) {
        return null;
    }
    final Boolean vote = properties.get("vote", Boolean.class);
    return vote;
}

/**
 * Checks whether this voting was initiated by the given slingId
 * (i.e. whether that member's entry carries initiator=true)
 * @return whether this voting was initiated by the given slingId
 */
public boolean isInitiatedBy(final String slingId) {
    Resource r = getResource();
    if (r == null) {
        return false;
    }
    Resource members = r.getChild("members");
    if (members == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("isInitiatedBy: slingId=" + slingId + ", members null!");
        }
        return false;
    }
    final Resource memberResource = members.getChild(
            slingId);
    if (memberResource == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("isInitiatedBy: slingId=" + slingId + ", memberResource null!");
        }
        return false;
    }
    final ValueMap properties = memberResource.adaptTo(ValueMap.class);
    if (properties == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("isInitiatedBy: slingId=" + slingId + ", properties null!");
        }
        return false;
    }
    final Boolean initiator = properties.get("initiator", Boolean.class);
    boolean result = initiator != null && initiator;
    if (logger.isDebugEnabled()) {
        logger.debug("isInitiatedBy: slingId=" + slingId + ", initiator=" + initiator + ", result=" + result);
    }
    return result;
}

/**
 * add a vote from the given slingId to this voting
 * @param slingId the slingId which is voting
 * @param vote true for a yes-vote, false for a no-vote, null removes the vote
 * @param leaderElectionId the voter's current leaderElectionId, stored on
 *        the member entry when it differs from what is already recorded
 */
public void vote(final String slingId, final Boolean vote,
        final String leaderElectionId) {
    if (logger.isDebugEnabled()) {
        logger.debug("vote: slingId=" + slingId + ", vote=" + vote);
    }
    Resource r = getResource();
    if (r == null) {
        logger.error("vote: no resource set. slingId = " + slingId + ", vote=" + vote);
        return;
    }
    Resource members = r.getChild("members");
    if (members == null) {
        logger.error("vote: no members resource available for " + r + ". slingId = " + slingId + ", vote=" + vote);
        return;
    }
    final Resource memberResource = members.getChild(
            slingId);
    if (memberResource == null) {
        if (vote == null || !vote) {
            // if I wanted to vote no or empty, then it's no big deal
            // that I can't find my entry ..
            logger.debug("vote: no memberResource found for slingId=" + slingId
                    + ", vote=" + vote + ", resource=" + getResource());
        } else {
            // if I wanted to vote yes, then it is a big deal that I can't find myself
            logger.error("vote: no memberResource found for slingId=" + slingId
                    + ", vote=" + vote + ", resource=" + getResource());
        }
        return;
    }
    final ModifiableValueMap memberMap = memberResource.adaptTo(ModifiableValueMap.class);

    if (vote == null) {
        // null means: withdraw any existing vote
        if (memberMap.containsKey("vote")) {
            logger.info("vote: removing vote (vote==null) of slingId="+slingId+" on: "+this);
        } else {
            logger.debug("vote: removing vote (vote==null) of slingId="+slingId+" on: "+this);
        }
        memberMap.remove("vote");
    } else {
        boolean shouldVote = true;
        try {
            if (memberMap.containsKey("vote")) {
                // avoid re-writing an identical vote; the stored value may
                // surface either as a JCR Property or as a plain Boolean
                Object v = memberMap.get("vote");
                if (v instanceof Property) {
                    Property p = (Property)v;
                    if (p.getBoolean() == vote) {
                        logger.debug("vote: already voted, with same vote ("+vote+"), not voting again");
                        shouldVote = false;
                    }
                } else if (v instanceof Boolean) {
                    Boolean b = (Boolean)v;
                    if (b == vote) {
                        logger.debug("vote: already voted, with same vote ("+vote+"), not voting again");
                        shouldVote = false;
                    }
                }
            }
        } catch (ValueFormatException e) {
            logger.warn("vote: got a ValueFormatException: "+e, e);
        } catch (RepositoryException e) {
            logger.warn("vote: got a RepositoryException: "+e, e);
        }
        if (shouldVote) {
            logger.info("vote: slingId=" + slingId + " is voting vote=" + vote+" on "+getResource());
            memberMap.put("vote", vote);
            memberMap.put("votedAt", Calendar.getInstance());
            String currentLeaderElectionId = memberMap.get("leaderElectionId", String.class);
            if (leaderElectionId!=null &&
                    (currentLeaderElectionId == null ||
                    !currentLeaderElectionId.equals(leaderElectionId))) {
                // SLING-5030 : to ensure leader-step-down after being
                // isolated from the cluster, the leaderElectionId must
                // be explicitly set upon voting.
                // for 99% of the cases not be necessary,
                // for the rejoin-after-isolation case however it is
                logger.info("vote: changing leaderElectionId on vote to "+leaderElectionId);
                memberMap.put("leaderElectionId", leaderElectionId);
                memberMap.put("leaderElectionIdCreatedAt", new Date());
            }
        }
    }
    try {
        getResource().getResourceResolver().commit();
    } catch (PersistenceException e) {
        logger.error("vote: PersistenceException while voting: "+e, e);
    }
}

/**
 * Checks whether this voting is winning - winning is when it has
 * votes from each of the members and all are yes votes
 * @return true if this voting is winning
 */
public boolean isWinning() {
    final Resource members = getResource().getChild("members");
    if (members==null) {
        // the vote is being created. wait.
        return false;
    }
    try{
        final Iterable<Resource> children = members.getChildren();
        final Iterator<Resource> it = children.iterator();
        boolean isWinning = false;
        while (it.hasNext()) {
            Resource aMemberRes = it.next();
            try{
                ValueMap properties = aMemberRes.adaptTo(ValueMap.class);
                Boolean vote = properties.get("vote", Boolean.class);
                if (vote != null && vote) {
                    isWinning = true;
                    continue;
                }
                // a missing or no-vote means this voting cannot win
                return false;
            } catch(RuntimeException re) {
                logger.info("isWinning: Could not check vote due to "+re);
                return false;
            }
        }
        return isWinning;
    } catch(RuntimeException re) {
        // SLING-2945: gracefully handle case where members node is
        // deleted by another instance
        logger.info("isWinning: could not check vote due to "+re);
        return false;
    }
}

/**
 * Checks if this voting matches the current live view.
 * Delegates to matchesLiveView(Resource, Config) — defined elsewhere,
 * not visible in this chunk.
 * @throws Exception when something failed during matching
 */
public String matchesLiveView(final Config config) throws Exception {
    Resource clusterNodesRes = getResource().getResourceResolver()
            .getResource(config.getClusterInstancesPath());
    if (clusterNodesRes == null) {
        throw new Exception("no clusterNodesRes["+getResource()+"]");
    }
    return matchesLiveView(clusterNodesRes, config);
}
}
package org.apache.ddlutils.platform.firebird;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.IOException;
import java.sql.Types;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.ddlutils.Platform;
import org.apache.ddlutils.alteration.AddColumnChange;
import org.apache.ddlutils.alteration.AddPrimaryKeyChange;
import org.apache.ddlutils.alteration.RemoveColumnChange;
import org.apache.ddlutils.alteration.TableChange;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.Index;
import org.apache.ddlutils.model.Table;
import org.apache.ddlutils.platform.SqlBuilder;
import org.apache.ddlutils.util.Jdbc3Utils;

/**
 * The SQL Builder for the FireBird database. Auto-increment columns are
 * emulated via a generator plus a BEFORE INSERT trigger per column.
 *
 * @version $Revision: 231306 $
 */
public class FirebirdBuilder extends SqlBuilder
{
    /**
     * Creates a new builder instance.
     *
     * @param platform The plaftform this builder belongs to
     */
    public FirebirdBuilder(Platform platform)
    {
        super(platform);
        // Firebird escapes single quotes by doubling them
        addEscapedCharSequence("'", "''");
    }

    /**
     * {@inheritDoc}
     */
    public void createTable(Database database, Table table, Map parameters) throws IOException
    {
        super.createTable(database, table, parameters);

        // creating generator and trigger for auto-increment
        Column[] columns = table.getAutoIncrementColumns();

        for (int idx = 0; idx < columns.length; idx++)
        {
            writeAutoIncrementCreateStmts(database, table, columns[idx]);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void dropTable(Table table) throws IOException
    {
        // dropping generators for auto-increment
        Column[] columns = table.getAutoIncrementColumns();

        for (int idx = 0; idx < columns.length; idx++)
        {
            writeAutoIncrementDropStmts(table, columns[idx]);
        }
        super.dropTable(table);
    }

    /**
     * Writes the creation statements to make the given column an auto-increment column.
     *
     * @param database The database model
     * @param table    The table
     * @param column   The column to make auto-increment
     */
    private void writeAutoIncrementCreateStmts(Database database, Table table, Column column) throws IOException
    {
        print("CREATE GENERATOR ");
        printIdentifier(getGeneratorName(table, column));
        printEndOfStatement();

        // the trigger fills in the column from the generator when no
        // explicit value is supplied on INSERT
        print("CREATE TRIGGER ");
        printIdentifier(getConstraintName("trg", table, column.getName(), null));
        print(" FOR ");
        printlnIdentifier(getTableName(table));
        println("ACTIVE BEFORE INSERT POSITION 0 AS");
        print("BEGIN IF (NEW.");
        printIdentifier(getColumnName(column));
        print(" IS NULL) THEN NEW.");
        printIdentifier(getColumnName(column));
        print(" = GEN_ID(");
        printIdentifier(getGeneratorName(table, column));
        print(", 1); END");
        printEndOfStatement();
    }

    /**
     * Writes the statements to drop the auto-increment status for the given column.
     *
     * @param table  The table
     * @param column The column to remove the auto-increment status for
     */
    private void writeAutoIncrementDropStmts(Table table, Column column) throws IOException
    {
        print("DROP TRIGGER ");
        printIdentifier(getConstraintName("trg", table, column.getName(), null));
        printEndOfStatement();

        print("DROP GENERATOR ");
        printIdentifier(getGeneratorName(table, column));
        printEndOfStatement();
    }

    /**
     * Determines the name of the generator for an auto-increment column.
     *
     * @param table  The table
     * @param column The auto-increment column
     * @return The generator name
     */
    protected String getGeneratorName(Table table, Column column)
    {
        return getConstraintName("gen", table, column.getName(), null);
    }

    /**
     * {@inheritDoc}
     */
    protected void writeColumnAutoIncrementStmt(Table table, Column column) throws IOException
    {
        // we're using a generator
    }

    /**
     * {@inheritDoc}
     */
    public String getSelectLastIdentityValues(Table table)
    {
        Column[] columns = table.getAutoIncrementColumns();

        if (columns.length == 0)
        {
            return null;
        }
        else
        {
            StringBuffer result = new StringBuffer();

            result.append("SELECT ");
            for (int idx = 0; idx < columns.length; idx++)
            {
                // BUGFIX: separate the GEN_ID expressions with a comma; the
                // previous code concatenated them directly, which produced
                // invalid SQL for tables with more than one auto-increment column
                if (idx > 0)
                {
                    result.append(",");
                }
                result.append("GEN_ID(");
                result.append(getDelimitedIdentifier(getGeneratorName(table, columns[idx])));
                result.append(", 0)");
            }
            result.append(" FROM RDB$DATABASE");
            return result.toString();
        }
    }

    /**
     * {@inheritDoc}
     */
    protected String getNativeDefaultValue(Column column)
    {
        // Firebird has no BIT/BOOLEAN type; booleans are stored as SMALLINT
        if ((column.getTypeCode() == Types.BIT) ||
            (Jdbc3Utils.supportsJava14JdbcTypes() && (column.getTypeCode() == Jdbc3Utils.determineBooleanTypeCode())))
        {
            return getDefaultValueHelper().convert(column.getDefaultValue(), column.getTypeCode(), Types.SMALLINT).toString();
        }
        else
        {
            return super.getNativeDefaultValue(column);
        }
    }

    /**
     * {@inheritDoc}
     */
    public void createExternalForeignKeys(Database database) throws IOException
    {
        for (int idx = 0; idx < database.getTableCount(); idx++)
        {
            createExternalForeignKeys(database, database.getTable(idx));
        }
    }

    /**
     * {@inheritDoc}
     */
    public void writeExternalIndexDropStmt(Table table, Index index) throws IOException
    {
        // Index names in Firebird are unique to a schema and hence Firebird does not
        // use the ON <tablename> clause
        print("DROP INDEX ");
        printIdentifier(getIndexName(index));
        printEndOfStatement();
    }

    /**
     * {@inheritDoc}
     */
    protected void processTableStructureChanges(Database currentModel,
                                                Database desiredModel,
                                                Table    sourceTable,
                                                Table    targetTable,
                                                Map      parameters,
                                                List     changes) throws IOException
    {
        // TODO: Dropping of primary keys is currently not supported because we cannot
        //       determine the pk constraint names and drop them in one go
        //       (We could used a stored procedure if Firebird would allow them to use DDL)
        //       This will be easier once named primary keys are supported
        boolean pkColumnAdded = false;

        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();)
        {
            TableChange change = (TableChange)changeIt.next();

            if (change instanceof AddColumnChange)
            {
                AddColumnChange addColumnChange = (AddColumnChange)change;

                // TODO: we cannot add columns to the primary key this way
                //       because we would have to drop the pk first and then
                //       add a new one afterwards which is not supported yet
                if (addColumnChange.getNewColumn().isPrimaryKey())
                {
                    pkColumnAdded = true;
                }
                else
                {
                    processChange(currentModel, desiredModel, addColumnChange);
                    changeIt.remove();
                }
            }
            else if (change instanceof RemoveColumnChange)
            {
                RemoveColumnChange removeColumnChange = (RemoveColumnChange)change;

                // TODO: we cannot drop primary key columns this way
                //       because we would have to drop the pk first and then
                //       add a new one afterwards which is not supported yet
                if (!removeColumnChange.getColumn().isPrimaryKey())
                {
                    processChange(currentModel, desiredModel, removeColumnChange);
                    changeIt.remove();
                }
            }
        }
        for (Iterator changeIt = changes.iterator(); changeIt.hasNext();)
        {
            TableChange change = (TableChange)changeIt.next();

            // we can only add a primary key if all columns are present in the table
            // i.e. none was added during this alteration
            if ((change instanceof AddPrimaryKeyChange) && !pkColumnAdded)
            {
                processChange(currentModel, desiredModel, (AddPrimaryKeyChange)change);
                changeIt.remove();
            }
        }
    }

    /**
     * Processes the addition of a column to a table.
     *
     * @param currentModel The current database schema
     * @param desiredModel The desired database schema
     * @param change       The change object
     */
    protected void processChange(Database        currentModel,
                                 Database        desiredModel,
                                 AddColumnChange change) throws IOException
    {
        print("ALTER TABLE ");
        printlnIdentifier(getTableName(change.getChangedTable()));
        printIndent();
        print("ADD ");
        writeColumn(change.getChangedTable(), change.getNewColumn());
        printEndOfStatement();

        Table curTable = currentModel.findTable(change.getChangedTable().getName(), getPlatform().isDelimitedIdentifierModeOn());

        if (!change.isAtEnd())
        {
            Column prevColumn = change.getPreviousColumn();

            if (prevColumn != null)
            {
                // we need the corresponding column object from the current table
                prevColumn = curTable.findColumn(prevColumn.getName(), getPlatform().isDelimitedIdentifierModeOn());
            }
            // Even though Firebird can only add columns, we can move them later on
            print("ALTER TABLE ");
            printlnIdentifier(getTableName(change.getChangedTable()));
            printIndent();
            print("ALTER ");
            printIdentifier(getColumnName(change.getNewColumn()));
            print(" POSITION ");
            // column positions start at 1 in Firebird
            print(prevColumn == null ? "1" : String.valueOf(curTable.getColumnIndex(prevColumn) + 2));
            printEndOfStatement();
        }
        if (change.getNewColumn().isAutoIncrement())
        {
            writeAutoIncrementCreateStmts(currentModel, curTable, change.getNewColumn());
        }
        change.apply(currentModel, getPlatform().isDelimitedIdentifierModeOn());
    }

    /**
     * Processes the removal of a column from a table.
     *
     * @param currentModel The current database schema
     * @param desiredModel The desired database schema
     * @param change       The change object
     */
    protected void processChange(Database           currentModel,
                                 Database           desiredModel,
                                 RemoveColumnChange change) throws IOException
    {
        if (change.getColumn().isAutoIncrement())
        {
            // generator and trigger have to go before the column does
            writeAutoIncrementDropStmts(change.getChangedTable(), change.getColumn());
        }
        print("ALTER TABLE ");
        printlnIdentifier(getTableName(change.getChangedTable()));
        printIndent();
        print("DROP ");
        printIdentifier(getColumnName(change.getColumn()));
        printEndOfStatement();
        change.apply(currentModel, getPlatform().isDelimitedIdentifierModeOn());
    }
}
package com.idc.webchannel.pac4j.extensions.saml.client;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.io.InputStream;
import java.security.Key;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.Security;
import java.security.UnrecoverableKeyException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.junit.Before;
import org.junit.Test;
import org.pac4j.core.context.WebContext;
import org.springframework.core.io.Resource;

import com.idc.webchannel.pac4j.extensions.saml.dao.api.DbLoadedSamlClientConfigurationDto;
import com.idc.webchannel.pac4j.extensions.saml.dao.api.SamlClientDao;

/**
 * Unit test of {@link DatabaseLoadedSAML2ClientConfiguration}.
 *
 * The configuration under test is initialized from a mocked {@link SamlClientDao}
 * whose single client entry points at test keystore/metadata resources on the
 * classpath.
 *
 * @author jkacer
 */
public class DatabaseLoadedSAML2ClientConfigurationTest {

    // Fixture values for the single mocked SAML client (index 0)
    private static final String[] CLIENT_NAMES = {"SAML_0"};
    private static final String ENVIRONMENT = "Unit_Test_Env";
    private static final String[] KEYSTORE_PASSWORDS = {"Keystore_Pwd_0"};
    private static final String[] KEYSTORE_ALIASES = {"SP0"};
    private static final String[] PRIV_KEY_PASSWORDS = {"Priv_Key_Pwd_0"};
    private static final String[] IDP_ENTITY_IDS = {"https://idp.testshib.org/idp/shibboleth"};
    private static final String[] SP_ENTITY_IDS = {"urn:idc:authentication:saml2:entity:unittest:sp1"};
    private static final int[] MAX_AUTH_LIFETIMES = {3600};
    private static final String[] DEST_BINDING_TYPES = {"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"};

    // Classpath locations of the per-client test fixtures, parameterized by index
    private static final String PATH_TO_TEST_KEYSTORES = "/com/idc/webchannel/pac4j/extensions/saml/client/SP_Keystore_%d.jks";
    private static final String PATH_TO_TEST_METADATA = "/com/idc/webchannel/pac4j/extensions/saml/client/IdP_Metadata_%d.xml";

    private DatabaseLoadedSAML2ClientConfiguration configurationUnderTest;
    private WebContext webContextMock;
    private SamlClientDao samlClientDaoMock;

    // ------------------------------------------------------------------------------------------------------------------------------------

    /** Wires the mocked DAO into a fresh configuration and initializes it for client 0. */
    @Before
    public void setUp() throws IOException {
        samlClientDaoMock = createSamlClientDaoMock();
        configurationUnderTest = new DatabaseLoadedSAML2ClientConfiguration(samlClientDaoMock);
        webContextMock = mock(WebContext.class);
        configurationUnderTest.init(CLIENT_NAMES[0], webContextMock);
    }

    /** Creates a DAO mock returning a single client configuration DTO. */
    private SamlClientDao createSamlClientDaoMock() throws IOException {
        List<DbLoadedSamlClientConfigurationDto> clients = new ArrayList<>();
        DbLoadedSamlClientConfigurationDto client0 = clientDto(0);
        clients.add(client0);

        SamlClientDao scd = mock(SamlClientDao.class);
        when(scd.loadClientNames()).thenReturn(Arrays.asList(CLIENT_NAMES));
        when(scd.loadAllClients()).thenReturn(clients);
        when(scd.loadClient(CLIENT_NAMES[0])).thenReturn(client0);
        return scd;
    }

    /** Builds the client DTO for the given fixture index, loading keystore and metadata from the classpath. */
    private DbLoadedSamlClientConfigurationDto clientDto(int index) throws IOException {
        DbLoadedSamlClientConfigurationDto dto = new DbLoadedSamlClientConfigurationDto();
        dto.setClientName(CLIENT_NAMES[index]);
        dto.setEnvironment(ENVIRONMENT);
        dto.setKeystoreBinaryData(loadKeystoreBinaryData(index));
        dto.setKeystorePassword(KEYSTORE_PASSWORDS[index]);
        dto.setKeystoreAlias(KEYSTORE_ALIASES[index]);
        dto.setPrivateKeyPassword(PRIV_KEY_PASSWORDS[index]);
        dto.setIdentityProviderMetadata(loadIdentityProviderMetadata(index));
        dto.setIdentityProviderEntityId(IDP_ENTITY_IDS[index]);
        dto.setServiceProviderEntityId(SP_ENTITY_IDS[index]);
        dto.setMaximumAuthenticationLifetime(MAX_AUTH_LIFETIMES[index]);
        dto.setDestinationBindingType(DEST_BINDING_TYPES[index]);
        return dto;
    }

    /** Reads the test keystore for the given index as raw bytes. */
    private byte[] loadKeystoreBinaryData(int index) throws IOException {
        final String path = String.format(PATH_TO_TEST_KEYSTORES, index);
        try (InputStream is = DatabaseLoadedSAML2ClientConfigurationTest.class.getResourceAsStream(path)) {
            return IOUtils.toByteArray(is);
        }
    }

    /** Reads the test IdP metadata for the given index as a UTF-8 string. */
    private String loadIdentityProviderMetadata(int index) throws IOException {
        final String path = String.format(PATH_TO_TEST_METADATA, index);
        try (InputStream is = DatabaseLoadedSAML2ClientConfigurationTest.class.getResourceAsStream(path)) {
            return IOUtils.toString(is, "UTF-8");
        }
    }

    // ------------------------------------------------------------------------------------------------------------------------------------

    @Test
    public void basicGettersWork() {
        assertEquals(CLIENT_NAMES[0], configurationUnderTest.getClientName());
        assertNotNull(configurationUnderTest.getKeystoreResource());
        assertNotNull(configurationUnderTest.getIdentityProviderMetadataResource());
        assertEquals(DEST_BINDING_TYPES[0], configurationUnderTest.getDestinationBindingType());
        assertEquals(IDP_ENTITY_IDS[0], configurationUnderTest.getIdentityProviderEntityId());
        assertEquals(KEYSTORE_ALIASES[0], configurationUnderTest.getKeyStoreAlias());
        assertEquals(KEYSTORE_PASSWORDS[0], configurationUnderTest.getKeystorePassword());
        assertEquals(KeyStore.getDefaultType(), configurationUnderTest.getKeyStoreType());
        assertEquals(MAX_AUTH_LIFETIMES[0], configurationUnderTest.getMaximumAuthenticationLifetime());
        assertEquals(SP_ENTITY_IDS[0], configurationUnderTest.getServiceProviderEntityId());
    }

    @Test
    public void keystoreIsOk() throws KeyStoreException, UnrecoverableKeyException, NoSuchAlgorithmException, CertificateException, IOException {
        Resource keystoreRes = configurationUnderTest.getKeystoreResource();
        assertNotNull(keystoreRes);
        KeyStore ks = keystoreFromResource(keystoreRes);
        assertNotNull(ks);

        // the keystore must contain exactly our SP key entry plus its certificate
        Enumeration<String> aliases = ks.aliases();
        assertTrue(aliases.hasMoreElements());
        assertEquals(KEYSTORE_ALIASES[0].toUpperCase(), aliases.nextElement().toUpperCase());
        assertTrue(ks.isKeyEntry(KEYSTORE_ALIASES[0]));
        Key key = ks.getKey(KEYSTORE_ALIASES[0], PRIV_KEY_PASSWORDS[0].toCharArray());
        assertTrue(key instanceof PrivateKey);
        Certificate cert = ks.getCertificate(KEYSTORE_ALIASES[0]);
        assertNotNull(cert);
        PublicKey publicKey = cert.getPublicKey();
        assertNotNull(publicKey);
    }

    @Test
    public void keystoreResourceIsOk() throws IOException {
        Resource r = configurationUnderTest.getKeystoreResource();
        assertNotNull(r);
        assertTrue(r.exists());
        assertNull(r.getFilename());
        InputStream is = r.getInputStream();
        assertNotNull(is);
        is.close();
    }

    @Test
    public void idpMetadataProvidederResourceIsOk() throws IOException {
        Resource r = configurationUnderTest.getIdentityProviderMetadataResource();
        assertNotNull(r);
        assertTrue(r.exists());
        assertNull(r.getFilename());
        InputStream is = r.getInputStream();
        assertNotNull(is);
        is.close();
    }

    /**
     * Loads a {@link KeyStore} from the given Spring resource.
     *
     * @param res the resource holding the keystore binary data
     * @return the loaded keystore
     */
    private KeyStore keystoreFromResource(Resource res) throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException {
        Security.addProvider(new BouncyCastleProvider());
        final KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
        final char[] password = KEYSTORE_PASSWORDS[0].toCharArray();
        // BUGFIX: KeyStore.load() does not close the stream it is given;
        // the original code leaked the InputStream obtained from the resource.
        try (InputStream is = res.getInputStream()) {
            ks.load(is, password);
        }
        return ks;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.dag.api; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.tez.dag.api.EdgeProperty.DataMovementType; import org.apache.tez.dag.api.VertexGroup.GroupInfo; import org.apache.tez.dag.api.EdgeProperty.DataSourceType; import org.apache.tez.dag.api.EdgeProperty.SchedulingType; import org.apache.tez.dag.api.VertexLocationHint.TaskLocationHint; import org.apache.tez.dag.api.records.DAGProtos.ConfigurationProto; import org.apache.tez.dag.api.records.DAGProtos.DAGPlan; import org.apache.tez.dag.api.records.DAGProtos.EdgePlan; import 
org.apache.tez.dag.api.records.DAGProtos.PlanGroupInputEdgeInfo;
import org.apache.tez.dag.api.records.DAGProtos.PlanVertexGroupInfo;
import org.apache.tez.dag.api.records.DAGProtos.PlanKeyValuePair;
import org.apache.tez.dag.api.records.DAGProtos.PlanLocalResource;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskConfiguration;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskLocationHint;
import org.apache.tez.dag.api.records.DAGProtos.PlanVertexType;
import org.apache.tez.dag.api.records.DAGProtos.VertexPlan;

import com.google.common.base.Preconditions;
import org.apache.commons.collections4.bidimap.DualLinkedHashBidiMap;
import org.apache.commons.collections4.BidiMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * A Tez DAG: named vertices connected by edges, optionally grouped into
 * vertex groups whose group edges are expanded to plain member edges at
 * verification time. Mutating methods are synchronized on the DAG instance.
 */
public class DAG { // FIXME rename to Topology
    // vertex name <-> Vertex, preserving insertion order in both directions
    final BidiMap<String, Vertex> vertices =
        new DualLinkedHashBidiMap<String, Vertex>();
    final Set<Edge> edges = Sets.newHashSet();
    final String name;
    // URIs for which credentials should be obtained before running the DAG
    final Collection<URI> urisForCredentials = new HashSet<URI>();
    Credentials credentials;
    Set<VertexGroup> vertexGroups = Sets.newHashSet();
    Set<GroupInputEdge> groupInputEdges = Sets.newHashSet();

    /** Creates an empty DAG with the given name. */
    public DAG(String name) {
        this.name = name;
    }

    /**
     * Adds a vertex to the DAG.
     * @throws IllegalStateException if a vertex with the same name already exists
     */
    public synchronized DAG addVertex(Vertex vertex) {
        if (vertices.containsKey(vertex.getVertexName())) {
            throw new IllegalStateException(
                "Vertex " + vertex.getVertexName() + " already defined!");
        }
        vertices.put(vertex.getVertexName(), vertex);
        return this;
    }

    /** Looks up a vertex by its name, or null if not present. */
    public synchronized Vertex getVertex(String vertexName) {
        return vertices.get(vertexName);
    }

    /**
     * One of the methods that can be used to provide information about required
     * Credentials when running on a secure cluster. A combination of this and
     * addURIsForCredentials should be used to specify information about all
     * credentials required by a DAG. AM specific credentials are not used when
     * executing a DAG.
     *
     * Set credentials which will be required to run this dag. This method can be
     * used if the client has already obtained some or all of the required
     * credentials.
     *
     * @param credentials Credentials for the DAG
     * @return this
     */
    public synchronized DAG setCredentials(Credentials credentials) {
        this.credentials = credentials;
        return this;
    }

    /** Creates a vertex group over the given member vertices and registers it on this DAG. */
    public synchronized VertexGroup createVertexGroup(String name, Vertex... members) {
        VertexGroup uv = new VertexGroup(name, members);
        vertexGroups.add(uv);
        return uv;
    }

    @Private
    public synchronized Credentials getCredentials() {
        return this.credentials;
    }

    /**
     * One of the methods that can be used to provide information about required
     * Credentials when running on a secure cluster. A combination of this and
     * setCredentials should be used to specify information about all credentials
     * required by a DAG. AM specific credentials are not used when executing a
     * DAG.
     *
     * This method can be used to specify a list of URIs for which Credentials
     * need to be obtained so that the job can run. An incremental list of URIs
     * can be provided by making multiple calls to the method.
     *
     * Currently, credentials can only be fetched for HDFS and other
     * {@link org.apache.hadoop.fs.FileSystem} implementations.
     *
     * @param uris
     *          a list of {@link URI}s
     * @return the DAG instance being used
     */
    public synchronized DAG addURIsForCredentials(Collection<URI> uris) {
        Preconditions.checkNotNull(uris, "URIs cannot be null");
        urisForCredentials.addAll(uris);
        return this;
    }

    /**
     * @return an unmodifiable list representing the URIs for which credentials
     *         are required.
     */
    @Private
    public synchronized Collection<URI> getURIsForCredentials() {
        return Collections.unmodifiableCollection(urisForCredentials);
    }

    @Private
    public synchronized Set<Vertex> getVertices() {
        return Collections.unmodifiableSet(this.vertices.values());
    }

    /**
     * Adds an edge between two vertices already registered on this DAG.
     * Also informs both endpoint vertices about each other.
     * @throws IllegalArgumentException if either endpoint is unknown or the edge exists
     */
    public synchronized DAG addEdge(Edge edge) {
        // Sanity checks
        if (!vertices.containsValue(edge.getInputVertex())) {
            throw new IllegalArgumentException(
                "Input vertex " + edge.getInputVertex() + " doesn't exist!");
        }
        if (!vertices.containsValue(edge.getOutputVertex())) {
            throw new IllegalArgumentException(
                "Output vertex " + edge.getOutputVertex() + " doesn't exist!");
        }
        if (edges.contains(edge)) {
            throw new IllegalArgumentException(
                "Edge " + edge + " already defined!");
        }

        // inform the vertices
        edge.getInputVertex().addOutputVertex(edge.getOutputVertex(), edge);
        edge.getOutputVertex().addInputVertex(edge.getInputVertex(), edge);

        edges.add(edge);
        return this;
    }

    /**
     * Adds an edge from a vertex group to a destination vertex. The group edge
     * is expanded into per-member edges later, in processEdgesAndGroups().
     * @throws IllegalArgumentException if the group/vertex is unknown or the edge exists
     */
    public synchronized DAG addEdge(GroupInputEdge edge) {
        // Sanity checks
        if (!vertexGroups.contains(edge.getInputVertexGroup())) {
            throw new IllegalArgumentException(
                "Input vertex " + edge.getInputVertexGroup() + " doesn't exist!");
        }
        if (!vertices.containsValue(edge.getOutputVertex())) {
            throw new IllegalArgumentException(
                "Output vertex " + edge.getOutputVertex() + " doesn't exist!");
        }
        if (groupInputEdges.contains(edge)) {
            throw new IllegalArgumentException(
                "Edge " + edge + " already defined!");
        }

        VertexGroup av = edge.getInputVertexGroup();
        av.addOutputVertex(edge.getOutputVertex(), edge);
        groupInputEdges.add(edge);
        return this;
    }

    public String getName() {
        return this.name;
    }

    /**
     * Expands vertex groups: turns each GroupInputEdge into plain member->destination
     * edges and pushes group outputs down onto the member vertices.
     * Called from verify(); NOTE(review): appears not idempotent (re-adding an
    * existing edge throws) — presumably verify() is invoked only once per DAG.
     */
    private void processEdgesAndGroups() throws IllegalStateException {
        // process all VertexGroups by transferring outgoing connections to the members
        // add edges between VertexGroup members and destination vertices
        List<Edge> newEdges = Lists.newLinkedList();
        for (GroupInputEdge e : groupInputEdges) {
            Vertex dstVertex = e.getOutputVertex();
            VertexGroup uv = e.getInputVertexGroup();
            for (Vertex member : uv.getMembers()) {
                newEdges.add(new Edge(member, dstVertex, e.getEdgeProperty()));
            }
            dstVertex.addGroupInput(uv.getGroupName(), uv.getGroupInfo());
        }
        for (Edge e : newEdges) {
            addEdge(e);
        }

        // add outputs to VertexGroup members
        for(VertexGroup av : vertexGroups) {
            for (RootInputLeafOutput<OutputDescriptor> output : av.getOutputs()) {
                for (Vertex member : av.getMembers()) {
                    member.addAdditionalOutput(output);
                }
            }
        }
    }

    // AnnotatedVertex is used by verify()
    private static class AnnotatedVertex {
        Vertex v;

        int index;       //for Tarjan's algorithm
        int lowlink;     //for Tarjan's algorithm
        boolean onstack; //for Tarjan's algorithm

        private AnnotatedVertex(Vertex v) {
            this.v = v;
            index = -1;   // -1 marks "not yet visited" by the DFS
            lowlink = -1;
        }
    }

    // verify()
    //
    // Default rules
    // Illegal:
    // - duplicate vertex id
    // - cycles
    //
    // Ok:
    // - orphaned vertex.  Occurs in map-only
    // - islands.  Occurs if job has unrelated workflows.
    //
    // Not yet categorized:
    // - orphaned vertex in DAG of >1 vertex.  Could be unrelated map-only job.
    // - v1->v2 via two edges.  perhaps some self-join job would use this?
    //
    // "restricted" mode:
    // In short term, the supported DAGs are limited. Call with restricted=true for these verifications.
    // Illegal:
    // - any vertex with more than one input or output edge.
//   (n-ary input, n-ary merge)
public void verify() throws IllegalStateException {
  verify(true);
}

/**
 * Validates the DAG structure: non-empty, unique vertex names, no name
 * collisions between vertices and Inputs/Outputs, and no cycles. Also
 * expands vertex groups (see processEdgesAndGroups()).
 *
 * @param restricted when true, additionally enforces the currently supported
 *                   edge property combinations (PERSISTED + SEQUENTIAL)
 * @throws IllegalStateException when any rule is violated
 */
public void verify(boolean restricted) throws IllegalStateException {
  if (vertices.isEmpty()) {
    throw new IllegalStateException("Invalid dag containing 0 vertices");
  }

  processEdgesAndGroups();

  // check for valid vertices, duplicate vertex names,
  // and prepare for cycle detection
  Map<String, AnnotatedVertex> vertexMap = new HashMap<String, AnnotatedVertex>();
  Map<Vertex, Set<String>> inboundVertexMap = new HashMap<Vertex, Set<String>>();
  Map<Vertex, Set<String>> outboundVertexMap = new HashMap<Vertex, Set<String>>();
  for (Vertex v : vertices.values()) {
    if (vertexMap.containsKey(v.getVertexName())) {
      throw new IllegalStateException("DAG contains multiple vertices"
          + " with name: " + v.getVertexName());
    }
    vertexMap.put(v.getVertexName(), new AnnotatedVertex(v));
  }

  Map<Vertex, List<Edge>> edgeMap = new HashMap<Vertex, List<Edge>>();
  for (Edge e : edges) {
    // Construct structure for cycle detection
    Vertex inputVertex = e.getInputVertex();
    Vertex outputVertex = e.getOutputVertex();
    List<Edge> edgeList = edgeMap.get(inputVertex);
    if (edgeList == null) {
      edgeList = new ArrayList<Edge>();
      edgeMap.put(inputVertex, edgeList);
    }
    edgeList.add(e);

    // Construct map for Input name verification
    Set<String> inboundSet = inboundVertexMap.get(outputVertex);
    if (inboundSet == null) {
      inboundSet = new HashSet<String>();
      inboundVertexMap.put(outputVertex, inboundSet);
    }
    inboundSet.add(inputVertex.getVertexName());

    // Construct map for Output name verification
    Set<String> outboundSet = outboundVertexMap.get(inputVertex);
    if (outboundSet == null) {
      outboundSet = new HashSet<String>();
      outboundVertexMap.put(inputVertex, outboundSet);
    }
    outboundSet.add(outputVertex.getVertexName());
  }

  // check input and output names dont collide with vertex names
  for (Vertex vertex : vertices.values()) {
    for (RootInputLeafOutput<InputDescriptor> input : vertex.getInputs()) {
      if (vertexMap.containsKey(input.getName())) {
        throw new IllegalStateException("Vertex: " + vertex.getVertexName()
            + " contains an Input with the same name as vertex: " + input.getName());
      }
    }
    for (RootInputLeafOutput<OutputDescriptor> output : vertex.getOutputs()) {
      if (vertexMap.containsKey(output.getName())) {
        throw new IllegalStateException("Vertex: " + vertex.getVertexName()
            + " contains an Output with the same name as vertex: " + output.getName());
      }
    }
  }

  // Check for valid InputNames
  for (Entry<Vertex, Set<String>> entry : inboundVertexMap.entrySet()) {
    Vertex vertex = entry.getKey();
    for (RootInputLeafOutput<InputDescriptor> input : vertex.getInputs()) {
      if (entry.getValue().contains(input.getName())) {
        throw new IllegalStateException("Vertex: " + vertex.getVertexName()
            + " contains an incoming vertex and Input with the same name: " + input.getName());
      }
    }
  }

  // Check for valid OutputNames
  for (Entry<Vertex, Set<String>> entry : outboundVertexMap.entrySet()) {
    Vertex vertex = entry.getKey();
    for (RootInputLeafOutput<OutputDescriptor> output : vertex.getOutputs()) {
      if (entry.getValue().contains(output.getName())) {
        throw new IllegalStateException("Vertex: " + vertex.getVertexName()
            + " contains an outgoing vertex and Output with the same name: " + output.getName());
      }
    }
  }

  // Not checking for repeated input names / output names vertex names on the same vertex,
  // since we only allow 1 at the moment.
  // When additional inputs are supported, this can be chceked easily (and early)
  // within the addInput / addOutput call itself.

  detectCycles(edgeMap, vertexMap);

  if (restricted) {
    for (Edge e : edges) {
      if (e.getEdgeProperty().getDataSourceType() != DataSourceType.PERSISTED) {
        throw new IllegalStateException(
            "Unsupported source type on edge. " + e);
      }
      if (e.getEdgeProperty().getSchedulingType() != SchedulingType.SEQUENTIAL) {
        throw new IllegalStateException(
            "Unsupported scheduling type on edge. " + e);
      }
    }
  }
}

// Adaptation of Tarjan's algorithm for connected components.
// http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
private void detectCycles(Map<Vertex, List<Edge>> edgeMap,
    Map<String, AnnotatedVertex> vertexMap) throws IllegalStateException {
  // BUG FIX: a boxed Integer was previously passed here as if it were a
  // by-reference counter. Java passes references by value and Integer is
  // immutable, so increments inside strongConnect() never propagated back,
  // letting vertices in different DFS subtrees receive colliding indices.
  // A one-element int[] gives genuinely shared, mutable state instead.
  int[] nextIndex = new int[] { 0 };
  Stack<AnnotatedVertex> stack = new Stack<DAG.AnnotatedVertex>();
  for (AnnotatedVertex av : vertexMap.values()) {
    if (av.index == -1) {
      assert stack.empty();
      strongConnect(av, vertexMap, edgeMap, stack, nextIndex);
    }
  }
}

// part of Tarjan's algorithm for connected components.
// http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
private void strongConnect(
    AnnotatedVertex av,
    Map<String, AnnotatedVertex> vertexMap,
    Map<Vertex, List<Edge>> edgeMap,
    Stack<AnnotatedVertex> stack,
    int[] nextIndex) throws IllegalStateException {
  av.index = nextIndex[0];
  av.lowlink = nextIndex[0];
  nextIndex[0]++;
  stack.push(av);
  av.onstack = true;

  // Local renamed from "edges" to avoid shadowing the DAG field of the same
  // name; also reuse the single map lookup instead of fetching twice.
  List<Edge> outgoing = edgeMap.get(av.v);
  if (outgoing != null) {
    for (Edge e : outgoing) {
      AnnotatedVertex outVertex = vertexMap.get(e.getOutputVertex().getVertexName());
      if (outVertex.index == -1) {
        strongConnect(outVertex, vertexMap, edgeMap, stack, nextIndex);
        av.lowlink = Math.min(av.lowlink, outVertex.lowlink);
      } else if (outVertex.onstack) {
        // strongly connected component detected, but we will wait till later so that the full cycle can be displayed.
        // update lowlink in case outputVertex should be considered the root of this component.
        av.lowlink = Math.min(av.lowlink, outVertex.index);
      }
    }
  }

  if (av.lowlink == av.index) {
    AnnotatedVertex pop = stack.pop();
    pop.onstack = false;
    if (pop != av) {
      // there was something on the stack other than this "av".
      // this indicates there is a scc/cycle. It comprises all nodes from top of stack to "av"
      StringBuilder message = new StringBuilder();
      message.append(av.v.getVertexName() + " <- ");
      for (; pop != av; pop = stack.pop()) {
        message.append(pop.v.getVertexName() + " <- ");
        pop.onstack = false;
      }
      message.append(av.v.getVertexName());
      throw new IllegalStateException("DAG contains a cycle: " + message);
    }
  }
}

/**
 * Creates the protobuf message describing this DAG. Verifies the DAG
 * (restricted mode) first, then serializes vertex groups, vertices (with
 * task config, local resources, environment, location hints), edges, the
 * supplied configuration, and credentials.
 *
 * @param dagConf optional configuration to embed in the plan; may be null
 * @return the serialized DAGPlan
 */
@Private
public DAGPlan createDag(Configuration dagConf) {
  verify(true);

  DAGPlan.Builder dagBuilder = DAGPlan.newBuilder();
  dagBuilder.setName(this.name);

  if (!vertexGroups.isEmpty()) {
    for (VertexGroup av : vertexGroups) {
      GroupInfo groupInfo = av.getGroupInfo();
      PlanVertexGroupInfo.Builder groupBuilder = PlanVertexGroupInfo.newBuilder();
      groupBuilder.setGroupName(groupInfo.getGroupName());
      for (Vertex v : groupInfo.getMembers()) {
        groupBuilder.addGroupMembers(v.getVertexName());
      }
      groupBuilder.addAllOutputs(groupInfo.outputs);
      for (Map.Entry<String, InputDescriptor> entry :
          groupInfo.edgeMergedInputs.entrySet()) {
        groupBuilder.addEdgeMergedInputs(
            PlanGroupInputEdgeInfo.newBuilder().setDestVertexName(entry.getKey()).
            setMergedInput(DagTypeConverters.convertToDAGPlan(entry.getValue())));
      }
      dagBuilder.addVertexGroups(groupBuilder);
    }
  }

  for (Vertex vertex : vertices.values()) {
    VertexPlan.Builder vertexBuilder = VertexPlan.newBuilder();
    vertexBuilder.setName(vertex.getVertexName());
    vertexBuilder.setType(PlanVertexType.NORMAL); // vertex type is implicitly NORMAL until TEZ-46.
    vertexBuilder.setProcessorDescriptor(DagTypeConverters
        .convertToDAGPlan(vertex.getProcessorDescriptor()));

    if (vertex.getInputs().size() > 0) {
      for (RootInputLeafOutput<InputDescriptor> input : vertex.getInputs()) {
        vertexBuilder.addInputs(DagTypeConverters.convertToDAGPlan(input));
      }
    }

    if (vertex.getOutputs().size() > 0) {
      for (RootInputLeafOutput<OutputDescriptor> output : vertex.getOutputs()) {
        vertexBuilder.addOutputs(DagTypeConverters.convertToDAGPlan(output));
      }
    }

    // task config
    PlanTaskConfiguration.Builder taskConfigBuilder = PlanTaskConfiguration.newBuilder();
    Resource resource = vertex.getTaskResource();
    taskConfigBuilder.setNumTasks(vertex.getParallelism());
    taskConfigBuilder.setMemoryMb(resource.getMemory());
    taskConfigBuilder.setVirtualCores(resource.getVirtualCores());
    taskConfigBuilder.setJavaOpts(vertex.getJavaOpts());
    taskConfigBuilder.setTaskModule(vertex.getVertexName());

    PlanLocalResource.Builder localResourcesBuilder = PlanLocalResource.newBuilder();
    localResourcesBuilder.clear();
    for (Entry<String, LocalResource> entry :
        vertex.getTaskLocalResources().entrySet()) {
      String key = entry.getKey();
      LocalResource lr = entry.getValue();
      localResourcesBuilder.setName(key);
      localResourcesBuilder.setUri(
          DagTypeConverters.convertToDAGPlan(lr.getResource()));
      localResourcesBuilder.setSize(lr.getSize());
      localResourcesBuilder.setTimeStamp(lr.getTimestamp());
      localResourcesBuilder.setType(
          DagTypeConverters.convertToDAGPlan(lr.getType()));
      localResourcesBuilder.setVisibility(
          DagTypeConverters.convertToDAGPlan(lr.getVisibility()));
      if (lr.getType() == LocalResourceType.PATTERN) {
        if (lr.getPattern() == null || lr.getPattern().isEmpty()) {
          throw new TezUncheckedException("LocalResource type set to pattern"
              + " but pattern is null or empty");
        }
        localResourcesBuilder.setPattern(lr.getPattern());
      }
      taskConfigBuilder.addLocalResource(localResourcesBuilder);
    }

    for (String key : vertex.getTaskEnvironment().keySet()) {
      PlanKeyValuePair.Builder envSettingBuilder = PlanKeyValuePair.newBuilder();
      envSettingBuilder.setKey(key);
      envSettingBuilder.setValue(vertex.getTaskEnvironment().get(key));
      taskConfigBuilder.addEnvironmentSetting(envSettingBuilder);
    }

    if (vertex.getTaskLocationsHint() != null) {
      if (vertex.getTaskLocationsHint().getTaskLocationHints() != null) {
        for (TaskLocationHint hint :
            vertex.getTaskLocationsHint().getTaskLocationHints()) {
          PlanTaskLocationHint.Builder taskLocationHintBuilder = PlanTaskLocationHint.newBuilder();
          if (hint.getDataLocalHosts() != null) {
            taskLocationHintBuilder.addAllHost(hint.getDataLocalHosts());
          }
          if (hint.getRacks() != null) {
            taskLocationHintBuilder.addAllRack(hint.getRacks());
          }
          vertexBuilder.addTaskLocationHint(taskLocationHintBuilder);
        }
      }
    }

    if (vertex.getVertexManagerPlugin() != null) {
      vertexBuilder.setVertexManagerPlugin(DagTypeConverters
          .convertToDAGPlan(vertex.getVertexManagerPlugin()));
    }

    for (String inEdgeId : vertex.getInputEdgeIds()) {
      vertexBuilder.addInEdgeId(inEdgeId);
    }

    for (String outEdgeId : vertex.getOutputEdgeIds()) {
      vertexBuilder.addOutEdgeId(outEdgeId);
    }

    vertexBuilder.setTaskConfig(taskConfigBuilder);
    dagBuilder.addVertex(vertexBuilder);
  }

  for (Edge edge : edges) {
    EdgePlan.Builder edgeBuilder = EdgePlan.newBuilder();
    edgeBuilder.setId(edge.getId());
    edgeBuilder.setInputVertexName(edge.getInputVertex().getVertexName());
    edgeBuilder.setOutputVertexName(edge.getOutputVertex().getVertexName());
    edgeBuilder.setDataMovementType(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataMovementType()));
    edgeBuilder.setDataSourceType(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataSourceType()));
    edgeBuilder.setSchedulingType(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getSchedulingType()));
    edgeBuilder.setEdgeSource(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeSource()));
    edgeBuilder.setEdgeDestination(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeDestination()));
    if (edge.getEdgeProperty().getDataMovementType() == DataMovementType.CUSTOM) {
      if (edge.getEdgeProperty().getEdgeManagerDescriptor() != null) {
        edgeBuilder.setEdgeManager(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeManagerDescriptor()));
      } // else the AM will deal with this.
    }
    dagBuilder.addEdge(edgeBuilder);
  }

  if (dagConf != null) {
    Iterator<Entry<String, String>> iter = dagConf.iterator();
    ConfigurationProto.Builder confProtoBuilder = ConfigurationProto.newBuilder();
    while (iter.hasNext()) {
      Entry<String, String> entry = iter.next();
      PlanKeyValuePair.Builder kvp = PlanKeyValuePair.newBuilder();
      kvp.setKey(entry.getKey());
      kvp.setValue(entry.getValue());
      confProtoBuilder.addConfKeyValues(kvp);
    }
    dagBuilder.setDagKeyValues(confProtoBuilder);
  }

  if (credentials != null) {
    dagBuilder.setCredentialsBinary(DagTypeConverters.convertCredentialsToProto(credentials));
  }

  return dagBuilder.build();
}
}
package net.glider.src.blocks;

import java.lang.reflect.Method;
import java.util.Random;

import micdoodle8.mods.galacticraft.api.vector.BlockVec3;
import micdoodle8.mods.galacticraft.core.GalacticraftCore;
import micdoodle8.mods.galacticraft.core.tile.IMultiBlock;
import net.glider.src.GliderCore;
import net.glider.src.gui.GuiHandler;
import net.glider.src.tiles.TileEntityArmorStand;
import net.glider.src.utils.ChatUtils;
import net.glider.src.utils.LocalizedChatComponent;
import net.glider.src.utils.LocalizedString;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ChunkCoordinates;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.IIcon;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
import cpw.mods.fml.common.registry.GameRegistry;

/**
 * Armor-stand block with an attached tile entity (TileEntityArmorStand).
 * Handles placement validation, inventory drops on break, a GUI on
 * right-click, and wrench interop (UE/Buildcraft/IC2 via reflection).
 */
public class BlockArmorStand extends BlockContainerMod
{
    /**
     * @param uln unlocalized name; also used as the registry name.
     */
    public BlockArmorStand(String uln)
    {
        super(uln);
        this.setStepSound(soundTypeMetal);
        this.setShowDescr(true);
        // Registers this block (with its item-block class) under "uln".
        GameRegistry.registerBlock(this, this.getItemBlockClass(), uln);
    }

    @Override
    public void registerBlockIcons(IIconRegister par1IconRegister)
    {
        // Reuses Galacticraft's blank machine texture for every side.
        this.blockIcon = par1IconRegister.registerIcon(GalacticraftCore.TEXTURE_PREFIX + "machine_blank");
    }

    /**
     * Notifies the tile entity, spills its inventory into the world, then
     * performs the normal block removal.
     */
    @Override
    public void breakBlock(World var1, int var2, int var3, int var4, Block var5, int var6)
    {
        TileEntity tileEntity = var1.getTileEntity(var2, var3, var4);

        if (tileEntity instanceof TileEntityArmorStand)
        {
            ((TileEntityArmorStand) tileEntity).onDestroy(null);
        }

        dropEntireInventory(var1, var2, var3, var4, var5, var6);
        super.breakBlock(var1, var2, var3, var4, var5, var6);
    }

    /**
     * Ejects every stack of the block's IInventory tile entity as item
     * entities with small random offsets/velocities. Stacks are split into
     * random chunks of 10-30 items, preserving NBT.
     */
    public void dropEntireInventory(World world, int x, int y, int z, Block par5, int par6)
    {
        TileEntity tileEntity = world.getTileEntity(x, y, z);

        if (tileEntity != null)
        {
            if (tileEntity instanceof IInventory)
            {
                IInventory inventory = (IInventory) tileEntity;

                for (int var6 = 0; var6 < inventory.getSizeInventory(); ++var6)
                {
                    ItemStack var7 = inventory.getStackInSlot(var6);

                    if (var7 != null)
                    {
                        Random random = new Random();
                        // Random position offset inside the block (0.1 .. 0.9).
                        float var8 = random.nextFloat() * 0.8F + 0.1F;
                        float var9 = random.nextFloat() * 0.8F + 0.1F;
                        float var10 = random.nextFloat() * 0.8F + 0.1F;

                        while (var7.stackSize > 0)
                        {
                            // Split into chunks of 10..30 items, capped by the remainder.
                            int var11 = random.nextInt(21) + 10;

                            if (var11 > var7.stackSize)
                            {
                                var11 = var7.stackSize;
                            }

                            var7.stackSize -= var11;
                            EntityItem var12 = new EntityItem(world, x + var8, y + var9, z + var10, new ItemStack(var7.getItem(), var11, var7.getItemDamage()));

                            if (var7.hasTagCompound())
                            {
                                // Preserve item NBT (e.g. enchantments) on the drop.
                                var12.getEntityItem().setTagCompound((NBTTagCompound) var7.getTagCompound().copy());
                            }

                            float var13 = 0.05F;
                            var12.motionX = (float) random.nextGaussian() * var13;
                            var12.motionY = (float) random.nextGaussian() * var13 + 0.2F;
                            var12.motionZ = (float) random.nextGaussian() * var13;
                            world.spawnEntityInWorld(var12);
                        }
                    }
                }
            }
        }
    }

    @Override
    public IIcon getIcon(int side, int metadata)
    {
        // Same icon on every side, regardless of metadata.
        return this.blockIcon;
    }

    /**
     * Called when the block is placed in the world.
     * Validates that there is room above; if not (and the placer is a
     * player), refunds the item and removes the block. Otherwise stores the
     * placer's facing in the metadata and initializes the multiblock tile.
     */
    @Override
    public void onBlockPlacedBy(World world, int x, int y, int z, EntityLivingBase entityLiving, ItemStack itemStack)
    {
        // Facing quantized to 4 directions (0..3) from the placer's yaw.
        int angle = MathHelper.floor_double(entityLiving.rotationYaw * 4.0F / 360.0F + 0.5D) & 3;

        if (!this.canPlaceChamberAt(world, x, y, z, entityLiving))
        {
            if (entityLiving instanceof EntityPlayer)
            {
                if (!world.isRemote)
                {
                    ChatUtils.SendChatMessageOnClient(((EntityPlayer) entityLiving), new LocalizedChatComponent(new LocalizedString("gui.warning.noroom", EnumChatFormatting.RED)));
                }

                world.setBlockToAir(x, y, z);
                // Refund the placed item to the player.
                ((EntityPlayer) entityLiving).inventory.addItemStackToInventory(new ItemStack(BlockContainerMod.BlockArmorStand, 1));
                return;
            }
            // NOTE(review): when the placer is not a player the block is NOT
            // cleared and execution falls through to tile initialization —
            // confirm this is intended.
        }
        else
        {
            // angle is already masked to 0..3, so the ">3" guard is always false.
            world.setBlockMetadataWithNotify(x, y, z, angle > 3 ? 0 : angle, 3);
        }

        TileEntity var8 = world.getTileEntity(x, y, z);

        if (var8 instanceof IMultiBlock)
        {
            ((IMultiBlock) var8).onCreate(new BlockVec3(x, y, z));
        }
    }

    /**
     * Right-click handler: tries wrench interaction first, otherwise opens
     * the armor-stand GUI.
     */
    @Override
    public boolean onBlockActivated(World world, int x, int y, int z, EntityPlayer entityPlayer, int side, float hitX, float hitY, float hitZ)
    {
        if (this.isUsableWrench(entityPlayer, entityPlayer.inventory.getCurrentItem(), x, y, z))
        {
            this.damageWrench(entityPlayer, entityPlayer.inventory.getCurrentItem(), x, y, z);

            if (this.onUseWrench(world, x, y, z, entityPlayer, side, hitX, hitY, hitZ))
            {
                return true;
            }
        }

        return this.onMachineActivated(world, x, y, z, entityPlayer, side, hitX, hitY, hitZ);
    }

    /**
     * Duck-typed wrench detection via reflection so no mod is a hard
     * dependency: UE/Buildcraft expose canWrench(...); IC2 wrenches are
     * matched by class name and checked for remaining durability.
     * Reflection failures are deliberately swallowed (best-effort probe).
     */
    public boolean isUsableWrench(EntityPlayer entityPlayer, ItemStack itemStack, int x, int y, int z)
    {
        if (entityPlayer != null && itemStack != null)
        {
            Class<? extends Item> wrenchClass = itemStack.getItem().getClass();

            /**
             * UE and Buildcraft
             */
            try
            {
                Method methodCanWrench = wrenchClass.getMethod("canWrench", EntityPlayer.class, Integer.TYPE, Integer.TYPE, Integer.TYPE);
                return (Boolean) methodCanWrench.invoke(itemStack.getItem(), entityPlayer, x, y, z);
            }
            catch (NoClassDefFoundError e)
            {
            }
            catch (Exception e)
            {
            }

            /**
             * Industrialcraft
             */
            try
            {
                if (wrenchClass == Class.forName("ic2.core.item.tool.ItemToolWrench") || wrenchClass == Class.forName("ic2.core.item.tool.ItemToolWrenchElectric"))
                {
                    return itemStack.getItemDamage() < itemStack.getMaxDamage();
                }
            }
            catch (Exception e)
            {
            }
        }

        return false;
    }

    /**
     * Applies wrench wear after use, again via reflection per mod.
     * Only called after isUsableWrench() returned true, so itemStack is
     * non-null here.
     *
     * @return true when a known wrench API accepted the damage call
     */
    public boolean damageWrench(EntityPlayer entityPlayer, ItemStack itemStack, int x, int y, int z)
    {
        Class<? extends Item> wrenchClass = itemStack.getItem().getClass();

        /**
         * UE and Buildcraft
         */
        try
        {
            Method methodWrenchUsed = wrenchClass.getMethod("wrenchUsed", EntityPlayer.class, Integer.TYPE, Integer.TYPE, Integer.TYPE);
            methodWrenchUsed.invoke(itemStack.getItem(), entityPlayer, x, y, z);
            return true;
        }
        catch (Exception e)
        {
        }

        /**
         * Industrialcraft
         */
        try
        {
            if (wrenchClass == Class.forName("ic2.core.item.tool.ItemToolWrench") || wrenchClass == Class.forName("ic2.core.item.tool.ItemToolWrenchElectric"))
            {
                Method methodWrenchDamage = wrenchClass.getMethod("damage", ItemStack.class, Integer.TYPE, EntityPlayer.class);
                methodWrenchDamage.invoke(itemStack.getItem(), itemStack, 1, entityPlayer);
                return true;
            }
        }
        catch (Exception e)
        {
        }

        return false;
    }

    /**
     * Wrench action: rotates the block through metadata 0 -> 1 -> 2 -> 3 -> 0.
     */
    public boolean onUseWrench(World par1World, int x, int y, int z, EntityPlayer par5EntityPlayer, int side, float hitX, float hitY, float hitZ)
    {
        int metadata = par1World.getBlockMetadata(x, y, z);
        int original = metadata;
        int change = 0;

        // Re-orient the block
        switch (original)
        {
        case 0:
            change = 1;
            break;
        case 1:
            change = 2;
            break;
        case 2:
            change = 3;
            break;
        case 3:
            change = 0;
            break;
        }

        par1World.setBlockMetadataWithNotify(x, y, z, change, 3);
        return true;
    }

    /**
     * Called when the block is right clicked by the player
     * (non-wrench path): opens the armor-stand GUI.
     */
    public boolean onMachineActivated(World world, int x, int y, int z, EntityPlayer par5EntityPlayer, int side, float hitX, float hitY, float hitZ)
    {
        par5EntityPlayer.openGui(GliderCore.instance, GuiHandler.ARMORSTANDGUI, world, x, y, z);
        return true;
    }

    @Override
    public boolean isOpaqueCube()
    {
        // Custom-rendered, non-full block.
        return false;
    }

    @Override
    public boolean renderAsNormalBlock()
    {
        return false;
    }

    @Override
    public TileEntity createTileEntity(World world, int metadata)
    {
        return new TileEntityArmorStand();
    }

    /**
     * Checks that the block one above the placement position is replaceable
     * (the stand occupies two blocks of height). The y==0 slot is the block
     * being placed itself and is skipped.
     */
    private boolean canPlaceChamberAt(World world, int x0, int y0, int z0, EntityLivingBase player)
    {
        for (int y = 0; y < 2; y++)
        {
            Block blockAt = world.getBlock(x0, y0 + y, z0);

            if (y == 0)
            {
                continue;
            }

            if (!blockAt.getMaterial().isReplaceable())
            {
                return false;
            }
        }

        return true;
    }

    @Override
    public int damageDropped(int metadata)
    {
        // Always drop with metadata 0 regardless of orientation.
        return 0;
    }

    @Override
    public int getRenderType()
    {
        // -1: rendered via a TileEntitySpecialRenderer, not a block model.
        return -1;
    }

    /**
     * Scans a 3x3 area around (par1, par2, par3) for a standable spot:
     * solid top surface below and two non-opaque blocks of headroom.
     * par4 spots are skipped before one is returned; null when none found.
     */
    public static ChunkCoordinates getNearestEmptyChunkCoordinates(World par0World, int par1, int par2, int par3, int par4)
    {
        for (int k1 = 0; k1 <= 1; ++k1)
        {
            int l1 = par1 - 1;
            int i2 = par3 - 1;
            int j2 = l1 + 2;
            int k2 = i2 + 2;

            for (int l2 = l1; l2 <= j2; ++l2)
            {
                for (int i3 = i2; i3 <= k2; ++i3)
                {
                    if (World.doesBlockHaveSolidTopSurface(par0World, l2, par2 - 1, i3) && !par0World.getBlock(l2, par2, i3).getMaterial().isOpaque() && !par0World.getBlock(l2, par2 + 1, i3).getMaterial().isOpaque())
                    {
                        if (par4 <= 0)
                        {
                            return new ChunkCoordinates(l2, par2, i3);
                        }

                        --par4;
                    }
                }
            }
        }

        return null;
    }
}
package ch.usi.dag.disl.util.cfg;

import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.objectweb.asm.Opcodes;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.InsnList;
import org.objectweb.asm.tree.JumpInsnNode;
import org.objectweb.asm.tree.LabelNode;
import org.objectweb.asm.tree.LookupSwitchInsnNode;
import org.objectweb.asm.tree.MethodNode;
import org.objectweb.asm.tree.TableSwitchInsnNode;
import org.objectweb.asm.tree.TryCatchBlockNode;

import ch.usi.dag.disl.util.AsmHelper.Insns;
import ch.usi.dag.disl.util.BasicBlockCalc;

/**
 * Control flow graph over the basic blocks of a method. Blocks are computed
 * eagerly in the constructor; edges between them are wired incrementally by
 * calling visit() on one or more entry instructions (normal entry and
 * exception handlers).
 */
public class CtrlFlowGraph {

    // Sentinel: the instruction does not belong to any known basic block.
    private final static int NOT_FOUND = -1;
    // Sentinel: the basic block was connected for the first time.
    private final static int NEW = -2;

    // basic blocks of a method
    private List<BasicBlock> nodes;

    // a basic block is marked as connected after visited
    private List<BasicBlock> connected_nodes;

    // size of connected basic blocks since last visit
    private int connected_size;

    // basic blocks that ends with a 'return' or 'athrow'
    private Set<BasicBlock> method_exits;

    // Initialize the control flow graph.
    // Computes basic-block boundaries from the instruction list and the
    // try/catch handlers, then materializes one BasicBlock per region.
    public CtrlFlowGraph(InsnList instructions,
            List<TryCatchBlockNode> tryCatchBlocks) {

        nodes = new LinkedList<BasicBlock>();
        connected_nodes = new LinkedList<BasicBlock>();
        connected_size = 0;
        method_exits = new HashSet<BasicBlock>();

        // Generating basic blocks
        List<AbstractInsnNode> separators =
                BasicBlockCalc.getAll(instructions, tryCatchBlocks, false);
        AbstractInsnNode last = instructions.getLast();
        // Add the last instruction as the final separator so that the loop
        // below can treat every block as the span [separator_i, separator_i+1).
        separators.add(last);

        for (int i = 0; i < separators.size() - 1; i++) {

            AbstractInsnNode start = separators.get(i);
            AbstractInsnNode end = separators.get(i + 1);

            // Every block but the last ends just before the next separator;
            // the last block ends at the final instruction itself.
            if (i != separators.size() - 2) {
                end = end.getPrevious();
            }

            // Skip trailing labels/frames so the exit is a real instruction.
            end = Insns.REVERSE.firstRealInsn(end);
            nodes.add(new BasicBlock(i, start, end));
        }
    }

    // Initialize the control flow graph.
    public CtrlFlowGraph(MethodNode method) {
        this(method.instructions, method.tryCatchBlocks);
    }

    public List<BasicBlock> getNodes() {
        return nodes;
    }

    // Return the index of basic block that contains the input instruction.
    // If not found, return NOT_FOUND.
    public int getIndex(AbstractInsnNode instr) {

        BasicBlock bb = getBB(instr);

        if (bb == null) {
            return NOT_FOUND;
        } else {
            return bb.getIndex();
        }
    }

    // Return a basic block that contains the input instruction.
    // If not found, return null.
    // Works by walking backwards from the instruction until a block
    // entrance is hit (linear scan per step, so O(n^2) worst case).
    public BasicBlock getBB(AbstractInsnNode instr) {

        instr = Insns.FORWARD.firstRealInsn(instr);

        while (instr != null) {

            for (int i = 0; i < nodes.size(); i++) {
                if (nodes.get(i).getEntrance().equals(instr)) {
                    return nodes.get(i);
                }
            }

            instr = instr.getPrevious();
        }

        return null;
    }

    // Visit a successor.
    // If the basic block, which starts with the input 'node',
    // is not found, return NOT_FOUND;
    // If the basic block has been visited, then returns its index;
    // Otherwise return NEW.
    // Side effects: records successor/predecessor (or join) links on
    // 'current' and marks the block as connected.
    private int tryVisit(BasicBlock current, AbstractInsnNode node) {

        BasicBlock bb = getBB(node);

        if (bb == null) {
            return NOT_FOUND;
        }

        if (connected_nodes.contains(bb)) {

            int index = connected_nodes.indexOf(bb);

            if (current != null) {

                // Blocks connected before the current visit started are
                // joins between this visit and a previous one.
                if (index < connected_size) {
                    current.getJoins().add(bb);
                } else {
                    current.getSuccessors().add(bb);
                    bb.getPredecessors().add(current);
                }
            }

            return index;
        }

        if (current != null) {
            current.getSuccessors().add(bb);
            bb.getPredecessors().add(current);
        }

        connected_nodes.add(bb);
        return NEW;
    }

    // Try to visit a successor. If it is visited last build, then regards
    // it as an exit.
    private void tryVisit(BasicBlock current, AbstractInsnNode node,
            AbstractInsnNode exit, List<AbstractInsnNode> joins) {

        int ret = tryVisit(current, node);

        if (ret >= 0 && ret < connected_size) {
            joins.add(exit);
        }
    }

    // Generate a control flow graph.
    // Returns a list of instruction that stands for the exit point
    // of the current visit.
    // For the first time this method is called, it will generate
    // the normal return of this method.
    // Otherwise, it will generate the join instruction between
    // the current visit and a existing visit.
    public List<AbstractInsnNode> visit(AbstractInsnNode root) {

        List<AbstractInsnNode> joins = new LinkedList<AbstractInsnNode>();

        if (tryVisit(null, root) == NOT_FOUND) {
            return joins;
        }

        // Worklist traversal: tryVisit() appends newly connected blocks to
        // connected_nodes, so this loop processes them in discovery order.
        for (int i = connected_size; i < connected_nodes.size(); i++) {

            BasicBlock current = connected_nodes.get(i);
            AbstractInsnNode exit = current.getExit();

            int opcode = exit.getOpcode();

            switch (exit.getType()) {
            case AbstractInsnNode.JUMP_INSN: {
                // Covers IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
                // IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
                // IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL, and IFNONNULL.
                tryVisit(current, ((JumpInsnNode) exit).label, exit, joins);

                // goto never returns.
                if (opcode != Opcodes.GOTO) {
                    tryVisit(current, exit.getNext(), exit, joins);
                }

                break;
            }

            case AbstractInsnNode.LOOKUPSWITCH_INSN: {
                // Covers LOOKUPSWITCH
                LookupSwitchInsnNode lsin = (LookupSwitchInsnNode) exit;

                for (LabelNode label : lsin.labels) {
                    tryVisit(current, label, exit, joins);
                }

                tryVisit(current, lsin.dflt, exit, joins);
                break;
            }

            case AbstractInsnNode.TABLESWITCH_INSN: {
                // Covers TABLESWITCH
                TableSwitchInsnNode tsin = (TableSwitchInsnNode) exit;

                for (LabelNode label : tsin.labels) {
                    tryVisit(current, label, exit, joins);
                }

                tryVisit(current, tsin.dflt, exit, joins);
                break;
            }

            default:
                // IRETURN..RETURN and ATHROW terminate the method; any
                // other instruction falls through to the next block.
                if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
                        || opcode == Opcodes.ATHROW) {
                    method_exits.add(current);
                } else {
                    tryVisit(current, exit.getNext(), exit, joins);
                }

                break;
            }
        }

        connected_size = connected_nodes.size();
        return joins;
    }

    // Returns the exit instruction of every block with no successors.
    public List<AbstractInsnNode> getEnds() {

        List<AbstractInsnNode> ends = new LinkedList<AbstractInsnNode>();

        for (BasicBlock bb : nodes) {
            if (bb.getSuccessors().size() == 0) {
                ends.add(bb.getExit());
            }
        }

        return ends;
    }

    // Build up a control flow graph using instruction list and exception
    // handlers.
    public static CtrlFlowGraph build(InsnList instructions,
            List<TryCatchBlockNode> tryCatchBlocks) {

        CtrlFlowGraph cfg = new CtrlFlowGraph(instructions, tryCatchBlocks);

        cfg.visit(instructions.getFirst());

        // Exception handlers are alternative entry points.
        for (TryCatchBlockNode tcb : tryCatchBlocks) {
            cfg.visit(tcb.handler);
        }

        return cfg;
    }

    // Build up a control flow graph using a method node.
    public static CtrlFlowGraph build(MethodNode method) {
        return build(method.instructions, method.tryCatchBlocks);
    }
}
/*
 * Copyright (C) 2013, 2014 Brett Wooldridge
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.zaxxer.hikari.pool;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.concurrent.TimeUnit;

import org.junit.Assert;
import org.junit.Test;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.mocks.StubConnection;
import com.zaxxer.hikari.mocks.StubStatement;

/**
 * Exercises the connection/statement proxy wrappers produced by the pool:
 * object creation overloads, unwrap/isWrapperFor, and exception propagation
 * from the underlying stub driver.
 */
public class TestProxies
{
   @Test
   public void testProxyCreation() throws SQLException
   {
      HikariConfig poolConfig = new HikariConfig();
      poolConfig.setMinimumIdle(0);
      poolConfig.setMaximumPoolSize(1);
      poolConfig.setConnectionTestQuery("VALUES 1");
      poolConfig.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      try (HikariDataSource dataSource = new HikariDataSource(poolConfig)) {
         Connection connection = dataSource.getConnection();

         // Every statement-creation overload should yield a proxy.
         Assert.assertNotNull(connection.createStatement(ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE));
         Assert.assertNotNull(connection.createStatement(ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.HOLD_CURSORS_OVER_COMMIT));
         Assert.assertNotNull(connection.prepareCall("some sql"));
         Assert.assertNotNull(connection.prepareCall("some sql", ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE));
         Assert.assertNotNull(connection.prepareCall("some sql", ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.HOLD_CURSORS_OVER_COMMIT));
         Assert.assertNotNull(connection.prepareStatement("some sql", PreparedStatement.NO_GENERATED_KEYS));
         Assert.assertNotNull(connection.prepareStatement("some sql", new int[3]));
         Assert.assertNotNull(connection.prepareStatement("some sql", new String[3]));
         Assert.assertNotNull(connection.prepareStatement("some sql", ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE));
         Assert.assertNotNull(connection.prepareStatement("some sql", ResultSet.FETCH_FORWARD, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.HOLD_CURSORS_OVER_COMMIT));
         Assert.assertNotNull(connection.toString());

         // Wrapper plumbing and liveness checks on the proxy itself.
         Assert.assertTrue(connection.isWrapperFor(Connection.class));
         Assert.assertTrue(connection.isValid(10));
         Assert.assertFalse(connection.isClosed());
         Assert.assertTrue(connection.unwrap(StubConnection.class) instanceof StubConnection);
         try {
            connection.unwrap(TestProxies.class);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected: proxy cannot unwrap to an unrelated type
         }
      }
   }

   @Test
   public void testStatementProxy() throws SQLException
   {
      HikariConfig poolConfig = new HikariConfig();
      poolConfig.setMinimumIdle(0);
      poolConfig.setMaximumPoolSize(1);
      poolConfig.setConnectionTestQuery("VALUES 1");
      poolConfig.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      try (HikariDataSource dataSource = new HikariDataSource(poolConfig)) {
         Connection connection = dataSource.getConnection();

         PreparedStatement statement = connection.prepareStatement("some sql");
         statement.executeQuery();
         statement.executeQuery("some sql");
         Assert.assertFalse(statement.isClosed());
         Assert.assertNotNull(statement.getGeneratedKeys());
         Assert.assertNotNull(statement.getResultSet());
         Assert.assertNotNull(statement.getConnection());
         Assert.assertTrue(statement.unwrap(StubStatement.class) instanceof StubStatement);
         try {
            statement.unwrap(TestProxies.class);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected: statement proxy cannot unwrap to an unrelated type
         }
      }
   }

   @Test
   public void testStatementExceptions() throws SQLException
   {
      HikariConfig poolConfig = new HikariConfig();
      poolConfig.setMinimumIdle(0);
      poolConfig.setMaximumPoolSize(1);
      poolConfig.setConnectionTimeout(TimeUnit.SECONDS.toMillis(1));
      poolConfig.setConnectionTestQuery("VALUES 1");
      poolConfig.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      try (HikariDataSource dataSource = new HikariDataSource(poolConfig)) {
         Connection connection = dataSource.getConnection();
         StubConnection stub = connection.unwrap(StubConnection.class);
         // Force the underlying driver to throw on every statement call;
         // the proxy must surface each as a SQLException.
         stub.throwException = true;

         try {
            connection.createStatement();
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.createStatement(0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.createStatement(0, 0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareCall("");
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareCall("", 0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareCall("", 0, 0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("");
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("", 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("", new int[0]);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("", new String[0]);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("", 0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.prepareStatement("", 0, 0, 0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }
      }
   }

   @Test
   public void testOtherExceptions() throws SQLException
   {
      HikariConfig poolConfig = new HikariConfig();
      poolConfig.setMinimumIdle(0);
      poolConfig.setMaximumPoolSize(1);
      poolConfig.setConnectionTestQuery("VALUES 1");
      poolConfig.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      try (HikariDataSource dataSource = new HikariDataSource(poolConfig)) {
         Connection connection = dataSource.getConnection();
         StubConnection stub = connection.unwrap(StubConnection.class);
         stub.throwException = true;

         try {
            connection.setTransactionIsolation(Connection.TRANSACTION_NONE);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.isReadOnly();
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.setReadOnly(false);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.setCatalog("");
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.setAutoCommit(false);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.clearWarnings();
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.isValid(0);
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.isWrapperFor(getClass());
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.unwrap(getClass());
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         try {
            connection.close();
            Assert.fail();
         }
         catch (SQLException e) {
            // expected
         }

         // Once the exception flag has been exercised, isValid() should
         // report false rather than throw.
         try {
            Assert.assertFalse(connection.isValid(0));
         }
         catch (SQLException e) {
            Assert.fail();
         }
      }
   }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.api.java.record.io;

import java.io.IOException;

import org.junit.Assert;
import org.apache.flink.api.common.io.DefaultInputSplitAssigner;
import org.apache.flink.api.common.io.statistics.BaseStatistics;
import org.apache.flink.api.java.record.io.ExternalProcessFixedLengthInputFormat;
import org.apache.flink.api.java.record.io.ExternalProcessInputFormat;
import org.apache.flink.api.java.record.io.ExternalProcessInputSplit;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.io.GenericInputSplit;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.Record;
import org.apache.flink.util.OperatingSystem;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link ExternalProcessFixedLengthInputFormat}, driving it with small
 * shell commands (cat/dd/ls). All tests are skipped on Windows because the
 * commands are POSIX-only.
 */
public class ExternalProcessFixedLengthInputFormatTest {

    private ExternalProcessFixedLengthInputFormat<ExternalProcessInputSplit> format;

    // Produces an endless byte stream; used to test forced process destruction.
    private final String neverEndingCommand = "cat /dev/urandom";
    // Emits exactly 1000 complete 8-byte records.
    private final String thousandRecordsCommand = "dd if=/dev/zero bs=8 count=1000";
    // Emits 14 bytes: one full 8-byte record plus a 6-byte remainder.
    private final String incompleteRecordsCommand = "dd if=/dev/zero bs=7 count=2";
    // Exits with a non-zero code (path does not exist).
    private final String failingCommand = "ls /I/do/not/exist";

    @Before
    public void prepare() {
        format = new MyExternalProcessTestInputFormat();
    }

    /**
     * Opens the format on a never-ending stream, confirms the external process is
     * actually running (via ps/grep), then closes. Closing while the stream is not
     * fully consumed is expected to destroy the process and raise a RuntimeException.
     */
    @Test
    public void testOpen() {
        if (OperatingSystem.isWindows()) {
            return;
        }

        Configuration config = new Configuration();
        config.setInteger(ExternalProcessFixedLengthInputFormat.RECORDLENGTH_PARAMETER_KEY, 8);
        ExternalProcessInputSplit split = new ExternalProcessInputSplit(1, 1, this.neverEndingCommand);

        boolean processDestroyed = false;
        try {
            format.configure(config);
            format.open(split);

            // Count matching entries in the process table to prove the child is alive.
            String[] checkCommand = {"/bin/sh", "-c", "ps aux | grep -v grep | grep \"cat /dev/urandom\" | wc -l"};
            byte[] output = new byte[128];
            Process checker = Runtime.getRuntime().exec(checkCommand);
            checker.getInputStream().read(output);
            int processCount = Integer.parseInt(new String(output).trim());
            Assert.assertTrue(processCount > 0);

            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            if (e.getMessage().equals("External process was destroyed although stream was not fully read.")) {
                processDestroyed = true;
            }
        }
        finally {
            // close() must have destroyed the still-streaming process.
            Assert.assertTrue(processDestroyed);
        }
    }

    /**
     * A failing command must be rejected when its exit code is not allowed, and
     * accepted once the exit code is whitelisted via ALLOWEDEXITCODES_PARAMETER_KEY.
     */
    @Test
    public void testCheckExitCode() {
        if (OperatingSystem.isWindows()) {
            return;
        }

        Configuration config = new Configuration();
        config.setInteger(ExternalProcessFixedLengthInputFormat.RECORDLENGTH_PARAMETER_KEY, 8);
        ExternalProcessInputSplit split = new ExternalProcessInputSplit(1, 1, failingCommand);

        // Phase 1: default allowed codes -> the failing exit code must be flagged.
        format.configure(config);
        boolean invalidExitCode = false;
        try {
            format.open(split);
            format.waitForProcessToFinish();
            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (InterruptedException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            if (e.getMessage().startsWith("External process did not finish with an allowed exit code:")) {
                invalidExitCode = true;
            }
        }
        Assert.assertTrue(invalidExitCode);

        // Phase 2: whitelist exit codes 0-2 -> no complaint expected.
        invalidExitCode = false;
        config.setString(ExternalProcessInputFormat.ALLOWEDEXITCODES_PARAMETER_KEY, "0,1,2");
        format.configure(config);
        try {
            format.open(split);
            // wait for process to start...
            Thread.sleep(100);
            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (InterruptedException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            if (e.getMessage().startsWith("External process did not finish with an allowed exit code:")) {
                invalidExitCode = true;
            }
        }
        Assert.assertTrue(!invalidExitCode);
    }

    /**
     * A user-code exception during record deserialization must surface to the
     * caller, and the subsequent close() must destroy the still-running process.
     */
    @Test
    public void testUserCodeTermination() {
        if (OperatingSystem.isWindows()) {
            return;
        }

        Configuration config = new Configuration();
        config.setInteger(ExternalProcessFixedLengthInputFormat.RECORDLENGTH_PARAMETER_KEY, 8);
        // Make the test format throw after 100 records.
        config.setInteger(MyExternalProcessTestInputFormat.FAILCOUNT_PARAMETER_KEY, 100);
        ExternalProcessInputSplit split = new ExternalProcessInputSplit(1, 1, this.neverEndingCommand);
        Record record = new Record();

        boolean userException = false;
        boolean processDestroyed = false;
        try {
            format.configure(config);
            format.open(split);
            while (!format.reachedEnd()) {
                try {
                    format.nextRecord(record);
                }
                catch (RuntimeException re) {
                    userException = true;
                    break;
                }
            }
            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            if (e.getMessage().equals("External process was destroyed although stream was not fully read.")) {
                processDestroyed = true;
            }
        }
        finally {
            Assert.assertTrue(userException && processDestroyed);
        }
    }

    /**
     * Reads a bounded stream of exactly 1000 fixed-length records and verifies
     * they are all delivered.
     */
    @Test
    public void testReadStream() {
        if (OperatingSystem.isWindows()) {
            return;
        }

        Configuration config = new Configuration();
        config.setInteger(ExternalProcessFixedLengthInputFormat.RECORDLENGTH_PARAMETER_KEY, 8);
        ExternalProcessInputSplit split = new ExternalProcessInputSplit(1, 1, this.thousandRecordsCommand);
        Record record = new Record();

        int readCount = 0;
        try {
            format.configure(config);
            format.open(split);
            while (!format.reachedEnd()) {
                if (format.nextRecord(record) != null) {
                    readCount++;
                }
            }
            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            Assert.fail(e.getMessage());
        }
        Assert.assertTrue(readCount == 1000);
    }

    /**
     * A stream whose byte count is not a multiple of the record length must be
     * reported as an incomplete record.
     */
    @Test
    public void testReadInvalidStream() {
        if (OperatingSystem.isWindows()) {
            return;
        }

        Configuration config = new Configuration();
        config.setInteger(ExternalProcessFixedLengthInputFormat.RECORDLENGTH_PARAMETER_KEY, 8);
        ExternalProcessInputSplit split = new ExternalProcessInputSplit(1, 1, this.incompleteRecordsCommand);
        Record record = new Record();

        boolean incompleteRecordDetected = false;
        @SuppressWarnings("unused")
        int readCount = 0;
        try {
            format.configure(config);
            format.open(split);
            while (!format.reachedEnd()) {
                if (format.nextRecord(record) != null) {
                    readCount++;
                }
            }
            format.close();
        }
        catch (IOException e) {
            Assert.fail();
        }
        catch (RuntimeException e) {
            if (e.getMessage().equals("External process produced incomplete record")) {
                incompleteRecordDetected = true;
            }
            else {
                Assert.fail(e.getMessage());
            }
        }
        Assert.assertTrue(incompleteRecordDetected);
    }

    /**
     * Test format that decodes each 8-byte record as two big-endian ints and can
     * be configured to throw after a given number of records.
     */
    private final class MyExternalProcessTestInputFormat extends ExternalProcessFixedLengthInputFormat<ExternalProcessInputSplit> {
        private static final long serialVersionUID = 1L;

        public static final String FAILCOUNT_PARAMETER_KEY = "test.failingCount";

        // Number of records deserialized so far.
        private long recordsRead = 0;
        // Record index at which to throw a test exception (MAX_VALUE = never).
        private int failCount;

        @Override
        public void configure(Configuration parameters) {
            super.configure(parameters);
            failCount = parameters.getInteger(FAILCOUNT_PARAMETER_KEY, Integer.MAX_VALUE);
        }

        @Override
        public boolean readBytes(Record record, byte[] bytes, int startPos) {
            if (recordsRead == failCount) {
                throw new RuntimeException("This is a test exception!");
            }

            // Assemble two big-endian 32-bit ints from the 8-byte record.
            int v1 = ((0xFF & bytes[startPos]) << 24)
                    | ((0xFF & bytes[startPos + 1]) << 16)
                    | ((0xFF & bytes[startPos + 2]) << 8)
                    | (0xFF & bytes[startPos + 3]);
            int v2 = ((0xFF & bytes[startPos + 4]) << 24)
                    | ((0xFF & bytes[startPos + 5]) << 16)
                    | ((0xFF & bytes[startPos + 6]) << 8)
                    | (0xFF & bytes[startPos + 7]);

            record.setField(0, new IntValue(v1));
            record.setField(1, new IntValue(v2));

            recordsRead++;
            return true;
        }

        @Override
        public ExternalProcessInputSplit[] createInputSplits(int minNumSplits) throws IOException {
            return null;
        }

        @Override
        public DefaultInputSplitAssigner getInputSplitAssigner(GenericInputSplit[] splits) {
            return new DefaultInputSplitAssigner(splits);
        }

        @Override
        public BaseStatistics getStatistics(BaseStatistics cachedStatistics) {
            return null;
        }
    }
}
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.minio;

import java.util.Map;

import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;

/**
 * Generated by camel build tools - do NOT edit this file!
 *
 * Reflection-free property configurer for MinioComponent: maps option names
 * (case-sensitive or lower-cased) to setter/getter calls on the component or
 * its MinioConfiguration.
 */
@SuppressWarnings("unchecked")
public class MinioComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    // Lazily creates the component's configuration so options can be set before one is assigned.
    private org.apache.camel.component.minio.MinioConfiguration getOrCreateConfiguration(MinioComponent target) {
        if (target.getConfiguration() == null) {
            target.setConfiguration(new org.apache.camel.component.minio.MinioConfiguration());
        }
        return target.getConfiguration();
    }

    // Sets one option by name; returns false for unknown names. When ignoreCase is
    // true the lower-cased alias (first case label of each pair) is matched.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        MinioComponent target = (MinioComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesskey":
        case "accessKey": getOrCreateConfiguration(target).setAccessKey(property(camelContext, java.lang.String.class, value)); return true;
        case "autoclosebody":
        case "autoCloseBody": getOrCreateConfiguration(target).setAutoCloseBody(property(camelContext, boolean.class, value)); return true;
        case "autocreatebucket":
        case "autoCreateBucket": getOrCreateConfiguration(target).setAutoCreateBucket(property(camelContext, boolean.class, value)); return true;
        // basicPropertyBinding, bridgeErrorHandler, configuration and lazyStartProducer
        // live on the component itself; everything else targets the configuration.
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "bypassgovernancemode":
        case "bypassGovernanceMode": getOrCreateConfiguration(target).setBypassGovernanceMode(property(camelContext, boolean.class, value)); return true;
        case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.minio.MinioConfiguration.class, value)); return true;
        case "customhttpclient":
        case "customHttpClient": getOrCreateConfiguration(target).setCustomHttpClient(property(camelContext, okhttp3.OkHttpClient.class, value)); return true;
        case "deleteafterread":
        case "deleteAfterRead": getOrCreateConfiguration(target).setDeleteAfterRead(property(camelContext, boolean.class, value)); return true;
        case "deleteafterwrite":
        case "deleteAfterWrite": getOrCreateConfiguration(target).setDeleteAfterWrite(property(camelContext, boolean.class, value)); return true;
        case "delimiter": getOrCreateConfiguration(target).setDelimiter(property(camelContext, java.lang.String.class, value)); return true;
        case "destinationbucketname":
        case "destinationBucketName": getOrCreateConfiguration(target).setDestinationBucketName(property(camelContext, java.lang.String.class, value)); return true;
        case "destinationobjectname":
        case "destinationObjectName": getOrCreateConfiguration(target).setDestinationObjectName(property(camelContext, java.lang.String.class, value)); return true;
        case "endpoint": getOrCreateConfiguration(target).setEndpoint(property(camelContext, java.lang.String.class, value)); return true;
        case "includebody":
        case "includeBody": getOrCreateConfiguration(target).setIncludeBody(property(camelContext, boolean.class, value)); return true;
        case "includefolders":
        case "includeFolders": getOrCreateConfiguration(target).setIncludeFolders(property(camelContext, boolean.class, value)); return true;
        case "includeusermetadata":
        case "includeUserMetadata": getOrCreateConfiguration(target).setIncludeUserMetadata(property(camelContext, boolean.class, value)); return true;
        case "includeversions":
        case "includeVersions": getOrCreateConfiguration(target).setIncludeVersions(property(camelContext, boolean.class, value)); return true;
        case "keyname":
        case "keyName": getOrCreateConfiguration(target).setKeyName(property(camelContext, java.lang.String.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "length": getOrCreateConfiguration(target).setLength(property(camelContext, long.class, value)); return true;
        case "matchetag":
        case "matchETag": getOrCreateConfiguration(target).setMatchETag(property(camelContext, java.lang.String.class, value)); return true;
        case "maxconnections":
        case "maxConnections": getOrCreateConfiguration(target).setMaxConnections(property(camelContext, int.class, value)); return true;
        case "maxmessagesperpoll":
        case "maxMessagesPerPoll": getOrCreateConfiguration(target).setMaxMessagesPerPoll(property(camelContext, int.class, value)); return true;
        case "minioclient":
        case "minioClient": getOrCreateConfiguration(target).setMinioClient(property(camelContext, io.minio.MinioClient.class, value)); return true;
        case "modifiedsince":
        case "modifiedSince": getOrCreateConfiguration(target).setModifiedSince(property(camelContext, java.time.ZonedDateTime.class, value)); return true;
        case "moveafterread":
        case "moveAfterRead": getOrCreateConfiguration(target).setMoveAfterRead(property(camelContext, boolean.class, value)); return true;
        case "notmatchetag":
        case "notMatchETag": getOrCreateConfiguration(target).setNotMatchETag(property(camelContext, java.lang.String.class, value)); return true;
        case "objectlock":
        case "objectLock": getOrCreateConfiguration(target).setObjectLock(property(camelContext, boolean.class, value)); return true;
        case "objectname":
        case "objectName": getOrCreateConfiguration(target).setObjectName(property(camelContext, java.lang.String.class, value)); return true;
        case "offset": getOrCreateConfiguration(target).setOffset(property(camelContext, long.class, value)); return true;
        case "operation": getOrCreateConfiguration(target).setOperation(property(camelContext, org.apache.camel.component.minio.MinioOperations.class, value)); return true;
        case "pojorequest":
        case "pojoRequest": getOrCreateConfiguration(target).setPojoRequest(property(camelContext, boolean.class, value)); return true;
        case "policy": getOrCreateConfiguration(target).setPolicy(property(camelContext, java.lang.String.class, value)); return true;
        case "prefix": getOrCreateConfiguration(target).setPrefix(property(camelContext, java.lang.String.class, value)); return true;
        case "proxyport":
        case "proxyPort": getOrCreateConfiguration(target).setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
        case "recursive": getOrCreateConfiguration(target).setRecursive(property(camelContext, boolean.class, value)); return true;
        case "region": getOrCreateConfiguration(target).setRegion(property(camelContext, java.lang.String.class, value)); return true;
        case "secretkey":
        case "secretKey": getOrCreateConfiguration(target).setSecretKey(property(camelContext, java.lang.String.class, value)); return true;
        case "secure": getOrCreateConfiguration(target).setSecure(property(camelContext, boolean.class, value)); return true;
        case "serversideencryption":
        case "serverSideEncryption": getOrCreateConfiguration(target).setServerSideEncryption(property(camelContext, io.minio.ServerSideEncryption.class, value)); return true;
        case "serversideencryptioncustomerkey":
        case "serverSideEncryptionCustomerKey": getOrCreateConfiguration(target).setServerSideEncryptionCustomerKey(property(camelContext, io.minio.ServerSideEncryptionCustomerKey.class, value)); return true;
        case "startafter":
        case "startAfter": getOrCreateConfiguration(target).setStartAfter(property(camelContext, java.lang.String.class, value)); return true;
        case "storageclass":
        case "storageClass": getOrCreateConfiguration(target).setStorageClass(property(camelContext, java.lang.String.class, value)); return true;
        case "unmodifiedsince":
        case "unModifiedSince": getOrCreateConfiguration(target).setUnModifiedSince(property(camelContext, java.time.ZonedDateTime.class, value)); return true;
        case "useversion1":
        case "useVersion1": getOrCreateConfiguration(target).setUseVersion1(property(camelContext, boolean.class, value)); return true;
        case "versionid":
        case "versionId": getOrCreateConfiguration(target).setVersionId(property(camelContext, java.lang.String.class, value)); return true;
        default: return false;
        }
    }

    // Returns the name -> type map of all supported options (case-insensitive keys).
    @Override
    public Map<String, Object> getAllOptions(Object target) {
        Map<String, Object> answer = new CaseInsensitiveMap();
        answer.put("accessKey", java.lang.String.class);
        answer.put("autoCloseBody", boolean.class);
        answer.put("autoCreateBucket", boolean.class);
        answer.put("basicPropertyBinding", boolean.class);
        answer.put("bridgeErrorHandler", boolean.class);
        answer.put("bypassGovernanceMode", boolean.class);
        answer.put("configuration", org.apache.camel.component.minio.MinioConfiguration.class);
        answer.put("customHttpClient", okhttp3.OkHttpClient.class);
        answer.put("deleteAfterRead", boolean.class);
        answer.put("deleteAfterWrite", boolean.class);
        answer.put("delimiter", java.lang.String.class);
        answer.put("destinationBucketName", java.lang.String.class);
        answer.put("destinationObjectName", java.lang.String.class);
        answer.put("endpoint", java.lang.String.class);
        answer.put("includeBody", boolean.class);
        answer.put("includeFolders", boolean.class);
        answer.put("includeUserMetadata", boolean.class);
        answer.put("includeVersions", boolean.class);
        answer.put("keyName", java.lang.String.class);
        answer.put("lazyStartProducer", boolean.class);
        answer.put("length", long.class);
        answer.put("matchETag", java.lang.String.class);
        answer.put("maxConnections", int.class);
        answer.put("maxMessagesPerPoll", int.class);
        answer.put("minioClient", io.minio.MinioClient.class);
        answer.put("modifiedSince", java.time.ZonedDateTime.class);
        answer.put("moveAfterRead", boolean.class);
        answer.put("notMatchETag", java.lang.String.class);
        answer.put("objectLock", boolean.class);
        answer.put("objectName", java.lang.String.class);
        answer.put("offset", long.class);
        answer.put("operation", org.apache.camel.component.minio.MinioOperations.class);
        answer.put("pojoRequest", boolean.class);
        answer.put("policy", java.lang.String.class);
        answer.put("prefix", java.lang.String.class);
        answer.put("proxyPort", java.lang.Integer.class);
        answer.put("recursive", boolean.class);
        answer.put("region", java.lang.String.class);
        answer.put("secretKey", java.lang.String.class);
        answer.put("secure", boolean.class);
        answer.put("serverSideEncryption", io.minio.ServerSideEncryption.class);
        answer.put("serverSideEncryptionCustomerKey", io.minio.ServerSideEncryptionCustomerKey.class);
        answer.put("startAfter", java.lang.String.class);
        answer.put("storageClass", java.lang.String.class);
        answer.put("unModifiedSince", java.time.ZonedDateTime.class);
        answer.put("useVersion1", boolean.class);
        answer.put("versionId", java.lang.String.class);
        return answer;
    }

    // Reads one option by name; returns null for unknown names. Mirrors configure().
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        MinioComponent target = (MinioComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesskey":
        case "accessKey": return getOrCreateConfiguration(target).getAccessKey();
        case "autoclosebody":
        case "autoCloseBody": return getOrCreateConfiguration(target).isAutoCloseBody();
        case "autocreatebucket":
        case "autoCreateBucket": return getOrCreateConfiguration(target).isAutoCreateBucket();
        case "basicpropertybinding":
        case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "bypassgovernancemode":
        case "bypassGovernanceMode": return getOrCreateConfiguration(target).isBypassGovernanceMode();
        case "configuration": return target.getConfiguration();
        case "customhttpclient":
        case "customHttpClient": return getOrCreateConfiguration(target).getCustomHttpClient();
        case "deleteafterread":
        case "deleteAfterRead": return getOrCreateConfiguration(target).isDeleteAfterRead();
        case "deleteafterwrite":
        case "deleteAfterWrite": return getOrCreateConfiguration(target).isDeleteAfterWrite();
        case "delimiter": return getOrCreateConfiguration(target).getDelimiter();
        case "destinationbucketname":
        case "destinationBucketName": return getOrCreateConfiguration(target).getDestinationBucketName();
        case "destinationobjectname":
        case "destinationObjectName": return getOrCreateConfiguration(target).getDestinationObjectName();
        case "endpoint": return getOrCreateConfiguration(target).getEndpoint();
        case "includebody":
        case "includeBody": return getOrCreateConfiguration(target).isIncludeBody();
        case "includefolders":
        case "includeFolders": return getOrCreateConfiguration(target).isIncludeFolders();
        case "includeusermetadata":
        case "includeUserMetadata": return getOrCreateConfiguration(target).isIncludeUserMetadata();
        case "includeversions":
        case "includeVersions": return getOrCreateConfiguration(target).isIncludeVersions();
        case "keyname":
        case "keyName": return getOrCreateConfiguration(target).getKeyName();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "length": return getOrCreateConfiguration(target).getLength();
        case "matchetag":
        case "matchETag": return getOrCreateConfiguration(target).getMatchETag();
        case "maxconnections":
        case "maxConnections": return getOrCreateConfiguration(target).getMaxConnections();
        case "maxmessagesperpoll":
        case "maxMessagesPerPoll": return getOrCreateConfiguration(target).getMaxMessagesPerPoll();
        case "minioclient":
        case "minioClient": return getOrCreateConfiguration(target).getMinioClient();
        case "modifiedsince":
        case "modifiedSince": return getOrCreateConfiguration(target).getModifiedSince();
        case "moveafterread":
        case "moveAfterRead": return getOrCreateConfiguration(target).isMoveAfterRead();
        case "notmatchetag":
        case "notMatchETag": return getOrCreateConfiguration(target).getNotMatchETag();
        case "objectlock":
        case "objectLock": return getOrCreateConfiguration(target).isObjectLock();
        case "objectname":
        case "objectName": return getOrCreateConfiguration(target).getObjectName();
        case "offset": return getOrCreateConfiguration(target).getOffset();
        case "operation": return getOrCreateConfiguration(target).getOperation();
        case "pojorequest":
        case "pojoRequest": return getOrCreateConfiguration(target).isPojoRequest();
        case "policy": return getOrCreateConfiguration(target).getPolicy();
        case "prefix": return getOrCreateConfiguration(target).getPrefix();
        case "proxyport":
        case "proxyPort": return getOrCreateConfiguration(target).getProxyPort();
        case "recursive": return getOrCreateConfiguration(target).isRecursive();
        case "region": return getOrCreateConfiguration(target).getRegion();
        case "secretkey":
        case "secretKey": return getOrCreateConfiguration(target).getSecretKey();
        case "secure": return getOrCreateConfiguration(target).isSecure();
        case "serversideencryption":
        case "serverSideEncryption": return getOrCreateConfiguration(target).getServerSideEncryption();
        case "serversideencryptioncustomerkey":
        case "serverSideEncryptionCustomerKey": return getOrCreateConfiguration(target).getServerSideEncryptionCustomerKey();
        case "startafter":
        case "startAfter": return getOrCreateConfiguration(target).getStartAfter();
        case "storageclass":
        case "storageClass": return getOrCreateConfiguration(target).getStorageClass();
        case "unmodifiedsince":
        case "unModifiedSince": return getOrCreateConfiguration(target).getUnModifiedSince();
        case "useversion1":
        case "useVersion1": return getOrCreateConfiguration(target).isUseVersion1();
        case "versionid":
        case "versionId": return getOrCreateConfiguration(target).getVersionId();
        default: return null;
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.serviceusage.v1.model; /** * `Service` is the root object of Google service configuration schema. It describes basic * information about a service, such as the name and the title, and delegates other aspects to sub- * sections. Each sub-section is either a proto message or a repeated proto message that configures * a specific aspect, such as auth. See each proto message definition for details. * * Example: * * type: google.api.Service config_version: 3 name: calendar.googleapis.com title: * Google Calendar API apis: - name: google.calendar.v3.Calendar authentication: * providers: - id: google_calendar_auth jwks_uri: * https://www.googleapis.com/oauth2/v1/certs issuer: https://securetoken.google.com * rules: - selector: "*" requirements: provider_id: google_calendar_auth * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Service Usage API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. 
*/ @SuppressWarnings("javadoc") public final class GoogleApiService extends com.google.api.client.json.GenericJson { /** * A list of API interfaces exported by this service. Only the `name` field of the * google.protobuf.Api needs to be provided by the configuration author, as the remaining fields * will be derived from the IDL during the normalization process. It is an error to specify an API * interface here which cannot be resolved against the associated IDL files. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Api> apis; static { // hack to force ProGuard to consider Api used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Api.class); } /** * Auth configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Authentication authentication; /** * API backend configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Backend backend; /** * Billing configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Billing billing; /** * The semantic version of the service configuration. The config version affects the * interpretation of the service configuration. For example, certain features are enabled by * default for certain config versions. The latest config version is `3`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Long configVersion; /** * Context configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Context context; /** * Configuration for the service control plane. * The value may be {@code null}. */ @com.google.api.client.util.Key private Control control; /** * Custom error configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private CustomError customError; /** * Additional API documentation. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private Documentation documentation; /** * Configuration for network endpoints. If this is empty, then an endpoint with the same name as * the service is automatically generated to service all defined APIs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Endpoint> endpoints; static { // hack to force ProGuard to consider Endpoint used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Endpoint.class); } /** * A list of all enum types included in this API service. Enums referenced directly or indirectly * by the `apis` are automatically included. Enums which are not referenced but shall be included * should be listed here by name. Example: * * enums: - name: google.someapi.v1.SomeEnum * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<ServiceUsageEnum> enums; static { // hack to force ProGuard to consider ServiceUsageEnum used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(ServiceUsageEnum.class); } /** * HTTP configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Http http; /** * A unique ID for a specific instance of this message, typically assigned by the client for * tracking purpose. If empty, the server may choose to generate one instead. Must be no longer * than 60 characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Logging configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Logging logging; /** * Defines the logs used by this service. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<LogDescriptor> logs; /** * Defines the metrics used by this service. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<MetricDescriptor> metrics; /** * Defines the monitored resources used by this service. This is required by the * Service.monitoring and Service.logging configurations. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<MonitoredResourceDescriptor> monitoredResources; /** * Monitoring configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Monitoring monitoring; /** * The service name, which is a DNS-like logical identifier for the service, such as * `calendar.googleapis.com`. The service name typically goes through DNS verification to make * sure the owner of the service also owns the DNS name. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * The Google project that owns this service. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String producerProjectId; /** * Quota configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private Quota quota; /** * Output only. The source information for this configuration if available. * The value may be {@code null}. */ @com.google.api.client.util.Key private SourceInfo sourceInfo; /** * System parameter configuration. * The value may be {@code null}. */ @com.google.api.client.util.Key private SystemParameters systemParameters; /** * A list of all proto message types included in this API service. It serves similar purpose as * [google.api.Service.types], except that these types are not needed by user-defined APIs. * Therefore, they will not show up in the generated discovery doc. This field should only be used * to define system APIs in ESF. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<Type> systemTypes; /** * The product title for this service. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String title; /** * A list of all proto message types included in this API service. Types referenced directly or * indirectly by the `apis` are automatically included. Messages which are not referenced but * shall be included, such as types used by the `google.protobuf.Any` type, should be listed here * by name. Example: * * types: - name: google.protobuf.Int32 * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Type> types; /** * Configuration controlling usage of this service. * The value may be {@code null}. */ @com.google.api.client.util.Key private Usage usage; /** * A list of API interfaces exported by this service. Only the `name` field of the * google.protobuf.Api needs to be provided by the configuration author, as the remaining fields * will be derived from the IDL during the normalization process. It is an error to specify an API * interface here which cannot be resolved against the associated IDL files. * @return value or {@code null} for none */ public java.util.List<Api> getApis() { return apis; } /** * A list of API interfaces exported by this service. Only the `name` field of the * google.protobuf.Api needs to be provided by the configuration author, as the remaining fields * will be derived from the IDL during the normalization process. It is an error to specify an API * interface here which cannot be resolved against the associated IDL files. * @param apis apis or {@code null} for none */ public GoogleApiService setApis(java.util.List<Api> apis) { this.apis = apis; return this; } /** * Auth configuration. * @return value or {@code null} for none */ public Authentication getAuthentication() { return authentication; } /** * Auth configuration. 
* @param authentication authentication or {@code null} for none */ public GoogleApiService setAuthentication(Authentication authentication) { this.authentication = authentication; return this; } /** * API backend configuration. * @return value or {@code null} for none */ public Backend getBackend() { return backend; } /** * API backend configuration. * @param backend backend or {@code null} for none */ public GoogleApiService setBackend(Backend backend) { this.backend = backend; return this; } /** * Billing configuration. * @return value or {@code null} for none */ public Billing getBilling() { return billing; } /** * Billing configuration. * @param billing billing or {@code null} for none */ public GoogleApiService setBilling(Billing billing) { this.billing = billing; return this; } /** * The semantic version of the service configuration. The config version affects the * interpretation of the service configuration. For example, certain features are enabled by * default for certain config versions. The latest config version is `3`. * @return value or {@code null} for none */ public java.lang.Long getConfigVersion() { return configVersion; } /** * The semantic version of the service configuration. The config version affects the * interpretation of the service configuration. For example, certain features are enabled by * default for certain config versions. The latest config version is `3`. * @param configVersion configVersion or {@code null} for none */ public GoogleApiService setConfigVersion(java.lang.Long configVersion) { this.configVersion = configVersion; return this; } /** * Context configuration. * @return value or {@code null} for none */ public Context getContext() { return context; } /** * Context configuration. * @param context context or {@code null} for none */ public GoogleApiService setContext(Context context) { this.context = context; return this; } /** * Configuration for the service control plane. 
* @return value or {@code null} for none */ public Control getControl() { return control; } /** * Configuration for the service control plane. * @param control control or {@code null} for none */ public GoogleApiService setControl(Control control) { this.control = control; return this; } /** * Custom error configuration. * @return value or {@code null} for none */ public CustomError getCustomError() { return customError; } /** * Custom error configuration. * @param customError customError or {@code null} for none */ public GoogleApiService setCustomError(CustomError customError) { this.customError = customError; return this; } /** * Additional API documentation. * @return value or {@code null} for none */ public Documentation getDocumentation() { return documentation; } /** * Additional API documentation. * @param documentation documentation or {@code null} for none */ public GoogleApiService setDocumentation(Documentation documentation) { this.documentation = documentation; return this; } /** * Configuration for network endpoints. If this is empty, then an endpoint with the same name as * the service is automatically generated to service all defined APIs. * @return value or {@code null} for none */ public java.util.List<Endpoint> getEndpoints() { return endpoints; } /** * Configuration for network endpoints. If this is empty, then an endpoint with the same name as * the service is automatically generated to service all defined APIs. * @param endpoints endpoints or {@code null} for none */ public GoogleApiService setEndpoints(java.util.List<Endpoint> endpoints) { this.endpoints = endpoints; return this; } /** * A list of all enum types included in this API service. Enums referenced directly or indirectly * by the `apis` are automatically included. Enums which are not referenced but shall be included * should be listed here by name. 
Example: * * enums: - name: google.someapi.v1.SomeEnum * @return value or {@code null} for none */ public java.util.List<ServiceUsageEnum> getEnums() { return enums; } /** * A list of all enum types included in this API service. Enums referenced directly or indirectly * by the `apis` are automatically included. Enums which are not referenced but shall be included * should be listed here by name. Example: * * enums: - name: google.someapi.v1.SomeEnum * @param enums enums or {@code null} for none */ public GoogleApiService setEnums(java.util.List<ServiceUsageEnum> enums) { this.enums = enums; return this; } /** * HTTP configuration. * @return value or {@code null} for none */ public Http getHttp() { return http; } /** * HTTP configuration. * @param http http or {@code null} for none */ public GoogleApiService setHttp(Http http) { this.http = http; return this; } /** * A unique ID for a specific instance of this message, typically assigned by the client for * tracking purpose. If empty, the server may choose to generate one instead. Must be no longer * than 60 characters. * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * A unique ID for a specific instance of this message, typically assigned by the client for * tracking purpose. If empty, the server may choose to generate one instead. Must be no longer * than 60 characters. * @param id id or {@code null} for none */ public GoogleApiService setId(java.lang.String id) { this.id = id; return this; } /** * Logging configuration. * @return value or {@code null} for none */ public Logging getLogging() { return logging; } /** * Logging configuration. * @param logging logging or {@code null} for none */ public GoogleApiService setLogging(Logging logging) { this.logging = logging; return this; } /** * Defines the logs used by this service. 
* @return value or {@code null} for none */ public java.util.List<LogDescriptor> getLogs() { return logs; } /** * Defines the logs used by this service. * @param logs logs or {@code null} for none */ public GoogleApiService setLogs(java.util.List<LogDescriptor> logs) { this.logs = logs; return this; } /** * Defines the metrics used by this service. * @return value or {@code null} for none */ public java.util.List<MetricDescriptor> getMetrics() { return metrics; } /** * Defines the metrics used by this service. * @param metrics metrics or {@code null} for none */ public GoogleApiService setMetrics(java.util.List<MetricDescriptor> metrics) { this.metrics = metrics; return this; } /** * Defines the monitored resources used by this service. This is required by the * Service.monitoring and Service.logging configurations. * @return value or {@code null} for none */ public java.util.List<MonitoredResourceDescriptor> getMonitoredResources() { return monitoredResources; } /** * Defines the monitored resources used by this service. This is required by the * Service.monitoring and Service.logging configurations. * @param monitoredResources monitoredResources or {@code null} for none */ public GoogleApiService setMonitoredResources(java.util.List<MonitoredResourceDescriptor> monitoredResources) { this.monitoredResources = monitoredResources; return this; } /** * Monitoring configuration. * @return value or {@code null} for none */ public Monitoring getMonitoring() { return monitoring; } /** * Monitoring configuration. * @param monitoring monitoring or {@code null} for none */ public GoogleApiService setMonitoring(Monitoring monitoring) { this.monitoring = monitoring; return this; } /** * The service name, which is a DNS-like logical identifier for the service, such as * `calendar.googleapis.com`. The service name typically goes through DNS verification to make * sure the owner of the service also owns the DNS name. 
* @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * The service name, which is a DNS-like logical identifier for the service, such as * `calendar.googleapis.com`. The service name typically goes through DNS verification to make * sure the owner of the service also owns the DNS name. * @param name name or {@code null} for none */ public GoogleApiService setName(java.lang.String name) { this.name = name; return this; } /** * The Google project that owns this service. * @return value or {@code null} for none */ public java.lang.String getProducerProjectId() { return producerProjectId; } /** * The Google project that owns this service. * @param producerProjectId producerProjectId or {@code null} for none */ public GoogleApiService setProducerProjectId(java.lang.String producerProjectId) { this.producerProjectId = producerProjectId; return this; } /** * Quota configuration. * @return value or {@code null} for none */ public Quota getQuota() { return quota; } /** * Quota configuration. * @param quota quota or {@code null} for none */ public GoogleApiService setQuota(Quota quota) { this.quota = quota; return this; } /** * Output only. The source information for this configuration if available. * @return value or {@code null} for none */ public SourceInfo getSourceInfo() { return sourceInfo; } /** * Output only. The source information for this configuration if available. * @param sourceInfo sourceInfo or {@code null} for none */ public GoogleApiService setSourceInfo(SourceInfo sourceInfo) { this.sourceInfo = sourceInfo; return this; } /** * System parameter configuration. * @return value or {@code null} for none */ public SystemParameters getSystemParameters() { return systemParameters; } /** * System parameter configuration. 
* @param systemParameters systemParameters or {@code null} for none */ public GoogleApiService setSystemParameters(SystemParameters systemParameters) { this.systemParameters = systemParameters; return this; } /** * A list of all proto message types included in this API service. It serves similar purpose as * [google.api.Service.types], except that these types are not needed by user-defined APIs. * Therefore, they will not show up in the generated discovery doc. This field should only be used * to define system APIs in ESF. * @return value or {@code null} for none */ public java.util.List<Type> getSystemTypes() { return systemTypes; } /** * A list of all proto message types included in this API service. It serves similar purpose as * [google.api.Service.types], except that these types are not needed by user-defined APIs. * Therefore, they will not show up in the generated discovery doc. This field should only be used * to define system APIs in ESF. * @param systemTypes systemTypes or {@code null} for none */ public GoogleApiService setSystemTypes(java.util.List<Type> systemTypes) { this.systemTypes = systemTypes; return this; } /** * The product title for this service. * @return value or {@code null} for none */ public java.lang.String getTitle() { return title; } /** * The product title for this service. * @param title title or {@code null} for none */ public GoogleApiService setTitle(java.lang.String title) { this.title = title; return this; } /** * A list of all proto message types included in this API service. Types referenced directly or * indirectly by the `apis` are automatically included. Messages which are not referenced but * shall be included, such as types used by the `google.protobuf.Any` type, should be listed here * by name. Example: * * types: - name: google.protobuf.Int32 * @return value or {@code null} for none */ public java.util.List<Type> getTypes() { return types; } /** * A list of all proto message types included in this API service. 
Types referenced directly or * indirectly by the `apis` are automatically included. Messages which are not referenced but * shall be included, such as types used by the `google.protobuf.Any` type, should be listed here * by name. Example: * * types: - name: google.protobuf.Int32 * @param types types or {@code null} for none */ public GoogleApiService setTypes(java.util.List<Type> types) { this.types = types; return this; } /** * Configuration controlling usage of this service. * @return value or {@code null} for none */ public Usage getUsage() { return usage; } /** * Configuration controlling usage of this service. * @param usage usage or {@code null} for none */ public GoogleApiService setUsage(Usage usage) { this.usage = usage; return this; } @Override public GoogleApiService set(String fieldName, Object value) { return (GoogleApiService) super.set(fieldName, value); } @Override public GoogleApiService clone() { return (GoogleApiService) super.clone(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.http;

import static org.apache.jena.http.HttpLib.*;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublisher;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;

import org.apache.jena.atlas.io.IO;
import org.apache.jena.atlas.web.HttpException;
import org.apache.jena.graph.Graph;
import org.apache.jena.riot.*;
import org.apache.jena.riot.system.StreamRDF;
import org.apache.jena.riot.system.StreamRDFLib;
import org.apache.jena.riot.web.HttpNames;
import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.exec.http.GSP;
import org.apache.jena.sparql.graph.GraphFactory;

/**
 * A collection of convenience operations for HTTP level operations
 * for RDF related tasks. This does not include GSP naming
 * which is in {@link GSP}.
 *
 * See also {@link AsyncHttpRDF}.
*/ public class HttpRDF { // ---- GET /** * GET a graph from a URL * * @throws HttpException */ public static Graph httpGetGraph(String url) { return httpGetGraph(HttpEnv.getDftHttpClient(), url); } /** * GET a graph from a URL using the provided "Accept" header. * * @throws HttpException */ public static Graph httpGetGraph(String url, String acceptHeader) { return httpGetGraph(HttpEnv.getDftHttpClient(), url, acceptHeader); } /** * GET a graph from a URL using the {@link HttpClient} provided. * * @throws HttpException */ public static Graph httpGetGraph(HttpClient httpClient, String url) { Graph graph = GraphFactory.createDefaultGraph(); httpGetToStream(httpClient, url, WebContent.defaultGraphAcceptHeader, StreamRDFLib.graph(graph)); return graph; } /** * GET a graph from a URL using the {@link HttpClient} provided * and the "Accept" header. * * @throws HttpException */ public static Graph httpGetGraph(HttpClient httpClient, String url, String acceptHeader) { Graph graph = GraphFactory.createDefaultGraph(); httpGetToStream(httpClient, url, acceptHeader, StreamRDFLib.graph(graph)); return graph; } /** * Send the RDF data from the resource at the URL to the StreamRDF. * Beware of parse errors! * @throws HttpException */ public static void httpGetToStream(String url, String acceptHeader, StreamRDF dest) { httpGetToStream(HttpEnv.getDftHttpClient(), url, acceptHeader, dest); } /** * Read the RDF data from the resource at the URL and send to the StreamRDF. * <p> * Beware of parse errors! * @throws HttpException * @throws RiotException */ public static void httpGetToStream(HttpClient client, String url, String acceptHeader, StreamRDF dest) { if ( acceptHeader == null ) acceptHeader = "*/*"; httpGetToStream(client, url, HttpLib.setAcceptHeader(acceptHeader), dest); } /** * Read the RDF data from the resource at the URL and send to the StreamRDF. * <p> * Beware of parse errors! 
* @throws HttpException * @throws RiotException */ public static void httpGetToStream(HttpClient client, String url, Map<String, String> headers, StreamRDF dest) { httpGetToStream(client, url, HttpLib.setHeaders(headers), dest); } // Worker private static void httpGetToStream(HttpClient client, String url, Consumer<HttpRequest.Builder> modifier, StreamRDF dest) { HttpResponse<InputStream> response = execGetToInput(client, url, modifier); httpResponseToStreamRDF(url, response, dest); } /*package*/ static void httpResponseToStreamRDF(String url, HttpResponse<InputStream> response, StreamRDF dest) { InputStream in = handleResponseInputStream(response); String base = determineBaseURI(url, response); Lang lang = determineSyntax(response, Lang.RDFXML); try { RDFParser.create() .base(base) .source(in) .lang(lang) .parse(dest); } catch (RiotParseException ex) { // We only read part of the input stream. throw ex; } finally { // Even if parsing finished, it is possible we only read part of the input stream (e.g. RDF/XML). 
finish(in); } } /** * MUST consume or close the input stream * @see HttpLib#finish(HttpResponse) */ private static HttpResponse<InputStream> execGetToInput(HttpClient client, String url, Consumer<HttpRequest.Builder> modifier) { Objects.requireNonNull(client); Objects.requireNonNull(url); HttpRequest requestData = HttpLib.newGetRequest(url, modifier); HttpResponse<InputStream> response = execute(client, requestData); handleHttpStatusCode(response); return response; } public static void httpPostGraph(String url, Graph graph) { httpPostGraph(HttpEnv.getDftHttpClient(), url, graph, HttpEnv.defaultTriplesFormat); } public static void httpPostGraph(HttpClient httpClient, String url, Graph graph, RDFFormat format) { httpPostGraph(httpClient, url, graph, format, null); } public static void httpPostGraph(HttpClient httpClient, String url, Graph graph, RDFFormat format, Map<String, String> httpHeaders) { BodyPublisher bodyPublisher = graphToHttpBody(graph, format); pushBody(httpClient, url, Push.POST, bodyPublisher, format, httpHeaders); } /** Post a graph and expect an RDF graph back as the result. */ public static Graph httpPostGraphRtn(String url, Graph graph) { return httpPostGraphRtn(HttpEnv.getDftHttpClient(), url, graph, HttpEnv.defaultTriplesFormat, null); } /** Post a graph and expect an RDF graph back as the result. 
*/ public static Graph httpPostGraphRtn(HttpClient httpClient, String url, Graph graph, RDFFormat format, Map<String, String> httpHeaders) { BodyPublisher bodyPublisher = graphToHttpBody(graph, HttpEnv.defaultTriplesFormat); HttpResponse<InputStream> httpResponse = pushWithResponse(httpClient, url, Push.POST, bodyPublisher, format, httpHeaders); Graph graphResponse = GraphFactory.createDefaultGraph(); StreamRDF dest = StreamRDFLib.graph(graphResponse); httpResponseToStreamRDF(url, httpResponse, dest); return graphResponse; } public static void httpPostDataset(HttpClient httpClient, String url, DatasetGraph dataset, RDFFormat format) { httpPostDataset(httpClient, url, dataset, format, null); } public static void httpPostDataset(HttpClient httpClient, String url, DatasetGraph dataset, RDFFormat format, Map<String, String> httpHeaders) { BodyPublisher bodyPublisher = datasetToHttpBody(dataset, format); pushBody(httpClient, url, Push.POST, bodyPublisher, format, httpHeaders); } public static void httpPutGraph(String url, Graph graph) { httpPutGraph(HttpEnv.getDftHttpClient(), url, graph, HttpEnv.defaultTriplesFormat); } public static void httpPutGraph(HttpClient httpClient, String url, Graph graph, RDFFormat fmt) { httpPutGraph(httpClient, url, graph, fmt, null); } public static void httpPutGraph(HttpClient httpClient, String url, Graph graph, RDFFormat format, Map<String, String> httpHeaders) { BodyPublisher bodyPublisher = graphToHttpBody(graph, format); pushBody(httpClient, url, Push.PUT, bodyPublisher, format, httpHeaders); } public static void httpPutDataset(HttpClient httpClient, String url, DatasetGraph dataset, RDFFormat format) { httpPutDataset(httpClient, url, dataset, format, null); } public static void httpPutDataset(HttpClient httpClient, String url, DatasetGraph dataset, RDFFormat format, Map<String, String> httpHeaders) { BodyPublisher bodyPublisher = datasetToHttpBody(dataset, format); pushBody(httpClient, url, Push.PUT, bodyPublisher, format, 
httpHeaders); } // Shared between push* and put* private static void pushBody(HttpClient httpClient, String url, Push style, BodyPublisher bodyPublisher, RDFFormat format, Map<String, String> httpHeaders) { String contentType = format.getLang().getHeaderString(); if ( httpHeaders == null ) httpHeaders = Collections.singletonMap(HttpNames.hContentType, contentType); else httpHeaders.put(HttpNames.hContentType, contentType); HttpLib.httpPushData(httpClient, style, url, HttpLib.setHeaders(httpHeaders), bodyPublisher); } private static HttpResponse<InputStream> pushWithResponse(HttpClient httpClient, String url, Push style, BodyPublisher bodyPublisher, RDFFormat format, Map<String, String> httpHeaders) { String contentType = format.getLang().getHeaderString(); if ( httpHeaders == null ) httpHeaders = Collections.singletonMap(HttpNames.hContentType, contentType); else httpHeaders.put(HttpNames.hContentType, contentType); return HttpLib.httpPushWithResponse(httpClient, style, url, HttpLib.setHeaders(httpHeaders), bodyPublisher); } public static void httpDeleteGraph(String url) { httpDeleteGraph(HttpEnv.getDftHttpClient(), url); } public static void httpDeleteGraph(HttpClient httpClient, String url) { URI uri = toRequestURI(url); HttpRequest requestData = HttpLib.requestBuilderFor(url) .DELETE() .uri(uri) .build(); HttpResponse<InputStream> response = execute(httpClient, requestData); handleResponseNoBody(response); } /** RDF {@link Lang}. */ /*package*/ static <T> Lang determineSyntax(HttpResponse<T> response, Lang dftSyntax) { String ctStr = determineContentType(response); Lang lang = RDFLanguages.contentTypeToLang(ctStr); return dft(lang, dftSyntax); } /** * Content-Type, without charset. * <p> * RDF formats are either UTF-8 or XML, where the charset is determined by the * processing instruction at the start of the content. Parsing is on byte * streams. 
*/ /*package*/ static <T> String determineContentType(HttpResponse<T> response) { String ctStr = HttpLib.responseHeader(response, HttpNames.hContentType); if ( ctStr != null ) { int i = ctStr.indexOf(';'); if ( i >= 0 ) ctStr = ctStr.substring(0, i); } return ctStr; } /*package*/ static <T> String determineBaseURI(String url, HttpResponse<T> response) { // RFC 7231: 3.1.4.2. and Appendix B: Content-Location does not affect base URI. // With help from Stuart Williams. URI uri = response.uri(); return uri.toString(); } // This sets Content-Length but requires the entire graph being serialized // to get the serialization size. // // An alternative is to stream the output but then the HTTP connection can't // be reused (don't know when the request finishes, only closing the connection // indicates that). /*package*/ static BodyPublisher graphToHttpBody(Graph graph, RDFFormat syntax) { ByteArrayOutputStream out = new ByteArrayOutputStream(128*1024); RDFDataMgr.write(out, graph, syntax); byte[] bytes = out.toByteArray(); IO.close(out); return BodyPublishers.ofByteArray(bytes); } /*package*/ static BodyPublisher datasetToHttpBody(DatasetGraph dataset, RDFFormat syntax) { ByteArrayOutputStream out = new ByteArrayOutputStream(128*1024); RDFDataMgr.write(out, dataset, syntax); byte[] bytes = out.toByteArray(); IO.close(out); return BodyPublishers.ofByteArray(bytes); } }
/* * @(#)GridLayout.java 1.42 05/11/17 * * Copyright 2006 Sun Microsystems, Inc. All rights reserved. * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ package java.awt; /** * The <code>GridLayout</code> class is a layout manager that * lays out a container's components in a rectangular grid. * The container is divided into equal-sized rectangles, * and one component is placed in each rectangle. * For example, the following is an applet that lays out six buttons * into three rows and two columns: * <p> * <hr><blockquote> * <pre> * import java.awt.*; * import java.applet.Applet; * public class ButtonGrid extends Applet { * public void init() { * setLayout(new GridLayout(3,2)); * add(new Button("1")); * add(new Button("2")); * add(new Button("3")); * add(new Button("4")); * add(new Button("5")); * add(new Button("6")); * } * } * </pre></blockquote><hr> * <p> * If the container's <code>ComponentOrientation</code> property is horizontal * and left-to-right, the above example produces the output shown in Figure 1. * If the container's <code>ComponentOrientation</code> property is horizontal * and right-to-left, the example produces the output shown in Figure 2. * <p> * <center><table COLS=2 WIDTH=600 summary="layout"> * <tr ALIGN=CENTER> * <td><img SRC="doc-files/GridLayout-1.gif" * alt="Shows 6 buttons in rows of 2. Row 1 shows buttons 1 then 2. * Row 2 shows buttons 3 then 4. Row 3 shows buttons 5 then 6."> * </td> * * <td ALIGN=CENTER><img SRC="doc-files/GridLayout-2.gif" * alt="Shows 6 buttons in rows of 2. Row 1 shows buttons 2 then 1. * Row 2 shows buttons 4 then 3. 
Row 3 shows buttons 6 then 5.">
 * </td>
 * </tr>
 *
 * <tr ALIGN=CENTER>
 * <td>Figure 1: Horizontal, Left-to-Right</td>
 *
 * <td>Figure 2: Horizontal, Right-to-Left</td>
 * </tr>
 * </table></center>
 * <p>
 * When both the number of rows and the number of columns have
 * been set to non-zero values, either by a constructor or
 * by the <tt>setRows</tt> and <tt>setColumns</tt> methods, the number of
 * columns specified is ignored. Instead, the number of
 * columns is determined from the specified number of rows
 * and the total number of components in the layout. So, for
 * example, if three rows and two columns have been specified
 * and nine components are added to the layout, they will
 * be displayed as three rows of three columns. Specifying
 * the number of columns affects the layout only when the
 * number of rows is set to zero.
 *
 * @version 1.42, 11/17/05
 * @author Arthur van Hoff
 * @since JDK1.0
 */
public class GridLayout implements LayoutManager, java.io.Serializable {
    /*
     * serialVersionUID
     */
    private static final long serialVersionUID = -7411804673224730901L;

    /**
     * This is the horizontal gap (in pixels) which specifies the space
     * between columns. They can be changed at any time.
     * This should be a non-negative integer.
     *
     * @serial
     * @see #getHgap()
     * @see #setHgap(int)
     */
    int hgap;

    /**
     * This is the vertical gap (in pixels) which specifies the space
     * between rows. They can be changed at any time.
     * This should be a non negative integer.
     *
     * @serial
     * @see #getVgap()
     * @see #setVgap(int)
     */
    int vgap;

    /**
     * This is the number of rows specified for the grid. The number
     * of rows can be changed at any time.
     * This should be a non negative integer, where '0' means
     * 'any number' meaning that the number of Rows in that
     * dimension depends on the other dimension.
     *
     * @serial
     * @see #getRows()
     * @see #setRows(int)
     */
    int rows;

    /**
     * This is the number of columns specified for the grid. The number
     * of columns can be changed at any time.
     * This should be a non negative integer, where '0' means
     * 'any number' meaning that the number of Columns in that
     * dimension depends on the other dimension.
     *
     * @serial
     * @see #getColumns()
     * @see #setColumns(int)
     */
    int cols;

    /**
     * Creates a grid layout with a default of one column per component,
     * in a single row.
     * @since JDK1.1
     */
    public GridLayout() {
        this(1, 0, 0, 0);
    }

    /**
     * Creates a grid layout with the specified number of rows and
     * columns. All components in the layout are given equal size.
     * <p>
     * One, but not both, of <code>rows</code> and <code>cols</code> can
     * be zero, which means that any number of objects can be placed in a
     * row or in a column.
     * @param rows the rows, with the value zero meaning
     *             any number of rows.
     * @param cols the columns, with the value zero meaning
     *             any number of columns.
     */
    public GridLayout(int rows, int cols) {
        // Gaps default to zero pixels.
        this(rows, cols, 0, 0);
    }

    /**
     * Creates a grid layout with the specified number of rows and
     * columns. All components in the layout are given equal size.
     * <p>
     * In addition, the horizontal and vertical gaps are set to the
     * specified values. Horizontal gaps are placed between each
     * of the columns. Vertical gaps are placed between each of
     * the rows.
     * <p>
     * One, but not both, of <code>rows</code> and <code>cols</code> can
     * be zero, which means that any number of objects can be placed in a
     * row or in a column.
     * <p>
     * All <code>GridLayout</code> constructors defer to this one.
* @param rows the rows, with the value zero meaning * any number of rows * @param cols the columns, with the value zero meaning * any number of columns * @param hgap the horizontal gap * @param vgap the vertical gap * @exception IllegalArgumentException if the value of both * <code>rows</code> and <code>cols</code> is * set to zero */ public GridLayout(int rows, int cols, int hgap, int vgap) { if ((rows == 0) && (cols == 0)) { throw new IllegalArgumentException("rows and cols cannot both be zero"); } this.rows = rows; this.cols = cols; this.hgap = hgap; this.vgap = vgap; } /** * Gets the number of rows in this layout. * @return the number of rows in this layout * @since JDK1.1 */ public int getRows() { return rows; } /** * Sets the number of rows in this layout to the specified value. * @param rows the number of rows in this layout * @exception IllegalArgumentException if the value of both * <code>rows</code> and <code>cols</code> is set to zero * @since JDK1.1 */ public void setRows(int rows) { if ((rows == 0) && (this.cols == 0)) { throw new IllegalArgumentException("rows and cols cannot both be zero"); } this.rows = rows; } /** * Gets the number of columns in this layout. * @return the number of columns in this layout * @since JDK1.1 */ public int getColumns() { return cols; } /** * Sets the number of columns in this layout to the specified value. * Setting the number of columns has no affect on the layout * if the number of rows specified by a constructor or by * the <tt>setRows</tt> method is non-zero. In that case, the number * of columns displayed in the layout is determined by the total * number of components and the number of rows specified. 
* @param cols the number of columns in this layout * @exception IllegalArgumentException if the value of both * <code>rows</code> and <code>cols</code> is set to zero * @since JDK1.1 */ public void setColumns(int cols) { if ((cols == 0) && (this.rows == 0)) { throw new IllegalArgumentException("rows and cols cannot both be zero"); } this.cols = cols; } /** * Gets the horizontal gap between components. * @return the horizontal gap between components * @since JDK1.1 */ public int getHgap() { return hgap; } /** * Sets the horizontal gap between components to the specified value. * @param hgap the horizontal gap between components * @since JDK1.1 */ public void setHgap(int hgap) { this.hgap = hgap; } /** * Gets the vertical gap between components. * @return the vertical gap between components * @since JDK1.1 */ public int getVgap() { return vgap; } /** * Sets the vertical gap between components to the specified value. * @param vgap the vertical gap between components * @since JDK1.1 */ public void setVgap(int vgap) { this.vgap = vgap; } /** * Adds the specified component with the specified name to the layout. * @param name the name of the component * @param comp the component to be added */ public void addLayoutComponent(String name, Component comp) { } /** * Removes the specified component from the layout. * @param comp the component to be removed */ public void removeLayoutComponent(Component comp) { } /** * Determines the preferred size of the container argument using * this grid layout. * <p> * The preferred width of a grid layout is the largest preferred * width of all of the components in the container times the number of * columns, plus the horizontal padding times the number of columns * minus one, plus the left and right insets of the target container. 
* <p> * The preferred height of a grid layout is the largest preferred * height of all of the components in the container times the number of * rows, plus the vertical padding times the number of rows minus one, * plus the top and bottom insets of the target container. * * @param parent the container in which to do the layout * @return the preferred dimensions to lay out the * subcomponents of the specified container * @see java.awt.GridLayout#minimumLayoutSize * @see java.awt.Container#getPreferredSize() */ public Dimension preferredLayoutSize(Container parent) { synchronized (parent.getTreeLock()) { Insets insets = parent.getInsets(); int ncomponents = parent.getComponentCount(); int nrows = rows; int ncols = cols; if (nrows > 0) { ncols = (ncomponents + nrows - 1) / nrows; } else { nrows = (ncomponents + ncols - 1) / ncols; } int w = 0; int h = 0; for (int i = 0 ; i < ncomponents ; i++) { Component comp = parent.getComponent(i); Dimension d = comp.getPreferredSize(); if (w < d.width) { w = d.width; } if (h < d.height) { h = d.height; } } return new Dimension(insets.left + insets.right + ncols*w + (ncols-1)*hgap, insets.top + insets.bottom + nrows*h + (nrows-1)*vgap); } } /** * Determines the minimum size of the container argument using this * grid layout. * <p> * The minimum width of a grid layout is the largest minimum width * of all of the components in the container times the number of columns, * plus the horizontal padding times the number of columns minus one, * plus the left and right insets of the target container. * <p> * The minimum height of a grid layout is the largest minimum height * of all of the components in the container times the number of rows, * plus the vertical padding times the number of rows minus one, plus * the top and bottom insets of the target container. 
* * @param parent the container in which to do the layout * @return the minimum dimensions needed to lay out the * subcomponents of the specified container * @see java.awt.GridLayout#preferredLayoutSize * @see java.awt.Container#doLayout */ public Dimension minimumLayoutSize(Container parent) { synchronized (parent.getTreeLock()) { Insets insets = parent.getInsets(); int ncomponents = parent.getComponentCount(); int nrows = rows; int ncols = cols; if (nrows > 0) { ncols = (ncomponents + nrows - 1) / nrows; } else { nrows = (ncomponents + ncols - 1) / ncols; } int w = 0; int h = 0; for (int i = 0 ; i < ncomponents ; i++) { Component comp = parent.getComponent(i); Dimension d = comp.getMinimumSize(); if (w < d.width) { w = d.width; } if (h < d.height) { h = d.height; } } return new Dimension(insets.left + insets.right + ncols*w + (ncols-1)*hgap, insets.top + insets.bottom + nrows*h + (nrows-1)*vgap); } } /** * Lays out the specified container using this layout. * <p> * This method reshapes the components in the specified target * container in order to satisfy the constraints of the * <code>GridLayout</code> object. * <p> * The grid layout manager determines the size of individual * components by dividing the free space in the container into * equal-sized portions according to the number of rows and columns * in the layout. The container's free space equals the container's * size minus any insets and any specified horizontal or vertical * gap. All components in a grid layout are given the same size. 
* * @param parent the container in which to do the layout * @see java.awt.Container * @see java.awt.Container#doLayout */ public void layoutContainer(Container parent) { synchronized (parent.getTreeLock()) { Insets insets = parent.getInsets(); int ncomponents = parent.getComponentCount(); int nrows = rows; int ncols = cols; boolean ltr = parent.getComponentOrientation().isLeftToRight(); if (ncomponents == 0) { return; } if (nrows > 0) { ncols = (ncomponents + nrows - 1) / nrows; } else { nrows = (ncomponents + ncols - 1) / ncols; } int w = parent.width - (insets.left + insets.right); int h = parent.height - (insets.top + insets.bottom); w = (w - (ncols - 1) * hgap) / ncols; h = (h - (nrows - 1) * vgap) / nrows; if (ltr) { for (int c = 0, x = insets.left ; c < ncols ; c++, x += w + hgap) { for (int r = 0, y = insets.top ; r < nrows ; r++, y += h + vgap) { int i = r * ncols + c; if (i < ncomponents) { parent.getComponent(i).setBounds(x, y, w, h); } } } } else { for (int c = 0, x = parent.width - insets.right - w; c < ncols ; c++, x -= w + hgap) { for (int r = 0, y = insets.top ; r < nrows ; r++, y += h + vgap) { int i = r * ncols + c; if (i < ncomponents) { parent.getComponent(i).setBounds(x, y, w, h); } } } } } } /** * Returns the string representation of this grid layout's values. * @return a string representation of this grid layout */ public String toString() { return getClass().getName() + "[hgap=" + hgap + ",vgap=" + vgap + ",rows=" + rows + ",cols=" + cols + "]"; } }
/*
 * Licensed to GraphHopper and Peter Karich under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * GraphHopper licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.graphhopper.routing.util;

import com.graphhopper.reader.OSMWay;
import com.graphhopper.reader.OSMRelation;
import static com.graphhopper.routing.util.PriorityCode.*;
import com.graphhopper.util.Helper;
import com.graphhopper.util.InstructionAnnotation;
import com.graphhopper.util.Translation;

import java.util.*;

/**
 * Defines bit layout of bicycles (not motorcycles) for speed, access and relations (network).
 * <p/>
 * @author Peter Karich
 * @author Nop
 * @author ratrun
 */
public class BikeCommonFlagEncoder extends AbstractFlagEncoder
{
    /**
     * Reports whether this edge is unpaved.
     */
    public static final int K_UNPAVED = 100;
    // Fallback speed (km/h) when the cyclist has to dismount.
    protected static final int PUSHING_SECTION_SPEED = 4;
    // Single flag bit marking unpaved ways; assigned in defineWayBits().
    private long unpavedBit = 0;
    // Pushing section highways are parts where you need to get off your bike and push it (German: Schiebestrecke)
    protected final HashSet<String> pushingSections = new HashSet<String>();
    protected final HashSet<String> oppositeLanes = new HashSet<String>();
    protected final Set<String> preferHighwayTags = new HashSet<String>();
    protected final Set<String> avoidHighwayTags = new HashSet<String>();
    protected final Set<String> unpavedSurfaceTags = new HashSet<String>();
    // Speed tables (km/h) keyed by OSM tag value.
    private final Map<String, Integer> trackTypeSpeeds = new HashMap<String, Integer>();
    private final Map<String, Integer> surfaceSpeeds = new HashMap<String, Integer>();
    private final Set<String> roadValues = new HashSet<String>();
    private final Map<String, Integer> highwaySpeeds = new HashMap<String, Integer>();
    // convert network tag of bicycle routes into a way route code
    private final Map<String, Integer> bikeNetworkToCode = new HashMap<String, Integer>();
    protected EncodedValue relationCodeEncoder;
    private EncodedValue wayTypeEncoder;
    private EncodedValue preferWayEncoder;
    // Car speed limit which switches the preference from UNCHANGED to AVOID_IF_POSSIBLE
    private int avoidSpeedLimit;
    // This is the specific bicycle class
    private String specificBicycleClass;

    protected BikeCommonFlagEncoder( int speedBits, double speedFactor, int maxTurnCosts )
    {
        super(speedBits, speedFactor, maxTurnCosts);
        // strict set, usually vehicle and agricultural/forestry are ignored by cyclists
        restrictions.addAll(Arrays.asList("bicycle", "access"));
        restrictedValues.add("private");
        restrictedValues.add("no");
        restrictedValues.add("restricted");
        restrictedValues.add("military");

        intendedValues.add("yes");
        intendedValues.add("designated");
        intendedValues.add("official");
        intendedValues.add("permissive");

        oppositeLanes.add("opposite");
        oppositeLanes.add("opposite_lane");
        oppositeLanes.add("opposite_track");

        setBlockByDefault(false);
        potentialBarriers.add("gate");
        // potentialBarriers.add("lift_gate");
        potentialBarriers.add("swing_gate");

        absoluteBarriers.add("stile");
        absoluteBarriers.add("turnstile");

        // make intermodal connections possible but mark as pushing section
        acceptedRailways.add("platform");

        unpavedSurfaceTags.add("unpaved");
        unpavedSurfaceTags.add("gravel");
        unpavedSurfaceTags.add("ground");
        unpavedSurfaceTags.add("dirt");
        unpavedSurfaceTags.add("grass");
        unpavedSurfaceTags.add("compacted");
        unpavedSurfaceTags.add("earth");
        unpavedSurfaceTags.add("fine_gravel");
        unpavedSurfaceTags.add("grass_paver");
        unpavedSurfaceTags.add("ice");
        unpavedSurfaceTags.add("mud");
        unpavedSurfaceTags.add("salt");
        unpavedSurfaceTags.add("sand");
        unpavedSurfaceTags.add("wood");

        roadValues.add("living_street");
        roadValues.add("road");
        roadValues.add("service");
        roadValues.add("unclassified");
        roadValues.add("residential");
        roadValues.add("trunk");
        roadValues.add("trunk_link");
        roadValues.add("primary");
        roadValues.add("primary_link");
        roadValues.add("secondary");
        roadValues.add("secondary_link");
        roadValues.add("tertiary");
        roadValues.add("tertiary_link");

        maxPossibleSpeed = 30;

        setTrackTypeSpeed("grade1", 18); // paved
        setTrackTypeSpeed("grade2", 12); // now unpaved ...
        setTrackTypeSpeed("grade3", 8);
        setTrackTypeSpeed("grade4", 6);
        setTrackTypeSpeed("grade5", 4); // like sand/grass

        setSurfaceSpeed("paved", 18);
        setSurfaceSpeed("asphalt", 18);
        setSurfaceSpeed("cobblestone", 8);
        setSurfaceSpeed("cobblestone:flattened", 10);
        setSurfaceSpeed("sett", 10);
        setSurfaceSpeed("concrete", 18);
        setSurfaceSpeed("concrete:lanes", 16);
        setSurfaceSpeed("concrete:plates", 16);
        setSurfaceSpeed("paving_stones", 12);
        setSurfaceSpeed("paving_stones:30", 12);
        setSurfaceSpeed("unpaved", 14);
        setSurfaceSpeed("compacted", 16);
        setSurfaceSpeed("dirt", 10);
        setSurfaceSpeed("earth", 12);
        setSurfaceSpeed("fine_gravel", 18);
        setSurfaceSpeed("grass", 8);
        setSurfaceSpeed("grass_paver", 8);
        setSurfaceSpeed("gravel", 12);
        setSurfaceSpeed("ground", 12);
        setSurfaceSpeed("ice", PUSHING_SECTION_SPEED / 2);
        setSurfaceSpeed("metal", 10);
        setSurfaceSpeed("mud", 10);
        setSurfaceSpeed("pebblestone", 16);
        setSurfaceSpeed("salt", 6);
        setSurfaceSpeed("sand", 6);
        setSurfaceSpeed("wood", 6);

        setHighwaySpeed("living_street", 6);
        setHighwaySpeed("steps", PUSHING_SECTION_SPEED / 2);

        setHighwaySpeed("cycleway", 18);
        setHighwaySpeed("path", 12);
        setHighwaySpeed("footway", 6);
        setHighwaySpeed("pedestrian", 6);
        setHighwaySpeed("track", 12);
        setHighwaySpeed("service", 14);
        setHighwaySpeed("residential", 18);
        // no other highway applies:
        setHighwaySpeed("unclassified", 16);
        // unknown road:
        setHighwaySpeed("road", 12);

        setHighwaySpeed("trunk", 18);
        setHighwaySpeed("trunk_link", 18);
        setHighwaySpeed("primary", 18);
        setHighwaySpeed("primary_link", 18);
        setHighwaySpeed("secondary", 18);
        setHighwaySpeed("secondary_link", 18);
        setHighwaySpeed("tertiary", 18);
        setHighwaySpeed("tertiary_link", 18);

        // special case see tests and #191
        setHighwaySpeed("motorway", 18);
        setHighwaySpeed("motorway_link", 18);
        avoidHighwayTags.add("motorway");
        avoidHighwayTags.add("motorway_link");

        setCyclingNetworkPreference("icn", PriorityCode.BEST.getValue());
        setCyclingNetworkPreference("ncn", PriorityCode.BEST.getValue());
        setCyclingNetworkPreference("rcn", PriorityCode.VERY_NICE.getValue());
        setCyclingNetworkPreference("lcn", PriorityCode.PREFER.getValue());
        setCyclingNetworkPreference("mtb", PriorityCode.UNCHANGED.getValue());

        setCyclingNetworkPreference("deprecated", PriorityCode.AVOID_AT_ALL_COSTS.getValue());

        setAvoidSpeedLimit(71);
    }

    @Override
    public int getVersion()
    {
        return 1;
    }

    /**
     * Lays out this encoder's bit fields: speed, the unpaved flag, the 2-bit
     * way type and the 3-bit priority ("PreferWay") value.
     */
    @Override
    public int defineWayBits( int index, int shift )
    {
        // first two bits are reserved for route handling in superclass
        shift = super.defineWayBits(index, shift);
        speedEncoder = new EncodedDoubleValue("Speed", shift, speedBits, speedFactor,
                highwaySpeeds.get("cycleway"), maxPossibleSpeed);
        shift += speedEncoder.getBits();

        unpavedBit = 1L << shift++;
        // 2 bits
        wayTypeEncoder = new EncodedValue("WayType", shift, 2, 1, 0, 3, true);
        shift += wayTypeEncoder.getBits();

        preferWayEncoder = new EncodedValue("PreferWay", shift, 3, 1, 0, 7);
        shift += preferWayEncoder.getBits();
        return shift;
    }

    @Override
    public int defineRelationBits( int index, int shift )
    {
        relationCodeEncoder = new EncodedValue("RelationCode", shift, 3, 1, 0, 7);
        return shift + relationCodeEncoder.getBits();
    }

    /**
     * Decides whether a way is usable by bicycle at all; returns 0 to reject,
     * otherwise a combination of acceptBit (and ferryBit for ferry routes).
     */
    @Override
    public long acceptWay( OSMWay way )
    {
        String highwayValue = way.getTag("highway");
        if (highwayValue == null)
        {
            if (way.hasTag("route", ferries))
            {
                // if bike is NOT explicitly tagged allow bike but only if foot is not specified
                String bikeTag = way.getTag("bicycle");
                if (bikeTag == null && !way.hasTag("foot") || "yes".equals(bikeTag))
                    return acceptBit | ferryBit;
            }

            // special case not for all acceptedRailways, only platform
            if (way.hasTag("railway", "platform"))
                return acceptBit;

            return 0;
        }

        if (!highwaySpeeds.containsKey(highwayValue))
            return 0;

        // use the way if it is tagged for bikes
        if (way.hasTag("bicycle", intendedValues))
            return acceptBit;

        // accept only if explicitly tagged for bike usage
        if ("motorway".equals(highwayValue) || "motorway_link".equals(highwayValue))
            return 0;

        if (way.hasTag("motorroad", "yes"))
            return 0;

        // do not use fords with normal bikes, flagged fords are included above
        if (isBlockFords() && (way.hasTag("highway", "ford") || way.hasTag("ford")))
            return 0;

        // check access restrictions
        if (way.hasTag(restrictions, restrictedValues))
            return 0;

        // do not accept railways (sometimes incorrectly mapped!)
        if (way.hasTag("railway") && !way.hasTag("railway", acceptedRailways))
            return 0;

        String sacScale = way.getTag("sac_scale");
        if (sacScale != null)
        {
            if ((way.hasTag("highway", "cycleway")) && (way.hasTag("sac_scale", "hiking")))
                return acceptBit;
            if (!allowedSacScale(sacScale))
                return 0;
        }
        return acceptBit;
    }

    boolean allowedSacScale( String sacScale )
    {
        // other scales are nearly impossible by an ordinary bike, see http://wiki.openstreetmap.org/wiki/Key:sac_scale
        return "hiking".equals(sacScale);
    }

    /**
     * Encodes the bicycle-route network code of a relation; an already encoded
     * (higher) code is kept.
     */
    @Override
    public long handleRelationTags( OSMRelation relation, long oldRelationFlags )
    {
        int code = 0;
        if (relation.hasTag("route", "bicycle"))
        {
            Integer val = bikeNetworkToCode.get(relation.getTag("network"));
            if (val != null)
                code = val;
        } else if (relation.hasTag("route", "ferry"))
        {
            code = PriorityCode.AVOID_IF_POSSIBLE.getValue();
        }

        int oldCode = (int) relationCodeEncoder.getValue(oldRelationFlags);
        if (oldCode < code)
            return relationCodeEncoder.setValue(0, code);
        return oldRelationFlags;
    }

    /**
     * Encodes speed, priority, way type and direction bits for an accepted way.
     */
    @Override
    public long handleWayTags( OSMWay way, long allowed, long relationFlags )
    {
        if (!isAccept(allowed))
            return 0;

        long encoded = 0;
        if (!isFerry(allowed))
        {
            double speed = getSpeed(way);
            int priorityFromRelation = 0;
            if (relationFlags != 0)
                priorityFromRelation = (int) relationCodeEncoder.getValue(relationFlags);

            encoded = setLong(encoded, PriorityWeighting.KEY, handlePriority(way, priorityFromRelation));

            // bike maxspeed handling is different from car as we don't increase speed
            speed = applyMaxSpeed(way, speed, false);
            encoded = handleSpeed(way, speed, encoded);
            encoded = handleBikeRelated(way, encoded, relationFlags > UNCHANGED.getValue());

            boolean isRoundabout = way.hasTag("junction", "roundabout");
            if (isRoundabout)
            {
                encoded = setBool(encoded, K_ROUNDABOUT, true);
            }
        } else
        {
            encoded = handleFerryTags(way,
                    highwaySpeeds.get("living_street"),
                    highwaySpeeds.get("track"),
                    highwaySpeeds.get("primary"));
            encoded |= directionBitMask;
        }
        return encoded;
    }

    /**
     * Derives the riding speed (km/h) from surface, tracktype or highway tags,
     * falling back to PUSHING_SECTION_SPEED where the cyclist must dismount.
     */
    int getSpeed( OSMWay way )
    {
        int speed = PUSHING_SECTION_SPEED;
        String highwayTag = way.getTag("highway");
        Integer highwaySpeed = highwaySpeeds.get(highwayTag);

        String s = way.getTag("surface");
        if (!Helper.isEmpty(s))
        {
            Integer surfaceSpeed = surfaceSpeeds.get(s);
            if (surfaceSpeed != null)
            {
                speed = surfaceSpeed;
                // Boost handling for good surfaces
                if (highwaySpeed != null && surfaceSpeed > highwaySpeed)
                {
                    // Avoid boosting if pushing section
                    if (pushingSections.contains(highwayTag))
                        speed = highwaySpeed;
                    else
                        speed = surfaceSpeed;
                }
            }
        } else
        {
            String tt = way.getTag("tracktype");
            if (!Helper.isEmpty(tt))
            {
                Integer tInt = trackTypeSpeeds.get(tt);
                if (tInt != null)
                    speed = tInt;
            } else
            {
                if (highwaySpeed != null)
                {
                    if (!way.hasTag("service"))
                        speed = highwaySpeed;
                    else
                        speed = highwaySpeeds.get("living_street");
                }
            }
        }

        // Until now we assumed that the way is no pushing section
        // Now we check, but only in case that our speed is bigger compared to the PUSHING_SECTION_SPEED
        if ((speed > PUSHING_SECTION_SPEED)
                && (!way.hasTag("bicycle", intendedValues) && way.hasTag("highway", pushingSections)))
        {
            if (way.hasTag("highway", "steps"))
                speed = PUSHING_SECTION_SPEED / 2;
            else
                speed = PUSHING_SECTION_SPEED;
        }
        return speed;
    }

    @Override
    public InstructionAnnotation getAnnotation( long flags, Translation tr )
    {
        int paveType = 0; // paved
        if (isBool(flags, K_UNPAVED))
            paveType = 1; // unpaved
        int wayType = (int) wayTypeEncoder.getValue(flags);
        String wayName = getWayName(paveType, wayType, tr);
        return new InstructionAnnotation(0, wayName);
    }

    /**
     * Builds the localized annotation text; returns "" for plain paved
     * roads/small ways where no hint is needed.
     */
    String getWayName( int pavementType, int wayType, Translation tr )
    {
        String pavementName = "";
        if (pavementType == 1)
            pavementName = tr.tr("unpaved");

        String wayTypeName = "";
        switch (wayType)
        {
            case 0:
                wayTypeName = tr.tr("road");
                break;
            case 1:
                wayTypeName = tr.tr("off_bike");
                break;
            case 2:
                wayTypeName = tr.tr("cycleway");
                break;
            case 3:
                wayTypeName = tr.tr("way");
                break;
        }

        if (pavementName.isEmpty())
        {
            if (wayType == 0 || wayType == 3)
                return "";
            return wayTypeName;
        } else
        {
            if (wayTypeName.isEmpty())
                return pavementName;
            else
                return wayTypeName + ", " + pavementName;
        }
    }

    /**
     * In this method we prefer cycleways or roads with designated bike access and avoid big roads
     * or roads with trams or pedestrian.
     * <p/>
     * @return new priority based on priorityFromRelation and on the tags in OSMWay.
     */
    protected int handlePriority( OSMWay way, int priorityFromRelation )
    {
        TreeMap<Double, Integer> weightToPrioMap = new TreeMap<Double, Integer>();
        if (priorityFromRelation == 0)
            weightToPrioMap.put(0d, UNCHANGED.getValue());
        else
            weightToPrioMap.put(110d, priorityFromRelation);

        collect(way, weightToPrioMap);

        // pick priority with biggest order value
        return weightToPrioMap.lastEntry().getValue();
    }

    // Conversion of class value to priority. See http://wiki.openstreetmap.org/wiki/Class:bicycle
    private PriorityCode convertCallValueToPriority( String tagvalue )
    {
        int classvalue;
        try
        {
            classvalue = Integer.parseInt(tagvalue);
        } catch (NumberFormatException e)
        {
            return PriorityCode.UNCHANGED;
        }

        switch (classvalue)
        {
            case 3:
                return PriorityCode.BEST;
            case 2:
                return PriorityCode.VERY_NICE;
            case 1:
                return PriorityCode.PREFER;
            case 0:
                return PriorityCode.UNCHANGED;
            case -1:
                return PriorityCode.AVOID_IF_POSSIBLE;
            case -2:
                return PriorityCode.REACH_DEST;
            case -3:
                return PriorityCode.AVOID_AT_ALL_COSTS;
            default:
                return PriorityCode.UNCHANGED;
        }
    }

    /**
     * @param weightToPrioMap associate a weight with every priority. This sorted map allows
     * subclasses to 'insert' more important priorities as well as overwrite determined priorities.
*/ void collect( OSMWay way, TreeMap<Double, Integer> weightToPrioMap ) { String service = way.getTag("service"); String highway = way.getTag("highway"); if (way.hasTag("bicycle", "designated")) weightToPrioMap.put(100d, PREFER.getValue()); if ("cycleway".equals(highway)) weightToPrioMap.put(100d, VERY_NICE.getValue()); double maxSpeed = getMaxSpeed(way); if (preferHighwayTags.contains(highway) || maxSpeed > 0 && maxSpeed <= 30) { if (maxSpeed < avoidSpeedLimit) { weightToPrioMap.put(40d, PREFER.getValue()); if (way.hasTag("tunnel", intendedValues)) weightToPrioMap.put(40d, UNCHANGED.getValue()); } } else { if (avoidHighwayTags.contains(highway) || ((maxSpeed >= avoidSpeedLimit) && (highway != "track"))) { weightToPrioMap.put(50d, REACH_DEST.getValue()); if (way.hasTag("tunnel", intendedValues)) weightToPrioMap.put(50d, AVOID_AT_ALL_COSTS.getValue()); } } if (pushingSections.contains(highway) || way.hasTag("bicycle", "use_sidepath") || "parking_aisle".equals(service)) { if (way.hasTag("bicycle", "yes")) weightToPrioMap.put(100d, UNCHANGED.getValue()); else weightToPrioMap.put(50d, AVOID_IF_POSSIBLE.getValue()); } if (way.hasTag("railway", "tram")) weightToPrioMap.put(50d, AVOID_AT_ALL_COSTS.getValue()); String classBicycleSpecific = way.getTag(specificBicycleClass); if (classBicycleSpecific != null) { // We assume that humans are better in classifying preferences compared to our algorithm above -> weight = 100 weightToPrioMap.put(100d, convertCallValueToPriority(classBicycleSpecific).getValue()); } else { String classBicycle = way.getTag("class:bicycle"); if (classBicycle != null) { weightToPrioMap.put(100d, convertCallValueToPriority(classBicycle).getValue()); } } } /** * Handle surface and wayType encoding */ long handleBikeRelated( OSMWay way, long encoded, boolean partOfCycleRelation ) { String surfaceTag = way.getTag("surface"); String highway = way.getTag("highway"); String trackType = way.getTag("tracktype"); // Populate bits at wayTypeMask with wayType 
WayType wayType = WayType.OTHER_SMALL_WAY; boolean isPusingSection = isPushingSection(way); if (isPusingSection && !partOfCycleRelation || "steps".equals(highway)) wayType = WayType.PUSHING_SECTION; if ("track".equals(highway) && (trackType == null || !"grade1".equals(trackType)) || "path".equals(highway) && surfaceTag == null || unpavedSurfaceTags.contains(surfaceTag)) { encoded = setBool(encoded, K_UNPAVED, true); } if (way.hasTag("bicycle", intendedValues)) { if (isPusingSection && !way.hasTag("bicycle", "designated")) wayType = WayType.OTHER_SMALL_WAY; else wayType = WayType.CYCLEWAY; } else if ("cycleway".equals(highway)) wayType = WayType.CYCLEWAY; else if (roadValues.contains(highway)) wayType = WayType.ROAD; return wayTypeEncoder.setValue(encoded, wayType.getValue()); } @Override public long setBool( long flags, int key, boolean value ) { switch (key) { case K_UNPAVED: return value ? flags | unpavedBit : flags & ~unpavedBit; default: return super.setBool(flags, key, value); } } @Override public boolean isBool( long flags, int key ) { switch (key) { case K_UNPAVED: return (flags & unpavedBit) != 0; default: return super.isBool(flags, key); } } @Override public double getDouble( long flags, int key ) { switch (key) { case PriorityWeighting.KEY: double prio = preferWayEncoder.getValue(flags); if (prio == 0) return (double) UNCHANGED.getValue() / BEST.getValue(); return prio / BEST.getValue(); default: return super.getDouble(flags, key); } } @Override public long getLong( long flags, int key ) { switch (key) { case PriorityWeighting.KEY: return preferWayEncoder.getValue(flags); default: return super.getLong(flags, key); } } @Override public long setLong( long flags, int key, long value ) { switch (key) { case PriorityWeighting.KEY: return preferWayEncoder.setValue(flags, value); default: return super.setLong(flags, key, value); } } boolean isPushingSection( OSMWay way ) { return way.hasTag("highway", pushingSections) || way.hasTag("railway", "platform"); } 
protected long handleSpeed( OSMWay way, double speed, long encoded ) { encoded = setSpeed(encoded, speed); // handle oneways boolean isOneway = way.hasTag("oneway", oneways) || way.hasTag("oneway:bicycle", oneways) || way.hasTag("vehicle:backward") || way.hasTag("vehicle:forward") || way.hasTag("bicycle:forward"); if ((isOneway || way.hasTag("junction", "roundabout")) && !way.hasTag("oneway:bicycle", "no") && !way.hasTag("bicycle:backward") && !way.hasTag("cycleway", oppositeLanes)) { boolean isBackward = way.hasTag("oneway", "-1") || way.hasTag("oneway:bicycle", "-1") || way.hasTag("vehicle:forward", "no") || way.hasTag("bicycle:forward", "no"); if (isBackward) encoded |= backwardBit; else encoded |= forwardBit; } else { encoded |= directionBitMask; } return encoded; } private enum WayType { ROAD(0), PUSHING_SECTION(1), CYCLEWAY(2), OTHER_SMALL_WAY(3); private final int value; private WayType( int value ) { this.value = value; } public int getValue() { return value; } }; protected void setHighwaySpeed( String highway, int speed ) { highwaySpeeds.put(highway, speed); } protected int getHighwaySpeed( String key ) { return highwaySpeeds.get(key); } void setTrackTypeSpeed( String tracktype, int speed ) { trackTypeSpeeds.put(tracktype, speed); } void setSurfaceSpeed( String surface, int speed ) { surfaceSpeeds.put(surface, speed); } void setCyclingNetworkPreference( String network, int code ) { bikeNetworkToCode.put(network, code); } void addPushingSection( String highway ) { pushingSections.add(highway); } @Override public boolean supports( Class<?> feature ) { if (super.supports(feature)) return true; return PriorityWeighting.class.isAssignableFrom(feature); } public void setAvoidSpeedLimit( int limit ) { avoidSpeedLimit = limit; } public void setSpecificBicycleClass( String subkey ) { specificBicycleClass = "class:bicycle:" + subkey.toString(); } }
package io.quarkus.qute;

import io.quarkus.qute.TemplateNode.Origin;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Static utilities for parsing and manipulating Qute expression strings, e.g.
 * splitting an expression into its parts, recognizing virtual-method and
 * bracket notation, and building type-info strings.
 */
public final class Expressions {

    public static final String TYPECHECK_NAMESPACE_PLACEHOLDER = "$$namespace$$";

    static final String LEFT_BRACKET = "(";
    static final String RIGHT_BRACKET = ")";
    static final String SQUARE_LEFT_BRACKET = "[";
    static final String SQUARE_RIGHT_BRACKET = "]";
    public static final char TYPE_INFO_SEPARATOR = '|';

    // Utility class - no instances allowed.
    private Expressions() {
    }

    /**
     * @param value an expression part
     * @return {@code true} if the part contains a left bracket, i.e. looks like a virtual method call
     */
    public static boolean isVirtualMethod(String value) {
        return value.indexOf(LEFT_BRACKET) != -1;
    }

    /**
     * @param value an expression part
     * @return {@code true} if the part starts with a square bracket, i.e. uses bracket notation
     */
    public static boolean isBracketNotation(String value) {
        return value.startsWith(SQUARE_LEFT_BRACKET);
    }

    /**
     * @param value a virtual method part, e.g. {@code foo(bar)}
     * @return the method name, i.e. everything before the left bracket
     */
    public static String parseVirtualMethodName(String value) {
        int start = value.indexOf(LEFT_BRACKET);
        return value.substring(0, start);
    }

    /**
     * Extracts the parameters of a virtual method part.
     *
     * @param value a virtual method part, e.g. {@code foo(bar,baz)}
     * @param origin the origin used for error reporting
     * @param exprValue the whole expression, used for error reporting
     * @return the list of parameter strings
     * @throws TemplateException if the part is not a well-formed virtual method
     */
    public static List<String> parseVirtualMethodParams(String value, Origin origin, String exprValue) {
        int start = value.indexOf(LEFT_BRACKET);
        if (start != -1 && value.endsWith(RIGHT_BRACKET)) {
            String params = value.substring(start + 1, value.length() - 1);
            return splitParts(params, PARAMS_SPLIT_CONFIG);
        }
        throw Parser.parserError("invalid virtual method in {" + exprValue + "}", origin);
    }

    /**
     * Extracts the content between the square brackets of a bracket-notation part.
     *
     * @param value a bracket-notation part, e.g. {@code [foo]}
     * @param origin the origin used for error reporting
     * @param exprValue the whole expression, used for error reporting
     * @return the bracket content
     * @throws TemplateException if the part does not end with a right square bracket
     */
    public static String parseBracketContent(String value, Origin origin, String exprValue) {
        if (value.endsWith(SQUARE_RIGHT_BRACKET)) {
            return value.substring(1, value.length() - 1);
        }
        throw Parser.parserError("invalid bracket notation expression in {" + exprValue + "}", origin);
    }

    /**
     * @param name the virtual method name
     * @param params the parameter strings
     * @return the canonical signature, e.g. {@code foo(bar,baz)}
     */
    public static String buildVirtualMethodSignature(String name, List<String> params) {
        return name + LEFT_BRACKET + params.stream().collect(Collectors.joining(",")) + RIGHT_BRACKET;
    }

    /**
     * Splits an expression into its parts using the default configuration
     * (separators {@code .}, {@code [} and {@code ]}; infix notation supported).
     *
     * @param value the expression to split
     * @return the parts
     */
    public static List<String> splitParts(String value) {
        return splitParts(value, DEFAULT_SPLIT_CONFIG);
    }

    /**
     * Splits a type-info string into its parts; unlike the default configuration,
     * the {@link #TYPE_INFO_SEPARATOR} also toggles literal mode.
     *
     * @param value the type-info string to split
     * @return the parts
     */
    public static List<String> splitTypeInfoParts(String value) {
        return splitParts(value, TYPE_INFO_SPLIT_CONFIG);
    }

    /**
     * Splits an expression into its parts according to the given configuration.
     * <p>
     * This is a single-pass state machine that tracks: whether we are inside a
     * string/type literal ({@code literal}), the last separator consumed
     * ({@code separator}, used to collapse adjacent separators), the depth of
     * nested virtual-method brackets ({@code brackets}), and the progress of an
     * infix-notation rewrite ({@code infix}) which turns {@code foo or bar}
     * into the parts {@code foo} and {@code or(bar)}.
     *
     * @param value the string to split; may be null or empty
     * @param splitConfig defines separators, literal separators and infix support
     * @return the parts (never null)
     */
    public static List<String> splitParts(String value, SplitConfig splitConfig) {
        if (value == null || value.isEmpty()) {
            return Collections.emptyList();
        }
        boolean literal = false;
        char separator = 0;
        byte infix = 0;
        byte brackets = 0;
        ImmutableList.Builder<String> parts = ImmutableList.builder();
        StringBuilder buffer = new StringBuilder();
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (splitConfig.isSeparator(c)) {
                // Adjacent separators may be ignored
                if (separator == 0 || separator != c) {
                    if (!literal && brackets == 0 && infix == 0) {
                        // A real separator: flush the buffer as a completed part,
                        // optionally keeping the separator char on one side
                        // (e.g. "[" is appended to the next part, "]" prepended).
                        if (splitConfig.shouldPrependSeparator(c)) {
                            buffer.append(c);
                        }
                        if (addPart(buffer, parts)) {
                            buffer = new StringBuilder();
                        }
                        if (splitConfig.shouldAppendSeparator(c)) {
                            buffer.append(c);
                        }
                        separator = c;
                    } else {
                        // Inside a literal, virtual method or infix rewrite:
                        // the separator char is ordinary content.
                        buffer.append(c);
                    }
                }
            } else {
                if (splitConfig.isLiteralSeparator(c)) {
                    // Toggle literal mode on every literal delimiter.
                    literal = !literal;
                }
                // Non-separator char
                if (!literal) {
                    // Not inside a string/type literal
                    if (brackets == 0 && c == ' ' && splitConfig.isInfixNotationSupported()) {
                        // Infix supported, blank space and not inside a virtual method
                        if (separator == 0 && (buffer.length() == 0 || buffer.charAt(buffer.length() - 1) == '(')) {
                            // Skip redundant blank space:
                            // 1. before the infix method
                            // foo or bar
                            // ----^
                            // 2. before an infix method parameter
                            // foo or bar
                            // -------^
                        } else if (infix == 1) {
                            // The space after the infix method
                            // foo or bar
                            // ------^
                            buffer.append(LEFT_BRACKET);
                            infix++;
                        } else if (infix == 2) {
                            // Next infix method
                            // foo or bar or baz
                            // ----------^
                            infix = 1;
                            buffer.append(RIGHT_BRACKET);
                            if (addPart(buffer, parts)) {
                                buffer = new StringBuilder();
                            }
                        } else {
                            // First space - start a new infix method
                            // foo or bar
                            // ---^
                            infix++;
                            if (addPart(buffer, parts)) {
                                buffer = new StringBuilder();
                            }
                        }
                    } else {
                        if (Parser.isLeftBracket(c)) {
                            // Start of a virtual method
                            brackets++;
                        } else if (Parser.isRightBracket(c)) {
                            // End of a virtual method
                            brackets--;
                        }
                        buffer.append(c);
                    }
                    separator = 0;
                } else {
                    // Inside a literal: copy verbatim.
                    buffer.append(c);
                    separator = 0;
                }
            }
        }
        if (infix > 0) {
            // Close the synthetic bracket opened by the infix rewrite.
            buffer.append(RIGHT_BRACKET);
        }
        addPart(buffer, parts);
        return parts.build();
    }

    /**
     * @param typeName a raw type name
     * @return the name wrapped in {@link #TYPE_INFO_SEPARATOR} chars, e.g. {@code |org.acme.Foo|}
     */
    public static String typeInfoFrom(String typeName) {
        return TYPE_INFO_SEPARATOR + typeName + TYPE_INFO_SEPARATOR;
    }

    /**
     * Adds the trimmed buffer content to {@code parts} unless it is blank.
     *
     * @param buffer the current part buffer
     * @param parts the accumulated parts
     * @return true if a new buffer should be created
     */
    private static boolean addPart(StringBuilder buffer, ImmutableList.Builder<String> parts) {
        if (buffer.length() == 0) {
            return false;
        }
        String val = buffer.toString().trim();
        if (!val.isEmpty()) {
            parts.add(val);
        }
        return true;
    }

    // Default: "." / "[" / "]" separators, infix notation enabled.
    private static final SplitConfig DEFAULT_SPLIT_CONFIG = new DefaultSplitConfig();

    // Virtual method parameters: comma-separated, no infix notation.
    private static final SplitConfig PARAMS_SPLIT_CONFIG = new SplitConfig() {

        @Override
        public boolean isSeparator(char candidate) {
            return ',' == candidate;
        }

        public boolean isInfixNotationSupported() {
            return false;
        }
    };

    // Type-info strings: like the default, but '|' also toggles literal mode.
    private static final SplitConfig TYPE_INFO_SPLIT_CONFIG = new DefaultSplitConfig() {

        @Override
        public boolean isLiteralSeparator(char candidate) {
            return candidate == TYPE_INFO_SEPARATOR || LiteralSupport.isStringLiteralSeparator(candidate);
        }
    };

    /** Default configuration: dot and square brackets separate parts. */
    private static class DefaultSplitConfig implements SplitConfig {

        @Override
        public boolean isSeparator(char candidate) {
            return candidate == '.' || candidate == '[' || candidate == ']';
        }

        @Override
        public boolean shouldPrependSeparator(char candidate) {
            // Keep "]" attached to the end of the part it closes.
            return candidate == ']';
        }

        @Override
        public boolean shouldAppendSeparator(char candidate) {
            // Keep "[" attached to the start of the part it opens.
            return candidate == '[';
        }
    }

    /** Strategy describing how {@link #splitParts(String, SplitConfig)} tokenizes its input. */
    interface SplitConfig {

        // True if the char ends the current part.
        boolean isSeparator(char candidate);

        // True if the char toggles literal mode (content copied verbatim).
        default boolean isLiteralSeparator(char candidate) {
            return LiteralSupport.isStringLiteralSeparator(candidate);
        }

        // True if the separator should be kept at the end of the finished part.
        default boolean shouldPrependSeparator(char candidate) {
            return false;
        }

        // True if the separator should start the next part.
        default boolean shouldAppendSeparator(char candidate) {
            return false;
        }

        // True if "foo or bar" should be rewritten to "foo", "or(bar)".
        default boolean isInfixNotationSupported() {
            return true;
        }
    }

}
/*
 * #%L
 * ImageJ2 software for multidimensional image processing and analysis.
 * %%
 * Copyright (C) 2014 - 2021 ImageJ2 developers.
 * %%
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

package net.imagej.ops;

import static org.junit.Assert.assertTrue;

import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

import org.scijava.command.CommandService;
import org.scijava.module.ModuleItem;
import org.scijava.plugin.Parameter;
import org.scijava.util.ClassUtils;
import org.scijava.util.ConversionUtils;
import org.scijava.util.GenericUtils;

/**
 * Base class for unit testing of namespaces. In particular, this class has
 * functionality to verify the completeness of the namespace's built-in method
 * signatures.
 *
 * @author Curtis Rueden
 */
public abstract class AbstractNamespaceTest extends AbstractOpTest {

	@Parameter
	private CommandService commandService;

	/**
	 * Checks that all ops of the given namespace are "covered" by
	 * {@link OpMethod}-annotated methods declared in the given namespace class.
	 *
	 * @param namespace The namespace of the ops to scrutinize (e.g., "math").
	 * @param namespaceClass Class with the {@link OpMethod}-annotated methods.
	 */
	public void assertComplete(final String namespace,
		final Class<?> namespaceClass)
	{
		boolean success = true; // whether the test will succeed
		for (final String op : ops.ops()) {
			final String ns = OpUtils.getNamespace(op);
			// Only scrutinize ops belonging to the requested namespace.
			if (!Objects.equals(namespace, ns)) continue;
			if (!checkComplete(namespaceClass, op)) success = false;
		}
		assertTrue("Coverage mismatch", success);
	}

	/**
	 * Checks that the given class's list of {@link OpMethod}-annotated methods
	 * "covers" the available ops, and vice versa.
	 * <p>
	 * This method verifies that all ops with the given name have type-safe
	 * {@link OpMethod}-annotated methods. And vice versa: it verifies that all
	 * the annotated methods could theoretically invoke at least one op
	 * implementation.
	 * </p>
	 * <p>
	 * This method provides a general-purpose verification test which extensions
	 * to Ops can also use to verify their own cache of type-safe methods provided
	 * by their own service(s).
	 * </p>
	 * <p>
	 * The completeness tests are not 100% accurate:
	 * </p>
	 * <ul>
	 * <li>The comparison of method parameters to op parameters is too lenient:
	 * the matching should ideally be exact rather than accepting "compatible"
	 * (i.e., subtype) matches.</li>
	 * <li>There are some limitations to the matching of generic parameters.</li>
	 * <li>When a method is missing, the system generates a suggested code block,
	 * but that code block includes only raw type parameters, not generified type
	 * parameters. For details on why, see <a
	 * href="http://stackoverflow.com/q/28143029">this post on StackOverflow</a>.</li>
	 * </ul>
	 *
	 * @param namespaceClass Class with the {@link OpMethod}-annotated methods.
	 * @param qName The fully qualified (with namespace) name of the op to verify
	 *          is completely covered.
	 * @return true iff the op is completely covered by the class's methods.
	 * @see GlobalNamespaceTest Usage examples for global namespace ops.
	 * @see net.imagej.ops.math.MathNamespaceTest Usage examples for math ops.
	 */
	public boolean checkComplete(final Class<?> namespaceClass,
		final String qName)
	{
		final String namespace = OpUtils.getNamespace(qName);
		final String opName = OpUtils.stripNamespace(qName);

		// obtain the list of built-in methods
		final List<Method> allMethods =
			ClassUtils.getAnnotatedMethods(namespaceClass, OpMethod.class);

		// obtain the list of ops
		final Collection<OpInfo> allOps = ops.infos();

		// filter methods and ops to only those with the given name
		final List<Method> methods;
		final Collection<OpInfo> opList;
		if (opName == null) {
			// no specific op requested: compare everything
			methods = allMethods;
			opList = allOps;
		}
		else {
			// filter the methods
			methods = new ArrayList<>();
			for (final Method method : allMethods) {
				if (opName.equals(method.getName())) methods.add(method);
			}
			// filter the ops
			opList = new ArrayList<>();
			for (final OpInfo op : allOps) {
				if (qName.equals(op.getName())) opList.add(op);
			}
		}

		// cross-check them!
		return checkComplete(namespace, methods, opList);
	}

	/**
	 * Checks that the given list of methods corresponds to the specified list of
	 * available ops. The test will fail if either:
	 * <ol>
	 * <li>There is a method that does not correspond to an op; or</li>
	 * <li>There is an op that cannot be invoked by any method.</li>
	 * </ol>
	 * <p>
	 * Note that this test does not verify sanity of either priorities or
	 * {@link Contingent} ops. It assumes that if a candidate's types match, there
	 * might be some possibility that it could potentially match in the proper
	 * circumstances.
	 * </p>
	 *
	 * @param namespace The namespace prefix of the ops in question.
	 * @param methods List of methods.
	 * @param infos List of ops.
	 * @return true iff methods and ops cover each other completely.
	 */
	public boolean checkComplete(final String namespace,
		final Collection<Method> methods,
		final Collection<? extends OpInfo> infos)
	{
		final OpCoverSet coverSet = new OpCoverSet();
		boolean success = true;
		for (final Method method : methods) {
			final String name = method.getName();
			final String qName = namespace == null ? name : namespace + "." + name;
			if (!checkVarArgs(method)) success = false;
			// HACK: Allow @OpMethod to specify that type checking should be skipped.
			final OpMethod opMethod = method.getAnnotation(OpMethod.class);
			final boolean checkTypes = opMethod == null || !opMethod.skipTypeCheck();
			for (final Class<? extends Op> opType : opTypes(method)) {
				if (!checkOpImpl(method, qName, opType, coverSet, checkTypes)) {
					success = false;
				}
			}
		}
		// verify that all ops have been completely covered
		final StringBuilder missingMessage = new StringBuilder("Missing methods:");
		int missingCount = 0;
		for (final OpInfo info : infos) {
			// Count required vs. total inputs; each arity between the two
			// (optional args filled in one at a time) needs its own method.
			int requiredCount = 0, inputCount = 0;
			for (final ModuleItem<?> input : info.inputs()) {
				if (input.isRequired()) requiredCount++;
				inputCount++;
			}
			for (int argCount = requiredCount; argCount <= inputCount; argCount++) {
				if (!coverSet.contains(info, argCount)) {
					// Emit a ready-to-paste method skeleton for the missing arity.
					missingMessage.append("\n\n" +
						methodString(info, argCount - requiredCount));
					missingCount++;
				}
			}
		}
		if (missingCount > 0) {
			error(missingMessage.toString());
			success = false;
		}
		return success;
	}

	// -- Helper methods --

	/**
	 * Ensures that, if the method's last argument is an array, it was written as
	 * varargs.
	 * <p>
	 * Good: {@code foo(int a, Number... nums)}<br>
	 * Bad: {@code foo(int a, Number[] num)}
	 * </p>
	 */
	private boolean checkVarArgs(final Method method) {
		final Class<?>[] argTypes = method.getParameterTypes();
		if (argTypes.length == 0) return true;
		if (!argTypes[argTypes.length - 1].isArray()) return true;
		if (method.isVarArgs()) return true;
		error("Last argument should be varargs for method:\n\t" + method);
		return false;
	}

	/**
	 * Gets the list of {@link Op} classes associated with the given method via
	 * the {@link OpMethod} annotation.
	 */
	private Set<Class<? extends Op>> opTypes(final Method method) {
		final Set<Class<? extends Op>> opSet = new HashSet<>();
		final OpMethod ann = method.getAnnotation(OpMethod.class);
		if (ann != null) {
			final Class<? extends Op>[] opTypes = ann.ops();
			// Fall back to the singular op() attribute when ops() is empty.
			if (opTypes.length == 0) opSet.add(ann.op());
			for (Class<? extends Op> opType : opTypes) {
				opSet.add(opType);
			}
		}
		return opSet;
	}

	/**
	 * Checks whether the given op implementation matches the specified method,
	 * including op name, as well as input and output type parameters.
	 *
	 * @param method The method to which the {@link Op} should be compared.
	 * @param qName The fully qualified (with namespace) name of the op.
	 * @param opType The {@link Op} to which the method should be compared.
	 * @param coverSet The set of ops which have already matched a method.
	 * @param checkTypes Whether to validate that the method's type arguments and
	 *          return type match the given op implementation's types.
	 * @return true iff the method and {@link Op} match up.
	 */
	private boolean checkOpImpl(final Method method, final String qName,
		final Class<? extends Op> opType, final OpCoverSet coverSet,
		final boolean checkTypes)
	{
		// TODO: Type matching needs to be type<->type instead of class<->type.
		// That is, the "special class placeholder" also needs to work with Type.
		// Then we can pass Types here instead of Class instances.
		// final Object[] argTypes = method.getGenericParameterTypes();
		final Object[] argTypes = method.getParameterTypes();
		final OpInfo info = ops.info(opType);
		if (checkTypes) {
			final OpRef ref = OpRef.create(qName, argTypes);
			final OpCandidate candidate = new OpCandidate(ops, ref, info);
			// check input types
			if (!inputTypesMatch(candidate)) {
				error("Mismatched inputs", opType, method);
				return false;
			}
			// check output types
			final Type returnType = method.getGenericReturnType();
			if (!outputTypesMatch(returnType, candidate)) {
				error("Mismatched outputs", opType, method);
				return false;
			}
		}
		// mark this op as covered (w.r.t. the given number of args)
		coverSet.add(info, argTypes.length);
		return true;
	}

	// Verifies the candidate's inputs both via the matcher (generics-aware)
	// and by requiring raw types to agree exactly.
	private boolean inputTypesMatch(final OpCandidate candidate) {
		// check for assignment compatibility, including generics
		if (!matcher.typesMatch(candidate)) return false;

		// also check that raw types exactly match
		final Object[] paddedArgs = matcher.padArgs(candidate);
		int i = 0;
		for (final ModuleItem<?> input : candidate.inputs()) {
			final Object arg = paddedArgs[i++];
			if (!typeMatches(arg, input.getType())) return false;
		}
		return true;
	}

	// True iff arg's raw class (or arg itself when it is a Class) equals type.
	private boolean typeMatches(final Object arg, final Class<?> type) {
		if (arg == null) return true; // NB: Handle special typed null placeholder.
		final Class<?> argType = arg instanceof Class ? ((Class<?>) arg) : arg
			.getClass();
		return argType == type;
	}

	// Verifies the method's return type against the op's declared outputs;
	// multiple outputs require a List return type.
	private boolean outputTypesMatch(final Type returnType,
		final OpCandidate candidate)
	{
		final List<Type> outTypes = new ArrayList<>();
		for (final ModuleItem<?> output : candidate.outputs()) {
			outTypes.add(output.getGenericType());
		}
		if (outTypes.size() == 0) return returnType == void.class;

		final Type baseType;
		if (outTypes.size() == 1) baseType = returnType;
		else {
			// multiple return types; so the method return type must be a list
			if (GenericUtils.getClass(returnType) != List.class) return false;
			// use the list's generic type parameter as the base type
			baseType = GenericUtils.getTypeParameter(returnType, List.class, 0);
		}
		for (final Type outType : outTypes) {
			if (!isSuperType(baseType, outType)) return false;
		}
		return true;
	}

	// Raw-class supertype check; null classes are treated as a match.
	private boolean isSuperType(final Type baseType, final Type subType) {
		// TODO: Handle generics.
		final Class<?> baseClass = GenericUtils.getClass(baseType);
		final Class<?> subClass = GenericUtils.getClass(subType);
		// NB: This avoids a bug in generics reflection processing.
		// See: https://github.com/scijava/scijava-common/issues/172
		// But it means that List return type matching is imperfect.
		if (baseClass == null || subClass == null) return true;
		return baseClass.isAssignableFrom(subClass);
	}

	// Reports a mismatch between an op type and a method.
	private void error(final String message, final Class<? extends Op> opType,
		final Method method)
	{
		error(message + ":\n\top = " + opType.getName() + "\n\tmethod = " + method);
	}

	// Reports an error message to stderr (does not fail the test by itself).
	private void error(final String message) {
		System.err.println("[ERROR] " + message);
	}

	// Generates a suggested @OpMethod-annotated method skeleton for the given
	// op info, filling in the given number of optional arguments.
	private String methodString(final OpInfo info, final int optionalsToFill) {
		final StringBuilder sb = new StringBuilder();

		// outputs
		int outputCount = 0;
		String returnType = "void";
		String castPrefix = "";
		for (final ModuleItem<?> output : info.outputs()) {
			if (++outputCount == 1) {
				// single output: return its own type
				returnType = typeString(output);
				castPrefix = "(" + castTypeString(output) + ") ";
			}
			else {
				// multiple outputs: return a List
				returnType = "List";
				castPrefix = "(List) ";
				break;
			}
		}

		final String className = info.cInfo().getDelegateClassName().replaceAll(
			"\\$", ".") + ".class";
		sb.append("\t@OpMethod(op = " + className + ")\n");
		final String methodName = info.getSimpleName();
		sb.append("\tpublic " + returnType + " " + methodName + "(");

		// inputs
		boolean first = true;
		int optionalIndex = 0;
		final StringBuilder args = new StringBuilder();
		args.append(className);
		for (final ModuleItem<?> input : info.inputs()) {
			if (!input.isRequired()) {
				// leave off unspecified optional arguments
				if (++optionalIndex > optionalsToFill) continue;
			}
			if (first) first = false;
			else sb.append(", ");
			sb.append("final " + typeString(input) + " " + input.getName());
			args.append(", " + input.getName());
		}
		sb.append(") {\n");
		sb.append("\t\t");
		if (outputCount > 0) {
			sb.append("final " + returnType + " result =\n" + //
				"\t\t\t" + castPrefix);
		}
		sb.append("ops().run(" + args + ");\n");
		if (outputCount > 0) sb.append("\t\treturn result;\n");
		sb.append("\t}");
		return sb.toString();
	}

	// Simple (unqualified) name of the item's type, for the method signature.
	private String typeString(final ModuleItem<?> item) {
		return item.getType().getSimpleName();
	}

	// Boxed type name of the item's type, for the cast prefix.
	private String castTypeString(final ModuleItem<?> item) {
		return ConversionUtils.getNonprimitiveType(item.getType()).getSimpleName();
	}

	// -- Helper classes --

	/** A data structure which maps each key to a set of values. */
	private static class MultiMap<K, V> extends HashMap<K, Set<V>> {

		public void add(final K key, final V value) {
			Set<V> set = get(key);
			if (set == null) {
				// lazily create the value set on first insertion for this key
				set = new HashSet<>();
				put(key, set);
			}
			set.add(value);
		}

		public boolean contains(final K key, final V value) {
			final Set<V> set = get(key);
			return set != null && set.contains(value);
		}
	}

	/**
	 * Maps an op implementation (i.e., {@link OpInfo}) to a list of integers.
	 * Each integer represents a different number of arguments to the op.
	 */
	public static class OpCoverSet extends MultiMap<OpInfo, Integer> {
		// NB: No implementation needed.
	}

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.nodemanager.containermanager.container; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.argThat; import static org.mockito.Matchers.refEq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.Mockito.atLeastOnce; import java.io.IOException; import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.util.AbstractMap.SimpleEntry; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import 
java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerRetryContext; import org.apache.hadoop.yarn.api.records.ContainerRetryPolicy; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.ContainerStateTransitionListener; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; import org.apache.hadoop.yarn.server.nodemanager.NodeManager; import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.deletion.task.DockerContainerDeletionMatcher; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationCleanupEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationRequestEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerMetrics; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants; import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler; import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEvent; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEventType; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.ControlledClock; import org.junit.Assert; import org.junit.Test; import org.mockito.ArgumentMatcher; import org.mockito.Mockito; public class TestContainer { final NodeManagerMetrics metrics = NodeManagerMetrics.create(); final Configuration conf = new YarnConfiguration(); final String FAKE_LOCALIZATION_ERROR = "Fake localization error"; /** * Verify correct container request events sent to localizer. */ @Test public void testLocalizationRequest() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(7, 314159265358979L, 4344, "yak"); assertEquals(ContainerState.NEW, wc.c.getContainerState()); wc.initContainer(); // Verify request for public/private resources to localizer ResourcesRequestedMatcher matchesReq = new ResourcesRequestedMatcher(wc.localResources, EnumSet.of( LocalResourceVisibility.PUBLIC, LocalResourceVisibility.PRIVATE, LocalResourceVisibility.APPLICATION)); verify(wc.localizerBus).handle(argThat(matchesReq)); assertEquals(ContainerState.LOCALIZING, wc.c.getContainerState()); } finally { if (wc != null) { wc.finished(); } } } /** * Verify container launch when all resources already cached. 
*/ @Test public void testLocalizationLaunch() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(8, 314159265358979L, 4344, "yak"); assertEquals(ContainerState.NEW, wc.c.getContainerState()); wc.initContainer(); Map<Path, List<String>> localPaths = wc.localizeResources(); // all resources should be localized assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); assertNotNull(wc.c.getLocalizedResources()); for (Entry<Path, List<String>> loc : wc.c.getLocalizedResources() .entrySet()) { assertEquals(localPaths.remove(loc.getKey()), loc.getValue()); } assertTrue(localPaths.isEmpty()); final WrappedContainer wcf = wc; // verify container launch ArgumentMatcher<ContainersLauncherEvent> matchesContainerLaunch = new ArgumentMatcher<ContainersLauncherEvent>() { @Override public boolean matches(Object o) { ContainersLauncherEvent launchEvent = (ContainersLauncherEvent) o; return wcf.c == launchEvent.getContainer(); } }; verify(wc.launcherBus).handle(argThat(matchesContainerLaunch)); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testExternalKill() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(13, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); int running = metrics.getRunningContainers(); wc.launchContainer(); assertEquals(running + 1, metrics.getRunningContainers()); reset(wc.localizerBus); wc.containerKilledOnRequest(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int failed = metrics.getFailedContainers(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(failed + 1, metrics.getFailedContainers()); assertEquals(running, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked 
generic public void testDockerContainerExternalKill() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(13, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); int running = metrics.getRunningContainers(); wc.launchContainer(); assertEquals(running + 1, metrics.getRunningContainers()); reset(wc.localizerBus); wc.containerKilledOnRequest(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int failed = metrics.getFailedContainers(); wc.dockerContainerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(failed + 1, metrics.getFailedContainers()); assertEquals(running, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testContainerPauseAndResume() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(13, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); int running = metrics.getRunningContainers(); wc.launchContainer(); assertEquals(running + 1, metrics.getRunningContainers()); reset(wc.localizerBus); wc.pauseContainer(); assertEquals(ContainerState.PAUSED, wc.c.getContainerState()); wc.resumeContainer(); assertEquals(ContainerState.RUNNING, wc.c.getContainerState()); wc.containerKilledOnRequest(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int failed = metrics.getFailedContainers(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(failed + 1, metrics.getFailedContainers()); assertEquals(running, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void 
testCleanupOnFailure() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(10, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode()); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testDockerContainerCleanupOnFailure() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(10, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode()); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); wc.dockerContainerResourcesCleanup(); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testCleanupOnSuccess() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(11, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); int running = metrics.getRunningContainers(); wc.launchContainer(); assertEquals(running + 1, metrics.getRunningContainers()); reset(wc.localizerBus); wc.containerSuccessful(); assertEquals(ContainerState.EXITED_WITH_SUCCESS, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int completed = metrics.getCompletedContainers(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(completed + 1, metrics.getCompletedContainers()); assertEquals(running, metrics.getRunningContainers()); ContainerEventType e1 = 
wc.initStateToEvent.get(ContainerState.NEW); ContainerState s2 = wc.eventToFinalState.get(e1); ContainerEventType e2 = wc.initStateToEvent.get(s2); ContainerState s3 = wc.eventToFinalState.get(e2); ContainerEventType e3 = wc.initStateToEvent.get(s3); ContainerState s4 = wc.eventToFinalState.get(e3); ContainerEventType e4 = wc.initStateToEvent.get(s4); ContainerState s5 = wc.eventToFinalState.get(e4); ContainerEventType e5 = wc.initStateToEvent.get(s5); ContainerState s6 = wc.eventToFinalState.get(e5); Assert.assertEquals(ContainerState.LOCALIZING, s2); Assert.assertEquals(ContainerState.SCHEDULED, s3); Assert.assertEquals(ContainerState.RUNNING, s4); Assert.assertEquals(ContainerState.EXITED_WITH_SUCCESS, s5); Assert.assertEquals(ContainerState.DONE, s6); Assert.assertEquals(ContainerEventType.INIT_CONTAINER, e1); Assert.assertEquals(ContainerEventType.RESOURCE_LOCALIZED, e2); Assert.assertEquals(ContainerEventType.CONTAINER_LAUNCHED, e3); Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS, e4); Assert.assertEquals(ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP, e5); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testDockerContainerCleanupOnSuccess() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(11, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); int running = metrics.getRunningContainers(); wc.launchContainer(); assertEquals(running + 1, metrics.getRunningContainers()); reset(wc.localizerBus); wc.containerSuccessful(); assertEquals(ContainerState.EXITED_WITH_SUCCESS, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int completed = metrics.getCompletedContainers(); wc.dockerContainerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(completed + 1, metrics.getCompletedContainers()); 
assertEquals(running, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testInitWhileDone() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(6, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerSuccessful(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); verifyOutofBandHeartBeat(wc); assertNull(wc.c.getLocalizedResources()); // Now in DONE, issue INIT wc.initContainer(); // Verify still in DONE assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testDockerContainerInitWhileDone() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(6, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerSuccessful(); wc.dockerContainerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); verifyOutofBandHeartBeat(wc); assertNull(wc.c.getLocalizedResources()); // Now in DONE, issue INIT wc.initContainer(); // Verify still in DONE assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testLocalizationFailureAtDone() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(6, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerSuccessful(); wc.containerResourcesCleanup(); 
assertEquals(ContainerState.DONE, wc.c.getContainerState()); verifyOutofBandHeartBeat(wc); assertNull(wc.c.getLocalizedResources()); // Now in DONE, issue RESOURCE_FAILED as done by LocalizeRunner wc.resourceFailedContainer(); // Verify still in DONE assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testDockerContainerLocalizationFailureAtDone() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(6, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.containerSuccessful(); wc.dockerContainerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); verifyOutofBandHeartBeat(wc); assertNull(wc.c.getLocalizedResources()); // Now in DONE, issue RESOURCE_FAILED as done by LocalizeRunner wc.resourceFailedContainer(); // Verify still in DONE assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") public void testLocalizationFailureWhileRunning() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(6, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); assertEquals(ContainerState.RUNNING, wc.c.getContainerState()); // Now in RUNNING, handle ContainerResourceFailedEvent, cause NPE before wc.handleContainerResourceFailedEvent(); } finally { if (wc != null) { wc.finished(); } } } @Test @SuppressWarnings("unchecked") // mocked generic public void testCleanupOnKillRequest() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(12, 314159265358979L, 4344, "yak"); 
wc.initContainer(); wc.localizeResources(); wc.launchContainer(); reset(wc.localizerBus); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.containerKilledOnRequest(); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnNew() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(13, 314159265358979L, 4344, "yak"); assertEquals(ContainerState.NEW, wc.c.getContainerState()); int killed = metrics.getKilledContainers(); wc.killContainer(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); verifyOutofBandHeartBeat(wc); assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER, wc.c.cloneAndGetContainerStatus().getExitStatus()); assertTrue(wc.c.cloneAndGetContainerStatus().getDiagnostics() .contains("KillRequest")); assertEquals(killed + 1, metrics.getKilledContainers()); // check container metrics is generated. ContainerMetrics containerMetrics = ContainerMetrics.forContainer(wc.cId, 1, 5000); Assert.assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER, containerMetrics.exitCode.value()); Assert.assertTrue(containerMetrics.startTime.value() > 0); Assert.assertTrue(containerMetrics.finishTime.value() >= containerMetrics.startTime.value()); Assert.assertEquals(ContainerEventType.KILL_CONTAINER, wc.initStateToEvent.get(ContainerState.NEW)); Assert.assertEquals(ContainerState.DONE, wc.eventToFinalState.get(ContainerEventType.KILL_CONTAINER)); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizing() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(14, 314159265358979L, 4344, "yak"); wc.initContainer(); assertEquals(ContainerState.LOCALIZING, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER, 
wc.c.cloneAndGetContainerStatus().getExitStatus()); assertTrue(wc.c.cloneAndGetContainerStatus().getDiagnostics() .contains("KillRequest")); int killed = metrics.getKilledContainers(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(killed + 1, metrics.getKilledContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizationFailed() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(15, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.failLocalizeResources(wc.getLocalResourceCount()); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.killContainer(); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int failed = metrics.getFailedContainers(); wc.containerResourcesCleanup(); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(failed + 1, metrics.getFailedContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizedWhenContainerNotLaunchedContainerKilled() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); // check that container cleanup hasn't started at this point. LocalizationCleanupMatcher cleanupResources = new LocalizationCleanupMatcher(wc.c); verify(wc.localizerBus, times(0)).handle(argThat(cleanupResources)); // check if containerlauncher cleans up the container launch. 
verify(wc.launcherBus) .handle(refEq(new ContainersLauncherEvent(wc.c, ContainersLauncherEventType.CLEANUP_CONTAINER), "timestamp")); launcher.call(); wc.drainDispatcherEvents(); assertEquals(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); int killed = metrics.getKilledContainers(); wc.c.handle(new ContainerEvent(wc.c.getContainerId(), ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP)); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(killed + 1, metrics.getKilledContainers()); assertEquals(0, metrics.getRunningContainers()); assertEquals(0, wc.launcher.running.size()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testDockerKillOnLocalizedWhenContainerNotLaunchedContainerKilled() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); launcher.call(); wc.drainDispatcherEvents(); assertEquals(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyDockerContainerCleanupCall(wc); int killed = metrics.getKilledContainers(); wc.c.handle(new ContainerEvent(wc.c.getContainerId(), ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP)); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(killed + 1, metrics.getKilledContainers()); assertEquals(0, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizedWhenContainerNotLaunchedContainerSuccess() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 
314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); wc.containerSuccessful(); wc.drainDispatcherEvents(); assertEquals(ContainerState.EXITED_WITH_SUCCESS, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); wc.c.handle(new ContainerEvent(wc.c.getContainerId(), ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP)); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(0, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizedWhenContainerNotLaunchedContainerFailure() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode()); wc.drainDispatcherEvents(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); wc.c.handle(new ContainerEvent(wc.c.getContainerId(), ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP)); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(0, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testDockerKillOnLocalizedContainerNotLaunchedContainerFailure() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); 
wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode()); wc.drainDispatcherEvents(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyDockerContainerCleanupCall(wc); wc.c.handle(new ContainerEvent(wc.c.getContainerId(), ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP)); assertEquals(ContainerState.DONE, wc.c.getContainerState()); assertEquals(0, metrics.getRunningContainers()); } finally { if (wc != null) { wc.finished(); } } } @Test public void testKillOnLocalizedWhenContainerLaunched() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId()); launcher.call(); wc.drainDispatcherEvents(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void testDockerKillOnLocalizedWhenContainerLaunched() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(17, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState()); ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId()); launcher.call(); wc.drainDispatcherEvents(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); wc.killContainer(); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyDockerContainerCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void 
testResourceLocalizedOnLocalizationFailed() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(16, 314159265358979L, 4344, "yak"); wc.initContainer(); int failCount = wc.getLocalResourceCount()/2; if (failCount == 0) { failCount = 1; } wc.failLocalizeResources(failCount); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.localizeResourcesFromInvalidState(failCount); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); Assert.assertTrue(wc.getDiagnostics().contains(FAKE_LOCALIZATION_ERROR)); } finally { if (wc != null) { wc.finished(); } } } @Test public void testResourceFailedOnLocalizationFailed() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(16, 314159265358979L, 4344, "yak"); wc.initContainer(); Iterator<String> lRsrcKeys = wc.localResources.keySet().iterator(); String key1 = lRsrcKeys.next(); String key2 = lRsrcKeys.next(); wc.failLocalizeSpecificResource(key1); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.failLocalizeSpecificResource(key2); assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void testResourceFailedOnKilling() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(16, 314159265358979L, 4344, "yak"); wc.initContainer(); Iterator<String> lRsrcKeys = wc.localResources.keySet().iterator(); String key1 = lRsrcKeys.next(); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.failLocalizeSpecificResource(key1); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); 
assertNull(wc.c.getLocalizedResources()); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } /** * Verify serviceData correctly sent. */ @Test public void testServiceData() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(9, 314159265358979L, 4344, "yak", false, true); assertEquals(ContainerState.NEW, wc.c.getContainerState()); wc.initContainer(); for (final Map.Entry<String,ByteBuffer> e : wc.serviceData.entrySet()) { ArgumentMatcher<AuxServicesEvent> matchesServiceReq = new ArgumentMatcher<AuxServicesEvent>() { @Override public boolean matches(Object o) { AuxServicesEvent evt = (AuxServicesEvent) o; return e.getKey().equals(evt.getServiceID()) && 0 == e.getValue().compareTo(evt.getServiceData()); } }; verify(wc.auxBus).handle(argThat(matchesServiceReq)); } final WrappedContainer wcf = wc; // verify launch on empty resource request ArgumentMatcher<ContainersLauncherEvent> matchesLaunchReq = new ArgumentMatcher<ContainersLauncherEvent>() { @Override public boolean matches(Object o) { ContainersLauncherEvent evt = (ContainersLauncherEvent) o; return evt.getType() == ContainersLauncherEventType.LAUNCH_CONTAINER && wcf.cId.equals(evt.getContainer().getContainerId()); } }; verify(wc.launcherBus).handle(argThat(matchesLaunchReq)); } finally { if (wc != null) { wc.finished(); } } } @Test public void testLaunchAfterKillRequest() throws Exception { WrappedContainer wc = null; try { wc = new WrappedContainer(14, 314159265358979L, 4344, "yak"); wc.initContainer(); wc.localizeResources(); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.launchContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.containerKilledOnRequest(); verifyCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void testDockerContainerLaunchAfterKillRequest() throws Exception { 
WrappedContainer wc = null; try { wc = new WrappedContainer(14, 314159265358979L, 4344, "yak"); wc.setupDockerContainerEnv(); wc.initContainer(); wc.localizeResources(); wc.killContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.launchContainer(); assertEquals(ContainerState.KILLING, wc.c.getContainerState()); assertNull(wc.c.getLocalizedResources()); wc.containerKilledOnRequest(); verifyDockerContainerCleanupCall(wc); } finally { if (wc != null) { wc.finished(); } } } @Test public void testContainerRetry() throws Exception{ ContainerRetryContext containerRetryContext1 = ContainerRetryContext .newInstance(ContainerRetryPolicy.NEVER_RETRY, null, 3, 0); testContainerRetry(containerRetryContext1, 2, 0); ContainerRetryContext containerRetryContext2 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0); testContainerRetry(containerRetryContext2, 2, 3); ContainerRetryContext containerRetryContext3 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0); // If exit code is 0, it will not retry testContainerRetry(containerRetryContext3, 0, 0); ContainerRetryContext containerRetryContext4 = ContainerRetryContext .newInstance( ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES, null, 3, 0); testContainerRetry(containerRetryContext4, 2, 0); HashSet<Integer> errorCodes = new HashSet<>(); errorCodes.add(2); errorCodes.add(6); ContainerRetryContext containerRetryContext5 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES, errorCodes, 3, 0); testContainerRetry(containerRetryContext5, 2, 3); HashSet<Integer> errorCodes2 = new HashSet<>(); errorCodes.add(143); ContainerRetryContext containerRetryContext6 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES, errorCodes2, 3, 0); // If exit code is 143(SIGTERM), it will not retry even it is in errorCodes. 
testContainerRetry(containerRetryContext6, 143, 0); } private void testContainerRetry(ContainerRetryContext containerRetryContext, int exitCode, int expectedRetries) throws Exception{ WrappedContainer wc = null; try { int retryTimes = 0; wc = new WrappedContainer(24, 314159265358979L, 4344, "yak", containerRetryContext); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); while (true) { wc.containerFailed(exitCode); if (wc.c.getContainerState() == ContainerState.RUNNING) { retryTimes ++; } else { break; } } Assert.assertEquals(expectedRetries, retryTimes); } finally { if (wc != null) { wc.finished(); } } } @Test public void testContainerRestartInterval() throws IOException { conf.setInt(YarnConfiguration.NM_CONTAINER_RETRY_MINIMUM_INTERVAL_MS, 2000); ContainerRetryContext containerRetryContext1 = ContainerRetryContext .newInstance(ContainerRetryPolicy.NEVER_RETRY, null, 3, 0); testContainerRestartInterval(containerRetryContext1, 0); ContainerRetryContext containerRetryContext2 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0); testContainerRestartInterval(containerRetryContext2, 2000); ContainerRetryContext containerRetryContext3 = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 4000); testContainerRestartInterval(containerRetryContext3, 4000); } private void testContainerRestartInterval( ContainerRetryContext containerRetryContext, int expectedRestartInterval) throws IOException { WrappedContainer wc = null; try { wc = new WrappedContainer(25, 314159265358980L, 4345, "yak", containerRetryContext); Assert.assertEquals( ((ContainerImpl)wc.c).getContainerRetryContext().getRetryInterval(), expectedRestartInterval); } finally { if (wc != null) { wc.finished(); } } } @Test public void testContainerRetryFailureValidityInterval() throws Exception { ContainerRetryContext containerRetryContext = ContainerRetryContext .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 1, 0, 
10); WrappedContainer wc = null; try { wc = new WrappedContainer(25, 314159265358980L, 4200, "test", containerRetryContext); ControlledClock clock = new ControlledClock(); wc.getRetryPolicy().setClock(clock); wc.initContainer(); wc.localizeResources(); wc.launchContainer(); wc.containerFailed(12); assertEquals(ContainerState.RUNNING, wc.c.getContainerState()); clock.setTime(20); wc.containerFailed(12); assertEquals(ContainerState.RUNNING, wc.c.getContainerState()); clock.setTime(40); wc.containerFailed(12); assertEquals(ContainerState.RUNNING, wc.c.getContainerState()); clock.setTime(45); wc.containerFailed(12); assertEquals(ContainerState.EXITED_WITH_FAILURE, wc.c.getContainerState()); } finally { if (wc != null) { wc.finished(); } } } private void verifyCleanupCall(WrappedContainer wc) throws Exception { ResourcesReleasedMatcher matchesReq = new ResourcesReleasedMatcher(wc.localResources, EnumSet.of( LocalResourceVisibility.PUBLIC, LocalResourceVisibility.PRIVATE, LocalResourceVisibility.APPLICATION), wc.c); verify(wc.localizerBus, atLeastOnce()).handle(argThat(matchesReq)); } private void verifyOutofBandHeartBeat(WrappedContainer wc) { verify(wc.context.getNodeStatusUpdater()).sendOutofBandHeartBeat(); } private void verifyDockerContainerCleanupCall(WrappedContainer wc) throws Exception { DeletionService delService = wc.context.getDeletionService(); verify(delService, times(1)).delete(argThat( new DockerContainerDeletionMatcher(delService, wc.c.getContainerId().toString()))); } // Argument matcher for matching container localization cleanup event. 
private static class LocalizationCleanupMatcher extends ArgumentMatcher<LocalizationEvent> { Container c; LocalizationCleanupMatcher(Container c){ this.c = c; } @Override public boolean matches(Object o) { if (!(o instanceof ContainerLocalizationCleanupEvent)) { return false; } ContainerLocalizationCleanupEvent evt = (ContainerLocalizationCleanupEvent) o; return (evt.getContainer() == c); } } private static class ResourcesReleasedMatcher extends LocalizationCleanupMatcher { final HashSet<LocalResourceRequest> resources = new HashSet<LocalResourceRequest>(); ResourcesReleasedMatcher(Map<String, LocalResource> allResources, EnumSet<LocalResourceVisibility> vis, Container c) throws URISyntaxException { super(c); for (Entry<String, LocalResource> e : allResources.entrySet()) { if (vis.contains(e.getValue().getVisibility())) { resources.add(new LocalResourceRequest(e.getValue())); } } } @Override public boolean matches(Object o) { // match event type and container. if(!super.matches(o)){ return false; } // match resources. ContainerLocalizationCleanupEvent evt = (ContainerLocalizationCleanupEvent) o; final HashSet<LocalResourceRequest> expected = new HashSet<LocalResourceRequest>(resources); for (Collection<LocalResourceRequest> rc : evt.getResources().values()) { for (LocalResourceRequest rsrc : rc) { if (!expected.remove(rsrc)) { return false; } } } return expected.isEmpty(); } } // Accept iff the resource payload matches. 
  /**
   * Matches a {@code ContainerLocalizationRequestEvent} whose requested
   * resources are exactly the expected set (filtered by visibility).
   */
  private static class ResourcesRequestedMatcher extends
      ArgumentMatcher<LocalizationEvent> {
    final HashSet<LocalResourceRequest> resources =
        new HashSet<LocalResourceRequest>();

    ResourcesRequestedMatcher(Map<String, LocalResource> allResources,
        EnumSet<LocalResourceVisibility> vis) throws URISyntaxException {
      for (Entry<String, LocalResource> e : allResources.entrySet()) {
        if (vis.contains(e.getValue().getVisibility())) {
          resources.add(new LocalResourceRequest(e.getValue()));
        }
      }
    }

    @Override
    public boolean matches(Object o) {
      // NOTE(review): unlike LocalizationCleanupMatcher, this cast is not
      // guarded by an instanceof check; a non-request event passed to this
      // matcher would throw ClassCastException — confirm intended.
      ContainerLocalizationRequestEvent evt =
          (ContainerLocalizationRequestEvent) o;
      // Every requested resource must be expected, and none may be missing.
      final HashSet<LocalResourceRequest> expected =
          new HashSet<LocalResourceRequest>(resources);
      for (Collection<LocalResourceRequest> rc : evt.getRequestedResources()
          .values()) {
        for (LocalResourceRequest rsrc : rc) {
          if (!expected.remove(rsrc)) {
            return false;
          }
        }
      }
      return expected.isEmpty();
    }
  }

  /**
   * Builds one randomly-named mock LocalResource of the given visibility,
   * with random (1024-2047) size and (2048-3071) timestamp values.
   */
  private static Entry<String, LocalResource> getMockRsrc(Random r,
      LocalResourceVisibility vis) {
    String name = Long.toHexString(r.nextLong());
    URL url = BuilderUtils.newURL("file", null, 0, "/local" + vis + "/" + name);
    LocalResource rsrc =
        BuilderUtils.newLocalResource(url, LocalResourceType.FILE, vis,
            r.nextInt(1024) + 1024L, r.nextInt(1024) + 2048L, false);
    return new SimpleEntry<String, LocalResource>(name, rsrc);
  }

  /**
   * Creates 5-10 PUBLIC, 5-10 PRIVATE and 2-4 APPLICATION mock resources
   * keyed by their random names.
   */
  private static Map<String,LocalResource> createLocalResources(Random r) {
    Map<String,LocalResource> localResources =
        new HashMap<String,LocalResource>();
    for (int i = r.nextInt(5) + 5; i >= 0; --i) {
      Entry<String,LocalResource> rsrc =
          getMockRsrc(r, LocalResourceVisibility.PUBLIC);
      localResources.put(rsrc.getKey(), rsrc.getValue());
    }
    for (int i = r.nextInt(5) + 5; i >= 0; --i) {
      Entry<String,LocalResource> rsrc =
          getMockRsrc(r, LocalResourceVisibility.PRIVATE);
      localResources.put(rsrc.getKey(), rsrc.getValue());
    }
    for (int i = r.nextInt(2) + 2; i >= 0; --i) {
      Entry<String,LocalResource> rsrc =
          getMockRsrc(r, LocalResourceVisibility.APPLICATION);
      localResources.put(rsrc.getKey(), rsrc.getValue());
    }
    return localResources;
  }

  /** Creates 5-10 service-data entries of 1024-2047 random bytes each. */
  private static Map<String,ByteBuffer> createServiceData(Random r) {
    Map<String,ByteBuffer> serviceData = new HashMap<String,ByteBuffer>();
    for (int i = r.nextInt(5) + 5; i >= 0; --i) {
      String service = Long.toHexString(r.nextLong());
      byte[] b = new byte[r.nextInt(1024) + 1024];
      r.nextBytes(b);
      serviceData.put(service, ByteBuffer.wrap(b));
    }
    return serviceData;
  }

  /**
   * Test harness wrapping a real ContainerImpl wired to a DrainDispatcher
   * and mocked event buses, so container state transitions can be driven
   * synchronously and verified against the mocks.
   */
  @SuppressWarnings("unchecked")
  private class WrappedContainer {
    final DrainDispatcher dispatcher;
    final EventHandler<LocalizationEvent> localizerBus;
    final EventHandler<ContainersLauncherEvent> launcherBus;
    final EventHandler<ContainersMonitorEvent> monitorBus;
    final EventHandler<AuxServicesEvent> auxBus;
    final EventHandler<ApplicationEvent> appBus;
    final EventHandler<LogHandlerEvent> LogBus;
    final EventHandler<ContainerSchedulerEvent> schedBus;
    final ContainersLauncher launcher;
    final ContainerLaunchContext ctxt;
    final ContainerId cId;
    final Container c;
    final Map<String, LocalResource> localResources;
    final Map<String, ByteBuffer> serviceData;
    final Context context = mock(Context.class);
    private final DeletionService delService;
    // Recorded by the state-transition listener installed in the constructor:
    // state before a transition -> event that triggered it.
    private final Map<ContainerState, ContainerEventType> initStateToEvent =
        new HashMap<>();
    // Event processed -> state the container ended up in afterwards.
    private final Map<ContainerEventType, ContainerState> eventToFinalState =
        new HashMap<>();

    WrappedContainer(int appId, long timestamp, int id, String user)
        throws IOException {
      this(appId, timestamp, id, user, null);
    }

    WrappedContainer(int appId, long timestamp, int id, String user,
        ContainerRetryContext containerRetryContext) throws IOException {
      this(appId, timestamp, id, user, true, false, containerRetryContext);
    }

    WrappedContainer(int appId, long timestamp, int id, String user,
        boolean withLocalRes, boolean withServiceData) throws IOException {
      this(appId, timestamp, id, user, withLocalRes, withServiceData, null);
    }

    /**
     * Full constructor: wires the dispatcher, mocked buses, launcher,
     * container token and launch context, then creates the ContainerImpl.
     */
    @SuppressWarnings("rawtypes")
    WrappedContainer(int appId, long timestamp, int id, String user,
        boolean withLocalRes, boolean withServiceData,
        ContainerRetryContext containerRetryContext) throws IOException {
      dispatcher = new DrainDispatcher();
      dispatcher.init(new Configuration());

      localizerBus = mock(EventHandler.class);
      launcherBus = mock(EventHandler.class);
      monitorBus = mock(EventHandler.class);
      auxBus = mock(EventHandler.class);
      appBus = mock(EventHandler.class);
      LogBus = mock(EventHandler.class);
      delService = mock(DeletionService.class);
      // Scheduler that launches immediately instead of queueing.
      schedBus = new ContainerScheduler(context, dispatcher, metrics, 0) {
        @Override
        protected void scheduleContainer(Container container) {
          container.sendLaunchEvent();
        }
      };
      dispatcher.register(LocalizationEventType.class, localizerBus);
      dispatcher.register(ContainersLauncherEventType.class, launcherBus);
      dispatcher.register(ContainersMonitorEventType.class, monitorBus);
      dispatcher.register(ContainerSchedulerEventType.class, schedBus);
      dispatcher.register(AuxServicesEventType.class, auxBus);
      dispatcher.register(ApplicationEventType.class, appBus);
      dispatcher.register(LogHandlerEventType.class, LogBus);

      when(context.getApplications()).thenReturn(
          new ConcurrentHashMap<ApplicationId, Application>());
      NMNullStateStoreService stateStore = new NMNullStateStoreService();
      when(context.getNMStateStore()).thenReturn(stateStore);
      NodeStatusUpdater nodeStatusUpdater = mock(NodeStatusUpdater.class);
      when(context.getNodeStatusUpdater()).thenReturn(nodeStatusUpdater);

      ContainerExecutor executor = mock(ContainerExecutor.class);
      Mockito.doNothing().when(executor).pauseContainer(any(Container.class));
      Mockito.doNothing().when(executor).resumeContainer(any(Container.class));
      launcher =
          new ContainersLauncher(context, dispatcher, executor, null, null);
      // create a mock ExecutorService, which will not really launch
      // ContainerLaunch at all.
      launcher.containerLauncher = mock(ExecutorService.class);
      Future future = mock(Future.class);
      when(launcher.containerLauncher.submit
          (any(Callable.class))).thenReturn(future);
      when(future.isDone()).thenReturn(false);
      when(future.cancel(false)).thenReturn(true);
      launcher.init(new Configuration());
      launcher.start();
      dispatcher.register(ContainersLauncherEventType.class, launcher);

      ctxt = mock(ContainerLaunchContext.class);
      org.apache.hadoop.yarn.api.records.Container mockContainer =
          mock(org.apache.hadoop.yarn.api.records.Container.class);
      cId = BuilderUtils.newContainerId(appId, 1, timestamp, id);
      when(mockContainer.getId()).thenReturn(cId);

      Resource resource = BuilderUtils.newResource(1024, 1);
      when(mockContainer.getResource()).thenReturn(resource);
      String host = "127.0.0.1";
      int port = 1234;
      long currentTime = System.currentTimeMillis();
      // Token valid for 10 seconds from now.
      ContainerTokenIdentifier identifier =
          new ContainerTokenIdentifier(cId, "127.0.0.1", user, resource,
              currentTime + 10000L, 123, currentTime, Priority.newInstance(0),
              0);
      Token token =
          BuilderUtils.newContainerToken(BuilderUtils.newNodeId(host, port),
              "password".getBytes(), identifier);
      when(mockContainer.getContainerToken()).thenReturn(token);

      if (withLocalRes) {
        Random r = new Random();
        long seed = r.nextLong();
        r.setSeed(seed);
        // Log the seed so a failing run can be reproduced.
        System.out.println("WrappedContainerLocalResource seed: " + seed);
        localResources = createLocalResources(r);
      } else {
        localResources = Collections.<String, LocalResource> emptyMap();
      }
      when(ctxt.getLocalResources()).thenReturn(localResources);

      if (withServiceData) {
        Random r = new Random();
        long seed = r.nextLong();
        r.setSeed(seed);
        System.out.println("ServiceData seed: " + seed);
        serviceData = createServiceData(r);
      } else {
        serviceData = Collections.<String, ByteBuffer> emptyMap();
      }
      when(ctxt.getServiceData()).thenReturn(serviceData);
      when(ctxt.getContainerRetryContext()).thenReturn(containerRetryContext);
      when(context.getDeletionService()).thenReturn(delService);

      // Listener recording pre/post transition info into the maps above.
      ContainerStateTransitionListener listener =
          new ContainerStateTransitionListener() {
            @Override
            public void init(Context cntxt) {}

            @Override
            public void preTransition(ContainerImpl op,
                ContainerState beforeState,
                ContainerEvent eventToBeProcessed) {
              initStateToEvent.put(beforeState, eventToBeProcessed.getType());
            }

            @Override
            public void postTransition(ContainerImpl op,
                ContainerState beforeState, ContainerState afterState,
                ContainerEvent processedEvent) {
              eventToFinalState.put(processedEvent.getType(), afterState);
            }
          };
      NodeManager.DefaultContainerStateListener multi =
          new NodeManager.DefaultContainerStateListener();
      multi.addListener(listener);
      when(context.getContainerStateTransitionListener()).thenReturn(multi);

      c = new ContainerImpl(conf, dispatcher, ctxt, null, metrics, identifier,
          context);
      dispatcher.register(ContainerEventType.class,
          new EventHandler<ContainerEvent>() {
            @Override
            public void handle(ContainerEvent event) {
              c.handle(event);
            }
          });
      dispatcher.start();
    }

    /** Blocks until all queued dispatcher events have been processed. */
    private void drainDispatcherEvents() {
      dispatcher.await();
    }

    public void finished() {
      dispatcher.stop();
    }

    public void initContainer() {
      c.handle(new ContainerEvent(cId, ContainerEventType.INIT_CONTAINER));
      drainDispatcherEvents();
    }

    public void resourceFailedContainer() {
      c.handle(new ContainerEvent(cId, ContainerEventType.RESOURCE_FAILED));
      drainDispatcherEvents();
    }

    public void handleContainerResourceFailedEvent() {
      c.handle(new ContainerResourceFailedEvent(cId, null, null));
      drainDispatcherEvents();
    }

    // Localize resources
    // Skip some resources so as to consider them failed
    public Map<Path, List<String>> doLocalizeResources(
        boolean checkLocalizingState, int skipRsrcCount)
        throws URISyntaxException {
      Path cache = new Path("file:///cache");
      Map<Path, List<String>> localPaths = new HashMap<Path, List<String>>();
      int counter = 0;
      for (Entry<String, LocalResource> rsrc : localResources.entrySet()) {
        if (counter++ < skipRsrcCount) {
          continue;
        }
        if (checkLocalizingState) {
          assertEquals(ContainerState.LOCALIZING,
              c.getContainerState());
        }
        LocalResourceRequest req = new LocalResourceRequest(rsrc.getValue());
        Path p = new Path(cache, rsrc.getKey());
        localPaths.put(p, Arrays.asList(rsrc.getKey()));
        // rsrc copied to p
        c.handle(new ContainerResourceLocalizedEvent(c.getContainerId(),
            req, p));
      }
      drainDispatcherEvents();
      return localPaths;
    }

    /** Localizes every resource, asserting LOCALIZING state throughout. */
    public Map<Path, List<String>> localizeResources()
        throws URISyntaxException {
      return doLocalizeResources(true, 0);
    }

    public void localizeResourcesFromInvalidState(int skipRsrcCount)
        throws URISyntaxException {
      doLocalizeResources(false, skipRsrcCount);
    }

    public void failLocalizeSpecificResource(String rsrcKey)
        throws URISyntaxException {
      LocalResource rsrc = localResources.get(rsrcKey);
      LocalResourceRequest req = new LocalResourceRequest(rsrc);
      Exception e = new Exception(FAKE_LOCALIZATION_ERROR);
      c.handle(new ContainerResourceFailedEvent(c.getContainerId(), req, e
          .getMessage()));
      drainDispatcherEvents();
    }

    // fail to localize some resources
    public void failLocalizeResources(int failRsrcCount)
        throws URISyntaxException {
      int counter = 0;
      for (Entry<String, LocalResource> rsrc : localResources.entrySet()) {
        if (counter >= failRsrcCount) {
          break;
        }
        ++counter;
        LocalResourceRequest req = new LocalResourceRequest(rsrc.getValue());
        Exception e = new Exception(FAKE_LOCALIZATION_ERROR);
        c.handle(new ContainerResourceFailedEvent(c.getContainerId(), req,
            e.getMessage()));
      }
      drainDispatcherEvents();
    }

    public void launchContainer() {
      c.handle(new ContainerEvent(cId,
          ContainerEventType.CONTAINER_LAUNCHED));
      drainDispatcherEvents();
    }

    public void containerSuccessful() {
      c.handle(new ContainerEvent(cId,
          ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS));
      drainDispatcherEvents();
    }

    public void containerResourcesCleanup() {
      c.handle(new ContainerEvent(cId,
          ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
      drainDispatcherEvents();
    }

    public void dockerContainerResourcesCleanup() {
      c.handle(new ContainerEvent(cId,
          ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
      verify(delService, times(1)).delete(argThat(
          new DockerContainerDeletionMatcher(delService, cId.toString())));
      drainDispatcherEvents();
    }

    /** Marks the launch context as a Docker-runtime container. */
    public void setupDockerContainerEnv() {
      Map<String, String> env = new HashMap<>();
      env.put(ContainerRuntimeConstants.ENV_CONTAINER_TYPE,
          ContainerRuntimeConstants.CONTAINER_RUNTIME_DOCKER);
      when(this.ctxt.getEnvironment()).thenReturn(env);
    }

    public void containerFailed(int exitCode) {
      String diagnosticMsg = "Container completed with exit code " + exitCode;
      c.handle(new ContainerExitEvent(cId,
          ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, exitCode,
          diagnosticMsg));
      ContainerStatus containerStatus = c.cloneAndGetContainerStatus();
      assert containerStatus.getDiagnostics().contains(diagnosticMsg);
      assert containerStatus.getExitStatus() == exitCode;
      drainDispatcherEvents();
      // If container needs retry, relaunch it
      if (c.getContainerState() == ContainerState.RELAUNCHING) {
        launchContainer();
      }
    }

    public void killContainer() {
      c.handle(new ContainerKillEvent(cId,
          ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
          "KillRequest"));
      drainDispatcherEvents();
    }

    public void pauseContainer() {
      c.handle(new ContainerPauseEvent(cId,
          "PauseRequest"));
      drainDispatcherEvents();
    }

    public void resumeContainer() {
      c.handle(new ContainerResumeEvent(cId,
          "ResumeRequest"));
      drainDispatcherEvents();
    }

    public void containerKilledOnRequest() {
      int exitCode = ContainerExitStatus.KILLED_BY_RESOURCEMANAGER;
      String diagnosticMsg = "Container completed with exit code " + exitCode;
      c.handle(new ContainerExitEvent(cId,
          ContainerEventType.CONTAINER_KILLED_ON_REQUEST, exitCode,
          diagnosticMsg));
      ContainerStatus containerStatus = c.cloneAndGetContainerStatus();
      assert containerStatus.getDiagnostics().contains(diagnosticMsg);
      assert containerStatus.getExitStatus() == exitCode;
      drainDispatcherEvents();
    }

    public int getLocalResourceCount() {
      return localResources.size();
    }

    public String getDiagnostics() {
      return c.cloneAndGetContainerStatus().getDiagnostics();
    }

    public SlidingWindowRetryPolicy getRetryPolicy() {
      return ((ContainerImpl)c).getRetryPolicy();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @author Oleg V. Khaschansky
 * @version $Revision$
 */
package org.apache.harmony.awt.gl.color;

import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentSampleModel;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.awt.image.SampleModel;
import java.awt.image.SinglePixelPackedSampleModel;
import java.util.ArrayList;

import org.apache.harmony.awt.gl.AwtImageBackdoorAccessor;
import org.apache.harmony.awt.internal.nls.Messages;

/**
 * This class converts Java color/sample models to the LCMS pixel formats.
 * It also encapsulates all the information about the image format that the
 * native CMM needs in order to read/write data.
 *
 * At present planar formats (multiple bands) are not supported
 * and they are handled as a common (custom) case.
 * Samples that are not 1 - 7 bytes wide and a multiple of 8 bits are
 * also handled as custom (and won't be supported in the near future).
 */
class NativeImageFormat {
    //////////////////////////////////////////////
    //  LCMS Pixel types
    private static final int PT_ANY = 0; // Don't check colorspace
    // 1 & 2 are reserved
    private static final int PT_GRAY = 3;
    private static final int PT_RGB = 4;
    // Skipping other since we don't use them here

    ///////////////////////////////////////////////
    // Conversion of predefined BufferedImage formats to LCMS formats
    private static final int INT_RGB_LCMS_FMT =
        colorspaceSh(PT_RGB)|
        extraSh(1)|
        channelsSh(3)|
        bytesSh(1)|
        doswapSh(1)|
        swapfirstSh(1);

    // ARGB shares the RGB layout; the alpha byte is handled separately
    // via alphaOffset (see createNativeImageFormat).
    private static final int INT_ARGB_LCMS_FMT = INT_RGB_LCMS_FMT;

    private static final int INT_BGR_LCMS_FMT =
        colorspaceSh(PT_RGB)|
        extraSh(1)|
        channelsSh(3)|
        bytesSh(1);

    private static final int THREE_BYTE_BGR_LCMS_FMT =
        colorspaceSh(PT_RGB)|
        channelsSh(3)|
        bytesSh(1)|
        doswapSh(1);

    private static final int FOUR_BYTE_ABGR_LCMS_FMT =
        colorspaceSh(PT_RGB)|
        extraSh(1)|
        channelsSh(3)|
        bytesSh(1)|
        doswapSh(1);

    private static final int BYTE_GRAY_LCMS_FMT =
        colorspaceSh(PT_GRAY)|
        channelsSh(1)|
        bytesSh(1);

    private static final int USHORT_GRAY_LCMS_FMT =
        colorspaceSh(PT_GRAY)|
        channelsSh(1)|
        bytesSh(2);

    // LCMS format packed into 32 bit value. For description
    // of this format refer to LCMS documentation.
    private int cmmFormat = 0;

    // Dimensions
    private int rows = 0;
    private int cols = 0;

    // Scanline may contain some padding in the end; -1 means "no padding"
    private int scanlineStride = -1;

    // Primitive array extracted from the image's DataBuffer
    private Object imageData;
    // It's possible to have offset from the beginning of the array
    private int dataOffset;

    // If the image has an alpha channel, its byte offset within a pixel
    // goes here; -1 means no alpha (or alpha not to be copied).
    private int alphaOffset = -1;

    // initializes proper field IDs
    private static native void initIDs();

    static {
        NativeCMM.loadCMM();
        initIDs();
    }

    ////////////////////////////////////
    // LCMS image format encoders
    ////////////////////////////////////
    // Each encoder shifts its value into the bit-field position that the
    // native LCMS library expects inside the packed 32-bit format word.
    private static int colorspaceSh(int s) {
        return (s << 16);
    }

    private static int swapfirstSh(int s) {
        return (s << 14);
    }

    // flavorSh/planarSh/endianSh are currently unused here but kept so the
    // full LCMS field layout is documented in code.
    private static int flavorSh(int s) {
        return (s << 13);
    }

    private static int planarSh(int s) {
        return (s << 12);
    }

    private static int endianSh(int s) {
        return (s << 11);
    }

    private static int doswapSh(int s) {
        return (s << 10);
    }

    private static int extraSh(int s) {
        return (s << 7);
    }

    private static int channelsSh(int s) {
        return (s << 3);
    }

    private static int bytesSh(int s) {
        return s;
    }
    ////////////////////////////////////
    // End of LCMS image format encoders
    ////////////////////////////////////

    // Accessors
    Object getChannelData() {
        return imageData;
    }

    int getNumCols() {
        return cols;
    }

    int getNumRows() {
        return rows;
    }

    // Constructors
    public NativeImageFormat() {
    }

    /**
     * Simple image layout for common case with
     * not optimized workflow.
     *
     * For hifi colorspaces with 5+ color channels imgData
     * should be <code>byte</code> array.
     *
     * For common colorspaces with up to 4 color channels it
     * should be <code>short</code> array.
     *
     * Alpha channel is handled by caller, not by CMS.
     *
     * Color channels are in their natural order (not BGR but RGB).
     *
     * @param imgData - array of <code>byte</code> or <code>short</code>
     * @param nChannels - number of channels
     * @param nRows - number of scanlines in the image
     * @param nCols - number of pixels in one row of the image
     */
    public NativeImageFormat(Object imgData, int nChannels, int nRows, int nCols) {
        if (imgData instanceof short[]) {
            cmmFormat |= bytesSh(2);
        } else if (imgData instanceof byte[]) {
            cmmFormat |= bytesSh(1);
        } else
            // awt.47=First argument should be byte or short array
            throw new IllegalArgumentException(Messages.getString("awt.47")); //$NON-NLS-1$

        cmmFormat |= channelsSh(nChannels);

        rows = nRows;
        cols = nCols;

        imageData = imgData;

        dataOffset = 0;
    }

    /**
     * Deduces image format from the buffered image type
     * or color and sample models.
     * @param bi - image
     * @return image format object, or null if the format is unsupported
     */
    public static NativeImageFormat createNativeImageFormat(BufferedImage bi) {
        NativeImageFormat fmt = new NativeImageFormat();

        // Fast path: well-known BufferedImage types map to precomputed
        // LCMS format words.
        switch (bi.getType()) {
            case BufferedImage.TYPE_INT_RGB: {
                fmt.cmmFormat = INT_RGB_LCMS_FMT;
                break;
            }

            case BufferedImage.TYPE_INT_ARGB:
            case BufferedImage.TYPE_INT_ARGB_PRE: {
                fmt.cmmFormat = INT_ARGB_LCMS_FMT;
                fmt.alphaOffset = 3;
                break;
            }

            case BufferedImage.TYPE_INT_BGR: {
                fmt.cmmFormat = INT_BGR_LCMS_FMT;
                break;
            }

            case BufferedImage.TYPE_3BYTE_BGR: {
                fmt.cmmFormat = THREE_BYTE_BGR_LCMS_FMT;
                break;
            }

            case BufferedImage.TYPE_4BYTE_ABGR_PRE:
            case BufferedImage.TYPE_4BYTE_ABGR: {
                fmt.cmmFormat = FOUR_BYTE_ABGR_LCMS_FMT;
                fmt.alphaOffset = 0;
                break;
            }

            case BufferedImage.TYPE_BYTE_GRAY: {
                fmt.cmmFormat = BYTE_GRAY_LCMS_FMT;
                break;
            }

            case BufferedImage.TYPE_USHORT_GRAY: {
                fmt.cmmFormat = USHORT_GRAY_LCMS_FMT;
                break;
            }

            case BufferedImage.TYPE_BYTE_BINARY:
            case BufferedImage.TYPE_USHORT_565_RGB:
            case BufferedImage.TYPE_USHORT_555_RGB:
            case BufferedImage.TYPE_BYTE_INDEXED: {
                // A bunch of unsupported formats
                return null;
            }

            default:
                break; // Try to look at sample model and color model
        }

        // Slow path: derive the format from the sample/color models.
        if (fmt.cmmFormat == 0) {
            ColorModel cm = bi.getColorModel();
            SampleModel sm = bi.getSampleModel();

            if (sm instanceof ComponentSampleModel) {
                ComponentSampleModel csm = (ComponentSampleModel) sm;
                fmt.cmmFormat = getFormatFromComponentModel(csm, cm.hasAlpha());
                fmt.scanlineStride = calculateScanlineStrideCSM(csm, bi.getRaster());
            } else if (sm instanceof SinglePixelPackedSampleModel) {
                SinglePixelPackedSampleModel sppsm = (SinglePixelPackedSampleModel) sm;
                fmt.cmmFormat = getFormatFromSPPSampleModel(sppsm, cm.hasAlpha());
                fmt.scanlineStride = calculateScanlineStrideSPPSM(sppsm, bi.getRaster());
            }

            if (cm.hasAlpha())
                fmt.alphaOffset = calculateAlphaOffset(sm, bi.getRaster());
        }

        if (fmt.cmmFormat == 0)
            return null;

        if (!fmt.setImageData(bi.getRaster().getDataBuffer())) {
            return null;
        }

        fmt.rows = bi.getHeight();
        fmt.cols = bi.getWidth();

        fmt.dataOffset = bi.getRaster().getDataBuffer().getOffset();

        return fmt;
    }

    /**
     * Deduces image format from the raster sample model.
     * Rasters are assumed to carry no alpha channel.
     * @param r - raster
     * @return image format object, or null if the format is unsupported
     */
    public static NativeImageFormat createNativeImageFormat(Raster r) {
        NativeImageFormat fmt = new NativeImageFormat();
        SampleModel sm = r.getSampleModel();

        // Assume that there's no alpha
        if (sm instanceof ComponentSampleModel) {
            ComponentSampleModel csm = (ComponentSampleModel) sm;
            fmt.cmmFormat = getFormatFromComponentModel(csm, false);
            fmt.scanlineStride = calculateScanlineStrideCSM(csm, r);
        } else if (sm instanceof SinglePixelPackedSampleModel) {
            SinglePixelPackedSampleModel sppsm = (SinglePixelPackedSampleModel) sm;
            fmt.cmmFormat = getFormatFromSPPSampleModel(sppsm, false);
            fmt.scanlineStride = calculateScanlineStrideSPPSM(sppsm, r);
        }

        if (fmt.cmmFormat == 0)
            return null;

        fmt.cols = r.getWidth();
        fmt.rows = r.getHeight();

        fmt.dataOffset = r.getDataBuffer().getOffset();

        if (!fmt.setImageData(r.getDataBuffer()))
            return null;

        return fmt;
    }

    /**
     * Obtains LCMS format from the component sample model by recognizing
     * one of the four supported band orders (RGBA/ARGB/BGRA/ABGR).
     * @param sm - sample model
     * @param hasAlpha - true if there's an alpha channel
     * @return LCMS format, or 0 if the layout is unsupported
     */
    private static int getFormatFromComponentModel(ComponentSampleModel sm, boolean hasAlpha) {
        // Multiple data arrays (banks) not supported
        int bankIndex = sm.getBankIndices()[0];
        for (int i=1; i < sm.getNumBands(); i++) {
            if (sm.getBankIndices()[i] != bankIndex) {
                return 0;
            }
        }

        int channels = hasAlpha ? sm.getNumBands()-1 : sm.getNumBands();
        int extra = hasAlpha ? 1 : 0;
        int bytes = 1;
        switch (sm.getDataType()) {
            case DataBuffer.TYPE_BYTE:
                bytes = 1; break;
            case DataBuffer.TYPE_SHORT:
            case DataBuffer.TYPE_USHORT:
                bytes = 2; break;
            case DataBuffer.TYPE_INT:
                bytes = 4; break;
            case DataBuffer.TYPE_DOUBLE:
                bytes = 0; break; // LCMS encodes doubles as bytesSh(0)
            default:
                return 0; // Unsupported data type
        }

        int doSwap = 0;
        int swapFirst = 0;
        boolean knownFormat = false;

        int i;

        // "RGBA" — band offsets in natural ascending order
        for (i=0; i < sm.getNumBands(); i++) {
            if (sm.getBandOffsets()[i] != i) break;
        }
        if (i == sm.getNumBands()) { // Ok, it is it
            doSwap = 0; swapFirst = 0;
            knownFormat = true;
        }

        // "ARGB" — colors shifted by one, alpha first
        if (!knownFormat) {
            for (i=0; i < sm.getNumBands()-1; i++) {
                if (sm.getBandOffsets()[i] != i+1) break;
            }
            if (sm.getBandOffsets()[i] == 0) i++;
            if (i == sm.getNumBands()) { // Ok, it is it
                doSwap = 0; swapFirst = 1;
                knownFormat = true;
            }
        }

        // "BGRA" — colors reversed, alpha last
        if (!knownFormat) {
            for (i=0; i < sm.getNumBands()-1; i++) {
                if (sm.getBandOffsets()[i] != sm.getNumBands() - 2 - i) break;
            }
            if (sm.getBandOffsets()[i] == sm.getNumBands()-1) i++;
            if (i == sm.getNumBands()) { // Ok, it is it
                doSwap = 1; swapFirst = 1;
                knownFormat = true;
            }
        }

        // "ABGR" — fully reversed
        if (!knownFormat) {
            for (i=0; i < sm.getNumBands(); i++) {
                if (sm.getBandOffsets()[i] != sm.getNumBands() - 1 - i) break;
            }
            if (i == sm.getNumBands()) { // Ok, it is it
                doSwap = 1; swapFirst = 0;
                knownFormat = true;
            }
        }

        // XXX - Planar formats are not supported yet
        if (!knownFormat)
            return 0;

        return
            channelsSh(channels) |
            bytesSh(bytes) |
            extraSh(extra) |
            doswapSh(doSwap) |
            swapfirstSh(swapFirst);
    }

    /**
     * Obtains LCMS format from the single pixel packed sample model.
     * Works only when every band mask selects a whole 1/2/4-byte slot.
     * @param sm - sample model
     * @param hasAlpha - true if there's an alpha channel
     * @return LCMS format, or 0 if the layout is unsupported
     */
    private static int getFormatFromSPPSampleModel(SinglePixelPackedSampleModel sm,
            boolean hasAlpha) {
        // Can we extract bytes?
        int mask = sm.getBitMasks()[0] >>> sm.getBitOffsets()[0];
        if (!(mask == 0xFF || mask == 0xFFFF || mask == 0xFFFFFFFF))
            return 0;

        // All masks are same?
        for (int i = 1; i < sm.getNumBands(); i++) {
            if ((sm.getBitMasks()[i] >>> sm.getBitOffsets()[i]) != mask)
                return 0;
        }

        int pixelSize = 0;
        // Check if data type is supported
        if (sm.getDataType() == DataBuffer.TYPE_USHORT)
            pixelSize = 2;
        else if (sm.getDataType() == DataBuffer.TYPE_INT)
            pixelSize = 4;
        else
            return 0;

        int bytes = 0;
        switch (mask) {
            case 0xFF:
                bytes = 1;
                break;
            case 0xFFFF:
                bytes = 2;
                break;
            case 0xFFFFFFFF:
                bytes = 4;
                break;
            default:
                return 0;
        }

        int channels = hasAlpha ? sm.getNumBands()-1 : sm.getNumBands();
        int extra = hasAlpha ? 1 : 0;
        extra += pixelSize/bytes - sm.getNumBands(); // Unused bytes?

        // Form an ArrayList containing offset for each band
        // NOTE(review): new Integer(...) is deprecated boxing; Integer.valueOf
        // (or autoboxing) would be the modern equivalent.
        ArrayList<Integer> offsetsLst = new ArrayList<Integer>();
        for (int k=0; k < sm.getNumBands(); k++) {
            offsetsLst.add(new Integer(sm.getBitOffsets()[k]/(bytes*8)));
        }

        // Add offsets for unused space
        for (int i=0; i<pixelSize/bytes; i++) {
            if (offsetsLst.indexOf(new Integer(i)) < 0)
                offsetsLst.add(new Integer(i));
        }

        int offsets[] = new int[pixelSize/bytes];
        for (int i=0; i<offsetsLst.size(); i++) {
            offsets[i] = offsetsLst.get(i).intValue();
        }

        int doSwap = 0;
        int swapFirst = 0;
        boolean knownFormat = false;

        int i;

        // NOTE(review): offsets.length is pixelSize/bytes, but the pattern
        // loops below iterate i < pixelSize. When bytes > 1 (e.g. a USHORT
        // model with mask 0xFFFF) this looks like it can index past the end
        // of offsets — compare with getFormatFromComponentModel, which bounds
        // its loops by sm.getNumBands(). Confirm intended bound.

        // "RGBA"
        for (i=0; i < pixelSize; i++) {
            if (offsets[i] != i) break;
        }
        if (i == pixelSize) { // Ok, it is it
            doSwap = 0; swapFirst = 0;
            knownFormat = true;
        }

        // "ARGB"
        if (!knownFormat) {
            for (i=0; i < pixelSize-1; i++) {
                if (offsets[i] != i+1) break;
            }
            if (offsets[i] == 0) i++;
            if (i == pixelSize) { // Ok, it is it
                doSwap = 0; swapFirst = 1;
                knownFormat = true;
            }
        }

        // "BGRA"
        if (!knownFormat) {
            for (i=0; i < pixelSize-1; i++) {
                if (offsets[i] != pixelSize - 2 - i) break;
            }
            if (offsets[i] == pixelSize-1) i++;
            if (i == pixelSize) { // Ok, it is it
                doSwap = 1; swapFirst = 1;
                knownFormat = true;
            }
        }

        // "ABGR"
        if (!knownFormat) {
            for (i=0; i < pixelSize; i++) {
                if (offsets[i] != pixelSize - 1 - i) break;
            }
            if (i == pixelSize) { // Ok, it is it
                doSwap = 1; swapFirst = 0;
                knownFormat = true;
            }
        }

        // XXX - Planar formats are not supported yet
        if (!knownFormat)
            return 0;

        return
            channelsSh(channels) |
            bytesSh(bytes) |
            extraSh(extra) |
            doswapSh(doSwap) |
            swapfirstSh(swapFirst);
    }

    /**
     * Obtains data array from the DataBuffer object
     * @param db - data buffer
     * @return - true if successful
     */
    private boolean setImageData(DataBuffer db) {
        AwtImageBackdoorAccessor dbAccess = AwtImageBackdoorAccessor.getInstance();
        try {
            imageData = dbAccess.getData(db);
        } catch (IllegalArgumentException e) {
            return false; // Unknown data buffer type
        }

        return true;
    }

    /**
     * Calculates scanline stride in bytes
     * @param csm - component sample model
     * @param r - raster
     * @return scanline stride in bytes, or -1 when rows are tightly packed
     */
    private static int calculateScanlineStrideCSM(ComponentSampleModel csm, Raster r) {
        if (csm.getScanlineStride() != csm.getPixelStride()*csm.getWidth()) {
            int dataTypeSize = DataBuffer.getDataTypeSize(r.getDataBuffer().getDataType()) / 8;
            return csm.getScanlineStride()*dataTypeSize;
        }
        return -1;
    }

    /**
     * Calculates scanline stride in bytes
     * @param sppsm - sample model
     * @param r - raster
     * @return scanline stride in bytes, or -1 when rows are tightly packed
     */
    private static int calculateScanlineStrideSPPSM(SinglePixelPackedSampleModel sppsm, Raster r) {
        if (sppsm.getScanlineStride() != sppsm.getWidth()) {
            int dataTypeSize = DataBuffer.getDataTypeSize(r.getDataBuffer().getDataType()) / 8;
            return sppsm.getScanlineStride()*dataTypeSize;
        }
        return -1;
    }

    /**
     * Calculates byte offset of the alpha channel from the beginning of the pixel data.
     * Assumes alpha is the last band of the sample model.
     * @param sm - sample model
     * @param r - raster
     * @return byte offset of the alpha channel, or -1 to skip alpha copying
     */
    private static int calculateAlphaOffset(SampleModel sm, Raster r) {
        if (sm instanceof ComponentSampleModel) {
            ComponentSampleModel csm = (ComponentSampleModel) sm;
            int dataTypeSize =
                DataBuffer.getDataTypeSize(r.getDataBuffer().getDataType()) / 8;
            return
                csm.getBandOffsets()[csm.getBandOffsets().length - 1] * dataTypeSize;
        } else if (sm instanceof SinglePixelPackedSampleModel) {
            SinglePixelPackedSampleModel sppsm = (SinglePixelPackedSampleModel) sm;
            return sppsm.getBitOffsets()[sppsm.getBitOffsets().length - 1] / 8;
        } else {
            return -1; // No offset, don't copy alpha
        }
    }
}